所以我试图让我的脚本从我的数据库中取出第一篇文章(它来自谷歌新闻的第一页)并显示描述,标题等,但我也希望显示一个巨大的图像来自文章的原始来源(如果有的话 - 但很可能是一个代表总是如此的文章),所以我试图再次请求进入我刚刚从我的信息中删除的信息的链接数据库,但响应给我“未定义”。第一个请求有效。
这是代码(第一个请求是“var checkArticle =”而第二个是底部函数,名为“function DiveIn()”,第二个函数在console.log中给我未定义但我这样做与第一个请求完全一样,所以它应该提供数据,但不是)
const express = require('express');
const router = express.Router();
const request = require('request');
const request2 = require('request');
const cheerio = require('cheerio');
const fs = require('fs');
const AWS = require('aws-sdk');
const ep = new AWS.Endpoint('*censored*.com');
const db = new AWS.DynamoDB.DocumentClient({ // Dynamo database constructor
"apiVersion": '2012-08-10',
"region": 'us-west-1',
"endpoint": ep,
"accessKeyId": '*censored*',
"secretAccessKey": '+v*censored*o6n'
});
require('events').EventEmitter.defaultMaxListeners = Infinity;
var url = "https://www.google.com/search?hl=en&gl=us&tbm=nws&authuser=0&q=u.s.&oq=u.s.&num=100&start=0";
//------ START MIDDLE WARE STUFF -------------- --------------------------------------------------------
router.use(function timeLog (req, res, next) {
next();
});
/* GET home page. */
router.get('/', function(req, res, next) {
res.render('index');
});
module.exports = router; /*function(req,res,next) {
res.render('index');
};*/
//------ END MIDDLE WARE STUFF -------------- --------------------------------------------------------
// ------------------------------UPDATE FRONT PAGE ARTICLE DATABASE------------------------------------ //
// this will be called after we collect our "subject" article"
var checkArticle = function (title, date, source, url, description) {
this.title = title;
this.date = date;
this.url = url;
this.source = source;
this.description = description;
// set it to date inserted to database
date = new Date().toLocaleString();
let params = {
TableName: "Articles"
};
db.scan(params, function(err,scannedData) {
if (err) throw err;
// scanData = target check
// title + date + url + author = our subject article to check
//asynchronously invoke a seperate function to display the current news right away, so there is no wait time
console.log(`\n\n-------------------------------Subject Article:------------------------------- \n`);
console.log("Title: " + title);
console.log("Date: " + date);
console.log("Source: " + source)
console.log("description: " + description + '\n');
console.log("URL: " + url);
// pass this step to next function
return Result(title, date, source, url, description, scannedData);
});
function Result(title, date, source, url, description,scannedData) {
console.log("Peforming check...")
let counter = 0;
// peform check on all items. using 'counter' as the logic variable
for (var i = 0; i < scannedData.Items.length; i++) {
if (title === scannedData.Items[i].title) {
counter++;
console.log("We found a match. We are not performing anymore operations. \n -----------------------------------------------------------------------------");
return console.log("Stopped from inserting article to database.");
}
}
// if a match was found, it will execute this. Adds item to database
if (counter === 0) {
let params = {
TableName: "Articles",
Item: {
title: title,
date: date,
url: url,
source: source,
description: description
}
}
db.put(params, function(err) {
if (err) throw err;
else { return; }
});
console.log("No matches found. Inserted article to 'Articles' database");
}
}
}
// This is for Front Page, news portion initiation
request(url, 'w+', function(err,data,body) {
if (err) throw err;
var $ = cheerio.load(body);
//console.log($(".slp").prev().eq(2).text()); // title
//console.log($(".slp").prev().children().eq(0).attr('href')); // url
//console.log($(".slp").children().html()); // New York Times - 3 hours ago //source - date
//console.log($("div.st").eq(0).text()); // description
let regExp = / - /;
let urlSlice1 = $(".slp").prev().children().eq(0).attr('href').slice(7,Infinity);
let urlSlice2 = urlSlice1.search(/&/);
let urlSliced = urlSlice1.slice(0, urlSlice2);
let hyphenBreakerForSource = $(".slp").children().html().search(regExp, ""); // gives the index # of found regExp
// got our meat of the burger. // / / // //
let source = $(".slp").children().html().slice(0,hyphenBreakerForSource); //source
let date = $(".slp").children().html().slice(hyphenBreakerForSource + 3); //date
let url = urlSliced;
url2 = urlSliced;
let description = $("div.st").eq(0).text();
let title = $(".slp").prev().eq(0).text();
// /// / / //// / / /
checkArticle(title, date, source, url, description);
});
// ------------------------------END FRONT PAGE ARTICLE OPERATION ------------------------------------- //
DiveIn();
function DiveIn() {
let params = { TableName: "Articles" }
db.scan(params, function(err,articleData) {
let DiveInUrl = articleData.Items[0].url;
request2(DiveInUrl, 'w+', function(err,body,DiveInData) {
console.log(DiveinData);
}) ;
});
}