I'm using Puppeteer to build a web crawler.
A loop passes valid URLs to a function (loadPage(URL)), but after the scraper has processed N pages I would like to pause the loop.
I thought about using a timeout, i.e. letting Puppeteer run for some average amount of time, but I don't think that is the right solution. That said, I'm open to any suggestions.
Thanks.
--- EDIT for future reference ---
const puppeteer = require('puppeteer');
const stores = require('./data.json').stores;

const MAX_CONCURRENT_TASKS = 5;
let TOTAL_PAGES = 0;

const start = async () => {
    //@TODO Create a separate log routine
    console.log('Total de Lojas', stores.length);

    let activatedStores = [];
    for (const store of stores) {
        if (store.active) {
            activatedStores.push(store);
        }
    }

    //@TODO Create a separate log routine
    console.log('Lojas ativas', activatedStores.length);

    try {
        const browser = await puppeteer.launch({
            headless: false // Debug purposes
        });
        const pagePool = await Promise.all(Array.from(
            new Array(MAX_CONCURRENT_TASKS),
            () => browser.newPage()
        ));

        while (activatedStores.length !== 0) {
            //@TODO Create a separate log routine
            console.log(`Stores left: ${Math.max(activatedStores.length - MAX_CONCURRENT_TASKS, 0)}!`);
            await Promise.all(
                activatedStores.splice(0, MAX_CONCURRENT_TASKS)
                    .map((store, i) => loadPage(store.siteMap, pagePool[i], store))
            );
        }

        await browser.close();
    } catch (error) {
        //@TODO create function to generate error logs
        console.error(error);
    }
};

/**
 * Function to load pages
 *
 * @param {string} url - a valid URL
 * @param {puppeteer.Page} page - a page created with browser.newPage()
 * @param {Object} store - the settings of this store
 */
const loadPage = async (url, page, store) => {
    const opts = {
        timeout: 0,
        waitUntil: 'domcontentloaded'
    };
    await page.setUserAgent('Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36');
    await page.goto(url, opts);
    //@TODO Create a separate log routine
    console.log(await page.evaluate(() => document.location.href));
};

start();
Answer 0: (score: 2)
I can't give you a code example here, but you should definitely look into the concept of iterators and generators. Generators use the principle of a non-blocking pause, which lets you run some computation, stop and execute other logic, and then return to your computation.
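A minimal sketch of that idea using an async generator; the names (crawlInBatches, fakeLoadPage), the batch size, and the delay are all illustrative, not taken from the question's code:

// Illustrative async generator: each `yield` is a non-blocking pause point
// where control returns to the caller, which decides when to resume.
async function* crawlInBatches(urls, batchSize, loadPage) {
    for (let i = 0; i < urls.length; i += batchSize) {
        const batch = urls.slice(i, i + batchSize);
        await Promise.all(batch.map((url) => loadPage(url)));
        yield batch.length; // paused here until the caller asks for the next batch
    }
}

// Example driver: wait a fixed (arbitrary) delay between batches.
(async () => {
    const urls = ['https://example.org/a', 'https://example.org/b', 'https://example.org/c'];
    const fakeLoadPage = async (url) => console.log('visited', url); // stand-in for a real Puppeteer call
    for await (const count of crawlInBatches(urls, 2, fakeLoadPage)) {
        console.log(`Processed ${count} page(s), pausing before the next batch...`);
        await new Promise((resolve) => setTimeout(resolve, 1000));
    }
})();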
Answer 1: (score: 2)
Without code it is hard to tell what exactly you need. Maybe this example can give you some hints.
'use strict';

const puppeteer = require('puppeteer');

(async function main() {
    try {
        const urls = Array.from(
            new Array(20),
            (_, i) => `https://example.org/?foo=${i}`
        );
        const numberOfConcurrentTasks = 3;

        const browser = await puppeteer.launch();
        const pagePool = await Promise.all(Array.from(
            new Array(numberOfConcurrentTasks),
            () => browser.newPage()
        ));

        while (urls.length !== 0) {
            console.log(`URLs left: ${urls.length}.`);
            await Promise.all(
                urls.splice(0, numberOfConcurrentTasks)
                    .map((url, i) => processDoc(url, pagePool[i]))
            );
        }

        await browser.close();
    } catch (err) {
        console.error(err);
    }
})();

async function processDoc(url, page) {
    await page.goto(url);
    console.log(await page.evaluate(() => document.location.href));
}
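If an explicit pause after every batch is wanted (rather than just the natural wait imposed by Promise.all), a delay could be added inside the while loop above; a minimal sketch, assuming a fixed delay is acceptable (the 5-second value is arbitrary):

// Hypothetical helper: resolves after the given number of milliseconds.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

// The same while loop as above, with a pause between batches:
while (urls.length !== 0) {
    console.log(`URLs left: ${urls.length}.`);
    await Promise.all(
        urls.splice(0, numberOfConcurrentTasks)
            .map((url, i) => processDoc(url, pagePool[i]))
    );
    await sleep(5000); // pause before picking up the next batch of URLs
}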