I'm learning Node.js/Puppeteer, and I'm having trouble getting Puppeteer to fill the search bar of a book website with UPC numbers taken from a CSV file. With a single hard-coded UPC number, I managed to get the scraper working on the site.
But when I use an async function to parse the UPC values out of the CSV and feed them to the scraper, I get the following error:
(node:5876) UnhandledPromiseRejectionWarning: TypeError: text is not iterable
Here is a sample of the CSV:
DATE,QUANTITY,NAME,CODECONTENT,CODETYPE
2021-10-13 20:16:44 +1100,1,"Book 1","9781250035288",9
2021-10-13 20:16:40 +1100,1,"Book 2","9781847245601",9
2021-10-13 20:16:35 +1100,1,"Book 3","9780007149247",9
2021-10-13 20:16:30 +1100,1,"Book 4","9780749958084",9
2021-10-13 20:16:26 +1100,1,"Book 5","9781405920384",9
Is there something wrong with the way I parse the CSV?
function readCsvAsync(filename, delimiter = ',', encoding = 'utf-8') {
    return new Promise((resolve, reject) => {
        const rows = [];
        try {
            fs.createReadStream(filename, { encoding: encoding })
                .pipe(parse({ delimiter: delimiter }))
                .on('data', (row) => rows.push(+row.CODECONTENT))
                .on('end', () => resolve(rows))
                .on('error', reject);
        } catch (err) {
            reject(err);
        }
    });
}
async function upcData() {
    try {
        const rows = await readCsvAsync('Book_Bulk.csv', ':');
        // console.log(csvData);
        // call puppeteer or whatever
        return rows;
    } catch (err) {
        console.log(err);
    }
}
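To separate the parsing from the scraping, I check what readCsvAsync actually resolves to on its own with something like this (assuming the sample above is saved as Book_Bulk.csv):
upcData().then((rows) => {
    // Log the resolved array and the type of its entries. The unary + in
    // rows.push(+row.CODECONTENT) above coerces each CODECONTENT field to a
    // number instead of keeping it as a string.
    console.log(rows);
    console.log(typeof rows[0]); // 'number'
});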
The full code is below:
const puppeteer = require('puppeteer');
const parse = require('csv-parser');
const fs = require('fs');
async function getpageData(page, upc) {
    await page.goto('https://www.bookdepository.com/');
    await page.type('#book-search-form > div.el-wrap.header-search-el-wrap > input.text-input', upc);
    await page.click('#book-search-form > div.el-wrap.header-search-el-wrap > button');
    // Title
    await page.waitForSelector('.item-info h1');
    const title = await page.$eval('.item-info h1', h1 => h1.textContent);
    // Author
    await page.waitForSelector('div.author-info.hidden-md > span > a > span');
    const author = await page.$eval('div.author-info.hidden-md > span > a > span', span => span.innerText);
    // Genre
    await page.waitForSelector('.active a');
    const genre = await page.$eval('.active a', a => a.innerText);
    // Format
    await page.waitForSelector('.item-info li');
    const format = await page.$eval('.item-info li', li => li.innerText);
    // Publisher
    await page.waitForSelector('div.biblio-wrap > div > ul > li:nth-child(4) > span > a > span');
    const publisher = await page.$eval('div.biblio-wrap > div > ul > li:nth-child(4) > span > a > span', span => span.innerText);
    // Year
    await page.waitForSelector('div.biblio-wrap > div > ul > li:nth-child(3) > span');
    const year = await page.$eval('div.biblio-wrap > div > ul > li:nth-child(3) > span', span => span.innerText);
    const newyear = year.slice(-4);
    // Price
    try {
        await page.waitForSelector('div.price.item-price-wrap.hidden-xs.hidden-sm > span', { timeout: 1000 });
        const price = await page.$eval('div.price.item-price-wrap.hidden-xs.hidden-sm > span', span => span.innerText);
        var newprice = price.slice(-6);
    } catch {
        await page.waitForSelector('p.list-price');
        const price = await page.$eval('p.list-price', p => p.innerText);
        var newprice = price.slice(-6);
    } finally {
        await page.waitForSelector('div.price.item-price-wrap.hidden-xs.hidden-sm > span.sale-price');
        const price = await page.$eval('div.price.item-price-wrap.hidden-xs.hidden-sm > span.sale-price', span => span.innerText);
        var newprice = price.slice(-6);
    }
    // console.log(title);
    // console.log(author);
    // console.log(genre);
    // console.log(format);
    // console.log(publisher);
    // console.log(newyear);
    // console.log(newprice);
    return {
        title: title,
        author: author,
        genre: genre,
        format: format,
        publisher: publisher,
        year: newyear,
        price: newprice
    };
}
function readCsvAsync(filename, delimiter = ',', encoding = 'utf-8') {
    return new Promise((resolve, reject) => {
        const rows = [];
        try {
            fs.createReadStream(filename, { encoding: encoding })
                .pipe(parse({ delimiter: delimiter }))
                .on('data', (row) => rows.push(+row.CODECONTENT))
                .on('end', () => resolve(rows))
                .on('error', reject);
        } catch (err) {
            reject(err);
        }
    });
}
async function upcData() {
    try {
        const rows = await readCsvAsync('Book_Bulk.csv', ':');
        // console.log(csvData);
        // call puppeteer or whatever
        return rows;
    } catch (err) {
        console.log(err);
    }
}
async function main() {
    const allupcs = await upcData();
    // console.log(allupcs);
    const browser = await puppeteer.launch({ headless: false, defaultViewport: null, args: ['--start-maximized'] });
    const page = await browser.newPage();
    const scrapedData = [];
    for (let upc of allupcs) {
        const data = await getpageData(page, upc);
        scrapedData.push(data);
    }
    console.log(scrapedData);
}
main();
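For reference, the scraper itself works when I pass a single UPC as a hard-coded string instead of reading it from the CSV; I tested that with roughly this (same getpageData as above, UPC taken from the sample CSV):
async function single() {
    const browser = await puppeteer.launch({ headless: false, defaultViewport: null, args: ['--start-maximized'] });
    const page = await browser.newPage();
    // Passing the UPC as a string literal works; the error only shows up
    // when the value comes from the parsed CSV.
    const data = await getpageData(page, '9781250035288');
    console.log(data);
    await browser.close();
}
single();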