JS Puppeteer can't find elements after scroll - javascript

I am making this scraper to collect posts from public Facebook pages. My problem is that when I turn the scraper up to collect more than about 10 posts, it's unable to scrape elements after scrolling far down. The way it works is: the scraper goes to a public page, then scrolls the entire feed until it has grabbed the IDs for the number of posts you want to collect. After collecting all the IDs it goes back to every post and collects specific info like comments, shares, reactions, etc. But when I'm far down the feed it can't find the posts higher up the feed by ID; even though the selector works in Chrome dev tools, in Puppeteer it's undefined. So my question is: why does the scroll location affect Puppeteer's ability to read the DOM? And is there a better way for me to collect this information?
Sidenote: this scraper is expected to grab thousands of posts.
Here is my code so far
const { scrollPageToBottom } = require('puppeteer-autoscroll-down')
const puppeteer = require('puppeteer');
const prompt = require('prompt-sync')();
const ObjectsToCsv = require('objects-to-csv');
(async () => {
  const fbPage = prompt('What FaceBook Page?');
  const browser = await puppeteer.launch({
    headless: false,
    defaultViewport: null,
    args: ['--start-maximized']
  });
  const page = await browser.newPage();
  await page.goto(`https://www.facebook.com/OfficialMensHumor/`, { waitUntil: 'networkidle2' }).catch(e => void 0);
  await scrapeArticles(page)
  await browser.close();
})();
async function autoScroll(page){
  await page.evaluate(async () => {
    await new Promise((resolve, reject) => {
      var totalHeight = 0;
      var distance = 100;
      var timer = setInterval(() => {
        var scrollHeight = document.body.scrollHeight;
        window.scrollBy(0, distance);
        totalHeight += distance;
        if(totalHeight >= scrollHeight - window.innerHeight){
          clearInterval(timer);
          resolve();
        }
      }, 1000);
    });
  });
}
async function getText(spans){
  for (const ele of spans){
    // const text = ele.getProperty('innerText')
    const text = await (await ele.getProperty('innerText')).jsonValue()
    console.log(text)
  }
}
async function scrapeArticles(
  page,
  // extractItems,
  postCount=100,
  scrollDelay = 800,
) {
  let post = [];
  try {
    let previousHeight;
    while (post.length < postCount) {
      const content = await page.$('div[role="main"] > div.k4urcfbm')
      post = await content.evaluate(()=>{
        const postDivs = Array.from(document.querySelectorAll('div.du4w35lb.l9j0dhe7 div[class=lzcic4wl][role="article"]'))
        return postDivs.map(post=>({id:post.getAttribute('aria-posinset')}))
      })
      console.log(post)
      let isLoadingAvailable = true
      await scrollPageToBottom(page, { size: 500 , delay: 250})
    }
    console.log(1)
    await getPostUrls(page, post)
    await getComments(page, post)
    await getShares(page, post)
    await getReactions(page, post)
    await getPostImg(page, post)
    await getTime(page, post)
    console.log(post)
    saveToFile(post)
  } catch(e) {
    console.log(e)
  }
  // return items;
}
const getComments = async (page, articleNums) =>{
  for (const obj of articleNums){
    for(const key in obj){
      if(key == 'id'){
        const article = await page.$(`div[aria-posinset="${obj[key]}"]`)
        // const handle = await article.waitForFunction('document.querySelector("span.d2edcug0.hpfvmrgz.qv66sw1b.c1et5uql.lr9zc1uh.a8c37x1j.fe6kdd0r.mau55g9w.c8b282yb.keod5gw0.nxhoafnm.aigsh9s9.d3f4x2em.iv3no6db.jq4qci2q.a3bd9o3v.b1v8xokw.m9osqain").innerText')
        // const handle = await article.waitForXPath("//span[contains(text(), 'Comments')]", {visible: true})
        const handle = await article.waitForSelector('div[aria-posinset="1"] div.gtad4xkn')
        // Comment String
        const commentNum = await (await handle.getProperty('innerText')).jsonValue()
        obj['commentsNum'] = commentNum
      }
    }
  }
  // console.log(articleNums)
}
const getShares = async (page, articleNums) => {
  for (const obj of articleNums){
    for(const key in obj){
      if(key == 'id'){
        const article = await page.$(`div[aria-posinset="${obj[key]}"]`)
        const handle = await article.waitForXPath("//span[contains(text(), 'Shares')]", {visible: true})
        // Share String
        const shareNum = await (await handle[0].getProperty('innerText')).jsonValue()
        obj['sharesNum'] = shareNum
      }
    }
  }
  // console.log(articleNums)
}
const getReactions = async (page, articleNums) =>{
  for (const obj of articleNums){
    for(const key in obj){
      if(key == 'id'){
        const article = await page.$(`div[aria-posinset="${obj[key]}"]`)
        const handle = await article.$('span[aria-label="See who reacted to this"] + span[aria-hidden="true"]')
        // Reaction String
        const reactionsNum = await (await handle.getProperty('innerText')).jsonValue()
        obj['reactionsNum'] = reactionsNum
      }
    }
  }
  // console.log(articleNums)
}
const getPostImg = async (page, articleNums)=>{
  for (const obj of articleNums){
    for(const key in obj){
      if(key == 'id'){
        const article = await page.$(`div[aria-posinset="${obj[key]}"]`)
        const imgDiv = await article.$('div[class="pmk7jnqg kr520xx4"]')
        const handle = await imgDiv.$('img[alt]')
        // Image URL string
        const imgUrl = await (await handle.getProperty('src')).jsonValue()
        obj['imgUrl'] = imgUrl
      }
    }
  }
  // console.log(articleNums)
}
// And timestamp
const getTime = async (page, articleNums)=>{
  for (const obj of articleNums){
    for (const key in obj){
      if(key == 'postUrl'){
        await page.goto(obj[key])
        const timeStamp = await page.$eval('abbr[data-shorten]', abbr=>abbr.dataset.tooltipContent)
        obj['timestamp'] = timeStamp
      }
    }
  }
}
const getPostUrls = async (page, articleNums)=>{
  for (const obj of articleNums){
    for(const key in obj){
      if(key == 'id'){
        const article = await page.$(`div[aria-posinset="${obj[key]}"]`)
        const postURLHandle = await article.$('a[role="link"][aria-label]')
        // Post URL string
        const postURL = await (await postURLHandle.getProperty('href')).jsonValue()
        obj['postUrl'] = postURL
      }
    }
  }
  console.log(articleNums)
}
const saveToFile = async (list) =>{
  const csv = new ObjectsToCsv(list);
  // Save to file:
  await csv.toDisk('./post_sample.csv');
}
These are the lines in question that are continuously returning undefined:
// const handle = await article.waitForFunction('document.querySelector("span.d2edcug0.hpfvmrgz.qv66sw1b.c1et5uql.lr9zc1uh.a8c37x1j.fe6kdd0r.mau55g9w.c8b282yb.keod5gw0.nxhoafnm.aigsh9s9.d3f4x2em.iv3no6db.jq4qci2q.a3bd9o3v.b1v8xokw.m9osqain").innerText')
// const handle = await article.waitForXPath("//span[contains(text(), 'Comments')]", {visible: true})
const handle = await article.waitForSelector('div[aria-posinset="1"] div.gtad4xkn')
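Would something like the rough sketch below, where I collect each post's data during the scroll pass while the article is still rendered (in case Facebook removes posts far from the viewport from the DOM), be a more reliable approach? The selectors are just reused from my code above, so treat this as an idea rather than working code.
async function scrapeWhileScrolling(page, postCount = 100) {
  // Collect data per post while it is still attached to the DOM, deduplicated by aria-posinset
  const collected = new Map();
  while (collected.size < postCount) {
    const visiblePosts = await page.$$('div[role="article"][aria-posinset]');
    for (const article of visiblePosts) {
      const data = await article.evaluate(el => ({
        id: el.getAttribute('aria-posinset'),
        postUrl: el.querySelector('a[role="link"][aria-label]')?.href ?? null,
        imgUrl: el.querySelector('img[alt]')?.src ?? null,
      }));
      if (data.id && !collected.has(data.id)) collected.set(data.id, data);
    }
    // Same scrolling helper as above (puppeteer-autoscroll-down)
    await scrollPageToBottom(page, { size: 500, delay: 250 });
  }
  return [...collected.values()].slice(0, postCount);
}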

Related

Retrieving SRC attribute of all HTML IMG tags on a webpage using Puppeteer

I am attempting to write a script that locates the largest image on a page. The first step of this process would be to retrieve all the image sources on a particular website. This is where I am stuck.
const puppeteer = require('puppeteer');
function ImageFetcher(pageURL, partName) {
return new Promise( async (resolve, reject) => {
try {
const browser = await puppeteer.launch({
headless: false,
});
const page1 = await browser.newPage();
await page1.goto(pageURL);
try {
const images = await page.$$eval("img", els => els.map(x => x.getAttribute("src")));
console.log(images);
} catch(e) {console.log("ERR Locator")};
await page1.close();
await browser.close();
return resolve();
} catch(e) {console.log(`Error Image Fetcher Part Name: ${partName}`)};
});
}
async function start() {
pageURL = "https://www.grainger.com/product/NVENT-CADDY-Cushioned-Pipe-Clamp-1RVC3";
partName = "10000";
ImageFetcher(pageURL, partName);
} start();
//ERR Locator
How about this:
const puppeteer = require("puppeteer");
let testing = async () => {
const browser = await puppeteer.launch({
headless: false
});
const page = await browser.newPage();
await page.goto('https://www.grainger.com/product/NVENT-CADDY-Cushioned-Pipe-Clamp-1RVC3');
const image = await extractLargestImage(page);
return image;
};
async function extractLargestImage(page) {
return page.evaluate(() => {
let imgs = document.querySelectorAll('img');
let largestImgSrc = 'none yet';
let largestImgSize = 0;
for (var img of imgs) {
let imgSize = Number(img.height) * Number(img.width);
if (imgSize > largestImgSize) {
largestImgSize = imgSize;
largestImgSrc = img.src;
}
}
return largestImgSrc;
});
}
testing().then((value) => {
console.dir(value, {'maxArrayLength': null});
});

Resolving async function after a loop end

I expect that when I call an async function, the promise resolves at the end, not before.
const urls = await uploadImages({ imageChanges, questions });
// ...next step
// I will use urls
But after calling await uploadImages() it continues to run until const data = await fetch(image.src);
and then the ...next step starts. How can I make it wait for the imageChanges.forEach loop to finish? Should I create another nested function inside?
const uploadImages = async ({ imageChanges, questions }) => {
if (!imageChanges.length) return null;
const storage = firebase.storage();
let urls;
try {
//** convert each new image's src from blob to downloadUrl. */
imageChanges.forEach(async image => {
const questionId = questions.findIndex(q => q.id === image.questionId);
const imagePath = `${questionId}.jpg`;
const storageRef = storage.ref(imagePath);
// **
const data = await fetch(image.src);
const blob = await data.blob();
const uploadTaskSnapshot = await storageRef.put(blob);
const downloadURL = await uploadTaskSnapshot.ref.getDownloadURL();
urls.push(downloadURL)
});
return urls;
} catch (error) {
console.log(error.message);
}
};
forEach with async doesn't work as expected. Read this answer for more info.
Try it like this:
const uploadImages = async ({ imageChanges, questions }) => {
  if (!imageChanges.length) return null;
  const storage = firebase.storage();
  try {
    const imageChangesUrlPromise = imageChanges.map(async (image) => {
      const questionId = questions.findIndex(q => q.id === image.questionId);
      const imagePath = `${questionId}.jpg`;
      const storageRef = storage.ref(imagePath);
      const data = await fetch(image.src);
      const blob = await data.blob();
      const uploadTaskSnapshot = await storageRef.put(blob);
      const downloadURL = await uploadTaskSnapshot.ref.getDownloadURL();
      return downloadURL;
    })
    return await Promise.all(imageChangesUrlPromise);
  } catch (error) {
    console.log(error.message);
  }
};
and then
const urls = await uploadImages({ imageChanges, questions });
...
JavaScript does this because forEach is not promise-aware. It cannot support async and await. You cannot use await in forEach.
If you use await in a map, map will always return an array of promises. This is because asynchronous functions always return promises.
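Here is a tiny standalone illustration (a toy example with a made-up 10 ms delay, unrelated to your code) of the difference:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
async function withForEach() {
  const results = [];
  [1, 2, 3].forEach(async n => {
    await delay(10);
    results.push(n);
  });
  console.log(results); // [] -- forEach returned before any callback finished
}
async function withMapAndPromiseAll() {
  const results = await Promise.all([1, 2, 3].map(async n => {
    await delay(10);
    return n;
  }));
  console.log(results); // [1, 2, 3] -- Promise.all waited for every callback
}
withForEach();
withMapAndPromiseAll();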
With a little modification to your code, this should work:
const uploadImages = async ({ imageChanges, questions }) => {
  if (!imageChanges.length) return null;
  const storage = firebase.storage();
  let urls = []; // initialize as an array so push() works
  try {
    //** convert each new image's src from blob to downloadUrl. */
    await Promise.all(imageChanges.map(async image => {
      const questionId = questions.findIndex(q => q.id === image.questionId);
      const imagePath = `${questionId}.jpg`;
      const storageRef = storage.ref(imagePath);
      // **
      const data = await fetch(image.src);
      const blob = await data.blob();
      const uploadTaskSnapshot = await storageRef.put(blob);
      const downloadURL = await uploadTaskSnapshot.ref.getDownloadURL();
      urls.push(downloadURL)
    }));
    return urls;
  } catch (error) {
    console.log(error.message);
  }
};
const urls = await uploadImages({ imageChanges, questions });

Asynchronous function recognizes function is done when really it still has more to do

The page.on handler is recognized by the async for loop at the bottom as finished and ready to run the function again, but it's not actually done. It still needs to run everything up to page.close. How do I let the async function know that it is done after page.close, not page.on? Let me know if you need any more info, thanks.
const puppeteer = require('puppeteer');
const fs = require('fs');
const req = require('request');
const got = require('got');
const NodeID3 = require('node-id3');
const readline = require('readline');
const selectors = require('./selectors');
const getDownloadUrl = async (url, browser) => {
const page = await browser.newPage();
await page.goto(url);
page.setRequestInterception(true);
await page._client.send('Page.setDownloadBehavior', {behavior: 'allow', downloadPath: './Songs'})
const baseUrl = 'https://cf-hls-media.sndcdn.com/media/';
await page.on('request', async (request) => {
if(request.url().includes(baseUrl)){
const downloadUrl = fixUrl(request.url());
const info = await getSongInfo(page);
downloadSong(downloadUrl, info.title);
await tagSong(info);
await request.abort();
await page.close();
} else {
request.continue();
}
});
};
const fixUrl = (url) => {
...
};
const downloadSong = (url, title) => {
...
};
const getSongInfo = async (page) => {
...
};
const tagSong = async (info) => {
...
};
(() => {
const readInterface = readline.createInterface({
input: fs.createReadStream('../Song Urls.csv'),
output: process.stdout,
console: false,
terminal: false,
});
let urls = [];
readInterface.on('line', function(line) {
urls.push(line);
}).on('close', async () => {
const browser = await puppeteer.launch({headless: false});
for (let i = 0; i < urls.length; i++) {
const url = urls[i];
await getDownloadUrl(url, browser);
}
});
})();
/*
Issue: The loop recognizes that the getDownloadUrl function is done even though it's
not and continues anyways.
*/
await only works with promises, and page.on looks to be a callback-based event listener, not something that returns a promise. If you want to be able to await it, you will need to create a promise around it.
await new Promise((resolve) => {
  page.on('request', async (request) => {
    if(request.url().includes(baseUrl)){
      const downloadUrl = fixUrl(request.url());
      const info = await getSongInfo(page);
      downloadSong(downloadUrl, info.title);
      await tagSong(info);
      await request.abort();
      await page.close();
      resolve();
    } else {
      request.continue();
    }
  });
})

Not able to read the dom content in puppeteer [node.js] using class & objects

I'm trying to read the DOM content from indiansuperleague.com (for example goals, attacking, mins per goal, etc.) using a class and objects. It gives an error like this:
Evaluation failed: TypeError: Cannot read property 'textContent' of undefined
at puppeteer_evaluation_script:7:64
Here's the code
config.js
const puppeteer = require('puppeteer')
class Puppeteer{
constructor(){
this.param = {
path: 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe',
url: 'https://indiansuperleague.com',
}
}
async connect(){
this.param.browser = await puppeteer.launch({executablePath: this.param.path, headless: false})
this.param.page = await this.param.browser.newPage()
await this.param.page.goto(this.param.url, {timeout: 0})
}
async disconnect(){
await this.param.browser.close()
}
}
module.exports = Puppeteer
states.js
class States{
constructor(param){
this.param = param
}
async fetchData(){
const page = this.param.page
const res = await page.evaluate(() => {
const title = 'si-fkt-sctn-title', value = 'si-fkt-sctn-number'
// const titleArray = document.getElementsByClassName(title)
// const valueArray = document.getElementsByClassName(value)
let key = document.getElementsByClassName(title)[0].textContent.trim()
let num = document.getElementsByClassName(value)[0].textContent.trim()
/* for(let i=0; i<titleArray.length; i++){
key[i] = titleArray[i].textContent.trim()
num[i] = valueArray[i].textContent.trim()
// Object.defineProperty(temp, key, {value:num,writable: true,configurable: true,enumerable: true})
} */
return {key, num}
})
console.log(res)
}
}
module.exports = States
app.js
const Puppeteer = require('./config')
const States = require('./modules/states')
const puppeteer = new Puppeteer()
const states = new States(puppeteer.param)
puppeteer.connect().then(async() => {
let res = await states.fetchData()
console.log(res)
await puppeteer.disconnect()
}).catch(e => console.log(e))
What is the solution?
The elements may be created dynamically after some time. You can try to use page.waitForSelector() before retrieving the data from them. For example:
'use strict';
const puppeteer = require('puppeteer');
(async function main() {
  try {
    const browser = await puppeteer.launch();
    const [page] = await browser.pages();
    await page.goto('https://indiansuperleague.com');
    await Promise.all([
      page.waitForSelector('.si-fkt-sctn-title'),
      page.waitForSelector('.si-fkt-sctn-number'),
    ]);
    const data = await page.evaluate(() => {
      return [
        document.querySelector('.si-fkt-sctn-title').textContent,
        document.querySelector('.si-fkt-sctn-number').textContent,
      ];
    });
    console.log(data);
    await browser.close();
  } catch (err) {
    console.error(err);
  }
})();
Output:
[ ' Goals', ' 63' ]

Loop through async requests with nested async requests

I have a scenario where I am calling an API that has pagination.
What I'd like to do is the following, 1 page at a time.
Call API Page 1
For each of the items in the response, call a Promise to get more data and store in an array
Send the array to an API
Repeat until all pages are complete
What I currently have is the following; however, I think I am possibly overcomplicating this, and I'm unsure how to proceed.
export const importData = async () => {
const pSize = 15;
const response = await getItems(pSize, 1);
const noPage = Math.ceil(response.totalMerchandiseCount/pSize);
for (let i = 1; i < noPage; i++) {
const items = [];
const data = await getItems(pSize, i);
await async.each(data.merchandiseList, async(i, cb) => {
const imageURL = await getImageURL(i.id, i.type);
items.push({
id: i.id,
imageURL: imageURL,
});
cb();
}, async() => {
return await api.mockable('sync', items);
});
}
}
export const getImageURL = async(id, type) => {
let url = `https://example.com/${id}`;
return axios.get(url)
.then((response) => {
const $ = cheerio.load(response.data);
// do stuff to get imageUrl
return image;
})
.catch((e) => {
console.log(e);
return null;
})
};
The issue I have at the moment is that it seems to wait until all pages are complete before calling api.mockable. Items is also empty at this point.
Can anyone suggest a way to make this a bit neater and help me get it working?
If this is all meant to be serial, then you can just use a for-of loop:
export const importData = async () => {
  const pSize = 15;
  const response = await getItems(pSize, 1);
  const noPage = Math.ceil(response.totalMerchandiseCount/pSize);
  for (let i = 1; i < noPage; i++) { // Are you sure this shouldn't be <=?
    const items = [];
    const data = await getItems(pSize, i);
    for (const {id, type} of data.merchandiseList) {
      const imageURL = await getImageURL(id, type);
      items.push({id, imageURL});
    }
    await api.mockable('sync', items);
  }
}
I also threw some destructuring and shorthand properties in there. :-)
If it's just the pages in serial but you can get the items in parallel, you can replace the for-of with map and Promise.all on the items:
export const importData = async () => {
  const pSize = 15;
  const response = await getItems(pSize, 1);
  const noPage = Math.ceil(response.totalMerchandiseCount/pSize);
  for (let i = 1; i < noPage; i++) { // Are you sure this shouldn't be <=?
    const data = await getItems(pSize, i);
    const items = await Promise.all(data.merchandiseList.map(async ({id, type}) => {
      const imageURL = await getImageURL(id, type);
      return {id, imageURL};
    }));
    await api.mockable('sync', items);
  }
}
That async function call to map can be slightly more efficient as a non-async function:
export const importData = async () => {
  const pSize = 15;
  const response = await getItems(pSize, 1);
  const noPage = Math.ceil(response.totalMerchandiseCount/pSize);
  for (let i = 1; i < noPage; i++) {
    const data = await getItems(pSize, i);
    const items = await Promise.all(data.merchandiseList.map(({id, type}) =>
      getImageURL(id, type).then(imageURL => ({id, imageURL}))
    ));
    await api.mockable('sync', items);
  }
}
