How to print an HTML document using Puppeteer?

Recently I started crawling the web using Puppeteer. Below is code that extracts specific product names from a shopping mall.
const puppeteer = require('puppeteer');

(async () => {
  const width = 1600, height = 1040;
  const option = { headless: false, slowMo: true, args: [`--window-size=${width},${height}`] };
  const browser = await puppeteer.launch(option);
  const page = await browser.newPage();
  const vp = { width: width, height: height };
  await page.setViewport(vp);
  const navigationPromise = page.waitForNavigation();
  await page.goto('https://shopping.naver.com/home/p/index.nhn');
  await navigationPromise;
  await page.waitFor(2000);
  const textBoxId = 'co_srh_input';
  await page.type('.' + textBoxId, '양말', { delay: 100 }); // search for '양말' (socks)
  await page.keyboard.press('Enter');
  await page.waitFor(5000);
  await page.waitForSelector('div.info > a.tit');
  const stores = await page.evaluate(() => {
    const links = Array.from(document.querySelectorAll('div.info > a.tit'));
    return links.map(link => link.innerText).slice(0, 10); // take only the first 10 products
  });
  console.log(stores);
  await browser.close();
})();
I have a question: how can I output the crawled results to an HTML document (without using a database)? Please explain with sample code.

I used an approach I saw on blog.kowalczyk.info:
const puppeteer = require("puppeteer");
const fs = require("fs");
async function run() {
const browser = await puppeteer.launch();
const page = await browser.newPage();
await page.goto("https://www.google.com/", { waitUntil: "networkidle2" });
// hacky defensive move but I don't know a better way:
// wait a bit so that the browser finishes executing JavaScript
await page.waitFor(1 * 1000);
const html = await page.content();
fs.writeFileSync("index.html", html);
await browser.close();
}
run();
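If you want an HTML document built from the scraped results themselves, rather than a dump of the whole page, you can assemble a small template and write it out the same way. A minimal sketch, assuming stores is the array of product names produced by the first script (the toHtml helper and the products.html filename are just for illustration):
const fs = require('fs');

// Hypothetical helper: wraps the scraped product names in a bare-bones HTML list.
const toHtml = (stores) => `<!DOCTYPE html>
<html>
  <head><meta charset="utf-8"><title>Crawled products</title></head>
  <body>
    <ul>
${stores.map(name => `      <li>${name}</li>`).join('\n')}
    </ul>
  </body>
</html>`;

// Write the generated document next to the script.
fs.writeFileSync('products.html', toHtml(stores), 'utf8');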

fs.writeFile()
You can use the following write_file function, which returns a Promise that resolves with true when fs.writeFile() succeeds and with false when it fails.
Then you can await the Promise from within your anonymous, asynchronous function and check whether or not the data was written to the file:
'use strict';

const fs = require('fs');
const puppeteer = require('puppeteer');

// Resolves with true on success and false on failure, so the caller
// can check the result without a try/catch.
const write_file = (file, data) => new Promise(resolve => {
  fs.writeFile(file, data, 'utf8', error => {
    if (error) {
      console.error(error);
      resolve(false);
    } else {
      resolve(true);
    }
  });
});

(async () => {
  // ...
  const stores = await page.evaluate(() => {
    return Array.from(document.querySelectorAll('div.info > a.tit'), link => link.innerText).slice(0, 10); // take only the first 10 products
  });
  if (await write_file('example.html', stores.toString()) === false) {
    console.error('Error: Unable to write stores to example.html.');
  }
  // ...
})();
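As a side note, on Node.js 10 and later you can skip the hand-rolled wrapper entirely: fs.promises exposes a promise-based writeFile. A minimal sketch of the same check, placed inside the async IIFE (fsp is just a local alias to avoid shadowing fs above):
const fsp = require('fs').promises; // promise-based fs API (Node.js 10+)

// fsp.writeFile returns a Promise, so a plain try/catch replaces the boolean check.
try {
  await fsp.writeFile('example.html', stores.toString(), 'utf8');
} catch (error) {
  console.error('Error: Unable to write stores to example.html.', error);
}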

Related

Overriding showOpenFilePicker with Puppeteer

As illustrated here, Puppeteer allows you to override JavaScript functions. I want to override the showOpenFilePicker function: when showOpenFilePicker is invoked by the web page, I want to run another function before it.
const puppeteer = require("puppeteer");
(async () => {
const browser = await puppeteer.launch({ headless: false });
const page = await browser.newPage();
await page.evaluateOnNewDocument(() => {
Object.defineProperty(HTMLCanvasElement.prototype, "toBlob", {
value: () => {
console.log("Hey there");
},
});
});
await page.goto("https://example.com");
await page.evaluate(() => {
console.log(HTMLCanvasElement.prototype.toBlob.toString());
});
// await browser.close();
})();
You can override built-in functions in Puppeteer like in the code sample below. This replaces the original function with an override that logs the arguments to the console.
const puppeteer = require("puppeteer");
(async () => {
const browser = await puppeteer.launch({ headless: false });
const page = await browser.newPage();
await page.evaluateOnNewDocument(() => {
const originalShowOpenFilePicker = window.showOpenFilePicker;
window.showOpenFilePicker = (...args) => {
console.log('Modified `showOpenFilePicker` called with these arguments:', args);
return originalShowOpenFilePicker(...args);
};
});
await page.goto("https://example.com");
await page.evaluate(() => {
console.log(showOpenFilePicker());
});
// await browser.close();
})();
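One thing worth knowing when testing this: console.log calls made inside the page go to the browser's console, not to your Node.js terminal. To see the override fire from Node, you can relay page console messages with Puppeteer's page.on('console') event, for example right after creating the page:
// Forward in-page console messages to the Node.js terminal.
page.on('console', msg => console.log('PAGE LOG:', msg.text()));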

Asynchronous function recognizes function is done when really it still has more to do

The async for loop at the bottom treats getDownloadUrl as finished as soon as the page.on listener is registered, but it's not actually done: it still needs to run everything up to page.close. How do I let the async function know that it is done after page.close, not page.on? Let me know if you need any more info, thanks.
const puppeteer = require('puppeteer');
const fs = require('fs');
const req = require('request');
const got = require('got');
const NodeID3 = require('node-id3');
const readline = require('readline');
const selectors = require('./selectors');

const getDownloadUrl = async (url, browser) => {
  const page = await browser.newPage();
  await page.goto(url);
  page.setRequestInterception(true);
  await page._client.send('Page.setDownloadBehavior', { behavior: 'allow', downloadPath: './Songs' });
  const baseUrl = 'https://cf-hls-media.sndcdn.com/media/';
  await page.on('request', async (request) => {
    if (request.url().includes(baseUrl)) {
      const downloadUrl = fixUrl(request.url());
      const info = await getSongInfo(page);
      downloadSong(downloadUrl, info.title);
      await tagSong(info);
      await request.abort();
      await page.close();
    } else {
      request.continue();
    }
  });
};

const fixUrl = (url) => {
  ...
};

const downloadSong = (url, title) => {
  ...
};

const getSongInfo = async (page) => {
  ...
};

const tagSong = async (info) => {
  ...
};

(() => {
  const readInterface = readline.createInterface({
    input: fs.createReadStream('../Song Urls.csv'),
    output: process.stdout,
    console: false,
    terminal: false,
  });

  let urls = [];
  readInterface.on('line', function (line) {
    urls.push(line);
  }).on('close', async () => {
    const browser = await puppeteer.launch({ headless: false });
    for (let i = 0; i < urls.length; i++) {
      const url = urls[i];
      await getDownloadUrl(url, browser);
    }
  });
})();

/*
Issue: The loop recognizes that the getDownloadUrl function is done even though it's
not and continues anyways.
*/
await only works with promises, and page.on is a callback-based event listener, not something that returns a promise. If you want to be able to await it, you will need to create a promise around it:
await new Promise((resolve) => {
  page.on('request', async (request) => {
    if (request.url().includes(baseUrl)) {
      const downloadUrl = fixUrl(request.url());
      const info = await getSongInfo(page);
      downloadSong(downloadUrl, info.title);
      await tagSong(info);
      await request.abort();
      await page.close();
      resolve();
    } else {
      request.continue();
    }
  });
});
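If your Puppeteer version ships page.waitForRequest, you can also let Puppeteer do the waiting instead of wrapping page.on yourself. A sketch under that assumption (request interception and the download-behavior setup are omitted for brevity; keep them if you still need to abort the media request):
const getDownloadUrl = async (url, browser) => {
  const page = await browser.newPage();
  await page.goto(url);
  const baseUrl = 'https://cf-hls-media.sndcdn.com/media/';
  // Resolves with the first request whose URL matches the predicate.
  const request = await page.waitForRequest(req => req.url().includes(baseUrl));
  const downloadUrl = fixUrl(request.url());
  const info = await getSongInfo(page);
  downloadSong(downloadUrl, info.title);
  await tagSong(info);
  await page.close();
};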

Puppeteer not working as expected when clicking button

My problem is that I need to set the comment selector to "all comments" with Puppeteer, but the comments don't render after Puppeteer clicks the correct button ("all the comments"); the comment section just disappears. I will provide the code and a video of the browser in action.
const $ = require('cheerio');
const puppeteer = require('puppeteer');

const url = 'https://www.facebook.com/pg/SamsungGlobal/posts/';

const main = async () => {
  const browser = await puppeteer.launch({
    headless: false,
    args: ['--no-sandbox', '--disable-setuid-sandbox']
  });
  const page = await browser.newPage();
  await page.setViewport({
    width: 1920,
    height: 1080
  });
  await page.goto(url, {
    waitUntil: 'networkidle2',
    timeout: 0
  });
  page.mouse.click(50, 540, {});
  for (var a = 0; a < 18; a++) {
    setTimeout(() => {}, 16);
    await page.keyboard.press('ArrowDown');
  }
  let bodyHTML = await page.evaluate(() => document.body.innerHTML);
  var id = "#" + $("._427x ._4-u2.mbm._4mrt", bodyHTML).attr('id'); // selects id of first post
  try {
    var exp = await page.$(`${id} a._21q1`); // clicks on "most relevant" from the first post
    await exp.evaluate(exp => exp.click());
    await page.click('div[data-ordering="RANKED_UNFILTERED"]'); // selects "all the comments"
    var exp = await page.$(`${id} a._42ft`); // should click on "more comments" but it doesn't load
    await exp.evaluate(exp => exp.click());
    await page.waitForSelector(`${id} a._5v47.fss`); // wait for the "others" in facebook comments
    var exp = await page.$$(`${id} a._5v47.fss`);
    await exp.evaluate(exp => exp.click());
    await page.screenshot({
      path: "./srn4.png"
    });
    // var post = await page.$eval(id + " .userContentWrapper", el => el.innerHTML);
    // console.log("that's the post " + post);
  } catch (e) {
    console.log(e);
  }
  setTimeout(async function() {
    await browser.close(); // close after some time
  }, 1500);
};

main();
Here is a video of the full execution process: https://youtu.be/jXpSOBfVskg
And a slow-motion capture of the moment it clicks on the menu: https://youtu.be/1OgfFNokxsA
You can try a variant with selectors:
'use strict';

const puppeteer = require('puppeteer');

(async function main() {
  try {
    const browser = await puppeteer.launch({ headless: false });
    const [page] = await browser.pages();
    await page.goto('https://www.facebook.com/pg/SamsungGlobal/posts/');
    await page.waitForSelector('[data-ordering="RANKED_THREADED"]');
    await page.click('[data-ordering="RANKED_THREADED"]');
    await page.waitForSelector('[data-ordering="RANKED_UNFILTERED"]');
    await page.click('[data-ordering="RANKED_UNFILTERED"]');
  } catch (err) {
    console.error(err);
  }
})();
page.mouse.click(50, 540, {});
This is not necessarily going to work. What are you trying to click? You need to use CSS selectors to find the elements you want to click.
Also, dynamic elements might not appear on the page right away, so use waitForSelector as needed.
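For instance, a sketch of the selector-based pattern (reusing the data-ordering selector from the answer above):
// Wait until the element exists in the DOM, then click it by selector.
await page.waitForSelector('div[data-ordering="RANKED_UNFILTERED"]');
await page.click('div[data-ordering="RANKED_UNFILTERED"]');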

Create async loop using js

I'm trying to implement an async each loop in Node.js.
I have a variable html which contains the page content. I want to iterate through all divs that have a particular class; inside those divs are links that I want to navigate to and pull some content from as well. The basic problem is that each expects a synchronous function, so it doesn't wait for the asynchronous code to finish.
I tried to do it like this:
const browser = await puppeteer.launch({
  headless: true
});
const page = await browser.newPage();
const page2 = await browser.newPage();
const mainUrl = "http ... ";
const html = await page.goto(mainUrl)
  .then(function() {
    return page.content();
  });
await $('.data-row', html).each(function() => {
  const url = await $(this).find(".link-details a").attr("href");
  page2.goto(url)
    .then(function() {
      const title = await page.evaluate(el => el.innerHTML, await page.$('#title'));
      // do other things
    });
  // do other things
  // create a json with data add it to a list
});
But the title comes back undefined, and the callback executes only after the loop has finished ... What can I do here?
I've edited your code to show how Puppeteer is supposed to be used. Your main problem here was using jQuery where it was not needed and attempting to await things that were not asynchronous, while mixing in a promise chain.
(async () => {
  const browser = await puppeteer.launch({
    headless: true
  });
  const page = await browser.newPage();
  const page2 = await browser.newPage();
  const mainUrl = "http ... ";

  await page.goto(mainUrl);
  await page.waitForSelector('.data-row');

  // Pull the plain href strings out inside the page context; the jQuery
  // .each() loop from the question is not needed.
  const urls = await page.evaluate(() =>
    Array.from(document.querySelectorAll('.data-row .link-details a'), a => a.href)
  );

  for (const url of urls) {
    await page2.goto(url);
    const title = await page2.evaluate(() => document.title);
    console.log(title);
    // do other things
    // create a json with data add it to a list
  }
})()
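Note that the href values are extracted inside page.evaluate: elements returned from evaluate are not serializable, so returning document.querySelectorAll('.data-row') directly would come back as an array of empty objects that cannot be queried from Node.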
You can't await jQuery's each, so you can try the following instead:
const rows = $('.data-row', html).toArray();

for (const row of rows) {
  const url = $(row).find(".link-details a").attr("href");
  await page2.goto(url);
  const title = await page2.evaluate(el => el.innerHTML, await page2.$('#title'));
  // do other things
  // create a json with data add it to a list
}
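(For the awaits to work, this loop must itself run inside an async function, as in the IIFE from the previous answer.)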
