Using puppeteer on an actual API - javascript

I'm sorry for the long question, I'm a newbie at node but I have made a CRUD API before with authentication and everything, I just need to understand how to integrate puppeteer to the API, so let me begin:
This is my project structure:
puppeteer-api
controllers - puppeteer.controller.js
routes - puppeteer.routes.js
index.js
This is my index.js file:
// index.js — Express entry point: mounts the booking router and starts the server.
const puppeteer = require('puppeteer');
const express = require('express');
const booking = require('./routes/puppeteer.routes')
const app = express();
// All scraping endpoints live under /booking (see routes/puppeteer.routes.js).
app.use('/booking', booking);
let port = 8080;
app.listen(port, () => {
// NOTE(review): app.listen serves plain HTTP, but the logged URL says "https" — confirm intended protocol.
console.log('Server is running on https://localhost:8080/');
});
puppeteer.routes.js:
// puppeteer.routes.js — maps GET /booking/ to the scraping controller.
const express = require('express');
const router = express.Router();
const puppeteer_controller = require('../controllers/puppeteer.controller');
router.get('/', puppeteer_controller.get_booking);
module.exports = router;
puppeteer.controller.js:
// puppeteer.controller.js — scrapes booking.com search results for El Bolsón.
const puppeteer = require('puppeteer');
// NOTE(review): this handler never sends a response — `res.json` (a method of
// the response object) is REASSIGNED to the IIFE's return value instead of
// being CALLED with data. That is why Postman receives nothing.
exports.get_booking = (req, res, next) => {
res.json = (async function main() {
try {
const browser = await puppeteer.launch({ headless: true});
const page = await browser.newPage();
await page.goto('https://www.booking.com/searchresults.es-ar.html?label=gen173nr-1DCAEoggI46AdIM1gEaAyIAQGYASy4ARfIAQzYAQPoAQGIAgGoAgM&lang=es-ar&sid=bc11c3e819d105b3c501d0c7a501c718&sb=1&src=index&src_elem=sb&error_url=https%3A%2F%2Fwww.booking.com%2Findex.es-ar.html%3Flabel%3Dgen173nr-1DCAEoggI46AdIM1gEaAyIAQGYASy4ARfIAQzYAQPoAQGIAgGoAgM%3Bsid%3Dbc11c3e819d105b3c501d0c7a501c718%3Bsb_price_type%3Dtotal%26%3B&ss=El+Bols%C3%B3n%2C+R%C3%ADo+Negro%2C+Argentina&is_ski_area=&checkin_year=&checkin_month=&checkout_year=&checkout_month=&no_rooms=1&group_adults=2&group_children=0&b_h4u_keep_filters=&from_sf=1&ss_raw=el+bols&ac_position=0&ac_langcode=es&ac_click_type=b&dest_id=-985282&dest_type=city&place_id_lat=-41.964452&place_id_lon=-71.532732&search_pageview_id=06d48fb6823e00e9&search_selected=true&search_pageview_id=06d48fb6823e00e9&ac_suggestion_list_length=5&ac_suggestion_theme_list_length=0');
await page.waitForSelector('.sr_item');
page.on('console', consoleObj => console.log(consoleObj.text()));
console.log('Retrieving hotels data');
// NOTE(review): missing `await` — `hoteles` is a pending Promise here. Also,
// live HTMLCollections cannot cross the evaluate() boundary; they serialize
// to empty objects. The browser is never closed, either.
const hoteles = page.evaluate(() => {
let hoteles = [];
let x = document.getElementsByClassName('sr_item');
hoteles.push(x);
let navigation = document.getElementsByClassName('sr_pagination_item');
// NOTE(review): clicking pagination links navigates the page, which destroys
// this evaluate() execution context — this loop cannot work as written.
for (const nav of navigation) {
nav.click();
hoteles.push(document.getElementsByClassName('sr_item'));
}
console.log('Finished looping through');
return hoteles;
});
} catch(e) {
console.log('error', e);
}
})();
};
So, what I want is to be able to send a GET request from my app and get a response from my API with a list of hotels from Booking — it's just a personal project. The problem is that when I send the GET request with Postman I get no response at all, so I'm wondering what I'm doing wrong and what direction to follow. If anyone could point me in the right direction I would be very grateful.

The `(async function main() {...})()` block is an immediately-invoked function expression (IIFE), so it runs once when the module is loaded, not when a request arrives. Also, `res.json` is a method of the response object — you should call it with your data, not reassign it. Instead, move the scraping logic into its own function and call it from the controller, like below:
async function scraper() {
try {
const browser = await puppeteer.launch({ headless: true});
const page = await browser.newPage();
await page.goto('https://www.booking.com/searchresults.es-ar.html?label=gen173nr-1DCAEoggI46AdIM1gEaAyIAQGYASy4ARfIAQzYAQPoAQGIAgGoAgM&lang=es-ar&sid=bc11c3e819d105b3c501d0c7a501c718&sb=1&src=index&src_elem=sb&error_url=https%3A%2F%2Fwww.booking.com%2Findex.es-ar.html%3Flabel%3Dgen173nr-1DCAEoggI46AdIM1gEaAyIAQGYASy4ARfIAQzYAQPoAQGIAgGoAgM%3Bsid%3Dbc11c3e819d105b3c501d0c7a501c718%3Bsb_price_type%3Dtotal%26%3B&ss=El+Bols%C3%B3n%2C+R%C3%ADo+Negro%2C+Argentina&is_ski_area=&checkin_year=&checkin_month=&checkout_year=&checkout_month=&no_rooms=1&group_adults=2&group_children=0&b_h4u_keep_filters=&from_sf=1&ss_raw=el+bols&ac_position=0&ac_langcode=es&ac_click_type=b&dest_id=-985282&dest_type=city&place_id_lat=-41.964452&place_id_lon=-71.532732&search_pageview_id=06d48fb6823e00e9&search_selected=true&search_pageview_id=06d48fb6823e00e9&ac_suggestion_list_length=5&ac_suggestion_theme_list_length=0');
await page.waitForSelector('.sr_item');
page.on('console', consoleObj => console.log(consoleObj.text()));
console.log('Retrieving hotels data');
const hoteles = page.evaluate(() => {
let hoteles = [];
let x = document.getElementsByClassName('sr_item');
hoteles.push(x);
let navigation = document.getElementsByClassName('sr_pagination_item');
for (const nav of navigation) {
nav.click();
hoteles.push(document.getElementsByClassName('sr_item'));
}
console.log('Finished looping through');
return hoteles;
});
} catch(e) {
console.log('error', e);
}
}
// Call the scraper
exports.get_booking = async (req, res, next) => {
const scraperData = await scraper();
res.json(scraperData)
}
This turns the controller into an async function that awaits the scraper and sends its result back as JSON.

Related

Puppeteer querySelector returns empty object [duplicate]

Recently I started to crawl the web using Puppeteer. Below is a code for extracting a specific product name from the shopping mall.
const puppeteer = require('puppeteer');
// Opens Naver Shopping, searches for "양말" (socks), and prints the first
// ten product titles.
(async () => {
const width = 1600, height = 1040;
const option = { headless: false, slowMo: true, args: [`--window-size=${width},${height}`] };
const browser = await puppeteer.launch(option);
const page = await browser.newPage();
const vp = {width: width, height: height};
await page.setViewport(vp);
const navigationPromise = page.waitForNavigation();
await page.goto('https://shopping.naver.com/home/p/index.nhn');
await navigationPromise;
// NOTE(review): page.waitFor was deprecated and later removed from
// Puppeteer — use page.waitForTimeout or a plain setTimeout promise.
await page.waitFor(2000);
const textBoxId = 'co_srh_input';
await page.type('.' + textBoxId, '양말', {delay: 100});
await page.keyboard.press('Enter');
await page.waitFor(5000);
await page.waitForSelector('div.info > a.tit');
const stores = await page.evaluate(() => {
const links = Array.from(document.querySelectorAll('div.info > a.tit'));
return links.map(link => link.innerText).slice(0, 10) // take only the first 10 products
});
console.log(stores);
await browser.close();
})();
I have a question. How can I output the crawled results to an HTML document (without using the database)? Please use sample code to explain it.
I used what was seen on blog.kowalczyk.info
const puppeteer = require("puppeteer");
const fs = require("fs");
// Saves the fully-rendered HTML of a page to index.html.
// The browser is closed in `finally` so it cannot leak on error.
async function run() {
  const browser = await puppeteer.launch();
  try {
    const page = await browser.newPage();
    await page.goto("https://www.google.com/", { waitUntil: "networkidle2" });
    // hacky defensive move but I don't know a better way:
    // wait a bit so that the browser finishes executing JavaScript.
    // (page.waitFor was removed from Puppeteer; use a plain timer instead)
    await new Promise((resolve) => setTimeout(resolve, 1 * 1000));
    const html = await page.content();
    fs.writeFileSync("index.html", html);
  } finally {
    await browser.close();
  }
}
// Surface failures instead of leaving an unhandled rejection.
run().catch((err) => console.error(err));
fs.writeFile()
You can use the following write_file function that returns a Promise that resolves or rejects when fs.writeFile() succeeds or fails.
Then, you can await the Promise from within your anonymous, asynchronous function and check whether or not the data was written to the file:
'use strict';
const fs = require('fs');
const puppeteer = require('puppeteer');
// Writes `data` to `file` (utf8). Resolves to true on success and to false
// on failure (after logging the error). Resolving — rather than rejecting —
// with false is deliberate: the caller below checks
// `await write_file(...) === false`, and a rejection would make that
// expression throw instead of ever evaluating to false.
const write_file = (file, data) => new Promise((resolve) => {
  fs.writeFile(file, data, 'utf8', (error) => {
    if (error) {
      console.error(error);
      resolve(false);
    } else {
      resolve(true);
    }
  });
});
// NOTE(review): this async arrow is never invoked — as pasted, the code
// below never runs. Append `()` after the final closing parenthesis.
(async () => {
// ...
const stores = await page.evaluate(() => {
return Array.from(document.querySelectorAll('div.info > a.tit'), link => link.innerText).slice(0, 10); // take only the first 10 products
});
// stores.toString() joins the titles with commas — fine for a quick dump,
// but not real HTML markup.
if (await write_file('example.html', stores.toString()) === false) {
console.error('Error: Unable to write stores to example.html.');
}
// ...
});

ExpressJS Middleware - Execution order of Promises not correct

I want to create an Express Middleware to perform a basic check if the user/password pair in the authorization header exists in a JSON file (educational purpose). I added it on a very simple unit converter app.
The problem is that I receive a 403 instead of the resource when the username/password are correct.
I found that when I perform a request, the Promise.then in the middleware is executed before the Promise is fulfilled in my function findUserByCredentials. See an illustration of the problem in the third code snippet below.
index.js
const express = require('express')
const app = express()
const port = process.env.PORT || 3000
const findUserByCredentials = require("./lib/find-user");
// Basic-auth middleware (the code under discussion in this question).
app.use(function (req, res, next) {
// NOTE(review): in Express, req.headers always exists, so the `else` branch
// below is unreachable; and when the auth type is not 'Basic' neither next()
// nor a response is sent, so the request hangs.
if (req.headers) {
let header = req.headers.authorization || '';
let [type, payload] = header.split(' ');
if (type === 'Basic') {
let credentials = Buffer.from(payload, 'base64').toString('ascii');
let [username, password] = credentials.split(':');
// The 403-before-resolve ordering the asker observes is caused inside
// findUserByCredentials (see ./lib/find-user.js), not here.
findUserByCredentials({username, password}).then(() => {
console.log("next")
next();
}).catch(() => {
console.log("403")
res.sendStatus(403);
});
}
} else {
next();
}
});
// Simple unit-converter endpoint guarded by the middleware above.
app.get('/inchtocm', (req, res) => {
const cm = parseFloat(req.query.inches) * 2.54;
res.send({"unit": "cm", "value": cm});
});
app.listen(port, () => {
console.log(`Example app listening at http://localhost:${port}`)
})
module.exports = app;
./lib/find-user.js
const bcrypt = require('bcrypt');
const jsonfile = require('../users.json');
// NOTE(review): this is the buggy function from the question. The trailing
// reject() below runs SYNCHRONOUSLY, before any bcrypt.compare callback can
// fire — that is why the server logs "403" first and "resolve" afterwards.
// A promise settles only once, so the later resolve() is silently ignored.
let findUserByCredentials = () => (object) => {
const username = object.username;
const password = object.password;
return new Promise((resolve, reject) => {
jsonfile.forEach(user => {
if (user.username === username) {
bcrypt.compare(password, user.password).then((buffer) => {
if (buffer) {
console.log("resolve")
resolve();
} else {
console.log("reject")
reject();
}
});
}
});
// Executes immediately after the (synchronous) forEach finishes.
reject();
});
};
module.exports = findUserByCredentials();
Server console after sending a request
Example app listening at http://localhost:3000
403
resolve
How can I force Express to wait for the first Promise to finish before performing the second operation ?
To have a better control over the order of your promises and also have a less nested code, you should use the async/await syntax. You can read more about it here.
What it essentially does is lets you...well, await for an async operation to finish before proceeding. If you use await before something that returns a Promise (like your findUserByCredentials), it will assign to the variable what you resolve your promise with, for example:
// A promise that immediately fulfills with the number 3.
const myPromise = () => new Promise((resolve) => {
  resolve(3);
});
// Awaiting the promise yields the value it resolved with.
const myFunc = async () => {
  const number = await myPromise();
  console.log(number); // Output: 3
};
I would rephrase your findUserByCredentials function like so:
const bcrypt = require('bcrypt');
const jsonfile = require('../users.json');
// Resolves (to undefined) when `object` holds credentials matching a stored
// user; throws Error('wrong credentials') otherwise.
// Assumes usernames are unique within users.json.
const findUserByCredentials = async (object) => {
  const { username, password } = object;
  const match = jsonfile.find((entry) => entry.username === username);
  // Short-circuit: only hash-compare when a user with that name exists.
  const ok = match && await bcrypt.compare(password, match.password);
  if (!ok) {
    throw new Error('wrong credentials');
  }
};
module.exports = findUserByCredentials;
This way it's less nested, more readable and works in the order you need.
You can go even further with this principle and make your middleware (the function you're passing to app.use) an async function as well and use the await keyword instead of .then() and .catch()
I would switch the code to use the latest features of the javascript language related to asynchronous code async/await in that way you can have better control of your execution flow.
I will modify your code in the following way:
Firstly for the findUserByCredentials function:
const bcrypt = require('bcrypt');
const jsonfile = require('../users.json');
// Resolves to true when the supplied credentials match a stored user,
// and to false otherwise (unknown username or wrong password).
const findUserByCredentials = async (object) => {
  const username = object.username;
  const password = object.password;
  const user = jsonfile.find(user => user.username === username);
  if (!user) return false;
  // bcrypt.compare(plaintext, hash): the original called compareSync with
  // the arguments swapped AND discarded its result, then always returned
  // false — so no login could ever succeed.
  return bcrypt.compare(password, user.password);
};
module.exports = findUserByCredentials;
Secondly for the index.js
const express = require('express')
const app = express()
const port = process.env.PORT || 3000
const findUserByCredentials = require("./lib/find-user");
// Basic-auth guard: lets the request through only when the Authorization
// header carries credentials that match a stored user; 403 otherwise.
app.use(async (req, res, next) => {
  try {
    const header = req.headers.authorization || '';
    const [type, payload] = header.split(' ');
    // Reject early when the header is absent or not Basic auth.
    if (type !== 'Basic' || !payload) {
      return res.sendStatus(403);
    }
    const credentials = Buffer.from(payload, 'base64').toString('ascii');
    const [username, password] = credentials.split(':');
    const result = await findUserByCredentials({username, password})
    if (result) {
      return next()
    }
    return res.sendStatus(403);
  } catch (err) {
    // Express 4 does not catch rejected promises from async middleware; an
    // unhandled throw here would otherwise leave the request hanging.
    return res.sendStatus(403);
  }
});
app.get('/inchtocm', (req, res) => {
  const cm = parseFloat(req.query.inches) * 2.54;
  res.send({"unit": "cm", "value": cm});
});
app.listen(port, () => {
  console.log(`Example app listening at http://localhost:${port}`)
})
module.exports = app;
It's very important to learn how to use asynchronous code in Node.js: it is single-threaded, and if asynchronous code is used incorrectly you can block the event loop and degrade your app's performance. `async/await` is syntactic sugar in the JavaScript language that lets you write clean asynchronous code — try to use the latest features of the language specification, because they are created to ease our lives.

Jest+Puppeteer after run test TypeError: Cannot read property 'waitForSelector' of undefined

How is the correct way to move a group of tests into a separate class or Jest+puppeteer module, which would be called in other tests?
For example, I have a describe with tests, and I want to use this describe in other describe.
login.spec.js
'use strict'
const testConst = require('./const')
const selectors = require('./selectors')
const action = require('./actions')
const br = require('./browser')
const Auth = require('./login')
let page
let browser
// Jest lifecycle: launch the browser and open the page before any test runs.
beforeAll(async () => {
// set up Puppeteer
browser = await br.set_up_Puppeteer(true)
console.log('browser.isConnected')
// Web page
console.log('browser ready')
page = await br.see_webPage(browser, testConst.browser.url_address)
console.log('page ready')
console.log('fill field email and click login btn')
console.log('logged')
}, testConst.browser.timeout)
afterAll(async () => {
await browser.close()
}, testConst.browser.timeout)
// NOTE(review): this line runs at module-load time, BEFORE beforeAll has
// assigned `page` — so Auth.login receives undefined, which is the cause of
// the reported "Cannot read property 'waitForSelector' of undefined".
Auth.login(page)
login.js
'use strict'
const testConst = require('../const.js')
const selectors = require('../selectors')
const action = require('../actions')
// NOTE(review): registering describe/test blocks from inside an async static
// method is fragile — Jest collects tests at module load, and `page` here is
// whatever value was passed when login() was called (undefined in the spec).
class login{
static async login(page) {
describe('go to OrgTracker from launchpad', () => {
test('open card Org Tracker tool in launchpad', async () => {
await page.waitForSelector(selectors, {
visible: true
})
await page.click(selectors)
await page.waitForSelector(selectors, {
visible: true
})
}, testConst.browser.timeout)
})
}
}
module.exports=login
But this combo not working, after run test:login I see error TypeError: Cannot read property 'waitForSelector' of undefined
Your problem is the Auth.login(page) statement at the end of your code. The page variable isn't initialized yet at that point. Besides, you're already calling it inside your beforeAll which seems about correct, depending on what you're trying to achieve.
works like this
login.spec.js
'use strict'
const testConst = require('../const')
const selectors = require('../selectors')
const action = require('../actions')
const br = require('../browser')
const Auth = require('../login')
let page
let browser
const emails_correctData = testConst.users.correctData.emails
const password_correctData = testConst.users.correctData.passwords
// Launch the browser and open the app page once for this spec file.
beforeAll(async () => {
// set up Puppeteer
browser = await br.set_up_Puppeteer(true);
console.log('browser.isConnected');
// Web page
page = await br.see_webPage(browser, testConst.browser.url_address, 'user');
console.log('browser ready');
}, testConst.browser.timeout)
afterAll(async () => {
await browser.close();
}, testConst.browser.timeout)
// NOTE: describe callbacks must be synchronous — Jest ignores (and warns
// about) a promise returned from describe; only the test() body is async.
describe('login on system', () => {
test('login page as ' + emails_correctData.email1 + ' logged correctly', async () => {
console.log('fill field email and click login btn')
// Auth.login is awaited INSIDE a test, after beforeAll has set `page`.
await Auth.login( page, emails_correctData.email1, password_correctData.password)
// full page screenshot
await action.make_screenshot(page, testConst.screenshot.path_afterLogin, '_after_login.jpg')
console.log('logged')
}, testConst.browser.timeout)
})
login.js
'use strict'
const testConst = require('../const')
const selectors = require('../selectors')
const action = require('../actions')
// Reusable login flow: drives the sign-in form and verifies the logged-in
// user's name. Call (and await) it from inside a test() body, after `page`
// has been initialised in beforeAll.
class Auth{
  static async login( page, emails_correctData, password) {
    // Plain sequential awaits — the original mixed await with .then(),
    // which obscured ordering, and passed testConst.browser.timeout where
    // .then() expects an onRejected callback (a number there is ignored).
    await page.click(selectors.login.signIn)
    await expect(page.waitForSelector(selectors.login.email)).toBeTruthy()
    console.log('click signIn')
    await action.fillField(page, selectors.login.email, emails_correctData)
    console.log('fillField email')
    await action.fillField(page, selectors.login.password, password)
    console.log('fillField password')
    await page.click(selectors.login.btnLogin)
    console.log('click btn signIn')
    const userName = await page.$eval(selectors.menu.start, e => e.innerHTML)
    expect(userName).toBe(testConst.users.correctData.userName.userName1)
  }
}
module.exports=Auth
But that doesn't solve my problem. I wanted to specify whole groups of tests in the class and make them where I need it.

Scraping with puppeteer and returning JSON

I'm trying to create a node app that requires a URL from the user, the URL is then passed to scrape.js and using puppeteer, scrapes certain fields, and then passes the data back to app.js in a json format (so that I can then upset it into a doc). But what I receive is the entire ServerResponse and not the data in a json format as I'm intending.
I was hoping someone with more experience could shed some light. Here is what I have so far:
// app.js
// NOTE(review): three issues in this route as written —
//  1. GET requests have no parsed body; without a body parser, req.body is
//     undefined, so `url` is never populated (use req.query instead).
//  2. Wrapping an existing promise chain in `new Promise(...)` is the
//     explicit-construction anti-pattern; `item` is never resolved or used.
//  3. `url` is never passed to scrape.scrapeData().
const scrape = require('./scrape');
const router = express.Router();
router.get( '/', ( req, res ) => {
const url = req.body.url;
const item = new Promise((resolve, reject) => {
scrape
.scrapeData()
.then((data) => res.json(data))
.catch(err => reject('Scraping failed...'))
})
});
// scrape.js
// NOTE(review): as pasted, this snippet has three problems —
//  1. `url` (used in page.goto below) is not defined; it should be a
//     parameter of scrapeData.
//  2. The function never returns `scrapedData`, so callers receive
//     undefined (the accepted answer addresses this).
//  3. The `}));` near the end has a stray `)` — a syntax error as written.
const puppeteer = require('puppeteer');
const scrapeData = async () => {
const browser = await puppeteer.launch({ headless: true });
const page = await browser.newPage();
await page.setViewport({ width: 360, height: 640 });
await page.goto(url);
let scrapedData = await page.evaluate(() => {
let scrapedDetails = [];
let elements = document.querySelectorAll('#a-page');
elements.forEach(element => {
let detailsJson = {};
try {
detailsJson.title = element.querySelector('h1#title').innerText;
detailsJson.desc = element.querySelector('#description_box').innerText;
} catch (exception) {}
scrapedDetails.push(detailsJson);
});
return scrapedDetails;
}));
// console.dir(scrapeData) - logs the data successfully.
};
module.exports.scrapeData = scrapeData
You have a naming problem. scrape.js is exporting the scrapeData function. Inside that function, you declared a scrapedData variable, which is not the same thing.
Where you put:
console.dir(scrapeData) - logs the data successfully.
add
return scrapedData;
(note that it is the variable `scrapedData`, not the function name `scrapeData`). That should solve your issue.

Puppeteer doesn't close browser

I'm running puppeteer on express/node/ubuntu as follow:
var puppeteer = require('puppeteer');
var express = require('express');
var router = express.Router();
/* GET home page. */
// NOTE(review): if anything between launch() and close() throws (bad URL,
// navigation timeout, evaluate error), browser.close() is never reached and
// the chrome process is leaked — which is exactly how the zombies pile up.
// Also `headless` and `url` are implicit globals (no const/let).
router.get('/', function(req, res, next) {
(async () => {
headless = true;
const browser = await puppeteer.launch({headless: true, args:['--no-sandbox']});
const page = await browser.newPage();
url = req.query.url;
await page.goto(url);
let bodyHTML = await page.evaluate(() => document.body.innerHTML);
res.send(bodyHTML)
await browser.close();
})();
});
running this script multiple times leaves hundred of Zombies:
$ pgrep chrome | wc -l
133
Which clogs the srv,
How do I fix this?
Running kill from a Express JS script could solve it?
Is there a better way to get the same result other than puppeteer and headless chrome?
Ahhh! This is a simple oversight. What if an error occurs and your await browser.close() never executes thus leaving you with zombies.
Using shell.js seems to be a hacky way of solving this issue.
The better practice is to use try..catch..finally. The reason being you would want the browser to be closed irrespective of a happy flow or an error being thrown.
And unlike the other code snippet, you don't have to try and close the browser in the both the catch block and finally block. finally block is always executed irrespective of whether an error is thrown or not.
So, your code should look like,
const puppeteer = require('puppeteer');
const express = require('express');
const router = express.Router();
/* GET home page. */
// Renders the page named by ?url= and responds with its <body> HTML.
// Launching happens before the try so `browser` is visible to the finally
// block, which guarantees cleanup on both the happy path and on errors.
router.get('/', (req, res, next) => {
  const scrapeAndRespond = async () => {
    const browser = await puppeteer.launch({
      headless: true,
      args: ['--no-sandbox'],
    });
    try {
      const page = await browser.newPage();
      url = req.query.url;
      await page.goto(url);
      const bodyHTML = await page.evaluate(() => document.body.innerHTML);
      res.send(bodyHTML);
    } catch (e) {
      console.log(e);
    } finally {
      // Runs whether or not an error was thrown above.
      await browser.close();
    }
  };
  scrapeAndRespond();
});
Hope this helps!
wrap your code in try-catch like this and see if it helps
headless = true;
const browser = await puppeteer.launch({headless: true, args:['--no-sandbox']});
try {
  const page = await browser.newPage();
  url = req.query.url;
  await page.goto(url);
  let bodyHTML = await page.evaluate(() => document.body.innerHTML);
  res.send(bodyHTML);
  // No close() here: the finally block below always runs, and the original
  // closed the browser twice on the happy path (once in try, once in
  // finally), where the second close can itself throw.
} catch (error) {
  console.log(error);
} finally {
  await browser.close();
}
From my experience, the browser closing process may take some time after close is called. Anyway, you can check the browser process property to check if it's still not closed and force kill it.
if (browser && browser.process() != null) browser.process().kill('SIGINT');
I'm also posting the full code of my puppeteer resources manager below. Take a look at bw.on('disconnected', async () => {
// Puppeteer resource manager: owns a single stealth-configured browser,
// restarts it automatically when it crashes (up to 3 retries), and hands
// out pre-hardened pages via createPage().
const puppeteer = require('puppeteer-extra')
const randomUseragent = require('random-useragent');
const StealthPlugin = require('puppeteer-extra-plugin-stealth')
const USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.75 Safari/537.36';
puppeteer.use(StealthPlugin())
function ResourceManager(loadImages) {
let browser = null;
const _this = this;
let retries = 0;
let isReleased = false;
// (Re)launches the managed browser and resets the crash-retry counter.
this.init = async () => {
isReleased = false;
retries = 0;
browser = await runBrowser();
};
// Marks the manager as released so the 'disconnected' handler does not
// treat the deliberate close() as a crash, then closes the browser.
this.release = async () => {
isReleased = true;
if (browser) await browser.close();
}
// Returns a configured page for `url`, lazily launching the browser.
this.createPage = async (url) => {
if (!browser) browser = await runBrowser();
return await createPage(browser,url);
}
async function runBrowser () {
const bw = await puppeteer.launch({
headless: true,
devtools: false,
ignoreHTTPSErrors: true,
slowMo: 0,
args: ['--disable-gpu','--no-sandbox','--no-zygote','--disable-setuid-sandbox','--disable-accelerated-2d-canvas','--disable-dev-shm-usage', "--proxy-server='direct://'", "--proxy-bypass-list=*"]
});
// Crash recovery: if the browser disconnects without release() having been
// called, force-kill any leftover process and relaunch (max 3 retries).
bw.on('disconnected', async () => {
if (isReleased) return;
console.log("BROWSER CRASH");
if (retries <= 3) {
retries += 1;
// close() may not have finished; kill the OS process if still present.
if (browser && browser.process() != null) browser.process().kill('SIGINT');
await _this.init();
} else {
throw "===================== BROWSER crashed more than 3 times";
}
});
return bw;
}
// Builds a page with a randomized viewport/user-agent and several
// fingerprint overrides so the page looks less like headless automation.
async function createPage (browser,url) {
const userAgent = randomUseragent.getRandom();
const UA = userAgent || USER_AGENT;
const page = await browser.newPage();
await page.setViewport({
width: 1920 + Math.floor(Math.random() * 100),
height: 3000 + Math.floor(Math.random() * 100),
deviceScaleFactor: 1,
hasTouch: false,
isLandscape: false,
isMobile: false,
});
await page.setUserAgent(UA);
await page.setJavaScriptEnabled(true);
// NOTE(review): timeout 0 disables the navigation timeout entirely — a hung
// navigation will wait forever; prefer a generous finite timeout.
await page.setDefaultNavigationTimeout(0);
// Optionally skip heavy resources (styles, fonts, images) to speed up loads.
if (!loadImages) {
await page.setRequestInterception(true);
page.on('request', (req) => {
if(req.resourceType() == 'stylesheet' || req.resourceType() == 'font' || req.resourceType() == 'image'){
req.abort();
} else {
req.continue();
}
});
}
await page.evaluateOnNewDocument(() => {
//pass webdriver check
Object.defineProperty(navigator, 'webdriver', {
get: () => false,
});
});
await page.evaluateOnNewDocument(() => {
//pass chrome check
window.chrome = {
runtime: {},
// etc.
};
});
await page.evaluateOnNewDocument(() => {
//pass plugins check
const originalQuery = window.navigator.permissions.query;
return window.navigator.permissions.query = (parameters) => (
parameters.name === 'notifications' ?
Promise.resolve({ state: Notification.permission }) :
originalQuery(parameters)
);
});
await page.evaluateOnNewDocument(() => {
// Overwrite the `plugins` property to use a custom getter.
Object.defineProperty(navigator, 'plugins', {
// This just needs to have `length > 0` for the current test,
// but we could mock the plugins too if necessary.
get: () => [1, 2, 3, 4, 5],
});
});
await page.evaluateOnNewDocument(() => {
// Overwrite the `languages` property to use a custom getter.
Object.defineProperty(navigator, 'languages', {
get: () => ['en-US', 'en'],
});
});
await page.goto(url, { waitUntil: 'networkidle2',timeout: 0 } );
return page;
}
}
module.exports = {ResourceManager}
I solve it with https://www.npmjs.com/package/shelljs
var shell = require('shelljs');
// WARNING: pkill matches EVERY chrome process on the host, including
// browsers still serving other requests — a blunt instrument (a follow-up
// answer below points this out and offers a scoped alternative).
shell.exec('pkill chrome')
try to close the browser before sending the response
var puppeteer = require('puppeteer');
var express = require('express');
var router = express.Router();
// Close the browser BEFORE sending the response, and guard with
// try/finally so an error while scraping cannot leak a chrome process
// (the original skipped close() whenever goto/evaluate threw).
router.get('/', function(req, res, next) {
  (async () => {
    const browser = await puppeteer.launch({headless: true});
    let bodyHTML;
    try {
      const page = await browser.newPage();
      url = req.query.url;
      await page.goto(url);
      bodyHTML = await page.evaluate(() => document.body.innerHTML);
    } finally {
      await browser.close();
    }
    res.send(bodyHTML);
  })().catch(next); // forward failures to Express's error handler
});
I ran into the same issue and while your shelljs solution did work, it kills all chrome processes, which might interrupt one that is still processing a request. Here is a better solution that should work.
var puppeteer = require('puppeteer');
var express = require('express');
var router = express.Router();
// Scoped alternative to `pkill chrome`: only the browser launched for THIS
// request is closed, so other in-flight requests are unaffected. A
// try/finally guards close() — the original skipped it when goto/evaluate
// threw, leaking a process per failed request.
router.get('/', function (req, res, next) {
  (async () => {
    await puppeteer.launch({ headless: true }).then(async browser => {
      try {
        const page = await browser.newPage();
        url = req.query.url;
        await page.goto(url);
        let bodyHTML = await page.evaluate(() => document.body.innerHTML);
        res.send(bodyHTML);
      } finally {
        await browser.close();
      }
    });
  })().catch(next); // surface errors instead of an unhandled rejection
});
use
(await browser).close()
That happens because `browser` holds a promise, so you have to await it before calling `close()`. I struggled a lot with this myself — I hope it helps.
I use the following basic setup for running Puppeteer:
const puppeteer = require("puppeteer");
// `browser` is declared outside the IIFE so the .finally() cleanup can still
// reach it even when puppeteer.launch() itself rejects.
let browser;
(async () => {
browser = await puppeteer.launch();
const [page] = await browser.pages();
/* use the page */
})()
.catch(err => console.error(err))
// Optional chaining skips close() when launch failed and browser is undefined.
.finally(() => browser?.close())
;
Here, the finally block guarantees the browser will close correctly regardless of whether an error was thrown. Errors are logged (if desired). I like .catch and .finally as chained calls because the mainline Puppeteer code is one level flatter, but this accomplishes the same thing:
const puppeteer = require("puppeteer");
// Equivalent to the .catch/.finally chain above, written with statement-form
// try/catch/finally inside the IIFE; `browser` is scoped to the function.
(async () => {
let browser;
try {
browser = await puppeteer.launch();
const [page] = await browser.pages();
/* use the page */
}
catch (err) {
console.error(err);
}
finally {
// browser may be undefined if launch() threw — hence the optional chain.
await browser?.close();
}
})();
There's no reason to call newPage because Puppeteer starts with a page open.
As for Express, you need only place the entire code above, including let browser; and excluding require("puppeteer"), into your route, and you're good to go, although you might want to use an async middleware error handler.
You ask:
Is there a better way to get the same result other than puppeteer and headless chrome?
That depends on what you're doing and what you mean by "better". If your goal is to get document.body.innerHTML and the page content you're interested in is baked into the static HTML, you can dump Puppeteer entirely and just make a request to get the resource, then use Cheerio to extract the desired information.
Another consideration is that you may not need to load and close a whole browser per request. If you can use one new page per request, consider the following strategy:
const express = require("express");
const puppeteer = require("puppeteer");
// Wraps an async route handler so a rejected promise is forwarded to
// Express's error middleware instead of hanging the request.
const asyncHandler = fn => (req, res, next) =>
Promise.resolve(fn(req, res, next)).catch(next)
;
// One browser for the whole process, launched eagerly at startup; each
// request gets its own page and closes it in `finally`.
const browserReady = puppeteer.launch({
args: ["--no-sandbox", "--disable-setuid-sandbox"]
});
const app = express();
app
.set("port", process.env.PORT || 5000)
.get("/", asyncHandler(async (req, res) => {
const browser = await browserReady;
const page = await browser.newPage();
try {
await page.goto(req.query.url || "http://www.example.com");
return res.send(await page.content());
}
catch (err) {
// Bad/unreachable URL → client error, with the reason in the body.
return res.status(400).send(err.message);
}
finally {
// Close only the page — the shared browser stays up for other requests.
await page.close();
}
}))
.use((err, req, res, next) => res.sendStatus(500))
.listen(app.get("port"), () =>
console.log("listening on port", app.get("port"))
)
;
Finally, make sure to never set any timeouts to 0 (for example, page.setDefaultNavigationTimeout(0);), which introduces the potential for the script to hang forever. If you need a generous timeout, at most set it to a few minutes--long enough not to trigger false positives.
See also:
Parallelism of Puppeteer with Express Router Node JS. How to pass page between routes while maintaining concurrency
Puppeteer unable to run on heroku

Categories