Electron Spotify not opening in desktop mode - javascript

Electron is not opening Spotify in desktop mode, as you can see in the screenshots below.
Here is the code:
const { BrowserWindow, app } = require("electron");
const pie = require("puppeteer-in-electron");
const puppeteer = require("puppeteer-core");
const fs = require("fs");
const path = require("path");

const main = async () => {
  const cookiesPath = path.join(__dirname, "cookies/open.spotify.com.cookies.json");
  const cookies = JSON.parse(await fs.readFileSync(cookiesPath, "utf8"));

  await pie.initialize(app);
  const browser = await pie.connect(app, puppeteer);

  const window = new BrowserWindow();
  const url = "https://example.com/";
  await window.loadURL(url);

  const page = await pie.getPage(browser, window);
  await page.goto("https://open.spotify.com");

  for (const cookie of cookies) {
    if (cookie.name !== "ig_lang") {
      await page.setCookie(cookie);
    }
  }
  await page.reload();
};

main();
Note: I'm using puppeteer-in-electron so that I can automate the web process inside Electron.
That is not the issue, though, because the problem persists even when I use Electron normally, without Puppeteer.
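For reference, even a minimal plain-Electron sketch like this (no Puppeteer, no cookies) shows the same behavior:

// minimal plain-Electron repro (sketch), no puppeteer-in-electron involved
const { BrowserWindow, app } = require("electron");

app.whenReady().then(() => {
  const window = new BrowserWindow();
  window.loadURL("https://open.spotify.com"); // still serves the limited page
});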
This is how it should've been: https://cdn.discordapp.com/attachments/1026704902925324410/1026710664611377202/unknown.png
This is how it is: https://cdn.discordapp.com/attachments/1026704902925324410/1026704903055343626/Screenshot_42.png
Hope I've explained it well.
Thanks

Install the package "https://github.com/castlabs/electron-releases#v20.0.0+wvcus" (the castLabs Electron build, which ships with Widevine CDM support), like this:
npm install "https://github.com/castlabs/electron-releases#v20.0.0+wvcus" --save-dev
Also import components:
const { BrowserWindow, app, components } = require("electron");
To open Spotify in desktop mode, pass a Chrome userAgent when loading the URL:
window.loadURL(url, {
  userAgent: "Chrome/105.0.0.0",
});
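Spotify appears to pick which player it serves based on the user agent, and Electron's default user agent contains an Electron/x.y.z token, so the site falls back to the limited page. If you prefer not to pass the override on every loadURL call, Electron's app.userAgentFallback can set the default user agent globally; a minimal sketch:

// sketch: change the default user agent for all windows instead of per loadURL call
app.userAgentFallback = "Chrome/105.0.0.0";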
Finally, create the BrowserWindow only after both app and components are ready:
app.whenReady().then(async () => {
  await components.whenReady();
  main();
});
Full code:
const { BrowserWindow, app, components } = require("electron");
const pie = require("puppeteer-in-electron");
const puppeteer = require("puppeteer-core");
const fs = require("fs");
const path = require("path");

// must be called before the app is ready
pie.initialize(app);

const main = async () => {
  const cookiesPath = path.join(
    __dirname,
    "cookies/open.spotify.com.cookies.json",
  );
  const cookies = JSON.parse(fs.readFileSync(cookiesPath, "utf8"));

  const browser = await pie.connect(app, puppeteer);
  const window = new BrowserWindow();

  const url = "https://example.com/";
  await window.loadURL(url, {
    userAgent: "Chrome/105.0.0.0",
  });

  const page = await pie.getPage(browser, window);
  await page.goto("https://open.spotify.com");

  // restore the saved Spotify cookies, then reload
  for (const cookie of cookies) {
    if (cookie.name !== "ig_lang") {
      await page.setCookie(cookie);
    }
  }
  await page.reload();
};

app.whenReady().then(async () => {
  await components.whenReady();
  main();
});
Short version:
const { BrowserWindow, app, components } = require("electron");

const main = () => {
  const window = new BrowserWindow();
  const url = "https://open.spotify.com";
  window.loadURL(url, {
    userAgent: "Chrome/105.0.0.0",
  });
};

app.whenReady().then(async () => {
  await components.whenReady();
  main();
});

Related

Why am I getting ["Invalid Form Body"]?

DiscordAPIError[50035]: Invalid Form Body
0[CONTENT_TYPE_INVALID]: Expected "Content-Type" header to be one of {'application/json'}.
I'm getting this while starting my bot.
const fs = require("fs");
const colors = require('colors');
const { REST } = require("#discordjs/rest");
const { Routes } = require("discord-api-types/v10");
module.exports = (client) => {
client.handleCommands = async () => {
const commandFolders = fs.readdirSync("./src/commands");
for (const folder of commandFolders) {
const commandFiles = fs
.readdirSync(`./src/commands/${folder}`)
.filter((file) => file.endsWith(".js"));
const { commands, commandArray } = client;
for (const file of commandFiles) {
const command = require(`../../commands/${folder}/${file}`);
commands.set(command.data.name, command);
commandArray.push(command.data.toJSON());
}
}
const clientId = "0";
const guildId = "0";
const rest = new REST({ version: "10" }).setToken(process.env.TOKEN);
try {
console.log("Started refreshing application (/) commands.".yellow);
await rest.put(Routes.applicationGuildCommands(clientId, guildId), {
body: client.commandArray,
});
console.log("Successfully reloaded application (/) commands.".green);
} catch (error) {
console.error(`${error}`.red);
}
};
};
This is the code of handleCommands.js, and the error above comes from this script. Any way to fix it?
(The client ID and guild ID are blurred.)
I'm a beginner in JS, so I don't really know much about it.
I was trying to build a slash command loader, but it gave me these errors.
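One hedged sanity check (not a confirmed fix): the body passed to rest.put has to serialize cleanly to JSON, so logging it right before the request shows whether commandArray actually holds plain command objects:

// hypothetical diagnostic: inspect what will actually be sent to the API
console.log(JSON.stringify(client.commandArray, null, 2));
await rest.put(Routes.applicationGuildCommands(clientId, guildId), {
  body: client.commandArray,
});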

Puppeteer too much CPU

I'm trying to run this script, which makes visits to a site with different IPs through a proxy pool, but after a few visits the CPU rises to 100% and it slows down more and more. Could you help me optimize it?
I should state that I am not a programmer, and I thank anyone who can help me solve this problem.
const express = require('express');
const app = express();
const port = process.env.PORT || 8080;
const validUrl = require('valid-url');

const parseUrl = function (url) {
  url = decodeURIComponent(url);
  if (!/^(?:f|ht)tps?:\/\//.test(url)) {
    url = 'https://' + url;
  }
  return url;
};

const getRandomDevice = () => {
  const puppeteer = require('puppeteer');
  const devices = Object.entries(puppeteer.devices);
  return devices[Math.floor(Math.random() * devices.length)][1];
};

app.get('/', function (req, res) {
  // const url = parseUrl(req.query.url);
  const url = 'https://www.example.com';
  const tries = req.query.tries || 100000;
  if (validUrl.isWebUri(url)) {
    console.log('Handling: ' + url);
    (async () => {
      const puppeteer = require('puppeteer');
      const browser = await puppeteer.launch({
        headless: true,
        // userDataDir: './myUserDataDir',
        args: [
          '--no-sandbox',
          '--disable-setuid-sandbox',
          '--proxy-server=EXAMPLE-POOL-PROXY:13012'
        ]
      });
      let [page] = await browser.pages();
      for (let i = 0; i < tries; i++) {
        // enable request interception
        await page.setRequestInterception(true);
        const device = getRandomDevice();
        await page.setUserAgent(device.userAgent);
        await page.setViewport(device.viewport);
        // add header for the navigation requests
        page.on('request', request => {
          // Add a new header for navigation request.
          const headers = request.headers();
          headers['User-Agent'] = device.userAgent;
          headers['user-agent'] = device.userAgent;
          request.continue({ headers });
        });
        await page.goto(url, { waitUntil: 'networkidle2', timeout: 1500000 });
        try {
          // console.log(page);
        } catch (error) {
          console.error(error);
        } finally {
          // console.log(urls);
          // console.log(await page._client.send('Network.getAllCookies'));
          // await page.screenshot().then(function (buffer) {
          //   res.setHeader('Content-Disposition', 'attachment;filename="' + url + '.png"');
          //   res.setHeader('Content-Type', 'image/png');
          //   res.send(buffer)
          // });
          await page.screenshot({ path: 'screenshot-' + i + '.png', fullPage: true });
          // If everything is correct there is no 'HeadlessChrome' substring in the userAgent
          console.log(await page.evaluate(() => navigator.userAgent));
          page = await browser.newPage();
        }
      }
      setTimeout(async () => {
        await browser.close();
      }, 500);
    })();
  } else {
    res.send('Invalid url: ' + url);
  }
});

app.listen(port, function () {
  console.log('App listening on port ' + port);
});
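One likely contributor to the CPU growth (a hedged guess, not a confirmed fix) is that every loop iteration registers another 'request' listener and ends by opening a new page without closing the old one, so listeners and pages accumulate. A sketch of the loop restructured so interception and the listener are set up once and the same page is reused:

// sketch: set up interception and the request listener once, outside the loop
await page.setRequestInterception(true);
let device = getRandomDevice();
page.on('request', request => {
  const headers = request.headers();
  headers['user-agent'] = device.userAgent;
  request.continue({ headers });
});

for (let i = 0; i < tries; i++) {
  device = getRandomDevice();
  await page.setUserAgent(device.userAgent);
  await page.setViewport(device.viewport);
  await page.goto(url, { waitUntil: 'networkidle2', timeout: 1500000 });
  await page.screenshot({ path: 'screenshot-' + i + '.png', fullPage: true });
  // reuse the same page instead of calling browser.newPage() each iteration
}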

Execution context was destroyed, most likely because of a navigation Puppeteer

In my Puppeteer Node.js app I need to read localStorage and cookies from a browser web page, but for some reason I'm getting the following error:
UnhandledPromiseRejectionWarning: Error: Execution context was destroyed, most likely because of a navigation.
What am I doing wrong/missing from my JS:
const dayjs = require('dayjs');
const AdvancedFormat = require('dayjs/plugin/advancedFormat');
dayjs.extend(AdvancedFormat);
const puppeteer = require('puppeteer');
const { config } = require('./config');
const helpers = require('./helpers');
const logs = require('./logs');

const runEmulation = async (body) => {
  logs.debug('starting emulation');

  // vars
  const argOptions = [], journey = [];

  // sandbox config
  if ((config.puppeteer.run_in_sandbox === 'true')) {
    argOptions.push('--no-sandbox');
  }

  // initiate a Puppeteer instance with options and launch
  const browser = await puppeteer.launch({
    args: argOptions,
    headless: (config.puppeteer.run_in_headless === 'true') ? true : false
  });

  // launch a new page
  const page = await browser.newPage();

  // initiate a new CDP session
  const client = await page.target().createCDPSession();
  await client.send('Network.enable');

  await client.on('Network.requestWillBeSent', async (e) => {
    // if not a document, skip
    if (e.type !== "Document") return;

    const scrapablePageData = async () => {
      function getLocalStorage () {
        const values = [];
        const keys = Object.keys(localStorage);
        let index = keys.length;
        while (index--) {
          values.push({
            key: keys[index],
            value: localStorage.getItem(keys[index])
          });
        }
        return values ? values : [];
      }
      return {
        localStorage: getLocalStorage()
      };
    };

    const scrapable = await page.evaluate(scrapablePageData);
    const cookies = await page.cookies();

    // the data we want to log
    journey.push({
      url: e.documentURL,
      type: e.redirectResponse ? e.redirectResponse.status : 'JS Redirection',
      storage: {
        cookies: cookies ?? [],
        local: scrapable.localStorage ?? []
      },
      duration_in_ms: 0,
      duration_in_sec: 0,
      loaded_at: dayjs().valueOf()
    });
  });

  // set userAgent and go to the URL
  await page.setUserAgent(body.userAgent);
  await page.goto(body.url);
  await page.waitForNavigation();

  console.log(journey);
};

exports.runEmulation = runEmulation;
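For comparison, a minimal hedged sketch that reads storage only after navigation has settled (outside the requestWillBeSent handler, where the page can still be mid-navigation) avoids this class of error, though it does not capture each redirect hop the way the journey array intends:

// sketch: read localStorage and cookies only once the page has finished loading
await page.goto(body.url, { waitUntil: 'networkidle0' });
const local = await page.evaluate(() =>
  Object.entries(localStorage).map(([key, value]) => ({ key, value }))
);
const cookies = await page.cookies();
console.log({ local, cookies });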

Create pdf using node.js and puppeteer

I have an issue with pdf generation.
const puppeteer = require("puppeteer");
const fs = require("fs-extra");
const path = require("path");
const hbs = require("handlebars");
const data = require("./database.json");
const { v4 } = require("uuid");
const compile = async function(templateName, data) {
const filePath = path.join(process.cwd(), "views", `${templateName}.hbs`);
const html = await fs.readFile(filePath, "utf-8");
return hbs.compile(html)(data);
};
async function createPDF(data) {
try {
const pdfFileName = v4();
const browser = await puppeteer.launch();
const page = await browser.newPage();
console.log(data); <-- object
const content = await compile("index", data); <--- problem here
await page.setContent(content);
await page.emulateMedia("screen");
await page.pdf({
path: path.join(__dirname, `pdfs/${pdfFileName}.pdf`),
format: "A4",
printBackground: true
});
await browser.close();
} catch (error) {
console.log(error)
}
};
Everything else works fine: I can generate my new PDF file, but for some reason the data object inside my Handlebars template is undefined.
Any idea how to solve it?
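A quick hedged sanity check of how Handlebars receives context (the compiled template is just a function that takes the data object):

// hypothetical probe: a trivial template rendered directly with an object
const probe = hbs.compile("<p>{{title}}</p>");
console.log(probe({ title: "hello" })); // prints <p>hello</p>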

Web Scraping with Puppeteer + Next.js and Express

Trying to find out why my scrape function runs twice. I am trying to create an SSR app that hits a website and scrapes the data. It does work and correctly grabs the data I need, but I set headless to false and I can see the browser opening correctly and then opening again, searching for the term [object Object]...
I need it to only run on the server, which is why I have gone with the Express + Next.js combo.
server.js
const express = require("express");
const next = require("next");
const scraper = require("./utils/scraper");
const port = parseInt(process.env.PORT, 10) || 3000;
const dev = process.env.NODE_ENV !== "production";
const nextApp = next({ dev });
const nextHandle = nextApp.getRequestHandler();
nextApp.prepare().then(() => {
const server = express();
server.get("/search/:query", async (req, res) => {
const { query } = req.params;
const listings = await scraper.scrape(query);
return nextApp.render(req, res, "/search", { search: query, listings });
});
server.get("*", (req, res) => {
return nextHandle(req, res);
});
server.listen(port, err => {
if (err) {
throw err;
}
console.log(`> Ready on http://localhost:${port}`);
});
});
scraper.js
const puppeteer = require("puppeteer");
const scrape = async term => {
const browser = await puppeteer.launch({ headless: false });
const page = await browser.newPage();
await page.goto(`https://...&query=${term}`);
const scrapedData = await page.evaluate(() => {
const items = document.querySelectorAll(".results");
return Array.from(items).map(listing => {
return { ... build up my obj here };
});
});
await browser.close();
return scrapedData;
};
module.exports.scrape = scrape;
Search.jsx (the next.js page)
import React, { Component } from "react";

export default class extends Component {
  static async getInitialProps(ctx) {
    return {
      search: ctx.query.search,
      listings: ctx.query.listings
    };
  }

  render() {
    const { search, listings } = this.props;
    console.log(listings, "client");
    return (
      <div>
        <h1>{search} search!</h1>
        { ...listings.map() }
      </div>
    );
  }
}
UPDATE 1
I noticed that Puppeteer correctly opens only once if I do not pass my listings array into nextApp.render and just log the results on the server. But as soon as I pass it along to the page via getInitialProps, I get the double loading described above.
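One hedged way to check whether the route itself is being hit twice (for example by a second request from the client, such as a favicon fetch or a client-side re-navigation) is to log every hit on the search route before scraping; a sketch of the existing handler with a log added:

// hypothetical diagnostic: see whether this route really fires twice per search
server.get("/search/:query", async (req, res) => {
  console.log("scrape requested:", req.url, new Date().toISOString());
  const { query } = req.params;
  const listings = await scraper.scrape(query);
  return nextApp.render(req, res, "/search", { search: query, listings });
});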
