I'm trying to launch an .exe file from an Electron app with React/Redux.
From the component I'm calling dispatch(launch(titleId, titleName)).
The problem is that path is undefined after awaiting readFolders().
Any idea what I'm doing wrong and what I should change in my approach?
Thanks in advance!
launch.js
// Imports assumed from the question: execFile comes from child_process,
// and the readFolders import path matches the file name below.
import { execFile } from 'child_process';
import { readFolders } from './readFolders';

export const launch = async (titleId, titleName) => {
  const path = await readFolders(titleId);
  console.log('path:', path); // undefined
  execFile(path, (err, data) => {
    if (err) {
      console.log('err', err);
    } else if (data) {
      console.log('data:', data);
    } else {
      console.log('success');
    }
  });
  return {
    type: 'LAUNCH',
  };
};
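A side note on the Redux part (an assumption about your setup, since the component code isn't shown): because launch is async, dispatch(launch(...)) dispatches a Promise rather than a plain action object, which normally needs middleware such as redux-thunk. A minimal thunk-style sketch of the same action creator:

// hypothetical redux-thunk variant of launch
export const launchThunk = (titleId, titleName) => async dispatch => {
  const path = await readFolders(titleId);
  if (path) {
    execFile(path, err => err && console.log('err', err));
  }
  dispatch({ type: 'LAUNCH' });
};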
readFolders.js
import fs from 'fs';
import { homedir } from 'os';
const fsPromises = fs.promises;
const isExeFile = file => file.match(/.*\.exe$/i);
export const readFolders = async titleId => {
  const userDir = homedir();
  const folderPath = `${userDir}/downloads`;
  const fullPath = `${folderPath}/${titleId}`;
  try {
    const contents = await fsPromises.readdir(fullPath);
    contents.forEach(async item => {
      if (isExeFile(item)) {
        console.log('isExeFile');
        return `${fullPath}/${item}`;
      }
      try {
        const nestedFolder = await fsPromises.readdir(`${fullPath}/${item}`);
        nestedFolder.forEach(nestedItem => {
          if (isExeFile(nestedItem)) {
            return `${fullPath}/${item}/${nestedItem}`;
          }
          return null;
        });
      } catch (err) {
        console.log('err:', err);
      }
    });
  } catch (err) {
    console.log('err main:', err);
  }
};
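The root cause is visible in isolation: a return inside a forEach callback only exits that callback, never the enclosing function, so readFolders above always resolves to undefined. A minimal sketch of the same behaviour:

// sketch: find() resolves to undefined even though the callback "returns"
const find = async () => {
  ['game.exe'].forEach(item => {
    return item; // returned to forEach, which ignores the value
  });
  // no return statement down here, so the promise resolves to undefined
};
find().then(result => console.log(result)); // logs: undefined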
Edit:
I also tried this way, and now const path = await readFolders(titleId); returns the correct result, but eslint complains about it (https://eslint.org/docs/rules/no-async-promise-executor) and it doesn't feel like a good solution.
return new Promise(async (resolve, reject) => {
  try {
    const contents = await fsPromises.readdir(fullPath);
    contents.forEach(async item => {
      if (isExeFile(item)) {
        console.log(`${fullPath}/${item}`);
        return resolve(`${fullPath}/${item}`);
      }
      try {
        const nestedFolder = await fsPromises.readdir(`${fullPath}/${item}`);
        nestedFolder.forEach(nestedItem => {
          if (isExeFile(nestedItem)) {
            console.log(`${fullPath}/${item}/${nestedItem}`);
            return resolve(`${fullPath}/${item}/${nestedItem}`);
          }
          return null;
        });
      } catch (err) {
        console.log('err:', err);
        reject(err);
      }
    });
  } catch (err) {
    console.log('err main:', err);
    reject(err);
  }
});
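About that eslint rule: an async function already returns a promise, so the new Promise(async ...) wrapper is redundant, and an error thrown inside an async executor is silently swallowed instead of rejecting. A minimal sketch of the rule-compliant shape, reusing fsPromises and isExeFile from above (it only checks the top level, for illustration):

const readFoldersSketch = async fullPath => {
  const contents = await fsPromises.readdir(fullPath); // was: inside the executor
  const found = contents.find(isExeFile);
  if (!found) throw new Error('no .exe found'); // plays the role of reject(err)
  return `${fullPath}/${found}`; // plays the role of resolve(path)
};

The answer below applies the same shape to the full nested search.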
Your readFolders is missing a return at the end. When you return inside forEach, it returns from the anonymous callback function only, not from readFolders, so the `${fullPath}/${item}/${nestedItem}` value is thrown away.
For more you can read my blog on it:
https://medium.com/@deepak_v/weird-part-how-to-break-the-loop-in-javascript-8bba3e658267
Updated code (rewritten here with for...of loops, because a return inside a forEach or some callback never leaves the callback, and some does not await an async callback):
export const readFolders = async (titleId) => {
  const userDir = homedir();
  const folderPath = `${userDir}/downloads`;
  const fullPath = `${folderPath}/${titleId}`;
  try {
    const contents = await fsPromises.readdir(fullPath);
    // for...of (unlike forEach/some) lets await pause the loop and lets
    // return exit readFolders itself.
    for (const item of contents) {
      if (isExeFile(item)) {
        console.log("isExeFile");
        return `${fullPath}/${item}`;
      }
      try {
        const nestedFolder = await fsPromises.readdir(`${fullPath}/${item}`);
        for (const nestedItem of nestedFolder) {
          if (isExeFile(nestedItem)) {
            return `${fullPath}/${item}/${nestedItem}`;
          }
        }
      } catch (err) {
        console.log("err:", err); // item wasn't a directory; skip it
      }
    }
    return undefined; // no .exe found
  } catch (err) {
    console.log("err main:", err);
  }
};
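With the loop version, the caller simply awaits the result:

// resolves to the path string, or undefined if no .exe was found
const path = await readFolders(titleId);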
I have a Lambda function that is meant to download a directory of files from s3, convert them, delete the old files, and upload the new output files back to s3. The output for each file will be at least one file and a folder.
Everything seems to be working as intended, except for the upload. No errors are thrown; the function just ends without putting anything.
I'm a novice, so feel free to point out I've done it all wrong.
// Assumed setup (not shown in the question): an AWS SDK v2 S3 client, fs,
// path, and execSync; the external `command` used for conversion is also
// defined elsewhere.
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const fs = require('fs');
const path = require('path');
const { execSync } = require('child_process');

exports.handler = async ({ dirName }) => {
  // const jsonIn = JSON.parse(event.body);
  // const dirName = jsonIn.dirName;
  const localDir = `/tmp/${dirName}`;
  const params = {
    Bucket: 'to-pdf-test',
    Delimiter: '/',
    Prefix: dirName + '/',
    StartAfter: dirName + '/'
  };
  var s3List;
  var localList = [];
  execSync(`mkdir ${localDir}`);
  try {
    s3List = await s3.listObjectsV2(params).promise();
  } catch (e) {
    throw e;
  }
  await Promise.all(
    s3List.Contents.map(async (file) => {
      let f = await getFiles(file);
      localList.push(f);
    })
  ).then(res => { console.log('Get Successful' + res) })
    .catch(err => { console.log('error' + err) });
  await Promise.all(
    localList.map(async (file) => {
      convertFile(file);
    })
  ).then(res => { console.log('Convert Successful' + res) })
    .catch(err => { console.log('error' + err) });
  // The subject of this question: dirSync's callback is async, so the
  // putObject promises it creates are never awaited, and the handler
  // returns before the uploads finish.
  dirSync(localDir, async (filePath, stat) => {
    let bucketPath = filePath.substring(5);
    let uploadParams = {
      Bucket: 'to-pdf-test',
      Key: `${bucketPath}`,
      Body: fs.readFileSync(filePath)
    };
    console.log('DS fPath ' + filePath);
    console.log('DS bPath ' + bucketPath);
    console.log(uploadParams.Body);
    try {
      let res = await s3.putObject(uploadParams).promise();
      console.log('Upload Complete', res);
    } catch (e) {
      console.log('Error', e);
    }
  });
};
async function getFiles(file) {
  let filePath = `/tmp/${file.Key}`;
  let fileParams = {
    Bucket: 'to-pdf-test',
    Key: file.Key
  };
  try {
    const { Body: inputFileBuffer } = await s3.getObject(fileParams).promise();
    fs.writeFileSync(filePath, inputFileBuffer);
  } catch (e) {
    throw (e);
  }
  return filePath;
}

function convertFile(file) {
  const noPath = getFilename(file);
  const fPath = getFilePath(file);
  if (path.extname(noPath) === '.msg') {
    execSync(`cd ${fPath} && ${command} ${noPath}`);
  } else {
    console.log(`${noPath} not run. Not .msg`);
  }
  fs.unlinkSync(file);
}

function getFilename(fullPath) {
  return fullPath.replace(/^.*[\\\/]/, '');
}

function getFilePath(fullPath) {
  return fullPath.substring(fullPath.lastIndexOf('/'), 0);
}

function dirSync(dirPath, callback) {
  fs.readdirSync(dirPath).forEach((name) => {
    var filePath = path.join(dirPath, name);
    var stat = fs.statSync(filePath);
    if (stat.isDirectory()) {
      dirSync(filePath, callback);
    } else {
      callback(filePath, stat);
    }
  });
}
I had the upload working in a previous version of this function, thanks to this post from when it was working.
My solution for the moment: read the local directory separately, push the paths of the files to localList, then .map over the array of paths to upload them.
localList = [];
// read dir and push to localList array
// (dirSync is synchronous and returns undefined, so this await is a no-op)
await dirSync(localDir, (filePath, stat) => {
  localList.push(filePath);
});
console.log(localList);
await Promise.all(
  localList.map(async (file) => {
    let bucketPath = file.substring(5);
    let uploadParams = {
      Bucket: 'to-pdf-test',
      Key: bucketPath,
      Body: fs.readFileSync(file)
    };
    console.log('Uploading', file);
    await s3.putObject(uploadParams).promise()
      .then((res) => { console.log('Upload Successful', bucketPath) })
      .catch((err) => { console.log('error' + err) });
  })
);
If there is a better (or proper) way to do this, someone let me know :)
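An alternative sketch built from the same pieces (dirSync, s3, and the bucket name above): since dirSync itself is synchronous, its callback can collect one upload promise per file, and the async handler then awaits them all before returning. Treat this as a sketch, not a drop-in:

const uploads = [];
dirSync(localDir, (filePath, stat) => {
  uploads.push(s3.putObject({
    Bucket: 'to-pdf-test',
    Key: filePath.substring(5), // strip the leading '/tmp/'
    Body: fs.readFileSync(filePath)
  }).promise());
});
await Promise.all(uploads); // keeps the Lambda alive until every put finishes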
For a school project I have to rewrite a JSON object using Node's fs module. I wrote a module, and if I use deleteCity or addCity on their own, they work perfectly fine. But when I call both one after another, only one works. In my JSON file there is one array with 10 objects. In my main JavaScript file I require my module and call the addCity and deleteCity functions.
// Modules
const fs = require("fs");

// Variables
const citiesPath = "./cities.json";

// Functions
const deleteCity = (name) => {
  fs.readFile(citiesPath, "utf-8", (err, jstring) => {
    if (err) {
      console.log(err);
    }
    try {
      let data = JSON.parse(jstring);
      for (let i = 0; i < data.length; i++) {
        if (data[i].Name == name) {
          data.splice(i, 1);
        }
      }
      fs.writeFile(citiesPath, JSON.stringify(data, null, 2), (err) => {
        if (err) {
          console.log(err);
        }
      });
    } catch (error) {
      console.log(error);
    }
  });
};

const addCity = (obj) => {
  fs.readFile(citiesPath, "utf-8", (err, jstring) => {
    if (err) {
      console.log(err);
    }
    try {
      let data = JSON.parse(jstring);
      data.push(obj);
      fs.writeFile(citiesPath, JSON.stringify(data, null, 2), (err) => {
        if (err) {
          console.log(err);
        }
      });
    } catch (error) {
      console.log(error);
    }
  });
};

const showCity = () => {
  fs.readFile(citiesPath, "utf-8", (err, jstring) => {
    if (err) {
      console.log(err);
    }
    try {
      let data = JSON.parse(jstring);
      console.log(data);
    } catch (error) {
      console.log(error);
    }
  });
};

// Exports
module.exports = {
  deleteCity,
  addCity,
  showCity
};
I suppose you are calling both functions one right after the other, i.e.
deleteCity("London");
addCity({ "Name": "Paris" });
The problem here is that the calls are both asynchronous and the second one will start before the first call terminates, so basically before a city has been deleted.
If this is a school project, the simplest way to fix your code is to use the synchronous versions of the fs calls, fs.readFileSync and fs.writeFileSync:
// Modules
const fs = require("fs");

// Variables
const citiesPath = "./cities.json";

// Functions
const deleteCity = (name) => {
  const jstring = fs.readFileSync(citiesPath, "utf-8");
  let data = JSON.parse(jstring);
  for (let i = 0; i < data.length; i++) {
    if (data[i].Name == name) {
      data.splice(i, 1);
    }
  }
  fs.writeFileSync(citiesPath, JSON.stringify(data, null, 2));
};

const addCity = (obj) => {
  const jstring = fs.readFileSync(citiesPath, "utf-8");
  let data = JSON.parse(jstring);
  data.push(obj);
  fs.writeFileSync(citiesPath, JSON.stringify(data, null, 2));
};

const showCity = () => {
  const jstring = fs.readFileSync(citiesPath, "utf-8");
  let data = JSON.parse(jstring);
  console.log(data);
};

// Exports
module.exports = {
  deleteCity,
  addCity,
  showCity
};
Note that you don't need to catch the errors only to log them inside your synchronous functions. If an error is thrown and not caught, Node.js will log it for you.
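If you later want the asynchronous style back without the interleaving problem, fs.promises plus await gives the same ordering, provided every caller awaits. A sketch of deleteCity in that style (addCity would follow the same pattern; citiesPath is the constant from the module above):

const fsp = require("fs").promises;

const deleteCity = async (name) => {
  const data = JSON.parse(await fsp.readFile(citiesPath, "utf-8"));
  const remaining = data.filter((city) => city.Name !== name);
  await fsp.writeFile(citiesPath, JSON.stringify(remaining, null, 2));
};

// Callers must chain the calls so they can't overlap:
// await deleteCity("London");
// await addCity({ Name: "Paris" });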
The scenario is: I have two large CSV files, csv1.csv and csv2.csv. Both files have an email column. I have to read csv1.csv row by row, check whether each email exists in csv2.csv, and if it matches, write the matching row of csv2.csv to csv3.csv. I have tried read streams as well, but they are not working as expected. Any guidance or help is appreciated.
Thanks to all in advance.
Following are the CSV files
csv1.csv
email,header1,header2
test1@example.com,test1,test1
test2@example.com,test2,test2
test3@example.com,test3,test3
test4@example.com,test4,test4
test5@example.com,test5,test5
csv2.csv
email,header1,header2
test4@example.com,test4,test4
test5@example.com,test5,test5
test6@example.com,test6,test6
test7@example.com,test7,test7
test8@example.com,test8,test8
Following is the code that I tried
const fs = require('fs');
const csv = require('fast-csv');

class CsvHelper {
  static write(filestream, rows, options) {
    return new Promise((res, rej) => {
      csv.writeToStream(filestream, rows, options)
        .on('error', err => rej(err))
        .on('finish', () => res());
    });
  }

  constructor(opts) {
    this.headers = opts.headers;
    this.path = opts.path;
    this.writeOpts = {
      headers: this.headers,
      includeEndRowDelimiter: true
    };
  }

  create(rows) {
    return CsvHelper.write(fs.createWriteStream(this.path, { flags: 'a' }), rows, { ...this.writeOpts });
  }

  append(rows) {
    return CsvHelper.write(fs.createWriteStream(this.path, { flags: 'a' }), rows, {
      ...this.writeOpts,
      writeHeaders: false,
    });
  }
}
class Helper {
  async matchCsv(outerRow) {
    try {
      const filePath2 = "csv2.csv";
      const filePath3 = "csv3.csv";
      let row = [];
      const csvFile = new CsvHelper({
        path: filePath3,
        headers: ["Email", "Active"]
      });
      return new Promise((resolve, reject) => {
        fs.createReadStream(filePath2)
          .on("error", err => {
            reject(err);
          })
          .pipe(csv.parse({ headers: true }))
          .on("error", err => {
            reject(err);
          })
          .on("data", async innerRow => {
            if (outerRow["email"] === innerRow["email"]) {
              console.log("====================");
              console.log("match found");
              console.log(innerRow);
              console.log("====================");
              row.push([innerRow["email"], "yes"]);
              console.log("row: ", row);
            }
          })
          .on("finish", async () => {
            if (!fs.existsSync(filePath3)) {
              await csvFile.create(row).then(() => {
                resolve("Done from matchCsv");
              });
            } else {
              await csvFile.append(row).then(() => {
                resolve("Done from matchCsv");
              });
            }
          });
      });
    } catch (err) {
      throw (err);
    }
  }

  async generateCsv() {
    try {
      const filePath1 = "csv1.csv";
      return new Promise((resolve, reject) => {
        fs.createReadStream(filePath1)
          .on("error", err => {
            reject(err);
          })
          .pipe(csv.parse({ headers: true }))
          .on("error", err => {
            reject(err);
          })
          .on("data", async outerRow => {
            const result = await this.matchCsv(outerRow);
            console.log("result: ", result);
          })
          .on("finish", () => {
            resolve("Generated csv3.csv file.");
          });
      });
    } catch (err) {
      throw (err);
    }
  }
}

async function main() {
  const helper = new Helper();
  const result = await helper.generateCsv();
  console.log(result);
}

main();
So the question is a little confusing, but I think I know what you want. Here's what I would do to check whether the email exists: it adds all the rows to an array, cycles through them, and when the email address matches the one you're looking for, it can do something else... I think you said you wanted to write the matching row to a CSV file again, but that should be simple enough.
const csv = require('csv-parser');
const fs = require('fs');

const filepath = "./example_data.csv";
const emailAdd = "myemail@email.com";
var rowsArr = [];

fs.createReadStream(filepath)
  .on('error', () => {
    // handle error
  })
  .pipe(csv())
  .on('data', (row) => {
    rowsArr.push(row);
  })
  .on('end', () => {
    // note: i < rowsArr.length (not <=), or the last pass reads past the array
    for (var i = 0; i < rowsArr.length; i++) {
      if (rowsArr[i].emailAddress == emailAdd) {
        // do something
      }
    }
  });
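For the original task specifically, re-reading csv2.csv once per row of csv1.csv is the expensive part. A sketch of a single-pass approach with fast-csv (the library already used in the question): collect csv1's emails into a Set, then stream csv2 once and write each matching row straight to csv3:

const fs = require('fs');
const csv = require('fast-csv');

const emails = new Set();
fs.createReadStream('csv1.csv')
  .pipe(csv.parse({ headers: true }))
  .on('data', row => emails.add(row.email))
  .on('end', () => {
    const out = csv.format({ headers: true });
    out.pipe(fs.createWriteStream('csv3.csv'));
    fs.createReadStream('csv2.csv')
      .pipe(csv.parse({ headers: true }))
      .on('data', row => { if (emails.has(row.email)) out.write(row); })
      .on('end', () => out.end());
  });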
The login function uses the ExecuteSQL function to check whether a user exists. I'm getting the following error while I'm running this file with async/await.
ReferenceError: Cannot access 'result' before initialization
at /Users/naseefali/Documents/Projects/common_express_api/data/DL.js:100:25
at async login (/Users/naseefali/Documents/Projects/common_express_api/data/DL.js:99:24)
SELECT fUserPwd FROM tblUser WHERE fUserID ='ADMIN'
{
recordsets: [ [ [Object] ] ],
recordset: [ { fUserPwd: '006060061500675006630067300667' } ],
output: {},
rowsAffected: [ 1 ]
}
Code
async function testConnection() {
  try {
    const pool = await getConnection();
    if (pool) {
      const result = await pool.request()
        .query('SELECT * FROM tblUser', function (err, sqlResult) {
          if (err) {
            console.log(err);
          } else {
            console.log(sqlResult);
          }
        });
    } else console.log(pool);
  } catch (err) {
    console.log(err);
  }
}

async function ExecuteSQL(strSQL) {
  try {
    const pool = await getConnection();
    if (pool) {
      const result = await pool.request()
        .query(strSQL, async function (err, sqlResult) {
          if (err) {
            console.log(err);
          } else {
            console.log(strSQL);
            console.log(sqlResult);
            return sqlResult; // returns from the callback only, not from ExecuteSQL
          }
        });
    } else console.log(pool);
  } catch (err) {
    console.log(err);
  }
}

async function login(strUID) {
  const strSQL = `SELECT fUserPwd FROM tblUser WHERE fUserID ='${strUID}'`;
  try {
    const result = await ExecuteSQL(strSQL).then(await function () {
      console.log(result);
    });
  } catch (err) {
    console.log(err);
  }
}

login('ADMIN');
Because you didn't return a value from the then callback, and you access result before it has been assigned:
async function login(strUID) {
  const strSQL = `SELECT fUserPwd FROM tblUser WHERE fUserID ='${strUID}'`;
  try {
    const result = await ExecuteSQL(strSQL).then(await function () {
      console.log(result);
    });
  } catch (err) {
    console.log(err);
  }
}
Try changing it to
const result = await ExecuteSQL(strSQL).then(function (data) {
  return data;
});
console.log(result);
or just
const result = await ExecuteSQL(strSQL);
console.log(result);
Here I'm just returning the value from ExecuteSQL. I did:
async function ExecuteSQL(strSQL) {
  try {
    const pool = await getConnection();
    // you don't need to check for the pool, because getConnection() will throw if there is an error
    const result = await pool.request().query(strSQL);
    return result;
  } catch (err) {
    console.log(err);
  }
}

async function login(strUID) {
  const strSQL = `SELECT fUserPwd FROM tblUser WHERE fUserID ='${strUID}'`;
  try {
    const result = await ExecuteSQL(strSQL);
    console.log(result);
    return result;
  } catch (err) {
    console.log(err);
  }
}
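A hypothetical usage sketch, based on the result shape logged earlier in the question (recordset is an array of row objects):

login('ADMIN').then(result => {
  if (result && result.recordset.length > 0) {
    console.log(result.recordset[0].fUserPwd);
  }
});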
I am coding a POST request which downloads the HTML of every URL, zips it, and emails it back. This should all happen in the backend. I am storing all the data in an array and extracting the first element to start these operations.
I have a while loop inside which I am calling some functions. Each function takes a certain time to execute.
I used async, await and promises to make sure they run one after the other.
Coming to my problem: my while loop starts its next iteration before all the functions inside it have finished executing.
app.post('/?', async (req, res) => {
  var urls = req.query.urls;
  var email = req.query.email;
  var new_stack = [urls, email];
  stack.push(new_stack);
  res.send("Mail sent");
  if (isFunctionRunning === false) { // initially it is false
    console.log(isFunctionRunning, stack.length);
    send_mails();
  }
});

const getGoogleIndexHTML = (url) => {
  return new Promise((resolve, reject) => {
    request(url, (err, res, body) => err ? reject(err) : resolve(body));
  });
};

const some_function_to_download = async (url) => {
  try {
    const a = url.split(".");
    let googleIndexHTML = await getGoogleIndexHTML(url);
    await fs.writeFile(directory + '/' + a[1] + '.html', googleIndexHTML, (err) => {
      if (err) throw err;
    });
    console.log('File created.');
  } catch (err) {
    console.log(err);
  }
};

const html_to_zip_file = async () => {
  await zipper.zip(directory, function (error, zipped) {
    if (!error) {
      zipped.compress();
      zipped.save('./package.zip', function (error) {
        if (!error) {
          console.log("Saved successfully!");
        }
      });
    } else {
      console.log(error);
    }
  });
};

const send_mails = async () => {
  while (stack.length > 0) {
    isFunctionRunning = true;
    var a = stack.shift();
    var urls = a[0];
    var collection_urls = urls.split(",");
    var to_email = a[1];
    rimraf(directory, function () {
      console.log("done");
    });
    fs.mkdirSync(directory);
    for (url of collection_urls) {
      await some_function_to_download(url); // 5 sec per download
    }
    await html_to_zip_file() // takes 5 sec to zip
      .then(result => {
        transporter.sendMail(set_mail_options(to_email)) // 2 sec to send mail
          .then(result => {
            console.log("Mail sent");
          })
          .catch(err => {
            console.log(err);
          });
      })
      .catch(err => {
        console.log(err);
      });
    console.log("reached"); // this is reached before zip is done and mail sent. I want to prevent this
  }
  isFunctionRunning = false;
};
You need to return transporter.sendMail in sendMail, fs.writeFile in someFunctionToDownload, and zipper.zip in htmlToZipFile, otherwise the await won't work as expected (I'm assuming that they actually do return promises; I'm only familiar with fs.writeFile).
Also: CamelCase is used in JS, not snake_case 🙃
And are you sure rimraf is synchronous?
const sendMails = async () => {
  while (stack.length > 0) {
    isFunctionRunning = true;
    const [urls, toEmail] = stack.shift();
    var collectionUrls = urls.split(",");
    rimraf(directory, function () {
      console.log("done");
    });
    await fs.mkdir(directory); // note: this only waits if fs is fs.promises
    await Promise.all(collectionUrls.map(someFunctionToDownload)); // 5 sec per download
    await htmlToZipFile() // takes 5 sec to zip
      .then(result => transporter.sendMail(set_mail_options(toEmail))) // 2 sec to send mail
      .then(result => {
        console.log("Mail sent");
      })
      .catch(err => {
        console.log(err);
      });
    console.log("reached"); // this is reached before zip is done and mail sent. I want to prevent this
  }
  isFunctionRunning = false;
};

const someFunctionToDownload = async url => {
  const a = url.split(".");
  const googleIndexHTML = await getGoogleIndexHTML(url);
  // callback-style fs.writeFile returns undefined; fs.promises.writeFile
  // would return an awaitable promise here
  return fs.writeFile(`${directory}/${a[1]}.html`, googleIndexHTML, err => {
    if (err) throw err;
  });
};

const htmlToZipFile = async () => {
  return zipper.zip(directory, function (error, zipped) {
    if (!error) {
      zipped.compress();
      zipped.save("./package.zip", function (error) {
        if (!error) {
          console.log("Saved successfully!");
        }
      });
    } else {
      console.log(error);
    }
  });
};
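One caveat with htmlToZipFile above: zipper.zip here is callback-based, so returning it likely returns undefined and the await resolves before package.zip is actually saved. A sketch that wraps the whole zip-and-save in an explicit Promise, using only the zipper calls shown in the question:

const htmlToZipFile = () =>
  new Promise((resolve, reject) => {
    zipper.zip(directory, (error, zipped) => {
      if (error) return reject(error);
      zipped.compress();
      zipped.save('./package.zip', err => (err ? reject(err) : resolve()));
    });
  });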
Try using the following
while (stack.length > 0) {
  isFunctionRunning = true;
  var a = stack.shift();
  var urls = a[0];
  var collection_urls = urls.split(",");
  var to_email = a[1];
  rimraf(directory, function () {
    console.log("done");
  });
  fs.mkdirSync(directory);
  for (url of collection_urls) {
    await some_function_to_download(url); // 5 sec per download
  }
  try {
    const result = await html_to_zip_file(); // takes 5 sec to zip
    const sendMailResult = await transporter.sendMail(set_mail_options(to_email));
  } catch (e) {
    console.log(e);
  }
  console.log("reached");
}
Since html_to_zip_file() and the sendMail call are independent, we can use
const result = await Promise.all([html_to_zip_file(),transporter.sendMail(set_mail_options(to_email))]);