How can I access functions output across Node.js? - javascript

I have 3 files:
Ingredients.js
const fs = require("fs");
const readline = require('readline');
const stream = require('stream');
// Reads ingredients.txt line-by-line and builds { <lowercased line>: 0 }.
// NOTE(review): listIngredients is only console.logged on 'close' and the
// function returns undefined, so callers (compare.js) can never reach the
// data — this is the root cause of the question.
const ingredients = () => {
const instream = fs.createReadStream('ingredients.txt');
// bare `new stream` is just a dummy output for readline's legacy
// (input, output) signature
const outstream = new stream;
const rl = readline.createInterface(instream, outstream);
const listIngredients = {};
// one key per file line, lower-cased, initialised to a zero count
rl.on('line', function (line) {
let lower = line.toLowerCase();
listIngredients[lower] = 0;
});
// fires after the whole file has been read; the data never leaves this scope
rl.on('close', function () {
console.log('listIngredients', listIngredients);
});
}
module.exports = ingredients;
cookbook.js:
let fs = require("fs");
// Reads cook-book.txt, lower-cases it and logs every word it contains.
// NOTE(review): like ingredients.js, the word list is only logged inside the
// readFile callback and never returned, so compare.js cannot consume it.
const book = () => {
const regex = /\b(\w+)\b/g;
fs.readFile('cook-book.txt', 'utf8', function (err, data) {
// NOTE(review): `err` is never checked — if the file is missing, `data`
// is undefined and the toLowerCase() call below throws. TODO: handle it.
let book = data;
let lower = book.toLowerCase();
let split = lower.match(regex);
console.log(split);
});
}
module.exports = book;
compare.js
const ingredients = require('./ingredients');
const book = require('./book');
I'm trying to increase the key values of ingredients every time they are mentioned in the cookbook. I think this should go into a different JS file to keep things cleaner.
Whilst I can console.log the information from the above files, I cannot figure out how to actually access the data and make changes to the ingredients object in compare.js.

As others have noticed, your ingredients and book variables are functions that keep the required information inside their own scope and never return it. To fix this, you have to return the values.
Since you're working with asynchronous operations, your functions should be wrapped in Promises to handle the flow correctly.
This code should help you:
const fs = require('fs');
const readline = require('readline');
const { Writable } = require('stream');
const fsp = fs.promises;
// ingredients.js
// ingredients.js
// Resolves with an object mapping each lower-cased line of ingredients.txt
// to an initial count of 0; rejects on a read error.
const getIngredients = () =>
  new Promise((resolve, reject) => {
    const input = fs.createReadStream('ingredients.txt');
    const output = new Writable(); // dummy output for readline's legacy signature
    const reader = readline.createInterface(input, output);
    const counts = {};
    reader.on('line', (line) => {
      counts[line.toLowerCase()] = 0;
    });
    reader.on('error', reject);
    reader.on('close', () => resolve(counts));
  });
// cookbook.js
// cookbook.js
// Resolves with the lower-cased words of cook-book.txt, rejects on read error.
// Fix: the original wrapped an async function inside `new Promise(...)` — the
// async-executor anti-pattern. fsp.readFile already returns a Promise, so an
// ordinary async function gives the same behaviour with correct error
// propagation and no double wrapping.
const getBookContent = async () => {
  const wordRegEx = /\b(\w+)\b/g;
  const book = await fsp.readFile('cook-book.txt', 'utf8');
  return book.toLowerCase().match(wordRegEx);
};
// compare.js
// compare.js
// Async IIFE: load the ingredient map first, then the book's word list, and
// print both (same sequential order as before).
(async () => {
  const ingredientCounts = await getIngredients();
  const bookWords = await getBookContent();
  console.log(ingredientCounts);
  console.log(bookWords);
})();
The names of the functions have been changed to better represent what they do.
I've also used an async IIFE to take advantage of async/await syntax; however, you can still work with the Promises directly.

Related

Joining stores in IndexedDB with promises

Here's some code to join something from 3 object stores:
let db;
// Open the DB; each get() issues the next request from its own onsuccess
// handler so all three reads share the single transaction `tran`.
indexedDB.open('db', 1).onsuccess = ev => {
db = ev.target.result;
const tran = db.transaction(['s1', 's2', 's3']);
tran.objectStore('s1').get('third').onsuccess = ev1 =>
tran.objectStore('s2').index('connectTo').get('third').onsuccess = ev2 =>
tran.objectStore('s3').index('connectTo').get('third').onsuccess = ev3 => {
// merge the three records; later spreads win on key collisions
const [res1, res2, res3] = [ev1.target.result, ev2.target.result, ev3.target.result];
const result = {...res1, ...res2, ...res3};
......
}
}
Can I use promises or other means like async/await to avoid the heavy nesting? It'd be good if I can put these query processes in a function and get the result object as the return value.
Something like this should work.
// IDBRequest is not a thenable: `await indexedDB.open(...)` resolves to the
// request object immediately instead of waiting for onsuccess, so the
// original code never worked. Each request must be wrapped in a Promise that
// settles from its success/error events.
const asPromise = (request) =>
  new Promise((resolve, reject) => {
    request.onsuccess = () => resolve(request.result)
    request.onerror = () => reject(request.error)
  })
const someFunction = async () => {
  const db = await asPromise(indexedDB.open('db', 1))
  const tran = db.transaction(['s1', 's2', 's3'])
  // issue all three gets synchronously, then await them together — an
  // IndexedDB transaction may auto-commit if it sees no pending requests
  // while the function is suspended between awaits
  const [res1, res2, res3] = await Promise.all([
    asPromise(tran.objectStore('s1').get('third')),
    asPromise(tran.objectStore('s2').index('connectTo').get('third')),
    asPromise(tran.objectStore('s3').index('connectTo').get('third')),
  ])
  const result = { ...res1, ...res2, ...res3 }
  return result // resolve with the joined record so callers can use it
}
someFunction().catch(console.error) // surface failures instead of an unhandled rejection
Personally I would store the results like this and eliminate the need for copies (if possible for you).
const result = { ...ev1.target.result, ...ev2.target.result, ...ev3.target.result }

Promise.all() stops working before finishing

I have a very simple script that gets some info by mapping over an array of around 150 records. The code seems to work fine with a smaller number of records, but every time I run it with these 150 records it just stops before finishing, and I think it might be a Promise.all problem.
any idea?
code:
const request = require('request');
const axios = require('axios');
const cheerio = require('cheerio');
const fs = require('fs').promises;
// NOTE(review): module-level accumulator that is never used below — safe to remove.
let champions = [];
// Scrapes { champName, skins } from every href and writes the collected
// results to json/champions-skins.json.
const getChampData = async hrefs => {
// fires one request per href immediately; Promise.all then waits for all
const requests = hrefs.map(async ({ href }) => {
try {
const html = await axios.get(href);
const $ = cheerio.load(html.data);
const champName = $('.style__Title-sc-14gxj1e-3 span').text();
let skins = [];
$('.style__CarouselItemText-sc-1tlyqoa-16').each((_, el) => {
const skinName = $(el).text();
skins.push(skinName);
});
const champion = {
champName,
skins
};
console.log(champion);
return champion;
} catch (err) {
// NOTE(review): the error is logged but nothing is returned, so this
// mapped promise resolves to `undefined` and the results array gets
// holes; firing ~150 requests at once may also be why larger runs fail
// — presumably rate limiting; the p-map version below throttles this.
console.error(err);
}
});
const results = await Promise.all(requests);
await fs.writeFile('json/champions-skins.json', JSON.stringify(results));
return results;
};
edit #1:
I used a package called p-map with it and now everything works just fine!
const axios = require('axios');
const pMap = require('p-map');
const cheerio = require('cheerio');
const fs = require('fs').promises;
// Scrapes { champName, skins } for every href (p-map limits concurrency),
// writes the list to disk, and resolves with it.
const getChampData = async hrefs => {
  // const champions = JSON.parse(await fs.readFile('json/champions.json'));
  try {
    const champsList = await pMap(hrefs, async ({ href }) => {
      const { data } = await axios(href);
      const $ = cheerio.load(data);
      const champName = $('.style__Title-sc-14gxj1e-3 span').text();
      const skins = [];
      $('.style__CarouselItemText-sc-1tlyqoa-16').each((_, el) => {
        skins.push($(el).text());
      });
      const champion = { champName, skins };
      console.log(champion);
      return champion;
    });
    await fs.writeFile(
      'champions-with-skins-list.json',
      JSON.stringify(champsList)
    );
    // fix: without this return the function resolved to `undefined`, unlike
    // the original Promise.all version, which returned the results
    return champsList;
  } catch (err) {
    console.error(err.message);
  }
};
A return statement is missing in the error handler. It looks like the problem is with one of the URLs being fetched.
// Same scraper, but the catch branch now returns a value.
const getChampData = async hrefs => {
const requests = hrefs.map(async ({ href }) => {
try {
const html = await axios.get(href);
// rest of the code
} catch (err) {
console.error(err);
// returning [] keeps a defined placeholder in the Promise.all results
// instead of an `undefined` hole for each failed URL
return []
}
});
const results = await Promise.all(requests);
await fs.writeFile("json/champions-skins.json", JSON.stringify(results));
return results;
};

How to write this with Promises?

var pdfParser = require('pdf-parser')
var fs = require('fs')
var PDF_PATH = __dirname + '/pdfs'
var results = []
var failed = []
fs.readdir(PDF_PATH, function(err, files){
if(err){
return console.log(err)
}
// kicks off one asynchronous parse per file; the loop does NOT wait for them
for(const file of files){
// NOTE(review): declared but never used
let the_ent = {
'name': '',
'other data': []
}
pdfParser.pdf2json(PDF_PATH + '/' + file, function(error, pdf){
if(error != null){
console.log(error)
}else if(pdf['pages'] == undefined){
failed.push(file)
console.log(file +' failed')
}else{
//populate 'results' array
}
console.log(/*pdf_data*/)
results.push(/*pdf_data*/)
})
}
// NOTE(review): everything below runs synchronously, before any pdf2json
// callback has fired — which is why the arrays log empty here.
console.log(results)
console.log(failed)
results = JSON.stringify(results)
//fs.writeFileSync() write results to json
})
I don't know what is wrong with me this morning, I can't work out how to write this in async; obviously the logs/writefile at the bottom fire as soon as the script executes.
I have tried wrapping in async functions and awaiting the readdir / pdf parsing instead of using callbacks - clearly not correctly. I'm just trying to parse every pdf in a folder - push what I want to some arrays and then log them once the loop finishes zzz.
Wrap the smallest asynchronous tasks into Promises, then use async/await to combine them:
// the Promise wrapper:
// the Promise wrapper: adapts pdf2json's error-first callback to a Promise.
// (fix: the original one-liner was missing a closing parenthesis and did not
// parse)
const parsePdf = file =>
  new Promise((res, rej) =>
    pdfParser.pdf2json(file, (err, r) => (err ? rej(err) : res(r)))
  );
(async function () { // start an asynchronous context
  const PDF_PATH = __dirname + '/pdfs';
  const results = []; // prefer const over let
  const failed = [];
  // reading files in a promising way is already provided natively:
  const files = await fs.promises.readdir(PDF_PATH);
  for (const file of files) { // in series; parallel would probably be faster
    const pdf = await parsePdf(PDF_PATH + '/' + file);
    if (pdf.pages === undefined) { // prefer equality (===) over comparison (==)
      failed.push(file);
      console.log(file + ' failed');
    } else {
      // populate 'results' array
    }
  }
  console.log(results, failed);
})().catch(console.error); // surface readdir/parse failures instead of an unhandled rejection
You can probably process the files in parallel too.
I would promisify the async operations and use async/await. For the fs operations, use the new fs.promises API. For others, use util.promisify() to make promisified versions.
The resolved value of the parsePDFs function I create will be an array of JSON and an array of failed filenames so you get both pieces of information back:
const util = require('util');
const pdfParser = require('pdf-parser');
// make a promisified version of the callback-style parser.
// (fix: `const pdfParser.pdf2jsonP = ...` is a SyntaxError — a property
// access cannot be const-declared; assign the property instead)
pdfParser.pdf2jsonP = util.promisify(pdfParser.pdf2json);
const fsp = require('fs').promises;
const path = require('path');
const PDF_PATH = path.join(__dirname, 'pdfs');
// Parse every PDF in `dir` sequentially.
// Resolves to { results, failed }: the parsed documents, and the filenames
// that errored or produced no pages.
async function parsePDFs(dir) {
    const files = await fsp.readdir(dir);
    const results = [];
    const failed = [];
    for (const file of files) {
        // (the unused `the_ent` object from the original has been removed)
        try {
            let pdf = await pdfParser.pdf2jsonP(path.join(dir, file));
            if (!pdf || pdf.pages === undefined) {
                throw new Error("pdf.pages is empty")
            }
            results.push(pdf);
        } catch (e) {
            console.log(e);
            failed.push(file);
        }
    }
    // this will be the resolved value of the returned promise
    return { results, failed };
}
parsePDFs(PDF_PATH).then(data => {
    console.log("failed filenames: ", data.failed); // (fix: a missing comma here was a SyntaxError)
    console.log("json results: ", data.results);
    // do something with data.results and data.failed
}).catch(err => {
    console.log(err);
});
Note: You declare, but never use the variable the_ent.
You can use util.promisify to promisify the callback-based functions:
// util.promisify turns these error-first-callback APIs into Promise-returning ones
const readdir = util.promisify(fs.readdir);
const reader = util.promisify(pdfParser.pdf2json);
Minimal demo:
const fs = require('fs');
const util = require('util');
const pdfParser = require('pdf-parser');
// promisify the callback-style APIs once, up front
const readdir = util.promisify(fs.readdir);
const reader = util.promisify(pdfParser.pdf2json);
const PDF_PATH = __dirname + '/pdfs';
// parse every pdf in the folder sequentially, then log the collected output
(async () => {
  const result = [];
  for (const file of await readdir(PDF_PATH)) {
    result.push(await reader(PDF_PATH + '/' + file));
  }
  console.info(result);
})();

'pipe' function in Javascript not populating from CSV as expected

I had this code working earlier, but made some changes and I'm not sure what I did to break it. The path to the .csv file is correct, and the code seems correct, but the array raw_data is empty after the function call.
require('./trip.js');
const parser = require('csv-parser');
const fs = require('fs');
let raw_data = [];
// Streams trips.csv through csv-parser, pushing each row into raw_data.
// NOTE(review): this returns undefined, so the `await readFile()` below does
// not actually wait for the stream — the length is logged before any 'data'
// event has fired, which is why the array appears empty.
function readFile() {
fs.createReadStream('./trips.csv')
.pipe(parser())
.on('data', (data) => raw_data.push(data))
.on('end', () => console.log('CSV has been piped into an array'));
}
const trips = async () => {
// awaiting undefined resolves immediately on the next microtask
await readFile();
console.log(raw_data.length)
};
I expect the raw_data array to contain 9999 items. It contains zero. I am also not getting the console.log statement to execute on 'end'.
readFile must return a Promise, like this:
require('./trip.js');
const parser = require('csv-parser');
const fs = require('fs');
let raw_data = [];
// Returns a Promise that settles when the stream finishes, so callers can
// genuinely `await readFile()`.
// Fix: the original never rejected — a missing file or a parse failure left
// the Promise pending forever and `trips` would hang.
function readFile() {
  return new Promise((resolve, reject) =>
    fs.createReadStream('./trips.csv')
      .on('error', reject) // e.g. ENOENT on the csv file
      .pipe(parser())
      .on('error', reject) // malformed CSV
      .on('data', (data) => raw_data.push(data))
      .on('end', resolve)
  );
}
const trips = async () => {
  await readFile();
  console.log(raw_data.length)
};

How to set variable = a value from a function result inside async function

Inside a function, I would like to set the value of a variable (foldersInDir) to the results of getting the contents of a directory using fs.readdir();
I thought using await would force the console.log line to wait for a response, but it's not.
How can I set foldersInDir = the return value?
/*Begin function*/
// NOTE(review): the callback form of fs.readdir does not return a Promise, so
// `await` here waits on undefined and foldersInDir is always undefined. The
// callback's `return items` goes back to fs internals, not to this scope —
// which is exactly the symptom described above.
const listContents = async (myPath) => {
var fs = require('fs');
let foldersInDir = await fs.readdir(myPath, function(err, items) {
console.log(items); //works
return items;
});
console.log(foldersInDir); //does not work, undefined
}
You need to convert readdir to a promise, e.g.:
// Promise wrapper for fs.readdir.
// Fix: Node's fs callbacks follow the error-first convention where `err` is
// `null` on success — never `undefined`. The original test
// `err !== undefined` was therefore true even on success, so every
// successful read was rejected (with null). Truthiness handles both cases.
const foldersPromised = (path) =>
  new Promise((resolve, reject) =>
    fs.readdir(path, (err, items) => (err ? reject(err) : resolve(items)))
  );
// usage — note that `await` is only valid inside an async function:
async function listFolders(myPath) {
  try {
    const foldersInDir = await foldersPromised(myPath);
    return foldersInDir;
  } catch (err) {
    console.log(err);
  }
}
const fs = require('fs');
// Synchronous alternative: fs.readdirSync blocks until the listing is ready,
// so no callback or Promise is involved at all.
const test = () => fs.readdirSync('.');
console.log(test());
Edit: sorry, need to promisify() the function
const fs = require('fs');
const { promisify } = require('util') // available in node v8 onwards
const readdir = promisify(fs.readdir)
// List the contents of `myPath`, logging success or failure.
// Fix: the original took no parameter and referenced an undefined `myPath`
// inside, while the call site passed 'path/to/folder' — the argument was
// silently ignored and the function always failed with a ReferenceError.
async function listContents(myPath) {
  try { // wrap in try-catch in lieu of .then().catch() syntax
    const foldersInDir = await readdir(myPath) // call promised function
    console.log('OK, folders:', foldersInDir) // success
    return foldersInDir // also resolve with the listing so callers can use it
  } catch (e) {
    console.log('FAIL reading dir:', e) // fail
  }
}
listContents('path/to/folder') // run test
I recommend using the promisify function provided by Node.js to fix the problem. This function will convert a callback-based function to a promise-based function, which can then be used using the await keyword.
const fs = require('fs');
const {
promisify
} = require('util');
const readdirAsync = promisify(fs.readdir);
/*Begin function*/
// Log the directory listing for `myPath` once the promisified readdir resolves.
const listContents = async (myPath) => {
  const entries = await readdirAsync(myPath);
  console.log(entries);
};

Categories