Node Function Returning Empty Array - javascript

The following Node function returns an empty array, and I'm not sure why. Could this be an async/await issue? I would appreciate any help. Thank you.
const folderPath = '/public/home.html'

function getCircuitAndFuse(folderPath){
    //List containing circuit name with its fuse
    let temporaryList = [];
    let finalCircuitAndFuseList = []
    fs.readFile(__dirname + folderPath, (error, data)=>{
        if(error){
            console.log(`Unable to read file: ${error}`)
        }else{
            var $ = cheerio.load(data)
            $('img').each(function(index, element){
                let getClassAtr = element.attribs.class
                temporaryList.push(getClassAtr.slice(0, getClassAtr.lastIndexOf(" ")))
            })
            finalCircuitAndFuseList = [...new Set(temporaryList)]
        }
    })
    return finalCircuitAndFuseList;
}

let getInfo = getCircuitAndFuse(folderPath)
// Returning empty array
console.log(getInfo)
Server code:
const server = http.createServer(function(req, res){
    res.writeHead(200, {'Content-Type': 'text/plain'})
    res.end()
}).listen(port, ()=>{
    console.log(`Server listening on port ${port}. Press Ctrl-C to terminate...`)
})

getCircuitAndFuse must return a Promise, like this:
function getCircuitAndFuse(folderPath) {
    return new Promise((resolve, reject) => {
        //List containing circuit name with its fuse
        let temporaryList = [];
        fs.readFile(__dirname + folderPath, (error, data) => {
            if (error) {
                console.log(`Unable to read file: ${error}`);
            } else {
                var $ = cheerio.load(data);
                $('img').each(function (index, element) {
                    let getClassAtr = element.attribs.class;
                    temporaryList.push(
                        getClassAtr.slice(0, getClassAtr.lastIndexOf(' '))
                    );
                });
                resolve([...new Set(temporaryList)]);
            }
        });
    });
}
getCircuitAndFuse(folderPath).then((getInfo) => {
    // do something with `getInfo`
});
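Note that in the Promise version above, the error branch only logs and never calls reject, so if readFile fails the returned Promise never settles and a caller awaiting it will hang. A minimal variant that also rejects on failure (assuming fs and cheerio are required as in the question) could look like this:

function getCircuitAndFuse(folderPath) {
    return new Promise((resolve, reject) => {
        fs.readFile(__dirname + folderPath, (error, data) => {
            if (error) {
                // Let the caller decide how to handle the failure
                return reject(new Error(`Unable to read file: ${error}`));
            }
            const $ = cheerio.load(data);
            const temporaryList = [];
            $('img').each(function (index, element) {
                const getClassAtr = element.attribs.class;
                temporaryList.push(getClassAtr.slice(0, getClassAtr.lastIndexOf(' ')));
            });
            resolve([...new Set(temporaryList)]);
        });
    });
}

getCircuitAndFuse(folderPath)
    .then((getInfo) => console.log(getInfo))
    .catch((err) => console.error(err));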

Another alternative to Faruk's answer would be to just use fs.readFileSync instead of wrapping your function in a promise and requiring some of that extra ceremony. Using fs.readFileSync will ensure that your function doesn't return prematurely.
Here is your code rewritten with that in mind:
function getCircuitAndFuse(folderPath) {
    try {
        let temporaryList = [];
        const data = fs.readFileSync(__dirname + folderPath);
        const $ = cheerio.load(data);
        $("img").each(function (index, element) {
            let getClassAtr = element.attribs.class;
            temporaryList.push(getClassAtr.slice(0, getClassAtr.lastIndexOf(" ")));
        });
        return [...new Set(temporaryList)];
    } catch (error) {
        console.log(error);
    }
}
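If you prefer to keep the read non-blocking without hand-rolling a Promise, a third option is fs.promises with async/await. This is a rough sketch assuming Node 10+ (where fs.promises is available) and the same fs/cheerio requires as above:

const fsp = require('fs').promises;

async function getCircuitAndFuse(folderPath) {
    // Read the HTML file without blocking the event loop
    const data = await fsp.readFile(__dirname + folderPath);
    const $ = cheerio.load(data);
    const temporaryList = [];
    $('img').each(function (index, element) {
        const getClassAtr = element.attribs.class;
        temporaryList.push(getClassAtr.slice(0, getClassAtr.lastIndexOf(' ')));
    });
    return [...new Set(temporaryList)];
}

// The function now returns a Promise, so callers must await it or use .then
getCircuitAndFuse(folderPath).then((getInfo) => console.log(getInfo));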

Related

mongoose.Query.prototype.exec patching causes other Query prototype functions to not work

Caching patch code
import mongoose, { mongo } from 'mongoose';
import { createClient } from 'redis';

//redis init
const redisUrl = 'redis://127.0.0.1/6379'
const client = createClient();
client.on('error', (err) => console.log('Redis Client Error', err));
client.connect();
client.on("connect", (err) => {
    if (err) throw err;
    else console.log("Redis Connected!");
});

mongoose.Query.prototype._exec = mongoose.Query.prototype.exec;

mongoose.Query.prototype.cache = function (options = {}) {
    this.useCache = true;
    this.hashKey = JSON.stringify(options.hashKey || '')
    return this;
}

mongoose.Query.prototype.exec = async function () {
    if (!this.useCache) {
        console.log();
        return await mongoose.Query.prototype._exec.apply(this, arguments);
    }
    const key = JSON.stringify(Object.assign({}, this.getQuery(), {
        collection: this.mongooseCollection.name + this.op,
    }))
    //See if we have a value for 'key' in redis
    let cachedValue = await client.sendCommand(['HGET', this.hashKey, key])
    //return if present
    if (cachedValue) {
        cachedValue = JSON.parse(cachedValue);
        //Hydrating Model and Arrays of Models
        return Array.isArray(cachedValue) ?
            cachedValue.map(d => new this.model(d)) :
            this.model(cachedValue);
    }
    //otherwise set into redis
    let result = await mongoose.Query.prototype._exec.apply(this, arguments);
    await client.hSet(this.hashKey, key, JSON.stringify(result));
    client.expire(this.hashKey, 3000)
    return result;
}

let projects = await db.Project.find({
        companyId: userCompanyId,
    })
    .limit(limit)
    .cache({ hashKey: userCompanyId });
I am trying to apply caching to a paginated response. The caching itself works, but query options like skip and limit are not being applied.
I tried the solution from "mongoose.Query.prototype.exec() patching not working with populate", but it did not resolve the issue.
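One thing that stands out in the patch above (an observation, not a verified fix): the cache key is built only from getQuery() and the collection/op, so two queries that differ only in options such as skip or limit hash to the same key, and the first cached page is returned for both. A sketch of an exec override whose key also folds in the query options, assuming a Mongoose version that exposes Query.prototype.getOptions() (this.options could be used instead on older versions):

mongoose.Query.prototype.exec = async function () {
    if (!this.useCache) {
        return await mongoose.Query.prototype._exec.apply(this, arguments);
    }
    // Fold the query options (skip, limit, sort, ...) into the key so that
    // differently-paginated queries do not collide on the same cache entry
    const key = JSON.stringify(Object.assign({}, this.getQuery(), {
        collection: this.mongooseCollection.name + this.op,
        options: this.getOptions(),
    }));
    let cachedValue = await client.sendCommand(['HGET', this.hashKey, key]);
    if (cachedValue) {
        cachedValue = JSON.parse(cachedValue);
        return Array.isArray(cachedValue)
            ? cachedValue.map(d => new this.model(d))
            : new this.model(cachedValue);
    }
    const result = await mongoose.Query.prototype._exec.apply(this, arguments);
    await client.hSet(this.hashKey, key, JSON.stringify(result));
    client.expire(this.hashKey, 3000);
    return result;
};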

Sending res to client before finishing async processes in Node.js

I have the following code and I can't figure out how to make the entire process finish before sending a response back to the client. It has a for loop and makes requests to two external functions.
The problem I can see in the console is that the response is sent well before the processing completes. That makes the client believe everything is fine, when in fact an error may have happened. I've tried await, I've tried reading other posts, I've tried making this return a promise and resolve it, I've tried an index to check the array length... If someone could help me out, I would appreciate it. Thanks in advance.
var updateProdCharTree = async (req, res, next) => {
    return new Promise((resolve, reject) => {
        var data = req.body.raw.data;
        var productLine = req.body.raw.productLine;
        var bu = req.body.raw.bu;
        let contErros = null;
        let contAcertos = 0;
        var maxId = 0;
        let queryMaxId = `SELECT max(ProductCharFatherId) as maxId FROM maxiplastmatriz.productchar1 WHERE bu=${bu} and prodline=${productLine}`;
        database.query(queryMaxId)
            .then(resultqueryMaxId => {
                if(resultqueryMaxId.length){
                    maxId = resultqueryMaxId[0]['maxId'];
                }else{
                    maxId = 0;
                }
                let queryAllNodes = `SELECT Id FROM productchar1 WHERE bu=${bu} and prodline=${productLine}`;
                database.query(queryAllNodes)
                    .then( async resultqueryAllNodes => {
                        for (let index = 0; index < data.children.length; index++) {
                            const element = data.children[index];
                            if (data.children[index].dbId != undefined) {
                                let query = `SELECT Id FROM productchar1 WHERE bu=${bu} and prodline=${productLine} and Id=${data.children[index].dbId} and ProductCharFatherId=${data.children[index].id}`;
                                database.query(query)
                                    .then( async result => {
                                        if (result.length) { // Update char in productchar1
                                            maxId++;
                                            var params = {element: element, productLine: productLine, bu: bu, index: index};
                                            waitResUp = await updateProductChar1(params, req, res); //***CALL EXTERNAL METHOD 2***
                                            contAcertos++;
                                        } else { // Add char in productchar1, shouldn't get in here
                                            console.log("Erro em: updateProdCharTree > addProductProductChar1");
                                        }
                                    })
                                    .catch(err => {
                                        console.log("Erro em query");
                                        contErros = 1;
                                    })
                            }else{ // Add new char in productchar1
                                maxId++;
                                var params = {element: element, productLine: productLine, bu: bu, index: index, maxId: maxId};
                                waitRes = await addProductProductChar1(params, req, res); //***CALL EXTERNAL METHOD 2***
                                console.log("waitRes", waitRes.insertId);
                                contAcertos++;
                            }
                        }
                    })
                    .catch(err => {
                        console.log("Erro em queryAllNodes", err);
                        contErros = 1;
                    })
            })
            .catch(err => {
                console.log("Erro em queryMaxId");
                contErros = 1;
            });
        if (contAcertos == data.children.length) {
            resolve("result"); // ***RES ATTEMPT***
        }
    })
}
Beginner here. Please be patient. You were once a beginner too.
For starters, you can refactor your code to something like the version below, so that the response is only returned after all async functions within the loop have executed:
var updateProdCharTree = async (req, res, next) => {
    return new Promise((resolve, reject) => {
        var data = req.body.raw.data;
        var productLine = req.body.raw.productLine;
        var bu = req.body.raw.bu;
        let contErros = null;
        let contAcertos = 0;
        var maxId = 0;
        let queryMaxId = `SELECT max(ProductCharFatherId) as maxId FROM maxiplastmatriz.productchar1 WHERE bu=${bu} and prodline=${productLine}`;
        database
            .query(queryMaxId)
            .then((resultqueryMaxId) => {
                if (resultqueryMaxId.length) {
                    maxId = resultqueryMaxId[0]["maxId"];
                } else {
                    maxId = 0;
                }
                let queryAllNodes = `SELECT Id FROM productchar1 WHERE bu=${bu} and prodline=${productLine}`;
                database
                    .query(queryAllNodes)
                    .then(async (resultqueryAllNodes) => {
                        return Promise.all(
                            data.children.map(async (element, index) => {
                                if (data.children[index].dbId != undefined) {
                                    let query = `SELECT Id FROM productchar1 WHERE bu=${bu} and prodline=${productLine} and Id=${data.children[index].dbId} and ProductCharFatherId=${data.children[index].id}`;
                                    let result = await database.query(query);
                                    if (result.length) {
                                        // Update char in productchar1
                                        maxId++;
                                        var params = {
                                            element: element,
                                            productLine: productLine,
                                            bu: bu,
                                            index: index,
                                        };
                                        waitResUp = await updateProductChar1(params, req, res); //***CALL EXTERNAL METHOD 2***
                                        contAcertos++;
                                    } else {
                                        // Add char in productchar1, shouldn't get in here
                                        console.log(
                                            "Erro em: updateProdCharTree > addProductProductChar1"
                                        );
                                    }
                                } else {
                                    // Add new char in productchar1
                                    maxId++;
                                    var params = {
                                        element: element,
                                        productLine: productLine,
                                        bu: bu,
                                        index: index,
                                        maxId: maxId,
                                    };
                                    waitRes = await addProductProductChar1(params, req, res); //***CALL EXTERNAL METHOD 2***
                                    console.log("waitRes", waitRes.insertId);
                                    contAcertos++;
                                }
                            })
                        );
                    })
                    .then(() => {
                        // This will be called after all queries are executed
                        resolve("result");
                    })
                    .catch((err) => {
                        console.log("Erro em queryAllNodes", err);
                        contErros = 1;
                        // NOTE: Ideally we should be rejecting the promise in case of error
                    });
            })
            .catch((err) => {
                console.log("Erro em queryMaxId");
                contErros = 1;
                // NOTE: Ideally we should be rejecting the promise in case of error
            });
    });
};
As @Jeremy Thille suggested, this code can be improved further to use only async/await instead of Promises with then callbacks, so that it is cleaner and easier to understand.
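For reference, a condensed sketch of what that async/await version might look like, assuming database.query returns a Promise and that updateProductChar1 and addProductProductChar1 behave as in the code above:

var updateProdCharTree = async (req, res, next) => {
    const { data, productLine, bu } = req.body.raw;
    let maxId = 0;

    const resultqueryMaxId = await database.query(
        `SELECT max(ProductCharFatherId) as maxId FROM maxiplastmatriz.productchar1 WHERE bu=${bu} and prodline=${productLine}`
    );
    maxId = resultqueryMaxId.length ? resultqueryMaxId[0]['maxId'] : 0;

    // Process every child sequentially; a plain for..of keeps maxId updates deterministic
    for (const [index, element] of data.children.entries()) {
        if (element.dbId != undefined) {
            const result = await database.query(
                `SELECT Id FROM productchar1 WHERE bu=${bu} and prodline=${productLine} and Id=${element.dbId} and ProductCharFatherId=${element.id}`
            );
            if (result.length) { // Update char in productchar1
                maxId++;
                await updateProductChar1({ element, productLine, bu, index }, req, res);
            } else {
                console.log("Erro em: updateProdCharTree > addProductProductChar1");
            }
        } else { // Add new char in productchar1
            maxId++;
            await addProductProductChar1({ element, productLine, bu, index, maxId }, req, res);
        }
    }
    return "result"; // errors propagate to the caller as a rejected Promise
};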

How to pull out handler using module exports?

I am building a Node application and trying to organize my code neatly. I wrote a serial module that imports the serial libs and handles the connection. My intention was to write a basic module and then reuse it over and over again in different projects as needed. The only part that changes per use is how the incoming serial data is handled. For this reason I would like to pull out the following handler and redefine it as each project needs. How can I use module exports to redefine only this section of the file?
I have tried adding myParser to the exports, but that gives me null, and I would be out of scope.
Handler to redefine/change/overload for each new project
myParser.on('data', (data) => {
    console.log(data)
    //DO SOMETHING WITH DATA
});
Example usage: main.js
const serial = require('./serial');
const dataParser = require('./dataParser');

//call connect with CL args
serial.connect(process.argv[2], Number(process.argv[3]))

serial.myParser.on('data', (data) => {
    //Do something unique with data
    if (dataParser.parse(data) == 0)
        serial.send('Error');
});
Full JS module below: serial.js
const SerialPort = require('serialport');
const ReadLine = require('@serialport/parser-readline');

const _d = String.fromCharCode(13); //char EOL

let myPort = null;
let myParser = null;

function connect(port, baud) {
    let portName = port || `COM1`;
    let baudRate = baud || 115200;
    myPort = new SerialPort(portName, {baudRate: baudRate})
    myParser = myPort.pipe(new ReadLine({ delimiter: '\n'}))
    //Handlers
    myPort.on('open', () => {
        console.log(`port ${portName} open`)
    });
    myParser.on('data', (data) => {
        console.log(data)
    });
    myPort.on('close', () => {
        console.log(`port ${portName} closed`)
    });
    myPort.on('error', (err) => {
        console.error('port error: ' + err)
    });
}

function getPorts() {
    let portlist = [];
    SerialPort.list((err, ports) => {
        ports.forEach(port => {
            portlist.push(port.comName)
        });
    })
    return portlist;
}

function send(data) {
    myPort.write(JSON.stringify(data) + _d, function (err) {
        if (err) {
            return console.log('Error on write: ', err.message);
        }
        console.log(`${data} sent`);
    });
}

function close() {
    myPort.close();
}

module.exports = {
    connect, getPorts, send, close
}
The problem is that a module is used where a class or a factory would be appropriate. myParser cannot exist without connect being called, so it doesn't make sense to expose it as a module property: it would be unavailable by default, and multiple connect calls would override it.
It can be a factory:
module.exports = function connect(port, baud) {
    let portName = port || `COM1`;
    let baudRate = baud || 115200;
    let myPort = new SerialPort(portName, {baudRate: baudRate})
    let myParser = myPort.pipe(new ReadLine({ delimiter: '\n'}))
    //Handlers
    myPort.on('open', () => {
        console.log(`port ${portName} open`)
    });
    myParser.on('data', (data) => {
        console.log(data)
    });
    myPort.on('close', () => {
        console.log(`port ${portName} closed`)
    });
    myPort.on('error', (err) => {
        console.error('port error: ' + err)
    });

    function getPorts() {
        let portlist = [];
        SerialPort.list((err, ports) => {
            ports.forEach(port => {
                portlist.push(port.comName)
            });
        })
        return portlist;
    }

    function send(data) {
        myPort.write(JSON.stringify(data) + _d, function (err) {
            if (err) {
                return console.log('Error on write: ', err.message);
            }
            console.log(`${data} sent`);
        });
    }

    function close() {
        myPort.close();
    }

    return {
        myParser, getPorts, send, close
    };
}
So it could be used like:
const serial = require('./serial');

const connection = serial(...);

connection.myParser.on('data', (data) => {
    //Do something unique with data
    if (dataParser.parse(data) == 0)
        connection.send('Error');
});
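For completeness, the class option mentioned above could look roughly like this. The SerialConnection name is made up for the example, the handlers are trimmed, and it assumes the same SerialPort/ReadLine requires and _d constant as serial.js:

class SerialConnection {
    constructor(port, baud) {
        const portName = port || 'COM1';
        const baudRate = baud || 115200;
        this.myPort = new SerialPort(portName, { baudRate });
        // Expose the parser so each project can attach its own 'data' handler
        this.myParser = this.myPort.pipe(new ReadLine({ delimiter: '\n' }));
        this.myPort.on('open', () => console.log(`port ${portName} open`));
        this.myPort.on('error', (err) => console.error('port error: ' + err));
    }

    send(data) {
        this.myPort.write(JSON.stringify(data) + _d, (err) => {
            if (err) return console.log('Error on write: ', err.message);
            console.log(`${data} sent`);
        });
    }

    close() {
        this.myPort.close();
    }
}

module.exports = SerialConnection;

// usage:
// const SerialConnection = require('./serial');
// const connection = new SerialConnection('COM3', 115200);
// connection.myParser.on('data', (data) => { /* project-specific handling */ });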

Append currently logged in user to JSON file

I am trying to find a way to get the currently logged in user and then append them to a JSON file. Below is my code to first read the dir, then get the most recent file, return it, and then append the current user that is logged in.
I can append a string to the file, but when trying to use req.user it states
Cannot read property 'user' of undefined
What would I need to include in this file so that it knows what the user is?
let fs = require("fs"),
    express = require("express"),
    _ = require("underscore"),
    User = require("./models/user"),
    path = require("path");

let getFileAddUser = () => {
    let filePath = '../automation_projects/wss-automation-u/results/temp/';
    fs.readdir(filePath, (err, files) => {
        if (err) { throw err; }
        let file = getMostRecentFile(files, filePath);
        console.log(file);
        fs.readFile(filePath + file, 'utf8', (err, data) => {
            let json = JSON.parse(data);
            if(err){
                console.error(err);
                return;
            } else {
                //Un-comment to write to most recent file.
                //==================================================
                //This should find the currently logged in user and append them to the most recent file found.
                json.currentuser = req.user;
                fs.writeFile(filePath + file, JSON.stringify(json), (error) => {
                    if(error){
                        console.error(error);
                        return;
                    } else {
                        console.log(json);
                    }
                });
                //==================================================
                console.log(data);
            }
        });
    });
};

//Get the most recent file from the results folder.
function getMostRecentFile(files, path) {
    let out = [];
    files.forEach(function(file) {
        let stats = fs.statSync(path + "/" + file);
        if(stats.isFile()) {
            out.push({"file": file, "mtime": stats.mtime.getTime()});
        }
    });
    out.sort(function(a, b) {
        return b.mtime - a.mtime;
    })
    return (out.length > 0) ? out[0].file : "";
}

module.exports = getFileAddUser;
Thanks to a knowledgeable co-worker and some further research, we were able to get this working. I'd like to share the code we came up with to append the currently logged in user to our results file. You will also notice we got some help from the Ramda.js library.
let fs = require("fs"),
    express = require("express"),
    _ = require("underscore"),
    User = require("./models/user"),
    r = require("ramda"),
    path = require("path");

//This will be our function to get the most recent file from our dir and
//return it to us. We then use this function below.
function getMostRecentFile(files, path) {
    let out = [];
    let f = r.tail(files);
    console.log(files);
    f.forEach(function(file) {
        let stats = fs.statSync(path + "/" + file);
        if(stats.isFile()) {
            out.push({"file": file, "mtime": stats.mtime.getTime()});
        }
    });
    out.sort(function(a, b) {
        return b.mtime - a.mtime;
    })
    return (out.length > 0) ? out[0].file : "";
}

//Passing in 'u' as an argument which can then be used in a route to pass in
//anything that we want it to be. In our case it was the currently logged
//in user.
let getUser = (u) => {
    let user = u;
    let filePath = '../automation_projects/wss-automation-u/results/temp/';
    //Comment above and uncomment below for testing locally.
    // let filePath = "./temp/";
    let file = "";
    //Below we read our dir, then get the most recent file using the
    //getMostRecentFile function above.
    read_directory(filePath).then( files => {
        file = getMostRecentFile(files, filePath)
        console.log(file);
        return(read_file(filePath + file))
    }).then( x => {
        //Here we parse through our data, with x representing the data that we
        //returned above.
        let json = JSON.parse(x);
        return new Promise(function(resolve, reject) {
            json.currentuser = u;
            //And finally we write to the end of the latest file.
            fs.writeFile(filePath + file, JSON.stringify(json), (error) => {
                if(error) reject(error);
                else resolve(json);
                // console.log(json);
            });
        });
    });
}

let read_directory = (path) => {
    return new Promise((resolve, reject) => {
        fs.readdir(path, (err, items) => {
            if (err){
                return reject(err)
            }
            return resolve([path, ...items])
        })
    })
}

let read_file = (path) => {
    return new Promise((resolve, reject) => {
        fs.readFile(path, "utf8", (err, items) => {
            if (err){
                return reject(err)
            }
            return resolve(items)
        })
    })
}

module.exports = getUser;
Then below is an example route showing how to use the getUser module. You will want to require it like you do everything else with Node.js and its dependencies. Hope this helps someone in the future.
let getUser = require("getuser");

//Make a route to use the getUser module and pass in our argument value.
app.get("/", (req, res) => {
    //Within the route, pass in whatever you want to equal 'u' in the getuser module.
    getUser(req.user.username);
    res.render("index", { username: req.user });
});

Node.js - How to return after all asynchronous calls have finished

As shown below, I am pushing the object that link_to_json returns into an array allShirts declared in html_to_json.
However, the console.dir on the third-to-last line and the return value of html_to_json log an array of undefined values, which I presume is because console.dir and the return run before the link_to_json calls have finished.
How do I ensure the return value of html_to_json is a filled-up allShirts array?
//Go to individual links and scrape relevant info
const link_to_json = (link) => {
    request(link, (err, res, body) => {
        if (!error_handler(err, res, link)) {
            const $ = cheerio.load(body);
            const shirt_detail = $('.shirt-details').find('h1').text();
            const Title = shirt_detail.substr(shirt_detail.indexOf(' ') + 1);
            const Price = shirt_detail.substr(0, shirt_detail.indexOf(' '));
            const ImageURL = $('.shirt-picture').find('img').attr('src');
            const URL = link;
            return new Shirt(Title, Price, ImageURL, URL);
        } else return {};
    });
}

//Crawl through all individual links listed in Root
const html_to_json = body => {
    const allShirts = [];
    const $ = cheerio.load(body);
    $('.products').find('a').each((index, val) => {
        allShirts.push(link_to_json(rootURL + $(val).attr('href')));
    });
    console.dir(allShirts); // <--- HERE
    return allShirts;
}
There are a few ways to go about this, but I like the Async library for this sort of thing.
How I'd handle your problem is to get all the URLs first, so change your body scrape to something like this instead:
const shirtLinks = [];
$('.products').find('a').each((index, val) => {
    shirtLinks.push(rootURL + $(val).attr('href'));
});
You need your conversion function to be asynchronous as well:
const linkToJSON = (link, cb) => {
    request(link, (err, res, body) => {
        if (!error_handler(err, res, link)) {
            const $ = cheerio.load(body);
            const shirt_detail = $('.shirt-details').find('h1').text();
            const Title = shirt_detail.substr(shirt_detail.indexOf(' ') + 1);
            const Price = shirt_detail.substr(0, shirt_detail.indexOf(' '));
            const ImageURL = $('.shirt-picture').find('img').attr('src');
            const URL = link;
            return cb(null, new Shirt(Title, Price, ImageURL, URL));
        }
        return cb();
    });
}
Then use async to map them across the async function that fetches the data:
async.map(shirtLinks, linkToJSON, (err, results) => {
    console.dir(results);
});
This is how I would do it. I find it easier to debug this way.
let getShirtDetailsBody = (link) => {
    return new Promise((resolve, reject) => {
        request(link, (err, res, body) => {
            if (err) {
                reject(err)
            } else {
                // Resolve with the link as well, so the shirt URL is still available later
                resolve({ link, body })
            }
        })
    })
}

let getShirt = ({ link, body }) => {
    const $ = cheerio.load(body);
    const shirt_detail = $('.shirt-details').find('h1').text();
    const Title = shirt_detail.substr(shirt_detail.indexOf(' ') + 1)
    const Price = shirt_detail.substr(0, shirt_detail.indexOf(' '))
    const ImageURL = $('.shirt-picture').find('img').attr('src')
    const URL = link
    return new Shirt(Title, Price, ImageURL, URL)
}

let getAllProductsShirtsBody = (body) => {
    const $ = cheerio.load(body)
    // .get() turns the cheerio collection into a plain array of promises for Promise.all
    return Promise.all($('.products').find('a').map((index, val) => {
        return getShirtDetailsBody(rootURL + $(val).attr('href'))
    }).get())
}

getAllProductsShirtsBody(yourbody).then(allShirtsBody => {
    const allShirts = allShirtsBody.map(shirtBody => { return getShirt(shirtBody) })
    console.log(allShirts)
}).catch(err => { console.log(err) })
