I've been trying to diagnose this bug for some time now, but I can't figure out why my completed() function executes before all my async functions are done. I'm using the async library:
async.forEach(data.DBInstances, function (dbInstance, fcallback) {
let dbtype = dbInstance.Engine;
let logFilename = log[dbtype].log();
let instanceId = dbInstance.DBInstanceIdentifier;
if (tagFilter) {
let arn = dbInstance.DBInstanceArn;
checkRDSTag(arn, tagFilter, function (err, found) {
if (!err) {
//tag was found, continue processing and check other filters...
if (found) {
if (noFilter || (instanceTypes && instanceTypes.indexOf(dbtype))) {
//console.log('db type is: ' + dbtype);
processOrCreateLog(instanceId, dbtype, function (err, data) {
if (!err) {
console.log("Data: " + JSON.stringify(data));
completed.push(data);
fcallback(null);
} else {
cb(err, null);
}
});
}
} else {
//tag wasn't found but was specified, don't process anything...
console.log("tag specified was not found on instance: " + instanceId);
}
} else {
console.log("Error checking RDS Tag");
cb(err, null);
}
});
}
//only process filtered types...
else if (noFilter || (instanceTypes && instanceTypes.indexOf(dbtype))) {
console.log('db type is: ' + dbtype);
processOrCreateLog(instanceId, dbtype, fcallback, function (err, data, fcallback) {
if (!err) {
console.log("Data: " + JSON.stringify(data));
completed.push(data);
fcallback(null);
} else {
cb(err, null);
}
});
}
}, testme(completed));
My async functions are running and each one completes correctly, but my testme(completed) runs immediately, before any of my async functions ever finish. Not sure why.
My testme(completed) is simply:
function testme(completed) {
console.log("Completed: " + JSON.stringify(completed));
}
One note: the function I execute on each element itself has async functions inside of it (checkRDSTag(), processOrCreateLog(), etc.). I'm guessing it's something to do with the callback() that async is expecting / tracking executing out of place or something? Not really sure.
Return the callback only when iterating the last item
var index=0;
async.forEach(data.DBInstances, function (dbInstance, fcallback) {
let dbtype = dbInstance.Engine;
let logFilename = log[dbtype].log();
let instanceId = dbInstance.DBInstanceIdentifier;
if (tagFilter) {
let arn = dbInstance.DBInstanceArn;
checkRDSTag(arn, tagFilter, function (err, found) {
if (!err) {
//increment index here
index++;
//tag was found, continue processing and check other filters...
if (found) {
if (noFilter || (instanceTypes && instanceTypes.indexOf(dbtype))) {
//console.log('db type is: ' + dbtype);
processOrCreateLog(instanceId, dbtype, function (err, data) {
if (!err) {
console.log("Data: " + JSON.stringify(data));
completed.push(data);
//check if last item running
if(index===data.DBInstances.length){
return fcallback(null);
}else{
fcallback()
}
} else {
cb(err, null);
}
});
}
} else {
//tag wasn't found but was specified, don't process anything...
console.log("tag specified was not found on instance: " + instanceId);
}
} else {
console.log("Error checking RDS Tag");
cb(err, null);
}
});
}
//only process filtered types...
else if (noFilter || (instanceTypes && instanceTypes.indexOf(dbtype))) {
console.log('db type is: ' + dbtype);
processOrCreateLog(instanceId, dbtype, fcallback, function (err, data, fcallback) {
if (!err) {
console.log("Data: " + JSON.stringify(data));
completed.push(data);
//check if last item running
if(index===data.DBInstances.length){
return fcallback(null);
}else{
fcallback()
}
} else {
cb(err, null);
}
});
}
}, testme(completed));
My problem ended up being in my other asynchronous call (processOrCreateLog()) within my iteratee. There was flow-control logic in my asynchronous calls that didn't call back, so fcallback() never ran.
Also to clarify, async is the async Node.js library: https://caolan.github.io/async/docs.html#each
As long as you execute the callback on the iteratee for each element, with either an error or null, it can track all executions and will then run your final callback properly.
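For illustration, here's a minimal sketch using the names from the question, with the tag and type filtering stripped out. Every code path in the iteratee calls its callback exactly once, and the final callback is passed as a function rather than invoked on the spot:
const async = require('async');

// minimal sketch: every path through the iteratee must call `done` exactly once
async.each(data.DBInstances, function (dbInstance, done) {
    processOrCreateLog(dbInstance.DBInstanceIdentifier, dbInstance.Engine, function (err, result) {
        if (err) {
            return done(err); // report the error so async stops waiting on this item
        }
        completed.push(result);
        done(null); // signal success for this element
    });
}, function (err) {
    // runs only after every iteratee has called `done`, or as soon as one passes an error
    if (err) {
        console.error(err);
    } else {
        testme(completed); // wrapped in a function here rather than passed as testme(completed)
    }
});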
Related
app.get('/',function (req,res) {
client.getAllOffers(null,function(err, resp) {
if (!err) {
offer = JSON.parse(resp);
test1 = offer.allOffersList.length;
res.send(offer);
for(var i=0;i<test1;i++) {
var stmt = "INSERT INTO offers(description,start_time,end_time) VALUES (?, ?, ?)";
connection.query(stmt, [offer.allOffersList[i].description, offer.allOffersList[i].startTime, offer.allOffersList[i].endTime], function (err, result) {
if (err) throw err.message;
console.log("Number of records inserted: " + result.affectedRows);
});
}
}
else {
console.log(err);
}
});
});
In this case I'm calling the getAllOffers function in the second line of the code.
If I encounter an error, how can I call the function again until I get a response?
How can this be done?
You can do this recursively by wrapping the getAllOffers call in a function. I would recommend putting a limit on the number of retries to prevent an infinite loop.
const getAllOffers = function (callCount) {
    if (callCount < 5) {
        client.getAllOffers(null, function (err, resp) {
            if (!err) {
                console.log('yay!');
            } else {
                getAllOffers(++callCount);
            }
        });
    }
};
getAllOffers(0);
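If the endpoint is flaky or rate-limited, you may also want a pause between attempts. Here is a sketch of the same wrapper with a fixed delay; the 1000 ms value and the limit of 5 retries are assumptions, not anything from the original code:
const getAllOffersWithDelay = function (callCount) {
    if (callCount >= 5) {
        return console.log('giving up after 5 attempts');
    }
    client.getAllOffers(null, function (err, resp) {
        if (!err) {
            console.log('yay!');
        } else {
            // wait one second (assumed value) before retrying
            setTimeout(function () {
                getAllOffersWithDelay(callCount + 1);
            }, 1000);
        }
    });
};

getAllOffersWithDelay(0);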
I am stuck in Node.js while calling the Zendesk API.
When I call the zendesk.tickets.incremental API, it gives me ticket IDs that are then used in another function to check for changes from before, by calling zendesk.tickets.exportAudit.
I do get a response, but while the data is still being fetched the next ticketId is called, so the previous one flags an error in the response ("error: item not found"), then the data for the new ticketId is fetched, and so on.
What I need is to block the process until the data for the first ID has been fetched completely.
This is my code.
// Calling ticket incremental details with ticketId (like 1, 2, etc.)
app.get('/', function (req, res) {
    zendesk.tickets.incremental(0, function (err, statusList, body, responseList, resultList) {
        if (err) {
            console.log(err);
            return;
        }
        var ticketIncreDetails = JSON.stringify(body);
        res.end(ticketIncreDetails);
        for (var i = 0; i < body.length; i++) {
            ticketValues(body[i].id); // within this function another Zendesk API is called for exportAudit
        }
    });
});

// This is for exportAudit
function ticketValues(ticketId) {
    zendesk.tickets.exportAudit(ticketId, function (err, statusList, body, responseList, resultList) {
        if (err) {
            console.log(err);
            return;
        }
        console.log("ticketExportAudit: " + JSON.stringify(body));
    });
}
As @qxz says, it's worth checking whether a sync package exists for this; otherwise you need to handle it with callbacks, because zendesk.tickets.exportAudit needs time to complete its work and a for loop won't wait for it. The code below handles the problem with a callback; have a look.
// Calling ticket incremental details with ticketId (like 1, 2, etc.)
app.get('/', function (req, res) {
    zendesk.tickets.incremental(0, function (err, statusList, body, responseList, resultList) {
        if (err) {
            console.log(err);
            return;
        }
        var ticketIncreDetails = JSON.stringify(body);
        res.end(ticketIncreDetails);
        ticketValues(body, body.length, 0);
        // ticketValues(body, body.length, 0, function () { ...if you want to do something after... });
    });
});
//This is for exportAudit
function ticketValues(ticket, length, index, callback) {
    if (index >= length) {
        // all tickets processed; run the completion callback if one was given
        if (callback) callback();
        return;
    }
    zendesk.tickets.exportAudit(ticket[index].id, function (err, statusList, body, responseList, resultList) {
        if (err) {
            console.log(err);
            return;
        }
        console.log("ticketExportAudit: " + JSON.stringify(body));
        // only move on to the next ticket once this one has finished
        ticketValues(ticket, length, index + 1, callback);
    });
}
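A usage sketch of the completion callback hinted at in the commented-out line above (the log message is just an example):
// start sequential processing and log once every ticket has been audited
ticketValues(body, body.length, 0, function () {
    console.log("all ticket audits fetched");
});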
Assume I have an array with paths to multiple files. I would like to delete these files asynchronously.
var files = ['file1.txt', 'file2.txt'];
fs.unlink(..., callback())
I came across this solution, Delete several files in node.js, but I think it violates Node.js practices (an asynchronous function inside a for loop). Is there any other, better solution for this?
If you want to run an arbitrary list of tasks (unlinking files) asynchronously, but know when they are all done, you can use the async.js module. It can run tasks in series and in parallel.
So push a task function for each unlink into an array, then call async.parallel() and let them fly. When they are all done, you land in a single manageable callback.
var files = ['file1.txt', 'file2.txt'];
var myParallelTasks = [];

files.forEach(function (fileName) {
    // push a task function; async.parallel() will invoke it with its own callback
    myParallelTasks.push(function (callback) {
        fs.unlink(fileName, callback);
    });
});

async.parallel(myParallelTasks, function () {
    // all done
    console.log("all done");
});
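Since the only per-file work here is the unlink itself, the same thing can be expressed more compactly with async.each; a sketch, assuming the same files array and that async and fs are already required:
// each file is unlinked in parallel; the final callback fires once all have finished
async.each(files, fs.unlink, function (err) {
    if (err) {
        console.error(err);
    } else {
        console.log("all done");
    }
});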
Try the option of recursion (code from your link in the question):
function deleteFiles(files, callback) {
    var file = files.pop();
    if (file === undefined) {
        // no files left, we're done
        callback();
    } else {
        // unlink this file, then recurse for the rest once it has finished
        fs.unlink(file, function (err) {
            if (err) {
                return callback(err);
            }
            deleteFiles(files, callback);
        });
    }
}
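A usage sketch; note that this version consumes the array with pop(), so pass a copy if you still need the original list afterwards:
var files = ['file1.txt', 'file2.txt'];

// pass a copy because deleteFiles() empties the array as it works through it
deleteFiles(files.slice(), function (err) {
    if (err) {
        console.error("failed to delete a file:", err);
    } else {
        console.log("all files deleted");
    }
});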
async deleteAll(filePathsList) {
    try {
        await this.deleteFiles(filePathsList);
        logger.log('info', "Following files deleted successfully from EFS --> " + filePathsList.toString());
        return true;
    } catch (error) {
        logger.log('error', error.stack || error);
        logger.log('error', "Error occurred while deleting files from EFS");
        return false;
    }
}

async deleteFiles(files) {
    return new Promise((resolve, reject) => {
        let i = files.length;
        files.forEach(function (filepath) {
            fs.unlink(filepath, function (err) {
                i--;
                if (err && err.code === 'ENOENT') {
                    // file doesn't exist, so there is nothing to delete
                    logger.log('info', "Following file doesn't exist, so won't be deleted --> " + (filepath || ''));
                } else if (err) {
                    // other errors, e.g. maybe we don't have enough permission
                    logger.log('error', "Error occurred while deleting the file " + (filepath || '') + " due to error " + err);
                    reject(err);
                    return;
                }
                // resolve once every file has been handled (including ones that didn't exist)
                if (i <= 0) {
                    resolve();
                }
            });
        });
    });
}
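For comparison, on newer Node.js versions the same idea can be written with fs.promises and Promise.all; a stripped-down sketch without the logging (the deleteAllFiles name is just for this example):
const fs = require('fs');

// resolves once every file has been unlinked, rejects on the first failure
async function deleteAllFiles(files) {
    await Promise.all(files.map((filepath) => fs.promises.unlink(filepath)));
}

deleteAllFiles(['file1.txt', 'file2.txt'])
    .then(() => console.log('all files deleted'))
    .catch((err) => console.error('delete failed:', err));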
I am facing the following issue:
I am calling the following browse function in a forEach cycle. When rb.wsc.browse(symbol) is called, the program makes a WebSocket request, and when the message comes back an event is emitted. The problem is that I always get the same browseData, even though I know the event is emitted with different data. I think this is a closure issue, but I don't know how to solve it.
function browse(rb, symbol, callback) {
var result = function(wsc, browseData) {
wsc.off('browse', result);
wsc.off('failed', result);
var err = null;
if (wsc.errno < 0) {
err = new Error("Browsing symbol " + symbol + " failed!");
err.status = wsc.errno;
} else {
saveBrowseData(rb, browseData);
}
callback(err, symbol);
};
// Register temporary listeners
rb.wsc.on('browse', result);
rb.wsc.on('failed', result);
// Browse symbol
rb.wsc.browse(symbol);
}
RexBrowser.prototype.refresh = function() {
var that = this;
var browseRequestNumber = 1;
var browseResult = function(err, symbol) {
browseRequestNumber--;
var item = that.getSymbol(symbol);
_.each(item.children, function(child) {
if (child.browse) {
browseRequestNumber++;
debug("Browsing: " + child.cstring);
browse(that,child.cstring, browseResult);
}
});
if (browseRequestNumber === 0) {
that.emit('refresh', that);
}
};
// Start recursive browsing
browse(this,'$', browseResult);
};
You could try using an IIFE (immediately invoked function expression):
} else {
    (function (rbInner, browseDataInner) {
        saveBrowseData(rbInner, browseDataInner);
    })(rb, browseData);
}
This makes sure the variables used by saveBrowseData have the values they hold at the moment the function is invoked.
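For context, here is the classic form of this closure issue and the IIFE fix, as a standalone sketch unrelated to the WebSocket code:
// without the IIFE every callback sees the final value of i
for (var i = 0; i < 3; i++) {
    setTimeout(function () {
        console.log("without IIFE:", i); // prints 3, 3, 3
    }, 10);
}

// the IIFE captures the current value of j for each iteration
for (var j = 0; j < 3; j++) {
    (function (captured) {
        setTimeout(function () {
            console.log("with IIFE:", captured); // prints 0, 1, 2
        }, 10);
    })(j);
}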
I'm trying to scrape data from a Word document with Node.js.
My current problem is that the console log below returns the correct value for the variable inside the juice block, but if I move it outside the juice block the value is completely lost. I tried putting a return:
function getMargin(id, content){
var newMargin = content.css("margin-left");
if(newMargin === undefined){
var htmlOfTarget = content.toString(),
whereToCut = theRaw.indexOf("<div class=WordSection1>");
fs.writeFile("bin/temp/temp_" + id + ".htm", theRaw.slice(0, whereToCut) + htmlOfTarget + "</body> </html>", function (err){
if (err) {
throw err;
}
});
juice("bin/temp/temp_" + id + ".htm", function (err, html) {
if (err) {
throw err;
}
var innerLoad = cheerio.load(html);
newMargin = innerLoad("p").css("margin-left");
console.log(newMargin); // HERE newMargin HAS THE EXPECTED VALUE
});
}
console.log(newMargin); // HERE newMargin IS UNDEFINED
return newMargin;
}
I think the problem lies with fs.writeFile and juice being async functions. I just have no idea how to get around it. I have to be able to call getMargin at certain points, in a sequential order.
As mentioned in the comments, change your program flow to run in callbacks, after the async code has completed...
// accept callback as parameter, and run it after async methods complete...
function getMargin(id, content, callback){
var newMargin = content.css("margin-left");
if(newMargin === undefined){
var htmlOfTarget = content.toString(),
whereToCut = theRaw.indexOf("<div class=WordSection1>");
fs.writeFile("bin/temp/temp_" + id + ".htm", theRaw.slice(0, whereToCut) + htmlOfTarget + "</body> </html>", function (err){
if (err) {
throw err;
}
// move the juice call inside the callback of the file write operation
juice("bin/temp/temp_" + id + ".htm", function (err, html) {
if (err) {
throw err;
}
var innerLoad = cheerio.load(html);
newMargin = innerLoad("p").css("margin-left");
console.log(newMargin); // HERE newMargin HAS THE EXPECTED VALUE
// now run the callback passed in the beginning...
callback();
});
});
}
}
// call getMargin with callback to run once complete...
getMargin("myId", "myContent", function(){
// continue program execution in here....
});