What is the right way (nested functions or...) - javascript

Are both my examples the same in terms of functionality, considering that in error handling I'm terminating with res.json(400, err)? Also, I would like to know whether in my second example the second async.each always runs after the first async.each, so that using results1 in the second async.each is safe. Sorry, I'm new to Node and async!
Example 1: where I use the results of the first async.each in the final block as input to the second async.each:
var results1 = {};
var results2 = {};
async.each(inputs, function (input, callback) {
    // Do something here and add some data to results1
    callback();
}, function (err) {
    if (err) {
        // Handle error
    } else {
        async.each(results1, function (item, callback) {
            // Do something here and add some data to results2
            callback();
        }, function (err) {
            if (err) {
                // Handle error
            } else {
                console.log("Final result", results2);
            }
        });
    }
});
Or Example 2: where I have separate async.each blocks:
var results1 = {};
async.each(inputs, function (input, callback) {
    // Do something here and add some data to results1
    callback();
}, function (err) {
    if (err) {
        // Handle error
    }
});
var results2 = {};
async.each(results1, function (item, callback) {
    // Do something here and add some data to results2
    callback();
}, function (err) {
    if (err) {
        // Handle error
    } else {
        console.log("Final result", results2);
    }
});
UPDATED:
Since the second approach is not the right way, and it is not guaranteed that the second async.each runs after the first one, the question becomes: does that mean I cannot have a simple for loop like the following example either? If so, it is easy to change it to async.each, but the problem is that this one is recursive, and that makes it complicated! If this needs to be async as well rather than a for loop, how can I keep the recursive functionality here?
var results1 = {};
var results2 = [];
var results3 = {};
async.each(inputs, function (input, callback) {
    // Do something here and add some data to results1
    callback();
}, function (err) {
    if (err) {
        // Handle error
    } else {
        // Can I keep a plain for loop here, or does it need to be async too?
        // Switching to async.each would be easy, except that inferFromUnion is recursive.
        for (var input in inputs) {
            inferFromUnion(inputs[input], results1);
            results2.push(inputs[input]);
        }
        async.each(results2, function (item, callback) {
            // Do something here and add some data to results3
            callback();
        }, function (err) {
            if (err) {
                // Handle error
            } else {
                console.log("Final result", results3);
            }
        });
    }
});
// Check each object's schema and, if it is missing any fields from results1,
// add those fields with a value of null
function inferFromUnion(obj, allFields) {
    Object.keys(allFields).forEach(function (key) {
        if (lodash.isUndefined(obj[key])) {
            if (lodash.isPlainObject(allFields[key])) {
                obj[key] = {};
                inferFromUnion(obj[key], allFields[key]);
            } else {
                obj[key] = null;
            }
        }
    });
}

The first example is the way to go if you want to use the results of the first bunch of calls in the second bunch. The second example won't work, because the second async.each() is guaranteed to run before the callbacks bound to the first one's asynchronous operations.
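If you want to keep that flow but flatten the nesting, one option is async.series, which runs its tasks strictly one after another. Here's a minimal sketch under the question's assumptions (inputs, results1, and results2 as defined above); note that older versions of async.each only accept arrays, hence the Object.keys():
var async = require('async');

var results1 = {};
var results2 = {};

async.series([
    function (next) {
        async.each(inputs, function (input, callback) {
            // populate results1 here
            callback();
        }, next);
    },
    function (next) {
        async.each(Object.keys(results1), function (key, callback) {
            // populate results2 from results1[key] here
            callback();
        }, next);
    }
], function (err) {
    if (err) {
        // handle error
    } else {
        console.log("Final result", results2);
    }
});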
Asynchronous recursion with loops is very much possible:
(function doSomeAsyncRecursion (results) {
    async.each(someItems, function (item, callback) {
        // ...
    }, function () {
        if (results /* ... (are incomplete) */) {
            doSomeAsyncRecursion(results);
        } else {
            // ... (results are complete now, do something with them)
        }
    });
})(/* initial value of results */);
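To make that skeleton concrete, here is a sketch (the fetchItem function and the 'root' id are hypothetical stand-ins, not part of the question) that keeps digging until no new work is queued:
var async = require('async');

function drainQueue(queue, results, done) {
    if (queue.length === 0) return done(results); // base case: nothing left to process
    var batch = queue.splice(0, queue.length); // take everything queued so far
    async.each(batch, function (item, callback) {
        fetchItem(item, function (err, data, moreItems) { // hypothetical async operation
            if (err) return callback(err);
            results.push(data);
            queue.push.apply(queue, moreItems || []); // queue newly discovered work
            callback();
        });
    }, function (err) {
        if (err) throw err;
        drainQueue(queue, results, done); // recurse until the queue stays empty
    });
}

drainQueue(['root'], [], function (results) {
    console.log('Final result', results);
});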

These two examples are different in design. The first example runs the second async.each only after the first one succeeds. The second example runs the second async.each every time, whether there was an error or not.
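To see why the second example can't rely on results1, here is a small demonstration (setTimeout stands in for any real asynchronous work, such as a database query):
var results1 = {};

setTimeout(function () {
    results1.value = 42; // simulates an async.each iterator finishing later
}, 0);

// This line runs immediately, before the timeout callback fires:
console.log(results1); // => {} (still empty at this point)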

Related

How to use async for callbacks using nodejs?

I have a search function: once I have the search string from the client, I want to loop through files in fs and match the string against each file's contents. My problem is in the loop: I want to collect all matching results and send them to the client. Below is my attempt, which produces the error pasted in the question. I'm new to the async library; any help will be appreciated.
app.js
app.get('/serverSearch', function (req, res) {
    var searchTxt = req.query.searchTxt;
    dirDirectory.readDirectory(function (logFiles) {
        // res.json(logFiles);
        if (logFiles) {
            searchFileService.readFile(searchTxt, logFiles, function (lines, err) {
                console.log('Logs', lines);
                if (err)
                    return res.send();
                res.json(lines);
            });
        }
    });
    console.log('Search text', searchTxt);
});
service.js
var fs = require('fs');
var path = require('path');
var async = require('async');
var searchStr;
var result = [];

// Async method
function readFile(str, logFiles, callback) {
    async.series([
        // Load user to get `userId` first
        function (callback) {
            searchStr = str;
            for (var i = 0; i < logFiles.length; i++) {
                if (logFiles[i].filename !== '.gitignore') {
                    fs.readFile('logs/dit/' + logFiles[i].filename, 'utf8', function (err, data) {
                        if (err) {
                            return console.log(err);
                        }
                        inspectFile(data);
                    });
                }
                callback(result);
            }
        },
        // Load posts (won't be called before task 1's "task callback" has been called)
        function () {
            function inspectFile(data, callback) {
                var lines = data.split('\n'); // get the lines
                lines.forEach(function (line) { // for each line in lines
                    if (line.indexOf(searchStr) != -1) { // if the line contains the searchStr
                        result.push(line); // then keep it
                        return line;
                    }
                });
            }
        }
    ], function (err) { // this function gets called after the two tasks have called their "task callbacks"
        if (err) return err;
    });
};
Error
if (fn === null) throw new Error("Callback was already called.");
You should be using async.map instead of series. You are misunderstanding what series does: series processes requests top-down. You are attempting to break this chain by accessing a function defined inside a later task of the series itself, which is a no-no.
for example:
async.series([
    function (callback) {
        let i = 0;
        do {
            console.log("I'm first in the series: ", i);
            i++;
        } while (i < 3);
        callback(); // This tells us this function has finished.
    },
    function (callback) {
        let i = 0;
        do {
            console.log("I'm next in the series: ", i);
            i++;
        } while (i < 3);
        callback(); // This tells us this function has finished.
    }
]);
The output of this would be:
I'm first in the series: 0
I'm first in the series: 1
I'm first in the series: 2
Once the callback fires, async moves on to the next function in the series array, and the output continues:
I'm next in the series: 0
I'm next in the series: 1
I'm next in the series: 2
At no point should one task in the series access a function defined inside a later task; you should never cross-access between tasks that way.
With async.map you can perform an operation on each entity within your array, which is essentially what you are trying to do.
var results = [];
async.map(logFiles, function (logfile, callback) {
    if (logfile.filename === '.gitignore') {
        // Still signal completion, or map will never finish
        return callback(null, []);
    }
    fs.readFile('logs/dit/' + logfile.filename, 'utf8', function (err, data) {
        if (err) {
            return callback(err, null);
        }
        var matches = data.split('\n').filter(function (line) { // for each line in lines
            return line.indexOf(searchStr) != -1; // keep it if it contains the searchStr
        });
        results = results.concat(matches);
        callback(null, matches); // call the callback exactly once per file
    });
}, function (error, mapped) {
    if (error) return console.error(error);
    results.map(result => {
        console.log(result);
    });
});
Also, you should use util.inspect instead of console.log; it's much cleaner and has more options.
The documentation on this is a bit rough, but here it is. https://caolan.github.io/async/docs.html#map hope this helps!
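As for util.inspect, a small sketch of what it buys you over console.log for nested data:
var util = require('util');

var nested = { a: { b: { c: { d: 1 } } } };
// depth: null prints the whole structure instead of truncating at depth 2
console.log(util.inspect(nested, { depth: null, colors: true }));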
You should use the async.eachSeries method:
function readFile(str, logFiles, callback) {
    async.eachSeries(logFiles, function (item, cb) {
        // Process item, then signal completion:
        cb(null); // or cb(err) to abort the series
    }, function (err) {
        if (err) {
            console.log("Some error in one of the items");
            callback(err);
        } else {
            console.log("All array items have been treated successfully");
            callback(null);
        }
    });
}
And I would recommend loading the user and posts before using the async.eachSeries function.
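Applied to the question's file search, that pattern might look like the following sketch (logFiles, the searchStr semantics, and the 'logs/dit/' prefix are taken from the question; treat it as an outline rather than drop-in code):
var fs = require('fs');
var async = require('async');

function readFile(searchStr, logFiles, callback) {
    var result = [];
    async.eachSeries(logFiles, function (logFile, cb) {
        if (logFile.filename === '.gitignore') return cb(null); // skip this file
        fs.readFile('logs/dit/' + logFile.filename, 'utf8', function (err, data) {
            if (err) return cb(err);
            data.split('\n').forEach(function (line) {
                if (line.indexOf(searchStr) !== -1) result.push(line); // collect matches
            });
            cb(null);
        });
    }, function (err) {
        callback(err, result); // all files processed (or an error occurred)
    });
}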

Callback problems

I am new to JavaScript, and I'm currently trying to add callbacks to my script. This script should return the reduced words in an array of objects.
var fs = require('fs')
var dict = ['corpus.txt', 'corpus1.txt', 'corpus2.txt'];

mapping(dict, function (error, data) {
    if (error) throw error
    console.log(data)
})

function mapping(list, callback) {
    var txtObj = []
    list.forEach(function (val) {
        readFile(val, function (error, data) {
            txtObj.push(data)
        })
    })
    function readFile(src, cb) {
        fs.readFile(src, 'utf8', function (error, data) {
            if (error) return callback(error, null)
            return mapred(data)
        })
    }
    return callback(null, txtObj)
}
But it returns empty array. Any help would be appreciated.
Thanks!
`fs.readFile` is an asynchronous function; before it finishes and its result callback is invoked, you are returning the empty txtObj array.
How to fix it?
Call return callback(null, txtObj) after fs.readFile has finished running.
Also, since you are running an asynchronous function on an array of items one by one, it might still not work the way you want; you may want to use modules like async in Node.js.
Here is an asynchronous version using the async module. Synchronous file operations are strongly discouraged :)
var fs = require('fs')
var dict = ['corpus.txt', 'corpus1.txt', 'corpus2.txt'];

mapping(dict, function (error, data) {
    if (error) throw error
    console.log(data)
})

function mapping(list, callback) {
    var txtObj = [],
        async = require('async');
    async.each(list, readFile, function (err) {
        callback(err, txtObj)
    });
    function readFile(src, cb) {
        fs.readFile(src, 'utf8', function (error, data) {
            if (error) {
                cb(error);
            }
            else {
                txtObj.push(mapred(data));
                cb(null);
            }
        })
    }
}
EDIT: You can do this without async, but it is a little bit dirty, isn't it? Also, it's OK to remove the self-invoking function inside the forEach; I included it so that you can access val even after the callback is done.
var fs = require('fs')
var dict = ['corpus.txt', 'corpus1.txt', 'corpus2.txt'];

mapping(dict, function (error, data) {
    if (error) throw error
    console.log(data)
})

function mapping(list, callback) {
    var txtObj = [],
        counter = list.length,
        start = 0;
    list.forEach(function (val) {
        (function (val) {
            readFile(val, function (error, data) {
                txtObj.push(data);
                start++;
                if (error || (start === counter)) {
                    callback(error, txtObj);
                }
            });
        })(val);
    })
    function readFile(src, cb) {
        fs.readFile(src, 'utf8', function (error, data) {
            if (error) {
                cb(error);
            }
            else {
                cb(null, mapred(data));
            }
        })
    }
}
The reason you are getting an empty array result is that you are performing the callback before the readFile function has a chance to populate the array. You are performing multiple asynchronous actions but not letting them complete before continuing.
If there was only one async action, you would call callback() in the callback function of readFile, but as you need to perform multiple async actions before calling callback(), you should consider using fs.readFileSync().
Sometimes sync cannot be avoided.
function mapping(list, callback)
{
    var txtObj = []
    list.forEach(function (val)
    {
        try { txtObj.push(mapred(fs.readFileSync(val, 'utf8'))) }
        catch (err) { callback(err) }
    })
    callback(null, txtObj)
}
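If blocking the event loop is a concern, a modern alternative is fs.promises with Promise.all; a sketch, assuming mapred is defined as in the question:
const fs = require('fs').promises;

function mapping(list, callback) {
    // Read all files in parallel, then map the contents through mapred
    Promise.all(list.map(function (src) { return fs.readFile(src, 'utf8'); }))
        .then(function (contents) { callback(null, contents.map(mapred)); })
        .catch(function (err) { callback(err); });
}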

sync independent callbacks result

I am searching for an elegant way to sync independent callback results invoked in unknown order.
function callback1() {
    var result;
};

function callback2() {
    var result;
};

// When done, then call
function success(res1, res2) {
    // do whatever
}
I know I can do something like:
var res = {};
var dfd = $.Deferred();

function callback1() {
    var result;
    res.res1 = result;
    (res.res1 && res.res2) && (dfd.resolve(res));
};

function callback2() {
    var result;
    res.res2 = result;
    (res.res1 && res.res2) && (dfd.resolve(res));
};

dfd.done(function (result) {
    // do whatever
});
but I would appreciate it if somebody came up with a more elegant solution.
If you return promises (builtin promises, not jQuery deferreds) and you don't care about order, then you can use Promise.all:
function callback1() {
    return Promise.resolve(1)
}

function callback2() {
    return Promise.resolve(2)
}

var ps = [callback1(), callback2()]

function add(x, y) {
    return x + y
}

Promise.all(ps).then(function (result) {
    return result.reduce(add)
}).then(console.log) // => 3
If you want to sequence them, you can lift a curried function that expects as many arguments as there are promises into the promise world, and apply it to the resolved values one by one. In other words:
function apply(pa, pf) {
    return pf.then(function (f) {
        return pa.then(f)
    })
}

function lift(f, ps) {
    return ps.reduce(function (pa, pb) {
        return apply(pb, pa)
    }, Promise.resolve(f))
}

function add(x) {
    return function (y) {
        return x + y
    }
}

lift(add, ps).then(console.log) //=> 3
You can also sequence them in such a way that you don't need a curried function, by collecting the results in an array first then reducing it:
function sequence(ps) {
    return ps.reduceRight(function (pa, pb) {
        return pa.then(function (a) {
            return pb.then(function (b) {
                return [b].concat(a)
            })
        })
    }, Promise.resolve([]))
}

function add(x, y) {
    return x + y
}

// This looks similar to the Promise.all approach,
// but the results are collected strictly in order
sequence(ps).then(function (result) {
    return result.reduce(add)
}).then(console.log) // => 3
There are libraries that do this, such as the async library, but here's a "from scratch" solution. I'm also avoiding promises to avoid overwhelming you, but you should really read about them as they are the most elegant solution, albeit complicated for first timers.
function runInParallel(jobs, done) {
    // Store all our results in an array.
    var results = [];
    // Count completed jobs separately; `results` is filled sparsely,
    // so its `length` cannot be trusted as a completion check.
    var completed = 0;
    // If one job fails, set this to true and use it to
    // ignore all job results that follow.
    var failureOccurred = false;
    // Iterate over each of our registered jobs.
    jobs.forEach(function (runJob, index) {
        // Create a jobDone callback to pass to the job.
        var jobDone = function (err, result) {
            // If another job failed previously, abort processing
            // this job's result. We no longer care.
            if (failureOccurred) return;
            // If this job passed in an error, set failure to true
            // and pass the error to the final done callback.
            if (err) {
                failureOccurred = true;
                done(err);
                return;
            }
            // If we made it this far then push the job result into
            // the results array at the same position as the job in
            // the jobs array.
            results[index] = result;
            completed += 1;
            // If we have as many completed jobs as the jobs array
            // had jobs then we have finished processing them all.
            // Invoke our done callback with an array of all results.
            if (completed === jobs.length) {
                done(null, results);
            }
        };
        // Begin the job and pass in our jobDone callback.
        runJob(jobDone);
    });
}
This will call all of your job functions in the array, passing in a jobDone callback the job should call when finished. If any job passes an error in then the function will immediately invoke the result callback with the error and ignore everything else. If the jobs succeed then you'll end up with an array of job results in the same positions as the jobs were in the jobs array. Simply modify your job functions to accept the jobDone callback.
var jobs = [
    function job1(done) {
        try {
            var result;
            done(null, result);
        } catch (err) {
            done(err);
        }
    },
    function job2(done) {
        try {
            var result;
            done(null, result);
        } catch (err) {
            done(err);
        }
    }
];

runInParallel(jobs, function (err, results) {
    if (err) {
        console.error(err);
        return;
    }
    // results[0] = jobs[0] result
    // results[1] = jobs[1] result
    // etc...
});
Instead of an array of jobs you could modify this code to accept an object with property names. Then instead of assigning the results to the same position as the jobs in the jobs array you could assign the results to an object using the same property names.
Example (without comments this time):
function runInParallel(jobs, done) {
    var results = {};
    var failureOccurred = false;
    var jobNames = Object.keys(jobs);
    var completed = 0;
    jobNames.forEach(function (jobName) {
        var jobDone = function (err, result) {
            if (failureOccurred) return;
            if (err) {
                failureOccurred = true;
                done(err);
                return;
            }
            results[jobName] = result;
            completed += 1;
            // Plain objects have no `length`, so count completions instead
            if (completed === jobNames.length) {
                done(null, results);
            }
        };
        jobs[jobName](jobDone);
    });
}
Then you can consume it like this:
var jobs = {
    job1: function (done) {
        try {
            var result;
            done(null, result);
        } catch (err) {
            done(err);
        }
    },
    job2: function (done) {
        try {
            var result;
            done(null, result);
        } catch (err) {
            done(err);
        }
    }
};

runInParallel(jobs, function (err, results) {
    if (err) {
        console.error(err);
        return;
    }
    // results.job1 = job1 result
    // results.job2 = job2 result
    // etc...
});
The parallel function in the async library does almost exactly what we've done above. It even accepts an array of jobs or an object of named jobs like we did :)
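For comparison, here is a brief sketch of the named-jobs form in async (the job bodies and timings are placeholders):
var async = require('async');

async.parallel({
    job1: function (done) { setTimeout(function () { done(null, 1); }, 200); },
    job2: function (done) { setTimeout(function () { done(null, 2); }, 100); }
}, function (err, results) {
    if (err) return console.error(err);
    console.log(results); // => { job1: 1, job2: 2 }
});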
Assuming your tasks (callback1() and callback2()) are synchronous, you might choose to write a reusable generalisation of the code in the question, in the form of a function that returns a function, trapping a couple of private vars in a closure:
function resultAggregator(n, fn) {
    var results = {},
        count = 0;
    return function (id, res) {
        count++;
        results[id] = res;
        if (count == n) {
            fn(results);
        }
    }
}
So after calling resultAggregator(), you have a function that can be kept in scope of other functions or passed to other parts of your code base. It makes no assumptions about ids or the nature of the results, except that they are synchronously derived. It will fire its callback when n results have been delivered.
var myResults = resultAggregator(2, function (results) {
    // do whatever;
});

// The following commands may be in different parts of your code base
myResults('id1', synchTask1());
...
myResults('id2', synchTask2());
...
myResults('id3', synchTask3());
// The second task to deliver its data (ostensibly `synchTask1()` and `synchTask2()`, but not necessarily) will trigger the callback.
This is just one way to perform result aggregation. You might do something different depending on the exact scenario, for example a slightly different formulation that also records the order in which the results arrived.
Whatever you write, Deferreds/Promises are not necessary for the aggregation of synchronously derived data.
However, if any one task is, or may be, asynchronous then you may need a promise aggregator, eg jQuery.when() or Promise.all(), somewhere in the pattern.
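For instance, with jQuery deferreds that aggregation might look like this sketch (asyncTask1 and asyncTask2 are hypothetical functions returning promises):
// $.when resolves once both tasks have resolved
$.when(asyncTask1(), asyncTask2()).done(function (res1, res2) {
    // both results are available here; do whatever
});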

saving multiple data with async nature of node.js with mongodb

I have an array of ids:
var ids = ['53asd3','53asd2','53asd5'];
Each id has a corresponding document in MongoDB.
I want to generate an object by populating data from each of them and save it in some other document, like this:
{
    person: { /* data from some collection populated with the first id */ },
    company: { /* data from some collection populated with the second id */ },
    employee: { /* data from some collection populated with the third id */ }
}
WHAT I DID
var document = {}
models.persons.find({ _id: '53asd3' }, function (err, data) {
    if (!err) {
        document['persons'] = data;
        models.company.find({ _id: '53asd2' }, function (err, data) {
            if (!err) {
                document['company'] = data;
                models.employee.find({ _id: '53asd2' }, function (err, data) {
                    if (!err) {
                        document['employee'] = data;
                        document.save(function (err) { });
                    }
                });
            }
        });
    }
});
So I just use nested callbacks, which effectively makes the queries sequential. Is there a way I can execute all three find queries in parallel and then execute the save command? I actually want to leverage the async nature of Node.js. Any suggestions?
You could build something like async.parallel yourself if you don't want to include an external library. Here's what a simple parallel function might look like. It could be a nice exercise to implement the other functions in the async library.
var parallel = function () {
    var functs = arguments[0];
    var callback = arguments[1];
    // Since we're dealing with a sparse array when we insert the results,
    // we cannot trust the `length` property of the results.
    // Instead we count the results separately
    var numResults = 0;
    var results = [];
    var getCallback = function (i) {
        return function (err, res) {
            if (err) {
                callback(err)
            }
            else {
                results[i] = res;
                numResults += 1;
                if (numResults === functs.length) {
                    callback(null, results);
                }
            }
        }
    }
    functs.forEach(function (fn, i) {
        fn(getCallback(i));
    });
};
var getTest = function (timeout) {
    return function (callback) {
        setTimeout(function () {
            callback(null, timeout);
        }, timeout);
    }
};

parallel([getTest(99), getTest(1000), getTest(199)], console.log.bind(console));
>> null [99, 1000, 199]
Then in your case you can do something like
var findItem = function (collection, id) {
    return function (callback) {
        collection.find({
            _id: id
        }, callback);
    };
};

parallel([
    findItem(models.persons, '53asd3'),
    findItem(models.company, '53asd2'),
    findItem(models.employee, '53asd5')
], function (err, results) {
    document.persons = results[0];
    document.company = results[1];
    document.employee = results[2];
    document.save(function (err) {
        // and so on
    });
});
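For comparison, if you are willing to add the async library, the same flow can be written with async.parallel's named-tasks form (a sketch; the models and ids are from the question):
var async = require('async');

async.parallel({
    persons: function (cb) { models.persons.find({ _id: '53asd3' }, cb); },
    company: function (cb) { models.company.find({ _id: '53asd2' }, cb); },
    employee: function (cb) { models.employee.find({ _id: '53asd5' }, cb); }
}, function (err, results) {
    if (err) return console.error(err);
    document.persons = results.persons;
    document.company = results.company;
    document.employee = results.employee;
    document.save(function (err) { /* and so on */ });
});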

Recursively traverse tree in Javascript

This is a super simple task to do in Java, but the asynchronous nature of JavaScript makes this task (for me) almost impossible, at least with my current knowledge. (I'm not trying to bash JavaScript. Love the language!)
It's very basic. A top-level tree has a parent of null in my MySQL database. It's easy to find children. The children have lines available to them. The depth of the tree is variable.
private static Set<Tree> getBranches( Tree trunk ) {
    Set<Tree> treeSet = new HashSet<Tree>();
    if ( trunk != null ) {
        if ( trunk.hasLines() ) { // queries whether the tree has lines; returns true or false
            treeSet.add( trunk );
        }
        for ( Tree tree : trunk.treeList ) {
            treeSet.addAll( getBranches( tree ) );
        }
    }
    return treeSet;
}
Basically, the method tests whether the tree has lines available. If it does, it adds the tree to a set. If not, it continues down the branches until it finds lines.
The asynchronous nature of the mysql node library turns this task into hell.
Here is what I have now
function hasLines(tree_id, callback) {
    var ret;
    pool.query('SELECT * from pkg_line_tree where tree_id = ?', [tree_id], function (err, rows) {
        if (rows.length > 0) {
            ret = true;
        } else {
            ret = false;
        }
        callback(ret);
    });
}

function dig(tree_id, treeArray, callback) {
    pool.query('SELECT * from tree where parent_id = ?', [tree_id], function (err, rows) {
        if (rows) {
            for (var i in rows) {
                hasLines(rows[i].tree_id, function (t) {
                    if (t) {
                        treeArray.push(rows[i].tree_id);
                    } else {
                        treeArray.concat(dig(rows[i].tree_id, treeArray));
                    }
                });
            }
            if (callback) {
                callback(treeArray);
            }
        }
    });
    return treeArray;
}

var treeArray = [];

dig(52, treeArray, function (t) {
    res.json(t);
});
I really just need to output all the children available in this root tree.
Please let me know if this doesn't make sense. I'll try to refactor. I'm hoping I got some kind of point across. I'd hate to use something like Fibers to get this done but I'm out of options. Thanks.
Your use of dig() isn't currently consistent:
// asynchronous with callback
dig(52, treeArray, function (t) {
    res.json(t);
});
// then synchronous with `return`?
treeArray.concat(dig(rows[i].tree_id, treeArray));
Also, the concat in the last line isn't actually doing much, since it doesn't alter the array it's called on. You probably wouldn't actually want it to as dig passes around the treeArray rather than defining a new treeSet like in getBranches. So, if it did, it would append treeArray onto the end of itself each time.
You could still use concat with multiple treeSets, but you'd have to store its return value:
treeSet = treeSet.concat(subSet);
And you'll have to replace the for loop with an asynchronous iterator, as the loop won't wait for asynchronous operations before continuing. The async library has a few options for this, if you're up for trying it.
So, with multiple treeSets, concat, and async.forEachSeries, you could try:
function dig(tree_id, callback) {
    var treeSet = [];
    hasLines(tree_id, function (yep) {
        if (yep) {
            treeSet.push(tree_id);
        }
        pool.query('SELECT * from tree where parent_id = ?', [tree_id], function (err, rows) {
            function each(row, next) {
                dig(row.tree_id, function (subSet) {
                    treeSet = treeSet.concat(subSet);
                    next(null);
                });
            }
            function done() {
                callback(treeSet);
            }
            async.forEachSeries(rows, each, done);
        });
    });
}

dig(52, function (treeSet) {
    res.json(treeSet);
});
You have to use async: https://github.com/caolan/async
I have modified your dig function to use async's forEach method:
function dig(tree_id, treeArray, AllDone) {
    pool.query('SELECT * from tree where parent_id = ?', [tree_id], function (err, rows) {
        if (rows) {
            async.forEach(
                rows,
                function (row, callback) {
                    hasLines(row.tree_id, function (t) {
                        if (t) {
                            treeArray.push(row.tree_id);
                            callback();
                        }
                        else {
                            dig(row.tree_id, treeArray, callback);
                        }
                    });
                },
                function (err) {
                    if (err) AllDone(err, treeArray);
                    else AllDone(null, treeArray);
                });
        }
        else
            AllDone(null, treeArray)
    });
}

treeArray = [];

dig(52, treeArray, function (err, t) {
    res.json(t);
});
Assuming rows is an array: forEach goes through each row and performs hasLines; each iteration calls its callback when it finishes, and AllDone is called once all the callbacks have been called. The tricky part here is the recursion: each recursive call has its own forEach loop, and it calls its AllDone method only when all of its callbacks have finished.
However, forEach executes in parallel, so order is not preserved.
I think this should work, if you don't care about order.
Edit: you can use forEachSeries to solve the order problem.
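A minimal sketch of that change: swap forEach for forEachSeries and everything else in dig stays the same, with rows now processed one at a time and in order:
async.forEachSeries(
    rows,
    function (row, callback) {
        hasLines(row.tree_id, function (t) {
            if (t) {
                treeArray.push(row.tree_id);
                callback();
            } else {
                dig(row.tree_id, treeArray, callback);
            }
        });
    },
    function (err) {
        AllDone(err, treeArray);
    });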
