I'm attempting to use the async module with NodeJS for the first time, and I'm running into a problem when I try and dynamically construct an array of functions for the async.parallel function to run:
// Question code: builds an array of closures inside a for..in loop.
// BUG (the subject of this question): every pushed function closes over the
// same `key` variable (an implicit global — no var/let), so by the time
// async.parallel invokes them the loop has finished and all callbacks see
// the final key value.
methods = [];
for (key in entries) {
methods.push(function(callback) {
return callback(null, key);
});
}
return async.parallel(methods, function(err, results) {
console.log(results);
return render_views(req, res, 'view_blog_all', {
entries: entries
});
});
The output I keep seeing is:
[ 'powerful_sms_communication_for_teams',
'powerful_sms_communication_for_teams',
'powerful_sms_communication_for_teams' ]
And my 'entries' object I'm looping over definitely has three different keys. Something I'm missing?
ASYNC MODULE:
https://github.com/caolan/async
This is a common problem people run into with asynchronous logic. The key is to remember that your callbacks will not run until async.parallel invokes them — after the loop has finished — and at that point, key will be the last key value from the loop.
One way to solve this is to capture the key value within a new scope using an IIFE.
// Capture each key in its own scope so every pushed closure keeps the value
// the loop had when the closure was created.
// Fix over the original snippet: `methods` and `key` are now declared with
// `var` instead of leaking as implicit globals.
var methods = [];
var key;
for (key in entries) {
    (function (capturedKey) {
        // capturedKey is frozen by this IIFE at push time, so each callback
        // reports its own key even though they all run after the loop ends.
        methods.push(function (callback) {
            return callback(null, capturedKey);
        });
    })(key);
}
You can also use async.map with a normal array and single iterator instead.
return async.map(
entries,
function(key, callback){
callback(null, key);
},
function(err, results) {
console.log(results);
return render_views(req, res, 'view_blog_all', {
entries: entries
});
}
);
Here's a rewrite using https://github.com/caolan/async#map
// Rewrite using async.map: the iterator handles one entry at a time, fully
// in parallel, and the final callback fires once every entry is processed.
var handleEntry = function (entry, done) {
    // do your stuff here, fully parallel and async ;)
    done(null, entry);
};
async.map(entries, handleEntry, function (err, results) {
    console.log(results);
    return render_views(req, res, 'view_blog_all', {
        entries: entries
    });
});
Related
High level
I'm new to JS and Node. I'm working on an API endpoint that should return a status value. To compute the status value I need to make two sequential mongo queries where the second set of queries depend on the first query. The second set of queries will give me a status for each value found in the first query, of which I will pick one based on some logic. What is the best way to do it in NodeJS?
Specifics
Here are parts of my first attempt.
function getItemStatus(key, value, callback) {
MongoClient.connect(mongo_url, function(err, db) {
if (err) { return console.dir(err); }
db.collection('status', function(err, coll) {
if (err) { return console.dir(err); }
coll.distinct("_id.metric", function(err, metrics) {
if (err) { return console.dir(err); }
console.log('metrics : ', metrics);
_.foreach(metrics, function(metric) {
var query = {"_id": {
"$gte" : {key: key, value: value, created: new Date("1800-01-01T00:00:00"), metric : metric},
"$lte" : {key: key, value: value, created: new Date("2100-01-01T00:00:00"), metric : metric}}};
coll.find(query, {sort: {"_id.created": -1}, limit: 1})
I make a connection, query for a set of metric values using a distinct query. For each metric I then want to ask for the latest status. Ideally I'd like to have the entire set of statuses so that I could write a function taking this set and deciding on which status will be returned. My problem is passing the statuses back "up the chain" so that I can process the set of statuses.
In a synchronous situation I would simply write something like this
val metrics = getDistinctMetrics(key, value)
val statuses = metrics.map(getStatusForMetric)
val status = filterStatuses(statuses)
How can I accomplish this in JavaScript/NodeJS?
UPDATED to highlight the fact that the first queries will trigger several queries in the second step, i.e. one for each result found by the first query.
As I understand your question, you want to execute queries in parallel or in waterfall mode and apply some logic to the final result. You should look into a library that supports parallel/waterfall execution, like this:
Waterfall: Waterfall
// async.waterfall runs the tasks strictly in order, passing each task's
// results as arguments to the next; the final callback gets the last result.
async.waterfall([
function(callback) {
callback(null, 'one', 'two');
},
function(arg1, arg2, callback) {
// arg1 now equals 'one' and arg2 now equals 'two'
callback(null, 'three');
},
function(arg1, callback) {
// arg1 now equals 'three'
callback(null, 'done');
}
], function (err, result) {
// result now equals 'done'
});
Parallel: Parallel
// Build the four parallel tasks from one factory instead of repeating the
// same find-and-forward boilerplate four times. Behavior is unchanged: each
// task runs `find` on its collection and hands the result to async.
function makeFindTask(collection) {
    return function (callback) {
        collection.find(query, function (err, result) {
            if (err) {
                return handleError(res, err);
            }
            callback(null, result);
        });
    };
}

async.parallel({
    collectionOne: makeFindTask(collectionOne),
    collectionTwo: makeFindTask(collectionTwo),
    collectionThree: makeFindTask(collectionThree),
    collectionFour: makeFindTask(collectionFour),
}, function (err, results) {
    // All tasks finished (or one errored); respond with the combined results.
    return res.status(200).json(results);
});
And in the final callback you can do some logic or return the response.
In your sample code, you are making network calls inside other network calls, which can lead to callback hell and hard-to-follow query behavior; to overcome that, you can use promises.
This will help you in avoiding callback hell as well as your query will also be resolved.
Sample code:-
// Chain the two queries and surface failures. Fixes over the original:
//  * the bare `function` placeholder (a syntax error) is replaced with a
//    `query` placeholder — substitute your actual filter;
//  * a .catch(reject) is added — without it a failed query would never
//    reject, leaving this promise pending forever.
new Promise(function (resolve, reject) {
    db.collection.find(query).exec()
        .then(function (result_of_first_query) {
            // the result of the first query is available here for the second one
            return db.collection.findOne(query).exec();
        })
        .then(function (result_of_second_query) {
            resolve(result_of_second_query);
        })
        .catch(reject);
})
You can add more queries with each .then
I'm using both _.map and async.map in the Node portion of an application I'm working on. I'm running into some confusion while using these libraries together.
I have an array of arrays called results which looks something like this:
[[1, 2, 3], [2, 4, 6], [1, 3, 5]]
I would like to use _.map to access each inner array, and then async.map to make an API call for each value within each of these inner arrays. I would then like to use the results of this API call to replace each integer within my inner arrays with an object.
So at the end my array of arrays of integers will instead be an array of arrays of objects based on API call results.
[[{id: 1, email: 'test@example.com', state: 'active'}], ...]
This is the current code I have, and I believe I'm on the right path. My first console.log gives me the object I'm aiming for, but the second simply returns the integer:
// Question code: mixes _.map (sync) with async.map (async).
// The second console.log runs synchronously, before db.users.getById has
// completed, which is why it still prints the original integer.
_.map(results, function(result) {
async.map(result, function(user, callback) {
db.users.getById(user, function(err, userDetails) {
if (err) {
callback(null, null);
} else {
user = _.pick(userDetails, 'id', 'email', 'state');
console.log(user);
// BUG: callback is never invoked on this success path, so async.map
// never knows the item finished and never fires its final callback.
}
});
console.log(user); // runs before getById's callback — `user` is still the id
})
});
From docs:
// From docs: always hand the picked user (or the error) to async.map's
// callback so the final callback can fire with the populated arrays.
_.map(results, function (innerArray) {
    async.map(innerArray, function (userId, done) {
        db.users.getById(userId, function (err, userDetails) {
            done(err, _.pick(userDetails, 'id', 'email', 'state'));
        });
    }, function (err, users) {
        // here you will have your populated array of (arrays of) users
    });
});
But soon you'll find Promise.all more expressive in this regard.
This is asynchronous, so you will get the final result only in a callback.
You should pass 3 parameters to async.map(array, iterator, callback)
// Kick off the outer map; this final callback only runs once every inner
// array has been fully populated by userArrayHandler.
async.map(results, userArrayHandler, function (err, populatedArrays) {
    console.log(populatedArrays);
});
// Maps one inner array of user ids to user objects via getUser.
// Fix: propagate the inner map's error instead of unconditionally passing
// null — the original silently swallowed failures, so the outer map could
// finish with undefined results and no way to tell anything went wrong.
function userArrayHandler(userArray, callback) {
    async.map(userArray, getUser, function(err, results) {
        callback(err, results);
    });
}
// Fetch a single user by id and reduce the record to the fields the caller
// needs; errors are forwarded to the callback unchanged.
function getUser(userId, callback) {
    db.users.getById(userId, function (err, userDetails) {
        if (err) {
            return callback(err);
        }
        callback(null, _.pick(userDetails, 'id', 'email', 'state'));
    });
}
Using async.map for both the inner and outer arrays, instead of _.map, is more convenient.
I'm working with NodeJS and the Async api to create an api function that returns a list of stories. The get can be shallow (only contains Object ID's referencing other objects) or deep (all ID references are dereferenced by replacing the ID with the object referenced by it). The shallow get works fine, however when I run the deep copy, it hangs. You can see in my callbacks I placed console.log(#) to log which callback is fired, but none are fired.
I feel like the issue lies in me misunderstanding how async handles the callback function parameter for the .each, .series and .parallel functions. I need a function that will be fired once async completes all of its tasks, but it seems the callback function is instead being invoked after every individual operation that each, series or parallel completes.
// Question code: the deep fetch hangs. NOTE(review): every async callback
// below (cb1..cb4) is invoked only on the error path — async.each/series/
// parallel are never told a step SUCCEEDED, so their final callbacks never
// fire and the response is never sent.
router.get('/stories', function(req, res, next) {
var db = req.db,
options = {
deep : req.query.deep != null ? parseInt(req.query.deep) : false,
offset : req.query.offset || 0,
limit : req.query.limit || 0
};
Story.listStories(db, options, function(err, stories){
if (!options.deep){
res.json(new Response(err, stories));
res.end();
}else{
if (err || stories == null){
res.json(new Response(err, null));
res.end();
return;
}
async.each(stories,
function(story, cb1){
var articles = [],
galleries = [];
async.series([
function(cb2){
async.parallel([
//Extract the story's articles and their outlets
function(cb3){
async.each(story.articles,
function(article_id, cb4){
Article.getArticle(db, article_id, function(err, article){
if (err){
cb4(err);
return;
}
Outlet.getOutlet(db, article.outlet, function(err, outlet){
if (err){
cb4(err);
return;
}
article.outlet = outlet;
articles.push(article);
// BUG: cb4 is never called on success, so this async.each never completes.
});
});
},
function(err){console.log(4);
if (err)
cb3(err);
// BUG: cb3 is never called on success either.
});
}
],
function(err){console.log(3); //Parallel callback
if (err)
cb1(err);
// NOTE(review): on error this skips cb2 and jumps to cb1; on success nothing is called.
});
},
function(cb2){
story.articles = articles;
// BUG: cb2 is never called, so async.series stalls on this step.
}
],
function(err){console.log(2);
if (err)
cb1(err);
});
},
function(err){console.log(1);
res.json(new Response(err, stories));
res.end();
}
);
}
});
});
You're calling those async callbacks (cb1, cb2, cb3, cb4, etc.) only in error cases. You need to call them in non-error cases as well. Example:
// Call the async callback on BOTH paths — async only proceeds once it fires.
if (err) {
return cb1(err);
}
cb1(null); // or cb1()
I am kind of confused with the logic of results which go from one task to the other task in async.auto. For example in the following code logic I added some data to models in task1, which is initially an output from initialtask and in finalTask added data to models from task1 is reflected in results.initialTask1 as well. Similarly added data in task2 is reflected in results.initialTask1 in finalTask.
To sum up all of results.initialTask1, results.task1[0], results.task2[0], results.task3[0] are identical in finalTask. Is this the logic of async.auto? Or is it something like reference by pointer in C++ which causes whatever changes for models in task1, it reflects in models in initialTask as well?
// Question code: demonstrates that results.initialTask[1] and the values
// "returned" by task1/task2/task3 are all the same object — the tasks share
// one models reference and mutate it in place.
async.auto({
initialTask: function(callback) {
//Do some operations
callback(null, name, initialModels);
},
// task1-task3 run after initialTask; each receives the SAME array via
// results.initialTask[1], so in-place additions are visible everywhere.
task1: ['initialTask', function(callback, results) {
var models = results.initialTask[1];
//Add some more data to models
callback(null, models);
}],
task2: ['initialTask', function(callback, results) {
var models = results.initialTask[1];
//Add some more data to models
callback(null, models);
}],
task3: ['initialTask', function(callback, results) {
var models = results.initialTask[1];
//Add some more data to models
callback(null, models);
}],
finalTask: ['task1', 'task2', 'task3', function(callback, results) {
//Here the followings are the same: results.initialTask[1], results.task1[0], results.task2[0], results.task3[0]
}]
});
I'm looking for any answer that helps me confirm whether or not this is the intended behavior; official documentation is not required.
This is expected behavior. Basically async.auto will execute all the functions in the order it deems necessary. So in your case initialTask will be called first. Then task1, task2, and task3 will be called in parallel. Finally finalTask will be called with the results. The reason all the values are the same is related to JavaScript's call-by-sharing, meaning if you change a function parameter itself, then it won't affect the item that was fed into the parameter. If you change the internals of the parameter, it will carry up to the item.
More info here.
Example:
// Example: async.apply pre-binds fs.readFile's arguments so readData only
// needs the callback; showData is declared to depend on readData and so
// runs only after the file has been read.
async.auto({
// this function will just be passed a callback
readData: async.apply(fs.readFile, 'data.txt', 'utf-8'),
showData: ['readData', function(results, cb) {
// results.readData is the file's contents
// ...
}]
}, callback);
// async.auto example: get_data and make_folder run in parallel; write_file
// waits for both; email_link waits for write_file. The final callback gets
// the first error (if any) plus the accumulated results object.
async.auto({
    get_data: function(callback) {
        console.log('in get_data');
        // async code to get some data
        callback(null, 'data', 'converted to array');
    },
    make_folder: function(callback) {
        console.log('in make_folder');
        // async code to create a directory to store a file in
        // this is run at the same time as getting the data
        callback(null, 'folder');
    },
    write_file: ['get_data', 'make_folder', function(results, callback) {
        console.log('in write_file', JSON.stringify(results));
        // once there is some data and the directory exists,
        // write the data to a file in the directory
        callback(null, 'filename');
    }],
    email_link: ['write_file', function(results, callback) {
        console.log('in email_link', JSON.stringify(results));
        // once the file is written let's email a link to it...
        // results.write_file contains the filename returned by write_file.
        callback(null, {'file': results.write_file,
            'email': 'user@example.com'}); // fixed mangled address ('#' -> '@')
    }]
}, function(err, results) {
    console.log('err = ', err);
    console.log('results = ', results);
});
async.auto is a very useful and powerful function provided by the async library. It takes three arguments:
1. tasks
2. concurrency
3. callback
In async.auto, each function depends on its parent function(s), except the first one. If any function hits an error during execution, its dependent (child) functions will not be executed; the error is passed along, and the main callback returns immediately with that error.
1. tasks: an object
2. concurrency: an optional integer for determining the maximum number of tasks that can be run in parallel. By default, as many as possible.
3. callback: returns the response
Example:
// Looks up the user by email; when found, generates a reset token and emails
// it, otherwise responds 403 "register first".
// Fix: a findOne error is now reported via the callback instead of being
// silently treated as "user not found" (with err set, `user` is undefined,
// so the original wrongly told the caller to register).
AnyService.prototype.forgetPassword = function (res, email, isMobile, callback) {
    Logger.info("In AnyService service forgetPassword email...", email);
    db.User.findOne({
        email: email.toLowerCase(),
        deleted: false
    }, function (err, user) {
        if (err) {
            // Database failure: surface it rather than misreporting the user.
            return callback(new Error(configurationHolder.errorMessage.oops));
        }
        if (!user) {
            configurationHolder.responseHandler(res, null, configurationHolder.LoginMessage.registerFirst, true, 403)
        } else {
            async.auto({
                // Generate the password-reset token first...
                token: function (next, results) {
                    return gereratePasswordToken(next, email, user, isMobile);
                },
                // ...then email it (depends on `token`).
                sendMail: ['token', function (next, result) {
                    return SendMailService.prototype.forgetPasswordMail(next, result.token, email, user.fullName);
                }]
            }, function (err, result) {
                if (err == null && result != null) {
                    configurationHolder.ResponseUtil.responseHandler(res, null, configurationHolder.LoginMessage.forgotPassword, false, 200)
                } else {
                    callback(new Error(configurationHolder.errorMessage.oops))
                }
            })
        }
    });
}
The mobile app is sending the server an array of records to be saved to the database. The server is to iterate through the array of records, save each one, build an arrary of newly created ids and then return the array of ids back to the app.
The following code is saving the records correctly, but due to the asynchronous nature of javascript, when the outer function returns the inner function has not completed yet and the array of ids is still empty.
How do I get this function to return the returnIDs array AFTER it has been filled?
// Question code: Test_session.create is asynchronous, so the final res.json
// below runs before any of the create callbacks have pushed an id.
create: function(req, res) {
var returnIDs = [];
for(var i in req.body){
Test_session.create(req.body[i], function test_SessionCreated(err, test_Session) {
if (err) {
console.log(err);
return res.json(err);
}
returnIDs.push(test_Session.id);
});
}
// BUG: executes immediately, while returnIDs is still empty; `returnIDs.toJSON`
// (no call, and arrays have no such method) is presumably meant to be just returnIDs.
return res.json({ "ids": returnIDs.toJSON}, 200);
}
I think using async.js is easier than promises. If you use async.js, then do this:
create: function(req, res) {
var returnIDs = [];
async.each(req.body, function(item, callback) {
Test_session.create(item, function(err, test_Session) {
if (err) {
console.log(err);
callback(err);
}
else
callback();
});
}, function(err){
res.json(200, { "ids": returnIDs});
});
}