GET request inside of a loop in JavaScript

So, my code looks something like
for (var n = 0; n < object.length; n++) {
    /* Other code */
    $.get(..., function(data) {
        // do stuff
    });
}
Now, the other code executes multiple times like it should. However, when the get callback runs, it only seems to run once, and by then n has already reached object.length. This causes all sorts of errors. n is only being incremented in the for loop.
Can you not loop get/post requests? Or if you can, what am I doing wrong? Thanks.

The for-loop won't wait for the $.get call to finish so you need to add some async flow control around this.
Check out async.eachSeries. The done callback below is the key to controlling the loop. After the success/fail of each $.get request you call done(); (or if there's an error, you call done(someErr);). This will advance the array iterator and move to the next item in the loop.
var async = require("async");
var list = ["foo", "bar", "qux"];

async.eachSeries(list, function(item, done) {
    // perform a GET request for each item in the list:
    // GET url?somevar=foo
    // GET url?somevar=bar
    // GET url?somevar=qux
    $.get(url, {somevar: item}, function(data) {
        // do stuff, then call done
        done();
    });
}, function(err) {
    if (err) {
        throw err;
    }
    console.log("All requests are done");
});

Do you want all the GET requests to run at the same time, or one after the other?
You can run them all at once with a forEach loop. forEach works better than a plain for loop here because it gives you a fresh elem and n for each iteration, so every callback sees the values it expects.
object.forEach(function(elem, n) {
    /* Other code */
    $.get(..., function(data) {
        // do stuff
    });
});
But if you want to perform the requests one after the other, you could do it this way:
(function loop(n) {
    if (n >= object.length) return;
    /* Other code */
    $.get(..., function(data) {
        // do stuff
        loop(n + 1);
    });
})(0);
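If your jQuery version returns promises from $.get (jQuery 1.5+), the same sequential chain can also be built with reduce. A minimal sketch, assuming object is an array and url points at your real endpoint:
// run the requests one after the other via jQuery's promise interface
object.reduce(function(chain, item) {
    return chain.then(function() {
        return $.get(url, {somevar: item}).then(function(data) {
            // do stuff with data for this item
        });
    });
}, $.Deferred().resolve().promise());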

Related

Node asynchronous calls in sequence

I have a couple of asynchronous requests that fetch some data from a url. The problem I am having is that I actually want to delay sending the json response until all requests have come back. The code looks something like this:
function getFirstStuff(callback) // imagine this is async
{
    console.log('gettingFirstStuff');
    callback(stuff);
}
function getFurtherStuff(callback) // imagine this is async
{
    console.log('gettingFurtherStuff');
    callback(thing);
}
function getStuff(callback)
{
    getFirstStuff(function(stuff) // async
    {
        // stuff is an array of 3 items
        stuff = stuff.map(function(item) // map is synchronous
        {
            // for each item in stuff, make another async request
            getFurtherStuff(function(thing) { // this is also async
                stuff.thing = thing;
            });
            return item;
        });
        callback(stuff);
    });
}
router.get('/getstuff', function(req, res, next) {
    getStuff(function(stuff)
    {
        console.log('finished stuff');
        // RETURN RESPONSE AS JSON
        res.json(stuff);
    });
});
router.get('/getstuff', function(req, res, next) {
getStuff(function(stuff)
{
console.log('finished stuff');
// RETURN RESPONSE AS JSON
res.json(stuff);
});
});
The output will be:
gettingFirstStuff
finished stuff
gettingFurtherStuff
gettingFurtherStuff
gettingFurtherStuff
but it should be:
gettingFirstStuff
gettingFurtherStuff
gettingFurtherStuff
gettingFurtherStuff
finished stuff
I understand that the reason is that getFurtherStuff is async, so item is returned from map before the getFurtherStuff calls come back with a result. My question is: what is the standard way to wait for these calls to finish before calling the final callback, callback(stuff)?
There are a bunch of ways to solve this problem. Libraries like async and queue would probably be the best choice if you have no problem adding dependencies.
The easiest option without external libs is to count the async jobs and finish when they have all completed:
// assuming stuff is an array
var counter = 0;
var jobCount = stuff.length;

// wrap the callback in one that checks the counter
var doneCallback = function() {
    if (counter >= jobCount) {
        // we're ready to go
        callback(stuff);
    }
};

// run the jobs
stuff.map(function(item) {
    getFurtherStuff(item, function(itemThing) {
        // process the async response
        stuff.thing = itemThing;
        // increment the counter
        counter++;
        // call the wrapped callback, which won't fire
        // until all jobs are complete
        doneCallback();
    });
});
npm install async
You would then simply throw your functions into an async.parallel().
More info at https://www.npmjs.com/package/async
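A minimal sketch of what that could look like, reusing the (assumed asynchronous) getFirstStuff and getFurtherStuff from the question:
var async = require("async");
// each task receives a done callback and reports (err, result)
async.parallel([
    function(done) {
        getFirstStuff(function(stuff) { done(null, stuff); });
    },
    function(done) {
        getFurtherStuff(function(thing) { done(null, thing); });
    }
], function(err, results) {
    // runs once both tasks have called done();
    // results[0] holds stuff, results[1] holds thing
    callback(results); // e.g. the callback passed to getStuff
});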

JS wait for callback to complete execution inside a for loop

I have an array of 3 items that I need to insert into PouchDB, so I'm using a for loop to insert the data. The problem is that it doesn't insert all of the data, because the loop finishes before the callbacks run.
for (var i = 0; i < data.length; i++) {
    var jsondoc = doc1; // document
    console.log(i);       // logs for all data, e.g. 0, 1, 2
    console.log(data[i]); // logs for all data, e.g. hi, hello
    jsondoc.messages.push({msg: data[i]}); // updating message; something needs to be done here
    db.put(jsondoc, function(err2, response2) {
        if (err2) { console.log(JSON.stringify(err2)); }
        console.log("this is not repeating");
    });
}
Since the db insertion runs asynchronously, you cannot put the loop on hold until the operation completes. One thing you can do is serialise the db inserts with a helper function like this:
function insertItem(data, i, completeCallback) {
    // check if we still have items to send
    if (i < data.length) {
        var jsondoc = doc1; // document
        // updating message; something needs to be done here
        jsondoc.messages.push({msg: data[i]});
        db.put(jsondoc, function(err2, response2) {
            if (err2) {
                console.log(JSON.stringify(err2));
            }
            // recursively call to push the next message
            insertItem(data, i + 1, completeCallback);
        });
    } else {
        // no more items to send, execute the callback
        if (typeof completeCallback === "function") {
            completeCallback();
        }
    }
}
You'll have to update your code so that, instead of continuing execution right after the loop, you pass that code into the callback given to insertItem. So if your original code looks like this:
// ... code before the loop
for (var i = 0; i < data.length; i++) {
    // ... original body of the loop
}
// ... code to execute after the loop and that currently causes problems
you'll need to change it like this:
// ... original code that was before the loop
insertItem(data, 0, function() {
    // ... original code that was executed after the loop and that caused problems,
    // but now it gets executed after all items were inserted in the db
});
Another alternative would be to send all the inserts in parallel and perform a join on those operations; you'll still need the callback workaround, though. Something along these lines:
function insertItems(data, callback) {
    var remainingItems = data.length;
    if (remainingItems === 0 && typeof callback === "function") {
        callback();
    }
    for (var i = 0; i < data.length; i++) {
        var jsondoc = doc1; // document
        console.log(i);
        console.log(data[i]);
        jsondoc.messages.push({msg: data[i]});
        db.put(jsondoc, function(err2, response2) {
            if (err2) { console.log(JSON.stringify(err2)); }
            remainingItems--;
            if (remainingItems === 0 && typeof callback === "function") {
                // I know, code redundancy :P
                callback();
            }
        });
    }
}
The usage of this second function is the same as for insertItem.
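For example, with the same data array as before:
insertItems(data, function() {
    // runs once every db.put has called back
    console.log("all " + data.length + " messages were inserted");
});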
If I understand you correctly, your problem is a scoping issue: jsondoc, or data[i], or whichever variable is causing the problem, is changed before your callback can complete.
The snippet below (from a jsFiddle demo) shows how to solve such a scoping problem.
for (var i = 0; i < 3; i++) {
    (function() {
        var j = i;
        setTimeout(function() {
            console.log(j);
        }, 500);
    })();
}
The demo originally compared two loops in the js console: the first, without the extra scope, prints 3 three times, since that is the finishing value of i, while the second, shown above, stores the value in a new variable inside a new scope and outputs 0, 1, 2 as expected.
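As an aside, in ES6 and later the same fix falls out of block scoping; a minimal sketch using let:
// let gives each iteration its own binding of i,
// so the callbacks log 0, 1, 2 as expected
for (let i = 0; i < 3; i++) {
    setTimeout(function() {
        console.log(i);
    }, 500);
}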

Populating async array with a function called right before.

var request = require('request'),
    requests = [],
    values = [];

request("url1", function() {
    // .....
    for (x in list) {
        requests.push(requestFunction(x));
    }
});

function requestFunction(x) {
    request("url2", function(e, r, b) {
        // ....
        return function(callback) {
            values[i] = b;
        };
    });
}

async.parallel(requests, function(allResults) {
    // values array is ready at this point
    // the data should also be available in the allResults array
    console.log(values);
});
I'm new to Node. The issue is that request needs to be called to populate the requests array with callbacks, but async.parallel runs before the requests array is full. Where do I move the async.parallel call so that it runs after the requests array is full?
Asynchronous programming is all about chaining blocks. This allows node to efficiently run its event queue, while ensuring that your steps are done in order. For example, here's a query from a web app I wrote:
app.get("/admin", isLoggedIn, isVerified, isAdmin, function(req, res) {
    User.count({}, function(err, users) {
        if (err) throw err;
        User.count({"verified.isVerified" : true}, function(err2, verifiedUsers) {
            if (err2) throw err2;
            Course.count({}, function(err3, courses) {
                // and this continues on and on; the admin page
                // has a lot of information on all the documents in the database
            });
        });
    });
});
Notice how I chained the calls inside one another: Course.count({}, ...) is only called once User.count({"verified.isVerified" : true}, ...) has returned. This means the i/o never blocks, and the /admin page is never rendered without the required information.
You didn't really give enough information regarding your problem (so there might be a better way to fix it), but I think you could, for now, do this:
var request = require('request'),
    requests = [],
    values = [],
    length; // a counter to store the number of times to run the loop

request("url1", function() {
    length = Object.keys(list).length;
    // referring to the list below;
    // make sure list is available at this level of scope
    for (var x in list) {
        requests.push(requestFunction(x));
        length--;
        if (length == 0) {
            async.parallel(requests, function(allResults) {
                console.log(values); // prints the values array
            });
        }
    }
});

function requestFunction(x) {
    request("url2", function(e, r, b) {
        values[i] = b;
        return b;
    });
}
I am assuming that requestFunction() takes a while to load, which is why async.parallel is running before the for (var x in list) loop finishes. To force async.parallel to run after the loop finishes, you'll need a counter.
var length = Object.keys(list).length;
This returns the number of keys in the list associative array (aka object). Now, every time you run through the for loop, you decrement length. When length == 0, you then run your async.parallel process.
edit: You could also write the requests.push() part as:
requests.push(
    (function() {
        request("url2", function(e, r, b) {
            values[i] = b;
            return b;
        });
    })()
);
I think it's redundant to store b in both values and requests, but I have kept it as you had it.

How to write an asynchronous for-each loop in Express.js and mongoose?

I have a function that returns an array of items from MongoDB:
var getBooks = function(callback) {
    Comment.distinct("doc", function(err, docs) {
        callback(docs);
    });
};
Now, for each of the items returned in docs, I'd like to execute another mongoose query, gather the counts for specific fields, collect them all in a counts object, and finally pass that on to res.render:
getBooks(function(docs) {
    var counts = {};
    docs.forEach(function(entry) {
        getAllCount(entry, ...);
    });
});
If I put res.render after the forEach loop, it will execute before the count queries have finished. However, if I include it in the loop, it will execute for each entry. What is the proper way of doing this?
I'd recommend using the popular NodeJS package async. It's far easier than doing the counting and eventual error handling yourself, as the other answers require.
In particular, I'd suggest considering async.each:
getBooks(function(docs) {
    var counts = {};
    async.each(docs, function(doc, callback) {
        getAllCount(doc, ...);
        // call the callback with an error if one occurred,
        // or with no arguments if everything was OK;
        // store the value for each doc in counts
    }, function(err) {
        // all are complete (or an error occurred);
        // you can access counts here
        res.render(...);
    });
});
or you could use async.map:
getBooks(function(docs) {
    async.map(docs, function(doc, transformed) {
        getAllCount(doc, ...);
        // call transformed(null, theCount); for each document
        // (or transformed(err); if there was an error)
    }, function(err, results) {
        // all are complete (or an error occurred);
        // you can access results here, which contains the count values
        // returned by calling transformed(null, ###) in the map function
        res.render(...);
    });
});
If there are too many simultaneous requests, you could use the mapLimit or eachLimit functions to limit the number of simultaneous asynchronous mongoose requests.
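For example, a sketch capping concurrency at 5, with the same placeholders as above:
getBooks(function(docs) {
    // same shape as async.each, plus a concurrency limit
    async.eachLimit(docs, 5, function(doc, callback) {
        getAllCount(doc, ...);
        // call callback() / callback(err) as before
    }, function(err) {
        res.render(...);
    });
});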
forEach probably isn't your best bet here, unless you want all of your calls to getAllCount happening in parallel (maybe you do, I don't know; for that matter, Node is still single-threaded by default, isn't it?). Instead, just keeping an index and repeating the call for each entry in docs until you're done seems better. E.g.:
getBooks(function(docs) {
    var counts = {},
        index = 0,
        entry;
    loop();
    function loop() {
        if (index < docs.length) {
            entry = docs[index++];
            getAllCount(entry, gotCount);
        }
        else {
            // done, call res.render with the result
        }
    }
    function gotCount(count) {
        // ...store the count, it relates to entry...
        // and loop
        loop();
    }
});
If you want the calls to happen in parallel (or if you can rely on this working in the single thread), just remember how many are outstanding so you know when you're done:
// assumes docs is not sparse
getBooks(function(docs) {
    var counts = {},
        outstanding = docs.length;
    docs.forEach(function(entry) {
        getAllCount(entry, function(count) {
            // ...store the count; note that it *doesn't* relate to entry,
            // as we have overlapping calls...
            // done?
            --outstanding;
            if (outstanding === 0) {
                // yup, call res.render with the result
            }
        });
    });
});
In effect, the getAllCount call for the first item must, from its callback, kick off getAllCount for the second item, and so on.
There are two ways: you can use a framework like async (https://github.com/caolan/async), or build the callback chain yourself. It's fun to write the first time.
edit
The goal is to have a mechanism that proceeds the way we would write it by hand:
getAllCountFor(1, function(err1, result1) {
    getAllCountFor(2, function(err2, result2) {
        ...
        getAllCountFor(N, function(errN, resultN) {
            // res.render, and so on
        });
    });
});
And that's what you will construct with async, using its series helpers.
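A minimal sketch of that chain with async.eachSeries, keeping the getAllCount placeholder used in the other answers:
var async = require("async");
async.eachSeries(docs, function(doc, done) {
    getAllCount(doc, function(count) {
        // ...store the count for this doc...
        done();
    });
}, function(err) {
    // all counts collected (or err is set); call res.render here
});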

MongoDB and Node js Asynchronous programming

I am trying to solve an exam problem, so I cannot post my exam code as it is. So I have simplified such that it addresses the core concept that I do not understand. Basically, I do not know how to slow down node's asynchronous execution so that my mongo code can catch up with it. Here is the code:
MongoClient.connect('mongodb://localhost:27017/somedb', function(err, db) {
    if (err) throw err;
    var orphans = [];
    for (var i = 0; i < 100000; i++) {
        var query = { 'images' : i };
        db.collection('albums').findOne(query, function(err, doc_album) {
            if (err) throw err;
            if (doc_album === null) {
                orphans.push(i);
            }
        });
    }
    console.dir(orphans.length);
    return db.close();
});
So I am trying to create an array of those images that do not match my query criteria. I end up with an orphans.length value of 0, since Node does not wait for the callbacks to finish. How can I modify the code so that the callbacks finish executing before I count the number of images in the array that did not meet my query criteria?
Thanks in advance for your time.
Bharat
I assume you want to issue 100,000 parallel DB calls. To "wait" for all 100,000 calls to complete, we increment a finished-calls counter in each callback and invoke the main callback when the last one finishes. Note that a very common mistake here is to use the for-loop variable as a closure inside the callback. This does not work as expected, because all 100,000 handlers are scheduled first, and by the time the first one executes, the loop variable already holds its final, maximum value.
function getOrphans(cb) {
    MongoClient.connect('mongodb://localhost:27017/somedb', function(err, db) {
        if (err) return cb(err);
        var orphans = [];
        var numResponses = 0;
        var maxIndex = 100000;
        for (var i = 0; i < maxIndex; i++) {
            // problem: by the time you get the reply, "i" would be 100000;
            // closure variable changed to a function argument:
            (function(index) {
                var query = { 'images' : index };
                db.collection('albums').findOne(query, function(err, doc_album) {
                    numResponses++;
                    if (err) return cb(err);
                    if (doc_album === null) {
                        orphans.push(index);
                    }
                    if (numResponses == maxIndex) {
                        db.close();
                        cb(null, orphans);
                    }
                });
            })(i); // immediately invoked function expression
        }
    });
}
getOrphans(function(err, o) {
    if (err)
        return console.log('error:', err);
    console.log(o.length);
});
I'm not suggesting this is the best way to handle this specific problem in Mongo, but if you need to wait for the DB to reply before continuing, just use the callback to start the next request.
This is not obvious at first, but you can refer to the result processing function inside the function itself:
var i = 0;
var mycback = function(err, doc_album) {
    // ... process the i-th result ...
    if (++i < 100000) {
        db.collection('albums').findOne({'images': i}, mycback);
    } else {
        // request is complete, "return" the result
        result_cback(null, res);
    }
};
db.collection('albums').findOne({'images': 0}, mycback);
This also means that your function itself will be async (i.e. will want a result_cback parameter to call with the result instead of using return).
Writing a sync function that calls an async one is just not possible.
You cannot "wait" for an event in JavaScript: you must set up a handler for the result and then terminate.
Waiting for an event is done in event-based processing by writing a "nested event loop" and this is for example how message boxes are handled in most GUI frameworks. This is a capability that Javascript designers didn't want to give to programmers (not really sure why, though).
Since you know it does not wait for the calls to come back, you can move the console.dir inside your callback function. This should work (although I haven't tested it):
db.collection('albums').findOne(query, function(err, doc_album) {
    if (err) throw err;
    if (doc_album === null) {
        orphans.push(i);
    }
    console.dir(orphans.length);
});
You don't need to slow anything down. If you are simply trying to check 100,000 images against the albums collection, you could consider using the async framework. This will let you assign tasks until the job is complete.
Also, you probably don't want to request 100,000 records one by one. Instead, you probably want to page them.
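A minimal paging sketch, assuming the albums collection from the question and the classic callback-style driver (fetchPage and the page size of 100 are made up for illustration):
// fetch albums 100 at a time instead of issuing
// 100,000 individual findOne calls
function fetchPage(skip) {
    db.collection('albums').find({})
        .skip(skip).limit(100)
        .toArray(function(err, docs) {
            if (err) throw err;
            // ... scan each doc's images array here ...
            if (docs.length === 100) {
                fetchPage(skip + 100); // there may be more pages
            } else {
                db.close();
            }
        });
}
fetchPage(0);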
