Ajax asynchronicity calls end function before inner loop function - javascript

This is the code I am struggling with:
$(document).ready(function () {
    var dataSets = [];
    getSensorIDs().done(function (result) {
        var sensorIDs = jQuery.parseJSON(result);
        $.each(sensorIDs, function (index, value) { // loop through each id
            getDataBySensorID(value.id).done(function (res) {
                var temperatureData = jQuery.parseJSON(res);
                var dataPoints = [];
                $.each(temperatureData, function (index, value) {
                    dataPoints.push({
                        x: index,
                        y: value.value
                    });
                });
                dataSets.push(buildDataset(dataPoints, 1));
                alert("Pushed something.");
            });
        });
        alert("Drawed something.");
        drawChart(dataSets);
    });
});
The problem is: "Drawed something" happens ahead of "Pushed something", even though the program flow doesn't look like it would. I thought I had fixed the problem of AJAX being asynchronous with .done(), but it doesn't seem like that. What would be a proper fix for this problem?

It is working exactly as it should, because the "Pushed something" code is in a NESTED AJAX request's done callback, which will run sometime in the future. So the JavaScript engine continues processing the rest of the code, which includes the "Drawed something" line in the top-level AJAX callback, and that happens next.
At some point in the future, the nested AJAX call will complete, its done callback will fire, and then you will get "Pushed something".
// 1. When the document is ready...
$(document).ready(function () {
    var dataSets = [];
    // 2. Make the AJAX call, BUT DON'T RUN .done YET!
    getSensorIDs().done(function (result) {
        var sensorIDs = jQuery.parseJSON(result);
        // loop through each id
        $.each(sensorIDs, function (index, value) {
            // 3. As part of the first AJAX call, make a SECOND one
            getDataBySensorID(value.id).done(function (res) {
                var temperatureData = jQuery.parseJSON(res);
                var dataPoints = [];
                $.each(temperatureData, function (index, value) {
                    dataPoints.push({
                        x: index,
                        y: value.value
                    });
                });
                // 5. The nested call finishes last
                dataSets.push(buildDataset(dataPoints, 1));
                alert("Pushed something.");
            });
        });
        // 4. This runs first because the first AJAX call finishes
        // before the nested one (above)
        alert("Drawed something.");
        drawChart(dataSets);
    });
});
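A proper fix (a sketch, assuming getSensorIDs and getDataBySensorID return jqXHR/promise objects as the question implies) is to collect every nested request in an array and defer drawChart until $.when reports that all of them have resolved:

getSensorIDs().done(function (result) {
    var dataSets = [];
    var sensorIDs = jQuery.parseJSON(result);
    // $.map collects one jqXHR promise per sensor
    var requests = $.map(sensorIDs, function (value) {
        return getDataBySensorID(value.id).done(function (res) {
            var temperatureData = jQuery.parseJSON(res);
            var dataPoints = [];
            $.each(temperatureData, function (index, value) {
                dataPoints.push({ x: index, y: value.value });
            });
            dataSets.push(buildDataset(dataPoints, 1));
        });
    });
    // $.when resolves only after every request in the array has resolved
    $.when.apply($, requests).done(function () {
        drawChart(dataSets); // now dataSets is fully populated
    });
});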

Related

Executing ajax queries in a loop followed by a function that uses their combined output

I have an ajax query followed by some functions, and I use the .then() promise callback to execute them in order:
var pictures = [];
var venues = **an array of venues**
$.get(url).then(functionA, functionFail).then(functionB);
But functionA, the first success callback, includes a loop that fires off 'n' ajax requests:
for (var i = 0; i < n; i++) {
    var venue = venues[i];
    var new_url = **some_url**
    $.ajax({url: new_url, async: false}).done(function(data) {
        var pics = data.response.photos.items;
        pictures.push(pics[0]);
        pictures.push(pics[1]);
    }).fail(function() {
        console.log('Failed!');
    });
}
These looped AJAX requests fill up the global pictures array. The pictures array is then used by functionB, but because of the async nature of the requests, functionB executes right away, before the array has been fully filled.
I tried to make the requests synchronous with async: false but it's not completely effective (it leaves out the very last request of the loop).
How can I make sure that functionB is only executed after all the ajax requests have finished? I don't want to use timeouts but if nothing else I'll fall back on that.
As you're using jQuery, jQuery.when() can take multiple Deferreds and will call done once they're all resolved.
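A sketch of that approach, reusing the question's loop (buildUrl is a hypothetical stand-in for **some_url**):

var requests = venues.map(function (venue) {
    return $.ajax({ url: buildUrl(venue) }); // buildUrl is hypothetical
});
// With more than one request, each argument to .done is one request's
// [data, statusText, jqXHR] triple, in the same order as `requests`
$.when.apply($, requests).done(function () {
    $.each(arguments, function (i, args) {
        var pics = args[0].response.photos.items;
        pictures.push(pics[0]);
        pictures.push(pics[1]);
    });
    functionB();
});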
Not sure if this is the best answer, but one of them! Just count the number of requests that have completed, and when all of them have, execute your function.
var completed_requests = 0,
    returned_data = [];
var done = function(data_array) {
    var pics = [];
    data_array.forEach(function(data) {
        pics = pics.concat(data.response.photos.items);
    });
};
for (var i = 0; i < n; i++) {
    var venue = venues[i];
    var new_url = **some_url**
    $.ajax({url: new_url}).done(function(data) {
        completed_requests++;
        returned_data.push(data);
        if (completed_requests === n) {
            done(returned_data);
        }
    }).fail(function() {
        console.log('Failed!');
    });
}
My example also saves data from all requests until you need it.
You can use Promise.all. Code of functionA:
var requests = []
for (var i = 0; i < n; i++) {
    var venue = venues[i];
    var new_url = **some_url**
    requests.push(new Promise((resolve, reject) => {
        $.ajax({url: new_url}).done(function(data) {
            var pics = data.response.photos.items;
            resolve(pics)
        }).fail(function(err) {
            reject(err)
            console.log('Failed!');
        });
    }))
}
return Promise.all(requests)
When all requests run successfully, Promise.all resolves with an array of all of their results, which is then passed to functionB.
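Wired into the original chain, that might look like this (a sketch, assuming jQuery 3+, whose .then adopts native promises):

$.get(url)
    .then(functionA, functionFail) // functionA returns Promise.all(requests)
    .then(functionB);              // functionB receives the array of pics arrays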
You can handle the looping yourself, handling only one item in venues at a time. Upon the completion of one, call the function again to handle the next one, and when venues is empty, call your functionB.
var pictures = [];
var venues = **an array of venues**
$.get(url).then(functionA, functionFail);

function functionA() {
    var venue = venues.shift();
    var new_url = **some_url**;
    $.ajax({
        url: new_url,
        async: true
    }).done(function(data) {
        var pics = data.response.photos.items;
        pictures.push(pics[0]);
        pictures.push(pics[1]);
        if (venues.length !== 0) {
            functionA();
        }
        else {
            functionB();
        }
    }).fail(function() {
        console.log('Failed!');
    });
}

Callback runs first? - learnyounode juggling

Can someone help me understand why my solution does not work? It seems like the callback function is running before the juggle function is finished.
My code works fine if I remove the comment markers around the cbacks counter. It's just that I don't understand why the log function does not get called after the juggle function is finished. That is how callbacks are supposed to work, right?
Thanks in advance :)
var http = require('http')
var links = process.argv.slice(2)
var contents = []
//var cbacks = 0

function juggle(callback) {
    links.forEach(function(link, i, links) {
        http.get(link, function(response) {
            response.setEncoding("utf8")
            var str = ""
            response.on("data", function(data) {
                str = str.concat(data)
            })
            response.on("end", function() {
                contents[i] = str
                //cbacks++
                //if (cbacks === 3) {
                //    callback()
                //}
            })
        })
    })
    callback()
}

function log() {
    contents.forEach(function(content, i, contents) {
        console.log(contents[i])
    })
}

juggle(log)
http.get is asynchronous. forEach is executed against your links and calls http.get, which only registers a connection to be processed; it doesn't actually complete the connection/request.
If you need to execute the callback when all the forEach iterations have completed, you could use a library like async to accomplish it.
async supports a forEach method. Using async, the iteratee you pass to forEach takes an additional callback which should be called to denote that the item's processing has finished. You could place that callback in the response.on('end') handler. When all of those callbacks have been called, or when an error has occurred, async.forEach will execute the onComplete callback you provide as the 3rd parameter, accomplishing your goal.
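A minimal sketch of that, assuming async's classic eachOf/forEachOf signature (the iteratee gets the item, its index, and a per-item done callback):

var async = require('async')

function juggle(callback) {
    async.forEachOf(links, function(link, i, done) {
        http.get(link, function(response) {
            response.setEncoding('utf8')
            var str = ''
            response.on('data', function(data) { str += data })
            response.on('end', function() {
                contents[i] = str
                done() // tell async this link is finished
            })
        })
    }, function(err) {
        if (!err) callback() // runs only after every link has finished
    })
}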
You have global scope available, which is what I would use to actually juggle the requests.
When each link gets registered to an EventEmitter, you can store it inside of a map.
var link_map = {};
var request_counter = 0;
links.forEach( function (link, index) {
    link_map[link] = '';
    ...
Then in your requests you can append the data from the specific request:
response.on('data', function (chunk) {
    link_map[link] += chunk.toString();
    ...
And finally, in each 'end' handler, check whether all requests have finished:
response.on('end', function () {
    request_counter += 1;
    if ( links.length === request_counter ) {
        // do your logging stuff here and you have all
        // the data that you need inside link_map
        ...
The link variable inside the anonymous function declared in the forEach is captured by that closure. So each time the 'data' event is emitted, link refers to the specific request that was registered with that callback. That is why I chose a map data structure, mapping the data of each specific request to its link, which we use as the key.
EventEmitters and callbacks can get kind of hairy if you are unfamiliar with them. Keep practicing, though, and it will eventually become easier.
And using an array as you did is not incorrect or anything; I just prefer to use objects with key => value pairs when I can.
RUN THIS CODE on your machine to see it in action.
const http = require('http');

var links = [
    'http://www.google.com',
    'http://www.example.com',
    'http://www.yahoo.com'
];
var link_map = {};
var request_counter = 0;

links.forEach( function (link, index) {
    link_map[link] = '';
    http.get(link, function(response) {
        response.on('data', function (chunk) {
            link_map[link] += chunk.toString();
        });
        response.on('end', function () {
            request_counter += 1;
            if ( links.length === request_counter ) {
                links.forEach( function(link) {
                    require('fs').writeFileSync(link.split('//')[1], link_map[link]);
                });
            }
        });
    });
});
You can see the output files from the links in the parent directory.
You don't wait for the HTTP responses; you call the callback function immediately. At that point in the code, the contents array is still empty.
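In other words, the commented-out counter was the right idea; it just needs links.length instead of the hard-coded 3, and the unconditional callback() at the bottom of juggle has to go (a sketch):

function juggle(callback) {
    var finished = 0
    links.forEach(function(link, i) {
        http.get(link, function(response) {
            response.setEncoding('utf8')
            var str = ''
            response.on('data', function(data) { str += data })
            response.on('end', function() {
                contents[i] = str
                finished++
                if (finished === links.length) callback() // only after ALL responses end
            })
        })
    })
}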

jQuery Deferred/Promises dynamic array not executing callbacks in correct order

Grateful for any insight into what I'm misunderstanding here. My requirement is as follows:
I have an array of URLs. I want to fire off an AJAX request for each URL simultaneously, and as soon as the first request completes, call the first callback. Then, if and when the second request completes, call that callback, and so on.
Option 1:
for (var i = 0; i < myUrlArray.length; i++) {
    $.ajax({
        url: myUrlArray[i]
    }).done(function(response) {
        // Do something with response
    });
}
Obviously this doesn't work, as there is no guarantee the responses will complete in the correct order.
Option 2:
var promises = [];
for (var i = 0; i < myUrlArray.length; i++) {
    promises.push($.ajax({
        url: myUrlArray[i]
    }));
}
$.when.apply($, promises).then(function() {
    // Do something with each response
});
This should work, but the downside is that it waits until all AJAX requests have completed, before firing any of the callbacks.
Ideally, I should be able to call the first callback as soon as it's complete, then chain the second callback to execute whenever that response is received (or immediately if it's already resolved), then the third, and so on.
The array length is completely variable and could contain any number of requests at any given time, so just hard coding the callback chain isn't an option.
My attempt:
var promises = [];
for (var i = 0; i < myUrlArray.length; i++) {
    promises.push($.ajax({
        url: myUrlArray[i] // Add each AJAX Deferred to the promises array
    }));
}
(function handleAJAX() {
    var promise;
    if (promises.length) {
        promise = promises.shift(); // Grab the first one in the stack
        promise.then(function(response) { // Set up 'done' callback
            // Do something with response
            if (promises.length) {
                handleAJAX(); // Move onto the next one
            }
        });
    }
}());
The problem is that the callbacks execute in a completely random order! For example, if I add 'home.html', 'page2.html', 'page3.html' to the array, the order of responses won't necessarily be 'home.html', 'page2.html', 'page3.html'.
I'm obviously fundamentally misunderstanding something about the way promises work. Any help gratefully appreciated!
Cheers
EDIT
OK, now I'm even more confused. I made this JSFiddle with one array using Alnitak's answer and another using JoeFletch's answer and neither of them work as I would expect! Can anyone see what is going on here?
EDIT 2
Got it working! Based on JoeFletch's answer below, I adapted the solution as follows:
var i, responseArr = [];
for (i = 0; i < myUrlArray.length; i++) {
    responseArr.push('0'); // <-- Add 'unprocessed' flag for each pending request
    (function(ii) {
        $.ajax({
            url: myUrlArray[ii]
        }).done(function(response) {
            responseArr[ii] = response; // <-- Store response in array
        }).fail(function(xhr, status, error) {
            responseArr[ii] = 'ERROR';
        }).always(function(response) {
            for (var iii = 0; iii < responseArr.length; iii++) { // <-- Loop through entire response array from the beginning
                if (responseArr[iii] === '0') {
                    return; // As soon as we hit an 'unprocessed' request, exit loop
                }
                else if (responseArr[iii] !== 'done') {
                    $('#target').append(responseArr[iii]); // <-- Do actual callback DOM append stuff
                    responseArr[iii] = 'done'; // <-- Set 'complete' flag for this request
                }
            }
        });
    }(i)); // <-- pass current value of i into closure to encapsulate
}
TL;DR: I don't understand jQuery promises, got it working without them. :)
Don't forget that you don't need to register the callbacks straight away.
I think this would work; the main difference from your code is that I've used .done rather than .then and refactored a few lines.
var promises = myUrlArray.map(function(url) {
    return $.ajax({url: url});
});
(function serialize() {
    var def = promises.shift();
    if (def) {
        def.done(function() {
            callback.apply(null, arguments);
            serialize();
        });
    }
})();
Here's my attempt at solving this. I updated my answer to include error handling for a failed .ajax call. I also moved some code to the complete method of the .ajax call.
var urlArr = ["url1", "url2"];
var responseArr = [];
for(var i = 0; i < length; i++) {
responseArr.push("0");//0 meaning unprocessed to the DOM
}
$.each(urlArr, function(i, url){
$.ajax({
url: url,
success: function(data){
responseArr[i] = data;
},
error: function (xhr, status, error) {
responseArr[i] = "Failed Response";//enter whatever you want to place here to notify the end user
},
complete: function() {
$.each(responseArr, function(i, element){
if (responseArr[i] == "0") {
return;
}
else if (responseArr[i] != "done")
{
//do something with the response
responseArr[i] = "done";
}
});
}
});
})
Asynchronous requests aren't guaranteed to finish in the same order that they are sent; some may take longer than others depending on server load and the amount of data being transferred.
The only options are to wait until they are all done, to send only one at a time, or to just deal with them possibly completing out of order.

Any better way to combine multiple callbacks?

I need to call an async function (in a loop) for multiple values and wait for all of those results. Right now I'm using the following code:
(function() {
    var when_done = function(r) { alert("Completed. Sum of lengths is: [" + r + "]"); }; // call when ready
    var datain = ['google', 'facebook', 'youtube', 'twitter']; // the data to be parsed
    var response = {pending: 0, fordone: false, data: 0}; // control object, "data" holds summed response lengths
    response.cb = function() {
        // if there are pending requests, or the loop isn't done yet, do nothing
        if (response.pending || !response.fordone) return;
        // otherwise alert.
        return when_done.call(null, response.data);
    }
    for (var i = 0; i < datain.length; i++) (function(i) {
        response.pending++; // increment pending requests count
        $.ajax({url: 'http://www.' + datain[i] + '.com', complete: function(r) {
            response.data += (r.responseText.length);
            response.pending--; // decrement pending requests count
            response.cb(); // call the callback
        }});
    }(i));
    response.fordone = true; // mark the loop as done
    response.cb(); // call the callback
}());
This isn't all very elegant but it does the job.
Is there any better way to do it? Perhaps a wrapper?
Async JS to the rescue (for both client-side and server-side JavaScript)! Your code may look like this (after including async.js):
var datain = ['google', 'facebook', 'youtube', 'twitter'];
var calls = [];
$.each(datain, function(i, el) {
    calls.push(function(callback) {
        $.ajax({
            url : 'http://www.' + el + '.com',
            error : function(e) {
                callback(e);
            },
            success : function(r) {
                callback(null, r);
            }
        });
    });
});
async.parallel(calls, function(err, result) {
    /* This function will be called when all calls finish the job! */
    /* err holds possible errors, while result is an array of all results */
});
By the way: async has some other really helpful functions.
By the way 2: note the use of $.each.
You can use the jQuery Deferred object for this purpose: var def = $.when.apply(null, xhrs), with xhrs being an array containing the return values of your $.ajax() requests. You can then register a callback with def.done(function() { ... }) and use the array-like arguments object to access the responses of the various requests. To process them properly, remove your complete callback, add dataType: 'text', and use the following callback for done():
function() {
    var response = Array.prototype.join.call(arguments, '');
    // do something with response
}

getJSON won't allow array to pass through

function near_home(lat, long, miles) {
    var url = "api_url";
    var places = [];
    $.getJSON(url, function(data) {
        $.each(data['results'], function(i, place) {
            places.push(place);
        });
        console.log(places);
    });
    console.log(places);
    return places;
}
So the first console.log() logs the desired objects, but the second one logs an empty array. I've rewritten this thing a few times over and can't seem to find the reason for this madness. What am I missing?
AJAX requests are asynchronous. You need to execute all following code in a callback.
A possible solution would be this:
function near_home(lat, long, miles, cb) {
    var url = "api_url";
    var places = [];
    $.getJSON(url, function(data) {
        $.each(data.results, function(i, place) {
            places.push(place);
        });
        cb(places);
    });
}
When using the function, call it like this:
near_home(lat, long, miles, function(places) {
    // your code using the returned array
});
The getJSON() method is asynchronous, so places will not yet have been filled at the second console.log statement.
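Alternatively, since $.getJSON() already returns a jqXHR promise, the function can simply return that promise and let the caller attach a callback (a sketch):

function near_home(lat, long, miles) {
    var url = "api_url";
    return $.getJSON(url).then(function(data) {
        return data.results; // becomes the resolved value of the chain
    });
}

near_home(lat, long, miles).then(function(places) {
    // your code using the returned array
});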
