learnyounode - juggling async - why doesn't my solution work - JavaScript

Can someone explain why the following does not work as a solution to the "juggling async" lesson in the learnyounode workshop?
FYI I have left my log lines in if that helps. I am struggling to see any fundamental differences between my solution and an answer I found online here:
https://github.com/olizilla/nodeschooling/blob/master/learnyounode-answers/09-juggling-async.js
Thanks in advance!
// NOTE(review): this is the non-working attempt — the annotations below explain why.
var urlList = process.argv.slice(2, process.argv.length);
//console.log(urlList);
// Map of url -> accumulated response body, pre-seeded with empty strings.
var urlResponseList = {};
for(var i=0; i < urlList.length; i++)
{
urlResponseList[urlList[i]] = '';
}
var http = require('http');
console.log(urlList);
// BUG 1: http.get is asynchronous — this loop only *starts* the requests;
// no response has arrived by the time the loop (or anything after it) runs.
for(var i = 0; i < urlList.length; i++) {
//console.log(i);
// BUG 2: `var url` (and `i`) are function-scoped, so every callback below
// sees the value from the *last* loop iteration, not its own.
var url = urlList[i];
console.log("1 " + url);
http.get(url, function (response) {
console.log("2 " + url);
console.log("3 " + i);
response.setEncoding('utf8');
response.on('data', function (data) {
// Because of BUG 2, all chunks are appended under the last url.
urlResponseList[url] = urlResponseList[url] + data;
});
response.on('end', function () {
//console.log(stringResponse);
//console.log(url);
});
});
}
// BUG 3: runs before any response has arrived, so the map is still all-empty.
console.log(urlResponseList);
for(var i=0; i < urlList.length; i++){
console.log(urlResponseList[urlList[i]]);
}
I also have a question about a solution I found posted online here:
https://github.com/olizilla/nodeschooling/blob/master/learnyounode-answers/09-juggling-async.js
urls.forEach(function (item, index) {
http.get(item, function (req) {
req.setEncoding('utf8')
req.pipe(concat(function (res) {
data[index] = res;
responseCount++
if (responseCount === urls.length) {
console.log(data.join('\n'));
}
}))
})
If http.get is async, can the "index" variable be trusted in the http.get callback even though it is being set outside of the callback (in the forEach loop)?
I just wanted to post the updated solution below. Thanks for all the help. My issue was I didn't fully understand how closures worked.
// Working solution: accumulate each response body in the slot owned by its
// forEach index (each callback closes over its *own* `index`), then print
// everything in order once the final response has ended.
var urlList = process.argv.slice(2, process.argv.length);
var urlResponseList = [];
for (var i = 0; i < urlList.length; i++) {
urlResponseList.push('');
}
var http = require('http');
var responseCount = 0;
urlList.forEach(function (item, index) {
http.get(item, function (response) {
response.setEncoding('utf8');
response.on('data', function (data) {
// Append this chunk to this request's own slot.
urlResponseList[index] += data;
});
response.on('end', function () {
responseCount++;
// Only print once *all* responses have completed, preserving argv order.
if (responseCount === urlList.length) {
console.log(urlResponseList.join('\n'));
}
});
});
});

http.get() returns its result asynchronously (e.g. sometime later) in the callback. Your code execution does not wait for that result - the rest of your code keeps running even though the response is not yet here.
Thus, all requests will be sent at once from your first for loop, then your second for loop will run and THEN sometime later, the responses will arrive and the callbacks will be called. Your responses are going to arrive async and thus will NOT be available at the end of the first for loop. So, your first console.log(urlResponseList); will be empty and the second for loop will have nothing to do.
Async responses MUST be processed inside the callback in which they are delivered or they must be stored somewhere until the last response is done and then all responses can be processed at that time.
If you don't understand the issues behind asynchronous responses, then read these two references:
How do I return the response from an asynchronous call?
Why is my variable unaltered after I modify it inside of a function? - Asynchronous code reference
You also have issues with local variables declared before the callback that will have changed before the callback is called.
Here's an example of how you solve the for loop issue: JavaScript closure inside loops – simple practical example

Related

learnyounode - Juggling Async - different order

This is from the learnyounode tutorial exercise 9 on node.js. I'm having trouble understanding why my code doesn't print out the data in order.
// Fetch each argv URL and print the bodies in argv order once all are done.
let http = require('http'),
bl = require('bl'),
urlArray = [process.argv[2], process.argv[3], process.argv[4]];
// Declared explicitly: in the original, ASI had already terminated the comma
// chain, so `results` silently became an implicit global.
let results = [];
let counter = 0;

function collectData(i) {
http.get(urlArray[i], (res) => {
res.pipe(bl((err, data) => {
if (err) {
return console.log(err);
}
results[i] = data.toString();
// BUG FIX: `results.length === 3` fired as soon as index 2 was assigned
// (a sparse array's length is highest-index + 1), even when the other
// responses had not arrived. Count completed responses instead.
counter++;
if (counter === urlArray.length) {
results.forEach((result) => {
console.log(result);
});
}
}));
});
}

for (let i = 0; i < urlArray.length; i++) {
collectData(i);
}
The for loop should start from the first url and go through to the last in order. From my understanding, whatever happens in the current iteration of the loop must resolve for the loop to move to the next iteration. However, the results seem to be random. If I run my solution on the command line, sometimes the results are in order and sometimes they're not.
Edit: This is my current solution which works. I added the counter variable and put the http request into a function.
The reason you're getting different results on each run is because the get-function of http is implemented asynchronously (async). You're doing the requests in the right order, but the webserver on the get-URL responds not instantly.
So basically, if you have two URLs to call:
http://stackoverflow.com
http://google.com
You call them in this order, but if Google has a good response time on this run (say 10 ms) while Stack Overflow needs a little longer (say 20 ms), the callback function for Google is called first, and then the callback function for Stack Overflow.
The response times can differ on each run; that's why you experience different results each time.
This is your callback-function:
res.pipe(bl((err, data) => {
if (err) {
return console.log(err);
}
data = data.toString();
console.log(data);
}
The entire problem is with the variable "i" and the asynchronous calls. With this particular logic, you don't have control over the value of i because of the async calls.
In order to understand the problem with your code, print console.log after the line:
results[i] = data;
This is my solution to the problem:
// Answer: issue one request per argv URL, buffer each body under its request
// index, and print everything in order once all responses have ended.
var http = require('http');
var count = 3; // outstanding responses still to finish
var contentResults = [];

// Passing `i` as a parameter gives each request its own copy, so the async
// callbacks below close over the correct index.
function hitRequests(i) {
http.get(process.argv[i + 1], function (response) {
response.setEncoding('utf8');
var entireContent = '';
response.on('data', function (chunk) {
entireContent += chunk;
});
response.on('end', function (chunk) {
contentResults[i] = entireContent;
count--;
// Last response in: now it is safe to print all of them, in order.
if (count <= 0) {
printAll();
}
});
}).on('error', function (e) {
console.log('error' + e);
});
}

// FIX: `i` was an implicit global in the original loop header (no `var`).
for (var i = 1; i <= 3; i++) {
hitRequests(i);
}

function printAll() {
contentResults.forEach(function (result) {
console.log(result);
});
}

Using closure with a promise in AngularJS

I don't have a lot of experience with JavaScript closures nor AngularJS promises. So, here is my scenario
Goal:
I need to make $http requests calls within a for loop
(Obvious) problem
Even though the loop is done, my variables still have not been updated
Current implementation
// Issues an async $http GET for the columns of one field.
function getColumns(fieldParameters)
{
return $http.get("api/fields", { params: fieldParameters });
}
// NOTE(review): the IIFE correctly captures `current` per iteration, but
// `fieldParameters` is one shared object mutated on every pass — presumably
// $http reads it synchronously when building the request; verify.
for(var i = 0; i < $scope.model.Fields.length; i++)
{
var current = $scope.model.Fields[i];
(function(current){
fieldParameters.uid = $scope.model.Uid;
fieldParameters.type = "Columns";
fieldParameters.tableId = current.Value.Uid;
var promise = getColumns(fieldParameters);
promise.then(function(response){
// Runs later, when the HTTP response arrives — not during the loop.
current.Value.Columns = response.data;
}, error);
})(current);
}
//at this point current.Value.Columns should be filled with the response. However
//it's still empty
What can I do to achieve this?
Thanks
If I understand your question correctly, you have a list of fields that you need to do some work on. Then when all of that async work is done, you want to continue. So using the $q.all() should do the trick. It will resolve when all of the list of promises handed to it resolve. So it's essentially like "wait until all of this stuff finishes, then do this"
You could try something like this:
var promises = [];
for(var i=0; i< $scope.model.Fields.length; i++) {
var current = $scope.model.Fields[i];
promises.push(getColumns(fieldParameters).then(function(response) {
current.Value.Columns = response.data;
}));
}
return $q.all(promises).then(function() {
// This is when all of your promises are completed.
// So check your $scope.model.Fields here.
});
EDIT:
Try this since you are not seeing the right item updated. Update your getColumns method to accept the field, the send the field in the getColumns call:
// Updated getColumns: takes the field it should populate, so each request
// writes its response onto the correct field (no shared-closure problem).
function getColumns(fieldParameters, field)
{
return $http.get("api/fields", { params: fieldParameters}).then(function(response) {
// `field` is a parameter of this call, so later loop iterations
// cannot clobber it.
field.Value.Columns = response.data;
});
}
...
promises.push(getColumns(fieldParameters, $scope.model.Fields[i])...
var promises = [];
for(var i = 0; i < $scope.model.Fields.length; i++)
{
var current = $scope.model.Fields[i];
// BUG FIX: the original pushed the *function itself* into the array.
// $q.all treats non-promise values as already resolved, so it fired
// immediately without running any of the work. Invoke the function (as an
// IIFE, which also captures `current` per iteration) and push the promise
// it returns instead.
promises.push((function(current){
//blahblah
return promise
})(current));
}
$q.all(promises).then(function(){
/// everything has finished all variables updated
});

Parse cloud code delay each iteraton of for loop

I need to run a function on a list of ParseObjects, but since my progressTournament function is asynchronous it:
A: Cancels any existing calls to progressTournament.
B: Returns status.success prematurely.
My understanding is that I need to have progressTournament return a promise, and somehow delay my code until that promise is resolved, however my understanding of promises is too limited to figure out how to implement it.
I would really appreciate some help on this, hope to hear back from someone soon.
Background Job Code
// Background job: find tournaments past their nextRoundTime and progress each.
// NOTE(review): progressTournament is async, so the loop below only *starts*
// all the calls — status.success() then runs before any of them finish.
Parse.Cloud.job('progressTournaments', function(request, status)
{
var tournamentsQuery = new Parse.Query('Tournament');
tournamentsQuery.lessThan('nextRoundTime', moment().toDate());
console.log('Finding tournaments that are due to progress...');
tournamentsQuery.find(
{
success: function(results)
{
// (missing space before 'tournaments' in this log message)
console.log('Progressing ' + results.length + 'tournaments')
for (var i = 0; i < results.length; i++)
{
// Fire-and-forget: nothing awaits this call's completion.
progressTournament(results[i], null);
// Wait here until progressTournament finishes!
}
// Runs immediately after the loop, before the tournaments have progressed.
status.success();
},
error: function(error)
{
status.error();
}
});
});
Was able to achieve what I wanted using Parse.Promise.when()
// Self-answer: collect one promise per tournament and let Parse.Promise.when()
// resolve once every progressTournament call has finished.
tournamentsQuery.find(
function(tournaments)
{
var promises = [];
for (var i = 0; i < tournaments.length; i++)
promises.push(tournamentUtils.progressTournament(tournaments[i]));
// Resolves when all collected promises resolve (runs in parallel).
return Parse.Promise.when(promises);
}).then(
function(progressedTournaments)
{
response.success("All tournaments progressed");
},
function(error)
{
response.error(error);
});
If they needed to be done in series rather than in parallel, could use Parse.Query.each() instead
Should note that now using response instead of status since migrated to self hosted parse server since this question was posted.

Callback runs first? - learnyounode juggling

Can someone help me understand why my solution does not work? It seems like the callback function is running before the juggle function is finished.
My code works fine if I remove the comments. It's just that I don't understand why the log function does not get called after the juggle function is finished. That is how callbacks are supposed to work right?
Thanks in advance :)
var http = require('http')
// NOTE(review): missing `var` — `links` is an implicit global.
links = process.argv.slice(2)
var contents = []
//var cbacks = 0
// Starts one GET per link, then invokes `callback` — see the bug note below.
function juggle(callback) {
links.forEach(function(link, i, links) {
http.get(link, function(response) {
response.setEncoding("utf8")
var str = ""
response.on("data", function(data) {
str = str.concat(data)
})
response.on("end", function(){
contents[i] = str
//cbacks++
//if(cbacks === 3) {
// callback()
//}
})
})
})
// BUG: http.get only *registers* the requests; nothing has responded yet,
// so callback() runs while `contents` is still empty. The commented counter
// above (invoking callback from the last 'end' handler) is the fix.
callback()
}
function log() {
contents.forEach(function(content, i, contents) {
console.log(contents[i])
})
}
juggle(log)
http.get is asynchronous. forEach is executed against your links which calls http.get, which registers a connection to be processed. It doesn't actually complete the connection/request.
if you need to execute the callback when all the forEach functions complete you could use a library like async to accomplish it.
async supports a forEach method. Using async, the first param to forEach would take an additional callback function which should be called to denote item processing has been finished. You could place that callback in the response.on('end') callback. When all of those callbacks have been called, or when an error has occurred async.forEach will execute the onComplete callback you provide to it as the 3rd parameter, accomplishing your goal.
So you have global scope which is what I would use to actually juggle the requests.
When each link gets registered to an EventEmitter, you can store it inside of a map.
var link_map = {};
var request_counter = 0;
links.forEach( function (link, index) {
link_map[link] = '';
...
then in your requests you can append data from a specific request
response.on('data', function (chunk) {
link_map[link] += chunk.toString();
...
and finally at each end, check if all requests have finished
response.on('end', function () {
request_counter += 1;
if ( links.length === request_counter ) {
// do your logging stuff here and you have all
// the data that you need inside link_map
...
the link variable inside of the anonymous function declared in the forEach is stored for that closure. So each time that the 'data' event is emitted, the link variable is going to refer to the request for a specific link that was registered to the callback. That is why I chose to use a map data structure and map specific data to each link which we are using as a key.
EventEmitters and callbacks can get kind of hairy if you are unfamiliar with them. Keep practicing though and it will eventually become easier.
And using an array as you did is not incorrect or anything, I just prefer to use Objects with key => value pairs when I can.
RUN THIS CODE on your machine to see it in action.
// Demo: download each link concurrently, buffering every response body in a
// map keyed by URL; when the last response ends, write each body to a file
// named after the URL minus its scheme.
const http = require('http');

var links = [
'http://www.google.com',
'http://www.example.com',
'http://www.yahoo.com'
];

// URL -> accumulated response body.
var link_map = {};
var request_counter = 0;

// Kick off one request; `link` is captured per call, so chunks always land
// in the right bucket.
function fetchLink(link) {
link_map[link] = '';
http.get(link, function (response) {
response.on('data', function (chunk) {
link_map[link] += chunk.toString();
});
response.on('end', onResponseEnd);
});
}

// Runs once per finished response; the final one flushes everything to disk.
function onResponseEnd() {
request_counter += 1;
if (request_counter === links.length) {
links.forEach(function (link) {
require('fs').writeFileSync(link.split('//')[1], link_map[link]);
});
}
}

links.forEach(function (link, index) {
fetchLink(link);
});
You can see an output of files from links in the parent directory.
You didn't wait for the HTTP responses and called the callback function immediately. At that point in the code, the contents array is empty.

jQuery JavaScript Nested Asynchronous Functions callback

I'm a little confused how to determine when async function called multiple times from another one is finished a call from the last iteration:
// Question code: tries to invoke `callback` after 10 async sub-calls finish.
function MainAsyncFunction(callback) {
for (var i = 0; i < 10; i++) {
SubAsyncFunction(function(success) {
// BUG: `var i` is shared by every callback; by the time any callback
// runs, the loop has finished and i === 10, so this fires on the *first*
// response, not after all ten. Count completed callbacks instead.
if (i >= 10 && success) { // THIS IS WRONG?!
callback(true); // happens too early
}
});
}
};
// Wraps SubSubAsyncFunction and reports success to its caller's callback.
function SubAsyncFunction(callback) {
SubSubAsyncFunction(function() {
callback(true);
});
}
What I'm doing is calling the Google Distance Matrix service, which has a limitation of 25 destinations, hence I'm having to split my array of destinations to call this service multiple times but I don't understand when it's finished.
and in the main bit of code I can tell that the second iteration of the loop in the MainAsyncFunction hasn't yet completed when it does a call back.
I think my problem is I haven't got my head around the order of events when dealing with Async functions in JavaScript... please explain how the subject is normally achieved.
You could use the jQuery Deferred object, which acts as a token representing the status of an async operation.
The following is a simplified example:
//set up your sub method so that it returns a Deferred object
function doSomethingAsync() {
var token = $.Deferred();
myAsyncMethodThatTakesACallback(function() {
//resolve the token once the async operation is complete
token.resolve();
});
// Expose only the read-side of the Deferred to callers.
return token.promise();
};
//then keep a record of the tokens from the main function
function doSomethingAfterAllSubTasks() {
var tokens = [];
for (var i=0; i < 100; i++) {
//store all the returned tokens
tokens.push(doSomethingAsync());
}
// $.when takes promises as separate arguments, hence the .apply spread.
$.when.apply($,tokens)
.then(function() {
//once ALL the sub operations are completed, this callback will be invoked
alert("all async calls completed");
});
};
The following is an updated version of the OP's updated code:
// Collects one promise token per sub-call and invokes `callback(true)` only
// after every SubAsyncFunction has resolved.
function MainAsyncFunction(callback) {
var subFunctionTokens = [];
for (var i = 0; i < 10; i++) {
subFunctionTokens.push(SubAsyncFunction());
}
// $.when expects promises as individual arguments, so spread with .apply.
$.when.apply($, subFunctionTokens)
.then(function() {
callback(true);
});
};

// Returns a promise that resolves when SubSubAsyncFunction's callback fires.
// FIX: removed the stray zero-width character that trailed the original's
// final line (a copy/paste hazard).
function SubAsyncFunction() {
var token = $.Deferred();
SubSubAsyncFunction(function() {
token.resolve();
});
return token.promise();
};
Perhaps the ajaxStop() event? This is a jQuery event that only fires when all active AJAX requests are completed.
The problem is that the value of i is constantly changing in the loop, finally being out of bounds after failing the loop conditional.
The easiest way to fix this is:
// IIFE pattern: passing `i` into the function gives each iteration its own
// copy, so async callbacks created inside see the value from *their* pass.
for( i=0; i<5; i++) { // or whatever your loop is
(function(i) {
// the value of i is now "anchored" in this block.
})(i);
}

Categories