Can someone help me understand why my solution does not work? It seems like the callback function is running before the juggle function is finished.
My code works fine if I remove the comments. It's just that I don't understand why the log function does not get called after the juggle function is finished. That is how callbacks are supposed to work right?
Thanks in advance :)
var http = require('http')
// NOTE: `var` added so `links` is no longer an implicit global.
var links = process.argv.slice(2)
var contents = []

// Fetches every URL in `links` and stores each response body in `contents`
// at the matching index. `callback` runs once ALL requests have finished.
// http.get is asynchronous: the forEach loop only *starts* the requests, so
// we must count completed "end" events instead of calling the callback
// right after the loop (the original bug).
function juggle(callback) {
  var completed = 0
  links.forEach(function(link, i) {
    http.get(link, function(response) {
      response.setEncoding("utf8")
      var str = ""
      response.on("data", function(data) {
        str = str.concat(data)
      })
      response.on("end", function() {
        contents[i] = str
        completed++
        // Fire the callback only when the last outstanding request ends.
        // Using links.length (not a hard-coded 3) works for any argv count.
        if (completed === links.length) {
          callback()
        }
      })
    })
  })
  // Edge case: no URLs supplied, so no "end" event will ever fire.
  if (links.length === 0) {
    callback()
  }
}

// Prints every collected response body in input order.
function log() {
  contents.forEach(function(content) {
    console.log(content)
  })
}

juggle(log)
http.get is asynchronous. forEach is executed against your links which calls http.get, which registers a connection to be processed. It doesn't actually complete the connection/request.
if you need to execute the callback when all the forEach functions complete you could use a library like async to accomplish it.
async supports a forEach method. Using async, the first param to forEach would take an additional callback function which should be called to denote item processing has been finished. You could place that callback in the response.on('end') callback. When all of those callbacks have been called, or when an error has occurred async.forEach will execute the onComplete callback you provide to it as the 3rd parameter, accomplishing your goal.
So you have global scope which is what I would use to actually juggle the requests.
When each link gets registered to an EventEmitter, you can store it inside of a map.
var link_map = {};
var request_counter = 0;
links.forEach( function (link, index) {
link_map[link] = '';
...
then in your requests you can append data from a specific request
response.on('data', function (chunk) {
link_map[link] += chunk.toString();
...
and finally at each end, check if all requests have finished
response.on('end', function () {
request_counter += 1;
if ( links.length === request_counter ) {
// do your logging stuff here and you have all
// the data that you need inside link_map
...
the link variable inside of the anonymous function declared in the forEach is stored for that closure. So each time that the 'data' event is emitted, the link variable is going to refer to the request for a specific link that was registered to the callback. That is why I chose to use a map data structure and map specific data to each link which we are using as a key.
EventEmitters and callbacks can get kind of hairy if you are unfamiliar with them. Keep practicing though and it will eventually become easier.
And using an array as you did is not incorrect or anything, I just prefer to use Objects with key => value pairs when I can.
RUN THIS CODE on your machine to see it in action.
const http = require('http');
// URLs to fetch; each becomes a key in link_map below.
var links = [
'http://www.google.com',
'http://www.example.com',
'http://www.yahoo.com'
];
// Maps each link URL to its accumulated response body.
var link_map = {};
// Number of requests that have emitted 'end' so far.
var request_counter = 0;
links.forEach( function (link, index) {
link_map[link] = '';
http.get(link, function(response) {
// `link` is captured by this closure, so each 'data' handler appends to
// the map entry belonging to its own request.
response.on('data', function (chunk) {
link_map[link] += chunk.toString();
});
response.on('end', function () {
request_counter += 1;
// Once every request has finished, write each body to a file named
// after the part of the URL following "//".
if ( links.length === request_counter ) {
links.forEach( function(link) {
require('fs').writeFileSync(link.split('//')[1],link_map[link]);
});
}
});
});
});
You can see an output of files from links in the parent directory.
You didn't wait for the HTTP responses; you called the callback function immediately. At that point in the code, the contents array is still empty.
Related
I am finding it really strange that the same piece of code it executing when I place it in a .html and check the console logs all looks fine, but when I'm placing exactly same code block inside a cloud function that does not execute and is being skipped over for some reason. All I need to do is fetch some config parameters from a config object and use them as needed.
// Reference the database root, then its "config" child.
var db = admin.database().ref();
var configRef = db.child("config");
// Attaches a persistent 'value' listener; the callback fires asynchronously
// once the snapshot is available (and again on every later change).
configRef.on('value', function(snap){
var snapvalue = snap.val();
var totalLength = Object.keys(snapvalue).length;
// Log every key/value pair of the config object (jQuery's $.each).
$.each( snapvalue, function( key, value ) {
console.log(value,key);
});
});
Completed cloud function looks like this:
// Cloud Function triggered by writes under /object/{list}.
// NOTE(review): no promise is returned, so Cloud Functions may terminate the
// instance before the asynchronous 'value' callback ever runs — which is why
// only the two outer console.log lines appear.
exports.myFunction = functions.database.ref('/object/{list}').onWrite(event =>{
console.log("Inside myFunction...");
var db = admin.database().ref();
var configRef = db.child("config");
// 'value' fires asynchronously; execution continues immediately past this
// statement, reaching the "Leaving" log before any data arrives.
configRef.on('value', function(snap){
var snapvalue = snap.val();
$.each( snapvalue, function( key, value ) {
console.log(value,key);
});
});
console.log("Leaving myFunction...");
})
I am getting the console logs before and after on('value') block so function is executing but not the on('value') code block.
With Cloud Functions, you need to return a promise (note the comments in the code samples) that resolves when all of the asynchronous work is done in the function. once() executes asynchronously and returns a promise, so you should return its promise so Cloud Functions knows when it's time to clean up.
Also, it's probably easier to use then() to continue your work when the data is available.
// Fixed version: once() returns a promise, and RETURNING that promise tells
// Cloud Functions to keep the instance alive until the async work completes.
exports.myFunction = functions.database.ref('/object/{list}').onWrite(event => {
console.log("Inside myFunction...");
var db = admin.database().ref();
var configRef = db.child("config");
// The `return` here is the crucial difference from the broken version.
return configRef.once('value').then(snap => {
var snapvalue = snap.val();
$.each(snapvalue, function( key, value ) {
console.log(value,key);
});
});
})
If you're going to work with Cloud Functions, you will really have to also learn about how to use promises.
I have the following http endpoint in nodejs using the express library:
// GET /api/stocks/lookup/:qry — look up symbols matching qry, then fetch a
// quote for each result.
app.get("/api/stocks/lookup/:qry", function(req, res) {
getJson(lookupSearch(req.params.qry), function(json) {
var quotes = [];
// `und` is presumably underscore.js — TODO confirm. Each inner getJson is
// asynchronous, so the pushes run AFTER res.send below — the bug asked about.
und.forEach(json, function(d) {
getJson(quoteSearch(d.Symbol), function(j) {
quotes.push(j);
});
});
res.send(quotes); //how can I make this execute after the .forEach is finished?
});
});
Here, getJson looks like this:
// Fetches `search` (an http.request options object or URL string) and calls
// `cb` with the parsed JSON body once the whole response has been received.
var getJson = function(search, cb) {
  var req = http.request(search, function(response) {
    var raw = '';
    response.on('data', function(d) {
      raw += d;
    });
    response.on('end', function() {
      // JSON.parse throws on malformed bodies; guard it so one bad response
      // doesn't crash the whole process.
      try {
        cb(JSON.parse(raw));
      } catch (err) {
        console.error(err);
      }
    });
    // Mid-stream failures (connection reset while reading the body).
    response.on('error', function(err) {
      console.error(err);
    });
  });
  // BUG FIX: connection-level errors (DNS failure, refused connection) are
  // emitted on the *request* object, not the response, so the original code
  // left them unhandled and they would crash the process.
  req.on('error', function(err) {
    console.error(err);
  });
  req.end();
};
I see why this won't work as the http requests inside getJson are asynchronous and so res.send(quotes) will be sent back almost immediately. So, how can I get res.send(quotes) to be sent after the forEach loop is complete. Can I attach a callback onto a the forEach function?
In summary,
How can I use res.send(quotes) after the forEach loop is complete.
Is it possible to attach callbacks (such as a callback to be executed after the forEach loop) onto objects? What can I attach callbacks to? To be clear, the idea of a 'callback' to me means that the event loop will call it after the function/object in which the callback is attached to is finished executing.
Thanks for all the help!
Converting your getJson into a promise would be a good idea, as promises are nice to work with. Without the promises, the manual way is to keep a counter of outstanding requests:
// Track how many getJson calls are still in flight; when the last one
// finishes, every quote has been pushed and the response can be sent.
var outstanding = 0;
json.forEach(function(d) {
  outstanding++;
  getJson(quoteSearch(d.Symbol), function(j) {
    quotes.push(j);
    // Decrement; fire res.send only when this was the last response.
    if (!--outstanding) {
      res.send(quotes);
    }
  });
});
// BUG FIX: if `json` is empty the loop body never runs, so res.send would
// never be called and the HTTP request would hang. Reply immediately.
if (outstanding === 0) {
  res.send(quotes);
}
If you did go the promises way, you would make a map over json, and return the promise of the request; you could then specify a then over the array of promises. If you used jQuery instead of your own homebrew solution, for example,
// One jqXHR promise per symbol; the success handler collects each quote.
var requests = json.map(function(d) {
  return $.getJSON(quoteSearch(d.Symbol), function(j) {
    quotes.push(j);
  });
});
// BUG FIX: $.when does not accept an array directly — passed an array, it
// resolves immediately with the array as its value. Spread the promises
// with .apply so $.when actually waits for every request.
$.when.apply($, requests).then(function() {
  res.send(quotes);
});
I'm having trouble with the following function:
// NOTE(review): returns `length` synchronously, BEFORE the $.get callback
// has run — the returned array is empty at that point. This is exactly the
// bug being asked about.
function getLengthData(date, driverId) {
var length = [];
$
.get('http://xx/x/x/' + date + '/' + driverId + '')
.done(function (data) {
// Runs later, after the HTTP response arrives, pushing into an array the
// caller has already received (and likely already inspected).
var test = data.length;
length.push(test);
});
return length;
}
This returns nothing while it should return an array with 1 element, the length of the data array. The next function uses the same way and works perfectly:
// Same pattern as above: `stops` is returned before load() completes. It
// only *appears* to work because the caller holds a reference to the array,
// which is mutated later — presumably; verify against the calling code.
function getStopsFromStorage() {
var stops = [];
_2XLMobileApp.db.stopsData.load().done(function (result) {
$.each(result, function () {
stops.push(this.Id);
});
})
return stops;
}
I kinda have an idea what the problem is but no idea how to fix.
Any help would be greatly appreciated!
Thx
As you've already learned, you won't be able to use return with asynchronous functions like $.get(). The return statement that follows $.get() will occur before the request has actually completed.
One option you can do is to adjust the function to accept its own callback. Another is to return a Deferred/Promise so the calling code can apply the callback itself.
Better still may be to support both like $.get() and other Ajax methods.
// Asynchronous replacement supporting both a callback argument and the
// returned promise. then() maps the raw response to its length, so later
// done() handlers receive the length (requires jQuery 1.8+).
function getLengthData(date, driverId, callback) {
return $.get('http://xx/x/x/' + date + '/' + driverId + '')
.then(function (data) {
// Whatever is returned here becomes the value seen by .done() below.
return data.length;
})
.done(callback);
}
// Usage, callback style:
getLengthData('2013-07-31', '1234', function (length) {
// use `length` here
});
// Usage, promise style:
getLengthData('2013-07-31', '1234').done(function (length) {
// use `length` here
});
This snippet does require jQuery 1.8+ as it makes use of chaining deferreds with .then(). In short, the return data.length within getLengthData just changes the arguments for any further .done() callbacks, such as callback.
Example: http://jsfiddle.net/jMrVP/
Are you sure your host and port match in http://194.78.58.118:9001?
This can be Same origin policy problem.
EDIT:
AJAX is asynchronous JavaScript and XML. So you returning value before you get response from server.
I'm trying to sync up multiple ajax callbacks using jQuery.Deferred objects. Obviously jQuery.when handles this for you; however, my code is architected in such a way that the ajax requests aren't called in the same method. So for example this is the flow:
// A Button is clicked
// Module 1 requests a snippet of html and updates the DOM
// Module 2 requests a different snippet of html and updates the DOM
I need both Modules to update the DOM at the same time meaning I need to ensure the callbacks are run after both requests have returned.
Module 1 and Module 2 need to be able to exist without each other and should have no knowledge of one another so the requests can't be made together using $.when(doMod1Request(), doMod2Request()).then(function () { ... }) and the callbacks should be independent too.
I've therefore written a wrapper around ajax which adds the callbacks to a deferred object and in a similar way to $.when resolves the deferred object once the ajax requests have returned the same number of times as the number of callbacks on the deferred object.
My dilemma, however, is that deferred.resolve() can only be called with one set of arguments, so each callback gets the same value.
e.g.
// Demonstration: every done() handler attached to one deferred receives the
// SAME resolution arguments.
var deferred = new $.Deferred();
deferred.done(function (response) {
console.log(response); // <div class="html-snippet-1"></div>
});
deferred.done(function (response) {
console.log(response); // <div class="html-snippet-1"></div>
});
deferred.resolve('<div class="html-snippet-1"></div>');
Whereas I'd want something like this:
// Desired (hypothetical) behaviour: each handler would get its own snippet.
// jQuery deferreds do NOT work this way — resolve() arguments go to every
// handler alike, as the previous example shows.
var deferred = new $.Deferred();
deferred.done(function (response) {
console.log(response); // <div class="html-snippet-1"></div>
});
deferred.done(function (response) {
console.log(response); // <div class="html-snippet-2"></div>
});
deferred.resolve(['<div class="html-snippet-1"></div>', '<div class="html-snippet-2"></div>']);
Is this possible or am I going about this incorrectly?
I'd say this is perfectly valid. Assuming your independent modules, you would do (with two Promises):
// Fully independent modules: each request resolves its own update.
doMod1Request().done(doMod1Update);
doMod2Request().done(doMod2Update);
Now, if you want to to execute the updates together and only if the two requests both succeeded, just write
// Coupled variant: wait for BOTH requests to succeed, then apply both
// updates together in one handler.
$.when(doMod1Request(), doMod2Request()).done(function(mod1result, mod2result) {
doMod1Update(mod1result);
doMod2Update(mod2result);
});
This only gets ugly if you call your resolve functions with multiple arguments, as jQuery is a bit inconsistent there and does not really distinguish multiple arguments from one array argument.
To uncouple them with that publish-subscribe pattern you are using, I'd recommend the following:
// Collects deferreds together with their per-module success/error handlers,
// so independent modules can each contribute a request yet have their
// handlers run only after ALL requests have settled.
function Combination() {
  this.deferreds = [];
  this.success = [];
  this.error = [];
}

// Registers one module's deferred plus its success and error callbacks.
Combination.prototype.add = function(def, suc, err) {
  // BUG FIX: was `this.deffereds` (typo) — no such property exists on the
  // instance, so every add() call threw a TypeError.
  this.deferreds.push(def);
  this.success.push(suc);
  this.error.push(err);
};

// Waits for every registered deferred, then wires each one's own handlers.
Combination.prototype.start = function() {
  var that = this;
  return $.when.apply($, this.deferreds).always(function() {
    for (var i=0; i<that.deferreds.length; i++)
      that.deferreds[i].done(that.success[i]).fail(that.error[i]);
    // of course we could also call them directly with the arguments[i]
  });
};
// Then do
// Broadcast the event; each listening module registers its deferred and
// handlers on the shared Combination, then start() wires everything up.
var comb = new Combination();
window.notifyModules("something happened", comb); // get deferreds and handlers
comb.start();
// and in each module
window.listen("something happended", function(c) {
// NOTE(review): "happended" differs from the "something happened" notified
// above — as written this listener would never fire; likely a typo.
c.add(doRequest(), doUpdate, doErrorHandling);
});
Let's assume your modules look something like this :
// Module 1: exposes getSnippet(), which starts the ajax request and returns
// its jqXHR promise to the caller.
var MODULE_1 = (function () {
  return {
    getSnippet: function () {
      return $.ajax({
        //ajax options here
      });
    }
  };
}());
// Module 2: mirror of module 1 — getSnippet() kicks off the ajax call and
// hands back its promise.
var MODULE_2 = (function () {
  var api = {};
  api.getSnippet = function () {
    return $.ajax({
      //ajax options here
    });
  };
  return api;
}());
Don't worry if your modules are different, the important thing is that the getSnippet functions each return a jqXHR object, which (as of jQuery 1.5) implements the Promise interface.
Now, let's assume you want to fetch the two snippets in response to some event (say a button click) and do something when both ajax responses have been received, then the click handler will be something like this:
// NOTE(review): selector "myButton" matches a <myButton> element; "#myButton"
// was probably intended — left unchanged as it appears throughout the post.
$("myButton").on('click', function(){
  // Each module fetches its snippet independently; the done handlers only
  // record target/response pairs — no DOM update happens yet.
  var snippets = [];
  // BUG FIX: was declared `var promises_1` but referenced as `promise_1` in
  // $.when below, which would have been an undefined variable.
  var promise_1 = MODULE_1.getSnippet().done(function(response){
    snippets.push({
      target: $("#div_1"),
      response: response
    });
  });
  var promise_2 = MODULE_2.getSnippet().done(function(response){
    snippets.push({
      target: $("#div_2"),
      response: response
    });
  });
  // Both requests finished: apply every snippet to the DOM in one pass.
  $.when(promise_1, promise_2).done(function() {
    $.each(snippets, function(i, snippetObj) {
      snippetObj.target.html(snippetObj.response);
    });
  });
});
Slightly more elaborate, and better if you have many similarly constructed modules to fetch many snippets, would be something like this:
$(function(){
  $("myButton").on('click', function(){
    var promises = [];
    var snippets = [];
    var modules = [MODULE_1, MODULE_2, MODULE_3 /* ... */];
    // BUG FIXES: the original `for (var i=1; i<=10; i++)` skipped modules[0]
    // and ran past the end of the array; the shared `var i` meant every async
    // done handler saw i === 11 (all snippets targeted "#div_11"); and a
    // stray `};` instead of `});` made the snippet a syntax error. forEach
    // gives each callback its own index and iterates exactly the array.
    modules.forEach(function(module, idx) {
      promises.push(module.getSnippet().done(function(response){
        snippets.push({
          target: $("#div_" + (idx + 1)), // divs are numbered from 1
          response: response
        });
      }));
    });
    // Once every request has settled, write all snippets to the DOM.
    $.when.apply(this, promises).done(function() {
      $.each(snippets, function(i, snippetObj) {
        snippetObj.target.html(snippetObj.response);
      });
    });
  });
});
As you can see, I've made heaps of assumptions here, but you should get some idea of how to proceed.
To ensure each callback is passed the appropriate arguments I've done the following:
var guid = 0,
    deferreds = [];

// Wraps $.ajax so several modules can share ONE deferred: each call made
// with the same `deferred` increments its request count, and the deferred is
// resolved only when every associated request has responded. Each `success`
// callback still receives the arguments of its *own* request, keyed by a
// unique requestId held in this closure.
window.request = function (url, deferred, success) {
  var requestId = guid++;
  // First time we see this deferred: register it and attach bookkeeping.
  // BUG FIX: $.inArray requires the array as its second argument; without it
  // the lookup always failed, so the counters were re-initialised on every
  // call and the deferred could resolve after a single response.
  if ($.inArray(deferred, deferreds) === -1) {
    deferreds.push(deferred);
    $.extend(deferred, {
      requestCount: 0,
      responseCount: 0,
      args: {}
    });
  }
  deferred.requestCount++;
  deferred
    .done(function () {
      // Corresponding arguments are passed into success callback using requestId
      // which is unique to each request.
      success.apply(this, deferred.args[requestId]);
    });
  $.ajax(url, {
    success: function () {
      // Store this request's arguments on the deferred's args object.
      deferred.args[requestId] = arguments;
      deferred.responseCount++;
      if (deferred.requestCount === deferred.responseCount) {
        deferred.resolveWith(this);
      }
    }
  });
};
So the arguments are managed through the closure. This allows me to ensure that both modules have no knowledge of each other and won't break if the other doesn't exist, e.g:
// Module 1 owns the button; it only broadcasts a notification along with a
// fresh deferred — it knows nothing about modules 2 and 3.
var MODULE_1 = function () {
$(".myButton").on('click', function() {
// Cross module communication is achieved through notifications.
// Pass along a new deferred object with notification for use in window.request
window.notify('my-button-clicked', new $.Deferred);
});
}();
// Modules 2 and 3 subscribe independently; both feed the SAME deferred into
// window.request, so their success callbacks run only after both requests
// have returned.
var MODULE_2 = function () {
// run get snippet when 'my-button-clicked' notification is fired
window.listen('my-button-clicked', getSnippet);
function getSnippet (deferred) {
window.request('/module2', deferred, function () {
console.log('module2 success');
});
}
}();
var MODULE_3 = function () {
// run get snippet when 'my-button-clicked' notification is fired
window.listen('my-button-clicked', getSnippet);
function getSnippet (deferred) {
window.request('/module3', deferred, function () {
console.log('module3 success');
});
}
}();
The above allows each module to function independently meaning one will work without the other which loosely couples the code and because both MODULE_2 and MODULE_3 pass the same deferred object into window.request they will be resolved once both requests have successfully returned.
This was my final implementation:
https://github.com/richardscarrott/ply/blob/master/src/ajax.js
I need to call an async function (with loop) for multiple values and wait for those results. Right now I'm using the following code:
(function(){
  // Invoked once every ajax request has completed.
  var when_done = function(r){ alert("Completed. Sum of lengths is: [" + r + "]"); }; // call when ready
  var datain = ['google','facebook','youtube','twitter']; // the data to be parsed
  // Control object: `pending` counts in-flight requests, `fordone` marks the
  // loop itself as finished, `data` accumulates summed response lengths.
  var response = {pending:0, fordone:false, data:0};
  response.cb = function(){
    // if there are pending requests, or the loop isn't ready yet do nothing
    if(response.pending||!response.fordone) return;
    // otherwise alert.
    return when_done.call(null,response.data);
  }
  // BUG FIX: the condition was `i<datain` — comparing a number to an array,
  // which is always false, so the loop body (and thus every request) never
  // ran and when_done fired immediately with 0.
  for(var i=0; i<datain.length; i++)(function(i){
    response.pending++; // increment pending requests count
    $.ajax({url:'http://www.'+datain[i]+'.com', complete:function(r){
      response.data+= (r.responseText.length);
      response.pending--; // decrement pending requests count
      response.cb(); // call the callback
    }});
  }(i));
  response.fordone = true; // mark the loop as done
  response.cb(); // call the callback
}());
This isn't all very elegant but it does the job.
Is there any better way to do it? Perhaps a wrapper?
Async JS to the rescue (for both client-side and server-side JavaScript)! Your code may look like this (after including async.js):
// The four hosts to request.
var datain = ['google','facebook','youtube','twitter'];
// Build one task function per host; async.parallel runs them concurrently.
var calls = [];
$.each(datain, function(i, el) {
calls.push( function(callback) {
$.ajax({
url : 'http://www.' + el +'.com',
error : function(e) {
// Node-style convention: a non-null first argument signals failure.
callback(e);
},
success : function(r){
callback(null, r);
}
});
});
});
// Fires the final callback once every task has called back (or on first error).
async.parallel(calls, function(err, result) {
/* This function will be called when all calls finish the job! */
/* err holds possible errors, while result is an array of all results */
});
By the way: async has some other really helpful functions.
By the way 2: note the use of $.each.
You can use the jQuery Deferred object for this purpose.
var def = $.when.apply(null, xhrs) with xhrs being an array containing the return values of your $.ajax() requests. Then you can register a callback def.done(function() { ... }); and use the arguments array-like object to access the responses of the various requests. to properly process them, remove your complete callback and add dataType: 'text' and use the following callback for done():
// done() callback for $.when.apply(null, xhrs): with dataType 'text', each
// request contributes its response text via `arguments`; join them all into
// one string.
function() {
var response = Array.prototype.join.call(arguments, '');
// do something with response
}