minimal node.js synchronous wait on http request

Consider a node.js loop to fire off http requests, like this:
var http = require('http');
function sendOne(opts, body)
{
var post_options = {
...
}
var sender = http.request(post_options, function(res) {
// process response
});
sender.write(body);
sender.end();
return sender;
}
for (var i = 0; i < maxUploadIterations; i++)
{
var body = // construct payload
var sent = sendOne(opts, body);
// somehow wait on sent...
}
Note that the http.request object has a callback function specified for handling the response. My question is: how do I synchronously wait on the "sent" object returned from sendOne using the built-in Node primitives?
I understand there are multiple frameworks like Express and Futures that can handle this, but I want to understand the primitive behavior rather than using a "magic" framework.
Is it possible to take "Sent" and put a handler on it like this:
var sent = ...
sent.on("complete", function() { ... } );
?
If so, exactly which handler and how to format the loop with it?

Another option: use old-fashioned callbacks... though promises may be cleaner.
var http = require('http');
function sendOne(opts, body, next) {
var post_options = {
// ...
};
var sender = http.request(post_options, function (res) {
// process response
next();
});
sender.write(body);
sender.end();
}
var maxUploadIterations = 100;
function next(index) {
// termination condition
if (index >= maxUploadIterations) {
return;
}
// setup next callback
var nextCallback = next.bind(null, index + 1);
// get these from wherever
var opts = {};
var body = {};
sendOne(opts, body, nextCallback);
}
next(0);
If you have more work to do afterwards, you can change the termination condition to call something else, rather than return.
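For comparison, here is a rough sketch of the same loop using a native Promise wrapper and async/await (assuming a Node version that supports them; post_options and the payload construction are placeholders, as in the question):
var http = require('http');

function sendOne(opts, body) {
    return new Promise(function (resolve, reject) {
        var post_options = {
            // ... build from opts, as in the original
        };
        var sender = http.request(post_options, function (res) {
            var chunks = [];
            res.on('data', function (chunk) { chunks.push(chunk); });
            res.on('end', function () { resolve(Buffer.concat(chunks)); });
        });
        sender.on('error', reject);
        sender.write(body);
        sender.end();
    });
}

async function uploadAll(opts, maxUploadIterations) {
    for (var i = 0; i < maxUploadIterations; i++) {
        var body = ''; // construct the payload here
        await sendOne(opts, body); // waits for each response before starting the next request
    }
}
Calling uploadAll(opts, maxUploadIterations) returns a promise that resolves only after every request has completed, in order.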

Related

How to continue the remaining ajax requests if a file is not found?

I'm trying to script a function which takes all the css/js files marked by attribute-data and refreshes if any of the scripts have been modified on the server side. My initial attempt involved php and jquery/javascript. This new attempt is based on javascript/jquery only!
My problem is that while chaining the ajax requests to these files (for the modification date), all ajax requests stop if a file is not found. For example, if I rename the existing style.css to style_.css (which doesn't exist), all the chained ajax requests get aborted and the code doesn't continue.
var file_url = [url1, url2, url3, url4, url5];
function getLatestModificationDate(file_url){
$.when.apply($, file_url.map(function(url) {
return $.ajax({ type: 'HEAD', url: url, beforeSend: function(jqXHR, settings) { jqXHR.url = settings.url; } });
})).done(function() {
var fileArray = [], lastModified, file_jqXHR;
//each argument passed to this callback for ajax, is of this form [data, statusText, jqXHR]
for (var i = 0; i < arguments.length; i++) {
var obj= {};
file_jqXHR = arguments[i][2]; //jqXHR
lastModified = file_jqXHR.getResponseHeader('Last-Modified');
obj['file'] = file_jqXHR.url;
obj['modDate'] = lastModified;
fileArray.push(obj);
}
mainFunction(fileArray); //the main function, not in scope for this question.
});
}
I tried adding an error option to the ajax call after beforeSend, but that didn't allow the remaining ajax requests to continue. I don't know whether the returned ajax call could return false to skip the current request on a 404, because I don't know how to skip or return false for the ajax. Is there any quick way to check whether a file exists, so that I only add existing files to the file_url array that's passed to getLatestModificationDate(file_url)?
EDIT: Here's a screenshot from the Chrome-Console.
EDIT: I found another question's answer that uses a new deferred for the ajax complete... could someone explain how that code can be adapted to my case? Thanks!
var myDeferred = $.Deferred();
var origDeferred = $.ajax(...);
// if request is ok, i resolve my deferred
origDeferred.done(function() {
myDeferred.resolve.apply(this, arguments);
});
// if request failed, i also resolve my deferred
origDeferred.fail(function() {
myDeferred.resolve.apply(this, arguments);
});
You can use a try-catch block:
for (var i = 0; i < arguments.length; i++) {
try{
var obj= {};
file_jqXHR = arguments[i][2]; //jqXHR
lastModified = file_jqXHR.getResponseHeader('Last-Modified');
obj['file'] = file_jqXHR.url;
obj['modDate'] = lastModified;
fileArray.push(obj);
}catch(e){}
}
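As for the edit about the deferred: the trick is to wrap each request in a deferred that resolves on both success and failure, so $.when never rejects early on a 404. A rough, untested sketch adapted to the code in the question:
function headRequest(url) {
    var deferred = $.Deferred();
    $.ajax({ type: 'HEAD', url: url })
        .done(function (data, status, jqXHR) {
            // keep the url alongside the result so it can be read later
            deferred.resolve({ url: url, jqXHR: jqXHR });
        })
        .fail(function () {
            // resolve (not reject) so $.when keeps going when a file is missing
            deferred.resolve({ url: url, jqXHR: null });
        });
    return deferred.promise();
}

function getLatestModificationDate(file_url) {
    $.when.apply($, file_url.map(headRequest)).done(function () {
        var fileArray = [];
        for (var i = 0; i < arguments.length; i++) {
            var result = arguments[i];
            if (result.jqXHR) { // skip files that could not be fetched
                fileArray.push({
                    file: result.url,
                    modDate: result.jqXHR.getResponseHeader('Last-Modified')
                });
            }
        }
        mainFunction(fileArray);
    });
}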

Parse.Cloud.job promise not working

What I am trying to do here is:
Remove all contents in a class first, because every day the events.json file will be updated. I have my first question here: is there a better way to remove all contents from a database class on Parse?
Then I will send a request to get the events.json and store "name" and "id" of the result into a 2D array.
Then I will send multiple requests to get json files of each "name" and "id" pairs.
Finally, I will store the event details in the database (one event per row). But right now my code terminates before it has downloaded the json files.
Code:
function newLst(results) {
var event = Parse.Object.extend("event");
for (var i = 0; i < results.length; i++){
Parse.Cloud.httpRequest({
url: 'https://api.example.com/events/'+ results[i].name +'/'+ results[i].id +'.json',
success: function(newLst) {
var newJson = JSON.parse(newLst.text);
var newEvent = new event();
newEvent.set("eventId",newJson.data.id);
newEvent.set("eventName",newJson.data.title);
newEvent.save(null, {
success: function(newEvent) {
alert('New object created with objectId: ' + newEvent.id);
},
error: function(newEvent, error) {
alert('Failed to create new object, with error code: ' + error.message);
}
});
},
error: function(newLst) {
}
});
}
};
Parse.Cloud.job("getevent", function(request, status) {
var event = Parse.Object.extend("event");
var query = new Parse.Query(event);
query.notEqualTo("objectId", "lol");
query.limit(1000);
query.find({
success: function(results) {
for (var i = 0; i < results.length; i++) {
var myObject = results[i];
myObject.destroy({
success: function(myObject) {
},
error: function(myObject, error) {
}
});
}
},
error: function(error) {
alert("Error: " + error.code + " " + error.message);
}
});
var params = { url: 'https://api.example.com/events.json'};
Parse.Cloud.httpRequest(params).then(function(httpResponse) {
var results = [];
var jsonobj = JSON.parse(httpResponse.text);
for (var i = 0; i < jsonobj.data.length; i++) {
var tmp2D = {"name":"id"}
tmp2D.name = [jsonobj.data[i].name];
tmp2D.id = [jsonobj.data[i].id];
results.push(tmp2D);
}
newLst(results);
}).then(function() {
status.success("run job");
}, function(error) {
status.error(error);
});
});
I think my original answer is correct as a standalone. Rather than make it unreadable with the additional code, here it is made very specific to your edit.
The key is to eliminate passed callback functions. Everything below uses promises. Another key idea is decompose the activities into logical chunks.
A couple of caveats: (1) There's a lot of code here, and the chances that either your code or mine contains a mistake are still high, but this should communicate the gist of a better design. (2) We're doing enough work in these functions that we might bump into a parse-imposed timeout. Start out by testing all this with small counts.
Start with your question about destroying all instances of class...
// return a promise to destroy all instances of the "event" class
function destroyEvents() {
// is your event class really named with lowercase? uppercase is conventional
var query = new Parse.Query("event");
query.notEqualTo("objectId", "lol"); // doing this because the OP code did it. not sure why
query.limit(1000);
return query.find().then(function(results) {
return Parse.Object.destroyAll(results);
});
}
Next, get remote events and format them as simple JSON. See the comment. I'm pretty sure your idea of a "2D array" was ill-advised, but I may be misunderstanding your data...
// return a promise to fetch remote events and format them as an array of objects
//
// note - this differs from the OP data. this will evaluate to:
// [ { "name":"someName0", id:"someId0" }, { "name":"someName1", id:"someId1" }, ...]
//
// original code was producing:
// [ { "name":["someName0"], id:["someId0"] }, { "name":["someName1"], id:["someId1"] }, ...]
//
function fetchRemoteEvents() {
var params = { url: 'https://api.example.com/events.json'};
return Parse.Cloud.httpRequest(params).then(function(httpResponse) {
var results = [];
var remoteEvents = JSON.parse(httpResponse.text).data;
for (var i = 0; i < remoteEvents.length; i++) {
var remoteEvent = { "name": remoteEvents[i].name, "id": remoteEvents[i].id };
results.push(remoteEvent);
}
return results;
});
}
Please double check all of my work above regarding the format (e.g. response.text, JSON.parse().data, etc).
It's too easy to get confused when you mix callbacks and promises, and even worse when you're generating promises in a loop. Here again, we break out a simple operation to create a single parse.com object based on one of the remote events we got in the function above...
// return a promise to create a new native event based on a remoteEvent
function nativeEventFromRemoteEvent(remoteEvent) {
var url = 'https://api.example.com/events/'+ remoteEvent.name +'/'+ remoteEvent.id +'.json';
return Parse.Cloud.httpRequest({ url:url }).then(function(response) {
var eventDetail = JSON.parse(response.text).data;
var Event = Parse.Object.extend("event");
var event = new Event();
event.set("eventId", eventDetail.id);
event.set("eventName", eventDetail.title);
return event.save();
});
}
Finally, we can bring it together in a job that is simple to read, certain to do things in the desired order, and certain to call success() when (and only when) it finishes successfully...
// the parse job removes all events, fetches remote data that describe events
// then builds events from those descriptions
Parse.Cloud.job("getevent", function(request, status) {
destroyEvents().then(function() {
return fetchRemoteEvents();
}).then(function(remoteEvents) {
var newEventPromises = [];
for (var i = 0; i < remoteEvents.length; i++) {
var remoteEvent = remoteEvents[i];
newEventPromises.push(nativeEventFromRemoteEvent(remoteEvent));
}
return Parse.Promise.when(newEventPromises);
}).then(function() {
status.success("run job");
}, function(error) {
status.error(error);
});
});
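If the job does bump into the parse-imposed timeout mentioned earlier, one possible mitigation (a sketch only, reusing nativeEventFromRemoteEvent from above) is to chain the saves so they run one at a time instead of all at once:
// build the events sequentially by chaining promises rather than firing them all in parallel
function createEventsSequentially(remoteEvents) {
    var promise = Parse.Promise.as(); // start from an already-resolved promise
    remoteEvents.forEach(function(remoteEvent) {
        promise = promise.then(function() {
            return nativeEventFromRemoteEvent(remoteEvent);
        });
    });
    return promise;
}
The job would then return createEventsSequentially(remoteEvents) from its second .then() instead of collecting newEventPromises and calling Parse.Promise.when(). Sequential saves are slower overall, so this is only worth doing if the parallel version actually hits the limit.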
The posted code does just one http request so there's no need for an array of promises or the invocation of Promise.when(). The rest of what might be happening is obscured by mixing the callback parameters to httpRequest with the promises and the assignment inside the push.
Here's a clarified rewrite:
Parse.Cloud.job("getevent", function(request, status) {
var promises = [];
var params = { url: 'https://api.example.com'};
Parse.Cloud.httpRequest(params).then(function(httpResponse) {
var results = [];
var jsonobj = JSON.parse(httpResponse.text);
for (var i = 0; i < jsonobj.data.length; i++) {
// some code
}
}).then(function() {
status.success("run job");
}, function(error) {
status.error(error);
});
});
But there's a very strong caveat here: this works only if the ("// some code") that appears in your original post doesn't itself try to do any async work, database or otherwise.
Let's say you do need to do async work in that loop. Move that work into a promise-returning function, collect those promises in an array, and then use Promise.when(), e.g....
// return a promise to look up some object, change it and save it...
function findChangeSave(someJSON) {
var query = new Parse.Query("SomeClass");
query.equalTo("someAttribute", someJSON.lookupAttribute);
return query.first().then(function(object) {
object.set("someOtherAttribute", someJSON.otherAttribute);
return object.save();
});
}
Then, in your loop...
var jsonobj = JSON.parse(httpResponse.text);
var promises = [];
for (var i = 0; i < jsonobj.data.length; i++) {
// some code, which is really:
var someJSON = jsonobj.data[i];
promises.push(findChangeSave(someJSON));
}
return Parse.Promise.when(promises);

Javascript : return XMLHttpRequest out of scope

I need to return the data from my AJAX call
series: [{
data: ( )
in order to update the data key in the series object, but my function retrieve does not seem to return the data that I am getting.
var myPopulation = {
series: [{
data: (
function() {
function retrieve() {
var httpRequest = new XMLHttpRequest();
httpRequest.onreadystatechange = function() {
if (httpRequest.readyState === 4) {
if (httpRequest.status === 200) {
var obj = JSON.parse(httpRequest.responseText)
console.log(obj.V1, obj.V2, obj.V3, obj.V4);
var data = [],
time = (new Date()).getTime(),
i;
for (i = -60; i <= 0; i++) {
console.log(obj.V2)
data.push({
x: time + i * 60 * 1000,
y: obj.V2
});
}
myPopulation.series[1].data = data
// ???
console.log(data)
}
}
};
httpRequest.open('GET', "/myCall");
httpRequest.send();
}
retrieve();
}()
)
}],
What should I do to return the value out of the function and update data?
Well, since you are using the jQuery tag, I think a jQuery-based answer is valid here, and I prefer doing it this way for what you need as I understood it (it is well explained, so please read the code comments and check the browser console; the code can be found at the end of the answer).
Remember that you won't be able to return the result of an XMLHttpRequest directly, because ajax calls are async. You can force an ajax call to be synchronous in order to get your data from a return statement, but forcing that is not a good approach: the UI will freeze for the user until the response comes back from the server, and you really don't know how long that will take (especially if you are expecting a large amount of data to be returned).
Hope this helps and please take your time and read the following post and user comments: Reasons not to use native XMLHttpRequest - why is $.ajax mandatory?
Live Demo: http://jsfiddle.net/4mbjjfx8/
HTML:
<div id="wrapper">
<div id="loader"></div>
<div id="content"></div>
</div>
jQuery:
$(function() {
var series = [], // Your series array
loader = $('#loader'), // UI loader sample
request = {}; // Request params
/**
* Set request method, url and data if needed
* In this case I am sending an object with a text property
* that will be returned from jsfiddle "echo" service
*/
request.method = 'GET';
request.url = '/echo/jsonp/';
request.data = {
text: 'Second message returned from "echo" service'
};
// Send ajax call
retrieveData(request, series, handleData);
// Set loading message to UI
loader.html('Loading...');
// Just do some logging to know how process goes
console.log('Populating series for the first time');
/**
* Populate series for the first time, at this point process
* will go on and after the response from server was finally
* done, process will go to the callback (since ajax calls
* are async).
*/
populate(series);
// Just do some logging to know how process goes
console.log('End populating series for the first time');
});
function populate(series) {
var dummy = {
text: 'First message populated over process'
};
// Set dummy object to series array
series.push(dummy);
};
/**
* Used to make ajax call and return data from server
*/
function retrieveData(cfg, series, callback) {
$.ajax({
type: cfg.method,
url: cfg.url,
data: cfg.data
}).done(function(data, status, xhr) {
// Pass args to callback function if defined
if (callback) callback(series, data);
}).fail(function(xhr, status) {
/**
* Pass args to callback function if defined
* At this point, request wasn't success so
* force data arg at callback to be 'null'
*/
if (callback) callback(series, null);
});
};
/**
* Used to handle data returned from server
* Note: Your series array can be modified here since you
* passed it into the callback
*/
function handleData(series, data) {
var loader = $('#loader');
// Just do some logging to know how process goes
console.log('Populating series from server');
// Check if data is defined and not an empty object
if(data && !($.isEmptyObject(data))) {
// Add it to series array
series.push(data);
}
// Set UI loader empty
loader.html('');
// Retrieve series
showData(series);
};
function showData(series) {
var contentDiv = $('#content');
// Loop process and append to UI
for(var i = 0; i < series.length; ++i) {
contentDiv.append(series[i].text + '<br>');
}
};
You should move the retrieve function outside the object literal. You can then invoke retrieve; it will make the ajax call, and when the call succeeds it will update the data of myPopulation. Like this:
var myPopulation = {
series: [{
data: undefined
}]
};
function retrieve() {
var httpRequest = new XMLHttpRequest();
httpRequest.onreadystatechange = function() {
if (httpRequest.readyState === 4) {
if (httpRequest.status === 200) {
var obj = JSON.parse(httpRequest.responseText)
console.log(obj.V1, obj.V2, obj.V3, obj.V4);
var data = [],
time = (new Date()).getTime(),
i;
for (i = -60; i <= 0; i++) {
console.log(obj.V2)
data.push({
x: time + i * 60 * 1000,
y: obj.V2
});
}
myPopulation.series[0].data = data
// ???
console.log(data)
}
}
};
httpRequest.open('GET', "/myCall");
httpRequest.send();
}
retrieve();
Assuming you're simply trying to set the value of myPopulation.series[0].data when the array is first defined...
myPopulation.series[1].data = data
...should be...
myPopulation.series[0].data = data;
Also, some parts of your code are missing closing semicolons, closing brackets and/or curly brackets. Please make sure you end all statements with a semicolon and that you have an equal number of opening and closing (curly) brackets.
I've tested your code with the above changes. The HTTP request I made returned a simple "Test successful" string, so I've replaced the code which handles the structure of the response text with simply var data = httpRequest.responseText;. This worked fine. Of course, this assumes the code which handles the structure of the returned httpRequest.responseText in your case is correct, as I have no way of knowing what the responseText in your case looks like. If you receive any errors regarding this part of your code, we'll need to see what the responseText looks like before we can help you.
I'm not judging whether you are doing the right thing. I'm merely presenting you a working version of your code.
Errors in your code:
You mean to set the result of the function as data, but your function is not returning anything in the first place.
XMLHttpRequest is async, so even if you did return something, the data would not be set, simply because the outer function exits right after making the http request and setting a callback to trigger when it completes.
Note: the fix below works by making the XMLHttpRequest synchronous.
https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/Synchronous_and_Asynchronous_Requests
Here is the corrected version of your code
var myPopulation = {
series: [{
data: (
function() {
function retrieve() {
var httpRequest = new XMLHttpRequest();
var result = []; //[1] just renamed data to result to avoid confusion
httpRequest.onreadystatechange = function() {
if (httpRequest.readyState === 4) {
if (httpRequest.status === 200) {
var obj = JSON.parse(httpRequest.responseText)
console.log(obj.V1, obj.V2, obj.V3, obj.V4);
var time = (new Date()).getTime(),
i;
for (i = -60; i <= 0; i++) {
console.log(obj.V2)
result.push({
x: time + i * 60 * 1000,
y: obj.V2
});
}
//myPopulation.series[1].data = data //[2] commented this as it is not necessary
// ???
console.log(result)
}
}
};
httpRequest.open('GET', "/myCall", false); //[3] Added 3rd argument 'false' to make the call synchronous
httpRequest.send();
return result //[4] to convey the result outside
}
return retrieve(); //[5] added return to set it to the data
}()
)
}],
The above code is not tested however. Here is a tested solution http://jsfiddle.net/98f9amo8/1/
The jsfiddle content is different for obvious reasons.
Working with async code means you have to change the way you code because the code is not executed top-down any more.
So, in your case, you would do something like:
var myPopulation = {series: []};
$.get(..., my_function_that_will_format_the_data_after_they_have_been_received);
...
function my_function_that_will_format_the_data_after_they_have_been_received() {
// Do stuff here
var formattedData = ...
myPopulation.series.push(formattedData);
// ONLY NOW, myPopulation is ... populated with data.
// So, whatever you use this for, need to be called here
doMagicWith(myPopulation);
}
...
/// Here, myPopulation is empty. doMagicWith(myPopulation) will fail here.
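Since the question avoids jQuery, here is a rough equivalent of that pattern with a plain XMLHttpRequest (the /myCall URL and the V2 field are taken from the question; doMagicWith stands for whatever needs the populated data):
var myPopulation = { series: [{ data: [] }] };

function retrieve(onDataReady) {
    var httpRequest = new XMLHttpRequest();
    httpRequest.onreadystatechange = function () {
        if (httpRequest.readyState === 4 && httpRequest.status === 200) {
            // hand the parsed response to the callback instead of trying to return it
            onDataReady(JSON.parse(httpRequest.responseText));
        }
    };
    httpRequest.open('GET', '/myCall');
    httpRequest.send();
}

retrieve(function (obj) {
    var data = [], time = (new Date()).getTime();
    for (var i = -60; i <= 0; i++) {
        data.push({ x: time + i * 60 * 1000, y: obj.V2 });
    }
    myPopulation.series[0].data = data;
    // anything that uses myPopulation has to run from here (or later)
    doMagicWith(myPopulation);
});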
I do not know the context of how you are doing this, but seeing no jQuery tells me you wish to avoid it.
No matter what happens, the call is going to take time, and you need to wait for it before you can do whatever you need to do with the result. Loaders can help tell a user that it's processing, but there are other ways to do that as well. The common factor is that the data is not going to be there when you need it unless you use some sort of callback.
So here is an idea: create your own onload event, more or less. There are many things to keep an eye on, so jQuery's implementation is probably the most complete, but I'm going to keep it simple here.
window.isLoaded = false;
window.ajaxLoaded = false;
window.onload = function(){
if(window.ajaxLoaded){
onReadyFunction();
}else{
window.isLoaded = true;
}
}
//skipping most of your code, the key part is the retrieve function.
//So its the only part I am going to include in this part.
function retrieve() {
var httpRequest = new XMLHttpRequest();
httpRequest.onreadystatechange = function() {
if (httpRequest.readyState === 4) {
if (httpRequest.status === 200) {
var obj = JSON.parse(httpRequest.responseText)
console.log(obj.V1, obj.V2, obj.V3, obj.V4);
var data = [],
time = (new Date()).getTime(),
i;
for (i = -60; i <= 0; i++) {
console.log(obj.V2)
data.push({
x: time + i * 60 * 1000,
y: obj.V2
});
}
myPopulation.series[1].data = data
// ???
console.log(data)
}
}
//here is my only addition
if(window.isLoaded){
onReadyFunction();
}else{
window.ajaxLoaded = true;
}
};
httpRequest.open('GET', "/myCall");
httpRequest.send();
}
So all I am doing is adding another part to the typical DOM load: waiting for the data you need to be available before the rest of the JS is initialized. This way you keep downtime for your app to a minimum (although it depends on where you are trying to get this data from). All you need is to define onReadyFunction like so:
function onReadyFunction(){
//all the rest of your JS here
}
This can be expanded and organized very easily; it's just a simple example to get started.

nodejs multiple http requests in loop

I'm trying to make a simple feed reader in node and I'm facing a problem with multiple requests in node.js.
For example, I've got a table with urls, something like:
urls = [
"http://url1.com/rss.xml",
"http://url2.com",
"http://url3.com"];
Now I want to get the contents of each url. My first idea was to use for(var i in urls), but it's not a good idea. The best option would be to do it asynchronously, but I don't know how to do that.
Any ideas?
EDIT:
I got this code:
var data = [];
for(var i = 0; i<urls.length; i++){
http.get(urls[i], function(response){
console.log('Response: ', response.statusCode, ' from url: ', urls[i]);
var body = '';
response.on('data', function(chunk){
body += chunk;
});
response.on('end', function() {
data.push(body);
});
}).on('error', function(e){
console.log('Error: ', e.message);
});
}
The problem is that http.get is called first for each element in the loop, and only afterwards is the response.on('data') event fired, and after that response.on('end'). It makes a mess and I don't know how to handle this.
I know this is an old question, but I think a better solution would be to use JavaScript's Promise.all():
const request = require('request-promise');
const urls = ["http://www.google.com", "http://www.example.com"];
const promises = urls.map(url => request(url));
Promise.all(promises).then((data) => {
// data = [body1, body2]: the resolved response bodies, in url order
});
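If you'd rather not pull in request-promise, you can get the same shape with the built-in http module by wrapping each request in a Promise yourself (a minimal sketch, error handling kept to the basics):
const http = require('http');

// wrap a single GET request in a Promise that resolves with the full response body
function get(url) {
    return new Promise((resolve, reject) => {
        http.get(url, (res) => {
            let body = '';
            res.on('data', (chunk) => { body += chunk; });
            res.on('end', () => resolve(body));
        }).on('error', reject);
    });
}

const urls = ["http://www.google.com", "http://www.example.com"];
Promise.all(urls.map(get)).then((bodies) => {
    // bodies[0] belongs to urls[0], bodies[1] to urls[1], and so on
    console.log(bodies.length);
});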
By default node http requests are asynchronous. You can start them sequentially in your code and call a function that'll run when all requests are done. You can either do it by hand (count the finished vs. started requests) or use async.js.
This is the no-dependency way (error checking omitted):
var http = require('http');
var urls = ["http://www.google.com", "http://www.example.com"];
var responses = [];
var completed_requests = 0;
for (var i in urls) {
http.get(urls[i], function(res) {
responses.push(res);
completed_requests++;
if (completed_requests == urls.length) {
// All download done, process responses array
console.log(responses);
}
});
}
You need to check that end (the data-complete event) has been called exactly as many times as there are requests... Here's a working example:
var http = require('http');
var urls = ['http://adrianmejia.com/atom.xml', 'http://twitrss.me/twitter_user_to_rss/?user=amejiarosario'];
var completed_requests = 0;
var responses = [];
urls.forEach(function(url) {
var body = '';
http.get(url, function(res) {
res.on('data', function(chunk){
body += chunk;
});
res.on('end', function(){
responses.push(body);
if (completed_requests++ == urls.length - 1) {
// All downloads are completed
console.log('body:', responses.join());
}
});
});
})
You can use any promise library with an ".all" implementation. I use the RSVP library; it's simple enough.
var fs = require('fs');
var request = require('request');
var RSVP = require('rsvp');

var downloadFileList = [{ url: 'http://stuff', dataname: 'filename to download' }];
var ddownload = downloadFileList.map(function(id){
var dataname = id.dataname;
var url = id.url;
return new RSVP.Promise(function(fulfill, reject) {
var stream = fs.createWriteStream(dataname);
stream.on('close', function() {
console.log(dataname+' downloaded');
fulfill();
});
request(url).on('error', function(err) {
console.log(err);
reject();
}).pipe(stream);
});
});
return RSVP.allSettled(ddownload);
Promise.allSettled will not stop at the first error. It makes sure you process all responses, even if some have an error.
Promise.allSettled(promises)
.then((data) => {
// do your stuff here
})
.catch((err) => {
console.log(JSON.stringify(err, null, 4));
});
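Each entry passed to the .then callback describes how its promise settled, so you can separate the successes from the failures explicitly. For example (assuming promises is an array of request promises as above):
Promise.allSettled(promises).then((results) => {
    results.forEach((result, i) => {
        if (result.status === 'fulfilled') {
            console.log('request', i, 'succeeded, body length:', result.value.length);
        } else {
            console.log('request', i, 'failed:', result.reason.message);
        }
    });
});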
The problem can be easily solved using a closure. Make a function to handle the request and call that function in the loop. Every time the function is called it gets its own lexical scope and, using the closure, it retains the address of the URL even after the loop ends. And even if the response comes in streams, the closure handles that too.
const request = require("request");
function getTheUrl(data) {
var options = {
url: "https://jsonplaceholder.typicode.com/posts/" + data
}
return options
}
function consoleTheResult(url) {
request(url, function (err, res, body) {
console.log(url);
});
}
for (var i = 0; i < 10; i++) {
consoleTheResult(getTheUrl(i))
}

Getting a process.tick error when trying to queue up http requests

I made my Request object queue up individual HTTP requests and process them one by one using process.nextTick. However, I am getting an error that I don't know how to solve:
node.js:244
callback();
^
TypeError: undefined is not a function
at process.startup.processNextTick.process._tickCallback (node.js:244:9)
I'm not sure what I am doing wrong. Here is the relevant class.
var Request = function() {
return this;
};
Request.prototype = {
queue_: []
};
Request.prototype.send = function(url, done) {
this.queue_.push(new QueueableRequest(url, done));
this.processRequest_();
}
Request.prototype.processRequest_ = function() {
if (this.queue_.length > 0) {
var request = this.queue_.shift();
var data = '';
http.get(request.url_, function(res) {
res.setEncoding('utf8');
res.on('data', function(chunk) {
data += chunk;
}).on('end', function() {
request.callback_(null, JSON.parse(data));
process.nextTick(this.processRequest_);
}).on('error', function(err) {
request.callback_(err, null);
process.nextTick(this.processRequest_);
});
});
}
}
My other question is whether this is a good method of slowing down my HTTP requests. What I am trying to do is this... I make an HTTP request for a list of threads (about 15-20), and then for each thread, I make another request to obtain its replies. Sometimes within replies, I have to make another request for the deeply nested replies. My initial solution was simply to call http.get for every request, but I found that my node.js stops responding after a few requests and I have to keep restarting the server and refreshing the page. My thought was that I am perhaps sending too many requests at once, so I tried to implement this queue.
Your this inside your event handlers is incorrect, so your this.processRequest_ is undefined.
Request.prototype.processRequest_ = function() {
// Assign the outer request object to a variable so you can access it.
var self = this;
if (this.queue_.length > 0) {
var request = this.queue_.shift();
var data = '';
http.get(request.url_, function(res) {
res.setEncoding('utf8');
res.on('data', function(chunk) {
data += chunk;
}).on('end', function() {
request.callback_(null, JSON.parse(data));
process.nextTick(function(){
// Call 'processRequest_' on the correct object.
self.processRequest_()
});
}).on('error', function(err) {
request.callback_(err, null);
process.nextTick(function(){
// Call 'processRequest_' on the correct object.
self.processRequest_()
});
});
});
}
}
That said, you might consider using the request module to simplify this.
var request = require('request');
Request.prototype.processRequest_ = function() {
var self = this;
if (this.queue_.length > 0) {
var requestData = this.queue_.shift();
request(requestData.url_, function(error, response, body){
requestData.callback_(error, error ? null : JSON.parse(body));
process.nextTick(function(){
self.processRequest_();
});
});
}
};
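For completeness, a minimal usage sketch of the queue (it assumes the QueueableRequest class and the corrected processRequest_ above; the URLs and the thread/reply JSON shape are made up for illustration). Each call to send queues a request whose callback receives (err, parsedBody):
var queue = new Request();

queue.send('http://example.com/api/threads.json', function (err, threads) {
    if (err) {
        return console.error('thread list failed:', err);
    }
    // follow-up requests can be queued from inside the callback
    threads.forEach(function (thread) {
        queue.send('http://example.com/api/threads/' + thread.id + '.json', function (err, replies) {
            if (!err) {
                console.log('thread', thread.id, 'has', replies.length, 'replies');
            }
        });
    });
});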
