My question is similar to this question: Node Js: is it possible to hit 1000 http get requests to some api from a node js server — but their throttling solution did not work for me.
I am making calls to our api/website hosted on Google App Engine to trigger a task to update the memcache for specific product pages. This function updateCache is called from an async.eachSeries. The code is pretty straight forward: (this is a sanitized version)
// Issues a GET to /update-cache/<object_name> on the App Engine host and
// invokes `callback` once the response has been fully consumed, or after a
// request-level error.
function updateCache(object_name, callback) {
    var options = {
        host : 'www.example.com',
        path : '/update-cache/' + object_name,
        method : 'GET',
        agent : false,  // disable keep-alive pooling: fresh socket per request
    };
    var req = https.request(options, function(res) {
        // A 'data' listener is required to drain the response so that
        // 'end' will fire.
        res.on('data', function() {
            if (res.statusCode !== 200) {
                // BUG FIX: the original logged 'successful' here, but this
                // branch only runs for non-200 responses.
                console.log('request failed with status ' + res.statusCode);
            }
        });
        res.on('end', function() {
            console.log('end');
            callback();
        });
    });
    req.on('error', function(e) {
        console.log('failed');
        console.error(e.stack);
        callback();
    });
    req.end();
}
It runs perfectly on my Mac machine but I need the script to run on a Windows PC using Windows 7 and that is where it gets this error:
events.js:141
throw er; // Unhandled 'error' event
^
Error: read ECONNRESET
at exports._errnoException (util.js:870:11)
at TCP.onread (net.js:552:26)
I followed this solution and it looked something like this:
// Retry wrapper: on a non-200 response or a socket error, re-invoke
// `retriggerFunction` until it succeeds or 10 attempts have been made.
var req = https.request(options, function(res) {
    res.on('data', function() {
        if (res.statusCode !== 200) {
            // HANDLE
        }
    });
    res.on('end', function() {
        if (res.statusCode !== 200 && app.retry_count < 10) {
            app.retry_count += 1;
            retriggerFunction(param, callback);
        } else {
            app.retry_count = 0;
            return callback();
        }
    });
});
req.on('error', function(e) {
    app.retry_count += 1;
    retriggerFunction(param, callback);
});
// NOTE(review): 'timeout' only fires when a timeout has been armed via
// `options.timeout` or `req.setTimeout(ms)` — confirm one of those is set,
// otherwise this handler is dead code.
req.on('timeout', function(e) {
    app.retry_count += 1;
    retriggerFunction(param, callback);
    req.abort();
});
// BUG FIX: removed the req.on('uncaughtException', ...) listener that was
// here. 'uncaughtException' is an event emitted on `process`, not on a
// ClientRequest, so that handler could never fire (dead code).
retriggerFunction would just be the function that I wrapped this in. It may not be "correct", but it is currently working for me. If anyone has improvements on this, please let me know.
Related
In my AWS Lambda Node.js code, I've following code that calls the post method to index a document to AWS Elasticsearch service:
// Elasticsearch cluster endpoint (snippet is elided with '...' placeholders).
var endpoint = 'ABC-XYZ.us-east-1.es.amazonaws.com';
// Lambda entry point: bulk-indexes documents into the cluster, then always
// reports success so the Lambda framework does not retry the invocation.
exports.handler = function(input, context) {
...
// post documents to the Amazon Elasticsearch Service
post(endpoint, elasticsearchBulkData, function(error, success, statusCode, failedItems) {
if (error) {
console.log('...');
if (failedItems && failedItems.length > 0) {
console.log(...);
}
// NOTE: Instead of failing, we are forcing a success, as we do not want retries
context.succeed('Success');
} else {
// console.log('Success: ' + JSON.stringify(success));
context.succeed('Success');
}
});
}
...
...
// POSTs `body` to the cluster; on a socket-level error it retries with
// exponential backoff starting at `lastTimeout` ms (default 500).
function post(endpoint, body, callback, lastTimeout) {
lastTimeout || (lastTimeout = 500);
var requestParams = buildRequest(endpoint, body);
var request = https.request(requestParams, function(response) {
var responseBody = '';
response.on('data', function(chunk) {
responseBody += chunk;
});
response.on('end', function() {
// NOTE(review): JSON.parse throws on a non-JSON body (e.g. an HTML error
// page) and the exception is not caught here.
var info = JSON.parse(responseBody);
var failedItems;
var success;
if (response.statusCode >= 200 && response.statusCode < 299) {
failedItems = info.items.filter(function(x) {
return x.index.status >= 300;
});
success = { ...};
}
// error is non-null for any non-200 status or when the bulk response
// reports item-level errors.
var error = response.statusCode !== 200 || info.errors === true ? {
"statusCode": response.statusCode,
"responseBody": responseBody
} : null;
callback(error, success, response.statusCode, failedItems);
});
}).on('error', function(e) {
console.error(e.stack || e);
//callback(e);
// Exponential backoff: double the delay, then re-issue the same post.
// NOTE(review): there is no retry cap, so a persistent failure retries
// forever — consider bounding the number of attempts.
lastTimeout *= 2;
console.log('lastTimeout: ' + lastTimeout + " for cluster: " + endpoint) ;
setTimeout(function() {
post(endpoint, body, callback, lastTimeout);
}, lastTimeout);
});
request.end(requestParams.body);
}
...
At times I get Error: socket hang up ECONNRESET.
My question is: What would be the best way to catch this error and retry?
I added the setTimeout snippet based on this answer and it looks like it does work but I'm not sure if that's the right way to do.
Node.js version is 4.3.
I was thinking of using Promise with resolve and reject but being a JS Newbie, I'm not sure how to make use of promise in my post call.
I also went through this link but not clear on how can I wrap my post call with fetch_retry
I have a node application that, from time to time throws an exception that I can not catch:
Error: read ECONNRESET at TLSWrap.onread (net.js:622:25)
I don't know if it's related to your issue, but it seems so.
After some research it seems that is a bug: https://github.com/nodejs/node/issues/23237 and it has been addressed in the last version.
Right now I am running node version 8, and I have noticed that you are using version 4. I will update the production server in the near future, maybe you can try that also. If the question does not have an answer until I have updated my server, I will come back here with the results.
I'm creating a script that will make a request 2 times per second to a localserver of cameras network and after it gets a positive response that camera detected someone I want to log three images.
In the json config file I have the triggerURL of the server, the interval port, the dataDir where logged images should be saved and a track array which contains the url of those images and the fileName they should receive.
This is the code of the script I use after reading the JSON file:
// Poll configuration for the camera trigger endpoint (host/port read from
// the JSON config file).
var configGet = {
host: config.triggerURL
, port: config.interval
, method: 'GET'
};
// Poll the endpoint twice per second; when the response reports tracked
// objects, download their images.
setInterval(function () {
var request = http.request(configGet, function (response) {
var content = "";
// Handle data chunks
response.on('data', function (chunk) {
content += chunk;
});
// Once we're done streaming the response, parse it as json.
response.on('end', function () {
// BUG(review): this parses the response OBJECT, not the accumulated
// body — should be JSON.parse(content).
var data = JSON.parse(response);
// NOTE(review): throws if data is undefined or has no `track`; guard
// with `data && data.track && data.track.length > 0`.
if (data.track.length > 0) {
//log images
var download = function (uri, filename, callback) {
// BUG(review): `request` here is the http.ClientRequest declared
// above, which shadows the `request` npm module this code appears
// to expect — .head()/request(uri) are not ClientRequest methods.
request.head(uri, function (err, res, body) {
request(uri)
.pipe(fs.createWriteStream(filename))
.on('close', callback);
});
};
// NOTE(review): for...in yields array KEYS (index strings), so
// `image` is not an element — image.fileName / image.url will be
// undefined. Use for...of or data.track.forEach.
for (var image in data.track) {
var path = config.dataDir + '/' + image.fileName
download(image.url, path.format(config.timestamp), function () {
console.log('done');
});
}
}
});
// Report errors
request.on('error', function (error) {
console.log("Error while calling endpoint.", error);
});
request.end();
}, 500);
});
I have the following questions:
This method produces some kind of error with the download process of the images. Can you identify it?
Is there a better way of doing this process?
Without running the code or deeper inspection; should not "data = JSON.parse(response)" rather be "data = JSON.parse(content)"? Also, if data is undefined or does not contain "track" the "if (data.track.length > 0)" will throw an error. This can be fixed with "if (data && data.track && data.track.length > 0)".
I can not think of a very much better way. I would break it up more in functions to make the code more clear though.
I'm trying to replicate the phantomJS netlog.js functionality, only in nodeJS. I'm using the phantomjs-node module as a bridge.
Normally, this would be run headlessly in the command line using phantomjs netlog.js http://www.google.com/. It would return a lot of json containing all network requests and responses.
What I'm doing here is trying to run the code from netlog.js inside of a page created using the phantomjs-node module (ignoring the line var page = require('webpage').create() from netlog.js).
While the code doesn't break, I'm not getting the return of the json. What's wrong here? Do I need to somehow pipe the page request?
In app.js:
var phantom = require('phantom');
// NOTE(review): implicit global (no var/let/const).
siteUrl = "http://www.google.com/"
phantom.create(function (ph) {
ph.createPage(function (page) {
// NOTE(review): `system` is a PhantomJS module; it does not exist in the
// Node.js runtime where this callback executes (use `process` instead).
var system = require('system'),
address;
page.open(siteUrl, function (status) {
// console.log("opened " + siteUrl +"\n",status+"\n");
// NOTE(review): this page.evaluate (and the nested page.open) appear to
// be copied from the standalone netlog.js example; with phantomjs-node,
// callbacks such as onResourceRequested cannot be assigned directly —
// they must be installed via page.set('callbackName', fn).
page.evaluate(function () {
if (system.args.length === 1) {
console.log('Usage: netlog.js <some URL>');
// NOTE(review): there is no `phantom` global inside Node.js.
phantom.exit(1);
} else {
console.log(system.args[1])
address = system.args[1];
page.onResourceRequested = function (req) {
console.log('requested: ' + JSON.stringify(req, undefined, 4));
};
page.onResourceReceived = function (res) {
console.log('received: ' + JSON.stringify(res, undefined, 4));
};
page.open(address, function (status) {
if (status !== 'success') {
console.log('FAIL to load the address');
}
phantom.exit();
});
}
// NOTE(review): `thirdLayerLinks` is never defined in this snippet.
}, function finished(result) {
ph.exit();
},thirdLayerLinks);
});
});
}, {
dnodeOpts: {
weak: false
}
});
You made a mistake during copy-paste. There shouldn't be a page.evaluate call and only one page.open call. You took a little too much from the basic phantomjs-node code.
PhantomJS and Node.js have different runtimes and vastly different modules. There is no phantom reference. Additionally there is no system in node. You probably mean process.
Then the docs say the following:
Callbacks can't be set directly, instead use page.set('callbackName', callback)
Fixed code:
// Log every network request/response made while loading `address`,
// then shut the phantom process down.
var phantom = require('phantom');
var address = "http://google.com/";

phantom.create(function (ph) {
    ph.createPage(function (page) {
        // Callbacks cannot be assigned directly on the bridged page object;
        // they must be installed through page.set(name, fn).
        var logRequested = function (req) {
            console.log('requested: ' + JSON.stringify(req, undefined, 4));
        };
        var logReceived = function (res) {
            console.log('received: ' + JSON.stringify(res, undefined, 4));
        };
        page.set("onResourceRequested", logRequested);
        page.set("onResourceReceived", logReceived);
        page.open(address, function (status) {
            if (status !== 'success') {
                console.log('FAIL to load the address');
            }
            ph.exit();
        });
    });
}, {
    dnodeOpts: {
        weak: false
    }
});
I need to keep hammering the same ajax request every 2 seconds or so until a response is received. This is because the device it is being sent to goes to sleep for 3 seconds at a time--it needs to catch the request when it's awake. Is there a simple way to do this by adding something to my code? Without using jquery...
This what I have for only sending one request:
// Sends `postData` to `url` via POST and reports the outcome through
// `callback(status, payload)`: status 0 = success (payload is the response
// text), status 1 = failure (payload is an error message).
// Returns the underlying request object so the caller can abort it.
function ePOST(url, postData, callback) {
    var postReq = AjaxRequest();
    postReq.onreadystatechange = function() {
        if (postReq.readyState == 4) {
            // NOTE(review): `error` is not a standard XMLHttpRequest
            // property — presumably set by the AjaxRequest wrapper; confirm.
            if (postReq.error) {
                callback(1, "Request had an error.");
                alert('postReq Error');
            } else {
                var stat;
                try {
                    // Reading .status can throw for aborted/failed requests
                    // in some older browsers.
                    stat = postReq.status;
                } catch (err) {
                    callback(1, "Failed to get HTTP status from server.");
                    return;
                }
                // Status 0 occurs for file:// URLs and some local responses.
                if (stat == 200 || stat == 0) {
                    callback(0, postReq.responseText);
                } else {
                    callback(1, "Unexpected HTTP Status: "
                            + postReq.status);
                    alert('Unexpected HTTP Status: '
                            + postReq.status);
                }
            }
        }
    }
    if (postReq.overrideMimeType){
        postReq.overrideMimeType("text/xml");
    }
    // BUG FIX: removed `setTimeout('', 1000)` — the string-eval form with an
    // empty body does nothing after one second and does NOT delay the
    // request below; it was dead, misleading code.
    postReq.open("POST", url, true);
    postReq.send(postData);
    return postReq;
}
I think you should be able to do this with very little modification to that function.
var intervalId = window.setInterval(function() {
epost(<url>,<data>,function(error,response) {
if (!error) {
window.clearInterval(intervalId);
} else {
alert(response); // or handle error some other way
}
// do something with the data
});
},2000);
I'm not 100% sure that intervalId will be available to the closure but if not you can always make it like this:
var intervalId;
intervalId = window.setInterval(....
The only issue I can think of is if the ajax call takes longer than 2 seconds to return, in which case you may not clear the interval in time.
I made my Request object queue up individual HTTP requests and process them one by one using process.nextTick. However, I am getting an error that I don't know how to solve:
node.js:244
callback();
^
TypeError: undefined is not a function
at process.startup.processNextTick.process._tickCallback (node.js:244:9)
I'm not sure what I am doing wrong. Here is the relevant class.
// Queue-based HTTP request dispatcher.
var Request = function() {
    // BUG FIX: the queue used to be declared on Request.prototype
    // (`queue_: []`), which made one array shared by EVERY Request
    // instance. Each instance now owns its own queue.
    this.queue_ = [];
    return this;
};
// Public entry point: enqueue a request for `url` and kick off queue
// processing. `done` is invoked as done(err, parsedBody) when it settles.
Request.prototype.send = function(url, done) {
    var job = new QueueableRequest(url, done);
    this.queue_.push(job);
    this.processRequest_();
}
// Drains one queued request; re-schedules itself via process.nextTick once
// the response settles.
Request.prototype.processRequest_ = function() {
if (this.queue_.length > 0) {
var request = this.queue_.shift();
var data = '';
http.get(request.url_, function(res) {
res.setEncoding('utf8');
res.on('data', function(chunk) {
data += chunk;
}).on('end', function() {
request.callback_(null, JSON.parse(data));
// BUG(review): inside this handler `this` is no longer the Request
// instance, so this.processRequest_ is undefined — the cause of the
// "undefined is not a function" TypeError raised by process.nextTick.
process.nextTick(this.processRequest_);
}).on('error', function(err) {
request.callback_(err, null);
// BUG(review): same incorrect `this` binding as above.
process.nextTick(this.processRequest_);
});
});
}
}
My other question is whether this is a good method to slowing down my HTTP requests? What I am trying to do is this... I make an HTTP request for a list of threads (about 15-20), and then for each thread, I make another request to obtain its replies. Sometimes within replies, I have to make another request for the deeply nested replies. My initial solution was simply call http.get for every request, but I find that my node.js stops responding after a few requests and I have to keep restarting the server and refreshing the page. My thought was that I am perhaps sending too many requests at once, so I tried to implement this queue.
Your this inside your event handlers is incorrect, so your this.processRequest_ is undefined.
// Drains one queued request, then re-schedules itself on the next tick so
// requests are processed strictly one at a time.
Request.prototype.processRequest_ = function() {
    // Capture the Request instance: `this` inside the nested event handlers
    // would otherwise not refer to it.
    var outer = this;
    if (this.queue_.length === 0) {
        return;
    }
    var pending = this.queue_.shift();
    var body = '';
    http.get(pending.url_, function(res) {
        res.setEncoding('utf8');
        res.on('data', function(part) {
            body += part;
        }).on('end', function() {
            pending.callback_(null, JSON.parse(body));
            process.nextTick(function() {
                // Invoke 'processRequest_' on the captured instance.
                outer.processRequest_();
            });
        }).on('error', function(err) {
            pending.callback_(err, null);
            process.nextTick(function() {
                // Invoke 'processRequest_' on the captured instance.
                outer.processRequest_();
            });
        });
    });
}
That said, you might consider using the request module to simplify this.
var request = require('request');
// Same queue drain as above, but delegating the HTTP work to the `request`
// module, which collects the whole body before invoking the callback.
Request.prototype.processRequest_ = function() {
    var self = this;
    if (this.queue_.length > 0) {
        var requestData = this.queue_.shift();
        request(requestData.url_, function(error, response, body){
            // BUG FIX: the original referenced `err`, which is not defined
            // in this scope — the callback parameter is named `error`.
            requestData.callback_(error, error ? null : JSON.parse(body));
            process.nextTick(function(){
                self.processRequest_();
            });
        });
    }
};