Node.js catch and retry on ECONNRESET - javascript

In my AWS Lambda Node.js code, I have the following code that calls the post method to index a document to the AWS Elasticsearch Service:
var endpoint = 'ABC-XYZ.us-east-1.es.amazonaws.com';
exports.handler = function(input, context) {
    ...
    // post documents to the Amazon Elasticsearch Service
    post(endpoint, elasticsearchBulkData, function(error, success, statusCode, failedItems) {
        if (error) {
            console.log('...');
            if (failedItems && failedItems.length > 0) {
                console.log(...);
            }
            // NOTE: Instead of failing, we are forcing a success, as we do not want retries
            context.succeed('Success');
        } else {
            // console.log('Success: ' + JSON.stringify(success));
            context.succeed('Success');
        }
    });
}
...
...
function post(endpoint, body, callback, lastTimeout) {
    lastTimeout || (lastTimeout = 500);
    var requestParams = buildRequest(endpoint, body);
    var request = https.request(requestParams, function(response) {
        var responseBody = '';
        response.on('data', function(chunk) {
            responseBody += chunk;
        });
        response.on('end', function() {
            var info = JSON.parse(responseBody);
            var failedItems;
            var success;
            if (response.statusCode >= 200 && response.statusCode < 299) {
                failedItems = info.items.filter(function(x) {
                    return x.index.status >= 300;
                });
                success = { ... };
            }
            var error = response.statusCode !== 200 || info.errors === true ? {
                "statusCode": response.statusCode,
                "responseBody": responseBody
            } : null;
            callback(error, success, response.statusCode, failedItems);
        });
    }).on('error', function(e) {
        console.error(e.stack || e);
        //callback(e);
        lastTimeout *= 2;
        console.log('lastTimeout: ' + lastTimeout + ' for cluster: ' + endpoint);
        setTimeout(function() {
            post(endpoint, body, callback, lastTimeout);
        }, lastTimeout);
    });
    request.end(requestParams.body);
}
...
At times I get Error: socket hang up ECONNRESET.
My question is: what would be the best way to catch this error and retry?
I added the setTimeout snippet based on this answer, and it looks like it does work, but I'm not sure whether that's the right way to do it.
The Node.js version is 4.3.
I was thinking of using a Promise with resolve and reject, but being a JS newbie I'm not sure how to make use of a promise in my post call.
I also went through this link, but I'm not clear on how I can wrap my post call with fetch_retry.
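For illustration, a promise-based version of this retry logic might look roughly like the sketch below. It reuses the existing buildRequest helper from the question; postOnce, postWithRetry, maxRetries, and delay are invented names for this sketch, not an established pattern from any library. It wraps a single HTTPS request in a promise and retries with doubling delays on network errors such as ECONNRESET:
    // Minimal sketch, assuming the question's buildRequest and https module.
    function postOnce(endpoint, body) {
        return new Promise(function(resolve, reject) {
            var requestParams = buildRequest(endpoint, body);
            var request = https.request(requestParams, function(response) {
                var responseBody = '';
                response.on('data', function(chunk) { responseBody += chunk; });
                response.on('end', function() {
                    resolve({ statusCode: response.statusCode, body: responseBody });
                });
            });
            request.on('error', reject); // ECONNRESET and 'socket hang up' land here
            request.end(requestParams.body);
        });
    }

    function postWithRetry(endpoint, body, maxRetries, delay) {
        return postOnce(endpoint, body).catch(function(err) {
            if (maxRetries <= 0) { throw err; } // give up and propagate the error
            return new Promise(function(resolve) {
                setTimeout(resolve, delay); // wait before the next attempt
            }).then(function() {
                return postWithRetry(endpoint, body, maxRetries - 1, delay * 2);
            });
        });
    }
Calling postWithRetry(endpoint, body, 5, 500) would then behave much like the setTimeout version, except that a final failure surfaces as a rejected promise instead of retrying forever.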

I have a node application that, from time to time, throws an exception that I cannot catch:
Error: read ECONNRESET at TLSWrap.onread (net.js:622:25)
I don't know if it's related to your issue, but it seems so.
After some research it appears to be a bug: https://github.com/nodejs/node/issues/23237, and it has been addressed in the latest version.
Right now I am running Node version 8, and I have noticed that you are using version 4. I will update the production server in the near future; maybe you can try that as well. If the question has not been answered by the time I have updated my server, I will come back here with the results.
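Until an upgrade is possible, one blunt stop-gap (a sketch only, and not specific to that bug) is a process-level handler so the crash is at least logged before the process exits:
    // Last-resort safety net; the process state may be inconsistent
    // after an uncaught exception, so log and exit rather than continue.
    process.on('uncaughtException', function(err) {
        console.error('Uncaught exception:', err.stack || err);
        process.exit(1); // let the supervisor (Lambda, pm2, etc.) restart the process
    });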

Related

How to combine Service worker Pre-fetching with fetching across navigations?

I'm trying to use "pre-fetching" and fetch "collect" techniques to cache JS, CSS, and other assets in a SPA application.
To pre-fetch scripts I have tried code much like this snippet:
self.addEventListener('install', function(event) {
    var now = Date.now();
    var urlsToPrefetch = [
        'static/pre_fetched.txt',
        'static/pre_fetched.html'
    ];
    event.waitUntil(
        caches.open(CURRENT_CACHES.prefetch).then(function(cache) {
            var cachePromises = urlsToPrefetch.map(function(urlToPrefetch) {
                var url = new URL(urlToPrefetch, location.href);
                url.search += (url.search ? '&' : '?') + 'cache-bust=' + now;
                var request = new Request(url, {mode: 'no-cors'});
                return fetch(request).then(function(response) {
                    if (response.status >= 400) {
                        throw new Error('request for ' + urlToPrefetch +
                            ' failed with status ' + response.statusText);
                    }
                    return cache.put(urlToPrefetch, response);
                }).catch(function(error) {
                    console.error('Not caching ' + urlToPrefetch + ' due to ' + error);
                });
            });
            return Promise.all(cachePromises).then(function() {
                console.log('Pre-fetching complete.');
            });
        }).catch(function(error) {
            console.error('Pre-fetching failed:', error);
        })
    );
});
Full code can be checked here
After pre-fetching, I have almost all critical scripts in the cache (such as angular.js, modules and controllers, and maybe some jQuery plugins), so I use a fetch event to collect all the other scripts that require.js loads asynchronously.
self.addEventListener('fetch', function (event) {
    if (event.request.method === "GET" && test_to_know_if_it_is_a_js_or_css) {
        event.respondWith(
            caches.match(event.request)
                .then(function (response) {
                    if (response) {
                        logger && console.log('From Cache', event.request.url);
                        return response;
                    }
                    // IMPORTANT: Clone the request. A request is a stream and
                    // can only be consumed once. Since we are consuming this
                    // once by the cache and once by the browser for fetch, we
                    // need to clone the request.
                    var fetchRequest = event.request.clone();
                    return fetch(fetchRequest).then(
                        function (response) {
                            // Check if we received a valid response
                            if (!response || response.status !== 200 || response.type !== 'basic') {
                                return response;
                            }
                            // IMPORTANT: Clone the response. A response is a stream
                            // and because we want the browser to consume the response
                            // as well as the cache consuming the response, we need
                            // to clone it so we have two streams.
                            var responseToCache = response.clone();
                            caches.open(CURRENT_CACHES['general-cache'])
                                .then(function (cache) {
                                    try {
                                        logger && console.log('Add to Cache', event.request.url);
                                        cache.put(event.request, responseToCache);
                                    } catch (e) {
                                        console.error(e);
                                    }
                                });
                            return response;
                        }
                    );
                })
        );
    }
});
Sorry, I could not find the original script this one is based on.
Both are working very well, but not as expected: the fetch handler adds the resource to the cache again. I think it's because caches.match(event.request) doesn't really match, so I logged both request objects to compare them, the synthetic one created by the pre-fetch and the one cloned from the fetch event.
(Screenshots comparing the synthetic request and the cloned request omitted.)
So I'm not sure whether I can overwrite these properties so that the synthetic request is the same as the cloned one. Can I do that safely? How can I solve this?
PS: This code isn't run as ordinary scripts; the snippet is just there to organize the question.
I did not find any reference to confirm my solution, but it works.
The solution was to create two different caches and to "normalize" the request by cloning it into a synthetic request, removing all references and keeping only the basics:
var CURRENT_CACHES = {
    'prefetch-cache': 'prefetch-cache-v' + CACHE_VERSION, // Prefetch cache
    'general-cache': 'general-cache-v' + CACHE_VERSION,
};
The prefetch-cache is responsible for storing all the files that I want to prefetch in my service worker, and the general-cache is for all other files (this makes sense when you have a SPA and want to accumulate requests such as translation files, JS components, CSS, and other assets).
You can make an array with the URIs of all the files you want to prefetch:
var urlsToPrefetch = [
    // JS
    "plugin/angular/angular.min.js", "plugin/requirejs/require.min.js", "app/main.js", "app/app.js", "app/includes.js",
    // CSS
    "styles/css/print.css", "styles/css/bootstrap.css", "styles/css/fixes.css",
    // HTML
    "app/layout/partials/menu.tpl.html", "app/layout/public.tpl.html",
    // JSON
    "app/i18n/languages.json", "app/i18n/pt-br.json", "app/i18n/en.json"
];
In the install event you should create new Requests for all the files in this array and store them in the prefetch-cache:
self.addEventListener('install', function (event) {
    logger && console.log('Handling install event:', event);
    //var now = Date.now();
    // All of these logging statements should be visible via the "Inspect" interface
    // for the relevant SW accessed via chrome://serviceworker-internals
    if (urlsToPrefetch.length > 0) {
        logger && console.log('Handling install event. Resources to prefetch:', urlsToPrefetch.length, "resources");
        event.waitUntil(
            caches.open(CURRENT_CACHES['prefetch-cache']).then(function (cache) {
                var cachePromises = urlsToPrefetch.map(function (urlToPrefetch) {
                    urlToPrefetch += '?v=' + CACHE_VERSION;
                    // This constructs a new URL object using the service worker's script location
                    // as the base for relative URLs.
                    //var url = new URL(urlToPrefetch + '?v=' + CACHE_VERSION, location.href);
                    var url = new URL(urlToPrefetch, location.href);
                    // Append a cache-bust=TIMESTAMP URL parameter to each URL's query string.
                    // This is particularly important when precaching resources that are later used in the
                    // fetch handler as responses directly, without consulting the network (i.e. cache-first).
                    // If we were to get back a response from the HTTP browser cache for this precaching request
                    // then that stale response would be used indefinitely, or at least until the next time
                    // the service worker script changes triggering the install flow.
                    //url.search += (url.search ? '&' : '?') + 'v=' + CACHE_VERSION;
                    // It's very important to use {mode: 'no-cors'} if there is any chance that
                    // the resources being fetched are served off of a server that doesn't support
                    // CORS (http://en.wikipedia.org/wiki/Cross-origin_resource_sharing).
                    // In this example, www.chromium.org doesn't support CORS, and the fetch()
                    // would fail if the default mode of 'cors' was used for the fetch() request.
                    // The drawback of hardcoding {mode: 'no-cors'} is that the response from all
                    // cross-origin hosts will always be opaque
                    // (https://slightlyoff.github.io/ServiceWorker/spec/service_worker/index.html#cross-origin-resources)
                    // and it is not possible to determine whether an opaque response represents a success or failure
                    // (https://github.com/whatwg/fetch/issues/14).
                    var request = new Request(url, {mode: 'no-cors'});
                    return fetch(request).then(function (response) {
                        logger && console.log('Add to Cache (Prefetch)', url.href);
                        if (!response || response.status !== 200 || response.type !== 'basic') {
                            throw new Error('request for ' + urlToPrefetch +
                                ' failed with status ' + response.statusText);
                        }
                        //var responseToCache = response.clone();
                        // Use the original URL without the cache-busting parameter as the key for cache.put().
                        // return cache.put(urlToPrefetch, responseToCache);
                        return cache.put(urlToPrefetch, response);
                    }).catch(function (error) {
                        logger && console.error('Not caching ' + urlToPrefetch + ' due to ' + error);
                    });
                });
                return Promise.all(cachePromises).then(function () {
                    logger && console.log('Pre-fetching complete.');
                });
            }).catch(function (error) {
                logger && console.error('Pre-fetching failed:', error);
            }));
    }
    // Perform install steps
    // if (urlsToPrefetch.length > 0) {
    //     event.waitUntil(
    //         caches.open(CURRENT_CACHES['perma-cache'])
    //             .then(function (cache) {
    //                 return cache.addAll(urlsToPrefetch);
    //             })
    //     );
    // }
    // `skipWaiting()` forces the waiting ServiceWorker to become the
    // active ServiceWorker, triggering the `onactivate` event.
    // Together with `Clients.claim()` this allows a worker to take effect
    // immediately in the client(s).
    return self.skipWaiting();
});
All other files that should be stored in the cache later must be handled in the fetch event listener and stored in the general-cache:
self.addEventListener('fetch', function (event) {
    //console.log(event);
    if (event.request.method === "GET") {
        var qSFilter = "" + ((event.request.url).split('?'))[0]; // Strip the query string
        //console.log(event.request.url, qSFilter, qSFilter.split(CACHE_SCOPE), CACHE_SCOPE);
        var leUrl = (qSFilter.split(CACHE_SCOPE))[1];
        // It is possible to implement some logic here to skip backend calls and other uncachable calls
        if (/^(app|style|plugin).*(js|css|html|jpe?g|png|gif|json|woff2?)$/.test(leUrl)
            || /^backend\/server\/file\/i18n\/((?!client).+)\//.test(leUrl)
            || /^backend\/server\/static\/images\/.*$/.test(leUrl)
            || /^backend\/server\/static\/style.*$/.test(leUrl)
        ) {
            var url = new URL(leUrl + '?v=' + CACHE_VERSION, location.href);
            var synthetic = new Request(url, {mode: 'no-cors'});
            //console.log(event.request, response.clone(), synthetic);
            event.respondWith(
                // caches.match(event.request)
                caches.match(synthetic)
                    .then(function (response) {
                        // Cache hit - return response
                        if (response) {
                            logger && console.log('From Cache', event.request.url);
                            return response;
                        }
                        // IMPORTANT: Clone the request. A request is a stream and
                        // can only be consumed once. Since we are consuming this
                        // once by the cache and once by the browser for fetch, we
                        // need to clone the request.
                        var fetchRequest = event.request.clone();
                        return fetch(fetchRequest).then(
                            function (response) {
                                // Check if we received a valid response
                                if (!response || response.status !== 200 || response.type !== 'basic') {
                                    return response;
                                }
                                // IMPORTANT: Clone the response. A response is a stream
                                // and because we want the browser to consume the response
                                // as well as the cache consuming the response, we need
                                // to clone it so we have two streams.
                                var responseToCache = response.clone();
                                caches.open(CURRENT_CACHES['general-cache'])
                                    .then(function (cache) {
                                        try {
                                            logger && console.log('Add to Cache', event.request.url, qSFilter, leUrl);
                                            cache.put(event.request, responseToCache);
                                        } catch (e) {
                                            console.error(e);
                                        }
                                    });
                                return response;
                            }
                        );
                    })
            );
        }
    }
});
The full working script could be accessed here:
https://gist.github.com/LeonanCarvalho/0527526a6b784b23facf56fa3cc12d22

Log images after successful http request using Node.js

I'm creating a script that makes a request twice per second to a local server on a camera network; after it gets a positive response that a camera detected someone, I want to log three images.
In the JSON config file I have the triggerURL of the server, the interval port, the dataDir where logged images should be saved, and a track array which contains the URLs of those images and the fileName they should receive.
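For illustration, such a config file might look something like this (a hypothetical sketch based only on the fields described above and used in the code below; all values are invented):

    {
        "triggerURL": "192.168.1.50",
        "interval": 8080,
        "dataDir": "./logged-images",
        "timestamp": "capture-%s.jpg",
        "track": [
            { "url": "http://192.168.1.50:8080/cam1.jpg", "fileName": "cam1" }
        ]
    }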
This is the code of the script I use after reading the JSON file:
var configGet = {
    host: config.triggerURL,
    port: config.interval,
    method: 'GET'
};
setInterval(function () {
    var request = http.request(configGet, function (response) {
        var content = "";
        // Handle data chunks
        response.on('data', function (chunk) {
            content += chunk;
        });
        // Once we're done streaming the response, parse it as json.
        response.on('end', function () {
            var data = JSON.parse(response);
            if (data.track.length > 0) {
                // log images
                var download = function (uri, filename, callback) {
                    request.head(uri, function (err, res, body) {
                        request(uri)
                            .pipe(fs.createWriteStream(filename))
                            .on('close', callback);
                    });
                };
                for (var image in data.track) {
                    var path = config.dataDir + '/' + image.fileName;
                    download(image.url, path.format(config.timestamp), function () {
                        console.log('done');
                    });
                }
            }
        });
    });
    // Report errors
    request.on('error', function (error) {
        console.log("Error while calling endpoint.", error);
    });
    request.end();
}, 500);
I have the following questions:
This method produces some kind of error with the download process of the images. Can you identify it?
Is there a better way of doing this process?
Without running the code or inspecting it more deeply: shouldn't "data = JSON.parse(response)" rather be "data = JSON.parse(content)"? Also, if data is undefined or does not contain "track", then "if (data.track.length > 0)" will throw an error. This can be fixed with "if (data && data.track && data.track.length > 0)".
I cannot think of a much better way. I would break it up into more functions to make the code clearer, though.
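Applied to the snippet above, those two fixes might look like the following sketch (it also iterates the track array with forEach, since for...in loops over the keys rather than the track objects themselves, which is a further problem in the original loop):
    response.on('end', function () {
        var data = JSON.parse(content); // parse the accumulated body, not the response object
        if (data && data.track && data.track.length > 0) { // guard against missing fields
            data.track.forEach(function (image) { // forEach yields the objects, not the keys
                var filePath = config.dataDir + '/' + image.fileName;
                download(image.url, filePath, function () {
                    console.log('done');
                });
            });
        }
    });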

Node.js https get request ECONNRESET

My question is similar to this question: Node Js: is it possible to hit 1000 http get request to some api from a node js server, but their throttling solution did not work for me.
I am making calls to our api/website hosted on Google App Engine to trigger a task that updates the memcache for specific product pages. This function, updateCache, is called from an async.eachSeries. The code is pretty straightforward (this is a sanitized version):
function updateCache(object_name, callback) {
    var options = {
        host: 'www.example.com',
        path: '/update-cache/' + object_name,
        method: 'GET',
        agent: false,
    };
    var req = https.request(options, function(res) {
        res.on('data', function() {
            if (res.statusCode !== 200) {
                console.log('successful');
            }
        });
        res.on('end', function() {
            console.log('end');
            callback();
        });
    });
    req.on('error', function(e) {
        console.log('failed');
        console.error(e.stack);
        callback();
    });
    req.end();
}
It runs perfectly on my Mac, but I need the script to run on a Windows 7 PC, and that is where it gets this error:
events.js:141
throw er; // Unhandled 'error' event
^
Error: read ECONNRESET
at exports._errnoException (util.js:870:11)
at TCP.onread (net.js:552:26)
I followed this solution and it looked something like this:
var req = https.request(options, function(res) {
    res.on('data', function() {
        if (res.statusCode !== 200) {
            // HANDLE
        }
    });
    res.on('end', function() {
        if (res.statusCode !== 200 && app.retry_count < 10) {
            app.retry_count += 1;
            retriggerFunction(param, callback);
        } else {
            app.retry_count = 0;
            return callback();
        }
    });
});
req.on('error', function(e) {
    app.retry_count += 1;
    retriggerFunction(param, callback);
});
req.on('timeout', function(e) {
    app.retry_count += 1;
    retriggerFunction(param, callback);
    req.abort();
});
req.on('uncaughtException', function(e) {
    app.retry_count += 1;
    retriggerFunction(param, callback);
    req.abort();
});
retriggerFunction is just the function that I wrapped this in. It may not be "correct", but it is currently working for me. If anyone has improvements on this, please let me know.
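One possible improvement, offered as a sketch rather than a tested fix: carry the retry count as an argument instead of in shared app state (a single app.retry_count is shared by every in-flight request in the series, so overlapping failures can interfere with each other), and back off between attempts. MAX_RETRIES and the backoff numbers here are invented:
    var MAX_RETRIES = 10; // hypothetical limit

    function updateCache(object_name, callback, retryCount) {
        retryCount = retryCount || 0;
        var options = {
            host: 'www.example.com',
            path: '/update-cache/' + object_name,
            method: 'GET',
            agent: false,
        };
        var retry = function() {
            if (retryCount >= MAX_RETRIES) { return callback(); } // give up
            // Wait a little longer on each attempt before retrying.
            setTimeout(function() {
                updateCache(object_name, callback, retryCount + 1);
            }, 500 * Math.pow(2, retryCount));
        };
        var req = https.request(options, function(res) {
            res.on('data', function() {}); // drain the response so 'end' fires
            res.on('end', function() {
                if (res.statusCode !== 200) { return retry(); }
                callback();
            });
        });
        req.on('error', retry); // covers ECONNRESET, socket hang up, etc.
        req.end();
    }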

Javascript multiple promises not working with q and request middle

So this is my dilemma: I have a list of movies, which I have scraped from a website, and then I want to add additional properties to my newly constructed object (JSON).
The OMDb API which I am using supports searching for a movie by title.
I make a GET request using the request and q modules, and when I receive information from the OMDb API in the callback, I add that data to the object.
The next part is where my problem lies: I want to issue a new request using data from the previous request. I make a new GET request and return it, but the then() function isn't receiving anything, and I can't see what I am doing wrong.
Here is my code:
var promises = [];
films.forEach(function (film) {
    // Get omdbapi information
    promises.push(HttpService.getContent(configExternal.omodburl + '?t=' + film.title.trim() + '&y=' + film.year + '&plot=true&tomatoes=true&r=json').then(function (data) {
        var result = JSON.parse(data);
        if (Boolean(result.Response) === true) {
            film.omdb.push(result);
        }
        var imdbid = result.imdbID;
        return HttpService.getContent(configExternal.themoviedburl + imdbid + '/videos?api_key=' + configExternal.themoviedbkey);
    }).then(function(data) {
        film.trailers = [];
        film.trailers.push(JSON.parse(data));
    }).catch(function (err) {
        logger.error().info('Error getting ' + film.title + ' from omdb, ErrorMessage : ' + err);
    }));
});
//--------------------------------
// When all promises have finished
//--------------------------------
Promise.all(promises).then(function (data, err) {
    // do stuff with the data
});
And here is my getContent function:
var Service = {
    getContent: function(url) {
        var deferred = q.defer();
        request(url, function (error, response, body) {
            if (!error && response.statusCode == 200) {
                deferred.resolve(body);
            } else {
                deferred.reject(error);
            }
        });
        return deferred.promise;
    }
};
Problem solved. There wasn't anything wrong with the request, as Roamer said, but The Movie Database limits you to 40 requests per 10 seconds, which I didn't know :)
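For anyone hitting the same limit, one way to stay under such a quota (a sketch only; the 250 ms spacing is chosen just to keep under 40 requests per 10 seconds) is to chain the requests sequentially with a small delay instead of firing them all at once:
    function delay(ms) {
        return new Promise(function (resolve) { setTimeout(resolve, ms); });
    }

    // Process films one after another, waiting 250 ms between requests.
    films.reduce(function (chain, film) {
        return chain.then(function () {
            var url = configExternal.omodburl + '?t=' + film.title.trim() +
                '&y=' + film.year + '&plot=true&tomatoes=true&r=json';
            return HttpService.getContent(url).then(function (data) {
                film.omdb = film.omdb || [];
                film.omdb.push(JSON.parse(data));
                return delay(250); // pause before the next film's request
            });
        });
    }, Promise.resolve()).then(function () {
        // all films processed sequentially
    });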

Getting a process.tick error when trying to queue up http requests

I made my Request object queue up individual HTTP requests and process them one by one using process.nextTick. However, I am getting an error that I don't know how to solve:
node.js:244
callback();
^
TypeError: undefined is not a function
at process.startup.processNextTick.process._tickCallback (node.js:244:9)
I'm not sure what I am doing wrong. Here is the relevant class:
var Request = function() {
    return this;
};

Request.prototype = {
    queue_: []
};

Request.prototype.send = function(url, done) {
    this.queue_.push(new QueueableRequest(url, done));
    this.processRequest_();
}

Request.prototype.processRequest_ = function() {
    if (this.queue_.length > 0) {
        var request = this.queue_.shift();
        var data = '';
        http.get(request.url_, function(res) {
            res.setEncoding('utf8');
            res.on('data', function(chunk) {
                data += chunk;
            }).on('end', function() {
                request.callback_(null, JSON.parse(data));
                process.nextTick(this.processRequest_);
            }).on('error', function(err) {
                request.callback_(err, null);
                process.nextTick(this.processRequest_);
            });
        });
    }
}
My other question is whether this is a good method of slowing down my HTTP requests. What I am trying to do is this: I make an HTTP request for a list of threads (about 15-20), and then for each thread I make another request to obtain its replies. Sometimes, within replies, I have to make another request for the deeply nested replies. My initial solution was simply to call http.get for every request, but I found that my node.js server stops responding after a few requests, and I have to keep restarting it and refreshing the page. My thought was that I am perhaps sending too many requests at once, so I tried to implement this queue.
Your this inside your event handlers is incorrect, so your this.processRequest_ is undefined.
Request.prototype.processRequest_ = function() {
    // Assign the outer request object to a variable so you can access it.
    var self = this;
    if (this.queue_.length > 0) {
        var request = this.queue_.shift();
        var data = '';
        http.get(request.url_, function(res) {
            res.setEncoding('utf8');
            res.on('data', function(chunk) {
                data += chunk;
            }).on('end', function() {
                request.callback_(null, JSON.parse(data));
                process.nextTick(function(){
                    // Call 'processRequest_' on the correct object.
                    self.processRequest_();
                });
            }).on('error', function(err) {
                request.callback_(err, null);
                process.nextTick(function(){
                    // Call 'processRequest_' on the correct object.
                    self.processRequest_();
                });
            });
        });
    }
}
That said, you might consider using the request module to simplify this.
var request = require('request');

Request.prototype.processRequest_ = function() {
    var self = this;
    if (this.queue_.length > 0) {
        var requestData = this.queue_.shift();
        request(requestData.url_, function(error, response, body){
            requestData.callback_(error, error ? null : JSON.parse(body));
            process.nextTick(function(){
                self.processRequest_();
            });
        });
    }
};
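As a usage sketch (QueueableRequest is assumed to exist as in the question), note one more pitfall: queue_ lives on the prototype, so every Request instance shares the same array. Assigning it in the constructor gives each instance its own queue:
    var Request = function() {
        this.queue_ = []; // per-instance queue instead of a shared prototype array
    };

    var r = new Request();
    r.send('http://example.com/threads.json', function(err, threads) {
        if (err) { return console.error(err); }
        console.log('got', threads.length, 'threads');
    });
With a single shared Request instance this matters little, but with several instances the prototype-level array would interleave their queues unexpectedly.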
