How to combine Service Worker pre-fetching with fetching across navigations? - javascript

I'm trying to use pre-fetching and a fetch "collect" technique to cache JS, CSS, and other assets in a SPA.
To pre-fetch scripts I tried code very much like this snippet:
self.addEventListener('install', function(event) {
  var now = Date.now();
  var urlsToPrefetch = [
    'static/pre_fetched.txt',
    'static/pre_fetched.html'
  ];

  event.waitUntil(
    caches.open(CURRENT_CACHES.prefetch).then(function(cache) {
      var cachePromises = urlsToPrefetch.map(function(urlToPrefetch) {
        var url = new URL(urlToPrefetch, location.href);
        url.search += (url.search ? '&' : '?') + 'cache-bust=' + now;

        var request = new Request(url, {mode: 'no-cors'});
        return fetch(request).then(function(response) {
          if (response.status >= 400) {
            throw new Error('request for ' + urlToPrefetch +
              ' failed with status ' + response.statusText);
          }
          return cache.put(urlToPrefetch, response);
        }).catch(function(error) {
          console.error('Not caching ' + urlToPrefetch + ' due to ' + error);
        });
      });

      return Promise.all(cachePromises).then(function() {
        console.log('Pre-fetching complete.');
      });
    }).catch(function(error) {
      console.error('Pre-fetching failed:', error);
    })
  );
});
The full code can be checked here.
After pre-fetching, almost all of the critical scripts are in the cache (angular.js, modules, controllers, and maybe some jQuery plugins), so I add a fetch event listener to collect the other scripts that require.js loads asynchronously.
self.addEventListener('fetch', function (event) {
  if (event.request.method === "GET" && test_to_know_if_it_is_a_js_or_css) {
    event.respondWith(
      caches.match(event.request)
        .then(function (response) {
          if (response) {
            logger && console.log('From Cache', event.request.url);
            return response;
          }

          // IMPORTANT: Clone the request. A request is a stream and
          // can only be consumed once. Since we are consuming it once
          // by the cache and once by the browser for fetch, we need
          // to clone the request.
          var fetchRequest = event.request.clone();

          return fetch(fetchRequest).then(
            function (response) {
              // Check if we received a valid response
              if (!response || response.status !== 200 || response.type !== 'basic') {
                return response;
              }

              // IMPORTANT: Clone the response. A response is a stream,
              // and because we want the browser to consume the response
              // as well as the cache, we need to clone it so we have
              // two streams.
              var responseToCache = response.clone();

              caches.open(CURRENT_CACHES['general-cache'])
                .then(function (cache) {
                  try {
                    logger && console.log('Add to Cache', event.request.url);
                    cache.put(event.request, responseToCache);
                  } catch (e) {
                    console.error(e);
                  }
                });

              return response;
            }
          );
        })
    );
  }
});
Sorry, I couldn't find the original script this one is based on.
Both are working very well, but not as expected: the second fetch adds the resource to the cache again. I think it's because caches.match(event.request) doesn't really match, so I logged both request objects, the synthetic one created by the pre-fetch and the one cloned in the fetch handler.
The synthetic request:
The cloned request:
So I'm not sure whether I can overwrite those properties so the synthetic request becomes the same as the cloned one. Can I do that safely? How can I solve this?
PS: This code isn't run as a common script; the snippet is just for organization.
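One detail worth noting while debugging this: if the only difference between the cached key and the live request is the query string (for example the cache-bust parameter), caches.match accepts an ignoreSearch option that ignores the query string when matching. A minimal sketch of how that could look (not the approach used in the answer below):
self.addEventListener('fetch', function (event) {
  event.respondWith(
    // ignoreSearch makes "foo.js" and "foo.js?cache-bust=123" match the same entry.
    caches.match(event.request, {ignoreSearch: true}).then(function (response) {
      return response || fetch(event.request);
    })
  );
});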

I didn't find any reference to confirm this solution, but it works.
The solution was to create two different caches and "normalize" the request by cloning it into a synthetic request, removing all the extra properties and keeping only the basics:
var CURRENT_CACHES = {
  'prefetch-cache': 'prefetch-cache-v' + CACHE_VERSION, // Prefetch cache
  'general-cache': 'general-cache-v' + CACHE_VERSION
};
The prefetch-cache stores all the files I want to prefetch in the service worker, and the general-cache is for everything else (this makes sense in a SPA where you want to accumulate requests such as translation files, JS components, CSS, and other assets).
You can make an array with the URIs of all the files you want to prefetch:
var urlsToPrefetch = [
  // JS
  "plugin/angular/angular.min.js", "plugin/requirejs/require.min.js", "app/main.js", "app/app.js", "app/includes.js",
  // CSS
  "styles/css/print.css", "styles/css/bootstrap.css", "styles/css/fixes.css",
  // HTML
  "app/layout/partials/menu.tpl.html", "app/layout/public.tpl.html",
  // JSON
  "app/i18n/languages.json", "app/i18n/pt-br.json", "app/i18n/en.json"
];
In the install event you should create new Requests for all the files in this array and store them in the prefetch-cache:
self.addEventListener('install', function (event) {
  logger && console.log('Handling install event:', event);
  //var now = Date.now();

  // All of these logging statements should be visible via the "Inspect" interface
  // for the relevant SW accessed via chrome://serviceworker-internals
  if (urlsToPrefetch.length > 0) {
    logger && console.log('Handling install event. Resources to prefetch:', urlsToPrefetch.length, "resources");

    event.waitUntil(
      caches.open(CURRENT_CACHES['prefetch-cache']).then(function (cache) {
        var cachePromises = urlsToPrefetch.map(function (urlToPrefetch) {
          urlToPrefetch += '?v=' + CACHE_VERSION;

          // This constructs a new URL object using the service worker's script location as the base
          // for relative URLs.
          //var url = new URL(urlToPrefetch + '?v=' + CACHE_VERSION, location.href);
          var url = new URL(urlToPrefetch, location.href);

          // Append a cache-bust=TIMESTAMP URL parameter to each URL's query string.
          // This is particularly important when precaching resources that are later used in the
          // fetch handler as responses directly, without consulting the network (i.e. cache-first).
          // If we were to get back a response from the HTTP browser cache for this precaching request,
          // then that stale response would be used indefinitely, or at least until the next time
          // the service worker script changes, triggering the install flow.
          //url.search += (url.search ? '&' : '?') + 'v=' + CACHE_VERSION;

          // It's very important to use {mode: 'no-cors'} if there is any chance that
          // the resources being fetched are served off of a server that doesn't support
          // CORS (http://en.wikipedia.org/wiki/Cross-origin_resource_sharing).
          // In this example, www.chromium.org doesn't support CORS, and the fetch()
          // would fail if the default mode of 'cors' was used for the fetch() request.
          // The drawback of hardcoding {mode: 'no-cors'} is that the response from all
          // cross-origin hosts will always be opaque
          // (https://slightlyoff.github.io/ServiceWorker/spec/service_worker/index.html#cross-origin-resources)
          // and it is not possible to determine whether an opaque response represents a success or failure
          // (https://github.com/whatwg/fetch/issues/14).
          var request = new Request(url, {mode: 'no-cors'});
          return fetch(request).then(function (response) {
            logger && console.log('Add to Cache (Prefetch)', url.href);
            if (!response || response.status !== 200 || response.type !== 'basic') {
              throw new Error('request for ' + urlToPrefetch +
                ' failed with status ' + response.statusText);
            }
            //var responseToCache = response.clone();
            // Use the original URL without the cache-busting parameter as the key for cache.put().
            // return cache.put(urlToPrefetch, responseToCache);
            return cache.put(urlToPrefetch, response);
          }).catch(function (error) {
            logger && console.error('Not caching ' + urlToPrefetch + ' due to ' + error);
          });
        });

        return Promise.all(cachePromises).then(function () {
          logger && console.log('Pre-fetching complete.');
        });
      }).catch(function (error) {
        logger && console.error('Pre-fetching failed:', error);
      })
    );
  }

  // Perform install steps
  // if (urlsToPrefetch.length > 0) {
  //   event.waitUntil(
  //     caches.open(CURRENT_CACHES['perma-cache'])
  //       .then(function (cache) {
  //         return cache.addAll(urlsToPrefetch);
  //       })
  //   );
  // }

  // `skipWaiting()` forces the waiting ServiceWorker to become the
  // active ServiceWorker, triggering the `onactivate` event.
  // Together with `Clients.claim()` this allows a worker to take effect
  // immediately in the client(s).
  return self.skipWaiting();
});
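Since skipWaiting() is meant to be paired with Clients.claim() (as the comment above says), a matching activate handler can also clean up caches left over from older CACHE_VERSIONs. A minimal sketch, assuming the CURRENT_CACHES object above:
self.addEventListener('activate', function (event) {
  // Cache names this version of the worker expects to keep.
  var expected = Object.keys(CURRENT_CACHES).map(function (key) {
    return CURRENT_CACHES[key];
  });

  event.waitUntil(
    caches.keys().then(function (cacheNames) {
      return Promise.all(cacheNames.map(function (cacheName) {
        // Delete any cache created by a previous CACHE_VERSION.
        if (expected.indexOf(cacheName) === -1) {
          return caches.delete(cacheName);
        }
      }));
    }).then(function () {
      // Pairs with skipWaiting(): take control of open pages immediately.
      return self.clients.claim();
    })
  );
});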
For all the other files that should be stored in the cache later, you declare the matching logic in the fetch event listener and store those requests in the general-cache:
self.addEventListener('fetch', function (event) {
  //console.log(event);
  if (event.request.method === "GET") {
    var qSFilter = "" + ((event.request.url).split('?'))[0]; // Strip the query string
    //console.log(event.request.url, qSFilter, qSFilter.split(CACHE_SCOPE), CACHE_SCOPE);
    var leUrl = (qSFilter.split(CACHE_SCOPE))[1];

    // It's possible to implement some logic here to skip backend calls and other uncacheable requests
    if (/^(app|style|plugin).*(js|css|html|jpe?g|png|gif|json|woff2?)$/.test(leUrl)
      || /^backend\/server\/file\/i18n\/((?!client).+)\//.test(leUrl)
      || /^backend\/server\/static\/images\/.*$/.test(leUrl)
      || /^backend\/server\/static\/style.*$/.test(leUrl)
    ) {
      var url = new URL(leUrl + '?v=' + CACHE_VERSION, location.href);
      var synthetic = new Request(url, {mode: 'no-cors'});
      //console.log(event.request, response.clone(), synthetic);

      event.respondWith(
        // caches.match(event.request)
        caches.match(synthetic)
          .then(function (response) {
            // Cache hit - return the cached response
            if (response) {
              logger && console.log('From Cache', event.request.url);
              return response;
            }

            // IMPORTANT: Clone the request. A request is a stream and
            // can only be consumed once. Since we are consuming it once
            // by the cache and once by the browser for fetch, we need
            // to clone the request.
            var fetchRequest = event.request.clone();

            return fetch(fetchRequest).then(
              function (response) {
                // Check if we received a valid response
                if (!response || response.status !== 200 || response.type !== 'basic') {
                  return response;
                }

                // IMPORTANT: Clone the response. A response is a stream,
                // and because we want the browser to consume the response
                // as well as the cache, we need to clone it so we have
                // two streams.
                var responseToCache = response.clone();

                caches.open(CURRENT_CACHES['general-cache'])
                  .then(function (cache) {
                    try {
                      logger && console.log('Add to Cache', event.request.url, qSFilter, leUrl);
                      cache.put(event.request, responseToCache);
                    } catch (e) {
                      console.error(e);
                    }
                  });

                return response;
              }
            );
          })
      );
    }
  }
});
The full working script can be accessed here:
https://gist.github.com/LeonanCarvalho/0527526a6b784b23facf56fa3cc12d22
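The detail that makes this work is that the install step and the fetch handler must derive exactly the same cache key. A small sketch of that normalization as a shared helper (the helper name is hypothetical; it assumes CACHE_SCOPE and CACHE_VERSION as defined in the gist):
// Hypothetical helper: turn any incoming URL into the synthetic request used
// as the cache key, so install-time and fetch-time lookups always agree.
function toSyntheticRequest(rawUrl) {
  var path = rawUrl.split('?')[0];                      // drop the original query string
  var relative = (path.split(CACHE_SCOPE))[1] || path;  // path relative to the app scope
  var url = new URL(relative + '?v=' + CACHE_VERSION, location.href);
  return new Request(url, {mode: 'no-cors'});
}
With something like this, both cache.put() at install time and caches.match() at fetch time can go through the same function, so the keys always line up.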

Related

Node.js catch and retry on ECONNRESET

In my AWS Lambda Node.js code, I have the following code that calls the post method to index a document into the AWS Elasticsearch Service:
var endpoint = 'ABC-XYZ.us-east-1.es.amazonaws.com';

exports.handler = function(input, context) {
  ...
  // post documents to the Amazon Elasticsearch Service
  post(endpoint, elasticsearchBulkData, function(error, success, statusCode, failedItems) {
    if (error) {
      console.log('...');
      if (failedItems && failedItems.length > 0) {
        console.log(...);
      }
      // NOTE: Instead of failing, we are forcing a success, as we do not want retries
      context.succeed('Success');
    } else {
      // console.log('Success: ' + JSON.stringify(success));
      context.succeed('Success');
    }
  });
}
...
...
function post(endpoint, body, callback, lastTimeout) {
  lastTimeout || (lastTimeout = 500);
  var requestParams = buildRequest(endpoint, body);
  var request = https.request(requestParams, function(response) {
    var responseBody = '';
    response.on('data', function(chunk) {
      responseBody += chunk;
    });
    response.on('end', function() {
      var info = JSON.parse(responseBody);
      var failedItems;
      var success;
      if (response.statusCode >= 200 && response.statusCode < 299) {
        failedItems = info.items.filter(function(x) {
          return x.index.status >= 300;
        });
        success = { ... };
      }
      var error = response.statusCode !== 200 || info.errors === true ? {
        "statusCode": response.statusCode,
        "responseBody": responseBody
      } : null;
      callback(error, success, response.statusCode, failedItems);
    });
  }).on('error', function(e) {
    console.error(e.stack || e);
    //callback(e);
    lastTimeout *= 2;
    console.log('lastTimeout: ' + lastTimeout + " for cluster: " + endpoint);
    setTimeout(function() {
      post(endpoint, body, callback, lastTimeout);
    }, lastTimeout);
  });
  request.end(requestParams.body);
}
...
...
At times I get Error: socket hang up ECONNRESET.
My question is: what would be the best way to catch this error and retry?
I added the setTimeout snippet based on this answer and it looks like it works, but I'm not sure whether that's the right way to do it.
The Node.js version is 4.3.
I was thinking of using a Promise with resolve and reject, but being a JS newbie I'm not sure how to make use of a promise in my post call.
I also went through this link, but it's not clear to me how I can wrap my post call with fetch_retry.
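For reference, a minimal promise-based sketch of the retry itself, with exponential backoff and a capped number of attempts. Here postOnce is a hypothetical helper (not in the code above) that performs a single https.request and returns a promise; native Promise is available on Node 4.3.
function delay(ms) {
  return new Promise(function (resolve) { setTimeout(resolve, ms); });
}

// Hypothetical wrapper: retry postOnce up to maxRetries times, doubling the
// wait after each failure (500ms, 1000ms, 2000ms, ...).
function postWithRetry(endpoint, body, maxRetries, wait) {
  wait = wait || 500;
  return postOnce(endpoint, body).catch(function (err) {
    if (maxRetries <= 0) { throw err; }
    console.log('retrying ' + endpoint + ' in ' + wait + 'ms after: ' + err);
    return delay(wait).then(function () {
      return postWithRetry(endpoint, body, maxRetries - 1, wait * 2);
    });
  });
}

// e.g. postWithRetry(endpoint, elasticsearchBulkData, 5).then(onSuccess, onGiveUp);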
I have a Node application that, from time to time, throws an exception I cannot catch:
Error: read ECONNRESET at TLSWrap.onread (net.js:622:25)
I don't know if it's related to your issue, but it seems so.
After some research it seems that this is a bug: https://github.com/nodejs/node/issues/23237, and it has been addressed in the latest version.
Right now I am running Node version 8, and I noticed you are using version 4. I will update the production server in the near future; maybe you can try that as well. If the question hasn't been answered by the time I've updated my server, I will come back here with the results.

AngularJS: Migrating $http from 1.5.x to 1.6+ with a Django backend

I have been fumbling with AngularJS for weeks now, trying to cobble together the parts of this, that, and the other thing to create a file-serving web application with a Django backend. I thought things were going well until I found myself trying to upload a file along with all of my other form data. My HTML form consistently showed up as having no file attached during the validation step before sending the request. Well, that's no good! Anyway, this ended up being some manner of unsupported operation, for one reason or another. I turned to ng-file-upload, a third-party file upload service for AngularJS. The most current iteration of ng-file-upload uses AngularJS 1.6-style requests, while my third-party registration application, angular-django-registration-auth, uses pre-1.6 $http.
I need to update the third-party registration application, but it has the following code.
'request': function(args) {
  // Let's retrieve the token from the cookie, if available
  if ($cookies.token) {
    $http.defaults.headers.common.Authorization = 'Token ' + $cookies.token;
  }
  // Continue
  params = args.params || {}
  args = args || {};
  var deferred = $q.defer(),
    url = this.API_URL + args.url,
    method = args.method || "GET",
    params = params,
    data = args.data || {};
  // Fire the request, as configured.
  $http({
    url: url,
    withCredentials: this.use_session,
    method: method.toUpperCase(),
    headers: {'X-CSRFToken': $cookies['csrftoken']},
    params: params,
    data: data
  })
  .success(angular.bind(this, function(data, status, headers, config) {
    deferred.resolve(data, status);
  }))
  .error(angular.bind(this, function(data, status, headers, config) {
    console.log("error syncing with: " + url);
    // Set request status
    if (data) {
      data.status = status;
    }
    if (status == 0) {
      if (data == "") {
        data = {};
        data['status'] = 0;
        data['non_field_errors'] = ["Could not connect. Please try again."];
      }
      // or if the data is null, then there was a timeout.
      if (data == null) {
        // Inject a non field error alerting the user
        // that there's been a timeout error.
        data = {};
        data['status'] = 0;
        data['non_field_errors'] = ["Server timed out. Please try again."];
      }
    }
    deferred.reject(data, status, headers, config);
  }));
  return deferred.promise;
},
Beginning at var deferred = (this is defining a promise object, right?), I am unclear on what is going on. The assignments are easy to understand for the most part, with the exception of the promise object (how does data = args.data || {}; end up on the right-hand side of one of the $http provider's compound assignments?), but what exactly is happening in the success and error cases where angular.bind() is called? I can't seem to find any good examples of where Angular appears to bind to a promise.
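For what it's worth, angular.bind(self, fn) simply returns fn with this fixed to self (with any extra arguments pre-applied), much like fn.bind(self); it does not bind anything to the promise. A tiny sketch:
var service = {
  API_URL: '/api/',
  log: function (msg) { console.log(this.API_URL + msg); }
};

var bound = angular.bind(service, service.log);  // roughly service.log.bind(service)
bound('users');                                  // logs "/api/users" even when called with no receiver
So in the snippet above, .success(angular.bind(this, function(...) {...})) only guarantees that this inside the callback is the service object; the callback itself is what resolves or rejects the deferred.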
I fixed this with then() calls after finding some decent resources. Here is what my code ended up looking like; I'm including my logging because it may help someone else.
"request": function(args) {
// Let"s retrieve the token from the cookie, if available
if($cookies.get("token")){
$http.defaults.headers.common.Authorization = "Token " + $cookies.get("token");
}
// Continue
params = args.params || {};
args = args || {};
var deferred = $q.defer(),
url = this.API_URL + args.url,
method = args.method || "GET",
params = params,
data = args.data || {};
// Fire the request, as configured.
$http({
url: url,
withCredentials: this.use_session,
method: method.toUpperCase(),
headers: {"X-CSRFToken": $cookies["csrftoken"]},
params: params,
data: data
})
.then(function(response) {
console.log("Success case: " + url);
console.log("Headers: " + JSON.stringify(response.headers(),null, 4));
console.log("Config: " + response.config);
console.log("Status: " + response.status);
console.log('Response: ');
console.log('JSON: ' + JSON.stringify(response.data, null, 4));
deferred.resolve(response.data, response.status);
}, function(response) {
console.log("Error case: " + url);
console.log("Headers: " + JSON.stringify(response.headers(),null, 4));
console.log("Config: " + response.config);
console.log("Status: " + response.status);
console.log('Response: ');
console.log('JSON:' + JSON.stringify(response.data, null, 4));
if(response.data){ response.data.status = status; }
if(status == 0){
if(response.data == ""){
response.data = {};
response.data["status"] = 0;
response.data["non_field_errors"] = ["Could not connect. Please try again."];
}
if(data == null){
response.data = {};
response.data["status"] = 0;
response.data["non_field_errors"] = ["Server timed out. Please try again."];
}
}
deferred.reject(response.data, response.status, response.headers, response.config);
});
return deferred.promise;
},
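As a side note, $http already returns a promise, so the explicit $q.defer() wrapper can usually be dropped and the chain returned directly. A minimal sketch of the same request method written that way (it assumes $cookies.get throughout, i.e. AngularJS 1.4+):
// Sketch: return the $http promise directly instead of wrapping it in $q.defer().
'request': function (args) {
  args = args || {};
  if ($cookies.get('token')) {
    $http.defaults.headers.common.Authorization = 'Token ' + $cookies.get('token');
  }
  return $http({
    url: this.API_URL + args.url,
    withCredentials: this.use_session,
    method: (args.method || 'GET').toUpperCase(),
    headers: {'X-CSRFToken': $cookies.get('csrftoken')},
    params: args.params || {},
    data: args.data || {}
  }).then(function (response) {
    return response.data;                    // resolved value, same as deferred.resolve(response.data)
  }, function (response) {
    return $q.reject(response.data || {});   // keep the rejection flowing to callers
  });
},
Returning response.data keeps callers unchanged as long as they only ever used the first resolved value (a promise carries a single value anyway).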

AngularJS $http request caching on browser back button

Here's my code.
$scope.init = function () {
  $scope.urlParam = $location.search();
  if ($scope.urlParam.token == undefined || $scope.urlParam.id == undefined) {
    $scope.is_start = false;
    alert("您没有权限投票"); // "You do not have permission to vote"
  } else {
    $http.get('/vote/validate_querystring?token=' + $scope.urlParam.token + "&id=" + $scope.urlParam.id)
      .success(function (response) {
        if (response.status == 3) {
          $scope.is_start = false;
          alert("您没有权限投票"); // "You do not have permission to vote"
        } else if (response.status == 4) {
          $scope.is_start = false;
          alert("您已完成投票"); // "You have already completed voting"
        } else {
          $http.get('/vote/r_vote_setting')
            .success(function (response) {
              if (response.status == 1) {
                $scope.is_start = false;
                alert("投票尚未开始"); // "Voting has not started yet"
              } else {
                $scope.is_start = true;
                $scope.voteData = response.data;
              }
            });
        }
      })
  }
};
I put this function in ng-init so it is invoked every time the page is loaded.
As you can see, there are two $http.get calls in this function. The problem is that when I hit the back button to return to this page, $http.get('/vote/validate_querystring?token=') is loaded from the browser cache while $http.get('/vote/r_vote_setting') makes a new request to the server. I found this in the Chrome console.
Request URL:http://localhost:8080/vote/validate_querystring?token=202cb962ac59075b964b07152d234b70&id=1
Request Method:GET
Status Code:200 OK (from disk cache)
Remote Address:[::1]:8080
Referrer Policy:no-referrer-when-downgrade
Request URL:http://localhost:8080/vote/r_vote_setting
Request Method:GET
Status Code:200 OK
Remote Address:[::1]:8080
Referrer Policy:no-referrer-when-downgrade
I want to know why this happens and how to make both of them send a request to the server rather than use the cache when hitting the back button.
You can use the cache: false option of $http:
$http.get('/vote/validate_querystring?token=' + $scope.urlParam.token + "&id=" + $scope.urlParam.id, {cache: false});
Use $httpProvider to disable caching:
myModule.config(['$httpProvider', function($httpProvider) {
  // initialize get if not there
  if (!$httpProvider.defaults.headers.get) {
    $httpProvider.defaults.headers.get = {};
  }
  // Answer edited to include suggestions from comments
  // because the previous version of the code introduced browser-related errors

  // disable IE ajax request caching
  $httpProvider.defaults.headers.get['If-Modified-Since'] = 'Mon, 26 Jul 1997 05:00:00 GMT';
  // extra
  $httpProvider.defaults.headers.get['Cache-Control'] = 'no-cache';
  $httpProvider.defaults.headers.get['Pragma'] = 'no-cache';
}]);
You can also include a time parameter in the call; that makes every call unique, avoiding unwanted caching for only that specific call instead of having to mess with the $http default settings and potentially affecting every other call on the page.
$http.get(sampleUrl, {
  params: {
    time: new Date().getTime()
  }
}).error(function (err) {
  console.log('Error encountered: ' + err);
}).success(function (data) {
  console.log(data);
});
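If you only want this behaviour for a subset of URLs, a request interceptor can append the timestamp for you. A rough sketch (the '/vote/' check is only an illustration):
myModule.config(['$httpProvider', function ($httpProvider) {
  $httpProvider.interceptors.push(function () {
    return {
      request: function (config) {
        // Append a unique timestamp only to GET calls against the vote endpoints.
        if (config.method === 'GET' && config.url.indexOf('/vote/') === 0) {
          config.params = config.params || {};
          config.params.t = Date.now();
        }
        return config;
      }
    };
  });
}]);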

Javascript multiple promises not working with q and request middleware

So this is my dilemma. I have a list of movies, which I have scraped from a website, and I want to add additional properties to my newly constructed object (JSON).
The OMDb API, which I am using, supports searching for a movie by title.
I make a GET request using the request and q middleware. When I receive information from the OMDb API, I add that data to the object in the callback.
The next part is where my problem lies. I want to return a new request using data from the previous request. I make a new GET request and return it, but the then() function isn't returning anything, and I don't see what I am doing wrong.
Here is my code:
var promises = [];
films.forEach(function (film) {
  // Get omdbapi information
  promises.push(HttpService.getContent(configExternal.omodburl + '?t=' + film.title.trim() + '&y=' + film.year + '&plot=true&tomatoes=true&r=json').then(function (data) {
    var result = JSON.parse(data);
    if (Boolean(result.Response) === true) {
      film.omdb.push(result);
    }
    var imdbid = result.imdbID;
    return HttpService.getContent(configExternal.themoviedburl + imdbid + '/videos?api_key=' + configExternal.themoviedbkey);
  }).then(function (data) {
    film.trailers = [];
    film.trailers.push(JSON.parse(data));
  }).catch(function (err) {
    logger.error().info('Error getting ' + film.title + ' from omdb, ErrorMessage : ' + err);
  }));
});

//--------------------------------
// When all promises have finished
//--------------------------------
Promise.all(promises).then(function (data, err) {
  // do stuff with the data
});
And here is my getContent function:
var Service = {
  getContent: function (url) {
    var deferred = q.defer();
    request(url, function (error, response, body) {
      if (!error && response.statusCode == 200) {
        deferred.resolve(body);
      } else {
        deferred.reject(error);
      }
    });
    return deferred.promise;
  }
};
Problem solved. There wasn't anything wrong with the request, as Roamer said, but The Movie Database limits you to 40 requests per 10 seconds, which I didn't know :)
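If anyone hits the same limit, one way to stay under it is to fire the lookups in batches with a pause in between. A rough sketch, where lookupFilm stands for the per-film promise chain above (the batch size and delay are illustrative):
function delay(ms) {
  return new Promise(function (resolve) { setTimeout(resolve, ms); });
}

// Process films in chunks so the API's rate limit is never exceeded.
function fetchInBatches(films, batchSize, pauseMs) {
  var results = [];
  var batches = [];
  for (var i = 0; i < films.length; i += batchSize) {
    batches.push(films.slice(i, i + batchSize));
  }
  return batches.reduce(function (chain, batch) {
    return chain.then(function () {
      // lookupFilm(film) = the HttpService.getContent(...).then(...) chain from the question
      return Promise.all(batch.map(lookupFilm));
    }).then(function (batchResults) {
      results = results.concat(batchResults);
      return delay(pauseMs); // breathe before the next batch
    });
  }, Promise.resolve()).then(function () {
    return results;
  });
}

// e.g. fetchInBatches(films, 30, 10000).then(function (allResults) { /* ... */ });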

Abstracting the making of requests with Node.js

Traditionally I use jQuery for all my JS code, but I'm tasked with launching a simple API with Node.js. Today is my first day with Node, but I know enough about JS and closures to do OK. One of the tasks of the API is to authenticate against a third-party service, and being a Python guy, I wanted to abstract all my outbound request calls like so:
EDIT
var http = require('http');

var init = function(nconf) {
  var methods = {
    /*
      Helper method to create the request header
    */
    headers: function(params) {
      var content = JSON.stringify(params);
      return {
        'Content-Type': 'application/json',
        'Content-Length': content.length
      }
    },
    /*
      Helper method to create the options object
      which is used in making any type of
      outbound http request
    */
    options: function(host, path, method, params) {
      return {
        host: host,
        port: 80,
        path: path,
        method: method,
        headers: methods.headers(params)
      }
    },
    /*
      Helper method to abstract the making of
      outbound http requests
    */
    call: function(options, params, success, err) {
      var req = http.request(options, success);
      req.on('error', err);
      req.write(params);
      req.end();
    },
    /*
      Helper method to parse the response
      and return a json object
    */
    parse: function(res, result) {
      var responseString = '';
      res.on('data', function(data) {
        responseString += data;
      });
      res.on('end', function() {
        result = JSON.parse(responseString);
      });
    },
    /*
      API method to return the latest
      release and tag names
    */
    latest: function(req, res, next) {
      // // var url = nconf.get('prod:authenticate');
      // authenticate the test user
      msg = methods.authenticate(nconf.get('test:user'), nconf.get("test:password"));
      res.send(msg);
      next();
    },
    /*
      Method used by this API to authenticate users.
      It is used to de-couple this API from the Database
      Schema by calling out to the TTCPAS App and requesting it
      to handle the authentication
    */
    authenticate: function(username, password) {
      // create post parameters with API key
      var params = {"username": username, "password": password, "api_key": nconf.get('api_key')};
      // construct options object with params and header
      var options = methods.options(nconf.get('ttcpas:host'), nconf.get('ttcpas:auth_url'), 'POST', params);
      var result;
      var success = function(res) {
        res.setEncoding('utf-8');
        methods.parse(res, result);
      };
      methods.call(options, params, success, function(err) {});
      while (typeof(result.statusCode) == 'undefined') {
        // wait 1 second;
        setTimeout(function() {
          console.log("waiting on request at " + nconf.get('ttcpas:host') + nconf.get('ttcpas:auth_url'));
        }, 1000);
      }
      // then down here
      if (result.statusCode == 200) { return result; } // success
      if (result.statusCode == 403) { return "forbidden"; } // forbidden
    }
  }
  return methods;
};

module.exports.init = init;
@jfriend00 As I said, I don't know how Node.js code is supposed to be styled. I just wanted to abstract as much as possible to make the code clean and reusable.
Now when I do http://localhost:9000/latest/
I get:
{"code":"InternalError","message":"first argument must be a string or Buffer"}
Uhhh, this part will simply not work:
while (typeof(result.statusCode) == 'undefined') {
  // wait 1 second;
  setTimeout(function() {
    console.log("waiting on request at " + nconf.get('ttcpas:host') + nconf.get('ttcpas:auth_url'));
  }, 1000);
}
If result.statusCode is ever undefined, this will spin forever, piling up setTimeout() calls in the event queue until eventually something fills up or you run out of memory.
Because node.js is primarily single threaded, you can't loop waiting for something to change. Because you never finish this while loop, no other node.js code gets to run, so result.statusCode can never change. Thus, you have an infinite loop here.
All of your nodejs code needs to be event driven, not spin/wait loops. FYI, this is similar to browser-based Javascript.
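Concretely, that means authenticate() can't return its result synchronously; it has to hand the result to a callback (or resolve a promise) once the response's 'end' event fires, and the body passed to req.write() must be a string or Buffer. A rough callback-based sketch of the same flow (names follow the question's code, error handling trimmed):
authenticate: function (username, password, done) {
  var params = {username: username, password: password, api_key: nconf.get('api_key')};
  var options = methods.options(nconf.get('ttcpas:host'), nconf.get('ttcpas:auth_url'), 'POST', params);
  var req = http.request(options, function (res) {
    var body = '';
    res.setEncoding('utf-8');
    res.on('data', function (chunk) { body += chunk; });
    res.on('end', function () {
      // Deliver the result through the callback instead of spin-waiting on it.
      if (res.statusCode === 200) { return done(null, JSON.parse(body)); }
      if (res.statusCode === 403) { return done(null, 'forbidden'); }
      done(new Error('unexpected status ' + res.statusCode));
    });
  });
  req.on('error', done);
  req.write(JSON.stringify(params)); // write() needs a string or Buffer
  req.end();
},

// and in latest():
// methods.authenticate(nconf.get('test:user'), nconf.get('test:password'), function (err, msg) {
//   res.send(err ? err.message : msg);
//   next();
// });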
