Cross-domain ServiceWorker load balancing - javascript

I'm trying to implement something like cross-domain load balancing with the ServiceWorker API.
My concept is:
After install, on every fetch event I try to access the main domain (https://example.com/).
If that succeeds, I return the response to the user with event.respondWith(__response);
If it fails (times out or throws any other exception), I make a CORS request to another server (https://balancer.com/) which returns another accessible domain (https://mirror1.example.com), and the browser should be redirected there.
I'm stuck on the redirection step.
My current code is here:
self.oninstall = function (event) {
  event.waitUntil(self.skipWaiting());
};
self.onactivate = function (event) {
  event.waitUntil(self.clients.claim());
};
self.initialUrl = false;
self.onfetch = async function (event) {
  if (!self.initialUrl)
    self.initialUrl = event.request.url;
  if (self.initialUrl) {
    event.respondWith(self.tryAccess(event))
  } else {
    event.respondWith(fetch(event.request));
  }
};
self.tryAccess = async (event) => {
  return new Promise(async (resolve, reject) => {
    self.clients
      .matchAll({type: 'window'})
      .then(async (clients) => {
        for (var i in clients) {
          var _c = clients[0];
          if (_c.url === event.request.url) {
            try {
              let __tryResponse = await fetch(event.request);
              resolve(__tryResponse);
              return;
            } catch (e) {
              let __json = await (await fetch("https://balancer.com/")).json();
              return _c.navigate(__json.path).then(client => client.focus());
            }
          } else {
            resolve();
          }
        }
      });
  });
};

Getting a reference to a WindowClient and forcibly changing its URL from inside of a fetch handler isn't the right way to redirect.
Instead, inside of your fetch handler, you can respond with a redirection response created by Response.redirect(). From the perspective of the browser, this will be treated just like any other redirection that might have originated from the server.
One thing to note is that if you initially request a subresource via a same-origin URL that results in a redirect to a cross-origin response, you might run into some issues. If your original requests are for cross-origin URLs and your potential redirects are also to cross-origin URLs, I think you'll be fine.
self.addEventListener('fetch', (event) => {
  const fetchWithRedirection = async () => {
    try {
      // Use fetch(), caches.match(), etc. to get a response.
      const response = await ...;
      // Optional: also check response.ok, but that
      // will always be false for opaque responses.
      if (response) {
        return response;
      }
      // If we don't have a valid response, trigger catch().
      throw new Error('Unable to get a response.');
    } catch (error) {
      // Use whatever logic you need to get the redirection URL.
      const redirectionURL = await ...;
      if (redirectionURL) {
        // HTTP 302 indicates a temporary redirect.
        return Response.redirect(redirectionURL, 302);
      }
      // If we get to this point, redirection isn't possible,
      // so just trigger a NetworkError.
      throw error;
    }
  };
  // You will probably want to wrap this in an if() to ensure
  // that it's a request that you want to handle with redirection.
  if (/* some routing criteria */) {
    event.respondWith(fetchWithRedirection());
  } else {
    // Optionally use different response generation logic.
    // Or just don't call event.respondWith(), and the
    // browser will proceed without service worker involvement.
  }
});
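For the balancer setup described in the question, the placeholders might be filled in roughly like this. This is only a sketch: it assumes the balancer answers with JSON such as { "path": "https://mirror1.example.com" }, mirroring the __json.path field in the original code.
self.addEventListener('fetch', (event) => {
  const fetchWithRedirection = async () => {
    try {
      // Try the main origin first; a network failure lands in catch().
      return await fetch(event.request);
    } catch (error) {
      // Ask the balancer for a currently reachable mirror.
      // Assumption: it responds with JSON like { "path": "https://mirror1.example.com" }.
      const balancerResponse = await fetch('https://balancer.com/');
      const { path } = await balancerResponse.json();
      if (path) {
        // Preserve the requested path and query on the mirror origin.
        const requested = new URL(event.request.url);
        return Response.redirect(path + requested.pathname + requested.search, 302);
      }
      throw error;
    }
  };
  // Only handle top-level navigations this way.
  if (event.request.mode === 'navigate') {
    event.respondWith(fetchWithRedirection());
  }
});
A real implementation would probably also want its own timeout (for example via AbortController), since a plain fetch() can hang for a long time before rejecting.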

Related

How to prevent fetch() from returning offline pages?

The JavaScript code below retrieves some text from the server using the Fetch API.
fetch("index.php?user_id=1234", {
method: "GET"
}).then(function(response) {
return response.text();
}).then(function(output) {
document.getElementById("output").innerHTML = output;
});
But during network errors, it retrieves the offline page (offline.html) because of this service worker:
"use strict";
self.addEventListener("install", function() {
self.skipWaiting();
});
self.addEventListener("activate", function(activation) {
activation.waitUntil(
caches.keys().then(function(cache_names) {
for (let cache_name of cache_names) {
caches.delete(cache_name);
};
caches.open("client_cache").then(function(cache) {
return cache.add("offline.html");
});
})
);
});
self.addEventListener("fetch", function(fetching) {
fetching.respondWith(
caches.match(fetching.request).then(function(cached_response) {
return cached_response || fetch(fetching.request);
}).catch(function() {
return caches.match("offline.html");
})
);
});
I want to let the fetch request know about the network error.
And I do not want to use window.navigator. So, what can I do?
(I prefer vanilla solutions.)
You should structure your service worker's fetch event handler so that it only returns offline.html when there's a network error/cache miss and the original request is for a navigation. If the original request is not a navigation, then responding with offline.html is (as you've seen) going to result in getting back HTML for every failure.
self.addEventListener('fetch', (event) => {
  event.respondWith(
    caches.match(event.request).then((cachedResponse) => {
      return cachedResponse || fetch(event.request);
    }).catch((error) => {
      if (event.request.mode === 'navigate') {
        return caches.match('offline.html');
      }
      throw error;
    })
  );
});
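With that change, non-navigation requests reject on a network error instead of resolving with offline.html, so the page's own fetch() call can detect the failure. A minimal sketch of how the original snippet might handle it (the message text is just an example):
fetch("index.php?user_id=1234", {
  method: "GET"
}).then(function(response) {
  return response.text();
}).then(function(output) {
  document.getElementById("output").innerHTML = output;
}).catch(function(error) {
  // Reached when the network request fails and there is no cached match.
  document.getElementById("output").textContent = "Network error - please try again later.";
});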

How to handle HTTPS JSON-format errors from APIs in Node.js?

INTRODUCTION
I am implementing a function for making any kind of HTTPS request to any endpoint (using the native https module). When I make a request to a specific API, I get an error response in JSON format, like this:
{
  "error": {
    "code": 404,
    "message": "ID not found"
  }
}
How can I handle this kind of error? At first, I assumed it would be handled in
request.on("error", (err) => {
reject(err);
});
HTTPS request function code
I have added '<---------' comments in the relevant parts of the code.
const https = require("https");
exports.httpsRequest = function (options, body = null) {
  /*
  This function is useful for making requests over the HTTPs protocol
  */
  return new Promise((resolve, reject) => {
    const request = https.request(options, (response) => {
      // Get the response content type
      const contentType =
        response.headers["content-type"] &&
        response.headers["content-type"].split(";")[0];
      // Cumulate data
      let chuncks = [];
      response.on("data", (chunck) => {
        chuncks.push(chunck);
      });
      response.on("end", () => {
        // Concat all received chunks
        let response = Buffer.concat(chuncks);
        // Some responses might be in JSON format...
        if (contentType === "application/json") {
          // Jsonify the response
          response = JSON.parse(response);
        }
        // (For the future) TODO - Check and parse more content types if needed.
        // Resolve the promise with the HTTPs response
        resolve(response); // <--------- The JSON format error responses are resolved too!!
      });
    });
    // Reject on request error
    request.on("error", (err) => {
      // <------------- At a first moment, I supposed that all error responses were handled in this part of the code
      reject(err);
    });
    // Write the body
    if (body) {
      request.write(body);
    }
    // Close HTTPs connection.
    request.end();
  });
};
Question
Why is the error response not handled in request.on("error", ...)?
Thank you. I would appreciate any help or suggestions.
request.on("error", ...) only fires for network-level failures (DNS errors, connection resets, and the like); an HTTP 404 with a JSON body is still a successful request at that level. You need a separate code path that calls reject() when the content type isn't what you were expecting, and you also need a try/catch around the JSON parsing so parse errors are caught and rejected too. You can solve both issues with this code:
exports.httpsRequest = function (options, body = null) {
  /*
  This function is useful for making requests over the HTTPs protocol
  */
  return new Promise((resolve, reject) => {
    const request = https.request(options, (response) => {
      // Get the response content type
      const contentType =
        response.headers["content-type"] &&
        response.headers["content-type"].split(";")[0];
      // Cumulate data
      let chuncks = [];
      response.on("data", (chunck) => {
        chuncks.push(chunck);
      });
      response.on("end", () => {
        // Concat all received chunks
        let response = Buffer.concat(chuncks);
        // Some responses might be in JSON format...
        if (contentType === "application/json") {
          try {
            // Jsonify the response
            response = JSON.parse(response);
            resolve(response);
            return;
          } catch (e) {
            reject(e);
            return;
          }
        }
        reject(new Error("Not JSON content-type"));
      });
    });
    // Reject on request error
    request.on("error", (err) => {
      reject(err);
    });
    // Write the body
    if (body) {
      request.write(body);
    }
    // Close HTTPs connection.
    request.end();
  });
};
FYI, libraries such as got() and the others listed here all do this work for you automatically and have a lot of other useful features. You don't really need to build this yourself.
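For comparison, here is roughly what the same request looks like with got. This is a sketch from memory against got's v11-style promise API (the URL is made up), so check the docs for the version you actually install:
const got = require("got");

async function fetchUser(id) {
  try {
    // got rejects non-2xx responses with an HTTPError by default,
    // and responseType: "json" parses the body for you.
    const { body } = await got(`https://api.example.com/users/${id}`, {
      responseType: "json"
    });
    return body;
  } catch (error) {
    if (error.response) {
      // e.g. 404 and { error: { code: 404, message: "ID not found" } }
      console.error(error.response.statusCode, error.response.body);
    }
    throw error;
  }
}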

Chrome tab duplicate not making network requests

I have 3 API calls on application load (componentDidMount).
When I reload the page or open a new tab, all the API calls are made, but when duplicating the tab only one of them is fired.
How do I trigger all the API calls on tab duplication?
Note:
Browser: Chrome.
You can use a MemoPromise helper; it's an effective way to deal with these asynchronous requests:
class MemoPromise {
  constructor(getPromise) {
    this.cache = {};
    this.getPromise = getPromise;
    this.request = this.request.bind(this);
  }
  request({ uniqueKey, ...rest }) {
    if (!uniqueKey) {
      return Promise.reject(new Error('Unique key not passed'));
    }
    if (this.cache[uniqueKey]) {
      return this.cache[uniqueKey];
    }
    const promise = this.getPromise(rest);
    this.cache[uniqueKey] = promise
      .then((res) => {
        delete this.cache[uniqueKey];
        return res;
      })
      .catch((err) => {
        delete this.cache[uniqueKey];
        throw err;
      });
    return this.cache[uniqueKey];
  }
}
In the example above, I have created a MemoPromise class whose instances memoize the promise returned by the function passed to the constructor until that promise resolves or rejects.
Here is how it is used:
const memoPromise = new MemoPromise(fn);
// invocation
const { request } = memoPromise;
request({ uniqueKey: url, apiType, url, payload });
// not required now
// fn({ apiType, url, payload });
After integrating the MemoPromise class, you should no longer face this problem with your API requests; it deals effectively with duplicate requests.
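The snippet above assumes an fn that actually issues the request. A minimal, purely illustrative sketch of what such a function might look like (fetch-based; the parameter handling is an assumption based on the invocation shown above):
// Hypothetical request function passed to the MemoPromise constructor.
function fn({ apiType = 'GET', url, payload }) {
  return fetch(url, {
    method: apiType,
    headers: { 'Content-Type': 'application/json' },
    body: payload ? JSON.stringify(payload) : undefined
  }).then((response) => {
    if (!response.ok) {
      throw new Error(`Request to ${url} failed with status ${response.status}`);
    }
    return response.json();
  });
}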

How to avoid sending multiple duplicate AJAX requests in axios

Is it possible to automatically throttle all requests going to a particular list of endpoints using axios? Perhaps using an axios interceptor?
Currently I throttle the user action that sends the axios request, but the problem with that is that I have to write it everywhere I have a user action that results in an AJAX request. Like this:
const throttledDismissNotification = throttle(dismissNotification, 1000)

const dismiss = (event: any) => {
  throttledDismissNotification();
};

render() {
  return (
    <Button onClick={dismiss}>Dismiss Notification</Button>
  )
}
This results in a lot of clutter and I was wondering if this could be automated.
Something like:
if (request.url in listOfEndpointsToThrottle && request.params in cacheOfPreviousRequestsToThisEndpoint) {
  StopRequest();
}
Obviously this is pseudocode but you get the idea.
Perhaps you could try to use the Cancellation feature that axios provides.
With it, you can ensure that you don't have any two (or more, depending on your implementation) similar requests in a pending state.
Below is a small, simplified example of how to ensure that only the latest request is processed. You can adjust it a bit to make it function like a pool of requests.
import axios, { CancelToken } from 'axios';

const pendingRequests = {};

// Adds a cancel token to the request config (axios reads cancelToken from the
// top-level config, not from the headers) and cancels any pending request
// that shares the same id.
const makeCancellable = (config, requestId) => {
  if (!requestId) {
    return config;
  }
  if (pendingRequests[requestId]) {
    // cancel the existing request with this id
    pendingRequests[requestId].cancel();
  }
  const source = CancelToken.source();
  pendingRequests[requestId] = source;
  return {
    ...config,
    cancelToken: source.token
  };
};

const request = ({
  url,
  method = 'GET',
  headers,
  id
}) => {
  const requestConfig = makeCancellable({
    url,
    method,
    headers: headers || {}
  }, id);
  return axios.request(requestConfig)
    .then((res) => {
      delete pendingRequests[id];
      return ({ data: res.data });
    })
    .catch((error) => {
      delete pendingRequests[id];
      if (axios.isCancel(error)) {
        console.log(`A request to url ${url} was cancelled`); // cancelled
      } else {
        return handleReject(error); // handleReject: your own error handler
      }
    });
};

export default request;
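Usage might then look something like this for the dismiss button in the question (the module path and endpoint URL are made up):
import request from './request';

const dismiss = () => {
  // Clicking again before the first call settles cancels the pending request,
  // so only the latest one is processed.
  request({
    url: '/api/notifications/dismiss', // hypothetical endpoint
    method: 'POST',
    id: 'dismiss-notification'
  });
};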
It's quite easy to throttle an axios request itself. The real headache is how to handle the promises that are returned from nullified requests. What is considered sane behavior when dealing with promises that are returned from a nullified axios request? Should they stay pending forever?
I don't see any perfect solution to this problem. But I came to a solution that is kind of cheating:
What if we don't throttle the axios call, instead we throttle the actual XMLHttpRequest?
This makes things way easier, because it avoids the promise problem, and it's easier to implement. The idea is to implement a cache for recent requests, and if a new request matches a recent one, you just pull the result from cache and skip the XMLHttpRequest.
Because of the way axios interceptors work, the following snippet can be used to skip a certain XHR call conditionally:
// This should be the *last* request interceptor to add
axios.interceptors.request.use(function (config) {
/* check the cache, if hit, then intentionally throw
* this will cause the XHR call to be skipped
* but the error is still handled by response interceptor
* we can then recover from error to the cached response
**/
if (requestCache.isCached(config)) {
const skipXHRError = new Error('skip')
skipXHRError.isSkipXHR = true
skipXHRError.request = config
throw skipXHRError
} else {
/* if not cached yet
* check if request should be throttled
* then open up the cache to wait for a response
**/
if (requestCache.shouldThrottle(config)) {
requestCache.waitForResponse(config)
}
return config;
}
});
// This should be the *first* response interceptor to add
axios.interceptors.response.use(function (response) {
requestCache.setCachedResponse(response.config, response)
return response;
}, function (error) {
/* recover from error back to normality
* but this time we use an cached response result
**/
if (error.isSkipXHR) {
return requestCache.getCachedResponse(error.request)
}
return Promise.reject(error);
});
I have a similar problem; through my research it seems there is no good solution. All I saw were ad hoc solutions, so I opened an issue for axios hoping someone can answer my question: https://github.com/axios/axios/issues/2118
I also found this article, Throttling Axios requests, but I did not try the solution it suggests.
And I have a related discussion: My implementation of debounce axios request left the promise in pending state forever, is there a better way?
I finished one. @hackape, thank you for your answer; the code is as follows:
const pendings = {}
const caches = {}
const cacheUtils = {
  getUniqueUrl: function (config) {
    // you can set the rule based on your own requirement
    return config.url + '&' + config.method
  },
  isCached: function (config) {
    let uniqueUrl = this.getUniqueUrl(config)
    return caches[uniqueUrl] !== undefined
  },
  isPending: function (config) {
    let uniqueUrl = this.getUniqueUrl(config)
    if (!pendings[uniqueUrl]) {
      pendings[uniqueUrl] = [config]
      return false
    } else {
      console.log(`cache url: ${uniqueUrl}`)
      pendings[uniqueUrl].push(config)
      return true
    }
  },
  setCachedResponse: function (config, response) {
    let uniqueUrl = this.getUniqueUrl(config)
    caches[uniqueUrl] = response
    if (pendings[uniqueUrl]) {
      pendings[uniqueUrl].forEach(configItem => {
        configItem.isFinished = true
      })
    }
  },
  getError: function(config) {
    const skipXHRError = new Error('skip')
    skipXHRError.isSkipXHR = true
    skipXHRError.requestConfig = config
    return skipXHRError
  },
  getCachedResponse: function (config) {
    let uniqueUrl = this.getUniqueUrl(config)
    return caches[uniqueUrl]
  }
}

// This should be the *last* request interceptor to add
axios.interceptors.request.use(function (config) {
  // to avoid careless bugs, only requests that explicitly declare the *canCache* parameter can use the cache
  if (config.canCache) {
    if (cacheUtils.isCached(config)) {
      let error = cacheUtils.getError(config)
      throw error
    }
    if (cacheUtils.isPending(config)) {
      return new Promise((resolve, reject) => {
        let interval = setInterval(() => {
          if (config.isFinished) {
            clearInterval(interval)
            let error = cacheUtils.getError(config)
            reject(error)
          }
        }, 200)
      });
    } else {
      // the head of the cacheable-requests queue gets the response by a real http request
      return config
    }
  } else {
    return config
  }
});
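The request interceptor above references setCachedResponse and getCachedResponse, so it presumably needs a companion response interceptor along the lines of hackape's, adapted to cacheUtils and its requestConfig property. A sketch of what that pairing might look like (this part is an assumption, not from the original answer):
// This should be the *first* response interceptor to add
axios.interceptors.response.use(function (response) {
  // only cache responses for requests that opted in via canCache
  if (response.config.canCache) {
    cacheUtils.setCachedResponse(response.config, response)
  }
  return response;
}, function (error) {
  // recover skipped requests by returning the cached response
  if (error.isSkipXHR) {
    return cacheUtils.getCachedResponse(error.requestConfig)
  }
  return Promise.reject(error);
});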

How do I notify my app that it's receiving cached responses from a service worker?

I want to use a service worker to cache responses that can be used when the user is offline or my app's backend is down. For user experience reasons, I'd like to show a notification to the user that the app's backend currently can't be reached and that cached content is being served instead. What's the best way to do that? I can add a header to the response in the service worker, but I'm not sure that's the "right way"... it seems like there should be a more straightforward pattern. This is my service worker code:
self.addEventListener('fetch', event => {
  console.log(`fetch event`, event);
  event.respondWith(doFetch(event.request));
});

// fetch from the network, fall back to the cache
function doFetch(request) {
  return fetch(request)
    .then(response => {
      return caches.open(CACHE)
        .then(cache => {
          cache.put(request, response.clone());
          return response;
        })
    })
    .catch(error => {
      console.warn(`fetch to ${request.url} failed`, error);
      return fromCache(request);
    });
}

function fromCache(request) {
  return caches.open(CACHE)
    .then(cache => cache.match(request))
    .then(match => {
      if (match) {
        // response.clone doesn't work here because I need to modify it
        return cloneResponse(match);
      } else {
        throw new Error(`no match for ${request.url}`);
      }
    });
}

// this clones a response in a way that lets me modify it
function cloneResponse(response) {
  let init = {
    status: response.status,
    statusText: response.statusText,
    headers: { 'X-From-SW-Cache': 'true' }
  };
  response.headers.forEach((v, k) => {
    init.headers[k] = v;
  });
  return response.text().then((body) => new Response(body, init));
}
Adding a header to the response is definitely one option.
Another option to consider is to use postMessage() to send a message to the client page letting it know that a cached response is being used. You could extend that logic to also send a message to the client letting it know when a network response is used, if you want to toggle some UI element once you're back to using the network.
Here's the code to accomplish that in your service worker:
async function postToClient(clientId, message) {
  const client = await clients.get(clientId);
  client.postMessage(message);
}

async function responseLogic(event) {
  const cache = await caches.open('cache-name');
  try {
    const response = await fetch(event.request);
    await cache.put(event.request, response.clone());
    // Note: clientId lives on the FetchEvent itself, not on the Request.
    event.waitUntil(postToClient(event.clientId, 'FROM_NETWORK'));
    return response;
  } catch (error) {
    const cachedResponse = await cache.match(event.request);
    if (cachedResponse) {
      event.waitUntil(postToClient(event.clientId, 'FROM_CACHE'));
      return cachedResponse;
    }
    throw error;
  }
}

self.addEventListener('fetch', (event) => {
  // Check to see if this is a request for your API:
  if (event.request.url === 'https://example.com/api') {
    event.respondWith(responseLogic(event));
  }
});
And then on your client page, you can register a message listener to respond to those messages from the service worker by showing or hiding some UI element, depending on whether the last response came from the network or cache:
navigator.serviceWorker.addEventListener('message', (event) => {
  if (event.data === 'FROM_NETWORK') {
    // Do something in your UI.
  } else if (event.data === 'FROM_CACHE') {
    // Do something in your UI.
  }
});
