I have built a React progressive web application that makes use of service workers.
The service worker gets registered and is activated.
I have been trying to detect the "activate" event using this:
service-worker.js
navigator.serviceWorker.addEventListener("activate", function (event) {
console.log("service worker activated");
});
I added that at the end of the service-worker file. But, this event never gets triggered and I have no idea why.
I also tried to implement push notifications and trigger them from the backend. For this, I needed a "push" event listener that would listen to these events from the server:
navigator.serviceWorker.addEventListener("push", async function (event) {
const message = await event.data.json();
let { title, description, image } = message;
await event.waitUntil(showPushNotification(title, description, image));
});
This is how showPushNotification is defined:
export function showPushNotification(title, description, image) {
if (!("serviceWorker" in navigator)) {
console.log("Service Worker is not supported in this browser");
return;
}
navigator.serviceWorker.ready.then(function (registration) {
registration.showNotification(title, {
body: description,
icon: image,
actions: [
{
title: "Say hi",
action: "Say hi",
},
],
});
});
}
I tested calling that function manually and it successfully triggers a push notification.
This is the server code that triggers the push notification:
const sendPushNotification = async (user_id, title, description, image) => {
const search_option = { user: user_id };
const users_subscriptions = await PushNotificationSubscription.find(
search_option
);
const number_of_users_subscriptions = users_subscriptions.length;
const options = {
vapidDetails: {
subject: "mailto:xxxx@xxxx.com",
publicKey: VAPID_PUBLIC_KEY,
privateKey: VAPID_PRIVATE_KEY,
},
};
let push_notif_sending_results = {};
for (let i = 0; i < number_of_users_subscriptions; i++) {
const user_subscription = users_subscriptions[i];
await webPush
.sendNotification(
user_subscription,
JSON.stringify({
title,
description,
image,
}),
options
)
.then((notif_send_result) => {
push_notif_sending_results[i] = { success: notif_send_result };
})
.catch((error) => {
push_notif_sending_results[i] = { error: error };
});
}
return push_notif_sending_results;
};
This is the part responsible for sending the push notification:
webPush
.sendNotification(
user_subscription,
JSON.stringify({
title,
description,
image,
}),
options
)
And it's successfully executed as it returns a 201 HTTP response.
So the "push" event listener is supposed to detect it and trigger a push notification.
I think everything regarding the push notification has been successfully implemented, and the problem is in how the "push" event listener is added, since the "activate" event listener doesn't work either.
So I tried moving the two event listeners to right after the registration of the service worker succeeds:
function registerValidSW(swUrl, config) {
navigator.serviceWorker.register(swUrl).then((registration) => {
registration.addEventListener("activate", (event) => {
console.log(
"🚀 ~ file: serviceWorker.js:159 ~ navigator.serviceWorker.register ~ event",
event
);
});
registration.addEventListener("push", async function (event) {
const message = await event.data.json();
let { title, description, image } = message;
await event.waitUntil(
showPushNotification(title, description, image)
);
});
});
}
But, it's still the same result.
Neither the "push" nor the "activate" event listeners get triggered.
Any idea what's going on?
Here's the whole service-worker file:
service-worker.js
import axios from "axios";
const isLocalhost = Boolean(
window.location.hostname === "localhost" ||
// [::1] is the IPv6 localhost address.
window.location.hostname === "[::1]" ||
// 127.0.0.0/8 are considered localhost for IPv4.
window.location.hostname.match(
/^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/
)
);
export function register(config) {
if (process.env.NODE_ENV === "production" && "serviceWorker" in navigator) {
// The URL constructor is available in all browsers that support SW.
const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href);
if (publicUrl.origin !== window.location.origin) {
// Our service worker won't work if PUBLIC_URL is on a different origin
// from what our page is served on. This might happen if a CDN is used to
// serve assets;
return;
}
window.addEventListener("load", () => {
const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`;
if (isLocalhost) {
// This is running on localhost. Let's check if a service worker still exists or not.
checkValidServiceWorker(swUrl, config);
// Add some additional logging to localhost, pointing developers to the
// service worker/PWA documentation.
} else {
// Is not localhost. Just register service worker
console.log(
"Is not localhost. Just register a service worker, by calling registerValidSW"
);
registerValidSW(swUrl, config);
}
});
}
}
async function subscribeToPushNotifications(serviceWorkerReg) {
let subscription = await serviceWorkerReg.pushManager.getSubscription();
if (subscription === null) {
const dev_public_vapid_key = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
const prod_public_vapid_key =
"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
const public_vapid_key = isLocalhost
? dev_public_vapid_key
: prod_public_vapid_key;
subscription = await serviceWorkerReg.pushManager.subscribe({
userVisibleOnly: true,
applicationServerKey: public_vapid_key,
});
axios
.post("/api/push_notif_subscription/subscribe", subscription)
.then((response) => {})
.catch((error) => {});
}
}
export function showPushNotification(title, description, image) {
if (!("serviceWorker" in navigator)) {
console.log("Service Worker is not supported in this browser");
return;
}
navigator.serviceWorker.ready.then(function (registration) {
registration.showNotification(title, {
body: description,
icon: image,
actions: [
{
title: "Say hi",
action: "Say hi",
},
],
});
});
}
function registerValidSW(swUrl, config) {
navigator.serviceWorker.register(swUrl).then((registration) => {
subscribeToPushNotifications(registration);
registration.onupdatefound = () => {
const installingWorker = registration.installing;
if (!installingWorker) {
return;
}
installingWorker.onstatechange = () => {
if (installingWorker.state === "installed") {
if (navigator.serviceWorker.controller) {
// At this point, the updated precached content has been fetched,
// but the previous service worker will still serve the older
// content until all client tabs are closed.
// Execute callback
if (config && config.onUpdate) {
config.onUpdate(registration);
}
} else {
// At this point, everything has been precached.
// It's the perfect time to display a
// "Content is cached for offline use." message.
// Execute callback
if (config && config.onSuccess) {
config.onSuccess(registration);
}
}
}
};
};
});
}
function checkValidServiceWorker(swUrl, config) {
// Check if the service worker can be found. If it can't, reload the page.
fetch(swUrl, {
headers: { "Service-Worker": "script" },
}).then((response) => {
// Ensure the service worker exists, and that we really are getting a JS file.
const contentType = response.headers.get("content-type");
if (
response.status === 404 ||
(!!contentType && contentType.indexOf("javascript") === -1)
) {
// No service worker found. Probably a different app. Reload the page.
navigator.serviceWorker.ready.then((registration) => {
registration.unregister().then(() => {
window.location.reload();
});
});
} else {
// Service worker found. Proceed as normal.
console.log("Service worker found, calling registerValidSW");
registerValidSW(swUrl, config);
}
});
}
export function unregister() {
if ("serviceWorker" in navigator) {
navigator.serviceWorker.ready.then((registration) => {
registration.unregister();
});
}
}
navigator.serviceWorker.addEventListener("activate", function (event) {
console.log("service worker activated");
});
navigator.serviceWorker.addEventListener("push", async function (event) {
const message = await event.data.json();
let { title, description, image } = message;
await event.waitUntil(showPushNotification(title, description, image));
});
The "push" and "activate" events are part of the ServiceWorkerGlobalScope, as defined in the Service Worker API.
Push notifications must be handled within the service worker itself.
Therefore only the service worker can register an "activate" event listener.
The same applies for a "push" listener.
Especially for the "push" listener this makes sense.
The idea of push events is to receive them, even if the main app (in this case the website) has been closed.
The service worker is an exception, as it even runs without the page being loaded.
Therefore move the "push" event into your service worker.
Your code (within the service worker) may look like this:
self.addEventListener("push", function (event) {
  // event.data.json() is synchronous, so event.waitUntil() can be called
  // while the event is still being dispatched
  const message = event.data.json();
  let { title, description, image } = message;
  event.waitUntil(showPushNotification(title, description, image));
});
function showPushNotification(title, description, image) {
  // self.registration is the service worker's own registration
  return self.registration.showNotification(title, {
    body: description,
    icon: image,
    actions: [
      {
        title: "Say hi",
        action: "Say hi",
      },
    ],
  });
}
The rest seems fine to me.
Update (Some more explanation)
I took a more careful look at your service-worker.js and it seems it contains general methods for registering the service worker.
As mentioned above the main app and the service worker are two completely separate chunks of code, running in different spaces. So this means everything which is not supposed to run in the service worker itself must be put outside of the service-worker.js. The service worker (in your case) should only contain the code for handling push notifications. It's important that you do not include the "service-worker.js" within your application.
In your case, you could separate these functions into a service-worker-register.js that contains all the functions for managing the service worker registration which should not be executed within the service worker itself (isLocalhost, register, subscribeToPushNotifications, registerValidSW, checkValidServiceWorker, and unregister). Take the code snippet from above and adjust the code left within the service worker accordingly; a sketch of the result follows below.
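For reference, a minimal sketch of what the trimmed-down service-worker.js could look like after that split (it only handles push notifications; the listener bodies mirror the snippet above, and self.registration is the service worker's own registration):

self.addEventListener("activate", function (event) {
  console.log("service worker activated");
});

self.addEventListener("push", function (event) {
  // event.data.json() is synchronous, so waitUntil() can be called right away
  const { title, description, image } = event.data.json();
  event.waitUntil(showPushNotification(title, description, image));
});

function showPushNotification(title, description, image) {
  // Returning the promise lets waitUntil() keep the worker alive until the
  // notification has actually been shown
  return self.registration.showNotification(title, {
    body: description,
    icon: image,
    actions: [{ title: "Say hi", action: "Say hi" }],
  });
}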
MDN has a pretty in-depth tutorial on service workers (and there are a lot more) that I recommend having a look at:
developer.mozilla.org/en-US/docs/Web/API/Service_Worker_API/Using_Service_Workers
I created a new React project with a service worker already written. I am making 3 different API requests in App, but only the results from the Pokémon API get displayed when I disconnect from the internet and reload. The other 2 aren't getting cached. Here is the code in App. Really simple.
function App() {
const [pokemon, setPokemon] = useState([])
const [word, setWord] = useState("")
const [color, setColor] = useState("")
useEffect(() => {
getAPI()
}, [])
const getAPI = async () => {
const response = await fetch("https://pokeapi.co/api/v2/pokemon?limit=100")
const data = await response.json()
setPokemon(data.results)
const response1 = await fetch("https://random-words-api.vercel.app/word")
const data1 = await response1.json()
setWord(data1[0].word)
const response2 = await fetch("https://random-data-api.com/api/color/random_color")
const data2 = await response2.json()
setColor(data2.color_name)
}
return (
<div className="App">
<p>Random Word: {word}</p>
<p>Random Color: {color}</p>
{pokemon.map((p,i) => <p onClick={(e)=> console.log(e.target)} key={i}>{ `${i} - ${p.name} - ${p.url}`}</p>)}
</div>
);
}
export default App;
This is the prebuilt service worker file.
/* eslint-disable no-restricted-globals */
// This service worker can be customized!
// See https://developers.google.com/web/tools/workbox/modules
// for the list of available Workbox modules, or add any other
// code you'd like.
// You can also remove this file if you'd prefer not to use a
// service worker, and the Workbox build step will be skipped.
import { clientsClaim } from 'workbox-core';
import { ExpirationPlugin } from 'workbox-expiration';
import { precacheAndRoute, createHandlerBoundToURL } from 'workbox-precaching';
import { registerRoute } from 'workbox-routing';
import { StaleWhileRevalidate } from 'workbox-strategies';
clientsClaim();
// Precache all of the assets generated by your build process.
// Their URLs are injected into the manifest variable below.
// This variable must be present somewhere in your service worker file,
// even if you decide not to use precaching. See https://cra.link/PWA
precacheAndRoute(self.__WB_MANIFEST);
// Set up App Shell-style routing, so that all navigation requests
// are fulfilled with your index.html shell. Learn more at
// https://developers.google.com/web/fundamentals/architecture/app-shell
const fileExtensionRegexp = new RegExp('/[^/?]+\\.[^/]+$');
registerRoute(
// Return false to exempt requests from being fulfilled by index.html.
({ request, url }) => {
// If this isn't a navigation, skip.
if (request.mode !== 'navigate') {
return false;
} // If this is a URL that starts with /_, skip.
if (url.pathname.startsWith('/_')) {
return false;
} // If this looks like a URL for a resource, because it contains a file extension, skip.
if (url.pathname.match(fileExtensionRegexp)) {
return false;
} // Return true to signal that we want to use the handler.
return true;
},
createHandlerBoundToURL(process.env.PUBLIC_URL + '/index.html')
);
// An example runtime caching route for requests that aren't handled by the
// precache, in this case same-origin .png requests like those from in public/
registerRoute(
// Add in any other file extensions or routing criteria as needed.
({ url }) => url.origin === self.location.origin && url.pathname.endsWith('.png'), // Customize this strategy as needed, e.g., by changing to CacheFirst.
new StaleWhileRevalidate({
cacheName: 'images',
plugins: [
// Ensure that once this runtime cache reaches a maximum size the
// least-recently used images are removed.
new ExpirationPlugin({ maxEntries: 50 }),
],
})
);
// This allows the web app to trigger skipWaiting via
// registration.waiting.postMessage({type: 'SKIP_WAITING'})
self.addEventListener('message', (event) => {
console.error("Message")
if (event.data && event.data.type === 'SKIP_WAITING') {
self.skipWaiting();
}
});
self.addEventListener('fetch', function (event) {
event.respondWith(
fetch(event.request).catch(function () {
return caches.match(event.request);
}),
);
});
// Any other custom service worker logic can go here.
What's the cause of this behaviour?
EDIT: It is so smart that it only caches duplicate responses. But I would like it to cache the last value anyway. How?
OK, I see you have a cache match, but I don't see any code that populates the cache, e.g. when there's no match.
self.addEventListener('fetch', function(event) {
console.log('Handling fetch event for', event.request.url);
event.respondWith(
caches.open(CURRENT_CACHES.font).then(function(cache) {
return cache.match(event.request).then(function(response) {
if (response) {
// If there is an entry in the cache for event.request, then response will be defined
// and we can just return it. Note that in this example, only font resources are cached.
console.log(' Found response in cache:', response);
return response;
}
// Otherwise, if there is no entry in the cache for event.request, response will be
// undefined, and we need to fetch() the resource.
console.log(' No response for %s found in cache. About to fetch ' +
'from network...', event.request.url);
// We call .clone() on the request since we might use it in a call to cache.put() later on.
// Both fetch() and cache.put() "consume" the request, so we need to make a copy.
// (see https://developer.mozilla.org/en-US/docs/Web/API/Request/clone)
return fetch(event.request.clone()).then(function(response) {
console.log(' Response for %s from network is: %O',
event.request.url, response);
if (response.status < 400 &&
response.headers.has('content-type') &&
response.headers.get('content-type').match(/^font\//i)) {
// This avoids caching responses that we know are errors (i.e. HTTP status code of 4xx or 5xx).
// We also only want to cache responses that correspond to fonts,
// i.e. have a Content-Type response header that starts with "font/".
// Note that for opaque filtered responses (https://fetch.spec.whatwg.org/#concept-filtered-response-opaque)
// we can't access to the response headers, so this check will always fail and the font won't be cached.
// All of the Google Web Fonts are served off of a domain that supports CORS, so that isn't an issue here.
// It is something to keep in mind if you're attempting to cache other resources from a cross-origin
// domain that doesn't support CORS, though!
// We call .clone() on the response to save a copy of it to the cache. By doing so, we get to keep
// the original response object which we will return back to the controlled page.
// (see https://developer.mozilla.org/en-US/docs/Web/API/Request/clone)
console.log(' Caching the response to', event.request.url);
cache.put(event.request, response.clone());
} else {
console.log(' Not caching the response to', event.request.url);
}
// Return the original response object, which will be used to fulfill the resource request.
return response;
});
}).catch(function(error) {
// This catch() will handle exceptions that arise from the match() or fetch() operations.
// Note that a HTTP error response (e.g. 404) will NOT trigger an exception.
// It will return a normal response object that has the appropriate error code set.
console.error(' Error in fetch handler:', error);
throw error;
});
})
);
});
This is from the documentation, https://developer.mozilla.org/en-US/docs/Web/API/Cache
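Regarding the edit ("cache the last value anyway"): a network-first fetch handler that writes every successful response into the cache before returning it would do exactly that. A minimal sketch, assuming a cache named 'api-cache' (the name is arbitrary); note that only GET requests can be stored in the Cache API:

self.addEventListener('fetch', function (event) {
  // Only GET requests can be stored in the Cache API.
  if (event.request.method !== 'GET') return;
  event.respondWith(
    fetch(event.request)
      .then(function (networkResponse) {
        // Clone the response: one copy goes into the cache, the other to the page.
        const responseClone = networkResponse.clone();
        caches.open('api-cache').then(function (cache) {
          cache.put(event.request, responseClone);
        });
        return networkResponse;
      })
      .catch(function () {
        // Offline: fall back to the last value we cached.
        return caches.match(event.request);
      })
  );
});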
Chrome officially supports running the browser in headless mode (including programmatic control via the Puppeteer API and/or the CRI library).
I've searched through the documentation, but I haven't found how to programmatically capture the AJAX traffic from the instances (i.e. start an instance of Chrome from code, navigate to a page, and access the background response/request calls & raw data, all from code, not using the developer tools or extensions).
Do you have any suggestions or examples detailing how this could be achieved? Thanks!
Update
As @Alejandro pointed out in the comments, resourceType is a function and its return value is lowercased:
page.on('request', request => {
if (request.resourceType() === 'xhr')
// do something
});
Original answer
Puppeteer's API makes this really easy:
page.on('request', request => {
if (request.resourceType === 'XHR')
// do something
});
You can also intercept requests with setRequestInterception, but it's not needed in this example if you're not going to modify the requests.
There's an example of intercepting image requests that you can adapt.
resourceTypes are defined here.
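And in case you do want interception as well, a minimal sketch using setRequestInterception (the URL is a placeholder); once interception is enabled, every request must be continued or aborted, or the page will stall:

const puppeteer = require('puppeteer');

(async () => {
  const browser = await puppeteer.launch();
  const page = await browser.newPage();
  await page.setRequestInterception(true);
  page.on('request', request => {
    if (request.resourceType() === 'xhr') {
      console.log('XHR:', request.method(), request.url());
    }
    request.continue(); // required for every request once interception is on
  });
  await page.goto('https://example.com'); // placeholder URL
  await browser.close();
})();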
I finally found how to do what I wanted. It can be done with chrome-remote-interface (CRI), and node.js. I'm attaching the minimal code required.
const CDP = require('chrome-remote-interface');
(async function () {
// you need to have a Chrome open with remote debugging enabled
// ie. chrome --remote-debugging-port=9222
const protocol = await CDP({port: 9222});
const {Page, Network} = protocol;
await Page.enable();
await Network.enable(); // need this to call Network.getResponseBody below
Page.navigate({url: 'http://localhost/'}); // your URL
const onDataReceived = async (e) => {
try {
let response = await Network.getResponseBody({requestId: e.requestId})
if (typeof response.body === 'string') {
console.log(response.body);
}
} catch (ex) {
console.log(ex.message)
}
}
protocol.on('Network.dataReceived', onDataReceived)
})();
Puppeteer's listeners can help you capture XHR responses via the response and request events.
You should first check whether request.resourceType() is xhr or fetch:
listener = page.on('response', response => {
const isXhr = ['xhr','fetch'].includes(response.request().resourceType())
if (isXhr){
console.log(response.url());
response.text().then(console.log)
}
})
const browser = await puppeteer.launch();
const page = await browser.newPage();
const pageClient = page["_client"];
pageClient.on("Network.responseReceived", event => {
if (~event.response.url.indexOf('/api/chart/rank')) {
console.log(event.response.url);
pageClient.send('Network.getResponseBody', {
requestId: event.requestId
}).then(async response => {
const body = response.body;
if (body) {
try {
const json = JSON.parse(body);
}
catch (e) {
}
}
});
}
});
await page.setRequestInterception(true);
page.on("request", async request => {
request.continue();
});
await page.goto('http://www.example.com', { timeout: 0 });
We have a web app (built using AngularJS) that we're gradually adding PWA 'features' to (service worker, launchable, notifications, etc). One of the features our web app has is the ability to complete a web form while offline. At the moment, we store the data in IndexedDB when offline, and simply encourage the user to push that data to the server once they're online ("This form is saved to your device. Now you're back online, you should save it to the cloud..."). We will do this automatically at some point, but that's not necessary at the moment.
We are adding a feature to these web forms, whereby the user will be able to attach files (images, documents) to the form, perhaps at several points throughout the form.
My question is this - is there a way for a service worker to handle file uploads? To somehow - perhaps - store the path to the file to be uploaded, when offline, and push that file up once the connection has been restored? Would this work on mobile devices, i.e. do we have access to that 'path' on those devices? Any help, advice or references would be much appreciated.
When the user selects a file via an <input type="file"> element, we are able to get the selected file(s) via fileInput.files. This gives us a FileList object, each item in it being a File object representing the selected file(s). FileList and File are supported by HTML5's Structured Clone Algorithm.
When adding items to an IndexedDB store, it creates a structured clone of the value being stored. Since FileList and File objects are supported by the structured clone algorithm, this means that we can store these objects in IndexedDB directly.
To perform those file uploads once the user goes online again, you can use the Background Sync feature of service workers. Here's an introductory article on how to do that. There are a lot of other resources for that as well.
In order to be able to include file attachments in your request once your background sync code runs, you can use FormData. FormData allows adding File objects to the request that will be sent to your backend, and it is available from within the service worker context.
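Here is a minimal sketch tying those pieces together. The database name ('uploads-db'), store name ('pending'), sync tag ('upload-forms'), endpoint ('/api/upload') and the fileInput element are all assumptions, and Background Sync itself has limited browser support:

// In the page: store the selected File in IndexedDB, then register a one-off sync.
fileInput.addEventListener('change', function () {
  const file = fileInput.files[0];
  const open = indexedDB.open('uploads-db', 1);
  open.onupgradeneeded = function () {
    open.result.createObjectStore('pending', { autoIncrement: true });
  };
  open.onsuccess = function () {
    const tx = open.result.transaction('pending', 'readwrite');
    tx.objectStore('pending').add(file); // File objects are structured-cloneable
    tx.oncomplete = function () {
      navigator.serviceWorker.ready.then(reg => reg.sync.register('upload-forms'));
    };
  };
});

// In the service worker: replay the pending uploads when connectivity returns.
self.addEventListener('sync', event => {
  if (event.tag === 'upload-forms') {
    event.waitUntil(replayPendingUploads());
  }
});

async function replayPendingUploads() {
  const db = await new Promise((resolve, reject) => {
    const open = indexedDB.open('uploads-db', 1);
    open.onsuccess = () => resolve(open.result);
    open.onerror = () => reject(open.error);
  });
  const files = await new Promise((resolve, reject) => {
    const req = db.transaction('pending').objectStore('pending').getAll();
    req.onsuccess = () => resolve(req.result);
    req.onerror = () => reject(req.error);
  });
  for (const file of files) {
    // Wrap each stored File in a FormData and send it to the backend.
    const formData = new FormData();
    formData.append('attachment', file, file.name);
    await fetch('/api/upload', { method: 'POST', body: formData });
  }
  // A real implementation would also remove each entry once its upload succeeds.
}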
One way to handle file uploads/deletes and almost everything else is by keeping track of all the changes made during the offline requests. We can create a sync object with two arrays inside, one for pending files that will need to be uploaded and one for deleted files that will need to be deleted when we get back online.
tl;dr
Key phases
Service Worker Installation
Along with static data, we make sure to fetch dynamic data such as the main listing of our uploaded files (in the example case, a GET to /uploads returns JSON data with the files).
Service Worker Fetch
When handling the service worker fetch event, if the fetch fails, then we have to handle the requests for the files listing, the requests that upload a file to the server, and the requests that delete a file from the server. If we don't have any of these requests, then we return a match from the default cache.
Listing GET
We get the cached object of the listing (in our case /uploads) and the sync object. We concat the default listing files with the pending files, remove the deleted files, and return a new Response object with a JSON result as the server would have returned it.
Uploading PUT
We get the cached listing files and the sync pending files from the cache. If the file isn't present, then we create a new cache entry for that file, and we use the mime type and the blob from the request to create a new Response object that will be saved to the default cache.
Deleting DELETE
We check the cached uploads, and if the file is present, we delete the entry from both the listing array and the file cache. If the file is pending, we just delete the entry from the pending array; otherwise, if it's not already in the deleted array, we add it. We update the listing, files, and sync object caches at the end.
Syncing
When the online event gets triggered, we try to synchronize with the server. We read the sync cache.
If there are pending files, then we get each file Response object from cache and we send a PUT fetch request back to the server.
If there are deleted files, then we send a DELETE fetch request for each file to the server.
Finally, we reset the sync cache object.
Code implementation
(Please read the inline comments)
Service Worker Install
const cacheName = 'pwasndbx';
const syncCacheName = 'pwasndbx-sync';
const pendingName = '__pending';
const syncName = '__sync';
const filesToCache = [
'/',
'/uploads',
'/styles.css',
'/main.js',
'/utils.js',
'/favicon.ico',
'/manifest.json',
];
/* Start the service worker and cache all of the app's content */
self.addEventListener('install', function(e) {
console.log('SW:install');
e.waitUntil(Promise.all([
caches.open(cacheName).then(async function(cache) {
let cacheAdds = [];
try {
// Get all the files from the uploads listing
const res = await fetch('/uploads');
const { data = [] } = await res.json();
const files = data.map(f => `/uploads/${f}`);
// Cache all uploads files urls
cacheAdds.push(cache.addAll(files));
} catch(err) {
console.warn('PWA:install:fetch(uploads):err', err);
}
// Also add our static files to the cache
cacheAdds.push(cache.addAll(filesToCache));
return Promise.all(cacheAdds);
}),
// Create the sync cache object
caches.open(syncCacheName).then(cache => cache.put(syncName, jsonResponse({
pending: [], // For storing the pending files that later will be synced
deleted: [] // For storing the files that later will be deleted on sync
}))),
])
);
});
Service Worker Fetch
self.addEventListener('fetch', function(event) {
// Clone request so we can consume data later
const request = event.request.clone();
const { method, url, headers } = event.request;
event.respondWith(
fetch(event.request).catch(async function(err) {
const { headers, method, url } = event.request;
// A custom header that we set to indicate the requests come from our syncing method
// so we won't try to fetch anything from cache, we need syncing to be done on the server
const xSyncing = headers.get('X-Syncing');
if(xSyncing && xSyncing.length) {
return caches.match(event.request);
}
switch(method) {
case 'GET':
// Handle listing data for /uploads and return JSON response
break;
case 'PUT':
// Handle upload to cache and return success response
break;
case 'DELETE':
// Handle delete from cache and return success response
break;
}
// If we meet no specific criteria, then lookup to the cache
return caches.match(event.request);
})
);
});
function jsonResponse(data, status = 200) {
return new Response(data && JSON.stringify(data), {
status,
headers: {'Content-Type': 'application/json'}
});
}
Service Worker Fetch Listing GET
if(url.match(/\/uploads\/?$/)) { // Failed to get the uploads listing
// Get the uploads data from cache
const uploadsRes = await caches.match(event.request);
let { data: files = [] } = await uploadsRes.json();
// Get the sync data from cache
const syncRes = await caches.match(new Request(syncName), { cacheName: syncCacheName });
const sync = await syncRes.json();
// Return the files from uploads + pending files from sync - deleted files from sync
const data = files.concat(sync.pending).filter(f => sync.deleted.indexOf(f) < 0);
// Return a JSON response with the updated data
return jsonResponse({
success: true,
data
});
}
Service Worker Fetch Uploading PUT
// Get our custom headers
const filename = headers.get('X-Filename');
const mimetype = headers.get('X-Mimetype');
if(filename && mimetype) {
// Get the uploads data from cache
const uploadsRes = await caches.match('/uploads', { cacheName });
let { data: files = [] } = await uploadsRes.json();
// Get the sync data from cache
const syncRes = await caches.match(new Request(syncName), { cacheName: syncCacheName });
const sync = await syncRes.json();
// If the file exists in the uploads or in the pendings, then return a 409 Conflict response
if(files.indexOf(filename) >= 0 || sync.pending.indexOf(filename) >= 0) {
return jsonResponse({ success: false }, 409);
}
caches.open(cacheName).then(async (cache) => {
// Write the file to the cache using the request we cloned at the beginning
const data = await request.blob();
cache.put(`/uploads/${filename}`, new Response(data, {
headers: { 'Content-Type': mimetype }
}));
// Write the updated files data to the uploads cache
cache.put('/uploads', jsonResponse({ success: true, data: files }));
});
// Add the file to the sync pending data and update the sync cache object
sync.pending.push(filename);
caches.open(syncCacheName).then(cache => cache.put(new Request(syncName), jsonResponse(sync)));
// Return a success response with fromSw set to true so we know this response came from the service worker
return jsonResponse({ success: true, fromSw: true });
}
Service Worker Fetch Deleting DELETE
// Get our custom headers
const filename = headers.get('X-Filename');
if(filename) {
// Get the uploads data from cache
const uploadsRes = await caches.match('/uploads', { cacheName });
let { data: files = [] } = await uploadsRes.json();
// Get the sync data from cache
const syncRes = await caches.match(new Request(syncName), { cacheName: syncCacheName });
const sync = await syncRes.json();
// Check if the file is already pending or deleted
const pendingIndex = sync.pending.indexOf(filename);
const uploadsIndex = files.indexOf(filename);
if(pendingIndex >= 0) {
// If it's pending, then remove it from pending sync data
sync.pending.splice(pendingIndex, 1);
} else if(sync.deleted.indexOf(filename) < 0) {
// If it's not in pending and not already in sync for deleting,
// then add it for delete when we'll sync with the server
sync.deleted.push(filename);
}
// Update the sync cache
caches.open(syncCacheName).then(cache => cache.put(new Request(syncName), jsonResponse(sync)));
// If the file is in the uploads data
if(uploadsIndex >= 0) {
// Update the uploads data
files.splice(uploadsIndex, 1);
caches.open(cacheName).then(async (cache) => {
// Remove the file from the cache
cache.delete(`/uploads/${filename}`);
// Update the uploads data cache
cache.put('/uploads', jsonResponse({ success: true, data: files }));
});
}
// Return a JSON success response
return jsonResponse({ success: true });
}
Syncing
// Get the sync data from cache
const syncRes = await caches.match(new Request(syncName), { cacheName: syncCacheName });
const sync = await syncRes.json();
// If there are pending files, send them to the server
if(sync.pending && sync.pending.length) {
sync.pending.forEach(async (file) => {
const url = `/uploads/${file}`;
const fileRes = await caches.match(url);
const data = await fileRes.blob();
fetch(url, {
method: 'PUT',
headers: {
'X-Filename': file,
'X-Syncing': 'syncing' // Tell SW fetch that we are synching so to ignore this fetch
},
body: data
}).catch(err => console.log('sync:pending:PUT:err', file, err));
});
}
// If there are deleted files, send a delete request for each of them to the server
if(sync.deleted && sync.deleted.length) {
sync.deleted.forEach(async (file) => {
const url = `/uploads/${file}`;
fetch(url, {
method: 'DELETE',
headers: {
'X-Filename': file,
'X-Syncing': 'syncing' // Tell SW fetch that we are synching so to ignore this fetch
}
}).catch(err => console.log('sync:deleted:DELETE:err', file, err));
});
}
// Update and reset the sync cache object
caches.open(syncCacheName).then(cache => cache.put(syncName, jsonResponse({
pending: [],
deleted: []
})));
Example PWA
I have created a PWA example that implements all these, which you can find and test here. I have tested it using Chrome and Firefox and using Firefox Android on a mobile device.
You can find the full source code of the application (including an express server) in this Github repository: https://github.com/clytras/pwa-sandbox.
The Cache API is designed to store a request (as the key) and a response (as the value) in order to cache content from the server for the web page. Here, we're talking about caching user input for future dispatch to the server. In other terms, we're not trying to implement a cache, but a message broker, and that's not currently something handled by the Service Worker spec (Source).
You can figure it out by trying this code:
HTML:
<button id="get">GET</button>
<button id="post">POST</button>
<button id="put">PUT</button>
<button id="patch">PATCH</button>
JavaScript:
if ('serviceWorker' in navigator) {
navigator.serviceWorker.register('/service-worker.js', { scope: '/' }).then(function (reg) {
console.log('Registration succeeded. Scope is ' + reg.scope);
}).catch(function (error) {
console.log('Registration failed with ' + error);
});
};
document.getElementById('get').addEventListener('click', async function () {
console.log('Response: ', await fetch('50x.html'));
});
document.getElementById('post').addEventListener('click', async function () {
console.log('Response: ', await fetch('50x.html', { method: 'POST' }));
});
document.getElementById('put').addEventListener('click', async function () {
console.log('Response: ', await fetch('50x.html', { method: 'PUT' }));
});
document.getElementById('patch').addEventListener('click', async function () {
console.log('Response: ', await fetch('50x.html', { method: 'PATCH' }));
});
Service Worker:
self.addEventListener('fetch', function (event) {
var response;
event.respondWith(fetch(event.request).then(function (r) {
response = r;
caches.open('v1').then(function (cache) {
cache.put(event.request, response);
}).catch(e => console.error(e));
return response.clone();
}));
});
Which throws:
TypeError: Request method 'POST' is unsupported
TypeError: Request method 'PUT' is unsupported
TypeError: Request method 'PATCH' is unsupported
Since the Cache API can't be used, and following the Google guidelines, IndexedDB is the best solution as a data store for ongoing requests.
Then, the implementation of a message broker is the responsibility of the developer, and there is no unique generic implementation that will cover all of the use cases. There are many parameters that will determine the solution:
Which criteria will trigger the use of the message broker instead of the network? window.navigator.onLine? A certain timeout? Other?
Which criteria should be used to start trying to forward ongoing requests on the network? self.addEventListener('online', ...)? navigator.connection?
Should requests respect the order or should they be forwarded in parallel? In other terms, should they be considered as dependent on each other, or not?
If run in parallel, should they be batched to prevent a bottleneck on the network?
In case the network is considered available, but the requests still fail for some reason, which retry logic to implement? Exponential backoff? Other?
How to notify the user that their actions are in a pending state while they are?
...
This is really very broad for a single StackOverflow answer.
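To illustrate just one of those choices, here is a hedged sketch of retry logic with exponential backoff (the function name and delays are illustrative, not part of any spec):

// Retry a request with exponential backoff: 500 ms, 1 s, 2 s, 4 s, ...
async function fetchWithBackoff(request, maxRetries = 5, baseDelayMs = 500) {
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    try {
      // Clone the request so its body can be re-sent on each attempt.
      const response = await fetch(request.clone());
      if (response.ok) return response;
    } catch (e) {
      // Network failure; fall through to the delay below.
    }
    await new Promise(resolve => setTimeout(resolve, baseDelayMs * 2 ** attempt));
  }
  throw new Error('Request failed after ' + (maxRetries + 1) + ' attempts');
}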
That being said, here is a minimal working solution:
HTML:
<input id="file" type="file">
<button id="sync">SYNC</button>
<button id="get">GET</button>
JavaScript:
if ('serviceWorker' in navigator) {
navigator.serviceWorker.register('/service-worker.js', { scope: '/' }).then(function (reg) {
console.log('Registration succeeded. Scope is ' + reg.scope);
}).catch(function (error) {
console.log('Registration failed with ' + error);
});
};
document.getElementById('get').addEventListener('click', function () {
fetch('api');
});
document.getElementById('file').addEventListener('change', function () {
fetch('api', { method: 'PUT', body: document.getElementById('file').files[0] });
});
document.getElementById('sync').addEventListener('click', function () {
navigator.serviceWorker.controller.postMessage('sync');
});
Service Worker:
self.importScripts('https://unpkg.com/idb@5.0.1/build/iife/index-min.js');
const { openDB, deleteDB, wrap, unwrap } = idb;
const dbPromise = openDB('put-store', 1, {
upgrade(db) {
db.createObjectStore('put');
},
});
const idbKeyval = {
async get(key) {
return (await dbPromise).get('put', key);
},
async set(key, val) {
return (await dbPromise).put('put', val, key);
},
async delete(key) {
return (await dbPromise).delete('put', key);
},
async clear() {
return (await dbPromise).clear('put');
},
async keys() {
return (await dbPromise).getAllKeys('put');
},
};
self.addEventListener('fetch', function (event) {
if (event.request.method === 'PUT') {
let body;
event.respondWith(event.request.blob().then(file => {
// Retrieve the body then clone the request, to avoid "body already used" errors
body = file;
return fetch(new Request(event.request.url, { method: event.request.method, body }));
}).then(response => handleResult(response, event, body)).catch(() => handleResult(null, event, body)));
} else if (event.request.method === 'GET') {
event.respondWith(fetch(event.request).then(response => {
return response.ok ? response : caches.match(event.request);
}).catch(() => caches.match(event.request)));
}
});
async function handleResult(response, event, body) {
const getRequest = new Request(event.request.url, { method: 'GET' });
const cache = await caches.open('v1');
await idbKeyval.set(event.request.method + '.' + event.request.url, { url: event.request.url, method: event.request.method, body });
const returnResponse = response && response.ok ? response : new Response(body);
cache.put(getRequest, returnResponse.clone());
return returnResponse;
}
// Function to call when the network is supposed to be available
async function sync() {
const keys = await idbKeyval.keys();
for (const key of keys) {
try {
const { url, method, body } = await idbKeyval.get(key);
const response = await fetch(url, { method, body });
if (response && response.ok)
await idbKeyval.delete(key);
}
catch (e) {
console.warn(`An error occurred while trying to sync the request: ${key}`, e);
}
}
}
self.addEventListener('message', sync);
Some words about the solution: it allows caching the PUT request for future GET requests, and it also stores the PUT request in an IndexedDB database for future sync. For the key, I was inspired by Angular's TransferHttpCacheInterceptor, which allows serializing backend requests on the server-side rendered page for use by the browser-rendered page. It uses <verb>.<url> as the key, which supposes that a request will override another request with the same verb and URL.
This solution also supposes that the backend does not return 204 No content as a response of a PUT request, but 200 with the entity in the body.
I also stumbled upon this lately. Here is what I am doing to store the upload in IndexedDB and return a response when offline.
const storeFileAndReturnResponse = async function (request, urlSearchParams) {
let requestClone = request.clone();
let formData = await requestClone.formData();
let tableStore = "fileUploads";
let fileList = [];
let formDataToStore = [];
// Use formData.entries() to iterate the collection - this assumes you used input type="file"
for (const pair of formData.entries()) {
let fileObjectUploaded = pair[1];
//content holds the arrayBuffer (blob) of the uploaded file
formDataToStore.push({
key: pair[0],
value: fileObjectUploaded,
content: await fileObjectUploaded.arrayBuffer(),
});
let fileName = fileObjectUploaded.name;
fileList.push({
fileName: fileName,
});
}
let payloadToStore = {
parentId: parentId,
fileList: fileList,
formDataKeyValue: formDataToStore,
};
(await idbContext).put(tableStore, payloadToStore);
return {
UploadedFileList: fileList,
};
};
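For the replay side, a hedged sketch of how those stored entries could be re-sent once back online, assuming the same idbContext wrapper (and that it exposes a getAll method) plus an illustrative /api/upload endpoint:

const replayStoredUploads = async function () {
  const tableStore = "fileUploads";
  // Read back every payload stored by storeFileAndReturnResponse.
  const payloads = await (await idbContext).getAll(tableStore);
  for (const payload of payloads) {
    const formData = new FormData();
    for (const entry of payload.formDataKeyValue) {
      // Recreate the file from the stored ArrayBuffer and original metadata.
      const file = new File([entry.content], entry.value.name, {
        type: entry.value.type,
      });
      formData.append(entry.key, file);
    }
    await fetch("/api/upload", { method: "POST", body: formData });
  }
};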
I have the following code for a service worker:
'use strict';
// Incrementing CACHE_VERSION will kick off the install event and force previously cached
// resources to be cached again.
const CACHE_VERSION = 1;
let CURRENT_CACHES = {
offline: 'offline-v' + CACHE_VERSION
};
const OFFLINE_URL = 'offline.php';
function createCacheBustedRequest(url) {
let request = new Request(url, {cache: 'reload'});
// See https://fetch.spec.whatwg.org/#concept-request-mode
// This is not yet supported in Chrome as of M48, so we need to explicitly check to see
// if the cache: 'reload' option had any effect.
if ('cache' in request) {
return request;
}
// If {cache: 'reload'} didn't have any effect, append a cache-busting URL parameter instead.
let bustedUrl = new URL(url, self.location.href);
bustedUrl.search += (bustedUrl.search ? '&' : '') + 'cachebust=' + Date.now();
return new Request(bustedUrl);
}
self.addEventListener('install', event => {
event.waitUntil(
// We can't use cache.add() here, since we want OFFLINE_URL to be the cache key, but
// the actual URL we end up requesting might include a cache-busting parameter.
fetch(createCacheBustedRequest(OFFLINE_URL)).then(function(response) {
return caches.open(CURRENT_CACHES.offline).then(function(cache) {
return cache.put(OFFLINE_URL, response);
});
})
);
});
self.addEventListener('activate', event => {
// Delete all caches that aren't named in CURRENT_CACHES.
// While there is only one cache in this example, the same logic will handle the case where
// there are multiple versioned caches.
let expectedCacheNames = Object.keys(CURRENT_CACHES).map(function(key) {
return CURRENT_CACHES[key];
});
event.waitUntil(
caches.keys().then(cacheNames => {
return Promise.all(
cacheNames.map(cacheName => {
if (expectedCacheNames.indexOf(cacheName) === -1) {
// If this cache name isn't present in the array of "expected" cache names,
// then delete it.
console.log('Deleting out of date cache:', cacheName);
return caches.delete(cacheName);
}
})
);
})
);
});
self.addEventListener('fetch', event => {
// We only want to call event.respondWith() if this is a navigation request
// for an HTML page.
// request.mode of 'navigate' is unfortunately not supported in Chrome
// versions older than 49, so we need to include a less precise fallback,
// which checks for a GET request with an Accept: text/html header.
if (event.request.mode === 'navigate' ||
(event.request.method === 'GET' &&
event.request.headers.get('accept').includes('text/html'))) {
console.log('Handling fetch event for', event.request.url);
event.respondWith(
fetch(createCacheBustedRequest(event.request.url)).catch(error => {
// The catch is only triggered if fetch() throws an exception, which will most likely
// happen due to the server being unreachable.
// If fetch() returns a valid HTTP response with an response code in the 4xx or 5xx
// range, the catch() will NOT be called. If you need custom handling for 4xx or 5xx
// errors, see https://github.com/GoogleChrome/samples/tree/gh-pages/service-worker/fallback-response
console.log('Fetch failed; returning offline page instead.', error);
return caches.match(OFFLINE_URL);
})
);
}
// If our if() condition is false, then this fetch handler won't intercept the request.
// If there are any other fetch handlers registered, they will get a chance to call
// event.respondWith(). If no fetch handlers call event.respondWith(), the request will be
// handled by the browser as if there were no service worker involvement.
});
How could it be made so that it does not save anything to the cache? The web app in question needs a connection at all times. Therefore, the main purpose of this service worker is to qualify for phone installation and, later on, for push notification capabilities.
After some online research, here is the best solution:
sw_install.js
console.log('Started', self);
self.addEventListener('install', function(event) {
self.skipWaiting();
console.log('Installed', event);
});
self.addEventListener('activate', function(event) {
console.log('Activated', event);
});
self.addEventListener('push', function(event) {
console.log('Push message received', event);
// TODO
});
main.js
if ('serviceWorker' in navigator) {
console.log('Service Worker is supported');
navigator.serviceWorker.register('sw_install.js').then(function(reg){
console.log(':^)', reg);
// TODO
}).catch(function(err) {
console.log(':^(', err);
});
}
You can use the npm sw-toolbox library (https://www.npmjs.com/package/sw-toolbox).
main.js
if (navigator.serviceWorker){
navigator.serviceWorker.register('/service-worker.js', {scope: './'})
.then(function (registration) {
console.log("sw registered",registration);
})
.catch(function (e) {
console.error("error",e);
})
} else {
console.log('Service Worker is not supported in this browser.')
}
service-worker.js
(global => {
'use strict';
// Load the sw-toolbox library.
importScripts('/js/sw-toolbox.js');
var precache_urls = [
'/index.html',
'/img/logo.png',
'/img/main.png'
];
//for debugging only
global.toolbox.options.debug = true;
global.toolbox.router.get('/img/(.*)', self.toolbox.cacheFirst, {
cache: {
name: "Images",
maxEntries: 10
}
});
global.toolbox.router.default = global.toolbox.networkFirst;
global.addEventListener('install', event => event.waitUntil(global.skipWaiting()));
global.addEventListener('activate', event => event.waitUntil(global.clients.claim()));
global.addEventListener('push', event => {
var pushObj = event.data.json();
var pushData = pushObj.data;
// Use the push payload if present; if not, make an AJAX GET call to fetch it
var title = pushData && pushData.title;
var body = pushData && pushData.body;
var icon = '/img/logo.png';
event.waitUntil(global.registration.showNotification(title, {
body: body,
icon: icon,
badge: icon,
data: pushData
}));
});
global.addEventListener('notificationclick', event => {
event.notification.close();
var url = event.notification.data.url || '/';
event.waitUntil(
clients.matchAll({
type: 'window'
}).then(windowClients => {
console.log('WindowClients', windowClients);
for (var i = 0; i < windowClients.length; i++) {
var client = windowClients[i];
console.log('WindowClient', client);
if (client.url === url && 'focus' in client) {
return client.focus();
}
}
if (clients.openWindow) {
return clients.openWindow(url);
}
}));
});
})(self);