Electron BrowserWindow cannot get response when debugger is attached - javascript

I'm writing an Electron app which creates a BrowserWindow. I want to capture a few requests sent to a server, along with the responses to those requests. Using the Electron WebRequest API I can't get response bodies, so I searched the web and found that I can attach a debugger programmatically.
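For reference, this is roughly the limitation I hit with the WebRequest API (a minimal sketch; the details object carries metadata only):
const { session } = require('electron')

// Sketch: webRequest reports request/response metadata, but no response body
session.defaultSession.webRequest.onCompleted({ urls: ['*://*/*'] }, (details) => {
  // details.statusCode and details.responseHeaders are available here,
  // but there is no field for the response body
  console.log(details.method, details.url, details.statusCode)
})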
I attach the debugger using the code below and get almost all responses correctly. But for one larger request I can't get the response; I get an error:
Error: No resource with given identifier found
If I launch DevTools and navigate to that request, I also can't see the response: Failed to load response data. If I comment out the code below, the response shows correctly in DevTools.
Note that this happens only for one specific request, which returns a response of about 1 MB. For all other requests I can get the response using getResponseData().
const dbg = win.webContents.debugger

const getResponseData = async (reqId) => {
  const res = await dbg.sendCommand("Network.getResponseBody", { requestId: reqId });
  return res.body
}

try {
  dbg.attach('1.3')
  dbg.sendCommand('Network.enable')
} catch (err) {
  console.log('Debugger attach failed: ', err)
}

dbg.on('detach', (event, reason) => {
  console.log('Debugger detached due to: ', reason)
})
dbg.on('message', (e, m, p) => {
  if (m === 'Network.requestWillBeSent') {
    if (p.request.url === someURL) {
      const j = JSON.parse(p.request.postData)
      console.log("req " + p.requestId)
      global.webReqs[p.requestId] = { reqData: j }
    }
  } else if (m === 'Network.loadingFinished') {
    if (p.requestId in global.webReqs) {
      console.log("res " + p.requestId)
      getResponseData(p.requestId).then(res => {
        console.log(res.slice(0, 60))
      }).catch(err => {
        console.error(err)
      })
    }
  }
});
Short update
The event stack for this particular request is as follows, where 13548.212 is simply the requestId:
Network.requestWillBeSentExtraInfo 13548.212
Network.requestWillBeSent 13548.212
Network.responseReceivedExtraInfo 13548.212
Network.responseReceived 13548.212
Network.dataReceived 13548.212 [repeated 135 times]
...
Network.loadingFinished 13548.212

It looks like I found a solution. It's more of a workaround, but it works. Instead of Network.getResponseBody I used the Fetch domain (https://chromedevtools.github.io/devtools-protocol/tot/Fetch).
To use it, you need to subscribe to responses matching a pattern; you can then react to Fetch.requestPaused events. During such an event you have direct access to the request and indirect access to the response. To get the response, call Fetch.getResponseBody with the proper requestId. Also remember to send Fetch.continueRequest, because:
The request is paused until the client responds with one of continueRequest, failRequest or fulfillRequest
https://chromedevtools.github.io/devtools-protocol/tot/Fetch/#event-requestPaused
dbg.sendCommand('Fetch.enable', {
  patterns: [
    { urlPattern: interestingURLpattern, requestStage: "Response" }
  ]
})

const getResponseJson = async (requestId) => {
  const res = await dbg.sendCommand("Fetch.getResponseBody", { requestId: requestId })
  return JSON.parse(res.base64Encoded ? Buffer.from(res.body, 'base64').toString() : res.body)
}
// Note: the callback must be async, since it awaits inside
dbg.on('message', async (e, m, p) => {
  if (m === 'Fetch.requestPaused') {
    const reqJson = JSON.parse(p.request.postData)
    const resJson = await getResponseJson(p.requestId)
    ...
    await dbg.sendCommand("Fetch.continueRequest", { requestId: p.requestId })
  }
});

Related

How to do A/B testing with Cloudflare workers

I'm looking for an example of what these two lines of code look like in a functioning A/B testing worker. From https://developers.cloudflare.com/workers/examples/ab-testing
const TEST_RESPONSE = new Response("Test group") // e.g. await fetch("/test/sompath", request)
const CONTROL_RESPONSE = new Response("Control group") // e.g. await fetch("/control/sompath", request)
I used the examples, substituting the paths I'm using, and got a syntax error saying await can only be used in an async function. So I changed the function to async function handleRequest(request) and got a 500 error.
What should these two lines look like for the code to work?
Working example
document.getElementById("myBtn").addEventListener("click", myFun);
async function myFun() {
const isTest = Math.random() > 0.5 //here you place your condition
const res = isTest ? await fetch1() : await fetch1() //here you should have different fetches for A and B
console.log(res)
document.getElementById("demo").innerHTML = res.message;
}
async function fetch1() {
//I took the link from some other SO answer as I was running into CORS problems with e.g. google.com
return fetch("https://currency-converter5.p.rapidapi.com/currency/list?format=json", {
"method": "GET",
"headers": {
"x-rapidapi-host": "currency-converter5.p.rapidapi.com",
"x-rapidapi-key": "**redacted**"
}
})
.then(response => response.json())
}
<button id="myBtn">button</button>
<div id="demo">demo</div>
Okay, I've worked on this a bit, and the worker below seems to fulfill your requirements:
async function handleRequest(request) {
const NAME = "experiment-0";
try {
// The Responses below are placeholders. You can set up a custom path for each test (e.g. /control/somepath ).
const TEST_RESPONSE = await fetch(
"https://httpbin.org/image/jpeg",
request
);
const CONTROL_RESPONSE = await fetch(
"https://httpbin.org/image/png",
request
);
// Determine which group this requester is in.
const cookie = request.headers.get("cookie");
if (cookie && cookie.includes(`${NAME}=control`)) {
return CONTROL_RESPONSE;
} else if (cookie && cookie.includes(`${NAME}=test`)) {
return TEST_RESPONSE;
} else {
// If there is no cookie, this is a new client. Choose a group and set the cookie.
const group = Math.random() < 0.5 ? "test" : "control"; // 50/50 split
const response = group === "control" ? CONTROL_RESPONSE : TEST_RESPONSE;
      // Copy the headers into a mutable Headers object; spreading a Headers
      // instance produces no entries, so the original headers would be lost.
      const headers = new Headers(response.headers);
      headers.set("Set-Cookie", `${NAME}=${group}; path=/`);
      return new Response(response.body, {
        status: response.status,
        headers,
      });
}
} catch (e) {
var response = [`internal server error: ${e.message}`];
if (e.stack) {
response.push(e.stack);
}
return new Response(response.join("\n"), {
status: 500,
headers: {
"Content-type": "text/plain",
},
});
}
}
addEventListener("fetch", (event) => {
event.respondWith(handleRequest(event.request));
});
There are some issues with the snippet once you uncomment the fetch() parts:
Yes, the function needs to be async in order to use the await keyword, so I added that.
Additionally, I think the 500 error you were seeing was due to a bug in the snippet: you'll notice I form a new Response instead of modifying the one chosen by the ternary expression. This is because responses are immutable in the Workers runtime, so adding headers to an already instantiated response results in an error. Instead, you can create an entirely new Response with all the bits from the original one.
Also, in order to gain insight into errors in a worker, it's always a good idea to add a try/catch and render those errors in the worker response.
To get yours working, just replace the httpbin.org URLs with whatever you need to A/B test.
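As a side note, the Workers runtime accepts an existing Response as the init for a new one, which gives you a mutable copy in one line. A minimal sketch of the same immutability workaround:
// new Response(body, response) copies the status and headers,
// and the copy's headers can be modified
const mutableResponse = new Response(response.body, response);
mutableResponse.headers.set("Set-Cookie", `${NAME}=${group}; path=/`);
return mutableResponse;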

Firebase functions end point returning an empty object from server but working in localhost

I was creating an API using Firebase Functions and Express. In Express, I have a number of GET routes that fetch data from the internet using Axios. In addition, one GET route scrapes a site using Axios and JSDOM and returns an array of objects.
I'm using async/await to run the code asynchronously. All the other endpoints return proper data, but the scraping one does not.
From the console, it's actually going inside the .then() callback but returning an empty object. Also, from the Firebase console I got to know that it's logging an error: "Error: Request failed with status code 503Error".
Code:
index.js
//To get crypto news
app.get("/crypto-news", (req, res) => {
  return cryptoNews.getLatesCryptoNews().then((response) => {
    console.log("News arrived") // This is actually logged in the console
    return res.json({ data: response });
  }).catch(err => {
    console.log("Failed to fetch")
    return res.send("Failed to fetch from API");
  })
});
//Firebase
exports.app = functions.https.onRequest(app);
cryptoNews.js
//Scrape and return the latest crypto news
async function getLatesCryptoNews() {
  try {
    const response = await axios({
      method: 'get',
      url: `https://tracemycode.com/`,
      json: true
    });
    const dom = new JSDOM(response.data);
    const document = dom.window.document;
    const posts = document.querySelectorAll("div.news-posts");
    //Check emptiness
    if (posts.length !== 0) {
      const data = [];
      console.log("Posts not empty"); //not logged in the console
      //Go through each node and get its data
      posts.forEach(item => {
        const time = item.querySelector('div.news-post-time p').textContent;
        const title = item.querySelector('div.news-post-title h1').textContent;
        //Push to array
        data.push({
          "title": (title !== null && typeof title === "string") && title,
          "time": (time !== null && typeof time === "string") && time.substring(1, time.length - 1),
        });
      });
      console.log("Sending data") //not logged in the console
      return data;
    }
  } catch (err) {
    console.error(err + "Error");
  }
}
exports.getLatesCryptoNews = getLatesCryptoNews;
exports.getLatesCryptoNews = getLatesCryptoNews;
I wonder why it works fine on localhost but not on the Firebase Functions server.

Programmatically capturing AJAX traffic with headless Chrome

Chrome officially supports running the browser in headless mode (including programmatic control via the Puppeteer API and/or the CRI library).
I've searched through the documentation, but I haven't found how to programmatically capture the AJAX traffic from the instances (i.e., start an instance of Chrome from code, navigate to a page, and access the background request/response calls and raw data, all from code, without using the developer tools or extensions).
Do you have any suggestions or examples detailing how this could be achieved? Thanks!
Update
As @Alejandro pointed out in the comments, resourceType is a function and its return value is lowercased:
page.on('request', request => {
  if (request.resourceType() === 'xhr') {
    // do something
  }
});
Original answer
Puppeteer's API makes this really easy:
page.on('request', request => {
  if (request.resourceType === 'XHR') {
    // do something
  }
});
You can also intercept requests with page.setRequestInterception(), but that's not needed in this example since you're not modifying the requests.
There's an example of intercepting image requests that you can adapt.
resourceTypes are defined here.
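For completeness, a minimal sketch of that interception approach, adapted from the image-blocking example (the filter is a placeholder):
await page.setRequestInterception(true);
page.on('request', request => {
  // Abort image requests; let everything else through untouched
  if (request.resourceType() === 'image') {
    request.abort();
  } else {
    request.continue();
  }
});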
I finally found out how to do what I wanted. It can be done with chrome-remote-interface (CRI) and Node.js. I'm attaching the minimal code required:
const CDP = require('chrome-remote-interface');

(async function () {
  // You need to have a Chrome instance open with remote debugging enabled,
  // i.e. chrome --remote-debugging-port=9222
  const protocol = await CDP({ port: 9222 });
  const { Page, Network } = protocol;
  await Page.enable();
  await Network.enable(); // needed to call Network.getResponseBody below
  Page.navigate({ url: 'http://localhost/' }); // your URL

  const onDataReceived = async (e) => {
    try {
      const response = await Network.getResponseBody({ requestId: e.requestId })
      if (typeof response.body === 'string') {
        console.log(response.body);
      }
    } catch (ex) {
      console.log(ex.message)
    }
  }

  protocol.on('Network.dataReceived', onDataReceived)
})();
Puppeteer's listeners can help you capture XHR responses via the response and request events.
You should check whether request.resourceType() is xhr or fetch first:
const listener = page.on('response', response => {
  const isXhr = ['xhr', 'fetch'].includes(response.request().resourceType())
  if (isXhr) {
    console.log(response.url());
    response.text().then(console.log)
  }
})
const browser = await puppeteer.launch();
const page = await browser.newPage();
// Note: _client is Puppeteer's internal CDP session and may change between versions
const pageClient = page["_client"];

pageClient.on("Network.responseReceived", event => {
  if (~event.response.url.indexOf('/api/chart/rank')) {
    console.log(event.response.url);
    pageClient.send('Network.getResponseBody', {
      requestId: event.requestId
    }).then(response => {
      const body = response.body;
      if (body) {
        try {
          const json = JSON.parse(body);
          // use the parsed response here
        } catch (e) {
          // ignore bodies that aren't valid JSON
        }
      }
    });
  }
});

await page.setRequestInterception(true);
page.on("request", request => {
  request.continue();
});
await page.goto('http://www.example.com', { timeout: 0 });

Handling File Uploads When Offline With Service Worker

We have a web app (built using AngularJS) that we're gradually adding PWA features to (service worker, launchable, notifications, etc.). One of the features our web app has is the ability to complete a web form while offline. At the moment, we store the data in IndexedDB when offline and simply encourage the user to push that data to the server once they're back online ("This form is saved to your device. Now you're back online, you should save it to the cloud..."). We will do this automatically at some point, but that's not necessary at the moment.
We are adding a feature to these web forms, whereby the user will be able to attach files (images, documents) to the form, perhaps at several points throughout the form.
My question is this: is there a way for a service worker to handle file uploads? Perhaps to somehow store the path to the file to be uploaded while offline and push that file up once the connection has been restored? Would this work on mobile devices; do we have access to that 'path' on those devices? Any help, advice or references would be much appreciated.
When the user selects a file via an <input type="file"> element, we are able to get the selected file(s) via fileInput.files. This gives us a FileList object, each item in it being a File object representing the selected file(s). FileList and File are supported by HTML5's Structured Clone Algorithm.
When adding items to an IndexedDB store, it creates a structured clone of the value being stored. Since FileList and File objects are supported by the structured clone algorithm, this means that we can store these objects in IndexedDB directly.
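A minimal sketch of that idea, assuming hypothetical database and store names:
// Store the selected file(s) directly in IndexedDB while offline
const fileInput = document.querySelector('input[type="file"]');
const open = indexedDB.open('offline-forms', 1); // hypothetical DB name
open.onupgradeneeded = () => {
  open.result.createObjectStore('attachments', { autoIncrement: true });
};
open.onsuccess = () => {
  const tx = open.result.transaction('attachments', 'readwrite');
  // File objects survive the structured clone, so we can store them as-is
  tx.objectStore('attachments').add(fileInput.files[0]);
};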
To perform those file uploads once the user goes online again, you can use the Background Sync feature of service workers. Here's an introductory article on how to do that; there are a lot of other resources as well.
In order to include file attachments in your request once your background sync code runs, you can use FormData. FormData allows adding File objects to the request that will be sent to your backend, and it is available from within the service worker context.
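Putting those pieces together, a sync handler could look roughly like this (a sketch; the tag name, helper and endpoint are hypothetical):
self.addEventListener('sync', (event) => {
  if (event.tag === 'upload-attachments') { // hypothetical tag registered by the page
    event.waitUntil(uploadPendingFiles());
  }
});

async function uploadPendingFiles() {
  // readPendingFiles() is a hypothetical helper that reads the stored File objects back from IndexedDB
  const files = await readPendingFiles();
  for (const file of files) {
    const formData = new FormData();
    formData.append('attachment', file, file.name);
    await fetch('/api/upload', { method: 'POST', body: formData }); // hypothetical endpoint
  }
}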
One way to handle file uploads/deletes (and almost everything else) is to keep track of all the changes made during the offline requests. We can create a sync object with two arrays inside: one for pending files that will need to be uploaded and one for deleted files that will need to be deleted when we get back online.
tl;dr
Key phases
Service Worker Installation
Along with the static assets, we make sure to fetch dynamic data such as the main listing of our uploaded files (in the example, GET /uploads returns JSON data with the files).
Service Worker Fetch
Handling the service worker fetch event: if the fetch fails, then we have to handle the request for the files listing, the requests that upload a file to the server, and the requests that delete a file from the server. If we don't have any of these requests, then we return a match from the default cache.
Listing GET
We get the cached object of the listing (in our case /uploads) and the sync object. We concat the default listing files with the pending files, remove the deleted files, and return a new response object with a JSON result, as the server would have returned it.
Uploading PUT
We get the cached listing files and the sync pending files from the cache. If the file isn't present, then we create a new cache entry for that file, and we use the MIME type and the blob from the request to create a new Response object that will be saved to the default cache.
Deleting DELETE
We check the cached uploads, and if the file is present we delete the entry from both the listing array and the cached files. If the file is pending, we just delete the entry from the pending array; otherwise, if it's not already in the deleted array, we add it there. We update the listing, files and sync object caches at the end.
Syncing
When the online event gets triggered, we try to synchronize with the server. We read the sync cache.
If there are pending files, then we get each file Response object from cache and we send a PUT fetch request back to the server.
If there are deleted files, then we send a DELETE fetch request for each file to the server.
Finally, we reset the sync cache object.
Code implementation
(Please read the inline comments)
Service Worker Install
const cacheName = 'pwasndbx';
const syncCacheName = 'pwasndbx-sync';
const pendingName = '__pending';
const syncName = '__sync';
const filesToCache = [
'/',
'/uploads',
'/styles.css',
'/main.js',
'/utils.js',
'/favicon.ico',
'/manifest.json',
];
/* Start the service worker and cache all of the app's content */
self.addEventListener('install', function(e) {
console.log('SW:install');
e.waitUntil(Promise.all([
caches.open(cacheName).then(async function(cache) {
let cacheAdds = [];
try {
// Get all the files from the uploads listing
const res = await fetch('/uploads');
const { data = [] } = await res.json();
const files = data.map(f => `/uploads/${f}`);
// Cache all uploads files urls
cacheAdds.push(cache.addAll(files));
} catch(err) {
console.warn('PWA:install:fetch(uploads):err', err);
}
// Also add our static files to the cache
cacheAdds.push(cache.addAll(filesToCache));
return Promise.all(cacheAdds);
}),
// Create the sync cache object
caches.open(syncCacheName).then(cache => cache.put(syncName, jsonResponse({
pending: [], // For storing the pending files that will later be synced
deleted: [] // For storing the files that will later be deleted on sync
}))),
])
);
});
Service Worker Fetch
self.addEventListener('fetch', function(event) {
// Clone request so we can consume data later
const request = event.request.clone();
const { method, url, headers } = event.request;
event.respondWith(
fetch(event.request).catch(async function(err) {
const { headers, method, url } = event.request;
// A custom header that we set to indicate the requests come from our syncing method
// so we won't try to fetch anything from cache, we need syncing to be done on the server
const xSyncing = headers.get('X-Syncing');
if(xSyncing && xSyncing.length) {
return caches.match(event.request);
}
switch(method) {
case 'GET':
// Handle listing data for /uploads and return JSON response
break;
case 'PUT':
// Handle upload to cache and return success response
break;
case 'DELETE':
// Handle delete from cache and return success response
break;
}
// If we meet no specific criteria, then lookup to the cache
return caches.match(event.request);
})
);
});
function jsonResponse(data, status = 200) {
return new Response(data && JSON.stringify(data), {
status,
headers: {'Content-Type': 'application/json'}
});
}
Service Worker Fetch Listing GET
if(url.match(/\/uploads\/?$/)) { // Failed to get the uploads listing
// Get the uploads data from cache
const uploadsRes = await caches.match(event.request);
let { data: files = [] } = await uploadsRes.json();
// Get the sync data from cache
const syncRes = await caches.match(new Request(syncName), { cacheName: syncCacheName });
const sync = await syncRes.json();
// Return the files from uploads + pending files from sync - deleted files from sync
const data = files.concat(sync.pending).filter(f => sync.deleted.indexOf(f) < 0);
// Return a JSON response with the updated data
return jsonResponse({
success: true,
data
});
}
Service Worker Fetch Uploading PUT
// Get our custom headers
const filename = headers.get('X-Filename');
const mimetype = headers.get('X-Mimetype');
if(filename && mimetype) {
// Get the uploads data from cache
const uploadsRes = await caches.match('/uploads', { cacheName });
let { data: files = [] } = await uploadsRes.json();
// Get the sync data from cache
const syncRes = await caches.match(new Request(syncName), { cacheName: syncCacheName });
const sync = await syncRes.json();
// If the file exists in the uploads or in the pendings, then return a 409 Conflict response
if(files.indexOf(filename) >= 0 || sync.pending.indexOf(filename) >= 0) {
return jsonResponse({ success: false }, 409);
}
caches.open(cacheName).then(async (cache) => {
// Write the file to the cache using the request we cloned at the beginning
const data = await request.blob();
cache.put(`/uploads/${filename}`, new Response(data, {
headers: { 'Content-Type': mimetype }
}));
// Write the updated files data to the uploads cache
cache.put('/uploads', jsonResponse({ success: true, data: files }));
});
// Add the file to the sync pending data and update the sync cache object
sync.pending.push(filename);
caches.open(syncCacheName).then(cache => cache.put(new Request(syncName), jsonResponse(sync)));
// Return a success response with fromSw set to true, so we know this response came from the service worker
return jsonResponse({ success: true, fromSw: true });
}
Service Worker Fetch Deleting DELETE
// Get our custom headers
const filename = headers.get('X-Filename');
if(filename) {
// Get the uploads data from cache
const uploadsRes = await caches.match('/uploads', { cacheName });
let { data: files = [] } = await uploadsRes.json();
// Get the sync data from cache
const syncRes = await caches.match(new Request(syncName), { cacheName: syncCacheName });
const sync = await syncRes.json();
// Check if the file is already pending or deleted
const pendingIndex = sync.pending.indexOf(filename);
const uploadsIndex = files.indexOf(filename);
if(pendingIndex >= 0) {
// If it's pending, then remove it from pending sync data
sync.pending.splice(pendingIndex, 1);
} else if(sync.deleted.indexOf(filename) < 0) {
// If it's not in pending and not already in sync for deleting,
// then add it for delete when we'll sync with the server
sync.deleted.push(filename);
}
// Update the sync cache
caches.open(syncCacheName).then(cache => cache.put(new Request(syncName), jsonResponse(sync)));
// If the file is in the uploads data
if(uploadsIndex >= 0) {
// Update the uploads data
files.splice(uploadsIndex, 1);
caches.open(cacheName).then(async (cache) => {
// Remove the file from the cache
cache.delete(`/uploads/${filename}`);
// Update the uploads data cache
cache.put('/uploads', jsonResponse({ success: true, data: files }));
});
}
// Return a JSON success response
return jsonResponse({ success: true });
}
Syncing
// Get the sync data from cache
const syncRes = await caches.match(new Request(syncName), { cacheName: syncCacheName });
const sync = await syncRes.json();
// If there are pending files, send them to the server
if(sync.pending && sync.pending.length) {
sync.pending.forEach(async (file) => {
const url = `/uploads/${file}`;
const fileRes = await caches.match(url);
const data = await fileRes.blob();
fetch(url, {
method: 'PUT',
headers: {
'X-Filename': file,
'X-Syncing': 'syncing' // Tell SW fetch that we are synching so to ignore this fetch
},
body: data
}).catch(err => console.log('sync:pending:PUT:err', file, err));
});
}
// If there are deleted files, send a DELETE request for each to the server
if(sync.deleted && sync.deleted.length) {
sync.deleted.forEach(async (file) => {
const url = `/uploads/${file}`;
fetch(url, {
method: 'DELETE',
headers: {
'X-Filename': file,
'X-Syncing': 'syncing' // Tell SW fetch that we are synching so to ignore this fetch
}
}).catch(err => console.log('sync:deleted:DELETE:err', file, err));
});
}
// Update and reset the sync cache object
caches.open(syncCacheName).then(cache => cache.put(syncName, jsonResponse({
pending: [],
deleted: []
})));
Example PWA
I have created an example PWA that implements all of this, which you can find and test here. I have tested it using Chrome and Firefox, and using Firefox Android on a mobile device.
You can find the full source code of the application (including an express server) in this GitHub repository: https://github.com/clytras/pwa-sandbox.
The Cache API is designed to store a request (as the key) and a response (as the value) in order to cache content from the server for the web page. Here, we're talking about caching user input for future dispatch to the server. In other words, we're not trying to implement a cache, but a message broker, and that's not currently something handled by the Service Worker spec (source).
You can figure it out by trying this code:
HTML:
<button id="get">GET</button>
<button id="post">POST</button>
<button id="put">PUT</button>
<button id="patch">PATCH</button>
JavaScript:
if ('serviceWorker' in navigator) {
navigator.serviceWorker.register('/service-worker.js', { scope: '/' }).then(function (reg) {
console.log('Registration succeeded. Scope is ' + reg.scope);
}).catch(function (error) {
console.log('Registration failed with ' + error);
});
}
document.getElementById('get').addEventListener('click', async function () {
console.log('Response: ', await fetch('50x.html'));
});
document.getElementById('post').addEventListener('click', async function () {
console.log('Response: ', await fetch('50x.html', { method: 'POST' }));
});
document.getElementById('put').addEventListener('click', async function () {
console.log('Response: ', await fetch('50x.html', { method: 'PUT' }));
});
document.getElementById('patch').addEventListener('click', async function () {
console.log('Response: ', await fetch('50x.html', { method: 'PATCH' }));
});
Service Worker:
self.addEventListener('fetch', function (event) {
var response;
event.respondWith(fetch(event.request).then(function (r) {
response = r;
caches.open('v1').then(function (cache) {
cache.put(event.request, response);
}).catch(e => console.error(e));
return response.clone();
}));
});
Which throws:
TypeError: Request method 'POST' is unsupported
TypeError: Request method 'PUT' is unsupported
TypeError: Request method 'PATCH' is unsupported
Since the Cache API can't be used, and following the Google guidelines, IndexedDB is the best solution as a data store for ongoing requests.
The implementation of a message broker is then the responsibility of the developer, and there is no unique generic implementation that will cover all of the use cases. There are many parameters that will determine the solution:
Which criteria will trigger the use of the message broker instead of the network? window.navigator.onLine? A certain timeout? Other?
Which criteria should be used to start trying to forward ongoing requests on the network? self.addEventListener('online', ...)? navigator.connection?
Should requests respect the order or should they be forwarded in parallel? In other terms, should they be considered as dependent on each other, or not?
If run in parallel, should they be batched to prevent a bottleneck on the network?
In case the network is considered available, but the requests still fail for some reason, which retry logic to implement? Exponential backoff? Other?
How do you notify the user that their actions are in a pending state?
...
This is really very broad for a single StackOverflow answer.
That being said, here is a minimal working solution:
HTML:
<input id="file" type="file">
<button id="sync">SYNC</button>
<button id="get">GET</button>
JavaScript:
if ('serviceWorker' in navigator) {
navigator.serviceWorker.register('/service-worker.js', { scope: '/' }).then(function (reg) {
console.log('Registration succeeded. Scope is ' + reg.scope);
}).catch(function (error) {
console.log('Registration failed with ' + error);
});
};
document.getElementById('get').addEventListener('click', function () {
fetch('api');
});
document.getElementById('file').addEventListener('change', function () {
fetch('api', { method: 'PUT', body: document.getElementById('file').files[0] });
});
document.getElementById('sync').addEventListener('click', function () {
navigator.serviceWorker.controller.postMessage('sync');
});
Service Worker:
self.importScripts('https://unpkg.com/idb@5.0.1/build/iife/index-min.js');
const { openDB, deleteDB, wrap, unwrap } = idb;
const dbPromise = openDB('put-store', 1, {
upgrade(db) {
db.createObjectStore('put');
},
});
const idbKeyval = {
async get(key) {
return (await dbPromise).get('put', key);
},
async set(key, val) {
return (await dbPromise).put('put', val, key);
},
async delete(key) {
return (await dbPromise).delete('put', key);
},
async clear() {
return (await dbPromise).clear('put');
},
async keys() {
return (await dbPromise).getAllKeys('put');
},
};
self.addEventListener('fetch', function (event) {
if (event.request.method === 'PUT') {
let body;
event.respondWith(event.request.blob().then(file => {
// Retrieve the body then clone the request, to avoid "body already used" errors
body = file;
return fetch(new Request(event.request.url, { method: event.request.method, body }));
}).then(response => handleResult(response, event, body)).catch(() => handleResult(null, event, body)));
} else if (event.request.method === 'GET') {
event.respondWith(fetch(event.request).then(response => {
return response.ok ? response : caches.match(event.request);
}).catch(() => caches.match(event.request)));
}
});
async function handleResult(response, event, body) {
const getRequest = new Request(event.request.url, { method: 'GET' });
const cache = await caches.open('v1');
await idbKeyval.set(event.request.method + '.' + event.request.url, { url: event.request.url, method: event.request.method, body });
const returnResponse = response && response.ok ? response : new Response(body);
cache.put(getRequest, returnResponse.clone());
return returnResponse;
}
// Function to call when the network is supposed to be available
async function sync() {
const keys = await idbKeyval.keys();
for (const key of keys) {
try {
const { url, method, body } = await idbKeyval.get(key);
const response = await fetch(url, { method, body });
if (response && response.ok)
await idbKeyval.delete(key);
}
catch (e) {
console.warn(`An error occurred while trying to sync the request: ${key}`, e);
}
}
}
self.addEventListener('message', sync);
Some words about the solution: it allows caching the PUT request for future GET requests, and it also stores the PUT request in an IndexedDB database for future sync. For the key, I was inspired by Angular's TransferHttpCacheInterceptor, which allows serializing backend requests on the server-side rendered page for use by the browser-rendered page. It uses <verb>.<url> as the key. That assumes a new request will override a previous request with the same verb and URL.
This solution also assumes that the backend does not return 204 No Content as the response to a PUT request, but 200 with the entity in the body.
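If your backend does return 204 for PUT, a small tweak to handleResult (a sketch against the code above) avoids caching an empty entity by falling back to the body that was uploaded:
async function handleResult(response, event, body) {
  const getRequest = new Request(event.request.url, { method: 'GET' });
  const cache = await caches.open('v1');
  await idbKeyval.set(event.request.method + '.' + event.request.url, { url: event.request.url, method: event.request.method, body });
  // A 204 carries no entity to serve to future GETs, so cache the uploaded body instead
  const hasEntity = response && response.ok && response.status !== 204;
  const returnResponse = hasEntity ? response : new Response(body);
  cache.put(getRequest, returnResponse.clone());
  return returnResponse;
}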
I was also stumbling upon this lately. Here is what I am doing to store the upload in IndexedDB and return a response while offline:
const storeFileAndReturnResponse = async function (request, urlSearchParams) {
let requestClone = request.clone();
let formData = await requestClone.formData();
let tableStore = "fileUploads";
let fileList = [];
let formDataToStore = [];
//Use formData.entries to iterate the collection - this assumes you used input type="file"
for (const pair of formData.entries()) {
let fileObjectUploaded = pair[1];
//content holds the arrayBuffer (blob) of the uploaded file
formDataToStore.push({
key: pair[0],
value: fileObjectUploaded,
content: await fileObjectUploaded.arrayBuffer(),
});
let fileName = fileObjectUploaded.name;
fileList.push({
fileName: fileName,
});
}
let payloadToStore = {
  parentId: parentId, // parentId and idbContext come from the surrounding application code
  fileList: fileList,
  formDataKeyValue: formDataToStore,
};
(await idbContext).put(tableStore, payloadToStore);
return {
UploadedFileList: fileList,
};
};
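To complete the picture, a hypothetical replay step for when the connection returns could rebuild the FormData from the stored payload (a sketch; the endpoint is a placeholder):
const replayStoredUpload = async function (payload, targetUrl) {
  const formData = new FormData();
  for (const entry of payload.formDataKeyValue) {
    // Rebuild a File from the stored ArrayBuffer so the multipart field
    // matches the original upload (name and type survive the structured clone)
    const file = new File([entry.content], entry.value.name, { type: entry.value.type });
    formData.append(entry.key, file);
  }
  return fetch(targetUrl, { method: 'POST', body: formData }); // placeholder endpoint
};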

Push WebAPI + IndexedDB + ServiceWorker

I've implemented the Push Web API in my web application using a service worker, as many articles on the web explain.
Now I need to store some data inside IndexedDB to make it available while the web app is closed (Chrome tab closed, service worker executing in the background).
In particular, I would like to store a simple URL from which to retrieve the notification data (from the server).
Here is my code:
self.addEventListener("push", (event) => {
console.log("[serviceWorker] Push message received", event);
notify({ event: "push" }); // This notifies the push service for handling the notification
var open = indexedDB.open("pushServiceWorkerDb", 1);
open.onsuccess = () => {
var db = open.result;
var tx = db.transaction("urls");
var store = tx.objectStore("urls");
var request = store.get("fetchNotificationDataUrl");
request.onsuccess = (ev) => {
var fetchNotificationDataUrl = request.result;
console.log("[serviceWorker] Fetching notification data from ->", fetchNotificationDataUrl);
if (fetchNotificationDataUrl && fetchNotificationDataUrl.trim().length > 0) {
event.waitUntil(
fetch(fetchNotificationDataUrl, {
credentials: "include"
}).then((response) => {
if (response.status !== 200) {
console.log("[serviceWorker] Looks like there was a problem. Status Code: " + response.status);
throw new Error();
}
return response.json().then((data) => {
if (!data) {
console.error("[serviceWorker] The API returned no data. Showing default notification", data);
//throw new Error();
showDefaultNotification({ url: "/" });
}
var title = data.Title;
var message = data.Message;
var icon = data.Icon;
var tag = data.Tag;
var url = data.Url;
return self.registration.showNotification(title, {
body: message,
icon: icon,
tag: tag,
data: {
url: url
},
requireInteraction: true
});
});
}).catch((err) => {
console.error("[serviceWorker] Unable to retrieve data", err);
var title = "An error occurred";
var message = "We were unable to get the information for this push message";
var icon = "/favicon.ico";
var tag = "notification-error";
return self.registration.showNotification(title, {
body: message,
icon: icon,
tag: tag,
data: {
url: "/"
},
requireInteraction: true
});
})
);
} else {
showDefaultNotification({ url: "/" });
}
}
};
});
Unfortunately, when I receive a new push event it doesn't work, throwing this exception:
Uncaught DOMException: Failed to execute 'waitUntil' on 'ExtendableEvent': The event handler is already finished.
at IDBRequest.request.onsuccess (https://192.168.0.102/pushServiceWorker.js:99:23)
How can I resolve this?
Thanks in advance
The initial call to event.waitUntil() needs to be made synchronously, when the event handler is first invoked. You can then pass a promise chain to event.waitUntil(), and inside that promise chain carry out any number of asynchronous actions.
Your current code invokes an asynchronous IndexedDB callback before it calls event.waitUntil(), which is why you're seeing that error.
The easiest way to include IndexedDB operations inside a promise chain is to use a wrapper library like idb-keyval, which takes the callback-based IndexedDB API and converts it into a promise-based API.
Your code could then look like:
self.addEventListener('push', event => {
  // Call event.waitUntil() immediately:
  event.waitUntil(
    // You can chain together promises:
    idbKeyval.get('fetchNotificationDataUrl')
      .then(url => fetch(url))
      .then(response => response.json())
      .then(json => self.registration.showNotification(...))
  );
});
