DOMException: The operation is insecure during cache adding or retrieving - javascript

I want to add caching to my site. I am using the following two functions to add and retrieve cached data. It works fine during development, but when I build the app with Docker, caching stops working and I get the following error: Uncaught (in promise) DOMException: The operation is insecure.
Here is my code.
export const addDataIntoCache = (cacheName, url, response) => {
  // Convert our data into an actual Response object
  const data = new Response(JSON.stringify(response));
  if ('caches' in window) {
    // Open the given cache and put our data into it
    caches.open(cacheName).then((cache) => {
      cache.put(url, data);
      console.log('Data added into cache!');
    });
  } else {
    console.log('Unable to find caches in window');
  }
};
export const getSingleCacheData = async (cacheName, url) => {
  if (typeof caches === 'undefined') {
    console.log("cache is undefined");
    return null;
  }
  const cacheStorage = await caches.open(cacheName);
  const cachedResponse = await cacheStorage.match(url);
  // If no cache exists
  if (!cachedResponse || !cachedResponse.ok) {
    console.log('Fetch failed!');
    return null;
  }
  return cachedResponse.json().then((item) => {
    console.log("fetched from cache");
    console.log(item);
    return item;
  });
};
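For what it's worth, the most common cause of this particular DOMException is that the Cache API is only available in secure contexts (HTTPS or http://localhost). That would explain why it works in development but fails once the Docker build is served over plain HTTP from a non-localhost host. A minimal guard sketch, assuming that is the cause (canUseCacheStorage and safeAddDataIntoCache are illustrative names, not part of the original code):
// Sketch: the Cache API requires a secure context (window.isSecureContext),
// so check for it before touching caches.
export const canUseCacheStorage = () =>
  typeof window !== 'undefined' && window.isSecureContext && 'caches' in window;

export const safeAddDataIntoCache = async (cacheName, url, response) => {
  if (!canUseCacheStorage()) {
    console.warn('Cache API unavailable: page is not a secure context');
    return;
  }
  try {
    const cache = await caches.open(cacheName);
    await cache.put(url, new Response(JSON.stringify(response)));
  } catch (e) {
    // Some browsers still throw a SecurityError here (e.g. private browsing)
    console.warn('Caching failed:', e);
  }
};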

Related

HTTP requests being dropped in Chrome extension

Summary:
I've built a Chrome extension that reaches out to an external API to fetch some data. Sometimes that data returns quickly, sometimes it takes 4 seconds or so. I'm often doing about 5-10 requests in rapid succession (this is a scraping tool).
Previously, a lot of requests were dropped because the Manifest V3 service worker randomly shuts down. I thought I had resolved that. Then I realized there was a race condition because local storage doesn't have a proper queue.
Current error: even with all these fixes, requests are still being dropped. The external API returns the correct data successfully, but it seems like the extension never receives it. Hoping someone can point me in the right direction.
Relevant code is attached; I imagine it will help anyone dealing with these queue and service worker issues.
Local Storage queue
let writing: Map<string, Promise<any>> = new Map();

let updateUnsynchronized = async (ks: string[], f: Function) => {
  let m = await new Promise((resolve, reject) => {
    chrome.storage.local.get(ks, res => {
      let m = {};
      for (let k of ks) {
        m[k] = res[k];
      }
      maybeResolveLocalStorage(resolve, reject, m);
    });
  });
  // Guaranteed to have not changed in the meantime
  let updated = await new Promise((resolve, reject) => {
    let updateMap = f(m);
    chrome.storage.local.set(updateMap, () => {
      maybeResolveLocalStorage(resolve, reject, updateMap);
    });
  });
  console.log(ks, 'Updated', updated);
  return updated;
};
export async function update(ks: string[], f: Function) {
  let ret = null;
  // Global lock for now
  await navigator.locks.request('global-storage-lock', async lock => {
    ret = await updateUnsynchronized(ks, f);
  });
  return ret;
}
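For illustration, a hypothetical caller (inside an async function) might look like the sketch below; because update() serializes every read-modify-write through the navigator.locks request, two concurrent calls cannot both read the same stale value. The scrapeCount key is made up for this example:
// Hypothetical usage: atomically increment a counter under the global lock.
const count = await update(['scrapeCount'], storage => {
  return { scrapeCount: (storage.scrapeCount ?? 0) + 1 };
});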
Here's the main function
export async function appendStoredScrapes(
  scrape: any,
  fromHTTPResponse: boolean
) {
  let updated = await update(['urlType', 'scrapes'], storage => {
    const urlType = storage.urlType;
    const scrapes = storage.scrapes;
    const {url} = scrape;
    if (fromHTTPResponse) {
      // We want to make sure that the url type at time of scrape, not time of return, is used
      scrapes[url] = {...scrapes[url], ...scrape};
    } else {
      scrapes[url] = {...scrapes[url], ...scrape, urlType};
    }
    return {scrapes};
  });
  chrome.action.setBadgeText({text: `${Object.keys(updated['scrapes']).length}`});
}
Keeping the service worker alive
let defaultKeepAliveInterval = 20000;
// To avoid GC
let channel;

// To be run in content scripts
export function contentKeepAlive(name : string) {
  channel = chrome.runtime.connect({ name });
  channel.onDisconnect.addListener(() => contentKeepAlive(name));
  channel.onMessage.addListener(msg => { });
}

let deleteTimer = (chan : any) => {
  if (chan._timer) {
    clearTimeout(chan._timer);
    delete chan._timer;
  }
}

let backgroundForceReconnect = (chan : chrome.runtime.Port) => {
  deleteTimer(chan);
  chan.disconnect();
}

// To be run in background scripts
export function backgroundKeepAlive(name : string) {
  chrome.runtime.onConnect.addListener(chan => {
    if (chan.name === name) {
      channel = chan;
      channel.onMessage.addListener((msg, chan) => { });
      channel.onDisconnect.addListener(deleteTimer);
      channel._timer = setTimeout(backgroundForceReconnect, defaultKeepAliveInterval, channel);
    }
  });
}

// "Always call sendResponse() in your chrome.runtime.onMessage listener even if you don't need
// the response. This is a bug in MV3." — https://stackoverflow.com/questions/66618136/persistent-service-worker-in-chrome-extension
export function defaultSendResponse (sendResponse : Function) {
  sendResponse({ farewell: 'goodbye' });
}
Relevant parts of background.ts
backgroundKeepAlive('extension-background');

let listen = async (request, sender, sendResponse) => {
  try {
    if (request.message === 'SEND_URL_DETAIL') {
      const {url, website, urlType} = request;
      await appendStoredScrapes({url}, false);
      let data = await fetchPageData(url, website, urlType);
      console.log(data, url, 'fetch data returned background');
      await appendStoredScrapes(data, true);
      defaultSendResponse(sendResponse);
    } else if (request.message === 'KEEPALIVE') {
      sendResponse({isAlive: true});
    } else {
      defaultSendResponse(sendResponse);
    }
  } catch (e) {
    console.error('background listener error', e);
  }
};

chrome.runtime.onMessage.addListener(function (request, sender, sendResponse) {
  listen(request, sender, sendResponse);
});
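One likely culprit, assuming the content script is waiting on sendResponse: Chrome closes the message channel as soon as the onMessage listener returns, unless the listener returns true. Since listen() is async, the wrapper above returns undefined before any of the awaits finish, so the late sendResponse call is silently dropped and the request looks lost. A minimal fix sketch:
chrome.runtime.onMessage.addListener(function (request, sender, sendResponse) {
  listen(request, sender, sendResponse);
  // Returning true keeps the channel open so the async sendResponse
  // inside listen() still reaches the caller.
  return true;
});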

Progressive Web App doesn't fetch my cached files

I am trying to turn my weather app into a PWA, and I want to show an offline page if the user loses the connection.
So I've managed to put the HTML and related resources (like scripts or SVGs) into the browser's cache, but when I go offline, only the HTML page loads, and not the other assets...
Here are the files that are in the cache:
And here are the errors that appear in the console and in the network tab when I go offline:
As you can see, only the KUTE.js library (which doesn't work even though it is apparently loaded?!), which comes from a CDN, and the things imported by the CSS (I put the CSS directly in my HTML page) are loaded.
--- If you are wondering what the "en" file is: I made a translation system with Express, EJS and cookies, so when you go to /en or /fr in the URL, it translates the page into either English or French. ---
Finally, here is the code of my service worker :
const OFFLINE_VERSION = 1;
const CACHE_NAME = "offline";
const OFFLINE_URL = "offline.html";
const BASE = location.protocol + "//" + location.host;
const CACHED_FILES = [
  "https://cdn.jsdelivr.net/npm/kute.js@2.1.2/dist/kute.min.js",
  `${BASE}/src/favicon/favicon.ico`,
  `${BASE}/src/favicon/android-chrome-192x192.png`,
  `${BASE}/src/favicon/android-chrome-512x512.png`,
  `${BASE}/src/favicon/apple-touch-icon.png`,
  `${BASE}/src/favicon/favicon-16x16.png`,
  `${BASE}/src/favicon/favicon-32x32.png`,
  `${BASE}/src/svg/layered-waves.svg`,
  `${BASE}/js/background.js`,
  `${BASE}/js/animation-blob.js`
];
self.addEventListener('install', (event) => {
  event.waitUntil((async () => {
    const cache = await caches.open(CACHE_NAME);
    await Promise.all(
      [...CACHED_FILES, OFFLINE_URL].map((path) => {
        return cache.add(new Request(path, {cache: "reload"}));
      })
    );
  })());
  self.skipWaiting();
});

self.addEventListener('activate', (event) => {
  event.waitUntil((async () => {
    if ("navigationPreload" in self.registration) {
      await self.registration.navigationPreload.enable();
    }
  })());
  self.clients.claim();
});

self.addEventListener('fetch', (event) => {
  if (event.request.mode === "navigate") {
    event.respondWith((async () => {
      try {
        const preloadResponse = await event.preloadResponse;
        if (preloadResponse) {
          return preloadResponse;
        }
        return await fetch(event.request);
      } catch (e) {
        const cache = await caches.open(CACHE_NAME);
        return await cache.match(OFFLINE_URL);
      }
    })());
  }
});
It's the "regular" code for creating an offline page, except that I add multiple files to the cache.
So do you know why I can't fetch my other cached files?
Thank you in advance!
This function acts as a proxy to decide whether it should fetch data from the cache or from the network:
self.addEventListener('fetch', (event) => {
  if (event.request.mode === "navigate") {
    event.respondWith((async () => {
      try {
        const preloadResponse = await event.preloadResponse;
        if (preloadResponse) {
          return preloadResponse;
        }
        return await fetch(event.request);
      } catch (e) {
        const cache = await caches.open(CACHE_NAME);
        return await cache.match(OFFLINE_URL);
      }
    })());
  }
});
But the line if(event.request.mode === "navigate") applies this behavior only to navigation requests (when the browser loads a new page). So you need a minor change to make it work; you can try something like this:
self.addEventListener('fetch', (event) => {
  event.respondWith((async () => {
    try {
      const preloadResponse = await event.preloadResponse;
      if (preloadResponse) {
        return preloadResponse;
      }
      return await fetch(event.request);
    } catch (e) {
      const cache = await caches.open(CACHE_NAME);
      return await cache.match(event.request);
    }
  })());
});
This should normally make it work, but note that every request will now pass through the service worker, not only navigation requests.
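Note also that the fallback cache.match(event.request) only succeeds for URLs that were precached at install time. If you would rather avoid hitting the network for precached assets at all, a cache-first variant could look like the following sketch (reusing the CACHE_NAME and OFFLINE_URL constants from the question):
self.addEventListener('fetch', (event) => {
  event.respondWith((async () => {
    // Serve precached assets directly from the cache
    const cached = await caches.match(event.request);
    if (cached) {
      return cached;
    }
    try {
      return await fetch(event.request);
    } catch (e) {
      // Network failed and nothing was precached for this URL;
      // fall back to the offline page for navigations only
      if (event.request.mode === 'navigate') {
        const cache = await caches.open(CACHE_NAME);
        return await cache.match(OFFLINE_URL);
      }
      throw e;
    }
  })());
});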

Object property coming up as undefined after assigning with an async fetch

I'm having some trouble with adding a key to an object as seen here:
const recursiveFetchAndWait = useCallback(
  (url) => {
    setLoading(true);
    fetch(url)
      .then(async response => {
        if (response.status === 200) { // Checking for response code 200
          const xml = await response.text();
          setLoading(false);
          return XML2JS.parseString(xml, (err, result) => { // xml2js: converts XML to JSON
            if (result.items.$.totalitems !== '0') { // Only processing further if there are returned results
              result.items.item.forEach(game => {
                /* Fetching the statistics from a separate API, because the base API doesn't include these */
                const gameId = game.$.objectid;
                fetch('https://cors-anywhere.herokuapp.com/https://www.boardgamegeek.com/xmlapi2/thing?id=' + gameId + '&stats=1')
                  .then(async response => {
                    const xml = await response.text();
                    return XML2JS.parseString(xml, (err, result) => {
                      console.log('result', result); // This returns data.
                      game.statistics = result.items.item[0].statistics[0].ratings[0];
                      // setStatistics(...{statistics}, ...{gameId: result.items.item[0].statistics[0].ratings[0]})
                    })
                  })
                console.log('game', game); // This returns the object with the newly added statistics key.
                console.log('STATS!', game.statistics); // This doesn't recognize the statistics key?!
                /* Going through the array and changing default values and converting string numbers to actual numbers */
                if (game.stats[0].rating[0].ranks[0].rank[0].$.value === 'Not Ranked')
                  game.stats[0].rating[0].ranks[0].rank[0].$.value = 'N/A';
                else {
                  game.stats[0].rating[0].ranks[0].rank[0].$.value = Number(game.stats[0].rating[0].ranks[0].rank[0].$.value);
                }
                game.stats[0].$.minplayers = Number(game.stats[0].$.minplayers);
                if (isNaN(game.stats[0].$.minplayers))
                  game.stats[0].$.minplayers = '--';
                game.stats[0].$.maxplayers = Number(game.stats[0].$.maxplayers);
                if (isNaN(game.stats[0].$.maxplayers))
                  game.stats[0].$.maxplayers = '--';
                game.stats[0].$.maxplaytime = Number(game.stats[0].$.maxplaytime);
                if (isNaN(game.stats[0].$.maxplaytime))
                  game.stats[0].$.maxplaytime = '--';
                if (game.yearpublished === undefined)
                  game.yearpublished = ['--'];
              });
              setGameList(result.items.item)
            }
          });
        } else if (response.status === 202) { // If the status response was 202 (API still retrieving data), call the fetch again after a set timeout
          setTimeoutAsCallback(() => recursiveFetchAndWait(url));
        } else
          console.log(response.status);
      })
  },
  [],
);
Here are the results from the console.logs:
I fear the issue relates to the async call, but I'm confused as to why the first console.log() works fine then. If it is an async issue, how do I go about resolving this?
Your first console.log works because the "game" variable already exists and contains data before you even make the async fetch request. You could call it before the fetch and it would still be fine.
Your second console.log, which tries to output "game.statistics", runs before the fetch has returned any data. Async calls do not stop and wait for the task to complete before the following lines of code run; that is the intended purpose of an asynchronous block. The callback runs with the response once it has returned, so anything that relies on the returned data must happen inside it, while the browser is free to continue through the rest of the code in the meantime.
To achieve what you seem to be attempting, you could place the task that you need to run after getting the data in a separate function, and then call it with the response:
games.forEach(game => {
  fetch('https://www.boardgamegeek.com/xmlapi2/thing?id=' + game.$.objectid + '&stats=1')
    .then(response => {
      processData(response, game);
    })
});

const processData = async (response, game) => {
  const xml = await response.text(); // text() returns a promise, so it must be awaited
  XML2JS.parseString(xml, (err, result) => {
    game.statistics = result.items.item[0].statistics[0].ratings[0];
  })
  console.log('game', game);
  console.log('STATS!', game.statistics);
}
Or you could explicitly tell it to wait for the completion of the async task before moving on. This requires you either to use promises or to wrap the entire games forEach loop in an async function, because only an async function can await another async function called inside of itself.
Code for the updated question
The editor won't let me format code properly anymore, but essentially the simplest solution is that all your data-handling logic should be executed within the XML callback. From your shared code I can't see any requirement for it to exist outside of the callback where the data is handled after it has been retrieved.
const recursiveFetchAndWait = useCallback(
  (url) => {
    setLoading(true);
    fetch(url)
      .then(async response => {
        if (response.status === 200) { // Checking for response code 200
          const xml = await response.text();
          setLoading(false);
          return XML2JS.parseString(xml, (err, result) => { // xml2js: converts XML to JSON
            if (result.items.$.totalitems !== '0') { // Only processing further if there are returned results
              result.items.item.forEach(game => {
                /* Fetching the statistics from a separate API, because the base API doesn't include these */
                const gameId = game.$.objectid;
                fetch('https://cors-anywhere.herokuapp.com/https://www.boardgamegeek.com/xmlapi2/thing?id=' + gameId + '&stats=1')
                  .then(async response => {
                    const xml = await response.text();
                    return XML2JS.parseString(xml, (err, result) => {
                      // BEGINNING OF "XML2JS.parseString"
                      console.log('result', result); // This returns data.
                      game.statistics = result.items.item[0].statistics[0].ratings[0];
                      // setStatistics(...{statistics}, ...{gameId: result.items.item[0].statistics[0].ratings[0]})
                      console.log('game', game); // The object now has the statistics key.
                      console.log('STATS!', game.statistics); // Runs after the assignment, so this works now.
                      /* Going through the array and changing default values and converting string numbers to actual numbers */
                      if (game.stats[0].rating[0].ranks[0].rank[0].$.value === 'Not Ranked')
                        game.stats[0].rating[0].ranks[0].rank[0].$.value = 'N/A';
                      else {
                        game.stats[0].rating[0].ranks[0].rank[0].$.value = Number(game.stats[0].rating[0].ranks[0].rank[0].$.value);
                      }
                      game.stats[0].$.minplayers = Number(game.stats[0].$.minplayers);
                      if (isNaN(game.stats[0].$.minplayers))
                        game.stats[0].$.minplayers = '--';
                      game.stats[0].$.maxplayers = Number(game.stats[0].$.maxplayers);
                      if (isNaN(game.stats[0].$.maxplayers))
                        game.stats[0].$.maxplayers = '--';
                      game.stats[0].$.maxplaytime = Number(game.stats[0].$.maxplaytime);
                      if (isNaN(game.stats[0].$.maxplaytime))
                        game.stats[0].$.maxplaytime = '--';
                      if (game.yearpublished === undefined)
                        game.yearpublished = ['--'];
                      setGameList(game); // The forEach means that result.items.item == game
                      // END OF "XML2JS.parseString"
                    })
                  })
              });
            }
          });
        } else if (response.status === 202) { // If the status response was 202 (API still retrieving data), call the fetch again after a set timeout
          setTimeoutAsCallback(() => recursiveFetchAndWait(url));
        } else
          console.log(response.status);
      })
  },
  [],
);
Move console.log('STATS!', game.statistics); immediately below game.statistics =.
Or, do everything inside an async function:
(async () => {
  for (const game of games) {
    const response = await fetch('https://www.boardgamegeek.com/xmlapi2/thing?id=' + game.$.objectid + '&stats=1');
    const xml = await response.text();
    await new Promise(resolve => {
      XML2JS.parseString(xml, (err, result) => {
        game.statistics = result.items.item[0].statistics[0].ratings[0];
        resolve();
      });
    });
  }
  games.forEach(game => {
    console.log('game', game);
    console.log('STATS!', game.statistics);
  });
})();
If you want to create a sequential flow, you can do it as below:
(async () => {
  await Promise.all(games.map(async game => {
    await new Promise((resolve) => {
      fetch('https://www.boardgamegeek.com/xmlapi2/thing?id=' + game.$.objectid + '&stats=1')
        .then(async response => {
          const xml = await response.text(); // XML2JS boilerplate
          xml2js.parseString(xml, (err, result) => { // XML2JS boilerplate
            console.log('result', result); // This returns data.
            game.statistics = result.items.item[0].statistics[0].ratings[0]; // Create a new statistics key on the game object and assign it the statistics from the API call
            resolve();
          });
        });
    });
  }));
  games.forEach(game => {
    console.log('game', game);
    console.log('STATS!', game.statistics);
  });
})();

How to cache data from async function that uses fetch in Node

I was trying to see if there is a way to cache a JSON response from an async fetch call, possibly using an LRU.
I've tried using several packages, such as node-cache and lru-cache, but I don't think they worked, because my function is asynchronous.
This is what my fetch function basically looks like:
const jsonFetch = async (url) => {
  try {
    const response = await fetch(url)
    const json = await response.json();
    return json
  }
  catch (error) {
    console.log(error)
  }
}
For example, if someone hits my route 20 times in a minute, I'd like to fetch the data once and return the response within 0.03 ms instead of 0.3 ms. Currently, it always uses the URL to fetch the data.
This has been here for a while, but I agree with the comment from #sleepy012: if you want to avoid parallel calls, the trick is to cache the promise, not only the value. So something like this should work:
let cache = {}

function cacheAsync(loader) {
  return async (url) => {
    if (url in cache) { // return cached result if available
      console.log("cache hit")
      return cache[url]
    }
    try {
      const responsePromise = loader(url)
      cache[url] = responsePromise
      return responsePromise
    }
    catch (error) {
      console.log('Error', error.message)
    }
  };
}
function delayedLoader(url) {
  console.log('Loading url: ' + url)
  return new Promise((r) => setTimeout(r, 1000, 'Returning ' + url));
}

const cachedLoader = cacheAsync(delayedLoader);
cachedLoader('url1').then((d) => console.log('First load got: ' + d));
cachedLoader('url1').then((d) => console.log('Second load got: ' + d));
cachedLoader('url2').then((d) => console.log('Third load got: ' + d));
cachedLoader('url2').then((d) => console.log('Fourth load got: ' + d));
console.log('Waiting for load to complete');
There's nothing about async functions that prevents caching results. It's possible the libraries you're looking at can't handle promises, but here's a basic proof of concept that might help get things started:
let cache = {}

const jsonFetch = async (url) => {
  if (url in cache) { // return cached result if available
    console.log("cache hit")
    return cache[url]
  }
  try {
    const response = await fetch(url)
    const json = await response.json()
    cache[url] = json // cache response keyed to url
    return json
  }
  catch (error) {
    console.log(error)
  }
}
jsonFetch("https://jsonplaceholder.typicode.com/todos/1").then((user) => console.log(user.id))
// should be cached -- same url
setTimeout(() => jsonFetch("https://jsonplaceholder.typicode.com/todos/1").then((user) => console.log(user.id)), 2000)
// not in cache
setTimeout(() => jsonFetch("https://jsonplaceholder.typicode.com/todos/2").then((user) => console.log(user.id)), 2000)
Note that you will only get cache hits on requests made after the first request has returned a value to cache.
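As for the packages mentioned in the question: they work fine with async code as long as you store the promise. Below is a sketch combining the promise-caching trick above with lru-cache for eviction and expiry, assuming a recent lru-cache (v10+, which has a named LRUCache export and max/ttl options) and Node 18+ for the global fetch:
const { LRUCache } = require('lru-cache');

// Evict least-recently-used entries beyond 100; expire entries after one minute
const cache = new LRUCache({ max: 100, ttl: 60_000 });

const jsonFetch = (url) => {
  if (cache.has(url)) return cache.get(url); // cached promise: parallel calls coalesce
  const promise = fetch(url)
    .then((response) => response.json())
    .catch((error) => {
      cache.delete(url); // don't keep failed requests in the cache
      throw error;
    });
  cache.set(url, promise);
  return promise;
};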

How to retrieve all posts of a user via Facebook Graph API using promises and recursion?

I am currently developing a web app which uses the Facebook Graph API.
What I would like to achieve is to get all posts of a user.
However, this is not that easy, since I have to paginate the results.
At the moment I am struggling with promises.
What I am trying to achieve is to fill an array with the post objects.
Therefore I use promises and recursion, which does not work as expected.
My code currently looks as follows:
// Here I retrieve the user with his or her posts,
// just the first 25 due to pagination
if (accessToken) {
  return new Promise(resolve => {
    FB.api('/me?fields=id,name,posts&access_token=' + accessToken, response => {
      this.get('currentUser').set('content', response);
      resolve()
    })
  })
}
// Returns all posts of a given user
function getAllPostsOfUser(posts, postsArr) {
  // Process each post of the current pagination level
  for (var post of posts.data) {
    // Only store meaningful posts
    if (post !== undefined && post.message !== undefined) {
      postsArr.push(post)
    }
  }
  // Further posts are retrievable via paging.next, which is a url
  if (posts.data.length !== 0 && posts.paging.next !== undefined) {
    FB.api(posts.paging.next, response => {
      getAllPostsOfUser(response, postsArr)
      resolve()
    })
  }
  return postsArr
}

var posts = getAllPostsOfUser(this.get('currentUser').content.posts, [])
// I want to use all the posts here
console.log(posts)
var posts = getAllPostsOfUser(this.get('currentUser').content.posts, [])
// I want to use all the posts here
console.log(posts)
The problem I have is that I want to use the posts where the console.log is placed, but when I log the posts array, a lot of posts are missing.
I am sure that I did something wrong with the promises but I do not know what.
I would be glad if anyone could guide me to a solution.
Thank you in advance.
Try this:
function getAllPosts() {
  return new Promise((resolve, reject) => {
    let postsArr = [];
    function recursiveAPICall(apiURL) {
      FB.api(apiURL, (response) => {
        if (response && response.data) {
          // add response to posts array (merge arrays), check if there is more data via paging
          postsArr = postsArr.concat(response.data);
          if (response.paging && response.paging.next) {
            recursiveAPICall(response.paging.next);
          } else {
            resolve(postsArr);
          }
        } else {
          reject();
        }
      });
    }
    recursiveAPICall("/me/posts?fields=message&limit=100");
  });
}

getAllPosts()
  .then((response) => {
    console.log(response);
  })
  .catch((e) => {
    console.log(e);
  });
Not tested, just a quick example I came up with. It returns a promise and uses a recursive function to get all entries. BTW, you don't need to add the Access Token. If you are logged in, the SDK will use it internally.
This is an old question that has already been answered, but I thought it could use a more modern answer, considering how many lines of code it saves. This code has not been tested against the real API, but it should work.
This function returns a promise of an array of posts.
async function getPosts(url = "/me/posts?fields=message&limit=100") {
  const { error, paging, data } = (await new Promise(r => FB.api(url, r))) || {}
  if (error || !data) throw new Error(error || "Could not get posts")
  return data.concat(paging?.next ? await getPosts(paging.next) : [])
}
With comments:
async function getPosts(url = "/me/posts?fields=message&limit=100") {
  // get response data out of callback
  const { error, paging, data } = (await new Promise(r => FB.api(url, r))) || {}
  // if there was an error or there wasn't any data, throw
  if (error || !data) throw new Error(error || "Could not get posts")
  // return this page's data + if there's a next page, recursively get its data
  return data.concat(paging?.next ? await getPosts(paging.next) : [])
}
The function can then be consumed like so:
async function main() {
  try {
    const posts = await getPosts(); // the array of posts
    console.log(posts);
  } catch (error) {
    console.error(error);
  }
}
main();
Below is a snippet demonstrating the function using a fake API.
// fake api for testing purposes
const FB = {
  api(url, callback) {
    const pages = [
      {
        data: ["post1", "post2", "post3"],
        paging: { next: 1 },
      },
      {
        data: ["post4", "post5", "post6"],
        paging: { next: 2 },
      },
      {
        data: ["post7", "post8", "post9"],
      },
    ];
    if (typeof url !== "number") return callback(pages[0]);
    return callback(pages[url]);
  },
};

async function getPosts(url = "/me/posts?fields=message&limit=100") {
  const { error, paging, data } = (await new Promise(r => FB.api(url, r))) || {}
  if (error || !data) throw new Error(error || "Could not get posts")
  return data.concat(paging?.next ? await getPosts(paging.next) : [])
}

async function main() {
  try {
    const posts = await getPosts(); // the array of posts
    console.log(posts);
  } catch (error) {
    console.error(error);
  }
}
main();
Also see erikhagreis's ESM wrapper on GitHub.
