I need to implement service worker file caching, but using the no-cors flag.
This is because I get a CORS error on my web server with the code below. The code is from the standard Ionic 2 starter template (inside serviceworker.js). I can't use the standard code because for some reason the requests trigger an authentication flow in which there is a redirect to some URL, which fails because of a CORS error.
What would be the nicest (i.e. easiest) way to do that?
// TODO: Implement this without CORS (set no-cors flag)
self.toolbox.precache(
[
// './build/main.js',
// './build/vendor.js',
// './build/main.css',
// './build/polyfills.js',
// 'index.html',
// 'manifest.json'
]
);
EDIT: It's not really an authentication error that happens; the user is definitely already authenticated. But because of the redirect during authentication, the requests for the files above go wrong. I found this article: What is an opaque request, and what it serves for?, which indicates that setting the no-cors flag would be the solution. The error I get, like on that page, is:
No 'Access-Control-Allow-Origin' header is present on the requested resource.
Origin 'http://abc' is therefore not allowed access.
If an opaque response serves your needs, set the request's mode to 'no-cors'
to fetch the resource with CORS disabled.
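For reference, this is roughly what a no-cors request looks like and what comes back (the URL is a made-up placeholder): the response is opaque, so its status, headers and body can't be inspected, but it can still be stored with cache.put() and served later by the service worker.
fetch('https://some-sharepoint-host/build/main.js', { mode: 'no-cors' })
  .then(function(response) {
    // Hypothetical URL; a cross-origin fetch with mode 'no-cors' resolves with
    // an opaque response instead of rejecting with a CORS error.
    console.log(response.type);   // 'opaque'
    console.log(response.status); // 0 - not readable, but cacheable
  });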
I solved it myself as shown below. The install event is what triggers the app to store the files locally.
/**
* Check out https://googlechromelabs.github.io/sw-toolbox/ for
* more info on how to use sw-toolbox to custom configure your service worker.
*/
'use strict';
importScripts('./build/sw-toolbox.js');
self.toolbox.options.cache = {
name: 'ionic-cache'
};
// pre-cache our key assets
// TODO: Implement this without using CORS (set no-cors flag)
/*
self.toolbox.precache(
[
'./build/main.js',
'./build/vendor.js',
'./build/main.css',
'./build/polyfills.js',
'index.html',
'manifest.json'
]
);
*/
// MANUAL precaching in order to evade CORS error on SharePoint
var aFilesToCache = [
'./assets/json/propertyLUT.js',
'./assets/json/propertyvalues.js',
'./build/main.js',
'./build/vendor.js',
'./build/main.css',
'./build/polyfills.js',
'index.html',
'manifest.json'
];
self.addEventListener('fetch', function(event) {
  console.log('Handling fetch event for', event.request.url);
  event.respondWith(
    // Open the 'pwa_' cache and try to serve the request from it.
    caches.open('pwa_').then(function(cache) {
      return cache.match(event.request).then(function(response) {
        if (response) {
          console.log('Found response in cache:', response);
          return response;
        }
        // Not in the cache: fall back to the network.
        return fetch(event.request);
      }).catch(function(error) {
        // Handles exceptions that arise from match() or fetch().
        console.error('Error in fetch handler:', error);
        throw error;
      });
    })
  );
});
self.addEventListener('install', event => {
  function onInstall(filesToCache) {
    console.log('Hit event INSTALL');
    return caches.open('pwa_').then(function(cache) {
      return Promise.all(filesToCache.map(function(aUrl) {
        aUrl = resolveURL(aUrl, self.location.href);
        // Fetch with the no-cors flag so the authentication redirect does not
        // fail the request with a CORS error; the stored response is opaque.
        return fetch(aUrl, { mode: 'no-cors' }).then(function(response) {
          return cache.put(aUrl, response.clone());
        });
      }));
    });
  }
  event.waitUntil(
    onInstall(aFilesToCache).then(() => self.skipWaiting())
  );
});
function resolveURL(relative, base) {
var stack = base.split("/"),
parts = relative.split("/");
stack.pop(); // remove current file name (or empty string)
// (omit if "base" is the current folder without trailing slash)
for (var i=0; i<parts.length; i++) {
if (parts[i] == ".")
continue;
if (parts[i] == "..")
stack.pop();
else
stack.push(parts[i]);
}
return stack.join("/");
}
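For illustration, this is roughly how resolveURL resolves the precache paths against the service worker's location (the base URL here is just an example):
// Example (hypothetical base URL):
console.log(resolveURL('./build/main.js', 'https://host/sites/app/serviceworker.js'));
// -> 'https://host/sites/app/build/main.js'
console.log(resolveURL('index.html', 'https://host/sites/app/serviceworker.js'));
// -> 'https://host/sites/app/index.html'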
/*
// Earlier sketch, kept for reference:
aFilesToCache.forEach(function(url) {
  var corsRequest = new Request(url, { mode: 'no-cors' });
  fetch(corsRequest).then(function(response) {
    return caches.open('pwa_').then(function(cache) {
      return cache.put(url, response); // cross-origin no-cors responses will be opaque
    });
  });
});
*/
// dynamically cache any other local assets
self.toolbox.router.any('/*', self.toolbox.fastest);
// for any other requests go to the network, cache,
// and then only use that cached resource if your user goes offline
self.toolbox.router.default = self.toolbox.networkFirst;
I created a new React project with the service worker already written. I am making 3 different API requests in App. But only the results from the Pokémon API get displayed when I disconnect from the internet and reload. The other 2 aren't getting cached. Here is the code in App. Really simple.
function App() {
const [pokemon, setPokemon] = useState([])
const [word, setWord] = useState("")
const [color, setColor] = useState("")
useEffect(() => {
getAPI()
}, [])
const getAPI = async () => {
const response = await fetch("https://pokeapi.co/api/v2/pokemon?limit=100")
const data = await response.json()
setPokemon(data.results)
const response1 = await fetch("https://random-words-api.vercel.app/word")
const data1 = await response1.json()
setWord(data1[0].word)
const response2 = await fetch("https://random-data-api.com/api/color/random_color")
const data2 = await response2.json()
setColor(data2.color_name)
}
return (
<div className="App">
<p>Random Word: {word}</p>
<p>Random Color: {color}</p>
{pokemon.map((p,i) => <p onClick={(e)=> console.log(e.target)} key={i}>{ `${i} - ${p.name} - ${p.url}`}</p>)}
</div>
);
}
export default App;
This is the prebuilt service worker file.
/* eslint-disable no-restricted-globals */
// This service worker can be customized!
// See https://developers.google.com/web/tools/workbox/modules
// for the list of available Workbox modules, or add any other
// code you'd like.
// You can also remove this file if you'd prefer not to use a
// service worker, and the Workbox build step will be skipped.
import { clientsClaim } from 'workbox-core';
import { ExpirationPlugin } from 'workbox-expiration';
import { precacheAndRoute, createHandlerBoundToURL } from 'workbox-precaching';
import { registerRoute } from 'workbox-routing';
import { StaleWhileRevalidate } from 'workbox-strategies';
clientsClaim();
// Precache all of the assets generated by your build process.
// Their URLs are injected into the manifest variable below.
// This variable must be present somewhere in your service worker file,
// even if you decide not to use precaching. See https://cra.link/PWA
precacheAndRoute(self.__WB_MANIFEST);
// Set up App Shell-style routing, so that all navigation requests
// are fulfilled with your index.html shell. Learn more at
// https://developers.google.com/web/fundamentals/architecture/app-shell
const fileExtensionRegexp = new RegExp('/[^/?]+\\.[^/]+$');
registerRoute(
// Return false to exempt requests from being fulfilled by index.html.
({ request, url }) => {
// If this isn't a navigation, skip.
if (request.mode !== 'navigate') {
return false;
} // If this is a URL that starts with /_, skip.
if (url.pathname.startsWith('/_')) {
return false;
} // If this looks like a URL for a resource, because it contains a file extension, skip.
if (url.pathname.match(fileExtensionRegexp)) {
return false;
} // Return true to signal that we want to use the handler.
return true;
},
createHandlerBoundToURL(process.env.PUBLIC_URL + '/index.html')
);
// An example runtime caching route for requests that aren't handled by the
// precache, in this case same-origin .png requests like those from in public/
registerRoute(
// Add in any other file extensions or routing criteria as needed.
({ url }) => url.origin === self.location.origin && url.pathname.endsWith('.png'), // Customize this strategy as needed, e.g., by changing to CacheFirst.
new StaleWhileRevalidate({
cacheName: 'images',
plugins: [
// Ensure that once this runtime cache reaches a maximum size the
// least-recently used images are removed.
new ExpirationPlugin({ maxEntries: 50 }),
],
})
);
// This allows the web app to trigger skipWaiting via
// registration.waiting.postMessage({type: 'SKIP_WAITING'})
self.addEventListener('message', (event) => {
console.error("Message")
if (event.data && event.data.type === 'SKIP_WAITING') {
self.skipWaiting();
}
});
self.addEventListener('fetch', function (event) {
event.respondWith(
fetch(event.request).catch(function () {
return caches.match(event.request);
}),
);
});
// Any other custom service worker logic can go here.
What's the cause of this behaviour?
EDIT: It is so smart that it only caches duplicate responses. But I would like it to cache the last value anyway. How?
OK, I only see that you have a cache match, but I don't see any code that handles the cache when there's no match, e.g. fetching and storing the response.
self.addEventListener('fetch', function(event) {
console.log('Handling fetch event for', event.request.url);
event.respondWith(
caches.open(CURRENT_CACHES.font).then(function(cache) {
return cache.match(event.request).then(function(response) {
if (response) {
// If there is an entry in the cache for event.request, then response will be defined
// and we can just return it. Note that in this example, only font resources are cached.
console.log(' Found response in cache:', response);
return response;
}
// Otherwise, if there is no entry in the cache for event.request, response will be
// undefined, and we need to fetch() the resource.
console.log(' No response for %s found in cache. About to fetch ' +
'from network...', event.request.url);
// We call .clone() on the request since we might use it in a call to cache.put() later on.
// Both fetch() and cache.put() "consume" the request, so we need to make a copy.
// (see https://developer.mozilla.org/en-US/docs/Web/API/Request/clone)
return fetch(event.request.clone()).then(function(response) {
console.log(' Response for %s from network is: %O',
event.request.url, response);
if (response.status < 400 &&
response.headers.has('content-type') &&
response.headers.get('content-type').match(/^font\//i)) {
// This avoids caching responses that we know are errors (i.e. HTTP status code of 4xx or 5xx).
// We also only want to cache responses that correspond to fonts,
// i.e. have a Content-Type response header that starts with "font/".
// Note that for opaque filtered responses (https://fetch.spec.whatwg.org/#concept-filtered-response-opaque)
// we can't access to the response headers, so this check will always fail and the font won't be cached.
// All of the Google Web Fonts are served off of a domain that supports CORS, so that isn't an issue here.
// It is something to keep in mind if you're attempting to cache other resources from a cross-origin
// domain that doesn't support CORS, though!
// We call .clone() on the response to save a copy of it to the cache. By doing so, we get to keep
// the original response object which we will return back to the controlled page.
// (see https://developer.mozilla.org/en-US/docs/Web/API/Request/clone)
console.log(' Caching the response to', event.request.url);
cache.put(event.request, response.clone());
} else {
console.log(' Not caching the response to', event.request.url);
}
// Return the original response object, which will be used to fulfill the resource request.
return response;
});
}).catch(function(error) {
// This catch() will handle exceptions that arise from the match() or fetch() operations.
// Note that a HTTP error response (e.g. 404) will NOT trigger an exception.
// It will return a normal response object that has the appropriate error code set.
console.error(' Error in fetch handler:', error);
throw error;
});
})
);
});
This is from the documentation, https://developer.mozilla.org/en-US/docs/Web/API/Cache
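To address the edit (caching the last value anyway): the fetch handler at the bottom of your service worker only reads from the cache when the network fails; it never writes anything into it. Below is a minimal sketch of a handler that stores the latest successful response for the three API origins from your App component (the 'runtime-api' cache name is just a placeholder, adjust as needed):
self.addEventListener('fetch', function (event) {
  var url = new URL(event.request.url);
  // Only handle the external API calls; leave everything else to Workbox.
  var apiOrigins = [
    'https://pokeapi.co',
    'https://random-words-api.vercel.app',
    'https://random-data-api.com',
  ];
  if (event.request.method !== 'GET' || apiOrigins.indexOf(url.origin) === -1) {
    return;
  }
  event.respondWith(
    fetch(event.request)
      .then(function (response) {
        var copy = response.clone();
        if (response.ok) {
          // Store the latest successful response so it is available offline.
          caches.open('runtime-api').then(function (cache) {
            cache.put(event.request, copy);
          });
        }
        return response;
      })
      .catch(function () {
        // Offline or network error: fall back to the last cached value.
        return caches.match(event.request);
      })
  );
});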
I am trying to get the cookie after a successful login but I can't figure out how.
On the ASP.NET Core Identity API, using Swagger the browser gets the cookie, but when I use the Fetch API I can't get the cookie. I tried returning response.json(); but this does not work. I also have the redirect to the home page on login success, but I'm not sure exactly how to return response.json(); if that is needed.
Both the Identity API and the JS client are running on localhost.
JS - Fetch API - POST:
function IdentityPost(formID, postUrl) {
const currForm = document.getElementById(formID); // Get the Form
var submitBtn = currForm.elements.namedItem("triggerSubmit"); // Get the submit button of the form
// Listen for Form- Submit
currForm.addEventListener('submit',function handler(e)
{
e.preventDefault(); // Prevent page reload on Submit
submitBtn.disabled = true; // Disable the submit button
LoadingMsg(); // Show Loading Message
// Get form data as string---------------------------------------------------------------
const formData = new FormData(this); // "this" = this Form
const searchParams = new URLSearchParams(formData); // Get the form data params
let formQueryString = searchParams.toString(); // Get the form data params as string
// POST ----------------------------------------------------------------------------------
fetch(identityApiUri + postUrl + formQueryString, // #1 = API address, #2 = API controller/method, #3 = form data as string
{
method: 'POST',
credentials: 'same-origin'
}).then(function (response)
{
// IF OK
if (response.status == 200 || response.status == 201) // Status 201 = "Created"
{
RemoveLoadingMsg();
SuccessMsg("Success");
currForm.reset(); // Reset the form
submitBtn.disabled = false; // Enable Submit button
if (document.referrer.split('/')[2] === window.location.host) // Return to previous page if local
{
history.back(); // Go back to previous page
}
else
{
window.location.href = "/"; // RETURN TO Home
}
}
else // If Bad STATUS
{
return Promise.reject(response); // Triggers Catch method
}
}).catch(function (err) // If Exception
{
RemoveLoadingMsg();
// Show Error
try // Because of JSON Parse and err.text()
{
err.text().then(errorMessage => {
var error = errorMessage.substring(1, errorMessage.length - 1); // Remove the [..] from the Msg
ErrorMsg(error); // Get the error and display
});
}
catch(e)
{
console.warn("Post Exception - Probably No connection to hte server");
ErrorMsg(err + " - Server is probably offline"); // Get the error and display
}
submitBtn.disabled = false; // Enable Submit button
console.warn('Post Exception:', err);
});
this.removeEventListener('submit', handler); // Remove Event Listener
});
}
ASP Net Core - Identity API - Startup:
I have enabled CORS Any Origin. I'm not sure if I need to include .AllowCredentials(); if I try to enable it, it says that I can't have .AllowAnyOrigin() enabled. I am accessing the API directly from the client (browser).
using Leanheat.Identity.API.DBContexts;
using Leanheat.Identity.API.Filters;
using Leanheat.Identity.Models;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.HttpsPolicy;
using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Authorization;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.OpenApi.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace Leanheat.Identity.API
{
public class Startup
{
// Startup
public Startup(IConfiguration configuration)
{
Configuration = configuration;
}
public IConfiguration Configuration { get; }
// Configure Services =================================================================================
public void ConfigureServices(IServiceCollection services)// This method gets called by the runtime. Use this method to add services to the container.
{
// Log in - DbContext
services.AddDbContextPool<LeanheatIdentityApiContext>(options =>
options.UseSqlServer(Configuration.GetConnectionString("IdentityContextConnection")));
// UnitOfWork - Filter
services.AddScoped<UnitOfWorkFilter>();
services.AddControllers(config => { config.Filters.AddService<UnitOfWorkFilter>(); }); // UnitOfWork for all Controllers
// CORS - Allow calling the API from WebBrowsers
services.AddCors();
// Log In
services.AddIdentity<ApplicationUser, IdentityRole>(options =>
{
// Password settings
options.Password.RequireDigit = false;
options.Password.RequireLowercase = false;
options.Password.RequireNonAlphanumeric = false;
options.Password.RequireUppercase = false;
options.Password.RequiredLength = 6;
options.Password.RequiredUniqueChars = 1;
}).AddEntityFrameworkStores<LeanheatIdentityApiContext>().AddDefaultTokenProviders(); // AddDefaultTokenProviders is used for the Update Log In Password etc.
// Log In
// Make all controllers protected by default so only authorized users can access them; for anonymous access use [AllowAnonymous] on the controllers.
services.AddMvc(options => {
var policy = new AuthorizationPolicyBuilder()
.RequireAuthenticatedUser()
.Build();
options.Filters.Add(new AuthorizeFilter(policy));
}).AddXmlSerializerFormatters();
//services.AddControllers();
// Swagger
services.AddSwaggerGen(c =>
{
c.SwaggerDoc("v1", new OpenApiInfo { Title = "Leanheat.Identity.API", Version = "v1" });
});
}
// Configure ===========================================================================================
public void Configure(IApplicationBuilder app, IWebHostEnvironment env)// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
{
// Default Code------------------------------------------------------------------------------------>
if (env.IsDevelopment())
{
app.UseDeveloperExceptionPage();
// Swagger
app.UseSwagger();
app.UseSwaggerUI(c => c.SwaggerEndpoint("/swagger/v1/swagger.json", "Leanheat.Identity.API v1"));
}
app.UseHsts(); // HSTS - instruct browsers to use HTTPS
app.UseHttpsRedirection();
app.UseRouting();
// CORS - Allow calling the API from WebBrowsers
app.UseCors(x => x
.AllowAnyMethod()
.AllowAnyHeader()
.AllowAnyOrigin()
.SetIsOriginAllowed(origin => true));// allow any origin
// Log In
app.UseAuthentication(); // UseAuthentication SHOULD ALWAYS BE BEFORE Authorization
app.UseAuthorization();
app.UseEndpoints(endpoints =>
{
endpoints.MapControllers();
});
}
}
}
The login Method in the Identity API:
// Log In ===================================================================================
[HttpPost]
[Route("LogIn")]
[AllowAnonymous]
public async Task<IActionResult> LogIn(string email, string password, bool rememberMe)
{
if(email != null && password !=null)
{
var result = await signInManager.PasswordSignInAsync(email, password, rememberMe, false);
if (result.Succeeded) // If Login Ok
{
return new JsonResult(result);
}
return StatusCode(401, "[\n \"Invalid Log In\" \n]"); // If errors, return them
}
return StatusCode(401, "[\n \"Email or Password cant be empty\" \n]");
}
Using Swagger I can get the cookie in the browser.
EDIT - Almost working:
I added this to the Identity API in Startup.cs:
services.AddCors(options =>
{
options.AddDefaultPolicy(builder =>
builder.SetIsOriginAllowed(_ => true)
.AllowAnyMethod()
.AllowAnyHeader()
.AllowCredentials());
});
And in the Js Post Code:
// POST ----------------------------------------------------------------------------------
fetch(identityApiUri + postUrl + formQueryString, // #1 = API address, #2 = API controller/method, #3 = form data as string
{
method: 'POST',
mode: 'no-cors',
headers: {
'Access-Control-Allow-Origin': '*'
},
credentials: 'include'
And now I get the cookie, but I also get an exception.
The server is returning status code 200 and I can now get the cookie, but I get an exception in the fetch API POST method.
Ok: false - but I get the cookie and the server returns status 200.
You're calling fetch() from a different origin than the api, right? If so, this sounds like a simple CORS issue.
By default, CORS does not include credentials such as cookies. You have to opt in by both setting the credentials mode on the client side, and the Access-Control-Allow-Credentials header on the server side.
https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Credentials
With the fetch() API, the credentials mode is set with the credentials: 'include' option. As for the server side, I'm not familiar with ASP but it sounds like it provides some convenience method to set the relevant header.
As you hint at in your post, when the Access-Control-Allow-Credentials header is set to true, the * value (meaning any origin) can't actually be used in the Access-Control-Allow-Origin header, so you will have to be explicit about which origin you want to allow, i.e. the origin of your client application, an origin being the combination of protocol, domain, and port.
Now it works, thanks to @IAmDranged.
JS - Fetch Api - Post Method:
fetch(identityApiUri + postUrl + formQueryString, // #1 = API address, #2 = API controller/method, #3 = form data as string
{
method: 'POST',
mode: 'cors',
//headers: {
// 'Access-Control-Allow-Origin': 'https://localhost',
//},
credentials: 'include'
}).then(function (response)
{ ...........................
Asp Net core Identity API - Startup.cs:
// Configure ===========================================================================================
public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
{
app.UseRouting();
// CORS
app.UseCors(x => x
.AllowAnyMethod()
.AllowAnyHeader()
.AllowCredentials()
.WithOrigins("https://localhost:44351")); // Allow only this origin
//.SetIsOriginAllowed(origin => true));// Allow any origin
.................................................
So the fix was:
Add to JS - fetch method:
mode: 'cors',
credentials: 'include'
Add to ASP.NET Core - Startup.cs:
app.UseCors(x => x
.AllowAnyMethod()
.AllowAnyHeader()
.AllowCredentials()
.WithOrigins("https://localhost:44351")); // Allow only this origin
//.SetIsOriginAllowed(origin => true));// Allow any origin
The .WithOrigins("https://localhost:44351") setting allows only that client origin to use the API; if I use .SetIsOriginAllowed(origin => true) without the WithOrigins part, it allows all origins.
I am implementing a service worker on my web application.
This is my serviceWorker.js:
var CACHE_NAME = 'my-cache';
var urlsToCache = [
'img/logout.png'
];
self.addEventListener('install', function(event) {
event.waitUntil(
caches.open(CACHE_NAME).then(function(cache) {
return cache.addAll(urlsToCache);
}))
});
self.addEventListener('fetch', function(event) {
console.log(event.request);
caches.match(event.request).then(function(response) {
if (response) {
console.log(response);
return response;
}
});
});
It's all working fine (in dev tools I can see the png file stored in the Cache Storage).
The problem is when I go offline: I guess that the GET request for the png file is matched, because if I navigate to localhost:8080/myapp/img/logout.png the response that gets logged (as you can see in the script above) is not undefined, but Chrome still returns ERR_INTERNET_DISCONNECTED.
This is the dir structure of the webapp (shown in a screenshot).
What am I doing wrong?
In the fetch event of the service worker, instead of matching the request object, change it to caches.match(event.request.url).
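For reference, a minimal working version of that handler could look like this: match on the URL as suggested, wrap it in event.respondWith() (which the handler in the question never calls), and fall back to the network when there is no cache hit.
self.addEventListener('fetch', function(event) {
  event.respondWith(
    caches.match(event.request.url).then(function(response) {
      // Serve the cached copy if there is one, otherwise go to the network.
      return response || fetch(event.request);
    })
  );
});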
I would like to use a service worker to cache files and improve the user experience by providing offline pages. I used pwabuilder.com to create the files for the website. Unfortunately, even when using the code without any elements to cache, it issues the error "Uncaught (in promise) TypeError: Request failed".
I double-checked the code and tried the different bug fixes shown on Google Developers and Stack Overflow, but none of these helped me fix the issue.
I have this in the HTML file:
if ("serviceWorker" in navigator) {
if (navigator.serviceWorker.controller) {
console.log("[PWA Builder] active service worker found, no need to register");
} else {
// Register the service worker
navigator.serviceWorker
.register("pwabuilder-sw.js", {
scope: "./"
})
.then(function (reg) {
console.log("[PWA Builder] Service worker has been registered for scope: " + reg.scope);
});
}
}
The service worker registers successfully.
The pwabuilder-sw.js is this one:
//This is the service worker with the Advanced caching
const CACHE = "pwabuilder-adv-cache";
const precacheFiles = [
/* Add an array of files to precache for your app */
'/cms/stylesheets/bootstrap.css',
'/cms/stylesheets/ifpayroll.css',
'/cms/stylesheets/animate.css',
'/cms/stylesheets/fontawesome-webfont.css',
'/cms/javascript/main.js',
'/cms/javascript/aos.js',
'/cms/images/logo#3x.png',
];
// TODO: replace the following with the correct offline fallback page i.e.: const offlineFallbackPage = "offline.html";
const offlineFallbackPage = "ToDo-replace-this-name.html";
const networkFirstPaths = [
/* Add an array of regex of paths that should go network first */
// Example: /\/api\/.*/
];
const avoidCachingPaths = [
/* Add an array of regex of paths that shouldn't be cached */
// Example: /\/api\/.*/
];
function pathComparer(requestUrl, pathRegEx) {
return requestUrl.match(new RegExp(pathRegEx));
}
function comparePaths(requestUrl, pathsArray) {
if (requestUrl) {
for (let index = 0; index < pathsArray.length; index++) {
const pathRegEx = pathsArray[index];
if (pathComparer(requestUrl, pathRegEx)) {
return true;
}
}
}
return false;
}
self.addEventListener("install", function (event) {
console.log("[PWA Builder] Install Event processing");
console.log("[PWA Builder] Skip waiting on install");
self.skipWaiting();
event.waitUntil(
caches.open(CACHE).then(function (cache) {
console.log("[PWA Builder] Caching pages during install");
return cache.addAll(precacheFiles).then(function () {
if (offlineFallbackPage === "ToDo-replace-this-name.html") {
return cache.add(new Response("TODO: Update the value of the offlineFallbackPage constant in the serviceworker."));
}
return cache.add(offlineFallbackPage);
});
})
);
});
// Allow sw to control of current page
self.addEventListener("activate", function (event) {
console.log("[PWA Builder] Claiming clients for current page");
event.waitUntil(self.clients.claim());
});
// If any fetch fails, it will look for the request in the cache and serve it from there first
self.addEventListener("fetch", function (event) {
if (event.request.method !== "GET") return;
if (comparePaths(event.request.url, networkFirstPaths)) {
networkFirstFetch(event);
} else {
cacheFirstFetch(event);
}
});
function cacheFirstFetch(event) {
event.respondWith(
fromCache(event.request).then(
function (response) {
// The response was found in the cache so we responde with it and update the entry
// This is where we call the server to get the newest version of the
// file to use the next time we show view
event.waitUntil(
fetch(event.request).then(function (response) {
return updateCache(event.request, response);
})
);
return response;
},
function () {
// The response was not found in the cache so we look for it on the server
return fetch(event.request)
.then(function (response) {
// If request was success, add or update it in the cache
event.waitUntil(updateCache(event.request, response.clone()));
return response;
})
.catch(function (error) {
// The following validates that the request was for a navigation to a new document
if (event.request.destination !== "document" || event.request.mode !== "navigate") {
return;
}
console.log("[PWA Builder] Network request failed and no cache." + error);
// Use the precached offline page as fallback
return caches.open(CACHE).then(function (cache) {
cache.match(offlineFallbackPage);
});
});
}
)
);
}
function networkFirstFetch(event) {
event.respondWith(
fetch(event.request)
.then(function (response) {
// If request was success, add or update it in the cache
event.waitUntil(updateCache(event.request, response.clone()));
return response;
})
.catch(function (error) {
console.log("[PWA Builder] Network request Failed. Serving content from cache: " + error);
return fromCache(event.request);
})
);
}
function fromCache(request) {
// Check to see if you have it in the cache
// Return response
// If not in the cache, then return error page
return caches.open(CACHE).then(function (cache) {
return cache.match(request).then(function (matching) {
if (!matching || matching.status === 404) {
return Promise.reject("no-match");
}
return matching;
});
});
}
function updateCache(request, response) {
if (!comparePaths(request.url, avoidCachingPaths)) {
return caches.open(CACHE).then(function (cache) {
return cache.put(request, response);
});
}
return Promise.resolve();
}
It is 100% the same as the one provided on pwabuilder.com except for the cached files that have been added.
Manifest: unknown 'orientation' value ignored.
pwabuilder-sw.js:83 [PWA Builder] Install Event processing
pwabuilder-sw.js:87 [PWA Builder] Skip waiting on install
pwabuilder-sw.js:97 [PWA Builder] Caching pages during install
legal.html:63 [PWA] Service worker has been registered for scope: https://www.ifpayroll.lu/
pwabuilder-sw.js:1 Uncaught (in promise) TypeError: Request failed
This is what I get.
You have to change this line to reflect your real filename, and make sure that file exists:
const offlineFallbackPage = "ToDo-replace-this-name.html";
Then you can also delete these lines:
if (offlineFallbackPage === "ToDo-replace-this-name.html") {
return cache.add(new Response("TODO: Update the value of the offlineFallbackPage constant in the serviceworker."));
}
The offline fallback page instruction is badly documented IMO/annoying.
These lines can be removed:
if (offlineFallbackPage === "ToDo-replace-this-name.html") {
return cache.add(new Response("TODO: Update the value of the offlineFallbackPage constant in the serviceworker."));
}
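Put together, the relevant parts of the service worker then become something like this (assuming the fallback page is called offline.html and actually exists on the site; the name is just an example):
const offlineFallbackPage = "offline.html"; // must be a page that really exists

self.addEventListener("install", function (event) {
  console.log("[PWA Builder] Install Event processing");
  console.log("[PWA Builder] Skip waiting on install");
  self.skipWaiting();
  event.waitUntil(
    caches.open(CACHE).then(function (cache) {
      console.log("[PWA Builder] Caching pages during install");
      return cache.addAll(precacheFiles).then(function () {
        // The "ToDo-replace-this-name.html" guard is no longer needed;
        // just precache the real fallback page.
        return cache.add(offlineFallbackPage);
      });
    })
  );
});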
I am experiencing an issue when developing an application and attempting to utilize a refresh token. I am using ADFS for authentication, where I get an id_token that expires every hour and a refresh token that lasts 8 hours.
In development, the below script works perfectly as intended and reaches out to the server for a refresh.
In production, it gets new tokens, but it never retries the original request. I am trying to find out why it is different on webpack-dev-server vs production.
Any help would be much appreciated!
P.S. Using Babel Presets: babel-preset-env and babel-preset-stage-2
axios.js
import axios from 'axios'
// Set baseURL for development and production
const baseURL = process.env.NODE_ENV === 'development' ? '//localhost:3001/api' : '/api'
// Create instance of axios with correct baseURL
const instance = axios.create({
baseURL
})
// Intercept responses
instance.interceptors.response.use((response) => {
return response
}, async (error) => {
// Pull config, status and data from the error
const { config, response: { status, data } } = error
// Pull tokens from local storage
let currentTokens = JSON.parse(localStorage.getItem('tokens')) || null
// If response errors at 401, token is still valid and we have tokens in localStorage
if(status === 401 && data.token_invalid === undefined && currentTokens && !config._retry) {
config._retry = true
try {
// Ask server for new token
const authenticate = await instance.post('/user/login', {refresh_token: currentTokens.refresh_token})
// Pull tokens and success from authenticated request
const { tokens, success } = authenticate.data
// If successful, set access_token, id_token, headers and localStorage
if(success) {
currentTokens.access_token = tokens.access_token
currentTokens.id_token = tokens.id_token
const bearer = `Bearer ${tokens.id_token}`
config.headers['Authorization'] = bearer
Object.assign(instance.defaults, {headers: {Authorization: bearer}})
localStorage.setItem('tokens', JSON.stringify(currentTokens))
// Rerun original request
return instance(config)
}
} catch (e) {
// Catch any errors
console.log(e)
return
}
} else if(data && data.token_invalid !== undefined && data.token_invalid) {
// If refresh has expired, take user to ADFS to reauthenticate
location = `${process.env.OAUTH_CLIENT_EP}?client_id=${process.env.AZURE_CLIENT_ID}&redirect_uri=${process.env.REDIRECT_URI}&resource=${process.env.REDIRECT_URI}&response_type=code`
return
} else {
// Swallow all remaining errors
return
}
})
export default instance
Found the issue. It appears that since I'm using both relative and absolute URLs for the baseURL, the absolute URL in development is processed correctly, but the relative URL gets chained onto the original request.
In other words, when sending in production, the URL looks like /api/api/actual/request, where it should just be /api/actual/request.
I solved this by adding an API_URL to my config files with the absolute URL for both development and production, and then updating my instance creation to the following.
const instance = axios.create({
baseURL: process.env.API_URL
})
Thanks to all who viewed and attempted to help. Have a great day everyone!
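For what it's worth, an alternative to the API_URL approach above would be to blank out baseURL before replaying the request, since the /api/api symptom suggests the failed request's config.url already contains the merged-in baseURL. An untested sketch:
// In the interceptor, instead of the plain retry:
config.baseURL = ''   // config.url already starts with '/api/...', don't prepend it again
return instance(config)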