I have a Flutter app that also ships for the web as a PWA. I'm having some problems getting the installability prompt to show up for it in the Chrome browser. It appears that Google has introduced a set of checks on the service worker, in particular on the 'fetch' event listener, to see whether it can handle an offline mode appropriately: https://developer.chrome.com/blog/improved-pwa-offline-detection/
I'm not entirely sure what needs to change in the bundled service worker that Flutter creates when building for web, so I assume a custom implementation will be needed:
// The application shell files that are downloaded before a service worker can
// start.
const CORE = [
"/",
"main.dart.js",
"index.html",
"assets/NOTICES",
"assets/AssetManifest.json",
"assets/FontManifest.json"];
// During install, the TEMP cache is populated with the application shell files.
self.addEventListener("install", (event) => {
self.skipWaiting();
return event.waitUntil(
caches.open(TEMP).then((cache) => {
return cache.addAll(
CORE.map((value) => new Request(value + '?revision=' + RESOURCES[value], {'cache': 'reload'})));
})
);
});
// During activate, the cache is populated with the temp files downloaded in
// install. If this service worker is upgrading from one with a saved
// MANIFEST, then use this to retain unchanged resource files.
self.addEventListener("activate", function(event) {
return event.waitUntil(async function() {
try {
var contentCache = await caches.open(CACHE_NAME);
var tempCache = await caches.open(TEMP);
var manifestCache = await caches.open(MANIFEST);
var manifest = await manifestCache.match('manifest');
// When there is no prior manifest, clear the entire cache.
if (!manifest) {
await caches.delete(CACHE_NAME);
contentCache = await caches.open(CACHE_NAME);
for (var request of await tempCache.keys()) {
var response = await tempCache.match(request);
await contentCache.put(request, response);
}
await caches.delete(TEMP);
// Save the manifest to make future upgrades efficient.
await manifestCache.put('manifest', new Response(JSON.stringify(RESOURCES)));
return;
}
var oldManifest = await manifest.json();
var origin = self.location.origin;
for (var request of await contentCache.keys()) {
var key = request.url.substring(origin.length + 1);
if (key == "") {
key = "/";
}
// If a resource from the old manifest is not in the new cache, or if
// the MD5 sum has changed, delete it. Otherwise the resource is left
// in the cache and can be reused by the new service worker.
if (!RESOURCES[key] || RESOURCES[key] != oldManifest[key]) {
await contentCache.delete(request);
}
}
// Populate the cache with the app shell TEMP files, potentially overwriting
// cache files preserved above.
for (var request of await tempCache.keys()) {
var response = await tempCache.match(request);
await contentCache.put(request, response);
}
await caches.delete(TEMP);
// Save the manifest to make future upgrades efficient.
await manifestCache.put('manifest', new Response(JSON.stringify(RESOURCES)));
return;
} catch (err) {
// On an unhandled exception the state of the cache cannot be guaranteed.
console.error('Failed to upgrade service worker: ' + err);
await caches.delete(CACHE_NAME);
await caches.delete(TEMP);
await caches.delete(MANIFEST);
}
}());
});
// The fetch handler redirects requests for RESOURCE files to the service
// worker cache.
self.addEventListener("fetch", (event) => {
if (event.request.method !== 'GET') {
return;
}
var origin = self.location.origin;
var key = event.request.url.substring(origin.length + 1);
// Redirect URLs to the index.html
if (key.indexOf('?v=') != -1) {
key = key.split('?v=')[0];
}
if (event.request.url == origin || event.request.url.startsWith(origin + '/#') || key == '') {
key = '/';
}
// If the URL is not in the RESOURCES list, return so the browser takes over.
if (!RESOURCES[key]) {
return;
}
// If the URL is the index.html, perform an online-first request.
if (key == '/') {
return onlineFirst(event);
}
event.respondWith(caches.open(CACHE_NAME)
.then((cache) => {
return cache.match(event.request).then((response) => {
// Either respond with the cached resource, or perform a fetch and
// lazily populate the cache.
return response || fetch(event.request).then((response) => {
cache.put(event.request, response.clone());
return response;
});
})
})
);
});
self.addEventListener('message', (event) => {
// SkipWaiting can be used to immediately activate a waiting service worker.
// This will also require a page refresh triggered by the main worker.
if (event.data === 'skipWaiting') {
self.skipWaiting();
return;
}
if (event.data === 'downloadOffline') {
downloadOffline();
return;
}
});
// Download offline will check the RESOURCES for all files not in the cache
// and populate them.
async function downloadOffline() {
  var resources = [];
  var contentCache = await caches.open(CACHE_NAME);
  var currentContent = {};
  // origin is not in scope here; read it from the worker's location.
  var origin = self.location.origin;
  for (var request of await contentCache.keys()) {
    var key = request.url.substring(origin.length + 1);
    if (key == "") {
      key = "/";
    }
    currentContent[key] = true;
  }
  // Use for...of: for...in over an array would iterate indices, not the keys.
  for (var resourceKey of Object.keys(RESOURCES)) {
    if (!currentContent[resourceKey]) {
      resources.push(resourceKey);
    }
  }
  return contentCache.addAll(resources);
}
// Attempt to download the resource online before falling back to
// the offline cache.
function onlineFirst(event) {
return event.respondWith(
fetch(event.request).then((response) => {
return caches.open(CACHE_NAME).then((cache) => {
cache.put(event.request, response.clone());
return response;
});
}).catch((error) => {
return caches.open(CACHE_NAME).then((cache) => {
return cache.match(event.request).then((response) => {
if (response != null) {
return response;
}
throw error;
});
});
})
);
}
I've looked into the following sources: https://javascript.plainenglish.io/your-pwa-is-going-to-break-in-august-2021-34982f329f40 and https://developer.mozilla.org/en-US/docs/Web/Progressive_web_apps/Offline_Service_workers, but I'm still seeing the same issue:
[Screenshot: Chrome DevTools, Application tab - Manifest]
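From the Chrome article linked above, the new check essentially requires the service worker to produce a 200 response for the start_url while the network is unavailable. As a point of reference, here is a minimal sketch of a navigation fallback that should satisfy that check (it assumes '/' has already been cached under CACHE_NAME, as the install/activate steps above arrange; this is an illustration, not the official Flutter fix):
self.addEventListener('fetch', (event) => {
  // Chrome's installability check simulates an offline fetch of the
  // start_url; a navigation request must resolve to a 200 response.
  if (event.request.mode === 'navigate') {
    event.respondWith(
      fetch(event.request).catch(() =>
        // Fall back to the cached app shell when the network is down.
        caches.open(CACHE_NAME).then((cache) => cache.match('/'))
      )
    );
  }
});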
I am facing one issue. In my application, a page requests a Node.js server to fetch 2000 records at a time. The records come back from Node, but in the dev-tools console the response won't expand, and a loader (spinner) I've implemented does not stop even after the response is received. The whole code is below.
demo.component.ts:
onFileSelect($event) {
const file = $event.target.files[0];
const fileName = file.name;
const fileExtension = fileName.replace(/^.*\./, '');
if (fileExtension === 'ubot') {
this.loginService.startSpinner(true);
const formData = new FormData();
formData.append('cec', this.cec);
formData.append('screenName', this.intFlow);
formData.append('fileCategory', 'payload');
formData.append('file', file);
this.intentService.reverseFile(formData).subscribe(async (res: any) => {
console.log('response', res);
console.log('succ', res.status);
if (res && res.status === 'success') {
this.loginService.startSpinner(false);
this.intentService.intentData = '';
this.resettoOriginalState();
this.cdref.detach();
await this.repopulateDataFromFile(res.body);
(<HTMLInputElement>document.getElementById('fileuploader')).value = "";
}
else {
this.loginService.startSpinner(false);
this._notifications.create(res.msg, '', this.errorNotificationType);
(<HTMLInputElement>document.getElementById('fileuploader')).value = "";
}
});
} else {
this.loginService.startSpinner(false);
this._notifications.create('Please choose a file', '', this.errorNotificationType);
}
}
Here I am calling the server through a service, which is given below.
reverseFile(value) {
// const token = localStorage.getItem('token')
// let headers = new HttpHeaders({
// Authorization: 'Bearer ' + token
// })
return this.http.post(this.nodeAppUrl + 'reverseFile', value,{ observe: 'response'})
.pipe(
tap((res:any) => this.loginService.validateToken(res)),
map((res:any) => {
return res.body
})
)
}
When Angular makes the request the spinner starts, and after a few seconds the response comes back from Node.js. Although the line this.loginService.startSpinner(false); runs after the success check, the spinner keeps running.
The response contains more than 2000 records in a nested array-of-objects format, and we populate them using the this.repopulateDataFromFile(res.body); method. I am attaching a screenshot of the console tool below.
Even though the status is success, I am not able to stop the spinner, and I also cannot expand the logged records in the console, which shows "value was evaluated upon first expanding...".
Can anybody please help with why this is happening and how to resolve it?
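For what it's worth, one thing that stands out in the snippet is the this.cdref.detach() call before the await: a detached ChangeDetectorRef stops the view from reflecting later state changes. A minimal sketch of re-attaching once the heavy repopulation is done (whether this is the actual cause here is an assumption):
this.cdref.detach();                 // suspend change detection during repopulation
await this.repopulateDataFromFile(res.body);
this.cdref.reattach();               // resume change detection
this.cdref.detectChanges();          // flush pending view updates (spinner state, etc.)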
I am trying to implement download functionality using streams in Node.js.
In the code I am trying to simulate an endpoint that sends data in chunks, something similar to paginated data, for example chunks of size 5000. Or, to make it clearer, we can send top and skip parameters to the endpoint to get a particular chunk of data. If no parameters are provided, it sends the first 5000 entries.
There are 2 cases that I am trying to take care of:
When the user cancels the download from the browser, how do I handle the continuous fetching of data from the endpoint?
When the user pauses the download from the browser, how do I pause the data fetching, and then resume once the user resumes it?
The first case can be taken care of using the 'close' event of the request. When the connection between the client and the server gets cancelled, I disconnect.
If anyone has a better way of implementing this please suggest.
I am having trouble handling the second case when the user pauses.
If anyone could guide me through this, or even provide a better solution to the overall problem(incl. handling the chunks of data), it would be really helpful.
const {createServer} = require('http');
const {Transform} = require('stream');
const axios = require('axios');
var c = 0;
class ApiStream extends Transform {
constructor(apiCallback, res, req) {
super();
this.apiCallback = apiCallback;
this.isPipeSetup = false;
this.res = res;
this.req = req
}
//Will get data continuously
async start() {
let response;
try {
response = await this.apiCallback();
} catch (e) {
response = null;
}
if (!this.isPipeSetup) {
this.pipe(this.res);
this.isPipeSetup = true;
}
if (response) {
response = response.data
if (Array.isArray(response)) {
response.forEach((item) => {
this.push(JSON.stringify(item) + "\n");
});
} else if (typeof response === "object") {
this.push(JSON.stringify(response) + "\n");
} else if (typeof response === "string") {
this.push(response + "\n");
}
this.start()
}else{
this.push(null);
console.log('Stream ended')
}
}
}
const server = createServer(async (req, res) => {
res.setHeader("Content-disposition", "attachment; filename=download.json");
res.setHeader("Content-type", "text/plain");
let disconnected = false;
const filestream = new ApiStream(async () => {
let response;
try {
if(disconnected){
console.log('Client connection closed')
return null;
}
c++;
response = await axios.get("https://jsonplaceholder.typicode.com/users");
//Simulate delay in data fetching
let z = 0;
if(c>=200) response = null;
while(z<10000){
let b = 0;
while(b<10000){
b+=0.5;
}
z +=0.5;
}
  } catch (error) {
    // Plain http.ServerResponse has no Express-style .status(); set statusCode.
    res.statusCode = 500;
    res.end(String(error));
  }
if (response) {
return response;
}
return null;
}, res, req);
  // Attach the close handler before starting: start() only resolves after the
  // whole stream has been produced, so attaching it afterwards is too late.
  req.on('close', () => {
    disconnected = true;
  });
  await filestream.start();
})
server.listen(5050, () => console.log('server running on port 5050'));
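On the pause case: rather than driving the loop with a recursive start(), the fetching can be tied to Node's backpressure, so pausing the download in the browser stops the fetching automatically. A minimal sketch using a Readable whose _read() fetches the next page on demand (the endpoint URL and the top/skip parameters are placeholders standing in for the paginated API described above):
const { Readable, pipeline } = require('stream');
const axios = require('axios');

class PagedApiStream extends Readable {
  constructor(pageSize = 5000) {
    super();
    this.pageSize = pageSize;
    this.skip = 0;
    this.fetching = false;
  }
  _read() {
    if (this.fetching) return; // a page is already in flight
    this.fetching = true;
    // Hypothetical paginated endpoint taking top/skip, as described above.
    axios.get('https://api.example.com/records', {
      params: { top: this.pageSize, skip: this.skip },
    }).then(({ data }) => {
      this.fetching = false;
      if (!Array.isArray(data) || data.length === 0) {
        this.push(null); // no more pages: end the stream
        return;
      }
      this.skip += this.pageSize;
      for (const item of data) {
        this.push(JSON.stringify(item) + '\n');
      }
    }).catch((err) => this.destroy(err));
  }
}
// Usage inside the request handler; pipeline() handles backpressure and tears
// the stream down if the client disconnects, covering the cancel case too:
// pipeline(new PagedApiStream(), res, (err) => { if (err) console.error(err.message); });
When the user pauses, the response stops draining, the Readable's internal buffer fills up to its highWaterMark, and _read() is simply not called again until the user resumes.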
So, once again like a thousand times before, I updated my service worker file version to check for any errors. I opened the Chrome (browser) developer tools, and what do I see... fetching works in a new way... an ERROR?
Fetch finished loading: GET "https://www.example.com/favicon.ico", etc., plus some more CSS and images, and then, as the last line of the console log, something I don't recognize: Fetch failed loading: GET "https://www.example.com/".
Why does it need to request the domain's top root every time?
Then I checked the headers (DevTools - Network - Headers), because the network status = (failed):
Request URL: https://www.example.com/
Referrer Policy: unsafe-url
Pretty much no header info at all, and no content?
If I enable navigation preload, it shows an extra error in red (The service worker navigation preload request failed with network error: net::ERR_INTERNET_DISCONNECTED), so I have disabled preload for now; the service worker still works despite this error.
I had just updated to PHP 8.0, so maybe that was doing something, but after going back to the old version nothing changed. Maybe my server started blocking some sort of requests, but that is unlikely; it looks more like a bad request from the Chrome service worker.
Perhaps Chrome uses that last request to check some sort of offline capability. I do display an offline page on fetch errors, if that has anything to do with this.
Despite the problems/errors described above, the service worker works like it should.
Anyway, here is the SW code example:
const OFFLINE_VERSION = 1;
var filevers='xxxx';
const CACHE_NAME = 'offline'+filevers;
// Customize this with a different URL if needed.
const OFFLINE_URL = 'offlineurl.php';
const OFFLINE_URL_ALL = [
'stylesheet'+filevers+'.css',
'offlineurl.php',
'favicon.ico',
'img/logo.png'
].map(url => new Request(url, {cache: 'reload', credentials: 'include'}));
self.addEventListener('install', (event) => {
event.waitUntil((async () => {
const cache = await caches.open(CACHE_NAME);
// Setting {cache: 'reload'} in the new request will ensure that the response
// isn't fulfilled from the HTTP cache; i.e., it will be from the network.
await cache.addAll(OFFLINE_URL_ALL);
})());
});
self.addEventListener('activate', (event) => {
event.waitUntil((async () => {
// Enable navigation preload if it's supported.
// See https://developers.google.com/web/updates/2017/02/navigation-preload
//removed for now
})());
// Tell the active service worker to take control of the page immediately.
self.clients.claim();
});
self.addEventListener('fetch', (event) => {
// We only want to call event.respondWith() if this is a navigation request
// for an HTML page.
const destination = event.request.destination;
if (destination == "style" || destination == "script" || destination == "document" || destination == "image" || destination == "font") {
event.respondWith((async () => {
try {
const cache = await caches.open(CACHE_NAME);
const cachedResponse = await cache.match(event.request);
if (cachedResponse) {
return cachedResponse;
} else {
// First, try to use the navigation preload response if it's supported.
//removed for now
const networkResponse = await fetch(event.request);
return networkResponse;
}
} catch (error) {
if (event.request.mode === 'navigate') {
// catch is only triggered if an exception is thrown, which is likely
// due to a network error.
// If fetch() returns a valid HTTP response with a response code in
// the 4xx or 5xx range, the catch() will NOT be called.
const cache = await caches.open(CACHE_NAME);
const cachedResponse = await cache.match(OFFLINE_URL);
return cachedResponse;
}
}
})());
}
});
Any suggestions as to what may cause the error?
I'm trying to upload multiple images to Firebase at once. That part works, but the array of returned download URLs arrives too late: the post has already been submitted with an empty array. Here is my code:
// uploading media files using promises
async uploadMedia(mediaFile: string){
const extension = mediaFile.split('.')[mediaFile.split('.').length - 1];
const mediaFileName = `${Math.round(Math.random()*100000000000)}.${extension}`;
this.uploadProgress = 0;
const response = await fetch(mediaFile);
const blob = await response.blob();
const storageRef = storage.ref(`${mediaFileName}`).put(blob);
return storageRef.on(`state_changed`,snapshot=>{
this.uploadProgress = (snapshot.bytesTransferred/snapshot.totalBytes);
}, error=>{
this.error = error.message;
this.submitting = false;
this.uploadingMedia = false;
return;
},
async () => {
// check whether the media is an image or a video and add to correct arrays
if(extension == "png" || extension == "jpg"){
return storageRef.snapshot.ref.getDownloadURL().then(async (url)=>{
this.firebaseImageUrls = [...this.firebaseImageUrls, url];
return;
});
}
else{
return storageRef.snapshot.ref.getDownloadURL().then(async (url)=>{
this.firebaseVideoUrls = [...this.firebaseVideoUrls, url];
return;
});
}
});
}
Where everything is being called:
await Promise.all(this.props.store.selectedImagesArray.map(async (file:string) => {
await this.uploadMedia(file);
}))
this.submitPost(); // this submits everything with the firebaseImageUrls
Any help is appreciated.
The problem seems to be that storageRef.on() does not return a promise. It just registers the handlers. I'm not an expert on firebase. Maybe the put(blob) returns a promise that you can use.
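Indeed, in the v8-style API used here, the UploadTask returned by put() is thenable, so it can be awaited directly. A minimal sketch of that idea (illustrative, based on the code in the question):
async uploadMedia(mediaFile) {
  const extension = mediaFile.split('.').pop();
  const mediaFileName = `${Math.round(Math.random() * 100000000000)}.${extension}`;
  const response = await fetch(mediaFile);
  const blob = await response.blob();
  const ref = storage.ref(mediaFileName);
  await ref.put(blob);            // resolves when the upload completes
  return ref.getDownloadURL();    // resolves with the file's download URL
}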
Figured it out. I had to create a promise and resolve it for each upload task, then loop through all the files doing this. When all the files are completely uploaded and the loop has finished, I can submit the post with the files that are in firebaseImageUrls.
async uploadMedia(mediaFile: string){
return new Promise(async (resolve, reject) => {
//making the uploading task for one file
const extension = mediaFile.split('.')[mediaFile.split('.').length - 1];
const mediaFileName = `${Math.round(Math.random()*100000000000)}.${extension}`;
const response = await fetch(mediaFile);
const blob = await response.blob();
const storageRef = storage.ref(`${mediaFileName}`);
const task = storageRef.put(blob);
task.on(`state_changed`,snapshot=>{
this.uploadProgress = (snapshot.bytesTransferred/snapshot.totalBytes);
}, error=>{
this.error = error.message;
this.submitting = false;
this.uploadingMedia = false;
reject(error); // settle the promise on failure instead of leaving it pending
},
async () => {
if(extension == "png" || extension == "jpg"){
task.snapshot.ref.getDownloadURL().then((url:any)=>{
console.log(url);
resolve(url);
});
}
else{
task.snapshot.ref.getDownloadURL().then((url:any)=>{
console.log(url);
resolve(url);
});
}
});
})
}
The loop:
for(var i = 0; i < this.props.store.selectedImagesArray.length; i++){
const imageUrl = await this.uploadMedia(this.props.store.selectedImagesArray[i]);
this.firebaseImageUrls = [...this.firebaseImageUrls, imageUrl];
}
this.submitPost();
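Since each call now resolves with its URL, the sequential for-loop could also be swapped for Promise.all to upload the files in parallel (a sketch of the same calling code):
const urls = await Promise.all(
  this.props.store.selectedImagesArray.map((file) => this.uploadMedia(file))
);
this.firebaseImageUrls = [...this.firebaseImageUrls, ...urls];
this.submitPost();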
I'm running the node-rdkafka code below in Eclipse as a Node.js application. It is the sample code from https://blizzard.github.io/node-rdkafka/current/tutorial-producer_.html
I want to run this on a test server and call it from an iOS mobile application.
I know how to run a Node.js app on AWS.
Question I: Are there any other options for running it in a free test-server environment, like Tomcat?
Question II: Even if I am able to run this Node.js app on a server, how do I call it from a mobile application? Do I need to call producer.on('ready', function(arg)), or which function do I need to call from the mobile app?
var Kafka = require('node-rdkafka');
//console.log(Kafka.features);
//console.log(Kafka.librdkafkaVersion);
var producer = new Kafka.Producer({
'metadata.broker.list': 'localhost:9092',
'dr_cb': true
});
var topicName = 'MyTest';
//logging debug messages, if debug is enabled
producer.on('event.log', function(log) {
console.log(log);
});
//logging all errors
producer.on('event.error', function(err) {
console.error('Error from producer');
console.error(err);
});
//counter to stop this sample after maxMessages are sent
var counter = 0;
var maxMessages = 10;
producer.on('delivery-report', function(err, report) {
console.log('delivery-report: ' + JSON.stringify(report));
counter++;
});
//Wait for the ready event before producing
producer.on('ready', function(arg) {
console.log('producer ready.' + JSON.stringify(arg));
for (var i = 0; i < maxMessages; i++) {
var value = Buffer.from('MyProducerTest - value-' + i); // new Buffer() is deprecated
var key = "key-"+i;
// if partition is set to -1, librdkafka will use the default partitioner
var partition = -1;
producer.produce(topicName, partition, value, key);
}
//need to keep polling for a while to ensure the delivery reports are received
var pollLoop = setInterval(function() {
producer.poll();
if (counter === maxMessages) {
clearInterval(pollLoop);
producer.disconnect();
}
}, 1000);
});
/*
producer.on('disconnected', function(arg) {
console.log('producer disconnected. ' + JSON.stringify(arg));
});*/
//starting the producer
producer.connect();
First of all, you need an HTTP server. ExpressJS can be used. Then, just tack on the Express code basically at the end, but move the producer loop into the request route.
So, start with what you had
var Kafka = require('node-rdkafka');
//console.log(Kafka.features);
//console.log(Kafka.librdkafkaVersion);
var producer = new Kafka.Producer({
'metadata.broker.list': 'localhost:9092',
'dr_cb': true
});
var topicName = 'MyTest';
//logging debug messages, if debug is enabled
producer.on('event.log', function(log) {
console.log(log);
});
//logging all errors
producer.on('event.error', function(err) {
console.error('Error from producer');
console.error(err);
});
producer.on('delivery-report', function(err, report) {
  // the counter from the original sample is gone, so don't increment it here
  console.log('delivery-report: ' + JSON.stringify(report));
});
//Wait for the ready event before producing
producer.on('ready', function(arg) {
console.log('producer ready.' + JSON.stringify(arg));
});
producer.on('disconnected', function(arg) {
console.log('producer disconnected. ' + JSON.stringify(arg));
});
//starting the producer
producer.connect();
Then, you can add this in the same file.
var express = require('express')
var app = express()
app.get('/', (req, res) => res.send('Ready to send messages!'))
app.post('/:maxMessages', function (req, res) {
if (req.params.maxMessages) {
var maxMessages = parseInt(req.params.maxMessages);
for (var i = 0; i < maxMessages; i++) {
var value = Buffer.from('MyProducerTest - value-' + i);
var key = "key-"+i;
// if partition is set to -1, librdkafka will use the default partitioner
var partition = -1;
producer.produce(topicName, partition, value, key);
  } // end for
  res.send('Queued ' + maxMessages + ' messages'); // respond so the client does not hang
  } // end if
}); // end app.post()
app.listen(3000, () => console.log('Example app listening on port 3000!'))
I don't think the poll loop is necessary since you don't care about the counter anymore.
Now, connect your mobile app to http://<your server IP>:3000/ and send test messages with a POST request to, for example, http://<your server IP>:3000/10, adjusting the number to change how many messages are sent.
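For example, from a terminal (the iOS app would make the equivalent HTTP request):
curl -X POST http://<your server IP>:3000/10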
I might be late on this, but this is how I did it using promises, and I found it better than having a timeout, etc.
const postMessageToPublisher = (req, res) => {
return new Promise((resolve, reject) => {
producer.connect();
producer.setPollInterval(globalConfigs.producerPollingTime);
    const requestBody = req.body;
    const actualBody = requestBody.data;
    const topicName = requestBody.topicName;
    const key = requestBody.key || uuid();
    const partition = requestBody.partition || undefined;
    const data = Buffer.from(JSON.stringify(actualBody));
/**
* Actual messages are sent here when the producer is ready
*/
producer.on(kafkaEvents.READY, () => {
try {
      producer.produce(
        topicName,
        partition,
        data,
        key // key provided by the user, or a UUID
      );
} catch (error) {
reject(error);
}
});
// Register listener for debug information; only invoked if debug option set in driver_options
producer.on(kafkaEvents.LOG, log => {
logger.info('Producer event log notification for debugging:', log);
});
// Register error listener
producer.on(kafkaEvents.ERROR, err => {
logger.error('Error from producer:' + JSON.stringify(err));
reject(err);
});
// Register delivery report listener
producer.on(kafkaEvents.PUBLISH_ACKNOWLEDGMENT, (err, ackMessage) => {
if (err) {
logger.error(
'Delivery report: Failed sending message ' + ackMessage.value
);
logger.error('and the error is :', err);
reject({ value: ackMessage.value, error: err });
} else {
resolve({
teamName: globalConfigs.TeamNameService,
topicName: ackMessage.topic,
key: ackMessage.key.toString()
});
}
});
});
};
Please note that kafkaEvents contains my constants for the events we listen to; it is just a reference, e.g. kafkaEvents.LOG is the same as 'event.log'.
The calling function expects this to return a promise, so we use .then(data => 'send your response to the user from here') and .catch(error => 'send the error response to the user from here').
This is how I achieved it using promises.
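For completeness, a sketch of how that calling side might be wired up (assuming an Express route; the path and handler names are illustrative):
app.post('/publish', (req, res) => {
  postMessageToPublisher(req, res)
    .then((data) => res.status(200).json(data)) // success response to the user
    .catch((error) => res.status(500).json({ error: String(error) })); // error response
});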