Javascript Web Worker isn't getting message - javascript

I have a web worker that's running a long-running calculation. I've split the calculation up to run in a chain of Promises, so several times per second, the thread has a chance to do something else before continuing with the calculation.
I want to be able to cancel the calculation by sending a message. Using Worker.terminate() kills the calculation, but doesn't free up the memory, and I'm running into browser crashes.
// Build the calculation as a chain of chunks, one chunk per
// PERMUTATIONS_PER_BREAK permutations, and return a promise that resolves
// when all chunks are done.
//
// Why setTimeout and not a bare .then(): promise continuations are
// microtasks, and the worker drains the entire microtask queue before it
// processes the next incoming message. A chain built purely of .then()
// callbacks therefore never lets a 'cancel' message reach onmessage.
// Scheduling each chunk as a macrotask (setTimeout) yields to the event
// loop between chunks so cancellation can flip _shouldContinue.
function calculate() {
  // case when no dice are being calculated
  if (!_diceList.length) {
    _finished = true;
    return Promise.resolve();
  }
  let promise = Promise.resolve();
  // create chain of chunked calculations, yielding to the event loop
  // between chunks so messages (e.g. 'cancel') can be handled
  for (let i = 0; i < _totalPermutations; i += PERMUTATIONS_PER_BREAK) {
    promise = promise.then(() => new Promise((resolve) => {
      setTimeout(() => {
        // Skip remaining work once cancelled or finished.
        if (_shouldContinue && !_finished) {
          reportStatus();
          _atBreak = false;
          calculateRecursive();
        }
        resolve();
      }, 0);
    }));
  }
  return promise;
}
// ...
// Entry point for a start message: set up the dice, run the calculation,
// report the final status, then shut the worker down regardless of errors.
function run(data) {
  setup(data.dice);
  const onDone = () => { reportStatus(); };
  const swallowError = () => true; // keep the chain going so close() runs
  const shutdown = () => { close(); };
  calculate().then(onDone).catch(swallowError).then(shutdown);
}
// Message dispatch: 'cancel' flips the cooperative-stop flag; any other
// payload is treated as a request to start calculating.
onmessage = (e) => {
  if (e.data === 'cancel') {
    _shouldContinue = false;
    return;
  }
  run(e.data);
};
When I debug, I can see that the code that creates the Worker does send the 'cancel' message, but when I set a breakpoint in my onmessage in the Worker code, it never enters the function.
I know that JavaScript is single-threaded, but I thought the entire purpose of Promises was to simulate async behavior.

Related

How to execute variable number of async calls(coming dynamically at runtime) serially?

I am making a chrome extension (mv3). Based on user activity, the content.js passes a message to the background.js which then calls an async function to add data in Google Docs using Docs API.
I want each request to execute only after the previous one has finished running. I am using chrome.runtime.sendMessage to send a message from content.js and don't see a way of calling background.js serially from there. So I need a way of executing them one by one in background.js only. The order of these requests is also important (but if the order of the requests gets changed by one/two places, I think that would still be okay from a user perspective).
I tried something and it is working but I am not sure if I am missing some edge cases, because I was unable to find the approach in any other answers -
Semaphore-like queue in javascript?
Run n number of async function before calling another method in nodejs
JavaScript: execute async function one by one
The approach I used is: I use a stack like structure to store requests, use setInterval to check for any pending requests and execute them serially.
content.js:
chrome.runtime.sendMessage({message});
background.js:
// Handle to the polling interval; cleared while the queue is draining.
let addToDocInterval = "";
// FIFO queue of pending messages.
let addToDocCalls = [];

// Performs the awaited Google Docs API call(s) for one message.
// (Implementation omitted in the answer.)
async function addToDoc(msg) {
  // Await calls to doc API
}

// Drain the queue serially. The interval is suspended while draining so a
// slow API call can never overlap with the next tick.
async function addToDocHelper() {
  if (addToDocCalls.length === 0)
    return;
  clearInterval(addToDocInterval);
  try {
    while (addToDocCalls.length > 0) {
      const msg = addToDocCalls.shift();
      await addToDoc(msg);
    }
  } catch (err) {
    // Log and move on; one failed call must not kill the queue. Remaining
    // messages are picked up on the next interval tick.
    console.error('addToDoc failed:', err);
  } finally {
    // BUG FIX: always re-arm the poller. In the original, a rejection from
    // addToDoc skipped this line and the queue was never serviced again.
    addToDocInterval = setInterval(addToDocHelper, 1000);
  }
}
// Queue every incoming message; the polling interval below picks them up.
chrome.runtime.onMessage.addListener((msg) => {
// Some other logic
addToDocCalls.push(msg);
})
// Poll once per second; addToDocHelper() suspends this interval while it
// drains the queue and re-creates it when done.
addToDocInterval = setInterval(addToDocHelper, 1000);
Is this approach correct? Or is there any better way to do this?
I'd suggest changing several things.
Don't use timers polling the array. Just initiate processing the array anytime you add a new item to the array.
Keep a flag on whether if you're already processing the array so you don't start duplicate processing.
Use a class to encapsulate this functionality into an object.
Encapsulate the addToDocCalls array and adding to it so your class is managing it and outside code just calls a function to add to it which also triggers the processing. Basically, you're making it so callers don't have to know how the insides work. They just call helper.addMsg(msg) and the class instance does all the work.
Here's an implementation:
// Performs the awaited Google Docs API call(s) for one message.
// (Implementation omitted in the answer.)
async function addToDoc(msg) {
// Await calls to doc API
}
// Serializes calls to addToDoc(): messages are queued and processed
// strictly one at a time, in arrival order.
class docHelper {
  constructor() {
    // FIFO queue of pending messages and a re-entrancy guard.
    this.addToDocCalls = [];
    this.loopRunning = false;
  }
  addMsg(msg) {
    // add item to the queue and initiate processing of the queue
    this.addToDocCalls.push(msg);
    this.process();
  }
  async process() {
    // don't run this loop twice if we're already running it
    if (this.loopRunning) return;
    try {
      this.loopRunning = true;
      // process all queued items (more may be appended while we await)
      while (this.addToDocCalls.length > 0) {
        // BUG FIX: the original read the bare identifier `addToDocCalls`,
        // which is a ReferenceError — the queue lives on `this`.
        const msg = this.addToDocCalls.shift();
        await addToDoc(msg);
      }
    } finally {
      this.loopRunning = false;
    }
  }
}
// Single shared queue instance for this background script.
const helper = new docHelper();
chrome.runtime.onMessage.addListener((msg) => {
// Some other logic
// Enqueue and kick processing; docHelper serializes the actual API calls.
helper.addMsg(msg);
});
So, process() will run until the array is empty. Any interim calls to addMsg while process() is running will add more items to array and will call process() again, but the loopRunning flag will keep it from starting duplicate processing loops. If addMsg() is called while process is not running, it will start the process loop.
P.S. You also need to figure out what sort of error handling you want if addToDoc(msg) rejects. This code protects the this.loopRunning flag if it rejects, but doesn't actually handle a reject error. In code like this that is processing a queue, often times all you can really do is log the error and move on, but you need to decide what is the proper course of action on a rejection.
You don't need to use setInterval for polling. You do not even need a while loop.
// Leftover from the polling approach; unused by this event-driven version
// but kept so any other code referencing it still works.
let addToDocInterval = "";
// FIFO queue of pending messages plus a re-entrancy guard.
let addToDocCalls = [];
let running = false;

// Performs the awaited Google Docs API call(s) for one message.
async function addToDoc(msg) {
  // Await calls to doc API
}

// Process exactly one message, then re-invoke to pick up the next. The
// `running` flag turns concurrent invocations into no-ops, so calls stay
// strictly serialized.
async function addToDocHelper() {
  if (running || addToDocCalls.length === 0)
    return;
  running = true;
  const msg = addToDocCalls.shift();
  try {
    await addToDoc(msg);
  } catch (err) {
    // Log and move on: one failed call must not stall the queue.
    console.error('addToDoc failed:', err);
  } finally {
    // BUG FIX: reset the flag even when addToDoc rejects — in the
    // original a rejection left running === true forever, deadlocking
    // the queue.
    running = false;
  }
  addToDocHelper();
}
chrome.runtime.onMessage.addListener((msg) => {
// Some other logic
// Enqueue, then kick the drain loop; the `running` flag inside
// addToDocHelper() makes this call a no-op if a drain is in progress.
addToDocCalls.push(msg);
addToDocHelper();
});
The code should be self explanatory. There is no magic.
Here is a generic way to run async tasks sequentially (and add more tasks to the queue at any time).
// Pending tasks in FIFO order, plus a flag marking an in-flight task.
const tasks = [];
let taskInProgress = false;

// Queue `newTask` (an async function) and run queued tasks strictly one at
// a time, in order. Call with no argument just to kick processing.
// A task that rejects is logged and the queue moves on.
async function qTask(newTask) {
  if (newTask) tasks.push(newTask);
  if (tasks.length === 0) return;
  if (taskInProgress) return;
  const nextTask = tasks.shift();
  taskInProgress = true;
  try {
    await nextTask();
  } catch (err) {
    // BUG FIX: contain task failures here. In the original, the rejection
    // escaped from the setTimeout-scheduled recursion as an unhandled
    // promise rejection (which aborts the process in modern Node).
    console.error('queued task failed:', err);
  } finally {
    taskInProgress = false;
    //use setTimeout so call stack can't overflow
    setTimeout(qTask, 0);
  }
}
//the code below is just used to demonstrate the code above works
// Demo driver: queues tasks of varying durations (including one that
// deliberately throws) to show that qTask() runs them one at a time and
// that a failing task does not break the queue.
async function test() {
console.log(`queuing first task`);
qTask(async () => {
await delay(500); //pretend this task takes 0.5 seconds
console.log('first task started');
throw 'demonstrate error does not ruin task queue';
console.log('first task finished'); // unreachable by design: the throw above skips it
});
for (let i = 0; i < 5; i++) {
console.log(`queuing task ${i}`)
qTask(async () => {
await delay(200); //pretend this task takes 0.2 seconds
console.log(`task ${i} ran`);
});
}
await delay(1000); //wait 1 second
console.log(`queuing extra task`);
qTask(async () => {
console.log('extra task ran');
});
await delay(3000); //wait 3 seconds
console.log(`queuing last task`);
qTask(async () => {
console.log('last task ran');
});
}
// Kick off the demonstration.
test();
// Promise-based sleep: resolves (with undefined) after `ms` milliseconds.
function delay(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

How to Abort (Stop) a chain of promises, executed in a loop? - JavaScript

I have a web application, in which I have two buttons: "start download" and "abort download", that are bind to the methods: start() and stop() accordingly.
1. When the user clicks the "start download" button, the application will loop over an array of data. For each item in the array, it will call the _downloadFile() method that will download one file and return a new Promise(), once that download is complete the next download will start.
2. If during the download flow (that may take a long long time) the user clicked the "abort download" button I would like to stop the download flow.
How can I implement the abort functionality?
Here is what I got so far?
Notice that because I'm using async/await, the loop will halt until the promise is resolved, and the next iteration will only be executed once the promise is resolved (or rejected).
// Download each entry of dataArray in order, saving results as they
// arrive. A failed download is reported and the loop moves on.
async function start(dataArray)
{
    for (const [index, name] of dataArray.entries())
    {
        try
        {
            const fileUrl = `http://server-url/${ index }/${ name }.xml`;
            const result = await _downloadFile(fileUrl);
            _saveResult(result);
        }
        catch (e) // promise rejected
        {
            _handleError(e);
        }
    }
}
// Intended to abort the download loop in start(); the question is asking
// how to implement this.
function stop()
{
// what goes here?? how do I stop the flow??
}
// Downloads one file; resolves with its contents, rejects on failure.
// (Implementation elided in the question.)
function _downloadFile(fileUrl)
{
return new Promise((resolve, reject) =>
{
// ...
});
}
// Persists downloaded data.
// NOTE(review): start() calls _saveResult(), not _saveFile() — one of the
// two names is presumably a typo. TODO confirm against the full source.
function _saveFile(data)
{
// ..
}
// Report a failed download without stopping the loop.
function _handleError(error)
{
    console.error(`Error: ${error}`);
}
Set a flag that the function can check, and throw if it's set. (Well, you're actually converting rejection to resolution in your code, so maybe you don't throw, just return early. [I wouldn't do that, btw.])
// Cooperative-cancellation flag, checked after each download completes.
let stopped = false; // *** The flag

// Download every entry in order; bails out between files once stop() has
// been called. Individual failures are reported and the loop continues.
async function start(dataArray)
{
    stopped = false; // *** Re-init the flag
    for (const [index, name] of dataArray.entries())
    {
        try
        {
            const fileUrl = `http://server-url/${ index }/${ name }.xml`;
            const result = await _downloadFile(fileUrl);
            if (stopped)
            {
                // *** stop() ran while this download was in flight:
                // *** discard the result and end the loop here.
                return;
            }
            _saveResult(result);
        }
        catch (e) // promise rejected
        {
            _handleError(e);
        }
    }
}
// Request cancellation; start() notices the flag after the current
// download finishes and returns without saving further results.
function stop()
{
    stopped = true; // *** Set the flag
}

Failing to resolve promise within my websocket acknowledgement reconnect logic

I have a custom connect function that creates a promise I want resolved once I make a websocket call and receive an acknowledgement. The remote server may be up, it may be down, but if it's unavailable I want to keep trying until I'm successful.
const socketIOClient = require('socket.io-client');
// Wrap `callback` so it fires at most once: either when the returned
// function is invoked, or with a TimeoutError if 60 seconds pass first.
function createTimeoutCallback(callback)
{
    let fired = false;
    // Failure path: after 60s with no invocation, report a TimeoutError.
    const timerID = setTimeout(() => {
        if (fired) return;
        fired = true;
        callback(new TimeoutError());
    }, 60 * 1000);
    // Success path: forward the first invocation and cancel the timer.
    return function (...args) {
        if (fired) return;
        fired = true;
        clearTimeout(timerID);
        callback.apply(this, args);
    };
}
// NOTE(review): on timeout this retries via setTimeout(myConnect, ...),
// which creates a brand-new promise on every attempt; the promise returned
// to the original caller is abandoned and never settles. That matches the
// symptom described below ("doSomething() gets called but we never get
// here"). The accepted workaround keeps one outer promise and recurses on
// an inner function instead.
async function myConnect()
{
let mysocket = socketIOClient(url);
return new Promise((resolve, reject) => {
mysocket.emit('clientconnect', args, createTimeoutCallback((resp) => {
if (!(resp instanceof TimeoutError)) {
// SUCCESS
doSomething();
resolve();
}
// We timed out, try again
else {
mysocket.close();
// BUG: schedules a fresh myConnect() whose new promise nobody awaits;
// the outer `resolve` is never called on this path.
setTimeout(myConnect, 60*1000);
}
}));
});
}
// NOTE(review): presumably meant to be `await myConnect();` — no `connect`
// is defined in the snippet. TODO confirm against the full source.
await connect();
// doSomething() gets called but we never get here
In the above code, if the endpoint is available, everything works fine. But I'm never returning from the myConnect() function when (1) I wait on its promise; and (2) the function needs to make several connection attempts (e.g., the server is not initially up); and (3) the endpoint finally comes back online and the connection succeeds. I suspect this has everything to do with me essentially abandoning the original promise on a reconnect attempt, but I don't want to reject it or the operation will prematurely fail.
I did find a workaround, which relies on an embedded function. With this technique there is only one promise which is always accessible and in scope, and the timer-based recursion (i.e., not stack-based) is always wrapped in that single promise that is never abandoned. I put this together before Jaromanda answered, so I can't confirm that his solution would work.
// Connect with retry: keeps ONE outer promise alive across attempts, so
// the caller's await settles no matter how many retries were needed.
async function myConnect()
{
    return new Promise((resolve, reject) => {
        const attempt = () => {
            const mysocket = socketIOClient(url);
            mysocket.emit('clientconnect', args, createTimeoutCallback((resp) => {
                if (resp instanceof TimeoutError) {
                    // Timed out: drop this socket and retry in a minute.
                    // The outer promise stays pending, never abandoned.
                    mysocket.close();
                    setTimeout(attempt, 60*1000);
                } else {
                    // SUCCESS
                    doSomething();
                    resolve();
                }
            }));
        };
        attempt();
    });
}

Intercept fetch() request, put it on-hold and resume when certain criteria is met

The following code fires an alert when it detects fetch() request to a certain endpoint. Doing so makes the request stops from proceeding, waits for the user to close the alert and then lets the request flow to the endpoint.
My question is how to achieve the same interruption, but instead of waiting for the alert to be closed, I'd need the request to wait for the appearance of a cookie. I have a feeling it needs to be done with Promises :)
// Patch window.fetch: pause (via a blocking alert) before requests to the
// watched endpoint, then forward everything to the real fetch.
const x = window.fetch;
window.fetch = function (...args) {
    if (args[0] == '/certain_endpoint') {
        alert('stopping for a while');
    }
    return x.apply(this, args);
};
You can use setInterval with promises to periodically poll for a certain condition and resolve when it is met.
// Patch window.fetch: requests to /needs_cookie are held until the cookie
// exists; every other request passes straight through.
const x = window.fetch;
window.fetch = function (...args) {
    if (args[0] != '/needs_cookie') {
        return x.apply(this, args);
    }
    return waitForCookie().then(() => x.apply(this, args));
};
// Returns a promise that resolves to a cookie when it is set.
function waitForCookie() {
return new Promise(function (resolve, reject) {
var intervalID = setInterval(checkForCookie, 100);
function checkForCookie() {
var cookie = getCookie();
if (cookie) {
clearInterval(intervalID);
resolve(cookie);
}
}
});
}
// Test double for cookie parsing: reports "no cookie" on the first two
// calls, then returns a value forever after. In reality this should parse
// document.cookie however you normally do.
var attempts = 0;
function getCookie() {
    if (attempts >= 2) {
        return 'cookie!';
    }
    attempts++;
    console.log('Attempts: ', attempts);
    return undefined;
}
If you're trying to do more complicated asynchronous stuff, including polling, you may want to check out RxJS.
{
    // Patch window.fetch inside a block scope so `original` stays private:
    // requests to the watched endpoint are delayed by one second.
    const original = window.fetch;
    window.fetch = function (...args) {
        const [endpoint] = args;
        if (endpoint == '/certain_endpoint') {
            return new Promise((resolve) => {
                setTimeout(() => resolve(original(...args)), 1000);
            });
        }
        return original(...args);
    };
}
You may return a promise instead that resolves after some time

Best practice to avoid clashing promises in js

I have a save function in my app which can be called manually and an autosave function which runs every 60 seconds.
To prevent the two ops trying to access the same file at the same instant, I set a flag called isSaving to true when one starts running, and to false again afterward. If open or save detect that autosave is running, they wait 1000ms and try again. If they fail after that I consider it an error.
Autosave:
// Kick off an autosave attempt once a minute.
setInterval(autosave, 1000 * 60);
// Guard flag shared with save()/open(); true while a write is in flight.
// NOTE(review): assigned without let/var here — an implicit global unless
// declared elsewhere in the file. TODO confirm.
isSaving = false;
// Autosave: write the current file unless another save is already running,
// in which case silently skip this tick. Returns a WinJS promise.
function autosave()
{
return new WinJS.Promise(function (complete, error, progress)
{
if(isSaving == false) // no saving op in progress
{
// set saving flag on
isSaving = true;
// write file
// NOTE(review): if writeFile rejects, isSaving is never reset, which
// would permanently block save() and open(). Consider an error handler.
return writeFile(currentFile)
.then(function () {
// saving flag off
isSaving = false;
complete();
});
}
else {
// silently abort
complete();
}
});
}
Manual save:
// Consecutive retries attempted while a save was in progress.
// NOTE(review): never reset to 0 after a successful save, so retry budget
// shrinks over the app's lifetime. TODO confirm intent.
var saveFileAttempts = 0;
// Manual save: writes the file and shows a notification, retrying up to
// 10 times (1s apart) while another save holds the isSaving flag.
function save()
{
return new WinJS.Promise(function (complete, error, progress)
{
if (isSaving == false) // no saving op in progress
{
// set saving flag on
isSaving = true;
// write file
return writeFile(currentFile)
.then(function () {
// show notification to user "file saved"
return showSaveNotification()
})
.then(function () {
// set saving flag off
isSaving = false;
complete();
});
}
else if (saveFileAttempts < 10) {
// try again in 1000ms, up to 10 times
// NOTE(review): the retried save()'s promise is discarded, so THIS
// promise never settles on the retry path — callers awaiting it hang.
saveFileAttempts++;
setTimeout(function () { save(); }, 1000);
}
else{
error();
}
});
}
Open:
// Consecutive retries attempted while a save was in progress.
var openFileAttempts = 0;
// Open: reads the file unless a save is in flight; retries up to 10 times
// at 1s intervals, then reports an error.
function open()
{
return new WinJS.Promise(function (complete, error, progress)
{
if (isSaving == false)
{
return readFile()
.then(function (file) {
currentFile = file;
openFileAttempts = 0;
complete();
});
}
else if (openFileAttempts < 10) {
// try again in 1000ms, up to 10 times
// NOTE(review): the retried open()'s promise is discarded, so the
// promise returned here never settles on this path.
openFileAttempts++;
setTimeout(function () { open(); }, 1000);
}
else{
error();
}
});
}
This feels like a hack. Is there a better way to achieve what I'm trying to do?
FYI: These functions return promises because there are other functions that call them.
Instead of waiting 1000ms and trying again, I'd recommend using a promise to represent that a save is ongoing and when it will end.
// Promise tracking the save currently in flight, or null when idle.
var saving = null;
// Autosave once a minute, but only when no save is already running.
setInterval(function() {
if (!saving) // ignore autosave when already triggered
save().then(showAutoSafeNotification);
}, 60e3);
// Save now, or — if a save is in flight — queue this save to run right
// after it finishes. Returns the promise for this call's own write.
function save() {
if (saving)
return saving.then(save); // queue
// else
var written = writeFile(currentFile);
// Track completion (success or failure) so later calls know when the file
// is free again; errors still propagate to the caller via `written`.
saving = written.then(function() {
saving = null;
}, function() {
saving = null;
});
return written;
}
You can do the same with open (and might want to abstract the written part out), although I fail to see how it interferes with an (auto)save. If you're concerned about reading the file that is already open while it is saved, I'd let the filesystem handle that and catch the error.
how about maintaining a single promise chain. Then, you might not need a setTimeout, this is a easy way, might be flawed, haven't used WinJS, writing code like it is normal promise:
// Autosave once a minute; it funnels through the same chain as manual saves.
setInterval(save, 1000 * 60);
// Tail of the single serialization chain; every save/open appends to it so
// file operations always run one after another, in call order.
var promise = Promise.resolve();

// Queue a write of the current file after all previously queued
// operations. Resolves when this write has finished.
function save(){
    // BUG FIX: the chain tail must be reassigned. The original always
    // chained off the same resolved base promise, so nothing was actually
    // serialized — save and open could run concurrently.
    promise = promise.then(function(){
        return writeFile(currentFile);
    });
    return promise;
}

// Queue a read after all previously queued operations; stores the result
// in currentFile. Resolves when the read has finished.
function open(){
    promise = promise.then(function(){
        return readFile().then(function (file) {
            currentFile = file;
        });
    });
    return promise;
}
but I guess, one problem with this code is, since it is single promise chain, you need to catch error properly in your application.

Categories