Toggling API call by a stream - javascript

Here is the code:
import Rx from 'rxjs';

function fakeApi(name, delay, response) {
  return new Rx.Observable(observer => {
    console.log(`${name}: Request.`)
    let running = true;
    const id = setTimeout(() => {
      console.log(`${name}: Response.`)
      running = false;
      observer.next(response);
      observer.complete();
    }, delay);
    return () => {
      if (running) console.log(`${name}: Cancel.`)
      clearTimeout(id);
    }
  })
}
function apiSearch() { return fakeApi('Search', 4000, "This is a result of the search."); }
//============================================================
const messages$ = new Rx.Subject();
const toggle$ = messages$.filter(m => m === 'toggle');
const searchDone$ = toggle$.flatMap(() =>
apiSearch().takeUntil(toggle$)
);
searchDone$.subscribe(m => console.log('Subscriber:', m))
setTimeout(() => {
// This one starts the API call.
toggle$.next('toggle');
}, 2000)
setTimeout(() => {
// This one should only cancel the API call in progress, not to start a new one.
toggle$.next('toggle');
}, 3000)
setTimeout(() => {
// And this should start a new request again...
toggle$.next('toggle');
}, 9000)
My intent is to start the API call and to stop it while it is in progress using the same toggle$ signal. The problem with the code is that toggle$ starts a new API call every time. I would like it not to start a new call when one is already running, but only to stop the call in progress. Somehow I should "unsubscribe" the outermost flatMap from the toggle$ stream while apiSearch() is running. I guess the code needs to be restructured to achieve this behaviour... What is the RxJS way of doing that?
UPDATE: After some more investigation and user-guide lookups, I came up with this:
const searchDone$ = toggle$.take(1).flatMap(() =>
apiSearch().takeUntil(toggle$)
).repeat()
It works like it should, but it still feels a little cryptic. Is this how you RxJS folks would solve it?

I think your solution will work only once since you're using take(1). You could do it like this:
const searchDone$ = toggle$
  .let(observable => {
    let pending;
    return observable
      .switchMap(() => {
        let innerObs;
        if (pending) {
          innerObs = Observable.empty();
        } else {
          pending = innerObs = apiSearch();
        }
        return innerObs.finally(() => pending = null);
      });
  });
I'm using let() only to wrap pending without declaring it in parent scope. The switchMap() operator unsubscribes for you automatically without using take*().
The output with your test setTimeouts will be as follows:
Search: Request.
Search: Cancel.
Search: Request.
Search: Response.
Subscriber: This is a result of the search.
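For reference, the same pending-guard can also be written with RxJS 6+ pipeable operators. This is only a sketch of the idea above, assuming the same toggle$ stream and apiSearch() function from the question:
import { EMPTY } from 'rxjs';
import { switchMap, finalize } from 'rxjs/operators';

let pending = null;
const searchDone$ = toggle$.pipe(
  switchMap(() => {
    if (pending) {
      return EMPTY; // a toggle while a call is running only cancels it
    }
    pending = apiSearch(); // otherwise start a new call
    return pending.pipe(finalize(() => (pending = null)));
  })
);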

Memory leak in while with promises

I have a nodejs cluster with a primary that handles worker cycles (in the while loop) and that listens to worker messages to progress in the cycle.
(In my code, index.js does not send messages on a setInterval but on another type of event; I have simplified the code here to get to the essence of the problem.)
Server.js
var cluster = require('cluster');
const ClusterMessages = require('cluster-messages');
const messages = new ClusterMessages();
if (cluster.isMaster) {
  let worker = cluster.fork()
  console.log(cluster);
  (async () => {
    let cycle = 0
    while (true) {
      console.log(cycle);
      cycle++
      await Promise.all([
        enough(),
      ])
    }
    function enough() {
      return new Promise(resolve => {
        messages.on('enough', () => {
          console.log('enough');
          resolve()
        });
      });
    }
  })()
} else {
  require('./index.js')
}
Index.js
const ClusterMessages = require('cluster-messages');
const messages = new ClusterMessages();
setInterval(() => {
messages.send('enough');
}, 1000);
The code is working fine (as such, in this example and in my code) but there seems to be a memory leak, as you can see from the output of this code:
0
enough
1
enough
enough
2
enough
enough
enough
3
enough
enough
enough
enough...
I tried several things, like swapping new Promise and messages.on(), and adding a return in the callback of the promise, but I have no clue what is happening here. Any ideas?
The solution is to use another event, via Node's EventEmitter, that can be listened to just once, in contrast to the 'event listener' of the cluster-messages package:
Server.js
if (cluster.isMaster) {
  let worker = cluster.fork()
  console.log(cluster);
  // Importing events
  const EventEmitter = require('events');
  const eventEmitter = new EventEmitter();
  messages.on('enough', () => {
    eventEmitter.emit('event');
  });
  (async () => {
    let cycle = 0
    while (true) {
      console.log(cycle);
      cycle++
      await Promise.all([
        enough(),
      ])
    }
    function enough() {
      return new Promise(resolve => {
        eventEmitter.once('event', () => {
          console.log('event');
          resolve()
        });
      });
    }
  })()
} else {
  require('./index.js')
}
Index.js
const ClusterMessages = require('cluster-messages');
const messages = new ClusterMessages();
setInterval(() => {
messages.send('enough');
}, 1000);
Every call of enough() installs another listener for the enough event on messages. They never get removed, leaking memory (and leading to an increasing number of logs per event). Instead, use the once method to install the listener:
function enough () {
return new Promise(resolve => {
messages.once('enough', () => {
// ^^^^
console.log('enough');
resolve();
});
});
}
Or even simpler, using once:
const { once } = require('events');
function enough() {
return once(messages, 'enough');
}
In your particular example, I would recommend not using promises to handle the events. You might even miss events that fire while you are removing and re-attaching a listener. Just write:
let cycle = 0
messages.on('enough', () => {
console.log(cycle);
cycle++;
});
If for some reason you need a loop that you can break from or await other things in, I would recommend an asynchronous iterator, built with on:
const { on } = require('events');
(async () => {
let cycle = 0
for await (const _ of on(messages, 'enough')) {
console.log(cycle);
cycle++;
}
})();

Using RxJS how to buffer function calls until an other async function call has resolved

How can I use RxJS to buffer function calls until another async function has resolved?
Here is a simple example of what I'd like to accomplish
function asyncFunc(time) {
setTimeout(() => {
console.log('asyncFunc has resolved');
}, time);
}
function funcToBuffer(time) {
setTimeout(() => {
console.log(time);
}, time);
}
asyncFunc(3000);
funcToBuffer(1000);
funcToBuffer(2000);
funcToBuffer(4000);
funcToBuffer(5000);
asyncFunc(8000);
funcToBuffer(6000);
funcToBuffer(7000);
At the moment this code will print:
1000
2000
asyncFunc has resolved
4000
5000
6000
7000
asyncFunc has resolved
What I want is for this to print:
asyncFunc has resolved
1000
2000
4000
5000
asyncFunc has resolved
6000
7000
In essence, I want some kind of control flow that allows me to call funcToBuffer whenever I feel like it, but under the hood I want it to hold off on executing whenever asyncFunc is executing and waiting to be resolved. Once asyncFunc has resolved, funcToBuffer calls should no longer be buffered and should execute right away.
I have tried playing with the buffer operator but I wasn't able to achieve the desired outcome.
If I understand it right, your main goal is to control the execution of a sequence of functions through a mechanism that buffers them until something happens, and that something is exactly what triggers the execution of the buffered functions.
If this is correct, the following could be the basis for a possible solution to your problem:
const functions$ = new Subject<() => any>();
const buffer$ = new Subject<any>();
const executeBuffer$ = new Subject<any>();
const setBuffer = (executionDelay: number) => {
buffer$.next();
setTimeout(() => {
executeBuffer$.next();
}, executionDelay);
}
const functionBuffer$ = functions$
.pipe(
bufferWhen(() => buffer$),
);
zip(functionBuffer$, executeBuffer$)
.pipe(
tap(functionsAndExecuteSignal => functionsAndExecuteSignal[0].forEach(f => f()))
)
.subscribe();
Let me explain a bit the code.
First thing, we build functions$, i.e. an Observable of the functions we want to control. The Observable is built using a Subject, since we want to be able to control its notifications programmatically. In other words, rather than kicking off the execution of a function like this: funcToBuffer(1000), we create the function (as an object) and ask the functions$ Observable to emit it, like this:
const aFunction = () => setTimeout(() => {console.log('I am a function that completes in 1 second');}, 1000);
functions$.next(aFunction);
In this way we have created a stream of functions that eventually will be executed.
Second thing, we create 2 more Observables, buffer$ and executeBuffer$, again using Subjects. Such Observables are used to signal when we have to create a buffer out of the functions emitted so far by functions$ and when we have to start the execution of the functions buffered.
These last 2 Observables are used in the function setBuffer. When you call setBuffer you basically say: please create a buffer with all the functions which have been emitted so far by functions$, and start executing them after the executionDelay time specified as a parameter.
The buffering part is performed by the functionBuffer$ Observable, which is created using the bufferWhen operator. The execution part is implemented leveraging the zip operator, which allows us to set the rhythm of execution of the functions based on the emissions of the executeBuffer$ Observable.
You can test the above code by setting up the following test data.
let f: () => any;
setBuffer(3000);
f = () => setTimeout(() => {console.log('f1');}, 1000);
functions$.next(f);
f = () => setTimeout(() => {console.log('f2');}, 2000);
functions$.next(f);
f = () => setTimeout(() => {console.log('f4');}, 4000);
functions$.next(f);
f = () => setTimeout(() => {console.log('f5');}, 5000);
functions$.next(f);
setBuffer(8000);
f = () => setTimeout(() => {console.log('f6');}, 6000);
functions$.next(f);
f = () => setTimeout(() => {console.log('f7');}, 7000);
functions$.next(f);
setBuffer(16000);
I started working on a solution with combineLatest but figured that a BehaviorSubject would be a better solution once I put more thought into it.
const { BehaviorSubject } = rxjs;
const { filter } = rxjs.operators;
let finalised$ = new BehaviorSubject(false);
function asyncFunc(time) {
setTimeout(() => {
console.log('asyncFunc has resolved');
if (!finalised$.getValue()) {
finalised$.next(true);
}
}, time);
}
function funcToBuffer(time) {
finalised$.pipe(filter(finalised => finalised)).subscribe(_ => { // Filter so we only fire when finalised is true
setTimeout(() => {
console.log(time);
}, time);
});
}
asyncFunc(3000);
funcToBuffer(1000);
funcToBuffer(2000);
funcToBuffer(4000);
funcToBuffer(5000);
asyncFunc(8000);
funcToBuffer(6000);
funcToBuffer(7000);
<script src="https://cdnjs.cloudflare.com/ajax/libs/rxjs/6.2.2/rxjs.umd.min.js"></script>
combineLatest waits for both observables to fire.
const { of, combineLatest } = rxjs;
const { delay } = rxjs.operators;
let obs1$ = of(1).pipe(delay(1000));
let obs2$ = of(2).pipe(delay(2000));
let now = new Date();
combineLatest(obs1$, obs2$).subscribe(([obs1, obs2]) => {
let ellapsed = new Date().getTime() - now.getTime();
console.log(`${obs1} - ${obs2} took ${ellapsed}`);
});
<script src="https://cdnjs.cloudflare.com/ajax/libs/rxjs/6.2.2/rxjs.umd.min.js"></script>

"Unsubscribe" function callback/hook in Observable "executor" function

I am confused about the purpose of the "dispose" or "unsubscribe" function that is (optionally) returned from an observable "executor" function, like so:
const Rx = require('rxjs');
const obs = Rx.Observable.create(obs => {
// we are in the Observable "executor" function
obs.next(4);
// we return this function, which gets called if we unsubscribe
return function () {
console.log('disposed');
}
});
const s1 = obs.subscribe(
function (v) {
console.log(v);
},
function (e) {
console.log(e);
},
function () {
console.log('complete');
}
);
const s2 = obs.subscribe(
function (v) {
console.log(v);
},
function (e) {
console.log(e);
},
function () {
console.log('complete');
}
);
s1.unsubscribe();
s2.unsubscribe();
What confuses me is that such a function would actually be more likely to hold on to references in your code and therefore prevent garbage collection.
Can anyone tell me what the purpose is of returning a function in that scenario, what the function is called, and what its signature is? I am having trouble finding information about it.
I also see much more complex examples of returning a subscription from the executor function, for example this:
let index = 0;
let obsEnqueue = this.obsEnqueue = new Rx.Subject();
this.queueStream = Rx.Observable.create(obs => {
const push = Rx.Subscriber.create(v => {
if ((index % obsEnqueue.observers.length) === obsEnqueue.observers.indexOf(push)) {
obs.next(v);
}
});
return obsEnqueue.subscribe(push);
});
This seems to return a subscription instead of just a plain function. Can anyone explain what's going on with this?
To make it a clear question, what is the difference between doing this:
const sub = new Rx.Subject();
const obs = Rx.Observable.create($obs => {
$obs.next(4);
return sub.subscribe($obs);
});
and not returning the result of the subscribe call:
const sub = new Rx.Subject();
const obs = Rx.Observable.create($obs => {
$obs.next(4);
sub.subscribe($obs);
});
The unsubscribe function returned from Rx.Observable.create is invoked when downstream no longer listens to the stream, giving you a chance to clean up resources.
In regard to your question: .subscribe() returns the subscription on which you can call .unsubscribe(). So if you want to do something with another subscription, you can pipe that subscription through to your downstream:
const obs = Rx.Observable.create($obs => {
const timer = Rx.Observable.interval(300)
.do(i => console.log('emission: ' + i))
return timer.subscribe($obs);
});
obs.take(4).subscribe(i => console.log('outer-emission:'+i))
<script src="https://cdnjs.cloudflare.com/ajax/libs/rxjs/5.0.2/Rx.js"></script>
Without the unsubscribe function you would stop listening to the observable but the interval created internally would keep on running:
const obs = Rx.Observable.create($obs => {
const timer = Rx.Observable.interval(300)
.do(i => console.log('emission: ' + i))
.take(10)
.subscribe(
val => $obs.next(val),
err => $obs.error(err),
() => $obs.complete()
);
return function(){} // empty unsubscribe function, internal subscription will keep on running
});
obs.take(4).subscribe(i => console.log('outer-emission:'+i))
<script src="https://cdnjs.cloudflare.com/ajax/libs/rxjs/5.0.2/Rx.js"></script>

RxJS: show loading if request is slow

I thought of using RxJS to solve this problem elegantly, but after trying various approaches, I couldn't find out how to do it...
My need is quite common: I do a REST call, i.e. I have a Promise.
If the response comes quickly, I just want to use the result.
If it is slow to come, I want to display a spinner, until the request completes.
This is to avoid a flash of the spinner, then the data.
Maybe it can be done by making two observables: one with the promise, the other with a timeout that shows the spinner as a side effect.
I tried switch() without much success, perhaps because the other observable doesn't produce a value.
Has somebody implemented something like that?
Based on @PhiLho's answer, I wrote a pipeable operator, which does exactly that:
export function executeDelayed<T>(
fn : () => void,
delay : number,
thisArg? : any
) : OperatorFunction<T, T> {
return function executeDelayedOperation(source : Observable<T>) : Observable<T> {
let timerSub = timer(delay).subscribe(() => fn());
return source.pipe(
tap(
() => {
timerSub.unsubscribe();
timerSub = timer(delay).subscribe(() => fn());
},
undefined,
() => {
timerSub.unsubscribe();
}
)
);
}
}
Basically it returns a function, which gets the Observable source.
Then it starts a timer, using the given delay.
If this timer emits a next-event, the function is called.
However, if the source emits a next, the timer is cancelled and a new one is started.
When the source completes, the timer is finally cancelled.
This operator can then be used like this:
this.loadResults().pipe(
executeDelayed(
() => this.startLoading(),
500
)
).subscribe(results => this.showResult())
I did not write many operators myself, so this operator implementation might not be the best, but it works.
Any suggestions on how to optimize it are welcome :)
EDIT:
As @DauleDK mentioned, an error won't stop the timer in this case, and fn will be called after the delay. If that's not what you want, you need to add an onError callback in the tap, which calls timerSub.unsubscribe():
export function executeDelayed<T>(
fn : () => void,
delay : number,
thisArg? : any
) : OperatorFunction<T, T> {
return function executeDelayedOperation(source : Observable<T>) : Observable<T> {
let timerSub = timer(delay).subscribe(() => fn());
return source.pipe(
tap(
() => {
timerSub.unsubscribe();
timerSub = timer(delay).subscribe(() => fn());
},
() => timerSub.unsubscribe(), // unsubscribe on error
() => timerSub.unsubscribe()
)
);
}
}
Here is an example that I have used. We assume here that you also get the data that you want to send to the server as an Observable, called query$. A query coming in will then trigger the loadResults function, which should return a promise and put the result into the results$ observable.
Now the trick is to use observable$.map(() => new Date()) to get the timestamp of the last emitted value.
Then we can compare the timestamps of the last query and the last response that came in from the server.
Since you also wanted to not only show a loading animation, but wanted to wait for 750ms before showing the animation, we introduce the delayed timestamp. See the comments below for a bit more explanation.
At the end we have the isLoading$ Observable that contains true or false. Subscribe to it, to get notified when to show/hide the loading animation.
const query$ = ... // From user input.
const WAIT_BEFORE_SHOW_LOADING = 750;
const results$ = query$.flatMapLatest(loadResults);
const queryTimestamp$ = query$.map(() => new Date());
const resultsTimestamp$ = results$.map(() => new Date());
const queryDelayTimestamp$ = (
// For every query coming in, we wait 750ms, then create a timestamp.
query$
.delay(WAIT_BEFORE_SHOW_LOADING)
.map(() => new Date())
);
const isLoading$ = (
queryTimestamp$.combineLatest(
resultsTimestamp$,
queryDelayTimestamp$,
(queryTimestamp, resultsTimestamp, delayTimestamp) => {
return (
// If the latest query is more recent than the latest
// results we got we can assume that
// it's still loading.
queryTimestamp > resultsTimestamp &&
// But only show the isLoading animation when delay has passed
// as well.
delayTimestamp > resultsTimestamp
);
}
)
.startWith(false)
.distinctUntilChanged()
);
OK, thinking about it more during my commute, I found a solution...
You can find my experiment ground at http://plnkr.co/edit/Z3nQ8q
In short, the solution is to actually subscribe to the observable handling the spinner (instead of trying to compose it in some way).
If the result of the REST request comes before the observable fires, we just cancel the spinner's disposable (subscription), so it does nothing.
Otherwise, the observable fires and displays its spinner. We can then just hide it after receiving the response.
Code:
function test(loadTime)
{
var prom = promiseInTime(loadTime, { id: 'First'}); // Return data after a while
var restO = Rx.Observable.fromPromise(prom);
var load = Rx.Observable.timer(750);
var loadD = load.subscribe(
undefined,
undefined,
function onComplete() { show('Showing a loading spinner'); });
restO.subscribe(
function onNext(v) { show('Next - ' + JSON.stringify(v)); },
function onError(e) { show('Error - ' + JSON.stringify(e)); loadD.dispose(); },
function onComplete() { show('Done'); loadD.dispose(); }
);
}
test(500);
test(1500);
Not sure if that's an idiomatic way of doing this with RxJS, but it seems to work...
Other solutions are welcome, of course.
Just before fetching the data, set a timeout for a function that creates the spinner. Let's say you are willing to wait half a second before showing the spinner... it would be something like:
let spinnerTimeout = setTimeout(showSpinner, 500)
fetch(url).then(data => {
  clearTimeout(spinnerTimeout) // this is critical: stops showSpinner if data arrived first
  if (spinner) { // `spinner` is assumed to be set by showSpinner()
    removeSpinnerElement()
  }
  doSomethingWith(data)
});
EDIT: if it's not obvious, clearTimeout stops showSpinner from executing if the data arrived sooner than 500ms(ish).
Here is my solution :
public static addDelayedFunction<T>(delayedFunction: Function, delay_ms: number): (mainObs: Observable<T>) => Observable<T> {
const stopTimer$: Subject<void> = new Subject<void>();
const stopTimer = (): void => {
stopTimer$.next();
stopTimer$.complete();
};
const catchErrorAndStopTimer = (obs: Observable<T>): Observable<T> => {
return obs.pipe(catchError(err => {
stopTimer();
throw err;
}));
};
const timerObs: Observable<any> = of({})
.pipe(delay(delay_ms))
.pipe(takeUntil(stopTimer$))
.pipe(tap(() => delayedFunction()));
return (mainObs: Observable<T>) => catchErrorAndStopTimer(
of({})
.pipe(tap(() => timerObs.subscribe()))
.pipe(mergeMap(() => catchErrorAndStopTimer(mainObs.pipe(tap(stopTimer)))))
);
}

Promise - is it possible to force cancel a promise

I use ES6 Promises to manage all of my network data retrieval and there are some situations where I need to force cancel them.
Basically the scenario is such that I have a type-ahead search in the UI, where the request is delegated to the backend, which has to carry out the search based on the partial input. While this network request (#1) may take a little bit of time, the user continues to type, which eventually triggers another backend call (#2).
Here #2 naturally takes precedence over #1 so I would like to cancel the Promise wrapping request #1. I already have a cache of all Promises in the data layer so I can theoretically retrieve it as I am attempting to submit a Promise for #2.
But how do I cancel Promise #1 once I retrieve it from the cache?
Could anyone suggest an approach?
In modern JavaScript - no
Promises have settled (hah) and it appears like it will never be possible to cancel a (pending) promise.
Instead, there is a cross-platform (Node, Browsers etc) cancellation primitive as part of WHATWG (a standards body that also builds HTML) called AbortController. You can use it to cancel functions that return promises rather than promises themselves:
// Take a signal parameter in the function that needs cancellation
async function somethingIWantToCancel({ signal } = {}) {
// either pass it directly to APIs that support it
// (fetch and most Node APIs do)
const response = await fetch('.../', { signal });
// return response.json;
// or if the API does not already support it -
// manually adapt your code to support signals:
const onAbort = (e) => {
// run any code relating to aborting here
};
signal.addEventListener('abort', onAbort, { once: true });
// and be sure to clean it up when the action you are performing
// is finished to avoid a leak
// ... sometime later ...
signal.removeEventListener('abort', onAbort);
}
// Usage
const ac = new AbortController();
setTimeout(() => ac.abort(), 1000); // give it a 1s timeout
try {
await somethingIWantToCancel({ signal: ac.signal });
} catch (e) {
if (e.name === 'AbortError') {
// deal with cancellation in caller, or ignore
} else {
throw e; // don't swallow errors :)
}
}
No. We can't do that yet.
ES6 promises do not support cancellation yet. It's on its way, and its design is something a lot of people worked really hard on. Sound cancellation semantics are hard to get right and this is work in progress. There are interesting debates on the "fetch" repo, on esdiscuss and on several other repos on GH but I'd just be patient if I were you.
But, but, but.. cancellation is really important!
It is, the reality of the matter is cancellation is really an important scenario in client-side programming. The cases you describe like aborting web requests are important and they're everywhere.
So... the language screwed me!
Yeah, sorry about that. Promises had to get in first before further things were specified - so they went in without some useful stuff like .finally and .cancel - it's on its way to the spec, though, through the DOM. Cancellation is not an afterthought; it's just a time constraint and a more iterative approach to API design.
So what can I do?
You have several alternatives:
Use a third party library like bluebird who can move a lot faster than the spec and thus have cancellation as well as a bunch of other goodies - this is what large companies like WhatsApp do.
Pass a cancellation token.
Using a third party library is pretty obvious. As for a token, you can make your method take a function in and then call it, as such:
function getWithCancel(url, token) { // the token is for cancellation
  var xhr = new XMLHttpRequest;
  xhr.open("GET", url);
  return new Promise(function(resolve, reject) {
    xhr.onload = function() { resolve(xhr.responseText); };
    token.cancel = function() { // SPECIFY CANCELLATION
      xhr.abort(); // abort request
      reject(new Error("Cancelled")); // reject the promise
    };
    xhr.onerror = reject;
    xhr.send(); // actually dispatch the request
  });
}
Which would let you do:
var token = {};
var promise = getWithCancel("/someUrl", token);
// later we want to abort the promise:
token.cancel();
Your actual use case - last
This isn't too hard with the token approach:
function last(fn) {
var lastToken = { cancel: function(){} }; // start with no op
return function() {
lastToken.cancel();
var args = Array.prototype.slice.call(arguments);
args.push(lastToken);
return fn.apply(this, args);
};
}
Which would let you do:
var synced = last(getWithCancel);
synced("/url1?q=a"); // this will get canceled
synced("/url1?q=ab"); // this will get canceled too
synced("/url1?q=abc"); // this will get canceled too
synced("/url1?q=abcd").then(function() {
// only this will run
});
And no, libraries like Bacon and Rx don't "shine" here because they're observable libraries; they just have the same advantage user-level promise libraries have by not being spec-bound. I guess we'll have to wait and see in ES2016 when observables go native. They are nifty for typeahead though.
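As an aside, here is a minimal sketch of the typeahead case with observables (purely illustrative; the input element, the /search endpoint and the renderResults callback are made up): switchMap drops the previous in-flight request whenever a new term arrives, although with a plain promise the underlying network call is merely ignored rather than aborted.
import { fromEvent } from 'rxjs';
import { map, debounceTime, switchMap } from 'rxjs/operators';

const results$ = fromEvent(input, 'input').pipe(
  map(ev => ev.target.value),
  debounceTime(200),
  switchMap(term => fetch(`/search?q=${encodeURIComponent(term)}`).then(r => r.json()))
);
results$.subscribe(renderResults);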
With AbortController
It is possible to use an AbortController to reject or resolve a promise on demand:
let controller = new AbortController();
let task = new Promise((resolve, reject) => {
// some logic ...
const abortListener = ({target}) => {
controller.signal.removeEventListener('abort', abortListener);
reject(target.reason);
}
controller.signal.addEventListener('abort', abortListener);
});
controller.abort('cancelled reason'); // task is now in rejected state
Also, it's better to remove the event listener on abort to prevent memory leaks.
And you can later check whether the error was thrown by abort by checking the controller.signal.aborted boolean property, like:
const res = task.catch((err) => (
controller.signal.aborted
? { value: err }
: { value: 'fallback' }
));
If you just check whether the task is aborted and return, the Promise will stay in pending status forever. But in that case .catch will also never fire with any error, if that's your intention:
controller.abort();
new Promise((resolve, reject) => {
if (controller.signal.aborted) return;
});
The same works for cancelling fetch:
let controller = new AbortController();
fetch(url, {
signal: controller.signal
});
or just pass controller:
let controller = new AbortController();
fetch(url, controller);
And call the abort method to cancel one, or any number of, fetches to which you passed this controller:
controller.abort();
Standard proposals for cancellable promises have failed.
A promise is not a control surface for the async action fulfilling it; that confuses owner with consumer. Instead, create asynchronous functions that can be cancelled through some passed-in token.
Another promise makes a fine token, making cancel easy to implement with Promise.race:
Example: Use Promise.race to cancel the effect of a previous chain:
let cancel = () => {};
input.oninput = function(ev) {
let term = ev.target.value;
console.log(`searching for "${term}"`);
cancel();
let p = new Promise(resolve => cancel = resolve);
Promise.race([p, getSearchResults(term)]).then(results => {
if (results) {
console.log(`results for "${term}"`,results);
}
});
}
function getSearchResults(term) {
return new Promise(resolve => {
let timeout = 100 + Math.floor(Math.random() * 1900);
setTimeout(() => resolve([term.toLowerCase(), term.toUpperCase()]), timeout);
});
}
Search: <input id="input">
Here we're "cancelling" previous searches by injecting an undefined result and testing for it, but we could easily imagine rejecting with "CancelledError" instead.
Of course this doesn't actually cancel the network search, but that's a limitation of fetch. If fetch were to take a cancel promise as argument, then it could cancel the network activity.
I've proposed this "Cancel promise pattern" on es-discuss, exactly to suggest that fetch do this.
I have checked out Mozilla JS reference and found this:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/race
Let's check it out:
var p1 = new Promise(function(resolve, reject) {
setTimeout(resolve, 500, "one");
});
var p2 = new Promise(function(resolve, reject) {
setTimeout(resolve, 100, "two");
});
Promise.race([p1, p2]).then(function(value) {
console.log(value); // "two"
// Both resolve, but p2 is faster
});
Here we have p1 and p2 put into Promise.race(...) as arguments; this actually creates a new promise that resolves as soon as the faster one does, which is what you require.
For Node.js and Electron, I'd highly recommend using Promise Extensions for JavaScript (Prex). Its author Ron Buckton is one of the key TypeScript engineers and is also the guy behind the current TC39 ECMAScript Cancellation proposal. The library is well documented, and chances are some of Prex will make it into the standard.
On a personal note, coming from a C# background, I like very much the fact that Prex is modelled upon the existing Cancellation in Managed Threads framework, i.e. based on the approach taken with the CancellationTokenSource/CancellationToken .NET APIs. In my experience, those have been very handy for implementing robust cancellation logic in managed apps.
I also verified it to work within a browser by bundling Prex using Browserify.
Here is an example of a delay with cancellation (Gist and RunKit, using Prex for its CancellationToken and Deferred):
// by @noseratio
// https://gist.github.com/noseratio/141a2df292b108ec4c147db4530379d2
// https://runkit.com/noseratio/cancellablepromise
const prex = require('prex');
/**
* A cancellable promise.
* @extends Promise
*/
class CancellablePromise extends Promise {
static get [Symbol.species]() {
// tinyurl.com/promise-constructor
return Promise;
}
constructor(executor, token) {
const withCancellation = async () => {
// create a new linked token source
const linkedSource = new prex.CancellationTokenSource(token? [token]: []);
try {
const linkedToken = linkedSource.token;
const deferred = new prex.Deferred();
linkedToken.register(() => deferred.reject(new prex.CancelError()));
executor({
resolve: value => deferred.resolve(value),
reject: error => deferred.reject(error),
token: linkedToken
});
await deferred.promise;
}
finally {
// this will also free all linkedToken registrations,
// so the executor doesn't have to worry about it
linkedSource.close();
}
};
super((resolve, reject) => withCancellation().then(resolve, reject));
}
}
/**
* A cancellable delay.
* @extends Promise
*/
class Delay extends CancellablePromise {
static get [Symbol.species]() { return Promise; }
constructor(delayMs, token) {
super(r => {
const id = setTimeout(r.resolve, delayMs);
r.token.register(() => clearTimeout(id));
}, token);
}
}
// main
async function main() {
const tokenSource = new prex.CancellationTokenSource();
const token = tokenSource.token;
setTimeout(() => tokenSource.cancel(), 2000); // cancel after 2000ms
let delay = 1000;
console.log(`delaying by ${delay}ms`);
await new Delay(delay, token);
console.log("successfully delayed."); // we should reach here
delay = 2000;
console.log(`delaying by ${delay}ms`);
await new Delay(delay, token);
console.log("successfully delayed."); // we should not reach here
}
main().catch(error => console.error(`Error caught, ${error}`));
Note that cancellation is a race. I.e., a promise may have been resolved successfully, but by the time you observe it (with await or then), the cancellation may have been triggered as well. It's up to you how you handle this race, but it doesn't hurt to call token.throwIfCancellationRequested() an extra time, like I do above.
I faced a similar problem recently.
I had a promise-based client (not a network one) and I wanted to always give the latest requested data to the user to keep the UI smooth.
After struggling with the cancellation idea, Promise.race(...) and Promise.all(...), I just started remembering my last request id, and when a promise was fulfilled I only rendered the data if it matched the id of the last request.
Hope it helps someone.
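A minimal sketch of that last-request-id idea (client.query and render are illustrative placeholders, not from the original post):
let latestRequestId = 0;

async function search(term) {
  const requestId = ++latestRequestId; // remember which request this is
  const data = await client.query(term); // the promise-based client mentioned above
  if (requestId !== latestRequestId) return; // a newer request was issued; drop this result
  render(data); // only the latest response reaches the UI
}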
See https://www.npmjs.com/package/promise-abortable
$ npm install promise-abortable
You can make the promise reject before finishing:
// Our function to cancel promises receives a promise and return the same one and a cancel function
const cancellablePromise = (promiseToCancel) => {
let cancel
const promise = new Promise((resolve, reject) => {
cancel = reject
promiseToCancel
.then(resolve)
.catch(reject)
})
return {promise, cancel}
}
// A simple promise to execute a function with a delay
const waitAndExecute = (time, functionToExecute) => new Promise((resolve, reject) => {
const timeInMs = time * 1000
setTimeout(() => {
console.log(`Waited ${time} secs`)
resolve(functionToExecute())
}, timeInMs)
})
// The promise that we will cancel
const fetchURL = () => fetch('https://pokeapi.co/api/v2/pokemon/ditto/')
// Create a promise that resolves in 1 second. (We will cancel it in 0.5 secs)
const {promise, cancel} = cancellablePromise(waitAndExecute(1, fetchURL))
promise
.then((res) => {
console.log('then', res) // This will executed in 1 second
})
.catch(() => {
console.log('catch') // We will force the promise reject in 0.5 seconds
})
waitAndExecute(0.5, cancel) // Cancel previous promise in 0.5 seconds, so it will be rejected before finishing. Commenting this line will make the promise resolve
Unfortunately the fetch call has already been made, so you will see the call resolving in the Network tab. Your code will just ignore it.
Using the Promise subclass provided by the external package, this can be done as follows: Live demo
import CPromise from "c-promise2";
function fetchWithTimeout(url, {timeout, ...fetchOptions}= {}) {
return new CPromise((resolve, reject, {signal}) => {
fetch(url, {...fetchOptions, signal}).then(resolve, reject)
}, timeout)
}
const chain= fetchWithTimeout('http://localhost/')
.then(response => response.json())
.then(console.log, console.warn);
//chain.cancel(); call this to abort the promise and releated request
Using AbortController
I've been researching this for a few days and I still feel that rejecting the promise inside an abort event handler is only part of the approach.
The thing is that, as you may know, rejecting a promise only makes the code awaiting it resume execution; if there's any code that runs after the rejection or resolution of the promise, or outside of its execution scope (e.g. inside an event listener or an async call), it will keep running, wasting cycles and maybe even memory on something that isn't really needed anymore.
Lacking approach
When executing the snippet below, after 2 seconds the console will contain the output derived from the promise rejection, and any output derived from the pending work. The promise will be rejected and the code awaiting it can continue, but the work itself will not stop, which in my opinion is the main point of this exercise.
let abortController = new AbortController();
new Promise( ( resolve, reject ) => {
if ( abortController.signal.aborted ) return;
let abortHandler = () => {
reject( 'Aborted' );
};
abortController.signal.addEventListener( 'abort', abortHandler );
setTimeout( () => {
console.log( 'Work' );
console.log( 'More work' );
resolve( 'Work result' );
abortController.signal.removeEventListener( 'abort', abortHandler );
}, 2000 );
} )
.then( result => console.log( 'then:', result ) )
.catch( reason => console.error( 'catch:', reason ) );
setTimeout( () => abortController.abort(), 1000 );
Which leads me to think that after defining the abort event handler there must be calls to
if ( abortController.signal.aborted ) return;
in sensible points of the code that is performing the work so that the work doesn't get performed and can gracefully stop if necessary (Adding more statements before the return in the if block above).
Proposal
This approach reminds me a little of the cancellable token proposal from a few years back, but it will in fact prevent work from being performed in vain. The console output should now only be the abort error and nothing more; and even when the work is in progress and then cancelled in the middle, it can stop, as said before, at a sensible step of the processing, like at the beginning of a loop's body.
let abortController = new AbortController();
new Promise( ( resolve, reject ) => {
if ( abortController.signal.aborted ) return;
let abortHandler = () => {
reject( 'Aborted' );
};
abortController.signal.addEventListener( 'abort', abortHandler );
setTimeout( () => {
if ( abortController.signal.aborted ) return;
console.log( 'Work' );
if ( abortController.signal.aborted ) return;
console.log( 'More work' );
resolve( 'Work result' );
abortController.signal.removeEventListener( 'abort', abortHandler );
}, 2000 );
} )
.then( result => console.log( 'then:', result ) )
.catch( reason => console.error( 'catch:', reason ) );
setTimeout( () => abortController.abort(), 1000 );
I found the posted solutions here a little hard to read, so I created a helper function that is in my opinion easier to use.
The helper function gives access to the information of whether the current call is already obsolete or not. With this information, the function itself has to take care of things accordingly (usually by simply returning).
// Typescript
export function obsoletableFn<Res, Args extends unknown[]>(
fn: (isObsolete: () => boolean, ...args: Args) => Promise<Res>,
): (...args: Args) => Promise<Res> {
let lastCaller = null;
return (...args: Args) => {
const me = Symbol();
lastCaller = me;
const isObsolete = () => lastCaller !== me;
return fn(isObsolete, ...args);
};
}
// helper function
function obsoletableFn(fn) {
let lastCaller = null;
return (...args) => {
const me = Symbol();
lastCaller = me;
const isObsolete = () => lastCaller !== me;
return fn(isObsolete, ...args);
};
}
const simulateRequest = () => new Promise(resolve => setTimeout(resolve, Math.random() * 2000 + 1000));
// usage
const myFireAndForgetFn = obsoletableFn(async(isObsolete, x) => {
console.log(x, 'starting');
await simulateRequest();
if (isObsolete()) {
console.log(x, 'is obsolete');
// return, as there is already a more recent call running
return;
}
console.log(x, 'is not obsolete');
document.querySelector('div').innerHTML = `Response ${x}`;
});
myFireAndForgetFn('A');
myFireAndForgetFn('B');
<div>Waiting for response...</div>
So I have an async function that I needed to cancel on user input, but it's a long running one that involves mouse control.
I used p-queue, added each line of my function into it, and have an observable that I feed the cancellation signal. Anything the queue starts processing will run no matter what, but you should be able to cancel anything after that by clearing the queue. The shorter the tasks you add to the queue, the sooner you can quit after getting the cancel signal. You can be lazy and throw whole chunks of code into the queue instead of the one-liners I have in the example.
p-queue version 6 works with CommonJS; 7+ switches to ESM and could break your app. It breaks my electron/typescript/webpack one.
const cancellable_function = async () => {
const queue = new PQueue({concurrency:1});
queue.pause();
queue.addAll([
async () => await move_mouse({...}),
async () => await mouse_click({...}),
])
for await (const item of items) {
queue.addAll([
async () => await do_something({...}),
async () => await do_something_else({...}),
])
}
const {information} = await get_information();
queue.addAll([
async () => await move_mouse({...}),
async () => await mouse_click({...}),
])
cancel_signal$.pipe(take(1)).subscribe(() => {
queue.clear();
});
queue.start();
await queue.onEmpty()
}
Because @jib rejected my edit, I post my answer here. It's just a modification of @jib's answer with some comments and more understandable variable names.
Below I just show examples of two different methods: one is resolve(), the other is reject().
let cancelCallback = () => {};
input.oninput = function(ev) {
let term = ev.target.value;
console.log(`searching for "${term}"`);
cancelCallback(); //cancel previous promise by calling cancelCallback()
let setCancelCallbackPromise = () => {
return new Promise((resolve, reject) => {
// set cancelCallback when running this promise
cancelCallback = () => {
// pass cancel messages by resolve()
return resolve('Canceled');
};
})
}
Promise.race([setCancelCallbackPromise(), getSearchResults(term)]).then(results => {
// check if the calling of resolve() is from cancelCallback() or getSearchResults()
if (results == 'Canceled') {
console.log("error(by resolve): ", results);
} else {
console.log(`results for "${term}"`, results);
}
});
}
input2.oninput = function(ev) {
let term = ev.target.value;
console.log(`searching for "${term}"`);
cancelCallback(); //cancel previous promise by calling cancelCallback()
let setCancelCallbackPromise = () => {
return new Promise((resolve, reject) => {
// set cancelCallback when running this promise
cancelCallback = () => {
// pass cancel messages by reject()
return reject('Canceled');
};
})
}
Promise.race([setCancelCallbackPromise(), getSearchResults(term)]).then(results => {
// check if the calling of resolve() is from cancelCallback() or getSearchResults()
if (results !== 'Canceled') {
console.log(`results for "${term}"`, results);
}
}).catch(error => {
console.log("error(by reject): ", error);
})
}
function getSearchResults(term) {
return new Promise(resolve => {
let timeout = 100 + Math.floor(Math.random() * 1900);
setTimeout(() => resolve([term.toLowerCase(), term.toUpperCase()]), timeout);
});
}
Search(use resolve): <input id="input">
<br> Search2(use reject and catch error): <input id="input2">
