AWS Lambda and Promises: Callbacks not being called - javascript

I'm assuming that I'm lacking some fundamentals on promises. I have a process within an AWS Lambda that downloads three files, then produces an output that is sent via email.
module.exports.test = async (event) => {
  var p = download1();
  var c = download2();
  var h = download3();
  await Promise.all([p, c, h]).then(() => {
    // ... bunch of logic manipulating the data
    customers.forEach(i => {
      buildFile().then(data => {
        sendEmail(data).then(response => {
          console.log('Email sent successfully');
        });
      });
    });
  }, errHandler);
};
Both the buildFile and sendEmail functions return a Promise, but I never get to the 'Email sent successfully' message. It runs the code, but never actually returns before the Lambda completes (at least that's what I think is happening).
My understanding was that the Promise would fulfill the callback, but now I'm thinking I need to do something similar to how I did the downloads within the original Promise.all(). Is that the right direction?
The process should get the files, then it loops through customers to create each file and send via SES.

You're looking for
module.exports.test = async (event) => {
  var p = download1();
  var c = download2();
  var h = download3();
  try {
    await Promise.all([p, c, h]);
    // ... bunch of logic manipulating the data
    var promises = customers.map(async (i) => {
      var data = await buildFile();
      var response = await sendEmail(data);
      console.log('Email sent successfully');
    });
    await Promise.all(promises);
  } catch (e) {
    errHandler(e);
  }
};
Your test function didn't wait for the promises that you created in the forEach loop, so the lambda completes before everything is done.
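To make the difference concrete, here is a minimal sketch (the delay helper is made up for illustration): forEach discards the promises its callback produces, while map collects them so Promise.all can wait.
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function withForEach(items) {
  // forEach ignores the promises returned by the async callback,
  // so this function resolves immediately
  items.forEach(async (i) => {
    await delay(100);
    console.log('forEach item', i);
  });
}

async function withMap(items) {
  // map collects the promises, so Promise.all can wait for all of them
  await Promise.all(items.map(async (i) => {
    await delay(100);
    console.log('map item', i);
  }));
}

withForEach([1, 2]).then(() => console.log('forEach "done" (too early)'));
withMap([1, 2]).then(() => console.log('map done (after all items)'));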

@Bergi's answer is correct, but I would like to expand on it a little and give you some resources to strengthen your Promises knowledge. I'll use the following snippet of code. It's a bit cumbersome because I wrote it in Google Chrome snippets, so feel free to paste it there and play around with it:
(function() {
  const promise1 = new Promise(function(resolve, reject) {
    setTimeout(function() {
      resolve('Replicant');
    }, 300);
  });
  const promise2 = new Promise(function(resolve, reject) {
    setTimeout(function() {
      resolve('Human?');
    }, 300);
  });
  function buildFile(type) {
    return new Promise((resolve, reject) => {
      setTimeout(() => {
        resolve(`${type}`);
      }, 300);
    });
  }
  function sendMail(customer, answer) {
    return new Promise((resolve, reject) => {
      setTimeout(() => {
        resolve(`Sent test to: ${customer}, he/she is a ${answer}`);
      }, 300);
    });
  }
  let customers = ['Rob Batty', 'Rachel', 'Deckard'];
  async function myFunc() {
    const [a, b, c] = await Promise.all([promise1, promise1, promise2]);
    const answers = [a, b, c];
    // const promises = customers.map(async (name, index) => {
    //   const file = await buildFile(answers[index]);
    //   const mail = await sendMail(name, file);
    //   console.log(mail);
    // });
    // Note: the callback must return the chain, otherwise `promises` is an
    // array of undefined and Promise.all resolves with no values to destructure.
    const promises = customers.map((name, index) =>
      buildFile(answers[index])
        .then(file => sendMail(name, file))
        .then(sent => { console.log(sent); return sent; })
        // If you're going to use Promises this is important! :D
        .catch(err => console.log(err))
    );
    const [m1, m2, m3] = await Promise.all(promises);
    console.log(m1, m2, m3);
  }
  myFunc();
})()
As pointed out in the answer, the problem is related to the use of forEach. Why? Simply because you're executing asynchronous code inside a synchronous method, and the two don't get along very well :). So the solution is to create an array of Promises, like a factory. After the map call the Promises are pending, neither fulfilled nor rejected; it's when you call Promise.all() that you await their results and they give you the values, or in your use case, generate the file and then send the email to the user. I hope this helps you get a better understanding of how Promises work. I'll leave two links at the end that were very important, at least for me, and helped me out at some point with Promises. Cheers, sigfried.
Nolan Lawson's article
MDN Promises

Related

JavaScript Promises with AJAX

I am trying to write the results of a series of AJAX requests into a dictionary.
I am attempting to use promises for this; however, either I am writing the promise syntax incorrectly, or (what I think may be happening) the function is actually completing (the for loop is done and the AJAX requests are sent) but the AJAX requests have still not returned. Therefore this is still returning an empty dictionary.
let dict = {};
let activeMachines = ["41", "42", "43"];
let dataPromise = new Promise(function (resolve, reject) {
  for (let i = 0; i < activeMachines.length; i++) {
    let machineID = activeMachines[i];
    let getAPIData = new XMLHttpRequest();
    let url = 'http://127.0.0.1:8000/processes/apidata/' + machineID + '/';
    getAPIData.open('GET', url);
    getAPIData.send();
    getAPIData.onload = function() {
      let APIData = JSON.parse(getAPIData.responseText);
      dict['machine_' + machineID] = APIData[0].author_id;
      dict['temp' + machineID] = APIData[0].tempData; // get value
      dict['humid' + machineID] = APIData[0].humidData;
      timeValue = String((APIData[0].dateTime));
      dict['time' + machineID] = new Date(timeValue);
      console.log("done");
    }
  }
  resolve();
});
dataPromise.then(function() { console.log(dict); });
Is there a way to "sense" when all of the XMLHTTPRequests have returned?
@Rafael's answer will work, but it doesn't illuminate much about what's going wrong, since you're trying to grok the concept of Promises and write one yourself.
Fundamentally I think your approach has two missteps: 1. creating a single Promise that handles calls to all of your arbitrary list of "activeMachines", and 2. putting your resolve() call in the wrong place.
Usually a Promise looks like this:
const myPromise = new Promise(function(resolve, reject) {
  doSomeAsyncWork(function(result) {
    // Some kind of async call with a callback function or somesuch...
    resolve(result);
  });
}).then(data => {
  // Do something with the final result
  console.log(data);
});
You can simulate some kind of arbitrary asynchronous work with setTimeout():
const myPromise = new Promise(function(resolve, reject) {
  // Resolve with "Done!" after 5 seconds
  setTimeout(() => {
    resolve("Done!");
  }, 5000);
}).then(data => {
  console.log(data); // "Done!"
});
However your original code puts the resolve() call in a weird place, and doesn't even pass it any data. It looks sorta equivalent to this:
const myPromise = new Promise(function(resolve, reject) {
  setTimeout(() => {
    // Doing some work here instead of resolving...
  }, 5000);
  resolve();
}).then(data => {
  console.log(data); // This would be "undefined"
});
Where you're doing a console.log("done"); in your original code is actually where you should be doing a resolve(someData);!
You're also trying to do side effect work inside of your Promise's async function stuff, which is really weird and contrary to how a Promise is supposed to work. The promise is supposed to go off and do its async work, and then resolve with the resulting data -- literally with the .then() chain.
Also, instead of doing multiple asynchronous calls inside of your Promise, you should generalize it so it is reusable and encapsulates only a single network request. That way you can fire off multiple asynchronous Promises, wait for them all to resolve, and then do something.
const activeMachines = ["41", "42", "43"];
// Make a reusable function that returns a single Promise
function fetchAPI(num) {
  return new Promise(function(resolve, reject) {
    const getAPIData = new XMLHttpRequest();
    const url = "http://127.0.0.1:8000/processes/apidata/" + num + "/";
    getAPIData.open("GET", url);
    getAPIData.send();
    getAPIData.onload = function() {
      const APIData = JSON.parse(getAPIData.responseText);
      const resolveData = {};
      resolveData["machine_" + num] = APIData[0].author_id;
      resolveData["temp" + num] = APIData[0].tempData; // get value
      resolveData["humid" + num] = APIData[0].humidData;
      const timeValue = String(APIData[0].dateTime);
      resolveData["time" + num] = new Date(timeValue);
      resolve(resolveData);
    };
  });
}
// Promise.all() will resolve once all Promises in its array have also resolved
Promise.all(
  activeMachines.map(ea => {
    return fetchAPI(ea);
  })
).then(data => {
  // All of your network Promises have completed!
  // The value of "data" here will be an array of all your network results
});
The fetch() API is great and you should learn to use that also -- but only once you understand the theory and practice behind how Promises actually operate. :)
Here's an example of the Fetch API which uses Promises by default:
let m_ids = [1, 2, 3, 4];
let forks = m_ids.map(m => fetch(`http://127.0.0.1:8000/processes/apidata/${m}`));
let joined = Promise.all(forks);
joined
  .then(files => console.log('all done', files))
  .catch(error => console.error(error));
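One caveat: fetch resolves with Response objects, not parsed data. A small sketch of the same pattern, with each body parsed as JSON before Promise.all collects the results:
let m_ids = [1, 2, 3, 4];
// response.json() itself returns a promise, so chain it per request
let forks = m_ids.map(m =>
  fetch(`http://127.0.0.1:8000/processes/apidata/${m}`)
    .then(response => response.json())
);
Promise.all(forks)
  .then(results => console.log('all parsed', results))
  .catch(error => console.error(error));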
I hope this helps!

Correct way to sequence two asynchronous operations that each return a promise javascript

I was wondering what the proper way is to call a promise after another promise resolves. I know we can use async/await to create functions which will resolve a promise. I was wondering which form of handling promises is considered proper practice, or is it good practice to create generators instead? Consider the following code:
const fetchSomething = () => new Promise((resolve) => {
  setTimeout(() => resolve(console.log('future value')), 500);
});
const fetchSomethingElse = () => new Promise((resolve) => {
  setTimeout(() => resolve(console.log('future value dueeee')), 3000);
});
const get = () => {
  return fetchSomething().then(function(){
    fetchSomethingElse()
  });
}
get();
or
const fetchSomething = () => new Promise((resolve) => {
  setTimeout(() => resolve({resolve: true}), 500);
});
const fetchSomethingElse = () => new Promise((resolve) => {
  setTimeout(() => resolve({resolve: true}), 3000);
});
const get = async function() {
  const fet = await fetchSomething();
  const fet2 = await fetchSomethingElse();
};
get();
get();
Either one is fine. Your choice.
In the first you're nesting .then() handlers. In the second, you're using the newer await to sequence them. More people are moving to await because it appears to be simpler code for sequencing operations (assuming you do proper error handling), though in this case they are pretty similar in complexity, particularly with the simplification suggested below, so it's really up to your own personal coding style.
What is missing in both is that get() just returned a promise so you need to use .then() and .catch() with it to get the value and catch any errors.
Also, something that is missing in the first is that you aren't returning the second promise which means the caller won't know when the second operation is done.
Your first can be simplified and fixed up like this:
const get = () => {
  return fetchSomething().then(fetchSomethingElse);
}
get().then(val => {
  // done here
}).catch(err => {
  // error here
});
As Pointy mentioned, you don't "call a promise". You "call a function that returns a promise". Promises are objects. They are not callable.
Probably what your title could be rewritten to is: "Correct way to sequence two asynchronous operations that each return a promise".
For completeness, if your two async operations don't depend upon one another, then you don't have to manually sequence them. You can start them both and then monitor when both are done. This will sometimes get a faster end-to-end response.
You can do that using Promise.all():
const get = function() {
  return Promise.all([fetchSomething(), fetchSomethingElse()]).then(results => {
    // process results array and then return final value
    // results[0] is result from fetchSomething, results[1] is result from fetchSomethingElse
    return finalVal;
  });
}
Both are fine, but you are making a common mistake in the top example (and maybe it's just because of the simplification of the code for the question). You are returning the promise from get, but you are not returning the promise from the then. This means the caller of get won't know when both promises have resolved. Consider:
const fetchSomething = () => new Promise((resolve) => {
  setTimeout(() => resolve(console.log('future value')), 500);
});
const fetchSomethingElse = () => new Promise((resolve) => {
  setTimeout(() => resolve(console.log('future value dueeee')), 3000);
});
const get = () => {
  return fetchSomething().then(function(){
    fetchSomethingElse()
  });
}
// we don't know when fetchSomethingElse is done
get().then(() => console.log("done"));
Also there's another option you might consider since the second promise doesn't depend on the output of the first. Call them in parallel:
const get = () => {
  return Promise.all([fetchSomething(), fetchSomethingElse()])
}
In this case one can start before the other is finished and the whole operation should be faster.
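The same parallel pattern reads naturally with await and destructuring; a minimal sketch:
const get = async () => {
  // both calls start immediately; await only waits for them to settle
  const [first, second] = await Promise.all([fetchSomething(), fetchSomethingElse()]);
  return { first, second };
};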
It's important to remember that in Promise-based patterns you're using functions that return Promises. The executor you pass to a Promise receives resolve and reject arguments (which are themselves functions). What you resolve with is what the .then() phase receives, and what you reject with is what the .catch() phase receives.
To handle Promises in sequence, you're passing your values into the top-level function that wraps the Promise.
so...
const p1 = () => {
  return new Promise((resolve, reject) => {
    window.setTimeout(() => {
      resolve('future value one');
    }, 500);
  });
};
const p2 = (v1) => {
  return new Promise((resolve, reject) => {
    window.setTimeout(() => {
      const v2 = 'future value two';
      resolve({v1, v2});
    }, 500);
  });
};
p1().then(p2).then(console.log);

Is there a way to short circuit async/await flow?

All four functions called below in update return promises.
async function update() {
  var urls = await getCdnUrls();
  var metadata = await fetchMetaData(urls);
  var content = await fetchContent(metadata);
  await render(content);
  return;
}
What if we want to abort the sequence from outside, at any given time?
For example, while fetchMetaData is being executed, we realize we no longer need to render the component and we want to cancel the remaining operations (fetchContent and render). Is there a way to abort/cancel these operations from outside the update function?
We could check against a condition after each await, but that seems like an inelegant solution, and even then we will have to wait for the current operation to finish.
The standard way to do this now is through AbortSignals
async function update({ signal } = {}) {
  // pass the signal to methods to cancel them internally in turn;
  // this is implemented throughout Node.js and most of the web platform
  try {
    var urls = await getCdnUrls({ signal });
    var metadata = await fetchMetaData(urls);
    var content = await fetchContent(metadata);
    await render(content);
  } catch (e) {
    if (e.name !== 'AbortError') throw e;
  }
  return;
}
// usage
const ac = new AbortController();
update({ signal: ac.signal });
ac.abort(); // cancel the update
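For illustration, here is a sketch of how a function like getCdnUrls might honor the signal internally, assuming it wraps fetch (which supports signals natively):
async function getCdnUrls({ signal } = {}) {
  // fetch rejects with an AbortError as soon as the signal fires
  const response = await fetch('/cdn-urls', { signal });
  return response.json();
}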
OLD 2016 content below, beware dragons
I just gave a talk about this - this is a lovely topic but sadly you're not really going to like the solutions I'm going to propose as they're gateway-solutions.
What the spec does for you
Getting cancellation "just right" is actually very hard. People have been working on just that for a while and it was decided not to block async functions on it.
There are two proposals attempting to solve this in ECMAScript core:
Cancellation tokens - which adds cancellation tokens that aim to solve this issue.
Cancelable promise - which adds catch cancel (e) { syntax and throw.cancel syntax which aims to address this issue.
Both proposals changed substantially over the last week, so I wouldn't count on either to arrive in the next year or so. The proposals are somewhat complementary and are not at odds.
What you can do to solve this from your side
Cancellation tokens are easy to implement. Sadly the sort of cancellation you'd really want (aka "third state" cancellation, where cancellation is not an exception) is impossible with async functions at the moment since you don't control how they're run. You can do two things:
Use coroutines instead - bluebird ships with sound cancellation using generators and promises which you can use.
Implement tokens with abortive semantics - this is actually pretty easy so let's do it here
CancellationTokens
Well, a token signals cancellation:
class Token {
  constructor(fn) {
    this.isCancellationRequested = false;
    this.onCancelled = []; // actions to execute when cancelled
    this.onCancelled.push(() => this.isCancellationRequested = true);
    // expose a promise to the outside
    this.promise = new Promise(resolve => this.onCancelled.push(resolve));
    // let the user add handlers
    if (fn) fn(f => this.onCancelled.push(f));
  }
  cancel() { this.onCancelled.forEach(x => x()); } // invoke each handler
}
This would let you do something like:
async function update(token) {
  if (token.isCancellationRequested) return;
  var urls = await getCdnUrls();
  if (token.isCancellationRequested) return;
  var metadata = await fetchMetaData(urls);
  if (token.isCancellationRequested) return;
  var content = await fetchContent(metadata);
  if (token.isCancellationRequested) return;
  await render(content);
  return;
}
var token = new Token(); // don't need any special handling here
update(token);
// ...
if (updateNotNeeded) token.cancel(); // will abort asynchronous actions
Which is a really ugly way that would work, optimally you'd want async functions to be aware of this but they're not (yet).
Optimally, all your interim functions would be aware and would throw on cancellation (again, only because we can't have third-state) which would look like:
async function update(token) {
  var urls = await getCdnUrls(token);
  var metadata = await fetchMetaData(urls, token);
  var content = await fetchContent(metadata, token);
  await render(content, token);
  return;
}
Since each of our functions are cancellation aware, they can perform actual logical cancellation - getCdnUrls can abort the request and throw, fetchMetaData can abort the underlying request and throw and so on.
Here is how one might write getCdnUrl (note the singular) using the XMLHttpRequest API in browsers:
function getCdnUrl(url, token) {
  var xhr = new XMLHttpRequest();
  xhr.open("GET", url);
  var p = new Promise((resolve, reject) => {
    xhr.onload = () => resolve(xhr);
    xhr.onerror = e => reject(new Error(e));
    token.promise.then(x => {
      try { xhr.abort(); } catch(e) {} // ignore abort errors
      reject(new Error("cancelled"));
    });
  });
  xhr.send();
  return p;
}
This is as close as we can get with async functions without coroutines. It's not very pretty but it's certainly usable.
Note that you'd want to avoid cancellations being treated as exceptions. This means that if your functions throw on cancellation you need to filter those errors on the global error handlers process.on("unhandledRejection", e => ... and such.
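For example, in Node such a filter might look like this (a sketch; the check depends on how your cancellation errors are shaped — here they carry the "cancelled" message used above):
process.on('unhandledRejection', (e) => {
  if (e && e.message === 'cancelled') return; // ignore our own cancellations
  console.error(e); // anything else is a real bug
});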
You can get what you want using Typescript + Bluebird + cancelable-awaiter.
Now that all evidence points to cancellation tokens not making it into ECMAScript, I think the best solution for cancellation is the bluebird implementation mentioned by @BenjaminGruenbaum; however, I find the usage of co-routines and generators a bit clumsy and uneasy on the eyes.
Since I'm using Typescript, which now support async/await syntax for es5 and es3 targets, I've created a simple module which replaces the default __awaiter helper with one that supports bluebird cancellations: https://www.npmjs.com/package/cancelable-awaiter
Unfortunately, there is no support of cancellable promises so far. There are some custom implementations e.g.
Extends/wraps a promise to be cancellable and resolvable
function promisify(promise) {
  let _resolve, _reject
  let wrap = new Promise(async (resolve, reject) => {
    _resolve = resolve
    _reject = reject
    let result = await promise
    resolve(result)
  })
  wrap.resolve = _resolve
  wrap.reject = _reject
  return wrap
}
Usage: Cancel promise and stop further execution immediately after it
async function test() {
  // Create promise that should be resolved in 3 seconds
  let promise = new Promise(resolve => setTimeout(() => resolve('our resolved value'), 3000))
  // extend our promise to be cancellable
  let cancellablePromise = promisify(promise)
  // Cancel promise in 2 seconds.
  // if you comment this line out, then promise will be resolved.
  setTimeout(() => cancellablePromise.reject('error code'), 2000)
  // wait promise to be resolved
  let result = await cancellablePromise
  // this line will never be executed!
  console.log(result)
}
In this approach, a promise itself is executed till the end, but the caller code that awaits promise result can be 'cancelled'.
Unfortunately, no, you can't control the execution flow of default async/await behaviour. That does not mean the problem itself is impossible; it means that you need to change your approach a bit.
First of all, your proposal about wrapping every async line in a check is a working solution, and if you have just a couple of places with such functionality, there is nothing wrong with it.
If you want to use this pattern pretty often, the best solution, probably, is to switch to generators: while not so widespread, they allow you to define each step's behaviour, and adding cancel is the easiest. Generators are pretty powerful, but, as I've mentioned, they require a runner function and are not as straightforward as async/await.
Another approach is to create a cancellable token pattern – you create an object which is filled in by the function that wants to support cancellation:
async function updateUser(token) {
  let cancelled = false;
  // we don't reject, since we don't have access to
  // the returned promise
  // so we just don't call other functions, and reject
  // in the end
  token.cancel = () => {
    cancelled = true;
  };
  const data = await wrapWithCancel(fetchData)();
  const userData = await wrapWithCancel(updateUserData)(data);
  const userAddress = await wrapWithCancel(updateUserAddress)(userData);
  const marketingData = await wrapWithCancel(updateMarketingData)(userAddress);
  // because we've wrapped all functions, in case of cancellation
  // we'll just fall through to this point, without calling any of
  // the actual functions. We also can't reject by ourselves, since
  // we don't have control over the returned promise
  if (cancelled) {
    throw { reason: 'cancelled' };
  }
  return marketingData;
  function wrapWithCancel(fn) {
    return data => {
      if (!cancelled) {
        return fn(data);
      }
    };
  }
}
const token = {};
const promise = updateUser(token);
// wait some time...
token.cancel(); // user will be updated any way
I've written articles, both on cancellation and generators:
promise cancellation
generators usage
To summarize – you have to do some additional work in order to support cancellation, and if you want to have it as a first-class citizen in your application, you have to use generators.
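To illustrate why generators make cancellation first-class, here is a minimal hand-rolled runner (a sketch, not production code; fetchData and updateUserData are placeholder stubs standing in for the names used in the snippet above): because the caller drives the generator, it can simply stop driving it when cancelled.
function run(gen) {
  const it = gen();
  let cancelled = false;
  const promise = new Promise((resolve, reject) => {
    function step(value) {
      if (cancelled) return reject(new Error('cancelled'));
      const { value: next, done } = it.next(value);
      if (done) return resolve(next);
      Promise.resolve(next).then(step, reject);
    }
    step();
  });
  return { promise, cancel: () => { cancelled = true; } };
}

// placeholder async steps for the demo
const fetchData = () => new Promise(r => setTimeout(() => r('data'), 50));
const updateUserData = (d) => new Promise(r => setTimeout(() => r(d + '!'), 50));

const { promise, cancel } = run(function* () {
  const data = yield fetchData();
  const user = yield updateUserData(data);
  return user;
});
promise.then(console.log, e => console.warn(e.message));
setTimeout(cancel, 60); // the generator is never resumed past the pending yield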
Here is a simple example with a promise:
let resp = await new Promise(function(resolve, reject) {
  // simulating time consuming process
  setTimeout(() => resolve('Promise RESOLVED !'), 3000);
  // hit a button to cancel the promise
  $('#btn').click(() => resolve('Promise CANCELED !'));
});
Please see this codepen for a demo
Using CPromise (c-promise2 package) this can be easily done in the following way
(Demo):
import CPromise from "c-promise2";

async function getCdnUrls() {
  console.log(`task1:start`);
  await CPromise.delay(1000);
  console.log(`task1:end`);
}
async function fetchMetaData() {
  console.log(`task2:start`);
  await CPromise.delay(1000);
  console.log(`task2:end`);
}
function* fetchContent() {
  // using generators is the recommended way to write asynchronous code with CPromise
  console.log(`task3:start`);
  yield CPromise.delay(1000);
  console.log(`task3:end`);
}
function* render() {
  console.log(`task4:start`);
  yield CPromise.delay(1000);
  console.log(`task4:end`);
}

const update = CPromise.promisify(function* () {
  var urls = yield getCdnUrls();
  var metadata = yield fetchMetaData(urls);
  var content = yield* fetchContent(metadata);
  yield* render(content);
  return 123;
});

const promise = update().then(
  (v) => console.log(`Done: ${v}`),
  (e) => console.warn(`Fail: ${e}`)
);

setTimeout(() => promise.cancel(), 2500);
Console output:
task1:start
task1:end
task2:start
task2:end
task3:start
Fail: CanceledError: canceled
Just like in regular code you should throw an exception from the first function (or each of the next functions) and have a try block around the whole set of calls. No need to have extra if-elses. That's one of the nice bits about async/await, that you get to keep error handling the way we're used to from regular code.
Wrt cancelling the other operations there is no need to. They will actually not start until their expressions are encountered by the interpreter. So the second async call will only start after the first one finishes, without errors. Other tasks might get the chance to execute in the meantime, but for all intents and purposes, this section of code is serial and will execute in the desired order.
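To make that concrete, a sketch of the try block approach: if the first call rejects (for example because it was aborted), none of the later calls even start.
async function update() {
  try {
    var urls = await getCdnUrls();            // rejects on cancellation
    var metadata = await fetchMetaData(urls); // never starts if the line above threw
    var content = await fetchContent(metadata);
    await render(content);
  } catch (e) {
    // one handler for the whole sequence, just like regular synchronous code
    console.warn('update aborted:', e);
  }
}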
This answer I posted may help you to rewrite your function as:
async function update() {
  var get_urls = comPromise.race([getCdnUrls()]);
  var get_metadata = get_urls.then(urls => fetchMetaData(urls));
  var get_content = get_metadata.then(metadata => fetchContent(metadata));
  var rendered = get_content.then(content => render(content)); // renamed so it doesn't shadow render()
  await rendered;
  return;
}
// this is the cancel command so that later steps will never proceed:
get_urls.abort();
But I have yet to implement the "class-preserving" then function, so currently you have to wrap every part you want to be able to cancel with comPromise.race.
I created a library called @kaisukez/cancellation-token
The idea is to pass a CancellationToken to every async function, then wrap every promise in AsyncCheckpoint. So that when the token is cancelled, your async function will be cancelled in the next checkpoint.
This idea came from tc39/proposal-cancelable-promises
and conradreuter/cancellationtoken.
How to use my library
Refactor your code
// from this
async function yourFunction(param1, param2) {
  const result1 = await someAsyncFunction1(param1)
  const result2 = await someAsyncFunction2(param2)
  return [result1, result2]
}

// to this
import { AsyncCheckpoint } from '@kaisukez/cancellation-token'

async function yourFunction(token, param1, param2) {
  const result1 = await AsyncCheckpoint.after(token, () => someAsyncFunction1(param1))
  const result2 = await AsyncCheckpoint.after(token, () => someAsyncFunction2(param2))
  return [result1, result2]
}
Create a token then call your function with that token
import { CancellationToken, CancellationError } from '@kaisukez/cancellation-token'

const [token, cancel] = CancellationToken.source()
// spawn background task (run async function without using `await`)
CancellationError.ignoreAsync(() => yourAsyncFunction(token, param1, param2))
// ... do something ...
// then cancel the background task
await cancel()
So this is the solution to the OP's question.
import { CancellationToken, CancellationError, AsyncCheckpoint } from '@kaisukez/cancellation-token'

async function update(token) {
  var urls = await AsyncCheckpoint.after(token, () => getCdnUrls());
  var metadata = await AsyncCheckpoint.after(token, () => fetchMetaData(urls));
  var content = await AsyncCheckpoint.after(token, () => fetchContent(metadata));
  await AsyncCheckpoint.after(token, () => render(content));
  return;
}

const [token, cancel] = CancellationToken.source();
// spawn background task (run async function without using `await`)
CancellationError.ignoreAsync(() => update(token))
// ... do something ...
// then cancel the background task
await cancel()
Example written in Node with Typescript of a call which can be aborted from outside:
function cancelable(asyncFunc: Promise<void>): [Promise<void>, () => boolean] {
  class CancelEmitter extends EventEmitter { }
  const cancelEmitter = new CancelEmitter();
  const promise = new Promise<void>(async (resolve, reject) => {
    cancelEmitter.on('cancel', () => {
      resolve();
    });
    try {
      await asyncFunc;
      resolve();
    } catch (err) {
      reject(err);
    }
  });
  return [promise, () => cancelEmitter.emit('cancel')];
}
Usage:
const asyncFunction = async () => {
  // doSomething
}
const [promise, cancel] = cancelable(asyncFunction());
setTimeout(() => {
  cancel();
}, 2000);
(async () => await promise)();

Promise - is it possible to force cancel a promise

I use ES6 Promises to manage all of my network data retrieval and there are some situations where I need to force cancel them.
Basically the scenario is such that I have a type-ahead search on the UI where the request is delegated to the backend, which has to carry out the search based on the partial input. While this network request (#1) may take a little bit of time, the user continues to type, which eventually triggers another backend call (#2).
Here #2 naturally takes precedence over #1 so I would like to cancel the Promise wrapping request #1. I already have a cache of all Promises in the data layer so I can theoretically retrieve it as I am attempting to submit a Promise for #2.
But how do I cancel Promise #1 once I retrieve it from the cache?
Could anyone suggest an approach?
In modern JavaScript - no
Promises have settled (hah) and it appears like it will never be possible to cancel a (pending) promise.
Instead, there is a cross-platform (Node, Browsers etc) cancellation primitive as part of WHATWG (a standards body that also builds HTML) called AbortController. You can use it to cancel functions that return promises rather than promises themselves:
// Take a signal parameter in the function that needs cancellation
async function somethingIWantToCancel({ signal } = {}) {
  // either pass it directly to APIs that support it
  // (fetch and most Node APIs do)
  const response = await fetch('.../', { signal });
  // return response.json;
  // or if the API does not already support it -
  // manually adapt your code to support signals:
  const onAbort = (e) => {
    // run any code relating to aborting here
  };
  signal.addEventListener('abort', onAbort, { once: true });
  // and be sure to clean it up when the action you are performing
  // is finished to avoid a leak
  // ... sometime later ...
  signal.removeEventListener('abort', onAbort);
}
// Usage
const ac = new AbortController();
setTimeout(() => ac.abort(), 1000); // give it a 1s timeout
try {
  await somethingIWantToCancel({ signal: ac.signal });
} catch (e) {
  if (e.name === 'AbortError') {
    // deal with cancellation in caller, or ignore
  } else {
    throw e; // don't swallow errors :)
  }
}
No. We can't do that yet.
ES6 promises do not support cancellation yet. It's on its way, and its design is something a lot of people worked really hard on. Sound cancellation semantics are hard to get right and this is work in progress. There are interesting debates on the "fetch" repo, on esdiscuss and on several other repos on GH but I'd just be patient if I were you.
But, but, but.. cancellation is really important!
It is. The reality of the matter is that cancellation is a really important scenario in client-side programming. The cases you describe, like aborting web requests, are important and they're everywhere.
So... the language screwed me!
Yeah, sorry about that. Promises had to get in first before further things were specified - so they went in without some useful stuff like .finally and .cancel - it's on its way though, to the spec through the DOM. Cancellation is not an afterthought it's just a time constraint and a more iterative approach to API design.
So what can I do?
You have several alternatives:
Use a third party library like bluebird who can move a lot faster than the spec and thus have cancellation as well as a bunch of other goodies - this is what large companies like WhatsApp do.
Pass a cancellation token.
Using a third party library is pretty obvious. As for a token, you can make your method take a function in and then call it, as such:
function getWithCancel(url, token) { // the token is for cancellation
  var xhr = new XMLHttpRequest;
  xhr.open("GET", url);
  return new Promise(function(resolve, reject) {
    xhr.onload = function() { resolve(xhr.responseText); };
    token.cancel = function() { // SPECIFY CANCELLATION
      xhr.abort(); // abort request
      reject(new Error("Cancelled")); // reject the promise
    };
    xhr.onerror = reject;
    xhr.send(); // actually fire the request
  });
}
Which would let you do:
var token = {};
var promise = getWithCancel("/someUrl", token);
// later we want to abort the promise:
token.cancel();
Your actual use case - last
This isn't too hard with the token approach:
function last(fn) {
  var lastToken = { cancel: function(){} }; // start with no op
  return function() {
    lastToken.cancel();
    var args = Array.prototype.slice.call(arguments);
    args.push(lastToken);
    return fn.apply(this, args);
  };
}
Which would let you do:
var synced = last(getWithCancel);
synced("/url1?q=a"); // this will get canceled
synced("/url1?q=ab"); // this will get canceled too
synced("/url1?q=abc"); // this will get canceled too
synced("/url1?q=abcd").then(function() {
  // only this will run
});
And no, libraries like Bacon and Rx don't "shine" here because they're observable libraries; they just have the same advantage user-level promise libraries have by not being spec bound. I guess we'll have to wait and see in ES2016 whether observables go native. They are nifty for typeahead though.
With AbortController
It is possible to use an AbortController to reject or resolve a promise on demand:
let controller = new AbortController();
let task = new Promise((resolve, reject) => {
  // some logic ...
  const abortListener = ({ target }) => {
    controller.signal.removeEventListener('abort', abortListener);
    reject(target.reason);
  };
  controller.signal.addEventListener('abort', abortListener);
});
controller.abort('cancelled reason'); // task is now in rejected state
controller.abort('cancelled reason'); // task is now in rejected state
Also, it's better to remove the event listener on abort to prevent memory leaks.
And you can later check whether the error was thrown by abort by checking the controller.signal.aborted boolean property, like:
const res = task.catch((err) => (
  controller.signal.aborted
    ? { value: err }
    : { value: 'fallback' }
));
If you just check whether the task is aborted and return, the Promise will stay in pending status forever. But in that case you also will not get .catch fired with any error, if that's your intention:
controller.abort();
new Promise((resolve, reject) => {
if(controller.signal.aborted) return;
}
Same works for cancelling fetch:
let controller = new AbortController();
fetch(url, {
  signal: controller.signal
});
or just pass controller:
let controller = new AbortController();
fetch(url, controller);
And call the abort method to cancel one, or any number of fetches where you passed this controller:
controller.abort();
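As a sketch, a single controller fanned out across several requests (the URLs are placeholders):
const controller = new AbortController();
const requests = ['/a', '/b', '/c'].map(url =>
  fetch(url, { signal: controller.signal })
);
controller.abort(); // aborts every request still in flight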
Standard proposals for cancellable promises have failed.
A promise is not a control surface for the async action fulfilling it; confuses owner with consumer. Instead, create asynchronous functions that can be cancelled through some passed-in token.
Another promise makes a fine token, making cancel easy to implement with Promise.race:
Example: Use Promise.race to cancel the effect of a previous chain:
let cancel = () => {};
input.oninput = function(ev) {
  let term = ev.target.value;
  console.log(`searching for "${term}"`);
  cancel();
  let p = new Promise(resolve => cancel = resolve);
  Promise.race([p, getSearchResults(term)]).then(results => {
    if (results) {
      console.log(`results for "${term}"`, results);
    }
  });
};
function getSearchResults(term) {
  return new Promise(resolve => {
    let timeout = 100 + Math.floor(Math.random() * 1900);
    setTimeout(() => resolve([term.toLowerCase(), term.toUpperCase()]), timeout);
  });
}
Search: <input id="input">
Here we're "cancelling" previous searches by injecting an undefined result and testing for it, but we could easily imagine rejecting with "CancelledError" instead.
Of course this doesn't actually cancel the network search, but that's a limitation of fetch. If fetch were to take a cancel promise as argument, then it could cancel the network activity.
I've proposed this "Cancel promise pattern" on es-discuss, exactly to suggest that fetch do this.
I have checked out Mozilla JS reference and found this:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/race
Let's check it out:
var p1 = new Promise(function(resolve, reject) {
  setTimeout(resolve, 500, "one");
});
var p2 = new Promise(function(resolve, reject) {
  setTimeout(resolve, 100, "two");
});
Promise.race([p1, p2]).then(function(value) {
  console.log(value); // "two"
  // Both resolve, but p2 is faster
});
Here we put p1 and p2 into Promise.race(...) as arguments; this actually creates a new promise that settles with the result of whichever input settles first, which is what you require.
For Node.js and Electron, I'd highly recommend using Promise Extensions for JavaScript (Prex). Its author Ron Buckton is one of the key TypeScript engineers and also is the guy behind the current TC39's ECMAScript Cancellation proposal. The library is well documented and chances are some of Prex will make to the standard.
On a personal note and coming from C# background, I like very much the fact that Prex is modelled upon the existing Cancellation in Managed Threads framework, i.e. based on the approach taken with CancellationTokenSource/CancellationToken .NET APIs. In my experience, those have been very handy to implement robust cancellation logic in managed apps.
I also verified it to work within a browser by bundling Prex using Browserify.
Here is an example of a delay with cancellation (Gist and RunKit, using Prex for its CancellationToken and Deferred):
// by @noseratio
// https://gist.github.com/noseratio/141a2df292b108ec4c147db4530379d2
// https://runkit.com/noseratio/cancellablepromise

const prex = require('prex');

/**
 * A cancellable promise.
 * @extends Promise
 */
class CancellablePromise extends Promise {
  static get [Symbol.species]() {
    // tinyurl.com/promise-constructor
    return Promise;
  }

  constructor(executor, token) {
    const withCancellation = async () => {
      // create a new linked token source
      const linkedSource = new prex.CancellationTokenSource(token ? [token] : []);
      try {
        const linkedToken = linkedSource.token;
        const deferred = new prex.Deferred();

        linkedToken.register(() => deferred.reject(new prex.CancelError()));

        executor({
          resolve: value => deferred.resolve(value),
          reject: error => deferred.reject(error),
          token: linkedToken
        });

        return await deferred.promise; // propagate the resolved value
      }
      finally {
        // this will also free all linkedToken registrations,
        // so the executor doesn't have to worry about it
        linkedSource.close();
      }
    };
    super((resolve, reject) => withCancellation().then(resolve, reject));
  }
}

/**
 * A cancellable delay.
 * @extends Promise
 */
class Delay extends CancellablePromise {
  static get [Symbol.species]() { return Promise; }

  constructor(delayMs, token) {
    super(r => {
      const id = setTimeout(r.resolve, delayMs);
      r.token.register(() => clearTimeout(id));
    }, token);
  }
}

// main
async function main() {
  const tokenSource = new prex.CancellationTokenSource();
  const token = tokenSource.token;
  setTimeout(() => tokenSource.cancel(), 2000); // cancel after 2000ms

  let delay = 1000;
  console.log(`delaying by ${delay}ms`);
  await new Delay(delay, token);
  console.log("successfully delayed."); // we should reach here

  delay = 2000;
  console.log(`delaying by ${delay}ms`);
  await new Delay(delay, token);
  console.log("successfully delayed."); // we should not reach here
}

main().catch(error => console.error(`Error caught, ${error}`));
Note that cancellation is a race. I.e., a promise may have been resolved successfully, but by the time you observe it (with await or then), the cancellation may have been triggered as well. It's up to you how you handle this race, but it doesn't hurt to call token.throwIfCancellationRequested() an extra time, like I do above.
I faced a similar problem recently.
I had a promise-based client (not a network one) and I wanted to always give the latest requested data to the user to keep the UI smooth.
After struggling with the cancellation idea, Promise.race(...) and Promise.all(...), I just started remembering my last request id, and when a promise was fulfilled I would only render my data if it matched the id of the last request.
Hope it helps someone.
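A minimal sketch of that idea (fetchData and renderData are hypothetical placeholders):
let lastRequestId = 0;

async function search(term) {
  const requestId = ++lastRequestId;
  const data = await fetchData(term);
  // a newer request was issued while we waited; drop this stale result
  if (requestId !== lastRequestId) return;
  renderData(data);
}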
See https://www.npmjs.com/package/promise-abortable
$ npm install promise-abortable
You can make the promise reject before finishing:
// Our function to cancel promises receives a promise and returns the same one plus a cancel function
const cancellablePromise = (promiseToCancel) => {
  let cancel
  const promise = new Promise((resolve, reject) => {
    cancel = reject
    promiseToCancel
      .then(resolve)
      .catch(reject)
  })
  return { promise, cancel }
}

// A simple promise to execute a function with a delay
const waitAndExecute = (time, functionToExecute) => new Promise((resolve, reject) => {
  const timeInMs = time * 1000
  setTimeout(() => {
    console.log(`Waited ${time} secs`)
    resolve(functionToExecute())
  }, timeInMs)
})

// The promise that we will cancel
const fetchURL = () => fetch('https://pokeapi.co/api/v2/pokemon/ditto/')

// Create a promise that resolves in 1 second. (We will cancel it in 0.5 secs)
const { promise, cancel } = cancellablePromise(waitAndExecute(1, fetchURL))

promise
  .then((res) => {
    console.log('then', res) // This will be executed in 1 second
  })
  .catch(() => {
    console.log('catch') // We will force the promise to reject in 0.5 seconds
  })

waitAndExecute(0.5, cancel) // Cancel the previous promise in 0.5 seconds, so it will be rejected before finishing. Commenting this line out will make the promise resolve
Unfortunately the fetch call has already been made, so you will see the call resolving in the Network tab. Your code will just ignore it.
Using the Promise subclass provided by the external package, this can be done as follows: Live demo
import CPromise from "c-promise2";

function fetchWithTimeout(url, { timeout, ...fetchOptions } = {}) {
  return new CPromise((resolve, reject, { signal }) => {
    fetch(url, { ...fetchOptions, signal }).then(resolve, reject)
  }, timeout)
}

const chain = fetchWithTimeout('http://localhost/')
  .then(response => response.json())
  .then(console.log, console.warn);

// chain.cancel(); call this to abort the promise and related request
Using AbortController
I've been researching this for a few days and I still feel that rejecting the promise inside an abort event handler is only part of the approach.
The thing is that, as you may know, rejecting a promise only makes the code awaiting it resume execution; any code that runs after the rejection or resolution of the promise, or outside of its execution scope (e.g. inside an event listener or an async call), will keep running, wasting cycles and maybe even memory on something that isn't really needed anymore.
Lacking approach
When executing the snippet below, after 2 seconds the console will contain the output derived from the rejection of the promise, plus all the output from the pending work. The promise is rejected and the code awaiting it can continue, but the work itself does not stop, which in my opinion is the main point of this exercise.
let abortController = new AbortController();
new Promise((resolve, reject) => {
  if (abortController.signal.aborted) return;
  let abortHandler = () => {
    reject('Aborted');
  };
  abortController.signal.addEventListener('abort', abortHandler);
  setTimeout(() => {
    console.log('Work');
    console.log('More work');
    resolve('Work result');
    abortController.signal.removeEventListener('abort', abortHandler);
  }, 2000);
})
  .then(result => console.log('then:', result))
  .catch(reason => console.error('catch:', reason));
setTimeout(() => abortController.abort(), 1000);
setTimeout( () => abortController.abort(), 1000 );
Which leads me to think that after defining the abort event handler there must be calls to
if ( abortController.signal.aborted ) return;
in sensible points of the code that is performing the work so that the work doesn't get performed and can gracefully stop if necessary (Adding more statements before the return in the if block above).
Proposal
This approach reminds me a little of the cancellable token proposal from a few years back, but it will in fact prevent work from being performed in vain. The console output should now only be the abort error and nothing more. Even when the work is in progress and then cancelled in the middle, it can stop at a sensible step of the processing, like at the beginning of a loop's body.
let abortController = new AbortController();
new Promise((resolve, reject) => {
  if (abortController.signal.aborted) return;
  let abortHandler = () => {
    reject('Aborted');
  };
  abortController.signal.addEventListener('abort', abortHandler);
  setTimeout(() => {
    if (abortController.signal.aborted) return;
    console.log('Work');
    if (abortController.signal.aborted) return;
    console.log('More work');
    resolve('Work result');
    abortController.signal.removeEventListener('abort', abortHandler);
  }, 2000);
})
  .then(result => console.log('then:', result))
  .catch(reason => console.error('catch:', reason));
setTimeout(() => abortController.abort(), 1000);
setTimeout( () => abortController.abort(), 1000 );
I found the solutions posted here a little hard to read, so I created a helper function that is, in my opinion, easier to use.
The helper function gives access to the information whether the current call is already obsolete or not. With this information the function itself has to take care of things accordingly (usually by simply returning).
// Typescript
export function obsoletableFn<Res, Args extends unknown[]>(
  fn: (isObsolete: () => boolean, ...args: Args) => Promise<Res>,
): (...args: Args) => Promise<Res> {
  let lastCaller = null;
  return (...args: Args) => {
    const me = Symbol();
    lastCaller = me;
    const isObsolete = () => lastCaller !== me;
    return fn(isObsolete, ...args);
  };
}

// helper function
function obsoletableFn(fn) {
  let lastCaller = null;
  return (...args) => {
    const me = Symbol();
    lastCaller = me;
    const isObsolete = () => lastCaller !== me;
    return fn(isObsolete, ...args);
  };
}

const simulateRequest = () => new Promise(resolve => setTimeout(resolve, Math.random() * 2000 + 1000));

// usage
const myFireAndForgetFn = obsoletableFn(async (isObsolete, x) => {
  console.log(x, 'starting');
  await simulateRequest();
  if (isObsolete()) {
    console.log(x, 'is obsolete');
    // return, as there is already a more recent call running
    return;
  }
  console.log(x, 'is not obsolete');
  document.querySelector('div').innerHTML = `Response ${x}`;
});

myFireAndForgetFn('A');
myFireAndForgetFn('B');
<div>Waiting for response...</div>
So I have an async function that I needed to cancel on user input, but it's a long-running one that involves mouse control.
I used p-queue, added each line of my function to it, and have an observable that I feed the cancellation signal. Anything the queue has started processing will run no matter what, but you should be able to cancel anything after that by clearing the queue. The shorter the tasks you add to the queue, the sooner you can quit after getting the cancel signal. You can be lazy and throw whole chunks of code into the queue instead of the one-liners I have in the example.
A note on p-queue releases: version 6 works with CommonJS; 7+ switches to ESM and could break your app (it breaks my electron/typescript/webpack one).
const cancellable_function = async () => {
  const queue = new PQueue({ concurrency: 1 });
  queue.pause();
  queue.addAll([
    async () => await move_mouse({...}),
    async () => await mouse_click({...}),
  ]);
  for await (const item of items) {
    queue.addAll([
      async () => await do_something({...}),
      async () => await do_something_else({...}),
    ]);
  }
  const { information } = await get_information();
  queue.addAll([
    async () => await move_mouse({...}),
    async () => await mouse_click({...}),
  ]);
  cancel_signal$.pipe(take(1)).subscribe(() => {
    queue.clear();
  });
  queue.start();
  await queue.onEmpty();
}
Because @jib rejected my edit, I post my answer here. It's just a modification of @jib's answer with some comments and more understandable variable names.
Below I show examples of two different methods: one uses resolve() and the other uses reject().
let cancelCallback = () => {};

input.oninput = function(ev) {
  let term = ev.target.value;
  console.log(`searching for "${term}"`);
  cancelCallback(); // cancel previous promise by calling cancelCallback()
  let setCancelCallbackPromise = () => {
    return new Promise((resolve, reject) => {
      // set cancelCallback when running this promise
      cancelCallback = () => {
        // pass cancel messages by resolve()
        return resolve('Canceled');
      };
    });
  };
  Promise.race([setCancelCallbackPromise(), getSearchResults(term)]).then(results => {
    // check if the calling of resolve() is from cancelCallback() or getSearchResults()
    if (results == 'Canceled') {
      console.log("error(by resolve): ", results);
    } else {
      console.log(`results for "${term}"`, results);
    }
  });
};

input2.oninput = function(ev) {
  let term = ev.target.value;
  console.log(`searching for "${term}"`);
  cancelCallback(); // cancel previous promise by calling cancelCallback()
  let setCancelCallbackPromise = () => {
    return new Promise((resolve, reject) => {
      // set cancelCallback when running this promise
      cancelCallback = () => {
        // pass cancel messages by reject()
        return reject('Canceled');
      };
    });
  };
  Promise.race([setCancelCallbackPromise(), getSearchResults(term)]).then(results => {
    // resolve() here comes from getSearchResults(); rejections go to .catch()
    if (results !== 'Canceled') {
      console.log(`results for "${term}"`, results);
    }
  }).catch(error => {
    console.log("error(by reject): ", error);
  });
};

function getSearchResults(term) {
  return new Promise(resolve => {
    let timeout = 100 + Math.floor(Math.random() * 1900);
    setTimeout(() => resolve([term.toLowerCase(), term.toUpperCase()]), timeout);
  });
}
Search(use resolve): <input id="input">
<br> Search2(use reject and catch error): <input id="input2">

How to make q.all execute in order like async.series [duplicate]

Consider the following code that reads an array of files in a serial/sequential manner. readFiles returns a promise, which is resolved only once all files have been read in sequence.
var readFile = function(file) {
  ... // Returns a promise.
};

var readFiles = function(files) {
  return new Promise((resolve, reject) => {
    var readSequential = function(index) {
      if (index >= files.length) {
        resolve();
      } else {
        readFile(files[index]).then(function() {
          readSequential(index + 1);
        }).catch(reject);
      }
    };
    readSequential(0); // Start with the first file!
  });
};
The above code works, but I don't like having to do recursion for things to occur sequentially. Is there a simpler way that this code can be re-written so that I don't have to use my weird readSequential function?
Originally I tried to use Promise.all, but that caused all of the readFile calls to happen concurrently, which is not what I want:
var readFiles = function(files) {
  return Promise.all(files.map(function(file) {
    return readFile(file);
  }));
};
Update 2017: I would use an async function if the environment supports it:
async function readFiles(files) {
  for (const file of files) {
    await readFile(file);
  }
};
If you'd like, you can defer reading the files until you need them using an async generator (if your environment supports it):
async function* readFiles(files) {
  for (const file of files) {
    yield await readFile(file);
  }
};
Update: On second thought - I might use a for loop instead:
var readFiles = function(files) {
  var p = Promise.resolve(); // Q() in q
  files.forEach(file => {
    p = p.then(() => readFile(file));
  });
  return p;
};
Or more compactly, with reduce:
var readFiles = function(files) {
  return files.reduce((p, file) => {
    return p.then(() => readFile(file));
  }, Promise.resolve()); // initial
};
In other promise libraries (like when and Bluebird) you have utility methods for this.
For example, Bluebird would be:
var Promise = require("bluebird");
var fs = Promise.promisifyAll(require("fs"));
var readAll = Promise.resolve(files).map(fs.readFileAsync,{concurrency: 1 });
// if the order matters, you can use Promise.each instead and omit concurrency param
readAll.then(function(allFileContents){
// do stuff to read files.
});
That said, there is really no reason not to use async/await today.
Here is how I prefer to run tasks in series.
function runSerial() {
  var that = this;
  // task1 is a function that returns a promise (and immediately starts executing)
  // task2 is a function that returns a promise (and immediately starts executing)
  return Promise.resolve()
    .then(function() {
      return that.task1();
    })
    .then(function() {
      return that.task2();
    })
    .then(function() {
      console.log(" ---- done ----");
    });
}
What about cases with more tasks? Like, 10?
function runSerial(tasks) {
  var result = Promise.resolve();
  tasks.forEach(task => {
    result = result.then(() => task());
  });
  return result;
}
This question is old, but we live in a world of ES6 and functional JavaScript, so let's see how we can improve.
Because promises execute immediately, we can't just create an array of promises, they would all fire off in parallel.
Instead, we need to create an array of functions that returns a promise. Each function will then be executed sequentially, which then starts the promise inside.
We can solve this a few ways, but my favorite way is to use reduce.
It gets a little tricky using reduce in combination with promises, so I have broken down the one liner into some smaller digestible bites below.
The essence of this function is to use reduce starting with an initial value of Promise.resolve([]), or a promise containing an empty array.
This promise will then be passed into the reduce method as promise. This is the key to chaining each promise together sequentially. The next promise to execute is func and when the then fires, the results are concatenated and that promise is then returned, executing the reduce cycle with the next promise function.
Once all promises have executed, the returned promise will contain an array of all the results of each promise.
ES6 Example (one liner)
/*
 * serial executes Promises sequentially.
 * @param {funcs} An array of funcs that return promises.
 * @example
 * const urls = ['/url1', '/url2', '/url3']
 * serial(urls.map(url => () => $.ajax(url)))
 *   .then(console.log.bind(console))
 */
const serial = funcs =>
  funcs.reduce((promise, func) =>
    promise.then(result => func().then(Array.prototype.concat.bind(result))), Promise.resolve([]))
ES6 Example (broken down)
// broken down for easier understanding
const concat = list => Array.prototype.concat.bind(list)
const promiseConcat = f => x => f().then(concat(x))
const promiseReduce = (acc, x) => acc.then(promiseConcat(x))
/*
 * serial executes Promises sequentially.
 * @param {funcs} An array of funcs that return promises.
 * @example
 * const urls = ['/url1', '/url2', '/url3']
 * serial(urls.map(url => () => $.ajax(url)))
 *   .then(console.log.bind(console))
 */
const serial = funcs => funcs.reduce(promiseReduce, Promise.resolve([]))
Usage:
// first take your work
const urls = ['/url1', '/url2', '/url3', '/url4']
// next convert each item to a function that returns a promise
const funcs = urls.map(url => () => $.ajax(url))
// execute them serially
serial(funcs)
.then(console.log.bind(console))
To do this simply in ES6:
var readFiles = function(files) {
  // Create a new empty promise (don't do that with real people ;)
  var sequence = Promise.resolve();
  // Loop over each file, and add on a promise to the
  // end of the 'sequence' promise.
  files.forEach(file => {
    // Chain one computation onto the sequence
    sequence = sequence
      .then(() => performComputation(file))
      .then(result => doSomething(result));
    // Resolves for each file, one at a time.
  });
  // This will resolve after the entire chain is resolved
  return sequence;
}
Addition example
const addTwo = async () => 2;
const addThree = async (inValue) => new Promise((resolve) => setTimeout(() => resolve(inValue + 3), 2000));
const addFour = (inValue) => new Promise((res) => setTimeout(() => res(inValue + 4), 1000));
const addFive = async (inValue) => inValue + 5;

// Function which handles promises from above
async function sequenceAddition() {
  let sum = await [addTwo, addThree, addFour, addFive].reduce(
    (promise, currPromise) => promise.then((val) => currPromise(val)),
    Promise.resolve()
  );
  console.log('sum:', sum); // 2 + 3 + 4 + 5 = 14
}

// Run function. See console for result.
sequenceAddition();
General syntax to use reduce()
function sequence(tasks, fn) {
  return tasks.reduce((promise, task) => promise.then(() => fn(task)), Promise.resolve());
}
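A hypothetical usage sketch, where fn does the per-item async work:
const urls = ['/url1', '/url2', '/url3'];
// each fetch starts only after the previous one settles
sequence(urls, url => fetch(url)).then(() => console.log('all done'));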
UPDATE
items-promise is a ready to use NPM package doing the same.
I've had to run a lot of sequential tasks and used these answers to forge a function that would take care of handling any sequential task...
function one_by_one(objects_array, iterator, callback) {
  var start_promise = objects_array.reduce(function (prom, object) {
    return prom.then(function () {
      return iterator(object);
    });
  }, Promise.resolve()); // initial
  if (callback) {
    start_promise.then(callback);
  } else {
    return start_promise;
  }
}
The function takes 2 arguments + 1 optional. First argument is the array on which we will be working. The second argument is the task itself, a function that returns a promise, the next task will be started only when this promise resolves. The third argument is a callback to run when all tasks have been done. If no callback is passed, then the function returns the promise it created so we can handle the end.
Here's an example of usage:
var filenames = ['1.jpg','2.jpg','3.jpg'];
var resize_task = function(filename){
//return promise of async resizing with filename
};
one_by_one(filenames, resize_task);
Hope it saves someone some time...
With Async/Await (if you have the support of ES7)
function downloadFile(fileUrl) { ... } // This function return a Promise
async function main()
{
var filesList = [...];
for (const file of filesList) {
await downloadFile(file);
}
}
(You must use a for loop, not forEach, because await inside a forEach callback does not pause the enclosing function, so the iterations would not run sequentially.)
Without Async/Await (using Promise)
function downloadFile(fileUrl) { ... } // This function return a Promise
function downloadRecursion(filesList, index)
{
    index = index || 0;
    if (index < filesList.length)
    {
        // Return the chain so callers can know when the whole sequence is done
        return downloadFile(filesList[index]).then(function()
        {
            return downloadRecursion(filesList, index + 1); // self invocation - recursion!
        });
    }
    else
    {
        return Promise.resolve();
    }
}
function main()
{
var filesList = [...];
downloadRecursion(filesList);
}
My preferred solution:
function processArray(arr, fn) {
return arr.reduce(
(p, v) => p.then((a) => fn(v).then(r => a.concat([r]))),
Promise.resolve([])
);
}
It's not fundamentally different from others published here but:
Applies the function to items in series
Resolves to an array of results
Doesn't require async/await (support is still quite limited, circa 2017)
Uses arrow functions; nice and concise
Example usage:
const numbers = [0, 4, 20, 100];
const multiplyBy3 = (x) => new Promise(res => res(x * 3));
// Prints [ 0, 12, 60, 300 ]
processArray(numbers, multiplyBy3).then(console.log);
Tested on reasonably current Chrome (v59) and Node.js (v8.1.2).
First, you need to understand that a promise starts executing at the time it is created.
So for example if you have a code:
["a","b","c"].map(x => returnsPromise(x))
You need to change it to:
["a","b","c"].map(x => () => returnsPromise(x))
Then we need to sequentially chain promises:
["a", "b", "c"].map(x => () => returnsPromise(x))
.reduce(
(before, after) => before.then(_ => after()),
Promise.resolve()
)
Executing after() inside the reduce callback makes sure that each promise is created (and starts executing) only when its turn comes.
The nicest solution that I was able to figure out was with Bluebird promises. You can just do Promise.resolve(files).each(fs.readFileAsync);, which guarantees that the promises are resolved sequentially, in order.
With async/await of ES2017 (and maybe some features of ES2018), this can be reduced to this form:
function readFile(file) {
... // Returns a promise.
}
async function readFiles(files) {
for (const file of files) {
await readFile(file)
}
}
I haven't seen another answer express that simplicity. The OP said parallel execution of readFile was not desired. However, with IO like this it really makes sense not to block on a single file read, while keeping the loop execution synchronous (you don't want to do the next step until all files have been read). Since I just learned about this and am a bit excited about it, I'll share the approach of parallel asynchronous execution of readFile with overall synchronous execution of readFiles.
async function readFiles(files) {
await Promise.all(files.map(readFile))
}
Isn't that a thing of beauty?
This is a slight variation of another answer above. Using native Promises:
function inSequence(tasks) {
return tasks.reduce((p, task) => p.then(task), Promise.resolve())
}
Explanation
If you have these tasks [t1, t2, t3], then the above is equivalent to Promise.resolve().then(t1).then(t2).then(t3). It's the behavior of reduce.
How to use
First, you need to construct a list of tasks! A task is a function that accepts no argument. If you need to pass arguments to your function, then use bind or other methods to create a task. For example:
var tasks = files.map(file => processFile.bind(null, file))
inSequence(tasks).then(...)
I created this simple method on the Promise object:
Create and add a Promise.sequence method to the Promise object
Promise.sequence = function (chain) {
var results = [];
var entries = chain;
if (entries.entries) entries = entries.entries();
return new Promise(function (yes, no) {
var next = function () {
var entry = entries.next();
if (entry.done) yes(results);
else {
    // Run the task, store its result, then move on to the next one
    entry.value[1]().then(function (result) {
        results.push(result);
        next();
    }, function () { no(results); });
}
};
next();
});
};
Usage:
var todo = [];
todo.push(firstPromise);
if (someCriterium) todo.push(optionalPromise);
todo.push(lastPromise);
// Invoking them
Promise.sequence(todo)
.then(function(results) {}, function(results) {});
The best thing about this extension to the Promise object is that it is consistent with the style of promises. Promise.all and Promise.sequence are invoked the same way, but have different semantics.
Caution
Sequential running of promises is not usually a very good way to use promises. It's usually better to use Promise.all, and let the browser run the code as fast as possible. However, there are real use cases for it - for example when writing a mobile app using javascript.
My answer based on https://stackoverflow.com/a/31070150/7542429.
Promise.series = function series(arrayOfPromises) {
var results = [];
return arrayOfPromises.reduce(function(seriesPromise, promise) {
return seriesPromise.then(function() {
return promise
.then(function(result) {
results.push(result);
});
});
}, Promise.resolve())
.then(function() {
return results;
});
};
This solution returns the results as an array like Promise.all().
Usage:
Promise.series([array of promises])
.then(function(results) {
// do stuff with results here
});
Use Array.prototype.reduce, and remember to wrap your promises in a function otherwise they will already be running!
// array of Promise providers
const providers = [
function(){
return Promise.resolve(1);
},
function(){
return Promise.resolve(2);
},
function(){
return Promise.resolve(3);
}
]
const inSeries = function(providers){
const seed = Promise.resolve(null);
return providers.reduce(function(a,b){
return a.then(b);
}, seed);
};
nice and easy...
you should be able to re-use the same seed for performance, etc.
It's important to guard against empty arrays or arrays with only 1 element when using reduce, so this technique is your best bet:
const providers = [
function(v){
return Promise.resolve(v+1);
},
function(v){
return Promise.resolve(v+2);
},
function(v){
return Promise.resolve(v+3);
}
]
const inSeries = function(providers, initialVal){
if(providers.length < 1){
return Promise.resolve(null)
}
return providers.reduce((a,b) => a.then(b), providers.shift()(initialVal));
};
and then call it like:
inSeries(providers, 1).then(v => {
console.log(v); // 7
});
Using modern ES:
const series = async (tasks) => {
  const results = [];
  for (const task of tasks) {
    // Each task is a function that returns a promise, so nothing
    // starts running until the previous task has resolved.
    const result = await task();
    results.push(result);
  }
  return results;
};
//...
const fileContents = await series(files.map(file => () => readFile(file)));
Most of the answers don't include the results of ALL promises individually, so in case someone is looking for this particular behaviour, this is a possible solution using recursion.
It follows the style of Promise.all:
Returns the array of results in the .then() callback.
If some promise fails, it's returned immediately in the .catch() callback.
const promiseEach = (arrayOfTasks) => {
let results = []
return new Promise((resolve, reject) => {
const resolveNext = (arrayOfTasks) => {
// If all tasks are already resolved, return the final array of results
if (arrayOfTasks.length === 0) return resolve(results)
// Extract first promise and solve it
const first = arrayOfTasks.shift()
first().then((res) => {
results.push(res)
resolveNext(arrayOfTasks)
}).catch((err) => {
reject(err)
})
}
resolveNext(arrayOfTasks)
})
}
// Let's try it 😎
const promise = (time, shouldThrowError) => new Promise((resolve, reject) => {
const timeInMs = time * 1000
setTimeout(()=>{
console.log(`Waited ${time} secs`)
if (shouldThrowError) return reject(new Error('Promise failed'))
resolve(time)
}, timeInMs)
})
const tasks = [() => promise(1), () => promise(2)]
promiseEach(tasks)
.then((res) => {
console.log(res) // [1, 2]
})
// Oops some promise failed
.catch((error) => {
console.log(error)
})
Note about the tasks array declaration:
In this case it is not possible to use the following notation, which is what Promise.all would use:
const tasks = [promise(1), promise(2)]
And we have to use:
const tasks = [() => promise(1), () => promise(2)]
The reason is that JavaScript starts executing a promise immediately after it is declared. Methods like Promise.all just check that the state of each of them is fulfilled or rejected; they don't start the execution themselves. Using () => promise() we delay the execution until the function is called.
You can use this function that gets promiseFactories List:
function executeSequentially(promiseFactories) {
var result = Promise.resolve();
promiseFactories.forEach(function (promiseFactory) {
result = result.then(promiseFactory);
});
return result;
}
Promise Factory is just simple function that returns a Promise:
function myPromiseFactory() {
return somethingThatCreatesAPromise();
}
It works because a promise factory doesn't create the promise until it's asked to. It works the same way as a then function – in fact, it's the same thing!
You don't want to operate over an array of promises at all. Per the Promise spec, as soon as a promise is created, it begins executing. So what you really want is an array of promise factories...
If you want to learn more on Promises, you should check this link:
https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html
If you want you can use reduce to make a sequential promise, for example:
[2,3,4,5,6,7,8,9].reduce((promises, page) => {
  // the array items only drive the number of iterations here;
  // each step logs the previous value and resolves with the next one
  return promises.then((previous) => {
    console.log(previous);
    return Promise.resolve(previous + 1);
  });
}, Promise.resolve(1));
It will always run in sequence.
I really liked @joelnet's answer, but to me that style of coding is a little bit tough to digest, so I spent a couple of days trying to figure out how I would express the same solution in a more readable manner. This is my take, just with a different syntax and some comments.
// first take your work
const urls = ['/url1', '/url2', '/url3', '/url4']
// next convert each item to a function that returns a promise
const functions = urls.map((url) => {
// For every url we return a new function
return () => {
return new Promise((resolve) => {
// random wait in milliseconds
const randomWait = parseInt((Math.random() * 1000),10)
console.log('waiting to resolve in ms', randomWait)
setTimeout(()=>resolve({randomWait, url}),randomWait)
})
}
})
const promiseReduce = (acc, next) => {
// we wait for the accumulator to resolve its promise
return acc.then((accResult) => {
// and then we return a new promise that will become
// the new value for the accumulator
return next().then((nextResult) => {
// that eventually will resolve to a new array containing
// the value of the two promises
return accResult.concat(nextResult)
})
})
};
// the accumulator will always be a promise that resolves to an array
const accumulator = Promise.resolve([])
// we call reduce with the reduce function and the accumulator initial value
functions.reduce(promiseReduce, accumulator)
.then((result) => {
// let's display the final value here
console.log('=== The final result ===')
console.log(result)
})
As Bergi noticed, I think the best and clearest solution is to use BlueBird.each, code below:
const BlueBird = require('bluebird');
BlueBird.each(files, fs.readFileAsync);
I find myself coming back to this question many times and the answers aren't exactly giving me what I need, so putting this here for anyone that needs this too.
The code below does sequential promise execution (one after another), where each round consists of multiple calls:
async function sequence(list, cb) {
  const result = [];
  await list.reduce(
    (promise, item) => promise.then(() => cb(item)).then((res) => result.push(res)),
    Promise.resolve()
  );
  return result;
}
Showcase:
<script src="https://cdnjs.cloudflare.com/ajax/libs/axios/0.15.3/axios.min.js"></script>
<script src="https://unpkg.com/@babel/standalone@7/babel.min.js"></script>
<script type="text/babel">
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
async function readFile(url, index) {
console.log('Running index: ', index);
// First action
const firstTime = await axios.get(url);
console.log('First API response: ', firstTime.data.activity);
// Second action
await sleep(1000);
// Third action
const secondTime = await axios.get(url);
console.log('Second API response: ', secondTime.data.activity);
// Fourth action
await sleep(1000);
return secondTime.data;
}
async function sequence(urls, fn) {
const result = [];
await urls.reduce(async (promise, url, index) => promise.then(() => fn(url, index)).then((res) => result.push(res)), Promise.resolve());
return result;
}
const urls = [
'https://www.boredapi.com/api/activity',
'https://www.boredapi.com/api/activity',
'https://www.boredapi.com/api/activity',
];
(async function init() {
const result = await sequence(urls, readFile);
console.log('result', result);
})()
</script>
I use the following code to extend the Promise object. It handles rejection of the promises and returns an array of results.
Code
/*
Runs tasks in sequence and resolves a promise upon finish
tasks: an array of functions that return a promise upon call.
parameters: an array of arrays corresponding to the parameters to be passed on each function call.
context: Object to use as context to call each function. (The 'this' keyword that may be used inside the function definition)
*/
Promise.sequence = function(tasks, parameters = [], context = null) {
return new Promise((resolve, reject)=>{
var nextTask = tasks.splice(0,1)[0].apply(context, parameters[0]); //Dequeue and call the first task
var output = new Array(tasks.length + 1);
var errorFlag = false;
tasks.forEach((task, index) => {
nextTask = nextTask.then(r => {
output[index] = r;
return task.apply(context, parameters[index+1]);
}, e=>{
output[index] = e;
errorFlag = true;
return task.apply(context, parameters[index+1]);
});
});
// Last task
nextTask.then(r=>{
output[output.length - 1] = r;
if (errorFlag) reject(output); else resolve(output);
})
.catch(e=>{
output[output.length - 1] = e;
reject(output);
});
});
};
Example
function functionThatReturnsAPromise(n) {
return new Promise((resolve, reject)=>{
//Emulating real life delays, like a web request
setTimeout(()=>{
resolve(n);
}, 1000);
});
}
var arrayOfArguments = [['a'],['b'],['c'],['d']];
var arrayOfFunctions = (new Array(4)).fill(functionThatReturnsAPromise);
Promise.sequence(arrayOfFunctions, arrayOfArguments)
.then(console.log)
.catch(console.error);
Your approach is not bad, but it does have two issues: it swallows errors and it employs the Explicit Promise Construction Antipattern.
You can solve both of these issues, and make the code cleaner, while still employing the same general strategy:
var readFile = function(file) {
... // Returns a promise.
};
var readFiles = function(files) {
var readSequential = function(index) {
if (index < files.length) {
return readFile(files[index]).then(function() {
return readSequential(index + 1);
});
}
};
// using Promise.resolve() here in case files.length is 0
return Promise.resolve(readSequential(0)); // Start!
};
This is the sequentially implementation that I use in various projects:
const files = [file1, file2, file3];
const fileContents = sequentially(readFile, files);
// somewhere else in the code:
export const sequentially = async <T, P>(
toPromise: (element: T) => Promise<P>,
elements: T[]
): Promise<P[]> => {
const results: P[] = [];
await elements.reduce(async (sequence, element) => {
await sequence;
results.push(await toPromise(element));
}, Promise.resolve());
return results;
};
Here is my Angular/TypeScript approach, using RxJS:
Given an array of URL strings, convert it into an Observable using the from function.
Use pipe to wrap the Ajax request, immediate response logic, any desired delay, and error handling.
Inside of the pipe, use concatMap to serialize the requests. Otherwise, using Javascript forEach or map would make the requests at the same time.
Use RxJS ajax to make the call, and also to add any desired delay after each call returns.
Working example: https://stackblitz.com/edit/rxjs-bnrkix?file=index.ts
The code looks like this (I left in some extras so you can choose what to keep or discard):
import { ajax } from 'rxjs/ajax';
import { catchError, concatMap, delay, from, of, map } from 'rxjs';
const urls = [
'https://randomuser.me/api/',
'https://randomuser.me/api/',
'https://randomuser.me/api/',
];
const delayAfterCall = 500;
from(urls)
.pipe(
concatMap((url: string) => {
return ajax.getJSON(url).pipe(
map((response) => {
console.log('Done! Received:', response);
return response;
}),
catchError((error) => {
console.error('Error: ', error);
return of(error);
}),
delay(delayAfterCall)
);
})
)
.subscribe((response) => {
console.log('received email:', response.results[0].email);
});
On the basis of the question's title, "Resolve promises one after another (i.e. in sequence)?", we might understand that the OP is more interested in the sequential handling of promises on settlement than sequential calls per se.
This answer is offered :
to demonstrate that sequential calls are not necessary for sequential handling of responses.
to expose viable alternative patterns to this page's visitors - including the OP if he is still interested over a year later.
despite the OP's assertion that he does not want to make calls concurrently, which may genuinely be the case but equally may be an assumption based on the desire for sequential handling of responses as the title implies.
If concurrent calls are genuinely not wanted then see Benjamin Gruenbaum's answer which covers sequential calls (etc) comprehensively.
If however, you are interested (for improved performance) in patterns which allow concurrent calls followed by sequential handling of responses, then please read on.
It's tempting to think you have to use Promise.all(arr.map(fn)).then(fn) (as I have done many times) or a Promise lib's fancy sugar (notably Bluebird's). However (with credit to this article), an arr.map(fn).reduce(fn) pattern will do the job, with the advantages that it:
works with any promise lib - even pre-compliant versions of jQuery - only .then() is used.
affords the flexibility to skip-over-error or stop-on-error, whichever you want with a one line mod.
Here it is, written for Q.
var readFiles = function(files) {
return files.map(readFile) //Make calls in parallel.
.reduce(function(sequence, filePromise) {
return sequence.then(function() {
return filePromise;
}).then(function(file) {
//Do stuff with file ... in the correct sequence!
}, function(error) {
console.log(error); //optional
return sequence;//skip-over-error. To stop-on-error, `return error` (jQuery), or `throw error` (Promises/A+).
});
}, Q()).then(function() {
// all done.
});
};
Note: only that one fragment, Q(), is specific to Q. For jQuery you need to ensure that readFile() returns a jQuery promise. With A+ libs, foreign promises will be assimilated.
The key here is the reduction's sequence promise, which sequences the handling of the readFile promises but not their creation.
And once you have absorbed that, it's maybe slightly mind-blowing when you realise that the .map() stage isn't actually necessary! The whole job, parallel calls plus serial handling in the correct order, can be achieved with reduce() alone, plus the added advantage of further flexibility to:
convert from parallel async calls to serial async calls by simply moving one line - potentially useful during development.
Here it is, for Q again.
var readFiles = function(files) {
return files.reduce(function(sequence, f) {
var filePromise = readFile(f);//Make calls in parallel. To call sequentially, move this line down one.
return sequence.then(function() {
return filePromise;
}).then(function(file) {
//Do stuff with file ... in the correct sequence!
}, function(error) {
console.log(error); //optional
return sequence;//Skip over any errors. To stop-on-error, `return error` (jQuery), or `throw error` (Promises/A+).
});
}, Q()).then(function() {
// all done.
});
};
That's the basic pattern. If you wanted also to deliver data (eg the files or some transform of them) to the caller, you would need a mild variant.
If someone else needs a guaranteed way of resolving Promises STRICTLY sequentially when performing CRUD operations, you can also use the following code as a basis.
As long as you add return before calling each function that describes a Promise, and use this example as a basis, the next .then() call will CONSISTENTLY start after the completion of the previous one:
getRidOfOlderShoutsPromise = () => {
return readShoutsPromise('BEFORE')
.then(() => {
return deleteOlderShoutsPromise();
})
.then(() => {
return readShoutsPromise('AFTER')
})
.catch(err => console.log(err.message));
}
deleteOlderShoutsPromise = () => {
return new Promise ( (resolve, reject) => {
console.log("in deleteOlderShouts");
let d = new Date();
let twoMinutesAgo = d - 1000 * 120; // two minutes in milliseconds
All_Shouts.deleteMany({ dateTime: {$lt: twoMinutesAgo}}, function(err) {
if (err) reject();
console.log("DELETED OLDs at "+d);
resolve();
});
});
}
readShoutsPromise = (tex) => {
return new Promise( (resolve, reject) => {
console.log("in readShoutsPromise -"+tex);
All_Shouts
.find({})
.sort([['dateTime', 'ascending']])
.exec(function (err, data){
if (err) reject();
let d = new Date();
console.log("shouts "+tex+" delete PROMISE = "+data.length +"; date ="+d);
resolve(data);
});
});
}
The Array push and pop methods can be used to build a sequence of promises. You can also push new promises when you need additional data. This is the code I will use in a React infinite loader to load a sequence of pages.
var promises = [Promise.resolve()];
function methodThatReturnsAPromise(page) {
return new Promise((resolve, reject) => {
setTimeout(() => {
console.log(`Resolve-${page}! ${new Date()} `);
resolve();
}, 1000);
});
}
function pushPromise(page) {
promises.push(promises.pop().then(function () {
return methodThatReturnsAPromise(page)
}));
}
pushPromise(1);
pushPromise(2);
pushPromise(3);
(function() {
function sleep(ms) {
return new Promise(function(resolve) {
setTimeout(function() {
return resolve();
}, ms);
});
}
function serial(arr, index, results) {
if (index == arr.length) {
return Promise.resolve(results);
}
return new Promise(function(resolve, reject) {
if (!index) {
index = 0;
results = [];
}
return arr[index]()
.then(function(d) {
return resolve(d);
})
.catch(function(err) {
return reject(err);
});
})
.then(function(result) {
results.push(result);
return serial(arr, index + 1, results);
})
.catch(function(err) {
throw err;
});
}
const a = [5000, 5000, 5000];
serial(a.map(x => () => sleep(x)));
})();
Here the key is how you call the sleep function: you need to pass an array of functions that each return a promise, instead of an array of promises.