JavaScript: Run async tasks in series (or sequence) without libraries

I want to run some asynchronous tasks in a loop, but they should execute in sequence (one after another). It should be vanilla JS, without any libraries.
var doSome = function(i) {
  return Promise.resolve(setTimeout(() => {
    console.log('done... ' + i)
  }, 1000 * (i % 3)));
}
var looper = function() {
  var p = Promise.resolve();
  [1, 2, 3].forEach((n) => {
    p = p.then(() => doSome(n))
  })
  return p;
}
looper();
Current output:
calling for ...1
calling for ...2
calling for ...3
Promise {<resolved>: 8260}
done... 3
done... 1
done... 2
Expected output:
calling for ...1
calling for ...2
calling for ...3
Promise {<resolved>: 8260}
done... 1
done... 2
done... 3
Note: Kindly answer only if you have tried it and it works as expected.

So, from your comment below, I think your own example code isn't quite matching your description. I think what you want for your example is something closer to the below snippet:
var doSome = function(i) {
  return new Promise((resolve, reject) => {
    setTimeout(() => resolve(`Completing ${i}`), 1000 * (i % 3))
  });
}
var looper = function() {
  [1, 2, 3].forEach((n) => {
    doSome(n).then(console.log);
  });
}
looper();
Here, the array [1, 2, 3] is iterated over, and an asynchronous process is generated for each one. As each of those async processes complete, we .then on them and console log their resolved result.
So now the question becomes: how do we best queue the results? Below, I stored them into an array, then leveraged async/await in order to pause execution on the results until they complete in order.
// This is probably what you want
var doSome = function(i) {
  return new Promise((resolve, reject) => {
    setTimeout(() => resolve(`Completing ${i}`), 1000 * (i % 3))
  });
}
var looper = async function() {
  const nums = [1, 2, 3];
  const promises = [];
  nums.forEach((n) => {
    console.log(`Queueing ${n}`);
    promises.push(doSome(n));
  });
  for (let promise of promises) {
    const result = await promise;
    console.log(result);
  }
}
looper();
Alternatively, we could have eliminated a loop and only started each task after the previous one completed:
// Don't use this-- it is less efficient
var doSome = function(i) {
  return new Promise((resolve, reject) => {
    setTimeout(() => resolve(`Completing ${i}`), 1000 * (i % 3))
  });
}
var looper = async function() {
  const nums = [1, 2, 3];
  for (let n of nums) {
    console.log(`Queueing ${n}`);
    const result = await doSome(n);
    console.log(result);
  }
}
looper();
But, as you can see in the log, this approach won't queue up the next async process until the previous one has completed. This is undesirable and doesn't match your use case. What we get from the two-looped approach preceding this one is that all async processes are immediately executed, but then we order/queue the results so they respect our predefined order, not the order in which they resolve.
UPDATE
Regarding Promise.all, async/await and the intended behavior of the queueing:
So, if you want to avoid using async/await, I think you could write some sort of utility:
var doSome = function(i) {
  return new Promise((resolve, reject) => {
    setTimeout(() => resolve(`Completing ${i}`), 1000 * (i % 3))
  });
}
function handlePromiseQueue(queue) {
  let promise = queue.shift();
  promise.then((data) => {
    console.log(data)
    if (queue.length > 0) {
      handlePromiseQueue(queue);
    }
  })
}
var looper = function() {
  const nums = [1, 2, 3];
  const promises = [];
  nums.forEach((n) => {
    console.log(`Queueing ${n}`);
    promises.push(doSome(n));
  });
  handlePromiseQueue(promises);
}
looper();
HOWEVER, let me be clear-- if user Bergi's assertion is correct, and it is not important that each async promise be executed upon as soon as it resolves, only that none of them be acted upon until they all have come back, then this can 100% be simplified with Promise.all:
// This is probably what you want
var doSome = function(i) {
  return new Promise((resolve, reject) => {
    setTimeout(() => resolve(`Completing ${i}`), 1000 * (i % 3))
  });
}
function handlePromiseQueue(queue) {
  let promise = queue.shift();
  promise.then((data) => {
    console.log(data)
    if (queue.length > 0) {
      handlePromiseQueue(queue);
    }
  })
}
var looper = function() {
  const nums = [1, 2, 3];
  const promises = [];
  nums.forEach((n) => {
    console.log(`Queueing ${n}`);
    promises.push(doSome(n));
  });
  Promise.all(promises).then(() => handlePromiseQueue(promises));
}
looper();
Finally, as Bergi also pointed out, I am playing fast and loose here by not setting up any catch on these various promises-- I omitted them for brevity in examples, but for your purposes you will want to include proper handling for errors or bad requests.
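As a sketch of what that could look like (assuming you want the queue to keep draining even when a request fails), handlePromiseQueue might be extended along these lines:
function handlePromiseQueue(queue) {
  let promise = queue.shift();
  promise
    .then((data) => console.log(data))
    .catch((err) => console.error('Request failed:', err)) // log, but keep going
    .then(() => {
      if (queue.length > 0) {
        handlePromiseQueue(queue); // drain the rest regardless of failures
      }
    });
}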

Related

How to run setTimeout synchronously in a loop [duplicate]

for (let i = 0; i < 10; i++) {
  const promise = new Promise((resolve, reject) => {
    const timeout = Math.random() * 1000;
    setTimeout(() => {
      console.log(i);
    }, timeout);
  });
  // TODO: Chain this promise to the previous one (maybe without having it running?)
}
The above will give the following random output:
6
9
4
8
5
1
7
2
3
0
The task is simple: Make sure each promise runs only after the other one (.then()).
For some reason, I couldn't find a way to do it.
I tried generator functions (yield), tried simple functions that return a promise, but at the end of the day it always comes down to the same problem: The loop is synchronous.
With async I'd simply use async.series().
How do you solve it?
As you already hinted in your question, your code creates all promises synchronously. Instead they should only be created at the time the preceding one resolves.
Secondly, each promise that is created with new Promise needs to be resolved with a call to resolve (or reject). This should be done when the timer expires. That will trigger any then callback you would have on that promise. And such a then callback (or await) is a necessity in order to implement the chain.
With those ingredients, there are several ways to perform this asynchronous chaining:
With a for loop that starts with an immediately resolving promise
With Array#reduce that starts with an immediately resolving promise
With a function that passes itself as resolution callback
With ECMAScript 2017's async/await syntax
With ECMAScript 2018's for await...of syntax
But let me first introduce a very useful, generic function.
Promisifying setTimeout
Using setTimeout is fine, but we actually need a promise that resolves when the timer expires. So let's create such a function: this is called promisifying a function, in this case we will promisify setTimeout. It will improve the readability of the code, and can be used for all of the above options:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
See a snippet and comments for each of the options below.
1. With for
You can use a for loop, but you must make sure it doesn't create all promises synchronously. Instead you create an initial immediately resolving promise, and then chain new promises as the previous ones resolve:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
for (let i = 0, p = Promise.resolve(); i < 10; i++) {
  p = p.then(() => delay(Math.random() * 1000))
       .then(() => console.log(i));
}
So this code creates one long chain of then calls. The variable p only serves to not lose track of that chain, and allow a next iteration of the loop to continue on the same chain. The callbacks will start executing after the synchronous loop has completed.
It is important that the then-callback returns the promise that delay() creates: this will ensure the asynchronous chaining.
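To see why that return matters, here is a minimal counterexample sketch (my own illustration): if the callback does not return the delay() promise, the chain has nothing to wait on, so all ten logs fire almost immediately:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
for (let i = 0, p = Promise.resolve(); i < 10; i++) {
  p = p.then(() => { delay(1000); }) // promise not returned: the chain does not wait
       .then(() => console.log(i));  // all ten numbers log right away
}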
2. With reduce
This is just a more functional approach to the previous strategy. You create an array with the same length as the chain you want to execute, and start out with an immediately resolving promise:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
[...Array(10)].reduce((p, _, i) =>
  p.then(() => delay(Math.random() * 1000))
   .then(() => console.log(i))
, Promise.resolve());
This is probably more useful when you actually have an array with data to be used in the promises.
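For instance, with a hypothetical list of URLs and a stand-in fetchOne() helper (both invented for illustration), the same reduce pattern threads real data through the chain:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
const urls = ['/a', '/b', '/c']; // hypothetical data
const fetchOne = url => delay(500).then(() => `response for ${url}`); // stand-in for a real request
urls.reduce((p, url) =>
  p.then(() => fetchOne(url))
   .then(result => console.log(result))
, Promise.resolve());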
3. With a function passing itself as resolution-callback
Here we create a function and call it immediately. It creates the first promise synchronously. When it resolves, the function is called again:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
(function loop(i) {
  if (i >= 10) return; // all done
  delay(Math.random() * 1000).then(() => {
    console.log(i);
    loop(i + 1);
  });
})(0);
This creates a function named loop, and at the very end of the code you can see it gets called immediately with argument 0. This counter is passed as the i argument. The function will create a new promise if that counter is still below 10, otherwise the chaining stops.
When delay() resolves, it will trigger the then callback which will call the function again.
4. With async/await
Modern JS engines support this syntax:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
(async function loop() {
  for (let i = 0; i < 10; i++) {
    await delay(Math.random() * 1000);
    console.log(i);
  }
})();
It may look strange, as it seems like the promises are created synchronously, but in reality the async function returns when it executes the first await. Every time an awaited promise resolves, the function's running context is restored, and proceeds after the await, until it encounters the next one, and so it continues until the loop finishes.
5. With for await...of
With ECMAScript 2018, for await...of found its way into modern JavaScript engines. Although it does not really reduce the code in this case, it allows you to isolate the definition of the random-interval chain from the actual iteration of it:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
async function* randomDelays(count, max) {
  for (let i = 0; i < count; i++) yield delay(Math.random() * max).then(() => i);
}
(async function loop() {
  for await (let i of randomDelays(10, 1000)) console.log(i);
})();
You can use async/await for this. I would explain more, but there's nothing really to it: it's just a regular for loop, but I added the await keyword before the construction of your Promise.
What I like about this is that your Promise can resolve a normal value instead of having a side effect like your code (or other answers here) include. This gives you powers like in The Legend of Zelda: A Link to the Past, where you can affect things in both the Light World and the Dark World, i.e., you can easily work with data before/after the Promised data is available without having to resort to deeply nested functions, other unwieldy control structures, or stupid IIFEs.
// where DarkWorld is in the scary, unknown future
// where LightWorld is the world we saved from Ganondorf
LightWorld ... await DarkWorld
So here's what that will look like ...
async function someProcedure(n) {
  for (let i = 0; i < n; i++) {
    const t = Math.random() * 1000
    const x = await new Promise(r => setTimeout(r, t, i))
    console.log(i, x)
  }
  return 'done'
}
someProcedure(10)
  .then(console.log)
  .catch(console.error)
0 0
1 1
2 2
3 3
4 4
5 5
6 6
7 7
8 8
9 9
done
See how we don't have to deal with that bothersome .then call within our procedure? And async keyword will automatically ensure that a Promise is returned, so we can chain a .then call on the returned value. This sets us up for great success: run the sequence of n Promises, then do something important – like display a success/error message.
Based on the excellent answer by trincot, I wrote a reusable function that accepts a handler to run over each item in an array. The function itself returns a promise that allows you to wait until the loop has finished and the handler function that you pass may also return a promise.
loop(items, handler) : Promise
It took me some time to get it right, but I believe the following code will be usable in a lot of promise-looping situations.
Copy-paste ready code:
// SEE https://stackoverflow.com/a/46295049/286685
const loop = (arr, fn, busy, err, i = 0) => {
  const body = (ok, er) => {
    try { const r = fn(arr[i], i, arr); r && r.then ? r.then(ok).catch(er) : ok(r) }
    catch (e) { er(e) }
  }
  const next = (ok, er) => () => loop(arr, fn, ok, er, ++i)
  const run = (ok, er) => i < arr.length ? new Promise(body).then(next(ok, er)).catch(er) : ok()
  return busy ? run(busy, err) : new Promise(run)
}
Usage
To use it, call it with the array to loop over as the first argument and the handler function as the second. Do not pass parameters for the third, fourth and fifth arguments, they are used internally.
const loop = (arr, fn, busy, err, i = 0) => {
  const body = (ok, er) => {
    try { const r = fn(arr[i], i, arr); r && r.then ? r.then(ok).catch(er) : ok(r) }
    catch (e) { er(e) }
  }
  const next = (ok, er) => () => loop(arr, fn, ok, er, ++i)
  const run = (ok, er) => i < arr.length ? new Promise(body).then(next(ok, er)).catch(er) : ok()
  return busy ? run(busy, err) : new Promise(run)
}
const items = ['one', 'two', 'three']
loop(items, item => {
  console.info(item)
})
.then(() => console.info('Done!'))
Advanced use cases
Let's look at the handler function, nested loops and error handling.
handler(current, index, all)
The handler gets passed 3 arguments. The current item, the index of the current item and the complete array being looped over. If the handler function needs to do async work, it can return a promise and the loop function will wait for the promise to resolve before starting the next iteration. You can nest loop invocations and all works as expected.
const loop = (arr, fn, busy, err, i = 0) => {
  const body = (ok, er) => {
    try { const r = fn(arr[i], i, arr); r && r.then ? r.then(ok).catch(er) : ok(r) }
    catch (e) { er(e) }
  }
  const next = (ok, er) => () => loop(arr, fn, ok, er, ++i)
  const run = (ok, er) => i < arr.length ? new Promise(body).then(next(ok, er)).catch(er) : ok()
  return busy ? run(busy, err) : new Promise(run)
}
const tests = [
  [],
  ['one', 'two'],
  ['A', 'B', 'C']
]
loop(tests, (test, idx, all) => new Promise((testNext, testFailed) => {
  console.info('Performing test ' + idx)
  return loop(test, (testCase) => {
    console.info(testCase)
  })
  .then(testNext)
  .catch(testFailed)
}))
.then(() => console.info('All tests done'))
Error handling
Many promise-looping examples I looked at break down when an exception occurs. Getting this function to do the right thing was pretty tricky, but as far as I can tell it is working now. Make sure to add a catch handler to any inner loops and invoke the rejection function when it happens. E.g.:
const loop = (arr, fn, busy, err, i = 0) => {
  const body = (ok, er) => {
    try { const r = fn(arr[i], i, arr); r && r.then ? r.then(ok).catch(er) : ok(r) }
    catch (e) { er(e) }
  }
  const next = (ok, er) => () => loop(arr, fn, ok, er, ++i)
  const run = (ok, er) => i < arr.length ? new Promise(body).then(next(ok, er)).catch(er) : ok()
  return busy ? run(busy, err) : new Promise(run)
}
const tests = [
  [],
  ['one', 'two'],
  ['A', 'B', 'C']
]
loop(tests, (test, idx, all) => new Promise((testNext, testFailed) => {
  console.info('Performing test ' + idx)
  loop(test, (testCase) => {
    if (idx == 2) throw new Error()
    console.info(testCase)
  })
  .then(testNext)
  .catch(testFailed) // <--- DON'T FORGET!!
}))
.then(() => console.error('Oops, test should have failed'))
.catch(e => console.info('Successfully caught error: ', e))
.then(() => console.info('All tests done'))
UPDATE: NPM package
Since writing this answer, I turned the above code in an NPM package.
for-async
Install
npm install --save for-async
Import
var forAsync = require('for-async'); // CommonJS, or
import forAsync from 'for-async';
Usage (async)
var arr = ['some', 'cool', 'array'];
forAsync(arr, function(item, idx) {
  return new Promise(function(resolve) {
    setTimeout(function() {
      console.info(item, idx);
      // Logs 3 lines: `some 0`, `cool 1`, `array 2`
      resolve(); // <-- signals that this iteration is complete
    }, 25); // delay 25 ms to make async
  })
})
See the package readme for more details.
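If your environment supports async functions, the handler can presumably also be written as an async function, since an async function returns the promise forAsync waits on (an assumption based on the promise-returning example above, not something taken from the package readme):
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
forAsync(['some', 'cool', 'array'], async function(item, idx) {
  await delay(25); // any awaited work holds up the next iteration
  console.info(item, idx);
});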
If you are limited to ES6, the best option is Promise.all. Promise.all(array) returns a single promise that resolves to an array of results once every promise in the array argument has resolved.
Suppose you want to update many student records in the database; the following code demonstrates the concept of Promise.all in such a case:
let promises = students.map((student, index) => {
  // where students is a db object
  student.rollNo = index + 1;
  student.city = 'City Name';
  // Update whatever information on student you want
  return student.save();
});
Promise.all(promises).then(() => {
  // All the save queries will have been executed when .then runs
  // You can do further operations here, as all update operations are completed now
});
map is just one way to loop; you could also use a for, for...in, or forEach loop. The concept is simple: start the loop in which you want to do bulk async operations, push each such async operation (promise) into an array declared outside the scope of that loop, and once the loop completes, call Promise.all with that prepared array of queries/promises as its argument.
The basic idea is that the JavaScript loop is synchronous whereas the database call is async, and push is also synchronous, so the problem of asynchronous ordering never arises inside the loop itself.
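Here is a self-contained sketch of that pattern with a mocked save() (all names invented for illustration; real records would come from your database):
// fake records with an async save(), standing in for db objects
const students = [{ name: 'Ann' }, { name: 'Bob' }].map(s => ({
  ...s,
  save() { return new Promise(res => setTimeout(() => res(this), 50)); }
}));
const promises = students.map((student, index) => {
  student.rollNo = index + 1; // synchronous mutation inside the loop
  return student.save();      // push the async work as a promise
});
Promise.all(promises).then(saved => console.log('all saved:', saved.length));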
Here's my 2 cents' worth:
reusable function forpromise()
emulates a classic for loop
allows for early exit based on internal logic, returning a value
can collect an array of results passed into resolve/next/collect
defaults to start=0,increment=1
exceptions thrown inside loop are caught and passed to .catch()
function forpromise(lo, hi, st, res, fn) {
  if (typeof res === 'function') {
    fn = res;
    res = undefined;
  }
  if (typeof hi === 'function') {
    fn = hi;
    hi = lo;
    lo = 0;
    st = 1;
  }
  if (typeof st === 'function') {
    fn = st;
    st = 1;
  }
  return new Promise(function(resolve, reject) {
    (function loop(i) {
      if (i >= hi) return resolve(res);
      const promise = new Promise(function(nxt, brk) {
        try {
          fn(i, nxt, brk);
        } catch (ouch) {
          return reject(ouch);
        }
      });
      promise.catch(function(brkres) {
        hi = lo - st;
        resolve(brkres)
      }).then(function(el) {
        if (res) res.push(el);
        loop(i + st)
      });
    })(lo);
  });
}
//no result returned, just loop from 0 thru 9
forpromise(0, 10, function(i, next) {
  console.log("iterating:", i);
  next();
}).then(function() {
  console.log("test result 1", arguments);
  //shortform: no result returned, just loop from 0 thru 4
  forpromise(5, function(i, next) {
    console.log("counting:", i);
    next();
  }).then(function() {
    console.log("test result 2", arguments);
    //collect result array, even numbers only
    forpromise(0, 10, 2, [], function(i, collect) {
      console.log("adding item:", i);
      collect("result-" + i);
    }).then(function() {
      console.log("test result 3", arguments);
      //collect results, even numbers, break loop early with different result
      forpromise(0, 10, 2, [], function(i, collect, break_) {
        console.log("adding item:", i);
        if (i === 8) return break_("ending early");
        collect("result-" + i);
      }).then(function() {
        console.log("test result 4", arguments);
        // collect results, but break loop on exception thrown, which we catch
        forpromise(0, 10, 2, [], function(i, collect, break_) {
          console.log("adding item:", i);
          if (i === 4) throw new Error("failure inside loop");
          collect("result-" + i);
        }).then(function() {
          console.log("test result 5", arguments);
        }).catch(function(err) {
          console.log("caught in test 5:[Error ", err.message, "]");
        });
      });
    });
  });
});
Since ES2018, you can use for await...of:
(async () => {
  for await (const num of asyncIterable) {
    console.log(num);
  }
  // My action here
})();
For more information, see the for await...of documentation.
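The snippet assumes an asyncIterable already exists; for a runnable sketch, an async generator works (values and delay invented for illustration):
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
async function* makeAsyncIterable() {
  for (const num of [1, 2, 3]) {
    await delay(500); // simulate async work before producing each value
    yield num;
  }
}
(async () => {
  for await (const num of makeAsyncIterable()) {
    console.log(num); // logs 1, 2, 3, one roughly every 500 ms
  }
})();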
The previous answers left me confused, so I wrote the following, inspired by them. I think its logic is more obvious: I call the function recursively to replace the original for loop.
async function pointToCountry(world, data) { // data is the array being looped over
  if (data.length > 0) { // loop condition
    const da = data.shift(); // take the current row off the front of the array
    // Some business logic
    msg = da.info
    pointofView(world, da);
    // Await the current task
    await new Promise(r => setTimeout(_ => {
      r() // Resolve and finish the current task
    }, 5000))
    // Call itself and enter the next loop
    pointToCountry(world, data)
  } else { // Business logic after all tasks
    pointofView(world, { longitude: 0, latitude: 0 });
    world.controls().autoRotate = true;
  }
}
// This is my main function - calculate all projects by city
const projectCity = async (req, res, next) => {
  try {
    let record = [];
    let cityList = await CityModel.find({ active: true });
    for (let j = 0; j < cityList.length; j++) {
      let arr = [];
      let projectList = await getProduct(cityList[j]._id)
      arr.push({
        _id: cityList[j]._id,
        name: cityList[j].name,
        projectList: projectList
      })
      record.push(arr);
    }
    return res.status(200).send({
      status: CONSTANT.REQUESTED_CODES.SUCCESS,
      result: record });
  } catch (error) {
    return res.status(400).json(UTILS.errorHandler(error));
  }
};
async function getProduct(city) {
  let projectList = await ProjectModel.find({ city: city });
  return projectList;
}
I've created a snippet in Angular that loops a promise function indefinitely. You can start it, stop it, or restart it.
You basically need to recursively call the same method and await its current process, like so:
async autoloop(): Promise<void> {
  if (this.running) {
    await this.runMe();
    await this.autoloop();
  }
  return Promise.resolve();
}
TypeScript:
import { Component } from '@angular/core';
@Component({
  selector: 'my-app',
  templateUrl: './app.component.html',
  styleUrls: ['./app.component.css'],
})
export class AppComponent {
  messages: string[] = [];
  counter = 1;
  running = false;
  constructor() {
    this.start();
  }
  onClick(): void {
    this.running = !this.running;
    if (this.running) {
      this.start();
    } else {
      this.stop();
    }
  }
  async onRestartClick(): Promise<void> {
    await this.stop();
    this.messages = [];
    this.counter = 1;
    this.start();
  }
  start(): void {
    this.running = true;
    this.autoloop();
  }
  async stop(): Promise<void> {
    this.running = false;
    await this.delay(1000);
  }
  async autoloop(): Promise<void> {
    if (this.running) {
      await this.runMe();
      await this.autoloop();
    }
    return Promise.resolve();
  }
  async runMe(): Promise<void> {
    await this.delay(1000);
    if (this.running) {
      this.messages.push(`Message ${this.counter++}`);
    }
    return Promise.resolve();
  }
  async delay(ms: number) {
    await new Promise<void>((resolve) => setTimeout(() => resolve(), ms));
  }
}
HTML:
<h1>Endless looping a promise every 1 second</h1>
<button (click)="onClick()">Start / stop</button>
<button (click)="onRestartClick()">Restart</button>
<p *ngFor="let message of messages">
{{message}}
</p>

What are the practical differences between an AsyncIterable and an Observable?

I've been hung up about this topic lately. It seems AsyncIterables and Observables both have stream-like qualities, though they are consumed a bit differently.
You could consume an async iterable like this
const myAsyncIterable = async function*() { yield 1; yield 2; yield 3; }
const main = async () => {
  for await (const number of myAsyncIterable()) {
    console.log(number)
  }
}
main()
You can consume an observable like this
const Observable = rxjs
const { map } = rxjs.operators
Observable.of(1, 2, 3).subscribe(x => console.log(x))
<script src="https://unpkg.com/rxjs/bundles/rxjs.umd.min.js"></script>
My overarching question is based on this RxJS PR:
If the observable emits at a pace faster than the loop completes, there will be a memory build up as the buffer gets more full. We could provide other methods that use different strategies (e.g. just the most recent value, etc), but leave this as the default. Note that the loop itself may have several awaits in it, that exacerbate the problem.
It seems to me that async iterators inherently do not have a backpressure problem, so is it right to implement Symbol.asyncIterator (@@asyncIterator) on an Observable and default to a backpressure strategy? Is there even a need for Observables in light of AsyncIterables?
Ideally, you could show me practical differences between AsyncIterables and Observables with code examples.
The main difference is which side decides when to iterate.
In the case of Async Iterators the client decides by calling await iterator.next(). The source decides when to resolve the promise, but the client has to ask for the next value first. Thus, the consumer "pulls" the data in from the source.
Observables register a callback function which is called by the observable immediately when a new value comes in. Thus, the source "pushes" to the consumer.
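As a minimal sketch of the two directions (assuming the rxjs UMD bundle from the question is loaded for the push side):
// Pull: the consumer decides when the next value is produced
(async () => {
  const iter = (async function*() { yield 1; yield 2; })();
  console.log(await iter.next()); // { value: 1, done: false } - produced on request
})();
// Push: the source decides when the consumer's callback runs
rxjs.of(1, 2).subscribe(x => console.log(x)); // values arrive as soon as the source emits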
An Observable could easily be used to consume an Async Iterator by using a Subject and mapping it to the next value of the async iterator. You would then call next on the Subject whenever you're ready to consume the next item. Here is a code sample
const pull = new Subject();
const output = pull.pipe(
  concatMap(() => from(iter.next())),
  map(val => {
    if (val.done) pull.complete();
    return val.value;
  })
);
// wherever you need this
output.pipe().subscribe(() => {
  // we're ready for the next item
  if (!pull.closed) pull.next();
});
This is the current implementation of Observable[Symbol.asyncIterator].
Here's a basic example of Symbol.asyncIterator implemented on an array:
const dummyPromise = (val, time) => new Promise(res => setTimeout(res, time * 1000, val));
const items = [1, 2, 3];
items[Symbol.asyncIterator] = async function* () {
  yield* await this.map(v => dummyPromise(v, v));
}
!(async () => {
  for await (const value of items) {
    console.log(value);
  }
})();
/*
  1 - after 1s
  2 - after 2s
  3 - after 3s
*/
The way I understand generators (sync generators) is that they are pausable functions: you can request a value now and another value 10 seconds later. Async generators follow the same approach, except that the values they produce are asynchronous, which means you have to await them.
For instance:
const dummyPromise = (val, time) => new Promise(res => setTimeout(res, time * 1000, val));
const items = [1, 2, 3];
items[Symbol.asyncIterator] = async function* () {
  yield* await this.map(v => dummyPromise(v, v));
}
const it = items[Symbol.asyncIterator]();
(async () => {
  // console.log(await it.next())
  await it.next();
  setTimeout(async () => {
    console.log(await it.next());
  }, 2000); // It will take 4s in total
})();
Going back to the Observable's implementation:
async function* coroutine<T>(source: Observable<T>) {
  const deferreds: Deferred<IteratorResult<T>>[] = [];
  const values: T[] = [];
  let hasError = false;
  let error: any = null;
  let completed = false;
  const subs = source.subscribe({
    next: value => {
      if (deferreds.length > 0) {
        deferreds.shift()!.resolve({ value, done: false });
      } else {
        values.push(value);
      }
    },
    error: err => { /* ... */ },
    complete: () => { /* ... */ },
  });
  try {
    while (true) {
      if (values.length > 0) {
        yield values.shift();
      } else if (completed) {
        return;
      } else if (hasError) {
        throw error;
      } else {
        const d = new Deferred<IteratorResult<T>>();
        deferreds.push(d);
        const result = await d.promise;
        if (result.done) {
          return;
        } else {
          yield result.value;
        }
      }
    }
  } catch (err) {
    throw err;
  } finally {
    subs.unsubscribe();
  }
}
From my understanding:
values is used to keep track of synchronous values. If you have of(1, 2, 3), the values array will contain [1, 2, 3] before execution even reaches while (true) { }. And because you're using for await (const v of ...), you'd be requesting values as if you were doing it.next(); it.next(); it.next() ....
Put differently, as soon as you consume one value from the iterator, you immediately request the next one, until the data producer has nothing left to offer.
deferreds is used for asynchronous values: at your first it.next(), the values array is empty (meaning the observable did not emit synchronously), so it falls back to the last else branch, which simply creates a promise, adds it to deferreds, and awaits that promise until it either resolves or rejects.
When the observable finally emits, deferreds won't be empty, so the awaited promise resolves with the newly arrived value.
const src$ = merge(
  timer(1000).pipe(mapTo(1)),
  timer(2000).pipe(mapTo(2)),
  timer(3000).pipe(mapTo(3)),
);
!(async () => {
  for await (const value of src$) {
    console.log(value);
  }
})();
StackBlitz
The observable stuff is mind-bending, and my understanding could be flawed. But an async iterator is just an iterator that returns promises, which can resolve to future events in a "live" stream of events (a hot observable). It could be implemented using a queue as follows.
function* iterateClickEvents(target) {
  const queue = []
  target.addEventListener('click', e => queue.shift()?.fulfill(e))
  while (true)
    yield new Promise(fulfill => queue.push({ fulfill }))
}
//use it
for await (const e of iterateClickEvents(myButton))
  handleEvent(e)
Then you can implement fluent operators like:
class FluentIterable {
  constructor(iterable) {
    this.iterable = iterable
  }
  filter(predicate) {
    return new FluentIterable(this.$filter(predicate))
  }
  async* $filter(predicate) {
    for await (const value of this.iterable)
      if (predicate(value))
        yield value
  }
  async each(fn) {
    for await (const value of this.iterable)
      fn(value)
  }
}
//use it
new FluentIterable(iterateClickEvents(document.body))
  .filter(e => e.target == myButton)
  .each(handleEvent)
  .catch(console.error)
https://codepen.io/ken107/pen/PojZjgB
You could implement a map operator that returns the results of inner iterators. Things get complicated from there.
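A map operator in the same style might look like the following sketch (it mirrors the $filter pattern above; the design is an assumption, not taken from the linked codepen):
class FluentIterable {
  constructor(iterable) {
    this.iterable = iterable
  }
  map(fn) {
    return new FluentIterable(this.$map(fn))
  }
  async* $map(fn) {
    for await (const value of this.iterable)
      yield fn(value) // transform each value as it arrives
  }
  async each(fn) {
    for await (const value of this.iterable)
      fn(value)
  }
}
// e.g.: new FluentIterable(iterateClickEvents(myButton)).map(e => e.target.id).each(console.log)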

Using promise in loop results in Promise failure

I'd like to reuse the same code in a loop. This code contains promises. However, when iterating, this code results in an error.
I've tried using for and while loops. There seems to be no issue when I use the for loop for a single iteration.
Here is a minimal version of my code:
var search_url = /* Some initial URL */
var glued = "";
for(var i = 0; i < 2; i++)
{
const prom = request(search_url)
.then(function success(response /* An array from a XMLHTTPRequest*/) {
if (/* Some condition */)
{
search_url = /* Gets next URL */
glued += processQuery(response[0]);
} else {
console.log("Done.")
}
})
.catch(function failure(err) {
console.error(err.message); // TODO: do something w error
})
}
document.getElementById('api-content').textContent = glued;
I expect the results to append to the variable glued but instead, I get an error: failure Promise.catch (async) (anonymous) after the first iteration of the loop.
Answer:
You can use the Symbol.iterator in accordance with for await to perform asynchronous execution of your promises. This can be packaged up into a constructor, in the example case it's called Serial (because we're going through promises one by one, in order)
function Serial(promises = []) {
  return {
    promises,
    resolved: [],
    addPromise: function(fn) {
      promises.push(fn);
    },
    resolve: async function(cb = i => i, err = (e) => console.log("trace: Serial.resolve " + e)) {
      try {
        for await (let p of this[Symbol.iterator]()) {}
        return this.resolved.map(cb);
      } catch (e) {
        err(e);
      }
    },
    [Symbol.iterator]: async function*() {
      this.resolved = [];
      for (let promise of this.promises) {
        let p = await promise().catch(e => console.log("trace: Serial[Symbol.iterator] ::" + e));
        this.resolved.push(p);
        yield p;
      }
    }
  }
}
What is the above?
It's a constructor called Serial.
It takes as an argument an array of Functions that return Promises.
The functions are stored in Serial.promises
It has an empty array stored in Serial.resolved - this will store the resolved promise requests.
It has two methods:
addPromise: Takes a Function that returns a Promise and adds it to Serial.promises
resolve: Asynchronously calls a custom Symbol.iterator. This iterator goes through every single promise, waits for it to be completed, and adds it to Serial.resolved. Once this is completed, it returns a map function that acts on the populated Serial.resolved array. This allows you to simply call resolve and then provide a callback for what to do with the array of responses, e.g. .resolve().then((resolved_requests) => /* do something with resolved_requests */)
Why does it work?
Although many people don't realize it, Symbol.iterator is much more powerful than standard for loops, for two big reasons.
The first reason, and the one that is applicable in this situation, is because it allows for asynchronous calls that can affect the state of the applied object.
The second reason is that it can be used to provide two different types of data from the same object. E.g., you may have an array that you would like to read the contents of:
let arr = [1,2,3,4];
You can use a for loop or forEach to get the data:
arr.forEach(v => console.log(v));
// 1, 2, 3, 4
But if you adjust the iterator:
arr[Symbol.iterator] = function* () {
  yield* this.map(v => v + 1);
};
You get this:
arr.forEach(v => console.log(v));
// 1, 2, 3, 4
for(let v of arr) console.log(v);
// 2, 3, 4, 5
This is useful for many different reasons, including timestamping requests/mapping references, etc. If you'd like to know more please take a look at the ECMAScript Documentation: For in and For Of Statements
Use:
It can be used by calling the constructor with an Array of functions that return Promises. You can also add Function Promises to the Object by using
new Serial([])
.addPromise(() => fetch(url))
It doesn't run the Function Promises until you use the .resolve method.
This means that you can add promises ad hoc if you'd like before you do anything with the asynchronous calls. E.g., these two are the same:
With addPromise:
let promises = new Serial([() => fetch(url), () => fetch(url2), () => fetch(url3)]);
promises.addPromise(() => fetch(url4));
promises.resolve().then((responses) => responses)
Without addPromise:
let promises = new Serial([() => fetch(url), () => fetch(url2), () => fetch(url3), () => fetch(url4)])
.resolve().then((responses) => responses)
Data:
Since I can't really replicate your data calls, I opted for JSONPlaceholder (a fake online rest api) to show the promise requests in action.
The data looks like this:
let searchURLs = ["https://jsonplaceholder.typicode.com/todos/1",
  "https://jsonplaceholder.typicode.com/todos/2",
  "https://jsonplaceholder.typicode.com/todos/3"]
  //since our constructor takes functions that return promises, I map over the URLs:
  .map(url => () => fetch(url));
To get the responses we can call the above data using our constructor:
let promises = new Serial(searchURLs)
  .resolve()
  .then((resolved_array) => console.log(resolved_array));
Our resolved_array gives us an array of XHR Response Objects. You can see that here:
function Serial(promises = []) {
  return {
    promises,
    resolved: [],
    addPromise: function(fn) {
      promises.push(fn);
    },
    resolve: async function(cb = i => i, err = (e) => console.log("trace: Serial.resolve " + e)) {
      try {
        for await (let p of this[Symbol.iterator]()) {}
        return this.resolved.map(cb);
      } catch (e) {
        err(e);
      }
    },
    [Symbol.iterator]: async function*() {
      this.resolved = [];
      for (let promise of this.promises) {
        let p = await promise().catch(e => console.log("trace: Serial[Symbol.iterator] ::" + e));
        this.resolved.push(p);
        yield p;
      }
    }
  }
}
let searchURLs = ["https://jsonplaceholder.typicode.com/todos/1", "https://jsonplaceholder.typicode.com/todos/2", "https://jsonplaceholder.typicode.com/todos/3"].map(url => () => fetch(url));
let promises = new Serial(searchURLs).resolve().then((resolved_array) => console.log(resolved_array));
Getting Results to Screen:
I opted to use a closure function to simply add text to an output HTMLElement.
This is added like this:
HTML:
<output></output>
JS:
let output = ((selector) => (text) => document.querySelector(selector).textContent += text)("output");
Putting it together:
If we use the output snippet along with our Serial object the final functional code looks like this:
let promises = new Serial(searchURLs).resolve()
.then((resolved) => resolved.map(response =>
response.json()
.then(obj => output(obj.title))));
What's happening above is this:
we input all our functions that return promises. new Serial(searchURLS)
we tell it to resolve all the requests .resolve()
after it resolves all the requests, we tell it to take the requests and map the array .then(resolved => resolved.map
we turn the responses into objects by using the .json method. This is necessary for JSON, but may not be necessary for you
after this is done, we use .then(obj => to tell it to do something with each computed response
we output the title to the screen using output(obj.title)
Result:
let output = ((selector) => (text) => document.querySelector(selector).textContent += text)("output");
function Serial(promises = []) {
  return {
    promises,
    resolved: [],
    addPromise: function(fn) {
      promises.push(fn);
    },
    resolve: async function(cb = i => i, err = (e) => console.log("trace: Serial.resolve " + e)) {
      try {
        for await (let p of this[Symbol.iterator]()) {}
        return this.resolved.map(cb);
      } catch (e) {
        err(e);
      }
    },
    [Symbol.iterator]: async function*() {
      this.resolved = [];
      for (let promise of this.promises) {
        let p = await promise().catch(e => console.log("trace: Serial[Symbol.iterator] ::" + e));
        this.resolved.push(p);
        yield p;
      }
    }
  }
}
let searchURLs = ["https://jsonplaceholder.typicode.com/todos/1", "https://jsonplaceholder.typicode.com/todos/2", "https://jsonplaceholder.typicode.com/todos/3"].map(url => () => fetch(url));
let promises = new Serial(searchURLs).resolve()
  .then((resolved) => resolved.map(response =>
    response.json()
      .then(obj => output(obj.title))));
<output></output>
Why go this route?
It's reusable, functional, and if you import the Serial Constructor you can keep your code slim and comprehensible. If this is a cornerstone of your code, it'll be easy to maintain and use.
Using it with your code:
I will add how to specifically use this with your code to fully answer your question and so that you may understand further.
NOTE glued will be populated with the requested data, but it's unnecessary. I left it in because you may have wanted it stored for a reason outside the scope of your question and I don't want to make assumptions.
//setup urls:
var search_urls = ["https://jsonplaceholder.typicode.com/todos/1", "https://jsonplaceholder.typicode.com/todos/2"];
var request = (url) => () => fetch(url);
let my_requests = new Serial(search_urls.map(request));
//setup glued (you don't really need to, but if for some reason you want the info stored...
var glued = "";
//setup helper function to grab title(this is necessary for my specific data)
var addTitle = (req) => req.json().then(obj => (glued += obj.title, document.getElementById('api-content').textContent = glued));
// put it all together:
my_requests.resolve().then(requests => requests.map(addTitle));
Using it with your code - Working Example:
function Serial(promises = []) {
  return {
    promises,
    resolved: [],
    addPromise: function(fn) {
      promises.push(fn);
    },
    resolve: async function(cb = i => i, err = (e) => console.log("trace: Serial.resolve " + e)) {
      try {
        for await (let p of this[Symbol.iterator]()) {}
        return this.resolved.map(cb);
      } catch (e) {
        err(e);
      }
    },
    [Symbol.iterator]: async function*() {
      this.resolved = [];
      for (let promise of this.promises) {
        let p = await promise().catch(e => console.log("trace: Serial[Symbol.iterator] ::" + e));
        this.resolved.push(p);
        yield p;
      }
    }
  }
}
//setup urls:
var search_urls = ["https://jsonplaceholder.typicode.com/todos/1", "https://jsonplaceholder.typicode.com/todos/2"];
var request = (url) => () => fetch(url);
let my_requests = new Serial(search_urls.map(request));
//setup glued (you don't really need to, but if for some reason you want the info stored...
var glued = "";
//setup helper function to grab title(this is necessary for my specific data)
var addTitle = (req) => req.json().then(obj => (glued += obj.title, document.getElementById('api-content').textContent = glued));
// put it all together:
my_requests.resolve().then(requests => requests.map(addTitle));
<div id="api-content"></div>
Final Note
It's likely that we will be seeing a prototypal change to the Promise object in the future that allows for easy serialization of Promises. Currently (7/15/19) there is a TC39 Proposal that does add a lot of functionality to the Promise object but it hasn't been fully vetted yet, and as with many ideas trapped within the Proposal stage, it's almost impossible to tell when they will be implemented into Browsers, or even if the idea will stagnate and fall off the radar.
Until then, workarounds like this are necessary and useful (the reason I even went through the motions of constructing this Serializer object was a transpiler I wrote in Node, but it's been very helpful beyond that!), but do keep an eye out for any changes, because you never know!
Hope this helps! Happy Coding!
Your best bet is probably going to be building up that glued variable with recursion.
Here's an example using recursion with a callback function:
var glued = "";
requestRecursively(/* Some initial URL string */, function() {
  document.getElementById('api-content').textContent = glued;
});
function requestRecursively(url, cb) {
  request(url).then(function (response) {
    if (/* Some condition */) {
      glued += processQuery(response[0]);
      var next = /* Gets next URL string */;
      if (next) {
        // There's another URL. Make another request.
        requestRecursively(next, cb);
      } else {
        // We're done. Invoke the callback.
        cb();
      }
    } else {
      console.log("Done.");
    }
  }).catch(function (err) {
    console.error(err.message);
  });
}
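If you prefer a promise over a callback, the same recursion can return one; a sketch, where getNextUrl is a hypothetical helper standing in for the elided "Gets next URL string" logic:
function requestRecursively(url) {
  return request(url).then(function (response) {
    glued += processQuery(response[0]);
    var next = getNextUrl(response); // hypothetical: returns the next URL or null
    return next ? requestRecursively(next) : glued; // resolve with the final result
  });
}
requestRecursively(initialUrl).then(function (result) {
  document.getElementById('api-content').textContent = result;
});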

resolving Promises sequentially

EDIT: This is for an Electron project, with a local server spun up on the user's system. So, any concerns about what happens if multiple users attempt simultaneous access can be ignored!
My client code is generating an array of JavaScript objects, which I'm calling packets. There can be potentially infinite packets, although between 1 and 10 is the most common use-case.
I need to make API calls to a backend route, one per packet, passing a packet as an argument to each call.
However, the backend does some heavy computational work on each packet, to the point where attempting to crunch more than 2-3 packets at once crashes the server.
Is it possible to resolve Promises synchronously, such that the second Promise only fires when the first resolves, the third firing after the second, and so on?
It's my understanding that Promise.all() makes all calls simultaneously, which doesn't help me here.
(I also know that this is an anti-pattern, but for my specific set of requirements, I'm not sure what else to do)
I know this is purely abstract, but any thoughts would be appreciated!!!
Get weird with Promises
An async queue, a spin-off of one of my previous answers; I've added random completion times to simulate a real environment:
class Queue {
  constructor() {
    this.queue = [];
  }
  enqueue(obj) {
    return this.queue.push(obj);
  }
  dequeue() {
    return this.queue.shift();
  }
  hasWork() {
    return (this.queue.length > 0);
  }
}
class AsyncQueue extends Queue {
  constructor(job) {
    super();
    this.job = job;
  }
  process(cb) {
    return this.job(this.dequeue()).then(data => {
      cb(data);
      if (this.hasWork())
        return this.process(cb);
    });
  }
}
//MUST RETURN Promise
function work() {
  var duration = chooseDelay();
  console.log('START JOB, I.E., MAKE REQUEST (will take %s)', duration);
  return t_o(duration);
}
function report() {
  console.log('JOB DONE');
}
function done() {
  console.log('ALL WORK DONE');
}
function t_o(delay) {
  return new Promise(function (resolve, reject) {
    setTimeout(function () {
      resolve();
    }, delay);
  });
}
function chooseDelay() {
  var delays = [200, 1000, 4000, 100, 50, 7000];
  return delays[parseInt(Math.random() * 100) % delays.length];
}
var q = new AsyncQueue(work);
for (var packet = 0; packet < 10; ++packet)
  q.enqueue(packet);
q.process(report).then(done);
As an alternative to an otherwise good answer, here's a really simple queue that works (work function shamelessly copied and expanded from other answer)
// Here is the queue "creator"
let promiseQueue = fn => {
  let q = Promise.resolve();
  return (...args) => q = q.then(() => fn(...args));
};
// that's it, that's the whole code for a promise queue
// here we create a queue
var q = promiseQueue(work);
// simple mock asynchronous function
function work({ index, data }) {
  var duration = parseInt(Math.random() * 100) + 100;
  console.log('START JOB %s, I.E., MAKE REQUEST (will take %s) and should result with %s', index, duration, (index + 1) * data);
  return new Promise(resolve => setTimeout(resolve, duration)) // mock a delay
    .then(() => ({ index, result: (index + 1) * data })); // and some "processing"
}
// simulating two "chunks" of packets, generated a millisecond apart, but still, the sequence will be maintained
setTimeout(() => {
  var packets = Array.from({ length: 10 }, (_, index) => ({ index, data: parseInt(Math.random() * 10000) }));
  var promises = packets.map(packet => q(packet));
  // the results in Promise.all are all the results of this batch of "packets"
  Promise.all(promises).then(results => console.log(results));
}, 100);
setTimeout(() => {
  var packets = Array.from({ length: 10 }, (_, index) => ({ index: index + 10, data: parseInt(Math.random() * 10000) }));
  var promises = packets.map(packet => q(packet));
  Promise.all(promises).then(results => console.log(results));
}, 101);
A simple function to consume promises sequentially (note: promises start executing as soon as they are created, so this only reads their results in order):
const sequentiallyExecuting = (promises) => {
  let promise = Promise.resolve();
  promises.forEach((task) => {
    promise = promise.then((data) => {
      return task;
    })
  });
  return promise;
}
// pass an array of promises to this function
sequentiallyExecuting(promises).then((data) => {
  console.log("all requests completed sequentially");
})
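If the goal is to not start one packet's request until the previous one has finished (as the question requires), the array should hold functions that return promises rather than promises themselves; a minimal sketch:
const delay = ms => new Promise(res => setTimeout(res, ms));
const runSequentially = (tasks) =>
  tasks.reduce((chain, task) => chain.then(task), Promise.resolve());
// each element is a function, so nothing starts until the chain reaches it
runSequentially([
  () => delay(100).then(() => console.log('packet 1 done')),
  () => delay(100).then(() => console.log('packet 2 done')),
]).then(() => console.log('all packets processed'));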
'use strict';
// job to be done
function job(params) {
  return function () {
    console.log('job started', params);
    return new Promise(function (resolve) {
      setTimeout(function () {
        console.log('job finished');
        resolve();
      }, 1000);
    })
  }
}
// data to be processed sequentially
var params = [1, 2, 3, 4, 5];
// reduce data to a Promise sequence
params.reduce(function (cum, cur) {
  return cum.then(job(cur));
}, Promise.resolve());
With async/await it becomes trivial (with the same caveat: the promises are already running, so this only awaits their results in order):
while (promiseArray.length > 0)
  await promiseArray.shift();

How to make q.all execute in order like async.series [duplicate]

Consider the following code that reads an array of files in a serial/sequential manner. readFiles returns a promise, which is resolved only once all files have been read in sequence.
var readFile = function(file) {
  ... // Returns a promise.
};
var readFiles = function(files) {
  return new Promise((resolve, reject) => {
    var readSequential = function(index) {
      if (index >= files.length) {
        resolve();
      } else {
        readFile(files[index]).then(function() {
          readSequential(index + 1);
        }).catch(reject);
      }
    };
    readSequential(0); // Start with the first file!
  });
};
The above code works, but I don't like having to do recursion for things to occur sequentially. Is there a simpler way that this code can be re-written so that I don't have to use my weird readSequential function?
Originally I tried to use Promise.all, but that caused all of the readFile calls to happen concurrently, which is not what I want:
var readFiles = function(files) {
  return Promise.all(files.map(function(file) {
    return readFile(file);
  }));
};
Update 2017: I would use an async function if the environment supports it:
async function readFiles(files) {
  for (const file of files) {
    await readFile(file);
  }
};
If you'd like, you can defer reading the files until you need them using an async generator (if your environment supports it):
async function* readFiles(files) {
  for (const file of files) {
    yield await readFile(file);
  }
};
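To consume that generator, for await...of works; a small usage sketch (file names invented for illustration):
(async () => {
  for await (const contents of readFiles(['a.txt', 'b.txt'])) {
    console.log(contents); // each file is only read when the loop asks for it
  }
})();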
Update: On second thought, I might use a for loop instead:
var readFiles = function(files) {
  var p = Promise.resolve(); // Q() in q
  files.forEach(file => {
    p = p.then(() => readFile(file));
  });
  return p;
};
Or more compactly, with reduce:
var readFiles = function(files) {
  return files.reduce((p, file) => {
    return p.then(() => readFile(file));
  }, Promise.resolve()); // initial
};
In other promise libraries (like when and Bluebird) you have utility methods for this.
For example, Bluebird would be:
var Promise = require("bluebird");
var fs = Promise.promisifyAll(require("fs"));
var readAll = Promise.resolve(files).map(fs.readFileAsync, { concurrency: 1 });
// if the order matters, you can use Promise.each instead and omit the concurrency param
readAll.then(function(allFileContents) {
  // do stuff with the contents of the files
});
Although there is really no reason not to use async await today.
Here is how I prefer to run tasks in series.
function runSerial() {
  var that = this;
  // task1 is a function that returns a promise (and immediately starts executing)
  // task2 is a function that returns a promise (and immediately starts executing)
  return Promise.resolve()
    .then(function() {
      return that.task1();
    })
    .then(function() {
      return that.task2();
    })
    .then(function() {
      console.log(" ---- done ----");
    });
}
What about cases with more tasks? Like, 10?
function runSerial(tasks) {
  var result = Promise.resolve();
  tasks.forEach(task => {
    result = result.then(() => task());
  });
  return result;
}
This question is old, but we live in a world of ES6 and functional JavaScript, so let's see how we can improve.
Because promises start executing immediately, we can't just create an array of promises; they would all fire off in parallel.
Instead, we need to create an array of functions that returns a promise. Each function will then be executed sequentially, which then starts the promise inside.
We can solve this a few ways, but my favorite way is to use reduce.
It gets a little tricky using reduce in combination with promises, so I have broken down the one liner into some smaller digestible bites below.
The essence of this function is to use reduce starting with an initial value of Promise.resolve([]), or a promise containing an empty array.
This promise will then be passed into the reduce method as promise. This is the key to chaining each promise together sequentially. The next promise to execute is func and when the then fires, the results are concatenated and that promise is then returned, executing the reduce cycle with the next promise function.
Once all promises have executed, the returned promise will contain an array of all the results of each promise.
ES6 Example (one liner)
/*
 * serial executes Promises sequentially.
 * @param {funcs} An array of funcs that return promises.
 * @example
 * const urls = ['/url1', '/url2', '/url3']
 * serial(urls.map(url => () => $.ajax(url)))
 *   .then(console.log.bind(console))
 */
const serial = funcs =>
  funcs.reduce((promise, func) =>
    promise.then(result => func().then(Array.prototype.concat.bind(result))), Promise.resolve([]))
ES6 Example (broken down)
// broken down for easier understanding
const concat = list => Array.prototype.concat.bind(list)
const promiseConcat = f => x => f().then(concat(x))
const promiseReduce = (acc, x) => acc.then(promiseConcat(x))
/*
 * serial executes Promises sequentially.
 * @param {funcs} An array of funcs that return promises.
 * @example
 * const urls = ['/url1', '/url2', '/url3']
 * serial(urls.map(url => () => $.ajax(url)))
 *   .then(console.log.bind(console))
 */
const serial = funcs => funcs.reduce(promiseReduce, Promise.resolve([]))
Usage:
// first take your work
const urls = ['/url1', '/url2', '/url3', '/url4']
// next convert each item to a function that returns a promise
const funcs = urls.map(url => () => $.ajax(url))
// execute them serially
serial(funcs)
.then(console.log.bind(console))
To do this simply in ES6:
function readFiles(files) {
  // Create a new empty promise (don't do that with real people ;)
  var sequence = Promise.resolve();
  // Loop over each file, and add on a promise to the
  // end of the 'sequence' promise.
  files.forEach(file => {
    // Chain one computation onto the sequence
    sequence = sequence
      .then(() => performComputation(file))
      .then(result => doSomething(result));
    // Resolves for each file, one at a time.
  })
  // This will resolve after the entire chain is resolved
  return sequence;
}
Addition example
const addTwo = async () => 2;
const addThree = async (inValue) => new Promise((resolve) => setTimeout(() => resolve(inValue + 3), 2000));
const addFour = (inValue) => new Promise((res) => setTimeout(() => res(inValue + 4), 1000));
const addFive = async (inValue) => inValue + 5;
// Function which handles promises from above
async function sequenceAddition() {
  let sum = await [addTwo, addThree, addFour, addFive].reduce(
    (promise, currPromise) => promise.then((val) => currPromise(val)),
    Promise.resolve()
  );
  console.log('sum:', sum); // 2 + 3 + 4 + 5 = 14
}
// Run function. See console for result.
sequenceAddition();
General syntax to use reduce()
function sequence(tasks, fn) {
  return tasks.reduce((promise, task) => promise.then(() => fn(task)), Promise.resolve());
}
UPDATE
items-promise is a ready to use NPM package doing the same.
I've had to run a lot of sequential tasks and used these answers to forge a function that would take care of handling any sequential task...
function one_by_one(objects_array, iterator, callback) {
  var start_promise = objects_array.reduce(function (prom, object) {
    return prom.then(function () {
      return iterator(object);
    });
  }, Promise.resolve()); // initial
  if (callback) {
    start_promise.then(callback);
  } else {
    return start_promise;
  }
}
The function takes 2 arguments + 1 optional. First argument is the array on which we will be working. The second argument is the task itself, a function that returns a promise, the next task will be started only when this promise resolves. The third argument is a callback to run when all tasks have been done. If no callback is passed, then the function returns the promise it created so we can handle the end.
Here's an example of usage:
var filenames = ['1.jpg', '2.jpg', '3.jpg'];
var resize_task = function(filename) {
  // return promise of async resizing with filename
};
one_by_one(filenames, resize_task);
Hope it saves someone some time...
With Async/Await (ES2017+)
function downloadFile(fileUrl) { ... } // This function returns a Promise
async function main() {
  var filesList = [...];
  for (const file of filesList) {
    await downloadFile(file);
  }
}
(you must use a for loop, and not forEach, because await does not pause a forEach callback)
Without Async/Await (using Promise)
function downloadFile(fileUrl) { ... } // This function returns a Promise
function downloadRecursion(filesList, index) {
  index = index || 0;
  if (index < filesList.length) {
    downloadFile(filesList[index]).then(function() {
      index++;
      downloadRecursion(filesList, index); // self invocation - recursion!
    });
  } else {
    return Promise.resolve();
  }
}
function main() {
  var filesList = [...];
  downloadRecursion(filesList);
}
My preferred solution:
function processArray(arr, fn) {
  return arr.reduce(
    (p, v) => p.then((a) => fn(v).then(r => a.concat([r]))),
    Promise.resolve([])
  );
}
It's not fundamentally different from others published here but:
Applies the function to items in series
Resolves to an array of results
Doesn't require async/await (support is still quite limited, circa 2017)
Uses arrow functions; nice and concise
Example usage:
const numbers = [0, 4, 20, 100];
const multiplyBy3 = (x) => new Promise(res => res(x * 3));
// Prints [ 0, 12, 60, 300 ]
processArray(numbers, multiplyBy3).then(console.log);
Tested on reasonably current Chrome (v59) and NodeJS (v8.1.2).
First, you need to understand that a promise is executed at the time of creation.
So for example if you have a code:
["a","b","c"].map(x => returnsPromise(x))
You need to change it to:
["a","b","c"].map(x => () => returnsPromise(x))
Then we need to sequentially chain promises:
["a", "b", "c"].map(x => () => returnsPromise(x))
  .reduce(
    (before, after) => before.then(_ => after()),
    Promise.resolve()
  )
Executing after() ensures that each promise is created (and executed) only when its turn comes.
Nicest solution that I was able to figure out was with bluebird promises. You can just do Promise.resolve(files).each(fs.readFileAsync); which guarantees that promises are resolved sequentially in order.
With async/await of ES2017 (and maybe some features of ES2018), this can be reduced to this form:
function readFile(file) {
  ... // Returns a promise.
}
async function readFiles(files) {
  for (const file of files) {
    await readFile(file)
  }
}
I haven't seen another answer express that simplicity. The OP said parallel execution of readFile was not desired. However, with IO like this it really makes sense to not be blocking on a single file read, while keeping the loop execution synchronous (you don't want to do the next step until all files have been read). Since I just learned about this and am a bit excited about it, I'll share that approach of parallel asynchronous execution of readFile with overall synchronous execution of readFiles.
async function readFiles(files) {
  await Promise.all(files.map(readFile))
}
Isn't that a thing of beauty?
This is a slight variation of another answer above. Using native Promises:
function inSequence(tasks) {
return tasks.reduce((p, task) => p.then(task), Promise.resolve())
}
Explanation
If you have these tasks [t1, t2, t3], then the above is equivalent to Promise.resolve().then(t1).then(t2).then(t3). It's the behavior of reduce.
How to use
First, you need to construct a list of tasks! A task is a function that accepts no arguments. If you need to pass arguments to your function, use bind or another method to create a task. For example:
var tasks = files.map(file => processFile.bind(null, file))
inSequence(tasks).then(...)
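A minimal runnable sketch of the above (task names and timings are hypothetical):
var makeTask = function (label) {
    return function () {
        return new Promise(function (resolve) {
            setTimeout(function () {
                console.log('finished', label);
                resolve(label);
            }, 100);
        });
    };
};

inSequence([makeTask('t1'), makeTask('t2'), makeTask('t3')])
    .then(function (last) {
        console.log('sequence done, last result:', last); // t3
    });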
I created this simple method on the Promise object:
Create and add a Promise.sequence method to the Promise object
Promise.sequence = function (chain) {
    var results = [];
    var entries = chain;
    if (entries.entries) entries = entries.entries();
    return new Promise(function (yes, no) {
        var next = function () {
            var entry = entries.next();
            if (entry.done) yes(results);
            else {
                // entry.value is an [index, task] pair; run the task,
                // store its resolved value, then move on to the next one
                entry.value[1]().then(function (result) {
                    results.push(result);
                    next();
                }, function () { no(results); });
            }
        };
        next();
    });
};
Usage:
var todo = [];
todo.push(firstTask);    // each entry is a function that returns a promise
if (someCriterium) todo.push(optionalTask);
todo.push(lastTask);
// Invoking them
Promise.sequence(todo)
.then(function(results) {}, function(results) {});
The best thing about this extension to the Promise object is that it is consistent with the style of promises. Promise.all and Promise.sequence are invoked the same way, but have different semantics.
Caution
Sequential running of promises is usually not a very good way to use promises. It's usually better to use Promise.all, and let the browser run the code as fast as possible. However, there are real use cases for it, for example when writing a mobile app in JavaScript.
My answer is based on https://stackoverflow.com/a/31070150/7542429.
Promise.series = function series(arrayOfPromises) {
var results = [];
return arrayOfPromises.reduce(function(seriesPromise, promise) {
return seriesPromise.then(function() {
return promise
.then(function(result) {
results.push(result);
});
});
}, Promise.resolve())
.then(function() {
return results;
});
};
This solution returns the results as an array like Promise.all().
Usage:
Promise.series([array of promises])
.then(function(results) {
// do stuff with results here
});
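One caveat worth noting: the promises passed in are already running, so Promise.series sequences the collection of results, not the execution itself. A quick hedged demo (delayed is a stand-in helper):
var delayed = function (value, ms) {
    return new Promise(function (resolve) {
        setTimeout(function () { resolve(value); }, ms);
    });
};

Promise.series([delayed('a', 300), delayed('b', 100), delayed('c', 200)])
    .then(function (results) {
        console.log(results); // ['a', 'b', 'c'], in input order
    });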
Use Array.prototype.reduce, and remember to wrap your promises in a function, otherwise they will already be running!
// array of Promise providers
const providers = [
function(){
return Promise.resolve(1);
},
function(){
return Promise.resolve(2);
},
function(){
return Promise.resolve(3);
}
]
const inSeries = function(providers){
const seed = Promise.resolve(null);
return providers.reduce(function(a,b){
return a.then(b);
}, seed);
};
Nice and easy. You should be able to reuse the same seed for performance, etc.
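A hedged usage sketch; the chain resolves with the last provider's value:
inSeries(providers).then(function (result) {
    console.log(result); // 3
});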
It's important to guard against empty arrays or arrays with only 1 element when using reduce, so this technique is your best bet:
const providers = [
function(v){
return Promise.resolve(v+1);
},
function(v){
return Promise.resolve(v+2);
},
function(v){
return Promise.resolve(v+3);
}
]
const inSeries = function(providers, initialVal){
if(providers.length < 1){
return Promise.resolve(null)
}
// note: shift() mutates the input array, consuming the first provider as the seed
return providers.reduce((a,b) => a.then(b), providers.shift()(initialVal));
};
and then call it like:
inSeries(providers, 1).then(v => {
console.log(v); // 7
});
Using modern ES:
const series = async (tasks) => {
const results = [];
for (const task of tasks) {
const result = await task;
results.push(result);
}
return results;
};
// ... later, inside an async function:
const fileContents = await series(files.map((file) => readFile(file)));
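Note that files.map(...) starts every read immediately; the loop only awaits the results in order. If the tasks should also start one at a time, a hedged variant is to map to thunks and invoke them inside the loop:
const seriesOfThunks = async (thunks) => {
    const results = [];
    for (const thunk of thunks) {
        results.push(await thunk()); // next task starts only after this one settles
    }
    return results;
};
// const contents = await seriesOfThunks(files.map((file) => () => readFile(file)));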
Most of the answers don't include the results of ALL promises individually, so in case someone is looking for this particular behaviour, this is a possible solution using recursion.
It follows the style of Promise.all:
Returns the array of results in the .then() callback.
If some promise fails, the error is returned immediately in the .catch() callback.
const promiseEach = (arrayOfTasks) => {
let results = []
return new Promise((resolve, reject) => {
const resolveNext = (arrayOfTasks) => {
// If all tasks are already resolved, return the final array of results
if (arrayOfTasks.length === 0) return resolve(results)
// Extract the first task and run it
const first = arrayOfTasks.shift()
first().then((res) => {
results.push(res)
resolveNext(arrayOfTasks)
}).catch((err) => {
reject(err)
})
}
resolveNext(arrayOfTasks)
})
}
// Let's try it 😎
const promise = (time, shouldThrowError) => new Promise((resolve, reject) => {
const timeInMs = time * 1000
setTimeout(()=>{
console.log(`Waited ${time} secs`)
if (shouldThrowError) return reject(new Error('Promise failed'))
resolve(time)
}, timeInMs)
})
const tasks = [() => promise(1), () => promise(2)]
promiseEach(tasks)
.then((res) => {
console.log(res) // [1, 2]
})
// Oops some promise failed
.catch((error) => {
console.log(error)
})
Note about the tasks array declaration:
In this case it is not possible to use the notation Promise.all would accept:
const tasks = [promise(1), promise(2)]
Instead, we have to use:
const tasks = [() => promise(1), () => promise(2)]
The reason is that JavaScript starts executing a promise immediately after it is declared. Methods like Promise.all only check whether each promise is fulfilled or rejected; they don't start the execution themselves. Wrapping the call as () => promise() delays execution until the wrapper is invoked.
You can use this function, which takes a list of promise factories:
function executeSequentially(promiseFactories) {
var result = Promise.resolve();
promiseFactories.forEach(function (promiseFactory) {
result = result.then(promiseFactory);
});
return result;
}
A promise factory is just a simple function that returns a Promise:
function myPromiseFactory() {
return somethingThatCreatesAPromise();
}
It works because a promise factory doesn't create the promise until it's asked to. It works the same way as a then function – in fact, it's the same thing!
You don't want to operate over an array of promises at all. Per the Promise spec, as soon as a promise is created, it begins executing. So what you really want is an array of promise factories...
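For illustration, a hypothetical usage sketch, with factories standing in for somethingThatCreatesAPromise:
function makeFactory(id) {
    return function () {
        return new Promise(function (resolve) {
            setTimeout(function () {
                console.log('finished', id);
                resolve(id);
            }, 100);
        });
    };
}

executeSequentially([makeFactory(1), makeFactory(2), makeFactory(3)])
    .then(function (last) {
        console.log('all done, last result:', last); // 3
    });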
If you want to learn more on Promises, you should check this link:
https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html
If you want, you can use reduce to make a sequential promise chain, for example:
[2,3,4,5,6,7,8,9].reduce((chain, page) => {
    // the array items only drive the number of steps; the running value
    // comes from the previous promise in the chain
    return chain.then((value) => {
        console.log(value);
        return Promise.resolve(value + 1);
    });
}, Promise.resolve(1));
It will always run sequentially, logging 1 through 8.
I really liked @joelnet's answer, but to me that style of coding is a little tough to digest, so I spent a couple of days figuring out how I would express the same solution in a more readable manner. This is my take, just with a different syntax and some comments.
// first take your work
const urls = ['/url1', '/url2', '/url3', '/url4']
// next convert each item to a function that returns a promise
const functions = urls.map((url) => {
// For every url we return a new function
return () => {
return new Promise((resolve) => {
// random wait in milliseconds
const randomWait = parseInt((Math.random() * 1000),10)
console.log('waiting to resolve in ms', randomWait)
setTimeout(()=>resolve({randomWait, url}),randomWait)
})
}
})
const promiseReduce = (acc, next) => {
// we wait for the accumulator to resolve its promise
return acc.then((accResult) => {
// and then we return a new promise that will become
// the new value for the accumulator
return next().then((nextResult) => {
// that eventually will resolve to a new array containing
// the value of the two promises
return accResult.concat(nextResult)
})
})
};
// the accumulator will always be a promise that resolves to an array
const accumulator = Promise.resolve([])
// we call reduce with the reduce function and the accumulator initial value
functions.reduce(promiseReduce, accumulator)
.then((result) => {
// let's display the final value here
console.log('=== The final result ===')
console.log(result)
})
As Bergi noticed, I think the best and clearest solution is to use BlueBird.each, code below:
const BlueBird = require('bluebird');
BlueBird.each(files, fs.readFileAsync);
I find myself coming back to this question many times, and the answers never quite give me what I need, so I'm putting this here for anyone who needs it too. The code below runs promises sequentially (one after another), where each round can consist of multiple calls:
async function sequence(list, cb) {
const result = [];
await list.reduce(async (promise, item) => promise
.then(() => cb(item))
.then((res) => result.push(res)
), Promise.resolve());
return result;
}
Showcase:
<script src="https://cdnjs.cloudflare.com/ajax/libs/axios/0.15.3/axios.min.js"></script>
<script src="https://unpkg.com/#babel/standalone#7/babel.min.js"></script>
<script type="text/babel">
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
async function readFile(url, index) {
console.log('Running index: ', index);
// First action
const firstTime = await axios.get(url);
console.log('First API response: ', firstTime.data.activity);
// Second action
await sleep(1000);
// Third action
const secondTime = await axios.get(url);
console.log('Second API response: ', secondTime.data.activity);
// Fourth action
await sleep(1000);
return secondTime.data;
}
async function sequence(urls, fn) {
const result = [];
await urls.reduce(async (promise, url, index) => promise.then(() => fn(url, index)).then((res) => result.push(res)), Promise.resolve());
return result;
}
const urls = [
'https://www.boredapi.com/api/activity',
'https://www.boredapi.com/api/activity',
'https://www.boredapi.com/api/activity',
];
(async function init() {
const result = await sequence(urls, readFile);
console.log('result', result);
})()
</script>
I use the following code to extend the Promise object. It handles rejection of the promises and returns an array of results.
Code
/*
Runs tasks in sequence and resolves a promise upon finish
tasks: an array of functions that return a promise upon call.
parameters: an array of arrays corresponding to the parameters to be passed on each function call.
context: Object to use as context to call each function. (The 'this' keyword that may be used inside the function definition)
*/
Promise.sequence = function(tasks, parameters = [], context = null) {
return new Promise((resolve, reject)=>{
var nextTask = tasks.splice(0,1)[0].apply(context, parameters[0]); //Dequeue and call the first task
var output = new Array(tasks.length + 1);
var errorFlag = false;
tasks.forEach((task, index) => {
nextTask = nextTask.then(r => {
output[index] = r;
return task.apply(context, parameters[index+1]);
}, e=>{
output[index] = e;
errorFlag = true;
return task.apply(context, parameters[index+1]);
});
});
// Last task
nextTask.then(r=>{
output[output.length - 1] = r;
if (errorFlag) reject(output); else resolve(output);
})
.catch(e=>{
output[output.length - 1] = e;
reject(output);
});
});
};
Example
function functionThatReturnsAPromise(n) {
return new Promise((resolve, reject)=>{
//Emulating real life delays, like a web request
setTimeout(()=>{
resolve(n);
}, 1000);
});
}
var arrayOfArguments = [['a'],['b'],['c'],['d']];
var arrayOfFunctions = (new Array(4)).fill(functionThatReturnsAPromise);
Promise.sequence(arrayOfFunctions, arrayOfArguments)
.then(console.log)
.catch(console.error);
Your approach is not bad, but it does have two issues: it swallows errors and it employs the Explicit Promise Construction Antipattern.
You can solve both of these issues, and make the code cleaner, while still employing the same general strategy:
var readFile = function(file) {
... // Returns a promise.
};
var readFiles = function(files) {
var readSequential = function(index) {
if (index < files.length) {
return readFile(files[index]).then(function() {
return readSequential(index + 1);
});
}
};
// using Promise.resolve() here in case files.length is 0
return Promise.resolve(readSequential(0)); // Start!
};
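A hedged usage sketch (the file names are hypothetical; readFile is assumed to resolve when its file is done):
readFiles(['a.txt', 'b.txt', 'c.txt']).then(function () {
    console.log('all files read, one after another');
});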
This is the sequentially implementation I use in various projects:
const files = [file1, file2, file3];
const fileContents = await sequentially(readFile, files); // inside an async context
// somewhere else in the code:
export const sequentially = async <T, P>(
toPromise: (element: T) => Promise<P>,
elements: T[]
): Promise<P[]> => {
const results: P[] = [];
await elements.reduce(async (sequence, element) => {
await sequence;
results.push(await toPromise(element));
}, Promise.resolve());
return results;
};
Here is my Angular/TypeScript approach, using RxJS:
Given an array of URL strings, convert it into an Observable using the from function.
Use pipe to wrap the Ajax request, immediate response logic, any desired delay, and error handling.
Inside of the pipe, use concatMap to serialize the requests. Otherwise, using JavaScript's forEach or map would make the requests at the same time.
Use RxJS ajax to make the call, and also to add any desired delay after each call returns.
Working example: https://stackblitz.com/edit/rxjs-bnrkix?file=index.ts
The code looks like this (I left in some extras so you can choose what to keep or discard):
import { ajax } from 'rxjs/ajax';
import { catchError, concatMap, delay, from, of, map } from 'rxjs';
const urls = [
'https://randomuser.me/api/',
'https://randomuser.me/api/',
'https://randomuser.me/api/',
];
const delayAfterCall = 500;
from(urls)
.pipe(
concatMap((url: string) => {
return ajax.getJSON(url).pipe(
map((response) => {
console.log('Done! Received:', response);
return response;
}),
catchError((error) => {
console.error('Error: ', error);
return of(error);
}),
delay(delayAfterCall)
);
})
)
.subscribe((response) => {
console.log('received email:', response.results[0].email);
});
On the basis of the question's title, "Resolve promises one after another (i.e. in sequence)?", we might understand that the OP is more interested in the sequential handling of promises on settlement than sequential calls per se.
This answer is offered :
to demonstrate that sequential calls are not necessary for sequential handling of responses.
to expose viable alternative patterns to this page's visitors - including the OP if he is still interested over a year later.
despite the OP's assertion that he does not want to make calls concurrently, which may genuinely be the case, but may equally be an assumption born of the desire for sequential handling of responses, as the title implies.
If concurrent calls are genuinely not wanted then see Benjamin Gruenbaum's answer which covers sequential calls (etc) comprehensively.
If however, you are interested (for improved performance) in patterns which allow concurrent calls followed by sequential handling of responses, then please read on.
It's tempting to think you have to use Promise.all(arr.map(fn)).then(fn) (as I have done many times) or a Promise lib's fancy sugar (notably Bluebird's). However (with credit to this article), an arr.map(fn).reduce(fn) pattern will do the job, with the advantages that it:
works with any promise lib, even pre-compliant versions of jQuery; only .then() is used.
affords the flexibility to skip-over-error or stop-on-error, whichever you want, with a one-line mod.
Here it is, written for Q.
var readFiles = function(files) {
return files.map(readFile) //Make calls in parallel.
.reduce(function(sequence, filePromise) {
return sequence.then(function() {
return filePromise;
}).then(function(file) {
//Do stuff with file ... in the correct sequence!
}, function(error) {
console.log(error); //optional
return sequence;//skip-over-error. To stop-on-error, `return error` (jQuery), or `throw error` (Promises/A+).
});
}, Q()).then(function() {
// all done.
});
};
Note: only that one fragment, Q(), is specific to Q. For jQuery you need to ensure that readFile() returns a jQuery promise. With A+ libs, foreign promises will be assimilated.
The key here is the reduction's sequence promise, which sequences the handling of the readFile promises but not their creation.
And once you have absorbed that, it's maybe slightly mind-blowing when you realise that the .map() stage isn't actually necessary! The whole job, parallel calls plus serial handling in the correct order, can be achieved with reduce() alone, plus the added advantage of further flexibility to:
convert from parallel async calls to serial async calls by simply moving one line, which can be useful during development.
Here it is, for Q again.
var readFiles = function(files) {
return files.reduce(function(sequence, f) {
var filePromise = readFile(f);//Make calls in parallel. To call sequentially, move this line down one.
return sequence.then(function() {
return filePromise;
}).then(function(file) {
//Do stuff with file ... in the correct sequence!
}, function(error) {
console.log(error); //optional
return sequence;//Skip over any errors. To stop-on-error, `return error` (jQuery), or `throw error` (Promises/A+).
});
}, Q()).then(function() {
// all done.
});
};
That's the basic pattern. If you wanted also to deliver data (eg the files or some transform of them) to the caller, you would need a mild variant.
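For completeness, here is a hedged sketch of that mild variant (Q-style, as above; error handling omitted for brevity): the sequence promise itself carries the accumulated files, so the caller receives them in input order:
var readAndCollect = function (files) {
    return files.reduce(function (sequence, f) {
        var filePromise = readFile(f); // calls still made in parallel
        return sequence.then(function (collected) {
            return filePromise.then(function (file) {
                collected.push(file); // results stored in input order
                return collected;
            });
        });
    }, Q([])); // Q([]) seeds the sequence with an empty results array
};
// readAndCollect(files).then(function (allFiles) { ... });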
If someone else needs a guaranteed, strictly sequential way of resolving Promises when performing CRUD operations, you can use the following code as a basis.
As long as you add 'return' before each function call that produces a Promise, and use this example as a basis, the next .then() call will consistently start after the completion of the previous one:
getRidOfOlderShoutsPromise = () => {
return readShoutsPromise('BEFORE')
.then(() => {
return deleteOlderShoutsPromise();
})
.then(() => {
return readShoutsPromise('AFTER')
})
.catch(err => console.log(err.message));
}
deleteOlderShoutsPromise = () => {
return new Promise ( (resolve, reject) => {
console.log("in deleteOlderShouts");
let d = new Date();
let ninetySecondsAgo = d - 1000 * 90;
All_Shouts.deleteMany({ dateTime: {$lt: ninetySecondsAgo}}, function(err) {
if (err) return reject(err);
console.log("DELETED OLDs at "+d);
resolve();
});
});
}
readShoutsPromise = (tex) => {
return new Promise( (resolve, reject) => {
console.log("in readShoutsPromise -"+tex);
All_Shouts
.find({})
.sort([['dateTime', 'ascending']])
.exec(function (err, data){
if (err) return reject(err);
let d = new Date();
console.log("shouts "+tex+" delete PROMISE = "+data.length +"; date ="+d);
resolve(data);
});
});
}
The array push and pop methods can be used to sequence promises: the array always holds just the current tail of the chain, and you can push new promises whenever you need additional data. This is the code I use in a React infinite loader to load a sequence of pages.
var promises = [Promise.resolve()];
function methodThatReturnsAPromise(page) {
return new Promise((resolve, reject) => {
setTimeout(() => {
console.log(`Resolve-${page}! ${new Date()} `);
resolve();
}, 1000);
});
}
function pushPromise(page) {
promises.push(promises.pop().then(function () {
return methodThatReturnsAPromise(page)
}));
}
pushPromise(1);
pushPromise(2);
pushPromise(3);
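To demonstrate the "push new promises when you need additional data" point: a page queued later (timing here is hypothetical) still runs only after the current tail settles, so order is preserved:
setTimeout(function () {
    pushPromise(4); // queued mid-flight; logs Resolve-4 after Resolve-3
}, 1500);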
(function() {
function sleep(ms) {
return new Promise(function(resolve) {
setTimeout(function() {
return resolve(ms); // resolve with the waited duration so the results are visible
}, ms);
});
}
function serial(arr, index, results) {
if (index == arr.length) {
return Promise.resolve(results);
}
return new Promise(function(resolve, reject) {
if (!index) {
index = 0;
results = [];
}
return arr[index]()
.then(function(d) {
return resolve(d);
})
.catch(function(err) {
return reject(err);
});
})
.then(function(result) {
results.push(result);
return serial(arr, index + 1, results);
})
.catch(function(err) {
throw err;
});
}
const a = [5000, 5000, 5000];
serial(a.map(x => () => sleep(x))).then(function(results) {
console.log(results); // [5000, 5000, 5000] after ~15 seconds
});
})();
Here the key is how you call the sleep function: you need to pass an array of functions that each return a promise, instead of an array of promises.
