Await in catch block fails inside forEach - javascript

I have an array-like structure that exposes async methods. The async method calls contain try-catch blocks which in turn expose more async methods in the case of caught errors. I'd like to understand why forEach doesn't play nicely with async/await.
let items = ['foo', 'bar', 'baz'];
// Desirable behavior
processForLoop(items);
/* Processing foo
* Resolved foo after 3 seconds.
* Processing bar
* Resolved bar after 3 seconds.
* Processing baz
* Resolved baz after 3 seconds.
*/
// Undesirable behavior
processForEach(items);
/* Processing foo
* Processing bar
* Processing baz
* Resolved foo after 3 seconds.
* Resolved bar after 3 seconds.
* Resolved baz after 3 seconds.
*/
async function processForLoop(items) {
  for (let i = 0; i < items.length; i++) {
    await tryToProcess(items[i]);
  }
}

async function processForEach(items) {
  items.forEach(await tryToProcess);
}

async function tryToProcess(item) {
  try {
    await process(item);
  } catch (error) {
    await resolveAfter3Seconds(item);
  }
}
// Asynchronous method
// Automatic failure for the sake of argument
function process(item) {
  console.log(`Processing ${item}`);
  return new Promise((resolve, reject) =>
    setTimeout(() => reject(Error('process error message')), 1)
  );
}

// Asynchronous method
function resolveAfter3Seconds(x) {
  return new Promise(resolve => setTimeout(() => {
    console.log(`Resolved ${x} after 3 seconds.`);
    resolve(x);
  }, 3000));
}

I'd like to understand why forEach doesn't play nicely with async/await.
It's easier when we consider that async is just syntactic sugar for a function returning a promise.
items.forEach(f) expects a function f as its argument, which it executes on each item, one at a time, before it returns. It ignores the return value of f.
items.forEach(await tryToProcess) is nonsense: it is equivalent to Promise.resolve(tryToProcess).then(ttp => items.forEach(ttp)),
which is functionally no different from items.forEach(tryToProcess).
Now tryToProcess returns a promise, but forEach ignores the return value, as we've mentioned, so it ignores that promise. This is bad news, and can lead to unhandled rejection errors, since all promise chains should be returned or terminated with catch to handle errors correctly.
This mistake is equivalent to forgetting await. Unfortunately, there's no array.forEachAwait().
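If you need one, a minimal sketch is easy to write yourself (forEachAwait is a hypothetical name, not a standard method):

// Hypothetical helper: runs an async callback on each item, one at a time.
async function forEachAwait(items, fn) {
  for (const item of items) {
    await fn(item); // wait before moving on to the next item
  }
}

// Usage with the code above:
// await forEachAwait(items, tryToProcess);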
items.map(f) is a little better, since it creates an array out of the return values from f, which in the case of tryToProcess would give us an array of promises. E.g. we could do this:
await Promise.all(items.map(tryToProcess));
...but all tryToProcess calls on each item would execute in parallel with each other.
Importantly, map runs them in parallel. Promise.all is just a means to wait for their completion.
As a rule...
I always use for of instead of forEach in async functions:
for (const item of items) {
  await tryToProcess(item);
}
...even when there's no await in the loop, just in case I add one later, to avoid this foot-gun.

There is no way to use forEach with await like that - forEach cannot run asynchronous iterations in serial, only in parallel (and even then, map with Promise.all would be better). Instead, if you want to use array methods, use reduce and await the resolution of the previous iteration's Promise:
let items = ['foo', 'bar', 'baz'];
processForEach(items);

async function processForLoop(items) {
  for (let i = 0; i < items.length; i++) {
    await tryToProcess(items[i]);
  }
}

async function processForEach(items) {
  await items.reduce(async (lastPromise, item) => {
    await lastPromise;
    await tryToProcess(item);
  }, Promise.resolve());
}

async function tryToProcess(item) {
  try {
    await process(item);
  } catch (error) {
    await resolveAfter3Seconds(item);
  }
}

// Asynchronous method
// Automatic failure for the sake of argument
function process(item) {
  console.log(`Processing ${item}`);
  return new Promise((resolve, reject) =>
    setTimeout(() => reject(Error('process error message')), 1)
  );
}

// Asynchronous method
function resolveAfter3Seconds(x) {
  return new Promise(resolve => setTimeout(() => {
    console.log(`Resolved ${x} after 3 seconds.`);
    resolve(x);
  }, 3000));
}
Also note that if the only await in a function is just before the function returns, you may as well just return the Promise itself, rather than have the function be async.
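Applied to the wrapper above, that means processForEach can drop async/await entirely (a minimal sketch, equivalent for callers):

// Return the reduce chain itself; callers still receive a promise.
function processForEach(items) {
  return items.reduce(
    (lastPromise, item) => lastPromise.then(() => tryToProcess(item)),
    Promise.resolve()
  );
}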

Javascript async await - why is my await not stopping execution of following code until resolved? [duplicate]

There are quite a few topics posted about how async/await behaves in JavaScript's map function, but still, a detailed explanation of the two examples below would be nice:
const resultsPromises = myArray.map(async number => {
  return await getResult(number);
});
const resultsPromises = myArray.map(number => {
  return getResult(number);
});
Edited: this is of course a fictional case, so it's open for debate: why, how, and when should the map function wait for the await keyword? Solutions that modify this example by calling Promise.all() are not really the aim of this question.
getResult is an async function
The other answers have pretty well covered the details of how your examples behave, but I wanted to try to state it more succinctly.
const resultsPromises = myArray.map(async number => {
  return await getResult(number);
});
const resultsPromises = myArray.map(number => {
  return getResult(number);
});
Array.prototype.map synchronously loops through an array and transforms each element to the return value of its callback.
Both examples return a Promise.
async functions always return a Promise.
getResult returns a Promise.
Therefore, if there are no errors you can think of them both in pseudocode as:
const resultsPromises = myArray.map(/* map each element to a Promise */);
As zero298 stated and alnitak demonstrated, this very quickly (synchronously) starts off each promise in order; however, since they're run in parallel each promise will resolve/reject as they see fit and will likely not settle (fulfill or reject) in order.
Either run the promises in parallel and collect the results with Promise.all or run them sequentially using a for * loop or Array.prototype.reduce.
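Both options in a minimal sketch (to be run inside an async function, with myArray and getResult as above):

// Parallel: start every call up front, then wait for all of the results at once.
const parallelResults = await Promise.all(myArray.map(getResult));

// Sequential: each call starts only after the previous one has settled.
const sequentialResults = [];
for (const number of myArray) {
  sequentialResults.push(await getResult(number));
}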
Alternatively, you could use a third-party module for chainable asynchronous JavaScript methods I maintain to clean things up and--perhaps--make the code match your intuition of how an async map operation might work:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

const getResult = async n => {
  await delay(Math.random() * 1000);
  console.log(n);
  return n;
};

(async () => {
  console.log('parallel:');
  await AsyncAF([1, 2, 3]).map(getResult).then(console.log);
  console.log('sequential:');
  await AsyncAF([1, 2, 3]).series.map(getResult).then(console.log)
})();
<script src="https://unpkg.com/async-af@7.0.12/index.js"></script>
async/await is useful when you want to flatten your code by removing the .then() callbacks, or when you want to implicitly return a Promise:

const delay = n => new Promise(res => setTimeout(res, n));

async function test1() {
  await delay(200);
  // do something useful here
  console.log('hello 1');
}

async function test2() {
  return 'hello 2'; // this returned value will be wrapped in a Promise
}

test1();
test2().then(console.log);
However, in your case, you are not using await to replace a .then(), nor are you using it to return an implicit Promise since your function already returns a Promise. So they are not necessary.
Parallel execution of all the Promises
If you want to run all the Promises in parallel, I would suggest simply returning the result of getResult with map() to generate an array of Promises. The Promises will be started sequentially but will eventually run in parallel.
const resultsPromises = indicators.map(getResult);
Then you can await all promises and get the resolved results using Promise.all():
const data = [1, 2, 3];
const getResult = x => new Promise(res => {
  return setTimeout(() => {
    console.log(x);
    res(x);
  }, Math.random() * 1000)
});

Promise.all(data.map(getResult)).then(console.log);
Sequential execution of the Promises
However, if you want to run each Promise sequentially and wait for the previous Promise to resolve before running the next one, then you can use reduce() and async/await like this:
const data = [1, 2, 3];
const getResult = x => new Promise(res => {
  return setTimeout(() => {
    console.log(x);
    res(x);
  }, Math.random() * 1000)
});

data.reduce(async (previous, x) => {
  const result = await previous;
  return [...result, await getResult(x)];
}, Promise.resolve([])).then(console.log);
Array.prototype.map() is a function that transforms Arrays. It maps one Array to another Array. The most important part of its function signature is the callback. The callback is called on each item in the Array and what that callback returns is what is put into the new Array returned by map.
It does not do anything special with what gets returned. It does not call .then() on the items, it does not await anything. It synchronously transforms data.
That means that if the callback returns a Promise (which all async functions do), all the promises will be "hot" and running in parallel.
In your example, if getResult() returns a Promise or is itself async, there isn't really a difference between your implementations. resultsPromises will be populated by Promises that may or may not be resolved yet.
If you want to wait for everything to finish before moving on, you need to use Promise.all().
Additionally, if you only want one getResult() call to be running at a time, use a regular for loop and await within the loop.
If the intent of the first code snippet was to have a .map call that waits for all of the Promises to be resolved before returning (and to have those callbacks run sequentially) I'm afraid it doesn't work like that. The .map function doesn't know how to do that with async functions.
This can be demonstrated with the following code:
const array = [1, 2, 3, 4, 5];

function getResult(n) {
  console.log('starting ' + n);
  return new Promise(resolve => {
    setTimeout(() => {
      console.log('finished ' + n);
      resolve(n);
    }, 1000 * (Math.random() + 1)); // random 1-2 second delay
  });
}

let promises = array.map(async (n) => {
  return await getResult(n);
});

console.log('map finished');
Promise.all(promises).then(console.log);
Where you'll see that the .map call finishes immediately before any of the asynchronous operations are completed.
If getResult always returns a promise and never throws an error then both will behave the same.
Some promise returning functions can throw errors before the promise is returned, in this case wrapping the call to getResult in an async function will turn that thrown error into a rejected promise, which can be useful.
As has been stated in many comments, you almost never need return await - it is equivalent to adding .then(result => result) at the end of a promise chain - it is (mostly) harmless but unnecessary. Just use return. (The exception is inside a try...catch block; see the sketch below.)
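A short sketch of that try...catch exception, where return await does matter (standard semantics, no assumptions beyond the names used here):

async function withAwait() {
  try {
    return await Promise.reject(new Error('boom'));
  } catch (e) {
    return 'caught'; // the rejection is caught here
  }
}

async function withoutAwait() {
  try {
    return Promise.reject(new Error('boom')); // returned before it rejects
  } catch (e) {
    return 'never reached'; // this catch cannot intercept it
  }
}

withAwait().then(console.log); // "caught"
withoutAwait().catch(e => console.log(e.message)); // "boom"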

Can I prevent an `AsyncGenerator` from yielding after its `return()` method has been invoked?

AsyncGenerator.prototype.return() - JavaScript | MDN states:
The return() method of an async generator acts as if a return statement is inserted in the generator's body at the current suspended position, which finishes the generator and allows the generator to perform any cleanup tasks when combined with a try...finally block.
Why then does the following code print 0–3 rather than only 0–2?
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

const values = (async function* delayedIntegers() {
  let n = 0;
  while (true) {
    yield n++;
    await delay(100);
  }
})();

await Promise.all([
  (async () => {
    for await (const value of values) console.log(value);
  })(),
  (async () => {
    await delay(250);
    values.return();
  })(),
]);
I tried adding log statements to better understand where the "current suspended position" is. From what I can tell, when I call the return() method the AsyncGenerator instance isn't suspended (the body execution isn't at a yield statement); instead of returning once it reaches the yield statement, the next value is yielded, the generator suspends, and only at that point does the "return" finally happen.
Is there any way to detect that the return() method has been invoked and not yield afterwards?
I can implement the AsyncIterator interface myself but then I lose the yield syntax supported by async generators:
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

const values = (() => {
  let n = 0;
  let done = false;
  return {
    [Symbol.asyncIterator]() {
      return this;
    },
    async next() {
      if (done) return { done, value: undefined };
      if (n !== 0) {
        await delay(100);
        if (done) return { done, value: undefined };
      }
      return { done, value: n++ };
    },
    async return() {
      done = true;
      return { done, value: undefined };
    },
  };
})();

await Promise.all([
  (async () => {
    for await (const value of values) console.log(value);
  })(),
  (async () => {
    await delay(250);
    values.return();
  })(),
]);
Why does the code print 0–3 rather than only 0–2? From what I can tell, when I call the return() method, the AsyncGenerator instance isn't suspended (the body execution isn't at a yield statement) and instead of returning once reaching the yield statement the next value is yielded and then suspended at which point the "return" finally happens.
Yes, precisely this is what happens. The generator is already running because the for await … of loop did call its .next() method, and so the generator will complete that before considering the .return() call.
All the methods that you invoke on an async generator are queued. (In a sync generator, you'd get a "TypeError: Generator is already running" instead). One can demonstrate this by immediately calling next multiple times:
const values = (async function*() {
  let i = 0;
  while (true) {
    await new Promise(r => { setTimeout(r, 1000); });
    yield i++;
  }
})();

values.next().then(console.log, console.error);          // { value: 0, done: false } after ~1s
values.next().then(console.log, console.error);          // { value: 1, done: false } after ~2s
values.next().then(console.log, console.error);          // { value: 2, done: false } after ~3s
values.return('done').then(console.log, console.error);  // { value: 'done', done: true }, queued after the third next()
values.next().then(console.log, console.error);          // { value: undefined, done: true }
Is there any way to detect that the return() method has been invoked and not yield afterwards?
No, not from within the generator. And really you probably still should yield the value if you already expended the effort to produce it.
It sounds like what you want to do is to ignore the produced value when you want the generator to stop. You should do that in your for await … of loop - and you can also use it to stop the generator by using a break statement:
const delay = (ms) => new Promise((resolve) => {
  setTimeout(resolve, ms);
});

async function* delayedIntegers() {
  let n = 0;
  while (true) {
    yield n++;
    await delay(1000);
  }
}

(async function main() {
  const start = Date.now();
  const values = delayedIntegers();
  for await (const value of values) {
    if (Date.now() - start > 2500) {
      console.log('done:', value);
      break;
    }
    console.log(value);
  }
})();
But if you really want to abort the generator from the outside, you need an out-of-band channel to signal the cancellation. You can use an AbortSignal for this:
const delay = (ms, signal) => new Promise((resolve, reject) => {
  function done() {
    resolve();
    signal?.removeEventListener("abort", stop);
  }
  function stop() {
    reject(this.reason);
    clearTimeout(handle);
  }
  signal?.throwIfAborted();
  const handle = setTimeout(done, ms);
  signal?.addEventListener("abort", stop, { once: true });
});

async function* delayedIntegers(signal) {
  let n = 0;
  while (true) {
    yield n++;
    await delay(1000, signal);
  }
}

(async function main() {
  try {
    const values = delayedIntegers(AbortSignal.timeout(2500));
    for await (const value of values) {
      console.log(value);
    }
  } catch (e) {
    if (e.name != "TimeoutError") throw e;
    console.log("done");
  }
})();
This will actually permit stopping the generator during the timeout, not only after the full second has elapsed.
Is there a way to prevent this "extra yield" after invoking the return method? If not, are there libraries, patterns, etc. out there that avoid this while still implementing these AsyncIterator interface optional properties?
As @Bergi clearly explained, the extra yield cannot be avoided with the AsyncGenerator.return() method. This is a really interesting case, but I don't think you will find libraries that fix it. @Bergi proposed a clever solution using an AbortSignal; I have tried a different approach with only Promises:
(async function test() {
  const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

  const wrapIntoStoppable = function (generator) {
    const newGenerator = {
      isGeneratorStopped: false,
      resolveStopPromise: null,
      async *[Symbol.asyncIterator]() {
        let stoppedSymbol = Symbol('stoppedPromise')
        let stoppingPromise
        while (true) {
          if (this.isGeneratorStopped)
            return
          stoppingPromise = new Promise((resolve, _) => this.resolveStopPromise = resolve)
            .then(_ => stoppedSymbol)
          const nextValuePromise = generator.next()
          const result = await Promise.race([nextValuePromise, stoppingPromise])
          this.resolveStopPromise() // resolve the promise in case it is still pending
          if (result === stoppedSymbol)
            return
          else
            yield result.value
        }
      },
      stop: function() {
        this.resolveStopPromise()
        this.isGeneratorStopped = true
      }
    }

    const handler = {
      get: function(target, prop, receiver) {
        if (['next', 'return', 'throw'].includes(prop))
          return generator[prop].bind(generator)
        else
          return newGenerator[prop].bind(newGenerator)
      }
    }

    return new Proxy(newGenerator, handler)
  }

  const values = wrapIntoStoppable((async function* delayedIntegers() {
    let n = 0;
    while (true) {
      yield n++;
      await delay(100);
    }
  })());

  await Promise.all([
    (async () => {
      for await (const value of values) {
        console.log(Date.now())
        console.log(value);
      }
      console.log(Date.now())
      // console.log(await values.next())
      // console.log(await values.return())
      // console.log(await values.throw())
    })(),
    (async () => {
      await delay(250);
      values.stop()
    })(),
  ]);
})();
The idea is that I wrap an async generator with an object that has an async iterator. All the elements yielded by the wrapping generator are yielded by the original generator, but now 2 promises are started:
nextValuePromise that will return the value to yield
stoppingPromise that will end the iteration if resolved before the previous one
In this way, if the stop() method (which resolves stoppingPromise) is called before the first promise is resolved, then Promise.race() will immediately return a dummy Symbol. When the result of the race is this symbol, the iterator returns. The stop() method also sets the isGeneratorStopped flag, which makes sure the iterator eventually returns even if stop() is called after stoppingPromise has already been resolved manually.
I have also used a Proxy to make sure that the wrapping object behaves as a true AsyncGenerator. Calls to next(), return() or throw() are simply forwarded to the wrapped generator.
Let's see the pros:
wrapIntoStoppable can become a util method that just wraps any async generator. This is certainly convenient because you don't have to use signals every time there is a pending Promise
Once the stop() method is called on the async generator, the for await...of loop immediately returns. Note: this doesn't mean that pending Promises are aborted
And now the cons:
Maybe too much code to maintain? Now the generator has a proxy that wraps another wrapper... I would like to simplify the design at least
After the generator is stopped, nextValuePromise could still resolve in the meantime, causing potential side effects. This is the main reason why it is a pretty dangerous library function.
Actually, I think you could even merge @Bergi's solution and mine and manage to abort a Promise when the stop() method is called. However, in this case, all the promises need to handle the abort signals.
await would suspend the async part of the function, but not the generator part; thus AsyncGenerator.return() cannot act on the await suspension, only on the yield suspension. I think that's why AsyncGenerator.return() returns a promise, while Generator.return() does not.
Yes. Bergi is right. The for await loop invokes .next() right after consuming a value and puts yield in charge before return. So what happens is:
#0ms   0 gets yielded and .next() puts yield in charge to yield 1 once resolved.
#100ms 1 gets yielded and .next() puts yield in charge to yield 2 once resolved.
#200ms 2 gets yielded and .next() puts yield in charge to yield 3 once resolved.
#250ms a values.return() is enqueued, but yield has already been queued to yield 3.
#300ms 3 gets yielded and the generator finalizes along with the iterable values.
Now the thing is, if we find a way to resolve or reject the promise waiting for 3 prematurely, at #250ms, then we are fine. Yet without using the abort abstraction you can still do this with naked promises, even without using an async generator. You just need to lift the resolve and reject functions out of the generator function's scope and invoke them from there. I think it's best to reject prematurely and catch the rejection in the outer scope (silently or not).
Here is a way to accomplish this:
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms)),
      prior = {};

const values = (function* delayedIntegers() {
  let n = 0,
      p = new Promise((v, x) => Object.assign(prior, { v, x }));
  prior.v(n++); // resolve p with 0 and increment n
  while (true) {
    yield p
    p = new Promise((v, x) => Object.assign(prior, { v, x }));
    delay(100).then(_ => prior.v(n++));
  }
})();

(async () => {
  try {
    for await (const value of values) console.log(value);
  }
  catch (e) {
    console.log(e);
    values.return();
  }
})();

delay(250).then(_ => prior.x("Finalized..!"));
This is almost like your code but there is this prior object which holds the resolve and reject functions of a promise callback as v and x respectively.
Since the prior object is accessible from the outer context, we can invoke its x method (rejection) before the while loop in the generator resolves 3, and catch the rejection with the employed catch(e).

Why is Promise.all needed for Array.map Array.forEach asynchronous operation?

I am using NodeJS (ExpressJS) server, have the following code:
let caseScore = 0
await questions.map(async q => {
  caseScore += await q.grade() // Queries database and gets questions, finds score
})
console.log(caseScore)
>> Output: 0
However, it appears that q.grade() finishes executing after the request is finished. I put a console.log() within q.grade, and it shows up after the response is sent. Clearly this is executed asynchronously. So later I did this:
let caseScore = 0
await Promise.all(questions.map(async q => {
  caseScore += await q.grade() // Queries database and gets questions, finds score
}))
console.log(caseScore)
>> Output: 2
It works perfectly. Can someone explain to me why Promise.all is needed? Also, if I switch .map for .forEach, Promise.all errors, what is the correct way to do this for .forEach?
You have to wait for all the async calls to complete; that is the reason why Promise.all is needed.
It converts the set of promises into a single promise.
When you await an array of promises, you wait for an array, which is not itself a promise (even though it contains promises inside). await immediately resolves values that are not a Promise. It's as if you did await 'hello' - that would resolve instantly.
Promise.all is a util that exposes a new Promise which resolves only when all the promises in the array passed as an argument have resolved. Since it creates its own promise, you can await on Promise.all.
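A minimal sketch of the difference (inside an async function):

const p = new Promise(resolve => setTimeout(() => resolve(42), 100));

console.log(await [p]);              // [ Promise { <pending> } ] - the array comes back as-is
console.log(await Promise.all([p])); // [ 42 ] - waits for every promise to settle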
[EDIT]
Careful, do not use await in loops like this
for (var q of questions) {
  caseScore += await q.grade();
}
The reason is that it translates to
questions[0].grade().then(score => {
  return questions[1].grade().then(score => {
    return questions[2].grade().then(score => {
      return questions[3].grade().then(score => {
        // ...
      });
    });
  });
});

It creates a lot of context in memory and makes everything serial, which does not take advantage of JavaScript's async nature.
If you want to have a sum or some values shared between promises, you can use a context object:
async function grade1() {
  this.sum += await getSumFromServer();
  this.nbOfTasks += 1;
}

async function grade2() {
  this.sum += await getSumFromServer();
  this.nbOfTasks += 1;
}

async function grade3() {
  this.sum += await getSumFromServer();
  this.nbOfTasks += 1;
}

async function main() {
  const context = {
    sum: 0,
    nbOfTasks: 0,
  }
  const promisesWithContext = [grade1, grade2, grade3]
    .map(fn => fn.call(context));
  await Promise.all(promisesWithContext);
  console.log(context);
}

main();

// STUB
function getSumFromServer() {
  return new Promise((resolve) => {
    setTimeout(() => {
      resolve(Math.random() * 100)
    }, 1000)
  });
}
Array.prototype.map works like this:

Array.prototype.map = function(callback) {
  var results = [];
  for (var i = 0; i < this.length; i++) {
    results.push(callback(this[i], i));
  }
  return results;
}

In your code, your callback returns a Promise, so Array.map returns an array of Promises, because it doesn't await each callback's completion. Array.forEach will not work for you either, because it doesn't await the callback.
The best way to solve your problem is to use Promise.all to convert the array of Promises into a single Promise and await it.
await Promise.all(questions.map(async q => {
  caseScore += await q.grade();
}));

Or use a simple for loop like this:

for (var q of questions) {
  caseScore += await q.grade();
}
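As for the .forEach follow-up: .forEach returns undefined rather than an array of promises, so there is nothing to hand to Promise.all, which is why it errors. A minimal sketch of the contrast:

// forEach returns undefined, so this rejects with a TypeError (undefined is not iterable):
// await Promise.all(questions.forEach(async q => { caseScore += await q.grade() }));

// map returns the array of promises that Promise.all needs:
await Promise.all(questions.map(async q => {
  caseScore += await q.grade();
}));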

Why JavaScript async/await code runs in parallel

I saw this good article from Google introducing async/await.
However, I couldn't understand why this code runs in parallel:
async function parallel() {
  const wait1 = wait(500);
  const wait2 = wait(500);
  await wait1;
  await wait2;
  return "done!";
}

And this runs in series:

async function series() {
  await wait(500);
  await wait(500);
  return "done!";
}
What is the key difference between these two methods?
In my opinion, both of them await promises and should produce the same result.
Here is my test code. Can run in browser console which support async/await.
function wait() {
  return new Promise((res) => { setTimeout(() => { res() }, 2000) })
}

async function parallel() {
  const wait1 = wait();
  const wait2 = wait();
  await wait1;
  await wait2;
  return "done!";
}

async function series() {
  await wait();
  await wait();
  return "done!";
}

parallel().then(res => console.log("parallel!"))
series().then(res => console.log("series!"))
======
Thanks for the answers.
But I still have some questions. What exactly does async/await mean?
To my knowledge, constructing a Promise instance executes it directly.
Here is my test code
function wait() {
  return new Promise((res) => { setTimeout(() => { console.log("wait!"); res(); }, 2000) })
}

wait()
// Promise {<pending>}
// wait!

let w = wait()
// undefined
// wait!

let w = await wait()
// wait!
// undefined

async function test() { await wait() }
// undefined

test()
// Promise {<pending>}
// wait!
So why does const wait1 = wait(); inside the parallel function execute directly?
By the way, should I open another question to ask these questions?
await doesn't cause the Promise or its setTimeout() to start, which seems to be what you're expecting. The call to wait() alone starts them immediately, whether there's an active await on the promise or not.
await only helps you know when the already on-going operation, tracked through the promise, has completed.
So, the difference is just due to when wait() is being called and starting each timeout:
parallel() calls wait() back-to-back as quickly as the engine can get from one to the next, before either are awaited, so the 2 timeouts begin/end at nearly the same time.
series() forces the 2nd wait() to not be called until after the 1st has completed, by having an await act in between them.
Regarding your edit, async and await are syntactic sugar. Using them, the engine will modify your function at runtime, inserting additional code needed to interact with the promises.
A (possible, but not precise) equivalent of series() without await and async might be:
function series() {
  return Promise.resolve()
    .then(function () { return wait(500) })
    .then(function () { return wait(500) })
    .then(function () { return "done!"; });
}
And for parallel():
function parallel() {
  // Both timers start here, before any handler is attached.
  const wait1 = wait(500);
  const wait2 = wait(500);
  return wait1
    .then(function () { return wait2; })
    .then(function () { return "done!"; });
}
}
In parallel(), you call both methods and then await their results while in series() you await the result of the first wait() call before calling the second wait().
What exactly does async/await mean? To my knowledge, constructing a Promise instance executes it directly.
The Promise instance is returned immediately, i.e. synchronously. But the value of the Promise is produced asynchronously: the executor function passed to the constructor receives a callback, usually called resolve, and the promise settles only when that callback is invoked. This is what you are awaiting.
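A minimal sketch of that timing:

const p = new Promise(resolve => {
  console.log('executor runs synchronously; the work starts now');
  setTimeout(() => resolve('value'), 100); // the promise settles later
});
console.log('constructor has already returned'); // logs before the promise settles
p.then(console.log); // logs 'value' about 100ms later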
The difference is that in parallel, both of the promises are scheduled right after each other.
In series, you wait for the first wait to resolve and then schedule the second wait to be called.
If we expand the methods we'd get something similar to:
function parallel() {
  const wait1 = wait();
  const wait2 = wait();
  // Both wait1 and wait2 timeouts are scheduled
  return wait1
    .then(() => wait2)
    .then(() => "done");
}

function series() {
  // The second wait happens _after_ the first has been *resolved*
  return wait()
    .then(() => wait())
    .then(() => "done");
}

How to make q.all execute in order like async.series [duplicate]

Consider the following code that reads an array of files in a serial/sequential manner. readFiles returns a promise, which is resolved only once all files have been read in sequence.
var readFile = function(file) {
  ... // Returns a promise.
};

var readFiles = function(files) {
  return new Promise((resolve, reject) => {
    var readSequential = function(index) {
      if (index >= files.length) {
        resolve();
      } else {
        readFile(files[index]).then(function() {
          readSequential(index + 1);
        }).catch(reject);
      }
    };
    readSequential(0); // Start with the first file!
  });
};
The above code works, but I don't like having to do recursion for things to occur sequentially. Is there a simpler way that this code can be re-written so that I don't have to use my weird readSequential function?
Originally I tried to use Promise.all, but that caused all of the readFile calls to happen concurrently, which is not what I want:
var readFiles = function(files) {
  return Promise.all(files.map(function(file) {
    return readFile(file);
  }));
};
Update 2017: I would use an async function if the environment supports it:
async function readFiles(files) {
  for (const file of files) {
    await readFile(file);
  }
}

If you'd like, you can defer reading the files until you need them using an async generator (if your environment supports it):

async function* readFiles(files) {
  for (const file of files) {
    yield await readFile(file);
  }
}
Update: On second thought - I might use a for loop instead:

var readFiles = function(files) {
  var p = Promise.resolve(); // Q() in q
  files.forEach(file => {
    p = p.then(() => readFile(file));
  });
  return p;
};
Or more compactly, with reduce:
var readFiles = function(files) {
  return files.reduce((p, file) => {
    return p.then(() => readFile(file));
  }, Promise.resolve()); // initial
};
In other promise libraries (like when and Bluebird) you have utility methods for this.
For example, Bluebird would be:
var Promise = require("bluebird");
var fs = Promise.promisifyAll(require("fs"));
var readAll = Promise.resolve(files).map(fs.readFileAsync,{concurrency: 1 });
// if the order matters, you can use Promise.each instead and omit concurrency param
readAll.then(function(allFileContents){
// do stuff to read files.
});
Although there is really no reason not to use async await today.
Here is how I prefer to run tasks in series.
function runSerial() {
  var that = this;
  // task1 is a function that returns a promise (and immediately starts executing)
  // task2 is a function that returns a promise (and immediately starts executing)
  return Promise.resolve()
    .then(function() {
      return that.task1();
    })
    .then(function() {
      return that.task2();
    })
    .then(function() {
      console.log(" ---- done ----");
    });
}
What about cases with more tasks? Like, 10?
function runSerial(tasks) {
  var result = Promise.resolve();
  tasks.forEach(task => {
    result = result.then(() => task());
  });
  return result;
}
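A quick usage sketch (delay is a hypothetical helper, included here so the example is self-contained):

// Hypothetical helper that resolves after the given milliseconds.
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

// Each task is a function returning a promise; none starts until its turn.
const tasks = [
  () => delay(100).then(() => console.log('task 1 done')),
  () => delay(100).then(() => console.log('task 2 done')),
];

runSerial(tasks).then(() => console.log(' ---- all done ----'));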
This question is old, but we live in a world of ES6 and functional JavaScript, so let's see how we can improve.
Because promises execute immediately, we can't just create an array of promises, they would all fire off in parallel.
Instead, we need to create an array of functions that returns a promise. Each function will then be executed sequentially, which then starts the promise inside.
We can solve this a few ways, but my favorite way is to use reduce.
It gets a little tricky using reduce in combination with promises, so I have broken down the one liner into some smaller digestible bites below.
The essence of this function is to use reduce starting with an initial value of Promise.resolve([]), or a promise containing an empty array.
This promise will then be passed into the reduce method as promise. This is the key to chaining each promise together sequentially. The next promise to execute is func and when the then fires, the results are concatenated and that promise is then returned, executing the reduce cycle with the next promise function.
Once all promises have executed, the returned promise will contain an array of all the results of each promise.
ES6 Example (one liner)
/*
 * serial executes Promises sequentially.
 * @param {funcs} An array of funcs that return promises.
 * @example
 * const urls = ['/url1', '/url2', '/url3']
 * serial(urls.map(url => () => $.ajax(url)))
 *   .then(console.log.bind(console))
 */
const serial = funcs =>
  funcs.reduce((promise, func) =>
    promise.then(result => func().then(Array.prototype.concat.bind(result))), Promise.resolve([]))
ES6 Example (broken down)
// broken down for easier understanding
const concat = list => Array.prototype.concat.bind(list)
const promiseConcat = f => x => f().then(concat(x))
const promiseReduce = (acc, x) => acc.then(promiseConcat(x))

/*
 * serial executes Promises sequentially.
 * @param {funcs} An array of funcs that return promises.
 * @example
 * const urls = ['/url1', '/url2', '/url3']
 * serial(urls.map(url => () => $.ajax(url)))
 *   .then(console.log.bind(console))
 */
const serial = funcs => funcs.reduce(promiseReduce, Promise.resolve([]))
Usage:
// first take your work
const urls = ['/url1', '/url2', '/url3', '/url4']

// next convert each item to a function that returns a promise
const funcs = urls.map(url => () => $.ajax(url))

// execute them serially
serial(funcs)
  .then(console.log.bind(console))
To do this simply in ES6:
function readFiles(files) {
  // Create a new empty promise (don't do that with real people ;)
  var sequence = Promise.resolve();

  // Loop over each file, and add on a promise to the
  // end of the 'sequence' promise.
  files.forEach(file => {
    // Chain one computation onto the sequence
    sequence = sequence
      .then(() => performComputation(file))
      .then(result => doSomething(result));
    // Resolves for each file, one at a time.
  })

  // This will resolve after the entire chain is resolved
  return sequence;
}
Addition example
const addTwo = async () => 2;

const addThree = async (inValue) => new Promise((resolve) => setTimeout(() => resolve(inValue + 3), 2000));

const addFour = (inValue) => new Promise((res) => setTimeout(() => res(inValue + 4), 1000));

const addFive = async (inValue) => inValue + 5;

// Function which handles promises from above
async function sequenceAddition() {
  let sum = await [addTwo, addThree, addFour, addFive].reduce(
    (promise, currPromise) => promise.then((val) => currPromise(val)),
    Promise.resolve()
  );
  console.log('sum:', sum); // 2 + 3 + 4 + 5 = 14
}

// Run function. See console for result.
sequenceAddition();
General syntax to use reduce()
function sequence(tasks, fn) {
  return tasks.reduce((promise, task) => promise.then(() => fn(task)), Promise.resolve());
}
UPDATE
items-promise is a ready to use NPM package doing the same.
I've had to run a lot of sequential tasks and used these answers to forge a function that would take care of handling any sequential task...
function one_by_one(objects_array, iterator, callback) {
  var start_promise = objects_array.reduce(function (prom, object) {
    return prom.then(function () {
      return iterator(object);
    });
  }, Promise.resolve()); // initial

  if (callback) {
    start_promise.then(callback);
  } else {
    return start_promise;
  }
}
The function takes 2 arguments + 1 optional. First argument is the array on which we will be working. The second argument is the task itself, a function that returns a promise, the next task will be started only when this promise resolves. The third argument is a callback to run when all tasks have been done. If no callback is passed, then the function returns the promise it created so we can handle the end.
Here's an example of usage:
var filenames = ['1.jpg', '2.jpg', '3.jpg'];
var resize_task = function(filename) {
  // return promise of async resizing with filename
};
one_by_one(filenames, resize_task);
Hope it saves someone some time...
With Async/Await (if you have the support of ES7)
function downloadFile(fileUrl) { ... } // This function returns a Promise

async function main() {
  var filesList = [...];
  for (const file of filesList) {
    await downloadFile(file);
  }
}

(You must use a for loop, and not forEach, because async/await has problems running in a forEach loop.)
Without Async/Await (using Promise)
function downloadFile(fileUrl) { ... } // This function returns a Promise

function downloadRecursion(filesList, index) {
  index = index || 0;
  if (index < filesList.length) {
    downloadFile(filesList[index]).then(function() {
      index++;
      downloadRecursion(filesList, index); // self invocation - recursion!
    });
  } else {
    return Promise.resolve();
  }
}

function main() {
  var filesList = [...];
  downloadRecursion(filesList);
}
My preferred solution:
function processArray(arr, fn) {
  return arr.reduce(
    (p, v) => p.then((a) => fn(v).then(r => a.concat([r]))),
    Promise.resolve([])
  );
}
It's not fundamentally different from others published here but:
Applies the function to items in series
Resolves to an array of results
Doesn't require async/await (support is still quite limited, circa 2017)
Uses arrow functions; nice and concise
Example usage:
const numbers = [0, 4, 20, 100];
const multiplyBy3 = (x) => new Promise(res => res(x * 3));

// Prints [ 0, 12, 60, 300 ]
processArray(numbers, multiplyBy3).then(console.log);
Tested on reasonably current Chrome (v59) and NodeJS (v8.1.2).
First, you need to understand that a promise is executed at the time of creation.
So, for example, if you have this code:
["a","b","c"].map(x => returnsPromise(x))
You need to change it to:
["a","b","c"].map(x => () => returnsPromise(x))
Then we need to sequentially chain promises:
["a", "b", "c"].map(x => () => returnsPromise(x))
.reduce(
(before, after) => before.then(_ => after()),
Promise.resolve()
)
executing after(), will make sure that promise is created (and executed) only when its time comes.
Nicest solution that I was able to figure out was with bluebird promises. You can just do Promise.resolve(files).each(fs.readFileAsync); which guarantees that promises are resolved sequentially in order.
With async/await (ES2017, plus maybe some features of ES2018), this can be reduced to this form:

function readFile(file) {
  ... // Returns a promise.
}

async function readFiles(files) {
  for (const file of files) {
    await readFile(file)
  }
}
I haven't seen another answer express that simplicity. The OP said parallel execution of readFile was not desired. However, with IO like this it really makes sense not to block on a single file read while still keeping the overall loop execution synchronous (you don't want to do the next step until all files have been read). Since I just learned about this and am a bit excited about it, I'll share that approach of parallel asynchronous execution of readFile with overall synchronous execution of readFiles.
async function readFiles(files) {
  await Promise.all(files.map(readFile))
}
Isn't that a thing of beauty?
This is a slight variation of another answer above. Using native Promises:
function inSequence(tasks) {
  return tasks.reduce((p, task) => p.then(task), Promise.resolve())
}
Explanation
If you have these tasks [t1, t2, t3], then the above is equivalent to Promise.resolve().then(t1).then(t2).then(t3). It's the behavior of reduce.
How to use
First you need to construct a list of tasks! A task is a function that accepts no argument. If you need to pass arguments to your function, then use bind or other methods to create a task. For example:
var tasks = files.map(file => processFile.bind(null, file))
inSequence(tasks).then(...)
I created this simple method on the Promise object:
Create and add a Promise.sequence method to the Promise object
Promise.sequence = function (chain) {
  var results = [];
  var entries = chain;
  if (entries.entries) entries = entries.entries();
  return new Promise(function (yes, no) {
    var next = function () {
      var entry = entries.next();
      if (entry.done) yes(results);
      else {
        entry.value[1]().then(function (result) {
          results.push(result); // collect the resolved value, then move on
          next();
        }, function () { no(results); });
      }
    };
    next();
  });
};
Usage:
var todo = [];
todo.push(firstPromise);
if (someCriterium) todo.push(optionalPromise);
todo.push(lastPromise);
// Invoking them
Promise.sequence(todo)
.then(function(results) {}, function(results) {});
The best thing about this extension to the Promise object, is that it is consistent with the style of promises. Promise.all and Promise.sequence is invoked the same way, but have different semantics.
Caution
Sequential running of promises is not usually a very good way to use promises. It's usually better to use Promise.all, and let the browser run the code as fast as possible. However, there are real use cases for it - for example when writing a mobile app using javascript.
My answer based on https://stackoverflow.com/a/31070150/7542429.
Promise.series = function series(arrayOfPromises) {
  var results = [];
  return arrayOfPromises.reduce(function(seriesPromise, promise) {
    return seriesPromise.then(function() {
      return promise.then(function(result) {
        results.push(result);
      });
    });
  }, Promise.resolve())
  .then(function() {
    return results;
  });
};
This solution returns the results as an array like Promise.all().
Usage:
Promise.series([array of promises])
  .then(function(results) {
    // do stuff with results here
  });
Use Array.prototype.reduce, and remember to wrap your promises in a function otherwise they will already be running!
// array of Promise providers
const providers = [
  function() {
    return Promise.resolve(1);
  },
  function() {
    return Promise.resolve(2);
  },
  function() {
    return Promise.resolve(3);
  }
]

const inSeries = function(providers) {
  const seed = Promise.resolve(null);
  return providers.reduce(function(a, b) {
    return a.then(b);
  }, seed);
};
nice and easy...
you should be able to re-use the same seed for performance, etc.
It's important to guard against empty arrays or arrays with only 1 element when using reduce, so this technique is your best bet:
const providers = [
  function(v) {
    return Promise.resolve(v + 1);
  },
  function(v) {
    return Promise.resolve(v + 2);
  },
  function(v) {
    return Promise.resolve(v + 3);
  }
]

const inSeries = function(providers, initialVal) {
  if (providers.length < 1) {
    return Promise.resolve(null)
  }
  return providers.reduce((a, b) => a.then(b), providers.shift()(initialVal));
};

and then call it like:

inSeries(providers, 1).then(v => {
  console.log(v); // 7
});
Using modern ES:
const series = async (tasks) => {
  const results = [];
  for (const task of tasks) {
    const result = await task;
    results.push(result);
  }
  return results;
};

// ...
const readFiles = await series(files.map(readFile));
Most of the answers don't include the results of ALL promises individually, so in case someone is looking for this particular behaviour, this is a possible solution using recursion.
It follows the style of Promise.all:
Returns the array of results in the .then() callback.
If some promise fails, it's returned immediately in the .catch() callback.
const promiseEach = (arrayOfTasks) => {
  let results = []
  return new Promise((resolve, reject) => {
    const resolveNext = (arrayOfTasks) => {
      // If all tasks are already resolved, return the final array of results
      if (arrayOfTasks.length === 0) return resolve(results)

      // Extract first promise and solve it
      const first = arrayOfTasks.shift()

      first().then((res) => {
        results.push(res)
        resolveNext(arrayOfTasks)
      }).catch((err) => {
        reject(err)
      })
    }
    resolveNext(arrayOfTasks)
  })
}

// Lets try it 😎
const promise = (time, shouldThrowError) => new Promise((resolve, reject) => {
  const timeInMs = time * 1000
  setTimeout(() => {
    console.log(`Waited ${time} secs`)
    if (shouldThrowError) reject(new Error('Promise failed'))
    resolve(time)
  }, timeInMs)
})

const tasks = [() => promise(1), () => promise(2)]

promiseEach(tasks)
  .then((res) => {
    console.log(res) // [1, 2]
  })
  // Oops some promise failed
  .catch((error) => {
    console.log(error)
  })
Note about the tasks array declaration:
In this case it is not possible to use the following notation, as Promise.all would:
const tasks = [promise(1), promise(2)]
And we have to use:
const tasks = [() => promise(1), () => promise(2)]
The reason is that JavaScript starts executing the promise immediately after it's declared. If we use methods like Promise.all, they just check that the state of all of them is fulfilled or rejected, but don't start the execution themselves. Using () => promise() we stop the execution until it's called.
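A tiny sketch of that difference:

// Eager: the executor runs as soon as the promise is constructed.
const eager = new Promise(resolve => { console.log('started!'); resolve(); });
// logs "started!" immediately

// Lazy: wrapping the construction in a function defers the work.
const lazy = () => new Promise(resolve => { console.log('started!'); resolve(); });
// logs nothing until lazy() is called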
You can use this function, which takes a list of promiseFactories:
function executeSequentially(promiseFactories) {
  var result = Promise.resolve();
  promiseFactories.forEach(function (promiseFactory) {
    result = result.then(promiseFactory);
  });
  return result;
}
A promise factory is just a simple function that returns a Promise:
function myPromiseFactory() {
  return somethingThatCreatesAPromise();
}
It works because a promise factory doesn't create the promise until it's asked to. It works the same way as a then function – in fact, it's the same thing!
You don't want to operate over an array of promises at all. Per the Promise spec, as soon as a promise is created, it begins executing. So what you really want is an array of promise factories...
If you want to learn more on Promises, you should check this link:
https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html
If you want, you can use reduce to build a sequential promise chain, for example:

[2, 3, 4, 5, 6, 7, 8, 9].reduce((promises, page) => {
  return promises.then((value) => {
    console.log(value);
    return Promise.resolve(value + 1);
  });
}, Promise.resolve(1));

It will always run sequentially.
I really liked @joelnet's answer, but to me, that style of coding is a little bit tough to digest, so I spent a couple of days trying to figure out how I would express the same solution in a more readable manner. This is my take, just with a different syntax and some comments.
// first take your work
const urls = ['/url1', '/url2', '/url3', '/url4']

// next convert each item to a function that returns a promise
const functions = urls.map((url) => {
  // For every url we return a new function
  return () => {
    return new Promise((resolve) => {
      // random wait in milliseconds
      const randomWait = parseInt((Math.random() * 1000), 10)
      console.log('waiting to resolve in ms', randomWait)
      setTimeout(() => resolve({ randomWait, url }), randomWait)
    })
  }
})

const promiseReduce = (acc, next) => {
  // we wait for the accumulator to resolve its promise
  return acc.then((accResult) => {
    // and then we return a new promise that will become
    // the new value for the accumulator
    return next().then((nextResult) => {
      // that eventually will resolve to a new array containing
      // the value of the two promises
      return accResult.concat(nextResult)
    })
  })
};

// the accumulator will always be a promise that resolves to an array
const accumulator = Promise.resolve([])

// we call reduce with the reduce function and the accumulator initial value
functions.reduce(promiseReduce, accumulator)
  .then((result) => {
    // let's display the final value here
    console.log('=== The final result ===')
    console.log(result)
  })
As Bergi noticed, I think the best and clearest solution is to use Bluebird.each, code below:
const BlueBird = require('bluebird');
BlueBird.each(files, fs.readFileAsync);
I find myself coming back to this question many times, and the answers aren't exactly giving me what I need, so putting this here for anyone who needs it too.
The code below does sequential promise execution (one after another), where each round can consist of multiple calls:

async function sequence(list, cb) {
  const result = [];
  await list.reduce(async (promise, item) => promise
    .then(() => cb(item))
    .then((res) => result.push(res)),
    Promise.resolve());
  return result;
}
Showcase:
<script src="https://cdnjs.cloudflare.com/ajax/libs/axios/0.15.3/axios.min.js"></script>
<script src="https://unpkg.com/#babel/standalone#7/babel.min.js"></script>
<script type="text/babel">
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
async function readFile(url, index) {
console.log('Running index: ', index);
// First action
const firstTime = await axios.get(url);
console.log('First API response: ', firstTime.data.activity);
// Second action
await sleep(1000);
// Third action
const secondTime = await axios.get(url);
console.log('Second API response: ', secondTime.data.activity);
// Fourth action
await sleep(1000);
return secondTime.data;
}
async function sequence(urls, fn) {
const result = [];
await urls.reduce(async (promise, url, index) => promise.then(() => fn(url, index)).then((res) => result.push(res)), Promise.resolve());
return result;
}
const urls = [
'https://www.boredapi.com/api/activity',
'https://www.boredapi.com/api/activity',
'https://www.boredapi.com/api/activity',
];
(async function init() {
const result = await sequence(urls, readFile);
console.log('result', result);
})()
</script>
I use the following code to extend the Promise object. It handles rejection of the promises and returns an array of results
Code
/*
  Runs tasks in sequence and resolves a promise upon finish

  tasks: an array of functions that return a promise upon call.
  parameters: an array of arrays corresponding to the parameters to be passed on each function call.
  context: Object to use as context to call each function. (The 'this' keyword that may be used inside the function definition)
*/
Promise.sequence = function(tasks, parameters = [], context = null) {
  return new Promise((resolve, reject) => {
    var nextTask = tasks.splice(0, 1)[0].apply(context, parameters[0]); // Dequeue and call the first task
    var output = new Array(tasks.length + 1);
    var errorFlag = false;
    tasks.forEach((task, index) => {
      nextTask = nextTask.then(r => {
        output[index] = r;
        return task.apply(context, parameters[index + 1]);
      }, e => {
        output[index] = e;
        errorFlag = true;
        return task.apply(context, parameters[index + 1]);
      });
    });
    // Last task
    nextTask.then(r => {
      output[output.length - 1] = r;
      if (errorFlag) reject(output); else resolve(output);
    })
    .catch(e => {
      output[output.length - 1] = e;
      reject(output);
    });
  });
};
Example
function functionThatReturnsAPromise(n) {
  return new Promise((resolve, reject) => {
    // Emulating real life delays, like a web request
    setTimeout(() => {
      resolve(n);
    }, 1000);
  });
}

var arrayOfArguments = [['a'], ['b'], ['c'], ['d']];
var arrayOfFunctions = (new Array(4)).fill(functionThatReturnsAPromise);

Promise.sequence(arrayOfFunctions, arrayOfArguments)
  .then(console.log)
  .catch(console.error);
Your approach is not bad, but it does have two issues: it swallows errors and it employs the Explicit Promise Construction Antipattern.
You can solve both of these issues, and make the code cleaner, while still employing the same general strategy:
var Q = require("q");
var readFile = function(file) {
... // Returns a promise.
};
var readFiles = function(files) {
var readSequential = function(index) {
if (index < files.length) {
return readFile(files[index]).then(function() {
return readSequential(index + 1);
});
}
};
// using Promise.resolve() here in case files.length is 0
return Promise.resolve(readSequential(0)); // Start!
};
This is my sequentially implementation that I use in various projects:
const files = [file1, file2, file3];
const fileContents = await sequentially(readFile, files);

// somewhere else in the code:
export const sequentially = async <T, P>(
  toPromise: (element: T) => Promise<P>,
  elements: T[]
): Promise<P[]> => {
  const results: P[] = [];
  await elements.reduce(async (sequence, element) => {
    await sequence;
    results.push(await toPromise(element));
  }, Promise.resolve());
  return results;
};
Here is my Angular/TypeScript approach, using RxJS:
Given an array of URL strings, convert it into an Observable using the from function.
Use pipe to wrap the Ajax request, immediate response logic, any desired delay, and error handling.
Inside of the pipe, use concatMap to serialize the requests. Otherwise, using Javascript forEach or map would make the requests at the same time.
Use RxJS ajax to make the call, and also to add any desired delay after each call returns.
Working example: https://stackblitz.com/edit/rxjs-bnrkix?file=index.ts
The code looks like this (I left in some extras so you can choose what to keep or discard):
import { ajax } from 'rxjs/ajax';
import { catchError, concatMap, delay, from, of, map, Observable } from 'rxjs';

const urls = [
  'https://randomuser.me/api/',
  'https://randomuser.me/api/',
  'https://randomuser.me/api/',
];

const delayAfterCall = 500;

from(urls)
  .pipe(
    concatMap((url: string) => {
      return ajax.getJSON(url).pipe(
        map((response) => {
          console.log('Done! Received:', response);
          return response;
        }),
        catchError((error) => {
          console.error('Error: ', error);
          return of(error);
        }),
        delay(delayAfterCall)
      );
    })
  )
  .subscribe((response) => {
    console.log('received email:', response.results[0].email);
  });
On the basis of the question's title, "Resolve promises one after another (i.e. in sequence)?", we might understand that the OP is more interested in the sequential handling of promises on settlement than sequential calls per se.
This answer is offered :
to demonstrate that sequential calls are not necessary for sequential handling of responses.
to expose viable alternative patterns to this page's visitors - including the OP if he is still interested over a year later.
despite the OP's assertion that he does not want to make calls concurrently, which may genuinely be the case but equally may be an assumption based on the desire for sequential handling of responses as the title implies.
If concurrent calls are genuinely not wanted then see Benjamin Gruenbaum's answer which covers sequential calls (etc) comprehensively.
If however, you are interested (for improved performance) in patterns which allow concurrent calls followed by sequential handling of responses, then please read on.
It's tempting to think you have to use Promise.all(arr.map(fn)).then(fn) (as I have done many times) or a Promise lib's fancy sugar (notably Bluebird's). However (with credit to this article), an arr.map(fn).reduce(fn) pattern will do the job, with the advantages that it:
works with any promise lib - even pre-compliant versions of jQuery - only .then() is used.
affords the flexibility to skip-over-error or stop-on-error, whichever you want with a one line mod.
Here it is, written for Q.
var readFiles = function(files) {
  return files.map(readFile) // Make calls in parallel.
    .reduce(function(sequence, filePromise) {
      return sequence.then(function() {
        return filePromise;
      }).then(function(file) {
        // Do stuff with file ... in the correct sequence!
      }, function(error) {
        console.log(error); // optional
        return sequence; // skip-over-error. To stop-on-error, `return error` (jQuery), or `throw error` (Promises/A+).
      });
    }, Q()).then(function() {
      // all done.
    });
};
Note: only that one fragment, Q(), is specific to Q. For jQuery you need to ensure that readFile() returns a jQuery promise. With A+ libs, foreign promises will be assimilated.
The key here is the reduction's sequence promise, which sequences the handling of the readFile promises but not their creation.
And once you have absorbed that, it's maybe slightly mind-blowing when you realise that the .map() stage isn't actually necessary! The whole job, parallel calls plus serial handling in the correct order, can be achieved with reduce() alone, plus the added advantage of further flexibility to :
convert from parallel async calls to serial async calls by simply moving one line - potentially useful during development.
Here it is, for Q again.
var readFiles = function(files) {
  return files.reduce(function(sequence, f) {
    var filePromise = readFile(f); // Make calls in parallel. To call sequentially, move this line down one.
    return sequence.then(function() {
      return filePromise;
    }).then(function(file) {
      // Do stuff with file ... in the correct sequence!
    }, function(error) {
      console.log(error); // optional
      return sequence; // Skip over any errors. To stop-on-error, `return error` (jQuery), or `throw error` (Promises/A+).
    });
  }, Q()).then(function() {
    // all done.
  });
};
That's the basic pattern. If you wanted also to deliver data (eg the files or some transform of them) to the caller, you would need a mild variant.
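For example, a minimal sketch of that variant (same Q-based pattern as above, now collecting each file so the final promise resolves with the data):

var readFiles = function(files) {
  var results = []; // filled in handling order
  return files.map(readFile) // Make calls in parallel.
    .reduce(function(sequence, filePromise) {
      return sequence.then(function() {
        return filePromise;
      }).then(function(file) {
        results.push(file); // handle in the correct sequence
      });
    }, Q()).then(function() {
      return results; // deliver the collected data to the caller
    });
};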
If someone else needs a guaranteed, STRICTLY sequential way of resolving Promises when performing CRUD operations, you can use the following code as a basis.
As long as you add 'return' before calling each function describing a Promise, and use this example as a basis, the next .then() function call will CONSISTENTLY start after the completion of the previous one:
getRidOfOlderShoutsPromise = () => {
  return readShoutsPromise('BEFORE')
    .then(() => {
      return deleteOlderShoutsPromise();
    })
    .then(() => {
      return readShoutsPromise('AFTER')
    })
    .catch(err => console.log(err.message));
}

deleteOlderShoutsPromise = () => {
  return new Promise((resolve, reject) => {
    console.log("in deleteOlderShouts");
    let d = new Date();
    let TwoMinuteAgo = d - 1000 * 90;
    All_Shouts.deleteMany({ dateTime: { $lt: TwoMinuteAgo } }, function(err) {
      if (err) reject();
      console.log("DELETED OLDs at " + d);
      resolve();
    });
  });
}

readShoutsPromise = (tex) => {
  return new Promise((resolve, reject) => {
    console.log("in readShoutsPromise -" + tex);
    All_Shouts
      .find({})
      .sort([['dateTime', 'ascending']])
      .exec(function (err, data) {
        if (err) reject();
        let d = new Date();
        console.log("shouts " + tex + " delete PROMISE = " + data.length + "; date =" + d);
        resolve(data);
      });
  });
}
Array push and pop methods can be used for a sequence of promises. You can also push new promises when you need additional data. This is the code I will use in a React infinite loader to load a sequence of pages.
var promises = [Promise.resolve()];

function methodThatReturnsAPromise(page) {
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      console.log(`Resolve-${page}! ${new Date()} `);
      resolve();
    }, 1000);
  });
}

function pushPromise(page) {
  promises.push(promises.pop().then(function () {
    return methodThatReturnsAPromise(page)
  }));
}

pushPromise(1);
pushPromise(2);
pushPromise(3);
(function() {
  function sleep(ms) {
    return new Promise(function(resolve) {
      setTimeout(function() {
        return resolve();
      }, ms);
    });
  }

  function serial(arr, index, results) {
    if (index == arr.length) {
      return Promise.resolve(results);
    }
    return new Promise(function(resolve, reject) {
      if (!index) {
        index = 0;
        results = [];
      }
      return arr[index]()
        .then(function(d) {
          return resolve(d);
        })
        .catch(function(err) {
          return reject(err);
        });
    })
    .then(function(result) {
      console.log("here");
      results.push(result);
      return serial(arr, index + 1, results);
    })
    .catch(function(err) {
      throw err;
    });
  }

  const a = [5000, 5000, 5000];
  serial(a.map(x => () => sleep(x)));
})();
Here the key is how you call the sleep function. You need to pass an array of functions that themselves return promises, instead of an array of promises.
