for (let i = 0; i < 10; i++) {
const promise = new Promise((resolve, reject) => {
const timeout = Math.random() * 1000;
setTimeout(() => {
console.log(i);
}, timeout);
});
// TODO: Chain this promise to the previous one (maybe without having it running?)
}
The above will give output like the following, in a random order:
6
9
4
8
5
1
7
2
3
0
The task is simple: Make sure each promise runs only after the other one (.then()).
For some reason, I couldn't find a way to do it.
I tried generator functions (yield), tried simple functions that return a promise, but at the end of the day it always comes down to the same problem: The loop is synchronous.
With async I'd simply use async.series().
How do you solve it?
As you already hinted in your question, your code creates all promises synchronously. Instead they should only be created at the time the preceding one resolves.
Secondly, each promise that is created with new Promise needs to be resolved with a call to resolve (or reject). This should be done when the timer expires. That will trigger any then callback you would have on that promise. And such a then callback (or await) is a necessity in order to implement the chain.
With those ingredients, there are several ways to perform this asynchronous chaining:
With a for loop that starts with an immediately resolving promise
With Array#reduce that starts with an immediately resolving promise
With a function that passes itself as resolution callback
With ECMAScript2017's async / await syntax
With ECMAScript2020's for await...of syntax
But let me first introduce a very useful, generic function.
Promisifying setTimeout
Using setTimeout is fine, but we actually need a promise that resolves when the timer expires. So let's create such a function; this is called promisifying a function, and in this case we will promisify setTimeout. It will improve the readability of the code, and it can be used for all of the above options:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
See a snippet and comments for each of the options below.
1. With for
You can use a for loop, but you must make sure it doesn't create all promises synchronously. Instead you create an initial immediately resolving promise, and then chain new promises as the previous ones resolve:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
for (let i = 0, p = Promise.resolve(); i < 10; i++) {
p = p.then(() => delay(Math.random() * 1000))
.then(() => console.log(i));
}
So this code creates one long chain of then calls. The variable p only serves to keep track of that chain and to allow the next iteration of the loop to continue on the same chain. The callbacks will start executing after the synchronous loop has completed.
It is important that the then-callback returns the promise that delay() creates: this will ensure the asynchronous chaining.
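To illustrate that last point, here is a minimal sketch contrasting a broken variant (where the delay() promise is not returned) with the correctly chained one:

const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

let p = Promise.resolve();
for (let i = 0; i < 3; i++) {
  // Broken: the delay() promise is not returned, so the chain does not wait for it
  // p = p.then(() => { delay(500).then(() => console.log('not chained', i)); });

  // Correct: returning the delay() promise makes the next then-callback wait for it
  p = p.then(() => delay(500)).then(() => console.log('chained', i));
}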
2. With reduce
This is just a more functional approach to the previous strategy. You create an array with the same length as the chain you want to execute, and start out with an immediately resolving promise:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
[...Array(10)].reduce( (p, _, i) =>
p.then(() => delay(Math.random() * 1000))
.then(() => console.log(i))
, Promise.resolve() );
This is probably more useful when you actually have an array with data to be used in the promises.
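For example, here is a minimal sketch of the same reduce pattern driven by actual data (the words array is made up for illustration):

const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

const words = ['alpha', 'beta', 'gamma']; // made-up data
words.reduce((p, word, i) =>
  p.then(() => delay(Math.random() * 1000))
   .then(() => console.log(i, word))
, Promise.resolve());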
3. With a function passing itself as resolution-callback
Here we create a function and call it immediately. It creates the first promise synchronously. When it resolves, the function is called again:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
(function loop(i) {
if (i >= 10) return; // all done
delay(Math.random() * 1000).then(() => {
console.log(i);
loop(i+1);
});
})(0);
This creates a function named loop, and at the very end of the code you can see it gets called immediately with argument 0. That argument is the counter, received as the i parameter. The function will create a new promise if that counter is still below 10; otherwise the chaining stops.
When delay() resolves, it will trigger the then callback which will call the function again.
4. With async/await
Modern JS engines support this syntax:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
(async function loop() {
for (let i = 0; i < 10; i++) {
await delay(Math.random() * 1000);
console.log(i);
}
})();
It may look strange, as it seems like the promises are created synchronously, but in reality the async function returns when it executes the first await. Every time an awaited promise resolves, the function's execution context is restored and execution proceeds after the await, until it encounters the next one, and so it continues until the loop finishes.
5. With for await...of
With ECMAScript 2020, for await...of found its way into modern JavaScript engines. Although it does not really reduce the code in this case, it allows you to isolate the definition of the random-interval chain from the actual iteration over it:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
async function * randomDelays(count, max) {
for (let i = 0; i < count; i++) yield delay(Math.random() * max).then(() => i);
}
(async function loop() {
for await (let i of randomDelays(10, 1000)) console.log(i);
})();
You can use async/await for this. I would explain more, but there's really nothing to it: it's just a regular for loop, with the await keyword added before the construction of your Promise.
What I like about this is that your Promise can resolve to a normal value instead of having a side effect like your code (or other answers here) does. This gives you powers like in The Legend of Zelda: A Link to the Past, where you can affect things in both the Light World and the Dark World – i.e., you can easily work with data before/after the Promised data is available, without having to resort to deeply nested functions, other unwieldy control structures, or stupid IIFEs.
// where DarkWorld is in the scary, unknown future
// where LightWorld is the world we saved from Ganondorf
LightWorld ... await DarkWorld
So here's what that will look like ...
async function someProcedure (n) {
for (let i = 0; i < n; i++) {
const t = Math.random() * 1000
const x = await new Promise(r => setTimeout(r, t, i))
console.log (i, x)
}
return 'done'
}
someProcedure(10)
.then(console.log)
.catch(console.error)
0 0
1 1
2 2
3 3
4 4
5 5
6 6
7 7
8 8
9 9
done
See how we don't have to deal with that bothersome .then call within our procedure? And the async keyword automatically ensures that a Promise is returned, so we can chain a .then call on the returned value. This sets us up for great success: run the sequence of n Promises, then do something important – like display a success/error message.
Based on the excellent answer by trincot, I wrote a reusable function that accepts a handler to run over each item in an array. The function itself returns a promise so you can wait until the loop has finished, and the handler function that you pass may also return a promise.
loop(items, handler) : Promise
It took me some time to get it right, but I believe the following code will be usable in a lot of promise-looping situations.
Copy-paste ready code:
// SEE https://stackoverflow.com/a/46295049/286685
const loop = (arr, fn, busy, err, i=0) => {
const body = (ok,er) => {
try {const r = fn(arr[i], i, arr); r && r.then ? r.then(ok).catch(er) : ok(r)}
catch(e) {er(e)}
}
const next = (ok,er) => () => loop(arr, fn, ok, er, ++i)
const run = (ok,er) => i < arr.length ? new Promise(body).then(next(ok,er)).catch(er) : ok()
return busy ? run(busy,err) : new Promise(run)
}
Usage
To use it, call it with the array to loop over as the first argument and the handler function as the second. Do not pass parameters for the third, fourth and fifth arguments, they are used internally.
const loop = (arr, fn, busy, err, i=0) => {
const body = (ok,er) => {
try {const r = fn(arr[i], i, arr); r && r.then ? r.then(ok).catch(er) : ok(r)}
catch(e) {er(e)}
}
const next = (ok,er) => () => loop(arr, fn, ok, er, ++i)
const run = (ok,er) => i < arr.length ? new Promise(body).then(next(ok,er)).catch(er) : ok()
return busy ? run(busy,err) : new Promise(run)
}
const items = ['one', 'two', 'three']
loop(items, item => {
console.info(item)
})
.then(() => console.info('Done!'))
Advanced use cases
Let's look at the handler function, nested loops and error handling.
handler(current, index, all)
The handler gets passed 3 arguments. The current item, the index of the current item and the complete array being looped over. If the handler function needs to do async work, it can return a promise and the loop function will wait for the promise to resolve before starting the next iteration. You can nest loop invocations and all works as expected.
const loop = (arr, fn, busy, err, i=0) => {
const body = (ok,er) => {
try {const r = fn(arr[i], i, arr); r && r.then ? r.then(ok).catch(er) : ok(r)}
catch(e) {er(e)}
}
const next = (ok,er) => () => loop(arr, fn, ok, er, ++i)
const run = (ok,er) => i < arr.length ? new Promise(body).then(next(ok,er)).catch(er) : ok()
return busy ? run(busy,err) : new Promise(run)
}
const tests = [
[],
['one', 'two'],
['A', 'B', 'C']
]
loop(tests, (test, idx, all) => new Promise((testNext, testFailed) => {
console.info('Performing test ' + idx)
return loop(test, (testCase) => {
console.info(testCase)
})
.then(testNext)
.catch(testFailed)
}))
.then(() => console.info('All tests done'))
Error handling
Many promise-looping examples I looked at break down when an exception occurs. Getting this function to do the right thing was pretty tricky, but as far as I can tell it is working now. Make sure to add a catch handler to any inner loops and invoke the rejection function when it happens. E.g.:
const loop = (arr, fn, busy, err, i=0) => {
const body = (ok,er) => {
try {const r = fn(arr[i], i, arr); r && r.then ? r.then(ok).catch(er) : ok(r)}
catch(e) {er(e)}
}
const next = (ok,er) => () => loop(arr, fn, ok, er, ++i)
const run = (ok,er) => i < arr.length ? new Promise(body).then(next(ok,er)).catch(er) : ok()
return busy ? run(busy,err) : new Promise(run)
}
const tests = [
[],
['one', 'two'],
['A', 'B', 'C']
]
loop(tests, (test, idx, all) => new Promise((testNext, testFailed) => {
console.info('Performing test ' + idx)
loop(test, (testCase) => {
if (idx == 2) throw new Error()
console.info(testCase)
})
.then(testNext)
.catch(testFailed) // <--- DON'T FORGET!!
}))
.then(() => console.error('Oops, test should have failed'))
.catch(e => console.info('Successfully caught error: ', e))
.then(() => console.info('All tests done'))
UPDATE: NPM package
Since writing this answer, I turned the above code into an NPM package.
for-async
Install
npm install --save for-async
Import
var forAsync = require('for-async'); // Common JS, or
import forAsync from 'for-async';
Usage (async)
var arr = ['some', 'cool', 'array'];
forAsync(arr, function(item, idx){
return new Promise(function(resolve){
setTimeout(function(){
console.info(item, idx);
// Logs 3 lines: `some 0`, `cool 1`, `array 2`
resolve(); // <-- signals that this iteration is complete
}, 25); // delay 25 ms to make async
})
})
See the package readme for more details.
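For illustration, the same usage can be written with a handler that returns a promise, since (as described above for the loop function) the loop waits for any promise the handler returns before starting the next iteration. This is a sketch based on that description, not on the package readme:

var forAsync = require('for-async');

var delay = function(ms) {
  return new Promise(function(resolve) { setTimeout(resolve, ms); });
};

var arr = ['some', 'cool', 'array'];
forAsync(arr, function(item, idx) {
  // returning a promise makes the loop wait before the next iteration
  return delay(25).then(function() {
    console.info(item, idx); // logs `some 0`, `cool 1`, `array 2`
  });
});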
If you are limited to ES6, the best option is Promise.all. Promise.all(array) returns a single promise that resolves to an array of results once all the promises in the array argument have fulfilled.
Suppose you want to update many student records in the database; the following code demonstrates the concept of Promise.all in such a case:
let promises = students.map((student, index) => {
//where students is a db object
student.rollNo = index + 1;
student.city = 'City Name';
//Update whatever information on student you want
return student.save();
});
Promise.all(promises).then(() => {
//All the save queries will be executed when .then is executed
//You can do further operations here after as all update operations are completed now
});
Map is just one example of a loop method; you can also use a for, for...in, or forEach loop. So the concept is pretty simple: start the loop in which you want to do bulk async operations, and push each such async operation (promise) into an array declared outside the scope of that loop. After the loop completes, execute Promise.all with the prepared array of such queries/promises as its argument.
The basic concept is that the JavaScript loop is synchronous whereas the database call is async, and the push we use in the loop is also synchronous. So the problem of asynchronous behavior doesn't occur inside the loop.
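As a sketch of the same pattern with a plain for loop instead of map (using the same students collection and save() method as above):

const promises = [];
for (let i = 0; i < students.length; i++) {
  students[i].rollNo = i + 1;
  students[i].city = 'City Name';
  promises.push(students[i].save()); // push the pending promise; don't wait here
}

Promise.all(promises).then(() => {
  // all save operations have completed at this point
});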
Here's my 2 cents' worth:
reusable function forpromise()
emulates a classic for loop
allows for early exit based on internal logic, returning a value
can collect an array of results passed into resolve/next/collect
defaults to start=0,increment=1
exceptions thrown inside loop are caught and passed to .catch()
function forpromise(lo, hi, st, res, fn) {
if (typeof res === 'function') {
fn = res;
res = undefined;
}
if (typeof hi === 'function') {
fn = hi;
hi = lo;
lo = 0;
st = 1;
}
if (typeof st === 'function') {
fn = st;
st = 1;
}
return new Promise(function(resolve, reject) {
(function loop(i) {
if (i >= hi) return resolve(res);
const promise = new Promise(function(nxt, brk) {
try {
fn(i, nxt, brk);
} catch (ouch) {
return reject(ouch);
}
});
promise.catch(function(brkres) {
hi = lo - st;
resolve(brkres)
}).then(function(el) {
if (res) res.push(el);
loop(i + st)
});
})(lo);
});
}
//no result returned, just loop from 0 thru 9
forpromise(0, 10, function(i, next) {
console.log("iterating:", i);
next();
}).then(function() {
console.log("test result 1", arguments);
//shortform:no result returned, just loop from 0 thru 4
forpromise(5, function(i, next) {
console.log("counting:", i);
next();
}).then(function() {
console.log("test result 2", arguments);
//collect result array, even numbers only
forpromise(0, 10, 2, [], function(i, collect) {
console.log("adding item:", i);
collect("result-" + i);
}).then(function() {
console.log("test result 3", arguments);
//collect results, even numbers, break loop early with different result
forpromise(0, 10, 2, [], function(i, collect, break_) {
console.log("adding item:", i);
if (i === 8) return break_("ending early");
collect("result-" + i);
}).then(function() {
console.log("test result 4", arguments);
// collect results, but break loop on exception thrown, which we catch
forpromise(0, 10, 2, [], function(i, collect, break_) {
console.log("adding item:", i);
if (i === 4) throw new Error("failure inside loop");
collect("result-" + i);
}).then(function() {
console.log("test result 5", arguments);
}).catch(function(err) {
console.log("caught in test 5:[Error ", err.message, "]");
});
});
});
});
});
In modern JavaScript (for await...of was introduced in ES2018), you can use for await:
(async () => {
for await (const num of asyncIterable) {
console.log(num);
}
// My action here
})();
For more information, see the documentation on for await...of.
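Since asyncIterable is not defined above, here is a minimal sketch of one (a hypothetical async generator that yields numbers after a random delay), just to make the example runnable:

const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

// hypothetical async iterable: yields 0..9, each after a random delay
async function* makeAsyncIterable(count) {
  for (let i = 0; i < count; i++) {
    await delay(Math.random() * 1000);
    yield i;
  }
}

(async () => {
  for await (const num of makeAsyncIterable(10)) {
    console.log(num);
  }
  // My action here
})();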
I found the previous answers confusing, so I wrote the following, inspired by them. I think its logic is more obvious: I call the function recursively to replace the original for loop:
async function pointToCountry(world, data) { // Data is for loop array
if (data.length > 0) { // For condition
const da = data.shift(); // Take the current item and remove it from the array
// Some business logic
msg = da.info
pointofView(world, da);
// Await the current task
await new Promise(r => setTimeout(_ => {
r() // Resolve and finish the current task
}, 5000))
// Call itself and enter the next loop
pointToCountry(world, data)
} else { // Business logic after all tasks
pointofView(world, { longitude: 0, latitude: 0 });
world.controls().autoRotate = true;
}
}
// This is my main function - calculate all projects by city
const projectCity = async (req, res, next) => {
try {
let record = [];
let cityList = await Cityodel.find({active:true});
for (let j = 0; j < cityList.length; j++) {
let arr = [];
let projectList = await getProduct(cityList[j]._id)
arr.push({
_id:cityList[j]._id,
name:cityList[j].name,
projectList:projectList
})
record.push(arr);
}
return res.status(200).send({
status: CONSTANT.REQUESTED_CODES.SUCCESS,
result: record });
} catch (error) {
return res.status(400).json(UTILS.errorHandler(error));
}
};
async function getProduct(city){
let projectList = await ProjectModel.find({city:city});
return projectList;
}
I've created a snippet in Angular that loops a promise function indefinitely. You can start it, stop it, or restart it.
You basically need to recursively call the same method and await its current process, like so:
async autoloop(): Promise<void> {
if(this.running){
await this.runMe();
await this.autoloop();
}
return Promise.resolve();
}
JavaScript:
import { Component } from '@angular/core';
@Component({
selector: 'my-app',
templateUrl: './app.component.html',
styleUrls: ['./app.component.css'],
})
export class AppComponent {
messages: string[] = [];
counter = 1;
running = false;
constructor() {
this.start();
}
onClick(): void {
this.running = !this.running;
if(this.running){
this.start();
}
else{
this.stop();
}
}
async onRestartClick(): Promise<void>{
await this.stop();
this.messages = [];
this.counter = 1;
this.start();
}
start(): void{
this.running = true;
this.autoloop();
}
async stop(): Promise<void>{
this.running = false;
await this.delay(1000);
}
async autoloop(): Promise<void> {
if(this.running){
await this.runMe();
await this.autoloop();
}
return Promise.resolve();
}
async runMe(): Promise<void> {
await this.delay(1000);
if(this.running){
this.messages.push(`Message ${this.counter++}`);
}
return Promise.resolve();
}
async delay(ms: number) {
await new Promise<void>((resolve) => setTimeout(() => resolve(), ms));
}
}
Html:
<h1>Endless looping a promise every 1 second</h1>
<button (click)="onClick()">Start / stop</button>
<button (click)="onRestartClick()">Restart</button>
<p *ngFor="let message of messages">
{{message}}
</p>
I want to run some asynchronous tasks in a loop, but they should execute in sequence (one after another). It should be vanilla JS, without any libraries.
var doSome = function(i) {
return Promise.resolve(setTimeout(() => {
console.log('done... ' + i)
}, 1000 * (i%3)));
}
var looper = function() {
var p = Promise.resolve();
[1,2,3].forEach((n) => {
p = p.then(() => doSome(n))
})
return p;
}
looper();
Current output:
calling for ...1
calling for ...2
calling for ...3
Promise {<resolved>: 8260}
done... 3
done... 1
done... 2
Expected output:
calling for ...1
calling for ...2
calling for ...3
Promise {<resolved>: 8260}
done... 1
done... 2
done... 3
Note: Kindly answer only if you have tried it and it works as expected.
So, from your comment below, I think your own example code doesn't quite match your description. I think what you want for your example is something closer to the snippet below:
var doSome = function(i) {
return new Promise((resolve, reject) => {
setTimeout(() => resolve(`Completing ${i}`), 1000*(i%3))
});
}
var looper = function() {
[1,2,3].forEach((n) => {
doSome(n).then(console.log);
});
}
looper();
Here, the array [1, 2, 3] is iterated over, and an asynchronous process is generated for each one. As each of those async processes complete, we .then on them and console log their resolved result.
So, now the question comes how to best queue the results? Below, I stored them into an array, then leveraged async/await in order to pause execution on the results until they complete in order.
// This is probably what you want
var doSome = function(i) {
return new Promise((resolve, reject) => {
setTimeout(() => resolve(`Completing ${i}`), 1000*(i%3))
});
}
var looper = async function() {
const nums = [1,2,3];
const promises = []
nums.forEach((n) => {
console.log(`Queueing ${n}`);
promises.push(doSome(n));
});
for (let promise of promises) {
const result = await promise;
console.log(result);
}
}
looper();
Now, we could have eliminated a loop and only started each one after the previous one completed:
// Don't use this-- it is less efficient
var doSome = function(i) {
return new Promise((resolve, reject) => {
setTimeout(() => resolve(`Completing ${i}`), 1000*(i%3))
});
}
var looper = async function() {
const nums = [1,2,3];
const promises = [];
for (let n of nums) {
console.log(`Queueing ${n}`);
const result = await doSome(n);
console.log(result);
};
}
looper();
But, as you can see in the log, this approach won't queue up the next async process until the previous one has completed. This is undesirable and doesn't match your use case. What we get from the two-looped approach preceding this one is that all async processes are immediately executed, but then we order/queue the results so they respect our predefined order, not the order in which they resolve.
UPDATE
Regarding Promise.all, async/await and the intended behavior of the queueing:
So, if you want to avoid using async/await, I think you could write some sort of utility:
var doSome = function(i) {
return new Promise((resolve, reject) => {
setTimeout(() => resolve(`Completing ${i}`), 1000*(i%3))
});
}
function handlePromiseQueue(queue) {
let promise = queue.shift();
promise.then((data) => {
console.log(data)
if (queue.length > 0) {
handlePromiseQueue(queue);
}
})
}
var looper = function() {
const nums = [1,2,3];
const promises = []
nums.forEach((n) => {
console.log(`Queueing ${n}`);
promises.push(doSome(n));
});
handlePromiseQueue(promises);
}
looper();
HOWEVER, let me be clear: if user Bergi's assertion is correct, and it is not important that each async promise be acted upon as soon as it resolves, only that none of them be acted upon until they all have come back, then this can 100% be simplified with Promise.all:
// This is probably what you want
var doSome = function(i) {
return new Promise((resolve, reject) => {
setTimeout(() => resolve(`Completing ${i}`), 1000*(i%3))
});
}
function handlePromiseQueue(queue) {
let promise = queue.shift();
promise.then((data) => {
console.log(data)
if (queue.length > 0) {
handlePromiseQueue(queue);
}
})
}
var looper = function() {
const nums = [1,2,3];
const promises = []
nums.forEach((n) => {
console.log(`Queueing ${n}`);
promises.push(doSome(n));
});
Promise.all(promises).then(() => handlePromiseQueue(promises));
}
looper();
Finally, as Bergi also pointed out, I am playing fast and loose here by not setting up any catch on these various promises-- I omitted them for brevity in examples, but for your purposes you will want to include proper handling for errors or bad requests.
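For example, here is a minimal sketch of adding error handling to the earlier looper (same doSome helper as above; the try/catch placement is just one option):

var doSome = function(i) {
  return new Promise((resolve, reject) => {
    setTimeout(() => resolve(`Completing ${i}`), 1000 * (i % 3));
  });
};

var looper = async function() {
  const nums = [1, 2, 3];
  const promises = nums.map((n) => doSome(n));
  try {
    for (let promise of promises) {
      const result = await promise;
      console.log(result);
    }
  } catch (err) {
    // one of the queued promises rejected
    console.error('looper failed:', err);
  }
};

looper();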
My need is simple. I would like to delay calls to sendEmail by 100 milliseconds. The email service provider permits at most sending 10 emails per second.
Note, however, that although .map itself is synchronous, each callback immediately creates and returns a Promise.
I have tried setTimeout to no avail, such as setTimeout(() => resolve(x), 100) and setTimeout(() => {return new Promise....}, 100).
Thoughts?
const promises = userEmailArray.map((userEmail) => {
return new Promise((resolve, reject) => {
....
mailer.sendEmail(userEmail);
return resolve();
});
});
});
...
Promise.all(promises).then(() => resolve()).catch(error => reject(error));
There are a bunch of different ways to approach this. I'd probably just use a recursively chained promise myself; then you can time the delay more precisely from the finish of the previous call, and you can use promises for calling it and for propagating errors.
I've assumed here that your mailer.sendEmail() follows the node.js callback calling convention so we need to "promisify" it. If it already returns a promise, then you can use it directly instead of the sendEmail() function that I created.
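For reference, here is a minimal sketch of what that looks like. The callback-style signature of mailer.sendEmail below is an assumption, not your actual API:

const util = require('util');

// assumed callback style: mailer.sendEmail(userEmail, function(err, result) { ... })
let sendEmail = util.promisify(mailer.sendEmail);
// if sendEmail relies on `this` inside mailer, bind it first:
// let sendEmail = util.promisify(mailer.sendEmail.bind(mailer));

// now sendEmail(userEmail) returns a promise instead of taking a callback
sendEmail('someone@example.com')
  .then(result => console.log('sent', result))
  .catch(err => console.error('send failed', err));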
Anyways, here are a bunch of different approaches.
Recall Same Function After Delay (delayed recursion)
// make promisified version - assumes it follows node.js async calling convention
let sendEmail = util.promisify(mailer.sendEmail);
function delay(t, data) {
return new Promise(resolve => {
setTimeout(resolve.bind(null, data), t);
});
}
function sendAll(array) {
let index = 0;
function next() {
if (index < array.length) {
return sendEmail(array[index++]).then(function() {
return delay(100).then(next);
});
}
}
return Promise.resolve().then(next);
}
// usage
sendAll(userEmailArray).then(() => {
// all done here
}).catch(err => {
// process error here
});
Use setInterval to Control Pace
You could also just use a setInterval to just launch a new request every 100ms until the array was empty:
// promisify
let sendEmail = util.promisify(mailer.sendEmail);
function sendAll(array) {
return new Promise((resolve, reject) => {
let index = 0;
let timer = setInterval(function() {
if (index < array.length) {
sendEmail(array[index++]).catch(() => {
clearInterval(timer);
reject();
});
} else {
clearInterval(timer);
resolve();
}
}, 100);
})
}
Use await to Pause Loop
And, you could use await (ES2017) to "pause" the loop:
// make promisified version - assumes it follows node.js async calling convention
let sendEmail = util.promisify(mailer.sendEmail);
function delay(t, data) {
return new Promise(resolve => {
setTimeout(resolve.bind(null, data), t);
});
}
// assume this is inside an async function
for (let userEmail of userEmailArray) {
await sendEmail(userEmail).then(delay.bind(null, 100));
}
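To make that fragment self-contained, here is a sketch of it wrapped in an async function (using the same sendEmail, delay and userEmailArray names as above):

async function sendAllSequentially(array) {
  for (let userEmail of array) {
    await sendEmail(userEmail).then(delay.bind(null, 100));
  }
}

sendAllSequentially(userEmailArray).then(() => {
  // all done here
}).catch(err => {
  // process error here
});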
Use .reduce() with Promises to Sequence Access to Array
If you aren't trying to accumulate an array of results, but just want to sequence, then a canonical way to do that is using a promise chain driven by .reduce():
// make promisified version - assumes it follows node.js async calling convention
let sendEmail = util.promisify(mailer.sendEmail);
function delay(t, data) {
return new Promise(resolve => {
setTimeout(resolve.bind(null, data), t);
});
}
userEmailArray.reduce(function(p, userEmail) {
return p.then(() => {
return sendEmail(userEmail).then(delay.bind(null, 100));
});
}, Promise.resolve()).then(() => {
// all done here
}).catch(err => {
// process error here
});
Using Bluebird Features for both Concurrency Control and Delay
The Bluebird promise library has a couple useful features built in that help here:
const Promise = require('bluebird');
// make promisified version - assumes it follows node.js async calling convention
let sendEmail = Promise.promisify(mailer.sendEmail);
Promise.map(userEmailArray, userEmail => {
return sendEmail(userEmail).delay(100);
}, {concurrency: 1}).then(() => {
// all done here
}).catch(err => {
// process error here
});
Note the use of both the {concurrency: 1} feature to control how many requests are in-flight at the same time and the built-in .delay(100) promise method.
Create Sequence Helper That Can Be Used Generally
And, it might be useful to just create a little helper function for sequencing an array with a delay between iterations:
function delay(t, data) {
return new Promise(resolve => {
setTimeout(resolve, t, data);
});
}
async function runSequence(array, delayT, fn) {
let results = [];
for (let item of array) {
let data = await fn(item);
results.push(data);
await delay(delayT);
}
return results;
}
Then, you can just use that helper whenever you need it:
// make promisified version - assumes it follows node.js async calling convention
let sendEmail = util.promisify(mailer.sendEmail);
runSequence(userEmailArray, 100, sendEmail).then(() => {
// all done here
}).catch(err => {
// process error here
});
You already have a 'queue' of sorts: a list of addresses to send to. All you really need to do now is pause before sending each one. However, you don't want to pause for the same length of time prior to each send. That'll result in a single pause of n ms, then a whole raft of messages being sent within a few ms of each other. Try running this and you'll see what I mean:
const userEmailArray = [ 'one', 'two', 'three' ]
const promises = userEmailArray.map(userEmail =>
new Promise(resolve =>
setTimeout(() => {
console.log(userEmail)
resolve()
}, 1000)
)
)
Promise.all(promises).then(() => console.log('done'))
Hopefully you saw a pause of about a second, then a bunch of messages appearing at once! Not really what we're after.
Ideally, you'd delegate this to a worker process in the background so as not to block. However, assuming you're not going to do that for now, one trick is to have each call delayed by a different amount of time. (Note that this does not solve the problem of multiple users trying to process large lists at once, which presumably is going to trigger the same API restrictions).
const userEmailArray = [ 'one', 'two', 'three' ]
const promises = userEmailArray.map((userEmail, i) =>
new Promise(resolve =>
setTimeout(() => {
console.log(userEmail)
resolve()
}, 1000 * (i + 1)) // stagger: 1s, 2s, 3s, ... so the sends go out in order
)
)
Promise.all(promises).then(() => console.log('done'))
Here, you should see each array element be processed in a roughly staggered fashion. Again, this is not a scalable solution, but hopefully it demonstrates a bit about timing and promises.
It's simpler to just do an async iteration of the array.
function send(i, arr, cb) {
if (i >= arr.length) return cb();
mailer.sendEmail(arr[i]);
setTimeout(send, 100, i+1, arr, cb);
}
send(0, userEmailArray, function() { console.log("all done") });
Inside a promise, I need to call and process an indeterminate number of async API responses after individually calling them, either inside another promise or after said promise, but before another, so the order of execution is respected.
var promiseA = function() {
return new Promise(function(resolve, reject) {
// 1. Establish objects needed from one API endpoint
// 2. Call API endpoint for each object and parse
// 3. Only then continue to next promise
});
}
var finalPromise = function() {
return new Promise(function(resolve, reject) {
//
});
}
promiseA()
.then(finalPromise)
So inside promiseA, I find out how many objects I'll need to poll individually from an API. Each request is of course asynchronous. I need to make these calls and process the response before the final promise is called.
I am struggling to determine a pattern for this with promises, where I can dynamically create these promises and only allow the final promise to execute after the indeterminate number of asynchronous calls have executed and been processed. I've worked with other languages where this is possible, but I'm struggling to see it here with Promises.
Any help is appreciated.
I have changed the answer to incorporate the comments below. Since you mentioned ES6 promises, I shall stick to that. There are two basic types of callbacks that we might care about.
DOM load or other one time event callbacks (window.onload and so on)
Async method callback (AJAX call, setTimout and so on)
1. DOM load or other one-time event:
var p = new Promise(function(res, rej) {
  window.onload = res;
});
2. Plain callbacks: these are callbacks that don't conform to a convention, e.g. setTimeout:
var p = new Promise(function(res, rej){
setTimeout(function() {
//your business/view logic
success? res():rej(); //if successful resolve else reject
}, 2000);
});
In each of the above cases the promise (var p) can be wrapped so that it is returned by a function.
var myAsyncMethod = function () {
var p = new ... // as mentioned in 1 or 2
return p;
}
Then the usage:
myAsyncMethod()
.then(function(){/* success-handler */})
.catch(function(){/* failure-handler */});
Specific to your question you may have many such methods:
function baseAJAXCall(url) {
  return new Promise(function(resolve, reject) {
    $.get(url, function(err, data) {
      if (err) {
        reject(err);
      } else {
        resolve(data);
      }
    });
  });
}
function callAPIEndpoint(url) {
return baseAJAXCall(url);
}
function finalPromiseHandler () {
//your final business/view logic
}
//USAGE
callAPIEndpoint('/my-first-call')
.then(function(data){
var promiseArray = data.map(function(item){
return baseAJAXCall(item.url);
});
return Promise.all(promiseArray);
})
.then(finalPromiseHandler)
.catch(function(){
console.log('.error-message.');
});
Ref:
How do I convert an existing callback API to promises?.
http://www.datchley.name/es6-promises/
Links from comments below.
---OLD ANSWER: PLEASE OVERLOOK---
I am familiar with this library: https://github.com/kriskowal/q. You can do this using the q.all and q.allSettled constructs. Maybe that is what you are looking for.
Normally, the pattern is to create a function that returns a promise.
function someAsyncFuncName1(url) {
var def = q.defer();
//async function
$.get(url, function(err, data){ //suppose
if(err){
def.reject();
}
else {
def.resolve(data); //pass the data to the .then() handler.
}
});
return def.promise;
}
function someAsyncFuncName2() {
var def = q.defer();
//async function
setTimeout(function(){ //suppose
//do something
if(good) {
def.resolve();
} else {
def.reject();
}
}, 1000); //arbitrary timeout of 1 second
return def.promise;
}
USAGE:
q.all([someAsyncFuncName1('/api-1'), someAsyncFuncName2()])
.then(function() {
//final handler
});
On a similar line of thought, one can use q.allSettled() if you want to wait for all promises to settle, even when some of them reject.
Hope this helps.
---EOF OLD ANSWER---
First of all, if the async functions used in promiseA don't return promises, you need to promisify them. You can do that with the Promise constructor, but it's much better to use a library such as bluebird, with its promisify methods.
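For illustration, here is a minimal sketch of both options, assuming a hypothetical callback-style function getUserAsyncCb(userId, callback) that follows the node (err, result) convention:

// Option 1: manually, with the Promise constructor
function getUserAsync(userId) {
  return new Promise((resolve, reject) => {
    getUserAsyncCb(userId, (err, user) => {
      if (err) reject(err);
      else resolve(user);
    });
  });
}

// Option 2: with bluebird (this would replace the manual version above)
// const Promise = require('bluebird');
// const getUserAsync = Promise.promisify(getUserAsyncCb);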
Let's imagine that we have two functions, getUserIdsAsync and getUserAsync. The first returns a list of user ids; getUserAsync returns the user data for a given userId. You need to get a list of users by their ids. The code of promiseA could look like this:
var promiseA = function() {
return getUserIdsAsync()
.then(userIds => {
let ops = userIds.map(uid => getUserAsync(uid));
return Promise.all(ops);
});
}
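A usage sketch:

promiseA()
  .then(users => console.log('loaded', users.length, 'users'))
  .catch(err => console.error(err));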
The following snippet shows a solution without using any external library like bluebird. It follows the code snippet in your question (which seems more complicated than needed).
You have to collect all API promises in an array. Then you can call Promise.all() to get a promise that settles once all of the API promises have settled. Then you can do the final work, like parsing the result of each promise, and continue afterwards.
function getRandomInt(min, max) {
return Math.floor(Math.random() * (max - min + 1)) + min;
}
var apiEndpoint = function (name) {
return new Promise( (resolve, reject) => {
setTimeout(() => resolve('API ' + name + ' job done'), 1000);
});
}
var promiseA = function() {
return new Promise( (resolve, reject) => {
const promisses = [];
for (var i=1; i < getRandomInt(3,6); i++) {
// 1. Establish objects needed from one API endpoint
promisses.push(apiEndpoint('This is number ' + i));
}
Promise.all(promisses).then( results => {
// do final stuff
for (const s of results) {
// 2. Call API endpoint for each object and parse
console.log(s);
}
// continue ...
// 3. Only then continue to next promise
resolve('now it is finished');
}).catch( err => reject(err) );
});
}
var finalPromise = function() {
return new Promise( (resolve, reject) => {
console.log('finalPromise');
resolve();
});
}
promiseA()
.then( () => finalPromise())
.catch(err => console.log(err) );
Please keep in mind that this solution is not easy to read. Using external libraries or reducing promises can improve readability. Maybe you should take a look at the async/await pattern to get a much more readable solution.
Here is a solution with async/await:
function getRandomInt(min, max) {
return Math.floor(Math.random() * (max - min + 1)) + min;
}
const apiEndpoint = function (name) {
return new Promise( (resolve, reject) => {
setTimeout(() => resolve('API ' + name + ' job done'), 1000);
});
}
async function promiseParallel () {
const promisses = [];
for (let i = 1; i < getRandomInt(3,6); i++) {
promisses.push(apiEndpoint('This is number ' + i));
}
for (const p of promisses) {
const x = await p;
console.log(x);
}
return ('everything is done');
}
promiseParallel().then( result => {
console.log(result);
}).catch( err => console.log(err) );
If you want to run the promises sequentially, you can use this instead:
async function promiseSequ () {
for (let i = 1; i < getRandomInt(3,6); i++) {
const x = await apiEndpoint('This is number ' + i);
console.log(x);
}
return ('everything is done');
}
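Usage would mirror the parallel version:

promiseSequ().then(result => {
  console.log(result);
}).catch(err => console.log(err));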