As the title suggests. How do I do this?
I want to call whenAllDone() after the forEach-loop has gone through each element and done some asynchronous processing.
[1, 2, 3].forEach(
function(item, index, array, done) {
asyncFunction(item, function itemDone() {
console.log(item + " done");
done();
});
}, function allDone() {
console.log("All done");
whenAllDone();
}
);
Is it possible to get it to work like this, where the second argument to forEach is a callback function that runs once it has gone through all iterations?
Expected output:
3 done
1 done
2 done
All done!
Array.forEach does not provide this nicety (oh, if only it did), but there are several ways to accomplish what you want:
Using a simple counter
function callback () { console.log('all done'); }
var itemsProcessed = 0;
[1, 2, 3].forEach((item, index, array) => {
asyncFunction(item, () => {
itemsProcessed++;
if(itemsProcessed === array.length) {
callback();
}
});
});
(Thanks to #vanuan and others.) This approach guarantees that all items are processed before the "done" callback is invoked. You need to use a counter that is updated inside the callback. Relying on the value of the index parameter does not provide the same guarantee, because the order in which the asynchronous operations return is not guaranteed.
Using ES6 Promises
(a promise library can be used for older browsers):
Process all requests guaranteeing synchronous execution (e.g. 1 then 2 then 3)
function asyncFunction (item, cb) {
setTimeout(() => {
console.log('done with', item);
cb();
}, 100);
}
let requests = [1, 2, 3].reduce((promiseChain, item) => {
return promiseChain.then(() => new Promise((resolve) => {
asyncFunction(item, resolve);
}));
}, Promise.resolve());
requests.then(() => console.log('done'))
Process all async requests without "synchronous" execution (2 may finish faster than 1)
let requests = [1,2,3].map((item) => {
return new Promise((resolve) => {
asyncFunction(item, resolve);
});
})
Promise.all(requests).then(() => console.log('done'));
Using an async library
There are other asynchronous libraries, async being the most popular, that provide mechanisms to express what you want.
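For example, a rough sketch using async.each with the question's asyncFunction (install the async package first) might look like this:
const async = require('async');

// async.each starts the iteratee for every item (in parallel) and calls the
// final callback once every item has invoked its own callback, or on the first error.
async.each([1, 2, 3], function (item, done) {
  asyncFunction(item, function () { // asyncFunction as defined in the question
    console.log(item + ' done');
    done();                         // signal that this item is finished
  });
}, function (err) {
  if (err) return console.error(err);
  console.log('All done');
});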
Edit
The body of the question has been edited to remove the previously synchronous example code, so I've updated my answer to clarify.
The original example used synchronous-like code to model asynchronous behaviour, so the following applied:
array.forEach is synchronous and so is res.write, so you can simply put your callback after your call to forEach:
posts.forEach(function(v, i) {
res.write(v + ". index " + i);
});
res.end();
If you encounter asynchronous functions and want to make sure they finish their task before the rest of the code executes, you can always use the callback capability.
For example:
var ctr = 0;
posts.forEach(function(element, index, array){
asynchronous(function(data){
ctr++;
if (ctr === array.length) {
functionAfterForEach();
}
})
});
Note: functionAfterForEach is the function to be executed after the forEach tasks are finished; asynchronous is the asynchronous function executed inside forEach.
Hope this fixes your problem. I usually use this when I need to execute forEach with asynchronous tasks inside it.
foo = [a,b,c,d];
waiting = foo.length;
foo.forEach(function(entry){
doAsynchronousFunction(entry,finish) //call finish after each entry
});
function finish(){
waiting--;
if (waiting==0) {
//do your Job intended to be done after forEach is completed
}
}
with
function doAsynchronousFunction(entry,callback){
//asynchronousjob with entry
callback();
}
It's odd how many incorrect answers have been given for the asynchronous case!
It can be simply shown that checking index does not provide expected behavior:
// INCORRECT
var list = [4000, 2000];
list.forEach(function(l, index) {
console.log(l + ' started ...');
setTimeout(function() {
console.log(index + ': ' + l);
}, l);
});
output:
4000 started
2000 started
1: 2000
0: 4000
If we checked for index === array.length - 1, the callback would be called as soon as the last-indexed item completes, while the first element is still pending!
To solve this problem without using external libraries such as async, I think your best bet is to save the length of the list and decrement it after each iteration. Since there's just one thread, we're sure there's no chance of a race condition.
var list = [4000, 2000];
var counter = list.length;
list.forEach(function(l, index) {
console.log(l + ' started ...');
setTimeout(function() {
console.log(index + ': ' + l);
counter -= 1;
if (counter === 0) {
// call your callback here
}
}, l);
});
With ES2018 you can use async iterators:
const asyncFunction = a => fetch(a);
const itemDone = a => console.log(a);
async function example() {
const arrayOfFetchPromises = [1, 2, 3].map(asyncFunction);
for await (const item of arrayOfFetchPromises) {
itemDone(item);
}
console.log('All done');
}
My solution without Promise (this ensures that every action is ended before the next one begins):
Array.prototype.forEachAsync = function (callback, end) {
var self = this;
function task(index) {
var x = self[index];
if (index >= self.length) {
end()
}
else {
callback(self[index], index, self, function () {
task(index + 1);
});
}
}
task(0);
};
var i = 0;
var myArray = Array.apply(null, Array(10)).map(function(item) { return i++; });
console.log(JSON.stringify(myArray));
myArray.forEachAsync(function(item, index, arr, next){
setTimeout(function(){
$(".toto").append("<div>item index " + item + " done</div>");
console.log("action " + item + " done");
next();
}, 300);
}, function(){
$(".toto").append("<div>ALL ACTIONS ARE DONE</div>");
console.log("ALL ACTIONS ARE DONE");
});
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
<div class="toto">
</div>
There are many solutions and ways to achieve this in this thread!
But if you need to do this with map and async/await, here it is:
// Execution starts
// (run this inside an async function, or in a module that supports top-level await)
console.log("start")
// map returns an array of promises;
// execution will not continue past Promise.all until they have all resolved.
await Promise.all(
[1, 2, 3].map( async (item) => {
await asyncFunction(item)
})
)
// Will only run after all the items have resolved the asynchronous function.
console.log("End")
The output will be something like this (it may vary based on the asynchronous function):
start
2
3
1
end
Note: if you use await inside a map callback, map will always return an array of promises.
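A small sketch of that note, reusing the asyncFunction from above:
// map with an async callback returns an array of promises, not values,
// so Promise.all is still needed to know when everything has finished.
const promises = [1, 2, 3].map(async (item) => await asyncFunction(item));
console.log(promises);                                // [ Promise, Promise, Promise ]
Promise.all(promises).then(() => console.log('End'));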
This is a solution for Node.js, which is asynchronous, using the async npm package:
(JavaScript) Synchronizing forEach Loop with callbacks inside
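For example, a rough sketch with async.eachSeries, which processes the items one at a time and fires a final callback when the last one finishes (someAsyncTask is a placeholder for your own callback-style async work):
const async = require('async');

async.eachSeries([1, 2, 3], function (item, done) {
  someAsyncTask(item, function (err) { // placeholder: your own async function
    console.log(item + ' done');
    done(err);                         // pass any error to stop the series early
  });
}, function (err) {
  if (err) return console.error(err);
  console.log('All done');
});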
My solution:
//Array forEachDone
Object.defineProperty(Array.prototype, "forEachDone", {
enumerable: false,
value: function(task, cb){
var counter = 0;
this.forEach(function(item, index, array){
task(item, index, array);
if(array.length === ++counter){
if(cb) cb();
}
});
}
});
//Object forEachDone
Object.defineProperty(Object.prototype, "forEachDone", {
enumerable: false,
value: function(task, cb){
var obj = this;
var counter = 0;
Object.keys(obj).forEach(function(key, index, array){
task(obj[key], key, obj);
if(array.length === ++counter){
if(cb) cb();
}
});
}
});
Example:
var arr = ['a', 'b', 'c'];
arr.forEachDone(function(item){
console.log(item);
}, function(){
console.log('done');
});
// out: a b c done
I tried an easy way to resolve it; sharing it with you:
let counter = 0;
arr.forEach(async (item, index) => {
await request.query(item, (err, recordset) => {
if (err) console.log(err);
// do something with recordset here
counter++;
if (counter === arr.length) {
sql.close();
callback();
}
});
});
request is a function of the mssql library in Node.js. You can replace it with any function or code you want.
Good luck!
var i=0;
const waitFor = (ms) =>
new Promise((r) => {
setTimeout(function () {
console.log('timeout completed: ', ms, ' : ', i);
i++;
if (i === data.length) {
console.log('Done');
}
r(); // resolve so the returned promise actually settles
}, ms);
});
var data=[1000, 200, 500];
data.forEach((num) => {
waitFor(num)
})
var counter = 0;
var listArray = [0, 1, 2, 3, 4];
function callBack() {
if (listArray.length === counter) {
console.log('All Done')
}
};
listArray.forEach(function(element){
console.log(element);
counter = counter + 1;
callBack();
});
//First Example
save() {
this.holdImageData.forEach((val,i) => {
this.UploadToMinio(val.file, val.index, res => {
if (res && i+1 == this.holdImageData.length) {
this.FinallySave();
}
})
})
}
UploadToMinio(files, index, callback) {
this._fileUploadService.uploadFile(files[0], files[0].name, 'rms').subscribe(data => {
if (data) {
console.log('data >>> ', data);
callback(true);
}
})
}
FinallySave() {}
//Second Example
var sum = 0; // It can be global variable
startFunction() {
this.sumFunction(2, 4, res => {
if (res == true && sum == 6) {
this.saveFunction();
} else {
//call to another function
}
})
}
sumFunction(num1, num2, callback) {
if ((num1 + num2) == 6) {
callback(true);
} else {
callback(false);
}
}
You shouldn't need a callback for iterating through a list. Just add the end() call after the loop.
posts.forEach(function(v, i){
res.write(v + ". Index " + i);
});
res.end();
How about using setInterval to check the completed iteration count? It brings a guarantee. I'm not sure whether it will overload the scope, but I use it and it seems to work:
var expected = Object.keys(actual_JSON).length;
_.forEach(actual_JSON, function (value, key) {
// run any action and push with each iteration
array.push(response.id)
});
var timer = setInterval(function(){
if(array.length >= expected) {
clearInterval(timer);
callback()
}
}, 100);
Related
I need to iterate over an IndexedDB table with Dexie and process each item with some function.
My code is something like this:
var db = new Dexie(dbName);
db.version(dbVersion).stores({
smthtbl: '++id, data, creationTime'});
db.smthtbl.each(function (item) {
return proccessItem(item);
}).then(function() {
console.log("done");
return "done"
});
function proccessItem(item) {
console.log(item.id + " : " + item.data);
};
And I need to set a timeout between iterations so that each next item is displayed through the timeout.
Something like this might work for you.
The iterateWithDelay function below is reusable for any other case where you want to iterate slowly through an array of items. I also made it pass the index and array in, like .map() does. (You could also add a feature to stop iteration if the callback returns a certain value like false, etc.)
Dry-coded, YMMV, etc. :)
const iterateWithDelay = (items, delay, callback) =>
new Promise(resolve => {
let index = 0;
const tick = () => {
const item = items[index];
if (index >= items.length) { // finished (checking !item would stop early on falsy values)
resolve();
return;
}
callback(item, index, items);
index++;
setTimeout(tick, delay);
};
tick();
});
var db = new Dexie(dbName);
db.version(dbVersion).stores({ smthtbl: "++id, data, creationTime" });
db.smthtbl
.toArray(items => iterateWithDelay(items, 100, proccessItem))
.then(() => {
console.log("done");
return "done";
});
function proccessItem(item) {
console.log(item.id + " : " + item.data);
}
Suppose you have an Array/Object that contains a list of values. Let's say those are MySQL commands, URLs or file paths. Now you want to iterate over all of them and execute some code for every entry.
for(let i = 0; i < urls.length; i++){
doSthWith(urls[i]);
}
No problem so far. But now let's say each function has a callback and needs the result of the last execution, e.g. you request something from one website and want to use the result of that request for one of your following requests.
for(let i = 0; i < urls.length; i++){
if(resultOfLastIteration.successful){ //or some other result besides the last one
doSthWith(urls[i]);
}
}
Now let's say the length of urls (or something similar) is over 100. That's why you'd normally use a loop, so you don't need to write the same function 100 times. That also means that Promises won't do the trick either (unless I'm unaware of a trick), because you have the same problem:
doSthWith(urls[0]).then(...
doSthWith(urls[1]).then(... //either put them inside each other
).then(...
doSthWith(urls[i]) //or in sequence
...
).catch(err){...}
Either way, I don't see a way to use a loop.
A way that I found, but which isn't really "good", is to use the package "wait.for" (https://www.npmjs.com/package/wait.for). But what makes this package tricky is that you have to launch a fiber each time you want to use wait.for:
//somewhere you use the function in a fiber Context
wait.for(loopedExecutionOfUrls, urls);
//function declaration
function loopedExecutionOfUrls(urls, cb){
//variables:
for(let i = 0; i < urls.length; i++){
if(someTempResultVar[i-1] === true){
someTempResultVar = wait.for(doSthWith,urls[i]);
} else if(...){...}
}
}
But I'm not sure if this approach is really good; besides, you always have to check whether you have wrapped the whole thing in a fiber, for each function that has loops containing functions with callbacks. So you have 3 levels: the launchFiber level, the wait.for(loopedFunction) level, and the wait.for of the callback function level. (I hope that was formulated understandably.)
So my question is: do you have a good approach where you can loop through callback functions and use their results whenever you like?
good = easy to use, read, performant, not recursive, ...
(I'm sorry if this question is stupid, but I really have problems getting along with asynchronous programming.)
If you want to wait for doSthWith to finish before doing the same with the next url, you have to chain your promises, and you can use Array.prototype.reduce to do that:
urls = ["aaa", "bbb", "ccc", "ddd"];
urls.reduce((lastPromise, url) => lastPromise.then((resultOfPreviousPromise) => {
console.log("Result of previous request: ", resultOfPreviousPromise); // <-- Result of the previous request that you can use for the next request
return doSthWith(url);
}), Promise.resolve());
function doSthWith(arg) { // Simulate the doSthWith promise
console.log("do something with: ", arg);
return new Promise(resolve => {
setTimeout(() => resolve("result of " + arg), 2000);
});
}
Use async, specifically async.each:
const async = require('async');
function doSthWith(url, cb) {
console.log('doing something with ' + url);
setTimeout(() => cb(), 2000);
}
const urls = ['https://stackoverflow.com/', 'https://phihag.de/'];
async.each(urls, doSthWith, (err) => {
if (err) {
// In practice, likely a callback or throw here
console.error(err);
} else {
console.log('done!');
}
});
Use async.map if you are interested in the result.
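A hedged sketch of async.map with the same doSthWith (the result strings are just illustrative; it reuses the async and doSthWith from above):
// async.map collects each iteratee's result and passes the array of results,
// in input order, to the final callback.
async.map(urls, function (url, done) {
  doSthWith(url, function () {
    done(null, 'processed ' + url);   // (error, result)
  });
}, function (err, results) {
  if (err) return console.error(err);
  console.log(results);
});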
When I need to loop over promises I use my handy dandy ploop function. Here is an example:
// Function that returns a promise
var searchForNumber = function(number) {
return new Promise(function(resolve, reject) {
setTimeout(function() {
var min = 1;
var max = 10;
var val = Math.floor(Math.random()*(max-min+1)+min);
console.log('Value is: ' + val.toString());
return resolve(val);
}, 1000);
});
};
// fn : function that should return a promise.
// args : the arguments that should be passed to fn.
// donefn : function that should check the result of the promise
// and return true to indicate whether ploop should stop or not.
var ploop = function(fn, args, donefn) {
return Promise.resolve(true)
.then(function() {
return(fn.apply(null, args));
})
.then(function(result) {
var finished = donefn(result);
if(finished === true){
return result;
} else {
return ploop(fn, args, donefn);
}
});
};
var searchFor = 4;
var donefn = function(result) {
return result === searchFor;
};
console.log('Searching for: ' + searchFor);
ploop(searchForNumber, [searchFor], donefn)
.then(function(val) {
console.log('Finally found! ' + val.toString());
process.exit(0);
})
.catch(function(err) {
process.exit(1);
});
I have an object like so:
let myObject = {
'db1': [db1_file1Id,db1_file2Id,db_1file3Id],
'db2': [db2_file1Id, db2_file2Id]
...
}
I iterate through this object and on each iteration I connect to the database, retrieve the file, do some stuff and save the file back. Basically asynchronous stuff.
for (let prop in myObject) {
if (myObject.hasOwnProperty(prop)) {
doStuff(prop, myObject[prop]);
}
}
Now the doStuff function makes sure I have a local scope so there are no inconsistencies. But still, the execution is not synchronous due to the asynchronous operations inside each iteration. I basically need one db to be completely processed before moving on to the next. How do I fix this?
One approach that I thought of was to have a recursive loop, but as per my understanding this would require me to change my data structure extensively, which is sub-optimal IMO.
let arr; //containing my data
function process(x) {
if (x < arr.length){
//process(x+1) is called inside doStuff after asynchronous operations are complete
doStuff(arr[x]);
}
}
You could use the solution you proposed at the end using Object.entries(obj). For example,
let arrProps = Object.entries(myObject);
function process(index) {
if (index < arrProps.length){
// Call the callback once you complete execution of doStuff
doStuff(arrProps[index], () => process(index + 1));
}
}
Inside doStuff:
function doStuff(props, callback) {
// Process props...
// ...then, in the promise of the async call, on success call:
// .then(callback)
}
OR you could use a generator function, if you want to use for ... in loop.
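For example, a rough sketch of the generator idea, assuming doStuff returns a promise, could look like this:
// Yield one [prop, value] pair per own property, in for...in order.
function* entriesOf(obj) {
  for (let prop in obj) {
    if (obj.hasOwnProperty(prop)) {
      yield [prop, obj[prop]];
    }
  }
}

// Drive the generator: only pull the next database once doStuff has resolved.
function run(gen) {
  const { value, done } = gen.next();
  if (done) return Promise.resolve();
  const [prop, files] = value;
  return doStuff(prop, files).then(() => run(gen));
}

run(entriesOf(myObject)).then(() => console.log('all databases processed'));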
The following will do what you ask; it returns an array of resolved values.
Do you want to stop processing if any one of them rejects? As it stands, it rejects if any of them reject and won't continue processing the keys in your object (the object named myObject):
var myObject = {
'one': ["one"],
'two': ["two"]
};
var doStuff = arr =>
console.log("starting:", arr[0]) ||
Promise.resolve(arr[0]);
var [promise,result] =
Object.keys(myObject)
.reduce(
([promise,results], key) =>
[
promise
.then(
resolve =>
doStuff(myObject[key])
)
.then(
resolve => results.push(resolve)&&resolve
)
.then(
resolve => console.log("done:", resolve)
)
,results
]
, [Promise.resolve(), []]
)
promise.then(
_ => {
console.log("done all",result)
}
);
The answer by ayushgp uses recursion; here is a working example that doesn't need changes to doStuff:
var myObject = {
'one': ["one"],
'two': ["two"]
};
var doStuff = arr =>
console.log("starting:",arr[0]) ||
Promise.resolve(arr[0])
var process = (arr,processFn) => {
const rec = (arr,processFn,promise,results) =>
arr.length === 0
? promise.then(_=>results)
: promise
.then(_ => processFn(arr[0][1]))
.then(result=>results.push(result)&&console.log("resolved:",result))
.then(_ => rec(arr.slice(1),processFn,promise,results));
return rec(arr,processFn,Promise.resolve(),[]);
};
process(
Object.keys(myObject).map(key=>[key,myObject[key]]),
doStuff
)
.then(
results => console.log("done all, results:",results)
);
One solution would be to make doStuff return a Promise which you can use to build a chain of promises using calls to then.
The Bluebird promise library provides this functionality with .each and .mapSeries.
You could implement it as:
Promise.forEachSeries = function(array, action) {
return array.reduce(function(prevPromise, value, index, array) {
return prevPromise.then(function() {
return action(value, index, array);
});
}, Promise.resolve());
}
You would use it like this:
Promise.forEachSeries(arr, doStuff);
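For comparison, a rough sketch using Bluebird's built-in helpers (again assuming doStuff returns a promise); either call on its own does the job:
const Promise = require('bluebird');

// mapSeries processes the items one after another and resolves with their results.
Promise.mapSeries(arr, doStuff).then(results => console.log('done', results));

// Promise.each does the same but resolves with the original array.
Promise.each(arr, doStuff).then(() => console.log('done'));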
The following code might be close to what you are asking. I use indices i and j to loop through databases and files respectively:
var dbs = {
db1: ["q", "w", "e", "r"],
db2: ["t", "y"]
};
var names = Object.keys(dbs);
var db, x, i = 0, j = 0;
if (names.length > 0) {
db = dbs[names[i]];
x = db[j];
console.log("start");
asyncProcessing(x)
.then(onSuccess)
.catch(onFailure);
}
function onFailure (e) {
console.log("[FAILURE]", e);
console.log("end");
}
function onSuccess (xx) {
console.log("[SUCCESS]", xx);
j = (j + 1) % db.length; // next j
if (j === 0) i = i + 1; // next i
if (i < names.length) {
db = dbs[names[i]];
x = db[j];
asyncProcessing(x)
.then(onSuccess)
.catch(onFailure);
} else {
console.log("end");
}
}
function asyncProcessing (x) {
return new Promise(function (resolve, reject) {
setTimeout(function () {
// force first two success then random
if (x === "q" || x === "w" || Math.random() * 3 > 1) {
resolve(x + x);
} else {
reject("Not lucky. Try again.");
}
}, 1000);
});
}
The Promise object represents the eventual completion (or failure) of an asynchronous operation and its resulting value. You can try it like this:
$("#myPara").delay(4500).fadeOut().promise().done(function(){
$("#myHeading").attr("style","display:none;") ;
for(var i=10;i<15;i++){
console.log(i);
}
});
console.log("Hello promise !");
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
<p id="myPara"> Hello </p>
<h1 id="myHeading">to be hide</h1>
for (let prop in myObject) {
if (myObject.hasOwnProperty(prop)) {
var stuff= doStuff(prop, myObject[prop]).promise().done(function(){
// Do whatever u want after completion of doStuff
});
}
}
Have a look at the Mozilla reference.
I'm using for loop to iterate over an array of elements and to call the same function with different parameters inside the for loop. Here is my code:
exports.listTopSongs = function(query) {
return new Promise(function(resolve, reject) {
var str = query.split(","), category,
for(var i=0; i<str.length; i++) {
sampleFn(str[i], 'sample', resolve, reject);
}
});
};
function sampleFn(lang, cat, resolve, reject) {
client.on("error", function (err) {
console.log(err);
var err = new Error('Exception in redis client connection')
reject(err);
});
client.keys(lang, function (err, keys){
if (err) return console.log(err);
if(keys.length != 0) {
client.hgetall(keys, function (error, value) {
var objects = Object.keys(value);
result['title'] = lang;
result[cat] = [];
var x =0;
for(x; x<objects.length; x++) {
var val = objects[x];
User.findAll({attributes: ['X', 'Y', 'Z'],
where: {
A: val
}
}).then(data => {
if(data != null) {
//some actions with data and stored it seperately in a Json array
if(result[cat].length == objects.length) {
resolve(result);
}
} else {
console.log(""+cat+" is not avilable for this value "+data.dataValues['X']);
}
});
}
});
});
}
Here it won't wait for the completion of the first iteration. It just runs asynchronously before the first iteration's function has completed. I need to return the result as result: [{1, 2}, {3, 4}], but it runs straight through and returns empty or only one object before completing everything. How do I resolve this?
I used node-async-loop, but it uses next and I can't send my parameters while using that package. Please help me.
Async provides control flow methods allowing to do so.
Using async.each:
async.each(openFiles, function(file, callback) {
// Perform operation on file here.
console.log('Processing file ' + file);
if( file.length > 32 ) {
console.log('This file name is too long');
callback('File name too long');
} else {
// Do work to process file here
console.log('File processed');
callback();
}
}, function(err) {
// if any of the file processing produced an error, err would equal that error
if( err ) {
// One of the iterations produced an error.
// All processing will now stop.
console.log('A file failed to process');
} else {
console.log('All files have been processed successfully');
}
});
If you don't want to use a library, you can code it yourself. It would also be very instructive. I took your issue and coded a dummy async loop:
function listTopSongs(query) {
return new Promise(async(resolve, reject) => { //add async here in order to do asynchronous calls
const str = query.split(",") //str is const, and the other variable was not used anyway
for( let i = 0;i < str.length; i++) {
const planet = await sampleFn(str[i], 'sample', resolve, reject)
console.log(planet)
}
});
};
function sampleFn(a, b, c, d) {
return fetch(`https://swapi.co/api/planets/${a}/`)
.then(r => r.json())
.then(rjson => (a + " : " + rjson.name))
}
listTopSongs("1,2,3,4,5,6,7,8,9")
I used a dummy Star Wars API to fake a long promise, but it should work with your sampleFn. Be careful: it is very, very slow if you have network calls like the one in the example.
EDIT: I ran your code and noticed there are a few mistakes: there is no resolve in your promise, so it's not a thenable (see "thenable" at https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/resolve).
Here is a fully working code. The nice part : no library needed, no dependencies.
//for node.js, use node-fetch :
//const fetch = require("node-fetch")
function listTopSongs(query) {
return new Promise(async(resolve, reject) => { //add async here in order to do asynchronous calls
const str = query.split(",") //str is const, and the other variable was not used anyway
const planets = []
for (let i = 0; i < str.length; i++) {
const planet = await sampleFn(i + 1, str[i], resolve, reject)
planets[i] = planet
console.log(planet)
}
resolve(planets)
});
};
function sampleFn(a, b, c, d) {
return fetch(`https://swapi.co/api/planets/${a}/`)
.then(r => r.json())
.then(rjson => (a + b + " : " + rjson.name))
}
listTopSongs("a,b,c,d").then(planets => console.log(planets))
Since you are using promises, you can do something like this:
exports.listTopSongs = function(query) {
return Promise.resolve(true).then(function(){
var str = query.split(",");
var promises = str.map(function(s){
return sampleFn(s, 'sample');
});
return Promise.all(promises);
}).then(function(results){
//whatever you want to do with the result
});
};
For this to work you have to change your sampleFn so that it does not depend on external resolve and reject functions. I don't see a reason to use external resolve and reject; why not just use Promise.resolve / Promise.reject, or have sampleFn return its own promise?
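For instance, a simplified, hedged sketch of sampleFn creating and returning its own promise (only the shape is shown; the body still has to run hgetall / User.findAll and build result as before):
function sampleFn(lang, cat) {
  return new Promise(function (resolve, reject) {
    client.keys(lang, function (err, keys) {
      if (err) return reject(err);
      // ...process the keys and build `result` as in the original code...
      resolve(result);
    });
  });
}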
I am new to the world of Node.js and JavaScript, and I have a loop that goes over an array of objects.
At a certain condition I need to call a function that does async work,
and the loop should wait until that function is done.
function foo1(arr){
for(var i=0 ; arr.length>i ; i++){
if(i==8){//or any other condition
doAsyncStuff(hits[i])
}
}
}
function doAsyncStuff(item){
parser.parseURL(someurl,function(error,result){
item.someprop=result.someprop;
})
}
The problem is that no matter what I do, I can't seem to make the loop wait until the function ends, so I don't have the result in time and it doesn't update the item I need it to update.
I understand it's a common issue, but none of the solutions I found worked.
Any help would be welcome.
Thanks!
Looping and doing async stuff is a little tricky in JS. You could use one of the libraries that #smnbbrv mentioned in his comment. But you could also do it yourself, which can help you understand how some of these libraries work.
function foo1(arr) {
next(arr, 0)
}
function doAsyncStuff(item, cb) {
parser.parseURL(someurl, function(error, result) {
item.someprop = result.someprop;
cb(result);
})
}
function next(arr, i) {
// Stop when we reach the end of the array.
if (i >= arr.length) {
return;
}
if (i == 8) { // or any condition
// Move to the next item only when the async work is done.
doAsyncStuff(arr[i], function() {
next(arr, i + 1)
})
} else {
next(arr, i + 1)
}
}
I would use Bluebird Promises and the reducer pattern.
var Promise = require('bluebird');
// Iterates over the array 'arr' and calls Myfn repeatedly,
// optionally passing an initial value for the 1st iteration.
// For subsequent iterations, the value returned by the previous
// invocation of Myfn is passed.
Promise.reduce(arr, Myfn, someInitialValue);
function Myfn(prev, item) {
//return a value or a promise.
}
See the documentation of Promise.reduce here: http://bluebirdjs.com/docs/api/promise.reduce.html
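For example, a small hedged sketch of the pattern (doAsyncWork is a hypothetical helper that returns a value or a promise):
var Promise = require('bluebird');

// Each call to Myfn receives whatever the previous iteration resolved with,
// so one step's result can feed the next.
Promise.reduce(arr, function Myfn(prev, item, index, length) {
  console.log('previous result:', prev);
  return doAsyncWork(item);        // hypothetical async helper
}, null)
.then(function (lastResult) {
  console.log('done, last result:', lastResult);
});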
If I understand you correctly then you could use a Promise chain, serialised using reduce (a for loop would also work), something like this
function doAsyncStuff(item) {
return new Promise(resolve => {
const time = Math.ceil(Math.random() * 2000 + 1000);
window.setTimeout(() => {
item.someprop = time;
resolve();
}, time);
});
}
function foo1(arr) {
return arr.reduce(
(promise, item, index) => index % 2 === 0 ? promise.then(() => doAsyncStuff(item)) : promise,
Promise.resolve()
);
}
const hits = new Array(9).fill().map(() => ({}));
foo1(hits).then(() => {
console.log(hits);
});
There is also Promise.all, which you could probably use (though not sure how wonderful that would be, I'm not a frequent Promise user).
Update: Using Promise.all
function doAsyncStuff(item) {
return new Promise(resolve => {
const time = Math.ceil(Math.random() * 2000 + 1000);
window.setTimeout(() => {
item.someprop = time;
resolve();
}, time);
});
}
function foo1(arr) {
return Promise.all(
arr.map((item, index) => index % 2 === 0 && doAsyncStuff(item))
);
}
const hits = new Array(9).fill().map(() => ({}));
foo1(hits).then(() => {
console.log(hits);
});
I still haven't figured out the best way to format ES6, it always seems to end up with longish lines. (personal styling issue) :)