I am looking for ideas/help to improve my code. It's already working, but I am not confident about it and not really proud of it. This is a short version of my function:
// Builds the server list, runs the appropriate availability check for every
// entry concurrently, and attaches each result to its entry as `status`.
// Fixes: the original used `forEach(async ...)` (the `async` there is
// ignored by forEach) and awaited results one by one; `map` keeps each
// promise at the same index as its server, and Promise.all awaits them all.
module.exports.serverlist = async () => {
    const serverlist = [
        { mon_sid: 'AAA', mon_hostname: 'aaaa.com', mon_port: 80 },
        { mon_sid: 'BBB', mon_hostname: 'bbbb.com', mon_port: 80 },
    ];
    const checks = serverlist.map((server) => {
        if (server.mon_sid.includes('_DB')) {
            // Function home.checkOracleDatabase returns a promise, same as the others.
            return home.checkOracleDatabase(server.mon_hostname, server.mon_port);
        }
        if (server.mon_sid.includes('_HDB')) {
            return home.checkHANADatabase(server.mon_hostname, server.mon_port);
        }
        return home.checkPort(server.mon_port, server.mon_hostname, 1000);
    });
    // All checks run in parallel; results stay aligned with serverlist by index.
    const results = await Promise.all(checks);
    results.forEach((status, i) => { serverlist[i].status = status; });
    console.table(serverlist);
};
What does the code do?:
- It asynchronously performs the needed availability check of the service.
What is the expectation?
- That the code will run asynchronously, and at the end of the function it will wait for all promises to be resolved, perform the check again for failed results, and have more control over the promises. At the moment the promises are not really connected to the array of systems in any way; the association is based only on ordering, which can be problematic in the future.
If someone can assist/give some advises, I would be more than happy.
Also I am not sure how many parallel asynchronous operations NodeJS can perform (or the OS). Currently there are 30 systems on the list, but in future it can be 200-300, I am not sure if it can be handled at once.
Update
I used Promise.all — it works fine. However, the problem is, as I mentioned, that I don't have any control over the $promises array. The results are saved in the sequence in which they were triggered, and that is how they are assigned back to serverlist[i].status from $promises. But I would like to have more control over it: I want some index, or something similar, in the $promises array so I can make sure that the results are assigned to the PROPER systems (rather than just trusting that the ordering will line up).
Also I would like to extend this function with option to reAttempt failed checks, and for that definitely I need some index in $promises array.
Update 2
After all of your suggestion this is how the code looks for now -
/**
 * Kicks off the right availability check for every server in the list and
 * returns a promise that settles once every check has finished.
 *
 * - SIDs containing '_DB'  -> Oracle check (suffix stripped to get the SID)
 * - SIDs containing '_HDB' -> HANA check  (suffix stripped to get the SID)
 * - anything else          -> plain port check with a 1000 ms timeout
 *
 * @param {Array<Object>} serverlist entries with mon_sid/mon_hostname/mon_port
 * @returns {Promise<Array>} Promise.allSettled results, index-aligned with input
 */
function performChecks(serverlist) {
    const Checks = serverlist.map(Server => {
        const sid = Server.mon_sid;
        if (sid.includes('_DB')) {
            return home.checkOracleDatabase(sid.replace('_DB', ''), Server.mon_hostname, Server.mon_port);
        }
        if (sid.includes('_HDB')) {
            return home.checkHANADatabase(sid.replace('_HDB', ''), Server.mon_hostname, Server.mon_port);
        }
        return home.checkPort(Server.mon_port, Server.mon_hostname, 1000);
    });
    return Promise.allSettled(Checks);
}
// Ignore the way that the function is created, it's just for debug purpose
// Debug driver: check all servers once, then re-check any failures a single
// time before printing the final table.
(async function () {
    const serverlist = [
        { mon_id: 1, mon_sid: 'AAA', mon_hostname: 'hostname_1', mon_port: 3203 },
        { mon_id: 2, mon_sid: 'BBB', mon_hostname: 'hostname_2', mon_port: 3201 },
        { mon_id: 3, mon_sid: 'CCC', mon_hostname: 'hostname_3', mon_port: 3203 },
    ];
    // Perform the first check for all servers; results are index-aligned.
    const checkResults = await performChecks(serverlist);
    // Combine results into the serverlist entries under the `status` key.
    checkResults.forEach((result, i) => { serverlist[i].status = result.value; });
    // Retry failed checks to rule out a temporary network/script issue/lag.
    // (Strict === instead of the original loose == comparison.)
    const reAttempt = serverlist.filter(server => server.status === false);
    if (reAttempt.length > 0) {
        // In the future performChecks will accept an extra argument enabling
        // additional tracing for re-attempts.
        const reAttemptResults = await performChecks(reAttempt);
        // `filter` returns references to the original serverlist objects, so
        // updating them here updates serverlist too — the original's extra
        // merge-by-mon_id step was redundant and has been removed.
        reAttemptResults.forEach((result, i) => { reAttempt[i].status = result.value; });
    }
    // View the results
    console.table(serverlist);
})();
Firstly instead of doing a for each and push promises you can map them and do a Promise all. You need no push. Your function can return directly your promise all call. The caller can await it or use then...
Something like this (I didn't test it)
// serverlist declaration
/**
 * Maps every server to the matching availability-check promise and returns
 * one Promise.all over them (fail-fast, results index-aligned with input).
 * The caller can await it or chain .then on it.
 */
function getList(serverlist) {
    const operations = serverlist.map(Server => {
        if (Server.mon_sid.includes('_DB')) {
            return home.checkOracleDatabase(Server.mon_hostname, Server.mon_port);
        }
        if (Server.mon_sid.includes('_HDB')) {
            return home.checkHANADatabase(Server.mon_hostname, Server.mon_port);
        }
        return home.checkPort(Server.mon_port, Server.mon_hostname, 1000);
    });
    return Promise.all(operations);
}
const serverlist = [...]
const test = await getList(serverlist)
// test is an array of fulfilled/unfulfilled results of Promise.all
So I would create a function that takes a list of operations(serverList) and returns the promise all like the example above without awaiting for it.
The caller would await it or using another promise all of a series of other calls to your function.
Potentially you can go deeper like Inception :)
// on the caller you can go even deeper
await Promise.all([getList([...]) , getList([...]) , getList([...]) ])
Considering what you added you can return something more customized like:
if (Server.mon_sid.includes('_DB')) {
return home.checkOracleDatabase(Server.mon_hostname, Server.mon_port).then(result => ({result, name: 'Oracle', index:..., date:..., hostname: Server.mon_hostname, port: Server.mon_port}))
In the case above your promise would return a json with the output of your operation as result, plus few additional fields you might want to reuse to organize your data.
For more consistency, and to resolve this question, I/we (the other people who can help you) need all of the code and all of the variable definitions (because for now I can't find where the promises variable is defined). Thanks.
Related
I have a javascript application running on NodeJS. For context I am using the express framework as this is meant to be our backend. I have a chunk of code which is meant to get data from a database, filter it and then send it back to the client. Instead, the filtering happens AFTER the response is sent, meaning the client is getting incorrect data. The code is below.
// Runs one query per search term, strips null-valued fields from each row,
// and only responds once EVERY query has completed.
// Fixes vs. original: the promise used to resolve("ok") before any query
// callback ran (so the response was sent with an empty resultArray), and the
// usedsearches UPDATE referenced `result.rows[0].id` where `result` was
// undefined — the id comes from this query's own rows (result2).
// NOTE(review): `type` is concatenated directly into the SQL — make sure it
// is validated against a whitelist of column names (SQL-injection risk).
const bulkSearchPromise = Promise.all(
    input.map((term) =>
        new Promise((resolve, reject) => {
            collectionPool.query('SELECT * FROM users WHERE ' + type + ' = $1', [term], (err2, result2) => {
                if (err2) {
                    // Propagate the error instead of only logging it.
                    return reject(err2);
                }
                resolve(result2.rows);
            });
        })
    )
).then((rowGroups) => {
    const resultArray = [];
    for (const rows of rowGroups) {
        if (rows.length > 0) { // If the user the client searched for was found
            // Increment used searches for the matched user.
            pool.query('UPDATE users SET usedsearches = usedsearches + 1 WHERE id = $1', [rows[0].id]);
            for (const row of rows) {
                // Filter useless key/value pairs: e.g. username: null is not
                // worth sending back to the client.
                const filtered = Object.fromEntries(
                    Object.entries(row).filter(([key, value]) => value != null)
                );
                resultArray.push(filtered);
            }
        }
    }
    return resultArray;
});
bulkSearchPromise.then((resultArray) => {
    return res.json({
        status: 'success',
        content: resultArray
    }); // resultArray is now fully populated before the response is sent.
});
When the endpoint is hit the output will always be
username
b
a
What I need is for the for loop to run first and then after resultArray is populated, return it back to the client.
I've tried wrapping this code in a promise, but that hasn't helped either, as 'resolve("ok")' is still called before the for loop completes.
Your promise is resolving before collectionPool.query is done.
The for loop only runs after collectionPool.query is done but you are resolving the promise before it.
Note that collectionPool.query is async and the its callback will only run when the web-api is finished (if this concept is murky check this out http://latentflip.com/loupe)
Option 1:
Move resolve() inside the collectionPool.query (where the console.log("a");) call back and call resolve(filteredObject).
In addition you should reject(err2) when err2 is not null (not just console)
Option 2:
you can use Util.Promisify to transform collectionPool.query to promise base API which will save you the hustle of manually transform
const util = require('util');
// Promisify the callback-based query API instead of hand-rolling a Promise.
// Fix: promisifying the bare method detaches it from its object, so `this`
// would be undefined inside query() — bind it to collectionPool first.
util.promisify(collectionPool.query.bind(collectionPool))(QUERY)
.then(queryRes => {
/* logic to refine queryRes to object */
return filteredObject;
});
in both options you can omit the resultArray from your code. if you resolve(filteredObject) or return filteredObject; in then() you will be able to access this data on the next then (the value in bulkSearchPromise.then((value)).
// NOTE(review): getAll is asynchronous, so every .then callback below runs
// only after this loop has finished. By then `currentProduct` holds the
// LAST product, so each update() call receives the wrong product — this is
// the bug the question is about.
let currentProduct;
for (let i = 0; i < products.length; i++) {
currentProduct = products[i];
subscription.getAll(products[i]._id)
.then((subs) => {
update(subs, currentProduct);
});
}
I'm using bluebird, the methods getAll and update return promises. How can I say "Wait until the two promises return, then update the currentProduct value"? I'm quite new to JS...
This will be straightforward if you can use async/await:
// Sequential processing with async/await: each iteration fully finishes its
// getAll + update before the next iteration starts, so `currentProduct`
// still refers to the right product inside the .then.
// Make sure that this code is inside a function declared using
// the `async` keyword.
let currentProduct;
for (let i = 0; i < products.length; i++) {
currentProduct = products[i];
// By using await, the code will halt here until
// the promise resolves, then it will go to the
// next iteration...
await subscription.getAll(products[i]._id)
.then((subs) => {
// Make sure to return your promise here...
return update(subs, currentProduct);
});
// You could also avoid the .then by using two awaits:
/*
const subs = await subscription.getAll(products[i]._id);
await update(subs, currentProduct);
*/
}
Or if you can only use plain promises, you can loop through all your products, and put each promise in the .then of the last loop. In that way, it will only advance to the next when the previous has resolved (even though it will have iterated the whole loop first):
// Plain-promise alternative: build one long chain so each product's
// getAll/update only starts after the previous product's has resolved.
let currentProduct;
let promiseChain = Promise.resolve();
for (let i = 0; i < products.length; i++) {
currentProduct = products[i];
// Note that there is a scoping issue here, since
// none of the .then code runs till the loop completes,
// you need to pass the current value of `currentProduct`
// into the chain manually, to avoid having its value
// changed before the .then code accesses it.
const makeNextPromise = (currentProduct) => () => {
// Make sure to return your promise here.
return subscription.getAll(products[i]._id)
.then((subs) => {
// Make sure to return your promise here.
return update(subs, currentProduct);
});
}
// Note that we pass the value of `currentProduct` into the
// function to avoid it changing as the loop iterates.
promiseChain = promiseChain.then(makeNextPromise(currentProduct))
}
In the second snippet, the loop just sets up the entire chain, but doesn't execute the code inside the .then immediately. Your getAll functions won't run until each prior one has resolved in turn (which is what you want).
Here is how I'd do it:
// Simplest sequential form (must be inside an async function): for...of
// with await handles one product at a time, in order.
for (let product of products) {
let subs = await subscription.getAll(product._id);
await update(subs, product);
}
No need to manually chain promises or iterate arrays by index :)
You may want to keep track of what products you've processed because when one fails you have no idea how many succeeded and you don't know what to correct (if roll back) or retry.
The async "loop" could be a recursive function:
// Recursively updates products one at a time, remembering which ids were
// processed so that a failure can report how far it got.
// Resolves with the array of processed ids; rejects with [err, processed].
const updateProducts = /* add async */async (products,processed=[]) => {
try{
if(products.length===0){
return processed;
}
const subs = await subscription.getAll(products[0]._id)
// BUG fixed: original referenced undefined `product` / `product[0]` —
// the current item is products[0].
await update(subs, products[0]);
processed.push(products[0]._id);
}catch(err){
// Reject with both the error and the ids processed so far.
throw [err,processed];
}
return await updateProducts(products.slice(1),processed);
}
Without async you can use recursion or reduce:
//using reduce
// Chains all updates onto a single promise via reduce; `processed` gathers
// ids as a side effect. Note push() returns the new array length, which
// briefly becomes the chain's value — harmless, since the final .then
// resolves with the `processed` array itself.
const updateProducts = (products) => {
//keep track of processed id's
const processed = [];
return products.reduce(
(acc,product)=>
acc
.then(_=>subscription.getAll(product._id))
.then(subs=>update(subs, product))
//add product id to processed product ids
.then(_=>processed.push(product._id)),
Promise.resolve()
)
//resolve with processed product id's
.then(_=>processed)
//when rejecting include the processed items
.catch(err=>Promise.reject([err,processed]));
}
//using recursion
// Processes products[0], records its id, then recurses on the rest.
// Resolves with the processed ids; rejects with [err, processed].
const updateProducts = (products,processed=[]) =>
(products.length!==0)
? subscription.getAll(products[0]._id)
// BUG fixed: original passed undefined `product` here; the current
// item is products[0].
.then(subs=>update(subs, products[0]))
//add product id to processed
.then(_=>processed.push(products[0]._id))
//reject with error and id's of processed products
.catch(err=>Promise.reject([err,processed]))
.then(_=>updateProducts(products.slice(1),processed))
: processed//resolve with array of processed product ids
Here is how you'd call updateProducts:
// Example usage: log the processed ids on success; on failure, log the
// error together with the ids that were processed before it occurred.
updateProducts(products)
.then(processed=>console.log("Following products are updated.",processed))
.catch(([err,processed])=>
console.error(
"something went wrong:",err,
"following were processed until something went wrong:",
processed
)
)
This is my first question and I'm trying to learn javascript/nodejs
I have an array x.
var x = [1,2,3,4];
Also I have a function which takes in a param, does some processing and returns a json.
// Placeholder: performs external API calls for `param` and returns the
// resulting JSON. NOTE(review): as written, `result` is undefined here —
// the real implementation (and whether it is async) lives elsewhere.
function funcName (param){
//does some external API calls and returns a JSON
return result;
}
Now what I'm looking for is rather than iterating over the array and calling the function again and again, is there a way to call them in parallel and then join the result and return it together ?
Also I'm looking for ways to catch the failed function executions.
for ex: funcName(3) fails for some reason
What you could do is create a file that does your heavy lifting, then run a fork of that file.
In this function we do the following:
loop over each value in the array and create a promise that we will store in an array
Next we create a fork
We then send data to the fork using cp.send()
Wait for a response back and resolve the promise
Using promise.all we can tell when all our child processes have completed
The first parameter will be an array of all the child process results
So our main process will look a little something like this:
const { fork } = require('child_process')

let x = [1,2,3,4]

// Forks one child per value, waits for each child's result message, and
// logs the collected results once every child has replied.
// NOTE: this function shadows the global `process` object inside its body.
function process(x) {
  let promises = []
  for (let i = 0; i < x.length; i++) {
    promises.push(new Promise(resolve => {
      // BUG fixed: pass the VALUE x[i] to the child, not the index i —
      // the child switches on the value.
      let cp = fork('my_process.js', [x[i]])
      cp.on('message', data => {
        cp.kill()
        resolve(data)
      })
    }))
  }
  // Resolves when all child processes have reported back, in input order.
  Promise.all(promises).then(data => {
    console.log(data)
  })
}

process(x)
Now in our child we can listen for messages, and do our heavy lifting and return the result back like so (very simple example):
// We got some data, let's process it.
// BUGS fixed: script arguments start at process.argv[2] (argv[0] is node,
// argv[1] is the script path), and argv values are strings, so coerce to a
// number before switching; `result` is also declared instead of leaking as
// an implicit global.
let result = []
switch (Number(process.argv[2])) {
case 1:
result = [1, 1, 1, 1, 1, 1]
break
case 2:
result = [2, 2, 2, 2, 2, 2]
break
}
// Send the result back to the main process
process.send(result)
The comments and other answer are correct. JavaScript has no parallel processing capability whatsoever (forking processes doesn't count).
However, you can make the API calls in a vaguely parallel fashion. Since, as they are asynchronous, the network IO can be interleaved.
Consider the following:
const urls = ['api/items/1', 'api/items/2', etc];

// BUG fixed: `urls.map(fetch)` passes (url, index, array) to fetch, so the
// numeric index lands in fetch's `init` options parameter. Wrap the call so
// fetch receives the URL only.
Promise.all(urls.map(url => fetch(url)))
  .then(results => {
    processResults(results);
  });
While that won't execute JavaScript instructions in parallel, the asynchronous fetch calls will not wait for each other to complete but will be interleaved, and the results will be collected when all have completed.
With error handling:
const urls = ['api/items/1', 'api/items/2', etc];

// With error handling: failed fetches resolve to undefined and are filtered
// out before processing.
// BUGS fixed: the original was missing the closing parenthesis on the
// Promise.all(...) line, and `.map(fetch)` leaked the array index into
// fetch's second parameter.
Promise.all(urls.map(url => fetch(url)).map(promise => promise.catch(() => undefined)))
  .then(results => results.filter(result => result !== undefined))
  .then(results => {
    processResults(results);
  });
// NOTE(review): getAll is asynchronous, so every .then callback below runs
// only after this loop has finished. By then `currentProduct` holds the
// LAST product, so each update() call receives the wrong product — this is
// the bug the question is about.
let currentProduct;
for (let i = 0; i < products.length; i++) {
currentProduct = products[i];
subscription.getAll(products[i]._id)
.then((subs) => {
update(subs, currentProduct);
});
}
I'm using bluebird, the methods getAll and update return promises. How can I say "Wait until the two promises return, then update the currentProduct value"? I'm quite new to JS...
This will be straightforward if you can use async/await:
// Sequential processing with async/await: each iteration fully finishes its
// getAll + update before the next iteration starts, so `currentProduct`
// still refers to the right product inside the .then.
// Make sure that this code is inside a function declared using
// the `async` keyword.
let currentProduct;
for (let i = 0; i < products.length; i++) {
currentProduct = products[i];
// By using await, the code will halt here until
// the promise resolves, then it will go to the
// next iteration...
await subscription.getAll(products[i]._id)
.then((subs) => {
// Make sure to return your promise here...
return update(subs, currentProduct);
});
// You could also avoid the .then by using two awaits:
/*
const subs = await subscription.getAll(products[i]._id);
await update(subs, currentProduct);
*/
}
Or if you can only use plain promises, you can loop through all your products, and put each promise in the .then of the last loop. In that way, it will only advance to the next when the previous has resolved (even though it will have iterated the whole loop first):
// Plain-promise alternative: build one long chain so each product's
// getAll/update only starts after the previous product's has resolved.
let currentProduct;
let promiseChain = Promise.resolve();
for (let i = 0; i < products.length; i++) {
currentProduct = products[i];
// Note that there is a scoping issue here, since
// none of the .then code runs till the loop completes,
// you need to pass the current value of `currentProduct`
// into the chain manually, to avoid having its value
// changed before the .then code accesses it.
const makeNextPromise = (currentProduct) => () => {
// Make sure to return your promise here.
return subscription.getAll(products[i]._id)
.then((subs) => {
// Make sure to return your promise here.
return update(subs, currentProduct);
});
}
// Note that we pass the value of `currentProduct` into the
// function to avoid it changing as the loop iterates.
promiseChain = promiseChain.then(makeNextPromise(currentProduct))
}
In the second snippet, the loop just sets up the entire chain, but doesn't execute the code inside the .then immediately. Your getAll functions won't run until each prior one has resolved in turn (which is what you want).
Here is how I'd do it:
// Simplest sequential form (must be inside an async function): for...of
// with await handles one product at a time, in order.
for (let product of products) {
let subs = await subscription.getAll(product._id);
await update(subs, product);
}
No need to manually chain promises or iterate arrays by index :)
You may want to keep track of what products you've processed because when one fails you have no idea how many succeeded and you don't know what to correct (if roll back) or retry.
The async "loop" could be a recursive function:
// Recursively updates products one at a time, remembering which ids were
// processed so that a failure can report how far it got.
// Resolves with the array of processed ids; rejects with [err, processed].
const updateProducts = /* add async */async (products,processed=[]) => {
try{
if(products.length===0){
return processed;
}
const subs = await subscription.getAll(products[0]._id)
// BUG fixed: original referenced undefined `product` / `product[0]` —
// the current item is products[0].
await update(subs, products[0]);
processed.push(products[0]._id);
}catch(err){
// Reject with both the error and the ids processed so far.
throw [err,processed];
}
return await updateProducts(products.slice(1),processed);
}
Without async you can use recursion or reduce:
//using reduce
// Chains all updates onto a single promise via reduce; `processed` gathers
// ids as a side effect. Note push() returns the new array length, which
// briefly becomes the chain's value — harmless, since the final .then
// resolves with the `processed` array itself.
const updateProducts = (products) => {
//keep track of processed id's
const processed = [];
return products.reduce(
(acc,product)=>
acc
.then(_=>subscription.getAll(product._id))
.then(subs=>update(subs, product))
//add product id to processed product ids
.then(_=>processed.push(product._id)),
Promise.resolve()
)
//resolve with processed product id's
.then(_=>processed)
//when rejecting include the processed items
.catch(err=>Promise.reject([err,processed]));
}
//using recursion
// Processes products[0], records its id, then recurses on the rest.
// Resolves with the processed ids; rejects with [err, processed].
const updateProducts = (products,processed=[]) =>
(products.length!==0)
? subscription.getAll(products[0]._id)
// BUG fixed: original passed undefined `product` here; the current
// item is products[0].
.then(subs=>update(subs, products[0]))
//add product id to processed
.then(_=>processed.push(products[0]._id))
//reject with error and id's of processed products
.catch(err=>Promise.reject([err,processed]))
.then(_=>updateProducts(products.slice(1),processed))
: processed//resolve with array of processed product ids
Here is how you'd call updateProducts:
// Example usage: log the processed ids on success; on failure, log the
// error together with the ids that were processed before it occurred.
updateProducts(products)
.then(processed=>console.log("Following products are updated.",processed))
.catch(([err,processed])=>
console.error(
"something went wrong:",err,
"following were processed until something went wrong:",
processed
)
)
i want to perform synchronous operation of functions using promise. I have loop that passes the data to be inserted to insert function and after inserting one row i want to check the no. of rows exists in table hence i am performing select operation.
But the issue is that if there are 3 records, it inserts all 3 records and only after that does my select function get executed. What I want is for the select function to be called after the insertion of each record.
here is my pseudo code as entire code involves lot of operations
// NOTE(review): pseudo-code from the question. The loop condition
// `data.length` never changes (so this loops forever), it passes the whole
// `data` array instead of data[i], and the calls are not awaited — which is
// exactly why all inserts run before the selects.
for(var i=0; data.length ; i++){
self.executeFeedbackTrack(data);
}
// Inserts one feedback record, then counts the rows. (These are methods of
// some surrounding object literal — hence the `name: function` syntax.)
executeFeedbackTrack:function(callInfo){
var self=this;
return self.insertFeedbackTrack(callInfo).then(function(data){
// NOTE(review): `status` is undefined here; the resolved value is `data`.
console.log("insertFeedbackTrack status "+status);
return self.getFeedbackTrack();
});
},
// Resolves with the number of feedback rows currently in the table.
getFeedbackTrack :function(){
return new Promise(function(resolve,reject){
var objDBFeedbackTrack = new DBFeedbackTrack();
objDBFeedbackTrack.selectFeedbackTrack(function(arrayCallRegisters){
if(arrayCallRegisters){
console.log("notification.js no. of feedbacks "+arrayCallRegisters.length);
resolve(arrayCallRegisters.length);
}
// NOTE(review): if the callback yields no rows, this promise never settles.
});
});
},
// Inserts one feedback record; resolves with the driver's status value.
insertFeedbackTrack :function(callInfo){
return new Promise(function(resolve,reject){
var objDBFeedbackTrack = new DBFeedbackTrack();
objDBFeedbackTrack.insertFeedbackTrack(callInfo.callNumber,callInfo.callServiceType,function(status){
resolve(status);
$('#loader').hide();
});
});
}
The previous answer is good, but if you are using nodejs, or babel, or you are using only modern browsers. You can use an async-await pair, it is es8 stuff.
// Sequential async/await version: insert one record, then immediately fetch
// the count/track for it, before moving on to the next item.
// NOTE(review): the two functions below are placeholders — `new Promise(/***/)`
// has no executor and would throw if actually called.
let insertFeedbackTrack = function(){ return new Promise(/***/)};
let getFeedbackTrack = function(){ return new Promise(/***/)};
let processResult = async function(data){
let feedbacks = [];
for(let i=0;i<data.length;i++){
// await guarantees each select runs right after its own insert.
let insertedResult = await insertFeedbackTrack(data[i]);//perhaps you will return an id;
let feedbackTrack = await getFeedbackTrack(insertedResult.id);
feedbacks.push(feedbackTrack);
}
return feedbacks;
}
processResult(data).then(/** do stuff */)
It looks to me like this is caused by executing a series of asynchronous inserts, and assuming that the get of insert n (inside of a .then()) is called before insert n+1 is executed. However, I'm not aware of any such guarantee, in JavaScript; all that I'm familiar with is that then n will be called after insert n, not that it would be called before insert n+1.
What I'd suggest is avoiding this mix of traditional and callback-based code, and instead put the iteration step inside getFeedbackTrack().then. Assuming this understanding of the issue is correct, then something like the following should work:
// Callback-style sequencing: recursion starts item i+1 only after item i's
// insert AND select have both resolved.
// NOTE(review): mixes `obj` and `self` as receivers — presumably the same
// object in the asker's code; confirm against the original.
function iterate(i) {
if (i < data.length) {
obj.insertFeedbackTrack(data[i]).then(function(insertResult) {
self.getFeedbackTrack().then(function(getResult) {
// this line is the important one, replacing the `for` loop earlier
iterate(i+1);
});
});
}
}
iterate(0);
By doing that, you would guarantee that insert for the next element does not occur until the current select executes successfully.
Naturally, you may also want to restructure that to use chained .then instead of nested; I used nested rather than chained to emphasize the ordering of callbacks.
This can be solved by using a very handy JS library Ramda. Concept is to use two methods, one is R.partial and another is R.pipeP.
First create a promises array from your data array, like following.
// R.partial wraps `sample` so each call is deferred until R.pipeP runs it.
var promises = data.map(function(i) {
return R.partial(sample, [i])
});
Then you can pass this promise to R.pipeP, so that it can be executed one after another. like below.
// pipeP runs the deferred calls left-to-right, each after the previous one.
var doOperation = R.pipeP.apply(this, promises)
Please execute following snippet attached.
// Example async task: logs and resolves with its argument after a
// one-second delay.
function sample(d) {
    return new Promise((resolve) => {
        setTimeout(() => {
            console.log('resolved for:' + d);
            resolve(d);
        }, 1000);
    });
}
// Sample data
var data = [1, 2, 3, 4, 5]
// Converting data array to promise array
// (really: an array of deferred calls — nothing runs until pipeP does)
var promises = data.map(function(i) {
return R.partial(sample, [i])
});
// Compose the deferred calls so each runs after the previous resolves.
var doOperation = R.pipeP.apply(this, promises)
doOperation();
<script src="https://cdnjs.cloudflare.com/ajax/libs/ramda/0.25.0/ramda.min.js"></script>
So in your case, the code will look like this
// Same pattern applied to the asker's function.
// NOTE(review): R.partial(self.executeFeedbackTrack, ...) detaches the
// method from `self`, losing `this` — presumably it should be bound first.
var promises = data.map(function(i) {
return R.partial(self.executeFeedbackTrack, [i])
});
var doOperation = R.pipeP.apply(this, promises)
doOperation();
I use yield for such cases if using generator functions.
// NOTE(review): `yield` only works inside a generator function (or with a
// runner such as co); this snippet assumes that surrounding context.
for(var i = 0; i < order.tasks.length; i++){
if(order.tasks[i].customer_id === 0){
var name = order.tasks[i].customer_name.split(" ")
// yield pauses the generator until the INSERT completes, so the
// iterations run strictly one after another.
const customers = yield db.queryAsync(
`INSERT INTO customers(
business_id)
VALUES(?)
`,[order.business_id])
}
}
Or else I use self-calling functions in case of callbacks.
// Self-calling loop for callback code: each iteration explicitly invokes
// loop() again, so an async step could be inserted between iterations.
// NOTE(review): iterates while i < data.length but indexes order.tasks —
// presumably parallel arrays; confirm against the full code.
var i = 0;
(function loop() {
if (i < data.length) {
task_ids1.push([order.tasks[i].task_id])
i++;
loop();
}
}());
Here's how I would sequentially call promises in a loop (I'm using ES7).
First, let's define some basic data:
const data = [0,1,2,3];
Then, let's simulate some long running process, so let's create a function that returns a Promise (you can think of this as a simulated network request, or whatever suits your needs)
// Simulated long-running task: logs and resolves with `item` after 1 second.
const promiseExample = (item) =>
new Promise((res) => {
setTimeout(() => {
console.log('resolved ', item);
res(item);
}, 1000);
});
Now, let's create an array of promises. What the next line of code does is: for every item in the array data, return a promise factory. A promise factory is a function that wraps a certain promise without running it.
// Wrap each item in a promise factory (a deferred call) so nothing starts
// until the serializer invokes it.
const funcs = data.map(item => async () => await promiseExample(item));
Now, the actual code starts here. We need a function that does the actual serialization. Since it has to handle an array of promiseFactories, I split it in two functions, one for the serialization of a single promise, and one for handling an array of promiseFactories.
// Runs a list of promise factories strictly one after another, resolving
// with their results in input order.
const serializePromise = promiseFactoryList => {
  let chain = Promise.resolve([]);
  for (const factory of promiseFactoryList) {
    chain = serialize(chain, factory);
  }
  return chain;
};
// Waits for the results gathered so far, then invokes the next factory and
// appends its result to the accumulator.
const serialize = async (promise, promiseFactory) => {
  const resultsSoFar = await promise;
  const nextResult = await promiseFactory();
  return resultsSoFar.concat([nextResult]);
};
Now, you can simply call it like this:
// Kick off the serialized chain and log the ordered results.
serializePromise(funcs).then(res => {
console.log('res', res);
});
As you can see, the code is pretty simple, elegant, functional, and doesn't need any external dependency. I hope this answers your question and helps you!
// Runs a list of promise factories strictly one after another, resolving
// with their results in input order.
const serializePromise = promiseFactoryList => {
  let chain = Promise.resolve([]);
  for (const factory of promiseFactoryList) {
    chain = serialize(chain, factory);
  }
  return chain;
};
// Waits for the results gathered so far, then invokes the next factory and
// appends its result to the accumulator.
const serialize = async (promise, promiseFactory) => {
  const resultsSoFar = await promise;
  const nextResult = await promiseFactory();
  return resultsSoFar.concat([nextResult]);
};
const data = [0,1,2,3];
// Simulated long-running task: logs and resolves with `item` after 1 second.
const promiseExample = (item) =>
new Promise((res) => {
setTimeout(() => {
console.log('resolved ', item);
res(item);
}, 1000);
});
// Wrap each item in a promise factory so nothing starts until serialized.
const funcs = data.map(item => async () => await promiseExample(item))
// Kick off the serialized chain and log the ordered results.
serializePromise(funcs).then(res => {
console.log('res', res);
});
I ran into this problem recently and solved it as shown below. This is very similar to the answer by #Ethan Kaminsky, but only uses callbacks. This may be useful for people avoiding promises for whatever reason.
In my application the asynchronous function may fail and can safely be retried; I included this logic because it's useful and doesn't overly complicate the routine, but it is not exercised in the example.
// Some callback when the task is complete.
// NOTE(review): window.alert is browser-only; use console.log under Node.
function cb(...rest) { window.alert( `${rest.join(', ')}` ) }
// The data and the function operating on the data
// The function calls "callback(err)" on completion
// NOTE(review): this stub ignores its data argument and never passes an
// error — the retry branch below is therefore not exercised here.
const data = [ 'dataset1', 'dataset2', 'dataset3' ]
const doTheThing = (thingDone) => setTimeout( thingDone, 1000 )
let i = -1 // counter/iterator for data[]
let retries = 20 // everything fails; total retry #
// The do-async-synchronously (with max retries) loop:
// on error, retry the same item (up to `retries` total); otherwise advance
// to the next item, stopping when data[] is exhausted.
function next( err ) {
if( err ) {
if( ! --retries ) return cb( 'too many retries' )
} else if( ! data[++i] ) return cb( undefined, 'done-data' )
console.log( 'i is', i, data[i] )
doTheThing( next, data[i] ) // callback is first here
}
// start the process (no err argument => begin with the first item)
next()