Run callback function after forEach is done - javascript

In my project, I have a loop going through a list of URLs. It downloads a file from every URL and does some post-processing on the downloaded file.
After all the processing is done (both the download and the post-processing), I want to execute a callback function. Because the post-processing includes a streaming task, it has a close event. If the last item could be identified, I could pass the callback to that close event. However, since the loop is async, I can't tell which item finishes last.
For now, I use a 5 second timeout to make sure the callback executes after the whole process. Obviously, this is not reliable. What's a good way to handle this?
loop code:
exports.processArray = (items, process, callback) => {
  var todo = items.concat();
  setTimeout(function() {
    process(todo.shift());
    if (todo.length > 0) {
      // execute download and post process each second
      // however it doesn't guarantee one start after previous one done
      setTimeout(arguments.callee, 1000);
    } else {
      setTimeout(() => { callback(); }, 5000);
    }
  }, 1000);
};
processArray(
  // First param, the array
  urlList,
  // Second param, download and post process
  (url) => {
    if (url.startsWith('http')) {
      getDataReg(url, uid);
    }
    else if (url.startsWith('ftp')) {
      getDataFtp(url, uid);
    }
    else {
      console.log('not a valid resource');
    }
  },
  // Third param, callback to be executed after all done
  () => {
    Request.get(`${config.demouri}bound=${request.query.boundary};uid=${uid}`, {
      method: 'GET',
      auth: auth
    })
    .on('response', (response) => {
      console.log('response event emits');
      zipFiles(uid)
        .then((path) => {
          reply.file(path, { confine: false, filename: uid + '.zip', mode: 'inline'}).header('Content-Disposition');
        });
    });
  }
);
Download and post process:
exports.getDataFtp = (url, uid) => {
  console.log('get into ftp');
  var usefulUrl = url.split('//')[1];
  var spliter = usefulUrl.indexOf('/');
  var host = usefulUrl.substring(0, spliter);
  var dir = usefulUrl.substring(spliter + 1, usefulUrl.length);
  var client = new ftp();
  var connection = {
    host: host
  };
  var fileNameStart = dir.lastIndexOf('/') + 1;
  var fileNameEnd = dir.length;
  var fileName = dir.substring(fileNameStart, fileNameEnd);
  console.log('filename: ', fileName);
  client.on('ready', () => {
    console.log('get into ftp ready');
    client.get(dir, (err, stream) => {
      if (err) {
        console.log('get file err:', err);
        return;
      } else {
        console.log('get into ftp get');
        stream.pipe(fs.createWriteStream(datadir + `download/${uid}/${fileName}`));
        stream.on('end', () => {
          console.log('get into ftp close');
          unzipData(datadir + `download/${uid}/`, fileName, uid);
          client.end();
        });
      }
    });
  });
  client.connect(connection);
};
exports.getDataReg = (url, uid) => {
  console.log('get into http');
  var fileNameStart = url.lastIndexOf('/') + 1;
  var fileNameEnd = url.length;
  var fileName = url.substring(fileNameStart, fileNameEnd);
  var file = fs.createWriteStream(datadir + `download/${uid}/${fileName}`);
  if (url.startsWith('https')) {
    https.get(url, (response) => {
      console.log('start piping file');
      response.pipe(file);
      file.on('finish', () => {
        console.log('get into http finish');
        unzipData(datadir + `download/${uid}/`, fileName, uid);
      });
    }).on('error', (err) => { // Handle errors
      fs.unlink(datadir + `download/${uid}/${fileName}`);
      console.log('download file err: ', err);
    });
  } else {
    http.get(url, (response) => {
      console.log('start piping file');
      response.pipe(file);
      file.on('finish', () => {
        unzipData(datadir + `download/${uid}/`, fileName, uid);
      });
    }).on('error', (err) => {
      fs.unlink(datadir + `download/${uid}/${fileName}`);
      console.log('download file err: ', err);
    });
  }
};
function unzipData(path, fileName, uid) {
  console.log('get into unzip');
  console.log('creating: ', path + fileName);
  fs.createReadStream(path + fileName)
    .pipe(unzip.Extract({ path: path }))
    .on('close', () => {
      console.log('get into unzip close');
      var filelist = listFile(path);
      filelist.forEach((filePath) => {
        if (!filePath.endsWith('.zip')) {
          var components = filePath.split('/');
          var component = components[components.length - 1];
          mv(filePath, datadir + `processing/${uid}/${component}`, (err) => {
            if (err) {
              console.log('move file err: ');
            } else {
              console.log('move file done');
            }
          });
        }
      });
      fs.unlink(path + fileName, (err) => {});
    });
}

After the all the process done (both download process and post process), I want to execute a callback function.
The tricky thing about a series of asynchronous processes is that you can never know exactly when all of them will complete. Setting a timeout for the callback is a quick-and-dirty way to do it, but it's definitely not reliable.
You can instead use a counter to solve this problem.
Say you have 10 operations to perform. At the beginning you set your counter to ten: counter = 10. After each process completes, regardless of how (it can either succeed or fail), you decrement the counter by 1 (counter -= 1) and then check whether it has reached 0. If it has, all processes are done and you can safely run your callback: if (counter === 0) callback();
If I were you, I would do something like this:
*Notice that the called process should return a promise, so that I can know when it finishes (again, regardless of how).
*If you need a refresher on promises, this article might help: https://howtonode.org/promises
*One more thing: you should avoid arguments.callee, because it's deprecated. See "Why was the arguments.callee.caller property deprecated in JavaScript?" for the reasons.
exports.processArray = (items, process, callback) => {
  var todo = [].concat(items);
  var counter = todo.length;
  runProcess();

  function runProcess() {
    // Check if the counter already reached 0
    if (checkCounter() === false) {
      // Nope. Counter is still > 0, which means we got work to do.
      var processPromise = process(todo.shift());
      processPromise
        .then(function() {
          // success
        })
        .catch(function() {
          // failure
        })
        .finally(function() {
          // The previous process is done.
          // Now we can go with the next one.
          --counter;
          runProcess();
        });
    }
  }

  function checkCounter() {
    if (counter === 0) {
      callback();
      return true;
    } else {
      return false;
    }
  }
};
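For this to work, the process function you pass in has to return a promise. As a minimal sketch of the call site (hypothetical: it assumes getDataReg and getDataFtp have been changed to return promises, which the next answer shows how to do):
processArray(
  urlList,
  (url) => {
    if (url.startsWith('http')) return getDataReg(url, uid);
    if (url.startsWith('ftp')) return getDataFtp(url, uid);
    // rejecting marks the item as failed; the counter still decrements in .finally()
    return Promise.reject(new Error('not a valid resource'));
  },
  () => console.log('all done')
);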

What you want to do is make all your asynchronous processes converge into a single promise that you can use to execute the callback at the correct moment.
Let's start at the point where each process completes, which I assume is in the callback passed to the mv() function in unzipData(). You want to wrap each of these asynchronous actions in a Promise that resolves in that callback. Since you also need those promises later, use .map() instead of .forEach() to collect them into an array.
Here's the code:
var promises = filelist.map((filePath) => {
  if (!filePath.endsWith('.zip')) {
    var components = filePath.split('/');
    var component = components[components.length - 1];
    return new Promise((resolve, reject) =>
      mv(filePath, datadir + `processing/${uid}/${component}`, (err) => {
        if (err) {
          console.log('move file err: ');
          reject(); // Or resolve() if you want to ignore the error and not have it prevent the callback from executing later
        } else {
          console.log('move file done');
          resolve();
        }
      }));
  }
  return Promise.resolve();
});
(If the asynchronous action is not to be executed, a Promise that resolves immediately is returned instead.)
Now, we can turn this list of Promises into a single Promise that resolves when all of the promises in the list have resolved:
var allPromise = Promise.all(promises);
Next, we need to look further up in the code. The code we've just been looking at is itself part of an event handler of an asynchronous action, namely fs.createReadStream(). You need to wrap that in a promise that gets resolved when the inner promises resolve; this is the promise that the unzipData() function shall return:
function unzipData(path, fileName, uid) {
  console.log('get into unzip');
  console.log('creating: ', path + fileName);
  return new Promise((outerResolve) =>
    fs.createReadStream(path + fileName)
      .pipe(unzip.Extract({ path: path }))
      .on('close', () => {
        console.log('get into unzip close');
        var filelist = listFile(path);
        // Code from previous examples
        allPromise.then(outerResolve);
      }));
}
Next, we look at the functions that use unzipData(): getDataReg() and getDataFtp(). They only perform one asynchronous action so all you need to do is to make them return a promise that resolves when the promise returned by unzipData() resolves.
Simplified example:
exports.getDataReg = (url, uid) => {
  return new Promise((resolve, reject) => {
    // ...
    https.get(url, (response) => {
      response.pipe(file);
      file.on('finish', () => {
        unzipData(datadir + `download/${uid}/`, fileName, uid)
          .then(resolve);
      });
    }).on('error', (err) => { // Handle errors
      fs.unlink(datadir + `download/${uid}/${fileName}`);
      reject(); // Or resolve() if you want to ignore the error and not have it prevent the callback from executing later
    });
    // ...
  });
}
Finally, we get to the processArray() function and here you need to do the same thing we did to begin with: map the processes into a list of promises. First, the process function passed needs to return the promises returned by getDataReg() and getDataFtp():
// Second param, download and post process
(url) => {
  if (url.startsWith('http')) {
    return getDataReg(url, uid);
  }
  else if (url.startsWith('ftp')) {
    return getDataFtp(url, uid);
  }
  else {
    console.log('not a valid resource');
  }
  return Promise.reject(); // or Promise.resolve() if you want invalid resources to be ignored and not prevent the callback from executing later
}
Now, your processArray() function can look like this:
exports.processArray = (items, process, callback) =>
  Promise.all(items.map(process))
    .then(callback)
    .catch(() => console.log('Something went wrong somewhere'));
Your callback will get invoked when all asynchronous actions have completed, regardless of the order in which they complete. If any one of the promises rejects, the callback will never be executed, so manage your promise rejections accordingly.
Here's a JSFiddle with the complete code: https://jsfiddle.net/upn4yqsw/
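Regarding the rejection caveat above: if you'd rather have the callback run even when some items fail, one sketch (not part of the original answer) is to "reflect" each promise into a settled-status object before handing the list to Promise.all; on newer runtimes Promise.allSettled does the same thing natively:
// Turn a rejection into a resolved status object so Promise.all never short-circuits
const reflect = (promise) => promise.then(
  (value) => ({ status: 'fulfilled', value }),
  (error) => ({ status: 'rejected', error })
);

exports.processArray = (items, process, callback) =>
  Promise.all(items.map(process).map(reflect))
    .then((results) => callback(results)); // runs even if some items rejected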

In general, since Node.js does not appear to implement the Streams Standard as Promise-based (at least from what I can gather) but rather uses an event-based or callback mechanism, you can use the Promise constructor within the function call to return a fulfilled Promise object when a specific event has been dispatched:
const doStuff = (...args) => new Promise((resolve, reject) => {
  /* define and do stream stuff */
  doStreamStuff.on('close' /* or 'end' */, () => {
    // do stuff
    resolve(/* value */)
  })
});

doStuff(/* args */)
  .then(data => {})
  .catch(err => {})
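As a concrete sketch of that pattern (a hypothetical helper using only standard fs stream events), a download-to-disk step could resolve on the write stream's 'finish' event:
const fs = require('fs');

// Resolves once the destination file has been fully flushed to disk
const saveToFile = (readable, path) => new Promise((resolve, reject) => {
  const out = fs.createWriteStream(path);
  readable.pipe(out);
  out.on('finish', () => resolve(path));
  out.on('error', reject);
  readable.on('error', reject);
});

// usage: saveToFile(response, './download.zip').then(path => ...).catch(err => ...)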

Related

Correctly use fs write inside createReadStream on data

I am attempting to combine n binary files into a single file in JavaScript using streams. I have a write stream that is passed to the following function. I notice that the total written bytes do not match the actual number of bytes in the file, and are also not consistent across multiple runs.
After reading the documentation, I noticed that the write call returns a promise and is not safe to call again until the previous promise is fulfilled. I am not sure how to make the readStream.on('data', function (chunk) {...}) handler use await, since the function is not async and I get the error "await is only valid in async function".
async function concatFile (filename, fileHandle) {
  return new Promise((resolve, reject) => {
    const readStream = fs.createReadStream(filename, { highWaterMark: 1024 })
    readStream.on('data', function (chunk) {
      // read
      fileHandle.write(chunk)
    })
    readStream.on('error', e => {
      reject(e)
    })
    readStream.on('close', function (err) {
      // close
    })
    readStream.on('end', function () {
      // done
      readStream.close()
      resolve()
    })
  }) // end of Promise
}
I am using the above function in the following snippet:
const fileWriter = fs.createWriteStream('concatBins.bin', { flags: 'w' })
let writtenLen = 0
fileList = {}
fileList[0] = "foo.bin"
fileList[1] = "bar.bin"
for (const [key, value] of Object.entries(fileList)) {
  await concatFile(value, fileWriter)
  writtenLen = fileWriter.bytesWritten
  console.log('bytes written ' + writtenLen)
}
You can pause the readStream until the write is done to avoid getting further data events, and then resume it when the write completes. You can also declare the .on('data', ...) callback async if you want to use await, but you still have to pause the readStream yourself because async/await won't pause it for you.
// stream write that returns a promise when OK to proceed
// with more writes
function write(stream, data) {
  return new Promise((resolve, reject) => {
    if (stream.write(data)) {
      resolve();
    } else {
      // need to wait for drain event
      stream.once('drain', resolve);
    }
  });
}

async function concatFile (filename, writeStream) {
  return new Promise((resolve, reject) => {
    const readStream = fs.createReadStream(filename, { highWaterMark: 1024 });
    let paused = false;
    let ended = false;
    readStream.on('data', async function(chunk) {
      // read
      try {
        readStream.pause();
        paused = true;
        await write(writeStream, chunk);
      } catch(e) {
        // have to decide what you're doing if you get a write error here
        reject(e);
      } finally {
        paused = false;
        readStream.resume();
        if (ended) {
          readStream.emit("finalEnd");
        }
      }
    });
    readStream.on('error', e => {
      reject(e)
    })
    readStream.on('close', function (err) {
      // close
    })
    readStream.on('end', function () {
      // done
      ended = true;
      if (!paused) {
        readStream.emit('finalEnd');
      }
    });
    // listen for our real end event
    readStream.on('finalEnd', () => {
      readStream.close();
      resolve()
    });
  }) // end of Promise
}
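For what it's worth, on Node versions where readable streams are async iterable (v10+), the same backpressure handling can be sketched with for await, reusing the write() helper from above:
// Alternative sketch: for await pulls one chunk at a time, and awaiting
// write() (which waits for 'drain') applies backpressure between chunks
async function concatFileIterated(filename, writeStream) {
  const readStream = fs.createReadStream(filename, { highWaterMark: 1024 });
  for await (const chunk of readStream) {
    await write(writeStream, chunk);
  }
}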

Wait for inner functions to wait and execute then proceed execution

I am executing a cloud function written in Node.js. The function triggers when data from an external source comes in, and in it I have to query the DB for a particular table. That query takes more than 5 seconds, so my main function finishes executing before my getDataFromDb function does. There is also a function updateItems(postdata); which executes if I cannot find the data in my DB when getDataFromDb runs.
I tried async/await but I am not sure where I am going wrong; my function always ends before my DB operation ends.
exports.handler = (event, context) => {
  // here i am making data ready for DB and checking if the record is present in db
  getDataFromDb(uniqueArray);
  function getDataFromDb(uniqueArray) {
    var params = {
      // params for get request
    }
    // db get operation
    db.get(params, function (err, data) {
      // takes time here
      if (err) {
        console.log(err); // an error occurred
      }
      else {
        console.log("first run for db")
        // another DB operation updateItems(postdata);
        // takes time here
      }
    });
  }
  console.log("main function ended")
};
The expected behaviour is for the main function to wait until the inner function finishes and only then end, but in practice the main function ends first and the DB-calling function finishes afterwards.
Though this can be achieved with callbacks, converting it to a promise chain makes it easier. Since execution of the inner function depends on the outer one, it's better to chain promises, i.e. return a Promise from the first function's callback so they execute serially.
exports.handler = (event, context) => {
  getDataFromDb(uniqueArray).then(success => {
    console.log('Done')
  })
  .catch(err => {
    console.log('handle get or post err here');
  });
  function getDataFromDb(uniqueArray) {
    var params = {};
    return new Promise((resolve, reject) => {
      db.get(params, (err, data) => {
        if (err) {
          return reject(err); // an error occurred
        } else {
          return resolve(data);
        }
      });
    }).then(data => updateItems(data))
  }
};
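The same flow reads even more directly with async/await (a sketch; it assumes getDataFromDb returns just the db.get promise and that updateItems is also promisified, which the question's code doesn't show):
exports.handler = async (event, context) => {
  try {
    const data = await getDataFromDb(uniqueArray); // resolves with the db.get result
    await updateItems(data); // assumes updateItems returns a promise
    console.log('Done');
  } catch (err) {
    console.log('handle get or post err here');
  }
  console.log("main function ended"); // now runs after the DB work
};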

Node js lost in asynchronous behaviour: undefined

Objective
Disclaimer: I am new to the Node world and having a tough time wrapping my head around its asynchronous behaviour.
I am trying to write a wrapper function that does an https.get on a given URL and returns the JSON output.
Code
const https = require('https');
// Get the user details
var myUrl = <valid-url>;
const getJson = function(url) {
  // https get request
  const req = https.get(url, (res) => {
    // get the status code
    const { statusCode } = res;
    const contentType = res.headers['content-type'];
    // check for the errors
    let error;
    if (statusCode !== 200) {
      error = new Error('Request Failed.\n' +
        `Status Code: ${statusCode}`);
    } else if (!/^application\/json/.test(contentType)) {
      error = new Error('Invalid content-type.\n' +
        `Expected application/json but received ${contentType}`);
    }
    if (error) {
      console.error(error.message);
      // consume response data to free up memory
      res.resume();
      return;
    }
    // parse json
    res.setEncoding('utf8');
    let rawData = '';
    res.on('data', (chunk) => { rawData += chunk; });
    res.on('end', () => {
      try {
        const parsedData = JSON.parse(rawData);
        console.log(parsedData);
      } catch (e) {
        console.error(e.message);
      }
    });
  }).on('error', (e) => {
    console.error(`Got error: ${e.message}`);
  });
}
console.log(getJson(myUrl));
Output
undefined
{ user_id: <user-id>,
name: 'Ajay Krishna Teja',
email: <my-email> }
Issue
So https.get is able to hit the endpoint and get the data, but it is not able to return the JSON; it constantly returns undefined.
Things I tried
Returning parsedData from the res.on('end') block
Defining a var and copying parsedData into it
Copying it to a global variable (although I know that's very bad practice)
Places I looked up
Node.js variable declaration and scope
How to get data out of a Node.js http get request
Javascript function returning undefined value in node js
Updated: Working code
const getJson = function(url, callback) {
  // https get request
  const req = https.get(url, (res) => {
    // get the status code
    const { statusCode } = res;
    const contentType = res.headers['content-type'];
    // check for the errors
    let error;
    if (statusCode !== 200) {
      error = new Error('Request Failed.\n' +
        `Status Code: ${statusCode}`);
    } else if (!/^application\/json/.test(contentType)) {
      error = new Error('Invalid content-type.\n' +
        `Expected application/json but received ${contentType}`);
    }
    if (error) {
      console.error(error.message);
      // consume response data to free up memory
      res.resume();
      return;
    }
    // parse json
    res.setEncoding('utf8');
    let rawData = '';
    res.on('data', (chunk) => { rawData += chunk; });
    res.on('end', () => {
      try {
        const parsedData = JSON.parse(rawData);
        callback(parsedData);
      } catch (e) {
        callback(false);
        console.error(e.message);
      }
    });
  }).on('error', (e) => {
    console.error(`Got error: ${e.message}`);
  });
  return req;
}
// calling
getJson(amznProfileURL, (res) => {
  console.log(res);
});
Short answer: You are not returning anything from your getJson function, and undefined is the default JavaScript return value.
function getJson(){
  callAsyncFunction(param1, param2, param3)
  // there is no return value!
}
Longer answer: JavaScript (and Node as a result) is a single-threaded language that uses callbacks as its mechanism for returning async results back to the caller. To do this, you pass a function into the asynchronous function as a parameter, and that function gets called at some point in the future whenever the asynchronous function is ready to send back its result. Calling return from this "anonymous function" only returns from the callback you passed in, not from the enclosing function.
function getJson(){
  console.log('A')
  // request is started, but getJson continues execution!
  http.get(url, (res) => {
    console.log('C') // by the time I'm called, 'B' has already been printed and the function has returned!
    return true // this won't return from getJson! It only returns from the callback, which doesn't do anything with it!
  })
  console.log('B')
  // end of function without return value, return undefined!
}
// Will print 'A', 'B', 'C'
There are a couple of different ways you can handle this. Callbacks have been used traditionally, but JavaScript also natively supports Promises, which are a little easier to manage and are used by default in many popular frameworks.
You can implement your function with callbacks by providing your own callback parameter and calling it as soon as http.get produces a result.
// define getJson with second callback parameter
const getJson = function(url, callback) {
  http.get(url, (res) => {
    if (res) {
      callback(res) // result came back, send it to your own callback function
    } else {
      callback(false) // request failed, send back false to signify failure
    }
  })
}
// now I can use getJson and get the result!
getJson('http://getjson.com', (res) => {
  console.log('got result!', res)
})
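Since promises were mentioned above, here's a sketch of the same wrapper returning a promise instead of taking a callback (illustrative only; http.get hands you a response stream, so you still accumulate the body yourself):
const getJsonPromise = (url) => new Promise((resolve, reject) => {
  http.get(url, (res) => {
    let rawData = '';
    res.on('data', (chunk) => { rawData += chunk; });
    res.on('end', () => {
      try {
        resolve(JSON.parse(rawData)); // fulfil with the parsed body
      } catch (e) {
        reject(e); // body was not valid JSON
      }
    });
  }).on('error', reject); // network failure
});

getJsonPromise('http://getjson.com').then((json) => console.log(json));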
This is a pretty common hump to get over with async functions in Node (and JavaScript in general).
What's happening is that your console.log(getJson(myUrl)) runs before the HTTP request has returned anything; returning a value like this simply won't work with async functions.
If you put your console.log() inside res.on('end') it will work. To deal with this you can either put all your logic in res.on('end') (which kind of sucks), pass a callback to your getJson() function and call it in res.on('end'), or wrap everything in a promise that you return from getJson().
To use a callback you would do something like this:
const getJson = function(url, callback) {
  // a bunch of code
  res.on('end', () => {
    try {
      const parsedData = JSON.parse(rawData);
      callback(null, parsedData) // callbacks in node traditionally pass an error as the first arg
    }
    // finish
  }
}
Then you call it with a function:
getJson(url, function(err, return_val) {
  if (err) return // handle error
  console.log(return_val)
})
You can also look at other HTTP libraries like Axios that will return a promise without much work. With axios and similar libraries you can simply:
axios.get(url)
  .then(response => {
    console.log(response);
  })
  .catch(function (error) {
    console.log(error);
  });
Which is one of the reasons people use these libraries. More here: https://github.com/axios/axios
Because the request runs asynchronously, execution does not wait for the call to finish.
You can fix this with the promise pattern.
Try something like this:
/**
 * Created by bagjeongtae on 2017. 10. 2..
 */
function parseData(url) {
  return new Promise((resolve, reject) => {
    https.get(url, (res) => {
      // get the status code
      const { statusCode } = res;
      const contentType = res.headers['content-type'];
      // check for the errors
      if (statusCode !== 200) {
        res.resume(); // consume response data to free up memory
        reject('Request Failed.\n' + `Status Code: ${statusCode}`);
        return;
      } else if (!/^application\/json/.test(contentType)) {
        res.resume();
        reject('Invalid content-type.\n' +
          `Expected application/json but received ${contentType}`);
        return;
      }
      // parse json
      res.setEncoding('utf8');
      let rawData = '';
      res.on('data', (chunk) => {
        rawData += chunk;
      });
      res.on('end', () => {
        try {
          const parsedData = JSON.parse(rawData);
          console.log(parsedData);
          resolve(parsedData);
        } catch (e) {
          console.error(e.message);
          reject(e.message);
        }
      });
    });
  });
}
parseData('http://www.example.com').then(result => {
  console.log(result);
}, err => {
  console.log(err);
})
getJson runs asynchronously, so the console.log does not wait for it to finish.
With promises, asynchronous code can be written so that it reads like synchronous code.
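For instance, if getJson were changed to return a promise (like the parseData example above), the call site could read almost synchronously:
// assumes getJson returns a promise that fulfils with the parsed JSON
(async () => {
  const json = await getJson(myUrl);
  console.log(json); // prints the data instead of undefined
})();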
I think the output is correct. getJson(myUrl) returns undefined since you never set a return value in the getJson function (JavaScript returns undefined by default), and the
{ user_id: <user-id>,
name: 'Ajay Krishna Teja',
email: <my-email> }
is the output of console.log(parsedData) in your code.

How to make sure that a foreach loop running async calls finished?

I have a forEach loop in which I call an async function. How can I make sure all the async functions have called their specified callbacks, and only then run something?
Keep a counter.
Example:
const table = [1, 2, 3];
let counter = 0; // must be let, not const, since we increment it
const done = () => {
  console.log('foreach is done');
}
table.forEach((el) => {
  doSomeAsync((err, result) => {
    counter++;
    if (counter === 3) {
      done();
    }
  });
});
As the other answer says, you can use the async package, which is really good. But for the sake of completeness I recommend using Promises and the vanilla Promise.all(). Example:
const table = [1, 2, 3];
Promise.all(table.map((el) => {
  return new Promise((resolve, reject) => {
    doSomeAsync((err, result) => {
      return err ? reject(err) : resolve(result);
    });
  });
}))
.then((result) => {
  // when all calls are resolved
})
.catch((error) => {
  // if one call encounters an error
});
You can use the Async library for this. It has various useful utility functions.
It provides a queue that can execute a set of tasks, and you get a callback when all tasks are done, where you can do whatever you want. You can also control the concurrency of your queue (how many tasks are executed in parallel at a time).
Here is some sample code:
// create a queue object with concurrency 2
var q = async.queue(function(task, callback) {
  console.log('hello ' + task.name);
  callback();
}, 2);
// The callback function which is called after all tasks are processed
q.drain = function() {
  console.log('all tasks have been processed');
};
// add some tasks to the queue
q.push({name: 'foo'}, function(err) {
  console.log('finished processing foo');
});
q.push({name: 'bar'}, function (err) {
  console.log('finished processing bar');
});

Node JS Sync Work flow with Async request

I'm currently trying to learn Node.js and getting my head around async/sync workflow.
I'm trying to do the following:
Step 1:
- Get data 1 with function 1
- Get data 2 with function 2
- Get data 3 with function 3
Step 2:
- Work out logic with data 1, 2, 3
Step 3:
- Do final call
I've been looking at the Q and Async packages but still haven't really found an example.
Can someone show me how they would go about this in Node.js?
Thanks
Not entirely clear on your implementation, but depending on how specific your ordering needs to be you could try something like this:
var data1 = null;
var data2 = null;
var data3 = null;
async.series([
  function(httpDoneCallback){
    async.parallel([
      function(data1Callback){
        $http(...).then(function(response){
          // some logic here
          data1 = response;
          data1Callback();
        })
      },
      function(data2Callback){
        $http(...).then(function(response){
          // some logic here
          data2 = response;
          data2Callback();
        })
      },
      function(data3Callback){
        $http(...).then(function(response){
          // some logic here
          data3 = response;
          data3Callback();
        })
      }
    ], function(){
      // all requests done, move onto logic
      httpDoneCallback();
    })
  },
  function(logicDoneCallback){
    // do some logic, maybe more asynchronous calls with the newly acquired data
    logicDoneCallback();
  }
], function(){
  console.log('all done');
})
Do you want function 1, 2, and 3 to trigger at the same time? If so then this should help:
var async = require('async');
async.parallel([
  function(cb1) {
    cb1(null, "one")
  },
  function(cb2){
    cb2(null, "two")
  },
  function(cb3){
    cb3(null, "three")
  }
], function(err, results) {
  console.log(results); // Logs ["one", "two", "three"]
  finalCall();
});
To explain: every function in the array passed as the first param to the parallel method receives a callback function. Calling that callback signifies that you're done fetching your data or doing whatever else you need to do in the function. All three functions trigger at the same time, and once all three callbacks have been called, the final function runs. The callback accepts two parameters: "error" and "result". If everything is successful, pass null as the error parameter. The results are given to the final function as an array containing the results of your individual functions.
You can setup a chain of Promises to do things sequentially:
var funcA = () => {
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      resolve('some data from A')
    }, 1000)
  });
}
var funcB = (dataFromA) => {
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      resolve(dataFromA + ' data from B')
    }, 2000)
  })
}
var funcC = (dataFromB) => {
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      resolve(dataFromB + ' data from C')
    }, 500)
  })
}
// Doing the functions one after another
funcA().then(funcB).then(funcC).then((data) => {
  console.log(data);
})
Or if you want to do them all at the same time you can use Promise.all():
var promises = [];
promises.push(new Promise((resolve, reject) => {
  setTimeout(() => {
    resolve('some data from A')
  }, 1000)
}));
promises.push(new Promise((resolve, reject) => {
  setTimeout(() => {
    resolve('some data from B')
  }, 1000)
}));
promises.push(new Promise((resolve, reject) => {
  setTimeout(() => {
    resolve('some data from C')
  }, 1000)
}));
// Execute the array of promises at the same time, and wait for them all to complete
Promise.all(promises).then((data) => {
  console.log(data);
})
Probably the best thing to do is use Promises like #Tyler here states. However, for conceptual understanding it is best to first understand the node callback pattern.
Because some tasks take time, we give the task a function and say 'When you are done, put the data you retrieved into this function'. These functions that we give to other functions are called callbacks. They must be constructed to accept the data, and also an error in case there is a problem while fetching the data. In Node the error is the first callback parameter and the data is the second.
fs.readFile('/file/to/read.txt', function callback(error, data) {
  if (error) console.log(error);
  else console.log(data);
});
In this example, once node reads the file, it will feed that data into the callback function. In the callback we must account for the case that there was a problem and handle the error.
In your question you want to do multiple async tasks and use their results. Therefore you must take this pattern and nest several of them. So, continuing this example, if there is no error you will begin another async task.
fs.readFile('/file/to/read.txt', function callback(error, data) {
  if (error) console.log(error);
  else {
    someOtherAsyncThing(function callback(error, data2) {
      if (error) console.log(error);
      else {
        console.log(data + data2)
      }
    });
  }
});
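As a sketch of how the same nesting flattens out with promises (util.promisify has shipped with Node since v8; someOtherAsyncThing is the hypothetical function from the example above):
const util = require('util');
const fs = require('fs');

// Wrap the callback-style APIs so they return promises instead
const readFile = util.promisify(fs.readFile);
const someOtherAsync = util.promisify(someOtherAsyncThing);

readFile('/file/to/read.txt')
  .then((data) => someOtherAsync().then((data2) => console.log(data + data2)))
  .catch((error) => console.log(error));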
