How to trigger async processes one after another - JavaScript

How should I modify the following code so I can make sure Process3 is triggered after Process2.update or Process2.create completes?
The main purpose of the following code is: I want Process1 to finish, then check whether id exists. If yes, Process2.update is triggered; if not, Process2.create is triggered. Once Process2 finishes, check whether cmd exists. If yes, trigger Process3.
run: function (req, res) {
  if (req.session) {
    const values = req.params.all();
    const id = values.id;
    const cmd = values.cmd;
    const param = _.omit(values, ['cmd', 'id']);
    const cb1 = (e, d) => {
      if (e) {
        console.log(e);
        res.status(400).send({ e });
      } else {
        Process1(values);
        res.status(200).send({ d });
      }
    };
    const cd2 = (id, param, cb1) => {
      if (id) {
        Process2.update({ id }, param, cb1);
      } else {
        Process2.create(param, cb1);
      }
    };
    if (cmd) {
      cd2(id, param, cb1, Process3(values, cmd));
    } else {
      cd2(id, param, cb1);
    }
  } else {
    res.status(403).send({ e: 'Forbidden access.' });
  }
}
I tried the approach below, but I'm not sure how I can pass the arguments id and param to Process2 and Process3:
let async = require('async');
const Process1 = (value, cb) => {
  console.log("Process1()");
  console.log(value);
  cb(null, value + 1);
};
const Process2 = (value, cb) => {
  console.log("Process2(): wait 5 sec");
  console.log(value);
  cb(null, value + 10);
};
const Process3 = (value, cb) => {
  console.log(value);
  console.log("Process3(): wait 5 sec");
  cb(null, value + 100);
};
let Pro_1_2 = async.compose(Process2, Process1);
let Pro_2_3 = async.compose(Process3, Process2);
Pro_1_2(1, (e, r) => {
  Pro_2_3(r, (error, result) => {
    console.log(result);
  });
});

The code you posted in your original question seems pretty twisted up, so I'm not going to attempt to rewrite it, but in general, if you want to perform asynchronous calls that depend on each other, async.auto is a good way to go. Rather than declaring variables at the top that you attempt to mutate via some function calls, it's better to make Process1, Process2 and Process3 asynchronous functions that call their callbacks with a new values object. Something like:
async.auto({
  doProcess1: function(cb) {
    // Assuming Process1 calls `cb(undefined, newValues)` when done.
    Process1(values, cb);
    return;
  },
  doProcess2: ['doProcess1', function(results, cb) {
    if (results.doProcess1.id) {
      Process2.update({id: results.doProcess1.id}, cb);
      return;
    } else {
      Process2.create(_.omit(results.doProcess1, ['cmd', 'id']), cb);
      return;
    }
  }],
  doProcess3: ['doProcess2', function(results, cb) {
    if (results.doProcess2.cmd) {
      Process3(results.doProcess2, cb);
      return;
    } else {
      cb(undefined, results.doProcess2);
      return;
    }
  }]
}, function afterProcess3(err, results) {
  // Handle err or process final results.
});
Note all the return statements. They're not strictly necessary, but it's good practice to avoid accidentally running more code after calling your asynchronous functions.
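For comparison, the same dependency chain can be written without async.js. This is only a hedged sketch under the same assumptions (Process1, Process2.create/update and Process3 take a node-style (err, result) callback as their last argument), using util.promisify and async/await:
const util = require('util');

// Promisified wrappers; .bind keeps `this` intact for the Process2 methods.
const process1 = util.promisify(Process1);
const process2Update = util.promisify(Process2.update.bind(Process2));
const process2Create = util.promisify(Process2.create.bind(Process2));
const process3 = util.promisify(Process3);

async function runAll(values) {
  const v1 = await process1(values);
  const param = _.omit(v1, ['cmd', 'id']);
  // Update when an id exists, create otherwise.
  const v2 = v1.id ? await process2Update({ id: v1.id }, param)
                   : await process2Create(param);
  // Only run Process3 when a cmd is present, mirroring the async.auto version.
  return v2.cmd ? process3(v2) : v2;
}

runAll(values)
  .then(d => res.status(200).send({ d }))
  .catch(e => res.status(400).send({ e }));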

Have you considered using "compose", from async.js?
const a = (data, cb) => {
  var result = 'a';
  cb(null, result);
};
const b = (data, id, cb) => {
  var result = 'b';
  cb(null, result);
};
const c = (data, cb) => {
  // stuff to do with result, then call cb(null, ...)
};
var aThenC = async.compose(c, a);
var bThenC = async.compose(c, b);
if (useA) {
  aThenC(data, (err, result) => {
    // result from c
    res.status(200).send(result);
  });
} else {
  bThenC(data, id, (err, result) => {
    // result from c
    res.status(200).send(result);
  });
}
In this scenario, a and b are your Process2.create and Process2.update, respectively, and c is Process3, if I understood correctly.
EDIT: You only have to pass the initial parameters (e.g. a record ID) to the composed function. What compose really does is this: a(b(c(param))). That param is basically everything you need to start the process; the parameters for each following function are set inside the function before it.
I'll add code to support it as soon as I'm on a keyboard.
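In the meantime, here is a hedged illustration (not the answerer's promised code) of how the initial parameters could be threaded through a composition: since async.compose forwards a single result value from step to step, you can pass one state object along the whole chain. The step names here are hypothetical:
// Each step receives the whole state object and hands it (possibly
// augmented) to its callback, so later steps can read id, param, cmd, etc.
const step2 = (state, cb) => {
  const done = (e, d) => cb(e, Object.assign({}, state, { d }));
  if (state.id) {
    Process2.update({ id: state.id }, state.param, done);
  } else {
    Process2.create(state.param, done);
  }
};
const step3 = (state, cb) => {
  if (state.cmd) {
    Process3(state.values, state.cmd); // only runs after step2 has finished
  }
  cb(null, state);
};

const pipeline = async.compose(step3, step2); // step2 runs first, then step3
pipeline({ id, cmd, param, values }, (e, state) => { /* send the response here */ });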


Combine two callbacks into one return

So I have this code:
module.exports.getEstimate = (event, context, callback) => {
  var data = JSON.parse(event.body);
  lalamove.getQuotation(data, context, function(err, llm_data) {
    callback(null, llm_data);
  });
};
So it calls the lalamove.getQuotation function and returns an object:
{ "totalFee": "108", "totalFeeCurrency": "PHP" }
Now I have added a new function that returns this object:
{ "totalFee": "10", "totalFeeCurrency": "PHP" }
I thought I should push both results into one array and only then call the callback, but it does not work. This is what I have tried:
module.exports.getEstimate = (event, context, callback) => {
  var data = JSON.parse(event.body);
  var response = [];
  lalamove.getQuotation(data, context, function(err, llm_data) {
    const llm_obj = { "lalamove": llm_data };
    response.push(llm_obj);
  });
  inhouse.getQuotation(data, context, function(err, ih_data) {
    const ih_obj = { "inhouse": ih_data };
    response.push(ih_obj);
  });
  callback(null, response);
};
and what I want the response to be is this:
["lalamove": { "totalFee": "108", "totalFeeCurrency": "PHP" },
"inhouse": { "totalFee": "10", "totalFeeCurrency": "PHP" }]
what am I doing wrong?
Your callback(null, response) will not wait for those two callbacks to finish. You can wrap the calls in Promises and use Promise.all(...).then(...) to wait for all of them to finish before running.
Welcome to the JavaScript world: callback hell.
We have some options for your case: callback hell, the async lib, Promise, async/await...
Callback hell: call an async function inside a callback
module.exports.getEstimate = (event, context, callback) => {
  var data = JSON.parse(event.body);
  var response = [];
  lalamove.getQuotation(data, context, function (err, llm_data) {
    const llm_obj = { "lalamove": llm_data };
    response.push(llm_obj);
    // lalamove.getQuotation done!
    // call next action
    inhouse.getQuotation(data, context, function (err, ih_data) {
      const ih_obj = { "inhouse": ih_data };
      response.push(ih_obj);
      // inhouse.getQuotation done!
      // call the last action
      callback(null, response);
    });
  });
};
Async lib: async
You can use the waterfall function to do actions in order (a sketch follows the parallel example below), and parallel if order does not matter.
module.exports.getEstimate = (event, context, callback) => {
  var data = JSON.parse(event.body);
  var response = [];
  async.parallel([
    function (next) {
      lalamove.getQuotation(data, context, function (err, llm_data) {
        // TODO: check err object
        const llm_obj = { "lalamove": llm_data };
        response.push(llm_obj);
        // lalamove.getQuotation done!
        // do next action
        next();
      });
    },
    function (next) {
      inhouse.getQuotation(data, context, function (err, ih_data) {
        const ih_obj = { "inhouse": ih_data };
        response.push(ih_obj);
        // inhouse.getQuotation done!
        // do next action
        next();
      });
    }
  ], function (err) {
    // TODO: check err object
    // call the last action
    callback(null, response);
  });
};
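And the ordered variant mentioned above, as a hedged sketch: async.waterfall runs the tasks in sequence and passes each task's results on to the next one.
module.exports.getEstimate = (event, context, callback) => {
  var data = JSON.parse(event.body);
  async.waterfall([
    function (next) {
      lalamove.getQuotation(data, context, (err, llm_data) => next(err, llm_data));
    },
    function (llm_data, next) {
      // llm_data is forwarded so both results reach the final callback
      inhouse.getQuotation(data, context, (err, ih_data) => next(err, llm_data, ih_data));
    }
  ], function (err, llm_data, ih_data) {
    if (err) return callback(err);
    callback(null, [{ lalamove: llm_data }, { inhouse: ih_data }]);
  });
};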
Try wrapping the two quotation calls in Promises, then utilise Promise.all to wait for both of them to complete, then return the result to the callback:
module.exports.getEstimate = (event, context, callback) => {
  let data = JSON.parse(event.body);
  // wrap quotation calls in `Promise`
  Promise.all([
    new Promise(resolve => lalamove.getQuotation(data, context, (err, lalamove) => resolve({ lalamove }))),
    new Promise(resolve => inhouse.getQuotation(data, context, (err, inhouse) => resolve({ inhouse }))),
  ]).then(response => {
    // return the result back to `callback`
    callback(null, response);
  });
};
You could also try using util.promisify and the async / await syntax.
For example:
const util = require("util");
module.exports.getEstimate = async (event, context, callback) => {
  let data = JSON.parse(event.body);
  try {
    let response = await Promise.all([
      util.promisify(lalamove.getQuotation)(data, context),
      util.promisify(inhouse.getQuotation)(data, context)
    ]);
    callback(null, response);
  } catch (err) {
    callback(err);
  }
};
We can also do something similar, but without async / await:
const util = require("util");
const getEstimate = (event, context, callback) => {
  let data = JSON.parse(event.body);
  Promise.all([
    util.promisify(lalamove.getQuotation)(data, context),
    util.promisify(inhouse.getQuotation)(data, context)
  ])
    .then(response => callback(null, response))
    .catch(err => callback(err));
};
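Note that Promise.all resolves to an array ([llm_data, ih_data] here). If you want the single merged object shape the question hints at, you can destructure and rebuild it; a small, hedged variation:
Promise.all([
  util.promisify(lalamove.getQuotation)(data, context),
  util.promisify(inhouse.getQuotation)(data, context)
])
  .then(([llm_data, ih_data]) => {
    // One object instead of an array of two wrappers
    callback(null, { lalamove: llm_data, inhouse: ih_data });
  })
  .catch(err => callback(err));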

Wrap a resultset callback function with a generator/iterator

I'm working on converting a legacy callback-based API into an async library. But I just can't wrap my head around getting a "resultset" to work as a generator (Node 10.x).
The original API works like this:
api.prepare((err, rs) => {
  rs.fetchRows(
    (err, row) => {
      // this callback is called as many times as rows exist
      console.log("here's a row:", row);
    },
    () => {
      console.log("we're done, data exhausted");
    }
  );
});
But here is how I want to use it:
const wrapped = new ApiWrapper(api);
const rs = await wrapped.prepare({});
for (let row of rs.rows()) {
  console.log("here's a row:", row);
}
let row;
while (row = await rs.next()) {
  console.log("here's a row:", row);
}
I thought I had it under control with generators, but it looks like you cannot use yield inside a callback. It actually seems logical if you think about it.
class ApiWrapper {
  constructor(api) {
    this.api = api;
  }
  prepare() {
    return new Promise((resolve, reject) => {
      this.api.prepare((err, rs) => {
        if (err) {
          reject(err);
        } else {
          resolve(rs);
        }
      });
    });
  }
  *rows() {
    this.api.fetchRows((err, row) => {
      if (err) {
        throw err;
      } else {
        yield row; // nope, not allowed here
      }
    });
  }
  next() { ... }
}
So what alternatives do I have?
Important: I don't want to store anything in an array and then iterate over it; we're talking giga-loads of row data here.
Edit
I'm able to simulate the behavior I want using stream.Readable, but it warns me that it's an experimental feature. Here's a simplified array-based version of the issue I'm trying to solve, using stream:
const stream = require('stream');
function gen() {
  const s = new stream.Readable({
    objectMode: true,
    read() {
      [11, 22, 33].forEach(row => {
        this.push({ value: row });
      });
      this.push(null);
    }
  });
  return s;
}
for await (let row of gen()) {
  console.log(row);
}
// { value: 11 }
// { value: 22 }
// { value: 33 }
// (node:97157) ExperimentalWarning: Readable[Symbol.asyncIterator] is an experimental feature. This feature could change at any time
I finally realized I needed something similar to Go's channels, something async/await compatible. Basically the answer is to synchronize an async iterator and a callback, making them wait for each other as next() iterations are consumed.
The best (Node) native solution I found was to use a stream as an iterator, which is supported in Node 10.x but tagged experimental. I also tried to implement it with the p-defer NPM module, but that turned out to be more involved than I expected. Finally I ran across the https://www.npmjs.com/package/@nodeguy/channel module, which was exactly what I needed:
const Channel = require('@nodeguy/channel');
class ApiWrapper {
  // ...
  rows() {
    const channel = new Channel();
    const iter = {
      [Symbol.asyncIterator]() {
        return this;
      },
      async next() {
        const val = await channel.shift();
        if (val === undefined) {
          return { done: true };
        } else {
          return { done: false, value: val };
        }
      }
    };
    this.api.fetchRows(async (err, row) => {
      await channel.push(row);
    }).then(() => channel.close());
    return iter;
  }
}
// then later
for await (let row of rs.rows()) {
  console.log(row);
}
Note how the core of each iterating function, next() and rows(), has an await that throttles how much data can be pushed across the channel; otherwise the producing callback could end up pushing data uncontrollably into the channel queue. The idea is that the callback should wait for data to be consumed by the iterator's next() before pushing more.
Here's a more self-contained example:
const Channel = require('@nodeguy/channel');
function iterating() {
  const channel = Channel();
  const iter = {
    [Symbol.asyncIterator]() {
      return this;
    },
    async next() {
      console.log('next');
      const val = await channel.shift();
      if (val === undefined) {
        return { done: true };
      } else {
        return { done: false, value: val };
      }
    }
  };
  [11, 22, 33].forEach(async it => {
    await channel.push(it);
    console.log('pushed', it);
  });
  console.log('returned');
  return iter;
}
(async function main() {
  for await (let it of iterating()) {
    console.log('got', it);
  }
})();
/*
returned
next
pushed 11
got 11
next
pushed 22
got 22
next
pushed 33
got 33
next
*/
Like I said, streams and/or promises can be used to implement this, but the Channel module removes some of the complexity and makes it more intuitive.
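To see why the channel abstraction is convenient, here is a minimal, hedged approximation of its core written with plain promises (unbuffered, no error propagation; not the actual @nodeguy/channel implementation):
class MiniChannel {
  constructor() {
    this.pushers = [];  // producers waiting: { value, resolve }
    this.shifters = []; // consumers waiting: resolve functions
    this.closed = false;
  }
  push(value) {
    // Resolves only when a consumer takes the value: that's the backpressure.
    if (this.shifters.length) {
      this.shifters.shift()(value);
      return Promise.resolve();
    }
    return new Promise(resolve => this.pushers.push({ value, resolve }));
  }
  shift() {
    if (this.pushers.length) {
      const { value, resolve } = this.pushers.shift();
      resolve(); // release the waiting producer
      return Promise.resolve(value);
    }
    if (this.closed) return Promise.resolve(undefined);
    return new Promise(resolve => this.shifters.push(resolve));
  }
  close() {
    this.closed = true;
    // Wake any consumers still waiting; undefined signals "done" to next().
    this.shifters.forEach(resolve => resolve(undefined));
    this.shifters = [];
  }
}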
The original question has two nested callback-taking async functions:
api.prepare((err, res) => ...)
rs.fetchRows((err, res) => ...)
The first one runs its callback only once, so just promisifying it as follows is sufficient:
function promisified(f) {
  return new Promise((v, x) => f((err, res) => err ? x(err) : v(res)));
}
However, the second function will invoke its callback multiple times, and we wish to generate an async iterable from it so that we can consume it in a for await...of loop.
This is also possible by employing async generators, as follows:
async function* rowEmitterGenerator(rs) {
  let _v, // previous resolve
      _x, // previous reject
      _row = new Promise((v, x) => (_v = v, _x = x));
  rs.fetchRows((err, row) => ( err ? _x(err) : _v(row)
                             , _row = new Promise((v, x) => (_v = v, _x = x))
                             ));
  while (true) {
    try {
      yield _row;
    } catch (e) {
      console.log(e);
    }
  }
}
Then, putting it all together in a top-level await context:
const rows = await promisified(api.prepare),
      rowEmitter = rowEmitterGenerator(rows);
for await (let row of rowEmitter) {
  console.log(`Received row: ${row}`);
  // do something with the row
}
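One caveat: the generator above never signals completion, because the done callback of fetchRows is ignored. A hedged variation that uses the API's second callback to terminate the loop, at the cost of buffering rows without backpressure:
async function* rowEmitterGenerator(rs) {
  const queue = []; // rows (or errors) waiting to be consumed
  let wake = null;  // resolver that wakes the consumer
  const push = (item) => {
    queue.push(item);
    if (wake) { wake(); wake = null; }
  };
  rs.fetchRows(
    (err, row) => push(err ? { err } : { row }),
    () => push({ done: true }) // the API's "data exhausted" callback
  );
  while (true) {
    if (!queue.length) await new Promise(resolve => { wake = resolve; });
    const item = queue.shift();
    if (item.done) return;
    if (item.err) throw item.err;
    yield item.row;
  }
}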

Create callback in node js module.exports

How do I create a callback function inside the function assigned to module.exports?
I'm trying to do something similar to the code below, and I want to know how to implement the callback function.
module.js
module.exports = (a, b, callback) => {
  let sum = a + b;
  let error = null;
  //callback
};
app.js
const add = require('./module.js');
add(1, 2, (err, result) => {
});
Within your module.exports you need to invoke the callback function, like so:
callback(error, sum)
This will return control back to the add() call in app.js. You implement your callback function there, i.e. what you want to do with the result you received.
Here is what your code will look like:
module.js
module.exports = (a, b, callback) => {
  let sum = a + b;
  let error = null;
  callback(error, sum); // invoke the callback function
};
app.js
const add = require("./module");
add(1, 2, (err, result) => {
  if (err) { // Best practice to handle your errors
    console.log(err);
  } else { // Implement the logic, what you want to do once you receive the response back
    console.log(result);
  }
});
You used sum for your function, but I will use divide, because that way I can demonstrate the error argument of the callback.
Your export will look like this:
module.exports = {
  divide: (a, b, cb) => {
    if (b === 0) {
      cb('divide by zero', null);
    } else {
      cb(null, a / b);
    }
  }
};
and the import like this
var func = require('./testExport').divide;
func(1, 2, (err, res) => {
  console.log(err, res);
});
func(1, 0, (err, res) => {
  console.log(err, res);
});
Callbacks are simply functions that you pass in from the place where you call the exported function. In both function calls (on the importing side) you can see we are sending in a function as a callback that takes two arguments.
On the exporting side we call that same function with the first parameter as an error and the second as the result.
If you want to import your function without require('./testExport').divide, you will have to assign the function directly to module.exports as the default export.
module.exports = (a, b, cb) => {
  if (b === 0) {
    cb('divide by zero', null);
  } else {
    cb(null, a / b);
  }
};
and import it as
var defaultFunc = require('./testExport')
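and call it like any other function, for example:
defaultFunc(1, 0, (err, res) => {
  console.log(err, res); // "divide by zero" null
});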
add.js
module.exports = (a, b, callback) => {
  if (typeof a !== 'number' || typeof b !== 'number') {
    return callback(new Error('Invalid argument passed'), null);
  }
  let sum = a + b;
  callback(null, sum);
};
app.js
const add = require('./add');
add(1, 2, (err, result) => {
  if (err) {
    console.log(err);
  }
  console.log(result);
});
Here we are passing the error as the first parameter and the actual sum as the second parameter to the callback function. If we pass a string instead of a number, the first parameter will contain the error object and the result will be null.
Cheers.

Accessing this object of a callback function from outside

It might be a bit confusing what I'm asking, but I'll try to be as clear as I can.
Basically I'm doing unit tests with mocha/chai for the Data Access Layer of my Node.JS server. I'm using bluebird to return promises and an SQLite database.
This is the insert function I want to test:
insert(sqlRequest, sqlParams, sqlRequest2) {
  return new Promise(function (resolve, reject) {
    let insertStatement = this.getDatabase().prepare(sqlRequest);
    let getStatement = this.getDatabase().prepare(sqlRequest2);
    insertStatement.run(sqlParams, err => {
      console.log('this.changes = ', this.changes);
      if (this.changes === 1) {
        getStatement.all({ $id: this.lastID }, (err, rows) => {
          if (err) {
            console.log('entered second err');
            reject(err);
          } else {
            resolve(rows[0]);
          }
        });
      } else {
        console.log('entered first err');
        reject(err);
      }
    });
  }.bind(this));
}
And this is my test with mocha:
it('insert : Error 2st SQL query', function (done) {
  const daoCommon = new DaoCommon();
  daoCommon.getDatabase = () => {
    return {
      prepare: (sqlRequest) => {
        return {
          all: (sql, callback) => {
            let err = {};
            let rows = null;
            callback(err, rows);
          },
          run: (sqlParams, callback) => {
            let err = undefined;
            callback(err);
          }
        };
      }
    };
  };
  daoCommon.insert('', '', '')
    .then(success => {
      expect.fail();
    })
    .catch(error => {
      expect(error).to.eql({});
    })
    .finally(function () {
      done();
    });
});
I want to simulate a test where this.changes is equal to 1, but I don't know how or where I can set this value. According to what I've read, this this object comes from the callback function, but I have no idea exactly where it comes from or how to set it for my tests.
Update:
You can set the this of a function you are calling with the .call method.
In your case, calling the callback with a this.changes value will look like:
var thisObject = {
  changes: 1
};
callback.call(thisObject, err);
This will set the this.changes value of your callback function.
The value of this is explained in the API documentation:
If execution was successful, the this object will contain two properties named lastID and changes which contain the value of the last inserted row ID and the number of rows affected by this query respectively.
It means that the callback function will always have this.changes. You cannot change it unless you set this.changes = something manually, though I don't understand why you would do that.
Thanks to @Maxali's comment, I will post the answer below:
You can set this when calling the function callback(err) by using .call(), e.g. callback.call({changes: 1}, err). This will set changes to 1.
Note that I had to change the line insertStatement.run(sqlParams, err => { from an arrow function to a function expression, insertStatement.run(sqlParams, function(err) {, for this to work. I assume this is because an arrow function's this is lexically bound and cannot be set by .call().
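Putting both pieces together, a hedged sketch of the adjusted test double and the fixed line (the lastID value here is made up for the test):
// In the mocked statement: give the callback a `this` with the fields
// the sqlite3 API would provide.
run: (sqlParams, callback) => {
  let err = undefined;
  callback.call({ changes: 1, lastID: 42 }, err);
},

// In insert(): a regular function expression, so `this` can be bound
// by .call() (an arrow function's `this` is lexical and cannot be).
insertStatement.run(sqlParams, function (err) {
  console.log('this.changes = ', this.changes);
  // ...
});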

Run callback function after forEach is done

In the project, I have a loop going through a list of urls. It downloads a file from every url and does some post-processing on the downloaded file.
After all the processing is done (both the download process and the post-processing), I want to execute a callback function. Because the post-processing includes some streaming tasks, it has a close event. If the last item could be identified, I could pass the callback function to its close event. However, since the loop is async, I can't track which item finishes last.
For now, I use a 5-second timeout to make sure the callback is executed after the whole process. Obviously, this is not sustainable. What's a good way to handle this?
Loop code:
exports.processArray = (items, process, callback) => {
  var todo = items.concat();
  setTimeout(function() {
    process(todo.shift());
    if (todo.length > 0) {
      // execute download and post process each second
      // however it doesn't guarantee one start after previous one done
      setTimeout(arguments.callee, 1000);
    } else {
      setTimeout(() => { callback(); }, 5000);
    }
  }, 1000);
};
processArray(
  // First param, the array
  urlList,
  // Second param, download and post process
  (url) => {
    if (url.startsWith('http')) {
      getDataReg(url, uid);
    } else if (url.startsWith('ftp')) {
      getDataFtp(url, uid);
    } else {
      console.log('not a valid resource');
    }
  },
  // Third param, callback to be executed after all done
  () => {
    Request.get(`${config.demouri}bound=${request.query.boundary};uid=${uid}`, {
      method: 'GET',
      auth: auth
    })
    .on('response', (response) => {
      console.log('response event emits');
      zipFiles(uid)
        .then((path) => {
          reply.file(path, { confine: false, filename: uid + '.zip', mode: 'inline' }).header('Content-Disposition');
        });
    });
  }
);
Download and post process:
exports.getDataFtp = (url, uid) => {
  console.log('get into ftp');
  var usefulUrl = url.split('//')[1];
  var spliter = usefulUrl.indexOf('/');
  var host = usefulUrl.substring(0, spliter);
  var dir = usefulUrl.substring(spliter + 1, usefulUrl.length);
  var client = new ftp();
  var connection = {
    host: host
  };
  var fileNameStart = dir.lastIndexOf('/') + 1;
  var fileNameEnd = dir.length;
  var fileName = dir.substring(fileNameStart, fileNameEnd);
  console.log('filename: ', fileName);
  client.on('ready', () => {
    console.log('get into ftp ready');
    client.get(dir, (err, stream) => {
      if (err) {
        console.log('get file err:', err);
        return;
      } else {
        console.log('get into ftp get');
        stream.pipe(fs.createWriteStream(datadir + `download/${uid}/${fileName}`));
        stream.on('end', () => {
          console.log('get into ftp close');
          unzipData(datadir + `download/${uid}/`, fileName, uid);
          client.end();
        });
      }
    });
  });
  client.connect(connection);
};
exports.getDataReg = (url, uid) => {
  console.log('get into http');
  var fileNameStart = url.lastIndexOf('/') + 1;
  var fileNameEnd = url.length;
  var fileName = url.substring(fileNameStart, fileNameEnd);
  var file = fs.createWriteStream(datadir + `download/${uid}/${fileName}`);
  if (url.startsWith('https')) {
    https.get(url, (response) => {
      console.log('start piping file');
      response.pipe(file);
      file.on('finish', () => {
        console.log('get into http finish');
        unzipData(datadir + `download/${uid}/`, fileName, uid);
      });
    }).on('error', (err) => { // Handle errors
      fs.unlink(datadir + `download/${uid}/${fileName}`);
      console.log('download file err: ', err);
    });
  } else {
    http.get(url, (response) => {
      console.log('start piping file');
      response.pipe(file);
      file.on('finish', () => {
        unzipData(datadir + `download/${uid}/`, fileName, uid);
      });
    }).on('error', (err) => {
      fs.unlink(datadir + `download/${uid}/${fileName}`);
      console.log('download file err: ', err);
    });
  }
};
function unzipData(path, fileName, uid) {
  console.log('get into unzip');
  console.log('creating: ', path + fileName);
  fs.createReadStream(path + fileName)
    .pipe(unzip.Extract({ path: path }))
    .on('close', () => {
      console.log('get into unzip close');
      var filelist = listFile(path);
      filelist.forEach((filePath) => {
        if (!filePath.endsWith('.zip')) {
          var components = filePath.split('/');
          var component = components[components.length - 1];
          mv(filePath, datadir + `processing/${uid}/${component}`, (err) => {
            if (err) {
              console.log('move file err: ');
            } else {
              console.log('move file done');
            }
          });
        }
      });
      fs.unlink(path + fileName, (err) => {});
    });
}
After all the processing is done (both the download process and the post-processing), I want to execute a callback function.
The interesting thing about a series of asynchronous processes is that you can never know exactly when all of them will complete. So setting a timeout for the callback is a quick-and-dirty way to do it, but it's certainly not reliable.
You can instead use a counter to solve this problem.
Let's say you have 10 operations to perform. At the beginning you set your counter to ten (counter = 10), and after each process is completed, regardless of how (it can either succeed or fail), you decrement the counter by 1 (counter -= 1) and right after that check whether the counter is 0. If so, all processes are completed and you have reached the end; you can now safely run your callback function: if (counter === 0) callback();
If I were you, I would do something like this.
Notice that the called process should return a promise, so that I can know when it finishes (again, regardless of how).
If you need help with promises, this useful article might help you: https://howtonode.org/promises
Oh, and one more thing: you should avoid using arguments.callee, because it's deprecated. See: Why was the arguments.callee.caller property deprecated in JavaScript?
exports.processArray = (items, process, callback) => {
  var todo = [].concat(items);
  var counter = todo.length;
  runProcess();
  function runProcess() {
    // Check if the counter already reached 0
    if (checkCounter() === false) {
      // Nope. Counter is still > 0, which means we got work to do.
      var processPromise = process(todo.shift());
      processPromise
        .then(function() {
          // success
        })
        .catch(function() {
          // failure
        })
        .finally(function() {
          // The previous process is done.
          // Now we can go with the next one.
          --counter;
          runProcess();
        });
    }
  }
  function checkCounter() {
    if (counter === 0) {
      callback();
      return true;
    } else {
      return false;
    }
  }
};
What you want to do is make all your asynchronous processes converge into a single promise that you can use to execute the callback at the correct moment.
Let's start at the point where each process completes, which I assume is in the callback passed to the mv() function in unzipData(). You want to wrap each of these asynchronous actions in a Promise that resolves in the callback, and since you also want to use these promises later, you use the .map() method to collect the promises in an array (instead of .forEach()).
Here's the code:
var promises = filelist.map((filePath) => {
  if (!filePath.endsWith('.zip')) {
    var components = filePath.split('/');
    var component = components[components.length - 1];
    return new Promise((resolve, reject) =>
      mv(filePath, datadir + `processing/${uid}/${component}`, (err) => {
        if (err) {
          console.log('move file err: ');
          reject(); // Or resolve() if you want to ignore the error and not cause it to prevent the callback from executing later
        } else {
          console.log('move file done');
          resolve();
        }
      }));
  }
  return Promise.resolve();
});
(if the asynchronous action is not to be executed, a Promise that resolves immediately is returned instead)
Now, we can turn this list of Promises into a single Promise that resolves when all of the promises in the list has resolved:
var allPromise = Promise.all(promises);
Next, we need to look further up in the code. We can see that the code we've just been looking at is itself part of an event handler of an asynchronous action, i.e. fs.createReadStream(). You need to wrap that in a promise that gets resolved when the inner promises resolve and this is the promise that the unzipData() function shall return:
function unzipData(path, fileName, uid) {
  console.log('get into unzip');
  console.log('creating: ', path + fileName);
  return new Promise((outerResolve) =>
    fs.createReadStream(path + fileName)
      .pipe(unzip.Extract({ path: path }))
      .on('close', () => {
        console.log('get into unzip close');
        var filelist = listFile(path);
        // Code from previous examples
        allPromise.then(outerResolve);
      }));
}
Next, we look at the functions that use unzipData(): getDataReg() and getDataFtp(). They each only perform one asynchronous action, so all you need to do is make them return a promise that resolves when the promise returned by unzipData() resolves.
Simplified example:
exports.getDataReg = (url, uid) => {
  return new Promise((resolve, reject) => {
    // ...
    https.get(url, (response) => {
      response.pipe(file);
      file.on('finish', () => {
        unzipData(datadir + `download/${uid}/`, fileName, uid)
          .then(resolve);
      });
    }).on('error', (err) => { // Handle errors
      fs.unlink(datadir + `download/${uid}/${fileName}`);
      reject(); // Or resolve() if you want to ignore the error and not cause it to prevent the callback from executing later
    });
    // ...
  });
};
Finally, we get to the processArray() function and here you need to do the same thing we did to begin with: map the processes into a list of promises. First, the process function passed needs to return the promises returned by getDataReg() and getDataFtp():
// Second param, download and post process
(url) => {
  if (url.startsWith('http')) {
    return getDataReg(url, uid);
  } else if (url.startsWith('ftp')) {
    return getDataFtp(url, uid);
  } else {
    console.log('not a valid resource');
  }
  return Promise.reject(); // or Promise.resolve() if you want invalid resources to be ignored and not prevent the callback from executing later
}
Now, your processArray() function can look like this:
exports.processArray = (items, process, callback) =>
  Promise.all(items.map(process))
    .then(callback)
    .catch(() => console.log('Something went wrong somewhere'));
Your callback will get invoked when all asynchronous actions have completed, regardless of the order in which they do. If any one of the promises rejects, the callback will never be executed, so manage your promise rejections accordingly.
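If you would rather have the callback run even when some promises reject, one hedged alternative on Node versions that support it (12.9+) is Promise.allSettled, which never rejects:
exports.processArray = (items, process, callback) =>
  Promise.allSettled(items.map(process))
    // each entry is { status: 'fulfilled', value } or { status: 'rejected', reason }
    .then(results => callback(results));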
Here's a JSFiddle with the complete code: https://jsfiddle.net/upn4yqsw/
In general, since nodejs does not appear to have implemented the Streams Standard to be Promise-based, at least from what I can gather, but rather uses an event-based or callback mechanism, you can use the Promise constructor within the function call to return a fulfilled Promise object when a specific event has been dispatched:
const doStuff = (...args) => new Promise((resolve, reject) => {
  /* define and do stream stuff */
  doStreamStuff.on("close" /* or "end" */, () => {
    // do stuff
    resolve(/* value */);
  });
});
doStuff(/* args */)
  .then(data => {})
  .catch(err => {});
