I have an array of foods, and every food has some measures. I have to insert them into two databases, foods and measures, but I have to check whether a measure already exists in the measures database. The code is asynchronous, and I get multiple measures with the same name. How can I make it wait before checking whether the measure exists in the database?
return new Promise(function(resolve, reject) {
var foods = [];
async.each(result, function(obj, callback) {
var entity = mapper.fromNutritionApiToDatabase(obj);
var measures = mapper.mapMeasure(obj);
async.each(measures, function(item, callback) {
// console.log("Item : " + item.label);
var en = {
name: {
en: item.label,
bg: item.label
}
};
FoodMeasureRepository
.findOne({
"name.en": item.label
})
.then(function(result) {
// console.log("Result " + result);
if (result) {
entity.measures.push({
measureId: result._id,
value: parseInt(item.eqv)
});
callback();
} else {
FoodMeasureRepository
.insertOne(en)
.then(function(result) {
entity.measures.push({
measureId: result._id,
value: parseInt(item.eqv)
});
callback();
})
}
})
.catch(function(err) {
callback(err)
});
}, function(err) {
if (err) {
callback(err);
} else {
foods.push(entity);
callback();
}
});
}, function(err) {
if (err) {
console.log(err);
} else {
resolve(foods)
}
});
});
OK.
What is happening is that multiple measures each discover that the measure is not yet in the database before the first of them inserts it, and since the others have also found that it isn't there, they all insert it too. There are two ways to solve this:
Keep a local copy of what you know about the database, so you never query for the same thing twice
Implement an "insert if not present" operation on the database side
The second one is nicer, but often there are reasons you can't do it.
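For example, if the store behind FoodMeasureRepository is MongoDB, an atomic upsert can do the insert-if-missing in one round trip. This is only a sketch under that assumption; measuresCollection stands in for however you reach the underlying collection, and a unique index on name.en is what actually guarantees no duplicates:
var measuresCollection = db.collection('measures'); // hypothetical handle to the underlying collection
function ensureMeasure(label) {
    return measuresCollection.findOneAndUpdate(
        { "name.en": label },                                 // match an existing measure
        { $setOnInsert: { name: { en: label, bg: label } } }, // only written when nothing matched
        { upsert: true, returnOriginal: false }               // create if missing, return the document
    ).then(function(res) {
        return res.value; // the existing or freshly inserted measure
    });
}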
If that kind of database-side operation isn't available, here is an example of a local knowledge store. In this approach, both in-progress promises and results from the database are stored in the "DB_contents" map, keyed by the label being queried for.
Once the first query for a label is made, all subsequent queries for it queue up. When the first query completes, it propagates its result to any waiting callers and stores it for future reference.
var DB_contents = {}; // our knowledge and queue store
function ensure_DB_has_measure(label) {
return new Promise(function(_pass, _fail) {
// A more generalised fail function for clarity
function fail(err) {
// Remove the queue so nothing else waits
var original = DB_contents[label];
delete DB_contents[label];
// Inform everything in the queue of the failure
if(original instanceof Array) {
original.forEach(function(promise) {
promise.fail(err);
});
}
// Follow own failure method
_fail.apply(_fail, arguments);
}
function pass(result) {
// Replace the queue with the result so future calls don't query
var original = DB_contents[label];
DB_contents[label] = result;
// Inform all waiting calls of the result
if(original instanceof Array)
original.forEach(function(promise) {
promise.pass(result);
});
// Follow own completion path
_pass.apply(_pass, arguments);
}
// Check to see if it needs to queue
if(DB_contents[label] instanceof Array) {
DB_contents[label].push({pass:pass, fail:_fail});
// Or if the value is already here it can return
} else if(DB_contents[label]) {
_pass(DB_contents[label]);
// Or it needs to be fetched
} else {
// Initialise the queue
DB_contents[label] = [];
// Check for pre-existence
FoodMeasureRepository
.findOne({"name.en":label})
.then(function(result) {
if(result) {
pass(result);
} else {
FoodMeasureRepository
.insertOne({name:{en:label, bg:label}})
.then(pass)
.catch(fail);
}
})
.catch(fail)
}
});
}
// The above should go outside the async calls
// This can now replace the contents of your second async function body
ensure_DB_has_measure(item.label)
.then(function(result) {
entity.measures.push({
measureId: result._id,
value: parseInt(item.eqv)
});
callback();
})
.catch(function(err) {
callback(err);
})
I'm currently writing a Node application that checks whether a certain file exists at a specific location. For every order whose file is where it is supposed to be, I'd like to make a PUT request to my WooCommerce API that changes the order status to Processing.
for (i=0; i<my_orders.length; i++) {
var exportedThisPdf = true;
var orderId = my_orders[i].orderId.toString();
for (p=0; p<my_orders[i].products.length; p++) {
var stickerId = my_orders[i].products[p].meta[0].value;
if (fs.existsSync('/mypath/test')) {
} else {
exportedThisPdf = false;
}
}
if (exportedThisPdf == true) {
var data = {
status: 'processing'
};
client.updateStatus(orderId, data, function (err) {
if (err) console.log(err);
})
} else {
var data = {
status: 'failed'
};
client.updateStatus(orderId, data, function (err) {
if (err) console.log(err);
})
}
}
console.log("callback");
I would now like to only continue the code once all my order statuses have been successfully updated to either processing or failed.
Is there a way to go about this problem in a clean, asynchronous way?
Thanks in advance.
You want to wait for some promises. So first, create a global variable:
var promises = [];
Then, basically whenever we do something asynchronous, we add a promise to that array, e.g.:
promises.push(new Promise(function(resolve){
client.updateStatus(orderId, data, function (err) {
if (err) return console.log(err);
resolve();
})
}));
Then, once all promises have been added, we can wait for them:
Promise.all(promises)
.then(function(){
console.log("finished");
});
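One caveat (not from the original answer): as written above, a failed update only logs the error and never calls resolve, so Promise.all would wait forever. If you want the "finished" log to run even when some updates fail, resolve in both branches, for example:
promises.push(new Promise(function(resolve){
    client.updateStatus(orderId, data, function (err) {
        if (err) console.log(err); // log the failure...
        resolve();                 // ...but still settle the promise so Promise.all can finish
    })
}));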
Alternatively, try the async module:
var async = require('async');
async.eachSeries(my_orders, function(order, ordercallback){
async.eachSeries(order.products, function(product, productcallback){
// DO your put logic here
client.updateStatus(orderId, data, function (err) {
productcallback();
})
}, function(err){
if(!err) ordercallback()
});
});
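To actually continue only after every order has been handled, the outer eachSeries can also be given a final callback (a sketch extending the idea above):
async.eachSeries(my_orders, function(order, ordercallback){
    // ... per-order work as above ...
    ordercallback();
}, function(err){
    if (err) return console.log(err);
    console.log('all order statuses updated'); // safe to continue here
});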
In the project, I have a loop going through a list of URLs. It downloads a file from each URL and does some post-processing on the downloaded file.
After all the processing is done (both the download and the post-processing), I want to execute a callback function. Because the post-processing includes a streaming task, it has a close event. If the last item could be identified, I could pass the callback function to its close event. However, since the loop is async, I can't track which item finishes last.
For now, I use a 5-second timeout to make sure the callback is executed after the whole process. Obviously, this is not sustainable. What's a good way to handle this?
loop code:
exports.processArray = (items, process, callback) => {
var todo = items.concat();
setTimeout(function() {
process(todo.shift());
if(todo.length > 0) {
// execute download and post process each second
// however it doesn't guarantee one start after previous one done
setTimeout(arguments.callee, 1000);
} else {
setTimeout(() => {callback();}, 5000);
}
}, 1000);
};
processArray(
// First param, the array
urlList,
// Second param, download and post process
(url) => {
if(url.startsWith('http')) {
getDataReg(url, uid);
}
else if(url.startsWith('ftp')) {
getDataFtp(url, uid);
}
else {
console.log('not a valid resource');
}
},
// Third param, callback to be executed after all done
() => {
Request.get(`${config.demouri}bound=${request.query.boundary};uid=${uid}`, {
method: 'GET',
auth: auth
})
.on('response', (response) => {
console.log('response event emmits');
zipFiles(uid)
.then((path) => {
reply.file(path, { confine: false, filename: uid + '.zip', mode: 'inline'}).header('Content-Disposition');
});
});
}
);
Download and post process:
exports.getDataFtp = (url, uid) => {
console.log('get into ftp');
var usefulUrl = url.split('//')[1];
var spliter = usefulUrl.indexOf('/');
var host = usefulUrl.substring(0, spliter);
var dir = usefulUrl.substring(spliter+1, usefulUrl.length);
var client = new ftp();
var connection = {
host: host
};
var fileNameStart = dir.lastIndexOf('/') + 1;
var fileNameEnd = dir.length;
var fileName = dir.substring(fileNameStart, fileNameEnd);
console.log('filename: ', fileName);
client.on('ready', () => {
console.log('get into ftp ready');
client.get(dir, (err, stream) => {
if (err) {
console.log('get file err:', err);
return;
} else{
console.log('get into ftp get');
stream.pipe(fs.createWriteStream(datadir + `download/${uid}/${fileName}`));
stream.on('end', () => {
console.log('get into ftp close');
unzipData(datadir + `download/${uid}/`, fileName, uid);
client.end();
});
}
});
});
client.connect(connection);
};
exports.getDataReg = (url, uid) => {
console.log('get into http');
var fileNameStart = url.lastIndexOf('/') + 1;
var fileNameEnd = url.length;
var fileName = url.substring(fileNameStart, fileNameEnd);
var file = fs.createWriteStream(datadir + `download/${uid}/${fileName}`);
if (url.startsWith('https')) {
https.get(url, (response) => {
console.log('start piping file');
response.pipe(file);
file.on('finish', () => {
console.log('get into http finish');
unzipData(datadir + `download/${uid}/`, fileName, uid);
});
}).on('error', (err) => { // Handle errors
fs.unlink(datadir + `download/${uid}/${fileName}`);
console.log('download file err: ', err);
});
} else {
http.get(url, (response) => {
console.log('start piping file');
response.pipe(file);
file.on('finish', () => {
unzipData(datadir + `download/${uid}/`, fileName, uid);
});
}).on('error', (err) => {
fs.unlink(datadir + `download/${uid}/${fileName}`);
console.log('download file err: ', err);
});
}
};
function unzipData(path, fileName, uid) {
console.log('get into unzip');
console.log('creating: ', path + fileName);
fs.createReadStream(path + fileName)
.pipe(unzip.Extract({path: path}))
.on('close', () => {
console.log('get into unzip close');
var filelist = listFile(path);
filelist.forEach((filePath) => {
if (!filePath.endsWith('.zip')) {
var components = filePath.split('/');
var component = components[components.length-1];
mv(filePath, datadir + `processing/${uid}/${component}`, (err) => {
if(err) {
console.log('move file err: ');
} else {
console.log('move file done');
}
});
}
});
fs.unlink(path + fileName, (err) => {});
});
}
After all the processing is done (both the download and the post-processing), I want to execute a callback function.
The interesting thing about a series of asynchronous processes is that you can never know exactly when all of them will complete. So setting a timeout for the callback is a quick-and-dirty way to do it, but it's certainly not reliable.
You can instead use a counter to solve this problem.
Let's say you have 10 operations to perform. At the beginning you set your counter to ten, counter = 10. After each process completes, regardless of how (it can either succeed or fail), you decrement the counter by 1, counter -= 1, and right after that you check whether the counter is 0. If it is, all processes have completed and you have reached the end, so you can now safely run your callback function: if(counter === 0) callback();
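A bare sketch of that counter idea (operations, doAsyncWork and callback are placeholders here, not names from your code):
var counter = operations.length;
operations.forEach(function(op) {
    doAsyncWork(op, function(err) {       // hypothetical Node-style async call
        if (err) console.log(err);        // a failure still counts as "done"
        counter -= 1;
        if (counter === 0) callback();    // whichever finishes last fires the callback
    });
});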
If I were you, I would do something like this:
*Notice that the called process should return a promise, so that I can know when it finishes (again, regardless of how)
*If you need help with promises, this useful article might help you: https://howtonode.org/promises
*Oh, and one more thing: you should avoid using arguments.callee, because it's deprecated. Here is why: Why was the arguments.callee.caller property deprecated in JavaScript?
exports.processArray = (items, process, callback) => {
var todo = [].concat(items);
var counter = todo.length;
runProcess();
function runProcess() {
// Check if the counter already reached 0
if(checkCounter() === false) {
// Nope. Counter is still > 0, which means we got work to do.
var processPromise = process(todo.shift());
processPromise
.then(function() {
// success
})
.catch(function() {
// failure
})
.finally(function() {
// The previous process is done.
// Now we can go with the next one.
--counter;
runProcess();
})
}
};
function checkCounter() {
if(counter === 0) {
callback();
return true;
} else {
return false;
}
}
};
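Usage would then look roughly like this (a sketch; it assumes getDataReg/getDataFtp have been changed to return promises, which they do not in the question as posted):
processArray(urlList, (url) => {
    if (url.startsWith('http')) return getDataReg(url, uid);
    if (url.startsWith('ftp')) return getDataFtp(url, uid);
    console.log('not a valid resource');
    return Promise.resolve(); // skip invalid entries but still count them as done
}, () => {
    console.log('all downloads and post-processing done');
});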
What you want to do is make all your asynchronous processes converge into a single promise that you can use to execute the callback at the correct moment.
Let's start at the point where each process is complete, which I assume is in the callback passed to the mv() function in unzipData(). You want to wrap each of these asynchronous actions in a Promise that resolves in the callback, and you also want to use these promises later; for that, use the .map() method to collect the promises in an array (instead of .forEach()).
Here's the code:
var promises = filelist.map((filePath) => {
if (!filePath.endsWith('.zip')) {
var components = filePath.split('/');
var component = components[components.length-1];
return new Promise((resolve, reject) =>
mv(filePath, datadir + `processing/${uid}/${component}`, (err) => {
if(err) {
console.log('move file err: ');
reject(); // Or resolve() if you want to ignore the error and not cause it to prevent the callback from executing later
} else {
console.log('move file done');
resolve();
}
}));
}
return Promise.resolve();
});
(if the asynchronous action is not to be executed, a Promise that resolves immediately is returned instead)
Now, we can turn this list of promises into a single promise that resolves when all of the promises in the list have resolved:
var allPromise = Promise.all(promises);
Next, we need to look further up in the code. We can see that the code we've just been looking at is itself part of an event handler of an asynchronous action, i.e. fs.createReadStream(). You need to wrap that in a promise that gets resolved when the inner promises resolve, and this is the promise that the unzipData() function should return:
function unzipData(path, fileName, uid) {
console.log('get into unzip');
console.log('creating: ', path + fileName);
return new Promise((outerResolve) =>
fs.createReadStream(path + fileName)
.pipe(unzip.Extract({path: path}))
.on('close', () => {
console.log('get into unzip close');
var filelist = listFile(path);
// Code from previous examples
allPromise.then(outerResolve);
}));
}
Next, we look at the functions that use unzipData(): getDataReg() and getDataFtp(). They each perform only one asynchronous action, so all you need to do is make them return a promise that resolves when the promise returned by unzipData() resolves.
Simplified example:
exports.getDataReg = (url, uid) => {
return new Promise((resolve, reject) => {
// ...
https.get(url, (response) => {
response.pipe(file);
file.on('finish', () => {
unzipData(datadir + `download/${uid}/`, fileName, uid)
.then(resolve);
});
}).on('error', (err) => { // Handle errors
fs.unlink(datadir + `download/${uid}/${fileName}`);
reject(); // Or resolve() if you want to ignore the error and not cause it to prevent the callback from executing later
});
// ...
});
}
Finally, we get to the processArray() function, and here you need to do the same thing we did to begin with: map the processes into a list of promises. First, the process function you pass in needs to return the promises returned by getDataReg() and getDataFtp():
// Second param, download and post process
(url) => {
if(url.startsWith('http')) {
return getDataReg(url, uid);
}
else if(url.startsWith('ftp')) {
return getDataFtp(url, uid);
}
else {
console.log('not a valid resource');
}
return Promise.reject(); // or Promise.resolve() if you want invalid resources to be ignored and not prevent the callback from executing later
}
Now, your processArray() function can look like this:
exports.processArray = (items, process, callback) =>
Promise.all(items.map(process))
.then(callback)
.catch(() => console.log('Something went wrong somewhere'));
Your callback will get invoked when all asynchronous actions have completed, regardless of the order in which they do. If any one of the promises rejects, the callback will never be executed, so manage your promise rejections accordingly.
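If you would rather have the callback run even when some items fail, one option (a sketch, not part of the answer above) is to absorb each rejection before handing the promises to Promise.all:
exports.processArray = (items, process, callback) =>
    Promise.all(items.map((item) =>
        process(item).catch((err) => {
            console.log('item failed:', err);
            return null; // keep going; null marks a failed item in the results
        })
    ))
    .then(callback);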
Here's a JSFiddle with the complete code: https://jsfiddle.net/upn4yqsw/
In general, since Node.js does not appear to have implemented the Streams Standard to be Promise-based (at least from what I can gather), but rather uses an event-based or callback mechanism, you can use the Promise constructor within a function call to return a Promise that is fulfilled when a specific event has been dispatched:
const doStuff = (...args) => new Promise((resolve, reject) => {
  /* define and do stream stuff */
  doStreamStuff.on("close" /* or "end" */, () => {
    // do stuff
    resolve(/* value */)
  })
});
doStuff(/* args */)
.then(data => {})
.catch(err => {})
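As a concrete, purely illustrative instance of that pattern, applied to the file write in the question's download code, it might look something like this:
const fs = require('fs');
// Sketch: resolve once the write stream has flushed everything to disk.
const downloadToFile = (response, path) => new Promise((resolve, reject) => {
    const file = fs.createWriteStream(path);
    response.pipe(file);
    file.on('finish', () => resolve(path)); // 'finish' fires when writing is complete
    file.on('error', reject);
});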
I have an array which I have to loop through. I can't use a for loop because the work is asynchronous and has a callback in it. I would have to use a loop which waits for the callback. Is that possible?
Code:
if ( bots[1].getInventory().getItems().length < capacity ){
var executed = false;
bots[1].createDeposit({
steamid: req.query.steamid,
token: req.query.token,
itemsFromThem: uniqueItems,
message: req.query.message,
callback: function( err, dbTradeId ){
if ( !executed ){
executed = true;
if ( !err && dbTradeId ){
res.json({ result: 1, dbTradeId: dbTradeId });
} else {
console.log('» Tried to create deposit, but',err);
myMessages.push("Problem");
res.json({ error: err });
}
}
}
});
} else {
console.log('» Tried to create deposit, but no bot found(2)');
myMessages.push("Available bot not found(2)");
}
My question is not a duplicate because I don't want it to go through every item in the array, only until the successful callback has been executed.
Regards
You should take a look at async#each. It allows you to run async calls against an array (or in a loop) and gives you a place to run a method when all of the async calls are done.
// Should give you an idea how to use it
async.each(bots, function (item, callback) {
if ( item.getInventory().getItems().length < capacity ){
var executed = false;
item.createDeposit({
steamid: req.query.steamid,
token: req.query.token,
itemsFromThem: uniqueItems,
message: req.query.message,
callback: function( err, dbTradeId ){
if ( !executed ){
executed = true;
if ( !err && dbTradeId ){
callback(null, { result: 1, dbTradeId: dbTradeId });
// res.json();
} else {
console.log('» Tried to create deposit, but',err);
myMessages.push("Problem");
callback(err);
}
}
}
});
} else {
console.log('» Tried to create deposit, but no bot found(2)');
myMessages.push("Available bot not found(2)");
}
}, function (err) {
console.log('All done');
});
You can create another array of promises and then use Promise.all to wait for them to complete. I don't know the source of what you have to iterate over, but let's assume that you want to make an HTTP connection:
const yourThingies = [
"http://a-server.com/",
"http://www.another.com/"
//And so on...
];
let promises = [];
yourThingies.forEach( (url) => {
const p = new Promise((resolve, reject) => {
//Call this on resolve, or use something that returns a promise.
resolve("your data");
});
promises.push(p);
});
Promise.all(promises).then((solvedPromises) => {
//Do things once all is done
solvedPromises.forEach((solv) => {
//So on...
});
});
Further information about promises: the Promise docs on MDN.
EDIT:
Note: when I answered, there was no code in the question; I'll try to edit.
I'm using the twit library for Node.js, which has async calls, and I made functions like this:
function getUserFromSearch(phrase) {
T.get('search/tweets', { q: phrase+' lang:pt', count: 1 }, function(err, data, response) {
data['statuses'].forEach(function(element) {
var result = "maybe";
say('getting user profile from search for '+phrase, result);
console.log(element);
return element['user']['screen_name'];
}, this);
})
}
Since T.get() is executed asynchronously, whenever I do
var username = getUserFromSearch('banana');
I get username = undefined because it takes time to return. The obvious solution would be to implement a callback like this:
function getUserFromSearch(phrase, callback) {
T.get('search/tweets', { q: phrase+' lang:pt', count: 1 }, function(err, data, response) {
data['statuses'].forEach(function(element) {
var result = "maybe";
say('getting user profile from search for '+phrase, result);
console.log(element);
callback(element['user']['screen_name']);
}, this);
})
}
But I don't think this is the best solution, because I need to create a callback function just for this. Isn't there a way to pass the username 'pointer', like getUserFromSearch('banana', username), in such a way that the function alters the value of username? Is there even a better way?
Isn't there a way to pass the username 'pointer'
No, there isn't. JavaScript doesn't have references to variables.
You could pass in an object with a username property and have it set that property, but that tightly couples getUserFromSearch to a specific object and isn't composable.
Promises are a great way to handle this situation. They're composable and offer clear semantics around success and failure:
function getUserFromSearch(phrase) {
return new Promise(function(resolve, reject) {
T.get('search/tweets', { q: phrase + ' lang:pt', count: 1 }, function(err, data, response) {
if (err) {
reject(err);
} else {
data['statuses'].forEach(function(element) {
var result = "maybe";
say('getting user profile from search for ' + phrase, result);
resolve(element['user']['screen_name']);
}, this);
}
})
});
}
Usage:
getUserFromSearch(phrase).then(function(result) {
username = result;
});
Yes, there's still a callback, but it's a callback that's readily composed with others.
With ES2015 syntax:
function getUserFromSearch(phrase) {
return new Promise((resolve, reject) => {
T.get('search/tweets', { q: phrase + ' lang:pt', count: 1 }, function(err, data, response) {
if (err) {
reject(err);
} else {
data['statuses'].forEach(element => {
const result = "maybe";
say('getting user profile from search for ' + phrase, result);
resolve(element['user']['screen_name']);
});
}
})
});
}
Usage:
getUserFromSearch(phrase).then(result => {
username = result;
});
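As a further illustration of the composability mentioned above (this part is not from the original answer), several searches can run in parallel and be combined with Promise.all:
// Illustrative only: run two searches in parallel and use both results.
Promise.all([
    getUserFromSearch('banana'),
    getUserFromSearch('apple')
]).then(([first, second]) => {
    console.log('found users:', first, second);
}).catch(err => {
    console.log('search failed:', err);
});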
Since JavaScript doesn't have the notion of pointers, you can create an object before calling the function and pass it as the second parameter. Inside the function, assign the result to one of the object's properties.
Your code would look like:
function getUserFromSearch(phrase,obj) {
T.get('search/tweets', { q: phrase+' lang:pt', count: 1 }, function(err, data, response) {
data['statuses'].forEach(function(element) {
var result = "maybe";
say('getting user profile from search for '+phrase, result);
console.log(element);
obj.username = element['user']['screen_name'];
}, this);
})
}
var obj = {username:null}
getUserFromSearch('banana',obj);
This is just the way JavaScript works.
Since this is an async function, you'll need to do it as you said:
getUserFromSearch('banana', function(username){
//do something with username here
});
What you can do is use something like async.js to reduce the nested callback hell, for example:
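Here is a sketch only (the second step is a made-up placeholder): async.waterfall flattens a chain of dependent callbacks into a flat list of steps.
var async = require('async');
async.waterfall([
    function(next) {
        // callback-style getUserFromSearch from the question
        getUserFromSearch('banana', function(username) {
            next(null, username);
        });
    },
    function(username, next) {
        // hypothetical follow-up step that needs the username
        next(null, 'done with ' + username);
    }
], function(err, result) {
    if (err) return console.log(err);
    console.log(result);
});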
I'm trying to recursively call AWS SNS's listEndpointsByPlatformApplication. It returns the first 100 endpoints, then a token in NextToken if there are more to return (details: AWS SNS listEndpointsByPlatformApplication).
Here's what I've tried:
var getEndpoints = function(platformARN, token) {
return new models.sequelize.Promise(function(resolve, reject) {
var params = {
PlatformApplicationArn: platformARNDev
};
if (token != null) {
params['NextToken'] = token;
}
sns.listEndpointsByPlatformApplication(params, function(err, data) {
if (err) {
return reject(err);
}
else {
endpoints = endpoints.concat(data.Endpoints); //save to global var
if ('NextToken' in data) {
//call recursively
return getEndpoints(platformARN, data.NextToken);
}
else {
console.log('trying to break out!');
return resolve(true);
}
}
});
});
}
I'm calling it with:
getEndpoints(platformARNDev, null)
.then(function(ret) {
console.log('HERE!');
}, function(err) {
console.log(err);
});
Problem is: the first call happens, then the recursive call happens, and I get the message trying to break out!, but HERE! never gets logged. I think I've got something wrong with how my promises are returning.
Grateful for pointers.
The problem is that you try to resolve/reject a partially completed query. Here is a complete working example with a dummy service. I encapsulated the data grabbing into its own recursive function and only resolve/reject when I've completely fetched all the data or stumbled upon an error:
// This is the mock of the service. It yields data and token if
// it has more data to show. Otherwise data and null as a token.
var dummyData = [0, 1, 2, 3, 4];
function dummyAsyncCall(token, callback) {
token = token || 0;
setTimeout(function() {
callback({
dummyDataPart: dummyData[token],
token: (typeof (dummyData[token]) == 'undefined') ? null : (token + 1)
});
});
}
// Here is how you would recursively call it with promises:
function getAllData() {
//data accumulator is sitting within the function so it doesn't pollute the global namespace.
var dataSoFar = [];
function recursiveCall(token, resolve, reject) {
dummyAsyncCall(token, function(data) {
if (data.error) {
reject(data.error);
}
if (!data.token) {
//You don't need to return the resolve/reject result.
resolve(dataSoFar);
} else {
dataSoFar = dataSoFar.concat(data.dummyDataPart);
recursiveCall(data.token, resolve, reject);
}
});
}
return new Promise(function(resolve, reject) {
// Note me passing resolve and reject into the recursive call.
// I like it this way but you can just store them within the closure for
// later use
recursiveCall(null, resolve, reject);
});
}
//Here is the call to the recursive service.
getAllData().then(function(data) {
console.log(data);
});
Fiddle with me
That's because you don't need to return resolve/reject; just call resolve/reject when the recursive call completes. Rough code would look like this:
var getEndpoints = function(platformARN, token) {
return new models.sequelize.Promise(function(resolve, reject) {
var params = {
PlatformApplicationArn: platformARNDev
};
if (token != null) {
params['NextToken'] = token;
}
sns.listEndpointsByPlatformApplication(params, function(err, data) {
if (err) {
reject(err);
}
else {
endpoints = endpoints.concat(data.Endpoints); //save to global var
if ('NextToken' in data) {
//call recursively
getEndpoints(platformARN, data.NextToken).then(function () {
resolve(true);
}).catch(function (err) {
reject(err);
});
}
else {
console.log('trying to break out!');
resolve(true);
}
}
});
});
}
(Caution: this is just rough code; it may or may not work as-is, but it should give the general idea.)
I've added a code snippet below to support this concept, and it works great; check it out.
i = 0;
$('#output').empty();
function pro() {
return new Promise(function(resolve, reject) {
if (i > 3) {
resolve();
return;
}
window.setTimeout(function() {
console.log(i);
$('#output').append(i).append('<br/>');
i += 1;
pro().then(function() {
resolve()
}).catch(function() {
reject()
});
}, 2000);
});
}
pro().then(function () { $('#output').append("now here"); })
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<div id="output"></div>