Node.js synchronously loop or iterate over asynchronous statements - javascript

I want to do a for each loop but have it run synchronously. Each iteration of the loop will do an http.get call, which will return JSON for it to insert the values into a database. The problem is that the for loop runs asynchronously, and that causes all of the http.gets to run at once, so my database doesn't end up inserting all of the data. I am using async-foreach to try to do what I want it to do, but I don't have to use it if I can do it the right way.
// Import each card set listed by getList(): fetch its JSON and hand the
// parsed payload to the model. Fixes: `mCardImport` and `theUrl` were
// implicit globals; the URL read `sets.set_code` (a property of the whole
// array) instead of the current `item`; the inner `res` shadowed Express's.
var mCardImport = require('m_cardImport.js');
var http = require('http');
app.get('/path/hi', function(req, res) {
  mCardImport.getList(function(sets) {
    forEach(sets, function(item, index, arr) {
      // Use the current item's code — `sets.set_code` hit the same
      // (undefined) URL for every iteration.
      var theUrl = 'http://' + item.set_code + '.json';
      // NOTE: all gets still start concurrently; serialization is the
      // subject of the answers below.
      http.get(theUrl, function(httpRes) {
        var jsonData = '';
        httpRes.on('data', function(chunk) {
          jsonData += chunk;
        });
        httpRes.on('end', function() {
          var theResponse = JSON.parse(jsonData);
          mCardImport.importResponse(theResponse.list, theResponse.code, function(theSet) {
            console.log("SET: " + theSet);
          });
        });
      });
    });
  });
});
and my model
exports.importResponse = function(cardList, setCode, callback) {
mysqlLib.getConnection(function(err, connection) {
forEach(cardList, function(item, index, arr) {
var theSql = "INSERT INTO table (name, code, multid, collector_set_num) VALUES "
+ "(?, ?, ?, ?) ON DUPLICATE KEY UPDATE id=id";
connection.query(theSql, [item.name, setCode, item.multid, item.number], function(err, results) {
if (err) {
console.log(err);
};
});
});
});
callback(setCode);
};

With recursion the code is pretty clean. Wait for the http response to come back then fire off next attempt. This will work in all versions of node.
var urls = ['http://stackoverflow.com/', 'http://security.stackexchange.com/', 'http://unix.stackexchange.com/'];
// Walk the list recursively: the next request is started only inside the
// previous response's callback, so fetches run strictly one at a time.
var processItems = function (index) {
  if (index >= urls.length) {
    return;
  }
  http.get(urls[index], function (res) {
    // add some code here to process the response
    processItems(index + 1);
  });
};
processItems(0);
A solution using promises would also work well, and is more terse. For example, if you have a version of get that returns a promise and Node v7.6+, you could write an async/await function like this example, which uses some new JS features.
const urls = ['http://stackoverflow.com/', 'http://security.stackexchange.com/', 'http://unix.stackexchange.com/'];
/**
 * Fetches every URL strictly in sequence: each iteration awaits the
 * previous response before issuing the next request.
 */
async function processItems(list) {
  for (let i = 0; i < list.length; i++) {
    const response = await promisifiedHttpGet(list[i]);
    // add some code here to process the response.
  }
}
processItems(urls);
Note: both of these examples skip over error handling, but you should probably have that in a production app.

To loop and synchronously chain asynchronous actions, the cleanest solution is probably to use a promise library (promises are being introduced in ES6, this is the way to go).
Using Bluebird, this could be
// Start from an already-resolved promise and extend the chain once per
// item, so each request begins only after the previous one finished.
// Fixes: `Var` was a syntax error; `.then()` was being passed a Promise
// instead of a function, and the chain was never reassigned to `p`.
var p = Promise.resolve();
forEach(sets, function(item, index, arr) {
  p = p.then(function() {
    return new Promise(function(resolve, reject) {
      http.get(theUrl, function(res) {
        // ... consume the response ...
        res.on('end', function() {
          // ... parse / store the data ...
          resolve();
        });
      });
    });
  });
});
p.then(function(){
  // all tasks launched in the loop are finished
});

I found out that I wasn't releasing my mysql connections after I was done with each call and this tied up the connections causing it to fail and appear to be an issue with synchronization.
After explicitly calling connection.release(); it caused my code to work 100% correctly even in an asynchronous fashion.
Thanks for those who posted to this question.

"use strict";
var Promise = require("bluebird");
var some = require('promise-sequence/lib/some');
// Returns a promise that resolves with `wht` after a random
// 1.5–2 second delay, simulating a slow asynchronous task.
var pinger = function(wht) {
return new Promise(function(resolve, reject) {
setTimeout(function () {
console.log('I`ll Be Waiting: ' + wht);
resolve(wht);
}, Math.random() * (2000 - 1500) + 1500);
});
}
// Build the input list: the integers 0..12.
var result = [];
for (var i = 0; i <= 12; i++) {
result.push(i);
}
// promise-sequence's `some` runs pinger over the items and resolves
// with the collected values once all have completed.
some(result, pinger).then(function(result){
console.log(result);
});

Just wrap the loop in an async function. This example illustrates what I mean:
// Resolves (with no value) after a 1-second timer; the reject
// callback is deliberately unused.
const oneSecond = async () => {
  return new Promise((resolve, _reject) => {
    setTimeout(resolve, 1000);
  });
};
This function completes after just 1 second:
// Fires all five oneSecond() calls without awaiting them, so the five
// timers run concurrently and everything logs after ~1 second.
const syncFun = () => {
  for (const i of [0, 1, 2, 3, 4]) {
    oneSecond().then(() => console.log(`${i}`));
  }
};
syncFun(); // Completes after 1 second ❌
This one works as expected, finishing after 5 seconds:
// Awaits each oneSecond() before starting the next, so the five delays
// run back-to-back and the whole function takes ~5 seconds.
const asyncFun = async () => {
  for (const i of [0, 1, 2, 3, 4]) {
    await oneSecond();
    console.log(`${i}`);
  }
};
asyncFun(); // Completes after 5 seconds ✅

var urls = ['http://stackoverflow.com/', 'http://security.stackexchange.com/', 'http://unix.stackexchange.com/'];
// NOTE: all requests start at once — nothing here waits for a response.
// `i` was previously an implicit global; declare it so it doesn't leak.
for (var i = 0; i < urls.length; i++) {
  http.get(urls[i], function (res) {
    // add some code here to process the response
  });
}

Related

nodejs/javascript limit how many async calls [duplicate]

I am building an application that will be making about a million calls to a remote API server. Will I be able to limit the number of connections to, for example, 10? Will setting max sockets to 10 do it?
I am trying to understand what do these parameters do:
// Fragment of http.Agent-style options (presumably — TODO confirm these
// are meant for an Agent; as plain request options they have no effect):
keepAlive: false,
maxSockets: 999,
maxFreeSockets: 1
In node http get function, in the following code:
// Work list of 5000 dummy items. NOTE(review): `i` is an implicit global.
var inputData = [];
for(i=1; i<=5000;i++){
inputData.push('number' + i);
}
// Request options. NOTE(review): keepAlive/maxSockets/maxFreeSockets are
// agent-level settings — placed here they are presumably ignored; confirm
// against the http.Agent docs.
var options = {
host: "localhost",
port: 80,
path: "/text.txt",
keepAlive: false,
maxSockets: 999,
maxFreeSockets: 1
}
// NOTE(review): `limit` and `counter` are never used below.
var limit = inputData.length;
var counter = 0;
// Issues one HTTP request and resolves when the response ends.
// NOTE(review): `number` is unused; `fetch` leaks as an implicit global;
// request failures are listened for on the response rather than the
// request object, so a connection error leaves the promise unsettled.
function fetchData(number){
return new Promise(function(resolve, reject){
var http = require('http');
fetch = function(resp){
var body = '';
resp.on('data',function(chunk){
body += chunk;
})
resp.on('end',function(){
console.log(resp)
resolve()
})
resp.on('error',function(err){
console.log('error');
})
}
var req = http.request(options, fetch);
req.end();
})
}
// Launches every request at once. NOTE(review): `connection` is not
// defined anywhere in this snippet — this would throw on success.
Promise.all(inputData.map(number => fetchData(number))).then(function(results) {
console.log('finished');
connection.end();
})
.catch(function(error) {
console.log('there wa an error');
console.log(error);
});
You really don't want to fire off 1,000,000 requests and somehow hope that maxSockets manages it to 100 at a time. There are a whole bunch of reasons why that is not a great way to do things. Instead, you should use your own code that manages the number of live connections to 100 at a time.
There are a number of ways to do that:
Write your own code that fires up 100 and then each time one finishes, it fires up the next one.
Use Bluebird's Promise.map() which has a built-in concurrency feature that will manage how many are inflight at the same time.
Use Async's async.mapLimit() which has a built-in concurrency feature that will manage how many are inflight at the same time.
As for writing code yourself to do this, you could do something like this;
/**
 * Runs fetchData(1) … fetchData(999999) while keeping at most 100
 * requests in flight at any moment. Resolves once every request that
 * was started has completed.
 */
function fetchAll() {
  var first = 1;
  var last = 1000000;
  var maxInFlight = 100;
  var inFlight = 0;
  var next = first;
  return new Promise(function (resolve, reject) {
    // Top up the pool until it is full; called again as requests finish.
    function pump() {
      while (next < last && inFlight < maxInFlight) {
        ++inFlight;
        fetchData(next++).then(
          function () {
            --inFlight;
            pump();
          },
          function (err) {
            --inFlight;
            // decide what to do with error here
            // to continue processing more requests, call pump() here
            // to stop processing more requests, call reject(err) here
          }
        );
      }
      if (next >= last && inFlight === 0) {
        // all requests are done here
        resolve();
      }
    }
    pump();
  });
}
I decided to use the async library.
Here is my complete solution to this:
var async = require('async')
var http = require('http');
// Build the work list: 2000 dummy items.
var inputData = [];
for (var i = 1; i <= 2000; i++) {
  inputData.push('number' + i);
}
var options = {
  host: "o2.pl",
  path: "/static/desktop.css?v=0.0.417",
  port: 80
}
// Worker for async.mapLimit: performs one HTTP request and invokes the
// node-style callback only once the full response has arrived.
// Fixes: the callback previously fired before the response ended (so the
// concurrency limit was meaningless), a wrapping Promise never resolved,
// and request errors were not handled at all.
function fetchData(number, callback) {
  var req = http.request(options, function (resp) {
    var body = '';
    resp.on('data', function (chunk) {
      body += chunk;
    });
    resp.on('end', function () {
      process.stdout.write('.')
      callback(null, number);
    });
  });
  req.on('error', function (err) {
    console.log('error');
    console.log(err);
    callback(err);
  });
  req.end();
}
// Kept for compatibility; unused by the mapLimit flow below.
function foo(item, callback){
  return callback(false, 'foo');
}
// At most 100 requests in flight at once.
async.mapLimit(inputData, 100, fetchData, function (err, result) {
  console.log('finished');
})
Thank you for your help.

Download files from links in array

I'm trying to download files from links in an array with a length of several thousand positions. The problem is that when I iterate over the array, I hit a wall when trying to synchronize the file fetch and write (Maximum call stack size exceeded). I've tried to make a recursive function and played with promises, but I still haven't managed to find a solution.
Help please!
My code so far:
/**
 * Downloads `url` into the file `dest`. Resolves once the file stream
 * has finished writing (passing `cb` through to file.close()); rejects
 * if the HTTPS request itself errors.
 */
function download(url, dest, cb) {
  return new Promise(function (resolve, reject) {
    const onResponse = function (response) {
      const file = fs.createWriteStream(dest);
      response.pipe(file);
      file.on('finish', function () {
        console.log('File downloaded')
        resolve(file.close(cb));
      });
    };
    https.get(url, onResponse).on('error', function (err) {
      reject(err)
    });
  });
}
// Downloads links[i] and only then recurses onto links[i+1].
// Fix: `.then(recursiveDownload(links, ++i))` called the function
// immediately and passed its (undefined) result to .then(), so every
// download started at once and the call stack blew up; wrap the
// recursive call in a function so it runs after the download resolves.
function recursiveDownload(links, i) {
  if (i < links.length) {
    download(links[i], './data/' + i + '.csv')
      .then(() => recursiveDownload(links, i + 1))
  } else {
    console.log('ended recursion')
  }
}
recursiveDownload(links, 0)
You can use a for loop to serialize all your promises and synchronize them. Try the following:
// Builds one sequential chain: each download is scheduled to start only
// after the previous promise in the chain resolves.
var promise = Promise.resolve();
for(let i = 0; i < links.length; i++){
promise = promise.then(()=> download(links[i], './data/' + i + '.csv'));
}
Or you can even chain your promises using Array.reduce() :
var promise = links.reduce((p, link, index) => p.then(()=>download(link, './data/' + index + '.csv')),Promise.resolve());
Because you call recursiveDownload immediately without waiting for the download to succeed. You actually want to call it when .then calls back:
// Correct form: pass a function to .then() so the recursive call runs
// only after the current download resolves.
download(links[i], './data/' + i + '.csv')
.then(() => recursiveDownload(links, ++i))

Infinite loop in Nodejs

While making a link Shortner script using nodejs, i encountered the following problem:
my program went on an infinite loop for a reason i ignore
here is the code:
// BROKEN BY DESIGN (the subject of this question): Link.findOne is
// asynchronous, so the callback that sets `cond = false` can never run
// while this synchronous while loop monopolizes the event loop — the
// loop never terminates.
function makeShort() {
var short = "";
var cond = true;
while(cond){
var possible = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
// NOTE(review): `length` is not defined in this scope — confirm the
// intended number of characters (5 per the surrounding text).
for(var i = 0; i < length; i++){
short += possible.charAt(Math.floor(Math.random() * possible.length));
}
let query = {short:short};
Link.findOne(query, (err, link)=>{
if(err) throw err;
if(!link){
console.log("here");
cond = false;
}
});
}
return short;
}
to then use it here :
// POST handler that expects makeShort() to synchronously return an
// unused code. NOTE(review): the makeShort above never returns — its
// while loop starves the async DB callback — so this handler hangs.
router.post('/', (req, res)=>{
let short = makeShort();
const newLink = new Link({
url: req.body.url,
short:short
});
newLink.save().then(link => {
res.json(link);
});
});
the idea is that i generate a random string (5 characters), and then, if it exists i create another one and so on.. until i find one that isn't used ( the database is empty btw so there is no reason for it to go infinite loop ).
You can loop over and test for values in your database using async/await. What we do is convert your function to an async function, then create a new function that will return a promise which will resolve true/false.
Next we call that function in the while loop and await for a result which will contain true/false we then set that to the variable cond and continue the loop.
It would look something like this:
// Generates random candidate codes until findOne() reports the code is
// not already taken, then returns it. Fix: `short` was declared with
// `let` *inside* the while block, so the final `return short` referenced
// an out-of-scope name (ReferenceError); declare it in function scope.
async function makeShort(length) {
  let short;
  let cond = true;
  while (cond) {
    short = (Math.random() * 1000).toString(32).replace(/\./g, '').substr(0, length);
    let query = { short: short };
    // findOne resolves true while the code is taken, so we keep looping.
    cond = await findOne(query);
  }
  return short;
}
// Resolves true when a Link matching `query` already exists, false when
// it does not. NOTE(review): a DB error also resolves false — i.e. the
// code is treated as available on error; confirm that is intended.
// Fix: the error branch lacked a `return`, so on error the promise's
// resolve was invoked twice (harmless but sloppy) with contradictory
// values derived from an undefined `link`.
function findOne(query) {
  return new Promise(resolve => {
    Link.findOne(query, (err, link) => {
      if (err) return resolve(false);
      if (!link) {
        return resolve(false);
      }
      return resolve(true);
    });
  })
}
We then can call it using let short = await makeShort() like this (we also have to the make (req, res) function async):
// POST /: awaits a freshly generated, unused short code, persists the
// new Link, and echoes the saved document back as JSON.
router.post('/', async (req, res) => {
  const short = await makeShort();
  const newLink = new Link({
    url: req.body.url,
    short,
  });
  newLink.save().then(link => res.json(link));
});
Don't mix synchronous looping and asynchronous condition updating. Something like this is guaranteed to run the while body as many times as it can before that DoSomething call returns a result:
// Deliberate anti-pattern example: the loop body never yields, so the
// async callback that would clear `cond` can never run — infinite loop.
while(cond) {
// call something async. don't wait for a result.
DoSomething.asynchronous( () => { cond = false; });
// then immediately restart the iteration
}
So don't do that. Make your makeShort generate a shortform string asynchronously.
const symbols = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
const symbolCount = symbols.length;

/**
 * Builds a random string of `howMany` alphanumeric characters.
 * Any falsy count (including 0 or omitted) yields the default of 5.
 */
function makeShort(howMany) {
  let remaining = howMany || 5;
  const picked = [];
  while (remaining--) {
    picked.push(symbols[Math.floor(Math.random() * symbolCount)]);
  }
  return picked.join("");
}
Then, do your verification independently of this:
// Express handler: generate a candidate short form, verify it against
// the DB, and either persist-and-respond or retry with a new candidate.
function assignShortForm(req, res) {
  const candidate = makeShort();
  const onAvailable = success => {
    // this short form was available
    new Link({ url: req.body.url, short: candidate }).save().then(link => res.json(link));
  };
  const onTaken = error => {
    // try again. RNG is not your friend, and this COULD run a very long time.
    assignShortForm(req, res);
  };
  verifyShortIsAvailable(candidate, onAvailable, onTaken);
}
With your router using that function, not inlining it:
router.post('/', assignShortForm);
In this, verifyShortIsAvailable should do its work asynchronously:
// Asynchronous availability check: calls resolve() when the short form
// is free, reject() on a DB error or when it is already taken.
// Fixes: `verify verifyShortIsAvailable(...)` was a syntax error
// (should be `function`), and it queried an undefined `query` variable
// instead of the `short` argument it was given.
function verifyShortIsAvailable(short, resolve, reject) {
  Link.findOne({ short: short }, (err, link) => {
    if (err) return reject(err);
    if (link) return reject("...");
    return resolve();
  });
}
while loops run synchronously, meaning they block the thread from further execution until they are complete. Because the link shortener is asynchronous, it is being blocked by the while loop.
To handle this code asynchronously, you can return a Promise
// Generates a random code and resolves with the matching Link document
// (or null when the code is unused); rejects on a DB error.
// Fix: `short` was never declared or initialized, so the += loop built
// onto an implicit global starting from undefined ("undefinedAb...").
function makeShort() {
  var possible = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
  var short = "";
  // note length was previously undefined in the comparison. use possible.length or another arbitrary value
  for (var i = 0; i < possible.length; i++) {
    short += possible.charAt(Math.floor(Math.random() * possible.length));
  }
  let query = { short: short };
  return new Promise((resolve, reject) => {
    Link.findOne(query, (err, link) => {
      if (err) return reject(err);
      resolve(link)
    });
  })
}
Then you can use it like so...
// Consume the promise-returning makeShort(). NOTE(review): `short` here
// holds the promise returned by .then/.catch, not the resolved link.
let short = makeShort().then(shortLink => {
// do something with the link
}).catch(err => {
// handle the error
});
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_promises

Problems with asynchronous in loop

I need to:
1) Make two request
2) Then take data from requests
3) And send response
I am using firebase database. I am taking data from database by forEach
I need to process data and response data(json)
Can anyone help me with asynchronous? Or help write callback function
I read a lot of information about this, but I don't understand it.
My code here works not fine
I have problems with asynchronous
So how can I improve it?
// GET /places/all: for every place stored in Firebase, fetch Google
// Place details and a Distance Matrix result, accumulating resData.
// NOTE(review): promise1's own resolve/reject are never invoked, so the
// final res.send() can never fire; see inline notes for further issues.
router.get('/places/all', function(req, res, next) {
var lat = req.param('lat');
// NOTE(review): lon, result, resData (and several assignments below)
// are implicit globals — they are shared across concurrent requests.
lon = req.param('lon');
result = [],
resData = [];
var query = firebase.database().ref('Places');
var i = 0;
var promise1 = new Promise(function(resolve, reject) {
query.on("value", function(snapshot) {
console.log(snapshot.numChildren())
snapshot.forEach(function(childSnapshot) {
childData = childSnapshot.val();
var gmapApiKey = 'API_KEY';
placeID = childData.placeID;
language = req.param('lang');
url = 'https://maps.googleapis.com/maps/api/place/details/json?placeid=' + placeID + '&key=' + gmapApiKey + '&language=' + language;
placeLat = childData.lat;
placeLon = childData.lon;
distanceMatrixApiUrl = 'https://maps.googleapis.com/maps/api/distancematrix/json?origins=' + lat + ',' + lon + '&destinations=' + placeLat + ',' + placeLon + '&key=' + gmapApiKey;
i++;
var requestDistance = new Promise(function(resolve, reject) {
https.get(distanceMatrixApiUrl, res => {
res.setEncoding("utf8");
let body = "";
res.on("data", data => {
body += data;
});
res.on("end", () => {
body = JSON.parse(body);
resolve(body);
});
});
// NOTE(review): requestDistanceApi is undefined — this line throws a
// ReferenceError inside the promise executor, rejecting requestDistance.
console.log(requestDistanceApi)
requestDistance = Promise.resolve(requestDistanceApi)
});
var requestPlaces = new Promise(function(resolve, reject) {
https.get(url, res => {
res.setEncoding("utf8");
let body = "";
res.on("data", data => {
body += data;
});
res.on("end", () => {
i++;
result = JSON.parse(body);
resolve(result);
});
});
// NOTE(review): requestPlaceApi / requestPlacesApi are also undefined.
console.log(requestPlaceApi)
requestPlaces = Promise.resolve(requestPlacesApi)
i++;
});
requestDistance.then(function(valueDistance) {
requestPlaces.then(function(valuePlace) {
resData.push({
name: valuePlace.result.name,
address: valuePlace.result.vicinity,
open_now: valuePlace.result.opening_hours.open_now,
weekday_text: valuePlace.result.opening_hours.weekday_text,
latitude: valuePlace.result.geometry.location.lat,
longitude: valuePlace.result.geometry.location.lng,
distance: valueDistance.rows[0].elements[0].distance.text,
});
// NOTE(review): assert and done() look like test-framework leftovers;
// neither is defined in this route.
}).catch((error) => {
assert.isNotOk(error,'Promise Places error');
done();
});
}).catch((error) => {
assert.isNotOk(error,'Promise Distance error');
done();
});
});
});
});
promise1.then(function(value) {
res.send(value);
}).catch((error) => {
assert.isNotOk(error,'Promise error');
done();
});
});
I'm not going to rewrite this all but rather will give you an outline.
First of all the firebase SDK has promises available in lieu of using callbacks. Use those instead of adding new Promise to wrap them. This will help streamline the code structure and enable making the whole promise chain simpler
I'm not sure which package https.get() comes from, but it too likely has promises. Convert to using those also.
The other trick will be to combine the 2 nested requests into one promise which can be done using Promise.all()
So here's the basic code outline. Note that I have not paid a lot of attention to exactly how you want all this data processed in order to send to client. You will need to adjust as needed
You probably want to break this whole code block down into smaller functions also to make the logic easier to follow
Also note I have not added any error handling catch()s which is going to be up to you
// OUTLINE ONLY — intentionally incomplete pseudo-code (note the `....`
// placeholders). NOTE(review): `var placesPromise new Promise` is
// missing an `=`, and `mainpromise` is later read as `mainPromise`
// (case mismatch); fix both before using this as real code.
// use firebase promise instead of callback
var mainpromise = query.on("value").then(function(snapshot) {
var subPromises = [];
snapshot.forEach(function(childSnapshot) {
//childData = ...
// look into https promises instead of wrapping in `new Promise
var placesPromise new Promise(function(resolve, reject) {
https.get(distanceMatrixApiUrl, res => {
// .....
res.on("end", () => {
body = JSON.parse(body);
resolve(body);
});
});
});
var distancePromise = new Promise.... // similar to above
// combine these promises into one
var combinedPromise = Promise.all([placesPromise, distancePromise]).then(function(resultsArray) {
var places = resultsArray[0],
distances = resultsArray[1];
//assign above to childData or whatever you want to do with them
return childData;// or return processed results or skip this `then and do all processing in the next then() commented below
})
// store combined promises in array
subPromises.push(combinedPromise);
});
// return promise to resolve mainPromise
return Promise.all(subPromises)// might need a `then()` here to return processed data depending on structure you want returned
});
mainPromise.then(function(results) {
// process and send results which is array of all the childData from snapshot.forEach
res.send(processedData);
})

How to wait for each iteration in for loop and return response as API response in nodeJS

I'm using for loop to iterate over an array of elements and to call the same function with different parameters inside the for loop. Here is my code:
exports.listTopSongs = function(query) {
return new Promise(function(resolve, reject) {
var str = query.split(","), category,
for(var i=0; i<str.length; i++) {
sampleFn(str[i], 'sample', resolve, reject);
}
});
};
// Looks up redis keys for `lang`, loads the hash, and queries Sequelize
// for each field; resolves the shared `result` once the category array
// length matches the number of fields.
// NOTE(review): each call adds another "error" listener to the shared
// redis client — repeated calls stack listeners.
function sampleFn(lang, cat, resolve, reject) {
client.on("error", function (err) {
console.log(err);
var err = new Error('Exception in redis client connection')
reject(err);
});
client.keys(lang, function (err, keys){
if (err) return console.log(err);
if(keys.length != 0) {
client.hgetall(keys, function (error, value) {
var objects = Object.keys(value);
// NOTE(review): `result` is never declared in this function — it is
// shared implicit state, so concurrent calls clobber each other.
result['title'] = lang;
result[cat] = [];
var x =0;
for(x; x<objects.length; x++) {
var val = objects[x];
User.findAll({attributes: ['X', 'Y', 'Z'],
where: {
A: val
}
}).then(data => {
if(data != null) {
//some actions with data and stored it seperately in a Json array
if(result[cat].length == objects.length) {
resolve(result);
}
} else {
// NOTE(review): this branch reads data.dataValues while data is
// null, which would itself throw.
console.log(""+cat+" is not avilable for this value "+data.dataValues['X']);
}
});
}
});
});
}
Here it won't wait for the completion of the first iteration. It just runs asynchronously, moving on before the first iteration's function completes. I need to return the result as result:[{1, 2}, {3, 4}], but it runs straight through and returns an empty result, or only one object, before completing everything. How do I resolve this?
I used node-async-loop. But it uses next, and I wasn't able to pass my parameters while using that package. Please help me.
Async provides control flow methods allowing to do so.
Using async.each:
// Canonical async.each example: the iterator runs over all items in
// parallel, and the final callback fires once per run — either with the
// first error reported or with no error after every item completes.
async.each(openFiles, function(file, callback) {
// Perform operation on file here.
console.log('Processing file ' + file);
if( file.length > 32 ) {
console.log('This file name is too long');
callback('File name too long');
} else {
// Do work to process file here
console.log('File processed');
callback();
}
}, function(err) {
// if any of the file processing produced an error, err would equal that error
if( err ) {
// One of the iterations produced an error.
// All processing will now stop.
console.log('A file failed to process');
} else {
console.log('All files have been processed successfully');
}
});
If you don't want to use a library, you can code it yourself. It would also be very instructive. I took your issue and coded a dummy async loop :
function listTopSongs(query) {
return new Promise(async(resolve, reject) => { //add async here in order to do asynchronous calls
const str = query.split(",") //str is const, and the other variable was not used anyway
for( let i = 0;i < str.length; i++) {
const planet = await sampleFn(str[i], 'sample', resolve, reject)
console.log(planet)
}
});
};
function sampleFn(a, b, c, d) {
return fetch(`https://swapi.co/api/planets/${a}/`)
.then(r => r.json())
.then(rjson => (a + " : " + rjson.name))
}
listTopSongs("1,2,3,4,5,6,7,8,9")
I used some dummy star wars API to fake a long promise but it should work with your sampleFn. Be careful, it is very, very slow if you have network call like the one in the example.
EDIT: I ran your code and I noticed there are a few mistakes: there is no resolve in your promise so it's not a thenable (https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/resolve see thenable )
Here is a fully working code. The nice part : no library needed, no dependencies.
//for node.js, use node-fetch :
//const fetch = require("node-fetch")

// Resolves with one formatted planet string per comma-separated item,
// fetching them strictly one after another (each await blocks the next).
function listTopSongs(query) {
  return new Promise(async (resolve, reject) => {
    const parts = query.split(",");
    const planets = [];
    for (let idx = 0; idx < parts.length; idx++) {
      const planet = await sampleFn(idx + 1, parts[idx], resolve, reject);
      planets[idx] = planet;
      console.log(planet);
    }
    resolve(planets);
  });
}

// Fetches planet `a` from the SWAPI and formats it as "<a><b> : <name>".
function sampleFn(a, b, c, d) {
  const url = `https://swapi.co/api/planets/${a}/`;
  return fetch(url)
    .then((r) => r.json())
    .then((rjson) => a + b + " : " + rjson.name);
}

listTopSongs("a,b,c,d").then(planets => console.log(planets))
Since you are using promise, you can do something like this
exports.listTopSongs = function(query) {
return Promise.resolve(true).then(function(){
var str = query.split(",");
var promises = str.map(function(s){
return sampleFn(str[i], 'sample');
});
return Promise.all(promises);
}).then(function(results){
//whatever you want to do with the result
});
};
For this to work you have to change your sampleFn to not to depend on external resolve and reject functions. I don't see a reason using external resolve and reject. why just not use Promise.Resolve, Promise.Reject;

Categories