Save results of multiple requests into array [closed] - javascript

Closed. This question needs debugging details. It is not currently accepting answers.
Edit the question to include desired behavior, a specific problem or error, and the shortest code necessary to reproduce the problem. This will help others answer the question.
Closed 7 years ago.
Improve this question
I was trying to make multiple API requests and store responses into the array.
Array stayed empty because I didn't know about async behavior of for loop.
// BROKEN (as discussed above): apiCall is asynchronous, so the loop only
// schedules the requests; none of the callbacks have run yet when the
// final console.log executes.
const results = [];
for (let i = 0; i < apiUrls.length; i++) {
apiCall(apiUrls[i], res => {
results.push(res);
});
}
console.log(results) // [] — logged before any callback fires
So I rewrote it to:
const results = []

/**
 * Sequential version: fetches every URL in `apiUrls` one at a time and
 * collects the responses into `results` in request order.
 *
 * BUG FIX: the original used `for...in`, which iterates the array's
 * *indices* ("0", "1", ...), so apiCall received index strings instead
 * of URLs. `for...of` iterates the values themselves.
 */
async function makeApiCalls () {
  for (const apiUrl of apiUrls) {
    const res = await apiCall(apiUrl)
    results.push(res)
  }
  console.log(results) // [data, someData, otherData...]
}

makeApiCalls()
It works! But runs in sequence. We can improve it to run in parallel like this:
let results = []

/**
 * Parallel version: all requests are started immediately and
 * Promise.all waits for every one, preserving input order.
 *
 * BUG FIX: the original used `for...in`, which walks array *indices*
 * ("0", "1", ...), so apiCall was handed index strings rather than
 * URLs. `for...of` yields the URL values.
 */
async function makeApiCalls () {
  const promises = []
  // fire all requests
  for (const apiUrl of apiUrls) {
    promises.push(apiCall(apiUrl))
  }
  // wait until all promises are resolved and copy responses to array
  results = [...await Promise.all(promises)];
  console.log(results) // [data, someData, otherData...]
}

makeApiCalls()

Looping over asynchronous calls is not going to work very well, because you'll have no idea when you're finished and the stats array is populated. You need some additional machinery to handle this. Without using async or promises, the basic idea is:
// Fire one request per backend and record 'OK'/'Not OK' per response.
// Completion is detected by counting finished callbacks, because the
// for loop itself returns long before any response arrives.
var stats = [];
var finishedCount = 0; // keep track of how many callbacks have completed
for (let i = 0; i<tempBackends.length; i++)
{
http.get(tempBackends[i], function(res) {
console.log("Received response: " + res.statusCode);
if(res.statusCode == 200) {
stats.push('OK');
console.log('pushed OK\n');
}
else {
stats.push('Not OK');
console.log('pushed Not OK\n');
}
// ADD LOGIC HERE TO HANDLE ALL CALLBACKS HAVING FINISHED
// Node callbacks run one at a time on the event loop, so this counter
// needs no locking; the last callback to finish prints the summary.
finishedCount++;
if (finishedCount === tempBackends.length) {
console.log("ALL DONE!! stats is", stats);
}
});
}
However, with this approach, the stats array is not going to be aligned with tempBackends. The order of the values in stats will be based on the order in which the asynchronous calls finished. In addition, this style of code is going to be hard to maintain. Instead, you should use async or promises. The async approach would be:
// async.map runs the iteratee for every backend in parallel and hands
// the collected results (in input order) to the final callback.
async.map(
  tempBackends,
  function(backend, callback) {
    http.get(backend, function(res) {
      // First argument is the error slot; map each response to a status string.
      callback(null, res.statusCode === 200 ? "OK" : "Not OK");
    });
  },  // BUG FIX: this closing `},` was missing in the original — the
      // iteratee must be closed before the results callback is passed,
      // otherwise the snippet is a syntax error.
  function(err, data) {
    console.log("ALL DONE!! stats is", data);
  }
);
The promise approach is more readable and writable. First, make a promisified version of http.get:
/**
 * Promisified health check: resolves with "OK" when the backend answers
 * HTTP 200, "Not OK" for any other status. Never rejects.
 */
function getPromise(backend) {
  return new Promise((resolve) => {
    http.get(backend, (res) => {
      const healthy = res.statusCode === 200;
      resolve(healthy ? "OK" : "Not OK");
    });
  });
}
Now you can just write
// Start all checks in parallel; Promise.all resolves with the statuses
// in the same order as tempBackends.
Promise.all(tempBackends.map(getPromise)).then((stats) => {
  console.log("ALL DONE!! stats is", stats);
});

var http = require('http');

// Five identical endpoints, fetched strictly one after another.
var list = ['http://rest-service.guides.spring.io/greeting',
  'http://rest-service.guides.spring.io/greeting',
  'http://rest-service.guides.spring.io/greeting',
  'http://rest-service.guides.spring.io/greeting',
  'http://rest-service.guides.spring.io/greeting'];

var responseArray = [];

// Requests list[pos]; only once that response has fully ended does it
// record the status code and recurse on the next index, so requests
// never overlap.
var calls = function () {
  var fetchAt = function (pos) {
    http.get(list[pos], function (apiResponse) {
      var statusCode = apiResponse.statusCode;
      // Body must be consumed for the 'end' event to fire.
      apiResponse.on('data', function (chunk) {
      });
      apiResponse.on('end', function () {
        responseArray.push(statusCode);
        var next = pos + 1;
        if (next < list.length) {
          fetchAt(next);
        } else {
          console.log(responseArray.toString());
        }
      });
    });
  };
  fetchAt(0);
};
calls();
If you work with Node.js, you need to know in which callback to trigger the next iteration and where to process your data. Notice how I call the next iteration inside the response.on('end') handler, and how I push the statusCode onto the array there as well.

Try passing your array variable stats inside the function, because the scope of a function is local.
// NOTE(review): this suggestion is incorrect as posted — http.get's
// callback receives only the response object, so the extra `stats`
// parameter would be undefined and would *shadow* the outer stats
// array rather than give access to it. Kept verbatim.
http.get(tempBackends[i], function(res,stats) {
// access stats here now
}

Related

Fetching synchroneously from redis in Node through async/await

I am learning Javascript and Vue from Java from few days and not able to solve a problem with my Node, Express app through async/await. The below code is receiving a list of Stock symbols from request and then checking through a loop if details against any of the symbols is already cached in redis.
// NOTE(review): this is the question's broken code, kept verbatim; the
// answer below explains the problems.
var controllers = {
getCurrentPrice: function(req, res) {
var symbolsArray = req.body.symbols;
var results = [];
var tmpArray = [];
// _.each does not wait for async work, so execution falls through
// before any hget reply has arrived.
_(symbolsArray).each( async function(symbol, iPos) {
client.hget("realtime", symbol, function(err, reply) {
if(reply) {
// Awaiting push() is meaningless: push returns a number, not a promise.
await results.push(reply);
} else {
await tmpArray.push(symbol);
}
console.log("reply", reply);
});
});
// Runs before the redis callbacks, hence always logs an empty array.
console.log("Results so far ", results);
if( !tmpArray || tmpArray.length == 0 ) { //will be fetching these now }
}
}
Getting output in the inner console statement but not for the outer one. I have tried looking at few solutions through net like through redis-co to promisify the redis calls but could not exactly solve it.
There are several things wrong here:
Doing await on the result of a .push() doesn't do anything useful. You use await on a promise.
Your .each() loop doesn't wait for each asycnhronous operation to finish, therefore you have no way of knowing when all the async operations are done
I'd suggest using a regular for loop where async/await will pause for the loop:
const util = require('util');
// Promisified hget so it can be awaited instead of taking a callback.
// (Assigned onto client so `this` is bound correctly when called.)
client.hgetP = util.promisify(client.hget);

var controllers = {
  /**
   * Looks up each requested symbol in the "realtime" redis hash.
   * Cached prices are collected into `results`; symbols that miss the
   * cache go into `tmpArray` for a later fetch. Returns `results`
   * (i.e. resolves the returned promise with it).
   */
  getCurrentPrice: async function(req, res) {
    var symbolsArray = req.body.symbols;
    var results = [];
    var tmpArray = [];
    // A plain for...of loop pauses at each await, unlike _.each().
    for (let symbol of symbolsArray) {
      let reply = await client.hgetP("realtime", symbol);
      if (reply) {
        results.push(reply);
      } else {
        // BUG FIX: the original pushed to `tempArray`, which is never
        // declared — a ReferenceError on every cache miss.
        tmpArray.push(symbol);
      }
    }
    // do any further processing of tmpArray here
    console.log(results);
    return results; // this will be the resolved value of the returned promise
  }
}
Sample usage:
// Sample usage. Two fixes to the original snippet: getCurrentPrice is a
// function and must be *called* (it expects the req/res pair Express
// would supply), and `console(results)` is not callable — console.log is.
obj.getCurrentPrice(req, res).then(results => {
  console.log(results);
}).catch(err => {
  console.log(err);
});

Fetching messages across multiple channels

I'm currently trying to fetch all messages in a guild, yet the .fetchMessages method only works on specific channels.
I've already tried using the .forEach function but have received multiple errors whilst doing so.
// NOTE(review): broken as posted — see the answer below. Awaiting
// channels.forEach(...) is useless (forEach returns undefined), and the
// fetchMessages promises created inside it are never awaited, so the
// final log runs before any messages arrive.
async function intervalFunc() {
var mainGuild = client.guilds.get("562324876330008576");
var messagesArray = [];
await mainGuild.channels.forEach(channel => {
if (channel.type == "text") {
channel.fetchMessages({ limit: 20 }).then(msg => {
messagesArray.push(msg);
});
}
});
// Always 0 here: the pushes happen later, when the fetches resolve.
console.log(messagesArray.length)
//....
}
The expected output is at least a few hundred but instead, it outputs 0.
You need to put await on the async operation:
async function intervalFunc() {
  var mainGuild = client.guilds.get("562324876330008576");
  var messagesArray = [];
  // BUG FIX: the original used `for (channel in mainGuild.channels)`,
  // which (a) leaks `channel` as an implicit global and (b) iterates
  // property *keys*, not the channel objects — so `channel.type` was
  // never "text" and nothing was fetched. `channels` is a Map-like
  // collection, so iterate its values instead.
  for (const channel of mainGuild.channels.values()) {
    if (channel.type == "text") {
      // Awaiting here makes the fetches sequential, so messagesArray is
      // fully populated before the final log runs.
      const messages = await channel.fetchMessages({ limit: 20 });
      messagesArray = [
        ...messagesArray,
        ...messages,
      ];
    }
  }
  console.log(messagesArray.length);
}
So here it will wait for fetchMessages to return a value asynchronously and then proceed to the next iteration.

Loops and Callback hell

Suppose you have an Array/Object that contains a list of values. Lets say those a mysql commands or urls or filespaths. Now you want to iterate over all of them and execute some code over every entry.
// Fine as long as iterations are independent: each call is simply fired.
for(let i = 0; i < urls.length; i++){
doSthWith(urls[i]);
}
No Problem so far. But now lets say each function has a callback and needs the result of the last execution. e.g. you request something from one website and you want to use the results of this request for one of your following requests.
// Broken once iterations depend on each other: doSthWith is async, so
// resultOfLastIteration has not been set yet when the next pass runs.
for(let i = 0; i < urls.length; i++){
if(resultOfLastIteration.successful){ //or some other result besides the last one
doSthWith(urls[i]);
}
}
Now let's say the length of urls (or something similar) is over 100. That's why you normally use a loop — so you don't need to write the same call a hundred times. That also means that Promises won't do the trick either (unless I'm unaware of some trick), because you have the same problem:
// Pseudo-code: chaining by hand means either nesting every call or
// writing the whole sequence out long-hand — neither scales to 100 URLs.
doSthWith(urls[0]).then(...
doSthWith(urls[1]).then(... //either put them inside each other
).then(...
doSthWith(urls[i]) //or in sequence
...
).catch(err){...}
Either way I dont see a way to use a loop.
A way that I found but isnt really "good" is to use the package "wait.for"(https://www.npmjs.com/package/wait.for). But what makes this package tricky is to launch a fiber each time you want to use wait.for:
//somewhere you use the function in a fiber Context
wait.for(loopedExecutionOfUrls, urls);
//function declaration
// Runs the URL loop inside a fiber; each wait.for blocks the fiber until
// the callback-style doSthWith finishes, giving sequential flow.
function loopedExecutionOfUrls(urls, cb){
//variables:
for(let i = 0; i < urls.length; i++){
// gate each call on the previous iteration's result
if(someTempResultVar[i-1] === true){
someTempResultVar = wait.for(doSthWith,urls[i]);
} else if(...){...}
}
}
But Im not sure if this approach is really good, besides you always have to check if you have wrapped the whole thing in a Fiber so for each function that has loops with functions that have callbacks. Thus you have 3 levels: the lauchFiber level, wait.for(loopedFunction) level and the wait.for the callback function level. (Hope I that was formulated understandable)
So my questions is: Do you guys have a good approach where you can loop throw callback functions and can use results of those whenever you like?
good = easy to use, read, performant, not recursive,...
(Im sorry if this question is stupid, but I really have problems getting along with this asynchronous programming)
If you want to wait for doSthWith to finish before doing the same but with the nex url, you have to chain your promises and you can use array.prototype.reduce to do that:
urls = ["aaa", "bbb", "ccc", "ddd"];
// reduce threads a single promise chain through the array: each step
// waits for the previous request, receives its result, and returns the
// next request's promise. Promise.resolve() seeds the chain.
urls.reduce((lastPromise, url) => lastPromise.then((resultOfPreviousPromise) => {
console.log("Result of previous request: ", resultOfPreviousPromise); // <-- Result of the previous request that you can use for the next request
return doSthWith(url);
}), Promise.resolve());
function doSthWith(arg) { // Simulate the doSthWith promise
console.log("do something with: ", arg);
return new Promise(resolve => {
// Resolves ~2s later with a string derived from the input.
setTimeout(() => resolve("result of " + arg), 2000);
});
}
Use async, specifically async.each:
const async = require('async');

// Simulated unit of work: logs, waits two seconds, then reports done.
const doSthWith = function (url, done) {
  console.log('doing something with ' + url);
  setTimeout(() => done(), 2000);
};

const urls = ['https://stackoverflow.com/', 'https://phihag.de/'];

// The final callback fires once every URL has been processed, or as
// soon as any task reports an error.
async.each(urls, doSthWith, (err) => {
  if (!err) {
    console.log('done!');
  } else {
    // In practice, likely a callback or throw here
    console.error(err);
  }
});
Use async.map if you are interested in the result.
When I need to loop over promises I use my handy dandy ploop function. Here is an example:
// Function that returns a promise
// Resolves ~1 second later with a random integer in [1, 10], logging
// the value it produced.
var searchForNumber = function(number) {
return new Promise(function(resolve, reject) {
setTimeout(function() {
var min = 1;
var max = 10;
var val = Math.floor(Math.random()*(max-min+1)+min);
console.log('Value is: ' + val.toString());
// `return` only exits the timer callback; resolve(val) settles the promise.
return resolve(val);
}, 1000);
});
};
// fn     : function that should return a promise.
// args   : the arguments that should be passed to fn.
// donefn : function that should check the result of the promise and
//          return true to indicate whether ploop should stop or not.
//
// Repeatedly invokes fn(...args) until donefn(result) is strictly true,
// then resolves with that final result. Starting from an already-resolved
// promise defers the first call to a microtask, so fn never runs
// synchronously.
var ploop = function(fn, args, donefn) {
  return Promise.resolve(true)
    .then(() => fn.apply(null, args))
    .then((result) =>
      donefn(result) === true ? result : ploop(fn, args, donefn)
    );
};
var searchFor = 4;
// Stop condition: keep polling until the random value equals searchFor.
var donefn = function(result) {
return result === searchFor;
};
console.log('Searching for: ' + searchFor);
ploop(searchForNumber, [searchFor], donefn)
.then(function(val) {
console.log('Finally found! ' + val.toString());
process.exit(0);
})
.catch(function(err) {
process.exit(1);
});

learnyounode - Juggling Async - different order

This is from the learnyounode tutorial exercise 9 on node.js. I'm having trouble understanding why my code doesn't print out the data in order.
let http = require('http'),
bl = require('bl'),
urlArray = [process.argv[2], process.argv[3], process.argv[4]]
// NOTE(review): no comma ends the previous line, so ASI terminates the
// `let` there and this assignment creates an implicit global.
results = []
//counter = 0;
function collectData(i) {
http.get(urlArray[i], (res) => {
res.pipe(bl((err, data) => {
if (err) {
return console.log(err);
}
data = data.toString();
// Store by index so output order matches argument order.
results[i] = data;
//counter++;
//if (counter === 3) {
// NOTE(review): length === 3 is an unreliable "all done" check — if
// results[2] arrives first, the sparse array's length is already 3.
// The commented-out counter is the safer approach.
if (results.length === 3) {
results.forEach((result) => {
console.log(result);
})
}
}))
})
}
for (let i = 0; i < urlArray.length; i++) {
collectData(i);
}
The for loop should start from the first url and go through to the last in order. From my understanding, whatever happens in the current iteration of the loop must resolve for the loop to move to the next iteration. However, the results seem to be random. If I run my solution on the command line, sometimes the results are in order and sometimes they're not.
Edit: This is my current solution which works. I added the counter variable and put the http request into a function.
The reason you're getting different results on each run is that http's get function is asynchronous. You're issuing the requests in the right order, but the web server at each URL does not respond instantly.
So basically, if you have two URLs to call:
http://stackoverflow.com
http://google.com
You call them in this order, but if Google happens to respond quickly this run (say 10 ms) while Stack Overflow takes a little longer (say 20 ms), your callback for Google runs first, then the callback for Stack Overflow.
The response times can be different each run, thats why you experience different results each run.
This is your callback-function:
// Excerpt of the callback as posted (braces left unbalanced in the
// original answer): it runs whenever *that* response finishes — in
// response-arrival order, not request order.
res.pipe(bl((err, data) => {
if (err) {
return console.log(err);
}
data = data.toString();
console.log(data);
}
The entire problem is with the variable "i" and the asynchronous calls. With this particular logic, you don't have control over the value of i because of the async calls.
In order to understand the problem with your code, print console.log after the line:
results[i] = data;
This is my solution to the problem:
var http = require('http');

// Outstanding-request counter; when it hits zero, everything is printed.
var count = 3;
// Bodies stored by slot so output order matches argument order, no
// matter which response arrives first.
var contentResults = [];

// Fires request #i (URL taken from process.argv[i+1]), buffers the whole
// body, files it under index i, and triggers printAll once the last
// in-flight request has completed.
function hitRequests(i) {
  var request = http.get(process.argv[i + 1], function (response) {
    response.setEncoding('utf8');
    var body = '';
    response.on('data', function (chunk) {
      body += chunk;
    });
    response.on('end', function (chunk) {
      contentResults[i] = body;
      count--;
      if (count <= 0) {
        printAll();
      }
    });
  });
  request.on('error', function (e) {
    console.log('error' + e);
  });
}

for (i = 1; i <= 3; i++) {
  hitRequests(i);
}

// Dumps the collected bodies in slot order.
function printAll() {
  contentResults.forEach(function (result) {
    console.log(result);
  });
}

Node.js synchronously loop or iterate over asynchronous statements

I want to do a for-each loop but have it run synchronously. Each iteration of the loop will do an http.get call that returns JSON whose values are inserted into a database. The problem is that the loop runs asynchronously, which causes all of the http.get calls to run at once, and my database doesn't end up inserting all of the data. I am using async-foreach to try to do what I want, but I don't have to use it if I can do it the right way.
// NOTE(review): question code, kept verbatim. forEach fires every
// http.get at once and nothing waits — which is the problem being asked
// about. `mCardImport` and `theUrl` are also implicit globals.
mCardImport = require('m_cardImport.js');
var http = require('http');
app.get('/path/hi', function(req, res) {
mCardImport.getList(function(sets) {
forEach(sets, function(item, index, arr) {
// NOTE(review): reads sets.set_code rather than item.set_code —
// presumably a typo in the original post.
theUrl = 'http://' + sets.set_code + '.json';
http.get(theUrl, function(res) {
var jsonData = '';
res.on('data', function(chunk) {
jsonData += chunk;
});
res.on('end', function() {
var theResponse = JSON.parse(jsonData);
mCardImport.importResponse(theResponse.list, theResponse.code, function(theSet) {
console.log("SET: " + theSet);
});
});
});
});
});
});
and my model
// NOTE(review): question code. Two issues the thread later resolves:
// callback(setCode) fires immediately, before any INSERT has completed,
// and the pooled connection is never released — the asker's own fix
// (connection.release()) is described further down.
exports.importResponse = function(cardList, setCode, callback) {
mysqlLib.getConnection(function(err, connection) {
forEach(cardList, function(item, index, arr) {
var theSql = "INSERT INTO table (name, code, multid, collector_set_num) VALUES "
+ "(?, ?, ?, ?) ON DUPLICATE KEY UPDATE id=id";
connection.query(theSql, [item.name, setCode, item.multid, item.number], function(err, results) {
if (err) {
// Errors are logged but otherwise ignored; the insert loop continues.
console.log(err);
};
});
});
});
callback(setCode);
};
With recursion the code is pretty clean. Wait for the http response to come back then fire off next attempt. This will work in all versions of node.
// Three sample endpoints, fetched strictly one at a time.
var urls = ['http://stackoverflow.com/', 'http://security.stackexchange.com/', 'http://unix.stackexchange.com/'];

// Recursive walker: the next request is only issued from inside the
// previous request's callback, which guarantees sequential execution.
var processItems = function (index) {
  if (index >= urls.length) {
    return; // every URL handled
  }
  http.get(urls[index], function (res) {
    // add some code here to process the response
    processItems(index + 1);
  });
};

processItems(0);
A solution using promises would also work well, and is more terse. For example, if you have a version of get that returns a promise and Node v7.6+, you could write an async/await function like this example, which uses some new JS features.
const urls = ['http://stackoverflow.com/', 'http://security.stackexchange.com/', 'http://unix.stackexchange.com/'];
// Sequential variant using async/await (Node v7.6+): the loop suspends
// at each await, so requests never overlap. Assumes promisifiedHttpGet
// is a promise-returning wrapper around http.get.
async function processItems(urls){
for(const url of urls) {
const response = await promisifiedHttpGet(url);
// add some code here to process the response.
}
};
processItems(urls);
Note: both of these examples skip over error handling, but you should probably have that in a production app.
To loop and synchronously chain asynchronous actions, the cleanest solution is probably to use a promise library (promises are being introduced in ES6, this is the way to go).
Using Bluebird, this could be
Var p = Promise.resolve();
forEach(sets, function(item, index, arr) {
p.then(new Promise(function(resolve, reject) {
http.get(theUrl, function(res) {
....
res.on('end', function() {
...
resolve();
}
}));
});
p.then(function(){
// all tasks launched in the loop are finished
});
I found out that I wasn't releasing my mysql connections after I was done with each call and this tied up the connections causing it to fail and appear to be an issue with synchronization.
After explicitly calling connection.release(); it caused my code to work 100% correctly even in an asynchronous fashion.
Thanks for those who posted to this question.
"use strict";
var Promise = require("bluebird");
var some = require('promise-sequence/lib/some');
// Resolves with its argument after a random 1.5–2s delay, simulating a
// slow asynchronous task.
var pinger = function(wht) {
return new Promise(function(resolve, reject) {
setTimeout(function () {
console.log('I`ll Be Waiting: ' + wht);
resolve(wht);
}, Math.random() * (2000 - 1500) + 1500);
});
}
var result = [];
// Build the inputs 0..12.
for (var i = 0; i <= 12; i++) {
result.push(i);
}
// promise-sequence runs pinger over every input and resolves with the
// collected results.
some(result, pinger).then(function(result){
console.log(result);
});
Just wrap the loop in an async function. This example illustrates what I mean:
// Returns a promise that resolves after one second (the reject
// parameter is unused).
const oneSecond = async () =>
new Promise((res, _) => setTimeout(res, 1000));
This function completes after just 1 second:
// Without await, all five timers start at once — the loop returns
// immediately and everything logs roughly one second later.
const syncFun = () => {
for (let i = 0; i < 5; i++) {
oneSecond().then(() => console.log(`${i}`));
}
}
syncFun(); // Completes after 1 second ❌
This one works as expected, finishing after 5 seconds:
// With await inside an async function, each timer must finish before
// the next one starts — five sequential seconds in total.
const asyncFun = async () => {
for (let i = 0; i < 5; i++) {
await oneSecond();
console.log(`${i}`);
}
}
asyncFun(); // Completes after 5 seconds ✅
var urls = ['http://stackoverflow.com/', 'http://security.stackexchange.com/', 'http://unix.stackexchange.com/'];
// WARNING: http.get is asynchronous — this loop fires all three requests
// at once and the callbacks run later, in whichever order the responses
// arrive. Note also that `i` is assigned without var/let/const, so it
// leaks as an implicit global.
for (i = 0; i < urls.length; i++){
http.get(urls[i], function(res) {
// add some code here to process the response
});
}

Categories