Calling multiple HTTP requests in a single HTTP request in Node.js - javascript

I am trying to call multiple URLs from a single request, push each JSON response into an array, and send that array in the response to the end user.
My code look like this:
// Question code (as posted) — contains the bugs pointed out below.
var express = require('express');
var main_router = express.Router();
var http = require('http');
// BUG: missing `var`/`let`/`const` — `urls` becomes an implicit global.
urls = [
"http://localhost:3010/alm/build_tool",
"http://localhost:3010/alm/development_tool",
"http://localhost:3010/alm/project_architecture"];
// NOTE(review): module-level array — results accumulate across requests.
var responses = [];
main_router.route('/')
.get(function (req, res) {
var completed_requests = 0;
// BUG: for..in over an array iterates the indices ("0","1","2"),
// not the URL strings.
for (url in urls) {
// BUG: the callback parameter `res` shadows Express's `res`; also the
// http.get response is a stream — `res.body` is undefined (the body must
// be collected via 'data' events).
http.get(url, function(res) {
responses.push(res.body);
// BUG: `completed_request` (typo) — the counter declared above is
// `completed_requests`, so this throws a ReferenceError.
completed_request++;
if (completed_request == urls.length) {
// All download done, process responses array
}
});
}
// BUG: executes before any of the async callbacks above have fired,
// so an empty array is sent.
res.send(responses);
});
I have also tried this using npm request module.
When I run this code it only returns NULL or some random output that has only headers.
My aim is to call multiple URLs in a single Node GET request, append each JSON output to an array, and send it to the end user.
Thanks

Here, try this code,
// Fetch every URL in parallel with async.map and log the array of
// parsed JSON bodies (or the first error encountered).
const async = require('async');
const request = require('request');

// GETs `url`, asks `request` to parse the body as JSON, then invokes
// `callback(err, body)` in standard Node error-first style.
function httpGet(url, callback) {
  request({ url: url, json: true }, (err, res, body) => {
    callback(err, body);
  });
}

const urls = [
  "http://localhost:3010/alm/build_tool",
  "http://localhost:3010/alm/development_tool",
  "http://localhost:3010/alm/project_architecture"
];

// async.map runs httpGet once per URL (all in parallel) and collects the
// transformed results, in order, into `res`.
async.map(urls, httpGet, (err, res) => {
  if (err) return console.log(err);
  console.log(res);
});
Explanation :
This code uses async and request node packages. async.map by definition takes 3 params, first one being an array, second being the iterator function you want to call with each element of that array, and the callback function, called when async.map has finished processing.
map(arr, iterator, [callback])
Produces a new array of values by mapping each value in arr through
the iterator function. The iterator is called with an item from arr
and a callback for when it has finished processing. Each of these
callback takes 2 arguments: an error, and the transformed item from
arr. If iterator passes an error to its callback, the main callback
(for the map function) is immediately called with the error.
Note: All calls to iterator function are parallel.
Inside your httpGet function, you are calling request function with passed url, and explicitly telling the response format to be json. request, when finished processing, calls the callback function with three params, err - if any, res - server response, body - response body.
In case there is no err from request, async.map collects the results from these callbacks as an array, and passes that array at the end to its third, callback function. Otherwise, if (err) is true, the async.map function stops the execution and calls its callback with an err.

I suggest to use the async library.
// Run http.get for every URL in parallel; the final callback fires once
// all requests have completed (or as soon as one reports an error).
async.map(urls, http.get, function(err, responses){
  if (err){
    // handle error
  }
  else {
    // FIX: was `res.send responses` — invalid JavaScript (missing call
    // parentheses); also terminated the statements with semicolons.
    res.send(responses);
  }
});
The snippet above will perform a http.get call for each of the urls in parallel, and will call your callback function with the results of all of the calls after all the responses were received.
If you want to call the urls in series, you can use async.mapSeries instead. If you want to limit the number of concurrent requests you can use async.mapLimit.

Related

What is a Event Driven non blocking IO model in Node.js?

I don't understand what is the real difference between the codes:
const fs = require('fs');
// Asynchronous: readFile returns immediately; the callback runs later,
// once the file contents have been read into memory.
fs.readFile('/file.md', (err, data) => {
if (err) throw err;
});
const fs = require('fs');
// Synchronous: blocks the event loop until the whole file has been read.
const data = fs.readFileSync('/file.md');
Please somebody tell me what is going on here in simplified way.
In few words the difference is that the first snippet is asynchronous.
The real difference is when the code after the snippets get executed.
So if you try to execute:
const fs = require('fs');
console.log('preparing...');
// Non-blocking: execution continues to the last console.log below while
// the file is read in the background.
fs.readFile('/file.md', (err, data) => {
if (err) throw err;
console.log('I am sure that the data is read!');
console.log(data);
});
// Runs before the callback above — the read may not have finished yet.
console.log('Not sure if the data is here... ');
you'll see (if the file is big enough):
preparing...
Not sure if the data is here...
I am sure that the data is read!
$data
In the other case (the readFileSync), the data will be there (unless of errors).
Take look at this example
var express = require('express');
var fs = require('fs');
var app = express.createServer(express.logger());

// Async version: the response is sent from readFile's callback once the
// file data is available.
app.get('/readFile', function(request, response) {
  fs.readFile('data.txt', function(err, data){
    response.send(data);
  });
});

// Sync version: readFileSync blocks until the file is read, then we respond.
// FIX: `response.send(data)` was outside the route handler (stray closing
// brace), where both `response` and `data` are out of scope — moved inside.
app.get('/readFileSync', function(request, response) {
  let data = fs.readFileSync('data.txt');
  response.send(data);
});
fs.readFile takes a call back which calls response.send. If you simply replace that with fs.readFileSync, you need to be aware it does not take a callback so your callback which calls response.send will never get called and therefore the response will never end and it will timeout.
You need to show your readFileSync code if you're not simply replacing readFile with readFileSync.
Also, just so you're aware, you should never call readFileSync in a node express/webserver since it will tie up the single thread loop while I/O is performed. You want the node loop to process other requests until the I/O completes and your callback handling code can run. Though you can use the promise to handle this.
And from v10.0.0 The callback parameter is no longer optional for readFile. Not passing it will throw a TypeError at runtime.

Node request module Http.IncomingMessage not emitting events

According to this link: request - Node
The callback argument gets 3 arguments:
An error when applicable (usually from http.ClientRequest object) An
http.IncomingMessage object The third is the response body (String or
Buffer, or JSON object if the json option is supplied)
Code:
var r = require("request");
var options= {
url: "http://www.example.com/"
};
// When a callback is passed, `request` buffers the entire response body
// itself; by the time this runs the stream has already been consumed.
var callback = function (err, res, body) {
if (!err && res.statusCode == 200) {
// These listeners never fire: the 'data'/'finish' events were emitted
// (and handled internally by request) before this callback was invoked.
res.on("data", function(chunk) {
console.log("DATA : "+chunk);
});
res.on("finish", function() {
console.log("FINISHED");
});
// Works: `body` already holds the fully-buffered response.
console.log(body);
}
};
r(options, callback);
But in the above code, only the console.log(body) works, the event emitters don't.
Also, if the callback is invoked only when the whole response body is available, then what's the point of making the second argument an http.IncomingMessage (Readable Stream) when I can't stream it?
When you pass a callback like that, request buffers the entire response for you and that is what is available in body. Because of this, that means you won't see data and such events on res, because they've already been taken care of by request.
It looks like you're mixing two different ways to use the 'request' module. Depending on preference you can use either the callback approach or the streaming approach.
The callback approach involves passing a function as well as the options and when all the data is received it will call the callback function.
The streaming approach allows you to attach listeners to the events such as 'response'. I'm guessing you've mixed this code in from an example from receiving http requests and sending a response with a node server as I can't see any reference to 'data' and 'finish' events in the docs for the request module.

Node Express 4 send a response after multiple API calls

I am building a NodeJS server using Express4. I use this server as a middleman between frontend angular app and 3rd party API.
I created a certain path that my frontend app requests and I wish on that path to call the API multiple times and merge all of the responses and then send the resulting response.
I am not sure how to do this as I need to wait until each API call is finished.
Example code:
app.post('/SomePath', function(req, res) {
var merged = [];
for (var i in req.body.object) {
// APIObject.sendRequest uses superagent module to handle requests and responses
APIObject.sendRequest(req.body.object[i], function(err, result) {
merged.push(result);
});
}
// BUG (the asker's problem): this runs before the async callbacks above
// have completed, so `merged` is still empty when the response is sent.
// After all is done send result
res.send(merged);
});
As you can see Im calling the API within a loop depending on how many APIObject.sendRequest I received within request.
How can I send a response after all is done and the API responses are merged?
Thank you.
Check out this answer, it uses the Async module to make a few requests at the same time and then invokes a callback when they are all finished.
As per #sean's answer, I believe each would fit better than map.
It would then look something like this:
var async = require('async');

// Fire sendRequest for every posted item in parallel; the final callback
// runs once, after all items finish (or immediately on the first error).
async.each(req.body.object, function(item, callback) {
  // FIX: was `function(err, result)) {` — a misplaced parenthesis made
  // this a syntax error, and the sendRequest(...) call was never closed
  // with `);`.
  APIObject.sendRequest(item, function(err, result) {
    if (err)
      callback(err);
    else
    {
      merged.push(result);
      callback();
    }
  });
}, function(err) {
  if (err)
    res.sendStatus(500); //Example
  else
    res.send(merged);
});
First of all, you can't just call an async method inside a plain loop and use its results immediately afterwards — that won't work.
You can use the async module's map function.
// Map every posted object through APIObject.sendRequest (requests run in
// parallel) and reply with the collected results, or a 500 on the first
// failure.
app.post('/SomePath', function(req, res) {
  async.map(req.body.object, APIObject.sendRequest, function(err, result) {
    if (err) {
      // One of the requests failed — report a server error.
      res.status(500).send('Something broke!');
    } else {
      // `result` holds one entry per input object, in order.
      res.send(result);
    }
  });
});

Meteor method with HTTP request does not return callback

I am trying to create a Meteor method with a HTTP get request. I am getting back a result, but I can't get my callback on the client side to return the result. The callback needs to wait for the HTTP request to get back a result before it returns the callback. I am getting the data successfully from the HTTP request, so that is not the problem.
Any suggestions on how to get this working?
Meteor.methods({
getYouTubeVideo: function (id) {
check(id, String);
var params = {
part:'snippet, status, contentDetails',
id:id,
key:Meteor.settings.youtube.apiKey
};
// Problem: this is the asynchronous form of HTTP.get, so the method body
// finishes (returning undefined to the client) before this callback runs.
HTTP.get('https://www.googleapis.com/youtube/v3/videos', {timeout:5000, params:params}, function(error, result){
if (error) {
throw new Meteor.Error(404, "Error: " + error);
// Unreachable: the throw above has already exited the callback.
return;
}
console.log(result);
// Returns from the anonymous callback only — NOT from getYouTubeVideo.
return result;
});
}
});
You need to use the synchronous version of HTTP.get, just like this :
// Synchronous (server-only) form: blocks inside the method until the
// request completes, so the result is returned to the method's caller.
var result=HTTP.get('https://www.googleapis.com/youtube/v3/videos', {timeout:5000, params:params});
return result;
If you use the asynchronous version with a callback like you did, you're facing the common problem of having to try returning the result in the callback (which won't work) instead of in the method, which is what you should do.
Note that synchronous HTTP.get is only available in the server-environment, so put your method declaration under server/

Express | Making multiple http requests asynchronously

I am currently building a small node application that makes a few api calls and renders a webpage with charts on it. I'm using express and jade as the render engine.
The problem is that I'm quite new to javascript and I don't know how to scheme out my http requests so I can pass an object of variables I got from the api (http get) when there is more than one request. I don't know how to map it out to make a single object and send it to the jade rendering engine.
Here is what I have so far :
app.get('/test', function(req, res) {
apiRequestGoesHere(name, function(error, profile) {
//Get some data here
});
anotherApiRequest(tvshow, function(error, list) {
//Get some data here
});
// Problem: both requests above are still in flight when this executes.
res.render('test', data);
});
As it is right now, the page renders and the requests are not done yet, and if I place res.render inside one of the request, I can't access the other's data.
So what I want is a way to set it up so I can have multiple api calls, then make an object out of some elements of what is returned to me from the rest api and send it to Jade so I can use the data on the page.
You probably want to use async to help with this. async.parallel is a good choice for something simple like this:
app.get('/test', function(req, res) {
// Run both tasks concurrently; the final function fires after both call next().
async.parallel([
function(next) {
apiRequestGoesHere(name, function(error, profile) {
//Get some data here
// NOTE: forwards null as the error on purpose here; pass `error`
// instead if a failure should short-circuit (see the note below).
next(null, firstData);
});
},
function(next) {
anotherApiRequest(tvshow, function(error, list) {
//Get some data here
next(null, secondData);
});
}], function(err, results) {
// results is [firstData, secondData]
res.render('test', ...);
});
});
The first argument to those functions next should be an error if relevant (I put null) - as soon as one is called with an error, the final function will be called with that same error and the rest of the callbacks will be ignored.
You can async parallel.
// Skeleton: each task receives a callback it must invoke exactly once.
async.parallel([
function(callback){
// Make http requests
// Invoke callback(err, result) after http request success or failure
},
function(callback){
// Make http requests
// Invoke callback(err, result) after http request success or failure
}
],
// optional callback
function(err, results){
// the results array will be array of result from the callback
});
The reason your page renders is the callbacks haven't "called back" yet. To do what you want, you would need to do something like:
app.get('/test', function(req, res) {
apiRequestGoesHere(name, function(error, profile) {
//Get some data here
// The second request starts only after the first completes (serial).
anotherApiRequest(tvshow, function(error, list) {
//Get some data here
// Both callbacks have fired by this point, so rendering here is safe.
res.render('test', data);
});
});
});
This strategy leads to what is known as "pyramid code" because your nested callback functions end up deeper and deeper.
I would also recommend the step library by Tim Caswell. It would make your code look something like:
var step = require('step');

// step runs the functions in sequence; passing `this` as a callback
// forwards that step's (err, ...) results to the next function.
app.get('/test', function(req, res) {
  step(
    function () {
      apiRequestGoesHere(name, this);
    },
    function (error, profile) {
      // FIX: was `if error throw error;` — missing parentheses around the
      // condition, which is a syntax error in JavaScript.
      if (error) throw error;
      anotherApiRequest(tvshow, this);
    },
    function done(error, list) {
      if (error) throw error;
      res.render('test', list);
    }
  );
});
You could also use the group method to make the calls in parallel and still maintain the sequence of your callbacks.
Gl,
Aaron

Categories