I usually work with other programming languages, but I need to implement a bit of Node.js code, which I am entirely new to. For now, I just want the result of my Azure Function app to actually depend on my API call. I put the HTTP call into a promise and am waiting for the result:
module.exports = async function (context, req) {
context.res = {
body: {
"data": [{
"value": "start"
}]
}
};
await callapi(context);
};
function callapi(context){
var options = {
host: 'jsonplaceholder.typicode.com',
port: 443,
path: '/todos/1',
method: 'GET'
};
var p1 = new Promise(
function(resolve, reject) {
callback = function(httpres) {
var str = '';
response.on('error', function (err) {
context.res = {body: {"data": [{"value": "error"}]}};
});
httpres.on('data', function (chunk) {
str += chunk;
});
httpres.on('end', function () {
resolve(str);
});
}
https.request(options, callback).end();
}
);
p1.then(function(reqhtml) {
context.res = {
body: {
"data": [{
"value": "it worked"
}]
}
};
})
}
My expectation was that it would, depending on whether the server could be reached, either return (a context with) the value "it worked" or "error"; however, it doesn't wait for the promise and just returns "start".
How do I wait for the async function in Azure? I do not have code outside of this; this function is only called via another API, where I can only manipulate results in a limited way through a graphical user interface. Is there a way to force Node.js to wait inside this function?
I've cleaned up your code a bit.
const https = require('https');

module.exports = async function (context, req) {
const value = await callapi();
context.res = {
body: {
data: [{
value
}]
}
};
};
// this function does not need any parameters,
// its sole job is to make an https request to a static endpoint
// and return a value based on whether there was an error or not
function callapi(){
var options = {
host: 'jsonplaceholder.typicode.com',
port: 443,
path: '/todos/1',
method: 'GET'
};
return new Promise(function(resolve, reject) {
const req = https.request(options, res => {
let str = "";
res.on('data', function(chunk) {
str += chunk;
});
res.on('end', function() {
resolve(str);
});
});
req.on('error', reject);
req.end();
})
.then(() => "it worked")
.catch(() => "error");
}
For now, I just want the result of my Azure Function app to actually depend on my API call.
1st: Since your callapi does not return anything, await callapi() will await undefined, and that resolves on the next tick (basically, immediately after all current synchronous code has been executed and long before the server could have sent any data back to you); see the short sketch after this list.
2nd: Mutating objects is frowned upon, because it is (mentally) hard to keep track of all the places that are potentially affected by the change, and therefore dangerous. Better to return a copy with the changes.
3rd: Keep it simple: callapi does nothing that would require it to know anything about the context.
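To illustrate the first point, here is a small sketch (my addition, not part of the answer's code): awaiting a function that returns nothing resolves to undefined immediately, while awaiting a returned promise actually waits.
async function demo() {
    function noReturn() {
        // kicks off async work but returns nothing, so the caller awaits undefined
        setTimeout(() => console.log('finished much later'), 1000);
    }

    function withPromise() {
        return new Promise(resolve => setTimeout(() => resolve('done'), 1000));
    }

    console.log(await noReturn());    // logs "undefined" right away
    console.log(await withPromise()); // logs "done" after roughly one second
}

demo();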
The NPM library deasync solves the problem:
module.exports = async function (context, req) {
var uri = <uri>;
var source = get_source_at(uri)
context.res = { body: { "data": [{"value": source}] } };
};
function get_source_at(uri){
var request = require("request");
var deasync = require("deasync");
var source;
request({ uri:uri, headers: <headers>}
, function (error, response, body) {
source = body;
});
while(source === undefined) { //wait until async HTTPS request has finished
deasync.runLoopOnce();
}
return source;
}
Related
I'm currently working on writing a function on AWS Lambda. I want to convert an RSS feed into JSON and return that in the response body when a request is made to the Lambda endpoint.
I'm using an npm package to convert the RSS to JSON. However, when I run the code, I see that I get undefined from the conversion of the RSS URL. Here is the code:
const feed = require('rss-to-json');
exports.handler = async (event) => {
let rssFeed = event.queryStringParameters.rssFeed;
let rssAsJsonData = convertRssIntoJson(rssFeed);
return sendRes(200, rssAsJsonData);
};
const sendRes = (status, body) => {
var response = {
isBase64Encoded: true|false,
statusCode: status,
headers: {
"Content-Type": "application/json"
},
body: body,
};
return response;
};
function convertRssIntoJson (rssFeed) {
console.log(rssFeed);
return feed.load(rssFeed, function(err, rss){
if(err) {
console.log("Error: ${err}");
return;
}
console.log(rss)
return rss;
});
};
However, in the logs I get undefined when I console.log(rssAsJsonData).
When debugging, I was able to see console.log(rss) working when I changed the body to body: JSON.stringify("TESTING").
However, it only worked when logging to the console, not when I tried to pass it to the body (body: body). I can't seem to find what the error is. I'm moving from Ruby to JavaScript for this project, so maybe I'm missing something.
I'm using Postman to make the calls:
function convertRssIntoJson (rssFeed) {
console.log(rssFeed);
return feed.load(rssFeed, function(err, rss){
if(err) {
console.log("Error: ${err}");
return;
}
console.log(rss)
return rss;
});
};
The piece of code above uses a callback. Under the hood, feed.load is asynchronous, which means your callback is executed asynchronously.
Now, when you invoke your function like this
let rssAsJsonData = convertRssIntoJson(rssFeed);
the rss object inside convertRssIntoJson does not hold any value yet, because the callback has not been invoked yet. This is where your undefined comes from.
Callbacks themselves don't make code asynchronous by default, but NodeJS works with a non-blocking IO model and, since feed.load is an IO call, it will be executed asynchronously.
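A tiny illustration of that point (my addition): a callback can run synchronously, but an IO-style callback like the one feed.load receives is always deferred until the current synchronous code has finished.
// Synchronous callback: runs before the next line is reached.
[1, 2, 3].forEach(n => console.log('sync callback', n));

// Asynchronous callback: runs later, which is why rssAsJsonData
// is still undefined at the time the handler returns.
console.log('before');
setTimeout(() => console.log('async callback, runs last'), 0);
console.log('after');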
You have a few options now, but I will list only two. A not-so-nice and a nice solution:
1) The not-so-nice way to fix it is to add a callback as argument to your convertRssIntoJson function and pass the value of that rss object upstream. The not-so-nice full code can be found below:
const feed = require('rss-to-json');
// The handler cannot be async here: the result is produced inside a
// callback, so it is passed out through Lambda's callback parameter instead.
exports.handler = (event, context, callback) => {
    let rssFeed = event.queryStringParameters.rssFeed;
    convertRssIntoJson(rssFeed, (err, data) => {
        if (err) {
            return callback(null, sendRes(500, { message: 'There was an err: ' + err.message }));
        }
        return callback(null, sendRes(200, data));
    });
};
const sendRes = (status, body) => {
var response = {
isBase64Encoded: true | false,
statusCode: status,
headers: {
"Content-Type": "application/json"
},
body: body,
};
return response;
};
const convertRssIntoJson = (rssFeed, callback) => {
    console.log(rssFeed);
    feed.load(rssFeed, function (err, rss) {
        if (err) {
            console.log(`Error: ${err}`);
            return callback(err, undefined);
        }
        console.log(rss);
        callback(undefined, rss);
    });
};
2) The nice, clean, elegant and recommended solution is this one. Wrap your callback in a Promise, like this
function convertRssIntoJson(rssFeed) {
console.log(rssFeed);
return new Promise((res, rej) => {
feed.load(rssFeed, function (err, rss) {
if (err) {
console.log("Error: ${err}");
return rej(err)
}
console.log(rss)
return res(rss)
});
})
};
Since your handler is async, it can simply await Promises.
So your client code is now as simple as:
return sendRes(200, await convertRssIntoJson(rssFeed));
Your final code will look like this (I have refactored a little to make use of arrow functions):
const feed = require('rss-to-json');
exports.handler = async (event) => {
let rssFeed = event.queryStringParameters.rssFeed;
return sendRes(200, await convertRssIntoJson(rssFeed));
};
const sendRes = (status, body) => {
var response = {
isBase64Encoded: true | false,
statusCode: status,
headers: {
"Content-Type": "application/json"
},
body: body,
};
return response;
};
const convertRssIntoJson = (rssFeed) => {
console.log(rssFeed);
return new Promise((res, rej) => {
feed.load(rssFeed, (err, rss) => {
if (err) {
console.log("Error: ${err}");
return rej(err)
}
console.log(rss)
return res(rss)
});
})
};
If you want to know more about async/await, you can read about it here.
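As an aside (my addition, not part of the original answer): because the promise can also reject, the handler could wrap the await in try/catch to return an error response instead of throwing. A minimal sketch, reusing the sendRes and convertRssIntoJson from above:
exports.handler = async (event) => {
    const rssFeed = event.queryStringParameters.rssFeed;
    try {
        return sendRes(200, await convertRssIntoJson(rssFeed));
    } catch (err) {
        // convertRssIntoJson rejects when feed.load reports an error
        return sendRes(500, { message: 'Could not convert feed: ' + err.message });
    }
};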
EDIT: Code refactor and code added for solution 1)
I want to mock the result of a function within a Node module so that I can run assertions.
Considering the following node module:
const doPostRequest = require('./doPostRequest.js').doPostRequest;
const normalizeSucessResult = require('./normalizer.js').normalizeSucessResult;
const normalizeErrorResult = require('./normalizer.js').normalizeErrorResult;
exports.doPost = (params, postData) => {
return doPostRequest(params, postData).then((res) => {
const normalizedSuccessResult = normalizeSucessResult(res);
return normalizedSuccessResult;
}).catch((err) => {
const normalizedErrorResult = normalizeErrorResult(err);
return normalizedErrorResult;
})
}
The function doPostRequest returns a promise. How can I fake the return value of this promise so that I can assert whether normalizeSucessResult has been called?
So far I have tried:
const normalizeSucessResult = require('./normalizer.js');
const doPostRequest = require('./doPostRequests.js');
const doPost = require('./doPost.js');
it('runs a happy flow scenario', async () => {
let normalizeSucessResultStub = sinon.stub(normalizeSucessResult, 'normalizeSucessResult');
let postData = { body: 'Lorum ipsum' };
let params = { host: 'someUrl', port: 433, method: 'POST', path: '/' };
sinon.stub(doPostRequest, 'doPostRequest').resolves("some response data"); //Fake response from doPostRequest
return doPost.doPost(params, postData).then((res) => { //res should be equal to some response data
expect(normalizeSucessResultStub).to.have.been.calledOnce;
expect(normalizeSucessResultStub).to.have.been.with("some response data");
});
});
The doPostRequest module looks like this:
const https = require('https')
module.exports.doPostRequest = function (params, postData) {
return new Promise((resolve, reject) => {
const req = https.request(params, (res) => {
let body = []
res.on('data', (chunk) => {
body.push(chunk)
})
res.on('end', () => {
try {
body = JSON.parse(Buffer.concat(body).toString())
} catch (e) {
reject(e)
}
resolve(body)
})
})
req.on('error', (err) => {
reject(err)
})
if (postData) {
req.write(JSON.stringify(postData))
}
req.end()
})
}
You can use Promise.resolve to return a promise with any given value.
Promise.resolve("hello world");
To stub your function, you need to do it like this:
sinon.stub({doPostRequest}, 'doPostRequest').resolves("some response data")
Okay, I figured it out. The function doPostRequest was loaded with require at the top of the file, using const doPostRequest = require('./doPostRequest.js').doPostRequest;
In order to mock the data that comes back from a function that is loaded using require, I had to use a Node module called mock-require. There are more modules that can take care of this (proxyquire is a popular one), but I picked mock-require (I did not have a specific reason for choosing it).
For anyone else that is stuck with a similar problem: try mock-require to mock the response from files that are loaded using require. A minimal sketch follows below.
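Here is how that could look with mock-require (my addition; the mocked value and file layout are assumptions based on the snippets above):
const mock = require('mock-require');

// Register the mock before requiring doPost.js, so that its
// require('./doPostRequest.js') call picks up the fake implementation.
mock('./doPostRequest.js', {
    doPostRequest: () => Promise.resolve('some response data')
});

const doPost = require('./doPost.js');

it('runs a happy flow scenario', async () => {
    const res = await doPost.doPost({ host: 'someUrl' }, { body: 'Lorum ipsum' });
    // res is the normalized success result built from "some response data"
});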
I am currently trying to fetch data from the Spotify API with promises, and yesterday I got tremendous help with another question regarding the same topic: "Loop the object returned from node promise and feed to the next .then".
What I do is first get the tracks from my playlist, then call another API which fetches the artists. Lastly I call another API which gets the artist images.
Now my question is: how do I return the data that I get from my promises?
This is my function that gets the playlist urls:
function getPlaylists(access_token) {
var options = {
url: 'https://api.spotify.com/v1/me/playlists',
headers: { 'Authorization': 'Bearer ' + access_token },
json: true
};
return new Promise(function(resolve, reject) {
request.get(options, function(error, response, body) {
var playlists = body.items;
var playlistArray = [];
playlists.forEach(function(playlist) {
var name = playlist.name;
var url = playlist.tracks.href;
playlistArray.push(url);
});
if(!error) {
resolve(playlistArray);
} else {
reject(error);
}
});
});
}
This one gets the artists:
function getArtists(url,access_token) {
var params = {
url: url,
headers: { 'Authorization': 'Bearer ' + access_token },
json: true
};
return new Promise(function(resolve, reject) {
request.get(params, function(error, response, body) {
var tracks = body.items;
var artistArray = [];
tracks.forEach(function(artists) {
let allArtists = artists.track.artists;
allArtists.forEach(function(artist) {
artistArray.push(artist);
});
})
if(!error) {
resolve(artistArray);
} else {
reject(error);
}
});
})
}
And this one gets the artist image:
function getArtistImages(artistId) {
var options = {
url: 'https://api.spotify.com/v1/artists/' + artistId,
json: true
};
return new Promise(function(resolve, reject) {
request.get(options, function(error, response, body) {
if(error != null) {
reject(error);
} else {
resolve(body);
}
});
})
}
EDIT:
The way I call these functions is like this:
getPlaylists(access_token)
.then(playlists => Promise.all(playlists.map(playlist =>
getArtists(playlist, access_token)))
.then(artists => {
artists.map(artist => {
artist.map(a => {
console.log(a);
let component = renderToString(
<App>
<Table artists={a} />
</App>
);
res.send(
component
)
})
})
}));
It only returns the first result, obviously because it only gets through the loop once before res.send(), so how do I make sure that it loops through all artists before I render the view? I believe I have to do another Promise.all(), but I am not sure where. Does anyone have an idea?
I appreciate it :)
It's an old post, but I think this can be helpful for someone.
I wrote a plugin to perform a forEach based on promises, with support for concurrency. I see you don't need concurrency, which makes it simpler to apply other solutions as well.
I rewrote your code using my plugin. It works!
'use strict';
var request = require('request')
var promiseForeach = require('promise-foreach')
function getPlaylists(access_token) {
var options = {
url: 'https://api.spotify.com/v1/me/playlists',
headers: { 'Authorization': 'Bearer ' + access_token },
json: true
};
return new Promise(function (resolve, reject) {
request.get(options, function (error, response, body) {
var playlists = body.items;
var playlistArray = [];
playlists.forEach(function (playlist) {
var name = playlist.name;
var url = playlist.tracks.href;
playlistArray.push(url);
});
if (!error) {
resolve(playlistArray);
} else {
reject(error);
}
});
});
}
function getArtists(url, access_token) {
var params = {
url: url,
headers: { 'Authorization': 'Bearer ' + access_token },
json: true
};
return new Promise(function (resolve, reject) {
request.get(params, function (error, response, body) {
var tracks = body.items;
var artistArray = [];
tracks.forEach(function (artists) {
let allArtists = artists.track.artists;
allArtists.forEach(function (artist) {
artistArray.push(artist);
});
})
if (!error) {
promiseForeach.each(artistArray,
[function (artist) {
return getArtistImages(artist.id)
}],
function (arrayOfResultOfTask, currentList) {
return {
artistId: currentList.id,
artistName: currentList.name,
artistImages: arrayOfResultOfTask[0].images
}
},
function (err, newList) {
if (err) {
console.error(err)
return;
}
resolve(newList)
})
} else {
reject(error);
}
});
})
}
function getArtistImages(artistId) {
var options = {
url: 'https://api.spotify.com/v1/artists/' + artistId,
headers: { 'Authorization': 'Bearer ' + access_token },
json: true
};
return new Promise(function (resolve, reject) {
request.get(options, function (error, response, body) {
if (error != null) {
reject(error);
} else {
resolve(body);
}
});
})
}
var access_token = 'YOUR-TOKEN';
getPlaylists(access_token)
.then(playlists => {
promiseForeach.each(playlists,
[function (playlist) {
return getArtists(playlist, access_token)
}],
function (arrayOfResultOfTask, currentList) {
return {
playlistURL: currentList,
artist: arrayOfResultOfTask[0]
}
//return renderToString(
// <App>
// <Table artists={render} />
// </App>
//);
},
function (err, newList) {
if (err) {
console.error(err)
return;
}
res.send(newList)
})
});
The plugin: https://www.npmjs.com/package/promise-foreach
I hope it helps someone!
If I understand your problem correctly, it sounds like you are tripping up on how to actually use the result of Promises.
Asynchronous applications:
Promises encapsulate an asynchronous result, which is made available through the callback passed to then().
This asynchronicity will cascade throughout your application.
There are a number of ways applications manage the reality of asynchronous results: events, callbacks, observables, promises...
Example (using events):
This is a crude and untested example of how the data from an asynchronous request can get injected into your view. When my asynchronous request calls my then() callback, I update my model which triggers an event to re-render my view.
There are certainly issues with this example (e.g. what if I don't want to re-render my whole view? What if getArtists() returns sooner than my view can render its loading state?). But for simplicity, we won't go there.
+function(){
var _view = $('#viewport');
var _model = {...}
var _spotifyClient = new SpotifyClient(); // this contains method similar to those you included in your question
_view.on('load', onLoad);
_view.on('model:update', onModelUpdate);
function onLoad() {
_spotifyClient
.getArtists()
.then(function(result) {
// when the getArtists() request has responded, I can update my model.
updateModel({ isLoading: false, artists: result });
})
// this will happen immediately after starting the "getArtists()" request.
updateModel({ isLoading: true });
}
function updateModel(mod) {
for(var p in mod) {
_model[p] = mod[p];
}
_view.trigger('model:update', _model);
}
function onModelUpdate(model) {
refreshView();
}
function refreshView() {
var viewModel = buildTemplateModel(_model);
renderTemplate(_view, viewModel);
}
}();
I encourage you to research some view frameworks such as Angular, Knockout, or React. I also encourage you to research Reactive Extensions, which provides an observable interface and many utilities for working with asynchronous streams.
Note on side effects:
Having the result of a promise trigger an event can be classified as a "side effect". You should keep side effects to a minimum in your application, and they really only belong in the controller / main part of your application.
If you are using promises in your application, reusable library classes and functions, that operate on promises, should return promises.
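A small sketch of that last point, reusing names from the example above (my addition, illustrative only): the reusable function just returns the promise, and the side effect stays in the calling code.
// Reusable library code: no side effects, just return the promise.
function getArtistNames(spotifyClient) {
    return spotifyClient.getArtists().then(artists => artists.map(a => a.name));
}

// Controller / main code: the side effect (updating the model) lives here.
getArtistNames(_spotifyClient).then(names => {
    updateModel({ isLoading: false, artistNames: names });
});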
You need to call res.send on the array of all results, not on each one separately:
getPlaylists(access_token).then(playlists =>
Promise.all(playlists.map(playlist =>
getArtists(playlist, access_token)
))
).then(artists => {
res.send(artists.map(artist =>
artist.map(a => {
console.log(a);
return renderToString(
<App>
<Table artists={a} />
</App>
);
})
))
});
Also, your parentheses were slightly misnested (they didn't match the indentation of the then calls), but that didn't cause the problem.
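A side note (my addition, not part of the original answer): Promise.all resolves with the results in the same order as the input array, regardless of which request finishes first, which is what makes this approach safe. A minimal sketch:
const delayed = (value, ms) =>
    new Promise(resolve => setTimeout(() => resolve(value), ms));

Promise.all([delayed('first', 300), delayed('second', 100), delayed('third', 200)])
    .then(results => console.log(results)); // ['first', 'second', 'third']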
I am learning Node. I have a console app that must make requests to web services in order. Specifically, I need to make three requests in order. In an attempt to make these requests, I'm using the built-in HTTPS module. I have one request successfully executing. But, I need to make three in succession. I'm not sure how to do this. Right now, I have:
console.log('Running Request #1...');
var options = {
host: 'example.com',
path: '/api/service',
port: 443,
method: 'GET',
headers: {
'api-key': '[Hidden]'
}
};
var req = https.request(options, (res) => {
res.on('data', (d) => {});
});
req.end();
req.on('error', (e) => {
console.error(e);
});
I'm not sure how to call my three requests in order while, at the same time, gracefully handling errors. If I had promises, I would know how to chain them together and just use the catch handler. But I'm not sure how to chain the requests together, since the HTTPS module uses the arrow function syntax.
Any help is appreciated.
Try this:
var https = require('https');
var urls = ['url1', 'url2', 'url3'];
var request = function(url) {
console.log(url);
return new Promise((resolve, reject) => {
https.get(url, (res) => {
res.on('end', () => {
resolve('what');
});
res.on('data', data =>{
});
}).on('error', e => {
reject(e);
});
});
};
var promise = request(urls.shift());
while(urls.length > 0) {
let url = urls.shift();
promise = promise.then(function() {
return request(url);
});
}
promise.catch(e => console.log(e));
Consider using promises with reduce, something like this:
var urls=['u1','u2','u3'];
var er=0
function getPromise(url) {
return new Promise(function (resolve,reject) {
setTimeout(function () {
console.log(url+ " is resolved in 2 sec")
er++
if(er==1)
{
reject(url)
}else{
resolve(url)
}
},2000)
})
}
urls.reduce(function (pre,cur) {
return pre.then(function () {
return getPromise(cur)
})
},new Promise(function (resolve,reject) {
resolve(null)
}))
.then(function (result) {
console.log("final result is "+result)
},function (e) {
console.log("something wrong happens : "+e)
})
Play with the code; I think it is what you want. A sketch of the same pattern with real http.get calls follows below.
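Here is that sketch, applying the same reduce chaining to real http.get calls (my addition; the URLs are placeholders):
var http = require('http');

function getPromise(url) {
    return new Promise(function (resolve, reject) {
        http.get(url, function (res) {
            var body = '';
            res.on('data', function (chunk) { body += chunk; });
            res.on('end', function () { resolve(body); });
        }).on('error', reject);
    });
}

var urls = ['http://example.com/1', 'http://example.com/2', 'http://example.com/3'];

urls.reduce(function (pre, cur) {
    // wait for the previous request, then start the next one,
    // collecting the bodies in order
    return pre.then(function (results) {
        return getPromise(cur).then(function (body) {
            return results.concat(body);
        });
    });
}, Promise.resolve([]))
    .then(function (results) {
        results.forEach(function (r) { console.log(r); });
    }, function (e) {
        console.log("something wrong happened: " + e);
    });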
I am doing the nodeschool exercises. The problem statement is:
This problem is the same as the previous problem (HTTP COLLECT) in
that you need to use http.get(). However, this time you will be
provided with three URLs as the first three command-line arguments.
You must collect the complete content provided to you by each of the
URLs and print it to the console (stdout). You don't need to print out
the length, just the data as a String; one line per URL. The catch is
that you must print them out in the same order as the URLs are
provided to you as command-line arguments.
In other words, I am to make 3 http.get requests and print the data received from them in order.
I am trying to do it with promises, so that another get request won't be called until the first one has ended.
My code looks like this:
var http=require("http");
var collect=[];
var dat=[];
for( var i = 2 ; i < process.argv.length;i++){
collect.push(process.argv[i]);
}
function chainIt(array,callback){
return array.reduce(function(promise,item){
return promise.then(function(){
return callback(item)
})
},Promise.resolve())
}
function getIt(item){
return http.get(item,function(response){
response.on("data",function(data){
dat.push(data);
})
})
}
chainIt(collett,function(item){
return getIt(item)
})
}).then(function(){
collect.forEach(function(x){
console.log(x);
})
})
But I actually print no data, so I fail the exercise.
I do not see any bug here, but I'm just starting with promises and Node. Where is the mistake?
For educational purposes, I recently wrote a wrapper for the http and https modules that uses native Promises. That said, I recommend using a library such as request; it makes things simpler, has unit test coverage, and is maintained by the open source community. Also, my wrapper does a naive string concatenation with the response chunks, which I'm not convinced is the most performant way of building up the response body (see the alternative sketch after the wrapper code below).
FYI: this requires Node.js 4 or above, although the methodology is pretty much the same in Node 0.x.x.
'use strict';
const http = require('http');
const https = require('https');
const url = require('url');
module.exports = {
get(url) {
return this._makeRequest('GET', url);
},
_makeRequest(method, urlString, options) {
// create a new Promise
return new Promise((resolve, reject) => {
/* Node's URL library allows us to create a
* URL object from our request string, so we can build
* our request for http.get */
const parsedUrl = url.parse(urlString);
const requestOptions = this._createOptions(method, parsedUrl);
// pick http or https based on the protocol so both kinds of URL work
const client = parsedUrl.protocol === 'https:' ? https : http;
const request = client.get(requestOptions, res => this._onResponse(res, resolve, reject));
/* if there's an error, then reject the Promise
* (can be handled with Promise.prototype.catch) */
request.on('error', reject);
request.end();
});
},
// the options that are required by http.get
_createOptions(method, url) {
return {
hostname: url.hostname,
path: url.path,
port: url.port,
method
};
},
/* once http.get returns a response, build it and
* resolve or reject the Promise */
_onResponse(response, resolve, reject) {
const hasResponseFailed = response.statusCode >= 400;
var responseBody = '';
if (hasResponseFailed) {
reject(`Request failed with HTTP ${response.statusCode}`);
}
/* the response stream's (an instance of Stream) current data. See:
* https://nodejs.org/api/stream.html#stream_event_data */
response.on('data', chunk => responseBody += chunk.toString());
// once all the data has been read, resolve the Promise
response.on('end', () => resolve(responseBody));
}
};
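Regarding the naive string concatenation mentioned above, here is a hedged alternative sketch for the _onResponse step (my addition, not part of the original wrapper): collect raw Buffer chunks and concatenate them once at the end.
// Drop-in alternative to the wrapper's _onResponse above.
function onResponseBuffered(response, resolve, reject) {
    if (response.statusCode >= 400) {
        reject(`Request failed with HTTP ${response.statusCode}`);
        return;
    }
    const chunks = [];
    // collect raw Buffer chunks and concatenate once the stream ends
    response.on('data', chunk => chunks.push(chunk));
    response.on('end', () => resolve(Buffer.concat(chunks).toString()));
    response.on('error', reject);
}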
EDIT: I only just realised you're new to Promises. Here's an example of how to use this wrapper:
'use strict';
const httpService = require('./httpService'); // the above wrapper
// get one URL
httpService.get('https://ron-swanson-quotes.herokuapp.com/v2/quotes').then(function gotData(data) {
console.log(data);
});
// get multiple URLs
const urls = [
'https://ron-swanson-quotes.herokuapp.com/v2/quotes',
'http://api.icndb.com/jokes/random'
];
/* map the URLs to Promises. This will actually start the
* requests, but Promise.prototype.then is always called,
* even if the operation has resolved */
const promises = urls.map(url => httpService.get(url));
Promise.all(promises).then(function gotData(responses) {
/* responses is an array containing the result of each
* Promise. This is ordered by the order of the URLs in the
* urls array */
const swansonQuote = responses[0];
const chuckNorrisQuote = responses[1];
console.log(swansonQuote);
console.log(chuckNorrisQuote);
});
Using Promise.all is the most efficient solution for this. You could also use async/await, as shown below, to solve it.
const http = require('http');
const bl = require('bl');
async function httpGet(url) {
return new Promise((resolve, reject) => {
http.get(url, response => {
response.setEncoding('utf8');
response.pipe(bl((err, data) => {
if (err) {
reject(err);
}
resolve(data.toString());
}));
});
});
}
async function main() {
const data1 = await httpGet(process.argv[2]);
const data2 = await httpGet(process.argv[3]);
const data3 = await httpGet(process.argv[4]);
console.log(data1);
console.log(data2);
console.log(data3);
}
main();
I don't think this exercise was meant to be solved with promises. I found my old exercise folder; this is how I did it without promises or extra libraries:
var http = require('http');
var urls = process.argv.slice(2);
// counts the number of requests done
var done = 0;
// stores the requests result
var result = [];
// this will be called by each http.get and they will provide their index
function callback(index, data) {
result[index] = data;
done++;
// all requests are done, log everything
if (done == urls.length) {
result.forEach(console.log);
}
}
function processUrl(url, index) {
var finalData = '';
http.get(url, function(response) {
response.setEncoding('utf8');
response.on('data', function(data) {
finalData += data;
});
response.on('error', console.error);
response.on('end', function() {
// console.log(finalData);
callback(index, finalData);
})
});
}
urls.forEach(processUrl);
Don't worry, you'll have enough promises to play with in the promise-it-wont-hurt workshop.
A bit late to the party here :)
Unfortunately, none of the answers here uses the built-in util module in Node.
Here is how to promisify http.get using the util module; it works correctly with TypeScript:
import util from "util";
const httpGetPromisified = util.promisify(
(url: string, cb: (err: any, result: IncomingMessage) => void) =>
http.get(url, (res) => cb(null, res))
);
// with promise.then
httpGetPromisified("http://www.google.com").then((res) => {
// res here has type http.IncomingMessage
console.log(res.statusCode);
});
// with async/await
const res = await httpGetPromisified("http://www.google.com");
console.log(res.statusCode);
Here's my solution after going through this thread:
var http = require('http');
var bl = require('bl')
promises = [
promiseLoad(process.argv[2]),
promiseLoad(process.argv[3]),
promiseLoad(process.argv[4])
];
Promise.all(promises).then(function(res) {
for(i=0; i<promises.length; i++) {
console.log(res[i]);
}
});
function promiseLoad(url) {
var body = '';
return new Promise(function(resolve, reject) {
http.get(url, function (response) {
response.setEncoding('utf8');
response.pipe(bl(function (err, data) {
resolve(data.toString())
}))
})
});
}
Here's the official solution in case you want to compare notes:
var http = require('http')
var bl = require('bl')
var results = []
var count = 0
function printResults () {
for (var i = 0; i < 3; i++) {
console.log(results[i])
}
}
function httpGet (index) {
http.get(process.argv[2 + index], function (response) {
response.pipe(bl(function (err, data) {
if (err) {
return console.error(err)
}
results[index] = data.toString()
count++
if (count === 3) {
printResults()
}
}))
})
}
for (var i = 0; i < 3; i++) {
httpGet(i)
}
const http = require('http');
const urls = process.argv.slice(2);
let callCount = 0;
const cb = (res) => {
res.setEncoding('utf8');
let rawData = '';
res.on('data', (chunk) => {
rawData += chunk.toString();
});
res.on('end', () => {
callCount += 1;
console.log(rawData);
if (callCount < urls.length) {
getData(urls[callCount]);
}
});
res.on('error', (error) => {
console.log(error);
});
};
const getData = (url) => {
http.get(url, cb);
};
getData(urls[callCount]);
Here's how I did it:
const https = require('https');

const myFunc = async function () {
    let url = 'https://your.data/file';
let promise = new Promise((resolve, reject) => {
var data = '';
https.get(url, res => {
res.on('data', chunk => { data += chunk })
res.on('end', () => {
resolve(data);
})
})
});
let result = await promise; // wait until the promise resolves
doStuffWithResult(result);
};
One of the methods is to use the 'Q' library.
First, create a function that will hit the URL and return a promise:
var Q = require('q');
var http = require('http');

// Hit the URL and return a promise that resolves with the response body,
// or rejects if the request errors out.
function getIt(item) {
    var deferred = Q.defer();
    http.get(item, function (response) {
        var body = '';
        response.on('data', function (chunk) { body += chunk; });
        response.on('end', function () {
            deferred.resolve(body);
        });
    }).on('error', function (error) {
        deferred.reject(error);
    });
    return deferred.promise;
}
var urls = ['url1','url2','url3']; // list of urls
Q.all(urls.map(getIt))
    .spread(function (res1, res2, res3) {
        // res1 is the response for url1 and so on.
        // Once all calls are finished you will get the results here.
    });