I am having trouble assigning a value to btcprice: when I try to log the variable after the http.get call, it outputs undefined. I understand that http.get runs asynchronously, but I don't know what to do to fix this. Any help would be great! Thank you.
const http = require('http');

var btcprice;

// request api
http.get(
  {
    host: 'api.coindesk.com',
    path: '/v1/bpi/currentprice.json'
  },
  function (response) {
    // get data
    let body = '';
    response.on('data', function (d) { body += d; });
    response.on('end', function () {
      // manipulate received data
      let parsed = JSON.parse(body);
      btcprice = parsed.bpi.USD.rate;
    });
  });
I've created an example based on your explanation. You can see that btcprice is only reassigned once the response has been fully received; before that, btcprice will have the default value undefined.
const http = require('http');

let btcprice;

// request api
http.get({
  host: 'api.coindesk.com',
  path: '/v1/bpi/currentprice.json'
}, (response) => {
  // get data
  let body = '';
  response.on('data', function (d) {
    body += d;
  });
  response.on('end', function () {
    // manipulate received data
    let parsed = JSON.parse(body);
    btcprice = parsed.bpi.USD.rate;
    console.log(btcprice); // btcprice now has a value
  });
});
console.log(btcprice); // btcprice is still undefined here because the response hasn't arrived yet
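In other words, any code that needs btcprice has to run inside (or be triggered from) the 'end' handler. One common pattern is to wrap the request in a Promise so callers can wait for the value. Here is a minimal sketch along those lines; getBtcPrice is just an illustrative name, and the host/path are taken from your snippet:

const http = require('http');

// hypothetical helper that resolves with the USD rate once the response ends
function getBtcPrice() {
  return new Promise((resolve, reject) => {
    http.get({
      host: 'api.coindesk.com',
      path: '/v1/bpi/currentprice.json'
    }, (response) => {
      let body = '';
      response.on('data', (d) => { body += d; });
      response.on('end', () => {
        try {
          resolve(JSON.parse(body).bpi.USD.rate);
        } catch (e) {
          reject(e);
        }
      });
    }).on('error', reject);
  });
}

getBtcPrice().then((rate) => console.log(rate)); // logs the rate, not undefined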
Related
I want to get JSON from a URL in my Node.js code. The code works fine, but the sequence of execution is messed up because of the async nature of the execution.
var http = require("https");
var number = 37302;
// these functions need to execute is sequence.
console.log('Before API Call');
var response = fetchJson(number);
console.log(response);
console.log('After API Call');
function fetchJson(number)
{
var url = 'https://example.com/api/getactionitems/' + number;
http.get(url, function(res){
var body = '';
res.on('data', function(chunk){
body += chunk;
console.log('JSON Retrieved.');
});
res.on('end', function(){
console.log('Parsing JSON');
var APIResponse = JSON.parse(body);
var Name = APIResponse.EmpName;
var outstring = APIResponse.ActionItem;
return ('Hi ' + Name + ', Your action Items are: '+ outstring);
});
})
.on('error', function(e){
return ("Got an error while fetching data.");
});
}
When this code executes, the sequence of the output strings is as follows:
Before API Call
undefined
After API Call
JSON Retrieved.
Parsing JSON
How can I correct the execution order, so that the sequence is like the following:
Before API Call
JSON Retrieved.
Parsing JSON
<Output from the JSON parsing>
After API Call
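You can get that order by wrapping the request in a Promise, resolving it in the 'end' handler, and chaining the follow-up logic with .then():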
var http = require("https");
var number = 37302;

// these functions need to execute in sequence.
console.log('Before API Call');
fetchJson(number).then(function (res) {
  console.log(res);
  console.log('After API Call');
}).catch(function (e) { console.log('err', e); });

function fetchJson(number) {
  return new Promise(function (resolve, reject) {
    var url = 'https://example.com/api/getactionitems/' + number;
    http.get(url, function (res) {
      var body = '';
      res.on('data', function (chunk) {
        body += chunk;
        console.log('JSON Retrieved.');
      });
      res.on('end', function () {
        console.log('Parsing JSON');
        var APIResponse = JSON.parse(body);
        var Name = APIResponse.EmpName;
        var outstring = APIResponse.ActionItem;
        resolve('Hi ' + Name + ', Your action Items are: ' + outstring);
      });
    })
    .on('error', function (e) {
      reject("Got an error while fetching data.");
    });
  });
}
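If you're on a Node version that supports async/await, the same fetchJson can be consumed so the code reads top to bottom. A small sketch, assuming it runs after the fetchJson definition above:

async function main() {
  console.log('Before API Call');
  try {
    const response = await fetchJson(number); // waits for the Promise returned by fetchJson
    console.log(response);
  } catch (e) {
    console.log('err', e);
  }
  console.log('After API Call');
}

main();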
I'm adding a contact me section to a website. I want to be able to send the data from the forms with JS, and then receive and do something with the data with Node. I understand that there are frameworks and libraries that can handle this stuff, but I would like to build it from scratch so that I have a better understanding of what is happening.
I currently have a section of JS (see below) that is taking the form data, and sending it as a POST request to the node script, but I can't seem to wrap my head around what is happening with node, or how to receive the data with the node script. Any help in pointing me in the right direction is greatly appreciated.
const name = $(".name");
const email = $(".email");
const message = $(".message");
const submitButton = $(".submitButton");

const nameRegex = /([a-zA-Z\s-])/g;
const emailRegex = /^(([^<>()\[\]\\.,;:\s#"]+(\.[^<>()\[\]\\.,;:\s#"]+)*)|(".+"))#((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/g;
const messageRegex = /([a-zA-Z\s.,?!$%&])/gm;
const url = "../node/contactMeSubmissionHandler.js";

submitButton.click(function () {
  let nameContents = name.val().match(nameRegex).join("");
  let emailContents = email.val().match(emailRegex).join("");
  let messageContents = message.val().match(messageRegex).join("");

  // if (emailRegex.test(emailContents) == true) {
  //   let emailValid = emailContents;
  // } else {
  //   console.log("Email is invalid");
  // };

  const data = {
    email: emailContents,
    name: nameContents,
    message: messageContents
  };

  $.post(url, data, function (data, status) {
    console.log(`${data} and status is ${status}`);
  });
});
I like to write from scratch too. Here is working code that is called from the command line to get a token.
// clientEx.js
var http = require('http');
var fs = require('fs');

const _SERVER = "dcsmail.net"; /* dcsmail.net */

// Callback function is used to deal with the response
var callback = function (response) {
  // update stream with data
  var body = '';
  response.on('data', function (data) {
    body += data;
  });
  response.on('end', function () {
    // Data received completely.
    fs.writeFileSync("temp.lst", body, 'utf8');
    // console.log("clientEx.js received: " + body);
  });
};

if ((process.argv[2] == null) || (process.argv[3] == null) || (process.argv[4] == null) || (process.argv[5] == null)) {
  console.log("clientEx.js usage: <user email> <user password> <destination> <GUID>");
} else {
  var Ef_email = encodeURI(process.argv[2]);
  var Ef_pass = encodeURI(process.argv[3]);
  var Ef_dest = encodeURI(process.argv[4]);
  var Ef_guid = encodeURI(process.argv[5]);

  var post_data = ("f_email=" + Ef_email +
    "&" + "f_pass=" + Ef_pass +
    "&" + "f_dest=" + Ef_dest +
    "&" + "f_guid=" + Ef_guid);

  // Options to be used by the request
  var options = {
    host: _SERVER,
    port: '80',
    path: '/DCSM/tokenP10.php',
    method: 'POST',
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded',
      'Content-Length': Buffer.byteLength(post_data)
    }
  };

  // console.log("clientEx.js using " + _SERVER + ":" + options.port + options.path);

  // request the token from the host
  try {
    var req = http.request(options, callback);
    req.write(post_data);
    req.end();
  } catch (error) {
    fs.writeFileSync("temp.lst", "Host access failed\n", 'utf8');
  }
}
You should be able to adapt that to your needs.
Use this code to create a server, then check the console log for the different request attributes.
const http = require('http');

http
  .createServer((request, response) => {
    console.log(request);
    response.end();
  })
  .listen(3000);
Make GET and POST request to http://localhost:3000/ and look for method, headers etc.
See the Node.js http module documentation for more details on the request and response objects.
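The POST body itself isn't a property on the request object; it arrives as a stream, so you collect it the same way as in the client examples above. A minimal sketch, where the /contact check and the field names are just placeholders matching the form fields in the question:

const http = require('http');
const querystring = require('querystring');

http.createServer((request, response) => {
  if (request.method === 'POST') {
    let body = '';
    request.on('data', (chunk) => { body += chunk; });
    request.on('end', () => {
      // jQuery's $.post sends application/x-www-form-urlencoded by default
      const fields = querystring.parse(body);
      console.log(fields.name, fields.email, fields.message);
      response.end('Received');
    });
  } else {
    response.end();
  }
}).listen(3000);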
I would like to make a GET request to a certain API and pass its response to a method in the outer scope. Is it possible?
var http = require('http'),
    magic = new Magic();

http.request('www.random.org/integers/?num=1&min=1&max=10&col=1&base=10&format=plain&rnd=new', function (response) {
  var str = String();
  response.on('data', function (chunk) {
    str += chunk;
  });
  response.on('end', function () {
    console.log(str);
    // here
    // magic.load(str);
  });
}).end();

// or here
// magic.load(str);
The response str should be passed to magic.load().
I get no errors but the object is not loaded. What am I doing wrong?
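For the same reason as in the btcprice example above, str only holds the full body inside the 'end' handler, so that is where magic.load has to be called (or where a callback/Promise that calls it has to be triggered). A minimal sketch, keeping your Magic object and using an options object so the host and path are explicit:

var http = require('http'),
    magic = new Magic();

http.request({
  host: 'www.random.org',
  path: '/integers/?num=1&min=1&max=10&col=1&base=10&format=plain&rnd=new'
}, function (response) {
  var str = '';
  response.on('data', function (chunk) {
    str += chunk;
  });
  response.on('end', function () {
    magic.load(str); // str is complete here, so the object gets loaded
  });
}).end();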
Trying to make a GET request from Node.js with the express module. Here is the relevant piece of code:
var req = http.request(options, function (res) {
  res.on('data', function (chunk) {
  });
});
req.end();
But I can't understand how to receive the data from the response body; I tried res.body and res.data, but neither worked.
The data arrives in the chunk parameter. Parts of it anyway. You need to pick up and join all the chunks into a complete response. Copy-paste example from http://docs.nodejitsu.com/articles/HTTP/clients/how-to-create-a-HTTP-request:
var http = require('http');

// The url we want is: 'www.random.org/integers/?num=1&min=1&max=10&col=1&base=10&format=plain&rnd=new'
var options = {
  host: 'www.random.org',
  path: '/integers/?num=1&min=1&max=10&col=1&base=10&format=plain&rnd=new'
};

callback = function (response) {
  var str = '';

  // another chunk of data has been received, so append it to `str`
  response.on('data', function (chunk) {
    str += chunk;
  });

  // the whole response has been received, so we just print it out here
  response.on('end', function () {
    console.log(str);
  });
};

http.request(options, callback).end();
I'm trying to make a simple feed reader in Node and I'm facing a problem with multiple requests in node.js.
For example, I have an array of URLs, something like:
urls = [
"http://url1.com/rss.xml",
"http://url2.com",
"http://url3.com"];
Now I want to get the contents of each URL. My first idea was to use for(var i in urls), but that's not a good idea. The best option would be to do it asynchronously, but I don't know how to do that.
Any ideas?
EDIT:
I got this code:
var data = [];
for (var i = 0; i < urls.length; i++) {
  http.get(urls[i], function (response) {
    console.log('Response: ', response.statusCode, ' from url: ', urls[i]);
    var body = '';
    response.on('data', function (chunk) {
      body += chunk;
    });
    response.on('end', function () {
      data.push(body);
    });
  }).on('error', function (e) {
    console.log('Error: ', e.message);
  });
}
The problem is that the http.get(...) line is called first for each element in the loop, and only after that are the response.on('data') and then response.on('end') events fired. It makes a mess and I don't know how to handle it.
I know this is an old question, but I think a better solution would be to use JavaScript's Promise.all():
const request = require('request-promise');

const urls = ["http://www.google.com", "http://www.example.com"];
const promises = urls.map(url => request(url));

Promise.all(promises).then((data) => {
  // data is an array of resolved response bodies, in the same order as urls
});
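If you'd rather not pull in request-promise, the same Promise.all pattern works with the built-in http module by wrapping each request in a Promise yourself. A rough sketch, where getBody is just an illustrative name and urls is the array from above:

const http = require('http');

// wrap a single GET in a Promise that resolves with the full body
function getBody(url) {
  return new Promise((resolve, reject) => {
    http.get(url, (res) => {
      let body = '';
      res.on('data', (chunk) => { body += chunk; });
      res.on('end', () => resolve(body));
    }).on('error', reject);
  });
}

Promise.all(urls.map(getBody)).then((bodies) => {
  // bodies[i] corresponds to urls[i]
  console.log(bodies.length);
});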
Node's http requests are asynchronous by default. You can start them one after another in your code and call a function once all the requests are done. You can either do it by hand (count finished vs. started requests) or use async.js.
This is the no-dependency way (error checking omitted):
var http = require('http');

var urls = ["http://www.google.com", "http://www.example.com"];
var responses = [];
var completed_requests = 0;

for (var i in urls) {
  http.get(urls[i], function (res) {
    responses.push(res);
    completed_requests++;
    if (completed_requests == urls.length) {
      // All downloads done, process the responses array
      console.log(responses);
    }
  });
}
You need to check that the 'end' event (data complete) has been fired for the exact number of requests... Here's a working example:
var http = require('http');

var urls = ['http://adrianmejia.com/atom.xml', 'http://twitrss.me/twitter_user_to_rss/?user=amejiarosario'];
var completed_requests = 0;

urls.forEach(function (url) {
  var responses = [];
  http.get(url, function (res) {
    res.on('data', function (chunk) {
      responses.push(chunk);
    });
    res.on('end', function () {
      if (completed_requests++ == urls.length - 1) {
        // All downloads are completed
        console.log('body:', responses.join());
      }
    });
  });
});
You can use any promise library with a ".all" implementation. I use the RSVP library; it's simple enough.
var fs = require('fs');
var request = require('request');
var RSVP = require('rsvp');

var downloadFileList = [{ url: 'http://stuff', dataname: 'filename to download' }];

var ddownload = downloadFileList.map(function (id) {
  var dataname = id.dataname;
  var url = id.url;
  return new RSVP.Promise(function (fulfill, reject) {
    var stream = fs.createWriteStream(dataname);
    stream.on('close', function () {
      console.log(dataname + ' downloaded');
      fulfill();
    });
    request(url).on('error', function (err) {
      console.log(err);
      reject();
    }).pipe(stream);
  });
});

return RSVP.allSettled(ddownload);
Promise.allSettled will not stop at errors. It makes sure you process all responses, even if some have an error.
Promise.allSettled(promises)
  .then((data) => {
    // do your stuff here
  })
  .catch((err) => {
    console.log(JSON.stringify(err, null, 4));
  });
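Each entry in the allSettled result describes one request rather than being the body directly, so the handler usually needs to check the status field. A small sketch of that, reusing the promises and urls arrays from the Promise.all example above:

Promise.allSettled(promises).then((results) => {
  results.forEach((result, i) => {
    if (result.status === 'fulfilled') {
      console.log(urls[i], 'succeeded, body length:', result.value.length);
    } else {
      console.log(urls[i], 'failed:', result.reason);
    }
  });
});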
The problem can be easily solved using a closure. Make a function to handle the request and call that function in the loop. Every time the function is called, it has its own lexical scope, and by using a closure it can retain the URL even after the loop ends. And even if the response arrives in streams, the closure handles that too.
const request = require("request");

function getTheUrl(data) {
  var options = {
    url: "https://jsonplaceholder.typicode.com/posts/" + data
  };
  return options;
}

function consoleTheResult(url) {
  request(url, function (err, res, body) {
    console.log(url);
  });
}

for (var i = 0; i < 10; i++) {
  consoleTheResult(getTheUrl(i));
}
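If you also want to collect the bodies (like the data array from the question), the same pattern works, because each call closes over its own options and can push the body once it arrives. A rough sketch along those lines, reusing getTheUrl above; collectTheResult is just an illustrative name:

const results = [];
let remaining = 10;

function collectTheResult(options) {
  request(options, function (err, res, body) {
    if (!err) {
      results.push({ url: options.url, body: body });
    }
    if (--remaining === 0) {
      // every request has finished (successfully or not)
      console.log(results.length, 'responses collected');
    }
  });
}

for (var i = 0; i < 10; i++) {
  collectTheResult(getTheUrl(i));
}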