node JSON https request becoming undefined - javascript

I am trying to send a single https request to an API, with a bunch of data in JSON format.
When running the code, I first ran into the problem that everything was undefined. I commented out the loop that was supposed to parse the data I needed, and right now I'm just trying to console.log all of the data.
However, it still seems to be looping through something, even though there is no longer a loop anywhere in my code.
Here is the code for my request:
function getCards() {
    // make a request
    var options = {
        host: 'omgvamp-hearthstone-v1.p.mashape.com',
        path: '/cards',
        method: 'GET',
    };
    var req = https.request(options, function(res) {
        console.log('STATUS ' + res.statusCode);
        console.log('HEADERS ' + JSON.stringify(res.headers));
        res.setEncoding('utf8');
        res.on('data', function(data) {
            //console.log(data);
            updateCardsCollection(JSON.stringify(data));
        });
    });
    req.on('error', function(e) {
        console.log('problem with request: ' + e.message);
    });
    req.end();
}
The weird thing is that the console.log(data) in the above code logs all of the data just fine.
function updateCardsCollection(data) {
    var cardsRaw = [];
    console.log("DATA");
    console.log("===========================================================");
    console.log(data.Classic);
}
Here "Classic" is one of the arrays of objects in the API.
Before implementing Node, Express, Mongoose, and Jade, the following loop worked fine for parsing the data:
for (var key in data) {
    for (var i = 0; i < data[key].length; i++) {
        console.log(data[key][i].cardId);
    }
}
However, this loop prints out undefined as well.
Another strange problem: when I run the code with the loop commented out, leaving just the three console.log calls in updateCardsCollection, it logs those three lines many times, despite the function only being called once.
Any idea why this code is no longer working for getting my API data?

First, you need to use JSON.parse instead of JSON.stringify when you update your collection, if you want to work with objects rather than strings.
Even with that fixed, your code still won't work, because the 'data' handler receives chunked data, not the full response. See https://nodejs.org/api/http.html#http_http_request_options_callback.
You need to accumulate the chunks, for example like this:
const req = https.request(options, res => {
    const data = [];
    res.on('data', d => data.push(d));
    res.on('end', () => updateCardsCollection(JSON.parse(data.join(''))));
});
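Putting both fixes together, getCards could look something like this (a sketch only; it assumes https is already required at the top of the file and that updateCardsCollection expects a parsed object):

function getCards() {
    var options = {
        host: 'omgvamp-hearthstone-v1.p.mashape.com',
        path: '/cards',
        method: 'GET'
    };
    var req = https.request(options, function(res) {
        res.setEncoding('utf8');
        var chunks = [];
        res.on('data', function(chunk) {
            chunks.push(chunk); // collect each chunk; do not parse yet
        });
        res.on('end', function() {
            // the full body is only available once 'end' fires
            updateCardsCollection(JSON.parse(chunks.join('')));
        });
    });
    req.on('error', function(e) {
        console.log('problem with request: ' + e.message);
    });
    req.end();
}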

Related

Node JS Express how can I send a 404 error if a bad request is made to third party API?

In my Node JS server I have this route handler that sends a request to a third party API to get a username:
app.get('/players/:player', apiLimiter, function(request, response) {
    const player = request.params.player;
    const api_url = `https://api.com/shards/steam/players?filter[playerNames]=${player}`;
    var options = {
        method: "GET",
        observe: 'body',
    };
    let apiRequest = https.request(api_url, options, function (res) {
        let data = "";
        res.on("data", chunk => {
            data += chunk;
        });
        res.on("end", () => {
            let objectParsed = JSON.parse(JSON.stringify(data));
            response.send(objectParsed);
        });
        if (!player) {
            res.status(404).send("Not found.");
        }
    });
    apiRequest.end();
});
This works fine to get a user that exists. However, if I put in a fake username to my /players page, that page still loads with a 200 status instead of getting a 404 response. The page loads and looks broken because it's not actually getting any data from the API.
I feel like this is a dumb question. In my research I have only found how to handle errors for a plain route, not for a route that depends on a path parameter like /players/:player.
I found a question similar to mine (How to throw a 404 error in express.js?) and tried using an if statement: if (!player) { res.status(404).send("Not found."); } but no dice. Am I using this if statement in the wrong place?
How can I get my Node JS server to respond with a 404 if the user from the database doesn't exist?
You have to check the result of the API call, see whether you got valid data back, and send the 404 there. I also added a check that a player name was passed at all, and send back a 400 (Bad Request) if there's no player specified:
app.get('/players/:player', apiLimiter, function(request, response) {
    const player = request.params.player;
    if (!player) {
        response.status(400).send("No player specified.");
        return;
    }
    const api_url = `https://api.com/shards/steam/players?filter[playerNames]=${player}`;
    var options = {
        method: "GET",
        observe: 'body',
    };
    let apiRequest = https.request(api_url, options, function(res) {
        let data = "";
        res.on("data", chunk => {
            data += chunk;
        });
        res.on("end", () => {
            let objectParsed = JSON.parse(data);
            // test objectParsed here; the condition below is pseudocode
            if (!someConditionOn(objectParsed)) {
                response.status(404).send("No data for that player name.");
            } else {
                response.send(objectParsed);
            }
        });
    });
    apiRequest.end();
});
Also, you don't want JSON.parse(JSON.stringify(data)) here. Your data is already a string. Just do JSON.parse(data).
FYI, if you use a small http request library such as got(), this code gets a lot simpler as it accumulates the response and parses the JSON for you in one line of code as in:
let data = await got(options).json()
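For example, a sketch of the same route using got (assuming got v11, which can be require()d and whose promise API exposes .json(); the check on the parsed data and the 502 for upstream failures are placeholders for whatever the real API shape calls for):

app.get('/players/:player', apiLimiter, async function(request, response) {
    const player = request.params.player;
    if (!player) {
        return response.status(400).send("No player specified.");
    }
    const api_url = `https://api.com/shards/steam/players?filter[playerNames]=${player}`;
    try {
        // got accumulates the body and parses the JSON in one step
        const data = await got(api_url).json();
        if (!data || !data.data || data.data.length === 0) { // assumed response shape
            return response.status(404).send("No data for that player name.");
        }
        response.send(data);
    } catch (err) {
        // got rejects on network errors and on non-2xx status codes
        response.status(502).send("Upstream request failed.");
    }
});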

In http.get, what's the difference between response.on('data') and using the bl library in JavaScript?

var http = require('http');
var bl = require('bl');

var url = process.argv[2];

http.get(url, function(res) {
    res.pipe(bl(function(err, data) {
        var dataString = data.toString();
        var dataCount = dataString.length;
        console.log(dataCount);
        console.log(dataString);
    }));
});

// http.get(url, function(res) {
//     res.on('data', function(data) {
//         var dataString = data.toString();
//         var dataCount = dataString.length;
//         console.log(dataCount);
//         console.log(dataString);
//     });
// });
This is from the node school challenges (learnyounode). It's exercise 8.
The commented-out code seems to break the data up into individual pieces rather than aggregating it. I am trying to find docs explaining this behavior but cannot. Can someone shed some light on what's going on?
res is a readable stream, which means it will emit 'data' events when it has read some data. A single 'data' event doesn't necessarily carry all the data, and the handler may be called multiple times (until either an 'end' event is emitted, or an 'error' event signals that something went wrong).
That's basically what bl does (in your example): it collects all the data, and when it has all been read, it calls the callback function you pass it.
Without bl the code would look something like this:
http.get(url, function(res) {
    var buffers = [];
    res.on('data', function(data) {
        buffers.push(data);
    }).on('end', function() {
        var dataString = Buffer.concat(buffers).toString();
        var dataCount = dataString.length;
        console.log(dataCount);
        console.log(dataString);
    }).on('error', function(err) {
        // ...handle the error...
    });
});
In other words: the incoming data is collected in the buffers array, and when the end event is triggered (meaning all the data has been read), the data buffers are concatenated together to form the final data buffer.

Transfer cookies across domains in express

I've got a proxy set up in Node.js that goes to one of our backend servers for data; some of that data (such as the session id) is stored in cookies. What I want to do is have the proxy get the remote cookies, push them into the header of the response to the original request, then send the response back. I'm close, but I've hit a snag:
app.get(/\/json\/(.+)/, getJson);

var getJson = function(req, response1) {
    response1.setHeader('Content-Type', 'application/json; charset=utf-8');
    var before1stWrite = true;
    utils.setCookies(response1, ["floo=flum"]); // this works
    var options = {
        host: config.scraperUrl.replace('http://', ''),
        path: '/rwd/' + req.params[0] + '?' + querystring.stringify(req.query),
        method: "GET",
        rejectUnauthorized: false
    };
    var request = https.request(options, function(response2) {
        response2.setEncoding('utf8');
        // utils.setCookies(response1, ["flib=flah"]) // this fails, too
        response2.on('data', function(d) {
            if (before1stWrite) {
                console.log(response2.headers['set-cookie']); // remote's cookies
                utils.setCookies(response1, ["flib=flah"]); // this fails
                before1stWrite = false;
            }
            response1.write(d);
        });
        response2.on('end', function() {
            response1.end();
        });
    });
    request.end();
    request.on('error', function(e) {
        console.error("error occurred: " + e.message);
        response1.end();
    });
}
setCookies(response1, cookies) just loops through the cookies and, for each one, does
    res.setHeader('Set-Cookie', cookie)
The problem is that the headers appear to have already been sent by the time the second setCookies call runs; moving the call into the 'data' event handler does not help. The error I get is:
http.js:689
throw new Error('Can\'t set headers after they are sent.');
Is there any way to add headers that I receive from response2 to response1?
UPDATE
I changed the code to make sure the attempt to set headers on response1 happens before any other writes; that did not fix it, however.
Yes, you cannot send headers after data has started flowing. Did you try setting the header after this line?
response2.setEncoding('utf8');
Also, did you consider using streams rather than transferring in chunks? http://nodejs.org/api/stream.html
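For example, since response2's headers are available as soon as the https.request callback runs, and nothing has been written to response1 at that point, a sketch like this (reusing the names from the question, and combining the local and remote cookies into one Set-Cookie header) avoids the problem and streams the body instead of buffering it:

var request = https.request(options, function(response2) {
    // Headers from the remote server are available here, before any body
    // data has been written to response1, so setting headers is still allowed.
    var remoteCookies = response2.headers['set-cookie'] || [];
    response1.setHeader('Set-Cookie', remoteCookies.concat(["flib=flah"]));
    // Stream the remote body straight through to the original response
    response2.pipe(response1);
});
request.on('error', function(e) {
    console.error("error occurred: " + e.message);
    response1.end();
});
request.end();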
You'll need to buffer the data.
Doing this is pretty much like piping:
response2.on('data', function(d) {
    response1.write(d);
});
so you're sending the response straight away. Haven't tried it but this should work:
var data = "";
response.on('data', function(d) {
data += d;
});
response.on('end', function() {
console.log(response.headersSent);
console.log(response.headers['set-cookie']);
utils.setCookies(res, ["flib=flah"])
res.write(data);
res.end();
});
Just remember you're buffering all of that data in memory, which is not recommended for large responses.

Sails.js Sending json object returned in https.request to the view

Just learning Sails.js so go easy on me.
I have queried an XML service and successfully converted the response to JSON using xml2js:
var req = https.request(options, function(res) {
    var xml = '';
    res.on('data', function(chunk) {
        xml += chunk;
    });
    res.on('end', function () {
        var result = parseString(xml, function (err, result) {
            console.log(JSON.stringify(result)); // Position 1
        });
        return result;
    });
});
req.on('error', function(e) {
    console.log('problem with request: ' + e.message);
});
req.write(data);
var result = req.end();
console.log('Result: ' + JSON.stringify(result)); // Position 2
res.view({ message : 'hello', result : result });
The view is loading fine, and <%= message %> outputs hello. Great.
The console.log at Position 1 prints the stringified JSON object. Great.
The console.log at Position 2 prints Result: true. Not good.
I need to get that JSON data to my view. How do I do this?
It looks like you're assuming that calling req.end() will give you the response from the https.request you started above. There are a couple of things wrong with that:
req.end() is used to finish writing to an open request, not to get a response. According to the docs, the return value is unspecified.
The https.request call is asynchronous; even if req.end() worked like you want it to, the response wouldn't have come in by the time you call it.
The solution is to put your response code (i.e. your res.view) inside the handler for the end event that you've already written. I'd also recommend refactoring your code to use different variable names for the remote request / response so that they don't collide with the req and res variables in your controller action. The whole thing would then be something like:
myAction: function (req, res) {
    // Not sure how you're setting options, so just an example
    var options = {url: 'http://example.com', ...};
    var request = https.request(options, function(response) {
        var xml = '';
        response.on('data', function(chunk) {
            xml += chunk;
        });
        response.on('end', function () {
            parseString(xml, function (err, result) {
                return res.view({ message : 'hello', result : JSON.stringify(result) });
            });
        });
    });
    request.on('error', function(e) {
        console.log('problem with request: ' + e.message);
        res.serverError(e);
    });
}
You might also look into using something like the Request module to simplify your external request; it would save you from having to write event handlers for data and end.
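For instance, a rough sketch of the same action using the request module (this assumes var request = require('request') at the top and that parseString still comes from xml2js, as in the question):

myAction: function (req, res) {
    // Placeholder options, as in the example above; add a body/form option
    // here if the remote service expects the data written with req.write(data)
    var options = { url: 'http://example.com' };
    request(options, function (err, remoteResponse, body) {
        if (err) {
            console.log('problem with request: ' + err.message);
            return res.serverError(err);
        }
        // body is the complete XML string, already accumulated for us
        parseString(body, function (parseErr, result) {
            if (parseErr) {
                return res.serverError(parseErr);
            }
            return res.view({ message: 'hello', result: JSON.stringify(result) });
        });
    });
}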
If you want to pass the JSON to a client-side JavaScript variable in the view:
var clientJsonVar = <%- JSON.stringify(serverSideJson)%>

Questions about the scopes, node.js, and express

I really struggle with understanding scopes and similar concepts in just about every language. Right now I am building an Express application that takes user input, queries an arbitrary API, and then logs the result to the console. To handle the REST API, I am using shred. I know I can use Node's built-in GET request, but for some reason I could never get it to work. The user makes the following GET request to my app: /query?query=. This is what I have now. I can't really describe what I'm doing, so please read the code comments.
var http = require('http');
var Shred = require("shred");
var assert = require("assert");

exports.query = function(req, res){
    // thequery is the string that is requested
    var thequery = req.query.query;
    var shred = new Shred();

    console.log("user searched" + " " + thequery);
    console.log();

    // The if statement detects if the user searched a url or something else
    if (thequery.indexOf("somearbitratyrestapi.com") !== -1){
        console.log("a url was searched");
        // find info on the url
        var thedata = shred.get({
            url: "http://somearbitratyrestapi.com/bla/v2" + thequery,
            headers: {
                Accept: "application/json"
            },
            on: {
                // You can use response codes as events
                200: function(response) {
                    // Shred will automatically JSON-decode response bodies that have a
                    // JSON Content-Type
                    // This is the returned json
                    // I want to get this json Data outside the scope of this object
                    console.log(response.content.body);
                },
                // Any other response means something's wrong
                response: function(response) {
                    console.log("ohknowz");
                }
            }
        });
        // I want to be able to see that json over here. How do?
    } else {
        console.log("another thing was searched");
    }
    /*
    res.render('search-results', {
        result: 'you gave me a url',
        title: 'you gave me a url'
    });
    */
};
I tried doing this
var http = require('http');
var Shred = require("shred");
var assert = require("assert");

exports.query = function(req, res){
    // thequery is the string that is requested
    var thequery = req.query.query;
    var shred = new Shred();

    // I created a variable outside of the object
    var myjson;

    console.log("user searched" + " " + thequery);
    console.log();

    // The if statement detects if the user searched a url or something else
    if (thequery.indexOf("somearbitratyrestapi.com") !== -1){
        console.log("a url was searched");
        // find info on the url
        var thedata = shred.get({
            url: "http://somearbitratyrestapi.com/bla/v2" + thequery,
            headers: {
                Accept: "application/json"
            },
            on: {
                // You can use response codes as events
                200: function(response) {
                    // Shred will automatically JSON-decode response bodies that have a
                    // JSON Content-Type
                    // This is the returned json
                    // I set myjson to the returned json
                    myjson = response.content.body;
                },
                // Any other response means something's wrong
                response: function(response) {
                    console.log("ohknowz");
                }
            }
        });
        // Then I try to output the json and get nothing
        console.log(myjson);
    } else {
        console.log("another thing was searched");
    }
    /*
    res.render('search-results', {
        result: 'you gave me a url',
        title: 'you gave me a url'
    });
    */
};
Sorry for the bad explanation of my problem. Can someone please help or explain what is going on?
So you think you need to move the data out of your nested scope, but the opposite is true. Within the nested scope where you have access to your upstream JSON response, you need to access the res object and send the response from there:
    myjson = response.content.body;
    res.send(myjson);
However, long term you'll need to do some more node tutorials and focus on how to use callbacks to avoid deeply nested function scopes.
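For example, a minimal sketch of the whole handler with the response sent from inside the callbacks (it keeps the Shred call from the question; the 500 response for other status codes is just one possible choice):

exports.query = function(req, res) {
    var thequery = req.query.query;
    var shred = new Shred();

    if (thequery.indexOf("somearbitratyrestapi.com") !== -1) {
        shred.get({
            url: "http://somearbitratyrestapi.com/bla/v2" + thequery,
            headers: { Accept: "application/json" },
            on: {
                200: function(response) {
                    // The JSON body only exists here, inside the callback,
                    // so respond to the client from this scope.
                    res.send(response.content.body);
                },
                response: function(response) {
                    // Any other status code from the upstream API
                    res.status(500).send("upstream error");
                }
            }
        });
    } else {
        res.send("another thing was searched");
    }
};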
