Perform GET request and print XML output in Node.js

New to Node.js (and JavaScript), I'm trying to make a request to my provider, which is supposed to send me back an XML file.
So I have my Node.js file: app.js.
In this file I have chosen to perform a GET request the native way (well, I think...):
// Required ... ('fs','url','http'...)
var file_url = 'http://www.myprovider.../test.xml'; // this url will return an xml file.

var download_file_httpget = function(file_url) {
  var options = {
    host: url.parse(file_url).host,
    port: 80,
    path: url.parse(file_url).pathname,
    method: 'GET'
  };
  var completeResponse = '';
  var file_name = url.parse(file_url).pathname.split('/').pop();

  http.get(options, function(res) {
    res.setEncoding('utf8');
    var decoder = new StringDecoder('utf8'); // created but never used below
    res.on('data', function(data) {
      completeResponse += data;
    }).on('end', function() {
      console.log(completeResponse.toString());
    });
  });
};

download_file_httpget(file_url);
The problem is that when I print the result I get something like:
�*�r��Ff8����ij��.�#�F
�������v������#���[
������G+��*u��D:�q"�0�ß���z�g$���rp��r�r�����c#n# ��������QB>�>&K��6��}L��a#��:b�6�$�h\sDV���o�UfX庮�W��J��Qa{��6����ì��R�+��C6.��5��(���}�S x�,#n�-�E��r�*H������h�J!�G�K�������(ê£tCE�K
So I have understood that completeResponse is a buffer and that I need to 'convert' this binary data, but I have no idea how; the results are always the same when I try to parse completeResponse.toString() with an XML parser.
I just want to make the request, get the XML result as a string, and then write the file somewhere or convert it to JSON.
Can someone help?
Thank you very much.
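For what it's worth, output like the above is typical of a compressed (gzip) response body being printed as text. A minimal sketch of how one might check for that, collecting raw Buffers and inflating with zlib (the gzip diagnosis is an assumption, not something the question confirms):
var http = require('http');
var zlib = require('zlib');

http.get('http://www.myprovider.../test.xml', function(res) {
  var chunks = [];
  // No res.setEncoding() here: keep the raw Buffers intact
  res.on('data', function(chunk) {
    chunks.push(chunk);
  }).on('end', function() {
    var raw = Buffer.concat(chunks);
    if (res.headers['content-encoding'] === 'gzip') {
      // The body is compressed; inflate it before decoding as text
      zlib.gunzip(raw, function(err, decoded) {
        if (err) throw err;
        console.log(decoded.toString('utf8'));
      });
    } else {
      console.log(raw.toString('utf8'));
    }
  });
});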

Related

How do I wait for a function (toString()) to finish before using that variable?

I'm trying to convert a buffer to a string and then parse it as JSON, but sometimes it tries to parse before the string operation completes.
In the code below I am converting the buffer data in the variable dat to a string and parsing it as JSON, so sometimes JSON.parse throws an error (incomplete JSON format to parse).
var axios = require('axios');
var MemoryStream = require('memorystream'); // the npm "memorystream" package

var apicall = {
  url: API,
  method: 'post',
  responseType: 'stream',
  headers: {
    'Content-Type': 'application/json',
  },
  data: body
}

axios(apicall).then((res) => {
  var writer = new MemoryStream(null, {
    readable: true
  })
  res.data.pipe(writer)
  writer.on('data', function(dat) {
    console.log(dat);
    var e = dat.toString();
    var jsondata = JSON.parse(e); // sometimes throws: this chunk may be incomplete JSON
    console.log(jsondata);
  });
}).catch((e) => {
  console.log(e);
})
Could someone help me understand how to wait for the toString step to complete before parsing?
The problem with this approach is that you receive the response in chunks (the default chunk size, the stream's highWaterMark, is 16 KB), so if you have a JSON payload bigger than that it will arrive in multiple chunks, and no single data event will carry the complete JSON.
One way to fix your algorithm is to store all those chunks in a separate array and parse them only once the stream has completed (the end event):
let body = [];
writer.on('data', (chunk) => {
  body.push(chunk);
}).on('end', () => {
  body = Buffer.concat(body).toString();
  // at this point, `body` holds all the chunks as a single string,
  // so it is safe to JSON.parse it
  const json = JSON.parse(body);
});
The downside of this approach is that it will not work for very big files that exceed about 1.7 GB, because they will not fit into memory.
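For payloads that large, a streaming JSON parser avoids ever holding the whole body in memory. A minimal sketch using the JSONStream package, reusing the apicall from the question (an assumption: the top level of your JSON is an array, which is what the '*' path iterates over):
var JSONStream = require('JSONStream'); // npm package, not part of Node core

axios(apicall).then((res) => {
  res.data
    .pipe(JSONStream.parse('*')) // emits one 'data' event per top-level array element
    .on('data', (item) => {
      console.log(item); // each element arrives already parsed
    })
    .on('end', () => {
      console.log('stream finished');
    });
});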

How to parse large nested json objects?

PasteBin JSON
I would like to get this as an Object; jsonlint says it is valid, but parsing it fails. Any help would be appreciated.
"Data":[{...},{...},] // the structure is built like this
When I try JSON.parse(jsonparamter) I get:
Uncaught SyntaxError: Unexpected token A in JSON at position 71
    at JSON.parse (<anonymous>)
    at <anonymous>:1:6
There are multiple levels of JSON-encoded data, so you will have to decode the elements deeper in the JSON nest one level at a time. The code below shows an example of accessing Data.Address.Value in this dictionary:
// Set up URLs and headers for making the HTTP request
var corsurl = 'https://cors-anywhere.herokuapp.com/'
var jsonurl = 'https://pastebin.com/raw/vuecweML'
var headerNames = ['Content-Type', 'Accept']
var headerValues = ['application/json', 'application/json']

// Modular GET request function that I use
function getRequest (baseRestURL, APIPath, headerNames, headerValues, callback) {
  var completeRestURL = baseRestURL + APIPath
  console.log('REST API URL: ' + completeRestURL)
  var method = 'GET'
  var url = completeRestURL
  var async = true
  var request2 = new XMLHttpRequest()
  request2.onload = function () {
    console.log('ONLOAD')
    var status = request2.status // HTTP response status, e.g., 200 for "200 OK"
    console.log(status)
    console.log(request2.responseText)
    var response = request2.responseText
    return callback(response)
  }
  request2.open(method, url, async)
  for (var i in headerNames) {
    request2.setRequestHeader(headerNames[i], headerValues[i])
  }
  request2.send(null)
}

// Our code of interest
getRequest(corsurl, jsonurl, headerNames, headerValues, response => {
  var parsed = JSON.parse(response).Data // first parse; take the Data attribute from the dictionary
  var objects = JSON.parse(parsed) // parse a second time (the data is JSON-encoded twice)
  var selection = JSON.parse(objects[0].Address)[0].Value // parse a third time and select an attribute
  document.getElementById('result').innerHTML = selection // add it to our HTML to display
})
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.0.0/jquery.min.js"></script>
<div id='result'> Loading </div>
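Stripped of the HTTP plumbing, the repeated parsing works like this (a self-contained sketch with made-up data, illustrating why one JSON.parse is not enough for doubly encoded data):
// A value that was JSON.stringify'd twice, as in the pastebin payload
var doubleEncoded = JSON.stringify(JSON.stringify([{ Value: 42 }]));

var once = JSON.parse(doubleEncoded); // still a string: '[{"Value":42}]'
var twice = JSON.parse(once);         // now an actual array of objects
console.log(twice[0].Value);          // 42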

Sending data from NodeJS to the client by using Ajax calls

I increase a value on the server by running an Ajax call and want to update my UI afterwards:
function increaseHitpoints() {
  $.ajax({
    type: 'GET',
    url: 'http://localhost:8888/incHp/2312'
  }).done(function (data) {
    $("#txtHitpoints").html(data);
  });
}
In my app.js I read a JSON file, manipulate the value, write it back to the file, and return it to the client:
app.get('/incHp/:id', function (req, res) {
  var database = './database.json';
  fs.readFile(database, 'utf8', function (err, data) { // read the data
    var json = JSON.parse(data);
    var users = json.users;
    var hitpoints;
    users.find(u => {
      if (u.id === Number(req.params.id)) { // get the user by id
        u.hitpoints++;
        hitpoints = u.hitpoints;
      }
    });
    json = JSON.stringify(json);
    fs.writeFile(database, json, (err) => { // update the JSON file
      // -> missing part here <-
    });
  });
});
What do I have to put into the missing part if I want to return the new value (hitpoints)?
I tried res.send(hitpoints); but it seems this function treats a number as a status code, not a value.
If you send a numerical value, it will be interpreted as an HTTP response code (see https://expressjs.com/en/api.html#res).
But you can send your hitpoints as a string, res.send(hitpoints.toString()), or as JSON, res.send({hits: hitpoints});
It depends on what format you want your response to be in. I prefer JSON, so in the JSON case you would do this:
fs.writeFile(database, json, (err) => {
  res.status(200).json({yourKey: yourValue});
});
Then you can access the JSON object in your frontend:
$("#txtHitpoints").html(data.yourKey);

In Http.get, what's the difference between response.on('data') and using the bl library in javascript?

var http = require('http');
var bl = require('bl');
var url = process.argv[2];

http.get(url, function(res) {
  res.pipe(bl(function(err, data) {
    var dataString = data.toString();
    var dataCount = dataString.length;
    console.log(dataCount);
    console.log(dataString);
  }));
})

// http.get(url, function(res) {
//   res.on('data', function(data) {
//     var dataString = data.toString();
//     var dataCount = dataString.length;
//     console.log(dataCount);
//     console.log(dataString);
//   });
// })
This is from the node school challenges (learnyounode), exercise 8.
The commented-out code outputs each individual chunk and seems to break the data up rather than aggregate it. I am trying to find docs explaining this behavior but cannot. Can someone shed some light on what's going on?
res is a readable stream, which means that it will emit data events when it has read some data. This doesn't necessarily have to be all the data, and it may get called multiple times (until either an end event is called, or an error event signals something went wrong).
That's basically what bl does (in your example): it collects all the data, and when it has all been read, it calls the callback function you pass it.
Without bl the code would look something like this:
http.get(url, function(res) {
  var buffers = [];
  res.on('data', function(data) {
    buffers.push(data);
  }).on('end', function() {
    var dataString = Buffer.concat(buffers).toString();
    var dataCount = dataString.length;
    console.log(dataCount);
    console.log(dataString);
  }).on('error', function(err) {
    // ...handle the error...
  });
});
In other words: the incoming data is collected in the buffers array, and when the end event is triggered (meaning all the data has been read), the data buffers are concatenated together to form the final data buffer.

Parsing JSON url with NODE.JS and keeping it up to date with SOCKET.IO

I am very new to Node.js and socket.io and I am trying to figure out how to read a JSON array from an external URL, then parse it and display it on the main page. Then I believe I use socket.io to keep that connection open and keep the JSON object up to date when a change occurs.
This is what I have so far for node.js.
var http = require("http");
var fs = require('fs');
var options = 'http://api.trakt.tv/user/watching.json/APIKEY/USERNAME';
var server = http.createServer(function(request, response){
console.log('Connection');
http.get(options, function(res){
var data = '';
res.on('data', function (chunk){
data += chunk;
});
res.on('end',function(){
var obj = JSON.parse(data);
console.log( obj );
})
});
response.end();
});
server.listen(8888);
When I connect to localhost:8888 I see 'Connection' in the console, and then the console logs the contents of the JSON object. This is as far as I have managed to get. I would appreciate any help and pointers on how to get that JSON object displayed and styled on my index page and kept up to date.
TIA,
Mark
Okay, now that I understand the problem, here is my answer. It's going to be a bit advice-laden, as there is no one true "right way" to do what you want. All we can be sure of is that yes, you are going to want to use WebSockets (or in this case socket.io, which can emulate WebSockets on older browsers).
Your current pull code is probably fine, though you're going to want to tweak it to run on a timeout so that the latest JSON is pulled every so often. In addition, we want to keep the various moving parts separate: reading from the API and writing the cache, watching the cache, and feeding the cache out to connected clients:
var http = require("http");
var fs = require('fs');
var url = 'http://api.trakt.tv/user/watching.json/APIKEY/USERNAME';
var cacheFile = '/path/to/cachefile';
var connectedSockets = [];
function fetchJson() {
http.get(url, function(res) {
body = '';
res.on('data', function(data) {
body += data;
});
res.on('end', function() {
fs.writeFileSync(cacheFile, body);
setTimeout(fetchJson, 1000); // Fetch it again in a second
});
})
}
fetchJson(); // Start fetching to our JSON cache
// Start watching our cache file
fs.watch(cacheFile, function(event, filename) {
if(event == 'change') {
fs.readFile(cacheFile, function(data) {
connectedSockets.forEach(function(socket) {
socket.emit('data', JSON.parse(data));
});
});
}
});
// Now setup our socket server
var io = require('socket.io').listen(8888);
io.sockets.on('connection', function(socket) {
connectedSockets.push(socket);
});
I don't handle disconnects here (you'll want to remove disconnected or errored sockets from the connectedSockets list; see the sketch below), and I didn't actually run this... but it should give you an idea of where to head.
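For completeness, a minimal sketch of that cleanup (same untested caveat as the code above):
io.sockets.on('connection', function(socket) {
  connectedSockets.push(socket);
  // Drop the socket from the list once it goes away, so we
  // stop emitting to dead connections
  socket.on('disconnect', function() {
    var idx = connectedSockets.indexOf(socket);
    if (idx !== -1) connectedSockets.splice(idx, 1);
  });
});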
On the client, it should be a matter of simply:
var socket = io.connect('http://localhost:8888');
socket.on('data', function (data) {
  // data will contain your JSON object; do your DOM manipulation here
});
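And a minimal way to get that object onto the page (a sketch: the element id 'watching' is hypothetical, and JSON.stringify is used only because the actual trakt.tv fields aren't shown here):
var socket = io.connect('http://localhost:8888');
socket.on('data', function (data) {
  // Hypothetical target element: <div id="watching"></div> in index.html
  document.getElementById('watching').textContent = JSON.stringify(data);
});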
