I'm trying to get some basic data from two different web APIs (battery status and contacts) and write it into my .txt file.
However, when I do that, only one piece of data gets written, as if it overwrites the other.
I know my code may look really bad, but I'm new to this and I really need help.
Code
var http = require('http');
var fs = require('fs');

// GET - Battery status
var options = {
    host: 'www.w3.org',
    port: 80,
    path: '/work'
};
http.get(options, function (response) {
    console.log("Response: " + response.statusCode);
    console.log("Header: " + JSON.stringify(response.headers));
    fs.writeFile("external-api.txt",
        "Responsecode: " + response.statusCode + "\nHeaders: " + JSON.stringify(response.headers),
        function (err) {
            if (err) console.log(err);
        });
}).on('error', function (e) {
    console.log("Error!: " + e.message);
});

// GET request - Contacts
var options = {
    host: 'www.google.com',
    port: 80,
    path: '/work'
};
http.get(options, function (response) {
    console.log("Response: " + response.statusCode);
    console.log("Header: " + JSON.stringify(response.headers));
    fs.writeFile("external-api.txt",
        "Responsecode: " + response.statusCode + "\nHeaders: " + JSON.stringify(response.headers),
        function (err) {
            if (err) console.log(err);
        });
}).on('error', function (e) {
    console.log("Error!: " + e.message);
});
Result: only one of the two responses ends up in external-api.txt.
Can anyone tell me what I'm doing wrong?
According to the Node.js docs:
fs.writeFile(file, data[, options], callback)
Asynchronously writes data to a file, replacing the file if it already exists. data can be a string or a buffer.
So, what you're looking for is:
fs.appendFile(file, data[, options], callback)
Asynchronously append data to a file, creating the file if it does not yet exist. data can be a string or a buffer.
Hope this helps
Why don't you use fs.appendFile?
fs.appendFile('message.txt', 'data to append', function (err) {
    if (err) throw err;
});
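Applied to the code above, a minimal sketch could look like this (the logResponse helper and its label argument are just for illustration; the URLs and file name are the ones from the question):
var http = require('http');
var fs = require('fs');

function logResponse(label, options) {
    http.get(options, function (response) {
        var line = label + " - Responsecode: " + response.statusCode +
            "\nHeaders: " + JSON.stringify(response.headers) + "\n";
        // appendFile adds to the end of the file instead of replacing it
        fs.appendFile("external-api.txt", line, function (err) {
            if (err) console.log("Write error: " + err.message);
        });
    }).on('error', function (e) {
        console.log("Error!: " + e.message);
    });
}

logResponse("Battery status", { host: 'www.w3.org', port: 80, path: '/work' });
logResponse("Contacts", { host: 'www.google.com', port: 80, path: '/work' });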
When I am posting a picture from my Electron app to blob storage, sometimes it works, and other times I get this error in my terminal:
When I was first working on this app, this problem never showed up until a week ago. It occurred without making any changes to this part of the app. Any idea what could cause it?
The Electron app goes white, and the dev tools are disconnected.
Here is the code:
var azure = require('azure-storage');
var blobSvc = azure.createBlobService('*connection keys inside here*');

function createBlob() {
    blobSvc.createContainerIfNotExists('photos', { publicAccessLevel: 'blob' }, function (error, result, response) {
        if (!error) {
            console.log(response);
        }
    });
    console.log("creating image for student#: " + stud_id);
    blobSvc.createBlockBlobFromStream('photos', stud_id + '.jpg', toStream(imgData), imgData.size, function (error, result, response) {
        if (!error) {
            console.log("file upload: \n" + JSON.stringify(result) + " \n" + JSON.stringify(response));
            createPerson();
        } else if (error) {
            console.log("error: " + JSON.stringify(error));
        }
    });
}
In your code, you actually call createBlockBlobFromStream immediately, probably before the container has been created. This may cause the problem.
So you would need to put it inside the callback of createContainerIfNotExists:
blobSvc.createContainerIfNotExists('photos', { publicAccessLevel: 'blob' }, function (error, result, response) {
    if (!error) {
        console.log(response);
        console.log("creating image for student#: " + stud_id);
        blobSvc.createBlockBlobFromStream('photos', stud_id + '.jpg', toStream(imgData), imgData.size, function (error, result, response) {
            if (!error) {
                console.log("file upload: \n" + JSON.stringify(result) + " \n" + JSON.stringify(response));
                createPerson();
            } else {
                console.log("error: " + JSON.stringify(error));
            }
        });
    }
});
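If the nesting grows, one possible variation (a sketch only, assuming Node 8+ for util.promisify and async/await; stud_id, imgData, toStream and createPerson come from the question's context) is to wrap both calls in promises:
const util = require('util');
const azure = require('azure-storage');

const blobSvc = azure.createBlobService('*connection keys inside here*');
const createContainer = util.promisify(blobSvc.createContainerIfNotExists).bind(blobSvc);
const uploadBlob = util.promisify(blobSvc.createBlockBlobFromStream).bind(blobSvc);

async function createBlob() {
    // Wait for the container to exist before uploading into it
    await createContainer('photos', { publicAccessLevel: 'blob' });
    console.log("creating image for student#: " + stud_id);
    const result = await uploadBlob('photos', stud_id + '.jpg', toStream(imgData), imgData.size);
    console.log("file upload: \n" + JSON.stringify(result));
    createPerson();
}

createBlob().catch(function (error) {
    console.log("error: " + JSON.stringify(error));
});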
I'm creating a script that makes a request twice per second to a local server of a camera network, and after it gets a positive response that a camera detected someone, I want to log three images.
In the JSON config file I have the triggerURL of the server, the interval port, the dataDir where logged images should be saved, and a track array which contains the URL of those images and the fileName they should receive.
This is the code of the script I use after reading the JSON file:
var http = require('http');
var fs = require('fs');

var configGet = {
    host: config.triggerURL,
    port: config.interval,
    method: 'GET'
};
setInterval(function () {
    var request = http.request(configGet, function (response) {
        var content = "";
        // Handle data chunks
        response.on('data', function (chunk) {
            content += chunk;
        });
        // Once we're done streaming the response, parse it as JSON.
        response.on('end', function () {
            var data = JSON.parse(response);
            if (data.track.length > 0) {
                // Log images
                var download = function (uri, filename, callback) {
                    request.head(uri, function (err, res, body) {
                        request(uri)
                            .pipe(fs.createWriteStream(filename))
                            .on('close', callback);
                    });
                };
                for (var image in data.track) {
                    var path = config.dataDir + '/' + image.fileName;
                    download(image.url, path.format(config.timestamp), function () {
                        console.log('done');
                    });
                }
            }
        });
    });
    // Report errors
    request.on('error', function (error) {
        console.log("Error while calling endpoint.", error);
    });
    request.end();
}, 500);
I have the following questions:
This method produces some kind of error in the download process of the images. Can you identify it?
Is there a better way of doing this?
Without running the code or inspecting it more deeply: shouldn't data = JSON.parse(response) rather be data = JSON.parse(content)? Also, if data is undefined or does not contain track, then if (data.track.length > 0) will throw an error. This can be fixed with if (data && data.track && data.track.length > 0).
I can't think of a much better way. I would break it up into more functions to make the code clearer, though.
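As a rough sketch of that, using only the core http and fs modules and assuming each entry of data.track has the url and fileName properties described in the question (config and configGet as above):
var http = require('http');
var fs = require('fs');

// Download one image URL to a local file
function download(uri, filename, callback) {
    http.get(uri, function (res) {
        res.pipe(fs.createWriteStream(filename)).on('close', callback);
    }).on('error', callback);
}

// Parse the poll response and trigger the downloads
function handleResponse(content) {
    var data = JSON.parse(content);
    if (data && data.track && data.track.length > 0) {
        data.track.forEach(function (image) {
            var filename = config.dataDir + '/' + image.fileName;
            download(image.url, filename, function (err) {
                console.log(err ? 'download failed' : 'done');
            });
        });
    }
}

setInterval(function () {
    http.request(configGet, function (response) {
        var content = "";
        response.on('data', function (chunk) { content += chunk; });
        response.on('end', function () { handleResponse(content); });
    }).on('error', function (error) {
        console.log("Error while calling endpoint.", error);
    }).end();
}, 500);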
I was wondering if we can measure the time it takes for an HTTP request to complete using Node.js. Modifying slightly an example from the documentation (here), one can easily write the following code.
var http = require('http');

var stamp1 = new Date();
var stamp2, stamp3, stamp4;

var options = {
    hostname: 'www.google.com',
    port: 80,
    path: '/upload',
    method: 'POST'
};

var req = http.request(options, function (res) {
    stamp3 = new Date();
    console.log('STATUS: ' + res.statusCode);
    console.log('HEADERS: ' + JSON.stringify(res.headers));
    res.setEncoding('utf8');
    res.on('data', function (chunk) {
        console.log('BODY: ' + chunk);
    });
    res.on('end', function () {
        stamp4 = new Date();
        console.log("Stamp 3: " + stamp3);
        console.log("Stamp 4: " + stamp4);
    });
});

req.on('error', function (e) {
    console.log('problem with request: ' + e.message);
});

// write data to request body
req.write('data\n');
req.write('data\n');
req.end();

stamp2 = new Date();
console.log("Stamp 1: " + stamp1);
console.log("Stamp 2: " + stamp2);
Now let me come to my point. On the response side one can easily measure the time the response takes, since stamp3 is set at the beginning and stamp4 is set on end. So, in principle, for relatively large amounts of data these two timestamps will differ.
However, the question I have is whether stamps 1 and 2 actually measure what is happening while the request is being prepared and dispatched. In other words, is req.write(...) a synchronous operation? Based on Node.js principles I would expect req.write(...) to be an asynchronous operation where one can pass an arbitrarily large document and then, upon successful completion, have a callback telling us that the request has finished.
Comments?
Two functions already exist for that:
console.time(id) starts the timer.
console.timeEnd(id) stops the timer and prints id followed by the elapsed time in ms.
So in your case:
var req = http.request(options, function (res) {
    console.time('Request'); // Start the timer
    stamp3 = new Date();
    console.log('STATUS: ' + res.statusCode);
    console.log('HEADERS: ' + JSON.stringify(res.headers));
    res.setEncoding('utf8');
    res.on('data', function (chunk) {
        console.log('BODY: ' + chunk);
    });
    res.on('end', function () {
        stamp4 = new Date();
        console.log("Stamp 3: " + stamp3);
        console.log("Stamp 4: " + stamp4);
        console.timeEnd('Request'); // Will print "Request: X ms", X being the elapsed time
    });
});
The docs mention no callback, so I'm assuming req.write as well as req.end are synchronous.
So in your case, if you're referring to just that one request that you're initializing, I think the time measurement should be accurate. I don't expect the time difference to be very big, though (perhaps even within the same millisecond).
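If millisecond resolution from Date is too coarse, a possible sketch using process.hrtime (reusing the options object from the question; the measurement points are request start, arrival of the response headers, and end of the body) would be:
var http = require('http');

var start = process.hrtime();

var req = http.request(options, function (res) {
    var headersAt = process.hrtime(start); // [seconds, nanoseconds] since start
    res.on('data', function () {});        // drain the body so 'end' fires
    res.on('end', function () {
        var total = process.hrtime(start);
        console.log('Headers after ' + (headersAt[0] * 1e3 + headersAt[1] / 1e6).toFixed(2) + ' ms');
        console.log('Body finished after ' + (total[0] * 1e3 + total[1] / 1e6).toFixed(2) + ' ms');
    });
});

req.on('error', function (e) {
    console.log('problem with request: ' + e.message);
});

req.write('data\n');
req.end();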
So I have Node.js and installed the xml2js module. In the tutorial there is an example that takes an XML file from a directory and converts it with JSON.stringify(), as shown below. Now, is there a way, instead of reading the local XML file (foo.xml), to call the URL of an XML service, e.g. www.wunderground.com/city.ect/$data=xml?
var xml2js = require('xml2js');
var fs = require('fs');

var parser = new xml2js.Parser();
parser.addListener('end', function (result) {
    var res = JSON.stringify(result);
    console.log('converted');
});
fs.readFile(__dirname + '/foo.xml', function (err, data) {
    parser.parseString(data);
});
You need to make an HTTP request instead of reading a file. Something like this, I think (collect the whole body before parsing, since the XML may arrive in several chunks):
http.get("http://www.google.com/index.html", function (res) {
    var xml = "";
    res.on('data', function (chunk) {
        xml += chunk;
    });
    res.on('end', function () {
        parser.parseString(xml);
    });
}).on('error', function (e) {
    console.log("Got error: " + e.message);
});
http://nodejs.org/api/http.html#http_http_request_options_callback
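Putting it together, a small sketch of the whole flow might look like this, assuming the callback form of parseString and using the weather URL from the question as a stand-in for any XML endpoint:
var http = require('http');
var xml2js = require('xml2js');

http.get("http://www.wunderground.com/city.ect/$data=xml", function (res) {
    var xml = "";
    res.on('data', function (chunk) { xml += chunk; });
    res.on('end', function () {
        // Callback form of parseString: err is set if the XML is malformed
        xml2js.parseString(xml, function (err, result) {
            if (err) return console.log("Parse error: " + err.message);
            console.log(JSON.stringify(result));
        });
    });
}).on('error', function (e) {
    console.log("Got error: " + e.message);
});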
I'm having trouble extracting the response body of a POST request in Node.js. I'm expecting the response 'access_token=...'.
It should be pretty simple; I'm just not sure what I should be doing. (Node v0.4.3)
Here's my code snippet.
var payload = 'client_id=' + client_id + '&client_secret=' + client_secret +
    '&code=' + code;

var options = {
    host: 'github.com',
    path: '/login/oauth/access_token?',
    method: 'POST'
};

var access_req = https.request(options, function (response) {
    response.on('error', function (err) {
        console.log("Error: " + err);
    });
    // response.body is undefined
    console.log(response.statusCode);
});

access_req.write(payload);
access_req.end();
console.log("Sent the payload " + payload + "\n");
res.send("(Hopefully) Posted access exchange to github");
You'll need to bind a listener to the response's 'data' event. Something like this:
var access_req = https.request(options, function (response) {
    response.on('data', function (chunk) {
        console.log("Body chunk: " + chunk);
    });
});
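If you want the token itself rather than raw chunks, one approach (a sketch, assuming the body really is the form-encoded access_token=... string you expect, and reusing options and payload from your snippet) is to accumulate the chunks and parse them on 'end':
var https = require('https');
var querystring = require('querystring');

var access_req = https.request(options, function (response) {
    var body = "";
    response.on('data', function (chunk) {
        body += chunk;
    });
    response.on('end', function () {
        // e.g. "access_token=abc123&token_type=bearer"
        var parsed = querystring.parse(body);
        console.log("Access token: " + parsed.access_token);
    });
});

access_req.write(payload);
access_req.end();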
As Miikka says, the only way to get the body of a response in Node.js is to bind an event and wait for each chunk of the body to arrive. There is no response.body property. The http/https modules are very low-level; for nearly all applications, it makes more sense to use a higher-level wrapper library.
In your case, mikeal's request library is perfectly suited to the task. It waits until the full body of the response has arrived, then calls your callback of the form (err, response, body).
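For example, a sketch with request (client_id, client_secret and code as in your snippet; the form option sends a URL-encoded body, and body arrives fully buffered):
var request = require('request');

request.post('https://github.com/login/oauth/access_token', {
    form: { client_id: client_id, client_secret: client_secret, code: code }
}, function (err, response, body) {
    if (err) return console.log("Error: " + err);
    // body is the complete response, e.g. "access_token=abc123&..."
    console.log("RESPONSE: " + body);
});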
For parsing request bodies, you would probably want to use Connect with the bodyParser middleware (or the popular Express, which further extends Connect).