Save file automatically from IFrame in Node Webkit - javascript

I am using node-webkit to automate some common tasks.
I have an iframe which goes to a site address, then clicks save, and a file save dialog pops up.
Is there any way I can catch that event and save the file without requiring an external action (like setting the save folder and clicking Save)?

You may not be able to do it that way, but have you thought about just doing an HTTP GET request with node's http module? That's really the beauty of node-webkit: you get to use node.js!
var http = require('http');
var fs = require('fs');
var path = require('path');

var saveLocation = path.join(__dirname, "cache", "file.txt");

// The url we want is: 'www.random.org/integers/file.txt'
var options = {
  host: 'www.random.org',
  path: '/integers/file.txt'
};

var callback = function(response) {
  var str = '';

  // Another chunk of data has been received, so append it to `str`.
  response.on('data', function (chunk) {
    str += chunk;
  });

  // The whole response has been received, so print it and save it to disk.
  response.on('end', function () {
    console.log(str);
    fs.writeFile(saveLocation, str, function (err) {
      if (err) console.log("Problem Saving File.");
    });
  });
};

// Send the request.
http.request(options, callback).end();
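Note that appending chunks to a string only really suits text responses; for a binary file you could pipe the response straight into a write stream instead. A minimal sketch along the same lines, using the same (assumed) host and path:

var http = require('http');
var fs = require('fs');
var path = require('path');

// Hypothetical destination inside the app folder; adjust to taste.
var saveLocation = path.join(__dirname, "cache", "file.txt");

http.get({ host: 'www.random.org', path: '/integers/file.txt' }, function (response) {
  // Stream the body to disk without buffering it all in memory.
  response.pipe(fs.createWriteStream(saveLocation));
});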

Related

Error while creating file by fs on Node.js server

I have a Node.js server and I'm trying to parse a captcha by uploading a BMP image to the server. There are two problems I'm facing.
The image is not uploaded correctly. When I try to open the image it gives me the error "BMP image has bogus header data".
Currently I'm reading a buffer from the uploaded data and parsing it to extract the captcha string. The captcha string comes out wrong (the result shows only the correct last 3 characters instead of all 6).
This is the code I'm using:
app.use(function(req, res, next) {
  console.log("start");
  var data = "";
  req.on('data', function(chunk) {
    data += chunk;
  });
  req.on('end', function() {
    console.log("end");
    req.body = data;
    next();
  });
});
var DoneInSync = fibrous(function(buffer) {
  var val = captcha.getCaptcha(buffer);
  console.log("this" + val);
  return val;
});
app.post('/', function (req, res) {
  var buffer = new Buffer(req.body);
  fs.writeFileSync("captchas_ass.bmp", buffer);
  var val = DoneInSync.sync(buffer);
  res.write("Yoing -> " + val);
  res.end();
});
The captcha parser code itself runs perfectly and has been tested thoroughly offline.
The main problem I'm facing is uploading the BMP file to the server. Once that works I can do this
var buf = fs.readFileSync("captcha.bmp");
and pass buf to captcha.getCaptcha(buf) to get the result.
Can someone explain where the upload is going wrong?
Your middleware is stringifying the incoming data, which it shouldn't be doing. Try this:
app.use(function(req, res, next) {
  var data = [];
  req.on('data', function(chunk) {
    data.push(chunk);
  });
  req.on('end', function() {
    console.log("end");
    req.body = Buffer.concat(data); // `req.body` will be a Buffer
    next();
  });
});
This only works if you're uploading the data as a "raw" POST request. If you're using something like multipart/form-data, it won't work either; for that you should use a middleware like multer.
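A minimal sketch of a multer route, assuming multer is installed and the form sends the image in a field named captcha (both are assumptions, not taken from the question):

var express = require('express');
var fs = require('fs');
var multer = require('multer');

var app = express();
var upload = multer({ dest: 'uploads/' }); // uploaded files land in ./uploads

app.post('/', upload.single('captcha'), function (req, res) {
  // multer puts the uploaded file's metadata on req.file
  var buffer = fs.readFileSync(req.file.path);
  res.end('Received ' + buffer.length + ' bytes');
});

app.listen(3000);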

how can I download only the first 10 kilobytes of a file in node.js

In node.js, how can I download only a specific amount of a file, for example just the first 10 kilobytes?
In my project I need to extract the duration and bitrate of a remote MP3, and the only way I can think of is to download just the first few bytes instead of the entire file.
As someone said here, if you download (at least) the first 32 kB of the file you should be OK for most MP3 files.
In Node.js you are notified whenever a chunk of data arrives. When you make an HTTP request you get a response object that contains the headers; that response object is also a stream that emits events. The one you want is 'data': each time it fires you get a buffer containing only the newly received bytes (previously received chunks are not included). Keep a buffer of your own and append each chunk to it; you can check its length at any time and stop downloading once you have enough data by calling the response's destroy method. Here is an example:
var http = require("http");
var buff = new Buffer(0);
http.get("http://epfl.ch", function(res) {
res.on('data', function(chunk) {
buff = Buffer.concat([buff, chunk]);
if (buff.length > 10240) {
res.destroy();
console.log(buff);
}
});
})
This code waits until it has fetched 10 kB and then ends the request. You can then do whatever you want with the data (buff).
If you want to save the data to a file while downloading, you can do this instead:
var http = require("http");
var buff = new Buffer(0);
var fs = require("fs");
var file = fs.createWriteStream("file.mp3");
http.get("http://epfl.ch", function(res) {
res.pipe(file);
res.on('data', function(chunk) {
buff = Buffer.concat([buff, chunk]);
if (buff.length > 10240) {
res.destroy();
file.close();
console.log(buff);
}
});
})
This code creates a write stream for the file and pipes the body of the response into it (i.e. each chunk of data received is appended to the file).
If you don't need the buffer itself, you don't have to keep it; just count the number of bytes received and stop when you have enough:
var http = require("http");
var bytesRecieved = 0;
var fs = require("fs");
var file = fs.createWriteStream("file.mp3");
http.get("http://epfl.ch", function(res) {
res.pipe(file);
res.on('data', function(chunk) {
bytesRecieved += chunk.length;
if (bytesRecieved > 10240) {
res.destroy();
file.close();
}
});
})
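An alternative worth knowing about: if the server supports it, you can ask up front for only the first bytes with an HTTP Range header, so nothing extra gets transferred at all. A sketch, assuming the server honours Range requests (many do, but it is not guaranteed):

var http = require("http");

var options = {
  host: "epfl.ch",
  path: "/",
  headers: { "Range": "bytes=0-10239" } // request only the first 10 kB
};

http.get(options, function(res) {
  var chunks = [];
  // A server that honours the header answers with 206 Partial Content.
  console.log("status:", res.statusCode);
  res.on('data', function(chunk) {
    chunks.push(chunk);
  });
  res.on('end', function() {
    console.log(Buffer.concat(chunks).length, "bytes received");
  });
});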

call two files from another file with node js

I have a file called node.js:
var net = require('net');
var crypto = require('crypto');
//sjcl
var sjcl = require('./sjcl');
//retrieve fb profile
var loadFb = require('./loadFb.js');
var loadFeed = require('./loadFeed.js');
//read json user file
var fs = require('fs');
var text = fs.readFileSync(__dirname + '/users', 'utf8');

var HOST = 'localhost';
var PORT = 7000;

net.createServer(function(sock) {
  // We have a connection - a socket object
  console.log('CONNECTED: ' + sock.remoteAddress + ':' + sock.remotePort);
  // Add a 'data' event handler to this instance of socket
  sock.on('data', function(data) {
    console.log('User request profile of: ' + data);
    //var date = (data.toString()).split("***");
    //var from = date[1];
    loadFb(extendetPath, function(pageData) {
      loadFeed(extendetPath2, function(pageData2) {
        var fs = require('fs');
        var profileText = fs.readFileSync('/tmp/profile', 'utf8');
        console.log(profileText);
        sock.write(profileText);
      });
    });
  });
  // Add a 'close' event handler to this instance of socket
  sock.on('close', function(data) {
    console.log('CLOSED: ' + sock.remoteAddress + ' ' + sock.remotePort);
  });
}).listen(PORT);

console.log('Server listening on ' + HOST + ':' + PORT);

function returnKeyFromUser(id) {
  //text
  var trovata = false;
  var dati = JSON.parse(text);
  for (var i = 0; i < dati.friendlist.friend.length && trovata == false; i++) {
    var user = (dati.friendlist.friend[i].username).replace("\n", "");
    var userID = (id).replace("\n", "");
    if (user == userID) {
      trovata = true;
      return ((dati.friendlist.friend[i].publicKey).toString()).replace("\n", "");
    }
  }
  if (trovata == false)
    return null;
}
It is a small server that receives a Facebook username and has to retrieve two pages:
a Graph API page with the profile information, and a Graph API page with the feed information of a Facebook profile.
Here are the other two files:
var https = require('https');

module.exports = function(path, callback) {
  var options = {
    host: 'graph.facebook.com',
    port: 443,
    path: (path.toString()).replace("\n", ""),
    method: 'GET'
  };
  var req = https.get(options, function(res) {
    var pageData = "";
    //for load only (I hope facebook profile)
    if ((path.toString()).indexOf("/") == 0 && (path.toString()).indexOf("/GET / HTTP/") != 0) {
      console.log(options);
      res.setEncoding('utf8');
      res.on('data', function (chunk) {
        pageData += chunk;
      });
      res.on('end', function() {
        var fs = require('fs');
        fs.writeFile("/tmp/profile", pageData, function(err) {
          if (err) {
            console.log(err);
          } else {
            console.log("The file was saved!");
          }
        });
        //callback(pageData);
        return;
      });
    }
  });
};
The third file:
var https = require('https');

module.exports = function(path, callback) {
  var options = {
    host: 'graph.facebook.com',
    port: 443,
    path: (path.toString()).replace("\n", ""),
    method: 'GET'
  };
  var req = https.get(options, function(res) {
    var pageData = "";
    //for load only (I hope facebook profile)
    if ((path.toString()).indexOf("/") == 0 && (path.toString()).indexOf("/GET / HTTP/") != 0) {
      console.log(options);
      res.setEncoding('utf8');
      res.on('data', function (chunk) {
        pageData += chunk;
      });
      res.on('end', function() {
        var fs = require('fs');
        fs.appendFile('/tmp/profile', "***" + pageData, function (err) {
          if (err) throw err;
          console.log('It\'s saved!');
        });
        callback(pageData);
      });
    }
  });
};
I don't know if there is a better way to call the two files from the first file (node.js), but what I do is this: node.js calls the first file, and the second file calls the third.
In node.js I call the first file, loadFb.js, with this command:
loadFb(extendetPath, function(pageData)
{
This call saves a file in my /tmp/profile directory, and inside its callback I call the other file, loadFeed, which appends some text.
After that I have to send the whole of the information to the client, but something goes wrong.
node.js correctly calls loadFb, which writes /tmp/profile, and then calls loadFeed,
but before the extra information is appended, node sends back to the client only half of the information I need.
I'm not a good node.js programmer; this is work for my thesis.
Can someone help me?
Let's look at the following code:
res.on('end', function() {
  var fs = require('fs');
  fs.appendFile('/tmp/profile', "***" + pageData, function (err) {
    if (err) throw err;
    console.log('It\'s saved!');
  });
  callback(pageData);
});
What it does is run the asynchronous method appendFile and immediately afterwards call callback. So when the code in the callback executes, the file has not been updated yet. You need to move callback(pageData); into appendFile's own callback. Review the rest of your code with this in mind, because the same fix looks necessary in the other file, and there may be similar places as well.
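In other words, roughly this (a sketch of the corrected 'end' handler, keeping the original names):

res.on('end', function() {
  var fs = require('fs');
  fs.appendFile('/tmp/profile', "***" + pageData, function (err) {
    if (err) throw err;
    console.log('It\'s saved!');
    // Only now is the file complete, so it is safe to hand control back.
    callback(pageData);
  });
});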

Run file after file download complete

I am very new to node-webkit. I am using the following code to download a file. How would I go about running the file automatically once the download has finished?
var https = require('https');
var fs = require('fs');

var file = fs.createWriteStream("update_setup.exe");
var request = https.get(url + "/appdata/update_setup.exe", function (response) {
  response.pipe(file);
});
Just use the writable stream's close event and spawn a child process. The event will fire once the response has completed piping to the stream.
var https = require('https');
var fs = require('fs');
var exec = require('child_process').exec;

var file = fs.createWriteStream('update_setup.exe');
// `path` stands for the full download URL from your question.
var request = https.get(path, function(res) {
  res.pipe(file);
});

file.on('close', function() {
  exec('update_setup.exe', function(err, stdout, stderr) {
    // output from starting the installer is available here
  });
});
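If the .exe ends up somewhere other than the process's working directory, resolving an absolute path first avoids a "file not found" error. A small sketch, assuming the installer is saved next to the script:

var path = require('path');
var execFile = require('child_process').execFile;

file.on('close', function() {
  // Build an absolute path to the downloaded installer and run it directly.
  var installer = path.join(__dirname, 'update_setup.exe');
  execFile(installer, function(err, stdout, stderr) {
    if (err) console.log('Failed to start installer:', err);
  });
});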

Problem receiving response from node server using ajax post request

I have written an HTTP server using node.js:
var sys = require("sys"),
http = require("http"),
url = require("url"),
path = require("path"),
fs = require("fs");
http.createServer(function(request, res) {
var parsed_url = url.parse(request.url);
var uri = parsed_url.pathname;
if(uri === "/test"){
res.writeHead(200, {'Content-Type': 'text/javascript'});
request.addListener('data', function (chunk) {
var data = eval("(" + chunk + ")");
console.log(data[0].id);
})
request.addListener('end', function() {
console.log('end triggered');
res.write("Post data");
res.end();
});
}
}).listen(8080);
I am trying to send back a response to the AJAX request, but I am unable to receive any response. Here is the code for the AJAX request:
var myhttp = new XMLHttpRequest();
var url = "http://localhost:8080/test";
var data = [{"a":"1"},{"b":"2"},{"c":"3"}];
var dataJson = JSON.stringify(data);

myhttp.open('POST', url, true);
myhttp.send(dataJson);
myhttp.onreadystatechange = function() {
  if ((myhttp.readyState == 4) && (myhttp.status == 200)) {
    alert(myhttp.responseText);
  }
  else if ((myhttp.readyState == 4) && (myhttp.status != 200)) {
    console.log("Error in Connection");
  }
};
Can anyone help me figure out what I am doing wrong?
Thanks,
Vinay
Your code is almost right, but in your code sample you have
console.log(data[0].id)
and the data object has no property id. If instead you use
console.log(data[0])
you get a response like
{ a: '1' }
so you can access the property a by doing
console.log(data[0].a);
UPDATE: updated with a full example.
One more thing: you are using eval, and node comes with JSON.parse bundled with it, so the snippet below is how I made it work.
File: app.js
var sys = require("sys"),
http = require("http"),
url = require("url"),
path = require("path"),
fs = require("fs");
http.createServer(function(request, res) {
var parsed_url = url.parse(request.url);
var uri = parsed_url.pathname;
if(uri === "/test"){
res.writeHead(200, {'Content-Type': 'text/javascript'});
request.addListener('data', function (chunk) {
// removed this - eval("(" + chunk + ")");
var data = JSON.parse(chunk);
console.log(data[0].a);
})
request.addListener('end', function() {
console.log('end triggered');
res.write("Post data");
res.end();
});
} else if(uri === "/") {
fs.readFile("./index.html",function(err, data){
if(err) throw err;
res.writeHead(200, {'Content-Type': 'text/html'});
res.end(data);
});
}
}).listen(8080);
In the same directory create a file index.html with the following:
<html>
  <head>
    <script type="text/javascript" charset="utf-8">
      var myhttp = new XMLHttpRequest();
      var url = "http://localhost:8080/test";
      var data = [{"a":"1"},{"b":"2"},{"c":"3"}];
      var dataJson = JSON.stringify(data);
      myhttp.open('POST', url, true);
      myhttp.send(dataJson);
      myhttp.onreadystatechange = function() {
        if ((myhttp.readyState == 4) && (myhttp.status == 200)) {
          alert(myhttp.responseText);
        }
        else if ((myhttp.readyState == 4) && (myhttp.status != 200)) {
          console.log("Error in Connection");
        }
      };
    </script>
  </head>
  <body>
  </body>
</html>
That is a complete working example of what you want.
The same-origin policy issues you were having are mainly due to the fact that you can't POST data between two different domains via AJAX unless you use some tricks with iframes, but that is another story.
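As a side note, a server can also opt in to cross-origin requests by sending CORS headers, which avoids the iframe tricks entirely; a minimal sketch of what the node handler could add (the wildcard origin is only for illustration):

// Inside the createServer callback, before writing the body:
res.writeHead(200, {
  'Content-Type': 'text/javascript',
  // Allow any origin to read the response; restrict this in real use.
  'Access-Control-Allow-Origin': '*'
});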
Also, I think it is good for anyone to understand the backbone of a technology before moving on to frameworks, so fair play to you.
Good luck.
You have to read the data in a different way. Posted data arrives at a node server in chunks (the 'data' event) that have to be collected until the 'end' event fires. Inside that event you are able to access your payload.
var body = '';
request.addListener('data', function (chunk) {
  body += chunk;
});
request.addListener('end', function() {
  console.log(body);
  res.write('post data: ' + body);
});
Additionally, there seem to be some issues with your client-side code (especially the status-code checks), but I can't really help you with those, as I always work with frameworks like jQuery to manage async requests.
If you want to build reliable node.js servers for web use, I highly recommend the high-performance HTTP framework Express. It takes away a lot of the pain of developing a web-based server application in node and is actively maintained.
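For comparison only, a minimal Express version of the /test route might look roughly like this (a sketch assuming Express 4.16+, where express.json() is built in):

var express = require('express');
var app = express();

// Parse JSON request bodies into req.body.
app.use(express.json());

app.post('/test', function (req, res) {
  // req.body is the parsed array sent by the client.
  console.log(req.body[0].a);
  res.send('Post data');
});

app.listen(8080);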
