My Node.js app connects through var socket = net.createConnection(port, ip); to download a file from another server. As soon as the connection is made, the server sends the file as data.
I then catch it by doing:
socket.on('data', function(data) {
}).on('connect', function() {
}).on('end', function() {
console.log('DONE');
});
My goal is to download the file using the method above and, at the same time, hand the bytes to the client's browser as a downloadable file. For example: the user clicks a button on the site, which triggers the server-side download function, and the user gets a file-save prompt. Node.js then downloads the file from the remote server and at the same time gives each new byte to the user's browser. Is this possible? I imagine it would need to send octet-stream headers to trigger a file transfer between the browser and Node.js. But how?
Update
I have now tried the code below, with the help of the answer below:
app.get('/download', function (req, res) {
res.setHeader('Content-disposition', 'attachment; filename=' + "afile.txt");
res.setHeader('Content-Length', "12468");
var socket = net.createConnection(1024, "localhost");
console.log('Socket created.');
socket.on('data', function(data) {
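// note: attaching the pipe inside the 'data' handler misses the chunk that
// fired this event and re-attaches the pipe on every subsequent chunk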
socket.pipe(res)
}).on('connect', function() {
// // Manually write an HTTP request.
// socket.write("GET / HTTP/1.0\r\n\r\n");
}).on('end', function() {
console.log('DONE');
socket.end();
});
});
The data is being sent to the user's browser as a download, but the end result is a broken file. I checked the contents, and it seems that something along the way corrupts the file.
Do I now have to write the response byte by byte rather than using socket.pipe?
You need to set the Content-Disposition header in your HTTP response:
response.writeHead(200, {
'Content-Disposition': 'attachment; filename=genome.jpeg; modification-date="Wed, 12 Feb 1997 16:29:51 -0500"'
});
yourDataStream.pipe(response);
See RFC 2183.
It looks like you may want this:
app.get('/download', function (req, res) {
res.attachment('afile.txt');
require('http').get('http://localhost:1234/', function(response) {
response.pipe(res);
}).on('error', function(err) {
res.status(500).send(err.message);
});
});
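Note that res.attachment(filename) sets the Content-Disposition header to "attachment" with the given filename and, from the file extension, sets the Content-Type as well.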
I found the solution!
By doing res.write(d) I was able to direct the bytes from the other connection to the user's browser download:
app.get('/download', function (req, res) {
res.setHeader('Content-disposition', 'attachment; filename=' + "afile.jpg");
res.setHeader('Content-Length', "383790");
res.setHeader('Content-Type','image/jpeg');
var socket = net.createConnection(1024, "localhost");
console.log('Socket created.');
//socket.setEncoding("utf8");
socket.on('data', function(d) {
console.log(d);
res.write(d);
}).on('connect', function() {
// // Manually write an HTTP request.
// socket.write("GET / HTTP/1.0\r\n\r\n");
}).on('end', function() {
console.log('DONE');
socket.end();
res.end(); // finish the HTTP response so the browser knows the download is complete
});
});
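In hindsight, the broken file from the earlier attempt most likely came from calling socket.pipe(res) inside the 'data' handler: the chunk that fired the event never reaches the response, and the pipe is re-attached on every later chunk. Piping once, outside the handler, should also work; a minimal sketch, assuming the same local test server on port 1024:
app.get('/download', function (req, res) {
  res.setHeader('Content-disposition', 'attachment; filename=afile.jpg');
  res.setHeader('Content-Type', 'image/jpeg');
  var socket = net.createConnection(1024, "localhost");
  // one pipe for the whole stream; pipe() ends res when the socket ends
  socket.pipe(res);
  socket.on('error', function (err) {
    console.error(err);
    res.end();
  });
});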
Related
I have some Node.js code where, basically, this is what happens:
HTTP request (app.get) -> send a request to a low-level socket (using net.Socket()) -> get the response from the socket -> res.send(response from socket)
This doesn't work as written because net.Socket() uses async callbacks and events (client.on("data", callback)).
I tried something like this:
app.get("/", function(req, res){
client.connect("localhost", 420, function(){
client.write("example data");
});
client.on("data", function(data){
client.destroy();
res.send(data);
});
});
But it doesn't work, because it says I am re-sending the headers (the res object won't change, since the callback is an event handler, not a sync function).
Any ideas? Or a library for synchronous socket requests? I have tried the following:
Synket
sync-socket
netlinkwrapper
And they don't work.
Edit: I am trying something like this:
async function sendData(client, res){
client.on('data', function(data){
console.log("Got data!");
res.send(""+data);
res.end();
console.log("Sent data!");
client.destroy();
console.log("Killed connection");
return;
});
}
app.get("/", function(req, res){
var body = req.query;
client.connect(420, "localhost", function(){
client.write("some random data");
console.log("Connected & Data sent!");
sendData(client, res);
});
});
It works the first time I try to access the page, but the second time the app crashes and I get this error:
_http_outgoing.js:489
throw new Error('Can\'t set headers after they are sent.');
^
Error: Can't set headers after they are sent.
client triggers the data event multiple times.
app.get("/", function(req, res){
client.connect(420, "localhost", function(){
client.write("example data");
});
client.pipe(res);
});
It turns out that each time a request comes to that endpoint, a new subscription is registered by calling client.on('eventName', cb). So calls from the second one onward trigger multiples of those registrations and cause that header error.
So, a workaround for that:
socket.on('event', (packet) => {
socket.removeAllListeners('event')
res.json({ packet });
});
That would do the trick, but I'm not sure whether it's bad practice to continuously add and remove the same listener.
Edit
Found a better way: once ensures that the registered handler runs only once, just as we want:
socket.once('event', (packet) => {
res.json({ packet });
});
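Applied to the original route, a minimal sketch might look like this (client and port taken from the question):
app.get("/", function (req, res) {
  client.connect(420, "localhost", function () {
    client.write("example data");
  });
  // once() removes itself after the first chunk, so the next request
  // never hits a stale handler holding on to an old res object
  client.once("data", function (data) {
    res.send("" + data);
  });
});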
I'm using Electron to build an app which serves several image files from a webserver using Express.
From another app, built in Android, I get files from the server and post files to it.
I have no problem detecting when the Android app is posting the files:
app.post('/fileupload', function(req, res) {
alert("post");
var fstream;
req.pipe(req.busboy);
req.busboy.on('file', function (fieldname, file, filename) {
//console.log("Uploading: " + filename);
fstream = fs.createWriteStream(__dirname + '/images/' + filename);
file.pipe(fstream);
fstream.on('close', function () {
res.redirect('back');
});
});
});
But I still have no success detecting when the Android app gets the files from the server (it gets them, but I have no way to refresh my output screen when it does). I'm trying with this code:
app.use(function (req, res, next) {
alert("get");
next();
});
and this one too:
app.get('/', function (req, res, next) {
alert("get");
next();
});
I'm putting the files in a directory called images:
var express = require('express')
var app = express()
app.use(express.static('images'));
app.listen(3000);
EDIT
If I open a browser with the same URL Android is getting, it triggers the event and shows the alert. Why it doesn't trigger when Android opens the connection, I don't know.
The Android code for the GET request is:
URL url = new URL(sURL);
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod("GET");
connection.connect();
int lengthOfFile = connection.getContentLength();
InputStream inputURL = new BufferedInputStream(url.openStream());
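// note: url.openStream() opens a new, second connection rather than reusing 'connection' above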
If you are calling APIs, you should respond with JSON via res.send():
app.post('/fileupload', function(req, res) {
alert("post");
var fstream;
req.pipe(req.busboy);
req.busboy.on('file', function (fieldname, file, filename) {
//console.log("Uploading: " + filename);
fstream = fs.createWriteStream(__dirname + '/images/' + filename);
file.pipe(fstream);
fstream.on('close', function () {
res.send({status:1,data:{}}) //check this response on android side and change/refresh screen on android side if needed
//res.redirect('back');
});
});
});
Using the code below, which is deprecated as of my compile version (23), Express detects the GET request:
HttpClient httpclient = new DefaultHttpClient();
// Prepare a request object
HttpGet httpget = new HttpGet(sURL.substring(0, sURL.lastIndexOf("/")));
// Execute the request
HttpResponse response;
response = httpclient.execute(httpget);
I am really new to JavaScript and Node.js. I have various image URLs that I want to buffer. I have tried the request npm module but want a lower-level library for what I want to achieve.
For example:
http://assets.loeildelaphotographie.com/uploads/article_photo/image/128456/_Santu_Mofokeng_-_TOWNSHIPS_Shebeen_Soweto_1987.jpg
I see lots of examples that suggest using the request module, or examples that save files to disk. However, I cannot find an HTTP GET example that simply buffers the image so I can pass it to another function. It needs to have an "end" event so I can upload the buffered image data with confidence in another step. Is there a sample pattern or how-to someone could provide? Thanks!
This is the native way:
var http=require('http'), imageBuffer;
http.get(
'http://www.kame.net/img/kame-anime-small.gif',
function(res) {
var body=Buffer.alloc(0);
if (res.statusCode!==200) {
return console.error('HTTP '+res.statusCode);
}
res.on('data', function(chunk) {
body=Buffer.concat([body, chunk]);
});
res.on('end', function() {
imageBuffer=body;
});
res.on('error', function(err) {
console.error(err);
});
}
);
// Small webserver serving the image at http://127.0.0.1:4567
http.createServer(function(req, res) {
res.write(imageBuffer || 'Please reload page');
res.end();
}).listen(4567, '127.0.0.1');
And using request (encoding: null makes the response body a binary Buffer):
var request=require('request'), imageBuffer;
request({
uri: 'http://www.kame.net/img/kame-anime-small.gif',
encoding: null
}, function(err, res, body) {
if (err) {
return console.error(err);
} else if (res.statusCode!==200) {
return console.error('HTTP '+res.statusCode);
}
imageBuffer=body;
});
// Small webserver serving the image at http://127.0.0.1:4567
require('http').createServer(function(req, res) {
res.write(imageBuffer || 'Please reload page');
res.end();
}).listen(4567, '127.0.0.1');
Here's a simple example using the built-in streaming that the http response has:
var http = require('http');
var fs = require('fs');
var file = fs.createWriteStream("test.png");
var request = http.get("some URL to an image", function(response) {
response.pipe(file);
});
I ran this myself and successfully downloaded an image from an external website, saved it to a file, and then loaded the file in the browser to confirm it was the same image.
I am trying to upload a file to an FTP server using Node.js, as below.
I am using this library: https://github.com/sergi/jsftp
var fs = require('fs');
var JSFtp = require('jsftp'); // was missing; JSFtp is used below
var Ftp = new JSFtp({
host: "ftp.some.net",
port: 21, // defaults to 21
user: "username", // defaults to "anonymous"
pass: "pass", // defaults to "@anonymous"
debugMode: true
});
Uploading the file:
exports.UploadToFtP = function (req, res) {
Ftp.put('public/Test.html', '/Test/index.html', function (err) {
if (!err)
res.send(200);
else
res.send(err);
});
};
I tried uploading a file with the method above and it responds with 200 OK, but I get no file on the server.
Does this have something to do with the server's connection timeout? Why is this not writing the file on the server?
If debug mode is on, the jsftp instance emits jsftp_debug events.
To print all debug events, we would listen for the debug messages like this:
Ftp.on('jsftp_debug', function(eventType, data) {
console.log('DEBUG: ', eventType);
console.log(JSON.stringify(data, null, 2));
});
We can also use raw FTP commands directly. In this case, we use the FTP QUIT command, which accepts no parameters and returns the farewell message from the server:
Ftp.raw.quit(function(err, res) {
if (err)
return console.error(err);
console.log("FTP session finalized! See you soon!");
});
The file needs to be converted to bytes first.
var fs = require('fs');
fs.readFile('example.txt', function (err, data ) {
Ftp.put(data, 'example.txt', function (err) {
if (!err) {
console.log('OK');
} else {
console.log('ERR');
}
});
});
I used this code to pipe an image to my clients:
req.pipe(fs.createReadStream(__dirname+'/imgen/cached_images/' + link).pipe(res))
It does work, but sometimes the image is not transferred completely, and no error is thrown on either the client side (browser) or the server side (Node.js).
My second try was:
var img = fs.readFileSync(__dirname+'/imgen/cached_images/' + link);
res.writeHead(200, {
'Content-Type' : 'image/png'
});
res.end(img, 'binary');
But it leads to the same strange behaviour...
Does anyone have a clue for me?
(Abstracted code:)
var http = require('http');
http.createServer(function (req, res) {
Imgen.generateNew(
'virtualtwins/www_leonardocampus_de/overview/28',
'www.leonardocampus.de',
'overview',
'28',
null,
[],
[],
function (link) {
fs.stat(__dirname+'/imgen/cached_images/' + link, function(err, file_info) {
if (err) { console.log('err', err); }
console.log('file info', file_info.size);
res.writeHead(200, 'image/png');
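// note: a string as writeHead's second argument is treated as the status message,
// not headers; headers belong in an object: res.writeHead(200, { 'Content-Type': 'image/png' })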
fs.createReadStream(__dirname+'/imgen/cached_images/' + link).pipe(res);
});
}
);
}).listen(13337, '127.0.0.1');
Imgen.generateNew just creates a new file, saves it to the disk and gives back the path (link).
I've used this before, and all that is needed inside function (req, res) { is:
var path = ...;
res.writeHead(200, {
'Content-Type' : 'image/png'
});
fs.createReadStream(path).pipe(res);
where path is the computed path of the file to send. .pipe() transfers the data from the read stream to the write stream and calls end when the read stream ends, so there is no need to call res.end() afterwards.
What the problem was: I had two different write streams! If WriteStream #1 is closed, the second should be closed too, and only then should everything be piped.
But Node is asynchronous, so while one had been closed, the other one hadn't, even though stream.end() was called... Well, you should always wait for the 'close' event!
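A minimal sketch of that fix (sourceStream and cachePath are hypothetical; the point is only the wait-for-'close' pattern):
var fs = require('fs');
// write the generated image to the cache first...
var cacheStream = fs.createWriteStream(cachePath); // cachePath: hypothetical path
sourceStream.pipe(cacheStream); // sourceStream: hypothetical image source
// ...and only start streaming to the client once the file is fully on disk
cacheStream.on('close', function () {
  res.writeHead(200, { 'Content-Type': 'image/png' });
  fs.createReadStream(cachePath).pipe(res);
});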