Problems in writing binary data with node.js - javascript

I am trying to write the binary body of a request to a file and failing. The file is created on server but I am unable to open it. I am getting 'Fatal error: Not a png' on Ubuntu. Here is how I am making the request:
curl --request POST --data-binary "#abc.png" 192.168.1.38:8080
And here is how I am trying to save it with the file. The first snippet is a middleware for appending all the data together and second one is the request handler:
Middleware:
app.use(function(req, res, next) {
req.rawBody = '';
req.setEncoding('utf-8');
req.on('data', function(chunk) {
req.rawBody += chunk;
});
req.on('end', function() {
next();
});
});
Handler:
exports.save_image = function (req, res) {
fs.writeFile("./1.png", req.rawBody, function(err) {
if(err) {
console.log(err);
} else {
console.log("The file was saved!");
}
});
res.writeHead(200);
res.end('OK\n');
};
Here's some info which might help. In the middleware, if I log the length of rawBody, it looks to be correct. I am really stuck at how to correctly save the file. All I need is a nudge in the right direction.

Here is a complete express app that works. I hit it with curl --data-binary #photo.jpg localhost:9200 and it works fine.
var app = require("express")();
var fs = require("fs");
app.post("/", function (req, res) {
var outStream = fs.createWriteStream("/tmp/upload.jpg");
req.pipe(outStream);
res.send();
});
app.listen(9200);
I would just pipe the request straight to the filesystem. As to your actual problem, my first guess is req.setEncoding('utf-8'); as utf-8 is for text data not binary data.

For fix your code: I'm with #Peter Lyons, that the error is probably the req.setEncoding('utf-8'); line.
I know the following don't ask your question directly, but proposes an alternative to it by using req.files functionality provided by Express.js, which you are using.
if (req.files.photo && req.files.photo.name) {
// Get the temporary location of the file.
var tmp_path = req.files.photo.path;
// set where the file should actually exists - in this case it is in the "images" directory.
var target_path = './public/profile/' + req.files.photo.name;
// Move the file from the temporary location to the intended location.
fs.rename(tmp_path, target_path, function (error) {
if (!error) {
/*
* Remove old photo from fs.
* You can remove the following if you want to.
*/
fs.unlink('./public/profile/' + old_photo, function () {
if (error) {
callback_parallel(error);
}
else {
callback_parallel(null);
}
});
}
else {
callback_parallel(error);
}
});
}

Related

Inexplicable error arrizing in Node.JS server using Express for HTML server and Formidable for multipart from processing

As part of a larger project, I need to upload an image and some text field to my server using a singular HTML POST form. To do this, I elected to use Formidable to process the data. However, when I do this, the program throws an inexplicable error. This error is that when running the line var form = new formidable.IncomingForm();, the application says that formidable.IncomingForm() is not a valid constructor. The reason that this is so mind boggling to me is that this is the way that every piece of documentation for Formidable says to initialize a form. If you could help me out, that would be great.
//Adds new rock to database
var express = require('express');
var router = express.Router();
var fs = require('fs');
const formidable = require('express-formidable');
//const pool = require('../pool_handler');
router.use(formidable());
console.log('fileUpload module loaded');
/* POST page. */
router.post('/', function(req, result, next) {
console.log('inside POST request');
console.log(req.files);
if(!req.files) {
return next('/success');
}
var form = formidable.IncomingForm();
console.log('after the form issue');
form.parse(req, function (err, fields, files) {
var oldpath = files.image.path;
var newpath = '../public/images/' + files.image.name;
fs.rename(oldpath, newpath, function(err) {
if(err) {
throw err;
}
});
});
next('/success');
});
router.all('/', function(req, result) {
result.render('addNewRock');
});
router.all('/success', function(req, result) {
result.send('success!');
})
module.exports = router;
I am apology for my english, I am not write very well. But I expected to help you.
In my case I try to use that library (express-formidable) but it has very problems to compatibility and is very dificult solve them.
My sugestion is use the library "formidable", that provides all funtionality that you need and the page from npm contains all info that for implement.
Check formidable

Request ends before readstream events are handled

I'm attempting to make a nodejs function which reads back data from a file with the following code:
app.post('/DownloadData', function(req, res)
{
req.on('data', function(data) {
if (fs.existsSync('demoDataFile.dat')) {
var rstream = fs.createReadStream('demoDataFile.dat');
var bufs = [];
rstream.on('data', function(chunk) {
bufs.push(chunk);
console.log("data");
});
rstream.on('end', function() {
downbuf = Buffer.concat(bufs);
console.log(downbuf.length);
});
}
});
req.on('end', function() {
console.log("end length: " + downbuf.length);
res.end(downbuf);
});
req.on('error', function(err)
{
console.error(err.stack);
});
});
The problem is, the buffer comes back as empty as the req.on('end' ... is called before any of the rstream.on events ("data" and the length aren't printed in the console until after "end length: " has been printed). Am I handling the events wrong or is there some other issue? Any guidance would be appreciated.
Not sure why you're reading from req, because you're not using the body data at all. Also, because the data event can trigger multiple times, the code you're using to read the file may also get called multiple times, which probably isn't what you want.
Here's what I think you want:
app.post("/DownloadData", function(req, res) {
let stream = fs.createReadStream("demoDataFile.dat");
// Handle error regarding to creating/opening the file stream.
stream.on('error', function(err) {
console.error(err.stack);
res.sendStatus(500);
});
// Read the file data into memory.
let bufs = [];
stream.on("data", function(chunk) {
bufs.push(chunk);
console.log("data");
}).on("end", function() {
let downbuf = Buffer.concat(bufs);
console.log(downbuf.length);
...process the buffer...
res.end(downbuf);
});
});
You have to be aware that this will read the file into memory entirely. If it's a big file, it may require a lot of memory.
Since you don't specify which operations you have to perform on the file data, I can't recommend an alternative, but there are various modules available that can help you process file data in a streaming fashion (i.e. without having to read the file into memory entirely).

Sails.js downloading file

I'm beginner in sails.js and I need to add some logic to existing server.
I need to retrieve image from shared network, so I added ImageController.js and action in it:
module.exports = {
download: function(req, res) {
var run = req.param('run');
var test = req.param('test');
var file = req.param('file');
var path = util.format('\\\\tch-smain\\Share\\AutoTest\\Screenshots\\%s\\%s\\%s.jpeg', run, test, file);
fileAdapter.read(path)
.on('error', function (err){
return res.serverError(err);
})
.pipe(res);
}
};
Then I registered route in config\routes.js:
'/img': 'ImageController.download'
But when I trying to execute GET /img?run=a&test=b&file=c I get 500 error Internal Server Error. Something isn't right here.
But file on \\tch-smain\Share\AutoTest\Screenshots\a\b\c.jpeg exists

Receiving API info after it responds to client (sending undefined instead of api info)

This is my node.js file, what's happening is, a user inputs an id and it gets sent to the server-side, which the server then takes and searches it through an api on a third-party. The problem I am having is, the response from the initial id sent by the user gets res.end before I get the info back from the api, thus getting a big fat undefined or an empty string on the client side. How can I make it so the res.end waits until it receives the data from the api and then sends it to the client? Or am I going at it entirely wrong and shouldn't be doing it this way?
var http = require('http');
var https = require('https');
var url = require('url');`
http.createServer(function (req, res) {
var id = url.parse(req.url,false).query;
var realId = getInfoFromApi(id);
res.setHeader('Access-Control-Allow-Origin', '*');
res.writeHead(200);
res.end(JSON.stringify(realId));
}).listen(3000, '127.0.0.1');
function getInfoFromApi(id) {
var url = "https://random.com/" + id + "?api_key=blabla";
var test = https.get(url, function(res) {
var body = '';
res.on('data', function(chunk) {
body += chunk;
});
res.on('end', function() {
var theId = JSON.parse(body)
console.log("Got response: ", theId);
});
}).on('error', function(e) {
console.log("Got error: ", e);
});
return test;
}
Any info into what I should be looking or resources that would help me would be greatly appreciated, thanks in advance for any help.
Also as a bonus question, should I be using POST method instead of GET from server to api-server? Or does POST privacy only matter from client to server? I'm just curious if clever clients would be able to access the server http.requests if I use GET instead of POST or if it even matters at all since it's from the server and not the client.
Oh and another thing, I was debating if I should use https.createServer since I have to use https for https.get, that way I could remove the http module from require and only use https. Is there any big difference in performance or anything if I do that?
Thanks again in advance.
EDIT:
http.createServer(function (req, res) {
var id = url.parse(req.url, false).query;
var link = "https://random.com/" + id + "?api_key=blabla";
var testo = https.get(link, function (rez) {
var body = '';
rez.on('data', function (chunk) {
body += chunk;
});
rez.on('end', function () {
var endRes = body
res.setHeader('Access-Control-Allow-Origin', '*');
res.writeHead(200);
res.end(endRes);
});
}).on('error', function (e) {
console.log("Got error: ", e);
});
}).listen(3000, '127.0.0.1');
I managed to change it like this after I read into callbacks. Thank you for that. It works fine now, so happy, thanks again. My question now though is, does this work flawlessly if dozens or hundreds of people are searching at the same time? Does each client connected to the server get their own seperate function thing that runs at their own pace without interfering with others? Thanks again for all your help.

Piping remote file in ExpressJS

I would like to read a remote image and display it. I can save the file but not getting the code right to display it. Ideally I just want to pass the file right though without processing - not sure if a tmp file step is required or not. This code displays nothing - no errors. I tried res.pipe(response) as well.
var url = 'http://proxy.boxresizer.com/convert?resize=50x50&source=' + filename
var request = http.get(url, function(response) {
var tmp = path.join(require('os').tmpDir(), filename);
var outstream = require('fs').createWriteStream(tmp);
response.pipe(outstream);
response.on('end', function() {
res.set('Content-Type', 'image/jpg');
res.pipe(outstream);
res.end();
});
});
Well I'd still like to know how to make the above work but I solved my issue in one line with the request module!
var url = 'http://proxy.boxresizer.com/convert?resize=50x50&source=' + filename
require('request').get(url).pipe(res); // res being Express response
Since request is deprecated, you can alternatively use node-fetch like so:
app.get("/", (req, res) => {
fetch(actualUrl).then(actual => {
actual.headers.forEach((v, n) => res.setHeader(n, v));
actual.body.pipe(res);
});
});
This is what seems to work for me, using the standard fetch() included in Node 18, which is a bit different than node-fetch, see github.com/node-fetch/node-fetch#bodybody:
app.get("/", (req, res) => {
fetch(url).then((response) => {
body.pipeTo(
new WritableStream({
start() {
response.headers.forEach((v, n) => res.setHeader(n, v));
},
write(chunk) {
res.write(chunk);
},
close() {
res.end();
},
})
);
});
});

Categories