POST data received by an Express server with multer middleware is truncated - javascript

I'm sending a 1.5 MB string via a POST request to my Express server, but the received data is only 786 KB long.
The string I'm sending is a base64 image string. I've also tried changing multer's limits, to no avail.
CLIENT:
function upload(file) {
    var form = new FormData(),
        xhr = new XMLHttpRequest();

    form.append('filename', "imageName.jpg");
    form.append('imageData', file);

    xhr.open('post', 'http://server/imgdata', true);
    xhr.send(form);
}
SERVER:
app.use(multer({
    dest: './uploads/',
    // WILL RENAME THE RECEIVED FILE
    rename: function(fieldname, filename) {
        return filename + Date.now();
    },
    // UPLOAD HAS STARTED
    onFileUploadStart: function(file) {
        console.log(file.originalname + ' is starting ...');
    },
    // FILE HAS BEEN RECEIVED AND SAVED
    onFileUploadComplete: function(file) {
        console.log(file.fieldname + ' uploaded to ' + file.path);
        done = true;
    },
    onFieldsLimit: function() {
        console.log('Crossed fields limit!');
    }
}));
app.post('/imgdata', function(req, res) {
    // THIS RETURNS ONLY PART OF THE DATA
    res.json(req.body.image2);

    var data = req.body.image2.replace(/^data:image\/\w+;base64,/, "");
    var buf = new Buffer(data, 'base64');

    // THE IMAGE IS SAVED, BUT ONLY 786 KB OF IT EVERY TIME, REGARDLESS
    // OF THE SIZE OF THE DATA SENT
    fs.writeFile('image.jpg', buf, function(err) {
        if (err) throw err;
        console.log('It\'s saved!');
    });
});

Changing the limits of the multer middleware didn't fix the issue:
limits: {
    fieldNameSize: 100000,
    fieldSize: 5242880
},
but as per #adeneo's suggestion I gave bodyParser another go, and after changing the limit of urlencoded like so
app.use(bodyParser.urlencoded({
    extended: true,
    limit: 100000000
}));
I received all the data on the server and successfully saved the image.
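For reference, here is a minimal sketch of that fix in one place. The route and field names are assumptions based on the snippets above, and note that the urlencoded limit only applies when the base64 string arrives as a urlencoded body field rather than as multipart form data:
// Minimal sketch of the fix described above (route and field names assumed).
var express = require('express');
var bodyParser = require('body-parser');
var fs = require('fs');

var app = express();

// Raise the urlencoded limit so large base64 strings are not truncated.
// This applies to application/x-www-form-urlencoded request bodies.
app.use(bodyParser.urlencoded({ extended: true, limit: 100000000 }));

app.post('/imgdata', function(req, res) {
    var data = req.body.imageData.replace(/^data:image\/\w+;base64,/, '');
    var buf = Buffer.from(data, 'base64');
    fs.writeFile('image.jpg', buf, function(err) {
        if (err) return res.status(500).send('write failed');
        res.json({ saved: true });
    });
});

app.listen(3000);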

Related

How can I wait for a Python shell to execute before serving a file

I have an Express server route which receives an XML file, then parses it and returns it as JSON.
When a user sends a file, it is saved in an './upload' directory, parsed with a Python script, and the JSON output is written to './json-output', from where it is served.
When I first upload a file, the response comes back empty. But when I repeat the same upload (so a JSON file from the previous upload already exists in the './json-output' dir), it serves the JSON. It looks like an asynchronous issue, but I couldn't fix it.
app.post('/upload', function(req, res) {
    upload(req, res, async function(err) {
        if (err) {
            res.json({ error_code: 1, err_desc: err });
            return;
        }
        if (!req.file) {
            res.json({ error_code: 1, err_desc: 'No file passed' });
            return;
        }
        let fileName = req.file.originalname;
        const options = {
            args: [ fileName ]
        };
        const parserPath = path.join(__dirname + '/parser/parser.py');
        const readFile = promisify(PythonShell.run);
        await readFile(parserPath, options);
        fileName = fileName.split('.')[0];
        res.sendFile(path.join(__dirname + `/json-output/${fileName}.json`));
    });
});
I'm running it inside a Docker image.
This is quite a "dirty fix" in my eyes, but you could do a while loop, e.g.:
fileName = fileName.split('.')[0];
while (!fs.existsSync(path.join(__dirname + `/json-output/${fileName}.json`))) {
    console.log('File does not exist!');
}
// Be careful: you should delete the file once res.sendFile is done
res.sendFile(path.join(__dirname + `/json-output/${fileName}.json`));
Having read the python-shell docs, here is an idea:
https://www.npmjs.com/package/python-shell#exchanging-data-between-node-and-python
So, in theory, you can start a new PythonShell and only send the file once the script has ended:
let pyshell = new PythonShell(path.join(__dirname + '/parser/parser.py'), options);

pyshell.end(function (err, code, signal) {
    if (err) throw err;
    fileName = fileName.split('.')[0];
    res.sendFile(path.join(__dirname + `/json-output/${fileName}.json`));
});
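Along the same lines, a minimal sketch (assuming the same script path and options as above) that wraps the end callback in a Promise, so it fits the async route handler used in the question:
// Hypothetical helper: resolve once the Python script has finished writing its output.
function runParser(scriptPath, options) {
    return new Promise(function (resolve, reject) {
        const pyshell = new PythonShell(scriptPath, options);
        pyshell.end(function (err) {
            if (err) reject(err);
            else resolve();
        });
    });
}

// Inside the async upload callback:
// await runParser(path.join(__dirname, 'parser', 'parser.py'), options);
// res.sendFile(path.join(__dirname, 'json-output', `${fileName}.json`));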

ExpressJS - Generate a CSV file on the server side and enable download from the client side

I am using ExpressJS to develop a web app.
There is a Create button on the page. What I want to achieve is: when the button is clicked, it sends a POST/GET request to the server side, which then triggers a process to generate a CSV file and send it back to the client side for download.
I am thinking of using json2csv.
Client side:
$.ajax({
    type: "POST",
    url: "/incidents/createTable",
    success: function() {
        // HOW TO GET THE RETURNED DATA TO A DOWNLOADABLE FILE?
    }
});
Server side and incidents router (the code snippet that follows was copied from the json2csv official npmjs page):
const { AsyncParser } = require('json2csv');

// THIS FUNCTION SHOULD GENERATE A CSV FILE IN MEMORY
router.post("/createTable", async function(req, res, next) {
    console.log("flag 1"); // For flagging

    const fields = ['field1', 'field2', 'field3'];
    const opts = { fields };
    const transformOpts = { highWaterMark: 8192 };
    const asyncParser = new AsyncParser(opts, transformOpts);

    console.log("flag 2"); // For flagging

    let csv = '';
    asyncParser.processor
        .on('data', chunk => (csv += chunk.toString()))
        .on('end', () => res.send(csv))
        .on('error', err => console.error(err));
});
When I ran the web app and clicked the Create button, the server hung: it printed "flag 2" but never got past asyncParser.processor. On the client side, the POST request also hung, with no status code.
Finally worked out a solution after doing a lot of digging.
Server side:
var stream = require('stream');
//...
router.post("/createTable", async function(req, res, next) {
    var fileContents = Buffer.from(JSON.stringify({
        sampleTime: '1450632410296',
        sampleData: '1234567890'
    }));
    var readStream = new stream.PassThrough();
    readStream.end(fileContents);

    res.set('Content-disposition', 'attachment; filename=' + "download.csv");
    res.set('Content-Type', 'text/csv');

    readStream.pipe(res);
});
Client side:
$.ajax({
    type: "POST",
    url: "/incidents/createTable",
    success: function(result) {
        var blob = new Blob([result], { type: 'text/csv' });
        var link = document.createElement('a');
        link.style = "display: none";
        document.body.appendChild(link);

        var url = window.URL.createObjectURL(blob);
        link.href = url;
        console.log(url);
        link.download = "download.csv";
        link.click();

        window.URL.revokeObjectURL(url);
    }
});
You're missing one part, which is providing the data. The parser won't do anything until you do that. This is also what hangs the request, because res.send is never reached.
Right from the docs:
asyncParser.input.push(data); // This data might come from an HTTP request, etc.
asyncParser.input.push(null); // Sending `null` to a stream signals that no more data is expected and ends it.
Here is complete code that will produce
"field1","field2","field3"
1,2,3
on GET /createTable
const { AsyncParser } = require('json2csv');
const express = require('express');

const app = express();

app.get("/createTable", async function(req, res, next) {
    console.log("flag 1"); // For flagging

    const fields = ['field1', 'field2', 'field3'];
    const opts = { fields };
    const transformOpts = { highWaterMark: 8192 };
    const asyncParser = new AsyncParser(opts, transformOpts);

    console.log("flag 2"); // For flagging

    let csv = '';
    asyncParser.processor
        .on('data', chunk => (csv += chunk.toString()))
        .on('end', () => res.send(csv))
        .on('error', err => console.error(err));

    asyncParser.input.push('{ "field1": 1, "field2": 2, "field3": 3 }');
    asyncParser.input.push(null); // Sending `null` to a stream signals that no more data is expected and ends it.
});

app.listen(3000);
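To trigger a download in the browser from that response, the same blob trick as in the first answer can be reused; here is a minimal client-side sketch (assuming the GET route above is reachable at /createTable):
// Hypothetical client-side usage: fetch the CSV and trigger a download via a blob URL.
fetch('/createTable')
    .then(function(response) { return response.text(); })
    .then(function(csv) {
        var blob = new Blob([csv], { type: 'text/csv' });
        var link = document.createElement('a');
        link.href = window.URL.createObjectURL(blob);
        link.download = 'download.csv';
        document.body.appendChild(link);
        link.click();
        window.URL.revokeObjectURL(link.href);
        link.remove();
    });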

How to parse an object sent from a React frontend in Express.js?

So in my react front-end, I am using the 'react-drop-to-upload' module to allow the user to drag a file and upload. I followed the example on the npm module page and created a handler called handleDrop. The code looks like:
handleDrop(files) {
    var data = new FormData();
    alert((files[0]) instanceof File);
    files.forEach((file, index) => {
        data.append('file' + index, file);
    });
    fetch('/file_upload', {
        method: 'POST',
        body: data
    });
}
At my express backend, I have the following code:
app.post('/file_upload', function(req, res) {
    var body = '';
    req.on('data', function(data) {
        body += data;
    });
    var post = "";
    req.on('end', function() {
        //post = qs.parse(body);
        console.log(body);
        // this won't create a buffer for me
        //var fileBuffer = new Buffer(body);
        //console.log(fileBuffer.toString('ascii'));
        //pdfText(body, function(err, chunks) {
        //    console.log(chunks);
        //});
    });
    //console.log(typeof post);
});
If I drop a .txt file and log the body to the console, it gives me:
------WebKitFormBoundaryqlp9eomS0BxhFJkQ
Content-Disposition: form-data; name="file0"; filename="lec16.txt"
Content-Type: text/plain
The content of my data!
------WebKitFormBoundaryqlp9eomS0BxhFJkQ--
I am trying to use the pdfText module, which takes a buffer or a pathname to the PDF file and extracts its text into an array of text 'chunks'. I want to convert the body into a buffer using var fileBuffer = new Buffer(body); but that doesn't work. Can someone help me with this? Thanks!
You need a parser for multi-part data. You can look into multer regarding that.
Example code for you,
app.post('/file_upload', function(req, res) {
    var storage = multer.diskStorage({
        destination: tmpUploadsPath
    });
    var upload = multer({
        storage: storage
    }).any();

    upload(req, res, function(err) {
        if (err) {
            console.log(err);
            return res.end('Error');
        } else {
            console.log(req.body);
            req.files.forEach(function(item) {
                // console.log(item);
                // do something with the item,
                const data = fs.readFileSync(item.path);
                console.log(data);
            });
            res.end('File uploaded');
        }
    });
});
To understand the example code in depth, head to the multer documentation. Remember, you will get the file data as a buffer and not as actual data.
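If the goal is just to get a Buffer to hand to something like pdfText, a variant worth trying is multer's memory storage, which keeps each upload in memory instead of writing it to disk. A sketch, assuming multer is already required:
// Sketch: keep uploads in memory so each file arrives as a Buffer on file.buffer.
var upload = multer({ storage: multer.memoryStorage() }).any();

app.post('/file_upload', function(req, res) {
    upload(req, res, function(err) {
        if (err) {
            return res.status(500).end('Error');
        }
        req.files.forEach(function(file) {
            // file.buffer is a Buffer with the raw file contents,
            // which could be passed to something like pdfText directly.
            console.log(file.originalname, file.buffer.length);
        });
        res.end('File uploaded');
    });
});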

How to prompt a save dialog to download a file using AngularJS?

I have a directory on the server that contains files. I send a file name from the client and get the file from the server; up to that point it works fine (the response from the server is shown below). Once I receive the response I want to prompt the user to download that file, so I am trying to create a blob using AngularJS, but it doesn't prompt the user to save the file. Any idea?
ctrl.js
$scope.downloadFile = function(message) {
    DitFactory.getFile(message).then(function(response) {
        console.log('r', response);
        var blob = new Blob([ response ], { type: 'text/plain' });
        $scope.url = (window.URL || window.webkitURL).createObjectURL(blob);
        console.log($scope.url);
    });
};
serverResponse.json
{"level":"info","message":"another-2fdas message"}
server.js
app.get('/file', function(req, res) {
    var dir = './ditLogs';
    var root = path.resolve('./ditLogs');
    var fileName = req.query.file_name;
    var data;
    fs.readdir(dir, function(err, items) {
        items.forEach(function(file) {
            if (fileName === file) {
                data = file;
                res.setHeader('Content-Disposition', 'attachment; filename=' + data);
                res.sendFile(data, { root: root });
            }
        });
    });
});
If you are using Express, you can try the code below in your server.js file:
var file = 'path to your file';
res.download(file, function(err) {
    if (!err) {
        console.log('prompted successfully');
        return;
    }
});
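Applied to the route from the question, a sketch might look like this (assuming the same ./ditLogs directory and file_name query parameter); res.download sets the Content-Disposition header for you:
// Sketch: serve the requested log file with res.download so the browser prompts to save it.
app.get('/file', function(req, res) {
    var root = path.resolve('./ditLogs');
    var fileName = req.query.file_name;
    // Note: real code should validate fileName to avoid path traversal.
    var filePath = path.join(root, fileName);

    res.download(filePath, fileName, function(err) {
        if (err) {
            console.error('download failed', err);
        }
    });
});
On the AngularJS side, the save prompt is usually easier to trigger by pointing the browser directly at this URL (for example via window.open or an anchor href) than by fetching the data with $http and building a blob.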

Node.js script works once, then fails subsequently

I need a Node.js script that does the following:
1 - Triggers when an image is added to a specified S3 bucket.
2 - Creates a thumbnail of that image (360x203 pixels).
3 - Saves a copy of that thumbnail inside a separate S3 folder.
4 - Uploads the thumbnail to a specified FTP server, SIX (6) times, using a "FILENAME-X" naming convention.
The code works just as expected at first. The sample event pulls the image, creates a thumbnail, saves it to the other S3 bucket, then uploads it to the FTP server.
The problem: It works for the test file HappyFace.jpg once, but then each subsequent test fails. Also, I tried doing it with a different file, but was unsuccessful.
Also: If I could get some help writing a loop to name the different files that get uploaded, it would be very much appreciated. I usually code in PHP, so it'd probably take me longer than I hope to write.
Note: I removed my FTP credentials for privacy.
Problem Code Snippet:
function upload(contentType, data, next) {
    // Upload test file to FTP server
    c.append(data, 'testing.jpg', function(err) {
        console.log("CONNECTION SUCCESS!");
        if (err) throw err;
        c.end();
    });

    // Connect to ftp
    c.connect({
        host: "",
        port: 21, // defaults to 21
        user: "", // defaults to "anonymous"
        password: "" // defaults to "#anonymous"
    });

    // S3 Bucket Upload Function Goes Here
}
Full Code:
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var util = require('util');
var Client = require('ftp');
var fs = require('fs');
var gm = require('gm')
    .subClass({ imageMagick: true }); // Enable ImageMagick integration.

// get reference to FTP client
var c = new Client();

// get reference to S3 client
var s3 = new AWS.S3();

exports.handler = function(event, context) {
    // Read options from the event.
    console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));

    // Get source bucket
    var srcBucket = event.Records[0].s3.bucket.name;

    // Get source object key
    // Object key may have spaces or unicode non-ASCII characters.
    var srcKey =
        decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
    var url = 'http://' + srcBucket + ".s3.amazonaws.com/" + srcKey;

    // Set destination bucket
    var dstBucket = srcBucket + "-thumbs";

    // Set destination object key
    var dstKey = "resized-" + srcKey;

    // Infer the image type.
    var typeMatch = srcKey.match(/\.([^.]*)$/);
    if (!typeMatch) {
        console.error('unable to infer image type for key ' + srcKey);
        return;
    }
    var imageType = typeMatch[1];
    if (imageType != "jpg" && imageType != "png") {
        console.log('skipping non-image ' + srcKey);
        return;
    }

    // Download the image from S3, transform, and upload to a different S3 bucket.
    async.waterfall([
        function download(next) {
            // Download the image from S3 into a buffer.
            s3.getObject({
                Bucket: srcBucket,
                Key: srcKey
            },
            next);
        },
        function transform(response, next) {
            gm(response.Body).size(function(err, size) {
                // Transform the image buffer in memory.
                this.toBuffer(imageType, function(err, buffer) {
                    if (err) {
                        next(err);
                    } else {
                        next(null, response.ContentType, buffer);
                    }
                });
            });
        },
        function upload(contentType, data, next) {
            // Upload test file to FTP server
            c.append(data, 'testing.jpg', function(err) {
                console.log("CONNECTION SUCCESS!");
                if (err) throw err;
                c.end();
            });

            // Connect to ftp
            c.connect({
                host: "",
                port: 21, // defaults to 21
                user: "", // defaults to "anonymous"
                password: "" // defaults to "#anonymous"
            });

            // Stream the thumb image to a different S3 bucket.
            s3.putObject({
                Bucket: dstBucket,
                Key: dstKey,
                Body: data,
                ContentType: contentType
            },
            next);
        }
    ], function(err) {
        if (err) {
            console.error(
                'Unable to resize ' + srcBucket + '/' + srcKey +
                ' and upload to ' + dstBucket + '/' + dstKey +
                ' due to an error: ' + err
            );
        } else {
            console.log(
                'Successfully resized ' + srcBucket + '/' + srcKey +
                ' and uploaded to ' + dstBucket + '/' + dstKey
            );
        }
        // context.done();
    });
};
The logs:
START RequestId: edc808c1-712b-11e5-aa8a-ed7c188ee86c Version: $LATEST
2015-10-12T21:55:20.481Z edc808c1-712b-11e5-aa8a-ed7c188ee86c Reading options from event: { Records: [ { eventVersion: '2.0', eventTime: '1970-01-01T00:00:00.000Z', requestParameters: { sourceIPAddress: '127.0.0.1' }, s3: { configurationId: 'testConfigRule', object: { eTag: '0123456789abcdef0123456789abcdef', sequencer: '0A1B2C3D4E5F678901', key: 'HappyFace.jpg', size: 1024 }, bucket: { arn: 'arn:aws:s3:::images', name: 'images', ownerIdentity: { principalId: 'EXAMPLE' } }, s3SchemaVersion: '1.0' }, responseElements: { 'x-amz-id-2': 'EXAMPLE123/5678abcdefghijklambdaisawesome/mnopqrstuvwxyzABCDEFGH', 'x-amz-request-id': 'EXAMPLE123456789' }, awsRegion: 'us-east-1', eventName: 'ObjectCreated:Put', userIdentity: { principalId: 'EXAMPLE' }, eventSource: 'aws:s3' } ] }
2015-10-12T21:55:22.411Z edc808c1-712b-11e5-aa8a-ed7c188ee86c Successfully resized images/HappyFace.jpg and uploaded to images-thumbs/resized-HappyFace.jpg
2015-10-12T21:55:23.432Z edc808c1-712b-11e5-aa8a-ed7c188ee86c CONNECTION SUCCESS!
END RequestId: edc808c1-712b-11e5-aa8a-ed7c188ee86c
REPORT RequestId: edc808c1-712b-11e5-aa8a-ed7c188ee86c Duration: 3003.76 ms Billed Duration: 3000 ms Memory Size: 128 MB Max Memory Used: 43 MB
Task timed out after 3.00 seconds
START RequestId: d347e7e3-712d-11e5-bfdf-05baa36d50fd Version: $LATEST
2015-10-12T22:08:55.910Z d347e7e3-712d-11e5-bfdf-05baa36d50fd Reading options from event: { Records: [ { eventVersion: '2.0', eventTime: '1970-01-01T00:00:00.000Z', requestParameters: { sourceIPAddress: '127.0.0.1' }, s3: { configurationId: 'testConfigRule', object: { eTag: '0123456789abcdef0123456789abcdef', sequencer: '0A1B2C3D4E5F678901', key: 'HappyFace.jpg', size: 1024 }, bucket: { arn: 'arn:aws:s3:::images', name: 'images', ownerIdentity: { principalId: 'EXAMPLE' } }, s3SchemaVersion: '1.0' }, responseElements: { 'x-amz-id-2': 'EXAMPLE123/5678abcdefghijklambdaisawesome/mnopqrstuvwxyzABCDEFGH', 'x-amz-request-id': 'EXAMPLE123456789' }, awsRegion: 'us-east-1', eventName: 'ObjectCreated:Put', userIdentity: { principalId: 'EXAMPLE' }, eventSource: 'aws:s3' } ] }
END RequestId: d347e7e3-712d-11e5-bfdf-05baa36d50fd
REPORT RequestId: d347e7e3-712d-11e5-bfdf-05baa36d50fd Duration: 3003.33 ms Billed Duration: 3000 ms Memory Size: 128 MB Max Memory Used: 17 MB
Task timed out after 3.00 seconds
The line:
var c = new Client();
is only going to get executed once; all calls to your handler() function will use the same instance of your FTP client.
If there could be multiple overlapping calls to handler()—and in an async world it sure seems likely—then the calls to the FTP client, including c.connect(…) and c.end() will be invoked multiple times against the same FTP client, which may already have an upload in progress, leading to a scenario like this:
Call to handler(). Upload begins.
Call to handler(). Second upload begins.
First upload completes and calls c.end().
Second upload is canceled.
The solution is to create a new FTP client instance for each upload or, if your FTP server has a problem with that (limits the number of client connections), you’ll need to serialize your uploads somehow. One way to do that, since you’re using the async library, would be to use async.queue.
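As a sketch of the first option, the upload step could create and tear down its own connection per invocation. The host and credentials are placeholders, and s3, dstBucket and dstKey come from the code above; the S3 putObject call is unchanged:
// Sketch: give each upload its own FTP client so overlapping invocations don't share state.
function upload(contentType, data, next) {
    var ftp = new Client();

    ftp.on('ready', function() {
        ftp.append(data, 'testing.jpg', function(err) {
            ftp.end();
            if (err) return next(err);

            // Stream the thumb image to a different S3 bucket once the FTP upload is done.
            s3.putObject({
                Bucket: dstBucket,
                Key: dstKey,
                Body: data,
                ContentType: contentType
            }, next);
        });
    });

    ftp.on('error', next);

    ftp.connect({
        host: "",      // placeholder
        port: 21,
        user: "",      // placeholder
        password: ""   // placeholder
    });
}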
