How to parse an object sent from react frontend in express.js? - javascript

So in my react front-end, I am using the 'react-drop-to-upload' module to allow the user to drag a file and upload. I followed the example on the npm module page and created a handler called handleDrop. The code looks like:
handleDrop(files) {
var data = new FormData();
alert((files[0]) instanceof File);
files.forEach((file, index) => {
data.append('file' + index, file);
});
fetch('/file_upload', {
method: 'POST',
body: data
});
}
At my express backend, I have the following code:
app.post('/file_upload', function(req , res){
var body = '';
req.on('data', function (data) {
body += data;
});
var post = "";
req.on('end', function () {
//post = qs.parse(body);
console.log(body);
// this won't create a buffer for me
//var fileBuffer = new Buffer(body);
//console.log(fileBuffer.toString('ascii'));
//pdfText(body, function(err, chunks) {
//console.log(chunks);
//});
});
//console.log(typeof post);
});
If I drop a txt file and do a console log on the body, it would give me:
------WebKitFormBoundaryqlp9eomS0BxhFJkQ
Content-Disposition: form-data; name="file0"; filename="lec16.txt"
Content-Type: text/plain
The content of my data!
------WebKitFormBoundaryqlp9eomS0BxhFJkQ--
I am trying to use the pdfText module which takes in a buffer or a pathname to the pdf file, extract text from it into an array of text 'chunks' . I want to convert the body object into a buffer using var fileBuffer = new Buffer(body); but that won't work. Can someone help me with this? Thanks!

You need a parser for multi-part data. You can look into multer regarding that.
Example code for you,
app.post('/file_upload', function(req , res){
var storage = multer.diskStorage({
destination: tmpUploadsPath
});
var upload = multer({
storage: storage
}).any();
upload(req, res, function(err) {
if (err) {
console.log(err);
return res.end('Error');
} else {
console.log(req.body);
req.files.forEach(function(item) {
// console.log(item);
// do something with the item,
const data = fs.readFileSync(item.path);
console.log(data);
});
res.end('File uploaded');
}
});
});
To understand the example code in depth, head here. Remember, you will get the file data as a buffer and not as actual data.

Related

ExpressJS - Generate a CSV file on the server side and enable download from the client side

I am using ExpressJS to develop a web app.
There is a Create button on the page. What I want to achieve is, when the button is clicked, it sends a Post/Get request to the server side, and the server side then triggers a process to generate a CSV file and send it back to the client side for download.
I am thinking of using json2csv.
Client side:
$.ajax({
type: "POST",
url: "/incidents/createTable",
success: function() {
// HOW TO GET THE RETURNED DATA TO A DOWNLOADABLE FILE?
}
});
Server side and incidents router (the code snippet that follows was copied from the json2csv official npmjs page):
const { AsyncParser } = require('json2csv');
// THIS FUNCTION SHOULD GENERATE A CSV FILE IN MEMORY
router.post("/createTable", async function(req, res, next) {
console.log("flag 1"); // For flagging
const fields = ['field1', 'field2', 'field3'];
const opts = { fields };
const transformOpts = { highWaterMark: 8192 };
const asyncParser = new AsyncParser(opts, transformOpts);
console.log("flag 2"); // For flagging
let csv = '';
asyncParser.processor
.on('data', chunk => (csv += chunk.toString()))
.on('end', () => res.send(csv))
.on('error', err => console.error(err));
});
When I ran the web app and clicked the Create button, the server hung there, it passed "flag 2" and never went pass the asyncParser.processor. On the client side, the POST request was also hung there and with no status code.
Finally worked out a solution after doing a lot digging.
Server side:
var stream = require('stream');
//...
router.post("/createTable", async function(req, res, next) {
var fileContents = Buffer.from(JSON.stringify({
sampleTime: '1450632410296',
sampleData: '1234567890'
}));
var readStream = new stream.PassThrough();
readStream.end(fileContents);
res.set('Content-disposition', 'attachment; filename=' + "download.csv");
res.set('Content-Type', 'text/csv');
readStream.pipe(res);
});
Client side:
$.ajax({
type: "POST",
url: "/incidents/createTable",
success: function(result) {
var blob=new Blob([result], {type: 'text/csv'});
var link=document.createElement('a');
link.style = "display: none";
document.body.appendChild(link);
var url = window.URL.createObjectURL(blob);
link.href = url;
console.log(url);
link.download="download.csv";
link.click();
window.URL.revokeObjectURL(url);
}
});
You're missing one part, which is providing the data. Parser won't do anything until you do that. This is also hunging up a request, because res.send will never be reached.
Right from the docs:
asyncParser.input.push(data); // This data might come from an HTTP request, etc.
asyncParser.input.push(null); // Sending `null` to a stream signal that no more data is expected and ends it.
Here is complete code that will produce
"field1","field2","field3"
1,2,3
on GET /createTable
const { AsyncParser } = require('json2csv');
const express = require('express');
const app = express();
app.get("/createTable", async function(req, res, next) {
console.log("flag 1"); // For flagging
const fields = ['field1', 'field2', 'field3'];
const opts = { fields };
const transformOpts = { highWaterMark: 8192 };
const asyncParser = new AsyncParser(opts, transformOpts);
console.log("flag 2"); // For flagging
let csv = '';
asyncParser.processor
.on('data', chunk => (csv += chunk.toString()))
.on('end', () => res.send(csv))
.on('error', err => console.error(err));
asyncParser.input.push('{ "field1": 1, "field2": 2, "field3": 3 }');
asyncParser.input.push(null); // Sending `null` to a stream signal that no more data is expected and ends it.
});
app.listen(3000);

Upload image using Node js and electron. (convert from c# code)

I'm converting an application from c# to electron and i have some problems when i try to make a request to upload an image on a server (not my server).
For c# i used RestSharp library and all worked fine.
private void UploadImage(string id)
{
RestClient client = new RestClient("https://www.website.com")
{
CookieContainer = new CookieContainer()
};
string path = #"D:\Downloads\image.jpg";
var request = new RestRequest("/upload?id=" + id, Method.POST);
request.AddFile("myfile", File.ReadAllBytes(path), Path.GetFileName(path), "image/jpeg");
request.AddHeader("Content-type", "application/json");
request.AddHeader("Accept", "application/json");
request.RequestFormat = DataFormat.Json;
client.Execute(request);
}
How can i convert this code in Node js? The only thing i could found was code that uploaded to their own server and that does not work for me.
This is what i tried in Node js
var fs = require('fs');
var request = require('request');
fs.createReadStream("D:\Downloads\image.jpg").pipe(request.post("https://www.website.com/upload?id=" + productId, function (error, response, body) {
if (error) {
console.log(error);
} else {
console.log(response);
}
}));
Using the code above i get status code 200 and the body response is telling me that no image was select. So the request is working, but sending the image doesn't.
This is what i did in order to solve the problem. Maybe it will help somebody else too.
var fs = require('fs');
var request = require('request');
var req = request.post(uploadURL, function (err, resp, body) {
if (err) {
console.log('Error!');
} else {
console.log('URL: ' + body);
}
});
var form = req.form();
form.append('myfile', fs.createReadStream("path\to\image.jpg"), {
filename: "image.jpg",
contentType: 'image/jpeg'
});
I've been trying to use the same technique using Electron to upload files to my localhost test server, but no luck. My code returns as successful in the console, but no file is ever uploaded. Is this something you came across, or is there anything you may be able to see I'm doing differently?
const fs = require('fs');
const request = require('request');
var uploadURL = 'http://localhost:80/sandbox/img';
var req = request.post(uploadURL, function (err, resp, body) {
if (err) {
console.log(err);
} else {
console.log(body);
}
});
var form = req.form();
form.append('upload', fs.createReadStream("C:/nodejs/dave/assets/img/brand_logos/logo.jpg"), {
filename: "logo.jpg",
contentType: 'image/jpeg'
});
Below is the response I get, which I'm assuming is expected...
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
<html><head>
<title>301 Moved Permanently</title>
</head><body>
<h1>Moved Permanently</h1>
<p>The document has moved here.</p>
<hr>
<address>Apache/2.4.27 (Win64) PHP/7.0.23 Server at localhost Port 80</address>
</body></html>

How to configure API endpoint to receive file from ember-uploader component

I'm trying to figure out how to use ember-uploader, I have the following component (like the one in the README)
export default EmberUploader.FileField.extend({
filesDidChange: function(files) {
const uploader = EmberUploader.Uploader.create({
url: (ENV.APP.API_HOST || '') + '/api/v1/images/',
});
console.log(uploader);
if (!Ember.isEmpty(files)) {
var photo = files[0];
console.log(photo);
uploader.upload(photo)
.then(data => {
// Handle success
console.log("Success uploading file");
console.log(data);
}, error => {
// Handle failure
console.log("ERROR uploading file");
console.log(error);
});
}
}
});
The express API endpoint is listening for a POST request.
var saveImage = (req, res, next) => {
let body = req.body;
res.json({
data: body
});
};
But the body is empty after the request is done. I really don't know how to implement the API endpoint in order to get the file, I tried to see the req object and it doesn't contains the file.
Debugging it, After select a file using the component I get the following info in the console.
Seems that the API endpoint works because I get the following output:
POST /api/v1/images/ 200 27.284 ms - 11
But I can't get the file.
SOLUTION
In Express 4, req.files is no longer available on the req object by
default. To access uploaded files on the req.files object, use a
multipart-handling middleware like busboy, multer, formidable,
multiparty, connect-multiparty, or pez.
Following this blog, the code below was added to the API and the ember-uploader code posted in the question worked as expected.
import formidable from 'formidable';
var saveImage = (req, res, next) => {
var form = new formidable.IncomingForm();
form.parse(req);
form.on('fileBegin', function (name, file){
file.path = __dirname + '/tmp/' + file.name;
});
form.on('file', function (name, file){
res.json({
data: file.name
});
});
};

How to upload file to s3 through nodejs express from Angularjs

I am facing problem to upload file to s3 by nodejs express, and angularjs.
I am using angular directive to send file to node express and from node to s3.
Angular directive :
(function() {
'use strict';
angular.module('app').directive('ngFileModel', ['$parse', function ($parse) {
return {
restrict: 'A',
link: function (scope, element, attrs) {
var model = $parse(attrs.ngFileModel);
var isMultiple = attrs.multiple;
var modelSetter = model.assign;
element.bind('change', function () {
var values = [];
angular.forEach(element[0].files, function (item) {
var value = {
// File Name
name: item.name,
//File Size
size: item.size,
//File URL to view
url: URL.createObjectURL(item),
// File Input Value
_file: item
};
values.push(value);
});
scope.$apply(function () {
if (isMultiple) {
modelSetter(scope, values);
} else {
modelSetter(scope, values[0]);
}
});
});
}
};
}]);
})();
Html code
<input type="file" id="upme" ng-file-model="files" multiple style="display:none;" />
<div ng-if="files.length>0" ng-init="vm.uploadFile()"></div>
Server side:
exports.upload = function(req, res){
var images = req.body.images;
//res.send(images);
// console.dir(images)
images.forEach(function(file){
// console.dir(file);
S3.upFile('testbucket',file.name, file.url, function(err, data){
if(err){
console.log(err)
}else{
console.dir(data);
}
});
});
Problem,
The upload function works and I get something has been uploaded on s3 bucket, the file name appears in bucket; but it seems that is not actual size of the file and I can not open. when I click on the file url it say to download the file, after I download the file, it does not open. I think there may have any problem to parse the file in the node server before uploadin to s3. But I can't identify which solution should be there.
I also get one error in the console.
TypeError: path must be a string or Buffer
at TypeError (native)
at Object.fs.open (fs.js:625:11)
at ReadStream.open (fs.js:1708:6)
at new ReadStream (fs.js:1695:10)
at Object.fs.createReadStream (fs.js:1643:10)
I have made s3 file upload function as module in separate file. Here is module function of file upload
// uploading file or object into bucket
exports.upFile = function(bucket_name, key, file, next){
var params = {Bucket: bucket_name, Key: key, Body: fs.createReadStream(file), ACL:"public-read"};
s3.upload(params, function(err, data) {
next(err, data);
});
};
I appreciate any help from experts.
You are not giving a file as an argument to the upload function but an object URL. To correct your implementation, you have to make some changes in angular. Firstly, you should send files as multipart form data from angular. You can achieve this by using:
var form = new formData();
angular.forEach(element[0].files, function (item) {
form.append('file', item);
});
Send this form data to your node server using http request. You can define the route and http method using express in nodejs. On angularJS the request should look something like this:
$http.post('/test_route', form, {
withCredentials: false,
headers: {
'Content-Type': undefined
},
trnasformRequest: angular.identity
}).success(function(data) {
// do something with data from server
});
At node server, when you receive the request you have to extract files from form data. First define a route and method using express:
var multiparty = require('multiparty');
app.post('test_route', function(req, res) {
var form = new multiparty.Form();
form.parse(req, function(err, fields, files) {
var files_to_uplaod = files.file;
files_to_upload.forEach(function(file) {
read_file = fs.readFileSync(file.path);
var params = {Bucket: bucket_name, Key: file.originalFilename, Body: read_file, ACL:"public-read"};
s3.upload(params, function(err, data) {
next(err, data);
// once the file is uploaded you can remove the file from local disk which is saved whn multipart data arrives.
fs.unlink(file.path, function(err) {
if (err) {console.log(err);}
});
});
});
}
});
To parse the multiform on node server, use the multiparty module. More information can be found here: https://www.npmjs.com/package/multiparty

How to serve (uploaded) images using Meteor

I have this Meteor application in which it is possible to upload images. The uploading parts seem to work. I store the images in .uploads. Now I would like to make these images accessable by the following URL
http://localhost:3000/uploads
After a bit of googling I was able to create the following server side code:
var fs = Meteor.require('fs');
if (Meteor.isServer) {
WebApp.connectHandlers.stack.splice(0, 0, {
route: '/uploads',
handle: function (req, res, next) {
var path = process.env.PWD + '/.' + req.originalUrl.substr(1);
fs.readFile(path, {encoding: 'binary'}, function (err,data) {
if (err) {
throw err;
}
res.writeHead(200, {
'Content-Type': 'image/png'
});
//res.setEncoding("binary"); // this method does not exist
res.write(data);
res.end();
});
}
});
}
This code works, the path constructed is correct and in the browser I receive the 200 code, except it cannot display the image. Something is wrong with the data the browser receives. I checked the image on disk which is fine. So the code above must do something wrong with the data. Any suggestions what that might be?
Here is the code I found after googling (and works for me) a few days ago when I wanted to do what you need to do
files are in .screenshots directory mapped to :
http://localhost:3000/screenshots
code :
//directly serve screenshot files from /.screenshots dir
var fs = Npm.require('fs');
WebApp.connectHandlers.use(function(req, res, next) {
var re = /^\/screenshots\/(.*)$/.exec(req.url);
if (re !== null) { // Only handle URLs that start with /screenshots/*
var filePath = process.env.PWD + '/.screenshots/' + re[1];
var data = fs.readFileSync(filePath, data);
res.writeHead(200, {
'Content-Type': 'image'
});
res.write(data);
res.end();
} else { // Other urls will have default behaviors
next();
}
});

Categories