I am trying to handle a POST request on my Node Express server to deal with multipart form uploads, in my case the user is uploading images.
I want to pipe the upload to another server via my Express app which is currently setup to use body parser, which I also see does not support multipart bodes and instead recommends using some other libraries.
I have seen multiparty but I am unsure how to use this with my client side application.
In my client side code I am posting a FormData object like so:
function create(data, name) {
var formData = new FormData();
formData.append('file', data, name);
return this.parentBase.one('photos').withHttpConfig({transformRequest: angular.identity}).customPOST(formData, undefined, undefined, {'Content-Type': undefined});
}
Note: I am using the Restangular library for AngularJS as documented here
So from what I understand looking at the multiparty docs, I have to handle the form upload events and act upon it further once the form has finished uploading.
The thing is, I was hoping I could just pipe the upload directly to another server. Beforehand my client side app was making direct calls to this other server, but I am now trying to get everything routed through Express, is this possible, or do I have to use something like multiparty?
The request documentation gives an example of using formData, but I am unsure how this would work with the multiparty examples I have seen. For example once the upload completes in Express using mutliparty, do I then have to construct another formData object to then make a further request with, or would I have to pipe each part to the other server?
I'm confused, please can someone help clear this up for me?
Thanks
EDIT
OK, I have taken a look at multer following #yarons comments and this seems to be the kind of thing I want to be using, I have attempted to use this with my express router setup as per below:
routes.js
var express = require('express'),
router = express.Router(),
customers = require('./customers.controller.js'),
multer = require('multer'),
upload = multer();
router.post('/customers/:customerId/photos/', upload.single('file'), customers.createPhoto);
controller.js
module.exports.createPhoto = function(req, res) {
console.log(req.file);
var options = prepareCustomersAPIHeaders(req);
options.formData = req.file;
request(options).pipe(res);
};
Logging out the req.file property in the above controller I see this:
{ fieldname: 'file',
originalname: '4da2e703044932e33b8ceec711c35582.jpg',
encoding: '7bit',
mimetype: 'image/png',
buffer: <Buffer 89 50 4e 47 0d 0a 1a 0a 00 00 00 0d 49 48 44 52 00 00 00 fa 00
00 00 fa 08 06 00 00 00 88 ec 5a 3d 00 00 20 00 49 44 41 54 78 5e ac bd f9 8f e
6 e9 7a ... >,
size: 105868 }
Which is what I am posting through from the client side code using:
var formData = new FormData();
formData.append('file', data, name);
return this.parentBase.one('photos').withHttpConfig({transformRequest: angular.identity}).customPOST(formData, undefined, undefined, {'Content-Type': undefined});
Is what I have tried sensible? Only it doesn't work, I get an error back from the server I'm trying post to. Beforehand where I was making this post request directly to the server it all worked fine, so I must have something wrong in my Express\Multer setup
EDIT 2
Ok, so after more hunting around I came across this article using multiparty which I have manager to get working in my setup like so:
var request = require('request'),
multiparty = require('multiparty'),
FormData = require('form-data');
module.exports.createPhoto = function(req, res) {
//console.log(req.file);
var options = prepareCustomersAPIHeaders(req),
form = new multiparty.Form();
options.headers['Transfer-Encoding'] = 'chunked';
form.on('part', function(part){
if(part.filename) {
var form = new FormData(), r;
form.append(part.name, part, {filename: part.filename, contentType: part['content-type']});
r = request(options, function(err, response, body){
res.status(response.statusCode).send(body);
});
r._form = form
}
});
form.on('error', function(error){
console.log(error);
});
form.parse(req);
};
This is now uploading the files for me as expected to my other server, whilst this solution works, I dont like the line:
r._form = form
Seems to be assigning a private form variable to the request object, plus I can't see anything that is documented in this way on multiparty pages
Can anyone offer any comments on this possible solution?
We use something like the following:
CLIENT
//HTML
<input type="file" ng-file-select uploader="info.uploadPath" />
//DIRECTIVES
// It is attached to <input type="file" /> element
.directive('ngFileSelect', function() {
return {
link: function($scope, $element) {
$element.bind('change', function() {
$scope.$emit('file:add', this.files ? this.files : this);
});
}
};
})
//OTHER
var uploadPath = '/api/things/' + $stateParams.thingId + '/add_photo'
var uploadInfo = {
headers: {
'Authorization': authToken
},
form: {
title: scope.info.name
}
}
//SERVICE:
$rootScope.$on('file:add', function(event, items) {
this.addToQueue(items);
}.bind(this));
...
addToQueue: function(items) {
var length = this.queue.length;
angular.forEach(items.length ? items : [items], function(item) {
var isValid = !this.filters.length ? true : !!this.filters.filter(function(filter) {
return filter.apply(this, [item]);
}, this).length;
if (isValid) {
item = new Item({
url: this.url,
alias: this.alias,
removeAfterUpload: this.removeAfterUpload,
uploader: this,
file: item
});
this.queue.push(item);
}
}, this);
this.uploadAll();
},
getNotUploadedItems: function() {
return this.queue.filter(function(item) {
return !item.isUploaded;
});
},
/**
* Upload a item from the queue
* #param {Item|Number} value
*/
uploadItem: function(value, uploadInfo) {
if (this.isUploading) {
return;
}
var index = angular.isObject(value) ? this.getIndexOfItem(value) : value;
var item = this.queue[index];
var transport = item.file._form ? '_iframeTransport' : '_xhrTransport';
this.isUploading = true;
this[transport](item, uploadInfo);
},
uploadAll: function(uploadInfo) {
var item = this.getNotUploadedItems()[0];
this._uploadNext = !!item;
this._uploadNext && this.uploadItem(item, uploadInfo);
},
_xhrTransport: function(item, uploadInfo) {
var xhr = new XMLHttpRequest();
var form = new FormData();
var that = this;
form.append(item.alias, item.file);
angular.forEach(uploadInfo.form, function(value, name) {
form.append(name, value);
});
xhr.upload.addEventListener('progress', function(event) {
var progress = event.lengthComputable ? event.loaded * 100 / event.total : 0;
that._scope.$emit('in:progress', item, Math.round(progress));
}, false);
xhr.addEventListener('load', function() {
xhr.status === 200 && that._scope.$emit('in:success', xhr, item);
xhr.status !== 200 && that._scope.$emit('in:error', xhr, item);
that._scope.$emit('in:complete', xhr, item);
}, false);
xhr.addEventListener('error', function() {
that._scope.$emit('in:error', xhr, item);
that._scope.$emit('in:complete', xhr, item);
}, false);
xhr.addEventListener('abort', function() {
that._scope.$emit('in:complete', xhr, item);
}, false);
this._scope.$emit('beforeupload', item);
xhr.open('POST', item.url, true);
angular.forEach(uploadInfo.headers, function(value, name) {
xhr.setRequestHeader(name, value);
});
xhr.send(form);
},
SERVER
//things.router
app.route('/api/things/:thingId/add_photo')
.post(things.uploadPhoto);
//things.controller
exports.uploadPhoto = function(req, res) {
var formidable = require('formidable');
var form = new formidable.IncomingForm();
form.parse(req, function(err, fields, files) {
var data = files.qqfile;
//actual file is at data.path
fs.createReadStream(data.path).pipe(request.put(uploadUrl));
}
}
Related
So, first time posting, I usually just find the answer I need by looking through similar questions but this time I'm stumped.
First off, I'm self-taught and about on par with an entry-level developer at absolute best (for reference my highest score on CodeSignal for javascript is 725).
Here is my problem:
I'm working on an SSG eCommerce website using the Nuxt.js framework. The products are digital and so they need to be fulfilled by providing a time-limited download link when a customer makes a purchase. I have the product files stored in a private amazon s3 bucket. I also have a Netlify Serverless Function that when called with a GET request, generates and returns a pre-signed URL for the file (at the moment there is only one product but ideally it should generate pre-signed URLs based on a filename sent as a JSON event body key string since more products are planned in the near future, But I can figure that out once the whole thing is working).
The website is set up to generate dynamic routes based on the user's order number so they can view their previous orders(/pages/account/orders/_id.vue). I have placed a download button, nested in an element on this page so that each order has a button to download the files. The idea is that button press calls a function I defined in the methods object. The function makes an XMLHttpRequest to the endpoint URL of the netlify function. Netlify function returns pre-signed URL to function which returns pre-signed URL to the href property so that file can be downloaded by the user.
But no matter what I try, it fails to download the file. When the page loads it successfully calls the Netlify function and I get a response code 200 but the href property remains blank. Am I going about this the wrong way? There is clearly something I'm not understanding correctly, Any input is greatly appreciated.
Here is my code....
The download button:
<a
:download=<<MY_PRODUCT_NAME>>
:href="getmyurl()"
>
<BaseButton
v-if="order.status === 'complete'"
fit="auto"
appearance="light"
label="Download"
/>
</a>
function that button calls:
methods: {
getmyurl() {
let myurl = "";
const funcurl = <<MY_NETLIFY_FUNCTION_URL>>;
let xhr = new XMLHttpRequest();
xhr.open('GET', funcurl);
xhr.send();
xhr.onload = function() {
if (xhr.status != 200) {
alert(`Error ${xhr.status}: ${xhr.statusText}`);
} else {
myurl = xhr.response.Geturl
};
};
return myurl
},
Netlify function:
require( "dotenv" ).config();
const AWS = require('aws-sdk');
let s3 = new AWS.S3({
accessKeyId: process.env.MY_AWS_ACCESS_KEY,
secretAccessKey: process.env.MY_AWS_SECRET_KEY,
region: process.env.MY_AWS_REGION,
signatureVersion: 'v4',
});
exports.handler = function( event, context, callback ) {
var headers = {
"Access-Control-Allow-Origin" : "*",
"Access-Control-Allow-Headers": "Content-Type"
};
if ( event.httpMethod === "OPTIONS" ) {
callback(
null,
{
statusCode: 200,
headers: headers,
body: JSON.stringify( "OK" )
}
);
return;
}
try {
var resourceKey = process.env.MY_FILE_NAME
var getParams = {
Bucket: process.env.MY_S3_BUCKET,
Key: resourceKey,
Expires: ( 60 * 60 ),
ResponseCacheControl: "max-age=604800"
};
var getUrl = s3.getSignedUrl( "getObject", getParams );
var response = {
statusCode: 200,
headers: headers,
body: JSON.stringify({
getUrl: getUrl
})
};
} catch ( error ) {
console.error( error );
var response = {
statusCode: 400,
headers: headers,
body: JSON.stringify({
message: "Request could not be processed."
})
};
}
callback( null, response );
}
I have a project where it uses Filepond to upload files and I need it to load file from server.
I already follow the docs but It doesn't work. The Filepond gives error Error during load 400 and it even doesn't send the request to load the file from server
This is my javascript
let pond = FilePond.create(value, {
files: [
{
// the server file reference
source: 'e958818e-92de-4953-960a-d8157467b766',
// set type to local to indicate an already uploaded file
options: {
type: 'local'
}
}
]
});
FilePond.setOptions({
labelFileProcessingError: (error) => {
return error.body;
},
server: {
headers: {
'#tokenSet.HeaderName' : '#tokenSet.RequestToken'
},
url: window.location.origin,
process: (fieldName, file, metadata, load, error, progress, abort) => {
// We ignore the metadata property and only send the file
fieldName = "File";
const formData = new FormData();
formData.append(fieldName, file, file.name);
const request = new XMLHttpRequest();
request.open('POST', '/UploadFileTemp/Process');
request.setRequestHeader('#tokenSet.HeaderName', '#tokenSet.RequestToken');
request.upload.onprogress = (e) => {
progress(e.lengthComputable, e.loaded, e.total);
};
request.onload = function () {
if (request.status >= 200 && request.status < 300) {
load(request.responseText);
}
else {
let errorMessageFromServer = request.responseText;
error('oh no');
}
};
request.send(formData);
},
revert: "/UploadFileTemp/revert/",
load: "/UploadFileTemp/load"
}
})
This is my controller
public async Task<IActionResult> Load(string p_fileId)
{
//Code to get the files
//Return the file
Response.Headers.Add("Content-Disposition", cd.ToString());
Response.Headers.Add("X-Content-Type-Options", "nosniff");
return PhysicalFile(filePath, "text/plain");
}
NB
I already test my controller via postman and it works. I also check the content-disposition header
I'd advise to first set all the options and then set the files property.
You're setting the files, and then you're telling FilePond where to find them, it's probably already trying to load them but doesn't have an endpoint (yet).
Restructuring the code to look like this should do the trick.
let pond = FilePond.create(value, {
server: {
headers: {
'#tokenSet.HeaderName': '#tokenSet.RequestToken',
},
url: window.location.origin,
process: (fieldName, file, metadata, load, error, progress, abort) => {
// your processing method
},
revert: '/UploadFileTemp/revert',
load: '/UploadFileTemp/load',
},
files: [
{
// the server file reference
source: 'e958818e-92de-4953-960a-d8157467b766',
// set type to local to indicate an already uploaded file
options: {
type: 'local',
},
},
],
});
I have an issue when I'm using my POST ajax function.
I'm running a simple app created by express-generator using node, express, mongodb, mongoose and jade. After I created a module for posting a new object to the database, I have noticed that the POST / DELETE requests are done multiple times. This is happening both on 'on-click' modes and if I change the function to load automatically on load.
Please note: the function is only called once by the user - there are no logs in the browser. It looks like it runs every 2 minutes, and I have absolutely no idea why.
Any help would be much appreciated.
Javascript code:
$('#btn5').on('click', saveFiveDayForecast);
saveFiveDayForecast = function() {
var random = {
city: 'One'
}
$.ajax({
type: 'post',
data: JSON.stringify(random),
url: '/fiveDay/',
contentType: 'application/json',
dataType: 'JSON'
})
}
Routing file:
var express = require('express');
var router = express.Router();
// DB schema and model setup ----------------------------------------------
var mongoose = require('mongoose');
var Schema = mongoose.Schema;
// create 16 day schema
var fiveDaySchema = new Schema({
city : String
}, { collection: 'fiveDayModel' });
// create 16 day model for later manipulation in routing files
var fiveDayModel = mongoose.model('fiveDayModel', fiveDaySchema);
// END DB schema and model setup -------------------------------------------
/* POST */
router.post('/', function(req, res) {
var time = new Date();
var fiveDayWeatherToSave = new fiveDayModel( req.body );
console.log('post in the routing file was fired at: ');
console.log(time);
fiveDayWeatherToSave.save(function (err, data) {
if (err) console.log(err);
else console.log('Saved ', data );
});
});
Logs from node:
The node logs are interesting: when the data is saved first time there is no POST line in it, yet the data is saved anyway.
post in the routing file was fired at:
Wed May 13 2015 14:40:06 GMT+0100 (GMT Daylight Time)
Saved { __v: 0, city: 'One', _id: 55535436f0ee99bc1d790220 }
POST /fiveDay/ - - ms - -
post in the routing file was fired at:
Wed May 13 2015 14:42:06 GMT+0100 (GMT Daylight Time)
Saved { __v: 0, city: 'One', _id: 555354aef0ee99bc1d790221 }
POST /fiveDay/ - - ms - -
Browsers will retry a request if it fails due to a timeout. Since you never return a response, this will always happen. You should respond with something like res.end() or res.send("success!").
I'm trying to upload files to my Amazon S3 Bucket. S3 and amazon is set up.
This is the error message from Amazon:
Conflicting query string parameters: acl, policy
Policy and signature is encoded, with Crypto.js for Node.js
var crypto=Npm.require("crypto");
I'm trying to build POST request with Meteor HTTP.post method. This could be wrong as well.
var BucketName="mybucket";
var AWSAccessKeyId="MY_ACCES_KEY";
var AWSSecretKey="MY_SECRET_KEY";
//create policy
var POLICY_JSON={
"expiration": "2009-01-01T00:00:00Z",
"conditions": [
{"bucket": BucketName},
["starts-with", "$key", "uploads/"],
{"acl": 'public-read'},
["starts-with", "$Content-Type", ""],
["content-length-range", 0, 1048576],
]
}
var policyBase64=encodePolicy(POLICY_JSON);
//create signature
var SIGNATURE = encodeSignature(policyBase64,AWSSecretKey);
console.log('signature: ', SIGNATURE);
This is the POST request I'm using with Meteor:
//Send data----------
var options={
"params":{
"key":file.name,
'AWSAccessKeyId':AWSAccessKeyId,
'acl':'public-read',
'policy':policyBase64,
'signature':SIGNATURE,
'Content-Type':file.type,
'file':file,
"enctype":"multipart/form-data",
}
}
HTTP.call('POST','https://'+BucketName+'.s3.amazonaws.com/',options,function(error,result){
if(error){
console.log("and HTTP ERROR:",error);
}else{
console.log("result:",result);
}
});
and her I'm encoding the policy and the signature:
encodePolicy=function(jsonPolicy){
// stringify the policy, store it in a NodeJS Buffer object
var buffer=new Buffer(JSON.stringify(jsonPolicy));
// convert it to base64
var policy=buffer.toString("base64");
// replace "/" and "+" so that it is URL-safe.
return policy.replace(/\//g,"_").replace(/\+/g,"-");
}
encodeSignature=function(policy,secret){
var hmac=crypto.createHmac("sha256",secret);
hmac.update(policy);
return hmac.digest("hex");
}
A can't figure out whats going on. There might already be a problem at the POST method, or the encryption, because I don't know these methods too well. If someone could point me to the right direction, to encode, or send POST request to AmazonS3 properly, it could help a lot.
(I don't like to use filepicker.io, because I don't want to force the client to sign up there as well.)
Thanks in advance!!!
Direct uploads to S3 you can use the slingshot package:
meteor add edgee:slingshot
On the server side declare your directive:
Slingshot.createDirective("myFileUploads", Slingshot.S3Storage, {
bucket: "mybucket",
allowedFileTypes: ["image/png", "image/jpeg", "image/gif"],
acl: "public-read",
authorize: function () {
//You can add user restrictions here
return true;
},
key: function (file) {
return file.name;
}
});
This directive will generate policy and signature automatically.
And them just upload it like this:
var uploader = new Slingshot.Upload("myFileUploads");
uploader.send(document.getElementById('input').files[0], function (error, url) {
Meteor.users.update(Meteor.userId(), {$push: {"profile.files": url}});
});
Why don't you use the aws-sdk package? It packs all the needed methods for you. For example, here's the simple function for adding a file to bucket:
s3.putObject({
Bucket: ...,
ACL: ...,
Key: ...,
Metadata: ...,
ContentType: ...,
Body: ...,
}, function(err, data) {
...
});
check out the S3 meteor package. The readme has a very comprehensive walkthrough of how to get started
First thing is to add the package for s3 file upload.
For Installation: ADD (AWS SDK Smart Package)
$ meteor add peerlibrary: aws-sdk
1.Create Directive upload.js and paste this code.
angular.module('techno')
.directive("fileupload", [function () {
return {
scope: {
fileupload: "="
},
link: function(scope,element, attributes){
$('.button-collapse').sideNav();
element.bind("change", function (event) {
scope.$apply(function () {
scope.fileupload = event.target.files[0];
});
})
}};
}]);
2.Get Access key and paste it in your fileUpload.js file.
AWS.config.update({
accessKeyId: ' AKIAJ2TLJBEUO6IJLKMN ',
secretAccessKey: lqGE9o4WkovRi0hCFPToG0B6w9Okg/hUfpVr6K6g'
});
AWS.config.region = 'us-east-1';
let bucket = new AWS.S3();
3.Now put this upload code in your directive fileUpload.js
vm.upload = (Obj) =>{
vm.loadingButton = true;
let name = Obj.name;
let params = {
Bucket: 'technodheeraj',
Key: name,
ContentType: 'application/pdf',
Body: Obj,
ServerSideEncryption: 'AES256'
};
bucket.putObject(params, (err, data) => {
if (err) {
console.log('---err------->', err);
}
else {
vm.fileObject = {
userId: Meteor.userId(),
eventId: id,
fileName: name,
fileSize: fileObj.size,
};
vm.call("saveFile", vm.fileObject, (error, result) => {
if (!error){
console.log('File saved successfully');
}
})
}
})
};
4.Now in “saveFile” method paste this code
saveFile: function(file){
if(file){
return Files.insert(file);
}
};
5.In HTML paste this code
<input type="file" name="file" fileupload="file">
<button type="button" class="btn btn-info " ng-click="vm.upload(file)"> Upload File</button>
I am trying to combine the examples here, here to write a vows test for my node.js / express app that:
Creates a new user object
Checks the response was sane
Uses the returned _id to test looking up the newly created user
Again uses the _id to test updating the user
Item 1 and 2 work fine, but there is something wrong with my sub-context 'GET /users/:id'. It errors and I cannot figure out why. Tried Googling and using the debugger, but I still can't see what it is, I am probably just overlooking something obvious.
···✗ Errored » 3 honored ∙ 1 errored
Can anyone tell me why the 4th vow errors?
Here's my vows code:
var vows = require('vows')
, assert = require('assert')
, tobi = require('tobi')
var suite = vows.describe('Users API')
, now = new Date().getTime()
, newUser = { name: now + '_test_user', email: now + '#test.com' }
, browser = tobi.createBrowser(3000, 'localhost')
, defaultHeaders = { 'Content-Type': 'application/json' }
function assertStatus(code) {
return function (res, $) {
res.should.have.status(code)
}
}
var client = {
get: function(path) {
return function() {
browser.get(path, { headers: defaultHeaders }, this.callback)
}
},
post: function(path, data) {
return function() {
browser.post(path, { body: JSON.stringify(data), headers: defaultHeaders }, this.callback)
}
}
}
suite.addBatch({
'GET /users': {
topic: client.get('/users'),
'should respond with a 200 ok': assertStatus(200)
},
'POST /users': {
topic: client.post('/users', newUser),
'should respond with a 200 ok': assertStatus(200),
'should return the new user': function(res, $){
assert.isNotNull(res.body._id)
assert.isNotNull(res.body.created_at)
assert.isTrue(res.body._id.length > 0)
assert.equal(newUser.name, res.body.name)
assert.equal(newUser.email, res.body.email)
},
'GET /users/:id': { // Sub-context of POST /users
topic: function(res) { return client.get('/users/' + res.body._id) },
'should respond with a 200 ok': assertStatus(200)
}
}
})
suite.export(module)
EDIT
I tried simplifying the code as follows to help see if this.callback was the problem, but the error is still there:
'GET /users/:id': { // Sub-context of POST /users
topic: function(res) {
console.log('About to request /users/' + res.body._id)
browser.get('/users/' + res.body._id, { headers: defaultHeaders }, this.callback)
},
'should respond with a 200 ok': assertStatus(200)
}
How are you populating res for the fourth tes?? It wouldn't be visible outside the line
'should return the new user'
Try creating the id variable outside the addBatch call, and set it in the third test. then call
client.get('/users/' + id)
EDIT:
Better yet, put it back into newUser in the third test:
'should return the new user': function(res, $){
newUser.id = res.body._id
....
and then do:
client.get('/users/' + newUser.id)