How to upload a file to S3 through Node.js Express from AngularJS - javascript

I am facing a problem uploading a file to S3 via Node.js (Express) and AngularJS.
I am using an Angular directive to send the file to Node/Express, and from Node to S3.
Angular directive:
(function () {
    'use strict';
    angular.module('app').directive('ngFileModel', ['$parse', function ($parse) {
        return {
            restrict: 'A',
            link: function (scope, element, attrs) {
                var model = $parse(attrs.ngFileModel);
                var isMultiple = attrs.multiple;
                var modelSetter = model.assign;
                element.bind('change', function () {
                    var values = [];
                    angular.forEach(element[0].files, function (item) {
                        var value = {
                            // File name
                            name: item.name,
                            // File size
                            size: item.size,
                            // Object URL to preview the file
                            url: URL.createObjectURL(item),
                            // The File object itself
                            _file: item
                        };
                        values.push(value);
                    });
                    scope.$apply(function () {
                        if (isMultiple) {
                            modelSetter(scope, values);
                        } else {
                            modelSetter(scope, values[0]);
                        }
                    });
                });
            }
        };
    }]);
})();
HTML code:
<input type="file" id="upme" ng-file-model="files" multiple style="display:none;" />
<div ng-if="files.length>0" ng-init="vm.uploadFile()"></div>
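The controller method vm.uploadFile is not shown in the question; presumably it POSTs the files array produced by the directive (name, size, object URL) as JSON to the Express endpoint. A hypothetical sketch, not the actual code, that also shows why the server only ever receives metadata and an object URL rather than the file bytes:
// Hypothetical sketch only -- the actual controller is not shown in the question.
// It sends just the metadata built by the directive (name, size, object URL),
// so the Express server never receives the file bytes themselves.
vm.uploadFile = function () {
    $http.post('/api/upload', { images: $scope.files })   // '/api/upload' is an assumed route
        .then(function (response) {
            console.log('upload response:', response.data);
        }, function (error) {
            console.error('upload failed:', error);
        });
};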
Server side:
exports.upload = function (req, res) {
    var images = req.body.images;
    // res.send(images);
    // console.dir(images);
    images.forEach(function (file) {
        // console.dir(file);
        S3.upFile('testbucket', file.name, file.url, function (err, data) {
            if (err) {
                console.log(err);
            } else {
                console.dir(data);
            }
        });
    });
};
Problem:
The upload function runs and something is uploaded to the S3 bucket; the file name appears in the bucket, but it is not the actual size of the file and I cannot open it. When I click on the file URL it prompts me to download the file, and after I download it, the file does not open. I think there may be a problem parsing the file on the Node server before uploading it to S3, but I can't identify what the solution should be.
I also get this error in the console:
TypeError: path must be a string or Buffer
at TypeError (native)
at Object.fs.open (fs.js:625:11)
at ReadStream.open (fs.js:1708:6)
at new ReadStream (fs.js:1695:10)
at Object.fs.createReadStream (fs.js:1643:10)
I have made the S3 file upload function a module in a separate file. Here is the module's upload function:
// uploading a file or object into the bucket
exports.upFile = function (bucket_name, key, file, next) {
    var params = { Bucket: bucket_name, Key: key, Body: fs.createReadStream(file), ACL: "public-read" };
    s3.upload(params, function (err, data) {
        next(err, data);
    });
};
I appreciate any help from experts.

You are not giving a file as an argument to the upload function but an object URL. To correct your implementation, you have to make some changes in Angular. First, you should send the files as multipart form data from Angular. You can achieve this by using:
var form = new FormData();
angular.forEach(element[0].files, function (item) {
    form.append('file', item);
});
Send this form data to your Node server with an HTTP request. You can define the route and HTTP method using Express in Node.js. On the AngularJS side the request should look something like this:
$http.post('/test_route', form, {
    withCredentials: false,
    headers: {
        'Content-Type': undefined
    },
    transformRequest: angular.identity
}).success(function (data) {
    // do something with data from server
});
At the Node server, when you receive the request you have to extract the files from the form data. First define a route and method using Express:
var fs = require('fs');
var multiparty = require('multiparty');

app.post('/test_route', function (req, res) {
    var form = new multiparty.Form();
    form.parse(req, function (err, fields, files) {
        var files_to_upload = files.file;
        files_to_upload.forEach(function (file) {
            var read_file = fs.readFileSync(file.path);
            var params = { Bucket: bucket_name, Key: file.originalFilename, Body: read_file, ACL: "public-read" };
            s3.upload(params, function (err, data) {
                if (err) { console.log(err); }
                // once the file is uploaded you can remove the local copy that
                // was saved to disk when the multipart data arrived
                fs.unlink(file.path, function (err) {
                    if (err) { console.log(err); }
                });
            });
        });
    });
});
To parse the multipart form data on the Node server, use the multiparty module. More information can be found here: https://www.npmjs.com/package/multiparty

Related

How to parse an object sent from react frontend in express.js?

So in my React front-end, I am using the 'react-drop-to-upload' module to allow the user to drag a file and upload it. I followed the example on the npm module page and created a handler called handleDrop. The code looks like:
handleDrop(files) {
    var data = new FormData();
    alert((files[0]) instanceof File);
    files.forEach((file, index) => {
        data.append('file' + index, file);
    });
    fetch('/file_upload', {
        method: 'POST',
        body: data
    });
}
At my express backend, I have the following code:
app.post('/file_upload', function (req, res) {
    var body = '';
    req.on('data', function (data) {
        body += data;
    });
    var post = "";
    req.on('end', function () {
        //post = qs.parse(body);
        console.log(body);
        // this won't create a buffer for me
        //var fileBuffer = new Buffer(body);
        //console.log(fileBuffer.toString('ascii'));
        //pdfText(body, function(err, chunks) {
        //    console.log(chunks);
        //});
    });
    //console.log(typeof post);
});
If I drop a txt file and do a console log on the body, it would give me:
------WebKitFormBoundaryqlp9eomS0BxhFJkQ
Content-Disposition: form-data; name="file0"; filename="lec16.txt"
Content-Type: text/plain
The content of my data!
------WebKitFormBoundaryqlp9eomS0BxhFJkQ--
I am trying to use the pdfText module, which takes in a buffer or a pathname to the PDF file and extracts text from it into an array of text 'chunks'. I want to convert the body object into a buffer using var fileBuffer = new Buffer(body); but that won't work. Can someone help me with this? Thanks!
You need a parser for multipart form data. You can look into multer for that.
Example code for you:
var fs = require('fs');
var multer = require('multer');

app.post('/file_upload', function (req, res) {
    var storage = multer.diskStorage({
        destination: tmpUploadsPath // directory where incoming files are written (define this path yourself)
    });
    var upload = multer({
        storage: storage
    }).any();
    upload(req, res, function (err) {
        if (err) {
            console.log(err);
            return res.end('Error');
        } else {
            console.log(req.body);
            req.files.forEach(function (item) {
                // console.log(item);
                // do something with the item
                const data = fs.readFileSync(item.path);
                console.log(data);
            });
            res.end('File uploaded');
        }
    });
});
To understand the example code in depth, see the multer documentation. Remember, you will get the file contents as a Buffer, not as decoded text.
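Since the question states that pdfText accepts a buffer, the Buffer returned by fs.readFileSync can be handed to it directly inside the multer callback above. A rough sketch, using pdfText's (input, callback) signature as described in the question:
req.files.forEach(function (item) {
    // item.path points at the temp file written by multer's disk storage
    var fileBuffer = fs.readFileSync(item.path);
    pdfText(fileBuffer, function (err, chunks) {
        if (err) { return console.log(err); }
        console.log(chunks); // array of text chunks extracted from the file
    });
});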

Upload HTML file to AWS S3 and then serve it instead of downloading it

I am downloading a web page and then writing it to a file named thisArticle.html, using the code below.
var file = fs.createWriteStream("thisArticle.html");
var request = http.get(req.body.url, response => response.pipe(file) );
After that I am trying to read the file and upload it to S3; here is the code that I wrote:
fs.readFile('thisArticle.html', 'utf8', function (err, html) {
    if (err) {
        console.log(err + "");
        throw err;
    }
    var pathToSave = 'articles/ ' + req.body.title + '.html';
    var s3bucket = new AWS.S3({ params: { Bucket: 'all-articles' } });
    s3bucket.createBucket(function () {
        var params = {
            Key: pathToSave,
            Body: html,
            ACL: 'public-read'
        };
        s3bucket.upload(params, function (err, data) {
            fs.unlink("thisArticle.html", function (err) {
                console.error(err);
            });
            if (err) {
                console.log('ERROR MSG: ', err);
                res.status(500).send(err);
            } else {
                console.log(data.Location);
            }
            // ..., more code below
        });
    });
});
Now, I am facing two issues:
1. The file is uploading but with 0 bytes (empty).
2. When I upload the file manually via the S3 dashboard it is uploaded successfully, but when I try to load its URL in the browser it downloads the HTML file instead of serving it.
Any guidance if I am missing something?
Set the ContentType to "text/html".
s3 = boto3.client("s3")
s3.put_object(
    Bucket=s3_bucket,
    Key=s3_key,
    Body=html_string,
    CacheControl="max-age=0,no-cache,no-store,must-revalidate",
    ContentType="text/html",
    ACL="public-read"
)
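The snippet above is Python (boto3); with the Node.js SDK used in the question, the same fix is to add ContentType to the upload params. A minimal sketch reusing the question's variables:
var params = {
    Key: pathToSave,
    Body: html,
    ACL: 'public-read',
    ContentType: 'text/html' // lets browsers render the page instead of downloading it
};
s3bucket.upload(params, function (err, data) {
    if (err) { return console.log('ERROR MSG: ', err); }
    console.log(data.Location);
});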
It looks like your upload function is deleting the file with fs.unlink before it gets uploaded. That's why it's going up as 0 bytes.
Also, to make the bucket serve the HTML, you need to turn on static website hosting as described in the AWS S3 docs: http://docs.aws.amazon.com/AmazonS3/latest/UG/ConfiguringBucketWebsite.html
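A minimal sketch of the reordering suggested above, deleting the local file only after the upload callback reports success (variable names taken from the question):
s3bucket.upload(params, function (err, data) {
    if (err) {
        console.log('ERROR MSG: ', err);
        return res.status(500).send(err);
    }
    console.log(data.Location);
    // remove the local copy only once the upload has finished
    fs.unlink('thisArticle.html', function (unlinkErr) {
        if (unlinkErr) { console.error(unlinkErr); }
    });
});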

How to configure API endpoint to receive file from ember-uploader component

I'm trying to figure out how to use ember-uploader. I have the following component (like the one in the README):
export default EmberUploader.FileField.extend({
    filesDidChange: function (files) {
        const uploader = EmberUploader.Uploader.create({
            url: (ENV.APP.API_HOST || '') + '/api/v1/images/',
        });
        console.log(uploader);

        if (!Ember.isEmpty(files)) {
            var photo = files[0];
            console.log(photo);
            uploader.upload(photo)
                .then(data => {
                    // Handle success
                    console.log("Success uploading file");
                    console.log(data);
                }, error => {
                    // Handle failure
                    console.log("ERROR uploading file");
                    console.log(error);
                });
        }
    }
});
The express API endpoint is listening for a POST request.
var saveImage = (req, res, next) => {
    let body = req.body;
    res.json({
        data: body
    });
};
But the body is empty after the request is done. I really don't know how to implement the API endpoint in order to get the file; I inspected the req object and it doesn't contain the file.
Debugging it: after selecting a file using the component, I get the following info in the console.
It seems that the API endpoint works because I get the following output:
POST /api/v1/images/ 200 27.284 ms - 11
But I can't get the file.
SOLUTION
In Express 4, req.files is no longer available on the req object by default. To access uploaded files on the req.files object, use a multipart-handling middleware like busboy, multer, formidable, multiparty, connect-multiparty, or pez.
Following this blog, the code below was added to the API and the ember-uploader code posted in the question worked as expected.
import formidable from 'formidable';

var saveImage = (req, res, next) => {
    var form = new formidable.IncomingForm();
    form.parse(req);

    form.on('fileBegin', function (name, file) {
        file.path = __dirname + '/tmp/' + file.name;
    });

    form.on('file', function (name, file) {
        res.json({
            data: file.name
        });
    });
};

How can we upload multiple blobs to Azure using Node.js

I am trying to upload 6 images, received from a registration form, to Azure Blob Storage from a single endpoint. The code below shows how to upload a single blob, but I need to upload multiple blobs at the same time. How can I do it?
Here is my code:
app.post('/upload', function (req, res) {
    //var dirname = require('path').dirname(__dirname);
    //var dirname1 = require('path').dirname(dirname);
    var filename = req.files[0].filename;
    var path = req.files[0].path;
    var type = req.files[0].mimetype;
    var options = {
        contentType: type,
        metadata: { fileName: filename }
    };
    blobSvc.createBlockBlobFromLocalFile(containerName, filename, path, options, function (error, result, response) {
        if (error != null) {
            console.log('Azure Full Error: ', error);
        } else {
            console.log(result);
            console.log(response);
            var user = new User();
            user.name = req.body.name;
            user.picture = 'https://yourblob.blob.core.windows.net/profile/' + result.name;
            user.save(function (err) {
                if (err) {
                    return res.json(err.message);
                } else {
                    return res.json({ User: user });
                }
            });
        }
    });
});
The Azure Storage SDK for Node is based on the REST API, and uploads are implemented via the Put Blob operation.
There is no REST API or SDK function that directly uploads multiple independent blobs to Azure in a single call.
You can implement this functionality yourself by uploading the files in a loop, as in the sketch below.
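A hedged sketch of that loop, reusing the question's blobSvc, containerName, and a multer-style req.files array; it sends the response only after every upload callback has returned:
app.post('/upload', function (req, res) {
    var urls = [];
    var pending = req.files.length;

    req.files.forEach(function (file) {
        var options = {
            contentType: file.mimetype,
            metadata: { fileName: file.filename }
        };
        blobSvc.createBlockBlobFromLocalFile(containerName, file.filename, file.path, options, function (error, result) {
            if (error) {
                console.log('Azure Full Error: ', error);
            } else {
                urls.push('https://yourblob.blob.core.windows.net/profile/' + result.name);
            }
            // respond only after all uploads have completed
            if (--pending === 0) {
                res.json({ pictures: urls });
            }
        });
    });
});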

S3 file upload file object using node js

I'm using Sails.js 0.12.1 and Node.js 4.2.6.
I want to upload a file from the front-end (Angular.js) through an API, and from the back-end I want to upload the file to an AWS S3 bucket.
From the front-end I'm sending the file to the API. In the back-end I'm getting the file with its name, but while uploading the file to S3 I'm getting the error:
Cannot determine length of [object Object]
I googled the error and found many links, but no luck.
Back-end
uploadPersonAvtar: function (req, res) {
    var zlib = require('zlib');
    var file = req.file('image');
    var mime = require('mime');

    data = {
        Bucket: 'bucket',
        Key: 'my_key',
        Key: file.name,
        Body: file,
        ContentType: mime.lookup(file.name)
    };
    // Upload the stream
    var s3obj = new AWS.S3(S3options);
    s3obj.upload(data, function (err, data) {
        if (err) console.log("An error occurred", err);
        console.log("Uploaded the file at", data);
    });
}
Is my approach correct?
If yes, what am I doing wrong?
I want to know how to use the file object to upload the file.
I can create a read stream, but I don't have the file path. When I create the file object I get the error: path must be a string.
You could look at the following example about streaming data: Amazon S3: Uploading an arbitrarily sized stream (upload).
var fs = require('fs');
var zlib = require('zlib');

var body = fs.createReadStream('bigfile').pipe(zlib.createGzip());
var s3obj = new AWS.S3({ params: { Bucket: 'myBucket', Key: 'myKey' } });
s3obj.upload({ Body: body })
    .on('httpUploadProgress', function (evt) { console.log(evt); })
    .send(function (err, data) { console.log(err, data); });
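In Sails specifically, the object returned by req.file('image') is an upstream, not a path. One common approach (assuming Sails' default skipper disk adapter, which is an assumption here) is to call .upload() first, which writes the file to a temp directory and exposes its path as fd, and then stream that path to S3 just like the AWS example above. A sketch:
uploadPersonAvtar: function (req, res) {
    var fs = require('fs');
    var mime = require('mime');

    // Sketch assuming skipper's default disk adapter.
    req.file('image').upload(function (err, uploadedFiles) {
        if (err) { return res.serverError(err); }
        if (uploadedFiles.length === 0) { return res.badRequest('No file was uploaded'); }

        var file = uploadedFiles[0]; // skipper exposes the temp path as `fd`
        var s3obj = new AWS.S3(S3options);
        s3obj.upload({
            Bucket: 'bucket',
            Key: file.filename,
            Body: fs.createReadStream(file.fd),
            ContentType: mime.lookup(file.filename)
        }, function (err, data) {
            if (err) { return res.serverError(err); }
            return res.json({ location: data.Location });
        });
    });
}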
