Uploading Assets to S3 using Gulp - JavaScript

I am currently trying to upload the assets of a website to Amazon S3 using aws-sdk and gulp, but so far I have only managed to upload single files using this code:
gulp.task('publish', function() {
    var AWS = require('aws-sdk'),
        fs = require('fs');

    AWS.config.accessKeyId = 'access_id';
    AWS.config.secretAccessKey = 'secret_key';
    AWS.config.region = 'eu-central-1';

    var fileStream = fs.createReadStream('folder/filename');

    fileStream.on('error', function (err) {
        if (err) { throw err; }
    });

    fileStream.on('open', function () {
        var s3 = new AWS.S3();
        s3.putObject({
            Bucket: 'bucket_name',
            Key: 'assets/filename',
            Body: fileStream,
            ACL: 'public-read'
        }, function (err) {
            if (err) { throw err; }
            else { console.log("Upload successful"); }
        });
    });
});
Since I am neither a Node.js nor a JS dev, I have no idea how to upload all of my assets into the assets folder of the S3 bucket. Ideally, I would like to apply the action I use to upload one file to every file in the folder. How would this be doable?

Found the solution to my problem. With this code, I finally managed to upload all my assets to my bucket with the right ACLs. I hope this can help people avoid spending as much time as I did on such a simple problem.
/*
 * Dependencies
 */
var gulp = require('gulp');
var AWS = require('aws-sdk');
var fs = require('fs');
var walk = require('walk');

/*
 * Declaration of global variables
 */
var isPaused = false;

/*
 * Bucket access information
 */
AWS.config.accessKeyId = 'access_keyid';
AWS.config.secretAccessKey = 'secret_access_key';
AWS.config.region = 'region';

/*
 * Publishing function: uses a stream to push the files to the AWS bucket
 */
function publishit(filename) {
    var file = filename.substring('./'.length);
    var key = file.substring('src/'.length);
    var fileStream = fs.createReadStream(file);
    isPaused = true;

    // Check if there is an error on the file
    fileStream.on('error', function (err) {
        if (err) { throw err; }
    });

    // Action to take when the file is opened
    fileStream.on('open', function () {
        var s3 = new AWS.S3();
        // Upload the stream to the bucket
        s3.putObject({
            Bucket: 'bucket_name',
            Key: key,
            Body: fileStream,
            ACL: 'public-read'
        }, function (err) {
            // Show the error if there is any
            if (err) { throw err; }
            // If everything went successfully, print which file is being uploaded
            else { console.log("Uploading asset " + file); }
            // Close the stream to avoid leaks and socket timeouts
            fileStream.close();
            // Set 'isPaused' back to false to continue uploading the other assets
            isPaused = false;
        });
    });
}

gulp.task('assets', function() {
    var files = [];
    // Walker options (first arg is the folder you want to upload)
    var walker = walk.walk('./assets', { followLinks: false });

    walker.on('file', function(root, stat, next) {
        // Add this file to the list of files
        files.push(root + '/' + stat.name);
        next();
    });

    // Action after every file has been added to 'files'
    walker.on('end', function() {
        for (var filename in files) {
            // Publish every file added to 'files'
            publishit(files[filename]);
            // Wait for one push to the server to be done before calling the next one
            function waitForIt() {
                if (isPaused) {
                    setTimeout(function() { waitForIt(); }, 100);
                }
            }
        }
    });
});
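If you would rather keep everything inside gulp's own file stream instead of walking the directory manually, something like the following minimal sketch should also work. It assumes aws-sdk v2 with credentials configured as above; the bucket name and the glob are placeholders, and each upload completes asynchronously after its callback fires.

var gulp = require('gulp');
var AWS = require('aws-sdk');

gulp.task('publish-assets', function () {
    var s3 = new AWS.S3({ region: 'eu-central-1' });

    // gulp.src emits Vinyl file objects; buffered contents can be used directly as the upload body
    return gulp.src('./assets/**/*', { buffer: true })
        .on('data', function (file) {
            if (file.isBuffer()) {
                s3.putObject({
                    Bucket: 'bucket_name',           // placeholder
                    Key: 'assets/' + file.relative,  // key relative to the assets folder
                    Body: file.contents,
                    ACL: 'public-read'
                }, function (err) {
                    if (err) { console.error(err); }
                    else { console.log('Uploaded ' + file.relative); }
                });
            }
        });
});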

Related

Write file to directory then zip directory

I am trying to write a file to a templates directory and then stream a zip with the content that was written. However, when the zip file is returned it says Failed - Network Error, which is due to the fs.writeFile in the controller. If I remove the writeFile call then the zipping works fine. My question is: how do I first write the file and then run the zip? There seems to be some kind of conflict between the archiving and the writing of typeArrayString.
Controller:
exports.download_one_feed = function(req, res, next) {
    Feed.findOne({ 'name': req.params.id })
        .exec(function(err, dbfeeds) {
            if (err) {
                res.send('get error has occured in routes/feeds.js');
            } else {
                const feedArray = dbfeeds.feed.data;
                // write file
                // get from db collection & write file to download
                const typeArrayString = JSON.stringify(feedArray);
                let type = 'b'; // test only
                fs.writeFile(path.join(appDir, 'templates/' + type + '/template.json'), typeArrayString, (err) => {
                    if (err) throw err;
                    console.log('Saved!');
                });
                archiverService.FileArchiver(feedArray, res);
            }
        });
};
Archive Service
const archiver = require('archiver');
const zip = archiver('zip');
const path = require('path');
const fs = require('fs');
const appDir = path.dirname(require.main.filename);

exports.FileArchiver = function (feedArray, res) {
    // const app = this.app;
    const uploadsDir = path.join(appDir, '/uploads/');
    const templatesDir = path.join(appDir, '/templates/');
    const extensions = [".jpg", ".png", ".svg"];
    let imageArray = [];

    const feedArrayObject = JSON.parse(feedArrayString);
    feedArrayObject.forEach(function(x) { iterate(x); }); // grab image names from object
    imageArray = uniq_fast(imageArray); // remove duplicates

    // zip images
    for (let i = 0; i < imageArray.length; i++) {
        console.log(imageArray[i]);
        const filePath = path.join(uploadsDir, imageArray[i]);
        zip.append(fs.createReadStream(filePath), { name: 'images/' + imageArray[i] });
    }

    res.attachment('download.zip');
    zip.pipe(res);

    // zip template directory
    console.log(templatesDir);
    zip.directory(templatesDir, false);
    zip.on('error', (err) => { throw err; });
    zip.finalize();

    return this;
}
Instead of writing the file and then zipping the directory, I used zip.append to override the old file in the directory.
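For reference, a minimal sketch of that approach, assuming the same archiver instance (zip), response (res), and feedArray as in the question; the entry name 'b/template.json' is a placeholder for whatever path the template should have inside the zip:

// Append the JSON string straight into the archive instead of writing it to disk first
const typeArrayString = JSON.stringify(feedArray);
zip.append(typeArrayString, { name: 'b/template.json' }); // placeholder entry name

// Then add the images, attach the response, and finalize as before
res.attachment('download.zip');
zip.pipe(res);
zip.finalize();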

Upload HTML file to AWS S3 and then serve it instead of downloading it

I am downloading a web page and then writing it to a file named thisArticle.html, using the code below.
var file = fs.createWriteStream("thisArticle.html");
var request = http.get(req.body.url, response => response.pipe(file) );
After that I am trying to read the file and upload it to S3; here is the code that I wrote:
fs.readFile('thisArticle.html', 'utf8', function(err, html) {
    if (err) {
        console.log(err + "");
        throw err;
    }
    var pathToSave = 'articles/ ' + req.body.title + '.html';
    var s3bucket = new AWS.S3({ params: { Bucket: 'all-articles' } });
    s3bucket.createBucket(function () {
        var params = {
            Key: pathToSave,
            Body: html,
            ACL: 'public-read'
        };
        s3bucket.upload(params, function (err, data) {
            fs.unlink("thisArticle.html", function (err) {
                console.error(err);
            });
            if (err) {
                console.log('ERROR MSG: ', err);
                res.status(500).send(err);
            } else {
                console.log(data.Location);
            }
            // ..., more code below
        });
    });
});
Now, I am facing two issues:
The file is uploading but with 0 bytes (empty)
When I upload the file manually via the S3 dashboard, it is uploaded successfully, but when I try to load the URL in the browser it downloads the HTML file instead of serving it.
Any guidance if I am missing something?
Set the ContentType to "text/html".
s3 = boto3.client("s3")
s3.put_object(
    Bucket=s3_bucket,
    Key=s3_key,
    Body=html_string,
    CacheControl="max-age=0,no-cache,no-store,must-revalidate",
    ContentType="text/html",
    ACL="public-read"
)
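That snippet is Python (boto3); in the Node.js code from the question the same fix is just an extra ContentType field (and optionally CacheControl) in the upload params. A minimal sketch, assuming aws-sdk v2 and the same variables as above:

s3bucket.upload({
    Key: pathToSave,
    Body: html,
    ACL: 'public-read',
    ContentType: 'text/html',
    CacheControl: 'max-age=0,no-cache,no-store,must-revalidate'
}, function (err, data) {
    if (err) { return res.status(500).send(err); }
    console.log(data.Location);
});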
It looks like your upload function is deleting the file with fs.unlink before it gets uploaded. That's why it's going up as 0 bytes.
Also, to make the bucket serve the HTML, you need to turn on static website hosting as described in the AWS S3 docs: http://docs.aws.amazon.com/AmazonS3/latest/UG/ConfiguringBucketWebsite.html
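If you prefer to enable that from code rather than the console, a minimal sketch with aws-sdk v2 (the index and error document keys are placeholders):

var AWS = require('aws-sdk');
var s3 = new AWS.S3();

// Turn on static website hosting for the bucket
s3.putBucketWebsite({
    Bucket: 'all-articles', // bucket from the question
    WebsiteConfiguration: {
        IndexDocument: { Suffix: 'index.html' }, // placeholder
        ErrorDocument: { Key: 'error.html' }     // placeholder
    }
}, function (err) {
    if (err) { console.error(err); }
    else { console.log('Static website hosting enabled'); }
});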

How to upload my files to another server using multer in Node.js?

var storage = multer.diskStorage({
    destination: function (req, file, callback) {
        callback(null, '/var/www/html');
    },
    filename: function (req, file, callback) {
        //console.log(file);return;
        if (file.mimetype == 'audio/mp3' || file.mimetype == 'audio/wav') {
            var w = file.originalname;
            var f = w.substr(0, w.lastIndexOf('.')); // filename without extension
            callback(null, Date.now() + '-' + w);
        } else {
            var result = new sResultSh.commandResult("Failed", "404");
            onComplete(result);
        }
    },
});
var upload = multer({ storage: storage }).any();

upload(req, res, function (err) {
    if (err) {
        var resultErr = [];
        resultErr.push(err);
        var result = new sResultSh.commandResult("Failed", "404", resultErr);
        onComplete(result);
    } else {
        var result = new sResultSh.commandResult("Success", "200");
        onComplete(result);
    }
});
Above is my code, and I need to upload my file to /var/www/html on 195.158.1.45.
How can I do this in Node.js?
The upload is successful on my local system, but I need to upload the file to another server. Any help?
Uploading files to a remote server using multer alone is not possible directly, but we can combine it with multer-sftp, scp, or ssh techniques in Node.js.
Check the answers using multer-sftp and scp2, for example the sketch below.
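A minimal sketch of the scp2 route, assuming the file has already been stored locally by multer and that the local path, host, credentials, and remote path below are placeholders:

var client = require('scp2');

// Copy a locally uploaded file to the remote web root over SSH
client.scp('/path/to/local/upload.wav', {
    host: '195.158.1.45',   // remote server from the question
    username: 'user',       // placeholder credentials
    password: 'password',
    path: '/var/www/html/'
}, function (err) {
    if (err) { console.error(err); }
    else { console.log('File copied to the remote server'); }
});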

How can we upload multiple blobs to Azure using Node.js?

I am trying to upload 6 images to Azure Blob storage from a single endpoint, which I get from a registration form. The code shows how to upload a single blob, but I need to upload multiple blobs at the same time. How can I do that?
Here is my code:
app.post('/upload', function (req, res) {
    //var dirname = require('path').dirname(__dirname);
    //var dirname1 = require('path').dirname(dirname);
    var filename = req.files[0].filename;
    var path = req.files[0].path;
    var type = req.files[0].mimetype;
    var options = {
        contentType: type,
        metadata: { fileName: filename }
    };
    blobSvc.createBlockBlobFromLocalFile(containerName, filename, path, options, function (error, result, response) {
        if (error != null) {
            console.log('Azure Full Error: ', error);
        } else {
            console.log(result);
            console.log(response);
            var user = new User();
            user.name = req.body.name;
            user.picture = 'https://yourblob.blob.core.windows.net/profile/' + result.name;
            user.save(function (err) {
                if (err) {
                    return res.json(err.message);
                } else {
                    return res.json({ User: user });
                }
            });
        }
    });
});
The Azure Storage SDK for Node is based on the RESTful Storage APIs, and upload functionality is implemented via the Put Blob operation.
There is no RESTful API or SDK function that directly uploads multiple independent blobs to Azure in a single call.
You can implement this functionality yourself by uploading the files in a loop, for example as sketched below.
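A minimal sketch of that loop, assuming the same blobSvc and containerName as in the question and that the uploaded files arrive on req.files:

app.post('/upload', function (req, res) {
    var uploads = req.files.map(function (file) {
        return new Promise(function (resolve, reject) {
            var options = {
                contentType: file.mimetype,
                metadata: { fileName: file.filename }
            };
            // Upload each file as its own block blob
            blobSvc.createBlockBlobFromLocalFile(containerName, file.filename, file.path, options,
                function (error, result) {
                    if (error) { reject(error); }
                    else { resolve(result); }
                });
        });
    });

    Promise.all(uploads)
        .then(function (results) { res.json({ uploaded: results.length }); })
        .catch(function (error) { res.status(500).json(error); });
});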

How to upload a file to S3 through Node.js Express from AngularJS

I am facing a problem uploading a file to S3 via a Node.js Express server from AngularJS.
I am using an Angular directive to send the file to the Node/Express server and from Node to S3.
Angular directive:
(function() {
    'use strict';
    angular.module('app').directive('ngFileModel', ['$parse', function ($parse) {
        return {
            restrict: 'A',
            link: function (scope, element, attrs) {
                var model = $parse(attrs.ngFileModel);
                var isMultiple = attrs.multiple;
                var modelSetter = model.assign;
                element.bind('change', function () {
                    var values = [];
                    angular.forEach(element[0].files, function (item) {
                        var value = {
                            // File name
                            name: item.name,
                            // File size
                            size: item.size,
                            // File URL to view
                            url: URL.createObjectURL(item),
                            // File input value
                            _file: item
                        };
                        values.push(value);
                    });
                    scope.$apply(function () {
                        if (isMultiple) {
                            modelSetter(scope, values);
                        } else {
                            modelSetter(scope, values[0]);
                        }
                    });
                });
            }
        };
    }]);
})();
HTML code
<input type="file" id="upme" ng-file-model="files" multiple style="display:none;" />
<div ng-if="files.length>0" ng-init="vm.uploadFile()"></div>
Server side:
exports.upload = function(req, res) {
    var images = req.body.images;
    //res.send(images);
    // console.dir(images)
    images.forEach(function(file) {
        // console.dir(file);
        S3.upFile('testbucket', file.name, file.url, function(err, data) {
            if (err) {
                console.log(err);
            } else {
                console.dir(data);
            }
        });
    });
};
Problem: the upload function runs and something gets uploaded to the S3 bucket (the file name appears in the bucket), but it does not seem to be the actual size of the file and I cannot open it. When I click on the file URL it asks me to download the file, and after I download it, it does not open. I think there may be a problem parsing the file on the Node server before uploading to S3, but I can't identify what the solution should be.
I also get this error in the console:
TypeError: path must be a string or Buffer
at TypeError (native)
at Object.fs.open (fs.js:625:11)
at ReadStream.open (fs.js:1708:6)
at new ReadStream (fs.js:1695:10)
at Object.fs.createReadStream (fs.js:1643:10)
I have made the S3 file upload function a module in a separate file. Here is the module's upload function:
// uploading file or object into bucket
exports.upFile = function(bucket_name, key, file, next) {
    var params = { Bucket: bucket_name, Key: key, Body: fs.createReadStream(file), ACL: "public-read" };
    s3.upload(params, function(err, data) {
        next(err, data);
    });
};
I appreciate any help from experts.
You are not passing a file to the upload function but an object URL. To correct your implementation, you have to make some changes on the Angular side. First, you should send the files as multipart form data from Angular. You can achieve this by using:
var form = new FormData();
angular.forEach(element[0].files, function (item) {
    form.append('file', item);
});
Send this form data to your Node server using an HTTP request. You can define the route and HTTP method using Express in Node.js. On the AngularJS side the request should look something like this:
$http.post('/test_route', form, {
    withCredentials: false,
    headers: {
        'Content-Type': undefined
    },
    transformRequest: angular.identity
}).success(function(data) {
    // do something with data from server
});
On the Node server, when you receive the request you have to extract the files from the form data. First define a route and method using Express:
var multiparty = require('multiparty');

app.post('/test_route', function(req, res) {
    var form = new multiparty.Form();
    form.parse(req, function(err, fields, files) {
        var files_to_upload = files.file;
        files_to_upload.forEach(function(file) {
            var read_file = fs.readFileSync(file.path);
            var params = { Bucket: bucket_name, Key: file.originalFilename, Body: read_file, ACL: "public-read" };
            s3.upload(params, function(err, data) {
                // respond to the client or log the result here
                // once the file is uploaded you can remove the local copy that multiparty saved to disk
                fs.unlink(file.path, function(err) {
                    if (err) { console.log(err); }
                });
            });
        });
    });
});
To parse the multipart form on the Node server, use the multiparty module. More information can be found here: https://www.npmjs.com/package/multiparty
