File upload is not working on Sails.js

I'm trying to upload an image to the server using the code below. It works fine for small images up to ~70KB.
For larger images it sets the file status to cancelled and saves the file to the server's disk, but with a size of zero KB.
var CustomerController = {
  uploadPhoto: function (request, response) {
    var customerId = request.param('id');
    // check for the existence of the customer
    Customer.findOne({
      id: customerId
    }).exec(function (err, customer) {
      if (err || !customer) {
        return response.notFound('Customer not found');
      }
      // get the file that was uploaded from the client
      // and save it to disk
      request.file('photo').upload(function onUploadComplete(err, files) {
        if (err || !files || files.length < 1) {
          return response.serverError('Error uploading photo: ' + err);
        }
        // do something with files here
      });
    });
  },
  _config: {}
};

module.exports = CustomerController;
I'm using Sails 0.10.0-rc7.
Any ideas?

Related

Sending file through HTTP request

I am trying to receive the file and store it using multer's storage.
Node.js code:
app.post('/createLicence', upload.single('photo'), function (req, res, next) {
  // any logic goes here
  console.log("filename", req.body.name);
  if (!req.file) {
    console.log("No file received");
    return res.send({
      success: false
    });
  } else {
    console.log('file received');
    var function_name = 'createLicence';
    var arguments_array = [req.file.path, 'Raghav', 'Mumbai', 'Approved'];
    invoke = require('/Users/sanjeev.natarajan/fabric-samples/fabcar/invoke.js');
    invoke.invokechaincode(function_name, arguments_array);
    return res.send({
      success: true
    });
  }
});
But I keep getting "No file received". I have sent the request through Postman.
From: https://www.npmjs.com/package/multer
In order to use the multer package, you first have to define a few parameters so that it can work on your file directory.
In your server.js:
let multer = require('multer');
let storage = multer.diskStorage({
  destination: function (req, file, cb) {
    cb(null, '/path/to/storage/');
  },
  filename: function (req, file, callback) {
    callback(null, file.originalname + '-' + Date.now());
  }
});
let upload = multer({
  storage: storage
});
Now, configure your route:
router.route('/your/payload')
  .post(authController.isAuthenticated, upload.any(), albumController.postFile);
Note that upload.any() will allow you to upload multiple differently formatted files at once. Feel free to use any of the other upload methods depending on your needs.
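For reference, these are multer's other upload methods, as a sketch using the same upload instance as above; the field names and the handler function are illustrative placeholders:
router.post('/one', upload.single('avatar'), handler);     // one file, available as req.file
router.post('/many', upload.array('photos', 6), handler);  // up to 6 files in req.files
router.post('/mixed', upload.fields([                      // named groups, keyed in req.files
  { name: 'avatar', maxCount: 1 },
  { name: 'gallery', maxCount: 8 }
]), handler);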
From this point, multer is already doing its job; however, you might want to keep track of the files uploaded on your server.
So, in your own module, the logic is pretty straightforward:
(I'm assuming that you're using mongoose models, since you're not giving much information, but that's not the relevant part anyway.)
exports.postFile = async (req, res) => {
  if (!req || !req.files || !req.files[0]) return res.status(400).send("Bad request.");
  for (const file of req.files) {
    try {
      // multer exposes the saved path and metadata on each file object
      const created = await File.create({
        path: file.path,
        originalName: file.originalname,
        mimetype: file.mimetype,
        owner: req.user.userId
      });
      // Do something with created here
    } catch (err) {
      console.log("Something went wrong: " + err);
    }
  }
  return res.status(418).send("I'm a teapot.");
};
This configuration and middleware use is ONLY for testing purposes; never, ever let anyone upload something to your server without carefully handling that upload process (file integrity, resource management, ...). An open upload endpoint can become a very wide backdoor straight into your server.
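As one illustration of that kind of handling (a minimal sketch, not a complete policy; the size caps and MIME whitelist below are arbitrary examples), multer's limits and fileFilter options let you reject oversized or unexpected files before they are written to disk:
let upload = multer({
  storage: storage,
  limits: { fileSize: 5 * 1024 * 1024, files: 5 }, // at most 5 files of 5 MB each
  fileFilter: function (req, file, cb) {
    // accept only a few image types; passing false silently skips the file
    let allowed = ['image/jpeg', 'image/png', 'image/gif'];
    cb(null, allowed.indexOf(file.mimetype) !== -1);
  }
});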
Hope this helps,
regards.

Upload HTML file to AWS S3 and then serve it instead of downloading

I am downloading a web page and then writing it to a file named thisArticle.html, using the code below.
var file = fs.createWriteStream("thisArticle.html");
var request = http.get(req.body.url, response => response.pipe(file));
After that I am trying to read the file and upload it to S3; here is the code that I wrote:
fs.readFile('thisArticle.html', 'utf8', function (err, html) {
  if (err) {
    console.log(err + "");
    throw err;
  }
  var pathToSave = 'articles/ ' + req.body.title + '.html';
  var s3bucket = new AWS.S3({ params: { Bucket: 'all-articles' } });
  s3bucket.createBucket(function () {
    var params = {
      Key: pathToSave,
      Body: html,
      ACL: 'public-read'
    };
    s3bucket.upload(params, function (err, data) {
      fs.unlink("thisArticle.html", function (err) {
        console.error(err);
      });
      if (err) {
        console.log('ERROR MSG: ', err);
        res.status(500).send(err);
      } else {
        console.log(data.Location);
      }
      // ..., more code below
    });
  });
});
Now, I am facing two issues:
1. The file is uploading, but with 0 bytes (empty).
2. When I upload manually via the S3 dashboard it uploads successfully, but when I try to load the URL in the browser it downloads the HTML file instead of serving it.
Any guidance on what I am missing?
Set the ContentType to "text/html". For example, with boto3 in Python:
s3 = boto3.client("s3")
s3.put_object(
    Bucket=s3_bucket,
    Key=s3_key,
    Body=html_string,
    CacheControl="max-age=0,no-cache,no-store,must-revalidate",
    ContentType="text/html",
    ACL="public-read"
)
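Since the question's code uses the AWS SDK for JavaScript, the same fix there is just an extra key on the params object from the question:
var params = {
  Key: pathToSave,
  Body: html,
  ACL: 'public-read',
  ContentType: 'text/html' // tells S3 to serve the object as HTML instead of a download
};
s3bucket.upload(params, function (err, data) {
  // ... same handling as in the question ...
});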
It also looks like thisArticle.html is being read before the download has finished writing it; that's why it's going up as 0 bytes. A sketch of the fix is below.
Also, to make the bucket serve the HTML, you need to turn on static website hosting as described in the AWS S3 docs: http://docs.aws.amazon.com/AmazonS3/latest/UG/ConfiguringBucketWebsite.html
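As for the 0-byte file, a minimal sketch of the timing fix, reusing the question's variable names: wait for the write stream's 'finish' event before reading the file back.
var file = fs.createWriteStream("thisArticle.html");
http.get(req.body.url, function (response) {
  response.pipe(file);
  // 'finish' fires once the whole response has been flushed to disk
  file.on('finish', function () {
    fs.readFile('thisArticle.html', 'utf8', function (err, html) {
      if (err) return res.status(500).send(err);
      // ... proceed with the s3bucket.upload call from the question ...
    });
  });
});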

ssh2-sftp-client Get request node.js

I am currently attempting to download a file from an SFTP server using ssh2-sftp-client.
I can list the files, as shown in the code below; my problem is with downloading the files themselves.
As you can see from my code below, I am attempting to use sftp.get to get the contents of each file and the Node file system to create a local copy.
When the file saves, it doesn't contain the contents of the file on the server; it only contains [object Object].
var Client = require('../../node_modules/ssh2-sftp-client');
var sftp = new Client();
var root = '/files';
var fs = require('fs');

sftp.connect({
  host: '192.168.0.1',
  port: '22',
  username: 'user',
  password: 'password'
}).then(() => {
  return sftp.list(root);
}).then((data) => {
  for (var i = 0, len = data.length; i < len; i++) {
    console.log(data[i].name);
    var name = data[i].name;
    sftp.get(root + '/' + name).then((file) => {
      console.log(file);
      fs.writeFile('downloads/' + name, file, function (err) {
        if (err) {
          return console.log(err);
        }
        console.log("The file was saved!");
      });
    });
  }
}).catch((err) => {
  console.log(err, 'catch error');
});
How can I get this to save the contents of the file?
Any help or a push in the right direction would be greatly appreciated.
I resolved my problem by changing the sftp.get call to the following:
sftp.get(root + "/" + name).then((stream) => {
  stream.pipe(fs.createWriteStream(local + name));
});
I hope this helps anybody else who might have this issue.
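For anyone adapting this, a slightly fuller sketch under the same assumptions (sftp.get resolves to a readable stream, as in the answer above, and local is a destination directory string): using let keeps each name bound per loop iteration, and the stream events make success and failure visible.
for (let i = 0; i < data.length; i++) {
  let name = data[i].name; // let: each .then callback sees its own name
  sftp.get(root + '/' + name).then((stream) => {
    let out = fs.createWriteStream(local + name);
    stream.pipe(out);
    out.on('finish', () => console.log('Saved ' + name));
    out.on('error', (err) => console.log(err));
  });
}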

Combining angular-file-upload and multer

I'm having a big headache trying to combine the angular-file-upload plugin with multer to make it a full SPA. I am stuck on uploading multiple files through multer.
This is what my multer options look like (Node route.js file):
var upload = multer({
  storage: storage,
  limits: {
    //fileSize: 819200
  }
}).array('myFile');
This is my POST handler (Node route.js file):
router.post('/add/file', function (req, res, next) {
  upload(req, res, function (err) {
    console.log(req.files);
    if (err) {
      console.log("Error uploading file.");
    }
  });
});
This is inside my Angular controller:
var uploader = $scope.uploader = new FileUploader({
  url: 'http://localhost:3000/add/file',
  alias: 'myFile'
});

uploader.filters.push({
  name: 'imageFilter',
  fn: function (item /*{File|FileLikeObject}*/, options) {
    var type = '|' + item.type.slice(item.type.lastIndexOf('/') + 1) + '|';
    return '|jpg|png|jpeg|bmp|gif|'.indexOf(type) !== -1;
  }
});
It adds only the first file and then gets stuck. I don't get any error; the whole page still works and I can send files again, but again only the first file is uploaded. The console shows that req.files contains only one file (the first one).
I couldn't find any tutorial or anything else on the Internet covering the angular-file-upload plugin together with multer, which is why I'm asking you guys.
Not sure if you've figured this out yet or not, but when sending multiple files over, the uploadAll function will not send the next file until it receives a response back from the server, so the route should look like the code below. I also saw somewhere in the documentation that the response needs to be JSON, though I haven't tested whether or not that is true.
router.post('/add/file', function (req, res, next) {
  upload(req, res, function (err) {
    console.log(req.files);
    if (err) {
      console.log("Error uploading file.");
      // respond on failure too, so the uploader can move on to the next file
      res.status(500).json({ error: 'Error uploading file.' });
    } else {
      res.status(200).json({ response: 'some response...' });
    }
  });
});

How can we upload multiple blobs to Azure using Node.js

I am trying to upload 6 images to an Azure blob container from a single endpoint that receives them from a registration form. The code below shows how to upload a single blob, but I need to upload multiple blobs at the same time. How can I do it?
Here is my code:
app.post('/upload', function (req, res) {
  //var dirname = require('path').dirname(__dirname);
  //var dirname1 = require('path').dirname(dirname);
  var filename = req.files[0].filename;
  var path = req.files[0].path;
  var type = req.files[0].mimetype;
  var options = {
    contentType: type,
    metadata: { fileName: filename }
  };
  blobSvc.createBlockBlobFromLocalFile(containerName, filename, path, options, function (error, result, response) {
    if (error != null) {
      console.log('Azure Full Error: ', error);
    } else {
      console.log(result);
      console.log(response);
      var user = new User();
      user.name = req.body.name;
      user.picture = 'https://yourblob.blob.core.windows.net/profile/' + result.name;
      user.save(function (err) {
        if (err) {
          return res.json(err.message);
        } else {
          return res.json({ User: user });
        }
      });
    }
  });
});
The Azure Storage SDK for Node is based on the RESTful storage APIs, and upload functionality is implemented via the Put Blob operation.
There is no RESTful API or SDK function that directly uploads multiple independent blobs to Azure in a single call.
You can implement this functionality yourself by uploading the files in a loop, as sketched below.
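A minimal sketch of that loop, reusing blobSvc and containerName from the question; wrapping the callback API in a Promise makes it easy to wait for all the uploads before responding:
function uploadOne(file) {
  return new Promise(function (resolve, reject) {
    var options = {
      contentType: file.mimetype,
      metadata: { fileName: file.filename }
    };
    blobSvc.createBlockBlobFromLocalFile(containerName, file.filename, file.path, options,
      function (error, result) {
        if (error) return reject(error);
        resolve(result);
      });
  });
}

app.post('/upload', function (req, res) {
  // req.files is assumed to be populated by the upload middleware, as in the question
  Promise.all(req.files.map(uploadOne))
    .then(function (results) {
      return res.json({ uploaded: results.map(function (r) { return r.name; }) });
    })
    .catch(function (error) {
      console.log('Azure Full Error: ', error);
      return res.status(500).json({ error: error.message });
    });
});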
