Weird error. I use the blueimp jQuery File Upload plugin to upload files from the client side to Node.js and then on to Amazon S3 storage.
The code works: the files do get to Amazon S3, but on the client side the plugin shows this error: Error SyntaxError: Unexpected token S.
The Node.js code I'm using is the following:
app.post('/upload', function(req, res) {
    fs.readFile(req.files.files[0].path, function (err, data) {
        var imageName = req.files.files[0].name;
        // If there's an error
        if (!imageName) {
            console.log("There was an error");
            res.redirect("/");
            res.end();
        } else {
            var img_path = req.files.files[0].path;
            // Note: the file is read a second time here; the outer read's data is unused
            fs.readFile(img_path, function(err, data) {
                var BucketName = 'webwedding/' + SSName;
                s3.createBucket({Bucket: BucketName}, function() {
                    var params = {Bucket: BucketName, ACL: "public-read", Key: imageName, Body: data};
                    s3.putObject(params, function(err, data) {
                        res.writeHead(200, {'Content-Type': 'text/plain'});
                        if (err) {
                            console.log(err);
                            // res.end expects a string or Buffer, not an Error object
                            res.end(err.toString());
                        } else {
                            console.log('Uploaded ' + imageName + ' To ' + SSName);
                            // this plain-text body is what the plugin then fails to parse as JSON
                            res.end('Successfully uploaded data to webwedding\n');
                        }
                    });
                });
            });
        }
    });
});
I now understand that I need to JSON.stringify the result, but I can't manage to build the correct res.end response, trying to follow the Blueimp support instructions.
Edit: trying to return the expected JSON template to check the result:
var files = [
    {
        "name": "picture1.jpg",
        "size": 902604,
        "url": "http:\/\/example.org\/files\/picture1.jpg",
        "thumbnailUrl": "http:\/\/example.org\/files\/thumbnail\/picture1.jpg",
        "deleteUrl": "http:\/\/example.org\/files\/picture1.jpg",
        "deleteType": "DELETE"
    }
];
res.end(JSON.stringify(files));
gives me this error on the client side:
Error Empty file upload result
The error comes from the jquery.fileupload-ui code. I hope it's not too long, but I think I must show it so maybe someone can figure it out:
// excerpt from jquery.fileupload-ui.js (that, e, files, deferred, template come from the surrounding scope)
if (data.context) {
    data.context.each(function (index) {
        var file = files[index] ||
                {error: 'Empty file upload result'};
        deferred = that._addFinishedDeferreds();
        that._transition($(this)).done(
            function () {
                var node = $(this);
                template = that._renderDownload([file])
                    .replaceAll(node);
                that._forceReflow(template);
                that._transition(template).done(
                    function () {
                        data.context = $(this);
                        that._trigger('completed', e, data);
                        that._trigger('finished', e, data);
                        deferred.resolve();
                    }
                );
            }
        );
    });
}
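For reference, the excerpt above falls back to {error: 'Empty file upload result'} whenever files[index] is undefined, and the plugin's default _getFilesFromResponse only finds files if the parsed result is an object with a files array. A minimal sketch of a server response the widget can consume, assuming that default implementation (instead of the bare array above):
// Respond with JSON (correct content type) in the shape {files: [...]}
res.writeHead(200, {'Content-Type': 'application/json'});
res.end(JSON.stringify({files: files}));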
const AWS = require('aws-sdk');
AWS.config.loadFromPath('./credentials.json');
AWS.config.update({region: 'us-east-1'});
var rekognition = new AWS.rekognition();
var params = {
    CollectionId: "sammple",
    DetectionAttributes: [],
    ExternalImageId: "facialrekogntition", // TODO
    Image: {
        S3Object: {
            Bucket: "facerekognition12",
            Name: "download.jpg"
        }
    }
};
rekognition.indexFaces(params, function(err, data) {
    if (err) console.log(err, err.stack); // an error occurred
    else console.log(data);               // successful response
});
Whenever I run node index.js, I get the error mentioned in the title.
Note: I have my credentials stored in the JSON file and have also installed the AWS SDK for Node.
It's new AWS.Rekognition(); — basically, a capital R. The SDK's service constructors are capitalized.
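For clarity, the corrected line:
// capital R: AWS.Rekognition is the constructor; AWS.rekognition is undefined
var rekognition = new AWS.Rekognition();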
I'm developing a web application using HTML, JavaScript, and Node.js.
I have to store some PDFs on the server, and the way I found to send a PDF from client to server is the FormData object.
My client-side code is:
var files = new FormData();
var count = 0;
$('#tableSlideId tr').each(function() {
    var inputForm = $(this).find("th:first").children();
    var file = inputForm[0].files[0];
    files.append((count++).toString(), file);
});
$.ajax({
    type: "POST",
    url: "/sendFiles",
    data: files,
    contentType: false,
    processData: false
}).done(function(err) {
    // Note: .done receives the response body, not an error (see the aside below)
    var text = "";
    if (err) {
        text = "Upload FAILED! Retry ...";
    } else {
        text = "Upload SUCCESS!";
    }
    alert(text);
});
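As an aside, $.ajax does not pass an error to .done: the first argument to .done is the response body, and failures are reported through .fail. A sketch of the usual pattern (the alert texts are just illustrative):
$.ajax({
    type: "POST",
    url: "/sendFiles",
    data: files,
    contentType: false,
    processData: false
}).done(function (response) {
    // reached only on HTTP success; response is the server's body
    alert("Upload SUCCESS!");
}).fail(function (jqXHR, textStatus) {
    // reached on network or HTTP errors
    alert("Upload FAILED! Retry ... (" + textStatus + ")");
});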
Now, server side, I need to access each file stored in the FormData object in order to store them in a server folder. So I tried the following code:
app.post('/sendFiles', function(req, res) {
    new formidable.IncomingForm().parse(req, (err, fields, files) => {
        if (err) {
            console.error('Error', err);
            throw err;
        }
        // console.log('Files', files);
        daoQuery.insertPdf(files);
    });
    // Note: this responds immediately, before parsing has finished
    res.end();
});
where the daoQuery.insertPdf() function is defined as:
insertPdf: function(files) {
    for (var key in files) {
        // Bug: this writes the temp-file *path* string, not the file's contents,
        // which is why the result is ~1 KB and unreadable as a PDF
        fs.writeFile(".\\dataset\\newFile.pdf", files[key].path, function(err) {
            if (err) {
            	return console.log(err);
            }
            console.log("The file was saved!");
        });
    }
}
In this way I obtain a PDF file in the folder, but it is only 1 KB (not the original file's size), and if I try to open it the system says the file is damaged.
I'm quite sure the FormData object is received correctly by the server; in fact, in the insertPdf function the console.log('Files', files) print gives me all of the file's fields (name, size, type = application/pdf, path, and so on).
I solved it by modifying the insertPdf function as follows:
insertPdf: function(files) {
    for (var key in files) {
        // read the uploaded temp file's contents first, then write those bytes out
        fs.readFile(files[key].path, function(err1, data) {
            if (err1) throw err1;
            // Note: every file is written to the same name, so later files overwrite earlier ones
            fs.writeFile(".\\dataset\\newFile.pdf", data, function(err) {
                if (err) {
                    return console.log(err);
                }
                console.log("The file was saved!");
            });
        });
    }
}
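As a side note, on Node.js 8.5+ the read-then-write pair can be collapsed into a single fs.copyFile call; a sketch under the same assumptions as above (including the same overwriting destination name):
insertPdf: function(files) {
    for (var key in files) {
        // copy the uploaded temp file's bytes straight to the destination
        fs.copyFile(files[key].path, ".\\dataset\\newFile.pdf", function(err) {
            if (err) return console.log(err);
            console.log("The file was saved!");
        });
    }
}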
I have an API that creates a zip file using the archiver module, and I would like to pass the zip back as a response and download it on the client side.
This is what my API that creates the zip looks like:
reports.get('/xxx/:fileName', async (req, res) => {
    var s3 = new AWS.S3();
    var archiver = require('archiver');
    var filenames = "xxx";
    var str_array = filenames.split(',');
    for (var i = 0; i < str_array.length; i++) {
        var filename = str_array[i].trim();
        var localFileName = './temp/' + filename.substring(filename.indexOf("/") + 1);
        var file = fs.createWriteStream(localFileName, {flags: 'a', encoding: 'utf-8', mode: 0o666});
        file.on('error', function(e) { console.error(e); });
        s3.getObject({
            Bucket: config.xxx,
            Key: filename
        })
        .on('error', function (err) {
            console.log(err);
        })
        .on('httpData', function (chunk) {
            file.on('open', function() {
                file.write(chunk);
            });
        })
        .on('httpDone', function () {
            file.end();
        })
        .send();
    }
    // Note: this ends the response before the downloads or the archive are done
    res.end("Files have been downloaded successfully");
    // create a file to stream archive data to
    var output = fs.createWriteStream('example.zip');
    var archive = archiver('zip', {
        zlib: { level: 9 } // sets the compression level
    });
    // listen for all archive data to be written;
    // 'close' is fired only when a file descriptor is involved
    output.on('close', function() {
        console.log(archive.pointer() + ' total bytes');
        console.log('archiver has been finalized and the output file descriptor has closed.');
    });
    // this event fires when the data source is drained, no matter what the source was;
    // it is not part of this library but comes from the Node.js Stream API
    // @see: https://nodejs.org/api/stream.html#stream_event_end
    output.on('end', function() {
        console.log('Data has been drained');
    });
    // good practice to catch warnings (i.e. stat failures and other non-blocking errors)
    archive.on('warning', function(err) {
        if (err.code === 'ENOENT') {
            // log warning
        } else {
            // throw error
            throw err;
        }
    });
    // good practice to catch this error explicitly
    archive.on('error', function(err) {
        throw err;
    });
    // pipe archive data to the file
    archive.pipe(output);
    // append files from a sub-directory, putting its contents at the root of the archive
    // Note: the S3 downloads above are asynchronous, so ./temp may not be fully populated yet
    archive.directory('./temp', false);
    // finalize the archive (i.e. we are done appending files, but the streams still have to finish)
    // 'close', 'end' or 'finish' may fire right after this call, so register for them beforehand
    archive.finalize();
});
Also, for reference, here is another one of my APIs, to show how I am accustomed to sending data back to the client:
reports.get('/xxx/:fileName', async (req, res) => {
    var s3 = new AWS.S3();
    var params = {
        Bucket: config.reportBucket,
        Key: req.params.fileName,
        Expires: 60 * 5
    };
    try {
        s3.getSignedUrl('getObject', params, function (err, url) {
            if (err) throw err;
            res.json(url);
        });
    } catch (err) {
        res.status(500).send(err.toString());
    }
});
How can I send the zip back as a response and have it downloaded to disk on the client side?
Since archive is a stream, I would assume it can be pipe(lined) directly to the response (res):
// Node.js v10+, if res is a proper stream; pipeline wants a completion callback
const { pipeline } = require('stream');
pipeline(archive, res, (err) => { if (err) console.error(err); });
// Alternatively (search for the caveats of pipe vs. pipeline)
archive.pipe(res);
You should probably also set some HTTP headers on res to tell the browser the MIME type and, possibly, a filename:
res.set({
    'Content-Type': 'application/zip',
    'Content-Disposition': 'attachment; filename="example.zip"'
});
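Putting the two together, a sketch of how the tail of the route could look if the archive is streamed straight to the client instead of being written to example.zip on disk (the filename is illustrative):
res.set({
    'Content-Type': 'application/zip',
    'Content-Disposition': 'attachment; filename="example.zip"'
});
archive.pipe(res);                  // stream archive bytes directly into the response
archive.directory('./temp', false); // append the downloaded files
archive.finalize();                 // ends the archive; the piped response ends with it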
Okay, so once your file example.zip has been written (i.e. after the output stream's 'close' event has fired), you can easily follow the example mentioned in another answer and do:
var fileSystem = require('fs'); // assuming fileSystem is the fs module
var stat = fileSystem.statSync('example.zip');
res.writeHead(200, {
    'Content-Type': 'application/zip',
    'Content-Length': stat.size
});
var readStream = fileSystem.createReadStream('example.zip');
// We replaced all the event handlers with a simple call to readStream.pipe()
readStream.pipe(res);
This should work perfectly. Credits to the OP.
I don't get how to properly use JSZip.
I'm trying to get a file from a folder on localhost, zip it, and finally save the zipped file in the same folder, but I really don't have a good understanding of how to use it.
Here is what I have done:
JSZipUtils.getBinaryContent("http://localhost:8100/pdf/test.pdf", function (err, data) {
    if (err) {
        alert('err');
        throw err;
    } else {
        // Note: zip is never defined here, and zip.file() does not return a promise
        zip.file("test.pdf", data, {binary: true})
            .then(function (blob) {
                saveAs(blob, "pdf.zip");
            });
    }
});
Can anybody help me?
The following bit of code is what ended up working for me.
N.B., this is for JSZipUtils version 3. It needs FileSaver.js to make the saveAs call work (https://github.com/eligrey/FileSaver.js).
Here's my code:
function create_zip() {
    // get checked file urls
    var urls = [];
    $("input:checked").each(function () {
        urls.push($(this).siblings('a').attr('href'));
    });
    console.log(urls);
    var zip = new JSZip();
    var count = 0;
    var zipFilename = "archive.zip";
    urls.forEach(function(url) {
        var filename = url.substr(url.lastIndexOf("/") + 1);
        console.log(filename);
        // load a file and add it to the zip
        JSZipUtils.getBinaryContent(url, function (err, data) {
            if (err) {
                throw err; // or handle the error
            }
            zip.file(filename, data, {binary: true});
            count++;
            // once every file has been added, generate the zip and save it
            if (count == urls.length) {
                zip.generateAsync({type: 'blob'}).then(function(content) {
                    saveAs(content, zipFilename);
                });
            }
        });
    });
}
This is adapted from the info here: https://gist.github.com/noelvo/4502eea719f83270c8e9
Hope it helps!
var zip = new JSZip();
// fsaver is assumed to be the file-saver module providing saveAs
JSZipUtils.getBinaryContent(url, function (err, data) {
    if (err) {
        alert('err');
        throw err;
    } else {
        zip.file("test.csv", data, {binary: true});
        zip.generateAsync({ type: "blob" }).then(function (content) {
            fsaver.saveAs(content, "example.zip");
        });
    }
});
Try this for Angular. This one is for a CSV file; pass the path of the CSV file as url.
How can I append data to a file using Node.js?
I already have a file named myfile.json with data in it. I want to check whether the file exists and, if so, append some data to it.
I'm using the following code:
var writeTempFile = function (reportPath, data, callback) {
    // Note: fs.writeFile replaces the file's contents; it does not append
    fs.writeFile(reportPath, data, function (err) {
        //if (err) //say(err);
        callback(err);
    });
};
writeTempFile(reportDir + '_' + query.jobid + ".json", data, function (err) {
    context.sendResponse(data, 200, {
        'Content-Type': 'text/html'
    });
});
You can use jsonfile:
var jf = require('jsonfile');
var yourdata;
var file = '/tmp/data.json';
jf.readFile(file, function(err, obj) {
    if (!err) {
        var finalData = merge(obj, yourdata);
        jf.writeFile(file, finalData, function(err) {
            console.log(err);
        });
    }
});
You need to implement your merging logic in merge(object1, object2).
https://npmjs.org/package/jsonfile
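For flat JSON objects, a minimal merge could be as simple as the following sketch (deeply nested data would need a recursive merge instead):
function merge(object1, object2) {
    // keys from object2 win; copy into a fresh object so neither input is mutated
    return Object.assign({}, object1, object2);
}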
Check out the following code.
function addToFile(reportPath, data, callback) {
    fs.appendFile(reportPath, data, function (err) {
        callback(err);
    });
}
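It is a drop-in replacement for writeTempFile above; the call shape stays the same:
addToFile(reportDir + '_' + query.jobid + ".json", data, function (err) {
    context.sendResponse(data, 200, {
        'Content-Type': 'text/html'
    });
});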
Node offers the fs module to work with the file system.
To use this module, do:
var fs = require('fs');
To append some data to a file, you can do:
fs.appendFile('message.txt', 'data to append', function (err) {
    if (err) throw err;
    console.log('The "data to append" was appended to file!');
});
Node offers both synchronous and asynchronous methods to append data to a file. For more information, please refer to the documentation.
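For example, the synchronous variant is a one-liner that throws on failure instead of taking a callback:
var fs = require('fs');
fs.appendFileSync('message.txt', 'data to append');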