In our Meteor app, the client uploads files using collectionFS-filesystem, which I store in an uploads folder in the root directory of my app.
// My CFS uploads collection
uploads = new FS.Collection("uploads", {
  stores: [new FS.Store.FileSystem("uploads", {path: "~/uploads"})]
});
Later, I want to save the files to the database using collectionFS-gridFS.
// My CFS grid collection
files = new FS.Collection("files", {
  stores: [new FS.Store.GridFS("files")]
});
How do I read the data from the file on the server so that I can store the file in the database? Can I use the file from the CFS-filesystem collection and convert it to a CFS-gridFS file in any way?
Thanks in advance.
I accepted the answer by @perusopersonale. However, below is the approach I used to achieve this, based on documentation from here and here:
uploads.find(document_id).forEach(function (fileObj) {
  var type = fileObj.type();
  var name = fileObj.name();
  // Read the stored file from the "uploads" store (the store name matches the collection name here)
  var readStream = fileObj.createReadStream(fileObj.collectionName);
  // Attach the stream to a new FS.File and insert it into the GridFS-backed collection
  var newFile = new FS.File();
  newFile.attachData(readStream, {type: type});
  newFile.name(name);
  files.insert(newFile);
});
I don't understand why you want to use both. However, I had to implement something similar (read from a CFS filesystem store, do something, and then re-insert into another db), so here is a modified version that should accomplish what you are trying to do:
var fs = Npm.require('fs'); // server-side; plain require/import also works on newer Meteor
var fileObj = uploads.findOne(objId);
var newName = "newfilename"; // file name
// fileObj.copies.uploads.key contains the filename for store "uploads";
// note that fs does not expand "~", so an absolute path may be needed here
fs.readFile("~/uploads/" + fileObj.copies.uploads.key, function (err, data) {
  var newFile = new FS.File();
  newFile.attachData(data, {type: 'application/octet-stream'}, function (error) {
    newFile.name(newName);
    files.insert(newFile);
  });
});
How do I transfer a zip archive generated on the server back to the client? I'm using AngularJS and SailsJS. Currently I set the HTTP headers to match the content type, generate the archive using archiver, and pipe the data into the res object before calling res.end().
The file data is successfully placed inside the XHR response, but the file is never downloaded on the client side - unless I make an API call to zipFiles directly (see the code below).
How do I fix this?
zipFiles: async function (req, res) {
  var archiver = require('archiver');
  var year = req.allParams().year;
  var quarter = req.allParams().quarter;
  /*
   * FIXME: This is dangerous, the same code is present in api/controllers/sirka/SirkaShStatController.js
   * FIXME: A globally-available file should contain all relevant paths
   */
  var src_path = __some__path__
  var file_name = `download.zip`;
  // Set HTTP headers to match the contents of the response
  res.writeHead(200, {
    'Content-Type': 'application/zip',
    'Content-Disposition': `attachment; filename=${file_name}`,
  });
  var archive = archiver('zip');
  archive.on('error', function(err) {
    throw err;
  });
  // Once the archive has been finalized (by archive.finalize()), end the response
  archive.on('finish', function() {
    sails.log.info('Archive finished, sending...')
    res.end();
  });
  // Pipe the archive data into the response object
  archive.pipe(res);
  // Append files found in src_path at the top level of the archive
  archive.directory(src_path, false);
  archive.finalize();
}
After a lot of searching and tinkering I've finally managed to solve the issue. I'll try to explain the different approaches that I took and their results.
1st approach
Generate the ZIP file in memory and transfer the binary data back to the user through the response.
This approach failed (see the original question): since the call to zip the files was made through XHR/AJAX, even though it was possible to pipe the data into the response, it couldn't be fetched on the client side.
2nd approach
Create the zip file on the server, then represent the binary data as a Buffer. With this approach, I could simply return the buffer to the caller by calling res.ok(data) once the zip file was fully generated:
var archiver = require('archiver');
var archive = archiver('zip');
var fs = require('fs');
var output = fs.createWriteStream(dst_path);
archive.on('error', function(err) {
  throw err;
});
// Once the archive has been finalized (by archive.finalize()), log it
archive.on('finish', function() {
  sails.log.info('Archive finished, sending...');
});
// Once the zip file has been fully written to disk, read it back and return it
output.on('close', function () {
  var data = fs.readFileSync(dst_path);
  console.log(data);
  console.log(Buffer.byteLength(data));
  return res.ok(data);
})
// Pipe the archive data into the output file
archive.pipe(output);
// Append files found in src_path at the top level of the archive
archive.directory(src_path, false);
archive.finalize();
Then on the client side I simply receive the data, convert it to a Uint8Array, and wrap it in another array. The wrapping is necessary since the data makes up one whole part of the Blob.
Once the Blob is generated, I create an ObjectURL for it and attach the link to an invisible a element that is clicked automatically.
var dataBuffer = res["data"];
var binaryData = new Uint8Array(dataBuffer);
var blobParts = [binaryData];
var blob = new Blob(blobParts, {type: 'application/zip'});
// Create a temporary object URL and click an invisible link to trigger the download
var downloadUrl = URL.createObjectURL(blob);
var a = document.createElement('a');
document.body.appendChild(a);
a.style = "display: none";
a.href = downloadUrl;
a.download = `Sygehusstatistik-${year}-K${quarter}.zip`;
a.click();
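As a small addition of my own (not in the original snippet), the object URL and the temporary anchor can be cleaned up once the download has been triggered:
URL.revokeObjectURL(downloadUrl);
document.body.removeChild(a);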
I've had issues with the generated zip file getting placed into itself recursively; to avoid that, ensure that src_path != dst_path.
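A minimal sketch of such a guard (my addition; src_path and dst_path as in the snippet above):
var path = require('path');
// Refuse to write the archive inside the directory that is being archived
if (path.resolve(dst_path).startsWith(path.resolve(src_path) + path.sep)) {
  throw new Error('dst_path must not be inside src_path');
}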
I am trying to create a service that gets a zip file, unpacks it, and uploads its contents to a Google Cloud Storage bucket.
The unzipping part seems to work well, but in my GCS bucket all the files seem to be empty.
I'm using the following code:
app.post('/fileupload', function(req, res) {
  var form = new formidable.IncomingForm();
  form.parse(req, function (err, fields, files) {
    const uuid = uuidv4();
    console.log(files.filetoupload.path); // temporary path to zip
    fs.createReadStream(files.filetoupload.path)
      .pipe(unzip.Parse())
      .on('entry', function (entry) {
        var fileName = entry.path;
        var type = entry.type; // 'Directory' or 'File'
        var size = entry.size;
        const gcsname = uuid + '/' + fileName;
        const blob = bucket.file(gcsname);
        const blobStream = blob.createWriteStream(entry.path);
        blobStream.on('error', (err) => {
          console.log(err);
        });
        blobStream.on('finish', () => {
          const publicUrl = format(`https://storage.googleapis.com/${bucket.name}/${blob.name}`);
          console.log(publicUrl); // file on GCS
        });
        blobStream.end(entry.buffer);
      });
  });
});
I'm quite new to Node.js so I'm probably overlooking something - I've spent some time on documentation but I don't quite know what to do.
Could someone advise on what might be the problem?
fs.createWriteStream() takes a file path as its argument, whereas the GCS createWriteStream() takes an options object.
As per the example in this documentation, the recommended way would be:
const stream = file.createWriteStream({
  metadata: {
    contentType: req.file.mimetype
  },
  resumable: false
});
instead of:
const blobStream = blob.createWriteStream(entry.path).
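For completeness, a hedged sketch of the entry handler with both changes applied - passing options rather than a path, and piping the entry stream into GCS (entry is a readable stream, so entry.buffer is typically undefined, which would explain the empty files):
.on('entry', function (entry) {
  const gcsname = uuid + '/' + entry.path;
  const stream = bucket.file(gcsname).createWriteStream({ resumable: false });
  stream.on('error', (err) => console.log(err));
  stream.on('finish', () => console.log('uploaded ' + gcsname));
  // Stream the entry contents instead of calling blobStream.end(entry.buffer)
  entry.pipe(stream);
});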
Check whether your buffer is undefined or not. The buffer may remain undefined because no disk/memory storage was specified.
I have one page where I want to accept one file and 3-4 user inputs. I was able to achieve this using the connect-multiparty middleware, but the name of the uploaded file is something gibberish with the correct extension, although the uploaded file's contents are correct.
I want to achieve the following:
1. Set the name of the file being uploaded.
2. Create a copy with a different name if a file with the same name already exists in the target directory.
3. Set a maximum size limit and restrict the allowed file types.
I searched the net but could not find any working example. My complete code is below:
var express = require('express');
var router = express.Router();
var fs = require('fs');
var multiparty = require('connect-multiparty');
var multipartyMiddleware = multiparty({
  uploadDir : '../public/uploads'
});
router.post('/api/user/uploads', multipartyMiddleware, function(req, res) {
  var file = req.files.file;
  console.log(file.name);
  console.log(file.type);
  console.log(file);
  console.log(req.body.test);
  console.log("The file was saved!");
  res.json({
    success : 1
  });
  return;
});
module.exports = router;
You will have to rename the file using fs.rename() after it has been uploaded, or modify the source code of multiparty inside node_modules. Inside their code they have a function that does the renaming:
function uploadPath(baseDir, filename) {
  var ext = path.extname(filename).replace(FILE_EXT_RE, '$1');
  var name = randoString(18) + ext;
  return path.join(baseDir, name);
}
I have made some modifications to their code so I could use it a little bit like multer:
https://gist.github.com/Edudjr/999c80df952458cc583272a5161b4d08
You would use it like so:
var EXT_RE = /(\.[_\-a-zA-Z0-9]{0,16}).*/g;
var options = {
  uploadDir : path.join(__dirname,'../public/images'),
  filename: function(filename, callback){
    var name = filename.replace(EXT_RE, "");
    callback(name+'-YEAH.png');
  }
}
var form = new multiparty.Form(options);
They strongly advise you to save the files in the temp folder to prevent DoS on your server.
https://github.com/pillarjs/multiparty/issues/64
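Going back to the fs.rename() suggestion at the top of this answer, here is a minimal sketch of that approach inside the route handler (my addition; the target directory and the originalFilename/name fallback are assumptions):
router.post('/api/user/uploads', multipartyMiddleware, function(req, res) {
  var path = require('path');
  var file = req.files.file;
  // Move the randomly-named temp file to a path based on the original file name
  var target = path.join(__dirname, '../public/uploads', file.originalFilename || file.name);
  fs.rename(file.path, target, function(err) {
    if (err) return res.status(500).json({ success: 0 });
    res.json({ success: 1 });
  });
});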
You can access it easily; I used this to get the file name:
console.log(req.files.uploads.path.split('\\')[1]);
I am using uploads from Angular.
I am working on an offline application using HTML5 and jQuery for mobile. I want to back up files from local storage using JSZip. Below is a code snippet of what I have done...
if (localStorageKeys.length > 0) {
  for (var i = 0; i < localStorageKeys.length; i++) {
    var key = localStorageKeys[i];
    if (key.search(_instrumentId) != -1) {
      var data = localStorage.getItem(localStorageKeys[i])
      var zip = new JSZip();
      zip.file(localStorageKeys[i] + ".txt", data);
      var datafile = document.getElementById('backupData');
      datafile.download = "DataFiles.zip";
      datafile.href = window.URL.createObjectURL(zip.generate({ type: "blob" }));
    }
    else {
    }
  }
}
In the code above I am looping through the localStorage content and saving each entry as a text file. The challenge I am facing is how to create several text files inside DataFiles.zip; currently I am only able to create one text file inside the zipped folder. I am new to JavaScript, so bear with any ambiguity in my question.
Thanks in advance.
Just keep calling zip.file().
Look at the example from their documentation page (comments mine):
var zip = new JSZip();
// Add a text file with the contents "Hello World\n"
zip.file("Hello.txt", "Hello World\n");
// Add another text file with the contents "Goodbye, cruel world\n"
zip.file("Goodbye.txt", "Goodbye, cruel world\n");
// Add a folder named "images"
var img = zip.folder("images");
// Add a file named "smile.gif" to that folder, from some Base64 data
img.file("smile.gif", imgData, {base64: true});
zip.generateAsync({type:"base64"}).then(function (content) {
  location.href = "data:application/zip;base64," + content;
});
The important thing is to understand the code you've written - learn what each line does. If you do this, you'd realize that you just need to call zip.file() again to add another file.
Adding to @Jonathon Reinhart's answer,
You could also set both file name and path at the same time
// create a file and a folder
zip.file("nested/hello.txt", "Hello World\n");
// same as
zip.folder("nested").file("hello.txt", "Hello World\n");
If you receive a list of files (from the UI, an array, or wherever), you can compress them and then generate the archive. The code is something like this:
function upload(files){
  var zip = new JSZip();
  let archive = zip.folder("test");
  files.map(function(file){
    // Add each file to the "test" folder inside the archive
    archive.file(file.name, file.raw, {base64: true});
  });
  return archive.generateAsync({
    type: "blob",
    compression: "DEFLATE",
    compressionOptions: {
      level: 6
    }
  }).then(function(content){
    // send to server or whatever operation
  });
}
This worked for me with multiple JSON files. Maybe it helps.
In case you want to zip files and need a base64 output, you can use the code below:
import * as JSZip from 'jszip'
var zip = new JSZip();
zip.file("Hello.json", this.fileContent);
zip.generateAsync({ type: "base64" }).then(function (content) {
  const base64Data = content;
});
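As a usage note (my addition, mirroring the earlier answer), inside the .then callback the resulting string can be offered as a download via a data URI:
location.href = "data:application/zip;base64," + base64Data;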
I have the following JS function, which serves as a first prototype for a Mozilla Thunderbird extension.
The goal is to connect to a server, download a sample file, then unzip it and store the contents in the Thunderbird profile folder.
This all works fine, except that execution of the function stops after creating the zip file on the file system, so I have to run the function again to get the second part executed, which extracts the user.js file from the zip file.
Any ideas what the problem could be?
function downloadFile(httpLoc) {
  // get profile directory
  var file = Components.classes["@mozilla.org/file/directory_service;1"].
    getService(Components.interfaces.nsIProperties).
    get("ProfD", Components.interfaces.nsIFile);
  var profilePath = file.path;
  // change profile directory to native style
  profilePath = profilePath.replace(/\\/gi , "\\\\");
  profilePath = profilePath.toLowerCase();
  // download the zip file
  try {
    // new obj_URI object
    var obj_URI = Components.classes["@mozilla.org/network/io-service;1"].getService(Components.interfaces.nsIIOService).newURI(httpLoc, null, null);
    // new file object
    var obj_TargetFile = Components.classes["@mozilla.org/file/local;1"].createInstance(Components.interfaces.nsILocalFile);
    // set to download the zip file into the profile directory
    obj_TargetFile.initWithPath(profilePath + "\/" + "test.zip");
    // if the zip file doesn't exist, create it
    if(!obj_TargetFile.exists()) {
      alert("creating zip file");
      obj_TargetFile.create(0x00,0644);
    }
    // new persistence object
    var obj_Persist = Components.classes["@mozilla.org/embedding/browser/nsWebBrowserPersist;1"].createInstance(Components.interfaces.nsIWebBrowserPersist);
    // with persist flags if desired
    const nsIWBP = Components.interfaces.nsIWebBrowserPersist;
    const flags = nsIWBP.PERSIST_FLAGS_REPLACE_EXISTING_FILES;
    obj_Persist.persistFlags = flags | nsIWBP.PERSIST_FLAGS_FROM_CACHE;
    // save file to target
    obj_Persist.saveURI(obj_URI,null,null,null,null,obj_TargetFile);
  } catch (e) {
    alert(e);
  } finally {
    // unzip the user.js file to the profile directory
    // create a zipReader, open the zip file
    var zipReader = Components.classes["@mozilla.org/libjar/zip-reader;1"]
      .createInstance(Components.interfaces.nsIZipReader);
    zipReader.open(obj_TargetFile);
    // new file object, that's where user.js will be extracted
    var obj_UnzipTarget = Components.classes["@mozilla.org/file/local;1"].createInstance(Components.interfaces.nsILocalFile);
    // set path for user.js
    obj_UnzipTarget.initWithPath(profilePath + "\/" + "user.js");
    // if user.js doesn't exist, create it
    if(!obj_UnzipTarget.exists()) {
      alert("creating user.js");
      obj_UnzipTarget.create(0x00,0644);
    }
    // extract user.js out of the zip file, to the specified path
    zipReader.extract("user.js", obj_UnzipTarget);
    zipReader.close();
  }
}
var hello = {
  click: function() {
    downloadFile("http://pse2.iam.unibe.ch/profiles/profile.zip");
  },
};
saveURI is asynchronous, so you need to set a progress listener on the persist object to know when it has finished. Here's an example of setting a progress listener, and later on there's a check to see whether the transfer has finished.
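A minimal sketch of that pattern applied to the code above (my adaptation; it assumes the unzip steps from the finally block are moved into a helper, hypothetically named unzipUserJs, so they only run once the download has actually completed):
const nsIWPL = Components.interfaces.nsIWebProgressListener;
obj_Persist.progressListener = {
  onProgressChange: function() {},
  onStateChange: function(aWebProgress, aRequest, aStateFlags, aStatus) {
    // Only unzip once the transfer has fully stopped
    if (aStateFlags & nsIWPL.STATE_STOP) {
      unzipUserJs(obj_TargetFile, profilePath);
    }
  }
};
obj_Persist.saveURI(obj_URI, null, null, null, null, obj_TargetFile);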