I am trying to read a zip file containing images from the project directory in React.
When I open the zip file from <input type="file" />, it works by taking event.target.files[0].
But when I put that same file in the React project and try to open it by giving a path, it doesn't work.
Example: "./test.zip"
My current code:
let jsZip = new JSZip();
jsZip.loadAsync("./test.zip").then(function (zip) {
  let imagess = [];
  Object.keys(zip.files).forEach(function (filename) {
    zip.files[filename].async("base64").then(function (fileData) {
      const image = document.createElement("img");
      image.src = "data:image/*;base64," + fileData;
      document.querySelector(".unziped-container").appendChild(image);
    });
  });
});
I have been stuck on this for hours and the documentation is not helping either.
Any help is appreciated.
Anyone coming across this can use jszip-utils and write the following code:
JSZipUtils.getBinaryContent("../path/file.zip", function (err, data) {
  if (err) {
    throw err;
  }
  const jsZip = new JSZip();
  jsZip.loadAsync(data).then(function (zip) {
    Object.keys(zip.files).forEach(function (filename) {
      zip.files[filename].async("base64").then(function (fileData) {
        const image = document.createElement("img");
        image.src = "data:image/*;base64," + fileData;
        const unziped = document.querySelector(".unziped-container");
        unziped.appendChild(image);
      });
    });
  });
});
The documentation seems pretty clear: the first argument to loadAsync is the zip file data, but you're passing it a string.
(And what you're trying to do won't work anyway: your React code is running in the browser and has no knowledge of filesystem paths or filenames.)
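For completeness, a minimal sketch of loading a zip that ships with the app without jszip-utils: serve the file statically and fetch it as an ArrayBuffer before handing it to loadAsync. The "/test.zip" path here is an assumption (e.g. the file sitting in the public folder); adjust it to wherever the file is actually served from.
// Assumes test.zip is reachable over HTTP (e.g. from the public/ folder);
// loadAsync accepts an ArrayBuffer as its first argument.
fetch("/test.zip")
  .then(function (res) { return res.arrayBuffer(); })
  .then(function (buffer) { return new JSZip().loadAsync(buffer); })
  .then(function (zip) {
    Object.keys(zip.files).forEach(function (filename) {
      zip.files[filename].async("base64").then(function (fileData) {
        const image = document.createElement("img");
        image.src = "data:image/*;base64," + fileData;
        document.querySelector(".unziped-container").appendChild(image);
      });
    });
  });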
Related
I am not using the multer package because I am not using Express, so I am not sure how multer would work with Sails.js.
Anyway, I am trying to upload multiple files to S3. At first I used a for loop, which did not work because the for loop is synchronous while the file upload is asynchronous.
I then read that a recursive approach would work, so I tried it, but somehow it still doesn't.
The files are uploaded, but the size isn't right for all of them.
The size may end up bigger or smaller than the original, and when I download a file, say a doc file, I either get an error saying it's not an MS Word file or its contents are scrambled. If it's a PDF, it says it failed to open the PDF file.
If I try with only one file, it sometimes works, but not always.
Did I do something wrong in the code below?
s3_upload_multi: async function (req) {
  try {
    let fieldName = req._fileparser.upstreams[0].fieldName;
    let files = req.file(fieldName)._files;
    let return_obj = [];

    const upload_rec = files => {
      if (files.length <= 0) return return_obj;
      const f = files.pop();
      const fileUpload = f.stream;
      const s3 = new AWS.S3();
      // https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
      s3.putObject({ // uses s3 sdk
        Bucket: sails.config.aws.bucket,
        Key: 'blahblahblahblahblah',
        Body: fileUpload._readableState.buffer.head.data, // buffer from file
        ACL: 'public-read',
      }, function (err, data) {
        if (err) reject(err);
        return_obj.push(data);
        console.log(return_obj, 'return_obj');
      });
      return upload_rec(files);
    };
    upload_rec(files);
  } catch (e) {
    console.log(e, 'inside UploadService');
    return false;
  }
}
Thanks in advance for any advice and suggestions.
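For reference, a minimal sketch of one way to serialize the uploads with async/await instead of recursion (this is illustrative, not a drop-in fix: it assumes the same files array from req.file(fieldName)._files and the AWS SDK v2, and the key name is a placeholder). s3.upload() accepts a readable stream as the Body and returns a promise via .promise(), so each iteration only continues once the previous upload has finished.
const AWS = require('aws-sdk');

async function uploadAll(files, bucket) {
  const s3 = new AWS.S3();
  const results = [];
  for (const f of files) {
    // await makes the loop wait for this upload before starting the next one
    const data = await s3.upload({
      Bucket: bucket,
      Key: 'some-key-' + Date.now(), // placeholder key
      Body: f.stream,                // upload() handles streaming bodies
      ACL: 'public-read',
    }).promise();
    results.push(data);
  }
  return results;
}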
I am trying to create a service that gets a zip file, unpacks it, and uploads its contents to a Google Cloud Storage bucket.
The unzipping part seems to work well, but in my GCS bucket all the files seem to be empty.
I'm using the following code:
app.post('/fileupload', function (req, res) {
  var form = new formidable.IncomingForm();
  form.parse(req, function (err, fields, files) {
    const uuid = uuidv4();
    console.log(files.filetoupload.path); // temporary path to zip
    fs.createReadStream(files.filetoupload.path)
      .pipe(unzip.Parse())
      .on('entry', function (entry) {
        var fileName = entry.path;
        var type = entry.type; // 'Directory' or 'File'
        var size = entry.size;
        const gcsname = uuid + '/' + fileName;
        const blob = bucket.file(gcsname);
        const blobStream = blob.createWriteStream(entry.path);
        blobStream.on('error', (err) => {
          console.log(err);
        });
        blobStream.on('finish', () => {
          const publicUrl = format(`https://storage.googleapis.com/${bucket.name}/${blob.name}`);
          console.log(publicUrl); // file on GCS
        });
        blobStream.end(entry.buffer);
      });
  });
});
I'm quite new to Node.js, so I'm probably overlooking something - I've spent some time on the documentation but I don't quite know what to do.
Could someone advise on what the problem might be?
fs.createWriteStream() takes a file path as its argument, but the GCS createWriteStream() takes an options object.
As per the example in this documentation, the recommended way would be:
const stream = file.createWriteStream({
  metadata: {
    contentType: req.file.mimetype
  },
  resumable: false
});
instead of:
const blobStream = blob.createWriteStream(entry.path);
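Applied to the original handler, a minimal sketch (assuming entry is the readable stream emitted by unzip.Parse(), as in the question, and that resumable: false is acceptable for these small entries) could look like:
fs.createReadStream(files.filetoupload.path)
  .pipe(unzip.Parse())
  .on('entry', function (entry) {
    const gcsname = uuid + '/' + entry.path;
    const blob = bucket.file(gcsname);
    // createWriteStream() takes options, not a path; pipe the entry stream into it
    entry.pipe(blob.createWriteStream({ resumable: false }))
      .on('error', (err) => console.log(err))
      .on('finish', () => console.log('uploaded ' + gcsname));
  });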
Also check whether your buffer is undefined or not. It may be that the buffer remains undefined because no disk or memory storage was specified.
I am a newbie to both JavaScript and IPFS, and I am trying an experiment to fetch an image buffer from the IPFS hash "QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n" using the ipfs-mini node module.
Below is my code:
const IPFS = require('ipfs-mini');
const FileReader = require('filereader');
var multer = require('multer');

const ipfs = initialize();

app.post('/upload', function (req, res) {
  upload(req, res, function (err) {
    console.log(req.file.originalname);
    ipfs.cat('QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n', function (err, data) {
      if (err) console.log("could not get the image from the ipfs for hash " + ghash);
      else {
        var wrt = data.toString('base64');
        console.log('size ; ' + wrt.length);
        fs.writeFile('tryipfsimage.gif', wrt, (err) => {
          if (err) console.log('can not write file');
          else {
            //console.log(data);
            ipfs.stat('QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n', (err, data) => {
              // console.log(hexdump(wrt));
            });
            console.log("files written successfully");
          }
        });
      }
    });
  });
});

function initialize() {
  console.log('Initializing the ipfs object');
  return new IPFS({
    host: 'ipfs.infura.io',
    protocol: 'https'
  });
}
I can view the image properly in the browser using the link "https://ipfs.io/ipfs/QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n", but if I open the file 'tryipfsimage.gif', into which I dump the buffer returned by the cat API in the program above, the image content seems corrupted. I am not sure what mistake I am making in the code. It would be great if someone could point it out.
From the ipfs docs: https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#javascript---ipfsfilescatipfspath-callback
The data in the callback is actually a Buffer, so by calling toString('base64') on it you are writing base64 text into the .gif file; there is no need to do this. You can pass the Buffer directly to the fs.writeFile API:
fs.writeFile('tryipfsimage.gif', data, ...
For larger files I would recommend using the ipfs catReadableStream, where you can do something more like:
const stream = ipfs.catReadableStream('QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n')
// don't forget to add error handlers to the streams and whatnot
const fileStream = fs.createWriteStream('tryipfsimage.gif')
stream.pipe(fileStream);
I am writing an Express app that takes in a base64 encoded string that represents an image. Right now I'm not really sure how I can take that string and upload the image to AWS S3, so I'm reading in the encoded image string data, decoding it, writing a file using fs, and then trying to upload. I have this working for an endpoint that just takes in a raw file, and all of its content is correctly uploaded to AWS S3.
Now when I try to do what I described above, I'm able to upload to S3, but the file is 0 KB and empty, and I'm not sure why. I tested just taking the string data and writing it to a test file, and that works. However, when I try uploading to S3, the file shows up but it's empty. Here is my code:
router.post('/images/tags/nutritionalInformation/image/base64encoded', function (req, res) {
  console.log(req.body.imageString);
  var base64Stream = req.body.imageString;
  var imgDecodedBuffer = decodeBase64Image(base64Stream);
  console.log(imgDecodedBuffer);

  // write to image file
  var prefix = guid().toString() + ".jpg";
  var filePath = './uploads/' + prefix;
  console.log(filePath);
  fs.writeFile(filePath, imgDecodedBuffer.data, function (err) {
    console.log(err);
  });

  var stream = fs.createReadStream(filePath);
  console.log(stream);

  return s3fsImpl.writeFile(prefix, stream).then(function () {
    fs.unlink(filePath, function (err) {
      if (err) {
        console.error(err);
      }
    });
  });
})
Here are the relevant import statements:
var fs = require('fs');
var s3fs = require('s3fs');
var multiparty = require('connect-multiparty'),
    multipartyMidleware = multiparty();
var s3fsImpl = new s3fs('blahblah', {
  accessKeyId: 'ACCESS_KEY_ID',
  secretAccessKey: 'SECRET'
});
Any help would be greatly appreciated!
If you just pass in the buffer, which I presume is in your imgDecodedBuffer.data value, it should work.
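A minimal sketch of what that suggestion looks like in the route, skipping the temporary file entirely (this assumes imgDecodedBuffer.data is a Buffer, as produced by the decodeBase64Image helper in the question, and that s3fsImpl.writeFile accepts it the same way it accepts a stream):
router.post('/images/tags/nutritionalInformation/image/base64encoded', function (req, res) {
  var imgDecodedBuffer = decodeBase64Image(req.body.imageString);
  var prefix = guid().toString() + '.jpg';
  // Hand the decoded Buffer straight to s3fs instead of round-tripping through disk
  return s3fsImpl.writeFile(prefix, imgDecodedBuffer.data).then(function () {
    res.sendStatus(200);
  });
});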
I am working on an offline application using HTML5 and jQuery for mobile. I want to back up files from local storage using JSZip. Below is a code snippet of what I have done:
if (localStorageKeys.length > 0) {
  for (var i = 0; i < localStorageKeys.length; i++) {
    var key = localStorageKeys[i];
    if (key.search(_instrumentId) != -1) {
      var data = localStorage.getItem(localStorageKeys[i])
      var zip = new JSZip();
      zip.file(localStorageKeys[i] + ".txt", data);
      var datafile = document.getElementById('backupData');
      datafile.download = "DataFiles.zip";
      datafile.href = window.URL.createObjectURL(zip.generate({ type: "blob" }));
    }
    else {
    }
  }
}
In the code above I am looping through the localStorage content and saving each file in text format. The challenge I am facing is how to create several text files inside DataFiles.zip; currently I am only able to create one text file inside the zipped folder. I am new to JavaScript, so bear with any ambiguity in my question.
Thanks in advance.
Just keep calling zip.file().
Look at the example from their documentation page (comments mine):
var zip = new JSZip();
// Add a text file with the contents "Hello World\n"
zip.file("Hello.txt", "Hello World\n");
// Add another text file with the contents "Goodbye, cruel world\n"
zip.file("Goodbye.txt", "Goodbye, cruel world\n");
// Add a folder named "images"
var img = zip.folder("images");
// Add a file named "smile.gif" to that folder, from some Base64 data
img.file("smile.gif", imgData, {base64: true});
zip.generateAsync({type: "base64"}).then(function (content) {
  location.href = "data:application/zip;base64," + content;
});
The important thing is to understand the code you've written - learn what each line does. If you do this, you'd realize that you just need to call zip.file() again to add another file.
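Applied to the question's loop, a minimal sketch might look like the following: the JSZip instance and the download link setup move outside the loop, one file is added per matching key, and the blob is generated once at the end (generateAsync is used here, which assumes a reasonably recent JSZip version; older versions use the synchronous zip.generate as in the question).
var zip = new JSZip();
for (var i = 0; i < localStorageKeys.length; i++) {
  var key = localStorageKeys[i];
  if (key.search(_instrumentId) != -1) {
    // one zip, many files: each matching key becomes its own .txt entry
    zip.file(key + ".txt", localStorage.getItem(key));
  }
}
zip.generateAsync({ type: "blob" }).then(function (blob) {
  var datafile = document.getElementById('backupData');
  datafile.download = "DataFiles.zip";
  datafile.href = window.URL.createObjectURL(blob);
});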
Adding to @Jonathon Reinhart's answer, you could also set both the file name and the path at the same time:
// create a file and a folder
zip.file("nested/hello.txt", "Hello World\n");
// same as
zip.folder("nested").file("hello.txt", "Hello World\n");
If you receive a list of files (from the UI, an array, or whatever), you can compress them first and then archive. The code is something like this:
function upload(files) {
  var zip = new JSZip();
  let archive = zip.folder("test");
  files.map(function (file) {
    // add each file to the "test" folder inside the zip
    archive.file(file.name, file.raw, { base64: true });
  });
  return archive.generateAsync({
    type: "blob",
    compression: "DEFLATE",
    compressionOptions: {
      level: 6
    }
  }).then(function (content) {
    // send to server or whatever operation
  });
}
This worked for me with multiple JSON files. Maybe it helps.
In case you want to zip files and need base64 output, you can use the code below:
import * as JSZip from 'jszip'
var zip = new JSZip();
zip.file("Hello.json", this.fileContent);
zip.generateAsync({ type: "base64" }).then(function (content) {
  const base64Data = content;
});