Remove __MACOSX/ files from zip archive using javascript and fflate - javascript

I'm testing fflate as a replacement for JSZip, which sometimes fails to open archives created with the macOS or Windows built-in compression utility. The library works fine and I'm able to create and read zip file contents. In a test with an archive created on macOS, I noticed that the compression utility adds a __MACOSX/._filename.ext entry for each file in the archive. I want to remove these entries from the file list when the user opens the zip in my app, but I don't know how to proceed. Is there any solution I can use with Vue and JavaScript to achieve this?
Here is the code I'm using in my Vue methods to read zip files:
// unzipSync is imported from fflate, e.g. import { unzipSync } from 'fflate';
async handleFiles(e) {
  const file = await this.readFile(e.dataTransfer.files[0]);
  const unzip = unzipSync(file);
  Object.keys(unzip).forEach((key) => {
    this.files.push({ name: key, data: unzip[key] });
  });
},
readFile(file) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.readAsArrayBuffer(file);
    reader.onerror = reject;
    reader.onloadend = () => {
      resolve(new Uint8Array(reader.result));
    };
  });
}

Have you considered using Array.filter?
Object.keys(unzip)
  .filter((key) => !key.match(/^__MACOSX\//))
  .forEach((key) => {
    this.files.push({ name: key, data: unzip[key] });
  });

Related

how to copy an image and save it in a new folder in electron

I am trying to make an image organizer app which searches images using tags.
I want the user to select the image they want; so far I have done this with the following code:
// renderer process
$("#uploadImage").on("click", (e) => {
  ipcRenderer.send('dialoguploadImage');
});
This is the main process:
ipcMain.on('dialoguploadImage', (e) => {
  dialog.showOpenDialog({
    properties: ['openFile']
  }).then(result => {
    sendBackimagePathFromMain(result.filePaths[0]);
  }).catch(err => {
    console.log(err);
  });
});

function sendBackimagePathFromMain(result) {
  mainWindow.webContents.send('imagePathFromMain', result);
}
So I have the image path, and the only thing I want to know is how I can duplicate this image, rename it, create a new folder, and save the image in that folder, for example:
('./currentDirectory/imageBackup/dognothapppy.jpg')
You can use fs.mkdirSync() to make the folder and fs.copyFileSync() to 'duplicate and rename' the file, or their async counterparts. In a file system you don't duplicate and then rename in two separate steps; copying a file does both at once.
const { mkdirSync, copyFileSync } = require('fs')
const { join } = require('path')

const folderToCreate = 'folder'
const fileToCopy = 'selectedFile.txt'
const newFileName = 'newFile.txt'

const dest = join(folderToCreate, newFileName)

// recursive: true also prevents an error if the folder already exists
mkdirSync(folderToCreate, { recursive: true })
copyFileSync(fileToCopy, dest)
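For reference, the same flow with the promise-based fs API (a sketch assuming Node 14+ for require('fs/promises'); backupImage is just an illustrative name):
const { mkdir, copyFile } = require('fs/promises')
const { join } = require('path')

async function backupImage(srcPath, folderToCreate, newFileName) {
  // recursive: true avoids an error if the folder already exists
  await mkdir(folderToCreate, { recursive: true })
  await copyFile(srcPath, join(folderToCreate, newFileName))
}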

convert file to base64 in angular

I am new to Angular and I want to convert files into a base64 string. I tried this and it gives a base64 string, but when I try to decode it using an online tool it does not give the correct result. Can anyone help me? Thank you.
Can we convert PDF and Word files into base64, or just images? With my code I successfully converted images but not any other file type.
My HTML code:
<input type="file" (change)="base($event)">
and my TS code:
base(event: { target: { files: any[]; }; }) {
  const file = event.target.files[0];
  const reader = new FileReader();
  reader.readAsDataURL(file);
  reader.onload = () => {
    console.log(reader.result);
  };
}
Your code works fine. Are you sure you are correctly getting the file object?
Below is the stackblitz demo of the same.
Also, it is better to use a promise while reading files. You can use the function below to read a file; it takes a file as input and resolves to a base64-encoded data URL.
private readBase64(file): Promise<any> {
  const reader = new FileReader();
  const future = new Promise((resolve, reject) => {
    reader.addEventListener('load', function () {
      resolve(reader.result);
    }, false);
    reader.addEventListener('error', function (event) {
      reject(event);
    }, false);
    reader.readAsDataURL(file);
  });
  return future;
}
Call this function as below -
this.readBase64(file)
  .then((data) => {
    // your code
  });
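One likely cause of the decoding problem: readAsDataURL resolves to a full data URL (data:<mime>;base64,<data>), not bare base64, so strip the prefix before pasting it into an external decoder. For example:
this.readBase64(file)
  .then((dataUrl) => {
    // dataUrl looks like "data:application/pdf;base64,JVBERi0x..."
    const base64 = dataUrl.substring(dataUrl.indexOf(',') + 1);
    console.log(base64); // raw base64, safe to decode externally
  });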

How to upload a file into Firebase Storage from a callable https cloud function

I have been trying to upload a file to Firebase Storage using a callable Firebase cloud function.
All I am doing is fetching an image from a URL using axios and trying to upload it to Storage.
The problem I am facing is that I don't know how to save the response from axios and upload it to Storage.
First, how do I save the received file in the temp directory that os.tmpdir() creates?
Then how do I upload it into Storage?
Here I am receiving the data as an arraybuffer and then converting it to a Blob and trying to upload it.
Here is my code. I think I am missing a major part.
If there is a better way, please recommend it. I've been looking through a lot of documentation and ended up with no clear solution. Please guide me. Thanks in advance.
const bucket = admin.storage().bucket();
const path = require('path');
const os = require('os');
const fs = require('fs');

module.exports = functions.https.onCall((data, context) => {
  try {
    return new Promise((resolve, reject) => {
      const {
        imageFiles,
        companyPIN,
        projectId
      } = data;
      const filename = imageFiles[0].replace(/^.*[\\\/]/, '');
      const filePath = `ProjectPlans/${companyPIN}/${projectId}/images/${filename}`; // Path I am trying to upload to in Firebase Storage
      const tempFilePath = path.join(os.tmpdir(), filename);
      const metadata = {
        contentType: 'application/image'
      };
      axios
        .get(imageFiles[0], { // URL for the image
          responseType: 'arraybuffer',
          headers: {
            accept: 'application/image'
          }
        })
        .then(response => {
          console.log(response);
          const blobObj = new Blob([response.data], {
            type: 'application/image'
          });
          return blobObj;
        })
        .then(async blobObj => {
          return bucket.upload(blobObj, {
            destination: tempFilePath // Here I am wrong... how do I set the path of the downloaded blob file?
          });
        }).then(buffer => {
          resolve({ result: 'success' });
        })
        .catch(ex => {
          console.error(ex);
        });
    });
  } catch (error) {
    // unknown: 500 Internal Server Error
    throw new functions.https.HttpsError('unknown', 'Unknown error occurred. Contact the administrator.');
  }
});
I'd take a slightly different approach and avoid using the local filesystem at all. It's just tmpfs and will cost you memory that your function is already using to hold the buffer/blob, so it's simpler to skip it and write directly from that buffer to GCS using the save method on the GCS file object.
Here's an example. I've simplified out a lot of your setup, and I am using an HTTP function instead of a callable. Likewise, I'm using a public Stack Overflow image and not your original URLs. In any case, you should be able to use this template to modify it back to what you need (e.g. change the prototype, remove the HTTP response, and replace it with the return value you need):
const functions = require('firebase-functions');
const axios = require('axios');
const admin = require('firebase-admin');
admin.initializeApp();

exports.doIt = functions.https.onRequest((request, response) => {
  const bucket = admin.storage().bucket();
  const IMAGE_URL = 'https://cdn.sstatic.net/Sites/stackoverflow/company/img/logos/so/so-logo.svg';
  const MIME_TYPE = 'image/svg+xml';
  return axios.get(IMAGE_URL, { // URL for the image
    responseType: 'arraybuffer',
    headers: {
      accept: MIME_TYPE
    }
  }).then(response => {
    console.log(response); // only to show we got the data, for debugging
    const destinationFile = bucket.file('my-stackoverflow-logo.svg');
    return destinationFile.save(response.data).then(() => { // note: defaults to resumable upload
      return destinationFile.setMetadata({ contentType: MIME_TYPE });
    });
  }).then(() => { response.send('ok'); })
    .catch((err) => { console.log(err); });
});
As a commenter noted, in the above example the axios request itself makes an external network access, and you will need to be on the Blaze or Flame plan for that. However, that alone doesn't appear to be your current problem.
Likewise, this also defaults to using a resumable upload, which the documentation does not recommend when you are doing large numbers of small (<10MB) files, as there is some overhead.
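If it helps, here is a rough sketch of the same flow reshaped as a callable, per the note above about changing the prototype; the function name and the data fields (imageUrl, destinationPath) are illustrative, not from the original code:
exports.fetchToStorage = functions.https.onCall((data, context) => {
  const bucket = admin.storage().bucket();
  return axios.get(data.imageUrl, { responseType: 'arraybuffer' })
    .then((response) => {
      // write the downloaded buffer straight to GCS, no temp file
      const destinationFile = bucket.file(data.destinationPath);
      return destinationFile.save(response.data);
    })
    .then(() => ({ result: 'success' }));
});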
You asked how this might be used to download multiple files. Here is one approach. First, let's assume you have a function that returns a promise that downloads a single file given its filename (I've abridged this from the above, but it's basically identical except that IMAGE_URL becomes the filename parameter; note that it does not return a final result such as response.send(), and there's an implicit assumption that all the files share the same MIME_TYPE):
function downloadOneFile(filename) {
  const bucket = admin.storage().bucket();
  const MIME_TYPE = 'image/svg+xml';
  return axios.get(filename, ...)
    .then(response => {
      const destinationFile = ...
    });
}
Then you just need to iteratively build a promise chain from the list of files. Let's say they are in imageUrls. Once built, return the entire chain:
let finalPromise = Promise.resolve();
imageUrls.forEach((item) => {
  finalPromise = finalPromise.then(() => downloadOneFile(item));
});
// if needed, add a final .then() section for the actual function result
return finalPromise.catch((err) => { console.log(err); });
Note that you could also build an array of the promises and pass them to Promise.all() -- that would likely be faster since you get some parallelism, but I wouldn't recommend it unless you are very sure all of the data will fit in your function's memory at once. Even then, you need to make sure the downloads can all complete within your function's timeout.
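For completeness, that parallel variant would look roughly like this:
// parallel variant -- only if memory and the function timeout allow it
return Promise.all(imageUrls.map((item) => downloadOneFile(item)))
  .catch((err) => { console.log(err); });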

Do I need the IPFS daemon to upload files from a browser?

I'm working on a project using IPFS, trying to create a website that allows users to upload files directly from their browser to IPFS. My goal was for the website to be purely front-end, but whenever I add a file to IPFS and check its hash on https://gateway.ipfs.io/ipfs/hash-here, nothing happens, which made me think that the files are probably not getting uploaded to IPFS because I'm not running a node on my local machine. Is this correct?
const Buffer = require('safe-buffer').Buffer;

export default function uploadFiles(node, files) {
  let reader = new FileReader();
  reader.onloadend = () => {
    let byteData = reader.result.split('base64,')[1];
    let fileData = Buffer.from(byteData);
    node.files.add(fileData, (err, res) => {
      if (err) {
        throw err;
      }
      let hash = res[0].hash;
      console.log(hash); // prints a hash that isn't visible on the gateway
      node.files.cat(hash, (err, res) => {
        if (err) {
          throw err;
        }
        let data = '';
        res.on('data', (d) => {
          data = data + d;
        });
        res.on('end', () => {
          // console.log(data);
          // console.log(atob(data));
        });
      });
    });
  };
  reader.readAsDataURL(files[0]);
}
Are you running a js-ipfs node in your browser? Did you get the chance to look at the examples in the examples folder in js-ipfs repo? Url here: https://github.com/ipfs/js-ipfs/tree/master/examples
If you add a file to your node and the node stays online, the IPFS gateway node will be able to find the content from your browser node.
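A minimal sketch of spawning an in-browser node, assuming the same older callback-style js-ipfs API that your code already uses (newer releases use IPFS.create() instead):
const IPFS = require('ipfs');

const node = new IPFS(); // starts an in-browser js-ipfs node
node.on('ready', () => {
  // the node is now online; content added via uploadFiles(node, files)
  // can be fetched by gateway nodes while this tab stays open
});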

How to zip a directory with node.js [duplicate]

I need to zip an entire directory using Node.js. I'm currently using node-zip and each time the process runs it generates an invalid ZIP file (as you can see from this Github issue).
Is there another, better, Node.js option that will allow me to ZIP up a directory?
EDIT: I ended up using archiver
writeZip = function(dir, name) {
  var zip = new JSZip(),
      code = zip.folder(dir),
      output = zip.generate(),
      filename = ['jsd-', name, '.zip'].join('');
  fs.writeFileSync(baseDir + filename, output);
  console.log('creating ' + filename);
};
Sample values for the parameters:
dir = /tmp/jsd-<randomstring>/
name = <randomstring>
UPDATE: For those asking about the implementation I used, here's a link to my downloader:
I ended up using archiver lib. Works great.
Example
var file_system = require('fs');
var archiver = require('archiver');

var output = file_system.createWriteStream('target.zip');
var archive = archiver('zip');

output.on('close', function () {
  console.log(archive.pointer() + ' total bytes');
  console.log('archiver has been finalized and the output file descriptor has closed.');
});

archive.on('error', function(err) {
  throw err;
});

archive.pipe(output);

// append files from a sub-directory, putting its contents at the root of the archive
archive.directory(source_dir, false);

// append files from a sub-directory and naming it `new-subdir` within the archive
archive.directory('subdir/', 'new-subdir');

archive.finalize();
I'm not going to show something new, just wanted to summarise the solutions above for those who like Promises as much as I do 😉.
const fs = require('fs');
const archiver = require('archiver');

/**
 * @param {String} sourceDir: /some/folder/to/compress
 * @param {String} outPath: /path/to/created.zip
 * @returns {Promise}
 */
function zipDirectory(sourceDir, outPath) {
  const archive = archiver('zip', { zlib: { level: 9 } });
  const stream = fs.createWriteStream(outPath);

  return new Promise((resolve, reject) => {
    archive
      .directory(sourceDir, false)
      .on('error', err => reject(err))
      .pipe(stream);

    stream.on('close', () => resolve());
    archive.finalize();
  });
}
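Usage is then a one-liner inside an async function (paths are examples):
await zipDirectory('/some/folder/to/compress', '/path/to/created.zip');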
Hope it will help someone 🤞
Use Node's native child_process API to accomplish this.
No need for third-party libs. Two lines of code.
const child_process = require("child_process");

child_process.execSync(`zip -r <DESIRED_NAME_OF_ZIP_FILE_HERE> *`, {
  cwd: <PATH_TO_FOLDER_YOU_WANT_ZIPPED_HERE>
});
The example above showcases the synchronous API. You can also use child_process.exec(path, options, callback) if you want async behavior. There are a lot more options you can specify other than cwd to further fine-tune your request.
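For example, the async variant might look like this (the paths are placeholders):
const { exec } = require("child_process");

exec("zip -r archive.zip *", { cwd: "/folder/to/zip" }, (error, stdout, stderr) => {
  if (error) {
    console.error(stderr);
    return;
  }
  console.log("zip created");
});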
If you don't have the ZIP utility:
This question specifically asks about the zip utility for archiving/compression purposes, so this example assumes you have the zip utility installed on your system. For completeness' sake, some operating systems may not have the utility installed by default. In that case you have at least three options:
Work with the archiving/compression utility that is native to your platform
Replace the shell command in the above Node.js code with the equivalent from your system. For example, Linux distros usually come with the tar/gzip utilities:
tar -czf <DESIRED_NAME_OF_ARCHIVE_HERE>.tar.gz <PATH_TO_FOLDER_YOU_WANT_ZIPPED_HERE>
This is a nice option as you don't need to install anything new onto your operating system or manage another dependency (kind of the whole point of this answer).
Obtain the zip binary for your OS/distribution.
For example on Ubuntu: apt install zip.
The ZIP utility has been tried and tested for decades; it's fairly ubiquitous and a safe choice. Do a quick Google search or go to the creator Info-ZIP's website for downloadable binaries.
Use a third party library/module (of which there are plenty on NPM).
I don't prefer this option. However, if you don't really care to understand the native methods and introducing a new dependency is a non-issue, this is also a valid option.
This is another library which zips the folder in one line:
zip-local
var zipper = require('zip-local');
zipper.sync.zip("./hello/world/").compress().save("pack.zip");
archive.bulk is now deprecated; the new method to use for this is glob:
// setup (fs/archiver requires and the archive instance were implied in the original)
var fs = require('fs');
var archiver = require('archiver');

var fileName = 'zipOutput.zip';
var fileOutput = fs.createWriteStream(fileName);
var archive = archiver('zip');

fileOutput.on('close', function () {
  console.log(archive.pointer() + ' total bytes');
  console.log('archiver has been finalized and the output file descriptor has closed.');
});

archive.pipe(fileOutput);

archive.glob("../dist/**/*"); // some glob pattern here
archive.glob("../dist/.htaccess"); // another glob pattern
// add as many as you like

archive.on('error', function(err) {
  throw err;
});

archive.finalize();
To include all files and directories:
archive.bulk([
  {
    expand: true,
    cwd: "temp/freewheel-bvi-120",
    src: ["**/*"],
    dot: true
  }
]);
It uses node-glob (https://github.com/isaacs/node-glob) underneath, so any matching expression compatible with that will work.
To pipe the result to the response object (for scenarios where you need to download the zip rather than store it locally):
archive.pipe(res);
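For instance, in an Express handler you might set the download headers before piping (the route and filename are illustrative):
app.get('/download', (req, res) => {
  const archive = archiver('zip');
  res.attachment('archive.zip'); // sets Content-Disposition so the browser downloads it
  archive.pipe(res);
  archive.glob('../dist/**/*');
  archive.finalize();
});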
Sam's hints for accessing the content of the directory worked for me.
src: ["**/*"]
I have found this small library that encapsulates what you need.
npm install zip-a-folder
const zipAFolder = require('zip-a-folder'); // 'zip-a-folder' is not a valid identifier
await zipAFolder.zip('/path/to/the/folder', '/path/to/archive.zip');
https://www.npmjs.com/package/zip-a-folder
Adm-zip has problems just compressing an existing archive (https://github.com/cthackers/adm-zip/issues/64) as well as corruption when compressing binary files.
I've also run into compression corruption issues with node-zip (https://github.com/daraosn/node-zip/issues/4).
node-archiver is the only one that seems to work well for compression, but it doesn't have any uncompress functionality.
Since archiver has not been compatible with new versions of webpack for a long time, I recommend using zip-lib.
var zl = require("zip-lib");

zl.archiveFolder("path/to/folder", "path/to/target.zip").then(function () {
  console.log("done");
}, function (err) {
  console.log(err);
});
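Since archiveFolder returns a promise, the same call also works with async/await:
const zl = require("zip-lib");

async function makeZip() {
  try {
    await zl.archiveFolder("path/to/folder", "path/to/target.zip");
    console.log("done");
  } catch (err) {
    console.log(err);
  }
}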
As of today, I'm using AdmZip and it works great:
import AdmZip = require('adm-zip');

export async function archiveFile() {
  try {
    const zip = new AdmZip();
    const outputDir = "/output_file_dir.zip";
    zip.addLocalFolder("./yourFolder");
    zip.writeZip(outputDir);
  } catch (e) {
    console.log(`Something went wrong ${e}`);
  }
}
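For the reverse direction, adm-zip can also extract archives; a quick sketch (the paths are examples):
import AdmZip = require('adm-zip');

export function extractArchive() {
  const zip = new AdmZip("/output_file_dir.zip");
  // extract everything into the target folder, overwriting existing files
  zip.extractAllTo("./extractedFolder", true);
}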
An import-based (ES module) version of the answer at https://stackoverflow.com/a/51518100:
To zip a single directory:

import archiver from 'archiver';
import fs from 'fs';

export default zipDirectory;

/**
 * From: https://stackoverflow.com/a/51518100
 * @param {String} sourceDir: /some/folder/to/compress
 * @param {String} outPath: /path/to/created.zip
 * @returns {Promise}
 */
function zipDirectory(sourceDir, outPath) {
  const archive = archiver('zip', { zlib: { level: 9 } });
  const stream = fs.createWriteStream(outPath);

  return new Promise((resolve, reject) => {
    archive
      .directory(sourceDir, false)
      .on('error', err => reject(err))
      .pipe(stream);

    stream.on('close', () => resolve());
    archive.finalize();
  });
}
To zip multiple directories:

import archiver from 'archiver';
import fs from 'fs';

export default zipDirectories;

/**
 * Adapted from: https://stackoverflow.com/a/51518100
 * @param {String[]} sourceDirs: list of folders to compress
 * @param {String} outPath: /path/to/created.zip
 * @returns {Promise}
 */
function zipDirectories(sourceDirs, outPath) {
  const archive = archiver('zip', { zlib: { level: 9 } });
  const stream = fs.createWriteStream(outPath);

  return new Promise((resolve, reject) => {
    var result = archive;
    sourceDirs.forEach(sourceDir => {
      result = result.directory(sourceDir, false);
    });

    result
      .on('error', err => reject(err))
      .pipe(stream);

    stream.on('close', () => resolve());
    archive.finalize();
  });
}
You can try it the simple way:

Install zip-dir:

npm install zip-dir

and use it:

var zipdir = require('zip-dir');

let foldername = src_path.split('/').pop();
zipdir(<<src_path>>, { saveTo: 'demo.zip' }, function (err, buffer) {
});
I ended up wrapping archiver to emulate JSZip, as refactoring my whole project would take too much effort. I understand archiver might not be the best choice, but here you go.
// USAGE:
const zip = JSZipStream.to(myFileLocation)
  .onDone(() => {})
  .onError(() => {});

zip.file('something.txt', 'My content');
zip.folder('myfolder').file('something-inFolder.txt', 'My content');
zip.finalize();
// NodeJS file content:
var fs = require('fs');
var path = require('path');
var archiver = require('archiver');

function zipper(archive, settings) {
  return {
    output: null,
    streamToFile(dir) {
      const output = fs.createWriteStream(dir);
      this.output = output;
      archive.pipe(output);
      return this;
    },
    file(location, content) {
      if (settings.location) {
        location = path.join(settings.location, location);
      }
      archive.append(content, { name: location });
      return this;
    },
    folder(location) {
      if (settings.location) {
        location = path.join(settings.location, location);
      }
      return zipper(archive, { location: location });
    },
    finalize() {
      archive.finalize();
      return this;
    },
    onDone(method) {
      this.output.on('close', method);
      return this;
    },
    onError(method) {
      this.output.on('error', method);
      return this;
    }
  };
}

exports.JSZipStream = {
  to(destination) {
    console.log('stream to', destination);
    const archive = archiver('zip', {
      zlib: { level: 9 } // Sets the compression level.
    });
    return zipper(archive, {}).streamToFile(destination);
  }
};
