I am working on Windows 8 using HTML/JS. I am trying to extract a Zip file that was picked using the FilePicker.
To extract the Zip file I am using this page.
In this link there is a function to extract a Zip file, unzipAsync:
function unzipAsync(filePath, replaceIfExists) {
    var fileCollisionOption = replaceIfExists ?
        storage.CreationCollisionOption.replaceExisting :
        storage.CreationCollisionOption.failIfExists;

    return storage.StorageFile
        .getFileFromPathAsync(filePath)
        .then(getFileAsUint8Array)
        .then(function (zipFileContents) {
            //Create the zip data in memory
            var zip = new JSZip(zipFileContents);

            //Extract files
            var promises = [];
            var lf = storage.ApplicationData.current.localFolder;

            _.each(zip.files, function (zippedFile) {
                //Create new file
                promises.push(lf
                    .createFileAsync(zippedFile.name, fileCollisionOption)
                    .then(function (localStorageFile) {
                        //Copy the zipped file's contents into the local storage file
                        var fileContents = zip.file(zippedFile.name).asUint8Array();
                        return storage.FileIO
                            .writeBytesAsync(localStorageFile, fileContents);
                    })
                );
            });

            return WinJS.Promise.join(promises);
        });
}
Before this, I added the JSZip library to the project folder.
Help me: how can I integrate the library into my project? Here is my project link.
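(For reference: a minimal way to wire this up is to add the jszip.js file to the project, e.g. under a js folder, and reference it from default.html with a script tag before your own code, which makes the global JSZip constructor available. The sketch below assumes that setup plus the getFileAsUint8Array helper from the edit further down, and feeds a file chosen with the FileOpenPicker into the same extraction logic; it starts from the picked StorageFile rather than its path, because getFileFromPathAsync may be denied access to an arbitrary picked location.)

var picker = new Windows.Storage.Pickers.FileOpenPicker();
picker.fileTypeFilter.append(".zip");

picker.pickSingleFileAsync().then(function (pickedFile) {
    if (!pickedFile) { return; } // the user cancelled the picker

    // Start from the StorageFile itself instead of calling unzipAsync(pickedFile.path, ...),
    // since the app may not be allowed to reopen the picked location by path.
    return getFileAsUint8Array(pickedFile).then(function (zipFileContents) {
        var zip = new JSZip(zipFileContents);
        // ...extract the entries exactly as the body of unzipAsync above does
    });
});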
Edit:
function getFileAsUint8Array(file) {
    return storage.FileIO.readBufferAsync(file)
        .then(function (buffer) {
            //Read the file into a byte array
            var fileContents = new Uint8Array(buffer.length);
            var dataReader = storage.Streams.DataReader.fromBuffer(buffer);
            dataReader.readBytes(fileContents);
            dataReader.close();
            return fileContents;
        });
}
Now it is running without errors, but it is not actually doing anything, i.e. not extracting my file.
NOTE:
- If anyone knows a better way than this, or another library I can use to extract files with WinJS, please suggest it.
Well, I'm guessing you haven't created a getFileAsUint8Array function (or at least, you aren't showing it above). I am doing something similar (although I get the zip file from an XHR call instead). Once I have the zip file and the folder I want to put the unzipped files in, I do something like the code below.
Note, however, that I had to modify this code as I do a few other things, so I haven't tested it exactly as is (and obviously it wouldn't work within your code above).
Here's the (mostly) full code:
WinJS.xhr({ "url": zipUrl, "responseType": "arraybuffer" })
    .done(
        function (e) {
            if (e.getResponseHeader("content-type") !== "application/zip") {
                console.error("Remote file was not sent with correct Content-Type: expected 'application/zip', but received '" + e.getResponseHeader("content-type") + "'");
            }
            unzipAndStore(new JSZip(e.response), someLocalFolder);
        },
        function () { /* handle ajax errors */ }
    );
/**
 * @param {JSZip} jszipobj The JSZip object
 * @param {StorageFolder} localFolder The folder to unzip into
 * @return {Promise}
 */
var unzipAndStore = function (jszipobj, localFolder) {
    var promises = [];

    Object.keys(jszipobj.files).forEach(function (key) {
        var fileName;

        // ignore folder entries, they're handled as needed below
        if (/\/$/.test(key)) { return; }

        fileName = jszipobj.files[key].name.match(/[^\/]+\.[^\.\/]+$/);
        if (!fileName) {
            console.error("Unable to process zip entry without proper filename: ", jszipobj.files[key].name);
            return;
        }
        fileName = fileName[0];

        promises.push(
            getFolderFromPathRecursive(jszipobj.files[key].name, localFolder)
                .then(
                    function (subFolder) {
                        console.log("creating file in folder: ", fileName, subFolder.name);
                        return subFolder.createFileAsync(fileName, Windows.Storage.CreationCollisionOption.replaceExisting);
                    }
                )
                .then(
                    function (localStorageFile) {
                        return Windows.Storage.FileIO
                            .writeBytesAsync(localStorageFile, jszipobj.file(jszipobj.files[key].name).asUint8Array());
                    }
                )
        );
    });

    return WinJS.Promise.join(promises);
};
/**
 * Promise completes with the lowest level folder in the given path,
 * creating subfolders along the way
 * @param {String} path The path to the lowest subfolder you want a reference to
 * @param {StorageFolder} rootFolder The folder to begin at for this iteration
 * @return {Promise}
 */
var getFolderFromPathRecursive = function (path, rootFolder) {
    var normalizedPath = path.replace(/\/?[^\/]+\.[^\.\/]+$/, ""), // remove a possible filename from the end of the path
        folders = normalizedPath.split(/\//), // get an array of the folders in the path
        subFolderName = folders.shift(); // remove the first folder in the path as the new one to create

    return new WinJS.Promise(function (complete, error) {
        if (!subFolderName || !subFolderName.length) {
            complete(rootFolder);
            return;
        }

        rootFolder
            .createFolderAsync(subFolderName, Windows.Storage.CreationCollisionOption.openIfExists)
            .then(
                function (folder) {
                    return getFolderFromPathRecursive(folders.join("/"), folder);
                },
                error
            )
            .then(
                function (folder) {
                    complete(folder);
                    return;
                },
                error
            );
    });
};
I did it using the Decompressing sample, by adding a C# Windows Runtime component to my JavaScript project.
Here is my post.
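(In case it helps anyone taking the same route: once a C# Windows Runtime Component is referenced by the JavaScript project, its public classes are projected into JS as ordinary namespaces and its async methods surface as promises. The names below are purely hypothetical placeholders for whatever the component in the linked post actually exposes.)

// Hypothetical sketch only: "MyComponents.ZipHelper.unzipAsync" is a placeholder name,
// not a real API from the sample.
var localFolder = Windows.Storage.ApplicationData.current.localFolder;

MyComponents.ZipHelper.unzipAsync(pickedZipFile, localFolder).done(
    function () {
        console.log("zip extracted to the local folder");
    },
    function (err) {
        console.error(err);
    }
);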
I'm writing a WebExtension that uses C++ code compiled with Emscripten. The WebExtension downloads files which I want to process inside the C++ code. I'm aware of the File System API and I think I've read most of it, but I can't get it to work: making a downloaded file accessible in Emscripten.
This is the relevant JavaScript part of my WebExtension:
// Download a file
let blob = await fetch('https://stackoverflow.com/favicon.ico').then(response => {
    if (!response.ok) {
        return null;
    }
    return response.blob();
});

// Setup FS API
FS.mkdir('/working');
FS.mount(IDBFS, {}, '/working');
FS.syncfs(true, function(err) {
    if (err) {
        console.error('Error: ' + err);
    }
});

// Store the file "somehow"
let filename = 'favicon.ico';
// ???

// Call WebAssembly/C++ to process the file
Module.processFile(filename);
The directory is created, which can be seen when inspecting the browser's web storage. If I understand the File System API correctly, I have to "somehow" write my data to a file inside /working. Then I should be able to call a function of my C++ code (from JavaScript) and open that file as if there were a directory called 'working' at the root, containing the file. The call of the C++ function works (I can print the provided filename).
But how do I add the file (currently a blob) to that directory?
C++ code:
#include "emscripten/bind.h"

using namespace emscripten;

std::string processFile(std::string filename)
{
    // open and process the file
}

EMSCRIPTEN_BINDINGS(my_module)
{
    function("processFile", &processFile);
}
It turned out that I was mixing some things up while trying different methods, and I was also misinterpreting my debugging tools. So the easiest way to accomplish this task (without using IDBFS) is:
JS:
// Download a file
let blob = await fetch('https://stackoverflow.com/favicon.ico').then(response => {
    if (!response.ok) {
        return null;
    }
    return response.blob();
});

// Convert blob to Uint8Array (more abstract: ArrayBufferView)
let data = new Uint8Array(await blob.arrayBuffer());

// Store the file
let filename = 'favicon.ico';
let stream = FS.open(filename, 'w+');
FS.write(stream, data, 0, data.length, 0);
FS.close(stream);

// Call WebAssembly/C++ to process the file
console.log(Module.processFile(filename));
C++:
#include "emscripten/bind.h"
#include <fstream>

using namespace emscripten;

std::string processFile(std::string filename)
{
    std::fstream fs;
    fs.open(filename, std::fstream::in | std::fstream::binary);
    if (fs) {
        fs.close();
        return "File '" + filename + "' exists!";
    } else {
        return "File '" + filename + "' does NOT exist!";
    }
}

EMSCRIPTEN_BINDINGS(my_module)
{
    function("processFile", &processFile);
}
If you want to do it with IDBFS, you can do it like this:
// Download a file
let blob = await fetch('https://stackoverflow.com/favicon.ico').then(response => {
    if (!response.ok) {
        return null;
    }
    return response.blob();
});

// Convert blob to Uint8Array (more abstract: ArrayBufferView)
let data = new Uint8Array(await blob.arrayBuffer());

// Setup FS API
FS.mkdir('/persist');
FS.mount(IDBFS, {}, '/persist');

// Load persistent files (sync from IDBFS to MEMFS), will do nothing on first run
FS.syncfs(true, function(err) {
    if (err) {
        console.error('Error: ' + err);
    }
});
FS.chdir('/persist');

// Store the file
let filename = 'favicon.ico';
let stream = FS.open(filename, 'w+');
FS.write(stream, data, 0, data.length, 0);
FS.close(stream);

// Persist the changes (sync from MEMFS to IDBFS)
FS.syncfs(false, function(err) {
    if (err) {
        console.error('Error: ' + err);
    }
});
// NOW you will be able to see the file in your browser's IndexedDB section of the web storage inspector!

// Call WebAssembly/C++ to process the file
console.log(Module.processFile(filename));
Notes:
- When using FS.chdir() in the JS world to change the directory, this also changes the working directory in the C++ world, so keep that in mind when working with relative paths (see the sketch below).
- When working with IDBFS instead of MEMFS, you are actually still working with MEMFS and just have the opportunity to sync data from or to IDBFS on demand; all your work is still done in MEMFS. I would consider IDBFS an add-on to MEMFS. I didn't get that directly from the docs.
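A small illustration of the first note, assuming the same Module and the /persist setup from the snippet above:

// Because FS.chdir('/persist') also changed the C++ working directory,
// both of these calls resolve to the same file.
FS.chdir('/persist');
console.log(Module.processFile('favicon.ico'));          // relative to /persist
console.log(Module.processFile('/persist/favicon.ico')); // absolute path, same file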
Is there a way to move files with firebase.storage()?
Example:
user1/public/image.jpg to user1/private/image.jpg
Since Firebase Storage is backed by Google Cloud Storage, you can use GCS's rewrite API (docs) or gsutil mv (docs).
Also, an example of move (docs) in GCloud Node follows:
var bucket = gcs.bucket('my-bucket');
var file = bucket.file('my-image.png');
var newLocation = 'gs://another-bucket/my-image-new.png';

file.move(newLocation, function(err, destinationFile, apiResponse) {
    // `my-bucket` no longer contains:
    // - "my-image.png"
    //
    // `another-bucket` now contains:
    // - "my-image-new.png"

    // `destinationFile` is an instance of a File object that refers to your
    // new file.
});
There is no built-in way to move a file to another location; instead, you can download it, upload it to the new reference, and then delete it from the previous location.
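A minimal sketch of that download / re-upload / delete sequence, assuming the v9 modular web SDK and an already initialized storage instance (the same steps apply with the older namespaced API, as in the answer further down):

import { ref, getDownloadURL, uploadBytes, deleteObject } from "firebase/storage";

async function moveFile(storage, fromPath, toPath) {
    const sourceRef = ref(storage, fromPath);

    // 1. Download the existing object
    const url = await getDownloadURL(sourceRef);
    const blob = await fetch(url).then((res) => res.blob());

    // 2. Re-upload it under the new path
    await uploadBytes(ref(storage, toPath), blob);

    // 3. Delete the original
    await deleteObject(sourceRef);
}

// moveFile(storage, "user1/public/image.jpg", "user1/private/image.jpg");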
As of March 29, 2022 it is possible to call the "move" function directly in this way:
import * as functions from "firebase-functions";
import {getStorage} from "firebase-admin/storage";

export const triggerStorage = functions
    .storage
    .object()
    .onFinalize(async (object) => {
        const bucket = getStorage().bucket(object.bucket);
        const destPath = "user1/private/image.jpg";
        await bucket.file(object.name).move(destPath);
    });
I wrote a JavaScript function that accomplishes this using only the firebase storage API.
Happy Coding!
/**
 * Moves a file in firebase storage from its current location to the destination
 * and returns the status object for the moved file.
 * @param {String} currentPath The path to the existing file from storage root
 * @param {String} destinationPath The desired path for the file after the move
 */
function moveFirebaseFile(currentPath, destinationPath) {
    let oldRef = storage.ref().child(currentPath)

    oldRef.getDownloadURL().then(url => {
        fetch(url).then(htmlReturn => {
            let fileArray = new Uint8Array()
            const reader = htmlReturn.body.getReader()

            //get the reader that reads the readable stream of data
            reader
                .read()
                .then(function appendStreamChunk({ done, value }) {
                    //If the reader doesn't return "done = true", append the chunk that
                    //was returned to us; rinse and repeat until it is done.
                    if (value) {
                        //mergeTypedArrays (helper not shown here) concatenates the two Uint8Arrays
                        fileArray = mergeTypedArrays(fileArray, value)
                    }
                    if (done) {
                        console.log(fileArray)
                        return fileArray
                    } else {
                        // "Readout not complete, reading next chunk"
                        return reader.read().then(appendStreamChunk)
                    }
                })
                .then(file => {
                    //Write the file to the new storage place
                    let status = storage
                        .ref()
                        .child(destinationPath)
                        .put(file)

                    //Remove the old reference
                    oldRef.delete()

                    return status
                })
        })
    })
}
I have a set of images I am storing in my /private sub-directory. I am trying to retrieve the data inside a server method and send it back to the client to be displayed.
How can I do that?
I have an image named test.png inside /private/photos. Here's what I've tried.
/client/test.js
Template.test.onRendered(function () {
    Meteor.call('returnPhoto', 'photos/test.png', function (e, data) {
        console.log(data);
        console.log(window.btoa(data));
        $('#imgContainerImg').attr('src', 'data:image/png;base64,' + window.btoa(data));
    });
})
/server/methods.js
returnPhoto: function (assetPath) {
    return Assets.getText(assetPath);   // first attempt
    return Assets.getBinary(assetPath); // second attempt (unreachable after the first return)
}
I tried both Assets.getText and Assets.getBinary; the first gives me binary gibberish, and the second gives me an array of numbers. Using the btoa function doesn't work in either case.
I have looked at the CollectionFS package, but I do not need to upload the pictures and store them all in a collection. I'd like the images to be available as soon as I put them in that directory, without having to call myFSCollection.insert.
Using the following, I was able to get images from the private directory and send them over to the client as a byte array, which then gets converted into a base64 string and displayed as a data URL.
client/test.js
Template.test.onRendered(function () {
    Meteor.call('returnPhoto', 'photos/test.png', function (e, data) {
        var base64String = btoa(String.fromCharCode.apply(null, new Uint8Array(data)));
        $('#imgContainerImg').attr('src', 'data:image/png;base64,' + base64String);
    });
})
server/methods.js
returnPhoto: function (assetPath) {
    return Assets.getBinary(assetPath);
}
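One caveat with the conversion above: String.fromCharCode.apply passes the whole byte array as individual arguments, which can exceed the engine's argument limit for large images. A chunked conversion avoids that; this is only a sketch, and bytesToBase64 is an illustrative name:

// Convert a byte array (e.g. the result of Assets.getBinary) to base64 in chunks,
// so String.fromCharCode never receives too many arguments at once.
function bytesToBase64(data) {
    var bytes = new Uint8Array(data);
    var binary = '';
    var chunkSize = 0x8000; // 32k bytes per apply() call
    for (var i = 0; i < bytes.length; i += chunkSize) {
        binary += String.fromCharCode.apply(null, bytes.subarray(i, i + chunkSize));
    }
    return btoa(binary);
}

// usage: $('#imgContainerImg').attr('src', 'data:image/png;base64,' + bytesToBase64(data));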
This is the solution I work with:
client/main.js
const imagesLookup = new ReactiveDict();

Template.registerHelper('img', function (src) {
    Meteor.call('image', src, (err, img) => {
        imagesLookup.set(src, img);
    });
    return imagesLookup.get(src);
});
client/main.html
<template name="stuffWithImage">
    <!-- src is the path of the image in private -->
    <img src="{{img src}}"/>
</template>
imports/methods.js
Meteor.methods({
    image(src) {
        //validate input and check if the user is allowed to see this asset
        if (Meteor.isClient) {
            //return some loading animation
        }
        const buffer = new Buffer(Assets.getBinary(src), 'binary');
        const magicNumber = buffer.toString('hex', 0, 4);
        const base64String = buffer.toString('base64');
        return `data:image/${getImageType(magicNumber)};base64,${base64String}`;
    }
});

function getImageType(magicNumber) {
    if (magicNumber === 'ffd8ffe0') {
        return 'jpeg';
    }
    //check for other formats... here is a table: https://asecuritysite.com/forensics/magic
}
I created a plugin that writes JSON data to a JSON file.
But I don't understand why I should send my JSON object down the pipe instead of writing the file directly in my plugin.
I want to use my plugin with this syntax:
gulp.task('js-hash', function () {
    // Get all js in redis
    gulp.src('./build/js/**/*.js')
        .pipe(getHashFile('/build/js/'))
        .pipe(gulp.dest('./build/js/hash.json'));
});
And not that:
gulp.task('js-hash', function () {
    // Get all js in redis
    gulp.src('./build/js/**/*.js')
        .pipe(getHashFile('./build/js/hash.json', '/build/js/'));
});
This is my plugin:
var through = require('through2');
var gutil = require('gulp-util');
var crypto = require('crypto');
var fs = require('fs');

var PluginError = gutil.PluginError;

// Consts
const PLUGIN_NAME = 'get-hash-file';

var json = {};

function getHashFile(filename, basename) {
    if (!filename) {
        throw new PluginError(PLUGIN_NAME, "Missing filename!");
    }

    // Creating a stream through which each file will pass
    var stream = through.obj(function (file, enc, callback) {
        if (file.isNull()) {
            this.push(file); // Do nothing if no contents
            return callback();
        }

        if (file.isBuffer()) {
            var hash = crypto.createHash('sha256').update(String(file.contents)).digest('hex');
            json[file.path.replace(file.cwd + basename, '')] = hash;
            return callback();
        }

        if (file.isStream()) {
            this.emit('error', new PluginError(PLUGIN_NAME, 'Stream not supported!'));
            return callback();
        }
    }).on('finish', function () {
        fs.writeFile(filename, JSON.stringify(json), function (err) {
            if (err) {
                throw err;
            }
        });
    });

    // returning the file stream
    return stream;
}

// Exporting the plugin main function
module.exports = getHashFile;
Any ideas?
Nothing prevents you from doing this... besides not respecting the plugin guidelines!
Users actually assume a plugin will stream files and that they can pipe them to other plugins.
If I get your code right, you're trying to generate a file that contains all the SHA hashes of the inbound files. Why not let users take this file and pipe it to other plugins? You'd be surprised what people could do.
While this question looks a bit opinion-based, you could definitely put the focus on how to deal with files that may not belong to the main stream of files. Issues like this can be found in many plugins; for example, gulp-uglify authors are wondering how they can add source-maps without mixing js and source map downstream.
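For example, here is a sketch of the same plugin emitting the summary downstream instead of writing it with fs, so users can pipe it wherever they like. It assumes the same through2/gulp-util dependencies as above; the hash.json name and the use of through2's flush callback are just one way to do it:

var through = require('through2');
var gutil = require('gulp-util');
var crypto = require('crypto');
var path = require('path');

function getHashFile(basename) {
    var json = {};
    var firstFile = null;

    return through.obj(
        function (file, enc, callback) {
            if (file.isBuffer()) {
                if (!firstFile) { firstFile = file; }
                var hash = crypto.createHash('sha256').update(String(file.contents)).digest('hex');
                json[file.path.replace(file.cwd + basename, '')] = hash;
            }
            callback(); // swallow the inbound file; only the summary is emitted below
        },
        function (callback) {
            // flush: push the summary as a regular Vinyl file so it can be piped onward
            if (firstFile) {
                this.push(new gutil.File({
                    cwd: firstFile.cwd,
                    base: firstFile.base,
                    path: path.join(firstFile.base, 'hash.json'),
                    contents: new Buffer(JSON.stringify(json))
                }));
            }
            callback();
        }
    );
}

// gulp.src('./build/js/**/*.js')
//     .pipe(getHashFile('/build/js/'))
//     .pipe(gulp.dest('./build/js'));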
I have the following JS function which serves as a first prototype for a Mozilla Thunderbird extension.
The goal is to connect to a server, download a sample file, then unzip it and store the contents in the Thunderbird profile folder.
Now this all works fine, except that the execution of the function stops after creating the zip file on the file system. So I have to run the function again in order to get the second part executed, which extracts the user.js file from the zip file.
Any ideas what the problem could be?
function downloadFile(httpLoc) {
    // get profile directory
    var file = Components.classes["@mozilla.org/file/directory_service;1"].
        getService(Components.interfaces.nsIProperties).
        get("ProfD", Components.interfaces.nsIFile);
    var profilePath = file.path;

    // change profile directory to native style
    profilePath = profilePath.replace(/\\/gi, "\\\\");
    profilePath = profilePath.toLowerCase();

    // download the zip file
    try {
        // new obj_URI object
        var obj_URI = Components.classes["@mozilla.org/network/io-service;1"].getService(Components.interfaces.nsIIOService).newURI(httpLoc, null, null);

        // new file object
        var obj_TargetFile = Components.classes["@mozilla.org/file/local;1"].createInstance(Components.interfaces.nsILocalFile);

        // set to download the zip file into the profile directory
        obj_TargetFile.initWithPath(profilePath + "\/" + "test.zip");

        // if the zip file doesn't exist, create it
        if (!obj_TargetFile.exists()) {
            alert("zip file is being created");
            obj_TargetFile.create(0x00, 0644);
        }

        // new persistence object
        var obj_Persist = Components.classes["@mozilla.org/embedding/browser/nsWebBrowserPersist;1"].createInstance(Components.interfaces.nsIWebBrowserPersist);

        // with persist flags if desired ??
        const nsIWBP = Components.interfaces.nsIWebBrowserPersist;
        const flags = nsIWBP.PERSIST_FLAGS_REPLACE_EXISTING_FILES;
        obj_Persist.persistFlags = flags | nsIWBP.PERSIST_FLAGS_FROM_CACHE;

        // save file to target
        obj_Persist.saveURI(obj_URI, null, null, null, null, obj_TargetFile);
    } catch (e) {
        alert(e);
    } finally {
        // unzip the user.js file to the profile directory
        // create a zipReader, open the zip file
        var zipReader = Components.classes["@mozilla.org/libjar/zip-reader;1"]
            .createInstance(Components.interfaces.nsIZipReader);
        zipReader.open(obj_TargetFile);

        // new file object, that's where the user.js will be extracted
        var obj_UnzipTarget = Components.classes["@mozilla.org/file/local;1"].createInstance(Components.interfaces.nsILocalFile);

        // set path for the user.js
        obj_UnzipTarget.initWithPath(profilePath + "\/" + "user.js");

        // if user.js doesn't exist, create it
        if (!obj_UnzipTarget.exists()) {
            alert("user.js is being created");
            obj_UnzipTarget.create(0x00, 0644);
        }

        // extract the user.js out of the zip file, to the specified path
        zipReader.extract("user.js", obj_UnzipTarget);
        zipReader.close();
    }
}
var hello = {
    click: function () {
        downloadFile("http://pse2.iam.unibe.ch/profiles/profile.zip");
    },
};
saveURI is asynchronous, so you need to set the progress listener on the persist object to know when it has finished. Here's an example of setting a progress listener and later on there's a check to see whether the transfer has finished.
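A sketch of what that can look like for the code above; the listener shape follows nsIWebProgressListener, and extractUserJs() stands in for the zipReader block that currently sits in the finally clause:

// Run the unzip step only once the download has actually finished.
obj_Persist.progressListener = {
    onProgressChange: function (aWebProgress, aRequest, aCurSelfProgress, aMaxSelfProgress,
                                aCurTotalProgress, aMaxTotalProgress) {
        // optional: report download progress here
    },
    onStateChange: function (aWebProgress, aRequest, aStateFlags, aStatus) {
        const nsIWPL = Components.interfaces.nsIWebProgressListener;
        if ((aStateFlags & nsIWPL.STATE_STOP) && (aStateFlags & nsIWPL.STATE_IS_NETWORK)) {
            // the transfer is complete and test.zip exists on disk
            extractUserJs();
        }
    }
};
obj_Persist.saveURI(obj_URI, null, null, null, null, obj_TargetFile);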