Is there a way to move files with firebase.storage()?
Example:
user1/public/image.jpg to user1/private/image.jpg
Since Firebase Storage is backed by Google Cloud Storage, you can use GCS's rewrite API (docs) or gsutil mv (docs).
Also, an example of move (docs) in GCloud Node follows:
var bucket = gcs.bucket('my-bucket');
var file = bucket.file('my-image.png');
var newLocation = 'gs://another-bucket/my-image-new.png';
file.move(newLocation, function(err, destinationFile, apiResponse) {
// `my-bucket` no longer contains:
// - "my-image.png"
//
// `another-bucket` now contains:
// - "my-image-new.png"
// `destinationFile` is an instance of a File object that refers to your
// new file.
});
There is no built-in way to move a file to another location; instead, you can download it, upload it to a new reference, and then delete it from the previous location.
As of March 29, 2022 it is possible to call the "move" function directly in this way:
import * as functions from "firebase-functions";
import {getStorage} from "firebase-admin/storage";
export const triggerStorage = functions
.storage
.object()
.onFinalize(async (object) => {
const bucket = getStorage().bucket(object.bucket);
const destPath = "user1/private/image.jpg";
await bucket.file(object.name).move(destPath);
});
I wrote a JavaScript function that accomplishes this using only the Firebase Storage API.
Happy Coding!
/**
* Moves a file in Firebase Storage from its current location to the destination
* and returns a promise that resolves with the upload status for the moved file.
* @param {String} currentPath The path to the existing file from the storage root
* @param {String} destinationPath The desired path for the file after the move
*/
function moveFirebaseFile(currentPath, destinationPath) {
let oldRef = storage.ref().child(currentPath)
return oldRef.getDownloadURL().then(url => {
    return fetch(url).then(htmlReturn => {
let fileArray = new Uint8Array()
const reader = htmlReturn.body.getReader()
//get the reader that reads the readable stream of data
return reader
.read()
.then(function appendStreamChunk({ done, value }) {
//If the reader doesn't return "done = true" append the chunk that was returned to us
// rinse and repeat until it is done.
if (value) {
fileArray = mergeTypedArrays(fileArray, value)
}
if (done) {
console.log(fileArray)
return fileArray
} else {
// "Readout not complete, reading next chunk"
return reader.read().then(appendStreamChunk)
}
})
.then(file => {
//Write the file to the new storage place
let status = storage
.ref()
.child(destinationPath)
.put(file)
//Remove the old reference
oldRef.delete()
return status
})
})
})
}
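The function above relies on a mergeTypedArrays helper that isn't shown; a minimal sketch of one, assuming both arguments are Uint8Arrays, could look like this:
// Hypothetical helper assumed by the function above: concatenates two Uint8Arrays.
function mergeTypedArrays(a, b) {
    const merged = new Uint8Array(a.length + b.length)
    merged.set(a)
    merged.set(b, a.length)
    return merged
}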
Related
I'm using Azure File Storage, and using Express JS to write a backend that renders the contents stored in Azure File Storage.
I am writing the code based on https://learn.microsoft.com/en-us/javascript/api/@azure/storage-file-share/shareserviceclient?view=azure-node-latest
const { ShareServiceClient, StorageSharedKeyCredential } = require("@azure/storage-file-share");
const account = "<account>";
const accountKey = "<accountkey>";
const credential = new StorageSharedKeyCredential(account, accountKey);
const serviceClient = new ShareServiceClient(
`https://${account}.file.core.windows.net`,
credential
);
const shareName = "<share name>";
const fileName = "<file name>";
// [Node.js only] A helper method used to read a Node.js readable stream into a Buffer
async function streamToBuffer(readableStream) {
return new Promise((resolve, reject) => {
const chunks = [];
readableStream.on("data", (data) => {
chunks.push(data instanceof Buffer ? data : Buffer.from(data));
});
readableStream.on("end", () => {
resolve(Buffer.concat(chunks));
});
readableStream.on("error", reject);
});
}
And you can view the contents through:
const downloadFileResponse = await fileClient.download();
const output = (await streamToBuffer(downloadFileResponse.readableStreamBody)).toString();
The thing is, I only want to find out whether the file exists, without spending time downloading the entire file. How could I do this?
I looked at https://learn.microsoft.com/en-us/javascript/api/@azure/storage-file-share/shareserviceclient?view=azure-node-latest
to see if the file client class has what I want, but it doesn't seem to have methods useful for this.
If you are using the @azure/storage-file-share (version 12.x) Node package, there's an exists method in ShareFileClient. You can use that to find out whether a file exists. Something like:
const fileExists = await fileClient.exists(); // returns true or false
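For completeness, a rough sketch of how a fileClient for a file in the share root could be built from the serviceClient in the question and then checked, assuming the same shareName and fileName placeholders:
// Build a ShareFileClient from the existing ShareServiceClient and check existence before downloading.
const shareClient = serviceClient.getShareClient(shareName);
const fileClient = shareClient.rootDirectoryClient.getFileClient(fileName);

const fileExists = await fileClient.exists(); // true or false
if (fileExists) {
    // only download when the file is actually there
    const downloadFileResponse = await fileClient.download();
}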
I would like to ask if it is possible to add data to a nested array. The result I want is this:
But I get this when I add a new rating with the code I use:
async function getAll(){
const userEmail= firebase.firestore().collection('users')
const snapshot=await userEmail.where('email','==',index.email).get()
if (snapshot.empty) {
console.log('No matching documents.');
return;
}
snapshot.forEach(doc => {
userEmail.doc(doc.id).set({userRatings2:firebase.firestore.FieldValue.arrayUnion({parking:[markerName]})},{merge:true})
userEmail.doc(doc.id).set({userRatings2:firebase.firestore.FieldValue.arrayUnion({rating:[currentRating]})},{merge:true})
console.log(userEmail.doc(doc.id));
});
}
getAll()
Unfortunately, in Firebase Firestore you can't even change a simple Array value at a specific index. That also means that you can't save nested array values. You can read more about it in this answer.
The only way to do it is to download the whole array, make your modifications, and save the whole array back to the database. There is not much context about your app, so I can't tell whether that is a good solution for you; it depends on how much data you have in the array.
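As a rough sketch of that read-modify-write approach with the namespaced (v8) SDK from the question (userRatings2, markerName and currentRating are taken from the question's code; the forEach callback would need to be async, or be replaced with a for...of loop):
// Read the whole array, modify it in memory, then write it back.
const userRef = userEmail.doc(doc.id);
const current = (await userRef.get()).data().userRatings2 || [];
current.push({ parking: [markerName], rating: [currentRating] });
await userRef.update({ userRatings2: current });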
I've managed to do it by using the forEach function on an array of image paths.
const [imageUri, setImageUri] = useState([]);
const [uploading, setUploading] = useState(false);
const UploadImage = () => {
setUploading(true);
imageUri.forEach(async (image, index) => {
// setTransferred(0);
const pathToFile = image;
let filename = pathToFile.substring(pathToFile.lastIndexOf('-') + 1);
const extension = filename.split('.').pop();
const name = filename.split('.').slice(0, -1).join('.');
filename = name + Date.now() + '.' + extension;
const reference = storage().ref().child(`/userprofile/${filename}`);
// path to existing file on filesystem
// uploads file
const task = reference.putFile(pathToFile);
try {
await task;
const url = await reference.getDownloadURL();
downloadableURI.push(url);
if (index == imageUri.length - 1) {
setUploading(false);
Alert.alert(
'Image uploaded!',
'Your image has been uploaded to the Firebase Cloud Storage Successfully!',
);
}
} catch (e) {
console.log(e);
}
});
};
Whenever the function is called, the array of images is uploaded to Firebase Storage.
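One caveat: forEach doesn't wait for the async callbacks, so setUploading(false) relies on the last index finishing last. A rough alternative sketch that waits for every upload with Promise.all (same storage() API, filename derivation simplified):
// Upload all images in parallel and wait for every one before clearing the flag.
const uploadAll = async () => {
  setUploading(true);
  const urls = await Promise.all(
    imageUri.map(async (image) => {
      const filename = image.substring(image.lastIndexOf('-') + 1);
      const reference = storage().ref().child(`/userprofile/${filename}`);
      await reference.putFile(image);
      return reference.getDownloadURL();
    })
  );
  setUploading(false);
  return urls; // download URLs for all uploaded images
};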
The following is my cloud function code.
exports.increaseVolume = functions.storage.object().onFinalize(async (object) => {
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const contentType = object.contentType; // File content type.
  // Exit if this is triggered on a file that is not an mp4.
  if (!contentType.startsWith('video/mp4')) {
    console.log('This is not an mp4 file.');
return null;
}
// Get the file name.
const fileName = path.basename(filePath);
// Exit if the audio is already converted.
if (fileName.endsWith('_output.mp4')) {
console.log('Already a converted audio.');
return null;
}
// Download file from bucket.
const bucket = gcs.bucket(fileBucket);
const tempFilePath = path.join(os.tmpdir(), fileName);
  // We add an '_output.mp4' suffix to the target file name. That's where we'll upload the converted audio.
const targetTempFileName = fileName.replace(/\.[^/.]+$/, '') + '_output.mp4';
const targetTempFilePath = path.join(os.tmpdir(), targetTempFileName);
const targetStorageFilePath = path.join(path.dirname(filePath), targetTempFileName);
await bucket.file(filePath).download({destination: tempFilePath});
console.log('Audio downloaded locally to', tempFilePath);
  // Increase the volume and denoise the audio using FFmpeg.
let command = ffmpeg(tempFilePath)
.audioFilters([
{
filter: 'volume',
options: '5dB'
},
{
filter: 'afftdn'
}
])
.format('mp4')
.output(targetTempFilePath);
await promisifyCommand(command);
console.log('Output audio created at', targetTempFilePath);
// Uploading the audio.
await bucket.upload(targetTempFilePath, {destination: targetStorageFilePath});
console.log('Output audio uploaded to', targetStorageFilePath);
// Once the audio has been uploaded delete the local file to free up disk space.
fs.unlinkSync(tempFilePath);
fs.unlinkSync(targetTempFilePath);
return console.log('Temporary files removed.', targetTempFilePath);
});
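The code above also calls a promisifyCommand helper that isn't shown; a minimal sketch of one, assuming the ffmpeg() call comes from fluent-ffmpeg, might be:
// Hypothetical helper: wraps a fluent-ffmpeg command in a Promise that settles when it finishes.
function promisifyCommand(command) {
  return new Promise((resolve, reject) => {
    command.on('end', resolve).on('error', reject).run();
  });
}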
This is how the file shows up in my storage bucket. Where do I get the download link, or how can I access the file? When I type the link in the browser it returns JSON saying 403 - unauthorized access.
You need to use the getSignedUrl() method as follows:
const uploadResp = await bucket.upload(targetTempFilePath, {destination: targetStorageFilePath});
const file = uploadResp[0];
const options = {
action: 'read',
expires: '03-17-2025'
};
const getSignedUrlResponse = await file.getSignedUrl(options);
const url = getSignedUrlResponse[0];
// Do whatever you want with this url: save it in Firestore, for example
The error you are facing is about permissions: who should have access to what?
First of all, you have to think about your business logic. Who should have access to that file? Is it OK for the file to be public? Is there a time frame in which the file should be accessible? Should the data remain in the Bucket, or can it be deleted?
I think there are two options for you:
Create a Bucket with public data; in that case the data would be accessible to everyone who knows the specific filename in the Bucket.
If the above is not allowed, then you can create a SignedURL, as mentioned in @Renaud Tarnec's linked example. Keep in mind that a SignedURL has time-limited access, and everyone who has the URL will be able to get the object. Once the time has expired, the object will no longer be accessible.
Once you have defined this, you can either delete the object in your Bucket programmatically or you can set up Lifecycle Management. There, you can define configurations containing a set of rules, for example, deleting objects by their age (in days).
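For instance, a rough sketch of adding such an age-based delete rule with the Node.js @google-cloud/storage client, using the same bucket object as in the function above (the 30-day age is just an example):
// Add a lifecycle rule that deletes objects more than 30 days old.
await bucket.addLifecycleRule({
  action: 'delete',
  condition: { age: 30 }, // days since object creation
});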
I have written a function to upload a local file (using cordova File) to my firebase storage. It looks like there is no API to iterate all the files in storage (based on other SO threads), so I decided to write the download location to my realtime DB.
At the moment, authentication is OFF in my realtime DB and storage for testing. The file is uploaded correctly and I can see it in my storage, but I don't see any DB entry. Can someone help on what is going wrong?
// upload trip data to firebase. currently a public bucket
cloudUpload(prg) {
console.log ("cloud upload");
//this.presentLoader("loading...");
let storageRef = firebase.storage().ref();
console.log ("storage ref is "+storageRef);
this.file.readAsArrayBuffer(this.file.dataDirectory, this.logFile)
.then (succ=>{
console.log ("File read");
console.log (succ);
let blob = new Blob([succ],{type:"text/plain"});
console.log ("Blob created");
let name = "file-"+Date()+".txt";
let uploadUrl = storageRef.child(`tripdata/${name}`);
let uploadTask = uploadUrl.put(blob);
uploadTask.on(firebase.storage.TaskEvent.STATE_CHANGED,
(snapshot) => {
let progress = Math.round((snapshot.bytesTransferred / snapshot.totalBytes) * 100);
prg.val = progress;
},
(error) => {
console.log ("Firebase put error "+error);
setTimeout(()=>{prg.val = -1; },500);
this.presentToast("upload error","error") },
() => { prg.val = 100;
setTimeout(()=>{prg.val = -1; },500);
// write download URL to realtime DB so we can iter it later
// there is no API in storage today to iterate
let downloadURL = uploadTask.snapshot.downloadURL;
console.log ("Download url is "+downloadURL);
let key = 'tripDataIndex/'+name;
console.log ("key="+key);
firebase.database().ref(key)
            .set({'url': downloadURL, 'uploadedon': Date()}) // nothing created
            .catch(err => {console.log("ERROR " + err); this.presentToast("error creating index", "error")});
this.presentToast("upload complete")}
)
})
.catch (err=>{console.log ("Cordova Read Error "+err);})
}
It seems the problem was in the key value. My "name" was
let name = "file-"+Date()+".txt";
Date() includes spaces, parentheses, etc., which conflict with the key naming rules -- using a different key name worked perfectly!
What is very odd, however, is it did not throw an error. I added a .catch at the end like Frank suggested, but it never went to the catch handler.
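For reference, a sketch of a key-safe alternative (assuming the same upload flow; note that characters such as '.', '#', '$', '[' and ']' are not allowed in Realtime Database keys, so the '.txt' extension also has to be handled):
// Use a numeric timestamp in the filename and strip '.' before using it as a DB key.
let name = "file-" + Date.now() + ".txt";              // e.g. "file-1650000000000.txt"
let key = 'tripDataIndex/' + name.replace(/\./g, '_'); // '.' is not allowed in a key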
I am working on Windows 8 using HTML/JS. I am trying to extract a Zip file that was picked using the FilePicker.
To extract the Zip file I am using this page.
In that link there is a function, unzipAsync, to extract a Zip file:
function unzipAsync(filePath, replaceIfExists) {
var fileCollisionOption = replaceIfExists ?
storage.CreationCollisionOption.replaceExisting :
storage.CreationCollisionOption.failIfExists;
return storage.StorageFile
.getFileFromPathAsync(filePath)
.then(getFileAsUint8Array)
.then(function (zipFileContents) {
//Create the zip data in memory
var zip = new JSZip(zipFileContents);
//Extract files
var promises = [];
var lf = storage.ApplicationData.current.localFolder;
_.each(zip.files, function (zippedFile) {
//Create new file
promises.push(lf
.createFileAsync(zippedFile.name, fileCollisionOption)
.then(function (localStorageFile) {
//Copy the zipped file's contents into the local storage file
var fileContents = zip.file(zippedFile.name).asUint8Array();
return storage.FileIO
.writeBytesAsync(localStorageFile, fileContents);
})
);
});
return WinJS.Promise.join(promises);
});
}
Before this I added the JSZip library to the project folder.
How can I integrate the library into my project? Here is my project link.
Edit:
function getFileAsUint8Array(file) {
return storage.FileIO.readBufferAsync(file)
.then(function (buffer) {
//Read the file into a byte array
var fileContents = new Uint8Array(buffer.length);
var dataReader = storage.Streams.DataReader.fromBuffer(buffer);
dataReader.readBytes(fileContents);
dataReader.close();
return fileContents;
});
}
Now it is working without errors, but it is not doing anything like extracting my file.
NOTE:
- If anyone knows another way that is better than this, or another library I can use to extract files in WinJS, please suggest it.
Well, I'm guessing you haven't created a getFileAsUint8Array function (or at least, you aren't showing it above). I am doing something similar (although getting the zip file from an XHR call instead). Once I have the zip file and the folder I want to put the zipped files in, I do something like the code below.
Note, however, that I had to modify this code as I do a few other things, so I haven't tested it exactly as is (and obviously it wouldn't work within your code above).
Here's the (mostly) full code:
WinJS.xhr({ "url": zipUrl, "responseType": "arraybuffer" })
.done(
function (e) {
if (e.getResponseHeader("content-type") !== "application/zip") {
console.error("Remote file was not sent with correct Content-Type: expected 'application/zip', but received '" + e.getResponseHeader("content-type") + "'");
}
unzipAndStore(new JSZip(e.response), someLocalFolder);
},
function() { /* handle ajax errors */ }
);
/**
* @param {JSZip} jszipobj The JSZip object
* @param {StorageFolder} localFolder The folder to unzip into
* @return {Promise}
*/
var unzipAndStore = function (jszipobj, localFolder) {
var promises = [];
Object.keys(jszipobj.files).forEach(function (key) {
var fileName;
// ignore folder entries, they're handled as needed below
if (/\/$/.test(key)) { return; }
fileName = jszipobj.files[key].name.match(/[^\/]+\.[^\.\/]+$/);
if (!fileName) {
console.error("Unable to process zip entry without proper filename: ", jszipobj.files[key].name);
return;
}
fileName = fileName[0];
promises.push(
getFolderFromPathRecursive(jszipobj.files[key].name, localFolder)
.then(
function (subFolder) {
console.log("creating file in folder: ", fileName, subFolder.name);
return subFolder.createFileAsync(fileName, Windows.Storage.CreationCollisionOption.replaceExisting)
}
)
.then(
function (localStorageFile) {
return Windows.Storage.FileIO
.writeBytesAsync(localStorageFile, jszipobj.file(jszipobj.files[key].name).asUint8Array());
}
)
);
});
return WinJS.Promise.join(promises);
};
/**
* Promise completes with the lowest level folder in the given path,
* creating subfolders along the way
* @param {String} path The path to the lowest subfolder you want a reference to
* @param {StorageFolder} rootFolder The folder to begin at for this iteration
* @return {Promise}
*/
var getFolderFromPathRecursive = function (path, rootFolder) {
var normalizedPath = path.replace(/\/?[^\/]+\.[^\.\/]+$/, ""), // remove a possible filename from the end of the path
folders = normalizedPath.split(/\//), // get an array of the folders in the path
subFolderName = folders.shift(); // remove the first folder in the path as the new one to create
return new WinJS.Promise(function (complete, error) {
if (!subFolderName || !subFolderName.length) {
complete(rootFolder);
return;
}
rootFolder
.createFolderAsync(subFolderName, Windows.Storage.CreationCollisionOption.openIfExists)
.then(
function (folder) {
return getFolderFromPathRecursive(folders.join("/"), folder);
},
error
)
.then(
function(folder) {
complete(folder);
return;
},
error
)
});
};
I did it by using the Decompressing sample and adding a C# Windows Runtime component to my JavaScript project.
Here is my post