I'm using the method below to try to upload an image to Firebase from my React Native app, because it seems to be a very common example on the internet. However, I think it is quite old and I suspect it no longer works on newer versions of React Native.
Can someone please show me the correct way to save images in Firebase Storage? Thank you!
const uploadImage = (uri, imageName, mime = 'image/jpg') => {
  return new Promise((resolve, reject) => {
    const uploadUri = Platform.OS === 'ios' ? uri.replace('file://', '') : uri;
    let uploadBlob = null;
    const imageRef = firebase.storage().ref('images/').child(imageName);

    fs.readFile(uploadUri, 'base64')
      .then((data) => {
        return Blob.build(data, { type: `${mime};BASE64` });
      })
      .then((blob) => {
        uploadBlob = blob;
        return imageRef.put(blob, { contentType: mime });
      })
      .then(() => {
        uploadBlob.close();
        return imageRef.getDownloadURL();
      })
      .then((url) => {
        resolve(url);
      })
      .catch((error) => {
        reject(error);
      });
  });
}
You can just use the putFile method.
Here is my FirebaseStorageService with saveImage method (I'm using RN 0.53 and react-native-firebase 4.1.0):
saveImage(ref, image, imageName, onSuccess, onError) {
  LOG.debug("FirebaseStorageService :: saveImage ", {ref: ref, image: image, imageName: imageName});
  var firebaseStorageRef = firebase.storage().ref(ref);
  const imageRef = firebaseStorageRef.child(imageName + ".jpeg");
  LOG.debug("FirebaseStorageService :: imageRef ", {imageRef: imageRef});

  imageRef.putFile(image.path, {contentType: 'image/jpeg'}).then(function () {
    return imageRef.getDownloadURL();
  }).then(function (url) {
    LOG.debug("Image url", {url: url});
    onSuccess(url);
  }).catch(function (error) {
    LOG.error("Error while saving the image.. ", error);
    onError(error);
  });
}
The image argument is the object returned by react-native-image-crop-picker. The user can choose between opening the camera and opening the gallery, and either way the library returns an image object.
The path property of that image object is just a string like "file://..." on Android and "/Users/..." on iOS.
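For reference, here is a minimal usage sketch of the method above. It assumes a firebaseStorageService instance exposing saveImage; the 'images' ref path, file name, and callbacks are just placeholders:

import ImagePicker from 'react-native-image-crop-picker';

ImagePicker.openPicker({ mediaType: 'photo' })
  .then(image => {
    // image.path is the local file path described above ("file://..." on Android)
    firebaseStorageService.saveImage(
      'images',                                     // storage ref (example)
      image,                                        // picker result with .path
      'myImage',                                    // ".jpeg" is appended inside saveImage
      url => console.log('Uploaded:', url),         // onSuccess
      error => console.log('Upload failed', error)  // onError
    );
  })
  .catch(error => console.log('Picker cancelled or failed', error));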
I built an image detection mobile app (it recognizes things like a plastic bottle, an aluminum can, or a milk jug) with React Native using the Google Vision API.
It worked well before and got responses successfully.
But after I added a Firebase image upload function to store the image, the Google Vision API stopped working.
My guess is that the Firebase image upload and the Google Vision API conflict and are not compatible with each other.
Or there may be an error in my image upload function, but I am still not sure what the issue is. Here is my code.
const takePicture = async () => {
  if (this.camera) {
    const options = { quality: 0.5, base64: true };
    const data = await this.camera.takePictureAsync(options);
    setScannedURI(data.uri);
    imageUploadToFirebase(data);
    // callGoogleVisionApi(data.base64) // ===> If I comment out the upload call above and call the Vision API here instead, it works well.
    setIsLoading(true);
  }
};
const imageUploadToFirebase = (imageData) => {
  const Blob = RNFetchBlob.polyfill.Blob; // firebase image upload
  const fs = RNFetchBlob.fs;
  window.XMLHttpRequest = RNFetchBlob.polyfill.XMLHttpRequest;
  window.Blob = Blob;
  const Fetch = RNFetchBlob.polyfill.Fetch;
  window.fetch = new Fetch({
    auto: true,
    binaryContentTypes: [
      'image/',
      'video/',
      'audio/',
      'foo/',
    ]
  }).build();

  let uploadBlob = null;
  var path = Platform.OS === "ios" ? imageData.uri.replace("file://", "") : imageData.uri;
  var newItemKey = Firebase.database().ref().child('usersummary').push().key;
  var _name = newItemKey + 'img.jpg';
  setIsLoading(true);

  fs.readFile(path, "base64")
    .then(data => {
      let mime = "image/jpg";
      return Blob.build(data, { type: `${mime};BASE64` });
    })
    .then(blob => {
      uploadBlob = blob;
      Firebase.storage()
        .ref("scannedItems/" + _name)
        .put(blob)
        .then(() => {
          uploadBlob.close();
          return Firebase.storage()
            .ref("scannedItems/" + _name)
            .getDownloadURL();
        })
        .then(async uploadedFile => {
          setFirebaseImageURL(uploadedFile);
          // callGoogleVisionApi(imageData.base64) // ===> If I call it here, it doesn't work.
        })
        .catch(error => {
          console.log({ error });
        });
    });
}
This is my callGoogleVisionApi function.
const callGoogleVisionApi = async (base64) => {
  let googleVisionRes = await fetch(config.googleCloud.api + config.googleCloud.apiKey, {
    method: 'POST',
    body: JSON.stringify({
      "requests": [{
        "image": { "content": base64 },
        features: [
          { type: "LABEL_DETECTION", maxResults: 30 },
          { type: "WEB_DETECTION", maxResults: 30 }
        ],
      }]
    })
  })
    .catch(err => { console.log('Network error=>: ', err) });

  await googleVisionRes.json()
    .then(googleResp => {
      if (googleResp) {
        let responseArray = googleResp.responses[0].labelAnnotations;
        responseArray.map((item, index) => {
          if (item.description != "" && item.description != undefined && item.description != null) {
            newArr.push(item.description);
          }
        });
      }
    }).catch((error) => { console.log(error) });
}
Note: If I upload the image to Firebase after getting the result from the Google Vision API, a second call to the Vision API does not work either.
I have added my callGoogleVisionApi function above (it works fine without the Firebase image upload function).
What could be the solution to this issue?
I found the reason, but I am still curious why: the RNFetchBlob polyfills and the Google Vision call seem to conflict with each other.
I changed the Firebase image upload function, and it worked well.
Here is my modified Firebase image upload function.
const imageUploadToFirebase = async () => {
  var path = Platform.OS === 'ios' ? scannedURI.replace('file://', '') : scannedURI;
  const response = await fetch(path);
  const blob = await response.blob();
  var newItemKey = Firebase.database()
    .ref()
    .child('usersummary')
    .push().key;
  var _name = newItemKey + 'img.jpg';

  Firebase.storage()
    .ref(_name)
    .put(blob)
    .then(() => {
      return Firebase.storage()
        .ref(_name)
        .getDownloadURL();
    })
    .then(async uploadedFile => {
      let image = selectImage(sendItem.name?.toLowerCase());
      sendItem.image = image;
      sendItem.scannedURI = uploadedFile;
      AsyncStorage.getItem('#scanedItemList')
        .then(res => {
          if (res != null && res != undefined && res != '') {
            let result = `${res}#${JSON.stringify(sendItem)}`;
            AsyncStorage.setItem('#scanedItemList', result);
          } else {
            AsyncStorage.setItem(
              '#scanedItemList',
              JSON.stringify(sendItem),
            );
          }
        })
        .catch(err => console.log(err));
    })
    .catch(error => {
      console.log({error});
    });
}
I'm not sure if you are using the @google-cloud/vision package in the callGoogleVisionApi() function, but as far as I know it is meant to be used on the server side and authenticated with a service account. As an alternative to this method, you can use a Cloud Storage trigger for Cloud Functions, which fires whenever a new file is uploaded, and then call the Cloud Vision API from there.
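A rough sketch of that trigger approach (this runs server side in Cloud Functions, not in React Native, and assumes the firebase-functions and @google-cloud/vision Node packages; the function name is just a placeholder):

const functions = require('firebase-functions');
const vision = require('@google-cloud/vision');

const client = new vision.ImageAnnotatorClient();

// Fires whenever a new file is finalized in the default Storage bucket
exports.labelUploadedImage = functions.storage.object().onFinalize(async (object) => {
  const gcsUri = `gs://${object.bucket}/${object.name}`;
  const [result] = await client.labelDetection(gcsUri);
  const labels = (result.labelAnnotations || []).map(label => label.description);
  console.log('Labels for', object.name, labels);
  // e.g. write the labels back to the Realtime Database or Firestore here
});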
The Google Vision API can take a base64-encoded image, a publicly accessible HTTP URI, or a Cloud Storage URI.
In order to use an HTTP URI you should change the JSON payload of your callGoogleVisionApi function from this:
{
  "requests": [{
    "image": { "content": base64 },
    features: [
      { type: "LABEL_DETECTION", maxResults: 30 },
      { type: "WEB_DETECTION", maxResults: 30 }
    ],
  }]
}
to this:
{
  "requests": [{
    "image": { "source": { "imageUri": 'https://PUBLIC_URI_FOR_THE_IMAGE' } },
    features: [
      { type: "LABEL_DETECTION", maxResults: 30 },
      { type: "WEB_DETECTION", maxResults: 30 }
    ],
  }]
}
You've got a better explanation here: Make a Vision API request.
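For completeness, a hedged sketch of wiring the two pieces together in the questioner's setup: upload the image, read back the download URL, and pass it as source.imageUri (config.googleCloud is assumed to be the same config object used above, and the URL must be reachable by Google's servers):

const uploadAndAnalyze = async (localUri, storagePath) => {
  const response = await fetch(localUri);
  const blob = await response.blob();

  const ref = Firebase.storage().ref(storagePath);
  await ref.put(blob);
  const url = await ref.getDownloadURL();

  const visionRes = await fetch(config.googleCloud.api + config.googleCloud.apiKey, {
    method: 'POST',
    body: JSON.stringify({
      requests: [{
        image: { source: { imageUri: url } },
        features: [
          { type: 'LABEL_DETECTION', maxResults: 30 },
          { type: 'WEB_DETECTION', maxResults: 30 },
        ],
      }],
    }),
  });
  return visionRes.json();
};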
I am working on a program to upload an image. I am using expo-image-picker to pick the image, and it correctly updates the image state, as you can see below. The code executes and an image file gets uploaded to Firebase Storage, but it is uploaded as a 9-byte file rather than the full 2.4 MB image.
Please help me understand the mistake that I am making.
image value:
file:///var/mobile/Containers/Data/Application/15A92FD5-894C-47D6-ABEF-36F73CA7F778/Library/Caches/ExponentExperienceData/%2540anonymous%252Fapp26Apr-a5cceae1-72c0-4cec-a926-ffb46822d087/ImagePicker/C7ABC0E1-80FC-4B87-A1AD-8B6DD4A1BB9A.jpg
uploadUri value:
/var/mobile/Containers/Data/Application/15A92FD5-894C-47D6-ABEF-36F73CA7F778/Library/Caches/ExponentExperienceData/%2540anonymous%252Fapp26Apr-a5cceae1-72c0-4cec-a926-ffb46822d087/ImagePicker/C7ABC0E1-80FC-4B87-A1AD-8B6DD4A1BB9A.jpg
filename value:
C7ABC0E1-80FC-4B87-A1AD-8B6DD4A1BB9A.jpg
const uri = image;
const filename = uri.substring(uri.lastIndexOf("/") + 1);
const uploadUri = Platform.OS === "ios" ? uri.replace("file://", "") : uri;

firebase
  .storage()
  .ref(filename)
  .put(uploadUri)
  .then((value) => {
    console.log("Image uploaded");
  });
I found a solution using a blob to save the image. Sharing it below in case anyone faces the same issue.
let uri = image;
const filename = uri.substring(uri.lastIndexOf("/") + 1);
const uploadUri = Platform.OS === "ios" ? uri.replace("file://", "") : uri;

const blob = await new Promise((resolve, reject) => {
  const xhr = new XMLHttpRequest();
  xhr.onload = function () {
    resolve(xhr.response);
  };
  xhr.onerror = function (e) {
    console.log(e);
    reject(new TypeError("Network request failed"));
  };
  xhr.responseType = "blob";
  xhr.open("GET", uri, true);
  xhr.send(null);
});

firebase
  .storage()
  .ref()
  .child("user/" + userid)
  .put(blob)
  .then((uri) => {
    console.log(uri);
  })
  .catch((error) => {
    console.log(error);
  });
I faced the same problem and found the solution after a couple of hours. You have to convert your image to a file/blob and pass that to put().
The solution looks like the code below:
fetch(filePath)
  .then(response => {
    return response.blob();
  })
  .then(blob => {
    const storageRef = firebase.storage().ref().child('test.png');
    storageRef.put(blob).then(snapshot => {
      console.log('Uploaded a blob or file!');
    });
  });
I've been building an app with Firebase and React Native, primarily using Firestore. Firestore has been great, but for some reason my write to Firestore only works on the first attempt (when I remove the app, rebuild, and perform the write).
I tried doing the exact same thing except for the write to Firestore, and everything works as expected.
I am also receiving no error!
Here is what I am doing:
export const addBrandProduct = (postObj) => {
  return () => {
    firebase
      .firestore()
      .collection('brandProducts')
      .add(postObj)
      .then((docRef) => {
        console.log("Document written with ID: ", docRef.id);
        Actions.categories();
      })
      .catch(error => {
        console.error("Error adding document: ", error);
      });
  };
};
For more context, here is the component code that calls addBrandProduct():
onUploadImages = () => {
  let photo =
    Platform.OS === 'ios'
      ? this.state.images.map(img => img.uri.replace('file://', ''))
      : this.state.images.map(img => img.uri);

  photo.forEach((image, i) => {
    const sessionId = new Date().getTime();
    const Blob = RNFetchBlob.polyfill.Blob;
    const fs = RNFetchBlob.fs;
    window.XMLHttpRequest = RNFetchBlob.polyfill.XMLHttpRequest;
    window.Blob = Blob;
    let uploadBlob = null;
    let mime = 'image/jpg';

    const imageRef = firebase
      .storage()
      .ref('brandProducts/')
      .child(`${this.props.userData.uid}`)
      .child(`${sessionId}-${i}`);

    fs.readFile(image, 'base64')
      .then(data => {
        return Blob.build(data, {type: `${mime};BASE64`});
      })
      .then(blob => {
        uploadBlob = blob;
        return imageRef.put(blob, {contentType: mime});
      })
      .then(() => {
        uploadBlob.close();
        return imageRef.getDownloadURL();
      })
      .then(url => {
        // if this is the last uploaded image, post the data to the db
        if (i === this.state.images.length - 1) {
          const urls = {
            ...this.state.urls,
            [i]: url,
          };
          const postObj = {
            ...this.state.postObj,
            urls: urls,
          };
          this.props.addBrandProduct(postObj);
        } else {
          this.setState({
            urls: {
              ...this.state.urls,
              [i]: url,
            },
          });
        }
      })
      .catch(error => {
        console.log(error);
      });
  });
};
Basically, I am uploading a maximum of 3 images along with some data for them. To ensure they are all uploaded before adding the post data (the Firestore write), I use a forEach and, when the last upload completes, call the action that writes the post data.
Edit
Hmm, addBrandProduct is a function that creates another function.
So when you call this.props.addBrandProduct(postObj), nothing is sent to Firestore; you have just created a new function that still needs to be called (dispatched).
Maybe you can take Redux out of the picture and call Firebase directly, make sure everything works, and then go back to the Redux way if you still want to use it (a sketch of how the thunk would actually get dispatched follows the code below). I also made the uploads run in parallel instead of sequentially. Hope it helps; it is hard to find the real problem when it can come from anywhere.
onUploadImages = () => {
  let photo = Platform.OS === 'ios'
    ? this.state.images.map(img => img.uri.replace('file://', ''))
    : this.state.images.map(img => img.uri);

  Promise.all(photo.map((image, i) => {
    const sessionId = new Date().getTime();
    const Blob = RNFetchBlob.polyfill.Blob;
    // RNFetchBlob.fs can be used directly below
    // This is not used:
    // window.XMLHttpRequest = RNFetchBlob.polyfill.XMLHttpRequest;
    // This is not advised:
    // window.Blob = Blob;
    let uploadBlob = null;
    let mime = 'image/jpg';

    const imageRef = firebase
      .storage()
      .ref('brandProducts/')
      .child(`${this.props.userData.uid}`)
      .child(`${sessionId}-${i}`);

    return RNFetchBlob.fs.readFile(image, 'base64')
      .then(data => {
        return Blob.build(data, {type: `${mime};BASE64`});
      })
      .then(blob => {
        uploadBlob = blob;
        return imageRef.put(blob, {contentType: mime});
      })
      .then(() => {
        uploadBlob.close();
        return imageRef.getDownloadURL();
      });
  }))
    .then(results => {
      // results is, here, [urlFromFirst, urlFromSecond, ...]
      const urls = {...this.state.urls};
      results.forEach((r, i) => urls[i] = r);
      const postObj = {
        ...this.state.postObj,
        urls,
      };
      return firebase
        .firestore()
        .collection('brandProducts')
        .add(postObj);
    })
    .then(docRef => {
      console.log("Document written with ID: ", docRef.id);
    })
    .catch(error => {
      console.error(error);
    });
};
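If you do want to keep Redux, here is a minimal sketch of what I meant about the thunk: it only runs if it is actually dispatched. This assumes redux-thunk is applied and the component is wrapped with react-redux's connect; UploadScreen and the import path are placeholders:

import { connect } from 'react-redux';
import { addBrandProduct } from './actions';

// The object shorthand wraps the action creator in dispatch, so calling
// this.props.addBrandProduct(postObj) dispatches the returned thunk and
// redux-thunk then executes it (that is where the Firestore write happens).
export default connect(null, { addBrandProduct })(UploadScreen);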
I'm facing yet another issue.
I'm using the Firebase database to store text and Firebase Storage to store files, and here comes my issue.
Q: How do I fetch the correct image from Storage when fetching a particular element from the database?
Here's my attempt:
const storageRef = firebase.storage().ref('companyImages/companyImage' + 123);
// ^^^ I don't have access to the id yet :(
const task = storageRef.put(companyImage);

task.on('state_changed', () => {
  const percentage = (snap.bytesTransferred / snap.totalBytes) * 100;
  // ^^^ not sure if I even need this
}, (err) => {
  console.log(err);
}, () => {
  firebase.database().ref('offers').push(values);
  // ^^^ now I could retrieve the id from it with .key, but it's too late
});
As you can see, first I upload the image, and only when that succeeds do I start uploading the data to the database.
Still, it doesn't work as it is supposed to. When uploading the image I need to name it with the correct id so I can retrieve it easily later, in components.
It may look a little complex, but I will appreciate any kind of help, suggestion, or hint.
Should I first upload the data to the DB and then the image to Storage?
You can generate the push ID before you upload the file – you can also just save the download URL of the returned snapshot at task.snapshot.downloadURL so you don't have to retrieve the file from storage using the storage ref.
const offerRef = firebase.database().ref('offers').push();
const storageRef = firebase.storage().ref(`companyImages/${offerRef.key}`);
const task = storageRef.put(companyImage);

task.on('state_changed', (snap) => {
  const percentage = (snap.bytesTransferred / snap.totalBytes) * 100;
}, (error) => {
  console.log(error);
}, () => {
  offerRef.set(values);
});
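One hedged follow-up on the snapshot part: in newer Firebase SDK versions the upload snapshot no longer exposes downloadURL directly, so in the completion callback you would typically fetch it from the ref and store it alongside the offer (the imageUrl field name here is just an example):

task.on('state_changed', (snap) => {
  const percentage = (snap.bytesTransferred / snap.totalBytes) * 100;
}, (error) => {
  console.log(error);
}, () => {
  storageRef.getDownloadURL().then((url) => {
    offerRef.set({ ...values, imageUrl: url });
  });
});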
I would suggest using .getDownloadURL(), then pushing all of the uploaded files' download URLs into an object or array and storing that in your database. Later you can read this object or array from, let's say, user/ProfilePhotos, and in your app-level code just use the download URL as the uri inside an Image tag!
In this example (React Native) I upload multiple photos, save each download URL into an array, then set the array on the user's account in Firebase.
export const userVehiclePhotoUploadRequest = (photos, user, year) => dispatch => {
  console.log('Inside vehiclePhotoUpload Actions', photos, user);
  let referenceToUploadedPhotos = [];

  return new Promise((resolve, reject) => {
    photos.map(ele => {
      let mime = 'application/octet-stream';
      let uri = ele.uri;
      let uploadUri = Platform.OS === 'ios' ? uri.replace('file://', '') : uri;
      let sessionId = new Date().getTime();
      let uploadBlob = null;
      let imageRef = firebase.storage().ref('vehicleImages/' + `${user.account.uid}`).child(`${sessionId}`);

      fs.readFile(uploadUri, 'base64')
        .then((data) => {
          return Blob.build(data, { type: `${mime};BASE64` });
        })
        .then((blob) => {
          uploadBlob = blob;
          return imageRef.put(blob, { contentType: mime });
        })
        .then(() => {
          uploadBlob.close();
          return imageRef.getDownloadURL();
        })
        .then((url) => {
          referenceToUploadedPhotos.push(url);
          console.log('ARRAY OF URLS WHILE PUSHING', referenceToUploadedPhotos);
          resolve(url);
        })
        .catch((error) => {
          reject(error);
        });
    });
  })
    .then(() => {
      // I did this to not go home until photos are done uploading.
      let vehicles;
      firebase.database().ref('users/' + user.account.uid + `/allVehicles/allVehiclesArray`).limitToFirst(1).once('value').then(function (snapshot) {
        // ******** This method is straight from their docs ********
        // ******** It returns whatever is found at the path xxxxx/users/user.uid ********
        vehicles = snapshot.val();
      }).then(() => {
        console.log('ARRAY OF URLS BEFORE SETTING', referenceToUploadedPhotos);
        // let lastVehicle = vehicles.length - 1;
        firebase.database().ref('users/' + user.account.uid + `/allVehicles/allVehiclesArray/` + `${Object.keys(vehicles)[0]}` + `/photosReference`).set({
          referenceToUploadedPhotos
        }).then(() => {
          dispatch(loginRequest(user.account));
        });
      });
    });
};
And then in your code, let's say inside a map of the user's information...
{ ele.photosReference !== undefined ? dynamicAvatar = { uri: `${ele.photosReference.referenceToUploadedPhotos[0]}` } : undefined }
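For completeness, a tiny sketch of rendering one of those stored URLs (dynamicAvatar is the { uri: ... } object built above; the styling is arbitrary):

import React from 'react';
import { Image } from 'react-native';

const Avatar = ({ dynamicAvatar }) => (
  <Image source={dynamicAvatar} style={{ width: 64, height: 64, borderRadius: 32 }} />
);

export default Avatar;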
Could you show me the source code for uploading multiple images? I have tried to upload multiple images to my Firebase project. I'm using react-native-image-crop-picker to select the images, and then react-native-fetch-blob to convert them before uploading to Firebase. After selecting the images, I loop over the array and convert each one to a blob inside the loop. Sometimes it works, but sometimes the image URL is empty. I hope I can find the answer here.
Try this:
const uploadImages = async (photos) => {
  // _ is lodash
  const uploadImagePromises = _.map(photos, (p, index) => uploadImage({ uri: p, imageName: "image_" + index }));
  const urls = await Promise.all(uploadImagePromises);
  console.log(urls);
}

const uploadImage = ({ uri, imageName }) => {
  const Blob = RNFetchBlob.polyfill.Blob;
  const fs = RNFetchBlob.fs;
  window.XMLHttpRequest = RNFetchBlob.polyfill.XMLHttpRequest;
  window.Blob = Blob;
  const mime = 'image/jpg';

  return new Promise((resolve, reject) => {
    const uploadUri = Platform.OS === 'ios' ? uri.replace('file://', '') : uri;
    let uploadBlob = null;
    const imageRef = firebase.storage().ref('/images/').child(imageName);

    fs.readFile(uploadUri, 'base64')
      .then((data) => {
        return Blob.build(data, { type: `${mime};BASE64` });
      })
      .then((blob) => {
        uploadBlob = blob;
        return imageRef.put(blob, { contentType: mime });
      })
      .then(() => {
        uploadBlob.close();
        resolve(imageRef.getDownloadURL());
      })
      .catch(error => {
        console.log("error", error);
        reject();
      });
  });
}