JavaScript promises + useState + Firebase onSnapshot

I have a database listener in my code. I am trying to get every new post of the user's and then, once I have all of them in an array, update the posts state.
My code looks like this, but it is not working well, because setPosts is asynchronous and can be called again before the previous state update has finished. I think I need to wrap the listener in a Promise, but I have no idea how to do that while still detaching the listener when the component unmounts.
useEffect(() => {
  const { firebase } = props;
  // Realtime database listener
  const unsuscribe = firebase
    .getDatabase()
    .collection("posts")
    .doc(firebase.getCurrentUser().uid)
    .collection("userPosts")
    .onSnapshot((snapshot) => {
      let changes = snapshot.docChanges();
      changes.forEach(async (change) => {
        if (change.type === "added") {
          // Get the new post
          const newPost = change.doc.data();
          // TODO - Move to flatlist On end reached
          const uri = await firebase
            .getStorage()
            .ref(`photos/${newPost.id}`)
            .getDownloadURL();
          // TODO - Add the new post *(sorted by time)* to the posts list
          setPosts([{ ...newPost, uri }, ...posts]);
        }
      });
    });
  /* Note: the first time it runs, this listener will receive all of the user's existing posts */
  return () => {
    // Detach the listener
    unsuscribe();
  };
}, []);
Any ideas?
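For reference, the usual way around the stale-closure problem described above is the functional form of the state setter, so every update is computed from the latest state rather than from the posts value captured when the effect ran. A minimal sketch of that idea (not the original code; it assumes the same firebase wrapper, setPosts, and document layout as above):

useEffect(() => {
  const { firebase } = props;
  const unsubscribe = firebase
    .getDatabase()
    .collection("posts")
    .doc(firebase.getCurrentUser().uid)
    .collection("userPosts")
    .onSnapshot(async (snapshot) => {
      // Collect the added documents from this snapshot
      const added = snapshot
        .docChanges()
        .filter((change) => change.type === "added")
        .map((change) => change.doc.data());
      // Resolve every download URL before touching the state
      const withUris = await Promise.all(
        added.map(async (post) => ({
          ...post,
          uri: await firebase.getStorage().ref(`photos/${post.id}`).getDownloadURL(),
        }))
      );
      // Functional update: based on the previous state, so overlapping
      // snapshots cannot overwrite each other's posts
      setPosts((prevPosts) => [...withUris, ...prevPosts]);
    });
  return () => unsubscribe();
}, []);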
I have also considered doing this:
useEffect(() => {
  const { firebase } = props;
  let postsArray = [];
  // Realtime database listener
  const unsuscribe = firebase
    .getDatabase()
    .collection("posts")
    .doc(firebase.getCurrentUser().uid)
    .collection("userPosts")
    .orderBy("time") // Sorted by date
    .onSnapshot((snapshot) => {
      let changes = snapshot.docChanges();
      changes.forEach((change) => {
        if (change.type === "added") {
          // Get the new post
          const newPost = change.doc.data();
          // Add the new post to the posts list
          postsArray.push(newPost);
        }
      });
      setPosts(postsArray.reverse());
    });
  return () => {
    // Detach the listener
    unsuscribe();
  };
}, []);
But in this case the post uri is also saved in the Firestore document (something I can do because I write to Firestore from a Cloud Function that reads the post from Storage), and I don't know whether that is good practice.
Thanks.
Update
Cloud Function code:
exports.validateImageDimensions = functions
  .region("us-central1")
  .runWith({ memory: "2GB", timeoutSeconds: 120 })
  .https.onCall(async (data, context) => {
    // Libraries
    const admin = require("firebase-admin");
    const sizeOf = require("image-size");
    const url = require("url");
    const https = require("https");
    const sharp = require("sharp");
    const path = require("path");
    const os = require("os");
    const fs = require("fs");

    // Lazy initialization of the Admin SDK
    if (!is_validateImageDimensions_initialized) {
      const serviceAccount = require("./serviceAccountKey.json");
      admin.initializeApp({
        // ...
      });
      is_validateImageDimensions_initialized = true;
    }

    // Create Storage
    const storage = admin.storage();
    // Create Firestore
    const firestore = admin.firestore();

    // Get the image's owner
    const owner = context.auth.token.uid;
    // Get the image's info
    const { id, description, location, tags } = data;

    // Photos bucket
    const bucket = storage.bucket("bucket-name");
    // File path
    const filePath = `photos/${id}`;
    // Get the file
    const file = getFile(filePath);

    // Check if the file is a jpeg image
    const metadata = await file.getMetadata();
    const isJpgImage = metadata[0].contentType === "image/jpeg";

    // Get the file's url
    const fileUrl = await getUrl(file);

    // Get the photo dimensions using the `image-size` library
    getImageFromUrl(fileUrl)
      .then(async (image) => {
        // Check if the image has valid dimensions
        let dimensions = sizeOf(image);
        // Create the associated Firestore document for the valid images
        if (isJpgImage && hasValidDimensions(dimensions)) {
          // Create a thumbnail for the uploaded image
          const thumbnailPath = await generateThumbnail(filePath);
          // Get the thumbnail
          const thumbnail = getFile(thumbnailPath);
          // Get the thumbnail's url
          const thumbnailUrl = await getUrl(thumbnail);
          try {
            await firestore
              .collection("posts")
              .doc(owner)
              .collection("userPosts")
              .add({
                id,
                uri: fileUrl,
                thumbnailUri: thumbnailUrl, // Useful for progress images
                description,
                location,
                tags,
                date: admin.firestore.FieldValue.serverTimestamp(),
                likes: [], // When a post is created, no users have liked it yet
                comments: [], // Also, there aren't any comments yet
                width: dimensions.width,
                height: dimensions.height,
              });
            // TODO: Analytics posts counter
          } catch (err) {
            console.error(
              `Error creating the document in 'posts/{owner}/userPosts/' where 'id === ${id}': ${err}`
            );
          }
        } else {
          // Remove the files that are not jpeg images, or whose dimensions are not valid
          try {
            await file.delete();
            console.log(
              `The image '${id}' has been deleted because it has invalid dimensions.
This may be an attempt to break the security of the app made by the user '${owner}'`
            );
          } catch (err) {
            console.error(`Error deleting invalid file '${id}': ${err}`);
          }
        }
      })
      .catch((e) => {
        console.log(e);
      });

    /* ---------------- AUXILIARY FUNCTIONS ---------------- */

    function getFile(filePath) {
      /* Get a file from the storage bucket */
      return bucket.file(filePath);
    }

    async function getUrl(file) {
      /* Get the public url of a file */
      const signedUrls = await file.getSignedUrl({
        action: "read",
        expires: "01-01-2100",
      });
      // signedUrls[0] contains the file's public URL
      return signedUrls[0];
    }

    function getImageFromUrl(uri) {
      return new Promise((resolve, reject) => {
        const options = url.parse(uri); // Automatically converted to an ordinary options object.
        const request = https.request(options, (response) => {
          if (response.statusCode < 200 || response.statusCode >= 300) {
            return reject(new Error("statusCode=" + response.statusCode));
          }
          let chunks = [];
          response.on("data", (chunk) => {
            chunks.push(chunk);
          });
          response.on("end", () => {
            try {
              chunks = Buffer.concat(chunks);
            } catch (e) {
              reject(e);
            }
            resolve(chunks);
          });
        });
        request.on("error", (e) => {
          reject(e.message);
        });
        // Send the request
        request.end();
      });
    }

    function hasValidDimensions(dimensions) {
      // Posts' valid dimensions
      const validDimensions = [
        {
          width: 1080,
          height: 1080,
        },
        {
          width: 1080,
          height: 1350,
        },
        {
          width: 1080,
          height: 750,
        },
      ];
      return (
        validDimensions.find(
          ({ width, height }) =>
            width === dimensions.width && height === dimensions.height
        ) !== undefined
      );
    }

    async function generateThumbnail(filePath) {
      /* Generate a thumbnail for the progressive images */
      // Download the file from the bucket
      const fileName = filePath.split("/").pop();
      const tempFilePath = path.join(os.tmpdir(), fileName);
      const thumbnailPath = await bucket
        .file(filePath)
        .download({
          destination: tempFilePath,
        })
        .then(() => {
          // Generate a thumbnail using Sharp
          const size = 50;
          const newFileName = `${fileName}_${size}_thumb.jpg`;
          const newFilePath = `thumbnails/${newFileName}`;
          const newFileTemp = path.join(os.tmpdir(), newFileName);
          sharp(tempFilePath)
            .resize(size, null)
            .toFile(newFileTemp, async (_err, info) => {
              // Upload the thumbnail
              await bucket.upload(newFileTemp, {
                destination: newFilePath,
              });
              // Once the thumbnail has been uploaded, delete the temporary file to free up disk space
              fs.unlinkSync(tempFilePath);
            });
          // Return the thumbnail's path
          return newFilePath;
        });
      return thumbnailPath;
    }
  });
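For completeness, a callable function like this is normally invoked from the client with httpsCallable from the Firebase JS SDK. A minimal sketch, assuming the v8 namespaced firebase object (adapt it to the wrapper used in the question); the field names simply mirror the data destructured inside the function:

// Hypothetical client-side call
const validateImageDimensions = firebase
  .functions()
  .httpsCallable("validateImageDimensions");

await validateImageDimensions({
  id,          // the same id used for the Storage path `photos/${id}`
  description,
  location,
  tags,
});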

Related

How to call multiple Airtable bases and fields at the same time with the same script

I'm making a script that will download images from Airtable to a folder. The script reads a specific field where the image URLs are stored and downloads the images. I have managed to download the images from one base; how can I modify my code to download images from multiple bases?
This is what I have so far:
const download = require('image-downloader');
const fs = require('fs');
const path = require('path');
const Airtable = require('airtable');
require('dotenv').config();

const api_key = process.env.AIRTABLE_APIKEY;
const base1_id = process.env.BASEID;
const base2_id = process.env.BASEID2;
const base1_table = process.env.TABLE1;
const base2_table = process.env.TABLE2;
const base1_table_imagefield = process.env.FIELD1;
const base2_table_imagefield = process.env.FIELD2;

var base = new Airtable({ apiKey: api_key }).base(base1_id);
let table_names = [base1_table, base2_table];
let field_names = [base1_table_imagefield, base2_table_imagefield];

// This creates an images folder in the app root directory
fs.mkdir(path.join(__dirname, 'images/'), { recursive: true }, (err) => {
  if (err) {
    return console.error(err);
  }
  console.log('Directory created successfully!');
});

// Get records and download the images to the images folder created above
const table = base(base1_table);
const getRecords = async () => {
  table.select({
    maxRecords: 10,
    view: "Website Export",
  }).eachPage(function page(records, fetchNextPage) {
    // This function (`page`) will get called for each page of records.
    records.forEach((record) => {
      let data = record.get(base1_table_imagefield);
      let lnk = "";
      if (data !== undefined) {
        lnk = data.substring(data.indexOf('(h') + 1, data.length - 1);
      } else {
        return;
      }
      console.log(lnk);
      // Save images to the folder we created
      const newFolder = {
        url: lnk,
        dest: '../../images', // will be saved to /path/to/dest/image.jpg
      };
      download.image(newFolder)
        .then(({ filename }) => {
          console.log('Saved to', filename); // saved to /path/to/dest/image.jpg
        })
        .catch((err) => console.error(err));
    });
    // To fetch the next page of records, call `fetchNextPage`.
    // If there are more records, `page` will get called again.
    // If there are no more records, `done` will get called.
    fetchNextPage();
  }, function done(err) {
    if (err) { console.error(err); return; }
  });
};

getRecords();
This code works well: it reads table1 and downloads the images from the image field into the newly created folder called "images". How can I make this code work for two or more bases and fields?
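One hedged sketch of how this could be generalized (not tested against your bases): describe each base/table/field combination as an entry in an array and run the same download logic for every entry. It assumes both bases expose the "Website Export" view used above:

// Hypothetical list of sources: one entry per base/table/field combination
const sources = [
  { baseId: base1_id, tableName: base1_table, field: base1_table_imagefield },
  { baseId: base2_id, tableName: base2_table, field: base2_table_imagefield },
];

const getRecordsFrom = ({ baseId, tableName, field }) => {
  const base = new Airtable({ apiKey: api_key }).base(baseId);
  base(tableName)
    .select({ maxRecords: 10, view: "Website Export" })
    .eachPage(function page(records, fetchNextPage) {
      records.forEach((record) => {
        const data = record.get(field);
        if (data === undefined) return;
        const lnk = data.substring(data.indexOf('(h') + 1, data.length - 1);
        download
          .image({ url: lnk, dest: '../../images' })
          .then(({ filename }) => console.log('Saved to', filename))
          .catch((err) => console.error(err));
      });
      fetchNextPage();
    }, function done(err) {
      if (err) console.error(err);
    });
};

sources.forEach(getRecordsFrom);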

string to bufferstream not always writing data

I have a cloud function receiving a JSON string on a Pub/Sub topic.
The goal is to extract some data into a new JSON string,
then parse it as JSONL,
and finally stream it to Google Cloud Storage.
I notice that sometimes the files contain data and sometimes they do not.
The Pub/Sub side is working fine and data is coming into this cloud function just fine.
I tried adding some async/awaits where they seemed to fit, but I am afraid it has to do with the buffer stream. Both are topics I have trouble getting my head around.
What could be the issue?
const stream = require('stream');
const { Storage } = require('@google-cloud/storage');

// Initiate the source
const bufferStream = new stream.PassThrough();

// Creates a client
const storage = new Storage();

// Save the stream to a bucket
const toBucket = (message, filename) => {
  // Write your buffer
  bufferStream.end(Buffer.from(message));
  const myBucket = storage.bucket(process.env.BUCKET);
  const file = myBucket.file(filename);
  // Pipe the 'bufferStream' into a 'file.createWriteStream' method.
  bufferStream.pipe(file.createWriteStream({
    validation: 'md5',
  }))
    .on('error', (err) => { console.error(err); })
    .on('finish', () => {
      // The file upload is complete.
      console.log(`${filename} is uploaded`);
    });
};

// Extract the relevant fields
const extract = (entry) => ({
  id: entry.id,
  status: entry.status,
  date_created: entry.date_created,
  discount_total: entry.discount_total,
  discount_tax: entry.discount_tax,
  shipping_total: entry.shipping_total,
  shipping_tax: entry.shipping_tax,
  total: entry.total,
  total_tax: entry.total_tax,
  customer_id: entry.customer_id,
  payment_method: entry.payment_method,
  payment_method_title: entry.payment_method_title,
  transaction_id: entry.transaction_id,
  date_completed: entry.date_completed,
  billing_city: entry.billing.city,
  billing_state: entry.billing.state,
  billing_postcode: entry.billing.postcode,
  coupon_lines_id: entry.coupon_lines.id,
  coupon_lines_code: entry.coupon_lines.code,
  coupon_lines_discount: entry.coupon_lines.discount,
  coupon_lines_discount_tax: entry.coupon_lines.discount_tax,
});

// Format JSON as JSONL
const format = async (message) => {
  let jsonl;
  try {
    // Extract only what is necessary
    const jsonMessage = await JSON.parse(message);
    const rows = await jsonMessage.map((row) => {
      const extractedRow = extract(row);
      return `${JSON.stringify(extractedRow)}\n`;
    });
    // Join all lines into one string with no join symbol
    jsonl = rows.join('');
    console.log(jsonl);
  } catch (e) {
    console.error('jsonl conversion failed');
  }
  return jsonl;
};

exports.jsonToBq = async (event, context) => {
  const message = Buffer.from(event.data, 'base64').toString();
  const { filename } = event.attributes;
  console.log(filename);
  const jsonl = await format(message, filename);
  toBucket(jsonl, filename);
};
It was fixed by moving the bufferStream constant into the toBucket function.
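For clarity, a sketch of what that fix looks like: a fresh PassThrough stream is created per call inside toBucket, so concurrent invocations no longer share (and prematurely end) one module-level stream. Returning a promise is an extra assumption here, so the caller can await the upload before the function instance terminates:

const toBucket = (message, filename) => new Promise((resolve, reject) => {
  // New stream for every call, instead of one shared module-level bufferStream
  const bufferStream = new stream.PassThrough();
  bufferStream.end(Buffer.from(message));
  const file = storage.bucket(process.env.BUCKET).file(filename);
  bufferStream
    .pipe(file.createWriteStream({ validation: 'md5' }))
    .on('error', reject)
    .on('finish', () => {
      console.log(`${filename} is uploaded`);
      resolve();
    });
});

// In the entry point: await toBucket(jsonl, filename);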

Trying to upload an image to Firebase Storage

I'm trying to upload an image from a device to Firebase Storage, but I don't know which format I should use. I've tried with put and putString, but both of them gave me an invalid-argument error.
This is the code to pick and upload the image:
const pickImage = async () => {
  let result = await ImagePicker.launchImageLibraryAsync({
    mediaTypes: ImagePicker.MediaTypeOptions.All,
    allowsEditing: true,
    aspect: [4, 3],
    quality: 1,
  });
  if (!result.cancelled) {
    setImage(result.uri);
  }
};

const uploadImage = async () => {
  if (!image) {
    Alert.alert(
      'You have to choose an image first'
    );
  } else {
    const uri = image;
    console.log(uri);
    const filename = uri.substring(uri.lastIndexOf('/') + 1);
    const uploadUri = Platform.OS === 'ios' ? uri.replace('file://', '') : uri;
    setUploading(true);
    setTransferred(0);
    const task = firebase.storage()
      .ref(filename)
      .put(uploadUri);
    // set progress state
    task.on('state_changed', snapshot => {
      setTransferred(
        Math.round(snapshot.bytesTransferred / snapshot.totalBytes) * 10000
      );
    });
    try {
      await task;
    } catch (e) {
      console.error(e);
    }
    setUploading(false);
    Alert.alert(
      'Photo uploaded!',
      'Your photo has been uploaded to Firebase Cloud Storage!'
    );
    setImage(null);
  }
};
This is the uri of the image (the console.log output):
This is the error using .put(uploadUri):
This is the error using .putString(uploadUri, 'data_url'):
In order to upload an image to Firebase Storage using put, you need to pass a Blob as the parameter instead of a string.
For example, you can do something like this:
import path from 'path'

const uid = 'image-id'
const fileName = uid + path.extname(uri)
const response = await fetch(uri)
const blob = await response.blob()

const uploadImage = firebase
  .storage()
  .ref()
  .put(blob, {
    contentType: `image/${path.extname(uri).split('.').pop()}`
  })

uploadImage.on(
  'state_changed',
  snapshot => {
    // progress
  },
  err => {
    // error
  },
  () => {
    // complete
  }
)

Firebase Cloud Function storage trigger: the first thumbnail URLs are fine, but every following upload reuses the first upload's thumbnail URLs

I am trying to upload an image to Firebase and then produce two thumbnails. I am able to do this with no problems. My current roadblock is that when I write the URLs to the Realtime Database, I always get the same URLs as for the initial upload.
For example:
1st upload: I get my uploaded image with the two proper thumbnails for that image.
2nd upload: I get my uploaded image with the two previous thumbnails (the first image's).
3rd upload: I get my uploaded image with the first image's thumbnails...
...and this continues to reproduce the URLs of the first upload.
In Storage the correct thumbnails are being generated, but the URLs always belong to the first upload.
I don't know whether this is a problem with getSignedUrl() or not; I'm really not sure what's going on here.
Here is my cloud function:
export const generateThumbs = functions.storage
  .object()
  .onFinalize(async object => {
    const bucket = gcs.bucket(object.bucket); // The Storage object.
    // console.log(object);
    console.log(object.name);
    const filePath = object.name; // File path in the bucket.
    const fileName = filePath.split('/').pop();
    const bucketDir = dirname(filePath);

    const workingDir = join(tmpdir(), 'thumbs');
    const tmpFilePath = join(workingDir, 'source.png');

    if (fileName.includes('thumb#') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }

    // 1. Ensure the thumbnail dir exists
    await fs.ensureDir(workingDir);

    // 2. Download the source file
    await bucket.file(filePath).download({
      destination: tmpFilePath
    });

    // 3. Resize the images and define an array of upload promises
    const sizes = [64, 256];
    const uploadPromises = sizes.map(async size => {
      const thumbName = `thumb#${size}_${fileName}`;
      const thumbPath = join(workingDir, thumbName);

      // Resize source image
      await sharp(tmpFilePath)
        .resize(size, size)
        .toFile(thumbPath);

      // Upload to GCS
      return bucket.upload(thumbPath, {
        destination: join(bucketDir, thumbName),
        metadata: {
          contentType: 'image/jpeg'
        }
      }).then((data) => {
        const file = data[0]
        // console.log(data)
        file.getSignedUrl({
          action: 'read',
          expires: '03-17-2100'
        }).then((response) => {
          const url = response[0];
          if (size === 64) {
            // console.log('generated 64');
            return admin.database().ref('profileThumbs').child(fileName).set({ thumb: url });
          } else {
            // console.log('generated 128');
            return admin.database().ref('categories').child(fileName).child('thumb').set(url);
          }
        })
          .catch(function (error) {
            console.error(error);
            return;
          });
      })
    });

    // 4. Run the upload operations
    await Promise.all(uploadPromises);

    // 5. Cleanup: remove tmp/thumbs from the filesystem
    return fs.remove(workingDir);
  })
I cleaned up my code and solved my problem. Here is how I generated the URLs and wrote them to the proper database paths, by reading the user's UID and the post ID from the file path:
export const generateThumbs = functions.storage
  .object()
  .onFinalize(async object => {
    const fileBucket = object.bucket; // The Storage bucket that contains the file.
    const filePath = object.name; // File path in the bucket.
    const fileName = filePath.split('/').pop();
    const userUid = filePath.split('/')[2];
    const sizes = [64, 256];
    const bucketDir = dirname(filePath);
    console.log(userUid);

    if (fileName.includes('thumb#') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }

    const bucket = gcs.bucket(fileBucket);
    const tempFilePath = path.join(tmpdir(), fileName);

    return bucket.file(filePath).download({
      destination: tempFilePath
    }).then(() => {
      sizes.map(size => {
        const newFileName = `thumb#${size}_${fileName}.png`
        const newFileTemp = path.join(tmpdir(), newFileName);
        const newFilePath = `thumbs/${newFileName}`
        return sharp(tempFilePath)
          .resize(size, size)
          .toFile(newFileTemp, () => {
            return bucket.upload(newFileTemp, {
              destination: join(bucketDir, newFilePath),
              metadata: {
                contentType: 'image/jpeg'
              }
            }).then((data) => {
              const file = data[0]
              console.log(data)
              file.getSignedUrl({
                action: 'read',
                expires: '03-17-2100'
              }, function (err, url) {
                console.log(url);
                if (err) {
                  console.error(err);
                  return;
                }
                if (size === 64) {
                  return admin.database().ref('profileThumbs').child(userUid).child(fileName).set({ thumb: url });
                } else {
                  return admin.database().ref('categories').child(fileName).child('thumb').set(url);
                }
              })
            })
          })
      })
    }).catch(error => {
      console.log(error);
    });
  })

Relation between storage and database - Firebase

I'm facing yet another issue.
I'm using the Firebase database to store text and Firebase Storage to store files, and here is my issue.
Q: How do I fetch the correct image from Storage when fetching a particular element from the database?
Here's my attempt:
const storageRef = firebase.storage().ref('companyImages/companyImage' + 123);
// ^^^^^^^^^^^^ I don't have access to the id yet :(
const task = storageRef.put(companyImage);
task.on('state_changed', (snap) => {
  const percentage = (snap.bytesTransferred / snap.totalBytes) * 100;
  // ^^^^^^^^^^^^ not sure if I even need this
}, (err) => {
  console.log(err);
}, () => {
  firebase.database().ref('offers').push(values);
  // ^^^^^^^^^^^^^^^ now I could retrieve the id from it with .key, but it's too late
});
As you can see, first I upload the image, and when that succeeds I start uploading the data to the database.
Still, it doesn't work as it is supposed to. When uploading the image I have to name it with the correct id so that I can retrieve it easily later, in the components.
It may look a little complex, but I will appreciate any kind of help, any suggestion or hint.
Should I first upload the data to the DB and then the image to Storage?
You can generate the push ID before you upload the file. You can also just save the download URL of the returned snapshot at task.snapshot.downloadURL, so you don't have to retrieve the file from Storage using the storage ref.
const offerRef = firebase.database().ref('offers').push();
const storageRef = firebase.storage().ref(`companyImages/${offerRef.key}`);
const task = storageRef.put(companyImage);
task.on('state_changed', (snap) => {
  const percentage = (snap.bytesTransferred / snap.totalBytes) * 100;
}, (error) => {
  console.log(error);
}, () => {
  offerRef.set(values);
});
I would suggest using .getDownloadURL(). Push all of your uploaded files' download URLs into an object or array and store that in your database. Later you can read this object or array from, let's say, user/ProfilePhotos, and in your app-level code you can use each download URL directly as the uri of an image tag.
In this example (React Native) I upload multiple photos, save the download URL of each one in an array, then set the array in Firebase under the user's account.
export const userVehiclePhotoUploadRequest = (photos, user, year) => dispatch => {
  console.log('Inside vehiclePhotoUpload Actions', photos, user)
  let referenceToUploadedPhotos = [];
  return new Promise((resolve, reject) => {
    photos.map(ele => {
      let mime = 'application/octet-stream'
      let uri = ele.uri
      let uploadUri = Platform.OS === 'ios' ? uri.replace('file://', '') : uri
      let sessionId = new Date().getTime()
      let uploadBlob = null
      let imageRef = firebase.storage().ref('vehicleImages/' + `${user.account.uid}`).child(`${sessionId}`)

      fs.readFile(uploadUri, 'base64')
        .then((data) => {
          return Blob.build(data, { type: `${mime};BASE64` })
        })
        .then((blob) => {
          uploadBlob = blob
          return imageRef.put(blob, { contentType: mime })
        })
        .then(() => {
          uploadBlob.close()
          return imageRef.getDownloadURL()
        })
        .then((url) => {
          referenceToUploadedPhotos.push(url)
          console.log('ARRAY OF URLS WHILE PUSHING', referenceToUploadedPhotos)
          resolve(url)
        })
        .catch((error) => {
          reject(error)
        })
    })
  })
    .then(() => {
      // I did this to not go home until photos are done uploading.
      let vehicles;
      firebase.database().ref('users/' + user.account.uid + `/allVehicles/allVehiclesArray`).limitToFirst(1).once('value').then(function (snapshot) {
        // ******** This method is straight from their docs ********
        // ******** It returns whatever is found at the path xxxxx/users/user.uid ********
        vehicles = snapshot.val();
      }).then(() => {
        console.log('ARRAY OF URLS BEFORE SETTING', referenceToUploadedPhotos)
        // let lastVehicle = vehicles.length - 1;
        firebase.database().ref('users/' + user.account.uid + `/allVehicles/allVehiclesArray/` + `${Object.keys(vehicles)[0]}` + `/photosReference`).set({
          referenceToUploadedPhotos
        }).then(() => {
          dispatch(loginRequest(user.account))
        })
      })
    })
};
And then in your code, let's say inside a map over the user's information:
{ ele.photosReference !== undefined ? dynamicAvatar = { uri: `${ele.photosReference.referenceToUploadedPhotos[0]}` } : undefined }
