Upload Image to Cloud Storage from Cloud Function Trigger - javascript

I'm looking for some help with a Cloud Function that is triggered by a Cloud Storage upload. It checks whether the uploaded file is a video and, if so, runs the video through ffmpeg to extract a single frame to be used later as a poster image.
Everything seems to work except the upload of the image back to Cloud Storage. At that point the Cloud Function doesn't produce any errors at all, so I have no clue why the image upload isn't happening. I would greatly appreciate it if anyone with experience could review my Cloud Function below and provide some insight into why it's not working. Thank you! ^_^
Note: Screenshot of Cloud Function Log is provided below the code snippet
const admin = require('firebase-admin'); // Firebase Admin SDK
const functions = require('firebase-functions'); // Firebase Cloud Functions
const gcs = require('@google-cloud/storage')(); // Cloud Storage Node.js Client
const path = require('path'); // Node.js file and directory utility
const os = require('os'); // Node.js operating system-related utility
const fs = require('fs'); // Node.js file system API
const ffmpeg = require('fluent-ffmpeg');
const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path;
const ffprobePath = require('@ffprobe-installer/ffprobe').path;

// Initialize Firebase Admin
admin.initializeApp(functions.config().firebase);

// Listen for changes in Cloud Storage bucket
exports.storageFunction = functions.storage.object()
  .onChange((event) => {
    const file = event.data; // The Storage object.
    const fileBucket = file.bucket; // The Storage bucket that contains the file.
    const filePath = file.name; // File path in the bucket.
    const fileName = path.basename(filePath); // Get the file name.
    const fileType = file.contentType; // File content type.

    if (!fileType.startsWith('video/')) {
      return;
    }

    const bucket = gcs.bucket(fileBucket);
    const tempFilePath = path.join(os.tmpdir(), fileName);
    const tempFolderPath = os.tmpdir();

    // Download video to temp directory
    return bucket.file(filePath).download({
      destination: tempFilePath
    }).then(() => {
      console.log('Video downloaded locally to', tempFilePath);
      // Generate screenshot from video
      ffmpeg(tempFilePath)
        .setFfmpegPath(ffmpegPath)
        .setFfprobePath(ffprobePath)
        .on('filenames', (filenames) => {
          console.log(`Will generate ${filenames}`);
        })
        .on('error', (err) => {
          console.log(`An error occurred: ${err.message}`);
        })
        .on('end', () => {
          console.log(`Output image created at ${tempFilePath}`);
          const targetTempFileName = `${fileName}.png`;
          const targetFilePath = path.join(path.dirname(filePath), targetTempFileName);
          console.log(targetTempFileName);
          console.log(targetFilePath);
          // Uploading the image.
          return bucket.upload(tempFilePath, { destination: targetFilePath })
            .then(() => {
              console.log('Output image uploaded to', filePath);
            })
            .catch((err) => {
              console.log(err.message);
            });
        })
        .screenshots({
          count: 1,
          folder: tempFolderPath
        });
    });
  });
Cloud Function Log

It looks like you're trying to return a promise from the ffmpeg callback API:
.on('end', () => {
  return bucket.upload(tempFilePath, { destination: targetFilePath })
    .then(...)
})
I don't know the ffmpeg API, but I'm almost certain that will not cause the function to wait for the upload to complete. Instead, you need to return a promise directly from your function that resolves only after all the async work is complete.
If the last item of work is inside a callback, and you need to wait for that, you can wrap the entire thing into a new promise and manually resolve it at the right time. In pseudocode:
return new Promise((resolve, reject) => {
  // ffmpeg stuff here...
  .on('end', () => {
    // the last bit of work here...
    bucket.upload(...)
      .then(() => { resolve() })
  })
})
Notice how the resolve method provided by the new promise is being called to indicate when that promise should itself resolve.
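For reference, here is roughly what the whole function could look like with that wrapping in place. This is only a sketch: it keeps the question's legacy onChange API, variable names, and its choice of tempFilePath as the upload source, and only changes the promise structure, so treat it as untested.

const admin = require('firebase-admin');
const functions = require('firebase-functions');
const gcs = require('@google-cloud/storage')();
const path = require('path');
const os = require('os');
const ffmpeg = require('fluent-ffmpeg');
const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path;
const ffprobePath = require('@ffprobe-installer/ffprobe').path;

admin.initializeApp(functions.config().firebase);

exports.storageFunction = functions.storage.object()
  .onChange((event) => {
    const file = event.data;
    const filePath = file.name;
    const fileName = path.basename(filePath);

    if (!file.contentType.startsWith('video/')) {
      return null;
    }

    const bucket = gcs.bucket(file.bucket);
    const tempFilePath = path.join(os.tmpdir(), fileName);
    const tempFolderPath = os.tmpdir();

    // The function returns one promise chain that only resolves after the
    // download, the screenshot generation, and the upload have all finished.
    return bucket.file(filePath).download({ destination: tempFilePath })
      .then(() => new Promise((resolve, reject) => {
        ffmpeg(tempFilePath)
          .setFfmpegPath(ffmpegPath)
          .setFfprobePath(ffprobePath)
          .on('error', reject)
          .on('end', () => {
            const targetFilePath = path.join(path.dirname(filePath), `${fileName}.png`);
            // Resolve the wrapping promise only once the upload completes.
            bucket.upload(tempFilePath, { destination: targetFilePath })
              .then(() => resolve())
              .catch(reject);
          })
          .screenshots({ count: 1, folder: tempFolderPath });
      }));
  });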

Related

Firebase Storage upload always times out ("storage/retry-limit-exceeded" error)

I'm writing a script using the Firebase JavaScript (client, not admin) SDK to upload a large quantity of images (~1.5 MB each) from an external download link to Firebase Storage, and I cannot successfully upload a single image.
Here's the piece of code that I'm testing out with a single image URL:
import { initializeApp } from 'firebase/app';
import { getStorage, ref, uploadBytes } from 'firebase/storage';

const firebaseConfig = {
  // firebase config details
};

const app = initializeApp(firebaseConfig);
const storage = getStorage(app);
storage.maxOperationRetryTime = 120000;
storage.maxUploadRetryTime = 120000;

const addImageToFirebase = async (url) => {
  const download = await fetch(url);
  const blob = await download.blob();
  const path = "test-folder/test-file.png";
  const imageRef = ref(storage, path);
  try {
    await uploadBytes(imageRef, blob);
    console.log('success!');
  } catch (error) {
    console.log(error);
  }
}

const imageUrl = // download link for test url ;
addImageToFirebase(imageUrl);
When running the code, I get the following error in the terminal, no matter how high I set maxOperationRetryTime or maxUploadRetryTime:
[StorageError [FirebaseError]: Firebase Storage: Max retry time for operation exceeded, please try again. (storage/retry-limit-exceeded)] {
  code: 'storage/retry-limit-exceeded',
  customData: { serverResponse: null },
  _baseMessage: 'Firebase Storage: Max retry time for operation exceeded, please try again. (storage/retry-limit-exceeded)'
}
Does anyone know why the upload would fail to go through?

Uploading an mp3 to Firebase Storage with React Native Expo

I am attempting to upload an mp3 to Firebase Storage using Expo and React Native. So far I've gotten the file into Firebase Storage, but it's only 9 bytes, so I'm doing something wrong. I've attempted this with a Blob as shown below, with no success.
Here is a screenshot of the firebase storage folder showing the file uploaded but not the data of said file:
Any help is greatly appreciated, I feel like I'm missing a step to actually upload the data along with the file.
export default function SongPicker() {
  const [song, setSong] = useState(null);

  // Get current user through authentication
  const user = auth.currentUser;

  const pickDocument = async () => {
    let result = await DocumentPicker.getDocumentAsync({});

    // Fetch the photo with its local URI
    const response = fetch(result.uri);
    alert(result.uri);
    console.log(result);

    const file = new Blob(
      [response.value], {
        type: 'audio/mpeg'
      });

    console.log('do we see this?');

    try {
      // Create the file reference
      const storage = getStorage();
      const storageRef = ref(storage, `songs/${user.uid}/${result.name}`);

      // Upload Blob file to Firebase
      const snapshot = uploadBytes(storageRef, file, 'blob').then((snapshot) => {
        console.log('Uploaded a song to firebase storage!');
      });

      setSong(result.uri);
    } catch (error) {
      console.log(error);
    }
  }
fetch() returns a Promise, so you should add an await for that as well:
const response = await fetch(result.uri);
Then use the blob() method on the Response:
const file = await response.blob();
The third parameter of uploadBytes should be an upload metadata object, but you can skip it here:
const snapshot = await uploadBytes(storageRef, file);
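Putting those three changes together, the upload portion of pickDocument might look roughly like this (a sketch that assumes the same DocumentPicker, auth, and Firebase Storage setup as the question; not tested):

const pickDocument = async () => {
  const result = await DocumentPicker.getDocumentAsync({});

  // fetch() returns a Promise, so await the Response...
  const response = await fetch(result.uri);
  // ...and use blob() to get the actual file contents.
  const file = await response.blob();

  const storage = getStorage();
  const storageRef = ref(storage, `songs/${user.uid}/${result.name}`);

  // No third argument needed; pass a metadata object only if you have one.
  await uploadBytes(storageRef, file);
  console.log('Uploaded a song to firebase storage!');

  setSong(result.uri);
};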

How to upload a file into Firebase Storage from a callable https cloud function

I have been trying to upload a file to Firebase Storage using a callable Firebase Cloud Function.
All I am doing is fetching an image from a URL using axios and trying to upload it to Storage.
The problem I am facing is that I don't know how to save the axios response and upload it to Storage:
first, how to save the received file in the temp directory that os.tmpdir() provides, and then how to upload it into Storage.
Here I am receiving the data as an arraybuffer, converting it to a Blob, and trying to upload that.
Here is my code. I think I am missing a major part.
If there is a better way, please recommend it. I've been looking through a lot of documentation and ended up with no clear solution. Please guide me. Thanks in advance.
const bucket = admin.storage().bucket();
const path = require('path');
const os = require('os');
const fs = require('fs');

module.exports = functions.https.onCall((data, context) => {
  try {
    return new Promise((resolve, reject) => {
      const {
        imageFiles,
        companyPIN,
        projectId
      } = data;

      const filename = imageFiles[0].replace(/^.*[\\\/]/, '');
      const filePath = `ProjectPlans/${companyPIN}/${projectId}/images/${filename}`; // Path I am trying to upload to in Firebase Storage
      const tempFilePath = path.join(os.tmpdir(), filename);
      const metadata = {
        contentType: 'application/image'
      };

      axios
        .get(imageFiles[0], { // URL for the image
          responseType: 'arraybuffer',
          headers: {
            accept: 'application/image'
          }
        })
        .then(response => {
          console.log(response);
          const blobObj = new Blob([response.data], {
            type: 'application/image'
          });
          return blobObj;
        })
        .then(async blobObj => {
          return bucket.upload(blobObj, {
            destination: tempFilePath // Here I am wrong... how to set the path of the downloaded blob file?
          });
        }).then(buffer => {
          resolve({ result: 'success' });
        })
        .catch(ex => {
          console.error(ex);
        });
    });
  } catch (error) {
    // unknown: 500 Internal Server Error
    throw new functions.https.HttpsError('unknown', 'Unknown error occurred. Contact the administrator.');
  }
});
I'd take a slightly different approach and avoid using the local filesystem at all. It's just tmpfs, so it costs you memory that your function is already using to hold the buffer/blob; it's simpler to skip it and write directly from that buffer to GCS using the save method on the GCS file object.
Here's an example. I've simplified out a lot of your setup, and I am using an HTTP function instead of a callable. Likewise, I'm using a public Stack Overflow image and not your original URLs. In any case, you should be able to use this template and modify it back to what you need (e.g. change the prototype, and remove the HTTP response and replace it with the return value you need):
const functions = require('firebase-functions');
const axios = require('axios');
const admin = require('firebase-admin');
admin.initializeApp();

exports.doIt = functions.https.onRequest((request, response) => {
  const bucket = admin.storage().bucket();
  const IMAGE_URL = 'https://cdn.sstatic.net/Sites/stackoverflow/company/img/logos/so/so-logo.svg';
  const MIME_TYPE = 'image/svg+xml';

  return axios.get(IMAGE_URL, { // URL for the image
    responseType: 'arraybuffer',
    headers: {
      accept: MIME_TYPE
    }
  }).then(response => {
    console.log(response); // only to show we got the data, for debugging
    const destinationFile = bucket.file('my-stackoverflow-logo.svg');
    return destinationFile.save(response.data).then(() => { // note: defaults to resumable upload
      return destinationFile.setMetadata({ contentType: MIME_TYPE });
    });
  }).then(() => { response.send('ok'); })
    .catch((err) => { console.log(err); });
});
As a commenter noted, in the above example the axios request itself makes an external network access, and you will need to be on the Blaze or Flame plan for that. However, that alone doesn't appear to be your current problem.
Likewise, this also defaults to using a resumable upload, which the documentation does not recommend when you are doing large numbers of small (<10 MB) files, as there is some overhead.
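If that overhead matters for your case, the resumable behaviour can be switched off through the options argument of save(). A small sketch based on the code above:

// Skip the resumable-upload protocol, which is unnecessary for small files.
return destinationFile.save(response.data, { resumable: false }).then(() => {
  return destinationFile.setMetadata({ contentType: MIME_TYPE });
});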
You asked how this might be used to download multiple files. Here is one approach. First, let's assume you have a function that returns a promise that downloads a single file given its filename (I've abridged this from the above, but it's basically identical except for the change of IMAGE_URL to filename; note that it does not return a final result such as response.send(), and there's an implicit assumption that all the files are the same MIME_TYPE):
function downloadOneFile(filename) {
  const bucket = admin.storage().bucket();
  const MIME_TYPE = 'image/svg+xml';
  return axios.get(filename, ...)
    .then(response => {
      const destinationFile = ...
    });
}
Then you just need to iteratively build a promise chain from the list of files. Let's say they are in imageUrls. Once built, return the entire chain:
let finalPromise = Promise.resolve();
imageUrls.forEach((item) => {
  finalPromise = finalPromise.then(() => downloadOneFile(item));
});
// if needed, add a final .then() section for the actual function result
return finalPromise.catch((err) => { console.log(err) });
Note that you could also build an array of the promises and pass them to Promise.all() -- that would likely be faster as you would get some parallelism, but I wouldn't recommend that unless you are very sure all of the data will fit inside the memory of your function at once. Even with this approach, you need to make sure the downloads can all complete within your function's timeout.
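For completeness, a Promise.all() version of the same idea could look like this sketch, reusing the downloadOneFile() helper above and subject to the memory and timeout caveats just mentioned:

// Start every download in parallel and wait for all of them to finish.
return Promise.all(imageUrls.map((item) => downloadOneFile(item)))
  .catch((err) => { console.log(err); });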

How to write firebase function for data change that writes a file to cloud storage bucket in one chained idiomatic promise?

I'm trying to create a bucket and upload a file to the bucket. Originally I had code like this:
storage
  .createBucket(bucketName)
  .upload(tempFilePath)
But this gave me the error:
TypeError: storage.createBucket(...).upload is not a function
I assume this means I don't understand the API to storage. I would have assumed the createBucket call returns the "bucket" object which I could then use to upload to it. Am I misusing promises or misunderstanding the storage NodeJS API?
Can anyone help me understand how to write a function that has these properties:
If the bucket is not created, create it.
If the bucket already exists, don't fail because it already exists, just skip the createBucket call.
Set the bucket (if needed, looks like this is needed before calling upload in other sample code).
Uploads the file.
Does not have nested promises.
Complete code is here:
const functions = require('firebase-functions');
const path = require('path');
const os = require('os');
const fs = require('fs');

exports.writeChunk = functions.firestore
  .document('data/{userId}/data/{eventId}/chunks/chunk')
  .onUpdate((change, context) => {
    const projectId = 'chunk-abcdef';
    const Storage = require('@google-cloud/storage');
    const storage = new Storage({
      projectId,
    });

    const data = change.after.data();
    // data = { "chunk" : "abcdef" }
    if (data && data.chunk) {
      let bucketName = context.params.eventId;
      let fileName = `${context.params.eventId}-${context.params.userId}.txt`;
      const tempFilePath = path.join(os.tmpdir(), fileName);

      console.log(`Writing out to ${tempFilePath}`);
      fs.writeFileSync(tempFilePath, data.chunk);

      storage
        .createBucket(bucketName)
        .then(() =>
          storage
            .bucket(bucketName)
            .upload(tempFilePath)
            .then(() => fs.unlinkSync(tempFilePath))
            .catch(err => console.error('ERROR inside upload', err))
        )
        .catch(err => console.error('ERROR inside createBucket', err));
    } else {
      console.log("Nope!", data);
    }
  });
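One way to get the "create the bucket only if it is missing, then upload the file" behaviour without nested promises is to check for the bucket first and keep a single flat chain. A minimal sketch using the standard exists(), createBucket(), and upload() methods from @google-cloud/storage (uploadToBucket is just an illustrative name, fs is assumed to be required as in the code above, and this is untested):

function uploadToBucket(storage, bucketName, tempFilePath) {
  const bucket = storage.bucket(bucketName);
  // exists() resolves to a [boolean]; only create the bucket when needed.
  return bucket.exists()
    .then(([exists]) => (exists ? null : storage.createBucket(bucketName)))
    .then(() => bucket.upload(tempFilePath))
    .then(() => fs.unlinkSync(tempFilePath));
}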

ECONNRESET socket hang up

I have a function that triggers on a Firebase database onWrite. The function body uses two Google Cloud APIs (DNS and Storage).
While the function is running and working as expected (mostly), the issue is that the socket hangs up more often than I'd like (roughly 50% of the time).
My questions are:
Is this similar to what other testers have experienced? Is it a well-known outstanding issue, or expected behavior?
the example code is as follows:
const functions = require('firebase-functions');
const admin = require('firebase-admin');

const {credentials} = functions.config().auth;
credentials.private_key = credentials.private_key.replace(/\\n/g, '\n');
const config = Object.assign({}, functions.config().firebase, {credentials});
admin.initializeApp(config);

const gcs = require('@google-cloud/storage')({credentials});
const dns = require('@google-cloud/dns')({credentials});
const zoneName = 'applambda';
const zone = dns.zone(zoneName);

exports.createDeleteDNSAndStorage = functions.database.ref('/apps/{uid}/{appid}/name')
  .onWrite(event => {
    // Only edit data when it is first created.
    const {uid, appid} = event.params;
    const name = event.data.val();
    const dbRef = admin.database().ref(`/apps/${uid}/${appid}`);

    if (event.data.previous.exists()) {
      console.log(`already exists ${uid}/${appid}`);
      return;
    }

    // Exit when the data is deleted.
    if (!event.data.exists()) {
      console.log(`data is being deleted ${uid}/${appid}`);
      return;
    }

    const url = `${name}.${zoneName}.com`;
    console.log(`data: ${uid}/${appid}/${name}\nsetting up: ${url}`);
    setupDNS({url, dbRef});
    setupStorage({url, dbRef});
    return;
  });

function setupDNS({url, dbRef}) {
  // Create an NS record.
  let cnameRecord = zone.record('cname', {
    name: `${url}.`,
    data: 'c.storage.googleapis.com.',
    ttl: 3000
  });
  zone.addRecords(cnameRecord).then(function() {
    console.log(`done setting up zonerecord for ${url}`);
    dbRef.update({dns: url}).then(res => console.log(res)).catch(err => console.log(err));
  }).catch(function(err) {
    console.error(`error setting up zonerecord for ${url}`);
    console.error(err);
  });
}

function setupStorage({url, dbRef}) {
  console.log(`setting up storage bucket for ${url}`);
  gcs.createBucket(url, {
    website: {
      mainPageSuffix: `https://${url}`,
      notFoundPage: `https://${url}/404.html`
    }
  }).then(function(res) {
    let bucket = res[0];
    console.log(`created bucket ${url}, setting it as public`);
    dbRef.update({storage: url}).then(function() {
      console.log(`done setting up bucket for ${url}`);
    }).catch(function(err) {
      console.error(`db update for storage failed ${url}`);
      console.error(err);
    });
    bucket.makePublic().then(function() {
      console.log(`bucket set as public for ${url}`);
    }).catch(function(err) {
      console.error(`setting public for storage failed ${url}`);
      console.error(err);
    });
  }).catch(function(err) {
    console.error(`creating bucket failed ${url}`);
    console.error(err);
  });
}
I'm thinking your function needs to return a promise so that all the other async work has time to complete before the function shuts down. As it's shown now, your functions simply returns immediately without waiting for the work to complete.
I don't know the cloud APIs you're using very well, but I'd guess that you should make your setupDNS() and setupStorage() return the promises from the async work that they're doing, then return Promise.all() with those two promises, so that Cloud Functions knows it should wait until all that work is complete before cleaning up the container that's running the function.
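A rough sketch of that shape, reusing the question's zone, gcs, and admin setup and keeping its function names (untested, and abridged where noted in the comments):

// Each helper returns the promise chain for its own async work.
function setupDNS({url, dbRef}) {
  const cnameRecord = zone.record('cname', {
    name: `${url}.`,
    data: 'c.storage.googleapis.com.',
    ttl: 3000
  });
  return zone.addRecords(cnameRecord)
    .then(() => dbRef.update({dns: url}));
}

function setupStorage({url, dbRef}) {
  return gcs.createBucket(url, { /* website options as in the question */ })
    .then(([bucket]) => bucket.makePublic())
    .then(() => dbRef.update({storage: url}));
}

// The trigger returns Promise.all() so Cloud Functions waits for both helpers.
exports.createDeleteDNSAndStorage = functions.database.ref('/apps/{uid}/{appid}/name')
  .onWrite(event => {
    const {uid, appid} = event.params;
    const name = event.data.val();
    const dbRef = admin.database().ref(`/apps/${uid}/${appid}`);
    // (keep the same already-exists / deletion checks as in the question)
    const url = `${name}.${zoneName}.com`;
    return Promise.all([
      setupDNS({url, dbRef}),
      setupStorage({url, dbRef})
    ]);
  });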
