I am having a slightly odd issue, and due to the lack of errors, I am not exactly sure what I am doing wrong. What I am trying to do is, on an onCreate event, make an API call and then update a field on the database if the field is not set to null. Based on my Cloud Functions console logs, I can see the API call returning an OK and everything appearing to work properly, but the field only updates after about 2-5 minutes. A few times, it didn't update even after 15 minutes. What is causing such a slow update?
I have ruled out the gaxios call as the bottleneck based on the function logs and local testing.
Some context: I am on the Firebase Blaze plan to allow for egress, and my dataset isn't really big. I am using gaxios because it is already installed as part of the firebase-functions npm package.
The code is:
const functions = require('firebase-functions');
const { request } = require('gaxios');
const { parse } = require('url');
exports.getGithubReadme = functions.firestore.document('readmes/{name}').onCreate((snapshot, context) => {
const toolName = context.params.name;
console.log(toolName);
const { name, description, site } = snapshot.data();
console.log(name, description, site);
const parsedUrl = parse(site);
console.log(parsedUrl);
if (description) return;
if (parsedUrl.hostname === 'github.com') {
let githubUrl = `https://api.github.com/repos${parsedUrl.path}/readme`;
request({
method : 'GET',
url : githubUrl
})
.then((res) => {
let { content } = res.data;
return snapshot.ref.update({ description: content });
})
.catch((error) => {
console.log(error);
return null;
});
}
return null;
});
When you execute an asynchronous operation (i.e. request() in your case) in a background-triggered Cloud Function, you must return a promise, so that the Cloud Function waits for this promise to resolve before terminating.
This is very well explained in the official Firebase video series here (Learning Cloud Functions for Firebase (video series)). In particular, watch the three videos titled "Learn JavaScript Promises" (Parts 2 & 3 especially focus on background-triggered Cloud Functions, but it is really worth watching Part 1 first).
So you should adapt your code as follows, returning the promise returned by request():
const functions = require('firebase-functions');
const { request } = require('gaxios');
const { parse } = require('url');
exports.getGithubReadme = functions.firestore.document('readmes/{name}').onCreate((snapshot, context) => {
const toolName = context.params.name;
console.log(toolName);
const { name, description, site } = snapshot.data();
console.log(name, description, site);
const parsedUrl = parse(site);
console.log(parsedUrl);
if (description) return null;
if (parsedUrl.hostname === 'github.com') {
let githubUrl = `https://api.github.com/repos${parsedUrl.path}/readme`;
return request({
method: 'GET',
url: githubUrl
})
.then((res) => {
let { content } = res.data;
return snapshot.ref.update({ description: content });
})
.catch((error) => {
console.log(error);
return null;
});
} else {
return null;
}
});
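For reference only, the same fix can also be expressed with async/await. This is just a sketch built on the code above (same imports, same gaxios request call); the key point is still that the promise covering all of the asynchronous work is what the function returns:
exports.getGithubReadme = functions.firestore.document('readmes/{name}').onCreate(async (snapshot, context) => {
  const { description, site } = snapshot.data();
  if (description) return null;
  const parsedUrl = parse(site);
  if (parsedUrl.hostname !== 'github.com') return null;
  try {
    // Awaiting keeps the function alive until both the HTTP call and the update finish
    const res = await request({
      method: 'GET',
      url: `https://api.github.com/repos${parsedUrl.path}/readme`
    });
    return snapshot.ref.update({ description: res.data.content });
  } catch (error) {
    console.log(error);
    return null;
  }
});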
I am trying to fetch data for 20 random users from 'https://randomuser.me/api', and in some cases I get a 503 Service Unavailable error.
I tried to resolve the problem with this kind of code.
async function fetchDriver() {
const driver = await fetch(driverUrl)
.then((response) => {
if (response.status === 200) {
return response.json();
}
if (response.status === 503) {
return fetchDriver();
}
})
.then((data) => console.log(data))
.catch((error) => {
console.log(error);
fetchDriver();
});
return driver;
}
do {
const geoPosition = fetchGeoPosition();
const phoneNumber = fetchPhoneNumber();
const licenseNumber = fetchLicense(3);
const speed = fetchSpeed(60, 200);
const driver = fetchDriver();
const first = driver.name.first;
const last = driver.name.last;
fetchedCars.push({
licenseNumber,
first,
last,
phoneNumber,
geoPosition,
speed,
favorite: false,
more: false,
});
} while (fetchedCars.length < 20);
But console.log(data) in some cases still shows undefined, and I just don't know what to do about it.
The Random User API has a rate limiter. Use this URL to fetch multiple users in one request (for example, 20 users):
https://randomuser.me/api/?results=20
Check the documentation here.
This GitHub thread mentions the rate limiting and the solution I've included.
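For illustration, here is a minimal sketch of loading all 20 drivers in a single call; the fetchDrivers helper name is made up, and the fields pushed into fetchedCars follow the question's code:
// Hypothetical helper: one request for all users avoids hitting the rate limiter
async function fetchDrivers(count) {
  const response = await fetch(`https://randomuser.me/api/?results=${count}`);
  if (!response.ok) {
    throw new Error(`randomuser.me responded with ${response.status}`);
  }
  const { results } = await response.json();
  return results;
}

fetchDrivers(20).then((drivers) => {
  drivers.forEach((driver) => {
    fetchedCars.push({
      first: driver.name.first,
      last: driver.name.last,
      favorite: false,
      more: false,
    });
  });
});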
I am new to Cloud Functions and am trying to understand my error here from the log. It says "Cannot read property 'uid' of undefined". I am trying to match users together: onCreate will call a matching function to check whether a user exists under liveChannels and, if so, will set the channel value under both users in liveUsers to uid+uid2. Does the log also say which line the error comes from? I am confused about where it shows that.
const functions = require('firebase-functions');
//every time user added to liveLooking node
exports.command = functions.database
.ref('/liveLooking/{uid}')
.onCreate(event => {
const uid = event.params.uid
console.log(`${uid} this is the uid`)
const root = event.data.adminRef.root
//match with another user
let pr_cmd = match(root, uid)
const pr_remove = event.data.adminRef.remove()
return Promise.all([pr_cmd, pr_remove])
})
function match(root, uid) {
let m1uid, m2uid
return root.child('liveChannels').transaction((data) => {
//if no existing channels then add user to liveChannels
if (data === null) {
console.log(`${uid} waiting for match`)
return { uid: uid }
}
else {
m1uid = data.uid
m2uid = uid
if (m1uid === m2uid) {
console.log(`${m1uid} tried to match with self!`)
return
}
//match user with liveChannel user
else {
console.log(`matched ${m1uid} with ${m2uid}`)
return {}
}
}
},
(error, committed, snapshot) => {
if (error) {
throw error
}
else {
return {
committed: committed,
snapshot: snapshot
}
}
},
false)
.then(result => {
// Add channels for each user matched
const channel_id = m1uid+m2uid
console.log(`starting channel ${channel_id} with m1uid: ${m1uid}, m2uid: ${m2uid}`)
const m_state1 = root.child(`liveUsers/${m1uid}`).set({
channel: channel_id
})
const m_state2 = root.child(`liveUsers/${m2uid}`).set({
channel: channel_id
})
return Promise.all([m_state1, m_state2])
})
}
You are referring to a very old version of the Cloud Functions API. Whatever site or tutorial you might be looking at, it's showing examples that are no longer relevant.
In modern Cloud Functions for Firebase, Realtime Database onCreate triggers receive two parameters: a DataSnapshot and a Context. They no longer receive an "event" as the only parameter. You're going to have to port the code you're using now to the new way of doing things. I strongly suggest reviewing the product documentation for modern examples.
If you want to get the wildcard parameters, as you are trying with the code const uid = event.params.uid, you will have to use the second context parameter as illustrated in the docs. To access the written data, use the first snapshot parameter.
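As a rough sketch of what the ported entry point could look like with the (snapshot, context) signature (the match() function from the question is reused unchanged; snapshot.ref.root takes the place of event.data.adminRef.root):
const functions = require('firebase-functions');

exports.command = functions.database
  .ref('/liveLooking/{uid}')
  .onCreate((snapshot, context) => {
    // The wildcard value now comes from the second (context) parameter
    const uid = context.params.uid;
    console.log(`${uid} this is the uid`);
    // The snapshot's ref points at the created node; its root gives database-wide access
    const root = snapshot.ref.root;
    const pr_cmd = match(root, uid);
    const pr_remove = snapshot.ref.remove();
    return Promise.all([pr_cmd, pr_remove]);
  });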
I have a script in React.js that gets data (numbers) from an API and adds these numbers to numbers from a Firebase collection when the user opens the page, and the user can see these numbers.
There are going to be many users in the app, and every user is going to get different numbers from the same script.
I was wondering if it's possible with Firebase Cloud Functions to run this client-side script on the server, do the calculations of these numbers on the server, and store them in a Firestore collection.
I'm a beginner in Node.js and Cloud Functions, so I don't know if this is possible to do.
Get the numbers from the API:
getLatestNum = (sym) => {
return API.getMarketBatch(sym).then((data) => {
return data;
});
};
The Cloud Function I was trying:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
const db = admin.firestore();
exports.resetAppointmentTimes = functions.pubsub
.schedule('30 20 * * *')
.onRun((context) => {
const appointmentTimesCollectionRef = db.collection('data');
return appointmentTimesCollectionRef
.get()
.then((querySnapshot) => {
if (querySnapshot.empty) {
return null;
} else {
let batch = db.batch();
querySnapshot.forEach((doc) => {
console.log(doc);
});
return batch.commit();
}
})
.catch((error) => {
console.log(error);
return null;
});
});
It is indeed possible to call a REST API from a Cloud Function. You need to use a Node.js library which returns Promises, like axios.
It's not 100% clear, in your question, to which specific Firestore doc(s) you want to write, but I make the assumption that it will be done within the batched write.
So, something along the following lines should do the trick:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const axios = require('axios');
admin.initializeApp();
const db = admin.firestore();
exports.resetAppointmentTimes = functions.pubsub
.schedule('30 20 * * *')
.onRun((context) => {
let apiData;
return axios.get('https://yourapiuri...')
.then(response => {
apiData = response.data; //For example, it depends on what the API returns
const appointmentTimesCollectionRef = db.collection('data');
return appointmentTimesCollectionRef.get();
})
.then((querySnapshot) => {
if (querySnapshot.empty) {
return null;
} else {
let batch = db.batch();
querySnapshot.forEach((doc) => {
batch.update(doc.ref, { fieldApiData: apiData});
});
return batch.commit();
}
})
.catch((error) => {
console.log(error);
return null;
});
});
Two things to note:
If you want to add the API result to some field's value, you need to give more details on your exact need
Important: You need to be on the "Blaze" pricing plan. As a matter of fact, the free "Spark" plan "allows outbound network requests only to Google-owned services". See https://firebase.google.com/pricing/ (hover your mouse on the question mark situated after the "Cloud Functions" title)
I have a problem with calling a Cloud Function from Flutter. In this Cloud Function I am doing several different things, like safe-search checking an image via the ML Vision ImageAnnotatorClient, adding a document to Firestore, or updating metadata for a Storage file. Usually all of this works fine, but nearly every time a device calls this function for the first time after a while, it fails with the CloudFunctionException DEADLINE_EXCEEDED. On every other try it works just fine without any issue. Does someone have an idea what could cause it?
I read that there is a 'cold start' of a function, which causes the function to take a much longer time to execute. But still, it always executes in less than 10 seconds, so a timeout does not seem to be the issue here.
This is how I call the Cloud Function from Flutter:
final HttpsCallable callable = _cloudFunctions.getHttpsCallable(
functionName: 'function',
);
dynamic resp = await callable.call({'prop1': prop1String, 'prop2': prop2String});
This is the content of the Cloud Function
exports.function = functions.https.onCall(async (req, context) => {
if (!context.auth) return {status: 'ERROR', code: 401, body: 'Not signed in'};
const [result] = await client.safeSearchDetection(req.url);
const detections = result.safeSearchAnnotation;
if (detections) {
if (detections.adult === 'VERY_LIKELY') {
const storage = new Storage();
await storage
.bucket('xxx.appspot.com/')
.file(req.filename)
.delete();
return {status: 'ERROR', code: 400, body: 'NSFW'};
}
}
return refs.add({
'prop1': req.prop2,
'prop2': req.prop1
})
.then(async () => {
const images = [];
const query = await refs
.where('prop1', '==', req.prop1)
.get();
query.forEach((document) => images.push(document));
if (images.length > 0) {
const docRef = refs.doc(images[0].id);
const filename = images[0]['_fieldsProto']['filename']['stringValue'];
const storage = new Storage();
await storage
.bucket('xxx.appspot.com/')
.file(filename)
.setMetadata({metadata: {receiver: req.uid}});
docRef.update({'receiver': req.uid, 'receivedTimestamp': FieldValue.serverTimestamp()});
return {status: 'OK', code: 200, body: images[0]};
} else {
return {status: 'OK', code: 200, body: null};
}
})
.catch(err => {
console.log(err);
return {status: 'ERROR', code: 500, body: 'failed to create ref in firestore'};
});
});
The error occurs in the Flutter code, when calling the callable.
I couldn't figure out which call is generating this error, but there are a couple of things that you can do to work around it.
Increase the memory and timeout of your function.
Your function is currently running with the default settings, which are 256MB of memory and 60s before timeout. You can raise that to up to 2GB and 540s; note that this will have an impact on your billing. You can implement that by doing the following:
const runtimeOpts = {
timeoutSeconds: 300,
memory: '1GB'
}
exports.function = functions.runWith(runtimeOpts).https.onCall(...)
Also, here is the documentation with more details on that.
Increase the timeout of your Firestore calls
You can add the following code to your Firestore initialization, replacing YOUR_NUMBER_HERE, to increase the time before timeout (the default is 60000, i.e. 1 minute); this is also likely to have an impact on your billing:
const firestore = new Firestore({
clientConfig: {
interfaces: {
'google.firestore.v1.Firestore': {
methods: {
RunQuery: {
timeout_millis: YOUR_NUMBER_HERE
}
}
}
}
}
});
Let me know if this solved the issue.
I have a function that triggers on a Firebase Database onWrite event. The function body uses two Google Cloud APIs (DNS and Storage).
While the function is running and working as expected (mostly), the issue is that I get 'socket hang up' errors more often than I'd like (~50% of the time).
My questions are:
Is this similar to what other testers have experienced? Is it a known outstanding issue, or expected behavior?
The example code is as follows:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const {credentials} = functions.config().auth;
credentials.private_key = credentials.private_key.replace(/\\n/g, '\n');
const config = Object.assign({}, functions.config().firebase, {credentials});
admin.initializeApp(config);
const gcs = require('@google-cloud/storage')({credentials});
const dns = require('@google-cloud/dns')({credentials});
const zoneName = 'applambda';
const zone = dns.zone(zoneName);
exports.createDeleteDNSAndStorage = functions.database.ref('/apps/{uid}/{appid}/name')
.onWrite(event => {
// Only edit data when it is first created.
const {uid, appid} = event.params;
const name = event.data.val();
const dbRef = admin.database().ref(`/apps/${uid}/${appid}`);
if (event.data.previous.exists()) {
console.log(`already exists ${uid}/${appid}`);
return;
}
// Exit when the data is deleted.
if (!event.data.exists()) {
console.log(`data is being deleted ${uid}/${appid}`);
return;
}
const url = `${name}.${zoneName}.com`;
console.log(`data: ${uid}/${appid}/${name}\nsetting up: ${url}`);
setupDNS({url, dbRef});
setupStorage({url, dbRef});
return;
});
function setupDNS({url, dbRef}) {
// Create an NS record.
let cnameRecord = zone.record('cname', {
name: `${url}.`,
data: 'c.storage.googleapis.com.',
ttl: 3000
});
zone.addRecords(cnameRecord).then(function() {
console.log(`done setting up zonerecord for ${url}`);
dbRef.update({dns: url}).then(res => console.log(res)).catch(err => console.log(err));
}).catch(function(err) {
console.error(`error setting up zonerecord for ${url}`);
console.error(err);
});
}
function setupStorage({url, dbRef}) {
console.log(`setting up storage bucket for ${url}`);
gcs.createBucket(url, {
website: {
mainPageSuffix: `https://${url}`,
notFoundPage: `https://${url}/404.html`
}
}).then(function(res) {
let bucket = res[0];
console.log(`created bucket ${url}, setting it as public`);
dbRef.update({storage: url}).then(function() {
console.log(`done setting up bucket for ${url}`);
}).catch(function(err) {
console.error(`db update for storage failed ${url}`);
console.error(err);
});
bucket.makePublic().then(function() {
console.log(`bucket set as public for ${url}`);
}).catch(function(err) {
console.error(`setting public for storage failed ${url}`);
console.error(err);
});
}).catch(function(err) {
console.error(`creating bucket failed ${url}`);
console.error(err);
});
}
I'm thinking your function needs to return a promise so that all the other async work has time to complete before the function shuts down. As it's shown now, your function simply returns immediately without waiting for the work to complete.
I don't know the cloud APIs you're using very well, but I'd guess that you should make your setupDNS() and setupStorage() return the promises from the async work they're doing, then return Promise.all() with those two promises, to let Cloud Functions know it should wait until all of that work is complete before cleaning up the container that's running the function.
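A sketch of that restructuring, keeping the question's (old-style) trigger signature so that only the promise handling changes; setupStorage() would be reworked the same way as setupDNS() shown here:
exports.createDeleteDNSAndStorage = functions.database.ref('/apps/{uid}/{appid}/name')
  .onWrite(event => {
    const {uid, appid} = event.params;
    const name = event.data.val();
    const dbRef = admin.database().ref(`/apps/${uid}/${appid}`);
    if (event.data.previous.exists()) {
      console.log(`already exists ${uid}/${appid}`);
      return null;
    }
    if (!event.data.exists()) {
      console.log(`data is being deleted ${uid}/${appid}`);
      return null;
    }
    const url = `${name}.${zoneName}.com`;
    // Returning Promise.all keeps the function alive until both setups finish
    return Promise.all([setupDNS({url, dbRef}), setupStorage({url, dbRef})]);
  });

function setupDNS({url, dbRef}) {
  const cnameRecord = zone.record('cname', {
    name: `${url}.`,
    data: 'c.storage.googleapis.com.',
    ttl: 3000
  });
  // Return the whole chain so the caller (and Cloud Functions) can wait on it
  return zone.addRecords(cnameRecord)
    .then(() => dbRef.update({dns: url}))
    .catch(err => {
      console.error(`error setting up zonerecord for ${url}`);
      console.error(err);
    });
}

// setupStorage would be restructured the same way: return the
// gcs.createBucket(...) chain instead of only starting it.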