onRequest vs onCall returning null

Please help me figure out the difference in return behaviour between the onCall and onRequest Google Cloud Functions below.
The onCall problem: it returns null for every return except the first one (as commented below). The database writes and the rest of the code work fine; it's only the returns that don't.
The onRequest version returns correctly on every path, and its database writes and the rest of its code also work fine.
As you will see, both functions run the same comparisons, but I just can't get the onCall returns to work. Any advice on how to get the onCall returns working (and how to structure it better) would be much appreciated.
I am keen on sticking with async/await (as opposed to chaining promises). I'm using Node.js 12 and calling the onCall from Flutter, though I don't know if that is relevant to the question.
The onCall:
exports.applyUserDiscount = functions.https.onCall(async (data, context) => {
if (!context.auth) return {message: "Authentication Required!", code: 401};
const uid = context.auth.uid;
const discountCode = data["discountCode"];
const cartTotal = data["cartTotal"];
try {
return await db.collection("discountCodes").where("itemID", "==", discountCode).limit(1).get()
.then(async (snapshot) => {
if (snapshot.empty) {
return "doesNotExist"; // The only return that works.
} else { // Everything else from here onwards returns null.
snapshot.forEach(async (doc) => {
if (doc.data().redeemed == true) {
return "codeUsed";
} else {
const newCartTotal = cartTotal - doc.data().discountAmount;
if (newCartTotal < 0) {
return "lessThanTotal";
} else {
doc.ref.update({
redeemed: true,
uid: uid,
redeemDate: fireDateTimeNow,
});
await db.collection("userdata").doc(uid).set({
cartDiscount: admin.firestore.FieldValue.increment(-doc.data().discountAmount),
}, {merge: true});
return doc.data().discountAmount.toString();
}
}
});
}
});
} catch (error) {
console.log("Error:" + error);
return "error";
}
});
The onRequest:
exports.applyUserDiscount = functions.https.onRequest(async (req, res) => {
const uid = req.body.uid;
const discountCode = req.body.discountCode;
const cartTotal = req.body.cartTotal;
try {
return await db.collection("discountCodes").where("itemID", "==", discountCode).limit(1).get()
.then(async (snapshot) => {
if (snapshot.isempty) {
res.send("doesNotExist");
} else {
snapshot.forEach(async (doc) => {
if (doc.data().redeemed == true) {
res.send("codeUsed");
} else {
const newCartTotal = cartTotal - doc.data().discountAmount;
if (newCartTotal < 0) {
res.send("lessThanTotal");
} else {
doc.ref.update({
redeemed: true,
uid: uid,
redeemDate: fireDateTimeNow,
});
await db.collection("userdata").doc(uid).set({
cartDiscount: admin.firestore.FieldValue.increment(-doc.data().discountAmount),
}, {merge: true});
res.send(doc.data().discountAmount.toString());
}
}
});
}
});
} catch (error) {
console.log(error);
res.send("error");
}
});

There are several points to be noted when looking at your code(s):
You should not use async/await within a forEach loop: the callback passed to forEach() is not awaited, so the outer function carries on without waiting for it (the sketch after this list of points illustrates this). HOWEVER, in your case you don't need to loop over the QuerySnapshot at all, since it contains only one document. You can use the docs property, which returns an array of all the documents in the QuerySnapshot, and take the first (and only) element.
You mix then() with async/await, which is not recommended.
I would advise throwing exceptions for the "error" cases, like doesNotExist, codeUsed or lessThanTotal, but it's up to you. Whether, for example, the lessThanTotal case is an error or a standard business case is debatable... If you prefer to send a "text" response, I would advise encapsulating it in an object with a single property: that way the response always has the same format in your front end.
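To illustrate the first point, here is a minimal sketch (not code from the question): forEach() fires its async callbacks and moves on immediately, while a plain for...of loop actually waits. The snapshot variable stands for any QuerySnapshot.
async function broken(snapshot) {
  snapshot.forEach(async (doc) => {
    await doc.ref.update({ touched: true }); // never awaited by the outer function
  });
  return "done"; // resolves before the updates have finished
}
async function fixed(snapshot) {
  for (const doc of snapshot.docs) {
    await doc.ref.update({ touched: true }); // awaited one by one
  }
  return "done"; // resolves after all updates have finished
}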
So, the following should do the trick. Note that I send back an object with a response property, including for the cases that could be considered errors. As said above, you could throw an exception in those cases instead; a sketch of that follows the corrected code.
exports.applyUserDiscount = functions.https.onCall(async (data, context) => {
if (!context.auth) ... //See https://firebase.google.com/docs/functions/callable#handle_errors
const uid = context.auth.uid;
const discountCode = data["discountCode"];
const cartTotal = data["cartTotal"];
try {
const snapshot = await db.collection("discountCodes").where("itemID", "==", discountCode).limit(1).get();
if (snapshot.empty) {
//See https://firebase.google.com/docs/functions/callable#handle_errors
} else {
const uniqueDoc = snapshot.docs[0];
if (uniqueDoc.data().redeemed == true) {
return { response: "codeUsed" };
} else {
const newCartTotal = cartTotal - uniqueDoc.data().discountAmount;
if (newCartTotal < 0) {
return { response: "lessThanTotal" };
} else {
await uniqueDoc.ref.update({ // See await here!!
redeemed: true,
uid: uid,
redeemDate: fireDateTimeNow,
});
await db.collection("userdata").doc(uid).set({
cartDiscount: admin.firestore.FieldValue.increment(-uniqueDoc.data().discountAmount),
}, { merge: true });
return {
response: uniqueDoc.data().discountAmount.toString()
}
}
}
}
} catch (error) {
console.log("Error:" + error);
return "error";
}
});
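For the two placeholder comments that point to the handle_errors documentation, a minimal sketch of what they could contain (my wording, not the original answer's) is to throw an HttpsError, which callable functions report back to the caller as a typed error:
const functions = require("firebase-functions");
// At the top of the handler:
if (!context.auth) {
  throw new functions.https.HttpsError("unauthenticated", "Authentication Required!");
}
// And for the empty snapshot case:
if (snapshot.empty) {
  throw new functions.https.HttpsError("not-found", "This discount code does not exist.");
}
On the client side (for example the Flutter cloud_functions plugin) these surface as catchable exceptions, while the successful path resolves with the { response: ... } object shown above.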


Stripe functions returning calls after function finished

I'm relatively new to JavaScript. I'm trying to work with Stripe to handle a user submitting another payment method and then paying an invoice with that method; if the payment fails again, it should remove the subscription altogether. I'm using Firebase Realtime Database with Google Cloud Functions and Node.js 8.
Here is what I have so far:
exports.onSecondPaymentAttempt = functions.database.ref("users/{userId}/something/somethingHistory/{subDbId}/newPayment").onCreate((snapshot, context)=>{
var s = snapshot.val();
var fields = s.split(",");
const cardToken = fields[0];
const cus_id = fields[1];
const conn_id = fields[2];
const subDbId = context.params.subDbId;
const userId = context.params.userId;
return stripe.customers.createSource(
cus_id,
{source: cardToken},{
stripeAccount: `${conn_id}`,
},
(err, card)=> {
console.log(err);
if(err){
return console.log("error attaching card "+ err)
}else{
const invoiceNo = admin.database().ref(`users/${userId}/something/somethingHistory/${subDbId}`)
return invoiceNo.once('value').then(snapshot=>{
const invoiceNumber = snapshot.child("invoiceId").val();
const subId = snapshot.child("subscriptionId").val();
return stripe.invoices.pay(
invoiceNumber,
{
expand: ['payment_intent','charge','subscription'],
},{
stripeAccount: `${conn_id}`,
},
(err, invoice)=>{
if(err){
return console.log("error paying invoice "+ err)
}else{
if(invoice.payment_intent.status==="succeeded"){
//DO SOME CODE
return console.log("New Payment succeeded for "+invoiceNumber)
}else{
//DO SOME OTHER CODE
//CANCEL
return stripe.subscriptions.del(
subId,{
stripeAccount: `${conn_id}`,
},
(err, confirmation)=> {
if(err){
return console.log("Subscription error")
}else{
return console.log("Subscription cancelled")
}
});
}
}
});
})
}
});
To me this looks like an incredibly inefficient and ugly way of achieving the effect. The user ends up waiting roughly 15 seconds for a response, and although the function reports finishing its execution after 1862 ms, I still get responses 5 to 10 seconds after that.
What's the most efficient way of achieving the same desired effect of registering a new payment source, paying subscription and then handling the result of that payment?
You should use the Promises returned by the Stripe asynchronous methods, as follows (untested, it probably needs some fine tuning, in particular with the objects passed to the Stripe methods):
exports.onSecondPaymentAttempt = functions.database.ref("users/{userId}/something/somethingHistory/{subDbId}/newPayment").onCreate((snapshot, context) => {
var s = snapshot.val();
var fields = s.split(",");
const cardToken = fields[0];
const cus_id = fields[1];
const conn_id = fields[2];
const subDbId = context.params.subDbId;
const userId = context.params.userId;
// Hoisted so they are still visible in the later .then() blocks:
let invoiceNumber;
let subId;
return stripe.customers.createSource(
//Format of this object to be confirmed....
cus_id,
{ source: cardToken },
{ stripeAccount: `${conn_id}` }
)
.then(card => {
const invoiceNo = admin.database().ref(`users/${userId}/something/somethingHistory/${subDbId}`)
return invoiceNo.once('value')
})
.then(snapshot => {
invoiceNumber = snapshot.child("invoiceId").val();
subId = snapshot.child("subscriptionId").val();
return stripe.invoices.pay(
invoiceNumber,
{ expand: ['payment_intent', 'charge', 'subscription'] },
{ stripeAccount: `${conn_id}` }
)
})
.then(invoice => {
if (invoice.payment_intent.status === "succeeded") {
//DO SOME CODE
console.log("New Payment succeeded for " + invoiceNumber)
return null;
} else {
//DO SOME OTHER CODE
//CANCEL
return stripe.subscriptions.del(
subId, {
stripeAccount: `${conn_id}`,
});
}
})
.catch(err => {
//....
return null;
})
});
I would suggest you watch the three videos about "JavaScript Promises" from the official Firebase video series, which explain why it is key to correctly chain and return the promises returned by the asynchronous methods.
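If you prefer async/await over a .then() chain (as discussed in the first question on this page), the same flow could be sketched as below. This is my own hedged rewrite of the answer above, not tested against Stripe, and it assumes the Stripe calls accept the same arguments as in the original code:
exports.onSecondPaymentAttempt = functions.database
  .ref("users/{userId}/something/somethingHistory/{subDbId}/newPayment")
  .onCreate(async (snapshot, context) => {
    const [cardToken, cus_id, conn_id] = snapshot.val().split(",");
    const { subDbId, userId } = context.params;
    try {
      await stripe.customers.createSource(
        cus_id,
        { source: cardToken },
        { stripeAccount: `${conn_id}` }
      );
      const histSnap = await admin.database()
        .ref(`users/${userId}/something/somethingHistory/${subDbId}`)
        .once("value");
      const invoiceNumber = histSnap.child("invoiceId").val();
      const subId = histSnap.child("subscriptionId").val();
      const invoice = await stripe.invoices.pay(
        invoiceNumber,
        { expand: ["payment_intent", "charge", "subscription"] },
        { stripeAccount: `${conn_id}` }
      );
      if (invoice.payment_intent.status === "succeeded") {
        console.log("New Payment succeeded for " + invoiceNumber);
        return null;
      }
      return stripe.subscriptions.del(subId, { stripeAccount: `${conn_id}` });
    } catch (err) {
      console.error(err);
      return null;
    }
  });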

Nested HTTP requests in Firebase cloud function

I'm using an HTTP-triggered Firebase cloud function to make an HTTP request. I get back an array of results (events from Meetup.com), and I push each result to the Firebase realtime database. But for each result, I also need to make another HTTP request for one additional piece of information (the category of the group hosting the event) to fold into the data I'm pushing to the database for that event. Those nested requests cause the cloud function to crash with an error that I can't make sense of.
const functions = require("firebase-functions");
const admin = require("firebase-admin");
admin.initializeApp();
const request = require('request');
exports.foo = functions.https.onRequest(
(req, res) => {
var ref = admin.database().ref("/foo");
var options = {
url: "https://api.meetup.com/2/open_events?sign=true&photo-host=public&lat=39.747988&lon=-104.994945&page=20&key=****",
json: true
};
return request(
options,
(error, response, body) => {
if (error) {
console.log(JSON.stringify(error));
return res.status(500).end();
}
if ("results" in body) {
for (var i = 0; i < body.results.length; i++) {
var result = body.results[i];
if ("name" in result &&
"description" in result &&
"group" in result &&
"urlname" in result.group
) {
var groupOptions = {
url: "https://api.meetup.com/" + result.group.urlname + "?sign=true&photo-host=public&key=****",
json: true
};
var categoryResult = request(
groupOptions,
(groupError, groupResponse, groupBody) => {
if (groupError) {
console.log(JSON.stringify(error));
return null;
}
if ("category" in groupBody &&
"name" in groupBody.category
) {
return groupBody.category.name;
}
return null;
}
);
if (categoryResult) {
var event = {
name: result.name,
description: result.description,
category: categoryResult
};
ref.push(event);
}
}
}
return res.status(200).send("processed events");
} else {
return res.status(500).end();
}
}
);
}
);
The function crashes, log says:
Error: Reference.push failed: first argument contains a function in property 'foo.category.domain._events.error' with contents = function (err) {
if (functionExecutionFinished) {
logDebug('Ignoring exception from a finished function');
} else {
functionExecutionFinished = true;
logAndSendError(err, res);
}
}
at validateFirebaseData (/user_code/node_modules/firebase-admin/node_modules/@firebase/database/dist/index.node.cjs.js:1436:15)
at /user_code/node_modules/firebase-admin/node_modules/@firebase/database/dist/index.node.cjs.js:1479:13
at Object.forEach (/user_code/node_modules/firebase-admin/node_modules/@firebase/util/dist/index.node.cjs.js:837:13)
at validateFirebaseData (/user_code/node_modules/firebase-admin/node_modules/@firebase/database/dist/index.node.cjs.js:1462:14)
at /user_code/node_modules/firebase-admin/node_modules/@firebase/database/dist/index.node.cjs.js:1479:13
at Object.forEach (/user_code/node_modules/firebase-admin/node_modules/@firebase/util/dist/index.node.cjs.js:837:13)
at validateFirebaseData (/user_code/node_modules/firebase-admin/node_modules/@firebase/database/dist/index.node.cjs.js:1462:14)
at /user_code/node_modules/firebase-admin/node_modules/@firebase/database/dist/index.node.cjs.js:1479:13
at Object.forEach (/user_code/node_modules/firebase-admin/node_modules/@firebase/util/dist/index.node.cjs.js:837:13)
at validateFirebaseData (/user_code/node_modules/firebase-admin/node_modules/@firebase/database/dist/index.node.cjs.js:1462:14)
If I leave out the bit for getting the group category, the rest of the code works fine (just writing the name and description for each event to the database, no nested requests). So what's the right way to do this?
I suspect this issue is due to the callbacks. When you use Firebase Functions, the exported function should either wait for everything to finish executing or return a promise that resolves once everything has completed. In this case, the exported function returns before the rest of the execution completes.
Here's a start of something more promise-based:
const functions = require("firebase-functions");
const admin = require("firebase-admin");
admin.initializeApp();
const request = require("request-promise-native");
exports.foo = functions.https.onRequest(async (req, res) => {
const ref = admin.database().ref("/foo");
try {
const reqEventOptions = {
url:
"https://api.meetup.com/2/open_events?sign=true&photo-host=public&lat=39.747988&lon=-104.994945&page=20&key=xxxxxx",
json: true
};
const bodyEventRequest = await request(reqEventOptions);
if (!bodyEventRequest.results) {
return res.status(200).end();
}
await Promise.all(
bodyEventRequest.results.map(async result => {
if (
result.name &&
result.description &&
result.group &&
result.group.urlname
) {
const event = {
name: result.name,
description: result.description
};
// get group information
const groupOptions = {
url:
"https://api.meetup.com/" +
result.group.urlname +
"?sign=true&photo-host=public&key=xxxxxx",
json: true
};
const categoryResultResponse = await request(groupOptions);
if (
categoryResultResponse.category &&
categoryResultResponse.category.name
) {
event.category = categoryResultResponse.category.name;
}
// save to the databse
return ref.push(event);
}
})
);
return res.status(200).send("processed events");
} catch (error) {
console.error(error.message);
return res.status(500).end(); // make sure the client still receives a response on error
}
});
A quick overview of the changes:
Use async and await to wait for things to complete instead of reacting in a callback (async/await is generally much easier to read than promises with .then functions, because the execution order is the order of the code)
Used request-promise-native, which supports promises/await (await means "wait until the promise resolves", so we need something that returns a promise)
Used const and let instead of var; this keeps variables scoped to where they are actually used
Instead of checks like if (isGood) { do good things }, use if (isBad) { return some error } and then do the good things. This makes the code easier to read and avoids deeply nested ifs where you can't tell where they end
Used Promise.all() so retrieving the categories for each event is done in parallel (see the short sketch after this list)
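To illustrate that last point, here is a minimal sketch with a hypothetical fetchCategory helper (not code from either answer): awaiting inside a plain loop runs the requests one after another, while Promise.all() over a map starts them all and waits for them together.
// Sequential: each request waits for the previous one to finish.
async function loadSequentially(results, fetchCategory) {
  const categories = [];
  for (const result of results) {
    categories.push(await fetchCategory(result.group.urlname));
  }
  return categories;
}
// Parallel: all requests start immediately and are awaited together.
function loadInParallel(results, fetchCategory) {
  return Promise.all(results.map((result) => fetchCategory(result.group.urlname)));
}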
There are two main changes you should implement in your code:
Since request does not return a promise you need to use an interface wrapper for request, like request-promise in order to correctly chain the different asynchronous events (See Doug's comment to your question)
Since you will then call the different endpoints several times (in parallel) with request-promise, you need to use Promise.all() in order to wait until all the promises resolve before sending back the response. The same applies to the different calls to the Firebase push() method.
Therefore, modifying your code along the following lines should work.
I'll let you modify it so that you get the values of name and description used to construct the event object. The order of the items in the results array is exactly the same as the order of the promises array, so you should be able to recover the values of name and description within results.forEach(groupBody => {}), e.g. by saving these values in an array in the enclosing scope (see the sketch after the code).
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
var rp = require('request-promise');
exports.foo = functions.https.onRequest((req, res) => {
var ref = admin.database().ref('/foo');
var options = {
url:
'https://api.meetup.com/2/open_events?sign=true&photo-host=public&lat=39.747988&lon=-104.994945&page=20&key=****',
json: true
};
rp(options)
.then(body => {
if ('results' in body) {
const promises = [];
for (var i = 0; i < body.results.length; i++) {
var result = body.results[i];
if (
'name' in result &&
'description' in result &&
'group' in result &&
'urlname' in result.group
) {
var groupOptions = {
url:
'https://api.meetup.com/' +
result.group.urlname +
'?sign=true&photo-host=public&key=****',
json: true
};
promises.push(rp(groupOptions));
}
}
return Promise.all(promises);
} else {
throw new Error('err xxxx');
}
})
.then(results => {
const promises = [];
results.forEach(groupBody => {
if ('category' in groupBody && 'name' in groupBody.category) {
var event = {
name: '....',
description: '...',
category: groupBody.category.name
};
promises.push(ref.push(event));
} else {
throw new Error('err xxxx');
}
});
return Promise.all(promises);
})
.then(() => {
res.send('processed events');
})
.catch(error => {
res.status(500).send(error);
});
});
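Following up on the note above about recovering name and description: one hedged way to do it (my addition, not part of the original answer) is to fill a second array in the same loop that builds the promises, so both arrays share the same indexes:
// Declared before the rp(options) chain so both .then() blocks can see it:
const eventMeta = [];
// In the first .then(body => { ... }) block, fill it while building the promises:
for (const result of body.results) {
  if ('name' in result && 'description' in result &&
      'group' in result && 'urlname' in result.group) {
    eventMeta.push({ name: result.name, description: result.description });
    promises.push(rp(groupOptions));
  }
}
// In the second .then(results => { ... }) block, read it back by index:
results.forEach((groupBody, i) => {
  if ('category' in groupBody && 'name' in groupBody.category) {
    promises.push(ref.push({
      name: eventMeta[i].name,
      description: eventMeta[i].description,
      category: groupBody.category.name
    }));
  }
});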
I made some changes and got it working with Node 8. I added this to my package.json:
"engines": {
"node": "8"
}
And this is what the code looks like now, based on R. Wright's answer and some Firebase cloud function sample code.
const functions = require("firebase-functions");
const admin = require("firebase-admin");
admin.initializeApp();
const request = require("request-promise-native");
exports.foo = functions.https.onRequest(
async (req, res) => {
var ref = admin.database().ref("/foo");
var options = {
url: "https://api.meetup.com/2/open_events?sign=true&photo-host=public&lat=39.747988&lon=-104.994945&page=20&key=****",
json: true
};
await request(
options,
async (error, response, body) => {
if (error) {
console.error(JSON.stringify(error));
res.status(500).end();
} else if ("results" in body) {
for (var i = 0; i < body.results.length; i++) {
var result = body.results[i];
if ("name" in result &&
"description" in result &&
"group" in result &&
"urlname" in result.group
) {
var groupOptions = {
url: "https://api.meetup.com/" + result.group.urlname + "?sign=true&photo-host=public&key=****",
json: true
};
var groupBody = await request(groupOptions);
if ("category" in groupBody && "name" in groupBody.category) {
var event = {
name: result.name,
description: result.description,
category: groupBody.category.name
};
await ref.push(event);
}
}
}
res.status(200).send("processed events");
}
}
);
}
);

How can I update more than 500 docs in Firestore using Batch?

I'm trying to update a field timestamp with the Firestore admin timestamp in a collection with more than 500 docs.
const batch = db.batch();
const serverTimestamp = admin.firestore.FieldValue.serverTimestamp();
db
.collection('My Collection')
.get()
.then((docs) => {
docs.forEach((doc) => {
batch.set(doc.ref, { serverTimestamp }, { merge: true });
});
return batch.commit();
})
.then(() => res.send('All docs updated'))
.catch(console.error);
This throws an error
{ Error: 3 INVALID_ARGUMENT: cannot write more than 500 entities in a single call
at Object.exports.createStatusError (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\common.js:87:15)
at Object.onReceiveStatus (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:1188:28)
at InterceptingListener._callNext (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:564:42)
at InterceptingListener.onReceiveStatus (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:614:8)
at callback (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:841:24)
code: 3,
metadata: Metadata { _internal_repr: {} },
details: 'cannot write more than 500 entities in a single call' }
Is there a way I can write a recursive method which creates batch objects, updating 500 docs at a time, until all the docs are updated?
From the docs I know that delete operation is possible with the recursive approach as mentioned here:
https://firebase.google.com/docs/firestore/manage-data/delete-data#collections
But, for updating, I'm not sure how to end the execution since the docs are not being deleted.
I also ran into the problem of updating more than 500 documents inside a Firestore collection, and I would like to share how I solved it.
I use Cloud Functions to update my collection inside Firestore, but this should also work in client-side code.
The solution counts every operation which is made to the batch, and after the limit is reached a new batch is created and pushed to the batchArray.
After all updates are queued, the code loops through the batchArray and commits every batch inside the array.
It is important to count every set(), update() and delete() operation made to the batch, because they all count towards the 500-operation limit.
const documentSnapshotArray = await firestore.collection('my-collection').get();
const batchArray = [];
batchArray.push(firestore.batch());
let operationCounter = 0;
let batchIndex = 0;
documentSnapshotArray.forEach(documentSnapshot => {
const documentData = documentSnapshot.data();
// update document data here...
batchArray[batchIndex].update(documentSnapshot.ref, documentData);
operationCounter++;
if (operationCounter === 499) {
batchArray.push(firestore.batch());
batchIndex++;
operationCounter = 0;
}
});
await Promise.all(batchArray.map((batch) => batch.commit())); // forEach with an async callback would not wait for the commits
return;
I liked this simple solution:
const users = await db.collection('users').get()
const batches = _.chunk(users.docs, 500).map(userDocs => {
const batch = db.batch()
userDocs.forEach(doc => {
batch.set(doc.ref, { field: 'myNewValue' }, { merge: true })
})
return batch.commit()
})
await Promise.all(batches)
Just remember to add import * as _ from "lodash" at the top. Based on this answer.
You can use the built-in BulkWriter. This method uses the 500/50/5 rule.
Example:
let bulkWriter = firestore.bulkWriter();
bulkWriter.create(documentRef, {foo: 'bar'});
bulkWriter.update(documentRef2, {foo: 'bar'});
bulkWriter.delete(documentRef3);
await bulkWriter.close().then(() => {
console.log('Executed all writes');
});
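As a slightly fuller hedged sketch of the same idea, applied to the timestamp update from the question (the collection name comes from the question; everything else is my assumption):
const bulkWriter = db.bulkWriter();
const serverTimestamp = admin.firestore.FieldValue.serverTimestamp();
const snapshot = await db.collection('My Collection').get();
snapshot.docs.forEach((doc) => {
  // BulkWriter batches and throttles the writes internally, so there is no 500 limit to manage.
  bulkWriter.update(doc.ref, { timestamp: serverTimestamp });
});
// close() flushes all remaining writes and resolves once they have completed.
await bulkWriter.close();
console.log('All docs updated');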
As mentioned above, @Sebastian's answer is good and I upvoted it too, although I ran into an issue while updating 25,000+ documents in one go.
The tweak to the logic is as below.
console.log(`Updating documents...`);
let collectionRef = db.collection('cities');
try {
let batch = db.batch();
const documentSnapshotArray = await collectionRef.get();
const records = documentSnapshotArray.docs;
const index = documentSnapshotArray.size;
console.log(`TOTAL SIZE=====${index}`);
for (let i=0; i < index; i++) {
const docRef = records[i].ref;
// YOUR UPDATES
batch.update(docRef, {isDeleted: false});
if ((i + 1) % 499 === 0) {
await batch.commit();
batch = db.batch();
}
}
// For committing final batch
if (index % 499 !== 0) {
await batch.commit();
}
console.log('write completed');
} catch (error) {
console.error(`updateWorkers() errored out : ${error.stack}`);
throw error; // or reject(error) if this code runs inside a Promise executor
}
The previous answers already explain the issue.
I'm sharing the final code that I built and that worked for me, since I needed something that works in a more decoupled manner than most of the solutions presented above.
import { FireDb } from "@services/firebase"; // = firebase.firestore();
type TDocRef = FirebaseFirestore.DocumentReference;
type TDocData = FirebaseFirestore.DocumentData;
let fireBatches = [FireDb.batch()];
let batchSizes = [0];
let batchIdxToUse = 0;
export default class FirebaseUtil {
static addBatchOperation(
operation: "create",
ref: TDocRef,
data: TDocData
): void;
static addBatchOperation(
operation: "update",
ref: TDocRef,
data: TDocData,
precondition?: FirebaseFirestore.Precondition
): void;
static addBatchOperation(
operation: "set",
ref: TDocRef,
data: TDocData,
setOpts?: FirebaseFirestore.SetOptions
): void;
static addBatchOperation(
operation: "create" | "update" | "set",
ref: TDocRef,
data: TDocData,
opts?: FirebaseFirestore.Precondition | FirebaseFirestore.SetOptions
): void {
// Lines below make sure we stay below the limit of 500 writes per
// batch
if (batchSizes[batchIdxToUse] === 500) {
fireBatches.push(FireDb.batch());
batchSizes.push(0);
batchIdxToUse++;
}
batchSizes[batchIdxToUse]++;
const batchArgs: [TDocRef, TDocData] = [ref, data];
if (opts) batchArgs.push(opts);
switch (operation) {
// Specific case for "set" is required because of some weird TS
// glitch that doesn't allow me to use the arg "operation" to
// call the function
case "set":
fireBatches[batchIdxToUse].set(...batchArgs);
break;
default:
fireBatches[batchIdxToUse][operation](...batchArgs);
break;
}
}
public static async runBatchOperations() {
// The lines below clear the globally available batches so we
// don't run them twice if we call this function more than once
const currentBatches = [...fireBatches];
fireBatches = [FireDb.batch()];
batchSizes = [0];
batchIdxToUse = 0;
await Promise.all(currentBatches.map((batch) => batch.commit()));
}
}
Based on all the above answers, I put together the following pieces of code that one can put into a module in JavaScript back-end and front-end to easily use Firestore batch writes, without worrying about the 500 writes limit.
Back-end (Node.js)
// The Firebase Admin SDK to access Firestore.
const admin = require("firebase-admin");
admin.initializeApp();
// Firestore does not accept more than 500 writes in a transaction or batch write.
const MAX_TRANSACTION_WRITES = 499;
const isFirestoreDeadlineError = (err) => {
console.log({ err });
const errString = err.toString();
return (
errString.includes("Error: 13 INTERNAL: Received RST_STREAM") ||
errString.includes("Error: 4 DEADLINE_EXCEEDED: Deadline exceeded")
);
};
const db = admin.firestore();
// How many transactions/batchWrites out of 500 so far.
// I wrote the following functions to easily use batchWrites without worrying about the 500 limit.
let writeCounts = 0;
let batchIndex = 0;
let batchArray = [db.batch()];
// Commit and reset batchWrites and the counter.
const makeCommitBatch = async () => {
console.log("makeCommitBatch");
await Promise.all(batchArray.map((bch) => bch.commit()));
};
// Commit the batchWrite; if you got a Firestore Deadline Error try again every 4 seconds until it gets resolved.
const commitBatch = async () => {
try {
await makeCommitBatch();
} catch (err) {
console.log({ err });
if (isFirestoreDeadlineError(err)) {
const theInterval = setInterval(async () => {
try {
await makeCommitBatch();
clearInterval(theInterval);
} catch (err) {
console.log({ err });
if (!isFirestoreDeadlineError(err)) {
clearInterval(theInterval);
throw err;
}
}
}, 4000);
}
}
};
// If the batch reaches 499 writes, start a new batch and reset the counter.
const checkRestartBatchWriteCounts = () => {
writeCounts += 1;
if (writeCounts >= MAX_TRANSACTION_WRITES) {
batchIndex++;
batchArray.push(db.batch());
writeCounts = 0;
}
};
const batchSet = (docRef, docData) => {
batchArray[batchIndex].set(docRef, docData);
checkRestartBatchWriteCounts();
};
const batchUpdate = (docRef, docData) => {
batchArray[batchIndex].update(docRef, docData);
checkRestartBatchWriteCounts();
};
const batchDelete = (docRef) => {
batchArray[batchIndex].delete(docRef);
checkRestartBatchWriteCounts();
};
module.exports = {
admin,
db,
MAX_TRANSACTION_WRITES,
checkRestartBatchWriteCounts,
commitBatch,
isFirestoreDeadlineError,
batchSet,
batchUpdate,
batchDelete,
};
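A hedged usage sketch for the back-end module above, assuming it is saved as ./firestoreBatch (the collection and field names are made up):
const { db, batchUpdate, commitBatch } = require("./firestoreBatch");
async function markAllAsProcessed() {
  const snapshot = await db.collection("jobs").get();
  snapshot.docs.forEach((doc) => {
    // batchUpdate transparently rolls over to a fresh batch every 499 writes.
    batchUpdate(doc.ref, { processed: true });
  });
  // commitBatch commits every accumulated batch, with the deadline-error retry shown above.
  await commitBatch();
}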
Front-end
// Firestore does not accept more than 500 writes in a transaction or batch write.
const MAX_TRANSACTION_WRITES = 499;
const isFirestoreDeadlineError = (err) => {
return (
err.message.includes("DEADLINE_EXCEEDED") ||
err.message.includes("Received RST_STREAM")
);
};
class Firebase {
constructor(fireConfig, instanceName) {
let app = fbApp;
if (instanceName) {
app = app.initializeApp(fireConfig, instanceName);
} else {
app.initializeApp(fireConfig);
}
this.name = app.name;
this.db = app.firestore();
this.firestore = app.firestore;
// How many transactions/batchWrites out of 500 so far.
// I wrote the following functions to easily use batchWrites without worrying about the 500 limit.
this.writeCounts = 0;
this.batch = this.db.batch();
this.isCommitting = false;
}
async makeCommitBatch() {
console.log("makeCommitBatch");
if (!this.isCommitting) {
this.isCommitting = true;
await this.batch.commit();
this.writeCounts = 0;
this.batch = this.db.batch();
this.isCommitting = false;
} else {
const batchWaitInterval = setInterval(async () => {
if (!this.isCommitting) {
this.isCommitting = true;
await this.batch.commit();
this.writeCounts = 0;
this.batch = this.db.batch();
this.isCommitting = false;
clearInterval(batchWaitInterval);
}
}, 400);
}
}
async commitBatch() {
try {
await this.makeCommitBatch();
} catch (err) {
console.log({ err });
if (isFirestoreDeadlineError(err)) {
const theInterval = setInterval(async () => {
try {
await this.makeCommitBatch();
clearInterval(theInterval);
} catch (err) {
console.log({ err });
if (!isFirestoreDeadlineError(err)) {
clearInterval(theInterval);
throw err;
}
}
}, 4000);
}
}
}
async checkRestartBatchWriteCounts() {
this.writeCounts += 1;
if (this.writeCounts >= MAX_TRANSACTION_WRITES) {
await this.commitBatch();
}
}
async batchSet(docRef, docData) {
if (!this.isCommitting) {
this.batch.set(docRef, docData);
await this.checkRestartBatchWriteCounts();
} else {
const batchWaitInterval = setInterval(async () => {
if (!this.isCommitting) {
this.batch.set(docRef, docData);
await this.checkRestartBatchWriteCounts();
clearInterval(batchWaitInterval);
}
}, 400);
}
}
async batchUpdate(docRef, docData) {
if (!this.isCommitting) {
this.batch.update(docRef, docData);
await this.checkRestartBatchWriteCounts();
} else {
const batchWaitInterval = setInterval(async () => {
if (!this.isCommitting) {
this.batch.update(docRef, docData);
await this.checkRestartBatchWriteCounts();
clearInterval(batchWaitInterval);
}
}, 400);
}
}
async batchDelete(docRef) {
if (!this.isCommitting) {
this.batch.delete(docRef);
await this.checkRestartBatchWriteCounts();
} else {
const batchWaitInterval = setInterval(async () => {
if (!this.isCommitting) {
this.batch.delete(docRef);
await this.checkRestartBatchWriteCounts();
clearInterval(batchWaitInterval);
}
}, 400);
}
}
}
No citations or documentation; I came up with this code myself, it worked for me, and it looks clean and simple to read and use. If someone likes it, they can use it too.
Better to add an automated test, because the code uses the private variable _ops, which can change after a package upgrade. For example, in old versions it was called _mutations.
async function commitBatch(batch) {
const MAX_OPERATIONS_PER_COMMIT = 500;
while (batch._ops.length > MAX_OPERATIONS_PER_COMMIT) {
const batchPart = admin.firestore().batch();
batchPart._ops = batch._ops.splice(0, MAX_OPERATIONS_PER_COMMIT - 1);
await batchPart.commit();
}
await batch.commit();
}
Usage:
const batch = admin.firestore().batch();
batch.delete(someRef);
batch.update(someRef, { foo: 'bar' });
...
await commitBatch(batch);
Simple solution: just fire the batch twice?
My array is "resultsFinal". I fire one batch with a limit of 490, and a second one up to the length of the array (resultsFinal.length). It works fine for me :)
How do you check it? Go to Firebase and delete your collection; Firebase says you have deleted XXX docs. If that matches the length of your array, you are good to go.
async function quickstart(results) {
// we get results in parameter for get the data inside quickstart function
const resultsFinal = results;
// console.log(resultsFinal.length);
let batch = firestore.batch();
// limit of firebase is 500 requests per transaction/batch/send
for (let i = 0; i < 490; i++) {
const doc = firestore.collection('testMore490').doc();
const object = resultsFinal[i];
batch.set(doc, object);
}
await batch.commit();
// const batchTwo = firestore.batch();
batch = firestore.batch();
for (let i = 490; i < resultsFinal.length; i++) { // start at 490, otherwise the item at index 490 is skipped
const objectPartTwo = resultsFinal[i];
const doc = firestore.collection('testMore490').doc();
batch.set(doc, objectPartTwo);
}
await batch.commit();
}

How to delete document from firestore using where clause

var jobskill_ref = db.collection('job_skills').where('job_id','==',post.job_id);
jobskill_ref.delete();
Error thrown
jobskill_ref.delete is not a function
You can only delete a document once you have a DocumentReference to it. To get that you must first execute the query, then loop over the QuerySnapshot and finally delete each DocumentSnapshot based on its ref.
var jobskill_query = db.collection('job_skills').where('job_id','==',post.job_id);
jobskill_query.get().then(function(querySnapshot) {
querySnapshot.forEach(function(doc) {
doc.ref.delete();
});
});
I use batched writes for this. For example:
var jobskill_ref = db.collection('job_skills').where('job_id','==',post.job_id);
let batch = firestore.batch();
jobskill_ref
.get()
.then(snapshot => {
snapshot.docs.forEach(doc => {
batch.delete(doc.ref);
});
return batch.commit();
})
ES6 async/await:
const jobskills = await store
.collection('job_skills')
.where('job_id', '==', post.job_id)
.get();
const batch = store.batch();
jobskills.forEach(doc => {
batch.delete(doc.ref);
});
await batch.commit();
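One hedged caveat to add here (my note, not from the original answers): a single batch is still capped at 500 operations, so if the query can match more than 500 documents you need to commit in chunks, as in the batching question above. A minimal sketch:
const snapshot = await store
  .collection('job_skills')
  .where('job_id', '==', post.job_id)
  .get();
const docs = snapshot.docs;
// Commit the deletes in chunks of at most 500 (the WriteBatch limit).
for (let i = 0; i < docs.length; i += 500) {
  const batch = store.batch();
  docs.slice(i, i + 500).forEach((doc) => batch.delete(doc.ref));
  await batch.commit();
}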
//The following code will find and delete the document from firestore
const doc = await this.noteRef.where('userId', '==', userId).get();
doc.forEach(element => {
element.ref.delete();
console.log(`deleted: ${element.id}`);
});
The key part of Frank's answer that fixed my issue was the .ref in doc.ref.delete().
I originally had only doc.delete(), which gave a "not a function" error. Now my code looks like this and works perfectly:
let fs = firebase.firestore();
let collectionRef = fs.collection(<your collection here>);
collectionRef.where("name", "==", name)
.get()
.then(querySnapshot => {
querySnapshot.forEach((doc) => {
doc.ref.delete().then(() => {
console.log("Document successfully deleted!");
}).catch(function(error) {
console.error("Error removing document: ", error);
});
});
})
.catch(function(error) {
console.log("Error getting documents: ", error);
});
Or try this, but you must have the ID beforehand:
export const deleteDocument = (id) => {
return (dispatch) => {
firebase.firestore()
.collection("contracts")
.doc(id)
.delete()
}
}
You can now do this:
db.collection("cities").doc("DC").delete().then(function() {
console.log("Document successfully deleted!");
}).catch(function(error) {
console.error("Error removing document: ", error);
});
And of course, you can use await/async:
exports.delete = functions.https.onRequest(async (req, res) => {
try {
const jobskill_ref = await db.collection('job_skills').where('job_id', '==', post.job_id).get();
jobskill_ref.forEach((doc) => {
doc.ref.delete();
});
return res.json({ status: 'ok', msg: 'Documents deleted' });
} catch (error) {
return res.json({
status: 'error', msg: 'Error while deleting', data: error,
});
}
});
I have no idea why you have to get() them, loop over them, and then delete() them, when you could prepare one query with where and delete in a single step like any SQL statement, but Google decided to do it like that, so for now this is the only option.
If you're using Cloud Firestore on the Client side, you can use a Unique key generator package/module like uuid to generate an ID. Then you set the ID of the document to the ID generated from uuid and store a reference to the ID on the object you're storing in Firestore.
For example:
If you wanted to save a person object to Firestore, first, you'll use uuid to generate an ID for the person, before saving like below.
const uuid = require('uuid')
const person = { name: "Adebola Adeniran", age: 19}
const id = uuid() //generates a unique random ID of type string
const personObjWithId = {person, id}
export const sendToFireStore = async (person) => {
await db.collection("people").doc(id).set(personObjWithId);
};
// To delete, get the ID you've stored with the object and call the following Firestore query:
export const deleteFromFireStore = async (id) => {
await db.collection("people").doc(id).delete();
};
Hope this helps anyone using firestore on the Client side.
The way I resolved this is by giving each document a uniqueID, querying on that field, getting the documentID of the returned document, and using that in the delete. Like so:
(Swift)
func rejectFriendRequest(request: Request) {
DispatchQueue.global().async {
self.db.collection("requests")
.whereField("uniqueID", isEqualTo: request.uniqueID)
.getDocuments { querySnapshot, error in
if let e = error {
print("There was an error fetching that document: \(e)")
} else {
self.db.collection("requests")
.document(querySnapshot!.documents.first!.documentID)
.delete() { err in
if let e = err {
print("There was an error deleting that document: \(e)")
} else {
print("Document successfully deleted!")
}
}
}
}
}
}
The code could be cleaned up a bit, but this is the solution I came up with. Hope it can help someone in the future!
const firestoreCollection = db.collection('job_skills')
var docIds = (await firestoreCollection.where("folderId", "==", folderId).get()).docs.map((doc => doc.id))
// for single result
await firestoreCollection.doc(docIds[0]).delete()
// for multiple result
await Promise.all(
docIds.map(
async(docId) => await firestoreCollection.doc(docId).delete()
)
)
delete(seccion: string, subseccion: string)
{
const deletlist = this.db.collection('seccionesclass', ref => ref.where('seccion', '==', seccion).where('subseccion', '==' , subseccion))
deletlist.get().subscribe(delitems => delitems.forEach( doc=> doc.ref.delete()));
alert('record erased');
}
The code for Kotlin, including failure listeners (both for the query and for the delete of each document):
fun deleteJobs(jobId: String) {
db.collection("jobs").whereEqualTo("job_id", jobId).get()
.addOnSuccessListener { documentSnapshots ->
for (documentSnapshot in documentSnapshots)
documentSnapshot.reference.delete().addOnFailureListener { e ->
Log.e(TAG, "deleteJobs: failed to delete document ${documentSnapshot.reference.id}", e)
}
}.addOnFailureListener { e ->
Log.e(TAG, "deleteJobs: query failed", e)
}
}

The right solution for promises

I have a Node.js app that connects to Firebase. I need to update the data correctly.
How do I call the function getOrSetUserTrackDay(day) inside a promise so that I get a real value rather than undefined?
let userData = [];
let userID = req.params.userID;
let today = req.params.today;
let yesterday = req.params.yesterday;
db.collection('users').doc(userID).get()
.then((userDataFromDB) => {
if (!userDataFromDB.exists) {
res.status(404).send('User not found');
}
else {
function getOrSetUserTrackDay(day) {
let userTrackRef = db.collection('history').doc('daily').collection(day).doc(userID);
userTrackRef.get()
.then(userTrackData => {
if (userTrackData.exists) {
return userTrackData.data(); // good
}
else {
let userNewData = {
username: userDataFromDB.data().username,
photoUrl: userDataFromDB.data().photoUrl
};
userTrackRef.update(userNewData);
return userNewData; // good
}
})
}
userData = {
user: userDataFromDB.data(),
today: getOrSetUserTrackDay(today), // undefined
yesterday: getOrSetUserTrackDay(yesterday) // undefined
};
res.json(userData);
}
})
.catch((err) => {
console.log(err);
res.status(404).send(err);
});
Well, getOrSetUserTrackDay has no return statement, hence it returns undefined. And since it contains asynchronous code, you'll never be able to use its result synchronously anyway.
So, you can do the following:
let userData = [];
let userID = req.params.userID;
let today = req.params.today;
let yesterday = req.params.yesterday;
db.collection('users').doc(userID).get()
.then((userDataFromDB) => {
if (!userDataFromDB.exists) {
res.status(404).send('User not found');
}
else {
let getOrSetUserTrackDay = day => {
let userTrackRef = db.collection('history').doc('daily').collection(day).doc(userID);
return userTrackRef.get()
.then(userTrackData => {
if (userTrackData.exists) {
return userTrackData.data(); // good
} else {
let userNewData = {
username: userDataFromDB.data().username,
photoUrl: userDataFromDB.data().photoUrl
};
userTrackRef.update(userNewData);
return userNewData; // good
}
});
};
Promise.all([getOrSetUserTrackDay(today), getOrSetUserTrackDay(yesterday)])
.then(([today, yesterday]) => res.json({
user: userDataFromDB.data(),
today,
yesterday
}));
}
}).catch((err) => {
console.log(err);
res.status(404).send(err);
});
Note: I changed getOrSetUserTrackDay from a function declaration to a function expression (in this case an arrow function, for no particular reason), because function declarations should not be placed in blocks; use a function expression or move the declaration to the top of the outer function.
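If you prefer async/await (as in the first question on this page), the same helper could be sketched as below. This is my own hedged rewrite, not part of the original answer; it assumes it runs inside an async route handler where db, userID and userDataFromDB are already in scope, and it also awaits the write, using set() with merge because update() rejects when the document does not exist yet:
const getOrSetUserTrackDay = async (day) => {
  const userTrackRef = db.collection('history').doc('daily').collection(day).doc(userID);
  const userTrackData = await userTrackRef.get();
  if (userTrackData.exists) {
    return userTrackData.data();
  }
  const userNewData = {
    username: userDataFromDB.data().username,
    photoUrl: userDataFromDB.data().photoUrl
  };
  // set() with merge creates the document if it is missing; update() would fail here.
  await userTrackRef.set(userNewData, { merge: true });
  return userNewData;
};
const [today, yesterday] = await Promise.all([
  getOrSetUserTrackDay(req.params.today),
  getOrSetUserTrackDay(req.params.yesterday)
]);
res.json({ user: userDataFromDB.data(), today, yesterday });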
