Node.js script not running with async/await - javascript

I cannot figure out why the script below won't run. The script likely won't do what I want yet, but running
node ./contentful/contentful-assets.js
in the terminal does nothing: no errors, nothing logged for me to even start debugging. However, if I remove async, it attempts the script and shoots back an error.
./contentful/contentful-assets.js
const contentful = require('contentful-management');
const iterator = require('make-iterator');
const assets = require('./assetObject.js');
async resolve => {
console.log('Creating Contentful client');
const client = contentful.createClient({
accessToken: 'token',
logHandler: (level, data) => console.log(`${level} | ${data}`)
});
const iterableAssets = iterator(assets);
const space = await client.getSpace('space');
const environment = await space.getEnvironment('enviroment');
const cmsAssets = [];
const assetProcessingTimes = [];
const inProcess = new Map();
let processedAssetsCounter = 0;
const createAndPublishSingleAsset = async ({ asset, done, index }) => {
if (done) {
if (inProcess.size > 0) return false;
return resolve(cmsAssets);
}
const start = Date.now();
const id = '' + start + Math.round(Math.random() * 100);
inProcess.set(id, true);
let cmsAsset;
try {
cmsAsset = await environment.createAssetWithId(asset.postId, {
fields: {
title: {
'en-US': asset.title
},
description: {
'en-US': asset.description
},
file: {
'en-US': {
contentType: 'image/jpg',
fileName: asset.filename,
upload: asset.link
}
}
}
});
} catch (e) {
console.log(`Asset "${asset.title}" failed to create, retrying...`);
createAndPublishSingleAsset({
asset,
done,
index
});
}
try {
const processedCMSAsset = await cmsAsset.processForAllLocales();
const publishedCMSAsset = await processedCMSAsset.publish();
cmsAssets.push(publishedCMSAsset);
assetProcessingTimes.push((Date.now() - start) / 1000);
inProcess.clear(id);
const eta = Math.floor(
assetProcessingTimes.reduce((a, b) => a + b, 0) /
assetProcessingTimes.length *
(assets.length - index) /
60
);
processedAssetsCounter += 1;
console.log(
`Processed asset ${processedAssetsCounter}/${assets.length} - eta: ${eta}m`
);
createAndPublishSingleAsset(iterableAssets.next());
} catch (e) {
console.log(`Asset "${asset.title}" failed to process, retrying...`);
await cmsAsset.delete();
createAndPublishSingleAsset({
asset,
done,
index
});
}
};
console.log('Starting to create assets');
createAndPublishSingleAsset(iterableAssets.next());
createAndPublishSingleAsset(iterableAssets.next());
createAndPublishSingleAsset(iterableAssets.next());
};
assetObject.js
[
{
link: 'https://example.com/example1.jpg',
title: 'Example 1',
description: 'Description of example 1',
postId: '1234567890',
filename: 'example1.jpeg'
}, ... // Many more
]
What have I missed here?

I fear that you are not calling the function. Could you try the following?
const contentful = require('contentful-management');
const iterator = require('make-iterator');
const assets = require('./assetObject.js');
const doWork = async resolve => {
console.log('Creating Contentful client');
...
}
doWork();
You are only declaring an async function containing all of that code; you are never actually calling it.

In this code snippet you are declaring a function, but never invoking it:
//declaring an async function, with "resolve" as the argument
async resolve => {
//function definition
}
To be able to reference the function later and invoke it, assign it to a const/let:
const createAssets = async resolve => { }
//now, invoke
createAssets()
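Note that the body both awaits and calls resolve(cmsAssets), so it was most likely lifted out of a Promise executor. A minimal sketch of restoring that wrapper (an assumption about the original intent; be aware that an async executor silently swallows rejections):
const createAssets = () =>
  new Promise(async resolve => {
    // ...original body, unchanged, which eventually calls resolve(cmsAssets)...
  });
createAssets().then(cmsAssets =>
  console.log(`Created ${cmsAssets.length} assets`)
);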

Related

Making the bot send a message when someone speaks

I have another question concerning my catch command. If I wanted to make it no longer an explicitly invoked command, but instead give it a small chance of triggering whenever anyone speaks, how would I go about that? Would I need to put it in my message.js file? I know that if I put it there as-is, it will trigger every time someone uses a command; but I don't want it limited to command use, and I don't want it to happen every time. I've also heard of putting it in a separate JSON file and linking it back somehow. Any help is appreciated. (A sketch of the random trigger follows the answer below.)
const profileModel = require("../models/profileSchema");
module.exports = {
name: "catch",
description: "users must type catch first to catch the animal",
async execute(client, message, msg, args, cmd, Discord, profileData) {
const prey = [
"rabbit",
"rat",
"bird",
];
const caught = [
"catch",
];
const chosenPrey = prey.sort(() => Math.random() - Math.random()).slice(0, 1);
const whenCaught = caught.sort(() => Math.random() - Math.random()).slice(0, 1);
const filter = ({ content }) => whenCaught.some((caught) => caught.toLowerCase() == content.toLowerCase());
const collector = message.channel.createMessageCollector({ max: 1, filter, time: 15000 });
const earnings = Math.floor(Math.random() * (20 - 5 + 1)) + 5;
collector.on('collect', async (m) => {
if(m.content?.toLowerCase() === 'catch') {
const user = m.author;
const userData = await profileModel.findOne({ userID: user.id });
message.channel.send(`${userData.name} caught the ${chosenPrey}! You gained ${earnings} coins.`);
}
await profileModel.findOneAndUpdate(
{
userID: m.author.id,
},
{
$inc: {
coins: earnings,
},
}
);
});
collector.on('end', (collected, reason) => {
if (reason == "time") {
message.channel.send('Too slow');
}
});
message.channel.send(`Look out, a ${chosenPrey}! Type CATCH before it gets away!`);
}
}
message.js file just in case
const profileModel = require("../../models/profileSchema");
const cooldowns = new Map();
module.exports = async (Discord, client, message) => {
let profileData;
try {
profileData = await profileModel.findOne({ userID: message.author.id });
if(!profileData){
let profile = await profileModel.create({
name: message.member.user.tag,
userID: message.author.id,
serverID: message.guild.id,
coins: 0,
});
profile.save();
}
} catch (err) {
console.log(err);
}
const prefix = '-';
if(!message.content.startsWith(prefix) || message.author.bot) return;
const args = message.content.slice(prefix.length).split(/ +/);
const cmd = args.shift().toLowerCase();
const command = client.commands.get(cmd) || client.commands.find(a => a.aliases && a.aliases.includes(cmd));
if(!command) return;
if(!cooldowns.has(command.name)){
cooldowns.set(command.name, new Discord.Collection());
}
const current_time = Date.now();
const time_stamps = cooldowns.get(command.name);
const cooldown_amount = (command.cooldown) * 1000;
if(time_stamps.has(message.author.id)){
const expiration_time = time_stamps.get(message.author.id) + cooldown_amount;
if(current_time < expiration_time){
const time_left = (expiration_time - current_time) / 1000;
return message.reply(`Slow down there! You have to wait ${time_left.toFixed(0)} seconds before you can perform ${command.name} again.`);
}
}
if(command) command.execute(client, message, args, cmd, Discord, profileData);
}
Yes, that is possible. In your command handler, create and export a boolean variable.
Make sure to do this in the global scope (outside any function):
let canBeActivated = true;
module.exports.getCanBeActivated = function () { return canBeActivated };
module.exports.setCanBeActivated = function(value) { canBeActivated = value };
Then, in your command file, import it and check whether it can be activated
const { getCanBeActivated, setCanBeActivated } = require("path/to/message.js")
module.exports = {
...
execute(...) {
// command won't run
if (!getCanBeActivated()) return;
}
}
You can add some logic to run the command even when canBeActivated is false (for example, get a random number between 1 and 100 and run the command only if it equals 4). If you need to change the flag, just call setCanBeActivated.
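For the random-trigger part of the question, here is a hedged sketch; it reuses the client.commands lookup and the execute call style already present in message.js, while the 1-in-100 odds and the placement are assumptions to tune:
// message.js - after profileData is loaded, before the prefix check:
// give every non-bot message a 1-in-100 chance of firing the catch command.
if (!message.author.bot && Math.floor(Math.random() * 100) + 1 === 4) {
  const catchCommand = client.commands.get('catch');
  if (catchCommand) {
    catchCommand.execute(client, message, [], 'catch', Discord, profileData);
  }
}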

How to call the balanceOf() method for a non-published ABI

I'm trying to call the balanceOf() method on a specific Tron contract (TYukBQZ2XXCcRCReAUguyXncCWNY9CEiDQ), but all I get is "Failed to execute". If I do not provide the parameters, I get "Invalid argument count provided", so at some level it works for this contract.
What is interesting is that it works well on contracts other than those created with the JustSwap Factory contract, e.g. https://tronscan.io/#/contract/TYukBQZ2XXCcRCReAUguyXncCWNY9CEiDQ/code .
The contract includes the standard TRC20 methods, including balanceOf(). I'm stuck and have tried everything I can from my side, but let's just say I'm not fluent in the tronweb API.
My code:
export const getDataToken = async (contractAddress, account, account2) => {
try {
const instance = await window.tronWeb.contract(
[
{
constant: true,
inputs: [{ name: "owner", type: "address" }],
name: "balanceOf",
outputs: [{ name: "", type: "uint256" }],
payable: false,
stateMutability: "view",
type: "function"
}
],
contractAddress
);
console.log(instance);
if (instance.balanceOf) {
console.log("dadadad if");
const tokenBalance = await instance.balanceOf(account).call();
const tokenBalance2 = await instance.balanceOf(account2).call();
return {
tokenBalance: (tokenBalance / Math.pow(10, 18)).toString(),
tokenContract: instance,
tokenBalance2: (tokenBalance2 / Math.pow(10, 18)).toString()
};
}
} catch (message) {
console.log("error getData :" + message);
}
};
const { tokenBalance, tokenContract, tokenBalance2 } = getDataToken(
"TYukBQZ2XXCcRCReAUguyXncCWNY9CEiDQ",
"TL4HzzxGMc1LMfs3XCi4yTJikaBVubz5y4",
"TTFp171XD4JdUB33sDq2ydXJyUEEZjNhLD"
);
This function can help (example for getting JustSwap pair price):
async function takePrice(contractAddress, token){
var functionSelector = 'getTokenToTrxInputPrice(uint256)';
var parameter = [
{
type: 'uint256',
value: token
}
]
var options = {};
const transaction = await window.tronWeb.transactionBuilder.triggerConstantContract(contractAddress, functionSelector, options, parameter);
return window.tronWeb.BigNumber("0x"+transaction['constant_result'][0]);
}
priceUSDTTRX = window.tronWeb.fromSun(await takePrice(USDTTRX_ADDRESS, "1000000"));
priceSomeTone18TRX = window.tronWeb.fromSun(await takePrice(SomeTone18TRX_ADDRESS, "1"+"0".repeat(18)));
I got a good result with this:
const TronWeb = require("tronweb");
const ethers = require('ethers')
const MAINNET_RPC = "https://api.trongrid.io";
const PLACEHOLDER_PRIVATE_KEY = "YOUR_PRIVATE_KEY";
const HttpProvider = TronWeb.providers.HttpProvider;
const fullNode = new HttpProvider(MAINNET_RPC);
const solidityNode = new HttpProvider(MAINNET_RPC);
const eventServer = new HttpProvider(MAINNET_RPC);
const tronWeb = new TronWeb(fullNode,solidityNode,eventServer,PLACEHOLDER_PRIVATE_KEY);
const startJustSwap = async () => {
try {
const contractTokenExchangeUSDT = 'TQn9Y2khEsLJW1ChVWFMSMeRDow5KcbLSE'; //S-USDT-TRX Token
const parameter = [{type:`uint256`,value: 10000000}];
const tx = await tronWeb.transactionBuilder.triggerConstantContract(contractTokenExchangeUSDT, `trxToTokenSwapInput(uint256,uint256)`, {}, parameter);
console.log(tx);
} catch (e) {
console.log(e);
}
};
startJustSwap();
Try const tokenBalance = await instance.balanceOf(account).call({ _isConstant: true });
It works for me.
transactionBuilder.triggerConstantContract works too, but mine is simpler.
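For completeness, here is a sketch of reading balanceOf through triggerConstantContract as well, mirroring the takePrice helper above (balanceOfRaw is a name I made up):
// Hypothetical helper: read balanceOf via triggerConstantContract,
// following the same pattern as takePrice above.
async function balanceOfRaw(contractAddress, ownerAddress) {
  const functionSelector = 'balanceOf(address)';
  // ownerAddress may need converting with tronWeb.address.toHex(...)
  const parameter = [{ type: 'address', value: ownerAddress }];
  const transaction = await window.tronWeb.transactionBuilder.triggerConstantContract(
    contractAddress, functionSelector, {}, parameter);
  return window.tronWeb.BigNumber('0x' + transaction['constant_result'][0]);
}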

Firebase Cloud Functions: Promise not resolving

I am having some issues with a particular call in my cloud function that doesn't seem to be resolving correctly.
This is the code that doesn't want to resolve:
console.log('Getting Search Patterns');
let searchPatterns: FirebaseFirestore.QuerySnapshot;
try {
searchPatterns = await admin
.firestore()
.collection('FYP_LOCATIONS')
.get();
} catch (error) {
console.error(error);
}
console.log(`Search Patterns Received: ${searchPatterns}`);
LOG:
As you can see in the log, my function runs up until the console.log before the try block, then stops until the function times out. I'm not sure what is causing this issue.
EDIT: I have reformatted my code by separating out each of the different parts in my cloud function into separate functions that I can call; the resulting getSearchTerms() function is as follows:
async function getSearchTerms(): Promise<FirebaseFirestore.DocumentData[]> {
try {
const snapshot = await admin
.firestore()
.collection('FYP_LOCATIONS')
.get();
console.log('Get Returned');
return snapshot.docs.map(doc => doc.data());
} catch (e) {
console.error(e);
return [];
}
}
This still stops at the same point in the function execution. The full function is below, updated to the latest version:
import * as functions from 'firebase-functions';
import * as admin from 'firebase-admin';
import * as path from 'path';
import * as WordExtractor from 'word-extractor';
import * as textract from 'textract';
import suffixArray from './suffixArray';
// interface Location {
// lid: string;
// location_name: string;
// location_type: string;
// sentimental_value: number;
// }
// interface Context {
// lid: string;
// context_string: string;
// fid: string;
// }
export const processFile = functions.storage.object().onFinalize(async file => {
const serviceAccount = require(__dirname + '/../config/serviceAccount.json');
admin.initializeApp({
credential: admin.credential.cert(serviceAccount),
databaseURL: 'https://fyp-alex.firebaseio.com',
});
const firestore = admin.firestore();
const fileBucket: string = file.bucket;
const filePath: string = file.name;
const fileDet: string = path.basename(filePath);
const fileNameSplit: string[] = fileDet.split('.');
const fileExt: string = fileNameSplit.pop();
const fileName: string = fileNameSplit.join('.');
const bucket = admin.storage().bucket(fileBucket);
const fileRef = bucket.file(filePath);
const _path: string = `/tmp/${fileName}.${fileExt}`;
console.log(`File path ${filePath}`);
console.log('Getting Download URL');
try {
console.log(`Downloading to: ${_path}`);
await fileRef.download({ destination: _path });
console.log('File Saved');
console.log(`Getting Details: ${_path}`);
const text: string = await getText(_path, fileExt);
console.log(`Processing: ${fileName}`);
console.log('Creating Suffix Array');
const suffix_array = suffixArray(text);
console.log(`Suffix Array Created: ${suffix_array}`);
console.log('Getting Search Patterns');
const searchTerms: FirebaseFirestore.DocumentData[] = await getSearchTerms();
console.log('Search Patterns Received');
const promises = [];
const allContexts: Object[] = [];
for (const searchDoc of searchTerms) {
const searchTerm = searchDoc.location_name.toLowerCase();
console.log(searchTerm);
const matchedIndexes = search(text, searchTerm, suffix_array);
const contexts = createContexts(matchedIndexes, searchDoc, text, fileName);
allContexts.concat(contexts);
}
for (const context of allContexts) {
const p = admin
.firestore()
.collection('FYP_CONTEXTS')
.add(context);
promises.push(p);
}
await Promise.all(promises);
const data = {
processed: 1,
};
return firestore.doc(`FYP_FILES/${fileName}`).update(data);
} catch (e) {
console.error(e);
const data = {
processed: 2,
};
return firestore.doc(`FYP_FILES/${fileName}`).update(data);
}
});
async function getText(_path: string, fileExt: string) {
let text: string = '';
switch (fileExt) {
case 'docx':
case 'doc':
const extractor = new WordExtractor();
const extracted = await extractor.extract(_path);
text = extracted.getBody();
break;
case 'pdf':
break;
case 'txt':
textract.fromFileWithPath(_path, function(extractedError: any, string: string) {
if (extractedError) {
console.error(extractedError);
}
if (string !== null) {
text = string;
}
});
break;
default:
console.log('Unsupported File Type');
}
return text;
}
async function getSearchTerms(): Promise<FirebaseFirestore.DocumentData[]> {
try {
const snapshot = await admin
.firestore()
.collection('FYP_LOCATIONS')
.get();
console.log('Get Returned');
return snapshot.docs.map(doc => doc.data());
} catch (e) {
console.error(e);
return [];
}
}
function createContexts(
matchedIndexes: number[],
searchDoc: FirebaseFirestore.DocumentData,
text: string,
fileName: string
) {
console.log('Creating Contexts');
const contexts = [];
const searchTerm = searchDoc.location_name.toLowerCase();
for (const index of matchedIndexes) {
let left = index - 25;
let right = index + searchTerm.length + 25;
if (left < 0) {
left = 0;
}
if (right > text.length) {
right = text.length;
}
const context = text.substring(left, right);
contexts.push({
lid: searchDoc.lid,
context_string: context,
fid: fileName,
});
}
return contexts;
}
function search(text: string, searchTerm: string, suffix_array: number[]) {
console.log(`Beginning search for: ${searchTerm}`);
let start = 0;
let end = suffix_array.length;
const matchedIndexes: Array<number> = [];
while (start < end) {
const mid: number = (end - 1) / 2;
const index: number = suffix_array[mid];
const finalIndex: number = index + searchTerm.length;
if (finalIndex <= text.length) {
const substring: string = text.substring(index, finalIndex);
const match: number = searchTerm.localeCompare(substring);
if (match === 0) {
console.log(`Match Found at Index: ${index}`);
matchedIndexes.push(index);
} else if (match < 0) {
end = mid;
} else if (match > 0) {
start = mid;
}
console.log(matchedIndexes);
}
}
if (matchedIndexes.length === 0) {
console.log(`No matches found for search term: ${searchTerm}`);
}
return matchedIndexes;
}
Hopefully the full function provides a bit more context.
I have watched Doug's videos through a few times, but I am still coming up against this. I did notice that removing the await that seems to be failing (as in removing the promise altogether) seemed to cause an earlier await to fail. This is indicative of an issue with promises later in the function, but I cannot for the life of me find it. I will keep trying, but hopefully this provides some useful context.
You aren't letting the function know when the async operations have finished.
I would guess that you want to collect all of the async operations in an array and wait for all of them to finish before letting the function exit.
(Those YouTube videos by Doug, mentioned in the comments above, are quite good and do a more thorough job of explaining why.)
e.g.:
const requests = [];
const things = [1,2,3];
for (let index = 0; index < things.length; index++) {
const element = things[index];
const promise = firebase.firestore().collection('things').add(element);
requests.push(promise);
}
return Promise.all(requests);
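One more observation, separate from the answer above: in the question's getText(), the 'txt' branch hands textract a callback and returns before that callback fires, so for .txt files the extracted text is silently lost. A promisified sketch (extractTxt is a hypothetical helper; it assumes the fromFileWithPath(path, callback) signature shown in the question):
function extractTxt(path) {
  return new Promise((resolve, reject) => {
    textract.fromFileWithPath(path, (err, extracted) => {
      if (err) return reject(err);
      resolve(extracted || '');
    });
  });
}
// then inside getText():
//   case 'txt':
//     text = await extractTxt(_path);
//     break;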

How can I update more than 500 docs in Firestore using Batch?

I'm trying to update a timestamp field with the Firestore admin server timestamp in a collection with more than 500 docs.
const batch = db.batch();
const serverTimestamp = admin.firestore.FieldValue.serverTimestamp();
db
  .collection('My Collection')
  .get()
  .then((docs) => {
    docs.forEach((doc) => {
      batch.set(doc.ref, {
        serverTimestamp,
      }, {
        merge: true,
      });
    });
    return batch.commit();
  })
  .then(() => res.send('All docs updated'))
  .catch(console.error);
This throws an error
{ Error: 3 INVALID_ARGUMENT: cannot write more than 500 entities in a single call
at Object.exports.createStatusError (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\common.js:87:15)
at Object.onReceiveStatus (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:1188:28)
at InterceptingListener._callNext (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:564:42)
at InterceptingListener.onReceiveStatus (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:614:8)
at callback (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:841:24)
code: 3,
metadata: Metadata { _internal_repr: {} },
details: 'cannot write more than 500 entities in a single call' }
Is there a way I can write a recursive method which creates a batch object, updates 500 docs at a time, and repeats until all the docs are updated?
From the docs I know that the delete operation is possible with the recursive approach, as mentioned here:
https://firebase.google.com/docs/firestore/manage-data/delete-data#collections
But for updating, I'm not sure how to end the execution, since the docs are not being deleted.
I also ran into the problem of updating more than 500 documents inside a Firestore collection, and I would like to share how I solved it.
I use Cloud Functions to update my collection inside Firestore, but this should also work in client-side code.
The solution counts every operation made against the batch, and after the limit is reached a new batch is created and pushed to the batchArray.
After all updates are staged, the code loops through the batchArray and commits every batch inside the array.
It is important to count every set(), update(), and delete() made against the batch, because they all count towards the 500-operation limit.
const documentSnapshotArray = await firestore.collection('my-collection').get();
const batchArray = [];
batchArray.push(firestore.batch());
let operationCounter = 0;
let batchIndex = 0;
documentSnapshotArray.forEach(documentSnapshot => {
const documentData = documentSnapshot.data();
// update document data here...
batchArray[batchIndex].update(documentSnapshot.ref, documentData);
operationCounter++;
if (operationCounter === 499) {
batchArray.push(firestore.batch());
batchIndex++;
operationCounter = 0;
}
});
// map + Promise.all actually awaits every commit (forEach with an async callback would not)
await Promise.all(batchArray.map((batch) => batch.commit()));
return;
I liked this simple solution:
const users = await db.collection('users').get()
const batches = _.chunk(users.docs, 500).map(userDocs => {
const batch = db.batch()
userDocs.forEach(doc => {
batch.set(doc.ref, { field: 'myNewValue' }, { merge: true })
})
return batch.commit()
})
await Promise.all(batches)
Just remember to add import * as _ from "lodash" at the top. Based on this answer.
You can use the built-in BulkWriter, which applies the 500/50/5 ramp-up rule.
Example:
let bulkWriter = firestore.bulkWriter();
bulkWriter.create(documentRef, {foo: 'bar'});
bulkWriter.update(documentRef2, {foo: 'bar'});
bulkWriter.delete(documentRef3);
await bulkWriter.close().then(() => {
console.log('Executed all writes');
});
As mentioned above, @Sebastian's answer is good and I upvoted it too, though I faced an issue while updating 25,000+ documents in one go.
The tweaked logic is as follows.
console.log(`Updating documents...`);
let collectionRef = db.collection('cities');
try {
let batch = db.batch();
const documentSnapshotArray = await collectionRef.get();
const records = documentSnapshotArray.docs;
const index = documentSnapshotArray.size;
console.log(`TOTAL SIZE=====${index}`);
for (let i=0; i < index; i++) {
const docRef = records[i].ref;
// YOUR UPDATES
batch.update(docRef, {isDeleted: false});
if ((i + 1) % 499 === 0) {
await batch.commit();
batch = db.batch();
}
}
// For committing final batch
if (index % 499 !== 0) {
await batch.commit();
}
console.log('write completed');
} catch (error) {
console.error(`updateWorkers() errored out : ${error.stack}`);
reject(error); // assumes an enclosing Promise executor supplies reject
}
The previous answers already explain the issue.
I'm sharing the final code that I built and that worked for me, since I needed something that operates in a more decoupled manner than most of the solutions presented above.
import { FireDb } from "#services/firebase"; // = firebase.firestore();
type TDocRef = FirebaseFirestore.DocumentReference;
type TDocData = FirebaseFirestore.DocumentData;
let fireBatches = [FireDb.batch()];
let batchSizes = [0];
let batchIdxToUse = 0;
export default class FirebaseUtil {
static addBatchOperation(
operation: "create",
ref: TDocRef,
data: TDocData
): void;
static addBatchOperation(
operation: "update",
ref: TDocRef,
data: TDocData,
precondition?: FirebaseFirestore.Precondition
): void;
static addBatchOperation(
operation: "set",
ref: TDocRef,
data: TDocData,
setOpts?: FirebaseFirestore.SetOptions
): void;
static addBatchOperation(
operation: "create" | "update" | "set",
ref: TDocRef,
data: TDocData,
opts?: FirebaseFirestore.Precondition | FirebaseFirestore.SetOptions
): void {
// Lines below make sure we stay below the limit of 500 writes per
// batch
if (batchSizes[batchIdxToUse] === 500) {
fireBatches.push(FireDb.batch());
batchSizes.push(0);
batchIdxToUse++;
}
batchSizes[batchIdxToUse]++;
const batchArgs: [TDocRef, TDocData] = [ref, data];
if (opts) batchArgs.push(opts);
switch (operation) {
// Specific case for "set" is required because of some weird TS
// glitch that doesn't allow me to use the arg "operation" to
// call the function
case "set":
fireBatches[batchIdxToUse].set(...batchArgs);
break;
default:
fireBatches[batchIdxToUse][operation](...batchArgs);
break;
}
}
public static async runBatchOperations() {
// The lines below clear the globally available batches so we
// don't run them twice if we call this function more than once
const currentBatches = [...fireBatches];
fireBatches = [FireDb.batch()];
batchSizes = [0];
batchIdxToUse = 0;
await Promise.all(currentBatches.map((batch) => batch.commit()));
}
}
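A hypothetical usage sketch for the class above (run it inside an async function; docRefs is assumed to be an array of FirebaseFirestore.DocumentReference):
async function softDeleteAll(docRefs) {
  for (const ref of docRefs) {
    FirebaseUtil.addBatchOperation("update", ref, { isDeleted: true });
  }
  await FirebaseUtil.runBatchOperations();
}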
Based on all the above answers, I put together the following pieces of code. One can put them into a module and use them in JavaScript on both the back-end and the front-end to use Firestore batch writes easily, without worrying about the 500-write limit.
Back-end (Node.js)
// The Firebase Admin SDK to access Firestore.
const admin = require("firebase-admin");
admin.initializeApp();
// Firestore does not accept more than 500 writes in a transaction or batch write.
const MAX_TRANSACTION_WRITES = 499;
const isFirestoreDeadlineError = (err) => {
console.log({ err });
const errString = err.toString();
return (
errString.includes("Error: 13 INTERNAL: Received RST_STREAM") ||
errString.includes("Error: 4 DEADLINE_EXCEEDED: Deadline exceeded")
);
};
const db = admin.firestore();
// How many transactions/batchWrites out of 500 so far.
// I wrote the following functions to easily use batch writes without worrying about the 500 limit.
let writeCounts = 0;
let batchIndex = 0;
let batchArray = [db.batch()];
// Commit every batch accumulated so far.
const makeCommitBatch = async () => {
console.log("makeCommitBatch");
await Promise.all(batchArray.map((bch) => bch.commit()));
};
// Commit the batchWrite; if you got a Firestore Deadline Error try again every 4 seconds until it gets resolved.
const commitBatch = async () => {
try {
await makeCommitBatch();
} catch (err) {
console.log({ err });
if (isFirestoreDeadlineError(err)) {
const theInterval = setInterval(async () => {
try {
await makeCommitBatch();
clearInterval(theInterval);
} catch (err) {
console.log({ err });
if (!isFirestoreDeadlineError(err)) {
clearInterval(theInterval);
throw err;
}
}
}, 4000);
}
}
};
// If the batch has reached 499 writes, push a fresh batch object and reset the counter.
const checkRestartBatchWriteCounts = () => {
writeCounts += 1;
if (writeCounts >= MAX_TRANSACTION_WRITES) {
batchIndex++;
batchArray.push(db.batch());
writeCounts = 0;
}
};
const batchSet = (docRef, docData) => {
batchArray[batchIndex].set(docRef, docData);
checkRestartBatchWriteCounts();
};
const batchUpdate = (docRef, docData) => {
batchArray[batchIndex].update(docRef, docData);
checkRestartBatchWriteCounts();
};
const batchDelete = (docRef) => {
batchArray[batchIndex].delete(docRef);
checkRestartBatchWriteCounts();
};
module.exports = {
admin,
db,
MAX_TRANSACTION_WRITES,
checkRestartBatchWriteCounts,
commitBatch,
isFirestoreDeadlineError,
batchSet,
batchUpdate,
batchDelete,
};
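A hypothetical usage sketch for the helpers above (the module path and collection name are illustrative):
const { db, batchUpdate, commitBatch } = require("./firestoreBatch");
async function touchAllCities() {
  const snapshot = await db.collection("cities").get();
  snapshot.docs.forEach((doc) => batchUpdate(doc.ref, { touched: true }));
  await commitBatch();
}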
Front-end
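// Assumed imports for the class below (Firebase v8 namespaced SDK; the
// original snippet references fbApp without importing it):
import fbApp from "firebase/app";
import "firebase/firestore";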
// Firestore does not accept more than 500 writes in a transaction or batch write.
const MAX_TRANSACTION_WRITES = 499;
const isFirestoreDeadlineError = (err) => {
return (
err.message.includes("DEADLINE_EXCEEDED") ||
err.message.includes("Received RST_STREAM")
);
};
class Firebase {
constructor(fireConfig, instanceName) {
let app = fbApp;
if (instanceName) {
app = app.initializeApp(fireConfig, instanceName);
} else {
app.initializeApp(fireConfig);
}
this.name = app.name;
this.db = app.firestore();
this.firestore = app.firestore;
// How many transactions/batchWrites out of 500 so far.
// I wrote the following functions to easily use batch writes without worrying about the 500 limit.
this.writeCounts = 0;
this.batch = this.db.batch();
this.isCommitting = false;
}
async makeCommitBatch() {
console.log("makeCommitBatch");
if (!this.isCommitting) {
this.isCommitting = true;
await this.batch.commit();
this.writeCounts = 0;
this.batch = this.db.batch();
this.isCommitting = false;
} else {
const batchWaitInterval = setInterval(async () => {
if (!this.isCommitting) {
this.isCommitting = true;
await this.batch.commit();
this.writeCounts = 0;
this.batch = this.db.batch();
this.isCommitting = false;
clearInterval(batchWaitInterval);
}
}, 400);
}
}
async commitBatch() {
try {
await this.makeCommitBatch();
} catch (err) {
console.log({ err });
if (isFirestoreDeadlineError(err)) {
const theInterval = setInterval(async () => {
try {
await this.makeCommitBatch();
clearInterval(theInterval);
} catch (err) {
console.log({ err });
if (!isFirestoreDeadlineError(err)) {
clearInterval(theInterval);
throw err;
}
}
}, 4000);
}
}
}
async checkRestartBatchWriteCounts() {
this.writeCounts += 1;
if (this.writeCounts >= MAX_TRANSACTION_WRITES) {
await this.commitBatch();
}
}
async batchSet(docRef, docData) {
if (!this.isCommitting) {
this.batch.set(docRef, docData);
await this.checkRestartBatchWriteCounts();
} else {
const batchWaitInterval = setInterval(async () => {
if (!this.isCommitting) {
this.batch.set(docRef, docData);
await this.checkRestartBatchWriteCounts();
clearInterval(batchWaitInterval);
}
}, 400);
}
}
async batchUpdate(docRef, docData) {
if (!this.isCommitting) {
this.batch.update(docRef, docData);
await this.checkRestartBatchWriteCounts();
} else {
const batchWaitInterval = setInterval(async () => {
if (!this.isCommitting) {
this.batch.update(docRef, docData);
await this.checkRestartBatchWriteCounts();
clearInterval(batchWaitInterval);
}
}, 400);
}
}
async batchDelete(docRef) {
if (!this.isCommitting) {
this.batch.delete(docRef);
await this.checkRestartBatchWriteCounts();
} else {
const batchWaitInterval = setInterval(async () => {
if (!this.isCommitting) {
this.batch.delete(docRef);
await this.checkRestartBatchWriteCounts();
clearInterval(batchWaitInterval);
}
}, 400);
}
}
}
No citations or documentation; I invented this code myself. It worked for me, and it looks clean and simple to read and use. If someone likes it, they can use it too.
It's better to add an automated test, because the code uses the private variable _ops, which can change after a package upgrade. For example, in older versions it was called _mutations.
async function commitBatch(batch) {
const MAX_OPERATIONS_PER_COMMIT = 500;
while (batch._ops.length > MAX_OPERATIONS_PER_COMMIT) {
const batchPart = admin.firestore().batch();
batchPart._ops = batch._ops.splice(0, MAX_OPERATIONS_PER_COMMIT - 1);
await batchPart.commit();
}
await batch.commit();
}
Usage:
const batch = admin.firestore().batch();
batch.delete(someRef);
batch.update(someRef);
...
await commitBatch(batch);
Simple solution: just fire twice?
My array is resultsFinal. I fire the batch once with a limit of 490, and a second time with a limit of the length of the array (resultsFinal.length).
Works fine for me :)
How do you check it? You go to Firebase and delete your collection; Firebase says you have deleted XXX docs, the same as the length of your array? OK, so you are good to go.
async function quickstart(results) {
// we get results in parameter for get the data inside quickstart function
const resultsFinal = results;
// console.log(resultsFinal.length);
let batch = firestore.batch();
// limit of firebase is 500 requests per transaction/batch/send
for (let i = 0; i < 490; i++) {
const doc = firestore.collection('testMore490').doc();
const object = resultsFinal[i];
batch.set(doc, object);
}
await batch.commit();
// const batchTwo = firestore.batch();
batch = firestore.batch();
for (let i = 490; i < 776; i++) { // start at 490, otherwise element 490 is skipped
const objectPartTwo = resultsFinal[i];
const doc = firestore.collection('testMore490').doc();
batch.set(doc, objectPartTwo);
}
await batch.commit();
}

Imported async data from another module in Node does not resolve, how can I fix this?

This is the module that collects and exports the async data: scraper.js
const express = require('express')
const cheerio = require('cheerio')
const request = require("tinyreq")
const fs = require('fs')
const _ = require('lodash')
const uuid = require('uuid/v4')
const async = require('async')
const mental_models = {
url: 'https://www.farnamstreetblog.com/mental-models/',
data: {}
}
const decision_making = {
url: 'https://www.farnamstreetblog.com/smart-decisions/',
data: {}
}
const cognitive_bias = {
url: 'https://betterhumans.coach.me/cognitive-bias-cheat-sheet-55a472476b18',
data: {}
}
const DATA_URLS = [mental_models, decision_making, cognitive_bias]
const filterScrape = async (source, params) => {
let filtered_data = {
topics: [],
content: [],
additional_content: []
}
let response = await scrape(source)
try {
let $ = cheerio.load(response)
params.forEach((elem) => {
let headers = ['h1', 'h2', 'h3']
if ($(elem) && headers.includes(elem)) {
let topic = {}
let content = {}
let id = uuid()
topic.id = id
topic.text = $(elem).text()
if ($(elem).closest('p')) {
content.text = $(elem).closest('p').text()
content.id = id
}
filtered_data.topics.push(topic)
filtered_data.content.push(content)
} else if ($(elem) && !headers.includes(elem)) {
let content = {}
let id = uuid()
content.text = $(elem).text()
content.id = id
filtered_data.additional_content.push(content)
} else {
}
})
}
catch (err) {
console.log(err)
}
return filtered_data
}
const scrape = (source) => {
return new Promise((resolve, reject) => {
request(source.url, function (err, body) {
if (err) {
reject(err)
return
}
resolve(body)
})
})
}
const DATA = _.map(DATA_URLS, async (source) => {
let params = ['h1', 'h2', 'h3', 'p']
let new_data = await filterScrape(source, params)
try {
source.data = new_data
}
catch (err) {
console.log(err)
}
})
module.exports = DATA
This is the module that imports the data: neural.js
const brain = require('brain')
const neural_net = new brain.NeuralNetwork()
const DATA = require('./scraper')
console.log(DATA)
Obviously there's not much going on; I've removed the code since the variable doesn't resolve. When logged, it logs a promise, but the promise does not resolve. However, in the imported module the promise is logged and then resolves. What gives? Should I import a function that resolves the data?
Of course it would be best to import that function; however, it won't change the issue in your code, which is here:
const DATA = _.map(DATA_URLS, async (source) => {
Lodash doesn't support async iteration, so you need some other method. One option would be to use the newest Node.js version (10.x) and make use of async iteration, but that won't use the full power of asynchronous code.
You can also use scramjet - a framework my company is supporting. The code above would take the following form:
const {DataStream} = require("scramjet");
const DATA_URLS = [mental_models, decision_making, cognitive_bias];
module.exports = async () => DataStream.fromArray(DATA_URLS)
.setOptions({maxParallel: 2}) // if you need to limit that at all.
.map(async (source) => {
let params = ['h1', 'h2', 'h3', 'p'];
let data = await filterScrape(source, params);
return { url: source.url, data };
})
})
.toArray();
The other file would take the following form:
const brain = require('brain')
const neural_net = new brain.NeuralNetwork()
const scraper = require('./scraper')
(async () => {
const DATA = await scraper();
console.log(DATA); // or do whatever else you were expecting...
})();
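If you would rather not add a dependency, a minimal sketch of the same idea with plain Promise.all (reusing the DATA_URLS and filterScrape already defined in scraper.js) would be:
// scraper.js - export an async function instead of an in-flight array of promises.
module.exports = async () => {
  const params = ['h1', 'h2', 'h3', 'p'];
  return Promise.all(
    DATA_URLS.map(async (source) => {
      const data = await filterScrape(source, params);
      return { url: source.url, data };
    })
  );
};
The caller can then await it exactly as in the snippet above.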
