opensea place bid using metamask - javascript

const NetworkToUse = process.env.REACT_APP_NETWORK;
const mnemonicWalletSubprovider = new MnemonicWalletSubprovider({
mnemonic: process.env.REACT_APP_MNEMONIC,
});
const infuraRpcSubprovider = new RPCSubprovider({
rpcUrl: `https://${NetworkToUse}.infura.io/v3/${process.env.REACT_APP_INFURA_KEY}`,
});
const providerEngine = new Web3ProviderEngine();
if (window.ethereum) {
providerEngine.addProvider(new SignerSubprovider(window.ethereum));
}
// providerEngine.addProvider(mnemonicWalletSubprovider);
providerEngine.addProvider(infuraRpcSubprovider);
providerEngine.start();
const seaport = new OpenSeaPort(
providerEngine,
{
networkName: NetworkToUse === "mainnet" ? Network.Main : Network.Rinkeby,
apiKey: process.env.REACT_APP_API_KEY,
},
(arg) => {
console.log("From OpenSeaPort CB:");
console.log(arg);
}
);
const placeBidMetaMask = async (order) => {
setIsProcessing(true);
if (typeof window.ethereum === "undefined") {
setError("Please make sure you have MetaMask installed!");
return;
}
if (!bidPrice || bidPrice < asset.price) {
setError("Insufficient Funds!");
return;
}
const { tokenId, tokenAddress } = order.asset;
try {
const [userAccount] = await window.ethereum.request({
method: "eth_requestAccounts",
});
const offer = await seaport.createBuyOrder({
asset: {
tokenId,
tokenAddress,
schemaName: asset.details.assetContract.schemaName,
},
accountAddress: userAccount,
startAmount: bidPrice,
});
console.log(offer);
setMessage("Buy Order Created");
} catch (err) {
setError(err.message);
console.log(err.message);
} finally {
setIsProcessing(false);
}
};
I am using MetaMask as the wallet for bidding.
Hi, I am using the above code to place a bid on OpenSea. It works, but I am using my personal MNEMONIC.
In a real deployment I can't get a mnemonic from the user's MetaMask wallet.
Is there an alternate way to place the bid?
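For reference, here is a hedged sketch of the same setup with the mnemonic subprovider dropped entirely, so signing comes only from the user's injected MetaMask provider. This is only a sketch based on the code above (the env variable names and imports are the same assumptions), not a confirmed OpenSea-supported flow.

// Sketch: build the provider engine with no mnemonic at all.
// MetaMask (window.ethereum) handles signing; Infura handles reads.
const providerEngine = new Web3ProviderEngine();
if (window.ethereum) {
  providerEngine.addProvider(new SignerSubprovider(window.ethereum));
}
providerEngine.addProvider(
  new RPCSubprovider({
    rpcUrl: `https://${NetworkToUse}.infura.io/v3/${process.env.REACT_APP_INFURA_KEY}`,
  })
);
providerEngine.start();

const seaport = new OpenSeaPort(providerEngine, {
  networkName: NetworkToUse === "mainnet" ? Network.Main : Network.Rinkeby,
  apiKey: process.env.REACT_APP_API_KEY,
});

// createBuyOrder should then be called with the connected MetaMask account:
// const [userAccount] = await window.ethereum.request({ method: "eth_requestAccounts" });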

Related

window.web3.eth.Contract no longer works. How do i now connect to a contract?

Reading through this resource, https://docs.metamask.io/guide/provider-migration.html#summary-of-breaking-changes, it seems it is still possible to interact with a contract using window.ethereum,
e.g. (taken from the above link):
const transactionHash = await ethereum.request({
method: 'eth_sendTransaction',
params: [
{
to: '0x...',
'from': '0x...',
value: '0x...',
// And so on...
},
],
});
But I cannot figure out how to connect to the contract.
What do I replace this with?
contract = await new window.web3.eth.Contract(ABI,ADDRESS);
I am using the web3 library via this resource
https://cdn.jsdelivr.net/npm/web3#latest/dist/web3.min.js
My simple script is below:
var account = null;
var contract = null;
const ABI = "the abi is here"
const ADDRESS = "contract address is here";
async function asyncCall() {
console.log('async');
if (window.ethereum) {
try {
const accounts = await window.ethereum.request({ method: 'eth_requestAccounts' });
account = accounts[0];
document.getElementById('wallet-address').textContent = account;
// this is the bit i'm stuck on currently
contract = await new window.web3.eth.Contract(ABI,ADDRESS);
//
var mintButton = document.getElementById('mint');
mintButton.addEventListener("click", function(){
console.log('minting');
contract.methods.mint(account).send({from:account,value:"100"});
});
var totalsupply = await contract.methods.totalsupply().call();
document.getElementById('ttt').textContent = totalsupply;
} catch (error) {
if (error.code === 4001) {
console.log('User rejected request');
}
console.log(error);
}
}
}
asyncCall();
Since MetaMask no longer injects window.web3 into web pages, you can do this before making the call to get the contract:
const Web3 = require('web3');
// Create Web3 instance
const web3 = new Web3(window.ethereum); // Where window.ethereum is your provider.
You can then get your contract by calling:
const contract = new web3.eth.Contract(ABI, ADDRESS);
try this:
let provider = window.ethereum;
if (provider) {
await provider.request({ method: "eth_requestAccounts" });
this.web3 = new Web3(provider);
this.contract = new this.web3.eth.Contract(abi_v2, address);
}

Unable to retrieve the Ethereum balance of MetaMask account

const showAccount = document.querySelector('.showAccount');
const showBalance = document.querySelector('.showBalance');
const Web3 = require("web3");
getAccount();
getBalance();
getAccount returns the wallet id
async function getAccount() {
const accounts = await ethereum.request({ method: 'eth_requestAccounts' });
const account = accounts[0];
showAccount.innerHTML = account;
}
getBalance returns undefined instead of the amount of Ether from Metamask
async function getBalance() {
const accounts = await ethereum.request({ method: 'eth_requestAccounts' });
const account = accounts[0];
showBalance.innerHTML = account.balance;
}
Maybe someone knows a good API (with examples) for retrieving more values than just these two, or a good video to learn from.
I found the properties for my accounts object: here
I customized your code and now it displays the amount of Ether from the Account:
const showAccount = document.querySelector('.showAccount');
const showBalance = document.querySelector('.showBalance');
getAccount();
loadBalance();
async function getAccount() {
const accounts = await ethereum.request({ method: 'eth_requestAccounts' });
const account = accounts[0];
showAccount.innerHTML = account;
}
function loadBalance(){
web3Provider = null;
contracts = {};
account = '0x0';
const Web3 = require("web3");
const ethEnabled = async () => {
if (window.ethereum) {
await window.ethereum.send('eth_requestAccounts');
window.web3 = new Web3(window.ethereum);
return true;
}
}
if (typeof web3 !== 'undefined') {
// If a web3 instance is already provided by Meta Mask.
web3Provider = web3.currentProvider;
web3 = new Web3(web3.currentProvider);
} else {
// Specify default instance if no web3 instance provided
web3Provider = new Web3.providers.HttpProvider('http://localhost:7545');
web3 = new Web3(web3Provider);
}
$.getJSON("Market.json", function (market) {
console.log("initializing Market contract")
// Instantiate a new truffle contract from the artifact
contracts.Market = TruffleContract(market);
// Connect provider to interact with contract
contracts.Market.setProvider(web3Provider);
});
$.getJSON("Users.json", function (users) {
console.log("initializing User contract")
// Instantiate a new truffle contract from the artifact
contracts.Users = TruffleContract(users);
// Connect provider to interact with contract
contracts.Users.setProvider(web3Provider);
});
var marketInstance;
var userInstance;
var loader = $("#loader");
var content = $("#content");
//loader.show();
content.show();
// Load account data
console.log("loading account data")
var currentAccount;
web3.eth.getCoinbase(function (err, account) {
if (err === null) {
console.log("Your Account: " + account)
account = account;
currentAccount = account;
web3.eth.getBalance(account, function(err, balance) {
if (err === null) { //Note:set id="accountBalance" in your html page
$("#accountBalance").text(web3.fromWei(balance, "ether") + " ETH");
}
});
}
});
}
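For reference, here is a shorter hedged sketch that reads the balance straight from the injected provider with web3 1.x, without the Truffle setup (it reuses the .showBalance element from the question; MetaMask and the web3 bundle are assumed to be loaded):

const showBalance = document.querySelector('.showBalance');

async function getBalance() {
  // Ask MetaMask for the connected accounts, then query the balance in wei.
  const accounts = await window.ethereum.request({ method: 'eth_requestAccounts' });
  const web3 = new Web3(window.ethereum);
  const balanceWei = await web3.eth.getBalance(accounts[0]);
  // Convert wei to ether for display.
  showBalance.innerHTML = web3.utils.fromWei(balanceWei, 'ether') + ' ETH';
}

getBalance();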

How to save Telegram data in a Google spreadsheet?

I have built a simple Telegram bot using Telegraf and used this code to log the specific information that I need:
bot.on('text', (ctx, next) => {
console.log(`[text] ${ ctx.message.chat.id } ${ ctx.from.username } ${ ctx.message.chat.first_name+ " " + ctx.message.chat.last_name } ${ ctx.message.text }`);
return next();
});
and as a result, the log looks something like this:
[text] 563789142 xMA3x Mohammed Abbas /start
Now I want to save that information in a Google spreadsheet. I followed this tutorial and was only able to push hard-coded, quoted values into the spreadsheet, but I don't know how to push the console.log result into the spreadsheet.
Anyway, here is my code:
const { Telegraf } = require('telegraf');
const bot = new Telegraf("xyz")
const { google } = require("googleapis");
const keys = require("./Keys.json")
bot.on('text', (ctx, next) => {
console.log(`[text] ${ ctx.message.chat.id } ${ ctx.from.username } ${ ctx.message.chat.first_name+ " " + ctx.message.chat.last_name } ${ ctx.message.text }`);
return next();
});
bot.start((ctx) => ctx.reply('Welcome'))
bot.help((ctx) => ctx.reply('Send me a sticker'))
bot.on('sticker', (ctx) => ctx.reply('👍'))
bot.hears('hi', (ctx) => ctx.reply('Hey there'))
const client = new google.auth.JWT(
keys.client_email,
null,
keys.private_key,
["https://www.googleapis.com/auth/spreadsheets"]
);
client.authorize(function(err){
if(err){
console.log(err);
return;
} else {
console.log("connected");
gsrun(client);
}
});
async function gsrun(cl){
const gsapi = google.sheets({version:"v4", auth: cl});
const updateOptions = {
spreadsheetId: "xyz",
range: "Sheet1",
valueInputOption: "RAW",
insertDataOption: "INSERT_ROWS",
resource: {
values:[
["this is working"]
]}
};
let res = await gsapi.spreadsheets.values.append(updateOptions);
console.log(res);
}
bot.launch()
So as you can see, the "this is working" value is pushed successfully into the spreadsheet, but when I try to add another value like ctx.message.chat.id it gives me ReferenceError: ctx is not defined.
So how can I make the Google Sheets API recognize the Telegraf values? Or, to be more general, how can I save the ctx.message.chat.id, ctx.from.username, etc. info (that comes from Telegram) into the spreadsheet?
ctx lives within your bot hooks, so to save the information to the sheet, you have to call your Google Sheets function inside the relevant hook.
Possible updates:
const { Telegraf } = require('telegraf');
const bot = new Telegraf("xyz")
const { google } = require("googleapis");
const keys = require("./Keys.json")
const client = new google.auth.JWT(
keys.client_email,
null,
keys.private_key,
["https://www.googleapis.com/auth/spreadsheets"]
);
async function gsrun(cl, data){
const gsapi = google.sheets({version:"v4", auth: cl});
const updateOptions = {
spreadsheetId: "xyz",
range: "Sheet1",
valueInputOption: "RAW",
insertDataOption: "INSERT_ROWS",
resource: {
values:[
[data]
]}
};
let res = await gsapi.spreadsheets.values.append(updateOptions);
console.log(res);
}
const saveMetadataToSheets = (data) => {
client.authorize(function(err){
if(err){
console.log(err);
return;
} else {
console.log("connected");
gsrun(client, data);
}
});
}
bot.on('text', (ctx, next) => {
const data = `[text] ${ ctx.message.chat.id } ${ ctx.from.username } ${ ctx.message.chat.first_name+ " " + ctx.message.chat.last_name } ${ ctx.message.text }`
console.log(data);
// pass any data that you need to save to the sheets
saveMetadataToSheets(data)
return next();
});
bot.start((ctx) => ctx.reply('Welcome'))
bot.help((ctx) => ctx.reply('Send me a sticker'))
bot.on('sticker', (ctx) => ctx.reply('👍'))
bot.hears('hi', (ctx) => ctx.reply('Hey there'))
bot.launch()

Can't figure out why my app.get is being run twice?

I have an app.get route with quite a bit of logic inside it. Everything works great aside from some of the logic being called twice for some reason. I noticed that when I was saving something to my db it would save two rows.
So I put a console.log in that area and sure enough it was logging it twice.
Any reason why this is happening?
app.get('/shopify/callback', (req, res) => {
const { shop, hmac, code, state } = req.query;
const stateCookie = cookie.parse(req.headers.cookie).state;
if (state !== stateCookie) {
return res.status(403).send('Request origin cannot be verified');
}
if (shop && hmac && code) {
// DONE: Validate request is from Shopify
const map = Object.assign({}, req.query);
delete map['signature'];
delete map['hmac'];
const message = querystring.stringify(map);
const providedHmac = Buffer.from(hmac, 'utf-8');
const generatedHash = Buffer.from(
crypto
.createHmac('sha256', config.oauth.client_secret)
.update(message)
.digest('hex'),
'utf-8'
);
let hashEquals = false;
try {
hashEquals = crypto.timingSafeEqual(generatedHash, providedHmac)
} catch (e) {
hashEquals = false;
};
if (!hashEquals) {
return res.status(400).send('HMAC validation failed');
}
// DONE: Exchange temporary code for a permanent access token
const accessTokenRequestUrl = 'https://' + shop + '/admin/oauth/access_token';
const accessTokenPayload = {
client_id: config.oauth.api_key,
client_secret: config.oauth.client_secret,
code,
};
request.post(accessTokenRequestUrl, { json: accessTokenPayload })
.then((accessTokenResponse) => {
const accessToken = accessTokenResponse.access_token;
// DONE: Use access token to make API call to 'shop' endpoint
const shopRequestUrl = 'https://' + shop + '/admin/shop.json';
const shopRequestHeaders = {
'X-Shopify-Access-Token': accessToken,
}
request.get(shopRequestUrl, { headers: shopRequestHeaders })
.then((shopResponse) => {
const response = JSON.parse(shopResponse);
const shopData = response.shop;
console.log('BEING CALLED TWICE...')
res.render('pages/brand_signup',{
shop: shopData.name
})
})
.catch((error) => {
res.status(error.statusCode).send(error.error.error_description);
});
})
.catch((error) => {
res.status(error.statusCode).send(error.error.error_description);
});
} else {
res.status(400).send('Required parameters missing');
}
});
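One hedged way to debug this is to log every request that reaches the server before the route handler runs, to see whether /shopify/callback is actually hit twice and what triggers the second request (a double redirect, a prefetch, a stray asset request, etc.):

// Hedged debugging sketch: register this middleware before the routes.
app.use((req, res, next) => {
  console.log(new Date().toISOString(), req.method, req.originalUrl, req.get('referer') || '-');
  next();
});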

How can I update more than 500 docs in Firestore using Batch?

I'm trying to update a field timestamp with the Firestore admin timestamp in a collection with more than 500 docs.
const batch = db.batch();
const serverTimestamp = admin.firestore.FieldValue.serverTimestamp();
db
.collection('My Collection')
.get()
.then((docs) => {
docs.forEach((doc) => {
batch.set(doc.ref, {
serverTimestamp,
}, {
merge: true,
});
});
return batch.commit();
})
.then(() => res.send('All docs updated'))
.catch(console.error);
This throws an error
{ Error: 3 INVALID_ARGUMENT: cannot write more than 500 entities in a single call
at Object.exports.createStatusError (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\common.js:87:15)
at Object.onReceiveStatus (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:1188:28)
at InterceptingListener._callNext (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:564:42)
at InterceptingListener.onReceiveStatus (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:614:8)
at callback (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:841:24)
code: 3,
metadata: Metadata { _internal_repr: {} },
details: 'cannot write more than 500 entities in a single call' }
Is there a way I can write a recursive method which creates a batch object, updates 500 docs at a time, and repeats until all the docs are updated?
From the docs I know that the delete operation is possible with a recursive approach, as mentioned here:
https://firebase.google.com/docs/firestore/manage-data/delete-data#collections
But, for updating, I'm not sure how to end the execution since the docs are not being deleted.
I also ran into the problem of updating more than 500 documents inside a Firestore collection, and I would like to share how I solved it.
I use Cloud Functions to update my collection inside Firestore, but this should also work in client-side code.
The solution counts every operation made against the batch, and after the limit is reached a new batch is created and pushed to the batchArray.
After all updates are queued, the code loops through the batchArray and commits every batch inside the array.
It is important to count every set(), update(), and delete() operation made against the batch, because they all count toward the 500-operation limit.
const documentSnapshotArray = await firestore.collection('my-collection').get();
const batchArray = [];
batchArray.push(firestore.batch());
let operationCounter = 0;
let batchIndex = 0;
documentSnapshotArray.forEach(documentSnapshot => {
const documentData = documentSnapshot.data();
// update document data here...
batchArray[batchIndex].update(documentSnapshot.ref, documentData);
operationCounter++;
if (operationCounter === 499) {
batchArray.push(firestore.batch());
batchIndex++;
operationCounter = 0;
}
});
await Promise.all(batchArray.map((batch) => batch.commit())); // commit every batch and wait for all commits to finish
return;
I liked this simple solution:
const users = await db.collection('users').get()
const batches = _.chunk(users.docs, 500).map(userDocs => {
const batch = db.batch()
userDocs.forEach(doc => {
batch.set(doc.ref, { field: 'myNewValue' }, { merge: true })
})
return batch.commit()
})
await Promise.all(batches)
Just remember to add import * as _ from "lodash" at the top. Based on this answer.
You can use the default BulkWriter. This method uses the 500/50/5 rule.
Example:
let bulkWriter = firestore.bulkWriter();
bulkWriter.create(documentRef, {foo: 'bar'});
bulkWriter.update(documentRef2, {foo: 'bar'});
bulkWriter.delete(documentRef3);
await bulkWriter.close().then(() => {
console.log('Executed all writes');
});
As mentioned above, Sebastian's answer is good and I upvoted it too, although I faced an issue while updating 25,000+ documents in one go.
The tweak to the logic is below.
console.log(`Updating documents...`);
let collectionRef = db.collection('cities');
try {
let batch = db.batch();
const documentSnapshotArray = await collectionRef.get();
const records = documentSnapshotArray.docs;
const index = documentSnapshotArray.size;
console.log(`TOTAL SIZE=====${index}`);
for (let i=0; i < index; i++) {
const docRef = records[i].ref;
// YOUR UPDATES
batch.update(docRef, {isDeleted: false});
if ((i + 1) % 499 === 0) {
await batch.commit();
batch = db.batch();
}
}
// For committing final batch
if (index % 499 !== 0) {
await batch.commit();
}
console.log('write completed');
} catch (error) {
console.error(`updateWorkers() errored out : ${error.stack}`);
reject(error);
}
The explanations given in previous answers already cover the issue.
I'm sharing the final code that I built and that worked for me, since I needed something that works in a more decoupled manner than most of the solutions presented above.
import { FireDb } from "#services/firebase"; // = firebase.firestore();
type TDocRef = FirebaseFirestore.DocumentReference;
type TDocData = FirebaseFirestore.DocumentData;
let fireBatches = [FireDb.batch()];
let batchSizes = [0];
let batchIdxToUse = 0;
export default class FirebaseUtil {
static addBatchOperation(
operation: "create",
ref: TDocRef,
data: TDocData
): void;
static addBatchOperation(
operation: "update",
ref: TDocRef,
data: TDocData,
precondition?: FirebaseFirestore.Precondition
): void;
static addBatchOperation(
operation: "set",
ref: TDocRef,
data: TDocData,
setOpts?: FirebaseFirestore.SetOptions
): void;
static addBatchOperation(
operation: "create" | "update" | "set",
ref: TDocRef,
data: TDocData,
opts?: FirebaseFirestore.Precondition | FirebaseFirestore.SetOptions
): void {
// Lines below make sure we stay below the limit of 500 writes per
// batch
if (batchSizes[batchIdxToUse] === 500) {
fireBatches.push(FireDb.batch());
batchSizes.push(0);
batchIdxToUse++;
}
batchSizes[batchIdxToUse]++;
const batchArgs: [TDocRef, TDocData] = [ref, data];
if (opts) batchArgs.push(opts);
switch (operation) {
// Specific case for "set" is required because of some weird TS
// glitch that doesn't allow me to use the arg "operation" to
// call the function
case "set":
fireBatches[batchIdxToUse].set(...batchArgs);
break;
default:
fireBatches[batchIdxToUse][operation](...batchArgs);
break;
}
}
public static async runBatchOperations() {
// The lines below clear the globally available batches so we
// don't run them twice if we call this function more than once
const currentBatches = [...fireBatches];
fireBatches = [FireDb.batch()];
batchSizes = [0];
batchIdxToUse = 0;
await Promise.all(currentBatches.map((batch) => batch.commit()));
}
}
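A hedged usage sketch of the class above (the import paths, the collection name, and the migrated field are assumptions, not part of the original answer):

import FirebaseUtil from "./FirebaseUtil"; // wherever the class above is exported from
import { FireDb } from "#services/firebase";

const migrateAll = async () => {
  const snapshot = await FireDb.collection("my-collection").get();
  snapshot.docs.forEach((doc) => {
    // Queue one update per document; the helper rolls over to a new batch at 500 writes.
    FirebaseUtil.addBatchOperation("update", doc.ref, { migrated: true });
  });
  // Commit every accumulated batch in parallel and reset the module state.
  await FirebaseUtil.runBatchOperations();
};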
Based on all the above answers, I put together the following pieces of code that you can put into a module in your JavaScript back-end and front-end to easily use Firestore batch writes without worrying about the 500-write limit.
Back-end (Node.js)
// The Firebase Admin SDK to access Firestore.
const admin = require("firebase-admin");
admin.initializeApp();
// Firestore does not accept more than 500 writes in a transaction or batch write.
const MAX_TRANSACTION_WRITES = 499;
const isFirestoreDeadlineError = (err) => {
console.log({ err });
const errString = err.toString();
return (
errString.includes("Error: 13 INTERNAL: Received RST_STREAM") ||
errString.includes("Error: 4 DEADLINE_EXCEEDED: Deadline exceeded")
);
};
const db = admin.firestore();
// How many transactions/batchWrites out of 500 so far.
// I wrote the following functions to easily use batchWrites without worrying about the 500 limit.
let writeCounts = 0;
let batchIndex = 0;
let batchArray = [db.batch()];
// Commit all accumulated batches.
const makeCommitBatch = async () => {
console.log("makeCommitBatch");
await Promise.all(batchArray.map((bch) => bch.commit()));
};
// Commit the batchWrite; if you get a Firestore deadline error, try again every 4 seconds until it is resolved.
const commitBatch = async () => {
try {
await makeCommitBatch();
} catch (err) {
console.log({ err });
if (isFirestoreDeadlineError(err)) {
const theInterval = setInterval(async () => {
try {
await makeCommitBatch();
clearInterval(theInterval);
} catch (err) {
console.log({ err });
if (!isFirestoreDeadlineError(err)) {
clearInterval(theInterval);
throw err;
}
}
}, 4000);
}
}
};
// If the current batch reaches 499 writes, start a new batch object and reset the counter.
const checkRestartBatchWriteCounts = () => {
writeCounts += 1;
if (writeCounts >= MAX_TRANSACTION_WRITES) {
batchIndex++;
batchArray.push(db.batch());
writeCounts = 0;
}
};
const batchSet = (docRef, docData) => {
batchArray[batchIndex].set(docRef, docData);
checkRestartBatchWriteCounts();
};
const batchUpdate = (docRef, docData) => {
batchArray[batchIndex].update(docRef, docData);
checkRestartBatchWriteCounts();
};
const batchDelete = (docRef) => {
batchArray[batchIndex].delete(docRef);
checkRestartBatchWriteCounts();
};
module.exports = {
admin,
db,
MAX_TRANSACTION_WRITES,
checkRestartBatchWriteCounts,
commitBatch,
isFirestoreDeadlineError,
batchSet,
batchUpdate,
batchDelete,
};
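For reference, a hedged usage sketch of the back-end helpers above (the module path, the collection name, and the updatedAt field are assumptions):

const { db, batchUpdate, commitBatch } = require("./firestoreBatchHelpers"); // the module above

const updateAllDocs = async () => {
  const snapshot = await db.collection("my-collection").get();
  snapshot.docs.forEach((doc) => {
    // Each call counts toward the 499-write limit and rolls over to a new batch automatically.
    batchUpdate(doc.ref, { updatedAt: new Date() });
  });
  // Commits every accumulated batch (and retries on deadline errors).
  await commitBatch();
};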
Front-end
// Firestore does not accept more than 500 writes in a transaction or batch write.
const MAX_TRANSACTION_WRITES = 499;
const isFirestoreDeadlineError = (err) => {
return (
err.message.includes("DEADLINE_EXCEEDED") ||
err.message.includes("Received RST_STREAM")
);
};
class Firebase {
constructor(fireConfig, instanceName) {
let app = fbApp;
if (instanceName) {
app = app.initializeApp(fireConfig, instanceName);
} else {
app.initializeApp(fireConfig);
}
this.name = app.name;
this.db = app.firestore();
this.firestore = app.firestore;
// How many transactions/batchWrites out of 500 so far.
// I wrote the following functions to easily use batchWrites without worrying about the 500 limit.
this.writeCounts = 0;
this.batch = this.db.batch();
this.isCommitting = false;
}
async makeCommitBatch() {
console.log("makeCommitBatch");
if (!this.isCommitting) {
this.isCommitting = true;
await this.batch.commit();
this.writeCounts = 0;
this.batch = this.db.batch();
this.isCommitting = false;
} else {
const batchWaitInterval = setInterval(async () => {
if (!this.isCommitting) {
this.isCommitting = true;
await this.batch.commit();
this.writeCounts = 0;
this.batch = this.db.batch();
this.isCommitting = false;
clearInterval(batchWaitInterval);
}
}, 400);
}
}
async commitBatch() {
try {
await this.makeCommitBatch();
} catch (err) {
console.log({ err });
if (isFirestoreDeadlineError(err)) {
const theInterval = setInterval(async () => {
try {
await this.makeCommitBatch();
clearInterval(theInterval);
} catch (err) {
console.log({ err });
if (!isFirestoreDeadlineError(err)) {
clearInterval(theInterval);
throw err;
}
}
}, 4000);
}
}
}
async checkRestartBatchWriteCounts() {
this.writeCounts += 1;
if (this.writeCounts >= MAX_TRANSACTION_WRITES) {
await this.commitBatch();
}
}
async batchSet(docRef, docData) {
if (!this.isCommitting) {
this.batch.set(docRef, docData);
await this.checkRestartBatchWriteCounts();
} else {
const batchWaitInterval = setInterval(async () => {
if (!this.isCommitting) {
this.batch.set(docRef, docData);
await this.checkRestartBatchWriteCounts();
clearInterval(batchWaitInterval);
}
}, 400);
}
}
async batchUpdate(docRef, docData) {
if (!this.isCommitting) {
this.batch.update(docRef, docData);
await this.checkRestartBatchWriteCounts();
} else {
const batchWaitInterval = setInterval(async () => {
if (!this.isCommitting) {
this.batch.update(docRef, docData);
await this.checkRestartBatchWriteCounts();
clearInterval(batchWaitInterval);
}
}, 400);
}
}
async batchDelete(docRef) {
if (!this.isCommitting) {
this.batch.delete(docRef);
await this.checkRestartBatchWriteCounts();
} else {
const batchWaitInterval = setInterval(async () => {
if (!this.isCommitting) {
this.batch.delete(docRef);
await this.checkRestartBatchWriteCounts();
clearInterval(batchWaitInterval);
}
}, 400);
}
}
}
No citations or documentation; I came up with this code myself. It worked for me, and it looks clean and simple to read and use. If someone likes it, they can use it too.
It's better to write an automated test, because the code uses the private property _ops, which can change after a package upgrade. For example, in older versions it was called _mutations.
async function commitBatch(batch) {
const MAX_OPERATIONS_PER_COMMIT = 500;
while (batch._ops.length > MAX_OPERATIONS_PER_COMMIT) {
const batchPart = admin.firestore().batch();
batchPart._ops = batch._ops.splice(0, MAX_OPERATIONS_PER_COMMIT - 1);
await batchPart.commit();
}
await batch.commit();
}
Usage:
const batch = admin.firestore().batch();
batch.delete(someRef);
batch.update(someRef);
...
await commitBatch(batch);
Simple solution: just fire twice?
My array is "resultsFinal".
I fire the batch once with a limit of 490, and a second time with a limit of the length of the array (results.length).
Works fine for me :)
How do you check it?
Go to Firebase and delete your collection. Firebase says you deleted XXX docs, the same as the length of your array? OK, so you are good to go.
async function quickstart(results) {
// we get results in parameter for get the data inside quickstart function
const resultsFinal = results;
// console.log(resultsFinal.length);
let batch = firestore.batch();
// limit of firebase is 500 requests per transaction/batch/send
for (let i = 0; i < 490; i++) {
const doc = firestore.collection('testMore490').doc();
const object = resultsFinal[i];
batch.set(doc, object);
}
await batch.commit();
// const batchTwo = firestore.batch();
batch = firestore.batch();
for (let i = 490; i < resultsFinal.length; i++) { // start at 490 so the element at index 490 is not skipped
const objectPartTwo = resultsFinal[i];
const doc = firestore.collection('testMore490').doc();
batch.set(doc, objectPartTwo);
}
await batch.commit();
}
