How to send WhatsApp messages multiple times in WhatsApp Web - JavaScript

Able to send 50 messages in 2.5 seconds.
I tried it again and it worked, so this is the updated code. If you find any errors, feel free to let me know.
let messageYouWantToSend = `Good Morning`;
let howManyTime = 50;

// Despite its name, this resolves after ~50 ms: it types the message
// into the open conversation's composer and clicks the send button.
function resolveAfter2Seconds(message) {
  return new Promise(resolve => {
    const mk1 = document.querySelector('#main');
    const textmk1 = mk1.querySelector('div[contenteditable="true"]');
    if (!textmk1) {
      throw new Error('There is no opened conversation');
    }
    textmk1.focus();
    document.execCommand('insertText', false, message);
    textmk1.dispatchEvent(new Event('change', { bubbles: true }));
    setTimeout(() => {
      // WhatsApp Web has used both attributes for the send button over time
      (mk1.querySelector('[data-testid="send"]') || mk1.querySelector('[data-icon="send"]')).click();
      resolve('sent');
    }, 50);
  });
}

async function asyncCall(messageUwantToSend2) {
  for (let i = 0; i < howManyTime; i++) {
    const result = await resolveAfter2Seconds(messageUwantToSend2);
    console.log(`${result}==>${i + 1}`);
  }
}

asyncCall(messageYouWantToSend);

Related

How to get the author from the "channelDelete" event Discord.js v13

I'm trying to create an anti-crash function, but I'm stuck because the channelDelete event does not include the author. How can I get the author another way?
I tried using AuditLogEvent, but it didn't work.
My code:
const { AuditLogEvent } = require('discord.js'); // the original post misspelled this as "requier"
const usersMap = new Map();
const LIMIT = 3;
const TIMES = 10000;

bot.rest.on('channelDelete', async channel => {
  const fetchedLogs = await channel.guild.fetchAuditLogs({
    limit: 1,
    type: AuditLogEvent.ChannelDelete,
  });
  const deletionLog = fetchedLogs.entries.first();
  const { executor, target } = deletionLog;
  if (channel.guild.id != "940990129307263046") return;
  if (usersMap.has(executor.id)) {
    const userData = usersMap.get(executor.id);
    const { lastDelete, timer } = userData;
    let deleteCount = userData.deleteCount;
    const tim = channel.createdTimestamp - lastDelete.createdTimestamp;
    if (tim > TIMES) {
      usersMap.delete(executor.id);
    } else {
      ++deleteCount;
      if (parseInt(deleteCount) === LIMIT) {
        executor.ban();
      }
    }
  }
});
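A hedged aside for readers: channelDelete is emitted by the Client rather than bot.rest, and in discord.js v13 the audit-log type is the string 'CHANNEL_DELETE' (the AuditLogEvent enum arrived with v14). A minimal sketch under those assumptions, with bot assumed to be a Client created with the Guilds intent:

bot.on('channelDelete', async (channel) => {
  if (!channel.guild) return; // DM channels have no guild or audit log
  const logs = await channel.guild.fetchAuditLogs({
    limit: 1,
    type: 'CHANNEL_DELETE', // v13 string key instead of AuditLogEvent.ChannelDelete
  });
  const entry = logs.entries.first();
  if (!entry) return; // no matching audit log entry yet
  console.log(`Deleted by ${entry.executor.tag}`);
  // to ban, use channel.guild.members.ban(entry.executor);
  // User objects have no .ban() of their own
});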

Handle all promise rejections in nested promises? [duplicate]

This question already has an answer here:
Weird behavior with Promise throwing "Unhandled promise rejection" error
(1 answer)
Closed 8 months ago.
I'm using Promise.race so I can time out the fetch call. During a test against an API endpoint that doesn't exist, I'm getting an "Unhandled promise rejection" error. The code is supposed to console.log once upon any failure. Where am I not handling the promise rejection? FYI, the main function is retrieve.
window.path = "http://localhost:3000/records";

function isPrimary(color) {
  let colorIsPrimary = false;
  (color.startsWith("red") || color.startsWith("blue") || color.startsWith("yellow")) ? colorIsPrimary = true : null;
  return colorIsPrimary;
}

function transformData(records, page) {
  const transformedData = {
    "ids": [],
    "open": [],
    "closedPrimaryCount": 0,
    "previousPage": null,
    "nextPage": null
  }
  let closedPrimaryCount = 0;
  records.forEach((record, index) => {
    if (index < 10) {
      transformedData.ids.push(record.id);
      record["isPrimary"] = false;
      isPrimary(record.color) ? record.isPrimary = true : null;
      record.disposition == "open" ? transformedData.open.push(record) : null;
      if (record.disposition == "closed") {
        isPrimary(record.color) ? closedPrimaryCount++ : null;
      }
    }
  })
  transformedData.closedPrimaryCount = closedPrimaryCount;
  let previousPage = null;
  page > 1 ? previousPage = page - 1 : null;
  transformedData.previousPage = previousPage;
  let nextPage = null;
  records.length > 10 ? nextPage = page + 1 : null;
  transformedData.nextPage = nextPage;
  return transformedData;
}

function promiseTimeout(promise, ms) {
  let timeout = new Promise((resolve, reject) => {
    let timeoutID = setTimeout(() => {
      clearTimeout(timeoutID);
      reject("fetch failed - timeout");
    }, ms)
  })
  return Promise.race([promise, timeout]);
}

function doFetch(url) {
  return new Promise((resolve, reject) => {
    fetch(url)
      .then((response) => {
        if (!response.ok) {
          reject(new Error("fetch failed - non 200"));
        }
        response.json()
          .then((records) => {
            resolve(records);
          })
          .catch((error) => {
            reject(new Error("fetch failed - error from response.json"));
          })
      })
      .catch((error) => {
        reject(new Error("fetch failed - error from fetch"));
      })
  })
}

function retrieve({page = 1, colors = ["red", "brown", "blue", "yellow", "green"], thetest = false, windowPath = window.path} = {}) {
  return new Promise((resolve, reject) => {
    !thetest ? windowPath = "http://localhost:3000/records" : null;
    // limit set to 11 so we can determine nextPage
    const limit = "11";
    const offset = "" + ((page * 10) - 10);
    let colorArgs = "";
    colors.forEach((color, index) => {
      colorArgs = colorArgs + "&color[]=" + color;
    });
    const requestQuery = `limit=${limit}&offset=${offset}${colorArgs}`;
    const requestURL = new URI(windowPath);
    requestURL.query(requestQuery);
    const promiseRace = promiseTimeout(doFetch(requestURL.toString()), 4000);
    promiseRace.then((records) => {
      const transformedData = transformData(records, page);
      resolve(transformedData);
    })
    promiseRace.catch((error) => {
      console.log(error);
    })
  });
};

export default retrieve;
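For readers wondering where the stray rejection comes from: attaching .then and .catch to promiseRace as two separate statements creates two independent promise chains, and the chain produced by .then has no rejection handler of its own. A minimal illustration (handleRecords is a placeholder, not from the code above):

promiseRace.then(handleRecords);   // rejects too when promiseRace rejects: unhandled
promiseRace.catch(console.log);    // only handles promiseRace itself

// chaining gives a single chain with a handler at the end:
promiseRace.then(handleRecords).catch(console.log);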
After ggorlen's excellent advice, I refactored to this much cleaner (and test-passing) code:
async function getTransformedData(url, page) {
  try {
    const response = await fetch(url);
    if (response.ok) {
      const records = await response.json();
      const transformedData = transformData(records, page);
      return transformedData;
    } else {
      throw new Error("failed");
    }
  }
  catch (error) {
    console.log(error);
  }
} // getTransformedData

function retrieve({page = 1, colors = ["red", "brown", "blue", "yellow", "green"]} = {}) {
  // limit set to 11 so we can determine nextPage
  const limit = "11";
  const offset = "" + ((page * 10) - 10);
  let colorArgs = "";
  colors.forEach((color, index) => {
    colorArgs = colorArgs + "&color[]=" + color;
  });
  const requestQuery = `limit=${limit}&offset=${offset}${colorArgs}`;
  const requestURL = new URI(window.path);
  requestURL.query(requestQuery);
  return getTransformedData(requestURL.toString(), page);
};

How do I get my data to display on flash cards using JavaScript?

Right now I'm working on a full-stack application that uses JS on both the front and back end. The app lets a user generate their own set of flash cards. When the user clicks "View Cards", data is fetched and the question and answer are displayed on each side of a card. It's supposed to display one card at a time and let the user move through the other cards with the "Previous" and "Next" buttons. I can successfully fetch data, convert it to JSON, and display at least one item from the database properly. The problem is scrolling through the cards: I can only get through some of them before the browser throws an error, and I've noticed some cards won't render both sides properly. How can I address these issues?
const flashCard = document.querySelector(".flashcard");
const flipBtn = document.querySelector(".flip-btn");
const prevBtn = document.querySelector(".prev-btn");
const nextBtn = document.querySelector(".next-btn");
let frontOfCard = document.querySelector(".front-content");
let backOfCard = document.querySelector(".back-content");

const displayCards = () => {
  getCardInfo()
  flipBtn.innerHTML = "Flip"
  flipBtn.removeEventListener("click", displayCards)
}

flipBtn.addEventListener("click", displayCards)

const flash = () => {
  if (flashCard.style.transform != "rotateX(180deg)") {
    flashCard.style.transform = "rotateX(180deg)"
  } else {
    flashCard.style.transform = "none"
  }
}

const getCardInfo = async () => {
  const itemBody = {
    method: "PUT",
    headers: {
      Accept: "application/json",
      "Content-Type": "application/json"
    }
  }
  const data = await fetch(window.location.href, itemBody)
  const jsonData = await data.json()
  console.log(jsonData)
  let idx = 0;
  frontOfCard.innerHTML = jsonData[idx].Answer
  backOfCard.innerHTML = jsonData[idx].Question
  flashCard.style.display = "block";
  flipBtn.addEventListener("click", flash);
  scrollThroughCards(idx, jsonData);
}

function scrollThroughCards(idx, data) {
  prevBtn.addEventListener("click", (e) => {
    flashCard.style.display = "none"
    setTimeout(() => {
      frontOfCard.innerHTML = data[idx--].Answer
      backOfCard.innerHTML = data[idx--].Question
      flashCard.style.display = "block"
    }, 1000)
    e.preventDefault()
  })
  nextBtn.addEventListener("click", (e) => {
    flashCard.style.display = "none"
    setTimeout(() => {
      frontOfCard.innerHTML = data[idx++].Answer
      backOfCard.innerHTML = data[idx++].Question
      flashCard.style.display = "block"
    }, 1000)
    e.preventDefault()
  })
}
app.get("/card/:id", checkAuthenticated, async (req, res) => {
  const { id } = req.params
  const data = await Card.findAll({ where: { NoteId: id } });
  res.render("cards-page", {
    noteid: id,
    Cards: data
  })
});

app.put("/card/:id", checkAuthenticated, async (req, res) => {
  const { id } = req.params
  const data = await Card.findAll({ where: { NoteId: id } });
  res.json(data)
})

app.post("/card/:id", checkAuthenticated, async (req, res) => {
  const { Question, Answer, NoteId } = req.body;
  const newCard = await Card.create({
    Question,
    Answer,
    NoteId
  });
  res.redirect(`/card/${NoteId}`)
});
In the scrollThroughCards function, no boundary checks were performed, and the increment and decrement operators were misused: data[idx--].Answer followed by data[idx--].Question moves the index twice per click.
function scrollThroughCards(idx, data) {
  prevBtn.addEventListener("click", (e) => {
    // there's no more card to the left of index 0,
    // so exit the function early
    if (idx <= 0) return;
    flashCard.style.display = "none"
    setTimeout(() => {
      idx--; // decrease the index first
      // then use the modified index
      frontOfCard.innerHTML = data[idx].Answer
      backOfCard.innerHTML = data[idx].Question
      flashCard.style.display = "block"
    }, 1000)
    e.preventDefault()
  })
  nextBtn.addEventListener("click", (e) => {
    // there are no more cards beyond the end of the list,
    // so exit the function early
    if (idx >= data.length - 1) return;
    flashCard.style.display = "none"
    setTimeout(() => {
      idx++; // increase the index first
      // then use the modified index
      frontOfCard.innerHTML = data[idx].Answer
      backOfCard.innerHTML = data[idx].Question
      flashCard.style.display = "block"
    }, 1000)
    e.preventDefault()
  })
}

How can I update more than 500 docs in Firestore using Batch?

I'm trying to update a field timestamp with the Firestore admin timestamp in a collection with more than 500 docs.
const batch = db.batch();
const serverTimestamp = admin.firestore.FieldValue.serverTimestamp();

db
  .collection('My Collection')
  .get()
  .then((docs) => {
    // stage one write per document, then commit the whole batch
    docs.forEach((doc) => {
      batch.set(doc.ref, {
        timestamp: serverTimestamp,
      }, {
        merge: true,
      });
    });
    return batch.commit();
  })
  .then(() => res.send('All docs updated'))
  .catch(console.error);
This throws an error
{ Error: 3 INVALID_ARGUMENT: cannot write more than 500 entities in a single call
at Object.exports.createStatusError (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\common.js:87:15)
at Object.onReceiveStatus (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:1188:28)
at InterceptingListener._callNext (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:564:42)
at InterceptingListener.onReceiveStatus (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:614:8)
at callback (C:\Users\Growthfile\Desktop\cf-test\functions\node_modules\grpc\src\client_interceptors.js:841:24)
code: 3,
metadata: Metadata { _internal_repr: {} },
details: 'cannot write more than 500 entities in a single call' }
Is there a way to write a recursive method that creates a batch object and updates 500 docs at a time until all the docs are updated?
From the docs I know that a delete operation is possible with the recursive approach, as mentioned here:
https://firebase.google.com/docs/firestore/manage-data/delete-data#collections
But for updating, I'm not sure how to end the recursion, since the docs are not being deleted.
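As a hedged sketch of the recursive shape being asked about (updateAll and the cursor-based termination are illustrative, not from any answer below): paging with a query cursor lets the recursion end when a page comes back empty, even though no documents are deleted.

const serverTimestamp = admin.firestore.FieldValue.serverTimestamp();

async function updateAll(query) {
  const snap = await query.limit(500).get();
  if (snap.empty) return; // termination: nothing left to page through
  const batch = db.batch();
  snap.docs.forEach((doc) => batch.update(doc.ref, { timestamp: serverTimestamp }));
  await batch.commit();
  // resume after the last document seen, so already-updated docs are skipped
  return updateAll(query.startAfter(snap.docs[snap.docs.length - 1]));
}

updateAll(db.collection('My Collection').orderBy(admin.firestore.FieldPath.documentId()));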
I also ran into the problem of updating more than 500 documents in a Firestore collection, and I would like to share how I solved it.
I use Cloud Functions to update my collection in Firestore, but this should also work in client-side code.
The solution counts every operation made against the batch, and after the limit is reached a new batch is created and pushed to batchArray.
After all updates are staged, the code loops through batchArray and commits every batch in the array.
It is important to count every set(), update(), and delete() operation made against a batch, because they all count toward the 500-operation limit.
const documentSnapshotArray = await firestore.collection('my-collection').get();

const batchArray = [];
batchArray.push(firestore.batch());
let operationCounter = 0;
let batchIndex = 0;

documentSnapshotArray.forEach(documentSnapshot => {
  const documentData = documentSnapshot.data();

  // update document data here...

  batchArray[batchIndex].update(documentSnapshot.ref, documentData);
  operationCounter++;

  if (operationCounter === 499) {
    batchArray.push(firestore.batch());
    batchIndex++;
    operationCounter = 0;
  }
});

// Note: forEach(async ...) would fire the commits without waiting for them,
// so commit all batches and await the whole lot instead:
await Promise.all(batchArray.map(batch => batch.commit()));
return;
I liked this simple solution:
const users = await db.collection('users').get()

const batches = _.chunk(users.docs, 500).map(userDocs => {
  const batch = db.batch()
  userDocs.forEach(doc => {
    batch.set(doc.ref, { field: 'myNewValue' }, { merge: true })
  })
  return batch.commit()
})

await Promise.all(batches)
Just remember to add import * as _ from "lodash" at the top. Based on this answer.
You can use the default BulkWriter. It applies the 500/50/5 rule: it starts at 500 writes per second and ramps the rate up by 50% every 5 minutes.
Example:
let bulkWriter = firestore.bulkWriter();

bulkWriter.create(documentRef, { foo: 'bar' });
bulkWriter.update(documentRef2, { foo: 'bar' });
bulkWriter.delete(documentRef3);
await bulkWriter.close().then(() => {
  console.log('Executed all writes');
});
As mentioned above, @Sebastian's answer is good and I upvoted it too, but I ran into an issue while updating 25,000+ documents in one go.
The tweak to the logic is as below.
console.log(`Updating documents...`);
let collectionRef = db.collection('cities');
try {
  let batch = db.batch();
  const documentSnapshotArray = await collectionRef.get();
  const records = documentSnapshotArray.docs;
  const index = documentSnapshotArray.size;
  console.log(`TOTAL SIZE=====${index}`);
  for (let i = 0; i < index; i++) {
    const docRef = records[i].ref;
    // YOUR UPDATES
    batch.update(docRef, { isDeleted: false });
    if ((i + 1) % 499 === 0) {
      await batch.commit();
      batch = db.batch();
    }
  }
  // Commit the final, partially filled batch. The original condition,
  // `!(index % 499) == 0`, only worked through boolean-to-number coercion.
  if (index % 499 !== 0) {
    await batch.commit();
  }
  console.log('write completed');
} catch (error) {
  console.error(`updateWorkers() errored out : ${error.stack}`);
  reject(error); // assumes this runs inside a Promise executor
}
The explanations in the previous answers already cover the issue.
I'm sharing the final code that I built and that worked for me, since I needed something that worked in a more decoupled manner than most of the solutions presented above.
import { FireDb } from "#services/firebase"; // = firebase.firestore();

type TDocRef = FirebaseFirestore.DocumentReference;
type TDocData = FirebaseFirestore.DocumentData;

let fireBatches = [FireDb.batch()];
let batchSizes = [0];
let batchIdxToUse = 0;

export default class FirebaseUtil {
  static addBatchOperation(
    operation: "create",
    ref: TDocRef,
    data: TDocData
  ): void;
  static addBatchOperation(
    operation: "update",
    ref: TDocRef,
    data: TDocData,
    precondition?: FirebaseFirestore.Precondition
  ): void;
  static addBatchOperation(
    operation: "set",
    ref: TDocRef,
    data: TDocData,
    setOpts?: FirebaseFirestore.SetOptions
  ): void;
  static addBatchOperation(
    operation: "create" | "update" | "set",
    ref: TDocRef,
    data: TDocData,
    opts?: FirebaseFirestore.Precondition | FirebaseFirestore.SetOptions
  ): void {
    // Lines below make sure we stay below the limit of 500 writes per batch
    if (batchSizes[batchIdxToUse] === 500) {
      fireBatches.push(FireDb.batch());
      batchSizes.push(0);
      batchIdxToUse++;
    }
    batchSizes[batchIdxToUse]++;

    const batchArgs: [TDocRef, TDocData] = [ref, data];
    if (opts) batchArgs.push(opts);

    switch (operation) {
      // Specific case for "set" is required because of some weird TS
      // glitch that doesn't allow me to use the arg "operation" to
      // call the function
      case "set":
        fireBatches[batchIdxToUse].set(...batchArgs);
        break;
      default:
        fireBatches[batchIdxToUse][operation](...batchArgs);
        break;
    }
  }

  public static async runBatchOperations() {
    // The lines below clear the globally available batches so we
    // don't run them twice if we call this function more than once
    const currentBatches = [...fireBatches];
    fireBatches = [FireDb.batch()];
    batchSizes = [0];
    batchIdxToUse = 0;

    await Promise.all(currentBatches.map((batch) => batch.commit()));
  }
}
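A hypothetical usage sketch (docRefs and newData are placeholders, not part of the class above):

// inside an async function: queue writes, batches rotate automatically at 500
docRefs.forEach((ref, i) =>
  FirebaseUtil.addBatchOperation("update", ref, newData[i])
);
// commit everything and reset the module-level state
await FirebaseUtil.runBatchOperations();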
Based on all the above answers, I put together the following pieces of code that one can put into a module in JavaScript back-end and front-end to easily use Firestore batch writes, without worrying about the 500 writes limit.
Back-end (Node.js)
// The Firebase Admin SDK to access Firestore.
const admin = require("firebase-admin");
admin.initializeApp();

// Firestore does not accept more than 500 writes in a transaction or batch write.
const MAX_TRANSACTION_WRITES = 499;

const isFirestoreDeadlineError = (err) => {
  console.log({ err });
  const errString = err.toString();
  return (
    errString.includes("Error: 13 INTERNAL: Received RST_STREAM") ||
    errString.includes("Error: 4 DEADLINE_EXCEEDED: Deadline exceeded")
  );
};

const db = admin.firestore();

// How many writes out of 500 so far.
// I wrote the following functions to easily use batch writes without worrying about the 500 limit.
let writeCounts = 0;
let batchIndex = 0;
let batchArray = [db.batch()];

// Commit all accumulated batches.
const makeCommitBatch = async () => {
  console.log("makeCommitBatch");
  await Promise.all(batchArray.map((bch) => bch.commit()));
};

// Commit the batch writes; if you get a Firestore deadline error, retry every 4 seconds until it resolves.
const commitBatch = async () => {
  try {
    await makeCommitBatch();
  } catch (err) {
    console.log({ err });
    if (isFirestoreDeadlineError(err)) {
      const theInterval = setInterval(async () => {
        try {
          await makeCommitBatch();
          clearInterval(theInterval);
        } catch (err) {
          console.log({ err });
          if (!isFirestoreDeadlineError(err)) {
            clearInterval(theInterval);
            throw err;
          }
        }
      }, 4000);
    }
  }
};

// If the batch reaches 499 staged writes, start a new batch object and reset the counter.
const checkRestartBatchWriteCounts = () => {
  writeCounts += 1;
  if (writeCounts >= MAX_TRANSACTION_WRITES) {
    batchIndex++;
    batchArray.push(db.batch());
    writeCounts = 0;
  }
};

const batchSet = (docRef, docData) => {
  batchArray[batchIndex].set(docRef, docData);
  checkRestartBatchWriteCounts();
};

const batchUpdate = (docRef, docData) => {
  batchArray[batchIndex].update(docRef, docData);
  checkRestartBatchWriteCounts();
};

const batchDelete = (docRef) => {
  batchArray[batchIndex].delete(docRef);
  checkRestartBatchWriteCounts();
};

module.exports = {
  admin,
  db,
  MAX_TRANSACTION_WRITES,
  checkRestartBatchWriteCounts,
  commitBatch,
  isFirestoreDeadlineError,
  batchSet,
  batchUpdate,
  batchDelete,
};
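A hypothetical usage sketch of these helpers (the module path is an assumption):

const { db, batchUpdate, commitBatch } = require("./firestoreBatch"); // hypothetical path

async function markAllNotDeleted() {
  const snap = await db.collection("cities").get();
  // batchUpdate rotates to a fresh batch automatically every 499 writes
  snap.forEach((doc) => batchUpdate(doc.ref, { isDeleted: false }));
  // commitBatch commits every accumulated batch, retrying on deadline errors
  await commitBatch();
}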
Front-end
// Firestore does not accept more than 500 writes in a transaction or batch write.
const MAX_TRANSACTION_WRITES = 499;

const isFirestoreDeadlineError = (err) => {
  return (
    err.message.includes("DEADLINE_EXCEEDED") ||
    err.message.includes("Received RST_STREAM")
  );
};

class Firebase {
  constructor(fireConfig, instanceName) {
    let app = fbApp; // fbApp: the firebase SDK namespace, imported elsewhere
    if (instanceName) {
      app = app.initializeApp(fireConfig, instanceName);
    } else {
      app.initializeApp(fireConfig);
    }
    this.name = app.name;
    this.db = app.firestore();
    this.firestore = app.firestore;
    // How many writes out of 500 so far.
    // I wrote the following functions to easily use batch writes without worrying about the 500 limit.
    this.writeCounts = 0;
    this.batch = this.db.batch();
    this.isCommitting = false;
  }

  async makeCommitBatch() {
    console.log("makeCommitBatch");
    if (!this.isCommitting) {
      this.isCommitting = true;
      await this.batch.commit();
      this.writeCounts = 0;
      this.batch = this.db.batch();
      this.isCommitting = false;
    } else {
      const batchWaitInterval = setInterval(async () => {
        if (!this.isCommitting) {
          this.isCommitting = true;
          await this.batch.commit();
          this.writeCounts = 0;
          this.batch = this.db.batch();
          this.isCommitting = false;
          clearInterval(batchWaitInterval);
        }
      }, 400);
    }
  }

  async commitBatch() {
    try {
      await this.makeCommitBatch();
    } catch (err) {
      console.log({ err });
      if (isFirestoreDeadlineError(err)) {
        const theInterval = setInterval(async () => {
          try {
            await this.makeCommitBatch();
            clearInterval(theInterval);
          } catch (err) {
            console.log({ err });
            if (!isFirestoreDeadlineError(err)) {
              clearInterval(theInterval);
              throw err;
            }
          }
        }, 4000);
      }
    }
  }

  async checkRestartBatchWriteCounts() {
    this.writeCounts += 1;
    if (this.writeCounts >= MAX_TRANSACTION_WRITES) {
      await this.commitBatch();
    }
  }

  async batchSet(docRef, docData) {
    if (!this.isCommitting) {
      this.batch.set(docRef, docData);
      await this.checkRestartBatchWriteCounts();
    } else {
      const batchWaitInterval = setInterval(async () => {
        if (!this.isCommitting) {
          this.batch.set(docRef, docData);
          await this.checkRestartBatchWriteCounts();
          clearInterval(batchWaitInterval);
        }
      }, 400);
    }
  }

  async batchUpdate(docRef, docData) {
    if (!this.isCommitting) {
      this.batch.update(docRef, docData);
      await this.checkRestartBatchWriteCounts();
    } else {
      const batchWaitInterval = setInterval(async () => {
        if (!this.isCommitting) {
          this.batch.update(docRef, docData);
          await this.checkRestartBatchWriteCounts();
          clearInterval(batchWaitInterval);
        }
      }, 400);
    }
  }

  async batchDelete(docRef) {
    if (!this.isCommitting) {
      this.batch.delete(docRef);
      await this.checkRestartBatchWriteCounts();
    } else {
      const batchWaitInterval = setInterval(async () => {
        if (!this.isCommitting) {
          this.batch.delete(docRef);
          await this.checkRestartBatchWriteCounts();
          clearInterval(batchWaitInterval);
        }
      }, 400);
    }
  }
}
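A hypothetical usage sketch (fireConfig stands in for your own Firebase config object):

// inside an async function:
const fire = new Firebase(fireConfig);
const ref = fire.db.collection("cities").doc(); // hypothetical collection
await fire.batchSet(ref, { name: "Ankara" });
await fire.commitBatch(); // flush whatever is still staged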
No citations or documentation; I came up with this code myself. It worked for me, and it looks clean and simple to read and use, so if someone likes it, they can use it too.
Better to write an automated test, because the code uses the private variable _ops, which can change after a package upgrade. For example, in old versions it may be called _mutations.
async function commitBatch(batch) {
  const MAX_OPERATIONS_PER_COMMIT = 500;
  while (batch._ops.length > MAX_OPERATIONS_PER_COMMIT) {
    const batchPart = admin.firestore().batch();
    batchPart._ops = batch._ops.splice(0, MAX_OPERATIONS_PER_COMMIT - 1);
    await batchPart.commit();
  }
  await batch.commit();
}
Usage:
const batch = admin.firestore().batch();
batch.delete(someRef);
batch.update(someRef);
...
await commitBatch(batch);
Simple solution
Just fire twice?
My array is resultsFinal. I fire the batch once with a limit of 490, and a second time with a limit of the length of the array (results.length).
It works fine for me :)
How do you check it?
Go to Firebase and delete your collection; Firebase reports how many docs it deleted. If that matches the length of your array, you're good to go.
async function quickstart(results) {
  // we get results as a parameter so the data is available inside quickstart
  const resultsFinal = results;
  // console.log(resultsFinal.length);
  let batch = firestore.batch();
  // Firestore's limit is 500 requests per transaction/batch/send
  let i;
  for (i = 0; i < 490; i++) {
    const doc = firestore.collection('testMore490').doc();
    const object = resultsFinal[i];
    batch.set(doc, object);
  }
  await batch.commit();
  // const batchTwo = firestore.batch();
  batch = firestore.batch();
  // the original post started this loop at 491 and hard-coded 776 as the
  // upper bound, which skipped resultsFinal[490]; continue from 490 instead
  for (i = 490; i < resultsFinal.length; i++) {
    const objectPartTwo = resultsFinal[i];
    const doc = firestore.collection('testMore490').doc();
    batch.set(doc, objectPartTwo);
  }
  await batch.commit();
}

How to prevent a race condition in javascript?

I am working on a resource manager, and some jobs must be atomic because there may be a race condition.
A solution is provided in a similar post,
Javascript semaphore / test-and-set / lock?
which says to implement a basic integer semaphore: add the variable to the DOM, lock/unlock it, and make sure your functions keep checking it, else time them out.
Can someone help me implement that for the code below? And how can I add a variable to the DOM?
In the code below, the retrievePartners method returns partners based on what the user asked for in the capabilities object. retrievePartners may be called at the same time by another user asking for the same partners (browsers), so this method should be atomic, meaning it should handle only one capabilities object at a time.
async retrievePartners (capabilities) {
  const appropriatePartners = { chrome: [], firefox: [], safari: [], ie: [] }
  const partners = await this.getAllPartners()
  allTypeNumber = 0
  // first check if there are available appropriate partners
  Object.keys(capabilities.type).forEach(key => {
    let typeNumber = parseInt(capabilities.type[key])
    allTypeNumber = allTypeNumber + typeNumber
    for (let i = 0; i < typeNumber; i++) {
      partners.forEach((partner, i) => {
        if (
          key === partner.value.type &&
          partner.value.isAvailable &&
          appropriatePartners[key].length < typeNumber
        ) {
          appropriatePartners[key].push(partner)
        }
      })
      if (appropriatePartners[key].length < typeNumber) {
        throw new Error(
          'Sorry there are no appropriate Partners for this session'
        )
      }
    }
  })
  if (allTypeNumber === 0) {
    throw new Error('Please mention at least 1 type of browser !')
  } else {
    Object.keys(appropriatePartners).forEach(key => {
      appropriatePartners[key].forEach(partner => {
        this.instructorPeer.set('/partners/' + partner.id + '/states/', {
          isAvailable: false
        })
      })
    })
    return appropriatePartners
  }
}
getAllPartners method
async getAllPartners (capabilities) {
  const partners = []
  const paths = await this.instructorPeer.get({
    path: { startsWith: '/partners/' }
  })
  paths.forEach((path, i) => {
    if (path.fetchOnly) {
      let obj = {}
      obj.value = path.value
      obj.id = path.path.split('/partners/')[1]
      obj.value.isAvailable = paths[i + 1].value.isAvailable
      partners.push(obj)
    }
  })
  return partners
}
Here is how I call the method
async function startTest () {
  const capabilities = {
    type: {
      chrome: 1
    }
  }
  test('Example test', async t => {
    try {
      session = await workout.createSession(capabilities)
      const partners = session.partners
      const partner = partners.chrome[0]
      ...
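As a hedged sketch of the usual single-threaded answer (the Mutex class and the resourceManager name are illustrative, not from the linked post): since JavaScript runs your code on one thread, you don't need a DOM flag; chaining every call onto one shared promise serializes them.

class Mutex {
  constructor () {
    this.last = Promise.resolve()
  }
  // run tasks one at a time, in arrival order
  run (task) {
    const result = this.last.then(() => task())
    // keep the chain alive even if a task throws
    this.last = result.catch(() => {})
    return result
  }
}

const partnersMutex = new Mutex()

// every caller goes through the same mutex, so only one retrievePartners
// call inspects and reserves partners at a time
const partners = await partnersMutex.run(() =>
  resourceManager.retrievePartners(capabilities)
)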
