I'm trying to generate tokens on the fly and save them to the database as I do so.
This is the code to generate a token.
/**
 * Generates a random 12-digit token, checks it is not already in the
 * database, and saves it together with its usage limit.
 *
 * NOTE(review): everything happens inside async callbacks, so the inner
 * `return savedToken.token` only returns from the save callback — the
 * caller of generateToken() itself always receives undefined. The
 * promise-based rewrites further down address this.
 *
 * @param {number} maxUse - how many times the token may be used
 */
const generateToken = function (maxUse) {
  // 12 digit token numbers. 9e+11 possibilities
  const min = 100000000000;
  const max = 999999999999;
  const token = Math.floor(Math.random() * (max - min) + min);
  // ensure token doesn't exist in db before saving
  Token.count({ token }, function (err, count) {
    if (count > 0) {
      // Collision: retry. Fix: the original called generateToken() with no
      // argument here, so a retried token was saved with maxUse undefined.
      generateToken(maxUse);
    } else {
      let newToken = new Token({ token, maxUse });
      newToken.save(function (err, savedToken) {
        if (err) {
          console.log(err);
          return;
        } else {
          generateSerial(savedToken._id);
          console.log("saved token is =>", savedToken.token);
          return savedToken.token;
        }
      });
    }
  });
};
How do I write a function that calls this function an arbitrary number of times, appending the tokens to a file as they are being saved to the database? I realized a while loop wouldn't work because of the asynchronous nature of the process.
All the answers I've seen assume that I have the bulk data ahead of time e.g using bulkwrite (mongoose).
An alternative approach is welcomed
Thank you.
The way I see this, you are probably best off keeping a "local list" of the generated tokens and "bulk" inserting via .insertMany(). Mileage may vary on the actual implementation, so we will discuss both that approach as well as handling your recursive function in a sane way with async methods.
Async Loop
You have created a problem where you need to test the values present in order to determine that they are "unique" for insertion. This of course requires async calls in order to look at the database, and therefore rules out "bulk" actions such as "upserts" because you don't know if the item exists before you send it in a loop. So recursion does work in this case.
So the very first thing you should do is to make the "function" asynchronous itself, either returning a callback or a promise.
In essence:
/**
 * Promise-returning token generator: resolves with the created Token
 * document once a database-unique 12-digit token has been saved.
 * @param {number} maxUse - how many times the token may be used
 * @returns {Promise} resolves with the created Token document
 */
function generateToken(maxUse) {
  const min = 100000000000;
  const max = 999999999999;
  const token = Math.floor(Math.random() * (max - min) + min);
  return Token.count({ token }).then( count => {
    if ( count > 0 ) {
      // Fix: the recursive retry must be returned, otherwise the outer
      // promise resolves to undefined whenever a collision occurs.
      return generateToken(maxUse);
    } else {
      return Token.create({ token, maxUse });
    }
  })
}
Or in more modern terms with async/await
/**
 * async/await form of the token generator.
 * @param {number} maxUse - how many times the token may be used
 * @returns {Promise} resolves with the created Token document
 */
async function generateToken(maxUse) {
  const min = 100000000000;
  const max = 999999999999;
  const token = Math.floor(Math.random() * (max - min) + min);
  let count = await Token.count({ token });
  if ( count > 0 ) {
    // Fix: return the recursive retry; the original discarded it, so the
    // caller received undefined after any collision.
    return generateToken(maxUse);
  } else {
    return Token.create({ token, maxUse });
  }
}
Then it's really just a matter of calling in a loop, either in modern terms as:
// Sequential driver loop: each `await` lets one token fully persist before
// the next is generated. (Must run inside an async function, or a module
// that supports top-level await.)
let count = 0;
while (count < 500) {
// Random usage 1-5
const maxUse = Math.floor(Math.random() * 5) + 1;
let token = await generateToken(maxUse);
log(token.token);
count++;
}
Or using async.whilst if running under a node version that does not support async/await:
// Same loop using the `async` library for runtimes without async/await:
// the iteratee re-runs until the test returns false, and each iteration
// only continues once generateToken's promise settles.
asyncWhilst(
() => count < 500,
(callback) => {
const maxUse = Math.floor(Math.random() * 5 ) + 1;
generateToken(maxUse).then(token => {
log(token.token);
count++;
callback();
}).catch(err => callback(err));
},
(err) => {
if (err) throw err;
// Loop complete, issue callback or promise
}
);
So it's all relatively simple.
Hold Unique Local and "Bulk Insert"
The "alternate" way to handle this is instead to keep an array of the generated tokens "on the client". Then all you need do on each random generation is see if the token was "already seen" and only create an insert operation when a "unique" value is obtained.
This should be much faster than going back and forth to the database with recursive calls since it's all "cached" locally.
In essence, make your generator function very basic:
/**
 * Basic generator: no database round-trip, just a random 12-digit number
 * paired with its usage limit.
 * @param {number} maxUse - how many times the token may be used
 * @returns {{token: number, maxUse: number}} plain token document
 */
function generateToken(maxUse) {
  const lower = 100000000000; // smallest 12-digit number
  const upper = 999999999999;
  const token = lower + Math.floor(Math.random() * (upper - lower));
  return { token, maxUse };
}
And then during the loop, make two arrays for the seenTokens and the ops, where the latter denotes the items to later insert in "bulk batches" instead of individual writes:
// Local-cache variant: remember every generated token in seenTokens, queue
// unique ones in ops, and bulk-insert in batches of 500.
let count = 0,
seenTokens = [],
ops = [];
while ( count < 500 ) {
const maxUse = Math.floor(Math.random() * 5) + 1;
let token = generateToken(maxUse);
// only accept tokens not already produced during this run
if ( seenTokens.indexOf(token.token) === -1 ) {
seenTokens.push(token.token);
ops.push(token);
count++
// flush a full batch of 500 queued documents
if ( count % 500 === 0 ) {
await Token.insertMany(ops);
ops = [];
}
} else {
continue
}
}
// flush any remaining partial batch
if ( count % 500 !== 0 ) {
await Token.insertMany(ops);
ops = [];
}
Of course we are applying the async/await methods there, but that's only for the .insertMany() method being async, and if you are not actually inserting "tens of thousands" then it should be easily handled without even needing to "await" such a call, and then only issue "once".
But the demonstration here accounts for what the code should look like when it "is tens of thousands" with no other alteration. Again you can use other library functions to "await" such calls as required.
And again we can employ async.series and async.whilst for such control:
// Callback-library version: async.series sequences the generation loop and
// the final partial-batch flush; async.whilst repeats the iteratee until
// 500 unique tokens have been queued/inserted.
let count = 0,
seenTokens = [],
ops = [];
asyncSeries(
[
(callback) =>
asyncWhilst(
() => count < 500,
(callback) => {
const maxUse = Math.floor(Math.random() * 5) + 1;
let token = generateToken(maxUse);
if ( seenTokens.indexOf(token.token) === -1 ) {
seenTokens.push(token.token);
ops.push(token);
count++;
// flush a full batch of 500
if ( count % 500 === 0 ) {
Token.insertMany(ops,(err,response) => {
console.log(count);
ops = [];
callback(err);
});
} else {
callback();
}
} else {
console.log("trying again: seen token %s", token.token);
callback();
}
},
callback
),
(callback) => {
// flush the remaining partial batch, if any
if ( count % 500 !== 0 ) {
Token.insertMany(ops,callback)
} else {
callback()
}
}
],
(err) => {
if (err) throw err;
ops = [];
// Operations complete, so callback to continue
}
);
All very much the same, and again the "flow control" is really only there to cater for "larger batches", and you could simply use the regular loop to build the ops entries and make one call only to .insertMany(), much as the 500 limit here actually does.
So the simplest form basically is:
// Simplest form: build all 500 unique documents in memory, then make one
// insertMany call. NOTE(review): seenTokens.indexOf inside the loop is
// O(n^2) overall; a Set would be O(n), though at 500 items it hardly matters.
let count = 0,
seenTokens = [],
ops = [];
// Regular loop
while ( count < 500 ) {
const maxUse = Math.floor(Math.random() * 5) + 1;
let token = generateToken(maxUse);
if ( seenTokens.indexOf(token.token) === -1 ) {
seenTokens.push(token.token);
ops.push(token);
count++;
}
}
// Insert all at once
Token.insertMany(ops,(err,result) => {
if (err) throw err;
// now it's complete
})
Of course this whole alternate approach "hinges on" the fact that you never actually maintain "persistence" of the "tokens" in the database, and would not call this function again until those existing entries are cleared. We could "slurp" in all the "taken tokens" and exclude by the same "local cache". But over time this would grow significantly, so there is that point to consider in your overall choice.
As a full listing scaffolded for latest nodejs release, but the general usage is applied inside:
const asyncWhilst = require('async').whilst,
mongoose = require('mongoose'),
Schema = mongoose.Schema;
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
const uri = 'mongodb://localhost/test',
options = { useMongoClient: true };
// Token schema: `unique: true` creates a unique index on token, so the
// database itself also guards against duplicate tokens.
const tokenSchema = new Schema({
token: { type: Number, unique: true },
maxUse: Number
});
const Token = mongoose.model('Token', tokenSchema);
// Logger helper: pretty-prints any value as indented JSON
function log(data) {
console.log(JSON.stringify(data,undefined,2))
}
// Function implementation: resolves with a database-unique token document.
function generateToken(maxUse) {
  const min = 100000000000;
  const max = 999999999999;
  const token = Math.floor(Math.random() * (max - min) + min);
  return Token.count({ token }).then( count => {
    if ( count > 0 ) {
      // Fix: return the recursive retry — without `return` the promise
      // resolves to undefined whenever a collision occurs, and the caller's
      // `token.token` access throws.
      return generateToken(maxUse);
    } else {
      return Token.create({ token, maxUse });
    }
  })
}
// Main program: runs the recursive-generator approach twice — first with a
// plain async/await loop, then with async.whilst wrapped in a Promise.
(async function() {
try {
const conn = await mongoose.connect(uri,options);
console.log("using async/await");
// clean data
await Promise.all(
Object.keys(conn.models).map(m => conn.models[m].remove({}))
);
let count = 0;
while (count < 500) {
// Random usage 1-5
const maxUse = Math.floor(Math.random() * 5) + 1;
let token = await generateToken(maxUse);
log(token.token);
count++;
}
let totalCount = await Token.count();
console.log("Count is: %s", totalCount);
// Or using async.whilst
console.log("Using async.whilst");
// clean data
await Promise.all(
Object.keys(conn.models).map(m => conn.models[m].remove({}))
);
count = 0;
// bridge the callback-style async.whilst into the awaitable promise
await new Promise((resolve,reject) => {
asyncWhilst(
() => count < 500,
(callback) => {
const maxUse = Math.floor(Math.random() * 5 ) + 1;
generateToken(maxUse).then(token => {
log(token.token);
count++;
callback();
}).catch(err => callback(err));
},
(err) => {
// NOTE(review): resolve() still runs after reject(err); harmless,
// since a settled promise ignores later calls, but an else would be tidier.
if (err) reject(err);
resolve();
}
);
});
totalCount = await Token.count();
console.log("Count is: %s", totalCount);
} catch (e) {
console.error(e);
} finally {
mongoose.disconnect();
}
})();
Or as an "alternate" process:
const asyncSeries = require('async').series,
asyncWhilst = require('async').whilst,
mongoose = require('mongoose'),
Schema = mongoose.Schema;
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
const uri = 'mongodb://localhost/test',
options = { useMongoClient: true };
// Token schema with a unique index on token (database-level duplicate guard).
const tokenSchema = new Schema({
token: { type: Number, unique: true },
maxUse: Number
});
const Token = mongoose.model('Token', tokenSchema);
// Logger helper: pretty-prints any value as indented JSON
function log(data) {
console.log(JSON.stringify(data,undefined,2))
}
// Function implementation: pure in-memory generator — produces a random
// 12-digit token plus its usage limit, with no database access at all.
function generateToken(maxUse) {
  const floor12 = 100000000000; // smallest 12-digit number
  const ceil12 = 999999999999;
  return {
    token: floor12 + Math.floor(Math.random() * (ceil12 - floor12)),
    maxUse
  };
}
// Main program: demonstrates the local-cache + bulk-insert approach twice,
// first with async/await and a plain while loop, then with
// async.series/async.whilst for callback-style flow control.
// Fixes: `totalCount` was assigned without ever being declared (a
// ReferenceError in strict/ES-module code); the "asyc.whilst" log message
// typo is corrected to "async.whilst".
(async function() {
  try {
    const conn = await mongoose.connect(uri,options);
    console.log("Using async/await");
    // clean data
    await Promise.all(
      Object.keys(conn.models).map(m => conn.models[m].remove({}))
    );
    let count = 0,
        seenTokens = [],
        ops = [];
    while ( count < 500 ) {
      const maxUse = Math.floor(Math.random() * 5) + 1;
      let token = generateToken(maxUse);
      // only queue tokens not generated before in this run
      if ( seenTokens.indexOf(token.token) === -1 ) {
        seenTokens.push(token.token);
        ops.push(token);
        count++;
        // flush a full batch of 500 queued documents
        if ( count % 500 === 0 ) {
          await Token.insertMany(ops);
          ops = [];
        }
      } else {
        continue;
      }
    }
    // flush any remaining partial batch
    if ( count % 500 !== 0 ) {
      await Token.insertMany(ops);
      ops = [];
    }
    let totalCount = await Token.count();
    console.log("Count is: %s", totalCount);
    // using async.whilst and indeed async.series for control
    console.log("using async.whilst");
    await Promise.all(
      Object.keys(conn.models).map(m => conn.models[m].remove({}))
    );
    // bridge the callback-style flow into an awaitable promise
    await new Promise((resolve,reject) => {
      count = 0,
      seenTokens = [],
      ops = [];
      asyncSeries(
        [
          (callback) =>
            asyncWhilst(
              () => count < 500,
              (callback) => {
                const maxUse = Math.floor(Math.random() * 5) + 1;
                let token = generateToken(maxUse);
                if ( seenTokens.indexOf(token.token) === -1 ) {
                  seenTokens.push(token.token);
                  ops.push(token);
                  count++;
                  if ( count % 500 === 0 ) {
                    Token.insertMany(ops,(err,response) => {
                      console.log(count);
                      ops = [];
                      callback(err);
                    });
                  } else {
                    callback();
                  }
                } else {
                  console.log("trying again: seen token %s", token.token);
                  callback();
                }
              },
              callback
            ),
          (callback) => {
            // flush the final partial batch, if any
            if ( count % 500 !== 0 ) {
              Token.insertMany(ops,callback)
            } else {
              callback()
            }
          }
        ],
        (err) => {
          if (err) reject(err);
          ops = [];
          resolve();
        }
      );
    });
    totalCount = await Token.count();
    console.log("Count is: %s", totalCount);
  } catch (e) {
    console.error(e);
  } finally {
    mongoose.disconnect();
  }
})();
Related
I need to re-execute a hand made for loop (because I need time between each step) 10s after it ends forever.
So after a few tries, I came up with this code, but it doesn't work because it re-executes every 10s, not after the loop finishes. I tried to put an async function in the interval and to wrap all my code in an awaited one, but it didn't work.
// Question code: loads all rows and groups them by their `filiere` column.
// NOTE(review): `await rows.then(...)` works but is redundant —
// `const res = await rows` would express the same thing more clearly.
async function refreshEDT() {
let rows = appModel.getAll()
let orderFiliere = {};
await rows.then((res) => {
for (let i = 0; i < res.length; i++) {
if (res[i].filiere in orderFiliere) {
orderFiliere[res[i].filiere].push(res[i])
} else {
orderFiliere[res[i].filiere] = [res[i]]
}
}
})
return orderFiliere
}
// Question code: refresh data hourly; every 10s walk the lessons with 10s
// between steps.
// NOTE(review): the outer 10s setInterval fires regardless of whether the
// inner loop has finished — which is exactly the reported problem.
let edt = refreshEDT()
setInterval(() => {
edt = refreshEDT()
}, 1000 * 60 * 60) //ms to s to m to h
setInterval(() => {
edt.then((lessons) => {
(function loop(i) {
let lessons_key = Object.keys(lessons)
setTimeout(() => {
// processing things
if (--i) loop(i);
}, 1000 * 10) //ms to s to 10s
})(Object.keys(lessons).length - 1)
})
console.log(1)
}, 1000 * 10) //ms to s to 10s
Do you have any solution?
Ty!
EDIT
Let's explain what I try to do:
I get things on my DB, try to classify by "filiere", one of my column, and return that object.
Those data are reloaded every hour.
After that, I need to emit with socket.io every "filiere" with 10s between, and repeat that.
Perhaps the code will be easier to read when you wrap setTimeout with a promise to await.
/**
 * Promise-based delay so callers can `await sleep(ms)` instead of nesting
 * setTimeout callbacks.
 * @param {number} ms - delay in milliseconds
 * @returns {Promise<void>} resolves after roughly ms milliseconds
 */
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(() => {
      resolve();
    }, ms);
  });
}
/**
 * Loads all rows and groups them into an object keyed by each row's
 * `filiere` column.
 * @returns {Promise<Object<string, Array>>} map of filiere -> rows
 */
async function refreshEDT() {
  const rows = await appModel.getAll();
  const orderFiliere = {};
  for (const row of rows) {
    if (row.filiere in orderFiliere) {
      orderFiliere[row.filiere].push(row);
    } else {
      orderFiliere[row.filiere] = [row];
    }
  }
  return orderFiliere;
}
// consider naming this
// Drives the whole schedule: `edt` is replaced hourly; the endless while
// loop emits each filiere with a 10s pause between them, then pauses 10s
// more before starting over.
(async () => {
let edt = refreshEDT();
setInterval(() => {
edt = refreshEDT();
}, 1000 * 60 * 60);
while (true) {
// re-await the (possibly refreshed) data each pass
const lessons = await edt;
for (const lessons_key of Object.keys(lessons)) {
await sleep(1000 * 10);
// processing things
}
// repeat after 10 seconds
await sleep(1000 * 10);
}
})();
With the built-in grouping helper (shipped as Object.groupBy() in ES2024; earlier drafts of the proposal specced it as Array.prototype.groupBy() for ES2022, which never landed), you can simplify your refreshEDT() implementation:
async function refreshEDT() {
const res = await appModel.getAll();
const orderFiliere = res.groupBy(({ filiere }) => filiere);
return orderFiliere;
}
You could try using setTimeout to call your function again.
// Re-arm pattern: schedule the next run only after the current one has
// finished, which avoids the overlapping executions setInterval can cause.
const myFunc = () => {
// processing things
setTimeout(myFunc , 10000)
}
myFunc();
I think this could help if i understood the problem correctly :
// Promise-based delay helper.
const wait = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

/**
 * Walks `list` (assumed to be an array in scope), and after processing the
 * final item waits `secondsToWait` seconds before starting over.
 * Fixes: the original snippet was not valid JavaScript — it was missing
 * the `function` keyword, used a TypeScript annotation
 * (`const secondsToWait: number = 10`), and mixed `List` with `list`.
 */
async function yourFunction() {
  const secondsToWait = 10;
  let i = 0;
  for (const item of list) {
    // Do stuff
    if (i === list.length - 1) {
      await wait(secondsToWait * 1000);
      yourFunction(); // start the next pass (fire-and-forget)
    }
    i++;
  }
}
I have written a cloud function that runs every 5 minutes on my Firebase app. In essence, the function gathers trends data from the Google Trends website and parses the JSON into a variable.
After doing so I want to then connect to the Twitter API and search for tweets using the trending topics fetched in the first part.
My Issue seems to lie with the second part. It fetches the data but the remainder of the function does not wait for the result before writing to Firebase.
I have tried two different methods but both don't seem to work as intended. I am struggling to understand how the function should wait for the second part to gather and store the information before writing to Firebase.
Method 1
// Method 1 (question code): fetch Google daily trends, then kick off one
// Twitter search per trend.
// NOTE(review): T.get is asynchronous, so the dbRef.set(...) at the bottom
// runs before any Twitter callback has filled twitterTrends — which is
// exactly the reported symptom.
exports.callTo = functions.pubsub.schedule("5 * * * *").onRun((context) => {
let searchTrends;
const ts = Date.now();
const dateOb = new Date(ts);
const date = dateOb.getDate();
const month = dateOb.getMonth() + 1;
const year = dateOb.getFullYear();
const twitterTrends = [];
googleTrends.dailyTrends({
trendDate: new Date(year + "-" + month + "-" + date),
geo: "CA",
}, function(err, res) {
if (err) {
functions.logger.error(err);
} else {
searchTrends = JSON.parse(res).default.trendingSearchesDays[0]
.trendingSearches;
functions.logger.info(searchTrends);
for (let i = 0; i < searchTrends.length; i++) {
functions.logger.log(searchTrends[i].title.query);
T.get("search/tweets", {q: searchTrends[i].title.query, count: 1},
function(err, data, response) {
if (err) {
functions.logger.error(err);
}
functions.logger.info("Twitter data" +
JSON.stringify(data.statuses));
twitterTrends[i] = JSON.stringify(data.statuses);
});
}
// NOTE(review): executes immediately — the T.get callbacks above have
// not run yet, so twitterTrends is still empty here.
const dbRef = admin.database().ref("searchTrends");
dbRef.set({google: searchTrends, twitter: twitterTrends});
}
});
});
Method 2
// Method 2 (question code): the same work split into helper functions.
// NOTE(review): getTrends is defined but never invoked, and even if it
// were, the dailyTrends callback is not awaited — the final dbRef.set
// still runs before any data arrives.
exports.callTo = functions.pubsub.schedule("5 * * * *").onRun((context) => {
let searchTrends;
const ts = Date.now();
const dateOb = new Date(ts);
const date = dateOb.getDate();
const month = dateOb.getMonth() + 1;
const year = dateOb.getFullYear();
const twitterTrends = [];
async function getTrends(){
// callback-style API: finishing this function does NOT mean the data
// has been fetched
googleTrends.dailyTrends({
trendDate: new Date(year + "-" + month + "-" + date),
geo: "CA",
}, function(err, res) {
if (err) {
functions.logger.error(err);
} else {
searchTrends = JSON.parse(res).default.trendingSearchesDays[0]
.trendingSearches;
functions.logger.info(searchTrends);
}
});
await getTwitterTrends();
}
async function getTwitterTrends(){
for (let i = 0; i < 1; i++) {
functions.logger.log(searchTrends[i].title.query);
T.get("search/tweets", {q: searchTrends[i].title.query, count: 1},
function(err, data, response) {
if (err) {
functions.logger.error(err);
} else {
functions.logger.info("Twitter data" +
JSON.stringify(data.statuses));
twitterTrends[i] = JSON.stringify(data.statuses);
}
});
}
return "done";
}
// NOTE(review): runs synchronously at schedule time with both arrays empty.
const dbRef = admin.database().ref("searchTrends");
dbRef.set({google: searchTrends, twitter: twitterTrends});
});
After checking your function it looks like a Promises issue. The reason you are seeing only the searchTrends data in Firestore is because the Firestore reference and upload is being done inside the callback for the dailyTrends method (taking for reference the method 1 code). However this does not wait for each request to the Twitter API to be resolved before writing to Firestore.
Based on the documentation for twit (which seems to be the wrapper you are using), it also supports standard promises. You could add each promise to an array, and then use Promise.all() to wait until they are all resolved to then write the data into Firestore. It would look something like this (which I haven’t tested since I don’t have Twitter API access).
// Answer: collect each Twitter request's promise and only write to the
// database once Promise.all confirms they have all resolved.
exports.callTo = functions.pubsub.schedule("5 * * * *").onRun((context) => {
const ts = Date.now();
const dateOb = new Date(ts);
const date = dateOb.getDate();
const month = dateOb.getMonth() + 1;
const year = dateOb.getFullYear();
let searchTrends;
const twitterTrends = [];
const twPromises = [];
googleTrends.dailyTrends({
trendDate: new Date(year + "-" + month + "-" + date),
geo: "CA",
}, function(err, res) {
if (err) {
functions.logger.error(err);
} else {
searchTrends = JSON.parse(res).default.trendingSearchesDays[0]
.trendingSearches;
functions.logger.info(searchTrends);
for (let i = 0; i < searchTrends.length; i++) {
functions.logger.log(searchTrends[i].title.query);
twPromises.push(T.get("search/tweets", {q: searchTrends[i].title.query, count: 1})); // adds promises to the array
}
Promise.all(twPromises).then((responses) => { // runs when all promises from the array are resolved
responses.forEach((response) => {
twitterTrends.push(JSON.stringify(response.statuses));
})
// safe to persist now: every Twitter response has been collected
const dbRef = admin.database().ref("searchTrends");
dbRef.set({google: searchTrends, twitter: twitterTrends});
})
}
});
});
I'm trying to execute query and use some rows of the results.
When I tried this code:
const pg = require('pg')
var Log = require('fancy-log');
var jsonPath = require('jsonpath');
var validator = require('validator');
var isEmpty = require('is-empty-array')
const argv = require('yargs').argv
require('custom-env').env(argv.env)
var db = require('./db');
var checker = require('./checklist')
// module-level holder for the randomly chosen transaction id
let txn_id;
var param = []
//////////////////////////////////// DB Connection Block & executeQuery ////////////////////////////////////
// Pool configuration comes entirely from environment variables
var config = {
user: process.env.DB_USER,
password: process.env.DB_PASS,
host: process.env.DB_HOST,
port: process.env.DB_PORT,
database: process.env.DB,
max: 10, // max number of clients in the pool
idleTimeoutMillis: 30000 // how long a client is allowed to remain idle before being closed
}
const pool = new pg.Pool(config)
// Runs a single parameterized statement inside a BEGIN/COMMIT transaction,
// rolling back on failure and always releasing the client back to the pool.
// NOTE(review): the explicit transaction adds nothing for plain SELECTs —
// presumably kept for write statements; confirm before simplifying.
async function query(q,param) {
const client = await pool.connect()
let res
try {
await client.query('BEGIN')
try {
res = await client.query(q, param)
await client.query('COMMIT')
Log("Connection succeed.")
} catch (err) {
await client.query('ROLLBACK')
throw err
}
} finally {
client.release()
}
return res
}
//////////////////////////////////// DB Connection Block & executeQuery ////////////////////////////////////
//client.getConnection();
//db.main()
//db.print()
/**
 * Runs the query and stores one random row's txn_id in the module-level
 * `txn_id`.
 * Fix: `Math.floor(Math.random() * rowCount + 1)` produced indices
 * 1..rowCount, so `rows[rowCount]` (undefined) was reachable — the cause of
 * "Cannot read property 'txn_id' of undefined". Valid indices are
 * 0..rowCount-1. The stray `await` on plain row data is also removed.
 * @param {string} queryParam - SQL text
 * @param {Array} conditions - positional query parameters
 */
async function executeQuery(queryParam, conditions) {
  try {
    const { rows, rowCount } = await query(queryParam, conditions);
    txn_id = rows[Math.floor(Math.random() * rowCount)].txn_id;
    Log("Related txn_id: " + txn_id);
  } catch (err) {
    console.log('Database ' + err);
  }
}
executeQuery("SELECT * from table_name", []);
I got this error all the time:
Database TypeError: Cannot read property 'txn_id' of undefined
My first use case is to have two different js files: db.js holding the connection and query function, exported and used in main.js. Unfortunately, I couldn't get it to work. Probably I don't fully understand how await works.
Thanks for any idea in advance!
EDIT: This is main goal. I want to seperate all my logics.
db.js
const pg = require('pg')
var Log = require('fancy-log');
const argv = require('yargs').argv
require('custom-env').env(argv.env)
//var db = function(){};
// module-level holder for the randomly chosen transaction id
let txn_id;
var param = []
// Pool configuration comes entirely from environment variables
var config = {
user: process.env.DB_USER,
password: process.env.DB_PASS,
host: process.env.DB_HOST,
port: process.env.DB_PORT,
database: process.env.DB,
max: 10, // max number of clients in the pool
idleTimeoutMillis: 30000 // how long a client is allowed to remain idle before being closed
}
const pool = new pg.Pool(config)
// Runs one parameterized statement inside BEGIN/COMMIT, rolling back on
// failure and always releasing the pooled client.
async function query(q,param) {
const client = await pool.connect()
let res
try {
await client.query('BEGIN')
try {
res = await client.query(q, param)
await client.query('COMMIT')
Log("Connection succeed.")
} catch (err) {
await client.query('ROLLBACK')
throw err
}
} finally {
client.release()
}
return res
}
/**
 * Runs the query and stores one random row's txn_id.
 * Fix: removed the `+ 1` from the random index — it produced indices
 * 1..rowCount, making rows[rowCount] (undefined) reachable and causing
 * "Cannot read property 'txn_id' of undefined". The redundant `await` on
 * the already-resolved row data is dropped too.
 */
async function executeQuery(queryParam, conditions) {
  try {
    const { rows, rowCount } = await query(queryParam, conditions);
    txn_id = rows[Math.floor(Math.random() * rowCount)].txn_id;
    Log("Related txn_id: " + txn_id);
  } catch (err) {
    console.log('Database ' + err);
  }
}
// expose the query runner to other modules (e.g. main.js)
module.exports = {
executeQuery
}
main.js
const pg = require('pg')
var Log = require('fancy-log');
var jsonPath = require('jsonpath');
var validator = require('validator');
var isEmpty = require('is-empty-array')
const argv = require('yargs').argv
require('custom-env').env(argv.env)
var db = require('./db');
var checker = require('./checklist')
// run a query through the shared db helper; the await applies inside foo
async function foo(){
await db.executeQuery("SELECT * from table_name",[])
}
foo()
Also same error here:
Database TypeError: Cannot read property 'txn_id' of undefined
You probably solved this already, but since there are no answers, in case someone needs help with this in the future.
TL;DR: You have to remove +1 from txn_id = await rows[Math.floor(Math.random() * rowCount + 1)].txn_id.
rows is an array of length N (N rows found in table_name) with indices from 0 to N-1, and rowCount equals (in this case) rows.length; and since:
1) 0 <= Math.random() < 1
2) 0 <= Math.random() * rowCount < N
3) 1 <= Math.random() * rowCount + 1 < N+1
4) 1 <= Math.floor(Math.random() * rowCount + 1) < N+1 (only natural numbers now)
then Math.floor(Math.random() * rowCount + 1) produces indices from 1 to N, which is why sometimes (in the case of N) rows[Math.floor(Math.random() * rowCount + 1)] equals undefined.
Also, await is not needed here: await rows[Math.floor(Math.random() * rowCount)]; you already awaited for the query to finish here const { rows, rowCount } = await query(queryParam,conditions).
Only place await in front of an expression which returns a Promise, otherwise it's doing nothing. (suggested)
I have a small Node.js program that calculates two Fibonacci numbers concurrently. When both numbers have been calculated the program prints Finished. I am doing this by implementing on('exit', ...) where my callback decreases a counter and prints Finished if it reaches 0. My question is: Is there something more elegant, something like waitAll()?
Here is the program:
const { Worker, isMainThread, workerData } = require('worker_threads');
/**
 * Naive recursive Fibonacci — deliberately CPU-heavy so each worker thread
 * below has real work to do.
 * @param {number} n - index into the Fibonacci sequence
 * @returns {number} the n-th Fibonacci number (fib(0) = 0, fib(1) = 1)
 */
function fibonacci(n) {
  return n <= 1 ? n : fibonacci(n - 1) + fibonacci(n - 2);
}
// Main-thread vs worker branch: worker_threads re-runs this same file in
// each Worker, where isMainThread is false.
let counter = 2;
if (isMainThread) {
// Shared 'exit' handler: decrement the counter per worker; when both
// workers have exited we are done.
let func = (code) => {
if (--counter === 0) {
console.log("Finished");
}
};
let w1 = new Worker(__filename, {workerData: 40});
w1.on('exit', func);
console.log("1");
let w2 = new Worker(__filename, {workerData: 45});
w2.on('exit', func);
console.log("2");
} else {
// Worker branch: compute the Fibonacci number passed in via workerData.
console.log(`Calculate fib(${workerData})`);
console.log(`fib(${workerData}) = ${fibonacci(workerData)}`);
}
If you promisify the event handler, you can use Promise.all:
// Promisify each worker's 'exit' event; Promise.all then acts as the
// requested waitAll().
const done = el => new Promise(res => el.on("exit", res));
Promise.all([
done(w1),
done(w2)
]).then(/*...*/)
I have several promises that I need to resolve before going further.
Promise.all(promises).then((results) => {
// going further
});
Is there any way I can have the progress of the Promise.all promise?
From the doc, it appears that it is not possible. And this question doesn't answer it either.
So:
Don't you agree that this would be useful? Shouldn't we query for this feature?
How can one implement it manually for now?
I've knocked up a little helper function that you can re-use.
Basically pass your promises as normal, and provide a callback to do what you want with the progress..
/**
 * Promise.all with a progress callback: invokes progress_cb(0) right away,
 * then with the completed percentage (0-100) each time one of the input
 * promises fulfills.
 * @param {Promise[]} proms - promises to track
 * @param {(pct: number) => void} progress_cb - receives percent complete
 * @returns {Promise} the Promise.all over the inputs
 */
function allProgress(proms, progress_cb) {
  let settledCount = 0;
  progress_cb(0);
  proms.forEach((prom) => {
    prom.then(() => {
      settledCount += 1;
      progress_cb((settledCount * 100) / proms.length);
    });
  });
  return Promise.all(proms);
}
// Demo helper: resolves after ms milliseconds, logging when done.
function test(ms) {
return new Promise((resolve) => {
setTimeout(() => {
console.log(`Waited ${ms}`);
resolve();
}, ms);
});
}
// Progress hits 100% once the longest timer (3500ms) resolves.
allProgress([test(1000), test(3000), test(2000), test(3500)],
(p) => {
console.log(`% Done = ${p.toFixed(2)}`);
});
You can add a .then() to each promise to count whos finished.
something like :
// Attach a .then() to each promise that bumps a shared counter and reports
// count/total; the mapped promises still resolve with their original values.
var count = 0;
var p1 = new Promise((resolve, reject) => {
setTimeout(resolve, 5000, 'boo');
});
var p2 = new Promise((resolve, reject) => {
setTimeout(resolve, 7000, 'yoo');
});
var p3 = new Promise((resolve, reject) => {
setTimeout(resolve, 3000, 'foo');
});
var promiseArray = [
p1.then(function(val) {
progress(++count);
return val
}),
p2.then(function(val) {
progress(++count);
return val
}),
p3.then(function(val) {
progress(++count);
return val
})
]
// logs the completed fraction (e.g. 0.33, 0.66, 1)
function progress(count) {
console.log(count / promiseArray.length);
}
Promise.all(promiseArray).then(values => {
console.log(values);
});
This has a few advantages over Keith's answer:
The onprogress() callback is never invoked synchronously. This ensures that the callback can depend on code which is run synchronously after the call to Promise.progress(...).
The promise chain propagates errors thrown in progress events to the caller rather than allowing uncaught promise rejections. This ensures that with robust error handling, the caller is able to prevent the application from entering an unknown state or crashing.
The callback receives a ProgressEvent instead of a percentage. This eases the difficulty of handling 0 / 0 progress events by avoiding the quotient NaN.
/**
 * Promise.all with DOM-style ProgressEvent reporting.
 * Emits a 0-progress event asynchronously, then one event per settled
 * promise; errors thrown by the onprogress handler propagate to the caller.
 * Fix: the original listing had a stray `}` before the closing parentheses
 * of the final `this.all(...)` call, which made it a syntax error.
 * @param {Iterable} iterable - promises (or values) to track
 * @param {(ev: ProgressEvent) => any} onprogress - progress handler
 * @returns {Promise<Array>} resolves like Promise.all over the inputs
 */
Promise.progress = async function progress (iterable, onprogress) {
  // consume iterable synchronously and convert to array of promises
  const promises = Array.from(iterable).map(this.resolve, this);
  let resolved = 0;
  // helper for emitting progress events; resolving the handler's return
  // value folds handler errors into this chain
  const emit = increment => this.resolve(
    onprogress(
      new ProgressEvent('progress', {
        total: promises.length,
        loaded: resolved += increment
      })
    )
  );
  // lift all progress events off the stack
  await this.resolve();
  // emit 0 progress event
  await emit(0);
  // emit a progress event each time a promise settles
  return this.all(
    promises.map(
      promise => promise.finally(
        () => emit(1)
      )
    )
  );
};
Note that ProgressEvent has limited support. If this coverage doesn't meet your requirements, you can easily polyfill this:
/**
 * Minimal ProgressEvent polyfill for runtimes without the DOM class.
 * lengthComputable defaults to whether a positive total was supplied.
 */
class ProgressEvent extends Event {
  constructor (type, options = {}) {
    super(type);
    const { loaded = 0, total = 0 } = options;
    const { lengthComputable = total > 0 } = options;
    this.lengthComputable = lengthComputable;
    this.loaded = loaded;
    this.total = total;
  }
}
#Keith in addition to my comment, here is a modification
(edited to fully detail hopefuly)
// original allProgress
//function allProgress(proms, progress_cb) {
// let d = 0;
// progress_cb(0);
// proms.forEach((p) => {
// p.then(()=> {
// d ++;
// progress_cb( (d * 100) / proms.length );
// });
// });
// return Promise.all(proms);
//}
//modifying allProgress to delay 'p.then' resolution
//function allProgress(proms, progress_cb) {
// let d = 0;
// progress_cb(0);
// proms.forEach((p) => {
// p.then(()=> {
// setTimeout( //added line
// () => {
// d ++;
// progress_cb( (d * 100) / proms.length );
// }, //added coma :)
// 4000); //added line
// });
// });
// return Promise.all(proms
// ).then(()=>{console.log("Promise.all completed");});
// //added then to report Promise.all resolution
// }
//modified allProgress
// version 2 not to break any promise chain
// v2: wrap each delayed progress update in a promise that resolves with the
// original value, so downstream consumers of the individual promises are
// not broken by the added delay.
// NOTE(review): Promise.all(proms) still awaits the ORIGINAL promises, not
// the wrapped ones — which is why "Promise.all completed" prints before any
// delayed progress message, as the output below demonstrates.
function allProgress(proms, progress_cb) {
let d = 0;
progress_cb(0);
proms.forEach((p) => {
p.then((res)=> { //added 'res' for v2
return new Promise((resolve) => { //added line for v2
setTimeout( //added line
() => {
d ++;
progress_cb( (d * 100) / proms.length );
resolve(res); //added line for v2
}, //added coma :)
4000); //added line
}); //added line for v2
});
});
return Promise.all(proms
).then(()=>{console.log("Promise.all completed");});
//added then chaining to report Promise.all resolution
}
// Demo helper: resolves after ms milliseconds, logging when done.
function test(ms) {
return new Promise((resolve) => {
setTimeout(() => {
console.log(`Waited ${ms}`);
resolve();
}, ms);
});
}
// Exercise the modified allProgress with staggered timers.
allProgress([test(1000), test(3000), test(2000), test(3500)],
(p) => {
console.log(`% Done = ${p.toFixed(2)}`);
});
"Promise.all completed" will output before any progress message
here is the output that I get
% Done = 0.00
Waited 1000
Waited 2000
Waited 3000
Waited 3500
Promise.all completed
% Done = 25.00
% Done = 50.00
% Done = 75.00
% Done = 100.00
Here's my take on this. You create a wrapper for the progressCallback and telling how many threads you have. Then, for every thread you create a separate callback from this wrapper with the thread index. Threads each report through their own callback as before, but then their individual progress values are merged and reported through the wrapped callback.
/**
 * Merges per-thread progress reports (each 0..1) into one overall value
 * delivered to progressCallback.
 * @param {number} threads - how many threads will report
 * @param {(total: number) => void} progressCallback - merged progress sink
 * @returns {{getCallback: (thread: number) => (progress: number) => void}}
 */
function createMultiThreadProgressWrapper(threads, progressCallback) {
  const threadProgress = Array(threads);
  const sendTotalProgress = function () {
    let total = 0;
    for (let i = 0; i < threadProgress.length; i += 1) {
      // threads that have not reported yet count as 0
      total += threadProgress[i] || 0;
    }
    progressCallback(total / threads);
  };
  return {
    // Returns the progress callback dedicated to one thread index.
    getCallback(thread) {
      return function (progress) {
        threadProgress[thread] = progress;
        sendTotalProgress();
      };
    }
  };
}
// --------------------------------------------------------
// Usage:
// --------------------------------------------------------
// Placeholder promise factory: the real implementation should report its
// own 0..1 progress through progressCallback.
function createPromise(progressCallback) {
return new Promise(function(resolve, reject) {
// do whatever you need and report progress to progressCallback(float)
});
}
// Each promise receives a per-thread callback; mainCallback gets the
// merged overall progress.
var wrapper = createMultiThreadProgressWrapper(3, mainCallback);
var promises = [
createPromise(wrapper.getCallback(0)),
createPromise(wrapper.getCallback(1)),
createPromise(wrapper.getCallback(2))
];
Promise.all(promises);
You can use my npm package with an extended version of the native promise, that supports advanced progress capturing, including nested promises, out of the box Live sandbox
// c-promise2's CPromise.all exposes a .progress() hook out of the box;
// p arrives as a 0..1 fraction.
import { CPromise } from "c-promise2";
(async () => {
const results = await CPromise.all([
CPromise.delay(1000, 1),
CPromise.delay(2000, 2),
CPromise.delay(3000, 3),
CPromise.delay(10000, 4)
]).progress((p) => {
console.warn(`Progress: ${(p * 100).toFixed(1)}%`);
});
console.log(results); // [1, 2, 3, 4]
})();
Or with concurrency limitation (Live sandbox):
// Concurrency-limited variant: mapper handles one filename at a time, with
// at most `concurrency` uploads in flight; .progress() still reports the
// overall 0..1 fraction.
// Fix: the template placeholders were garbled as `$(unknown)` — they should
// interpolate the current filename.
import { CPromise } from "c-promise2";

(async () => {
  const results = await CPromise.all(
    [
      "filename1.txt",
      "filename2.txt",
      "filename3.txt",
      "filename4.txt",
      "filename5.txt",
      "filename6.txt",
      "filename7.txt"
    ],
    {
      async mapper(filename) {
        console.log(`load and push file [${filename}]`);
        // your async code here to upload a single file
        return CPromise.delay(1000, `operation result for [${filename}]`);
      },
      concurrency: 2
    }
  ).progress((p) => {
    console.warn(`Uploading: ${(p * 100).toFixed(1)}%`);
  });
  console.log(results);
})();
const dataArray = [];
let progress = 0;
// Report percentage completion as each mapped async task settles.
// Fixes: `Math.celi` -> `Math.ceil` (the typo threw a TypeError at
// runtime), and the counter is now incremented BEFORE logging so the first
// completed task reports 1/N instead of 0/N.
Promise.all(dataArray.map(async (data) => {
  await something();
  progress += 1;
  console.log('progress = ', Math.ceil(progress * 100 / dataArray.length));
}));