Cloud function does not write second argument to Firebase - javascript

I have written a cloud function that runs every 5 minutes on my Firebase app. In essence, the function gathers trends data from the Google Trends website and parses the JSON into a variable.
After doing so I want to then connect to the Twitter API and search for tweets using the trending topics fetched in the first part.
My issue seems to lie with the second part: it fetches the data, but the rest of the function does not wait for the result before writing to Firebase.
I have tried two different methods, but neither works as intended. I am struggling to understand how to make the function wait for the second part to gather and store the information before writing to Firebase.
Method 1
exports.callTo = functions.pubsub.schedule("5 * * * *").onRun((context) => {
  let searchTrends;
  const ts = Date.now();
  const dateOb = new Date(ts);
  const date = dateOb.getDate();
  const month = dateOb.getMonth() + 1;
  const year = dateOb.getFullYear();
  const twitterTrends = [];
  googleTrends.dailyTrends({
    trendDate: new Date(year + "-" + month + "-" + date),
    geo: "CA",
  }, function(err, res) {
    if (err) {
      functions.logger.error(err);
    } else {
      searchTrends = JSON.parse(res).default.trendingSearchesDays[0]
          .trendingSearches;
      functions.logger.info(searchTrends);
      for (let i = 0; i < searchTrends.length; i++) {
        functions.logger.log(searchTrends[i].title.query);
        T.get("search/tweets", {q: searchTrends[i].title.query, count: 1},
            function(err, data, response) {
              if (err) {
                functions.logger.error(err);
              }
              functions.logger.info("Twitter data" +
                  JSON.stringify(data.statuses));
              twitterTrends[i] = JSON.stringify(data.statuses);
            });
      }
      const dbRef = admin.database().ref("searchTrends");
      dbRef.set({google: searchTrends, twitter: twitterTrends});
    }
  });
});
Method 2
exports.callTo = functions.pubsub.schedule("5 * * * *").onRun((context) => {
  let searchTrends;
  const ts = Date.now();
  const dateOb = new Date(ts);
  const date = dateOb.getDate();
  const month = dateOb.getMonth() + 1;
  const year = dateOb.getFullYear();
  const twitterTrends = [];
  async function getTrends() {
    googleTrends.dailyTrends({
      trendDate: new Date(year + "-" + month + "-" + date),
      geo: "CA",
    }, function(err, res) {
      if (err) {
        functions.logger.error(err);
      } else {
        searchTrends = JSON.parse(res).default.trendingSearchesDays[0]
            .trendingSearches;
        functions.logger.info(searchTrends);
      }
    });
    await getTwitterTrends();
  }
  async function getTwitterTrends() {
    for (let i = 0; i < 1; i++) {
      functions.logger.log(searchTrends[i].title.query);
      T.get("search/tweets", {q: searchTrends[i].title.query, count: 1},
          function(err, data, response) {
            if (err) {
              functions.logger.error(err);
            } else {
              functions.logger.info("Twitter data" +
                  JSON.stringify(data.statuses));
              twitterTrends[i] = JSON.stringify(data.statuses);
            }
          });
    }
    return "done";
  }
  const dbRef = admin.database().ref("searchTrends");
  dbRef.set({google: searchTrends, twitter: twitterTrends});
});

After checking your function, this looks like a Promises issue. The reason you are seeing only the searchTrends data in the database is that the database reference and write happen inside the callback for the dailyTrends method (taking the Method 1 code as reference), but nothing waits for each request to the Twitter API to resolve before that write runs.
Based on the documentation for twit (which seems to be the wrapper you are using), it also supports standard promises. You could add each promise to an array and then use Promise.all() to wait until they have all resolved before writing the data to the database. It would look something like this (untested, since I don't have Twitter API access):
exports.callTo = functions.pubsub.schedule("5 * * * *").onRun((context) => {
  const ts = Date.now();
  const dateOb = new Date(ts);
  const date = dateOb.getDate();
  const month = dateOb.getMonth() + 1;
  const year = dateOb.getFullYear();
  let searchTrends;
  const twitterTrends = [];
  const twPromises = [];
  googleTrends.dailyTrends({
    trendDate: new Date(year + "-" + month + "-" + date),
    geo: "CA",
  }, function(err, res) {
    if (err) {
      functions.logger.error(err);
    } else {
      searchTrends = JSON.parse(res).default.trendingSearchesDays[0]
          .trendingSearches;
      functions.logger.info(searchTrends);
      for (let i = 0; i < searchTrends.length; i++) {
        functions.logger.log(searchTrends[i].title.query);
        // adds each pending Twitter request (a promise) to the array
        twPromises.push(T.get("search/tweets", {q: searchTrends[i].title.query, count: 1}));
      }
      // runs once all promises in the array have resolved
      Promise.all(twPromises).then((responses) => {
        responses.forEach((response) => {
          // twit's promise API resolves with {data, resp}; adjust if your version differs
          twitterTrends.push(JSON.stringify(response.data.statuses));
        });
        const dbRef = admin.database().ref("searchTrends");
        dbRef.set({google: searchTrends, twitter: twitterTrends});
      });
    }
  });
});
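One caveat worth adding (a sketch only, not tested): for a scheduled function the promise chain should be returned from onRun, otherwise Cloud Functions may tear the instance down before the database write completes. Wrapping the callback-based dailyTrends call in a Promise makes that straightforward; the tweet-response shape is assumed to be twit's {data, resp}:
exports.callTo = functions.pubsub.schedule("5 * * * *").onRun((context) => {
  const dateOb = new Date();
  const trendDate = new Date(dateOb.getFullYear() + "-" + (dateOb.getMonth() + 1) + "-" + dateOb.getDate());
  // Wrap the callback-based dailyTrends call so it can be chained.
  const dailyTrendsPromise = new Promise((resolve, reject) => {
    googleTrends.dailyTrends({trendDate: trendDate, geo: "CA"}, (err, res) => {
      if (err) reject(err); else resolve(res);
    });
  });
  // Returning the chain tells Cloud Functions to wait for the write to finish.
  return dailyTrendsPromise
      .then((res) => {
        const searchTrends = JSON.parse(res).default.trendingSearchesDays[0].trendingSearches;
        const twPromises = searchTrends.map((trend) =>
          T.get("search/tweets", {q: trend.title.query, count: 1}));
        return Promise.all(twPromises).then((responses) => {
          // assumes twit's promise API ({data, resp})
          const twitterTrends = responses.map((r) => JSON.stringify(r.data.statuses));
          return admin.database().ref("searchTrends").set({google: searchTrends, twitter: twitterTrends});
        });
      })
      .catch((err) => functions.logger.error(err));
});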

Related

Reducing code repetition in a connecting flights scenario in Javascript

Without going into too much detail, my question is: how would you go about reducing the repetition in Node.js? I am very much a beginner, so please have mercy.
I fetch data from an API and output my own API, which is that data filtered and sorted according to the req.query parameters (from, to, date).
This is the code that works but has too much repetition in it:
app.get('/search', async (req, res, next) => {
try {
const apiRes = await axios.get('https://thistheapi.net/api/TravelPrices');
result = apiRes.data;
searchFrom = req.query.from;
searchTo = req.query.to;
searchDate = req.query.date;
const routesArray = [];
for (let route of result) { routesArray.push(route) };
if (searchFrom.toLowerCase() == "mercury" && searchTo.toLowerCase() == "earth") {
finalResult = [];
// Finding and filtering the first flight
const fromFilterF1 = "Mercury";
// Create an array, which has the results of routes that match the req.query from name/ aka starting point
firstArrayF1 = routesArray.filter(obj => obj.routeInfo.from.name == fromFilterF1);
const toFilterF1 = "Venus";
// Filter the resulting array with the next 'to' location
secondArrayF1 = firstArrayF1.filter(obj => obj.routeInfo.to.name == toFilterF1);
// Create an array that has all the providers with their data for this specific route / flight
const providerArrayF1 = secondArrayF1.map(x => x.providers)
const trialArrayF1 = [];
for (let x of providerArrayF1) { for (let y of x) { trialArrayF1.push(y) } }
// Use the req.query selected date to filter all flights that match the date
dateFilterF1 = { flightStart: searchDate };
// options for the date variable, since in the api data it has specific time of day also added
const options = { year: 'numeric', month: 'numeric', day: 'numeric' };
thirdArrayF1 = trialArrayF1.filter(obj => new Date(obj.flightStart).toLocaleDateString('en-CA', options) == dateFilterF1.flightStart);
// Sort the resulting array of matching from-location, to-location, and date - starting from the earliest flights to the latest one
thirdArrayF1.sort((a, b) => { return new Date(a.flightStart) - new Date(b.flightStart) });
finalResult.push(thirdArrayF1[0]);
// ALL OF THIS REPEATS FOR THE SECOND & THIRD FLIGHT, except the flight start date/time has to be later than the flight end time of the previous flight
// Finding and filtering the second flight
if (thirdArrayF1.length == 0) { finalResult.push(null) } else {
const fromFilterF2 = "Venus";
firstArrayF2 = routesArray.filter(obj => obj.routeInfo.from.name == fromFilterF2);
const toFilterF2 = "Earth";
secondArrayF2 = firstArrayF2.filter(obj => obj.routeInfo.to.name == toFilterF2);
const providerArrayF2 = secondArrayF2.map(x => x.providers)
const trialArrayF2 = [];
for (let x of providerArrayF2) { for (let y of x) { trialArrayF2.push(y) } }
dateFilterF2 = { flightStart: thirdArrayF1[0].flightEnd };
thirdArrayF2 = trialArrayF2.filter(obj => new Date(obj.flightStart).toLocaleDateString('en-CA', options) >= dateFilterF2.flightStart);
thirdArrayF2.sort((a, b) => { return new Date(a.flightStart) - new Date(b.flightStart) });
finalResult.push(thirdArrayF2[0])
};
// Finding and filtering the third flight
if (thirdArrayF2.length == 0) { finalResult.push(null) } else {
const fromFilterF3 = "Earth";
firstArrayF3 = routesArray.filter(obj => obj.routeInfo.from.name == fromFilterF3);
const toFilterF3 = "Jupiter";
secondArrayF3 = firstArrayF3.filter(obj => obj.routeInfo.to.name == toFilterF3);
const providerArrayF3 = secondArrayF3.map(x => x.providers)
const trialArrayF3 = [];
for (let x of providerArrayF3) { for (let y of x) { trialArrayF3.push(y) } }
dateFilterF3 = { flightStart: thirdArrayF2[0].flightEnd };
thirdArrayF3 = trialArrayF3.filter(obj => new Date(obj.flightStart).toLocaleDateString('en-CA', options) >= dateFilterF3.flightStart);
thirdArrayF3.sort((a, b) => { return new Date(a.flightStart) - new Date(b.flightStart) });
finalResult.push(thirdArrayF3[0])
};
res.json(finalResult);
} else if (searchFrom.toLowerCase() == "mercury" && searchTo.toLowerCase() == "jupiter"){ etc...
As you can see, there is a lot of similar code, but I can't figure out how to make it more compact and less repetitive without breaking it.
I appreciate all the help and advice!
Also, this is a snippet of the api that I use:
"legs":[{"id":"a0ee2c2b-667c-46d7-87c0-2ca32da88a46","routeInfo":{"id":"44edd88d-8904-4266-9df5-f37701741123","from":{"id":"0ee3379b-98fb-4b46-9aef-0a3a81a46ad4","name":"Earth"},"to":{"id":"a504bf72-2be2-4f2b-bab1-61d818757e3a","name":"Jupiter"},"distance":628730000},"providers":[{"id":"0257eab0-7c5c-4a4c-af79-cdf6f3ab9349","company":{"id":"27b1ce2f-c88a-45f4-96e1-dd9fcbb2db73","name":"Spacegenix"},"price":570774.60,"flightStart":"2022-02-04T07:17:16.4529653Z","flightEnd":"2022-02-08T13:57:16.4529653Z"},{"id":"e6ed4071-e29c-46a1-a38f-a082eff0e4de","company":{"id":"eb12838f-afb4-4447-9781-2d87b0641337","name":"Galaxy Express"},"price":180679.62,"flightStart":"2022-02-13T00:30:16.4529883Z","flightEnd":"2022-02-17T14:29:16.4529883Z"} et cetera.
Basically I'm doing custom connected flights between different locations. I am sure there is a way to make this less repetitive, but I can't figure it out.
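One way to cut the repetition (an untested sketch based on the snippet above) is to move the per-leg filter/sort into a helper and loop over the legs. The legs array below mirrors the Mercury -> Jupiter branch and would normally be derived from searchFrom/searchTo; field names follow the API snippet in the question:
// Reusable per-leg helper: find the earliest matching provider flight, or null.
function findEarliestFlight(routesArray, fromName, toName, startFilter) {
  return routesArray
      .filter(route => route.routeInfo.from.name === fromName && route.routeInfo.to.name === toName)
      .flatMap(route => route.providers)
      .filter(startFilter)
      .sort((a, b) => new Date(a.flightStart) - new Date(b.flightStart))[0] || null;
}

const options = { year: 'numeric', month: 'numeric', day: 'numeric' };
const legs = [["Mercury", "Venus"], ["Venus", "Earth"], ["Earth", "Jupiter"]];
const finalResult = [];
let previous = null;
for (const [from, to] of legs) {
  const startFilter = previous === null
    // first leg: depart on the requested day (same check as in the original code)
    ? p => new Date(p.flightStart).toLocaleDateString('en-CA', options) === searchDate
    // later legs: depart after the previous leg lands
    : p => new Date(p.flightStart) >= new Date(previous.flightEnd);
  previous = findEarliestFlight(routesArray, from, to, startFilter);
  finalResult.push(previous);
  if (previous === null) break; // no connecting flight found
}
res.json(finalResult);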

Simple Git works when tested singularly but doesn't work when used in a larger project Node.js

I have a very simple js git script that doesn't seem to work. It sometimes pushes random commits, but most of the commits don't go through. Here is the code:
const jsonfile = require("jsonfile");
const crypto = require('crypto');
const fs = require('fs');
const chalk = require("chalk");
const ora = require("ora");
const boxen = require("boxen");
const simpleGit = require("simple-git");
const FILE_PATH = "./data.json";
function daysIntoYear(){
let date = new Date();
return (Date.UTC(date.getFullYear(), date.getMonth(), date.getDate()) - Date.UTC(date.getFullYear(), 0, 0)) / 24 / 60 / 60 / 1000;
}
const makeCommit = (DATE, n) => {
if (n === 0) return simpleGit().push();
const data = {
date: DATE,
iter: n
};
d = new Date(DATE);
var curr_date = d.getDate();
var curr_month = d.getMonth() + 1;
var curr_year = d.getFullYear();
// Log for debugging purposes
// console.log(curr_month + "-" + curr_date + "-" + curr_year);
jsonfile.writeFile(FILE_PATH, data, () => {
simpleGit()
.add([FILE_PATH])
.commit(crypto.randomBytes(20).toString('hex'), ['--date=' + d])
.push();
});
makeCommit(DATE, --n)
};
function getRandomCommits () {
let result = crypto.randomInt(0, 101)
if (result <= 30) {
return crypto.randomInt(0, 6)
} else if (result <= 45) {
return crypto.randomInt(0, 14)
} else {
return crypto.randomInt(0, 25)
}
}
function commitsAndContributions () {
let iter = daysIntoYear();
let result = [];
for (let i = 0; i < iter; i++) {
result.push({
date: new Date(new Date().getFullYear(), 0, i),
commits: getRandomCommits()
})
}
result.splice(0, 1)
return result;
}
function startCommits () {
let data = commitsAndContributions();
const spinner = ora("Generating your GitHub activity\n").start();
fs.writeFileSync('./commits.json', JSON.stringify(data, null, 4))
for (let i = 0; i < data.length; i++) {
makeCommit(data[i].date, data[i].commits);
}
spinner.succeed();
console.log(
boxen(
`${chalk.green("Success!")} ${
data.length
} commits have been created.`,
{ borderColor: "yellow", padding: 1, align: "center" }
)
);
}
startCommits()
Data.json is an actual file, commits.json is also a file, and this is linked to a github repo. When I run the script:
jsonfile.writeFile('./data.json', new Date(), () => {
simpleGit()
.add(['./data.json'])
.commit(crypto.randomBytes(20).toString('hex'), ['--date=' + new Date()])
.push();
});
I can see the random commit get pushed. However, it doesn't work when I run the main script. Is there a reason for this?
Edit: As far as I can tell, it pushes one commit, and ignores the rest. Is there some sort of ratelimit going on?
The loop in startCommits runs synchronously (i.e. it doesn't wait for the commit/push to finish before moving on to the next iteration). You will therefore likely see a success message before anything more than the first commit has even been started.
In this example the makeCommit and startCommits functions are changed to be async, with each entry in the data array processed in full before the next one is begun:
const {writeFile} = require('fs/promises');

const makeCommit = async (DATE, n) => {
  if (n === 0) return simpleGit().push();
  const data = {
    date: DATE,
    iter: n
  };
  const d = new Date(DATE);
  // write the file, then add/commit/push, waiting for each step
  await writeFile(FILE_PATH, JSON.stringify(data), 'utf8');
  await simpleGit()
      .add([FILE_PATH])
      .commit(crypto.randomBytes(20).toString('hex'), ['--date=' + d])
      .push();
  // await the recursive call so the remaining commits for this entry
  // are also finished before the caller continues
  return makeCommit(DATE, --n);
};
async function startCommits () {
  let data = commitsAndContributions();
  const spinner = ora("Generating your GitHub activity\n").start();
  fs.writeFileSync('./commits.json', JSON.stringify(data, null, 4));
  for (const entry of data) {
    await makeCommit(entry.date, entry.commits);
  }
  spinner.succeed();
}
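If the recursion feels awkward, the same sequencing can also be written as a plain loop inside makeCommit (an untested sketch using the same imports and constants as the snippets above):
const makeCommit = async (DATE, n) => {
  const d = new Date(DATE);
  for (let i = n; i > 0; i--) {
    // write the payload, then add/commit/push, finishing each commit before starting the next
    await writeFile(FILE_PATH, JSON.stringify({ date: DATE, iter: i }), 'utf8');
    await simpleGit()
        .add([FILE_PATH])
        .commit(crypto.randomBytes(20).toString('hex'), ['--date=' + d])
        .push();
  }
};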

Using data i get from request function in node.JS again until a condition is met

I want to access the Shopify API using Node.js with the request method. I get the first 50 items, but I need to send the last id of the products I received so the call can loop through all the products until there is no next page (I check that by seeing whether the last batch has fewer than 50 items).
So when I get the response and extract lastID, I want to feed that back into the same function until Prarraylength is not 50 (or is 0).
The thing is, request works asynchronously, and I don't know how to feed the same function with the resulting lastID in Node.js.
Here is my code
let importedData = JSON.parse(body);
//for ( const product in importedData.products ){
// console.log(`${importedData.products[product].id}`);
//}
lastID = importedData.products[importedData.products.length-1].id;
let lastIDD = lastID;
console.log(`This is ${lastID}`);
importedData ? console.log('true') : console.log('false');
let Prarraylength = importedData.products.length;
console.log(Prarraylength);
//console.log(JSON.stringify(req.headers));
return lastIDD;
});
You can use a for loop and await to control the flow of your script in this case.
I'd suggest using the request-promise-native module to get items, since it has a promise-based interface, but you could use node-fetch or axios (or any other HTTP client) too.
In this case, to show you the logic, I've created a mock rp which normally you'd create as follows:
const rp = require("request-promise-native");
You can see we're looping through the items, 50 at a time. We're passing the last id as a url parameter to the next rp call. Now this is obviously going to be different in reality, but I believe you can easily change the logic as you require.
const totalItems = 155;
const itemsPerCall = 50;
// Mock items array...
const items = Array.from({ length: totalItems}, (v,n) => { return { id: n+1, name: `item #${n+1}` } });
// Mock of request-promise (to show logic..)
// Replace with const rp = require("request-promise-native");
const rp = function(url) {
  let itemPointer = parseInt(url.split("/").slice(-1)[0]);
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      let slice = items.slice(itemPointer, itemPointer + itemsPerCall);
      itemPointer += itemsPerCall;
      resolve({ products: slice });
    }, 500);
  });
};

async function getMultipleRequests() {
  let callIndex = 0;
  let lastID = 0;
  const MAX_CALLS = 20;
  const EXPECTED_ARRAY_LENGTH = 50;
  for (let callCount = 1; callCount < MAX_CALLS; callCount++) {
    // Replace with the actual url..
    let url = "/products/" + lastID;
    let importedData = await rp(url);
    lastID = importedData.products[importedData.products.length - 1].id;
    console.log("Call #: " + ++callIndex + ", Item count: " + importedData.products.length + ", lastID: " + lastID);
    if (importedData.products.length < EXPECTED_ARRAY_LENGTH) {
      console.log("Reached the end of products...exiting loop...");
      break;
    }
  }
}
getMultipleRequests();
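Against the real Shopify REST API the loop looks much the same. Assuming the products.json endpoint with its limit and since_id parameters (check the docs for your API version; the URL, auth header and shop name below are placeholders), a do/while with request-promise-native could be sketched like this:
const rp = require("request-promise-native");

async function getAllProducts() {
  const all = [];
  let lastID = 0;
  let batch;
  do {
    // NOTE: URL, token header and API version are assumptions - adjust to your shop.
    batch = await rp({
      uri: "https://your-shop.myshopify.com/admin/api/2021-07/products.json",
      qs: { limit: 50, since_id: lastID },
      headers: { "X-Shopify-Access-Token": process.env.SHOPIFY_TOKEN },
      json: true,
    });
    all.push(...batch.products);
    if (batch.products.length > 0) {
      lastID = batch.products[batch.products.length - 1].id;
    }
  } while (batch.products.length === 50); // a short page means we reached the end
  return all;
}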

once("child_added") on non existing node does not fail but timeout

First, I'm not sure that there is a real problem, but I'll share my reasoning.
I use Firebase as a database/backend for archiving all the data from various sensors at home, plus a UI with cool graphs in Hosting. Every 10 minutes I push various data (temperature, humidity, CO2 level, illumination, ...) coming from various rooms. I have almost 3 years of data available (so my database has a lot of nodes).
My database structure looks like this:
root
readings
room_id
GUID
time
temp
hum
lum
For a few years I had a PHP script hosted at home that checked whether the latest item inside each readings/room_id has a time value that is not too old (no more than 11 minutes). I translated it to a Firebase Cloud Function a few days ago and got something like this:
exports.monitor = functions.https.onRequest((req, res) => {
const tstamp = Math.floor(Date.now() / 1000);
var sensors = ["r01", "r02", "r03", "r04", "r05"];
var promiseArray = [];
var result = {};
for (var i = 0; i < sensors.length; i++) {
console.log('Adding promise for ' + sensors[i]);
promiseArray.push(admin.database().ref('/readings/' + sensors[i]).limitToLast(1).once("child_added"));
}
Promise.all(promiseArray).then(snapshots => {
console.log('All promises done : ' + snapshots.length);
res.set('Cache-Control', 'private, max-age=300');
for (var i = 0; i < snapshots.length; i++) {
differenceInMinutes = (tstamp - snapshots[i].val().time) / 60;
result[sensors[i]] = {current: tstamp,
sensor: snapshots[i].val().time,
diff: Math.round(differenceInMinutes * 10) / 10};
if (differenceInMinutes < 11) {
result[sensors[i]]['status'] = "OK";
} else {
result[sensors[i]]['status'] = "KO";
}
}
return res.status(200).json(result);
}).catch(error => {
console.error('Error while getting sensors details', error.message);
res.sendStatus(500);
});
});
The code works well. So my question is: if I add another room ID to the sensors array that does not exist under "readings" in my database, I expected to get an error (a failed promise); instead I only get a huge timeout error. I don't want that kind of timeout on Firebase Cloud Functions (to avoid any unwanted cost).
Is that normal? Is my code wrong? Do I have to start by getting a shallow snapshot of "readings/room_id", check that it exists, and check whether it has children?
Thanks a lot for your help.
EDIT: With the help of Frank I fixed my code; here is the revised version:
exports.monitor = functions.https.onRequest((req, res) => {
const tstamp = Math.floor(Date.now() / 1000);
var sensors = ["r01", "r02", "r03", "r04", "r05"];
var promiseArray = [];
var result = {};
for (var i = 0; i < sensors.length; i++) {
console.log('Adding promise for ' + sensors[i]);
promiseArray.push(admin.database().ref('/readings/' + sensors[i]).limitToLast(1).once("value"));
}
Promise.all(promiseArray).then(queryResults => {
console.log('All promises done : ' + queryResults.length);
res.set('Cache-Control', 'private, max-age=300');
queryResults.forEach((snapshots, i) => {
snapshots.forEach((snapshot) => {
var currentData = snapshot.val();
differenceInMinutes = (tstamp - currentData.time) / 60;
result[sensors[i]] = {current: tstamp,
sensor: currentData.time,
diff: Math.round(differenceInMinutes * 10) / 10};
if (differenceInMinutes < 11) {
result[sensors[i]]['status'] = "OK";
} else {
result[sensors[i]]['status'] = "KO";
}
});
});
return res.status(200).json(result);
}).catch(error => {
console.error('Error while getting sensors details', error.message);
res.sendStatus(500);
});
});
A child_added event only fires when there is a child node. If there are no child nodes under the location (or matching the query), it will not fire.
To ensure you also get notified when there are no children, you should listen to the value event:
for (var i = 0; i < sensors.length; i++) {
  console.log('Adding promise for ' + sensors[i]);
  var query = admin.database().ref('/readings/' + sensors[i]).limitToLast(1).once("value");
  promiseArray.push(query);
}
Since a value event may match multiple children in a single snapshot (despite your query only requesting a single child), you will need to loop over the children of the resulting snapshot:
Promise.all(promiseArray).then((queryResults) => {
  console.log('All promises done : ' + queryResults.length);
  res.set('Cache-Control', 'private, max-age=300');
  queryResults.forEach((snapshots) => {
    snapshots.forEach((snapshot) => {
      differenceInMinutes = (tstamp - snapshot.val().time) / 60;
      ...
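If you also want the response to flag rooms that have no readings node at all (instead of silently leaving them out of the result), you can check the snapshot before looping. A small sketch on top of the revised code above:
queryResults.forEach((snapshots, i) => {
  if (!snapshots.exists()) {
    // nothing stored under /readings/<room_id>
    result[sensors[i]] = {status: "KO", reason: "no readings found"};
    return;
  }
  snapshots.forEach((snapshot) => {
    // ... same per-reading handling as above
  });
});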

How do I measure the execution time of JavaScript code with callbacks?

I have a piece of JavaScript code that I am executing using the node.js interpreter.
for(var i = 1; i < LIMIT; i++) {
var user = {
id: i,
name: "MongoUser [" + i + "]"
};
db.users.save(user, function(err, saved) {
if(err || !saved) {
console.log("Error");
} else {
console.log("Saved");
}
});
}
How can I measure the time taken by these database insert operations? I could compute the difference of date values after and before this piece of code but that would be incorrect because of the asynchronous nature of the code.
Use the Node.js console.time() and console.timeEnd():
var i;
console.time("dbsave");
// define the callback first so it can be passed into each save call
var end = function(err, saved) {
  console.log((err || !saved) ? "Error" : "Saved");
  // when the last save has called back, stop the timer
  if (--i === 1) {
    console.timeEnd("dbsave");
  }
};
for (i = 1; i < LIMIT; i++) {
  db.users.save({id: i, name: "MongoUser [" + i + "]"}, end);
}
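If db.users.save follows the usual Node (err, result) callback convention, you can also skip the manual countdown by promisifying the call and timing a Promise.all (untested sketch):
const { promisify } = require("util");
const saveUser = promisify(db.users.save.bind(db.users)); // assumes an (err, saved) callback

console.time("dbsave");
const pending = [];
for (let i = 1; i < LIMIT; i++) {
  pending.push(saveUser({ id: i, name: "MongoUser [" + i + "]" }));
}
Promise.all(pending)
    .then(() => console.timeEnd("dbsave"))
    .catch((err) => console.log("Error", err));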
There is a method designed for this: check out process.hrtime().
So I basically put this at the top of my app:
var start = process.hrtime();
var elapsed_time = function(note){
var precision = 3; // 3 decimal places
var elapsed = process.hrtime(start)[1] / 1000000; // divide by a million to get nano to milli
console.log(process.hrtime(start)[0] + " s, " + elapsed.toFixed(precision) + " ms - " + note); // print message + time
start = process.hrtime(); // reset the timer
}
Then I use it to see how long functions take. Here's a basic example that prints the contents of a text file called "output.txt":
var debug = true;
http.createServer(function(request, response) {
if(debug) console.log("----------------------------------");
if(debug) elapsed_time("recieved request");
var send_html = function(err, contents) {
if(debug) elapsed_time("start send_html()");
response.writeHead(200, {'Content-Type': 'text/html' } );
response.end(contents);
if(debug) elapsed_time("end send_html()");
}
if(debug) elapsed_time("start readFile()");
fs.readFile('output.txt', send_html);
if(debug) elapsed_time("end readFile()");
}).listen(8080);
Here's a quick test you can run in a terminal (BASH shell):
for i in {1..100}; do echo $i; curl http://localhost:8080/; done
Invoking console.time('label') will record the current time in milliseconds, then later calling console.timeEnd('label') will display the duration from that point.
The time in milliseconds will be automatically printed alongside the label, so you don't have to make a separate call to console.log to print a label:
console.time('test');
//some code
console.timeEnd('test'); //Prints something like that-> test: 11374.004ms
For more information, see Mozilla's developer docs on console.time.
Surprised no one has mentioned the new built-in libraries yet:
Available in Node >= 8.5, and should be in modern browsers
https://developer.mozilla.org/en-US/docs/Web/API/Performance
https://nodejs.org/docs/latest-v8.x/api/perf_hooks.html#
Node 8.5 ~ 9.x (Firefox, Chrome)
// const { performance } = require('perf_hooks'); // enable for node
const delay = time => new Promise(res=>setTimeout(res,time))
async function doSomeLongRunningProcess(){
await delay(1000);
}
performance.mark('A');
(async ()=>{
await doSomeLongRunningProcess();
performance.mark('B');
performance.measure('A to B', 'A', 'B');
const measure = performance.getEntriesByName('A to B')[0];
// firefox appears to only show second precision.
console.log(measure.duration);
// apparently you should clean up...
performance.clearMarks();
performance.clearMeasures();
// Prints the number of milliseconds between Mark 'A' and Mark 'B'
})();
https://repl.it/#CodyGeisler/NodeJsPerformanceHooks
Node 12.x
https://nodejs.org/docs/latest-v12.x/api/perf_hooks.html
const { PerformanceObserver, performance } = require('perf_hooks');
const delay = time => new Promise(res => setTimeout(res, time))
async function doSomeLongRunningProcess() {
await delay(1000);
}
const obs = new PerformanceObserver((items) => {
console.log('PerformanceObserver A to B',items.getEntries()[0].duration);
// apparently you should clean up...
performance.clearMarks();
// performance.clearMeasures(); // Not a function in Node.js 12
});
obs.observe({ entryTypes: ['measure'] });
performance.mark('A');
(async function main(){
try{
await performance.timerify(doSomeLongRunningProcess)();
performance.mark('B');
performance.measure('A to B', 'A', 'B');
}catch(e){
console.log('main() error',e);
}
})();
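For quick one-off measurements, performance.now() from the same perf_hooks module is often enough and avoids marks and observers entirely (reusing doSomeLongRunningProcess from above):
const { performance } = require('perf_hooks');

(async () => {
  const t0 = performance.now();
  await doSomeLongRunningProcess();
  console.log(`took ${(performance.now() - t0).toFixed(1)} ms`);
})();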
For anyone who wants the elapsed time value instead of console output:
use process.hrtime() as in @D.Deriso's suggestion; below is my simpler approach:
function functionToBeMeasured() {
var startTime = process.hrtime();
// do some task...
// ......
var elapsedSeconds = parseHrtimeToSeconds(process.hrtime(startTime));
console.log('It takes ' + elapsedSeconds + 'seconds');
}
function parseHrtimeToSeconds(hrtime) {
var seconds = (hrtime[0] + (hrtime[1] / 1e9)).toFixed(3);
return seconds;
}
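The same helper also works for callback-based code like the inserts in the question, as long as the end reading is taken inside the callback (sketch):
function saveAndMeasure(user) {
  const startTime = process.hrtime();
  db.users.save(user, function(err, saved) {
    // measure only once the save has actually called back
    const elapsedSeconds = parseHrtimeToSeconds(process.hrtime(startTime));
    console.log((err || !saved ? 'Error' : 'Saved') + ' in ' + elapsedSeconds + ' seconds');
  });
}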
var start = +new Date();
var counter = 0;
for (var i = 1; i < LIMIT; i++) {
  ++counter;
  db.users.save({id: i, name: "MongoUser [" + i + "]"}, function(err, saved) {
    if (err || !saved) console.log("Error");
    else console.log("Saved");
    if (--counter === 0) {
      var end = +new Date();
      console.log("all users saved in " + (end - start) + " milliseconds");
    }
  });
}
Old question, but for a simple API and lightweight solution, you can use perfy, which uses high-resolution real time (process.hrtime) internally.
var perfy = require('perfy');
function end(label) {
return function (err, saved) {
console.log(err ? 'Error' : 'Saved');
console.log( perfy.end(label).time ); // <——— result: seconds.milliseconds
};
}
for (var i = 1; i < LIMIT; i++) {
var label = 'db-save-' + i;
perfy.start(label); // <——— start and mark time
db.users.save({ id: i, name: 'MongoUser [' + i + ']' }, end(label));
}
Note that each time perfy.end(label) is called, that instance is auto-destroyed.
Disclosure: Wrote this module, inspired by D.Deriso's answer. Docs here.
You could also try exectimer. It gives you feedback like:
var t = require("exectimer");
var myFunction() {
var tick = new t.tick("myFunction");
tick.start();
// do some processing and end this tick
tick.stop();
}
// Display the results
console.log(t.timers.myFunction.duration()); // total duration of all ticks
console.log(t.timers.myFunction.min()); // minimal tick duration
console.log(t.timers.myFunction.max()); // maximal tick duration
console.log(t.timers.myFunction.mean()); // mean tick duration
console.log(t.timers.myFunction.median()); // median tick duration
[edit] There is an even simpler way now to use exectimer. Your code could be wrapped like this:
var t = require('exectimer'),
    Tick = t.Tick;

for (var i = 1; i < LIMIT; i++) {
  Tick.wrap(function saveUsers(done) {
    db.users.save({id: i, name: "MongoUser [" + i + "]"}, function(err, saved) {
      if (err || !saved) console.log("Error");
      else console.log("Saved");
      done();
    });
  });
}

// Display the results
console.log(t.timers.saveUsers.duration()); // total duration of all ticks
console.log(t.timers.saveUsers.min());      // minimal tick duration
console.log(t.timers.saveUsers.max());      // maximal tick duration
console.log(t.timers.saveUsers.mean());     // mean tick duration
console.log(t.timers.saveUsers.median());   // median tick duration
You can use a wrapper function to easily report the execution time of any existing function.
A wrapper is used to extend an existing function to do something before and after the existing function's execution, and is a convenient way to compose logic.
Here is an example of using the withDurationReporting wrapper:
// without duration reporting
const doSomethingThatMayTakeAWhile = async (someArg: string, anotherArg: number) => {
/** your logic goes here */
}
// with duration reporting
const doSomethingThatMayTakeAWhileWithReporting = withDurationReporting(
'doSomethingThatMayTakeAWhile',
doSomethingThatMayTakeAWhile
);
// note: you can define the function with duration reporting directly, too
const doSomethingThatMayTakeAWhile = withDurationReporting(
'doSomethingThatMayTakeAWhile',
async (someArg: string, anotherArg: number) => {
/** your logic goes here */
}
)
And here is the wrapper itself:
import { hrtime } from 'process';
const roundToHundredths = (num: number) => Math.round(num * 100) / 100; // https://stackoverflow.com/a/14968691/3068233
/**
* a wrapper which reports how long it took to execute a function, after the function completes
*/
export const withDurationReporting = <R extends any, T extends (...args: any[]) => Promise<R>>(
title: string,
logic: T,
options: {
reportingThresholdSeconds: number;
logMethod: (message: string, metadata?: Record<string, any>) => void;
} = {
reportingThresholdSeconds: 1, // report on anything that takes more than 1 second, by default
logMethod: console.log, // log with `console.log` by default
},
) => {
return (async (...args: Parameters<T>): Promise<R> => {
const startTimeInNanoseconds = hrtime.bigint();
const result = await logic(...args);
const endTimeInNanoseconds = hrtime.bigint();
const durationInNanoseconds = endTimeInNanoseconds - startTimeInNanoseconds;
const durationInSeconds = roundToHundredths(Number(durationInNanoseconds) / 1e9); // https://stackoverflow.com/a/53970656/3068233
if (durationInSeconds >= options.reportingThresholdSeconds)
options.logMethod(`${title} took ${durationInSeconds} seconds to execute`, { title, durationInSeconds });
return result;
}) as T;
};
I designed a simple method for this, using console.time() & console.timeEnd():
measure function definition
function measureRunningTime(func, ...args) {
  const varToString = varObj => Object.keys(varObj)[0];
  const displayName = func.name || varToString({ func });
  console.time(displayName);
  func(...args);
  console.timeEnd(displayName);
}
To use it, pass a function without arguments, with arguments bound, or with the arguments as the following parameters.
Examples:
let's say I want to check the running time of the simplest searching algorithm - SimpleSearch:
measured function definition (your code here)
const simpleSearch = (array = [1,2,3] ,item = 3) => {
for(let i = 0; i< array.length; i++){
if (array[i] === item) return i;
}
return -1
}
implementation without arguments
measureRunningTime(simpleSearch)
//Prints something like that-> simpleSearch: 0.04ms
implementation with arguments using .bind()
const array = [1,2,3]
const item = 3
measureRunningTime(simpleSearch.bind(null, array, item))
//Prints something like that-> bound simpleSearch: 0.04ms
implementation with arguments without using .bind()
const array = [1,2,3]
const item = 3
measureRunningTime(simpleSearch, array, item)
//Prints something like that-> simpleSearch: 0.04ms
Take notice! This implementation is far from perfect (for example, there is no error handling), but it can be used to check the running times of simple algorithms.
Moreover, I'm not an experienced programmer, so take everything with a grain of salt 🧂 👌
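Note that measureRunningTime as written only times synchronous functions; for an async function the timer ends before the promise settles. A hedged async-aware variant:
async function measureRunningTimeAsync(func, ...args) {
  const displayName = func.name || 'anonymous';
  console.time(displayName);
  try {
    return await func(...args); // works for both sync and async functions
  } finally {
    console.timeEnd(displayName); // fires only after the awaited result settles
  }
}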
I had the same issue while moving from AWS to Azure.
For Express & AWS, you can already use the existing time() and timeEnd().
For Azure, use this:
https://github.com/manoharreddyporeddy/my-nodejs-notes/blob/master/performance_timers_helper_nodejs_azure_aws.js
These time() and timeEnd() use the existing hrtime() function, which gives high-resolution real time.
Hope this helps.
I needed this to be cumulative and to measure different things, so I built these functions:
// holders for the running timers and the accumulated results
const measureTimers = {};
const measures = {};

function startMeasuring(key) {
  measureTimers[key] = process.hrtime();
}

function stopMeasuring(key) {
  if (!measures[key]) {
    measures[key] = 0;
  }
  let hrtime = process.hrtime(measureTimers[key]);
  measures[key] += hrtime[0] + hrtime[1] / 1e9;
  measureTimers[key] = null;
}
Usage:
startMeasuring("first Promise");
startMeasuring("first and second Promises");
await new Promise((resolve) => {
setTimeout(resolve, 1400);
});
stopMeasuring("first Promise");
stopMeasuring("first and second Promises");
startMeasuring("first and second Promises");
await new Promise((resolve) => {
setTimeout(resolve, 600);
});
stopMeasuring("first and second Promises");
console.log("Measure Results", measures);
/*
Measure Results {
  setting: 0.00002375,
  'first Promise': 1.409392916,
  'first and second Promises': 2.015160376
}
*/
