This is really unusual code that I haven't written myself. Each day a database is updated with around 200 records.
This means we have to do 200 API calls to figure out information about these records every day at 7 am.
This has been working fine until now:
the API has implemented a limit of 50 API calls per hour, and if you try to do 51 calls, you get banned for 24 hours...
So, how do I make the for loop do a maximum of 50 loops here?
for (let record of records ) {
//This loop has to be limited to 50 iterations pr run.
let brregRawData = await fetch(`some url/${record.name}/someurl`);
let brregJson = await brregRawData.json()
let personNavn = brregJson.rollegrupper[0].roller[0].person.navn.fornavn
let personEtternavn = brregJson.rollegrupper[0].roller[0].person.navn.etternavn
// if mellomnavn = undefined, then do nothing
if (brregJson.rollegrupper[0].roller[0].person.navn.mellomnavn == undefined) {
var personMellomNavn = ""
} else {
let personMellomNavn = brregJson.rollegrupper[0].roller[0].person.navn.mellomnavn + " "
}
}
I tried looking up different ways to use a for loop, but all the solutions I looked at broke the "${record.name}" part of the URL; I have to loop through an entire view of an entire database.
I'm not sure if this is right for the case, but can you define a counter outside of the loop, increment it inside, and break when it reaches 50?
let count = 0;
for (....)
...
if (count++ >= 50) break;
...
If I'm understanding you correctly, you need to limit the number of API calls to 50, but the current implementation uses a for...of loop. The simplest way to achieve what you're looking for, with the least modification, is to use a standard for loop.
Assuming this all happens inside an async function...
async function limitToFiftyApiCalls(records) {
const max = Math.min(records.length, 50);
for (let i = 0; i < max; i++) {
const record = records[i];
let brregRawData = await fetch(`some url/${record.name}/someurl`);
let brregJson = await brregRawData.json();
let personNavn = brregJson.rollegrupper[0].roller[0].person.navn.fornavn;
let personEtternavn = brregJson.rollegrupper[0].roller[0].person.navn.etternavn;
// if mellomnavn = undefined, then do nothing
if (brregJson.rollegrupper[0].roller[0].person.navn.mellomnavn == undefined) {
var personMellomNavn = "";
} else {
let personMellomNavn = brregJson.rollegrupper[0].roller[0].person.navn.mellomnavn + " ";
}
}
}
The code above doesn't modify your existing code much, other than limiting the number of API calls. However, there are a few things you could do that would generally make the code easier to read and edit.
async function limitToFiftyApiCalls(records) {
const max = Math.min(records.length, 50);
for (let i = 0; i < max; i++) {
const record = records[i];
let personMellomNavn = "";
let brregRawData = await fetch(`some url/${record.name}/someurl`);
let brregJson = await brregRawData.json();
// why write this more than once?
// save it to a variable and make life easier
let someVar = brregJson.rollegrupper[0].roller[0].person.navn;
let personNavn = someVar.fornavn;
let personEtternavn = someVar.etternavn;
if (someVar.mellomnavn) {
personMellomNavn = someVar.mellomnavn + ' '
}
}
}
A very simple way to do this is to keep a count that increments every time the loop body executes. For the loop you provided, it would look like this:
let count = 0; // initialize count as 0
for (let record of records ) {
if(count >= 50) break; // break out of loop if count is equal to or greater than 50 (since count starts from 0)
//This loop has to be limited to 50 iterations pr run.
let brregRawData = await fetch(`some_url/${record.name}/some_url`);
let brregJson = await brregRawData.json()
let personNavn = brregJson.rollegrupper[0].roller[0].person.navn.fornavn
let personEtternavn = brregJson.rollegrupper[0].roller[0].person.navn.etternavn
// if mellomnavn = undefined, then do nothing
if (brregJson.rollegrupper[0].roller[0].person.navn.mellomnavn == undefined) {
var personMellomNavn = ""
} else {
let personMellomNavn = brregJson.rollegrupper[0].roller[0].person.navn.mellomnavn + " "
}
count++; // Increment the count after each iteration
}
Answer to your question:
for (let i = 0; i < Math.min(records.length, 50); i++) {
let record = records[i];
But what happens then with the records you have not checked? Will you wait 24 hours? I guess that's not what's expected; instead you will have to check 50 records every hour until you have checked them all (so four runs for 200 records).
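A minimal sketch of that batching idea, assuming the records array is available to each run and that an in-process wait with setTimeout is acceptable (a cron job that re-runs the script hourly would work just as well). The URL pattern is the one from the question:
async function processBatch(records, offset, batchSize = 50) {
    const batch = records.slice(offset, offset + batchSize);
    for (const record of batch) {
        const brregRawData = await fetch(`some url/${record.name}/someurl`);
        const brregJson = await brregRawData.json();
        // ...extract the fields you need here...
    }
    return offset + batch.length;
}
async function processAll(records) {
    let offset = 0;
    while (offset < records.length) {
        offset = await processBatch(records, offset);
        if (offset < records.length) {
            // wait an hour before the next batch to stay under the rate limit
            await new Promise((resolve) => setTimeout(resolve, 60 * 60 * 1000));
        }
    }
}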
I need to send X requests to http://date.jsontest.com/ where X is an argument to my program. I've managed to get the latency for one request, but now I need to calculate the average time in milliseconds for multiple requests. I have an array responseTimes[] where I'd like to store the response times. How do I add up the multiple response times and get the average? Any help? Do I need to use the setTimeout function in the first place?
I'm using Node v8.9.4, in case fetchUrl does not work on your end.
Here's my code
var fetchUrl = require("fetch").fetchUrl;
fetchUrl("http://date.jsontest.com", function(error, meta, body){
const start = new Date();
const responseTimes = [];
let count = 0;
setTimeout(function (argument) {
// execution time simulated with setTimeout function
var end = new Date() - start;
console.log("Execution time: %dms", end);
});
});
You need to put the start variable outside the request.
var fetchUrl = require("fetch").fetchUrl;
var start = new Date();
fetchUrl("http://date.jsontest.com", function(error, meta, body){
// measure elapsed time once the response arrives
var end = new Date() - start;
console.log("Execution time: %dms", end);
});
If you're storing responseTime[], you can also keep a count of requests alongside it. Then at any point in time you can get the average with the following function:
function getAvg(timeArr, count){
let sum = 0;
for(let i=0; i<timeArr.length; i++){
sum += timeArr[i];
}
return sum/count;
}
Also, it's not necessary to store responseTime[]. You can calculate the average on each request and store only the average and the count, if you just need the average.
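A minimal sketch of that running-average idea (the names are illustrative):
let count = 0;
let average = 0;
function recordResponseTime(ms) {
    count++;
    average += (ms - average) / count; // incremental mean, no array needed
}
The full example below keeps the array, as in the original question: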
var fetchUrl = require("fetch").fetchUrl;
let responseTime = [];
let count = 0;
const start = new Date(); // start the clock before firing the request
fetchUrl("http://date.jsontest.com", function(error, meta, body){
    var end = new Date() - start;
    console.log("Execution time: %dms", end);
    responseTime.push(end);
    count++;
    let averageNow = getAvg(responseTime, count);
    console.log(averageNow); // this is your average
});
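To actually send X requests and report the overall average, a minimal sketch might look like this (reading X from process.argv is only illustrative):
var fetchUrl = require("fetch").fetchUrl;
const X = parseInt(process.argv[2], 10) || 5;
const responseTime = [];
for (let i = 0; i < X; i++) {
    const start = new Date(); // record start right before each request
    fetchUrl("http://date.jsontest.com", function (error, meta, body) {
        responseTime.push(new Date() - start);
        if (responseTime.length === X) {
            const avg = responseTime.reduce((a, b) => a + b, 0) / X;
            console.log("Average response time: %dms", avg);
        }
    });
}
No setTimeout is needed; each callback simply records its own elapsed time.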
You can use the Artillery npm module to get the average response time of an API.
I have the code below, where d3.json is an asynchronous function. I am trying to run it in a loop while my count is equal to 100. My while loop stops after the first iteration, because the counting happens inside the asynchronous callback, so the loop doesn't run as many times as it should. How can I get the correct count so my while loop keeps executing, while keeping the asynchronous behaviour?
$(document).ready(function() {
$('button').click(function() {
var start = new Date().getTime();
while(count == 100){
console.log("first iteration");
count = 0;
d3.json("/api/messages/" + offset, function(error, json) {
if (error) return console.warn(error);
data = json;
for(var i = 0; i < data.messages.length; i++){
console.log(data.messages[i].date);
count++;
console.log(count);
}
});
offset += 100;
}
var end = new Date().getTime();
var time = end - start;
console.log("Time to execute : " + time);
});
});
EDIT: I'm trying for my calls to be as shown below. At each call you would check that there are 100 items (count) returned, and if not, you would stop the while loop.
/api/messages/0
/api/messages/100
/api/messages/200
/api/messages/300
/api/messages/400
/api/messages/500
/api/messages/600
Here is how I would do it:
Create a function that accepts the relevant parameters: start offset, increment, and, most importantly, a done callback that should execute at the end.
This function contains a worker function, which will call the API, check the result, and either call itself again or call the done callback:
function fetchAllMessages(start, increment, done) {
var messages = [];
(function nextCall(offset) {
d3.json("/api/messages/" + offset, function (error, data) {
if (error) return done(error, messages);
if (!data.messages) return done("unexpected response format", messages);
messages.push.apply(messages, data.messages);
if (data.messages.length === increment) {
nextCall(offset + increment);
} else {
done(null, messages);
}
});
})(start);
}
Now you can use it simply from your click event handler:
$(function() {
$('button').click(function() {
var start = Date.now();
fetchAllMessages(0, 100, function (err, messages) {
var end = Date.now();
if (err) console.warn(err);
console.log(messages);
console.log("Time to execute : " + (start - end));
});
});
});
The idea is to chain these ajax calls until a certain cut-off point is reached (in this example, until offset is greater than maxOffset).
I have changed the d3.json call to jQuery.getJSON for this answer as it's easier to debug on jsfiddle, but the concept is exactly the same. I also had to change the URL for the request to use the jsfiddle debug API.
var start = new Date().getTime();
var offset = 0;
var maxOffset = 600;
var baseUrl = "/echo/json"; // change this to /api/messages in production
var callback = function(json) {
console.log(json);
console.log("current offset: " + offset);
data = json;
// ... do something with data ...
// increment the offset
offset += 100;
// don't run any more and return the execution time
if (offset > maxOffset) {
var end = new Date().getTime();
var time = end - start;
console.log("Time to execute : " + time);
return; // don't run any more
}
// offset too small so run another getJSON call with our callback
$.getJSON(baseUrl + "?" + offset, callback);
}
// when button is click, start the json call chain
$('button').click(function() {
// change the "?" to "/" in production
$.getJSON(baseUrl + "?" + offset, callback);
});
If you need help translating this to your exact problem let me know.
Here is the jsfiddle.
I have a timer function and I want to clear the timeouts or reset the function, because every time I execute it, new timeouts are created, so I receive several counts.
My idea is to reset the count every time I execute the function. I only want one instance of the timer and to get the correct count. If I execute the function several times, I want it to restart from 0.
Here is my code:
var timeouts = new Array();
var timer = null;
io.sockets.on('connection', function (client)
{
client.on("start", function (){
console.log('Someone has pressed Start button',new Date().getTime());
//try to kill all timeouts
for (var timeout in timeouts) {
clearTimeout(timeout);
};
if(this.timer == null) {
this.timer = new timer(1000, function (data) {
io.sockets.emit('timeupdate', data);
})
} else {
this.timer = null;
}
});
});
function timer(delay, callback)
{
// self-reference
var self = this;
if (!(this instanceof timer)) {
return new timer();
}
// attributes
var counter = 0;
var start = new Date().getTime();
/**
* Delayed running of the callback.
*/
function delayed()
{
console.log(counter);
callback(counter);
counter ++;
var diff = (new Date().getTime() - start) - counter * delay;
var timeOut = setTimeout(delayed, delay - diff);
timeouts.push(timeOut);
}
// start timer
delayed();
var timeout = setTimeout(delayed, delay);
timeouts.push(timeout);
}
Thank you in advance.
Using clearTimeout() is the correct way. The problem is your for-loop. This might look like a classic foreach-loop, but it is not. You have to do:
for (var i=0; i< timeouts.length; i++) {
clearTimeout(timeouts[i]);
}
Alternatively, though I don't like this personally:
for (var i in timeouts) {
clearTimeout(timeouts[i]); // note how the array is indexed using var i
}
This is a common JavaScript pitfall - the for (x in y)-loop actually iterates over the array's indices, not the values. It can also iterate over an object's properties. Try it out:
var a = [3, 2, 5, 8];
for (var i in a) {
console.log(i);
console.log(a[i]);
}
var o = { test: 'hello', number: 1234 };
for (var x in o)
console.log(x);
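For the broader goal of keeping a single timer and restarting its count on every "start" event, a minimal sketch (reusing the socket.io setup from the question; the drift-compensation logic of the original timer is left out) might look like this:
let currentTimeout = null;
let counter = 0;
function startTimer(delay, callback) {
    // cancel any previous timer so only one instance runs
    if (currentTimeout) clearTimeout(currentTimeout);
    counter = 0; // restart the count on every start
    (function tick() {
        callback(counter);
        counter++;
        currentTimeout = setTimeout(tick, delay);
    })();
}
io.sockets.on('connection', function (client) {
    client.on('start', function () {
        startTimer(1000, function (count) {
            io.sockets.emit('timeupdate', count);
        });
    });
});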
I have a piece of JavaScript code that I am executing using the node.js interpreter.
for(var i = 1; i < LIMIT; i++) {
var user = {
id: i,
name: "MongoUser [" + i + "]"
};
db.users.save(user, function(err, saved) {
if(err || !saved) {
console.log("Error");
} else {
console.log("Saved");
}
});
}
How can I measure the time taken by these database insert operations? I could compute the difference of date values after and before this piece of code but that would be incorrect because of the asynchronous nature of the code.
Use the Node.js console.time() and console.timeEnd():
var i;
var end = function(err, saved) {
    console.log((err || !saved) ? "Error" : "Saved");
    if (--i === 1) {
        console.timeEnd("dbsave");
    }
};
console.time("dbsave");
for (i = 1; i < LIMIT; i++) {
    db.users.save({id: i, name: "MongoUser [" + i + "]"}, end);
}
There is a method that is designed for this. Check out process.hrtime().
So, I basically put this at the top of my app.
var start = process.hrtime();
var elapsed_time = function(note){
var precision = 3; // 3 decimal places
var elapsed = process.hrtime(start)[1] / 1000000; // divide by a million to get nano to milli
console.log(process.hrtime(start)[0] + " s, " + elapsed.toFixed(precision) + " ms - " + note); // print message + time
start = process.hrtime(); // reset the timer
}
Then I use it to see how long functions take. Here's a basic example that prints the contents of a text file called "output.txt":
var debug = true;
http.createServer(function(request, response) {
if(debug) console.log("----------------------------------");
if(debug) elapsed_time("recieved request");
var send_html = function(err, contents) {
if(debug) elapsed_time("start send_html()");
response.writeHead(200, {'Content-Type': 'text/html' } );
response.end(contents);
if(debug) elapsed_time("end send_html()");
}
if(debug) elapsed_time("start readFile()");
fs.readFile('output.txt', send_html);
if(debug) elapsed_time("end readFile()");
}).listen(8080);
Here's a quick test you can run in a terminal (BASH shell):
for i in {1..100}; do echo $i; curl http://localhost:8080/; done
Invoking console.time('label') will record the current time in milliseconds, then later calling console.timeEnd('label') will display the duration from that point.
The time in milliseconds will be automatically printed alongside the label, so you don't have to make a separate call to console.log to print a label:
console.time('test');
//some code
console.timeEnd('test'); //Prints something like that-> test: 11374.004ms
For more information, see Mozilla's developer docs on console.time.
Surprised no one has mentioned the new built-in libraries yet:
Available in Node >= 8.5, and should be in modern browsers
https://developer.mozilla.org/en-US/docs/Web/API/Performance
https://nodejs.org/docs/latest-v8.x/api/perf_hooks.html#
Node 8.5 ~ 9.x (Firefox, Chrome)
// const { performance } = require('perf_hooks'); // enable for node
const delay = time => new Promise(res=>setTimeout(res,time))
async function doSomeLongRunningProcess(){
await delay(1000);
}
performance.mark('A');
(async ()=>{
await doSomeLongRunningProcess();
performance.mark('B');
performance.measure('A to B', 'A', 'B');
const measure = performance.getEntriesByName('A to B')[0];
// firefox appears to only show second precision.
console.log(measure.duration);
// apparently you should clean up...
performance.clearMarks();
performance.clearMeasures();
// Prints the number of milliseconds between Mark 'A' and Mark 'B'
})();
https://repl.it/#CodyGeisler/NodeJsPerformanceHooks
Node 12.x
https://nodejs.org/docs/latest-v12.x/api/perf_hooks.html
const { PerformanceObserver, performance } = require('perf_hooks');
const delay = time => new Promise(res => setTimeout(res, time))
async function doSomeLongRunningProcess() {
await delay(1000);
}
const obs = new PerformanceObserver((items) => {
console.log('PerformanceObserver A to B',items.getEntries()[0].duration);
// apparently you should clean up...
performance.clearMarks();
// performance.clearMeasures(); // Not a function in Node.js 12
});
obs.observe({ entryTypes: ['measure'] });
performance.mark('A');
(async function main(){
try{
await performance.timerify(doSomeLongRunningProcess)();
performance.mark('B');
performance.measure('A to B', 'A', 'B');
}catch(e){
console.log('main() error',e);
}
})();
For anyone who wants to get the elapsed time value instead of console output:
use process.hrtime() as @D.Deriso suggested; below is my simpler approach:
function functionToBeMeasured() {
var startTime = process.hrtime();
// do some task...
// ......
var elapsedSeconds = parseHrtimeToSeconds(process.hrtime(startTime));
console.log('It takes ' + elapsedSeconds + ' seconds');
}
function parseHrtimeToSeconds(hrtime) {
var seconds = (hrtime[0] + (hrtime[1] / 1e9)).toFixed(3);
return seconds;
}
var start = +new Date();
var counter = 0;
for(var i = 1; i < LIMIT; i++){
++counter;
db.users.save({id : i, name : "MongoUser [" + i + "]"}, function(err, saved) {
if( err || !saved ) console.log("Error");
else console.log("Saved");
if (--counter === 0)
{
var end = +new Date();
console.log("all users saved in " + (end-start) + " milliseconds");
}
});
}
Old question, but for a simple API and lightweight solution, you can use perfy, which uses high-resolution real time (process.hrtime) internally.
var perfy = require('perfy');
function end(label) {
return function (err, saved) {
console.log(err ? 'Error' : 'Saved');
console.log( perfy.end(label).time ); // <——— result: seconds.milliseconds
};
}
for (var i = 1; i < LIMIT; i++) {
var label = 'db-save-' + i;
perfy.start(label); // <——— start and mark time
db.users.save({ id: i, name: 'MongoUser [' + i + ']' }, end(label));
}
Note that each time perfy.end(label) is called, that instance is auto-destroyed.
Disclosure: Wrote this module, inspired by D.Deriso's answer. Docs here.
You could also try exectimer. It gives you feedback like:
var t = require("exectimer");
function myFunction() {
var tick = new t.tick("myFunction");
tick.start();
// do some processing and end this tick
tick.stop();
}
// Display the results
console.log(t.timers.myFunction.duration()); // total duration of all ticks
console.log(t.timers.myFunction.min()); // minimal tick duration
console.log(t.timers.myFunction.max()); // maximal tick duration
console.log(t.timers.myFunction.mean()); // mean tick duration
console.log(t.timers.myFunction.median()); // median tick duration
[edit] There is an even simpler way now to use exectimer. Your code could be wrapped like this:
var t = require('exectimer'),
Tick = t.Tick;
for(var i = 1; i < LIMIT; i++){
Tick.wrap(function saveUsers(done) {
db.users.save({id : i, name : "MongoUser [" + i + "]"}, function(err, saved) {
if( err || !saved ) console.log("Error");
else console.log("Saved");
done();
});
});
}
// Display the results
console.log(t.timers.saveUsers.duration()); // total duration of all ticks
console.log(t.timers.saveUsers.min()); // minimal tick duration
console.log(t.timers.saveUsers.max()); // maximal tick duration
console.log(t.timers.saveUsers.mean()); // mean tick duration
console.log(t.timers.saveUsers.median()); // median tick duration
You can use a wrapper function to easily report the execution time of any existing function.
A wrapper is used to extend an existing function to do something before and after the existing function's execution, and is a convenient way to compose logic.
Here is an example of using the withDurationReporting wrapper:
// without duration reporting
const doSomethingThatMayTakeAWhile = async (someArg: string, anotherArg: number) => {
/** your logic goes here */
}
// with duration reporting
const doSomethingThatMayTakeAWhileWithReporting = withDurationReporting(
'doSomethingThatMayTakeAWhile',
doSomethingThatMayTakeAWhile
);
// note: you can define the function with duration reporting directly, too
const doSomethingThatMayTakeAWhile = withDurationReporting(
'doSomethingThatMayTakeAWhile',
async (someArg: string, anotherArg: number) => {
/** your logic goes here */
}
)
And here is the wrapper itself:
import { hrtime } from 'process';
const roundToHundredths = (num: number) => Math.round(num * 100) / 100; // https://stackoverflow.com/a/14968691/3068233
/**
* a wrapper which reports how long it took to execute a function, after the function completes
*/
export const withDurationReporting = <R extends any, T extends (...args: any[]) => Promise<R>>(
title: string,
logic: T,
options: {
reportingThresholdSeconds: number;
logMethod: (message: string, metadata?: Record<string, any>) => void;
} = {
reportingThresholdSeconds: 1, // report on anything that takes more than 1 second, by default
logMethod: console.log, // log with `console.log` by default
},
) => {
return (async (...args: Parameters<T>): Promise<R> => {
const startTimeInNanoseconds = hrtime.bigint();
const result = await logic(...args);
const endTimeInNanoseconds = hrtime.bigint();
const durationInNanoseconds = endTimeInNanoseconds - startTimeInNanoseconds;
const durationInSeconds = roundToHundredths(Number(durationInNanoseconds) / 1e9); // https://stackoverflow.com/a/53970656/3068233
if (durationInSeconds >= options.reportingThresholdSeconds)
options.logMethod(`${title} took ${durationInSeconds} seconds to execute`, { title, durationInSeconds });
return result;
}) as T;
};
I designed a simple method for this, using console.time() & console.timeEnd():
measure function definition
function measureRunningTime(func,...args){
const varToString = varObj => Object.keys(varObj)[0]
const displayName = func.name || varToString({ func })
console.time(displayName)
func(...args)
console.timeEnd(displayName)
}
To use it, pass a function without arguments, with arguments bound, or with the arguments as the following parameters.
Examples:
let's say I want to check the running time of the simplest searching algorithm - SimpleSearch:
measured function definition (your code here)
const simpleSearch = (array = [1,2,3] ,item = 3) => {
for(let i = 0; i< array.length; i++){
if (array[i] === item) return i;
}
return -1
}
implementation without arguments
measureRunningTime(simpleSearch)
//Prints something like that-> simpleSearch: 0.04ms
implementation with arguments using .bind()
const array = [1,2,3]
const item = 3
measureRunningTime(simpleSearch.bind(null, array, item))
//Prints something like that-> bound simpleSearch: 0.04ms
implementation with arguments without using .bind()
const array = [1,2,3]
const item = 3
measureRunningTime(simpleSearch, array, item)
//Prints something like that-> simpleSearch: 0.04ms
Take notice: this implementation is far from perfect (for example, there is no error handling), but it can be used to check the running times of simple algorithms.
Moreover, I'm not an experienced programmer, so take everything with a grain of salt 🧂 👌
I had the same issue while moving from AWS to Azure.
For Express and AWS, you can already use the existing time() and timeEnd().
For Azure, use this:
https://github.com/manoharreddyporeddy/my-nodejs-notes/blob/master/performance_timers_helper_nodejs_azure_aws.js
These time() and timeEnd() use the existing hrtime() function, which gives high-resolution real time.
Hope this helps.
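A minimal sketch of what such hrtime-based time()/timeEnd() helpers might look like (the linked file may differ; this is only an illustration of the idea):
const timers = {};
function time(label) {
    timers[label] = process.hrtime();
}
function timeEnd(label) {
    const diff = process.hrtime(timers[label]);
    const ms = diff[0] * 1000 + diff[1] / 1e6;
    console.log(label + ": " + ms.toFixed(3) + "ms");
    delete timers[label];
}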
I need this to be cumulative, and to measure different things.
I built these functions:
const measureTimers = {};
const measures = {};
function startMeasuring(key) {
    measureTimers[key] = process.hrtime();
}
function stopMeasuring(key) {
    if (!measures[key]) {
        measures[key] = 0;
    }
    let hrtime = process.hrtime(measureTimers[key]);
    measures[key] += hrtime[0] + hrtime[1] / 1e9;
    measureTimers[key] = null;
}
Usage:
startMeasuring("first Promise");
startMeasuring("first and second Promises");
await new Promise((resolve) => {
setTimeout(resolve, 1400);
});
stopMeasuring("first Promise");
stopMeasuring("first and second Promises");
startMeasuring("first and second Promises");
await new Promise((resolve) => {
setTimeout(resolve, 600);
});
stopMeasuring("first and second Promises");
console.log("Measure Results", measures);
/*
Measure Results {
    'first Promise': 1.409392916,
    'first and second Promises': 2.015160376
}
*/