Most efficient way to create a zero filled JavaScript array? - javascript

What is the most efficient way to create an arbitrary length zero filled array in JavaScript?

ES6 introduces Array.prototype.fill. It can be used like this:
new Array(len).fill(0);
Not sure if it's fast, but I like it because it's short and self-describing.
It's still not in IE (check compatibility), but there's a polyfill available.

Although this is an old thread, I wanted to add my 2 cents to it. Not sure how slow/fast this is, but it's a quick one-liner. Here is what I do:
If I want to pre-fill with a number:
Array.apply(null, Array(5)).map(Number.prototype.valueOf,0);
// [0, 0, 0, 0, 0]
If I want to pre-fill with a string:
Array.apply(null, Array(3)).map(String.prototype.valueOf,"hi")
// ["hi", "hi", "hi"]
Other answers have suggested:
new Array(5+1).join('0').split('')
// ["0", "0", "0", "0", "0"]
but if you want 0 (the number) and not "0" (zero inside a string), you can do:
new Array(5+1).join('0').split('').map(parseFloat)
// [0, 0, 0, 0, 0]
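A side note not in the original answer: map passes the element's index as the second argument, and parseInt would interpret that index as a radix, which is why parseFloat is the safer choice here. A quick illustration:
// map calls the callback with (value, index, array); parseInt treats the
// second argument as a radix, so index 1 produces NaN (radix 1 is invalid).
console.log(new Array(5+1).join('0').split('').map(parseInt));   // [0, NaN, 0, 0, 0]
console.log(new Array(5+1).join('0').split('').map(parseFloat)); // [0, 0, 0, 0, 0]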

In short
Fastest solution:
let a = new Array(n); for (let i=0; i<n; ++i) a[i] = 0;
Shortest (handy) solution: about 3x slower for small arrays and slightly slower for big ones (slowest on Firefox)
Array(n).fill(0)
Details
Today, 2020.06.09, I performed tests on macOS High Sierra 10.13.6 in Chrome 83.0, Firefox 77.0, and Safari 13.1. I tested the chosen solutions for two cases:
small arrays, with 10 elements
big arrays, with 1M elements
Conclusions
the solution based on new Array(n) + a for loop (N) is the fastest for small and big arrays (except on Chrome, where it is still very fast) and is recommended as the fast cross-browser solution
the solution based on new Float32Array(n) (I) returns a non-typical array (e.g. you cannot call push(..) on it), so I do not compare its results with the other solutions; however, it is about 10-20x faster than the other solutions for big arrays on all browsers (see the short typed-array sketch after the next code block)
the solutions based on for/while loops (L, M, N, O) are fast for small arrays
the solutions based on fill (B, C) are fast on Chrome and Safari but surprisingly the slowest on Firefox for big arrays; they are moderately fast for small arrays
the solution based on Array.apply (P) throws an error for big arrays
function P(n) {
return Array.apply(null, Array(n)).map(Number.prototype.valueOf,0);
}
try {
P(1000000);
} catch(e) {
console.error(e.message);
}
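To illustrate the note above about Float32Array (solution I) returning a non-typical array, here is a short sketch; treat the printed values as illustrative:
// Typed arrays come back zero-filled but lack normal Array methods such as push.
const typed = new Float32Array(4);
console.log(typed[0]);            // 0 (zero-initialized)
console.log(typeof typed.push);   // "undefined" (no push on typed arrays)
const normal = Array.from(typed); // convert to a plain Array when needed: [0, 0, 0, 0]
normal.push(0);
console.log(normal.length);       // 5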
Code and example
The code below presents the solutions used in the measurements.
function A(n) {
return [...new Array(n)].fill(0);
}
function B(n) {
return new Array(n).fill(0);
}
function C(n) {
return Array(n).fill(0);
}
function D(n) {
return Array.from({length: n}, () => 0);
}
function E(n) {
return [...new Array(n)].map(x => 0);
}
// arrays with type
function F(n) {
return Array.from(new Int32Array(n));
}
function G(n) {
return Array.from(new Float32Array(n));
}
function H(n) {
return Array.from(new Float64Array(n)); // needs 2x more memory than float32
}
function I(n) {
return new Float32Array(n); // this is not typical array
}
function J(n) {
return [].slice.apply(new Float32Array(n));
}
// Based on for
function K(n) {
let a = [];
a.length = n;
let i = 0;
while (i < n) {
a[i] = 0;
i++;
}
return a;
}
function L(n) {
let a=[]; for(let i=0; i<n; i++) a[i]=0;
return a;
}
function M(n) {
let a=[]; for(let i=0; i<n; i++) a.push(0);
return a;
}
function N(n) {
let a = new Array(n); for (let i=0; i<n; ++i) a[i] = 0;
return a;
}
function O(n) {
let a = new Array(n); for (let i=n; i--;) a[i] = 0;
return a;
}
// other
function P(n) {
return Array.apply(null, Array(n)).map(Number.prototype.valueOf,0);
}
function Q(n) {
return "0".repeat( n ).split("").map( parseFloat );
}
function R(n) {
return new Array(n+1).join('0').split('').map(parseFloat)
}
// ---------
// TEST
// ---------
[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R].forEach(f => {
let a = f(10);
console.log(`${f.name} length=${a.length}, arr[0]=${a[0]}, arr[9]=${a[9]}`)
});
These snippets only present the code used in the measurements; they do not run the performance tests themselves.
Example results for Chrome:

Elegant way to fill an array with precomputed values
Here is another way to do it using ES6 that nobody has mentioned so far:
> Array.from(Array(3), () => 0)
< [0, 0, 0]
It works by passing a map function as the second parameter of Array.from.
In the example above, the first parameter allocates an array of 3 positions filled with the value undefined and then the lambda function maps each one of them to the value 0.
Although Array(len).fill(0) is shorter, it doesn't work if you need to fill the array by doing some computation first (I know the question didn't ask for it, but a lot of people end up here looking for this).
For instance, if you need an array with 10 random numbers:
> Array.from(Array(10), () => Math.floor(10 * Math.random()))
< [3, 6, 8, 1, 9, 3, 0, 6, 7, 1]
It's more concise (and elegant) than the equivalent:
const numbers = Array(10);
for (let i = 0; i < numbers.length; i++) {
numbers[i] = Math.round(10 * Math.random());
}
This method can also be used to generate sequences of numbers by taking advantage of the index parameter provided in the callback:
> Array.from(Array(10), (d, i) => i)
< [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
Bonus answer: fill an array using String repeat()
Since this answer is getting a good deal of attention, I also wanted to show this cool trick. Although not as useful as my main answer, it introduces the still not very well known, but very useful, String repeat() method. Here's the trick:
> "?".repeat(10).split("").map(() => Math.floor(10 * Math.random()))
< [5, 6, 3, 5, 0, 8, 2, 7, 4, 1]
Cool, huh? repeat() is a very useful method to create a string that is the repetition of the original string a certain number of times. After that, split() creates an array for us, which is then map()ped to the values we want. Breaking it down in steps:
> "?".repeat(10)
< "??????????"
> "?".repeat(10).split("")
< ["?", "?", "?", "?", "?", "?", "?", "?", "?", "?"]
> "?".repeat(10).split("").map(() => Math.floor(10 * Math.random()))
< [5, 6, 3, 5, 0, 8, 2, 7, 4, 1]

The already mentioned ES6 fill method takes care of this nicely. Most modern desktop browsers already support the required Array prototype methods as of today (Chromium, Firefox, Edge and Safari) [1]. You can look up the details on MDN. A simple usage example is:
a = new Array(10).fill(0);
Given the current browser support, you should be cautious about using this unless you are sure your audience uses modern desktop browsers.
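If older browsers must be supported, one option is a small runtime guard that falls back to a loop; the sketch below is an illustration, not part of the original answer:
function zeros(len) {
  // use the native fill when the engine provides it
  if (typeof Array.prototype.fill === "function") {
    return new Array(len).fill(0);
  }
  // fallback for engines without Array.prototype.fill
  var a = new Array(len);
  for (var i = 0; i < len; i++) a[i] = 0;
  return a;
}
console.log(zeros(4)); // [0, 0, 0, 0]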

Note added August 2013, updated February 2015: The answer below from 2009 relates to JavaScript's generic Array type. It doesn't relate to the newer typed arrays defined in ES2015 [and available now in many browsers], like Int32Array and such. Also note that ES2015 adds a fill method to both Arrays and typed arrays, which is likely to be the most efficient way to fill them...
Also, it can make a big difference to some implementations how you create the array. Chrome's V8 engine, in particular, tries to use a highly-efficient, contiguous-memory array if it thinks it can, shifting to the object-based array only when necessary.
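A rough way to observe this engine-dependent behaviour (a sketch added here, not part of the original answer) is to time filling a pre-sized array against building a packed array with push; the numbers vary a lot between engines and versions:
const N = 1e6;

console.time("preallocated (holey) fill");
const holey = new Array(N);          // length set up front, elements start as holes
for (let i = 0; i < N; i++) holey[i] = 0;
console.timeEnd("preallocated (holey) fill");

console.time("push (packed) fill");
const packed = [];                   // grows densely, stays a packed array
for (let i = 0; i < N; i++) packed.push(0);
console.timeEnd("push (packed) fill");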
With most languages, it would be pre-allocate, then zero-fill, like this:
function newFilledArray(len, val) {
var rv = new Array(len);
while (--len >= 0) {
rv[len] = val;
}
return rv;
}
But JavaScript arrays aren't really arrays; they're key/value maps just like all other JavaScript objects, so there's no "pre-allocate" to do (setting the length doesn't allocate that many slots to fill). Nor is there any reason to believe that the benefit of counting down to zero (which is just to make the comparison in the loop fast) isn't outweighed by adding the keys in reverse order, when the implementation may well have optimized its handling of array keys on the theory that you'll generally add them in order.
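A tiny illustration of that point about length (a sketch in modern syntax, not part of the original 2009 answer): setting length creates holes, not zero-filled slots:
const a = [];
a.length = 5;
console.log(a.length); // 5
console.log(0 in a);   // false: index 0 was never assigned, it is a hole
console.log(a[0]);     // undefined
a.fill(0);             // ES2015 fill assigns every index, holes included
console.log(0 in a);   // true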
In fact, Matthew Crumley pointed out that counting down is markedly slower on Firefox than counting up, a result I can confirm; it's the array part of it (looping down to zero is still faster than looping up to a limit in a var). Apparently, adding the elements to the array in reverse order is a slow operation on Firefox. In fact, the results vary quite a bit by JavaScript implementation (which isn't all that surprising). Here's a quick and dirty test page (below) for browser implementations (very dirty, it doesn't yield during tests, so it provides minimal feedback and will run afoul of script time limits). I recommend refreshing between tests; FF (at least) slows down on repeated tests if you don't.
The fairly complicated version that uses Array#concat is faster than a straight init on FF as of somewhere between 1,000 and 2,000 element arrays. On Chrome's V8 engine, though, straight init wins out every time...
Here's a test:
const tests = [
{
name: "downpre",
total: 0,
desc: "Count down, pre-decrement",
func: makeWithCountDownPre
},
{
name: "downpost",
total: 0,
desc: "Count down, post-decrement",
func: makeWithCountDownPost
},
{
name: "up",
total: 0,
desc: "Count up (normal)",
func: makeWithCountUp
},
{
name: "downandup",
total: 0,
desc: "Count down (for loop) and up (for filling)",
func: makeWithCountDownArrayUp
},
{
name: "concat",
total: 0,
desc: "Concat",
func: makeWithConcat
}
];
const q = sel => document.querySelector(sel);
let markup = "";
for (const {name, desc} of tests) {
markup += `
<div><input type="checkbox" id="chk_${name}" checked>
<label for="chk_${name}">${desc}</label></div>`;
}
q("#checkboxes").innerHTML = markup;
q("#btnTest").addEventListener("click", btnTestClick);
function btnTestClick() {
// Clear log
q("#log").innerHTML = "Testing...";
// Show running
q("#btnTest").disabled = true;
// Run after a pause while the browser updates display
setTimeout(btnTestClickPart2, 0);
}
function btnTestClickPart2() {
try {
runTests();
} catch (e) {
log(`Exception: ${e.message}`);
}
// Re-enable the button
q("#btnTest").disabled = false;
}
function getNumField(name) {
const val = q("#" + name).value.trim();
const num = /^\d+$/.test(val) ? parseInt(val) : NaN;
if (isNaN(num) || num <= 0) {
throw new Error(`Invalid ${name} value ${JSON.stringify(val)}`);
}
return num;
}
function runTests() {
try {
// Clear log
q("#log").innerHTML = "";
const runCount = getNumField("loops");
const length = getNumField("length");
// Do it (we run runCount + 1 times, first time is a warm up)
for (let counter = 0; counter <= runCount; ++counter) {
for (const test of tests) {
if (q("#chk_" + test.name).checked) {
const start = Date.now();
const a = test.func(length);
const time = Date.now() - start;
if (counter == 0) {
// Don't count (warm up), but do check the algorithm works
const invalid = validateResult(a, length);
if (invalid) {
log(`<span class=error>FAILURE</span> with test ${test.name}: ${invalid}`);
return;
}
} else {
// Count this one
log(`#${counter}: ${test.desc}: ${time}ms`);
test.total += time;
}
}
}
}
let lowest;
for (const test of tests) {
if (q("#chk_" + test.name).checked) {
test.avg = test.total / runCount;
if (typeof lowest != "number" || lowest > test.avg) {
lowest = test.avg;
}
}
}
let results =
"<p>Results:" +
"<br>Length: " + length +
"<br>Loops: " + runCount +
"</p>";
for (const test of tests) {
if (q("#chk_" + test.name).checked) {
results +=
`<p ${lowest == test.avg ? " class=winner" : ""}>${test.desc}, average time: ${test.avg}ms</p>`;
}
}
results += "<hr>";
q("#log").insertAdjacentHTML("afterbegin", results);
} catch (e) {
log(e.message);
return;
}
}
function validateResult(a, length) {
if (a.length != length) {
return "Length is wrong";
}
for (let n = length - 1; n >= 0; --n) {
if (a[n] != 0) {
return "Index " + n + " is not zero";
}
}
return undefined;
}
function makeWithCountDownPre(len) {
const a = new Array(len);
while (--len >= 0) {
a[len] = 0;
}
return a;
}
function makeWithCountDownPost(len) {
const a = new Array(len);
while (len-- > 0) {
a[len] = 0;
}
return a;
}
function makeWithCountUp(len) {
const a = new Array(len);
for (let i = 0; i < len; ++i) {
a[i] = 0;
}
return a;
}
function makeWithCountDownArrayUp(len) {
const a = new Array(len);
let i = 0;
while (--len >= 0) {
a[i++] = 0;
}
return a;
}
function makeWithConcat(len) {
if (len == 0) {
return [];
}
let a = [0];
let currlen = 1;
while (currlen < len) {
const rem = len - currlen;
if (rem < currlen) {
a = a.concat(a.slice(0, rem));
} else {
a = a.concat(a);
}
currlen = a.length;
}
return a;
}
function log(msg) {
const p = document.createElement("p");
p.textContent = msg;
q("#log").appendChild(p);
}
body {
font-family: sans-serif;
}
#log p {
margin: 0;
padding: 0;
}
.error {
color: red;
}
.winner {
color: green;
}
<div>
<label for='length'>Length:</label><input type='text' id='length' value='1000'>
<br><label for='loops'>Loops:</label><input type='text' id='loops' value='100000'>
<div id='checkboxes'></div>
<br><input type='button' id='btnTest' value='Test'>
<hr>
<div id='log'></div>
</div>

If you use ES6, you can use Array.from() like this:
Array.from({ length: 3 }, () => 0);
//[0, 0, 0]
Has the same result as
Array.from({ length: 3 }).map(() => 0)
//[0, 0, 0]
Because
Array.from({ length: 3 })
//[undefined, undefined, undefined]

By default Uint8Array, Uint16Array and Uint32Array classes keep zeros as its values, so you don't need any complex filling techniques, just do:
var ary = new Uint8Array(10);
all elements of array ary will be zeros by default.
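A quick illustrative check of that default, with made-up values; note that Uint8Array stores integers 0-255 and wraps values outside that range:
var ary = new Uint8Array(10);
console.log(ary[0], ary[9]);                               // 0 0
console.log(ary.every(function (v) { return v === 0; }));  // true, zero-initialized
ary[0] = 300;                                              // out-of-range values wrap modulo 256
console.log(ary[0]);                                       // 44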

function makeArrayOf(value, length) {
var arr = [], i = length;
while (i--) {
arr[i] = value;
}
return arr;
}
makeArrayOf(0, 5); // [0, 0, 0, 0, 0]
makeArrayOf('x', 3); // ['x', 'x', 'x']
Note that while is usually more efficient than for-in, forEach, etc.

using object notation
var x = [];
zero filled? like...
var x = [0,0,0,0,0,0];
filled with 'undefined'...
var x = new Array(7);
obj notation with zeros
var x = [];
for (var i = 0; i < 10; i++) x[i] = 0;
As a side note, if you modify Array's prototype, both
var x = new Array();
and
var y = [];
will have those prototype modifications
At any rate, I wouldn't be overly concerned with the efficiency or speed of this operation; there are plenty of other things that you will likely be doing that are far more wasteful and expensive than instantiating an array of arbitrary length containing zeros.

I've tested all combinations of pre-allocating/not pre-allocating, counting up/down, and for/while loops in IE 6/7/8, Firefox 3.5, Chrome, and Opera.
The function below was consistently the fastest, or extremely close, in Firefox, Chrome, and IE8, and not much slower than the fastest in Opera and IE6. It's also the simplest and clearest, in my opinion. I've found several browsers where the while loop version is slightly faster, so I'm including it too for reference.
function newFilledArray(length, val) {
var array = [];
for (var i = 0; i < length; i++) {
array[i] = val;
}
return array;
}
or
function newFilledArray(length, val) {
var array = [];
var i = 0;
while (i < length) {
array[i++] = val;
}
return array;
}

ES6 solution:
[...new Array(5)].map(x => 0); // [0, 0, 0, 0, 0]

const arr = Array.from({ length: 10 }).fill(0);
console.log(arr)

If you need to create many zero-filled arrays of different lengths during the execution of your code, the fastest way I've found is to create a zero array once, using one of the methods mentioned in this topic, with a length which you know will never be exceeded, and then slice that array as necessary.
For example (using the function from the chosen answer above to initialize the array), create a zero-filled array of length maxLength as a variable visible to the code that needs zero arrays:
var zero = newFilledArray(maxLength, 0);
Now slice this array every time you need a zero-filled array of length requiredLength < maxLength:
zero.slice(0, requiredLength);
I was creating zero-filled arrays thousands of times during the execution of my code, and this sped up the process tremendously.
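A minimal sketch of wrapping that idea in a helper; newFilledArray is assumed from the answer above (any fill method in this thread would do) and maxLength is a bound you choose:
var maxLength = 100000;                  // illustrative upper bound
var zero = newFilledArray(maxLength, 0); // built once, reused afterwards
function zeros(requiredLength) {
  // slice() returns a fresh copy, so callers can mutate the result freely
  return zero.slice(0, requiredLength);
}
console.log(zeros(4)); // [0, 0, 0, 0]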

function zeroFilledArray(size) {
return new Array(size + 1).join('0').split('');
}

Using lodash or underscore
_.range(0, length - 1, 0);
Or, if you already have an array and you want an array of the same length:
array.map(_.constant(0));

I have nothing against:
Array.apply(null, Array(5)).map(Number.prototype.valueOf,0);
new Array(5+1).join('0').split('').map(parseFloat);
suggested by Zertosh, but in ES6 the new array extensions allow you to do this natively with the fill method. Now IE Edge, Chrome and FF support it, but check the compatibility table.
new Array(3).fill(0) will give you [0, 0, 0]. You can fill the array with any value like new Array(5).fill('abc') (even objects and other arrays).
On top of that you can modify previous arrays with fill:
arr = [1, 2, 3, 4, 5, 6]
arr.fill(9, 3, 5) // value to fill, start index, end index (exclusive)
which gives you: [1, 2, 3, 9, 9, 6]

To create an all new Array
new Array(arrayLength).fill(0);
To add some values at the end of an existing Array
[...existingArray, ...new Array(numberOfElementsToAdd).fill(0)]
Example
//**To create an all new Array**
console.log(new Array(5).fill(0));
//**To add some values at the end of an existing Array**
let existingArray = [1,2,3]
console.log([...existingArray, ...new Array(5).fill(0)]);

The way I usually do it (and is amazing fast) is using Uint8Array. For example, creating a zero filled vector of 1M elements:
var zeroFilled = [].slice.apply(new Uint8Array(1000000))
I'm a Linux user and it has always worked for me, but once a friend using a Mac had some non-zero elements. I thought his machine was malfunctioning, but still, here's the safest way we found to fix it:
var zeroFilled = [].slice.apply(new Uint8Array(new Array(1000000)))
Edited
Chrome 25.0.1364.160
Frederik Gottlieb - 6.43
Sam Barnum - 4.83
Eli - 3.68
Joshua 2.91
Mathew Crumley - 2.67
bduran - 2.55
Allen Rice - 2.11
kangax - 0.68
Tj. Crowder - 0.67
zertosh - ERROR
Firefox 20.0
Allen Rice - 1.85
Joshua - 1.82
Mathew Crumley - 1.79
bduran - 1.37
Frederik Gottlieb - 0.67
Sam Barnum - 0.63
Eli - 0.59
kangax - 0.13
Tj. Crowder - 0.13
zertosh - ERROR
Missing the most important test (at least for me): the Node.js one. I suspect it's close to the Chrome benchmark.

As of ECMAScript2016, there is one clear choice for large arrays.
Since this answer still shows up near the top on google searches, here's an answer for 2017.
Here's a current jsbench with a few dozen popular methods, including many proposed up to now on this question. If you find a better method please add, fork and share.
I want to note that there is no true most efficient way to create an arbitrary length zero filled array. You can optimize for speed, or for clarity and maintainability - either can be considered the more efficient choice depending on the needs of the project.
When optimizing for speed, you want to create the array using literal syntax, set the length, initialize an iterating variable, and iterate through the array using a while loop. Here's an example:
const arr = [];
arr.length = 120000;
let i = 0;
while (i < 120000) {
arr[i] = 0;
i++;
}
Another possible implementation would be:
(arr = []).length = n;
let i = 0;
while (i < n) {
arr[i] = 0;
i++;
}
But I strongly discourage using this second implementation in practice, as it's less clear and doesn't allow you to maintain block scoping on your array variable.
These are significantly faster than filling with a for loop, and about 90% faster than the standard method of
const arr = Array(n).fill(0);
But this fill method is still the most efficient choice for smaller arrays due to its clarity, conciseness and maintainability. The performance difference likely won't kill you unless you're making a lot of arrays with lengths on the order of thousands or more.
A few other important notes. Most style guides recommend you no longer use var without a very special reason when using ES6 or later. Use const for variables that won't be redefined and let for variables that will. MDN and Airbnb's Style Guide are great places to go for more information on best practices. The question wasn't about syntax, but it's important that people new to JS know about these new standards when searching through these reams of old and new answers.

Didn't see this method in answers, so here it is:
"0".repeat( 200 ).split("").map( parseFloat )
As a result you will get a zero-valued array of length 200:
[ 0, 0, 0, 0, ... 0 ]
I'm not sure about the performance of this code, but it shouldn't be an issue if you use it for relatively small arrays.

What about new Array(51).join('0').split('')?

let filled = [];
filled.length = 10;
filled.fill(0);
console.log(filled);

This concat version is much faster in my tests on Chrome (2013-03-21). About 200ms for 10,000,000 elements vs 675 for straight init.
function filledArray(len, value) {
if (len <= 0) return [];
var result = [value];
while (result.length < len/2) {
result = result.concat(result);
}
return result.concat(result.slice(0, len-result.length));
}
Bonus: if you want to fill your array with Strings, this is a concise way to do it (not quite as fast as concat though):
function filledArrayString(len, value) {
return new Array(len+1).join(value).split('');
}

I was testing out the great answer by T.J. Crowder, and came up with a recursive merge based on the concat solution that outperforms any in his tests in Chrome (I didn't test other browsers).
function makeRec(len, acc) {
if (acc == null) acc = [];
if (len <= 1) return acc;
var b = makeRec(len >> 1, [0]);
b = b.concat(b);
if (len & 1) b = b.concat([0]);
return b;
}
Call the method with makeRec(29).

It might be worth pointing out that Array.prototype.fill was added as part of the ECMAScript 6 (Harmony) proposal. I would rather go with the polyfill written below before considering the other options mentioned in the thread.
if (!Array.prototype.fill) {
Array.prototype.fill = function(value) {
// Steps 1-2.
if (this == null) {
throw new TypeError('this is null or not defined');
}
var O = Object(this);
// Steps 3-5.
var len = O.length >>> 0;
// Steps 6-7.
var start = arguments[1];
var relativeStart = start >> 0;
// Step 8.
var k = relativeStart < 0 ?
Math.max(len + relativeStart, 0) :
Math.min(relativeStart, len);
// Steps 9-10.
var end = arguments[2];
var relativeEnd = end === undefined ?
len : end >> 0;
// Step 11.
var final = relativeEnd < 0 ?
Math.max(len + relativeEnd, 0) :
Math.min(relativeEnd, len);
// Step 12.
while (k < final) {
O[k] = value;
k++;
}
// Step 13.
return O;
};
}

Shortest for loop code
a=i=[];for(;i<100;)a[i++]=0;
edit:
for(a=i=[];i<100;)a[i++]=0;
or
for(a=[],i=100;i--;)a[i]=0;
Safe var version
var a=[],i=0;for(;i<100;)a[i++]=0;
edit:
for(var i=100,a=[];i--;)a[i]=0;

My fastest function would be:
function newFilledArray(len, val) {
var a = [];
while(len--){
a.push(val);
}
return a;
}
var st = (new Date()).getTime();
newFilledArray(1000000, 0)
console.log((new Date()).getTime() - st); // returned 63, 65, 62 milliseconds
Using the native push to add items to the array is much faster (about 10 times) than declaring the array size up front and referencing each item to set its value.
FYI: I consistently get faster times with the first loop, which is counting down, when running this in Firebug (a Firefox extension).
var a = [];
var len = 1000000;
var st = (new Date()).getTime();
while(len){
a.push(0);
len -= 1;
}
console.log((new Date()).getTime() - st); // returned 863, 894, 875 milliseconds
st = (new Date()).getTime();
len = 1000000;
a = [];
for(var i = 0; i < len; i++){
a.push(0);
}
console.log((new Date()).getTime() - st); // returned 1155, 1179, 1163 milliseconds
I'm interested to know what T.J. Crowder makes of that? :-)

I knew I had this proto'd somewhere :)
Array.prototype.init = function(x,n)
{
if(typeof(n)=='undefined') { n = this.length; }
while (n--) { this[n] = x; }
return this;
}
var a = (new Array(5)).init(0);
var b = [].init(0,4);
Edit: tests
In response to Joshua's and others' methods, I ran my own benchmarking, and I'm seeing completely different results to those reported.
Here's what I tested:
//my original method
Array.prototype.init = function(x,n)
{
if(typeof(n)=='undefined') { n = this.length; }
while (n--) { this[n] = x; }
return this;
}
//now using push which I had previously thought to be slower than direct assignment
Array.prototype.init2 = function(x,n)
{
if(typeof(n)=='undefined') { n = this.length; }
while (n--) { this.push(x); }
return this;
}
//joshua's method
function newFilledArray(len, val) {
var a = [];
while(len--){
a.push(val);
}
return a;
}
//test m1 and m2 with short arrays many times 10K * 10
var a = new Date();
for(var i=0; i<10000; i++)
{
var t1 = [].init(0,10);
}
var A = new Date();
var b = new Date();
for(var i=0; i<10000; i++)
{
var t2 = [].init2(0,10);
}
var B = new Date();
//test m1 and m2 with long array created once 100K
var c = new Date();
var t3 = [].init(0,100000);
var C = new Date();
var d = new Date();
var t4 = [].init2(0,100000);
var D = new Date();
//test m3 with short array many times 10K * 10
var e = new Date();
for(var i=0; i<10000; i++)
{
var t5 = newFilledArray(10,0);
}
var E = new Date();
//test m3 with long array created once 100K
var f = new Date();
var t6 = newFilledArray(100000, 0)
var F = new Date();
Results:
IE7 deltas:
dA=156
dB=359
dC=125
dD=375
dE=468
dF=412
FF3.5 deltas:
dA=6
dB=13
dC=63
dD=8
dE=12
dF=8
So by my reckoning, push is indeed slower in general, but it performs better with longer arrays in FF and worse in IE, which just sucks in general (quelle surprise).

Anonymous function:
(function(n) { while(n-- && this.push(0)); return this; }).call([], 5);
// => [0, 0, 0, 0, 0]
A bit shorter with for-loop:
(function(n) { for(;n--;this.push(0)); return this; }).call([], 5);
// => [0, 0, 0, 0, 0]
Works with any Object, just change what's inside this.push().
You can even save the function:
function fill(size, content) {
for(;size--;this.push(content));
return this;
}
Call it using:
var helloArray = fill.call([], 5, 'hello');
// => ['hello', 'hello', 'hello', 'hello', 'hello']
Adding elements to an already existing array:
var helloWorldArray = fill.call(helloArray, 5, 'world');
// => ['hello', 'hello', 'hello', 'hello', 'hello', 'world', 'world', 'world', 'world', 'world']
Performance: http://jsperf.com/zero-filled-array-creation/25

Related

How to make reverse array work with for loop only [duplicate]

I was asked recently what the most efficient way to reverse an array in JavaScript is. At the time, I suggested using a for loop and fiddling with the array, but then realized there is a native Array.reverse() method.
For curiosity's sake, can anyone help me explore this by showing examples or pointing in the right direction so I can read into this? Any suggestions regarding how to measure performance would be awesome too.
Based on this setup:
var array = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
var length = array.length;
Array.reverse(); is the first or second slowest!
The benchmarks are here:
https://jsperf.com/js-array-reverse-vs-while-loop/9
Across browsers, swap loops are faster. There are two common types of swap algorithms (see Wikipedia), each with two variations.
The two types of swap algorithms are temporary swap and XOR swap.
The two variations handle index calculations differently. The first variation compares the current left index and the right index and then decrements the right index of the array. The second variation compares the current left index and the length divided by half and then recalculates the right index for each iteration.
You may or may not see huge differences between the two variations. For example, in Chrome 18, the first variations of the temporary swap and XOR swap are over 60% slower than the second variations, but in Opera 12, both variations of the temporary swap and XOR swap have similar performance.
Temporary swap:
First variation:
function temporarySwap(array)
{
var left = null;
var right = null;
var length = array.length;
for (left = 0, right = length - 1; left < right; left += 1, right -= 1)
{
var temporary = array[left];
array[left] = array[right];
array[right] = temporary;
}
return array;
}
Second variation:
function temporarySwapHalf(array)
{
var left = null;
var right = null;
var length = array.length;
for (left = 0; left < length / 2; left += 1)
{
right = length - 1 - left;
var temporary = array[left];
array[left] = array[right];
array[right] = temporary;
}
return array;
}
XOR swap:
First variation:
function xorSwap(array)
{
var i = null;
var r = null;
var length = array.length;
for (i = 0, r = length - 1; i < r; i += 1, r -= 1)
{
var left = array[i];
var right = array[r];
left ^= right;
right ^= left;
left ^= right;
array[i] = left;
array[r] = right;
}
return array;
}
Second variation:
function xorSwapHalf(array)
{
var i = null;
var r = null;
var length = array.length;
for (i = 0; i < length / 2; i += 1)
{
r = length - 1 - i;
var left = array[i];
var right = array[r];
left ^= right;
right ^= left;
left ^= right;
array[i] = left;
array[r] = right;
}
return array;
}
There is another swap method called destructuring assignment:
http://wiki.ecmascript.org/doku.php?id=harmony:destructuring
Destructuring assignment:
First variation:
function destructuringSwap(array)
{
var left = null;
var right = null;
var length = array.length;
for (left = 0, right = length - 1; left < right; left += 1, right -= 1)
{
[array[left], array[right]] = [array[right], array[left]];
}
return array;
}
Second variation:
function destructuringSwapHalf(array)
{
var left = null;
var right = null;
var length = array.length;
for (left = 0; left < length / 2; left += 1)
{
right = length - 1 - left;
[array[left], array[right]] = [array[right], array[left]];
}
return array;
}
Right now, an algorithm using destructuring assignment is the slowest of them all. It is even slower than Array.reverse();. However, the algorithms using destructuring assignments and Array.reverse(); methods are the shortest examples, and they look the cleanest. I hope their performance gets better in the future.
Another mention is that modern browsers are improving their performance of array push and splice operations.
In Firefox 10, this for loop algorithm using array push and splice rivals the temporary swap and XOR swap loop algorithms.
for (length -= 2; length > -1; length -= 1)
{
array.push(array[length]);
array.splice(length, 1);
}
However, you should probably stick with the swap loop algorithms until many of the other browsers match or exceed their array push and splice performance.
In a simple way, you can do this using map:
let list = [10, 20, 30, 60, 90]
let reversedList = list.map((e, i, a)=> a[(a.length -1) -i]) // [90, 60...]
Native methods are always faster.
So use Array.reverse where possible. Otherwise an implementation that runs in O(1) would be best ;)
Otherwise just use something like this
var reverse = function(arr) {
var result = [],
ii = arr.length;
for (var i = ii - 1; i >= 0; i--) {
result.push(arr[i]);
}
return result;
}
Benchmark!
Interestingly, the loop is faster if you use all three stages of the for construct instead of only one.
for(var i = ii - 1; i !== 0;i--) is faster than var i = ii - 1;for(;i-- !== 0;)
I opened a Firefox bug about slow reverse performance in Firefox. Someone from Mozilla looked at the benchmark used in the accepted post and said that it is pretty misleading; in their analysis the native method is better in general for reversing arrays. (As it should be!)
Here is a clean and concise way to reverse an array recursively with the ternary operator (note that it empties the original array as it goes, via pop):
function reverse(arr) {
return arr.length < 2 ? arr : [arr.pop()].concat(reverse(arr));
}
console.log(reverse([4, 3, 3, 1]));
Swap functions are the fastest. Here's a reverse function I wrote that is only slightly similar to the swap functions mentioned above but performs faster.
function reverse(array) {
var first = null;
var last = null;
var tmp = null;
var length = array.length;
for (first = 0, last = length - 1; first < length / 2; first++, last--) {
tmp = array[first];
array[first] = array[last];
array[last] = tmp;
}
}
You can find the benchmarking here http://jsperf.com/js-array-reverse-vs-while-loop/19
Since no one came up with it and to complete the list of ways to reverse an array...
array.sort(function() {
return 1;
})
It's twice as fast as both while-approaches, but other than that, horribly slow.
http://jsperf.com/js-array-reverse-vs-while-loop/53
You can do this with .slice().reverse():
const yourArray = ["first", "second", "third", "...", "etc"];
const reversedArray = yourArray.slice().reverse();
console.log(reversedArray);
Note that .slice() is used to prevent modification of yourArray since .reverse() is in-place.
Here's a java example http://www.leepoint.net/notes-java/data/arrays/arrays-ex-reverse.html showing how to reverse an array. Very easy to convert to javascript.
I would suggest using something that simply captures the time before the function is called and after the function is called. Whichever takes the least time / fewest clock cycles will be the fastest.
Here is another example to permanently modify the array by reversing its elements:
var theArray = ['a', 'b', 'c', 'd', 'e', 'f'];
function reverseArrayInPlace(array) {
for (var i = array.length - 1; i >= 0; i -= 1) {
array.push(array[i]);
}
array.splice(0, array.length / 2);
return array;
};
reverseArrayInPlace(theArray);
console.log(theArray); // -> ["f", "e", "d", "c", "b", "a"]
Another suggestion, similar to the above, but using splice instead:
var myArray=["one","two","three","four","five","six"];
console.log(myArray);
for(i=0;i<myArray.length;i++){
myArray.splice(i,0,myArray.pop(myArray[myArray.length-1]));
}
console.log(myArray);
If you want to copy a reversed version of an array and keep the original as it is:
a = [0,1,2,3,4,5,6,7,8,9];
b = []
for(i=0;i<a.length;i++){
b.push(a.slice(a.length-i-1,a.length-i)[0])
}
Output of b:
[ 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]
You could also make use of reduceRight which will iterate through each value of the array (from right-to-left)
const myArray = [1, 2, 3, 4, 5]
const reversedArray = myArray.reduceRight((acc, curr) => [...acc, curr], [])
console.log(reversedArray) // [5, 4, 3, 2, 1]
Pasting the below into any javascript runtime console either on the browser or node.js would do a straight way benchmark test on a large array of number.
Say 40,000 in my case
var array = Array.from({ length: 40000 }, () =>
Math.floor(Math.random() * 40000)
);
var beforeStart = Date.now();
var reversedArray = array.map((obj, index, passedArray) => {
return passedArray[passedArray.length - index - 1];
});
console.log(reversedArray);
var afterCustom = Date.now();
console.log(array.reverse());
var afterNative = Date.now();
console.log(`custom took : ${afterCustom - beforeStart}ms`);
console.log(`native took : ${afterNative - afterCustom}ms`);
You can simply run the snippet directly to see how the custom version fares too.
For the sake of completeness, efficiency should also be thought about in a broader way than execution-time performance alone.
Therefore I'd like to add a swap function that is minimally slower (please test on your own using perf tools) than the ones from the accepted answer, but it has the following other beneficial properties:
it doesn't require a temp variable for swapping
it keeps the input array untouched
it works also with arrays of values other than numerical because it simply swaps references
it does not start a function call on every iteration step
it keeps being readable (although this could still be improved)
function fastSwapReverse (array) {
// for odd lengths, round the half point up so the middle element is copied too
var len = array.length;
var half = len % 2 === 0 ? len / 2 : Math.ceil(len / 2)
var k = 0, A = new Array(len)
// from 0 to half swap the elements at
// start: 0 + i
// end: len - i -1
for (k = 0; k < half; k++) {
A[k] = array[len - k - 1];
A[len - k - 1] = array[k];
}
return A;
}
Note that the original Array.prototype.reverse method mutates the array in place, whereas this one returns a new array.
Just start copying the array from the back using a for loop and return that new array. This is efficient and has O(n) time complexity.
var array1 = ["yes", "no", "maybe", "always", "sometimes", "never", "if"];
var array2 = [5,8,2,9,5,6,3,1];
function reverseArray(arr) {
var newArray = [];
for (var x = arr.length - 1; 0 <= x; --x) {
newArray.push(arr[x]);
}
return newArray;
}
console.log(reverseArray(array1)); // ["if", "never", "sometimes", "always", "maybe", "no", "yes"]
console.log(reverseArray(array2)) // [1, 3, 6, 5, 9, 2, 8, 5]
Here are a couple of tricks I found. Credit goes to Codemanx for the original solution of
array.sort(function() {
return 1;
})
In Typescript, this can be simplified to just one line
array.sort(() => 1)
var numbers = [1,4,9,13,16];
console.log(numbers.sort(() => 1));
Since this will be the future of JavaScript, I thought I'd share that.
Here's another trick if your array only has 2 elements
array.push(array.shift());
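Illustrative usage of that two-element trick, with hypothetical values:
const pair = ['a', 'b'];
pair.push(pair.shift()); // move the first element to the end
console.log(pair);       // ['b', 'a']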
// array-reverse-polyfill.js v1
Array.prototype.reverse = Array.prototype.reverse || function() {
return this.sort(function() {
return -1;
});
};
If you are in a modern browser, the native reverse function is used. If it's not modern, this polyfill will add the function for you. So just use it:
I haven't found any case where Firefox behaves the opposite way to other browsers. Anyway, if you want to ensure that the sort callback behaves consistently, just do a little test first:
// array-reverse-polyfill.js v1.1
;(function(){
var order = ['a', 'b'].sort(function(a, b){
return -1;
});
order = order[0] === 'b' ? -1 : 1;
Array.prototype.reverse = Array.prototype.reverse || function() {
return this.sort(function() {
return order;
});
};
})();
Example of use:
let a = [1,2,3,4,5];
a.reverse();
console.log(a);

JS - Lesson on codility involving perm check

I'm studying a solution to this lesson:
https://app.codility.com/programmers/lessons/4-counting_elements/perm_check/
I came across this solution made by a GitHub user:
https://github.com/daraosn/codility/tree/master/02-CountingElements/02-PermCheck/javascript
I understood everything in the code below:
function solution(A) {
var N = A.length;
var sum = (N * (N+1)) / 2;
var tap = [];
for (var i in A) {
sum-=A[i];
if(tap[A[i]]) {
return 0;
}
tap[A[i]] = true;
}
return +(sum==0);
}
with the exception of these lines:
if(tap[A[i]]) {
return 0;
}
tap[A[i]] = true;
What is their purpose? I didn't understand.
I did a test deleting these lines from the answer in the Codility interface, and it returned 75% correct instead of the 100% I got with these lines.
function solution(A) {
const set = new Set(A)
const max = Math.max(...A)
return set.size === max && set.size === A.length ? 1:0
}
That section checks to see if the number being iterated over has been found before, and per the instructions, duplicates are forbidden:
A permutation is a sequence containing each element from 1 to N once, and only once.
On every normal iteration, the current number being iterated over is assigned to a property of tap:
tap[A[i]] = true;
Then, on subsequent iterations, that test checks to see if the new number being iterated over has already been used:
if(tap[A[i]]) {
return 0;
}
This helps to invalidate inputs like [2, 2, 2], while permitting inputs like [1, 2, 3].
That said, there are two major red flags with this. First, for..in shouldn't be used to iterate over arrays. Instead:
for (const num of A) {
// use num
}
Also, sparse arrays are a very bad idea - it would make much more sense to use an object:
var tap = {};
or a Set:
var tap = new Set();
for (const num of A) {
sum -= num;
if (tap.has(num)) {
return 0;
}
tap.add(num);
}
return +(sum == 0);
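Putting those suggestions together, a complete version might look like the sketch below (this is an assembled example, not code from the linked repository):
function solution(A) {
  const N = A.length;
  let sum = (N * (N + 1)) / 2;     // expected sum of 1..N
  const seen = new Set();
  for (const num of A) {
    sum -= num;
    if (seen.has(num)) return 0;   // duplicate value: cannot be a permutation
    seen.add(num);
  }
  return sum === 0 ? 1 : 0;        // sum check catches missing or out-of-range values
}
console.log(solution([4, 1, 3, 2])); // 1
console.log(solution([4, 1, 3]));    // 0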
The array-based solution is not as proper as the explanation above, but I will put this compact one here in case you want it (note that the sort makes it O(n log n) rather than O(n)):
const solution = A => ~~(A.sort((a,b) => a-b).every((a,i) => a === i+1));

Check if an element is present in an array [duplicate]

This question already has answers here:
How do I check if an array includes a value in JavaScript?
The function I am using now to check this is the following:
function inArray(needle,haystack)
{
var count=haystack.length;
for(var i=0;i<count;i++)
{
if(haystack[i]===needle){return true;}
}
return false;
}
It works. Is there a better way to do this?
ECMAScript 2016 incorporates an includes() method for arrays that specifically solves the problem, and so is now the preferred method.
[1, 2, 3].includes(2); // true
[1, 2, 3].includes(4); // false
[1, 2, 3].includes(1, 2); // false (second parameter is the index position in this array at which to begin searching)
As of July 2018, this has been implemented in almost all major browsers; if you need to support an older browser, a polyfill is available.
Edit: Note that this returns false if the item you look for is an object, because two similar-looking objects are still two different objects in JavaScript (they are compared by reference).
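A short illustration of that caveat, with hypothetical objects: includes matches by reference, not by shape:
const item = { id: 1 };
console.log([{ id: 1 }].includes({ id: 1 })); // false: two different object references
console.log([item].includes(item));           // true: the very same reference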
Code:
function isInArray(value, array) {
return array.indexOf(value) > -1;
}
Execution:
isInArray(1, [1,2,3]); // true
Update (2017):
In modern browsers which follow the ECMAScript 2016 (ES7) standard, you can use the function Array.prototype.includes, which makes it way easier to check if an item is present in an array:
const array = [1, 2, 3];
const value = 1;
const isInArray = array.includes(value);
console.log(isInArray); // true
Just use indexOf:
haystack.indexOf(needle) >= 0
If you want to support old Internet Explorers (< IE9), you'll have to include your current code as a workaround though.
Unless your list is sorted, you need to compare every value to the needle. Therefore, both your solution and indexOf will have to execute n/2 comparisons on average. However, since indexOf is a built-in method, it may use additional optimizations and will be slightly faster in practice. Note that unless your application searches lists extremely often (say, 1000 times per second) or the lists are huge (say, 100k entries), the speed difference will not matter.
I benchmarked it multiple times on Google Chrome 52, but feel free to copypaste it into any other browser's console.
~ 1500 ms, includes (~ 2700 ms when I used the polyfill)
var array = [0,1,2,3,4,5,6,7,8,9];
var result = 0;
var start = new Date().getTime();
for(var i = 0; i < 10000000; i++)
{
if(array.includes("test") === true){ result++; }
}
console.log(new Date().getTime() - start);
~ 1050 ms, indexOf
var array = [0,1,2,3,4,5,6,7,8,9];
var result = 0;
var start = new Date().getTime();
for(var i = 0; i < 10000000; i++)
{
if(array.indexOf("test") > -1){ result++; }
}
console.log(new Date().getTime() - start);
~ 650 ms, custom function
function inArray(target, array)
{
/* Caching array.length doesn't increase the performance of the for loop on V8 (and probably on most of other major engines) */
for(var i = 0; i < array.length; i++)
{
if(array[i] === target)
{
return true;
}
}
return false;
}
var array = [0,1,2,3,4,5,6,7,8,9];
var result = 0;
var start = new Date().getTime();
for(var i = 0; i < 10000000; i++)
{
if(inArray("test", array) === true){ result++; }
}
console.log(new Date().getTime() - start);
Single line code.. will return true or false
!!(arr.indexOf("val")+1)
You can use indexOf, but it does not work in older versions of Internet Explorer (IE8 and below).
Code:
function isInArray(value, array) {
return array.indexOf(value) > -1;
}
Execution:
isInArray(1, [1,2,3]); // true
I suggest you use the following code:
function inArray(needle, haystack) {
var length = haystack.length;
for (var i = 0; i < length; i++) {
if (haystack[i] == needle)
return true;
}
return false;
}
Since ECMAScript6, one can use Set :
var myArray = ['A', 'B', 'C'];
var mySet = new Set(myArray);
var hasB = mySet.has('B'); // true
var hasZ = mySet.has('Z'); // false
You can use the _contains function from the underscore.js library to achieve this:
if (_.contains(haystack, needle)) {
console.log("Needle found.");
};
In lodash you can use _.includes (which also aliases to _.contains)
You can search the whole array:
_.includes([1, 2, 3], 1); // true
You can search the array from a starting index:
_.includes([1, 2, 3], 1, 1); // false (begins search at index 1)
Search a string:
_.includes('pebbles', 'eb'); // true (string contains eb)
Also works for checking simple arrays of objects:
_.includes({ 'user': 'fred', 'age': 40 }, 'fred'); // true
_.includes({ 'user': 'fred', 'age': false }, false); // true
One thing to note about the last case is that it works for primitives like strings, numbers and booleans, but cannot search through arrays or objects:
_.includes({ 'user': 'fred', 'age': {} }, {}); // false
_.includes({ 'user': [1,2,3], 'age': {} }, 3); // false

Fastest way to duplicate an array in JavaScript - slice vs. 'for' loop

In order to duplicate an array in JavaScript: Which of the following is faster to use?
Slice method
var dup_array = original_array.slice();
For loop
for(var i = 0, len = original_array.length; i < len; ++i)
dup_array[i] = original_array[i];
I know both ways do only a shallow copy: if original_array contains references to objects, objects won't be cloned, but only the references will be copied, and therefore both arrays will have references to the same objects.
But this is not the point of this question.
I'm asking only about speed.
There are at least 6 (!) ways to clone an array:
loop
slice
Array.from()
concat
spread syntax (FASTEST)
map A.map(function(e){return e;});
There has been a huuuge BENCHMARKS thread, providing following information:
for blink browsers slice() is the fastest method, concat() is a bit slower, and while loop is 2.4x slower.
for other browsers while loop is the fastest method, since those browsers don't have internal optimizations for slice and concat.
This remains true in Jul 2016.
Below are simple scripts that you can copy-paste into your browser's console and run several times to see the picture. They output milliseconds, lower is better.
while loop
n = 1000*1000;
start = + new Date();
a = Array(n);
b = Array(n);
i = a.length;
while(i--) b[i] = a[i];
console.log(new Date() - start);
slice
n = 1000*1000;
start = + new Date();
a = Array(n);
b = a.slice();
console.log(new Date() - start);
Please note that these methods will clone the Array object itself, array contents however are copied by reference and are not deep cloned.
origAr == clonedArr //returns false
origAr[0] == clonedArr[0] //returns true
Technically slice is the fastest way. However, it is even faster if you add the 0 begin index.
myArray.slice(0);
is faster than
myArray.slice();
https://jsben.ch/F0SZ3
What about the ES6 way?
arr2 = [...arr1];
Easiest way to deep clone Array or Object:
var dup_array = JSON.parse(JSON.stringify(original_array))
🏁 Fastest Way to Clone an Array
I made this very plain utility function to test the time that it takes to clone an array. It is not 100% reliable, however it can give you a rough idea of how long it takes to clone an existing array:
function clone(fn) {
const arr = [...Array(1000000)];
console.time('timer');
fn(arr);
console.timeEnd('timer');
}
And tested different approach:
1) 5.79ms -> clone(arr => Object.values(arr));
2) 7.23ms -> clone(arr => [].concat(arr));
3) 9.13ms -> clone(arr => arr.slice());
4) 24.04ms -> clone(arr => { const a = []; for (let val of arr) { a.push(val); } return a; });
5) 30.02ms -> clone(arr => [...arr]);
6) 39.72ms -> clone(arr => JSON.parse(JSON.stringify(arr)));
7) 99.80ms -> clone(arr => arr.map(i => i));
8) 259.29ms -> clone(arr => Object.assign([], arr));
9) Maximum call stack size exceeded -> clone(arr => Array.of(...arr));
UPDATE:
Tests were made back in 2018, so today you will most likely get different results with current browsers.
Out of all of those, the only way to deep clone an array is by using JSON.parse(JSON.stringify(arr)).
That said, do not use the above if your array might include functions, as the JSON round-trip turns them into null (inside arrays) or drops them (inside objects). Thanks to @GilEpshtain for this update.
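A small sketch of that caveat: the JSON round-trip deep-clones plain data but silently loses anything JSON cannot represent:
const src = [{ n: 1, f: () => 1, inf: Infinity }];
const copy = JSON.parse(JSON.stringify(src));
console.log(copy); // [ { n: 1, inf: null } ]: the function property is dropped, Infinity becomes null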
var cloned_array = [].concat(target_array);
I put together a quick demo: http://jsbin.com/agugo3/edit
My results on Internet Explorer 8 are 156, 782, and 750, which would indicate slice is much faster in this case.
a.map(e => e) is another alternative for this job. As of today .map() is very fast (almost as fast as .slice(0)) in Firefox, but not in Chrome.
On the other hand, if an array is multi-dimensional, since arrays are objects and objects are reference types, neither slice nor concat will be a cure... So one proper way of cloning such an array is to define an Array.prototype.clone() as follows:
Array.prototype.clone = function(){
return this.map(e => Array.isArray(e) ? e.clone() : e);
};
var arr = [ 1, 2, 3, 4, [ 1, 2, [ 1, 2, 3 ], 4 , 5], 6 ],
brr = arr.clone();
brr[4][2][1] = "two";
console.log(JSON.stringify(arr));
console.log(JSON.stringify(brr));
The fastest way to clone an Array of Objects is to use the spread operator:
var clonedArray=[...originalArray]
or
var clonedArray = originalArray.slice(0); //with 0 index it's little bit faster than normal slice()
but the objects inside that cloned array will still point at the old memory locations; hence, changes to clonedArray's objects will also change the originalArray. So
var clonedArray = originalArray.map(({...ele}) => {return ele})
this will not only create new array but also the objects will be cloned to.
Disclaimer: if you are working with nested objects, the spread operator will perform a SHALLOW CLONE. In that case it's better to use
var clonedArray=JSON.parse(JSON.stringify(originalArray));
Take a look at: link. It's not about speed, but comfort. Besides as you can see you can only use slice(0) on primitive types.
To make an independent copy of an array rather than a copy of the reference to it, you can use the array slice method.
Example:
var oldArray = ["mip", "map", "mop"];
var newArray = oldArray.slice();
To copy or clone an object :
function cloneObject(source) {
for (var i in source) {
if (typeof source[i] === 'object' && source[i] !== null) {
this[i] = new cloneObject(source[i]);
}
else{
this[i] = source[i];
}
}
}
var obj1= {bla:'blabla',foo:'foofoo',etc:'etc'};
var obj2= new cloneObject(obj1);
Source: link
ECMAScript 2015 way with the Spread operator:
Basic examples:
var copyOfOldArray = [...oldArray]
var twoArraysBecomeOne = [...firstArray, ...seccondArray]
Try in the browser console:
var oldArray = [1, 2, 3]
var copyOfOldArray = [...oldArray]
console.log(oldArray)
console.log(copyOfOldArray)
var firstArray = [5, 6, 7]
var seccondArray = ["a", "b", "c"]
var twoArraysBecomOne = [...firstArray, ...seccondArray]
console.log(twoArraysBecomOne);
References
6 Great Uses of the Spread Operator
Spread syntax
As @Dan said, "This answer becomes outdated fast. Use benchmarks to check the actual situation"; there is one specific result from jsperf that has not had an answer of its own here: while:
var i = a.length;
while(i--) { b[i] = a[i]; }
had 960,589 ops/sec with the runner-up a.concat() at 578,129 ops/sec, which is 60%.
This is the latest Firefox (40), 64-bit.
@aleclarson created a new, more reliable benchmark.
Benchmark time!
function log(data) {
document.getElementById("log").textContent += data + "\n";
}
benchmark = (() => {
time_function = function(ms, f, num) {
var z = 0;
var t = new Date().getTime();
for (z = 0;
((new Date().getTime() - t) < ms); z++)
f(num);
return (z)
}
function clone1(arr) {
return arr.slice(0);
}
function clone2(arr) {
return [...arr]
}
function clone3(arr) {
return [].concat(arr);
}
Array.prototype.clone = function() {
return this.map(e => Array.isArray(e) ? e.clone() : e);
};
function clone4(arr) {
return arr.clone();
}
function benchmark() {
function compare(a, b) {
if (a[1] > b[1]) {
return -1;
}
if (a[1] < b[1]) {
return 1;
}
return 0;
}
funcs = [clone1, clone2, clone3, clone4];
results = [];
funcs.forEach((ff) => {
console.log("Benchmarking: " + ff.name);
var s = time_function(2500, ff, Array(1024));
results.push([ff, s]);
console.log("Score: " + s);
})
return results.sort(compare);
}
return benchmark;
})()
log("Starting benchmark...\n");
res = benchmark();
console.log("Winner: " + res[0][0].name + " !!!");
count = 1;
res.forEach((r) => {
log((count++) + ". " + r[0].name + " score: " + Math.floor(10000 * r[1] / res[0][1]) / 100 + ((count == 2) ? "% *winner*" : "% speed of winner.") + " (" + Math.round(r[1] * 100) / 100 + ")");
});
log("\nWinner code:\n");
log(res[0][0].toString());
<textarea rows="50" cols="80" style="font-size: 16; resize:none; border: none;" id="log"></textarea>
The benchmark will run for about 10 seconds once it starts.
My results:
Chrome (V8 engine):
1. clone1 score: 100% *winner* (4110764)
2. clone3 score: 74.32% speed of winner. (3055225)
3. clone2 score: 30.75% speed of winner. (1264182)
4. clone4 score: 21.96% speed of winner. (902929)
Firefox (SpiderMonkey Engine):
1. clone1 score: 100% *winner* (8448353)
2. clone3 score: 16.44% speed of winner. (1389241)
3. clone4 score: 5.69% speed of winner. (481162)
4. clone2 score: 2.27% speed of winner. (192433)
Winner code:
function clone1(arr) {
return arr.slice(0);
}
Winner engine:
SpiderMonkey (Mozilla/Firefox)
It depends on the browser. If you look in the blog post Array.prototype.slice vs manual array creation, there is a rough guide to performance of each:
Results: (see the performance chart in the linked blog post)
There is a much cleaner solution:
var srcArray = [1, 2, 3];
var clonedArray = srcArray.length === 1 ? [srcArray[0]] : Array.apply(this, srcArray);
The length check is required, because the Array constructor behaves differently when it is called with exactly one argument.
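A short illustration of that behaviour, which is why the length check is needed:
console.log(new Array(7).length);       // 7: a single numeric argument is treated as a length
console.log(Array.apply(null, [7]));    // 7 empty slots, not [7]
console.log(Array.apply(null, [7, 8])); // [7, 8]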
Remember that .slice() won't deep-copy two-dimensional arrays; the nested arrays are still shared. You'll need a function like this:
function copy(array) {
return array.map(function(arr) {
return arr.slice();
});
}
It depends on the length of the array. If the array length is <= 1,000,000, the slice and concat methods are taking approximately the same time. But when you give a wider range, the concat method wins.
For example, try this code:
var original_array = [];
for(var i = 0; i < 10000000; i ++) {
original_array.push( Math.floor(Math.random() * 1000000 + 1));
}
function a1() {
var dup = [];
var start = Date.now();
dup = original_array.slice();
var end = Date.now();
console.log('slice method takes ' + (end - start) + ' ms');
}
function a2() {
var dup = [];
var start = Date.now();
dup = original_array.concat([]);
var end = Date.now();
console.log('concat method takes ' + (end - start) + ' ms');
}
function a3() {
var dup = [];
var start = Date.now();
for(var i = 0; i < original_array.length; i ++) {
dup.push(original_array[i]);
}
var end = Date.now();
console.log('for loop with push method takes ' + (end - start) + ' ms');
}
function a4() {
var dup = [];
var start = Date.now();
for(var i = 0; i < original_array.length; i ++) {
dup[i] = original_array[i];
}
var end = Date.now();
console.log('for loop with = method takes ' + (end - start) + ' ms');
}
function a5() {
var dup = new Array(original_array.length)
var start = Date.now();
for(var i = 0; i < original_array.length; i ++) {
dup[i] = original_array[i];
}
var end = Date.now();
console.log('for loop with = method and array constructor takes ' + (end - start) + ' ms');
}
a1();
a2();
a3();
a4();
a5();
If you set the length of original_array to 1,000,000, the slice method and concat method are taking approximately the same time (3-4 ms, depending on the random numbers).
If you set the length of original_array to 10,000,000, then the slice method takes over 60 ms and the concat method takes over 20 ms.
In ES6, you can simply utilize the Spread syntax.
Example:
let arr = ['a', 'b', 'c'];
let arr2 = [...arr];
Please note that the spread operator generates a completely new array, so modifying one won't affect the other.
Example:
arr2.push('d') // becomes ['a', 'b', 'c', 'd']
console.log(arr) // while arr retains its values ['a', 'b', 'c']
A simple solution:
original = [1,2,3]
cloned = original.map(x=>x)
const arr = ['1', '2', '3'];
// Old way
const cloneArr = arr.slice();
// ES6 way
const cloneArrES6 = [...arr];
// But problem with 3rd approach is that if you are using muti-dimensional
// array, then only first level is copied
const nums = [
[1, 2],
[10],
];
const cloneNums = [...nums];
// Let's change the first item in the first nested item in our cloned array.
cloneNums[0][0] = '8';
console.log(cloneNums);
// [ [ '8', 2 ], [ 10 ] ]
// NOOooo, the original is also affected
console.log(nums);
// [ [ '8', 2 ], [ 10 ] ]
So, to avoid these scenarios you need a deep copy for nested data; note that the Array.from approach below is, like spread, still a shallow copy, so it only helps for flat arrays:
const arr = ['1', '2', '3'];
const cloneArr = Array.from(arr);
There are several ways to clone an array. Basically, cloning can be categorized in two ways:
Shallow copy
Deep copy
Shallow copies only cover the 1st level of the array and the rest are
referenced. If you want a true copy of nested elements in the arrays, you’ll need a
deep clone.
Example :
const arr1 = [1,2,3,4,5,6,7]
// Normal Array (shallow copy is enough)
const arr2 = [1,2,3,[4],[[5]],6,7]
// Nested Array (Deep copy required)
Approach 1 : Using the (...) spread operator (shallow copy)
const newArray = [...arr1] // [1,2,3,4,5,6,7]
Approach 2 : Using the built-in Array slice method (shallow copy)
const newArray = arr1.slice() // [1,2,3,4,5,6,7]
Approach 3 : Using the built-in Array concat method (shallow copy)
const newArray = [].concat(arr1) // [1,2,3,4,5,6,7]
Approach 4 : Using JSON.stringify/parse (deep copy)
const newArray = JSON.parse(JSON.stringify(arr2)) // [1,2,3,[4],[[5]],6,7]
Approach 5 : Using your own recursive function or Lodash's _.cloneDeep method (deep copy), as sketched below.
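A minimal sketch of such a recursive deep clone for nested arrays (illustrative only; non-array values are returned as-is here, so for general objects prefer structuredClone or Lodash's _.cloneDeep):
function deepCloneArray(value) {
  // Recurse into nested arrays; return anything else unchanged
  return Array.isArray(value) ? value.map(deepCloneArray) : value;
}
const copied = deepCloneArray([1, 2, 3, [4], [[5]], 6, 7]);
console.log(copied); // [1, 2, 3, [4], [[5]], 6, 7]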
Fast ways to duplicate an array in JavaScript in Order:
#1: array1copy = [...array1];
#2: array1copy = array1.slice(0);
#3: array1copy = array1.slice();
If you need a deep copy and your array contains only JSON-serializable content (no functions, Number.POSITIVE_INFINITY, undefined, etc.), you can use
array1copy = JSON.parse(JSON.stringify(array1))
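A quick illustration (mine) of what the JSON round-trip does to non-serializable values inside an array:
const source = [() => 1, Number.POSITIVE_INFINITY, undefined, NaN, 42];
const copy = JSON.parse(JSON.stringify(source));
console.log(copy); // [null, null, null, null, 42]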
You can also clone an array immutably with the spread operator; modifying the copy does not affect the original:
const array = [1, 2, 3, 4]
const newArray = [...array]
newArray.push(6)
console.log(array)
console.log(newArray)
If you want a REAL cloned object/array in JS with cloned references of all attributes and sub-objects:
export function clone(arr) {
return JSON.parse(JSON.stringify(arr))
}
All other operations create only shallow clones: they copy the top-level array, but the nested objects are still shared by reference, unless you traverse the object tree recursively.
For a simple copy these are OK. But when the clone must not share any references with the original, I suggest converting to a string and back into a completely new object, as above.
If you are talking about slice, it is used to copy elements from an array and create a clone with the same number of elements or fewer.
var arr = [1, 2, 3, 4, 5];
function slc(arr) {
var sliced = arr.slice(0, 5);
// arr.slice(index to start copying from, index to stop before (exclusive))
console.log(sliced);
}
slc(arr);

Find the min/max element of an array in JavaScript

How can I easily obtain the min or max element of a JavaScript array?
Example pseudocode:
let array = [100, 0, 50]
array.min() //=> 0
array.max() //=> 100
How about augmenting the built-in Array object to use Math.max/Math.min instead:
Array.prototype.max = function() {
return Math.max.apply(null, this);
};
Array.prototype.min = function() {
return Math.min.apply(null, this);
};
let p = [35,2,65,7,8,9,12,121,33,99];
console.log(`Max value is: ${p.max()}` +
`\nMin value is: ${p.min()}`);
Here is a JSFiddle.
Augmenting the built-ins can cause collisions with other libraries (as some point out), so you may be more comfortable with just apply'ing Math.xxx() to your array directly:
var min = Math.min.apply(null, arr),
max = Math.max.apply(null, arr);
Alternately, assuming your browser supports ECMAScript 6, you can use spread syntax which functions similarly to the apply method:
var min = Math.min( ...arr ),
max = Math.max( ...arr );
var max_of_array = Math.max.apply(Math, array);
For a full discussion see:
http://aaroncrane.co.uk/2008/11/javascript_max_api/
Using spread operator (ES6)
Math.max(...array) // The same with "min" => Math.min(...array)
const array = [10, 2, 33, 4, 5];
console.log(
Math.max(...array)
)
For big arrays (~10⁷ elements), Math.min and Math.max (called via apply or the spread syntax) both produce the following error in Node.js.
RangeError: Maximum call stack size exceeded
A more robust solution is to not add every element to the call stack, but to instead pass an array:
function arrayMin(arr) {
return arr.reduce(function (p, v) {
return ( p < v ? p : v );
});
}
function arrayMax(arr) {
return arr.reduce(function (p, v) {
return ( p > v ? p : v );
});
}
If you are concerned about speed, the following code is ~3 times faster than Math.max.apply on my computer. See https://jsben.ch/JPOyL.
function arrayMin(arr) {
var len = arr.length, min = Infinity;
while (len--) {
if (arr[len] < min) {
min = arr[len];
}
}
return min;
};
function arrayMax(arr) {
var len = arr.length, max = -Infinity;
while (len--) {
if (arr[len] > max) {
max = arr[len];
}
}
return max;
};
If your arrays contain strings instead of numbers, you also need to coerce them into numbers. The code below does that, but it slows things down ~10 times on my machine. See https://jsben.ch/uPipD.
function arrayMin(arr) {
var len = arr.length, min = Infinity;
while (len--) {
if (Number(arr[len]) < min) {
min = Number(arr[len]);
}
}
return min;
};
function arrayMax(arr) {
var len = arr.length, max = -Infinity;
while (len--) {
if (Number(arr[len]) > max) {
max = Number(arr[len]);
}
}
return max;
};
tl;dr
// For regular arrays:
var max = Math.max(...arrayOfNumbers);
// For arrays with tens of thousands of items:
let max = testArray[0];
for (let i = 1; i < testArrayLength; ++i) {
if (testArray[i] > max) {
max = testArray[i];
}
}
MDN solution
The official MDN docs on Math.max() already cover this issue:
The following function uses Function.prototype.apply() to find the maximum element in a numeric array. getMaxOfArray([1, 2, 3]) is equivalent to Math.max(1, 2, 3), but you can use getMaxOfArray() on programmatically constructed arrays of any size.
function getMaxOfArray(numArray) {
return Math.max.apply(null, numArray);
}
Or with the new spread operator, getting the maximum of an array becomes a lot easier.
var arr = [1, 2, 3];
var max = Math.max(...arr);
Maximum size of an array
According to MDN, the apply and spread solutions have a limitation of 65536 arguments, which comes from the engine's limit on the maximum number of arguments:
But beware: in using apply this way, you run the risk of exceeding the JavaScript engine's argument length limit. The consequences of applying a function with too many arguments (think more than tens of thousands of arguments) vary across engines (JavaScriptCore has hard-coded argument limit of 65536), because the limit (indeed even the nature of any excessively-large-stack behavior) is unspecified. Some engines will throw an exception. More perniciously, others will arbitrarily limit the number of arguments actually passed to the applied function. To illustrate this latter case: if such an engine had a limit of four arguments (actual limits are of course significantly higher), it would be as if the arguments 5, 6, 2, 3 had been passed to apply in the examples above, rather than the full array.
They even provide a hybrid solution which doesn't really have good performance compared to other solutions. See performance test below for more.
In 2019 the actual limit is the maximum size of the call stack. For modern Chromium-based desktop browsers, this means that when it comes to finding min/max with apply or spread, the practical maximum size for numbers-only arrays is ~120,000. Above this, there will be a stack overflow and the following error will be thrown:
RangeError: Maximum call stack size exceeded
With the script below (based on this blog post), by catching that error you can calculate the limit for your specific environment.
Warning! Running this script takes time and depending on the performance of your system it might slow or crash your browser/system!
let testArray = Array.from({length: 10000}, () => Math.floor(Math.random() * 2000000));
for (let i = 10000; i < 1000000; ++i) {
testArray.push(Math.floor(Math.random() * 2000000));
try {
Math.max.apply(null, testArray);
} catch (e) {
console.log(i);
break;
}
}
Performance on large arrays
Based on the test in EscapeNetscape's comment, I created some benchmarks that test 5 different methods on a random-number-only array with 100,000 items.
In 2019, the results show that the standard loop (which, by the way, doesn't have the size limitation) is the fastest everywhere. apply and spread come in close behind it, followed much later by MDN's hybrid solution and then reduce as the slowest.
Almost all tests gave the same results, except for one where spread somehow ended up being the slowest.
If you step up your array to 1 million items, things start to break, and you are left with the standard loop as a fast solution and reduce as a slower one.
JSPerf benchmark
JSBen benchmark
JSBench.me benchmark
Benchmark source code
var testArrayLength = 100000
var testArray = Array.from({length: testArrayLength}, () => Math.floor(Math.random() * 2000000));
// ES6 spread
Math.min(...testArray);
Math.max(...testArray);
// reduce
testArray.reduce(function(a, b) {
return Math.max(a, b);
});
testArray.reduce(function(a, b) {
return Math.min(a, b);
});
// apply
Math.min.apply(Math, testArray);
Math.max.apply(Math, testArray);
// standard loop
let max = testArray[0];
for (let i = 1; i < testArrayLength; ++i) {
if (testArray[i] > max) {
max = testArray[i];
}
}
let min = testArray[0];
for (let i = 1; i < testArrayLength; ++i) {
if (testArray[i] < min) {
min = testArray[i];
}
}
// MDN hybrid solution
// Source: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/apply#Using_apply_and_built-in_functions
function minOfArray(arr) {
var min = Infinity;
var QUANTUM = 32768;
for (var i = 0, len = arr.length; i < len; i += QUANTUM) {
var submin = Math.min.apply(null, arr.slice(i, Math.min(i + QUANTUM, len)));
min = Math.min(submin, min);
}
return min;
}
minOfArray(testArray);
function maxOfArray(arr) {
var max = -Infinity;
var QUANTUM = 32768;
for (var i = 0, len = arr.length; i < len; i += QUANTUM) {
var submax = Math.max.apply(null, arr.slice(i, Math.min(i + QUANTUM, len)));
max = Math.max(submax, max);
}
return max;
}
maxOfArray(testArray);
If you're paranoid like me about using Math.max.apply (which could cause errors when given large arrays according to MDN), try this:
function arrayMax(array) {
return array.reduce(function(a, b) {
return Math.max(a, b);
});
}
function arrayMin(array) {
return array.reduce(function(a, b) {
return Math.min(a, b);
});
}
Or, in ES6:
function arrayMax(array) {
return array.reduce((a, b) => Math.max(a, b));
}
function arrayMin(array) {
return array.reduce((a, b) => Math.min(a, b));
}
The anonymous functions are unfortunately necessary (instead of using Math.max.bind(Math)), because reduce doesn't just pass a and b to its callback, but also the index i and a reference to the array itself, so we have to make sure we don't try to call max on those as well.
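To make that concrete, here is a small sketch of the pitfall (my own illustration):
// reduce calls its callback as (accumulator, value, index, array),
// so Math.max also receives the index and the whole array as extra arguments:
[1, 50, 2].reduce(Math.max);                 // NaN, e.g. Math.max(1, 50, 1, [1, 50, 2])
// The arrow function forwards only the two numbers:
[1, 50, 2].reduce((a, b) => Math.max(a, b)); // 50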
Alternative Methods
Math.min and Math.max are great methods to get the minimum and maximum item out of a collection of items; however, it's important to be aware of some caveats that come with them.
Using them on an array that contains a large number of items (more than ~10⁷ items, depending on the user's browser) will most likely crash with the following error message:
const arr = Array.from(Array(1000000).keys());
Math.min(...arr);
Math.max(...arr);
Uncaught RangeError: Maximum call stack size exceeded
UPDATE
Latest browsers might return NaN instead. That might be a better way to handle errors; however, it doesn't solve the problem just yet.
Instead, consider using something like so:
function maxValue(arr) {
return arr.reduce((max, val) => max > val ? max : val)
}
Or with better run-time:
function maxValue(arr) {
let max = arr[0];
for (let val of arr) {
if (val > max) {
max = val;
}
}
return max;
}
Or to get both Min and Max:
function getMinMax(arr) {
return arr.reduce(({min, max}, v) => ({
min: min < v ? min : v,
max: max > v ? max : v,
}), { min: arr[0], max: arr[0] });
}
Or with even better run-time*:
function getMinMax(arr) {
let min = arr[0];
let max = arr[0];
let i = arr.length;
while (i--) {
min = arr[i] < min ? arr[i] : min;
max = arr[i] > max ? arr[i] : max;
}
return { min, max };
}
* Tested with 1,000,000 items:
Just for reference, the 1st function's run-time (on my machine) was 15.84 ms vs the 2nd function's 4.32 ms.
Here are two ways that are short and easy:
let arr = [2, 6, 1, 0]
Way 1:
let max = Math.max.apply(null, arr)
Way 2:
let max = arr.reduce(function(a, b) {
return Math.max(a, b);
});
.apply is often used when the intention is to invoke a variadic function with a list of argument values, e.g.
The Math.max([value1[,value2, ...]]) function returns the largest of zero or more numbers.
Math.max(10, 20); // 20
Math.max(-10, -20); // -10
Math.max(-10, 20); // 20
The Math.max() method doesn't allow you to pass in an array. If you have a list of values of which you need to get the largest, you would normally call this function using Function.prototype.apply(), e.g.
Math.max.apply(null, [10, 20]); // 20
Math.max.apply(null, [-10, -20]); // -10
Math.max.apply(null, [-10, 20]); // 20
However, as of the ECMAScript 6 you can use the spread operator:
The spread operator allows an expression to be expanded in places where multiple arguments (for function calls) or multiple elements (for array literals) are expected.
Using the spread operator, the above can be rewritten as such:
Math.max(...[10, 20]); // 20
Math.max(...[-10, -20]); // -10
Math.max(...[-10, 20]); // 20
When calling a function using the spread operator, you can even add additional values, e.g.
Math.max(...[10, 20], 50); // 50
Math.max(...[-10, -20], 50); // 50
Bonus:
The spread operator enables you to use the array literal syntax to create new arrays in situations where in ES5 you would need to fall back to imperative code, using a combination of push, splice, etc.
let foo = ['b', 'c'];
let bar = ['a', ...foo, 'd', 'e']; // ['a', 'b', 'c', 'd', 'e']
You do it by extending the Array type:
Array.max = function( array ){
return Math.max.apply( Math, array );
};
Array.min = function( array ){
return Math.min.apply( Math, array );
};
Boosted from here (by John Resig)
A simple solution to find the minimum value over an Array of elements is to use the Array prototype function reduce:
A = [4,3,-9,-2,2,1];
A.reduce((min, val) => val < min ? val : min, A[0]); // returns -9
or using JavaScript's built-in Math.min() function (thanks @Tenflex):
A.reduce((min,val) => Math.min(min,val), A[0]);
This sets min to A[0], and then checks for each of A[1]...A[n] whether it is strictly less than the current min. If A[i] < min, then min is updated to A[i]. When all array elements have been processed, min is returned as the result.
EDIT: Include position of minimum value:
A = [4,3,-9,-2,2,1];
A.reduce((min, val) => val < min._min ? {_min: val, _idx: min._curr, _curr: min._curr + 1} : {_min: min._min, _idx: min._idx, _curr: min._curr + 1}, {_min: A[0], _idx: 0, _curr: 0}); // returns { _min: -9, _idx: 2, _curr: 6 }
For a concise, modern solution, one can perform a reduce operation over the array, keeping track of the current minimum and maximum values, so the array is only iterated over once (which is optimal). Destructuring assignment is used here for succinctness.
let array = [100, 0, 50];
let [min, max] = array.reduce(([prevMin,prevMax], curr)=>
[Math.min(prevMin, curr), Math.max(prevMax, curr)], [Infinity, -Infinity]);
console.log("Min:", min);
console.log("Max:", max);
To find only the minimum or the maximum, we can perform a reduce operation in much the same way, but we only need to keep track of the previous optimal value. This method is better than using apply, as it will not cause errors when the array is too large for the stack.
const arr = [-1, 9, 3, -6, 35];
//Only find minimum
const min = arr.reduce((a,b)=>Math.min(a,b), Infinity);
console.log("Min:", min);//-6
//Only find maximum
const max = arr.reduce((a,b)=>Math.max(a,b), -Infinity);
console.log("Max:", max);//35
Others have already given some solutions in which they augment Array.prototype. All I want in this answer is to clarify whether it should be Math.min.apply( Math, array ) or Math.min.apply( null, array ). So what context should be used, Math or null?
When passing null as the context to apply, the context defaults to the global object (the window object in the case of browsers). Passing the Math object as the context would be the correct solution, but it won't hurt to pass null either. Here's an example where null might cause trouble, when decorating the Math.max function:
// decorate Math.max
(function (oldMax) {
Math.max = function () {
this.foo(); // call Math.foo, or at least that's what we want
return oldMax.apply(this, arguments);
};
})(Math.max);
Math.foo = function () {
print("foo");
};
Array.prototype.max = function() {
return Math.max.apply(null, this); // <-- passing null as the context
};
var max = [1, 2, 3].max();
print(max);
The above will throw an exception because this.foo will be evaluated as window.foo, which is undefined. If we replace null with Math, things will work as expected and the string "foo" will be printed to the screen (I tested this using Mozilla Rhino).
You can pretty much assume that nobody has decorated Math.max, so passing null will work without problems.
One more way to do it:
var arrayMax = Function.prototype.apply.bind(Math.max, null);
Usage:
var max = arrayMax([2, 5, 1]);
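Presumably the same trick works for the minimum (this companion line is mine, not from the original answer):
var arrayMin = Function.prototype.apply.bind(Math.min, null);
var min = arrayMin([2, 5, 1]); // 1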
I am surprised no one mentioned the reduce function.
var arr = [1, 10, 5, 11, 2]
var b = arr.reduce(function(previous,current){
return previous > current ? previous:current
});
b => 11
arr => [1, 10, 5, 11, 2]
https://developer.mozilla.org/ru/docs/Web/JavaScript/Reference/Global_Objects/Math/max
function getMaxOfArray(numArray) {
return Math.max.apply(null, numArray);
}
var arr = [100, 0, 50];
console.log(getMaxOfArray(arr))
this worked for me.
This may suit your purposes.
Array.prototype.min = function(comparer) {
if (this.length === 0) return null;
if (this.length === 1) return this[0];
comparer = (comparer || Math.min);
var v = this[0];
for (var i = 1; i < this.length; i++) {
v = comparer(this[i], v);
}
return v;
}
Array.prototype.max = function(comparer) {
if (this.length === 0) return null;
if (this.length === 1) return this[0];
comparer = (comparer || Math.max);
var v = this[0];
for (var i = 1; i < this.length; i++) {
v = comparer(this[i], v);
}
return v;
}
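For example (my own sketch, assuming the methods above), you can pass a custom comparer; this one keeps whichever value is closer to zero:
var values = [4, -1, 7, -9];
console.log(values.min());        // -9, default comparer is Math.min
console.log(values.min(function (a, b) {
  return Math.abs(a) < Math.abs(b) ? a : b;   // custom comparer by magnitude
}));                              // -1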
let array = [267, 306, 108]
let longest = Math.max(...array);
I thought I'd share my simple and easy to understand solution.
For the min:
var arr = [3, 4, 12, 1, 0, 5];
var min = arr[0];
for (var k = 1; k < arr.length; k++) {
if (arr[k] < min) {
min = arr[k];
}
}
console.log("Min is: " + min);
And for the max:
var arr = [3, 4, 12, 1, 0, 5];
var max = arr[0];
for (var k = 1; k < arr.length; k++) {
if (arr[k] > max) {
max = arr[k];
}
}
console.log("Max is: " + max);
For big arrays (~10⁷ elements), Math.min and Math.max produce a RangeError (Maximum call stack size exceeded) in Node.js.
For big arrays, a quick & dirty solution is:
Array.prototype.min = function() {
var r = this[0];
this.forEach(function(v,i,a){if (v<r) r=v;});
return r;
};
For an array containing objects instead of numbers:
arr = [
{ name: 'a', value: 5 },
{ name: 'b', value: 3 },
{ name: 'c', value: 4 }
]
You can use reduce to get the element with the smallest value (min)
arr.reduce((a, b) => a.value < b.value ? a : b)
// { name: 'b', value: 3 }
or the largest value (max)
arr.reduce((a, b) => a.value > b.value ? a : b)
// { name: 'a', value: 5 }
Aside from using the Math functions max and min, another option is the built-in sort() function:
const nums = [12, 67, 58, 30].sort((x, y) => x - y)
let min_val = nums[0]
let max_val = nums[nums.length -1]
I had the same problem, I needed to obtain the minimum and maximum values of an array and, to my surprise, there were no built-in functions for arrays. After reading a lot, I decided to test the "top 3" solutions myself:
discrete solution: a FOR loop to check every element of the array against the current max and/or min value;
APPLY solution: sending the array to the Math.max and/or Math.min internal functions using apply(null,array);
REDUCE solution: recursing a check against every element of the array using reduce(function).
The test code was this:
function GetMaxDISCRETE(A)
{ var MaxX=A[0];
for (var X=0;X<A.length;X++)
if (MaxX<A[X])
MaxX=A[X];
return MaxX;
}
function GetMaxAPPLY(A)
{ return Math.max.apply(null,A);
}
function GetMaxREDUCE(A)
{ return A.reduce(function(p,c)
{ return p>c?p:c;
});
}
The array A was filled with 100,000 random integer numbers, and each function was executed 10,000 times on Mozilla Firefox 28.0 on an Intel Pentium 4 2.99 GHz desktop with Windows Vista. The times are in seconds, retrieved by the performance.now() function. The results were these, with 3 fractional digits and standard deviation:
Discrete solution: mean=0.161s, sd=0.078
APPLY solution: mean=3.571s, sd=0.487
REDUCE solution: mean=0.350s, sd=0.044
The REDUCE solution was 117% slower than the discrete solution. The APPLY solution was the worst, 2,118% slower than the discrete solution. Besides, as Peter observed, it doesn't work for large arrays (more than about 1,000,000 elements).
Also, to complete the tests, I tested this extended discrete code:
var MaxX=A[0],MinX=A[0];
for (var X=0;X<A.length;X++)
{ if (MaxX<A[X])
MaxX=A[X];
if (MinX>A[X])
MinX=A[X];
}
The timing: mean=0.218s, sd=0.094
So, it is 35% slower than the simple discrete solution, but it retrieves both the maximum and the minimum values at once (any other solution would take at least twice that to retrieve them). Since the OP needed both values, the discrete solution would be the best choice (even as two separate functions, one for calculating the maximum and another for calculating the minimum, they would outperform the second best, the REDUCE solution).
Iterate through, keeping track as you go.
var min = null;
var max = null;
for (var i = 0, len = arr.length; i < len; ++i)
{
var elem = arr[i];
if (min === null || min > elem) min = elem;
if (max === null || max < elem) max = elem;
}
alert( "min = " + min + ", max = " + max );
This will leave min/max null if there are no elements in the array. Will set min and max in one pass if the array has any elements.
You could also extend Array with a range method using the above to allow reuse and improve on readability. See a working fiddle at http://jsfiddle.net/9C9fU/
Array.prototype.range = function() {
var min = null,
max = null,
i, len;
for (i = 0, len = this.length; i < len; ++i)
{
var elem = this[i];
if (min === null || min > elem) min = elem;
if (max === null || max < elem) max = elem;
}
return { min: min, max: max }
};
Used as
var arr = [3, 9, 22, -7, 44, 18, 7, 9, 15];
var range = arr.range();
console.log(range.min);
console.log(range.max);
You can use the following function anywhere in your project:
function getMin(array){
return Math.min.apply(Math,array);
}
function getMax(array){
return Math.max.apply(Math,array);
}
And then you can call the functions passing the array:
var myArray = [1,2,3,4,5,6,7];
var maximo = getMax(myArray); //return the highest number
The following code works for me:
var valueList = [10,4,17,9,3];
var maxValue = valueList.reduce(function(a, b) { return Math.max(a, b); });
var minValue = valueList.reduce(function(a, b) { return Math.min(a, b); });
array.sort((a, b) => b - a)[0];
Gives you the maximum value in an array of numbers.
array.sort((a, b) => a - b)[0];
Gives you the minimum value in an array of numbers.
let array = [0,20,45,85,41,5,7,85,90,111];
let maximum = array.sort((a, b) => b - a)[0];
let minimum = array.sort((a, b) => a - b)[0];
console.log(minimum, maximum)
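Keep in mind that sort() sorts the array in place (and costs O(n log n)); if you need to preserve the original order, sort a copy instead, e.g.:
const array = [0, 20, 45, 85, 41, 5, 7, 85, 90, 111];
const maximum = [...array].sort((a, b) => b - a)[0]; // 111
const minimum = [...array].sort((a, b) => a - b)[0]; // 0
console.log(array); // original order preserved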
let arr=[20,8,29,76,7,21,9]
Math.max.apply( Math, arr ); // 76
Simple stuff, really.
var arr = [10,20,30,40];
arr.max = function() { return Math.max.apply(Math, this); }; //attach max funct
arr.min = function() { return Math.min.apply(Math, this); }; //attach min funct
alert("min: " + arr.min() + " max: " + arr.max());
Here's one way to get the maximum of a given property (here, ID) from an array of objects: create a copy (with slice), sort the copy in descending order by that property, and grab the first item.
var myArray = [
{"ID": 1, "Cost": 200},
{"ID": 2, "Cost": 1000},
{"ID": 3, "Cost": 50},
{"ID": 4, "Cost": 500}
]
maxsort = myArray.slice(0).sort(function(a, b) { return b.ID - a.ID })[0].ID;
