Max / min of large array in JS - javascript

I need to calculate the min/max of a large array. I know about Math.max.apply(), but on large arrays it fails with a stack overflow exception. Any simple solutions?

Sort the array using the sort() method with a numeric comparator (the underlying sort algorithm is implementation-dependent).
Since the array is then sorted in ascending order, the last element is the max:
var arr = [1,4,6,4, ...];
arr.sort((a, b) => a - b);
var max = arr[arr.length - 1];

Array.prototype.min = function() {
var r = this[0];
this.forEach(function(v,i,a){if (v<r) r=v;});
return r;
};
From JavaScript: min & max Array values?, where other solutions to this problem are discussed.
FYI: I just googled "max min large array" and found this as the first result...
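A matching max helper in the same style (not from the linked answer; it simply mirrors the min version above):
Array.prototype.max = function() {
var r = this[0];
this.forEach(function(v,i,a){if (v>r) r=v;});
return r;
};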

Why not just loop through the entire array?
var max = -Infinity, min = Infinity; // -Infinity/Infinity handle all-negative or all-positive arrays (Number.MIN_VALUE is the smallest positive number, not the most negative)
for (var i = 0, len=list.length; i < len; i++) {
if (list[i] > max) max = list[i];
if (list[i] < min) min = list[i];
}
Edit:
For max:
if (typeof Array.prototype.GetMax === "undefined") {
Array.prototype.GetMax = function() {
var max = -Infinity; // start below any real value (Number.MAX_VALUE would never be exceeded)
for (var i = 0, len=this.length; i < len; i++) {
if (this[i] > max) max = this[i];
}
return max;
}
}
For min:
if (typeof Array.prototype.GetMin === "undefined") {
Array.prototype.GetMin = function() {
var min = Infinity; // start above any real value
for (var i = 0, len=this.length; i < len; i++) {
if (this[i] < min) min = this[i];
}
return min;
}
}
For both:
if (typeof Array.prototype.GetMaxMin === "undefined") {
Array.prototype.GetMaxMin = function() {
var max = -Infinity, min = Infinity;
for (var i = 0, len=this.length; i < len; i++) {
if (this[i] > max) max = this[i];
if (this[i] < min) min = this[i];
}
return { Max: max, Min: min};
}
}
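For example, with the prototype methods above in place:
var result = [3, -7, 12, 0].GetMaxMin();
console.log(result.Max, result.Min); // 12 -7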

Should I assume you have thought of this:
var maxSoFar = -Infinity;
for (var i = 0; i < array.length ; ++i) {
if (array[i] > maxSoFar) {
maxSoFar = array[i];
}
... similar for minSoFar ...
}

try this
var arr = [];
for(var i=1000000;i>0;i--)
{
arr.push(i);
}
//we create a copy of the original array through arr.concat() since we do not want to change the original order of the array
//we pass a comparator to sort() because by default it sorts lexicographically instead of numerically, so "100" would come before "99"
var arrMaxMin = arr.concat().sort(function(a,b){return a-b});
arrMaxMin[0]; //min
arrMaxMin[arrMaxMin.length - 1]; //max

Why not slice your array into smaller arrays and then use Math.max.apply(Math, chunk) on each of them? Just remember to set the sub-arrays back to null once the desired max value is obtained, so the memory can be reclaimed.
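A rough sketch of that idea (the chunk size and helper name here are illustrative, not from the original answer):
function chunkedMax(arr) {
var chunkSize = 50000; // comfortably below typical engine argument limits
var max = -Infinity;
for (var i = 0; i < arr.length; i += chunkSize) {
var chunk = arr.slice(i, i + chunkSize);
max = Math.max(max, Math.max.apply(Math, chunk));
chunk = null; // release the sub-array so it can be garbage collected
}
return max;
}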

This is exactly what reduce is for:
function max(arr) {
if (arr.length === 0) {
return NaN // or whatever answer you want for an empty array, or throw an error.
}
return arr.reduce((a, b) => Math.max(a, b), -Infinity)
}
console.log(max([...new Array(100000).keys()]))
(note that [...new Array(100000).keys()] is just a fancy way in modern browsers to make a huge array of the numbers 0 to 99999. The max function itself will run in anything made in the last 20 years.)
You can also reduce it to this one-liner:
arr.reduce((cur, val, i) => i === 0 ? val : Math.max(cur, val), NaN)
here NaN is the value you get back if the array is empty
Or even
arr.reduce((a, b) => Math.max(a, b), -Infinity)
although this will return -Infinity for an empty array.
Finally, it may be tempting to just do:
arr.reduce(Math.max, -Infinity) //don't do this!!
but this won't work. This is because reduce calls its callback (here Math.max) with four arguments (accumulator, value, index, array), one of which is the original array, so a Math.max over those will always result in NaN.
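To see the failure concretely (a small illustration, not from the original answer):
[3, 1, 2].reduce(Math.max, -Infinity)
// the first step is effectively Math.max(-Infinity, 3, 0, [3, 1, 2]);
// the array argument coerces to NaN, so the accumulator becomes NaN
// and NaN then propagates through every remaining step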

Related

merge two sorted array into new array in sorted order and remove element(number) from original array.(JavaScript) [duplicate]

I have three sorted arrays like below
[{name:"a"}, {name:"b"}, {name:"m"}, {name:"x"}]
[{name:"a"}, {name:"e"}, {name:"i"}, {name:"o"}]
[{name:"g"}, {name:"h"}, {name:"m"}, {name:"n"}]
Those arrays are sorted based on name property of each object in Array. Here is the method I converted from Java to merge two sorted arrays
function mergeSorted(a, b) {
var answer = new Array(a.length + b.length), i = 0, j = 0, k = 0;
while (i < a.length && j < b.length) {
if (a[i].name < b[j].name) {
answer[k] = a[i];
i++;
}else {
answer[k] = b[j];
j++;
}
k++;
}
while (i < a.length) {
answer[k] = a[i];
i++;
k++;
}
while (j < b.length) {
answer[k] = b[j];
j++;
k++;
}
return answer;
}
Here is the working fiddle with two arrays http://jsfiddle.net/euRn5/. What is the best approach to achieve the same with n arrays? The thought I have in mind currently is to take them one by one and merge each with the previously merged result until the last one. Is this the best approach?
The most standard and understandable code, I believe:
function mergeArray(arr1, arr2) {
var new_array = [];
var i = 0,
j = 0;
while (new_array.length < arr1.length + arr2.length) {
// once one array is exhausted, keep taking from the other
if (j >= arr2.length || (i < arr1.length && arr1[i] < arr2[j])) {
new_array.push(arr1[i]);
i++;
} else {
new_array.push(arr2[j]);
j++;
}
}
return new_array;
}
Function call:
var merged_array = mergeArray([1,6,9,95], [2,7,10,11,14,18]);
Update:
Seeing as it is current_year this would now be:
const mergeAll = (...arrays) => arrays.reduce(mergeSorted);
Original:
If you're feeling functional this is a perfect place to use reduce.
var mergeAll = function(){
return Array.prototype.slice.call(arguments).reduce(mergeSorted);
};
example:
var a = [{name:"a"}, {name:"b"}, {name:"m"}, {name:"x"}];
var b = [{name:"a"}, {name:"e"}, {name:"i"}, {name:"o"}];
var c = [{name:"g"}, {name:"h"}, {name:"m"}, {name:"n"}];
console.log(mergeAll(a,b,c).map(function(x){return x.name;}));
jsfiddle: http://jsfiddle.net/FeT6m/
The native implementations are not always the fastest (as you may have noticed) and have, historically, been somewhat sluggish, due to extensive error checking. That being said, there may be performance enhancements in the future, due to more robust integration with the hardware or routines specifically built to optimize certain tasks. If you write your own code, your application won't be able to take advantage of these boosts in performance once they're implemented. It's up to you to decide where the advantages lie and what the risks are.
At any rate, I've written a prettier version of your optimized code for funsies:
function mergeSorted(a,b){
var alen = a.length
, blen = b.length
, i, j, k = j = i = 0
, answer = new Array(alen + blen)
;//var
while(i < alen && j < blen)
answer[k++] = a[i].name < b[j].name ? a[i++] : b[j++];
while(i < alen) answer[k++] = a[i++];
while(j < blen) answer[k++] = b[j++];
return answer;
}
Faster, merges in only 1 pass, with more flexibility (keepDuplicates, custom comparator):
/* mergeSortedArrays(arrays[, keepDuplicates[, comparator[, thisArg]]])
Merges multiple sorted arrays into a new sorted array.
Arguments:
- arrays: array of sorted arrays to be merged
- keepDuplicates (optional): (true/false) whether to keep duplicate values
Default: false
- comparator (optional): function used to compare values
Default: sort numbers in ascending order
Example comparator: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/sort
- thisArg (optional): comparator is bound to thisArg when invoked
Returns: a new sorted array containing all the values from the arrays
*/
function mergeSortedArrays(arrays, keepDuplicates, comparator, thisArg) {
// Coerce to boolean to speed up testing in some javascript engines:
keepDuplicates = !!keepDuplicates;
// By default, sort numbers in ascending order:
if(!comparator) comparator = function(a, b) { return a - b; };
var nb = arrays.length, // Number of arrays to be merged
iter = new Array(nb), // Current position of iteration of each array
next = [], // Keep each array sorted by the value of their next element
length = 0; // The combined length of all arrays
// Populate iter and next:
for(var i = 0, arr; i < nb; i++) {
arr = arrays[i];
iter[i] = 0;
if(arr.length > 0) {
insertNextIndex(next, i, arr[0], comparator, thisArg);
}
length += arr.length;
}
// Insert index of array into next:
function insertNextIndex(next, index, val, comparator, thisArg) {
var i = next.length;
while(i--) { // Reverse loop...
var j = next[i];
if(comparator.call(thisArg, arrays[j][iter[j]], val) >= 0) { // ...until we find a greater value
break;
}
}
next.splice(i + 1, 0, index);
}
var merged = keepDuplicates ? new Array(length) : [],
k = 0, // Iterate over merged
min, val, lastVal;
// First iteration to get a value for lastVal (for duplicate checks):
if(!keepDuplicates && next.length > 0) {
min = next.pop();
arr = arrays[min];
i = iter[min]++;
val = arr[i];
merged[k++] = val;
lastVal = val;
if(++i < arr.length) { // If available, insert next value in next:
insertNextIndex(next, min, arr[i], comparator, thisArg);
}
}
// Merge multiple arrays:
while(next.length > 1) { // While there are still multiple arrays to be merged
min = next.pop();
arr = arrays[min];
i = iter[min]++;
val = arr[i];
if(keepDuplicates || comparator.call(thisArg, lastVal, val) !== 0) {
merged[k++] = val;
lastVal = val;
}
if(++i < arr.length) { // If available, insert next value in next:
insertNextIndex(next, min, arr[i], comparator, thisArg);
}
}
// When only 1 array remains with unmerged values, use a faster loop:
if(next.length > 0) {
arr = arrays[next[0]];
i = iter[next[0]];
length = arr.length;
while(i < length) { // To the end
val = arr[i++];
if(keepDuplicates || comparator.call(thisArg, lastVal, val) !== 0) {
merged[k++] = val;
lastVal = val;
}
}
}
return merged;
}
Merging in 1 pass eliminates the creation of intermediate arrays which takes time and memory. Also, the number of comparisons is nicely reduced by keeping a sorted list of the next element from each array (see the next array). And when array sizes are known, they are pre-allocated to prevent dynamic re-allocations (though that will depend on your javascript engine).
For your case, I would call it like this:
mergeSortedArrays(arrays, true, function(a, b) {
return a.name < b.name ? -1 : 1;
});
Note: If you have a large number of arrays you may benefit from using a binary search instead of the linear search in insertNextIndex(). Or from using a Binary Heap for next.
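For reference, a binary-search variant of insertNextIndex might look like the sketch below (not from the original answer; it is meant to replace insertNextIndex inside mergeSortedArrays and relies on the same enclosing arrays and iter variables):
function insertNextIndexBinary(next, index, val, comparator, thisArg) {
// next is kept sorted in descending order of each array's next value,
// so next.pop() always yields the array whose next value is smallest
var lo = 0, hi = next.length;
while (lo < hi) {
var mid = (lo + hi) >> 1;
var j = next[mid];
if (comparator.call(thisArg, arrays[j][iter[j]], val) >= 0) lo = mid + 1;
else hi = mid;
}
next.splice(lo, 0, index);
}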
Edited to reflect that Exception's original solution, extended by calling it like mergeSorted(mergeSorted(a,b),c), is faster than my solution here.
JavaScript's built-in sort turns out not to be fast enough to simply concatenate all the arrays together and sort the entire thing in one go, even though JavaScript is not a great language for re-implementing things that should be done at a lower level.
var a1 = [{name:"a"}, {name:"b"}, {name:"m"}, {name:"x"}]
var a2 = [{name:"a"}, {name:"e"}, {name:"i"}, {name:"o"}]
var a3 = [{name:"g"}, {name:"h"}, {name:"m"}, {name:"n"}]
a1.concat(a2,a3).sort(function(a,b){return (a.name>b.name)-(a.name<b.name)})
// [{name:"a"}, {name:"a"}, {name:"b"}, {name:"e"}, {name:"g"}, {name:"h"}, {name:"i"}, {name:"m"}, {name:"m"}, {name:"n"}, {name:"o"}, {name:"x"}]
I like thetheChad's answer but I prefer something more readable
let mergeSorted = function(a, b) {
let alen = a.length;
let blen = b.length;
let i = 0, j = 0, k = 0;
let sortedNums = new Array(alen + blen);
while (i < alen && j < blen) {
if (a[i] < b[j]) {
sortedNums[k++] = a[i++];
} else {
sortedNums[k++] = b[j++];
}
}
while (i < alen) {
sortedNums[k++] = a[i++];
}
while (j < blen) {
sortedNums[k++] = b[j++];
}
return sortedNums;
};
Basically, mergeSorted allocates memory for a new array the size of the two input arrays combined, then populates it with the items in order. If one of the arrays is longer, the trailing while loops copy the remaining items over. It should pass the following test suite.
// test
console.log(mergeSorted([1,6,9,95], [2,7,10,11,14,18]));
console.log(mergeSorted([2,3], [1]))
console.log(mergeSorted([0,0], [0,0]))
console.log(mergeSorted([1,3], [2]))
console.log(mergeSorted([1,2], [3,4]))

Max/Min/Avg returning array in that order

Can anyone tell me what I'm doing wrong? I am printing the array, just not in the right order. How do I call it differently? This is JavaScript, by the way.
Given an array x (e.g. [1, 5, 10, -2]), create an algorithm (set of instructions) that returns an array with the max, min, and average values ([max, min, avg]); e.g. [0,2,4] should return [4,0,2].
My code:
function maxMinAvg(arr) {
var newarr= [];
var max = arr[0];
var min = arr[0];
sum = sum + arr[0];
var avg = sum/arr.length;
for (var i = 1; i < arr.length; i++) {
if (arr[i] > max) {
max = arr[i];
}
if (arr[i] < min) {
min = arr[i];
}
sum = sum + arr[i];
}
newarr.push([max[i],min[i],avg[i]]);
}
return newarr;
The function should look like this
function maxMinAvg(arr) {
var max = arr[0];
var min = arr[0];
var sum = arr[0]; //changed from original post
for (var i = 1; i < arr.length; i++) {
if (arr[i] > max) {
max = arr[i];
}
if (arr[i] < min) {
min = arr[i];
}
sum = sum + arr[i];
}
return [max, min, sum/arr.length]; //changed from original post
}
Not sure if this works, but you could use arr.sort() to sort the array so you have your min and max, then just add all the values and find the mean.
function maxMinMean(arr) {
arr.sort(function(a, b) { return a - b; }); // numeric sort; the default sort is lexicographic
var sum = 0;
for (let i = 0; i < arr.length; i++) {
sum = sum + +arr[i];
}
return [+arr[arr.length - 1], +arr[0], sum / arr.length];
}
This assumes you are trying to do this with array of numbers (It will also work if the numbers in the array are stored as string)
Have not tested it but should work fine.
try this...
function maxMinAvg(arr) {
var max = arr[0];
var min = arr[0];
var sum = 0 ;
arr.forEach(function(value){
if(value > max)
max = value;
if(value < min)
min = value;
sum +=value;
})
var avg = sum/arr.length;
return [max,min,avg];
}
You already have the values min, max and avg (you have the sum so you can calculate it). Just a create a new array (not your original arr), add those values (min, max and avg) to it and return it.
Your code is not returning the values in the wrong order; it's returning the original array instead of an array with the max, min and average.
The reason that it looks like the items are in the wrong order is that you have picked test data that happens to resemble the result you expect. If you pick data where the input is not so similar to the output, for example [1,2,3,4,100], you will see that you get the same array back.
Just create a new array with the values and return it:
return [ max, min, avg ];
Edit:
With the new code that you posted I see some problems:
It calculates the average before it has the sum of all items; it just uses the first item.
It doesn't initialise the sum variable, so it will contain undefined. When adding to it, the result will continue to be undefined.
The sum variable is not local, so it could interfere with other code.
It pushes an array into the array, so you get an array of arrays as the result instead of a flat array.
It uses the min, max and avg variables as if they were arrays when it puts them in the result.
Fixing that, you get:
function maxMinAvg(arr) {
var max = arr[0];
var min = arr[0];
var sum = arr[0];
for (var i = 1; i < arr.length; i++) {
if (arr[i] > max) {
max = arr[i];
}
if (arr[i] < min) {
min = arr[i];
}
sum = sum + arr[i];
}
var avg = sum / arr.length;
return [max, min, avg];
}
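For the example input from the question this returns the max, min and average in that order:
console.log(maxMinAvg([1, 5, 10, -2])); // [10, -2, 3.5]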
function maxMinMean(arr: number[]) {
if (!arr.length) return [NaN,NaN,NaN];
arr.sort((a,b)=>a-b);
return [
arr.slice(-1)[0],
arr[0],
arr.reduce((sum, x) => sum + x,0) / arr.length,
];
}
If you didn't want to modify the original array
function maxMinMean(arrIn: number[]) {
if (!arrIn.length) return [NaN,NaN,NaN];
var arr =[...arrIn].sort((a,b)=>a-b);
return [
arr.slice(-1)[0],
arr[0],
arr.reduce((sum, x) => sum + x,0) / arr.length,
];
}
If you wanted fixed point strings
function maxMinMean(arr: number[]) {
if (!arr.length) return [NaN,NaN,NaN];
arr.sort((a,b)=>a-b);
return [
arr.slice(-1)[0],
arr[0],
arr.reduce((sum, x) => sum + x,0) / arr.length,
].map(f=>f.toFixed(2));
}
A little more elegant solution, via a library:
import {map, pipe} from 'iter-ops';
import {numberStats} from './extras';
const input = [1, 5, 10, -2];
const i = pipe(input, numberStats(), map(a => [a.max, a.min, a.avg]));
console.log(...i); //=> [ 10, -2, 3.5 ]
The above solution is based on iter-ops library, plus custom numberStats operator, from the extras.


Partial sort in JavaScript

Is there any built-in JavaScript function to do a partial sort? If not, what is a good way to implement it?
Given an unsorted array of N elements, I would like to find K elements that are minimal with respect to some weighting function. K is much smaller than N, so it would be inefficient to sort the whole array and take the first K elements.
I would be happy even if there was something non-standard, browser-dependent. I could still fallback to the custom JavaScript implementation.
PS: This is my current custom implementation (without taking a weighting function into account, just sorting the elements as they are for simplicity):
function bisect(items, x, lo, hi) {
var mid;
if (typeof(lo) == 'undefined') lo = 0;
if (typeof(hi) == 'undefined') hi = items.length;
while (lo < hi) {
mid = Math.floor((lo + hi) / 2);
if (x < items[mid]) hi = mid;
else lo = mid + 1;
}
return lo;
}
function insort(items, x) {
items.splice(bisect(items, x), 0, x);
}
function partialSort(items, k) {
var smallest = [];
for (var i = 0, len = items.length; i < len; ++i) {
var item = items[i];
if (smallest.length < k || item < smallest[smallest.length - 1]) {
insort(smallest, item);
if (smallest.length > k)
smallest.splice(k, 1);
}
}
return smallest;
}
console.log(partialSort([5, 4, 3, 2, 1, 6, 7, 8, 1, 9], 3));
The algorithm walks through the given array one single time, keeping track of a sorted list of the k smallest items so far, using binary search to insert new elements.
Please post alternative solutions if you think they might be faster or more elegant. Timings are very welcome.
No. There's only the full array sort, so you will need to use your own implementation.
Little improvement on your code (I had thought of exactly the same algorithm :-)):
function partialSort(items, k) {
var smallest = items.slice(0, k).sort(),
max = smallest[k-1];
for (var i = k, len = items.length; i < len; ++i) {
var item = items[i];
if (item < max) {
insort(smallest, item);
smallest.length = k;
max = smallest[k-1];
}
}
return smallest;
}
(Even seems to be a little faster, I guess due to caching the max variable)
For relatively small k it can be worth it to implement a Max Heap (for lack of a native one in JavaScript):
Create a Max Heap of the first k values
For each remaining value:
If it is less than the root of the heap, replace the root with this value. Otherwise ignore the value. Note that the heap never changes in size.
Finally sort the heap and return it.
This is in fact an improvement on another idea using a Min Heap, but that one needs to heapify the whole array, and so will not run as fast. After heapifying the whole array, you just extract a value from that heap k times, and return those values.
I have added both solutions to Bergi's jsperf.com performance tests (copied to jsbench.me). For that particular test (5000 array values, k = 10), the Max Heap solution is faster. But this advantage will shrink as k is increased.
Here is the code for the Max Heap solution:
// A few Heap-functions that operate on an array
function maxSiftDown(arr, i=0, value=arr[i]) {
if (i >= arr.length) return;
while (true) {
var j = i*2+1;
if (j+1 < arr.length && arr[j] < arr[j+1]) j++;
if (j >= arr.length || value >= arr[j]) break;
arr[i] = arr[j];
i = j;
}
arr[i] = value;
}
function maxHeapify(arr) {
for (var i = arr.length>>1; i--; ) maxSiftDown(arr, i);
return arr;
}
// The main algorithm
function partialSortWithMaxHeap(items, k) {
var heap = maxHeapify(items.slice(0, k));
for (var i = k, len = items.length; i < len; ++i) {
var item = items[i];
if (item < heap[0]) maxSiftDown(heap, 0, item);
}
return heap.sort((a,b) => a-b);
}
// Sample data & call
var arr = Array.from({length:5000}, () => Math.floor(Math.random() * 1e5));
console.log(partialSortWithMaxHeap(arr, 10));
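For comparison, here is a rough sketch of the Min Heap variant described above (heapify everything, then extract k values). This is not from the original answer; minSiftDown is an assumed helper mirroring maxSiftDown with the comparisons reversed:
// Sift-down for a min-heap, restricted to the first `size` slots of arr
function minSiftDown(arr, i=0, size=arr.length, value=arr[i]) {
while (true) {
var j = i*2+1;
if (j+1 < size && arr[j+1] < arr[j]) j++;
if (j >= size || value <= arr[j]) break;
arr[i] = arr[j];
i = j;
}
arr[i] = value;
}
function partialSortWithMinHeap(items, k) {
var heap = items.slice();
for (var i = heap.length>>1; i--; ) minSiftDown(heap, i); // heapify the whole array
var result = [], size = heap.length;
for (var n = 0; n < k && size > 0; n++) { // extract the k smallest values
result.push(heap[0]);
heap[0] = heap[--size];
minSiftDown(heap, 0, size);
}
return result;
}
console.log(partialSortWithMinHeap([5, 4, 3, 2, 1, 6, 7, 8, 1, 9], 3)); // [1, 1, 2]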
There's no native partial sort function. The closest thing to what you want is Array.filter.
function isSmallEnough(element, index, array) {
return (element <= 10);
}
var filtered = [12, 5, 8, 130, 44].filter(isSmallEnough);
// filtered is [5, 8]
The example was borrowed (and slightly modified) from the above link.
I made a version that works with objects, like Array.sort(f):
function partialSort(items, k,f) {
function bisect(items, x, lo, hi) {
var mid;
if (typeof(lo) == 'undefined') lo = 0;
if (typeof(hi) == 'undefined') hi = items.length;
while (lo < hi) {
mid = Math.floor((lo + hi) / 2);
if (0>f(x,items[mid])) hi = mid;
else lo = mid + 1;
}
return lo;
}
function insort(items, x) {
items.splice(bisect(items, x), 0, x);
}
var smallest = items.slice(0, k).sort(f),
max = smallest[k-1];
for (var i = k, len = items.length; i < len; ++i) {
var item = items[i];
if (0>f(item,max)) {
insort(smallest, item);
smallest.length = k;
max = smallest[k-1];
}
}
return smallest;
}
// [ { e: 1 }, { e: 1 }, { e: 2 } ]
console.log(partialSort([{e:4},{e:6},{e:1},{e:8},{e:3},{e:1},{e:6},{e:2}],3,(a,b)=>a.e-b.e))
console.log()
In the code block below, the nisetamafibo function keeps an array of the smallest items found so far. The array is sorted and truncated to length K after a certain number of new items have been added, where that number is taken from the Fibonacci sequence, so that it is first 1, next 1, then 2, then 3, then 5, and so on. The nisetamadouble method instead doubles the interval after which the array is sorted, so that it is first 1, then 2, then 4, and so on. (I also tried sorting the array after a fixed number of new items, like 10, had been added, but it was slower. And I also tried initializing the array at the start of the function by taking a fixed number of the first items of the input and sorting them, but initializing the array with 1 or 0 items was the fastest, so I removed the initialization step.)
The nisetamainsertion function uses insertion sort to sort the items. It's very slow at high K-values because insertion sort has quadratic time complexity, but it's fast at K-values of around 10 to 100 or lower, because insertion sort is fast for short arrays. The nisetamachoose method chooses nisetamainsertion for K-values of 100 or less but nisetamafibo otherwise. (In the Java JDK, the file DualPivotQuicksort.java uses insertion sort instead of quicksort for arrays with less than 47 items. A presentation about sorting algorithms in R said that "fastest for < 30 items is insert sort".)
I also tried to implement the quickselect algorithm with and without recursion. The version that didn't use recursion was a bit faster, but both versions were still slow compared to other methods especially in cases where N was high and K was low.
On another Stack Exchange site, someone came up with new variants of the Floyd-Rivest algorithm which were faster than the regular Floyd-Rivest algorithm in C: https://softwareengineering.stackexchange.com/questions/284767/kth-selection-routine-floyd-algorithm-489. I tried to implement the variant called select7MO3 in JavaScript, but it ended up being one of the slowest options in my benchmark.
function nisetamafibo(a,k=1){
let found=[],len=a.length,unsorted=0,biggestfound=Infinity,nextsort=1,prevsort=1,oldsort
for(let i=0;i<len;i++){
if(a[i]<biggestfound||i<k){
found.push(a[i])
if(++unsorted==nextsort){
found.sort((l,r)=>l<r?-1:l>r?1:0)
found=found.slice(0,k)
biggestfound=found[found.length-1]
oldsort=nextsort;nextsort+=prevsort;prevsort=oldsort
unsorted=0
}
}
}
found.sort((l,r)=>l<r?-1:l>r?1:0)
return found.slice(0,k)
}
function nisetamadouble(a,k=1){
let found=[],len=a.length,unsorted=0,biggestfound=Infinity,nextsort=1
for(let i=0;i<len;i++){
if(a[i]<biggestfound||i<k){
found.push(a[i])
if(++unsorted==nextsort){
found.sort((l,r)=>l<r?-1:l>r?1:0)
found=found.slice(0,k)
biggestfound=found[found.length-1]
nextsort*=2
unsorted=0
}
}
}
found.sort((l,r)=>l<r?-1:l>r?1:0)
return found.slice(0,k)
}
function nisetamainsertion(a,k=1){
let found=a.slice(0,k),l=a.length
found.sort((l,r)=>l<r?-1:l>r?1:0)
let biggestfound=found[k-1]
for(let i=0;i<l;i++){
let v=a[i]
if(v<biggestfound){
let insertat=k-1
for(let j=0;j<k-1;j++)if(v<found[j]||j==i){insertat=j;break}
for(let j=k-1;j>insertat;j--)found[j]=found[j-1]
found[insertat]=v
biggestfound=found[k-1]
}
}
return found
}
function nisetamachoose(a,k=1){
return k<=100?nisetamainsertion(a,k):nisetamafibo(a,k)
}
function quickselect(a,k,l,r){
l=l||0
r=r||a.length-1
while(true){
let pivot=a[r],pos=l
for(let i=l;i<=r;i++)if(a[i]<pivot){let temp=a[i];a[i]=a[pos];a[pos++]=temp}
let temp=a[r];a[r]=a[pos];a[pos]=temp
if(pos==k)break
pos<k?l=pos+1:r=pos-1
}
}
function quickselectrecursive(a,k,l,r){
l=l||0
r=r||a.length-1
let pivot=a[r],pos=l
for(let i=l;i<=r;i++)if(a[i]<pivot){let temp=a[i];a[i]=a[pos];a[pos++]=temp}
let temp=a[r];a[r]=a[pos];a[pos]=temp
if(pos<k)quickselectrecursive(a,k,pos+1,r)
if(pos>k)quickselectrecursive(a,k,l,pos-1)
}
function sortslice(a,k){
a.sort((l,r)=>l<r?-1:l>r?1:0)
return a.slice(0,k)
}
// https://softwareengineering.stackexchange.com/questions/284767/kth-selection-routine-floyd-algorithm-489
function select7MO3(a,k){
let l=0,i,r=a.length-1,rr=r,ll=l
while(r>l){
if(a[k]<a[l]){let t=a[l];a[l]=a[k];a[k]=t}
if(a[r]<a[l]){let t=a[l];a[l]=a[r];a[r]=t}
if(a[r]<a[k]){let t=a[k];a[k]=a[r];a[r]=t}
if((r-l)>k){
let n=r-l+1
i=k-l+1
let s=(2*n/3)
let div=i-n
let sd=(n*s*(n-s)/n)*(div<0?-1:div>0?1:0)
ll=Math.max(l,k-i*s/n+sd)
rr=Math.min(r,k+(n-i)*s/n+sd)
}
let pivot=a[k]
i=l
let j=r
let t=a[l];a[l]=a[k];a[k]=t
if(a[r]>pivot){t=a[r];a[r]=a[l];a[l]=t}
while(i<j){
let t=a[i];a[i]=a[j];a[j]=t
i++
j--
while(a[i]<pivot)i++
while(a[j]>pivot)j--
}
if(a[l]==pivot){i--;let t=a[l];a[l]=a[j];a[j]=t}
else{j++;let t=a[j];a[j]=a[r];a[r]=t}
if(j<=k)l=j+1
else if(k<=j)r=j-1
}
let out=a.slice(0,k)
out.sort((l,r)=>l<r?-1:l>r?1:0)
return out
}
// OP and Bergi
function bisect(items, x, lo, hi) {
var mid;
if (typeof(lo) == 'undefined') lo = 0;
if (typeof(hi) == 'undefined') hi = items.length;
while (lo < hi) {
mid = Math.floor((lo + hi) / 2);
if (x < items[mid]) hi = mid;
else lo = mid + 1;
}
return lo;
}
function insort(items, x) {
items.splice(bisect(items, x), 0, x);
}
function OP(items, k) {
var smallest = [];
for (var i = 0, len = items.length; i < len; ++i) {
var item = items[i];
if (smallest.length < k || item < smallest[smallest.length - 1]) {
insort(smallest, item);
if (smallest.length > k)
smallest.splice(k, 1);
}
}
return smallest;
}
function OP_Bergi(items, k) {
var smallest = items.slice(0, k).sort(),
max = smallest[k-1];
for (var i = k, len = items.length; i < len; ++i) {
var item = items[i];
if (item < max) {
insort(smallest, item);
smallest.length = k;
max = smallest[k-1];
}
}
return smallest;
}
// trincot
function maxSiftDown(arr, i=0, value=arr[i]) {
if (i >= arr.length) return;
while (true) {
var j = i*2+1;
if (j+1 < arr.length && arr[j] < arr[j+1]) j++;
if (j >= arr.length || value >= arr[j]) break;
arr[i] = arr[j];
i = j;
}
arr[i] = value;
}
function maxHeapify(arr) {
for (var i = arr.length>>1; i--; ) maxSiftDown(arr, i);
return arr;
}
function trincot_MaxHeap(items, k) {
var heap = maxHeapify(items.slice(0, k));
for (var i = k, len = items.length; i < len; ++i) {
var item = items[i];
if (item < heap[0]) maxSiftDown(heap, 0, item);
}
return heap.sort((a,b) => a-b);
}
// DiazJara
function DiazJara(items, k,f) {
function bisect(items, x, lo, hi) {
var mid;
if (typeof(lo) == 'undefined') lo = 0;
if (typeof(hi) == 'undefined') hi = items.length;
while (lo < hi) {
mid = Math.floor((lo + hi) / 2);
if (0>f(x,items[mid])) hi = mid;
else lo = mid + 1;
}
return lo;
}
function insort(items, x) {
items.splice(bisect(items, x), 0, x);
}
var smallest = items.slice(0, k).sort(f),
max = smallest[k-1];
for (var i = k, len = items.length; i < len; ++i) {
var item = items[i];
if (0>f(item,max)) {
insort(smallest, item);
smallest.length = k;
max = smallest[k-1];
}
}
return smallest;
}
// benchmark
for(let nk of'31 33 40 42 44 51 53 55 60 62 64 66 71 73 75'.split(' ')){
let n=parseInt(nk[0]),k0=parseInt(nk[1]),k=10**k0
let opt=[
'OP(a,k)',
'OP_Bergi(a,k)',
'trincot_MaxHeap(a,k)',
'DiazJara(a,k,(l,r)=>l-r)',
'DiazJara(a,k,(l,r)=>l<r?-1:l>r?1:0)',
'nisetamafibo(a,k)',
'nisetamadouble(a,k)',
// 'nisetamainsertion(a,k)', // this would've taken too long to run at K=1e6
'nisetamachoose(a,k)',
'quickselect(a,k);a=a.slice(0,k);a.sort((l,r)=>l<r?-1:l>r?1:0)',
'quickselectrecursive(a,k);a=a.slice(0,k);a.sort((l,r)=>l<r?-1:l>r?1:0)',
'select7MO3(a,k);a=a.slice(0,k);a.sort((l,r)=>l<r?-1:l>r?1:0)',
'sortslice(a,k)'
]
let ord=Array.from({length:100},()=>Array(opt.length).fill().map((_,i)=>i)).flat()
ord.sort(()=>Math.random()-.5)
for(let x of ord){
let o=opt[x]
let a=Array.from({length:10**n},()=>Math.random())
let t1=process.hrtime.bigint()
eval(o)
let t2=process.hrtime.bigint()-t1
console.log(n+' '+k0+' '+o+' '+t2)
}
}
The benchmark reports the median time of a hundred runs in ms and the average rank of each method (where, for example, 7/4 means that N was 1e7 and K was 1e4). A few observations from the results:
For most combinations of N and K, Bergi's modified version of the OP's code was actually slower than the OP's code, even though the OP's code was extremely slow in the case where N and K were both 1e6.
(l,r)=>l<r?-1:l>r?1:0 is faster than (l,r)=>l-r as you can see by comparing the two versions of Díaz-Jara's method above.
Here are also versions of my nisetamadouble and nisetamainsertion methods which return the indexes of the smallest items in addition to the values:
let a=Array.from({length:1e5},()=>Math.random())
let k=10
let l=a.length
let biggestfound=Infinity,foundind=[],foundval=[]
for(let i=0;i<l;i++){
let v=a[i]
if(i<k||v<biggestfound){
let insertat=k-1
for(let j=0;j<k-1;j++)if(v<foundval[j]||j==i){insertat=j;break}
for(let j=k-1;j>insertat;j--){foundind[j]=foundind[j-1];foundval[j]=foundval[j-1]}
foundind[insertat]=i
foundval[insertat]=v
biggestfound=foundval[k-1]
}
}
console.log(foundind)
console.log(foundval)
function nisetama(a,k=1){
let found=[],len=a.length,unsorted=0,biggestfound=Infinity,nextsort=1
for(let i=0;i<len;i++){
if(a[i]<biggestfound||i<k){
found.push(a[i])
if(++unsorted==nextsort){
found.sort((l,r)=>l<r?-1:l>r?1:0)
found=found.slice(0,k)
biggestfound=found[found.length-1]
nextsort*=2
unsorted=0
}
}
}
found.sort((l,r)=>l<r?-1:l>r?1:0)
return found.slice(0,k)
}
let a2=a
biggestfound=nisetama(a2,k)[k-1],foundind=[]
for(let i=0;i<l;i++)if(a[i]<=biggestfound)foundind.push(i)
foundind.sort((l,r)=>a[l]<a[r]?-1:a[l]>a[r]?1:0)
foundind=foundind.slice(0,k)
console.log(foundind)
console.log(foundind.map(i=>a[i]))

Get max and min value from array in JavaScript

I am creating the following array from data attributes and I need to be able to grab the highest and lowest value from it so I can pass it to another function later on.
var allProducts = $(products).children("li");
prices = []
$(allProducts).each(function () {
var price = parseFloat($(this).data('price'));
prices[price] = price;
});
console.log(prices[0]); // this returns undefined
My list items look like this (I have cut down for readability):
<li data-price="29.97">Product</li>
<li data-price="31.00">Product</li>
<li data-price="19.38">Product</li>
<li data-price="20.00">Product</li>
A quick console.log on prices shows me my array which appears to be sorted so I could grab the first and last element I assume, but presently the names and values in the array are the same so whenever I try and do a prices[0], I get undefined
[]
19.38 19.38
20.00 20.00
29.97 29.97
31.00 31.00
Got a feeling this is a stupidly easy question, so please be kind :)
To get min/max value in array, you can use:
var _array = [1,3,2];
Math.max.apply(Math,_array); // 3
Math.min.apply(Math,_array); // 1
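In modern (ES2015+) engines the spread syntax does the same thing, though it runs into the same argument-count limits as apply() on very large arrays:
Math.max(..._array); // 3
Math.min(..._array); // 1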
Why not store it as an array of prices instead of an object?
prices = []
$(allProducts).each(function () {
var price = parseFloat($(this).data('price'));
prices.push(price);
});
prices.sort(function(a, b) { return a - b }); // this is the magic line which sorts the array numerically
That way you can just
prices[0]; // cheapest
prices[prices.length - 1]; // most expensive
Note that you can do shift() and pop() to get min and max price respectively, but it will take off the price from the array.
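For example (note that both calls mutate prices):
var cheapest = prices.shift(); // removes and returns the first (lowest) price
var mostExpensive = prices.pop(); // removes and returns the last (highest) price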
An even better alternative is to use Sergei's solution, using Math.max and Math.min respectively.
EDIT:
I realized that this would be wrong if you have something like [11.5, 3.1, 3.5, 3.7], because the default sort compares the values as strings, so "11.5" would come before the "3.x" values in dictionary order. You need to pass in a custom sort function to make sure they are indeed compared as numbers:
prices.sort(function(a, b) { return a - b });
Instead of .each, another (perhaps more concise) approach to getting all those prices might be:
var prices = $(products).children("li").map(function() {
return $(this).data("price");
}).get();
Additionally, you may want to consider filtering the array to get rid of empty or non-numeric values, in case any exist:
prices = prices.filter(function(n){ return(!isNaN(parseFloat(n))) });
then use Sergey's solution above:
var max = Math.max.apply(Math,prices);
var min = Math.min.apply(Math,prices);
if you have "scattered" (not inside an array) values you can use:
var max_value = Math.max(val1, val2, val3, val4, val5);
arr = [9,4,2,93,6,2,4,61,1];
ArrMax = Math.max.apply(Math, arr);
use this and it works on both the static arrays and dynamically generated arrays.
var array = [12,2,23,324,23,123,4,23,132,23];
var getMaxValue = Math.max.apply(Math, array );
I had this issue when trying to find the max value with the code below
$('#myTabs').find('li.active').prevAll().andSelf().each(function () {
newGetWidthOfEachTab.push(parseInt($(this).outerWidth()));
});
for (var i = 0; i < newGetWidthOfEachTab.length; i++) {
newWidthOfEachTabTotal += newGetWidthOfEachTab[i];
newGetWidthOfEachTabArr.push(parseInt(newWidthOfEachTabTotal));
}
getMaxValue = Math.max.apply(Math, array);
I was getting NaN when I used
var max_value = Math.max(12, 21, 23, 2323, 23);
with my code
Find largest and smallest number in an array with lodash.
var array = [1, 3, 2];
var func = _.over(Math.max, Math.min);
var [max, min] = func(...array);
// => [3, 1]
console.log(max);
console.log(min);
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.11/lodash.js"></script>
If there is a requirement to find the solution without using the Math library or sorting logic, the solutions below might help.
To find the max value in javascript,
var max = -Infinity;
for (var i = 0; i < arr.length; ++i) {
if (arr[i] < max) continue;
if (arr[i] > max) {
max = arr[i];
}
}
return max;
To find the min value,
var min = +Infinity;
for (var i = 0; i < arr.length; ++i) {
if (arr[i] > min) continue;
if (arr[i] < min) {
min = arr[i];
}
}
return min;
To find all the occurrences of max values, (alter the comparisons to get all min values)
var max = -Infinity, result = [];
for (var i = 0; i < arr.length; ++i) {
if (arr[i] < max) continue;
if (arr[i] > max) {
result = [];
max = arr[i];
}
result.push(max);
}
return result; // return result.length to return the number of occurrences of max values.
