I have three sorted arrays like below
[{name:"a"}, {name:"b"}, {name:"m"}, {name:"x"}]
[{name:"a"}, {name:"e"}, {name:"i"}, {name:"o"}]
[{name:"g"}, {name:"h"}, {name:"m"}, {name:"n"}]
Those arrays are sorted based on the name property of each object in the array. Here is the method I converted from Java to merge two sorted arrays:
function mergeSorted(a, b) {
var answer = new Array(a.length + b.length), i = 0, j = 0, k = 0;
while (i < a.length && j < b.length) {
if (a[i].name < b[j].name) {
answer[k] = a[i];
i++;
}else {
answer[k] = b[j];
j++;
}
k++;
}
while (i < a.length) {
answer[k] = a[i];
i++;
k++;
}
while (j < b.length) {
answer[k] = b[j];
j++;
k++;
}
return answer;
}
Here is the working fiddle with two arrays: http://jsfiddle.net/euRn5/. What is the best approach to achieve the same with n arrays? The thought I have in mind currently is to take them one by one and merge each into the previously merged result until the last one, like an n += i kind of accumulation. Is this the best approach?
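For illustration, a minimal sketch of that pairwise idea (the name mergeAllSorted is made up here; it just folds each array into the running result using the mergeSorted function above):
function mergeAllSorted(arrays) {
    // start from the first array (or an empty result) and keep merging pairwise
    var merged = arrays.length > 0 ? arrays[0] : [];
    for (var i = 1; i < arrays.length; i++) {
        merged = mergeSorted(merged, arrays[i]);
    }
    return merged;
}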
The most standard and easily understood code, I believe:
function mergeArray(arr1, arr2) {
    var new_array = [];
    var i = 0,
        j = 0;
    while (new_array.length != arr1.length + arr2.length) {
        // take from arr1 when arr2 is exhausted, or when arr1's next item is smaller
        if (j >= arr2.length || (i < arr1.length && arr1[i] < arr2[j])) {
            new_array.push(arr1[i]);
            i++;
        } else {
            new_array.push(arr2[j]);
            j++;
        }
    }
    return new_array;
}
Function call:
var merged_array = mergeArray([1,6,9,95], [2,7,10,11,14,18]);
Update:
Seeing as it is current_year this would now be:
const mergeAll = (...arrays) => arrays.reduce(mergeSorted);
Original:
If you're feeling functional, this is a perfect place to use reduce.
var mergeAll = function(){
return Array.prototype.slice.call(arguments).reduce(mergeSorted);
};
example:
var a = [{name:"a"}, {name:"b"}, {name:"m"}, {name:"x"}];
var b = [{name:"a"}, {name:"e"}, {name:"i"}, {name:"o"}];
var c = [{name:"g"}, {name:"h"}, {name:"m"}, {name:"n"}];
console.log(mergeAll(a,b,c).map(function(x){return x.name;}));
jsfiddle: http://jsfiddle.net/FeT6m/
The native implementations are not always the fastest (as you may have noticed) and have, historically, been somewhat sluggish, due to extensive error checking. That being said, there may be performance enhancements in the future, due to more robust integration with the hardware or routines specifically built to optimize certain tasks. If you write your own code, your application won't be able to take advantage of these boosts in performance once they're implemented. It's up to you to decide where the advantages lie and what the risks are.
At any rate, I've written a prettier version of your optimized code for funsies:
function mergeSorted(a,b){
var alen = a.length
, blen = b.length
, i, j, k = j = i = 0
, answer = new Array(alen + blen)
;//var
while(i < alen && j < blen)
answer[k++] = a[i].name < b[j].name ? a[i++] : b[j++];
while(i < alen) answer[k++] = a[i++];
while(j < blen) answer[k++] = b[j++];
return answer;
}
Faster, merges in only 1 pass, with more flexibility (keepDuplicates, custom comparator):
/* mergeSortedArrays(arrays[, keepDuplicates[, comparator[, thisArg]]])
Merges multiple sorted arrays into a new sorted array.
Arguments:
- arrays: array of sorted arrays to be merged
- keepDuplicates (optional): (true/false) whether to keep duplicate values
Default: false
- comparator (optional): function used to compare values
Default: sort numbers in ascending order
Example comparator: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/sort
- thisArg (optional): comparator is bound to thisArg when invoked
Returns: a new sorted array containing all the values from the arrays
*/
function mergeSortedArrays(arrays, keepDuplicates, comparator, thisArg) {
// Coerce to boolean to speed up testing in some JavaScript engines:
keepDuplicates = !!keepDuplicates;
// By default, sort numbers in ascending order:
if(!comparator) comparator = function(a, b) { return a - b; };
var nb = arrays.length, // Number of arrays to be merged
iter = new Array(nb), // Current position of iteration of each array
next = [], // Keep each array sorted by the value of their next element
length = 0; // The combined length of all arrays
// Populate iter and next:
for(var i = 0, arr; i < nb; i++) {
arr = arrays[i];
iter[i] = 0;
if(arr.length > 0) {
insertNextIndex(next, i, arr[0], comparator, thisArg);
}
length += arr.length;
}
// Insert index of array into next:
function insertNextIndex(next, index, val, comparator, thisArg) {
var i = next.length;
while(i--) { // Reverse loop...
var j = next[i];
if(comparator.call(thisArg, arrays[j][iter[j]], val) >= 0) { // ...until we find a greater or equal value
break;
}
}
next.splice(i + 1, 0, index);
}
var merged = keepDuplicates ? new Array(length) : [],
k = 0, // Iterate over merged
min, val, lastVal;
// First iteration to get a value for lastVal (for duplicate checks):
if(!keepDuplicates && next.length > 0) {
min = next.pop();
arr = arrays[min];
i = iter[min]++;
val = arr[i];
merged[k++] = val;
lastVal = val;
if(++i < arr.length) { // If available, insert next value in next:
insertNextIndex(next, min, arr[i], comparator, thisArg);
}
}
// Merge multiple arrays:
while(next.length > 1) { // While there are still multiple arrays to be merged
min = next.pop();
arr = arrays[min];
i = iter[min]++;
val = arr[i];
if(keepDuplicates || comparator.call(thisArg, lastVal, val) !== 0) {
merged[k++] = val;
lastVal = val;
}
if(++i < arr.length) { // If available, insert next value in next:
insertNextIndex(next, min, arr[i], comparator, thisArg);
}
}
// When only 1 array with unmerged values remains, use a faster loop:
if(next.length > 0) {
arr = arrays[next[0]];
i = iter[next[0]];
length = arr.length;
while(i < length) { // To the end
val = arr[i++];
if(keepDuplicates || comparator.call(thisArg, lastVal, val) !== 0) {
merged[k++] = val;
lastVal = val;
}
}
}
return merged;
}
Merging in 1 pass eliminates the creation of intermediate arrays, which takes time and memory. Also, the number of comparisons is nicely reduced by keeping a sorted list of the next element from each array (see the next array). And when the array sizes are known, the result is pre-allocated to prevent dynamic re-allocations (though that will depend on your JavaScript engine).
For your case, I would call it like this:
mergeSortedArrays(arrays, true, function(a, b) {
return a.name < b.name ? -1 : 1;
});
Note: if you have a large number of arrays, you may benefit from using a binary search instead of the linear search in insertNextIndex(), or from using a binary heap for next.
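For illustration, a binary-search variant of insertNextIndex might look like the sketch below (it assumes the same arrays, iter and next variables as the function above; note that the splice is still linear, which is what a binary heap would avoid):
function insertNextIndexBinarySearch(next, index, val, comparator, thisArg) {
    // next is kept sorted in descending order of each array's next value,
    // so the smallest value sits at the end and can be pop()ed cheaply
    var lo = 0, hi = next.length;
    while (lo < hi) {
        var mid = (lo + hi) >>> 1;
        var j = next[mid];
        if (comparator.call(thisArg, arrays[j][iter[j]], val) >= 0) {
            lo = mid + 1; // existing value is >= val, so insert further toward the end
        } else {
            hi = mid;
        }
    }
    next.splice(lo, 0, index);
}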
Edited to reflect that Exception's original solution, extended by calling it like mergeSorted(mergeSorted(a,b),c), is faster than my solution here.
JavaScript's built-in sort is [not] fast enough that you can just concatenate all the arrays together and sort the entire thing in one go; JavaScript is not good for re-implementing things that should be done at a lower level.
var a1 = [{name:"a"}, {name:"b"}, {name:"m"}, {name:"x"}]
var a2 = [{name:"a"}, {name:"e"}, {name:"i"}, {name:"o"}]
var a3 = [{name:"g"}, {name:"h"}, {name:"m"}, {name:"n"}]
a1.concat(a2,a3).sort(function(a,b){return (a.name>b.name)-(a.name<b.name)})
// [{name:"a"}, {name:"a"}, {name:"b"}, {name:"e"}, {name:"g"}, {name:"h"}, {name:"i"}, {name:"m"}, {name:"m"}, {name:"n"}, {name:"o"}, {name:"x"}]
I like thetheChad's answer, but I prefer something more readable:
let mergeSorted = function(a, b) {
let alen = a.length;
let blen = b.length;
let i = 0, j = 0, k = 0;
let sortedNums = new Array(alen + blen);
while (i < alen && j < blen) {
if (a[i] < b[j]) {
sortedNums[k++] = a[i++];
} else {
sortedNums[k++] = b[j++];
}
}
while (i < alen) {
sortedNums[k++] = a[i++];
}
while (j < blen) {
sortedNums[k++] = b[j++];
}
return sortedNums;
};
Basically, mergeSorted allocates a new array the size of the two input arrays combined, then populates it with the items in sorted order. If one of the arrays is longer, a trailing while loop copies the remaining items over. It should pass the following test suite.
// test
console.log(mergeSorted([1,6,9,95], [2,7,10,11,14,18]));
console.log(mergeSorted([2,3], [1]))
console.log(mergeSorted([0,0], [0,0]))
console.log(mergeSorted([1,3], [2]))
console.log(mergeSorted([1,2], [3,4]))
Related
I want to find all possible arrays (of non-negative numbers) that sum up to at most N in JavaScript:
function findArrays(maxSize, maxSum){}
Example input: findArrays(3, 10)
Some acceptable outputs (not writing all of them, as it would be too long):
[[0], [0,0,0], [10,0,0], [1,9], [1,2,3] /*, ... */]
What I tried so far:
I know it looks like homework, but it's not :) I can think of a solution that simply generates all (size*maxSum) possible arrays of acceptable sizes and then iterates through them to check whether the sum is greater than maxSum. However, I think this solution is very bad in terms of performance as maxSum gets bigger. I'm looking for a more efficient implementation, but I just don't know where to start.
My "bad" solution
function getNextArray(r,maxVal){
    for(var i=r.length-1;i>=0;i--){
        if(r[i]<maxVal){
            r[i]++;
            // reset every position after i, like an odometer carry
            for(var j=i+1;j<r.length;j++){
                r[j]=0;
            }
            break;
        }
    }
    return r;
}
function getAllArraysOfSize(size, maxVal){
var arrays=[],r=[],i;
for(i=0;i<size;i++){
r[i]=0;
}
while(r.reduce((a, b) => a + b, 0) < (maxVal*size)){
r = getNextArray(r.slice(),maxVal);
arrays.push(r);
}
return arrays;
};
function findArrays(maxSize, maxSum){
var allArrays=[],arraysOfFixedSize=[],acceptableArrays=[],i,j;
for(i=1; i<=maxSize; i++){
arraysOfFixedSize=getAllArraysOfSize(i,maxSum);
for(j=0; j<arraysOfFixedSize.length; j++){
allArrays.push(arraysOfFixedSize[j]);
}
}
for(i=0; i<allArrays.length; i++){
if(allArrays[i].reduce((a, b) => a + b, 0) <= maxSum){
acceptableArrays.push(allArrays[i]);
}
}
return acceptableArrays;
};
You can use recursion and a generator. The number of outputs grows quickly for higher valued arguments, so I keep them low here:
function * findArrays(maxSize, maxSum) {
let arr = [];
function * recur(maxSum) {
let k = arr.length;
yield [...arr]; // or: if (k) yield [...arr]
if (k === maxSize) return;
for (let i = 0; i <= maxSum; i++) {
arr[k] = i;
yield * recur(maxSum - i);
}
arr.length = k;
}
yield * recur(maxSum);
}
// demo
for (let arr of findArrays(2, 4))
console.log(JSON.stringify(arr));
NB: this also produces the empty array, which makes sense. If you want to avoid this, then just check that you don't yield an empty array.
If you prefer working with plain functions instead of generators, then translate the innermost yield expression to a push onto a result array, as follows:
function findArrays(maxSize, maxSum) {
let arr = [];
let result = []; // <--- will collect all the subarrays
function recur(maxSum) {
let k = arr.length;
result.push([...arr]);
if (k === maxSize) return;
for (let i = 0; i <= maxSum; i++) {
arr[k] = i;
recur(maxSum - i);
}
arr.length = k;
}
recur(maxSum);
return result;
}
// demo
for (let arr of findArrays(2, 4))
console.log(JSON.stringify(arr));
I hope this is helpful:
const data = [[0],[0,0,0],[10,0,0],[1,9],[1,2,3]];
function findArrays(maxSize, maxSum){
return data.reduce(
(acc, value) => {
if (value.length <= maxSize && value.every((val) => val >= 0)) {
    // sum the array and keep it if the total does not exceed maxSum
    const sum = value.reduce((acc, val) => acc + val, 0);
    if (sum <= maxSum) acc.push(value);
}
return acc
}, []
)
}
console.log(findArrays(3, 10));
I have a solution that seems to pass most of the tests but is too slow. If I'm not mistaken, the complexity is O(n^3) due to the three for loops.
My idea was to start at the first three positions of the array at i, j and k, sum them, and see if it adds up to 0.
The function's objective is:
Given an array nums of n integers, are there elements a, b, c in nums such that a + b + c = 0? Find all unique triplets in the array which give the sum of zero.
Note:
The solution set must not contain duplicate triplets.
Example:
Given array nums = [-1, 0, 1, 2, -1, -4],
A solution set is:
[
[-1, 0, 1],
[-1, -1, 2]
]
var threeSum = function(nums) {
var originalArray = nums
var lengthArray = nums.length
//sort array smallest to largest
nums.sort(function(a,b) {
return a-b
})
function arrayEqual(arrayOfTriplets, triplet){
    // true if an identical triplet has already been collected
    return arrayOfTriplets.some((existing) =>
        existing[0] === triplet[0] &&
        existing[1] === triplet[1] &&
        existing[2] === triplet[2]
    )
}
var sum = 0;
var answerArray = [];
//start from first digit and add from there
for(var i = 0; i<lengthArray; i++){
for(var j = i+1; j<lengthArray; j++){
for(var k = j+1; k<lengthArray; k++){
if((nums[i]+nums[j]+nums[k] === 0)){
if(!arrayEqual(answerArray, [nums[i],nums[j],nums[k]])){
answerArray.push([nums[i],nums[j],nums[k]])
}
}
}
}
}
return Array.from(new Set(answerArray.map(JSON.stringify)), JSON.parse)
};
How can I get around having to use three for loops to make this work (i.e., how do I optimize this solution)?
Think of this problem this way. Choose any number from the array, say k. Now you need to find two other numbers in the array which add up to -k. The resulting sum of the three numbers will be k + (-k) = 0.
So the problem is reduced to finding two numbers in the array that add up to a given number, which is O(n) with the two-pointer method if the array is sorted.
In a nutshell: sort the array, take each number k one by one (O(n)), and find two other numbers with sum -k (O(n)).
Total time complexity: O(n) * O(n) = O(n^2)
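A minimal sketch of that two-pointer step on its own (the helper name twoSumSorted is made up here; it assumes nums is already sorted in ascending order):
// Find all unique pairs in nums[lo..hi] that add up to target.
function twoSumSorted(nums, lo, hi, target) {
    var pairs = [];
    while (lo < hi) {
        var sum = nums[lo] + nums[hi];
        if (sum === target) {
            pairs.push([nums[lo], nums[hi]]);
            // skip duplicates so the same pair is not reported twice
            while (lo < hi && nums[lo] === nums[lo + 1]) lo++;
            while (lo < hi && nums[hi] === nums[hi - 1]) hi--;
            lo++; hi--;
        } else if (sum < target) {
            lo++;
        } else {
            hi--;
        }
    }
    return pairs;
}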
You can solve the problem in O(n^2) runtime. Here is a solution in JavaScript:
var threeSum = function(nums) {
var solutions = [];
var target = 0;
nums.sort(function(a, b) {
return a - b;
});
for(var i = 0; i < nums.length - 2; i++) {
if(i === 0 || (i > 0 && nums[i] !== nums[i - 1])) {
var lo = i + 1;
var hi = nums.length - 1;
var sum = - nums[i];
while(lo < hi) {
if(nums[lo] + nums[hi] === sum) {
solutions.push([nums[i],nums[lo],nums[hi]]);
while (lo < hi && nums[lo] === nums[lo + 1]) lo++;
while (lo < hi && nums[hi] == nums[hi-1]) hi--;
lo++; hi--;
}else if (nums[lo] + nums[hi] > sum) {
hi--;
}else {
lo++;
}
}
}
};
return solutions;
}
I need to calculate the min/max of a large array. I know about Math.max.apply(), but on large arrays it fails with a stack overflow exception. Any simple solutions?
Sort the array using the sort() method (with a numeric comparator, since the default sort compares strings).
Since the array is then sorted in ascending order, the last element is the max:
var arr = [1,4,6,4, ...];
arr.sort((a, b) => a - b);
var max = arr[arr.length - 1];
Array.prototype.min = function() {
var r = this[0];
this.forEach(function(v,i,a){if (v<r) r=v;});
return r;
};
From JavaScript: min & max Array values?, where other solutions to this problem are discussed.
FYI: I just googled "max min large array" and found this as the first result...
Why not just loop through the entire array?
var max = -Infinity, min = Infinity;
for (var i = 0, len=list.length; i < len; i++) {
if (list[i] > max) max = list[i];
if (list[i] < min) min = list[i];
}
Edit:
For max:
if (typeof Array.prototype.GetMax === "undefined") {
Array.prototype.GetMax = function() {
var max = -Infinity;
for (var i = 0, len=this.length; i < len; i++) {
if (this[i] > max) max = this[i];
}
return max;
}
}
For min:
if (typeof Array.prototype.GetMin === "undefined") {
Array.prototype.GetMin = function() {
var min = Infinity;
for (var i = 0, len=this.length; i < len; i++) {
if (this[i] < min) min = this[i];
}
return min;
}
}
For both:
if (typeof Array.prototype.GetMaxMin === "undefined") {
Array.prototype.GetMaxMin = function() {
var max = -Infinity, min = Infinity;
for (var i = 0, len=this.length; i < len; i++) {
if (this[i] > max) max = this[i];
if (this[i] < min) min = this[i];
}
return { Max: max, Min: min};
}
}
Should I assume you have thought of this:
var maxSoFar = -Infinity;
for (var i = 0; i < array.length ; ++i) {
if (array[i] > maxSoFar) {
maxSoFar = array[i];
}
... similar for minSoFar ...
}
Try this:
var arr = [];
for(var i=1000000;i>0;i--)
{
arr.push(i);
}
//we create a copy of the original array through arr.concat() since we do not want to change the original sorting order of the array
//we pass a function in the sort method as by default it sorts alphabetically instead of numerically. So 99 will be smaller than 100.
var arrMaxMin = arr.concat().sort(function(a,b){return a-b});
arrMaxMin[0]; //min
arrMaxMin[arrMaxMin.length - 1]; //max
Why don't you slice your array into smaller arrays? Then on those you can safely use Math.max.apply(Math, subarray). Remember to set the subarrays back to null afterwards so the memory can be reclaimed once the desired max value is obtained. For example:
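Here is a minimal sketch of that chunking idea (the chunk size is an arbitrary assumption, just kept well below typical argument-count limits):
function maxOfLargeArray(arr) {
    var CHUNK = 32768; // arbitrary; keeps each apply() call well under argument limits
    var max = -Infinity;
    for (var i = 0; i < arr.length; i += CHUNK) {
        max = Math.max(max, Math.max.apply(Math, arr.slice(i, i + CHUNK)));
    }
    return max;
}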
This is exactly what reduce is for:
function max(arr) {
if (arr.length === 0) {
return NaN // or whatever answer you want for an empty array, or throw an error.
}
return arr.reduce((a, b) => Math.max(a, b), -Infinity)
}
console.log(max([...new Array(100000).keys()]))
(note that [...new Array(100000).keys()] is just a fancy way, in modern browsers, to make a huge array of the numbers 0 to 99999. The max function itself will run in anything made in the last 20 years.)
You can also reduce it to this one-liner:
arr.reduce((cur, val, i) => i === 0 ? val : Math.max(cur, val), NaN)
here NaN is the value you get back if the array is empty
Or even
arr.reduce((a, b) => Math.max(a, b), -Infinity)
although this will return -Infinity for an empty array.
Finally, it may be tempting to just do:
arr.reduce(Math.max, -Infinity) //don't do this!!
but this won't work. This is because reduce calls its callback (here Math.max) with 4 arguments, one of which is the original array, so Math.max over those arguments ends up producing NaN.
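A quick illustration of that pitfall:
// reduce invokes its callback as callback(accumulator, value, index, array),
// so Math.max is effectively called as Math.max(acc, value, index, wholeArray);
// the array argument coerces to NaN here, poisoning the result.
console.log([1, 2, 3].reduce(Math.max, -Infinity)); // NaN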
How would I go about calculating the average of all the values in a multidimensional array? I've written a function to calculate the average of a 1-dimensional array, but I'm not sure what the best method is when there is more than one dimension.
For example, let's say we have the following:
var A = Array(3);
for (i=0; i<A.length; i++) {
A[i] = new Array(2);
for (j=0; j<A[i].length; j++) {
A[i][j] = i+j;
}
}
Therefore, A is a 2-dimensional array, or 3x2 matrix:
A = 0 1
1 2
2 3
So I'd like to find the average of all the values, which in this case would equal 1.5. I imagine I need to create a new 1-dimensional array of all the values, which I could then feed into my averaging function. However, I'm not sure of the easiest way to do this when the array is highly-dimensional (e.g. 5x3x6x9).
Thanks!
EDIT
Thanks everyone! I've used your advice and flattened the array using code I found in one of the attached links which uses the reduce function. My averaging function is now like this:
function average(x) {
// Flatten multi-dimensional array
while (x[0] instanceof Array) {
x = x.reduce( function(a, b) { return a.concat(b); } );
}
// Calculate average
return x.reduce( function(a, b) { return a + b; } )/x.length;
}
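For example, with the 3x2 array A built above:
console.log(average(A)); // 1.5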
You can use this code to flatten the multi-dimensional array:
function flatten(array){
var flat = [];
for (var i = 0, l = array.length; i < l; i++){
var type = Object.prototype.toString.call(array[i]).split(' ').pop().split(']').shift().toLowerCase();
if (type) { flat = flat.concat(/^(array|collection|arguments|object)$/.test(type) ? flatten(array[i]) : array[i]); }
}
return flat;
}
and then just sum and divide:
var total = 0;
for (var i = 0, l = flattenedArray.length; i<l; i++) {
total += flattenedArray[i];
}
var average = total/flattenedArray.length;
I don't see any particular reason you need to create a new array. Just loop through the ones you have:
var i, j, sub, total, count, avg;
total = count = 0;
for (i = 0; i < A.length; ++i) {
sub = A[i];
count += sub.length;
for (j = 0; j < sub.length; ++j) {
total += sub[j];
}
}
avg = count === 0 ? NaN : total / count;
Note that the above assumes the arrays are not sparse (they aren't in your example).
// The simplest method is to flatten the array
Array.prototype.flatten= function(){
var A= [];
this.forEach(function(itm){
if(!itm || !itm.flatten)A.push(itm);
else{
A= A.concat(itm.flatten());
}
});
return A;
}
// shim for older browsers (without array forEach)
Array.prototype.forEach= [].forEach || function(fun, scope){
var T= this, L= T.length, i= 0;
if(typeof fun== 'function'){
while(i< L){
if(i in T){
fun.call(scope, T[i], i, T);
}
++i;
}
}
return T;
}
var a=[[1,2,3,[1,2,3]],[4,5,6,[7,8,[1,2,3,[1,2]]]],11,[1,[2,[2,4,[5]]]]];
a.flatten().join('\n')
/* returned value: (String)
1
2
3
1
2
3
4
5
6
7
8
1
2
3
1
2
11
1
2
2
4
5
*/
You can flatten your multidimensional array with this function:
function flatten(arr) {
var acc = [];
var f = function(arr) {
for (var i = 0; i < arr.length; ++i) {
if (arr[i] instanceof Array) {
f(arr[i]);
}
else {
acc.push(arr[i]);
}
}
};
f(arr);
return acc;
}
And then you can calculate the average with your function.
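For example (assuming the average() function from the question's edit is in scope):
var nested = [[0, 1], [1, 2], [2, 3]];
console.log(average(flatten(nested))); // 1.5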