I'm trying to split an array into chunks. The number of chunks should be whatever the function specifies. What I have so far is:
groupBySize = function(array, groupSize){
    if(groupSize === 0){
        return;
    }
    var groups = [];
    var i, j, temparray;
    for (i = 0, j = array.length; i < j; i += groupSize) {
        temparray = array.slice(i, i + groupSize);
        groups.push(temparray);
    }
    return groups;
};
groupByNumberOfGroups = function(array, NumberOfGroups){
    var groupSize = Math.floor(array.length / NumberOfGroups);
    var groups = this.groupBySize(array, groupSize);
    // Let's make sure we get the right amount of groups
    while(groups.length > NumberOfGroups){
        console.log(groups.length + ">" + NumberOfGroups);
        var last = groups[(groups.length - 1)];
        for(var j = 0; j < last.length; j++){
            var temp = j;
            while(groups[temp].length > groups[temp + 1]){
                temp++;
            }
            groups[j].push(last[j]);
        }
        groups.pop();
    }
    return groups;
};
This successfully splits the array into the correct number of chunks. I would also like the chunk lengths to be as uniform as possible, so splitting an array like [1,2,3,4,5,6] into 4 chunks should give [[1,2],[3,4],[5],[6]].
Any suggestions?
Another example of the shortcomings: splitting [1,2,3,4,5,6,7,8,9,10,11,12,13,14] into 8 chunks gives [[1,2,3,4,5,6,7],[8],[9],[10],[11],[12],[13],[14]].
Simple maths. If you have n items and want to split them into k chunks, every chunk will have n / k items. But what if n / k isn't an integer? Then some of the chunks will have n / k rounded down items and some will have n / k rounded down + 1 items. How many of them get the extra item? n % k.
function distribute(a, k) {
    var count = Math.floor(a.length / k);
    var remain = a.length % k;
    var result = [];
    var index = 0;
    for (var i = 0; i < k; i++) {
        var number = count + (i < remain ? 1 : 0);
        result.push(a.slice(index, index + number));
        index += number;
    }
    return result;
}
distribute([1, 2, 3, 4, 5, 6, 7], 2);
--> [[1, 2, 3, 4], [5, 6, 7]]
Edit by Nordfjord: Shorter, but distributes the values differently than the solution above:
groupByNumberOfGroups = function(array, numberOfGroups){
    var groups = [];
    for(var i = 0; i < numberOfGroups; ++i) groups.push([]);
    for(var i = 0; i < array.length; ++i) groups[i % numberOfGroups].push(array[i]);
    return groups;
}
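For comparison with the distribute example above, this version groups round-robin, interleaving the values rather than keeping them contiguous (an illustrative call, not from the original answer):
// Round-robin grouping spreads consecutive values across the groups.
groupByNumberOfGroups([1, 2, 3, 4, 5, 6, 7], 2);
// --> [[1, 3, 5, 7], [2, 4, 6]]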
Try this,
var groupByNumberOfGroups = function( array, NumberOfGroups ){
    var div = Math.floor( array.length / NumberOfGroups );
    var mod = array.length % NumberOfGroups;
    var result = [];
    var lowerBound = 0, upperBound = 0;
    for ( var k = 0; k < NumberOfGroups; k++ ) {
        lowerBound = ( upperBound === 0 ) ? ( ( k < mod ) ? k * ( div + 1 ) : k * ( div ) ) : upperBound;
        upperBound = ( k < mod ) ? ( lowerBound + div + 1 ) : ( lowerBound + div );
        result[ k ] = array.slice( lowerBound, upperBound );
    }
    return result;
};
groupByNumberOfGroups([1,2,3,4,5,6], 4); // [[1, 2], [3, 4], [5], [6]]
groupByNumberOfGroups([1,2,3,4,5,6,7,8,9,10,11,12,13,14], 8); // [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10], [11, 12], [13], [14]]
js fiddle : link
var a = [1,3,6,10,-1];
function combinations(array, n) {
}
combinations(a, 9) // should return...
[[1], [3], [6], [-1], [1,3], [1,6], [1,-1], [3,6], [3,-1], [6, -1], [10, -1], [1,3,-1], [3,6,-1], [1,6,-1], [1,3,6,-1]]
Maybe I'm missing some correct combinations, but you get the idea: I want every combination of the array's elements whose sum is less than or equal to n. Really dying to know how to solve this!
I would say the problem here is to take the power set of the array and filter it down to only the subsets whose sum is less than or equal to a certain number.
The power set of a set is the set of all subsets of that set. (Say that five times fast and you'll be a mathematician)
For example, the power set of [1] is [[], [1]] and the power set of [1, 2] is [[], [1], [2], [1, 2]].
First I would define a powerSet function like this:
var powerSet = function (arr) {
    // the power set of [] is [[]]
    if(arr.length === 0) {
        return [[]];
    }
    // remove and remember the last element of the array
    var lastElement = arr.pop();
    // take the powerset of the rest of the array
    var restPowerset = powerSet(arr);
    // for each set in the power set of arr minus its last element,
    // include that set in the powerset of arr both with and without
    // the last element of arr
    var powerset = [];
    for(var i = 0; i < restPowerset.length; i++) {
        var set = restPowerset[i];
        // without last element
        powerset.push(set);
        // with last element
        set = set.slice(); // create a new array that's a copy of set
        set.push(lastElement);
        powerset.push(set);
    }
    return powerset;
};
Then I would define a function that takes the power set of the array and keeps only the subsets whose sum is less than or equal to some amount:
var subsetsLessThan = function (arr, number) {
    // all subsets of arr
    var powerset = powerSet(arr);
    // subsets summing less than or equal to number
    var subsets = [];
    for(var i = 0; i < powerset.length; i++) {
        var subset = powerset[i];
        var sum = 0;
        for(var j = 0; j < subset.length; j++) {
            sum += subset[j];
        }
        if(sum <= number) {
            subsets.push(subset);
        }
    }
    return subsets;
};
This might not be fast on large arrays, but it works well for small ones.
It looks like it gives the right answer for console.log(subsetsLessThan([1,3,6,10,-1], 9)); (note that the result also contains the empty subset [], which you may want to filter out).
edit: a little more about the power set function as implemented here
The only subset of [] is [], so the power set of [] is a set containing only []. That would be [[]].
The initial if statement in the powerSet function immediately returns [[]] if you pass in [].
var powerSet = function (arr) {
    if(arr.length === 0) {
        return [[]];
    }
If you pass in a set with at least one element, the powerSet function begins by removing the last element. For example, if you call powerSet on [1, 2], the variable lastElement will be set to 2 and arr will be set to [1].
var lastElement = arr.pop();
Then the powerSet function recursively calls itself to get the power set of the "rest" of the list. If you had passed in [1, 2], then restPowerset is assigned to powerSet([1]) which is [[], [1]].
var restPowerset = powerSet(arr);
We define a variable that's going to hold the power set of what was passed in, here [1, 2]
var powerset = [];
We loop through every set in restPowerset.
for(var i = 0; i < restPowerset.length; i++) {
    var set = restPowerset[i];
Any subset of [1] is also a subset of [1, 2] so we add it to the list. That is, [] and [1] are both subsets of [1, 2].
powerset.push(set);
If you add the element 2 to any subset of [1], that is also a subset of [1, 2], so we add it to the list. Both [2] and [1, 2] are subsets of [1, 2].
set = set.slice(); // copy the array
set.push(lastElement); // add the element
powerset.push(set);
That's all. At this point, the variable powerset is [[], [2], [1], [1, 2]]. Return it!
}
return powerset;
};
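To check, calling powerSet on [1, 2] directly reproduces the walkthrough's result (an illustrative call, not part of the original answer):
console.log(JSON.stringify(powerSet([1, 2])));
// [[],[2],[1],[1,2]]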
Brute force O(N * 2^N) solution, where N = a.length < 31.
This uses the index i as a bit field to filter the elements of a in each iteration into a sublist.
var a = [1,3,6,10,-1];
function combinations(array, n) {
    var lists = [], M = 1 << array.length;
    for( var i = 1 ; i < M ; ++i ) {
        var sublist = array.filter(function(c, k){ return i >> k & 1; });
        if( sublist.reduce(function(p, c){ return p + c; }, 0) <= n )
            lists.push(sublist);
    }
    return lists;
}
console.log(JSON.stringify(combinations(a,9)));
[[1],[3],[1,3],[6],[1,6],[3,6],[-1],[1,-1],[3,-1],[1,3,-1],[6,-1],[1,6,-1],[3,6,-1],[1,3,6,-1],[10,-1]]
Similar to Matt's answer, but uses Array.filter() and Array.reduce() to pack a punch. The variable mask is incremented from 1 to 31 in this example (because the array length is 5 and count = 1 << 5, which is 32). The array is filtered for each mask increment, producing a new array, or permutation, where only certain values are included.
A value is included in the permutation if the mask shifted right by the value's index is odd. Think binary here: either a value is in the permutation or it isn't (1 or 0), and since the mask runs through all possible numbers, all of the possible permutations are covered directly by the number when expressed in binary:
index: 4,3,2,1,0
mask: 0 0 0 0 1 (grab index 0, [1])
mask: 0 0 0 1 0 (grab index 1, [3])
mask: 0 0 0 1 1 (grab index 0 and 1, [1,3])
mask: 1 1 0 0 0 (grab index 3 and 4, [10,-1])
var a = [1,3,6,10,-1];
function combinations(array, n) {
    var mask, len = array.length, count = 1 << len, permutations = [];
    var indexVisible = function(v, i) { return ((mask >> i) & 1) == 1; };
    var sum = function(a, b) { return a + b; };
    for (mask = 1; mask < count; ++mask) {
        permutations.push(array.filter(indexVisible));
    }
    return permutations.filter(function(p) { return p.reduce(sum) <= n; });
}
console.log(JSON.stringify(combinations(a, 9)));
The indexVisible() function is used to filter the original array and return a permutation that matches the mask.
The sum() function is used to reduce each permutation to the sum of its values; if that sum is less than or equal to n, the permutation is included in the final result returned from combinations().
Here are the permutations:
[[1],[3],[1,3],[6],[1,6],[3,6],[1,3,6],[10],[1,10],[3,10],[1,3,10],[6,10],[1,6,10],[3,6,10],[1,3,6,10],[-1],[1,-1],[3,-1],[1,3,-1],[6,-1],[1,6,-1],[3,6,-1],[1,3,6,-1],[10,-1],[1,10,-1],[3,10,-1],[1,3,10,-1],[6,10,-1],[1,6,10,-1],[3,6,10,-1],[1,3,6,10,-1]]
Here are the results:
[[1],[3],[1,3],[6],[1,6],[3,6],[-1],[1,-1],[3,-1],[1,3,-1],[6,-1],[1,6,-1],[3,6,-1],[1,3,6,-1],[10,-1]]
You can see how all of this works and play with different combinations in this JSFiddle.
The following code will give you all sub-arrays summing to 9 or less:
function getSubArrays(arr, n){
    var len = arr.length,
        subs = Array(Math.pow(2, len)).fill();
    return subs.map((_, i) => {
        var j = -1,
            k = i,
            res = [];
        while (++j < len) {
            k & 1 && res.push(arr[j]);
            k = k >> 1;
        }
        return res;
    }).slice(1)
      .filter(a => a.reduce((p, c) => p + c) <= n);
}
var arr = [1,3,6,10,-1],
result = getSubArrays(arr,9);
console.log(JSON.stringify(result));
Edit: giving credit where due; I borrowed the bulk of this logic from this answer.
var combinations = function(a, m) {
    var gc = function(a) {
        var fn = function(n, src, got, all) {
            if (n == 0) {
                if (got.length > 0) {
                    all[all.length] = got;
                }
                return;
            }
            for (var j = 0; j < src.length; j++) {
                fn(n - 1, src.slice(j + 1), got.concat([src[j]]), all);
            }
            return;
        };
        var all = [];
        for (var i = 0; i < a.length; i++) {
            fn(i, a, [], all);
        }
        all.push(a);
        return all;
    };
    var c = gc(a);
    return c.filter(function(e) {
        var n = e.length;
        var sum = 0;
        while(n--)
            sum += parseFloat(e[n]) || 0;
        return sum <= m;
    }, m);
};
var a = [1,3,6,10,-1];
combinations(a,9);
output
[[1], [3], [6], [-1], [1, 3], [1, 6], [1, -1], [3, 6], [3, -1], [6, -1], [10, -1], [1, 3, -1], [1, 6, -1], [3, 6, -1], [1, 3, 6, -1]]
It looked like too much fun not to play; here's what I have.
Javascript
function kCombs(set, k) {
    var setLength = set.length,
        combs = [],
        i = 0,
        tailLength,
        head,
        tail,
        j,
        t,
        u;
    if (k > 0 && k <= setLength) {
        if (k === setLength) {
            combs.push(set);
        } else if (k === 1) {
            while (i < setLength) {
                combs.push([set[i]]);
                i += 1;
            }
        } else {
            u = k - 1;
            setLength = setLength - k + 1;
            while (i < setLength) {
                t = i + 1;
                head = set.slice(i, t);
                tail = kCombs(set.slice(t), u);
                j = 0;
                tailLength = tail.length;
                while (j < tailLength) {
                    combs.push(head.concat(tail[j]));
                    j += 1;
                }
                i = t;
            }
        }
    }
    return combs;
}
function combinations(array, n) {
    var arrayLength = array.length,
        combs = [],
        combsLength,
        results = [],
        temp = 0,
        current,
        currentLength,
        i,
        j,
        k = 1;
    while (k <= arrayLength) {
        i = 0;
        current = kCombs(array, k);
        currentLength = current.length;
        while (i < currentLength) {
            combs.push(current[i]);
            i += 1;
        }
        k += 1;
    }
    i = 0;
    combsLength = combs.length;
    while (i < combsLength) {
        j = 0;
        current = combs[i];
        currentLength = current.length;
        while (j < currentLength) {
            temp += current[j];
            j += 1;
        }
        if (temp <= n) {
            results.push(current);
        }
        temp = 0;
        i += 1;
    }
    return results;
}
var a = [1, 3, 6, 10, -1];
console.log(JSON.stringify(combinations(a, 9)));
Output
[[1],[3],[6],[-1],[1,3],[1,6],[1,-1],[3,6],[3,-1],[6,-1],[10,-1],[1,3,-1],[1,6,-1],[3,6,-1],[1,3,6,-1]]
On jsFiddle
And a jsPerf of all of these, although @jcarpenter's solution gives an ambiguity (it also returns the empty subset).
On a modern browser you could squeeze more out of this solution by using for instead of while, since engines are highly optimised for for loops; assigning by index rather than calling push would also give a performance boost (sketched below).
It would be nice to extend the performance tests to include some more test sets, maybe if I get bored.
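As a rough sketch of that suggestion (assumed equivalent to the inner copy loop in kCombs above; not benchmarked here):
// The inner while loop rewritten with a for loop and indexed assignment
// instead of push (illustrative only, behaviour unchanged).
for (j = 0, tailLength = tail.length; j < tailLength; j += 1) {
    combs[combs.length] = head.concat(tail[j]);
}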
Brevity is very cryptic here. How about some descriptive functions?
The approach uses binary to create maps of all the possible combinations. Then the map is used to pluck items from the array. The plucked items are summed, and that's about it.
The result of combinations([1, 3, 6, 10, -1], 9) produced is: [[-1],[10,-1],[6],[6,-1],[3],[3,-1],[3,6],[3,6,-1],[1],[1,-1],[1,6],[1,6,-1],[1,3],[1,3,-1],[1,3,6,-1]].
Here is a Fiddle.
/**
 * Get an array of all the possible combinations
 * of x items. Combinations are represented as binary.
 * @param {Number} x - example 2
 * @return {String[]} - example ['00', '01', '10', '11']
 */
function getCombinationsOfXItems(x) {
    var allOn = '',
        numCombos = 0,
        i = 0,
        combos = [];
    // find upper limit
    while (allOn.length < x) {
        allOn += 1;
    }
    // number of possible combinations
    numCombos = parseInt(allOn, 2) + 1;
    // generate the combos
    while (i < numCombos) {
        combos.push(pad(toBase2(i++), allOn.length));
    }
    return combos;
}
/**
 * Pad a string with leading zeros.
 * @param {String} x - example '100'
 * @param {Number} length - example 6
 * @return {String} - example '000100'
 */
function pad(x, length) {
    while (x.length < length) {
        x = 0 + x;
    }
    return x;
}
/**
 * Get a number as a binary string.
 * @param {Number} x - example 3
 * @return {String} - example '11'
 */
function toBase2(x) {
    return x.toString(2);
}
/**
 * Given an array and a map of its items as a binary string,
 * return the items identified by 1.
 * @param {Array} arr - example [1,2,3]
 * @param {String} binary - example '101'
 * @return {Array} - example [1,3]
 */
function pluckFromArrayByBinary(arr, binary) {
    var plucked = [],
        i = 0,
        max = binary.length;
    for (; i < max; i++) {
        if (binary[i] === '1') {
            plucked.push(arr[i]);
        }
    }
    return plucked;
}
/**
 * Given an array, return a multi-dimensional
 * array of all the combinations of its items.
 * @param {Array} - example [1, 2]
 * @return {Array[]} - [ [1], [1, 2], [2] ]
 */
function getCombosOfArrayItems(arr) {
    var comboMaps = getCombinationsOfXItems(arr.length),
        combos = [];
    // remove the "all off" combo (ex: '00000')
    comboMaps.shift();
    for (var i = 0; i < comboMaps.length; i++) {
        combos.push(pluckFromArrayByBinary(arr, comboMaps[i]));
    }
    return combos;
}
/**
 * Return all possible combinations of numbers in an
 * array whose sum is less than or equal to n.
 * @param {Number[]} arr
 * @param {Number} x
 * @return {Number[]} - stringified for readability
 */
function combinations(arr, x) {
    var combos = getCombosOfArrayItems(arr),
        i = 0,
        max = combos.length,
        combo;
    for (; i < max; i++) {
        if (sumArray(combos[i]) > x) {
            combos.splice(i, 1);
            i--;
            max--;
        }
    }
    return JSON.stringify(combos);
}
/**
 * Return the sum of an array of numbers.
 * @param {Number[]} arr
 * @return {Number}
 */
function sumArray(arr) {
    var sum = 0,
        i = 0,
        max = arr.length;
    for (; i < max; i++) {
        sum += arr[i];
    }
    return sum;
}
console.log(combinations([1, 3, 6, 10, -1], 9));
@jcarpenter's solution was so nice I just had to rework it for those who love ECMA5. This will not be as fast as the raw power of for loops; the modern array methods have not had the same length of time to be so highly optimised (and they do quite a bit more work). But the performance results do show just how good the powerSet algorithm is (and it is a reusable function). I've also filtered out the ambiguity (the empty subset), which slows things slightly.
Javascript
function powerSet(arr) {
    var lastElement,
        val;
    if (!arr.length) {
        val = [[]];
    } else {
        lastElement = arr.pop();
        val = powerSet(arr).reduce(function (previous, element) {
            previous.push(element);
            element = element.slice();
            element.push(lastElement);
            previous.push(element);
            return previous;
        }, []);
    }
    return val;
}
function combinations(array, n) {
    return powerSet(array).filter(function (set) {
        return set.length && set.reduce(function (previous, element) {
            return previous + element;
        }, 0) <= n;
    });
}
var a = [1, 3, 6, 10, -1];
console.log(JSON.stringify(combinations(a, 9)));
Output
[[-1],[10,-1],[6],[6,-1],[3],[3,-1],[3,6],[3,6,-1],[1],[1,-1],[1,6],[1,6,-1],[1,3],[1,3,-1],[1,3,6,-1]]
On jsFiddle
And added to the jsPerf
Try this:
var a = [1,3,6,10,-1];
function combinations(array, n) {
    var arrayCopy = [],
        results = [];
    // duplicate the array
    for (var i in array)
        arrayCopy[i] = array[i];
    for (var i in array)
        for (var j in arrayCopy)
            if ((array[i] + arrayCopy[j]) <= n)
                results.push([array[i], arrayCopy[j]]);
    return results;
}
console.log(combinations(a, 9));
This logged:
[1, 1], [1, 3], [1, 6], [1, -1],
[3, 1], [3, 3], [3, 6], [3, -1],
[6, 1], [6, 3], [6, -1],
[10, -1],
[-1, 1], [-1, 3], [-1, 6], [-1, 10], [-1, -1]
Suppose we have an array of variable length, and I want to process it by chunks that are of a maximum length of 100, and do it in the minimum number of chunks. So for an array of length 241, it would be 3 sub arrays of sizes 41, 100, 100 (or 100, 100, 41).
curr_len = arr.length;
offset = curr_len % 100;
doSomethingWithSubArray(arr.slice(0, offset));
for(j = offset; j < curr_len; j = j + 100){
    doSomethingWithSubArray(arr.slice(j, j + 100));
}
I'm sure there are more elegant ways of doing this, possibly without the special case before the for loop. Any ideas?
I'd expect the last chunk to be of smaller size. The code then would be:
for (var i = 0; i < arr.length; i += 100)
    doSomethingWithSubArray(arr.slice(i, i + 100));
This is exactly what my splitBy function does:
Array.prototype.splitBy = function(n) {
    /* get: number of items per array
       return: array of n-sized arrays with the items (the last array may contain fewer than n) */
    for (var r = [], i = 0; i < this.length; i += n)
        r.push(this.slice(i, i + n));
    return r;
};
Then write only:
arr.splitBy(100).forEach(doSomethingWithSubArray);
Use a chunk function:
function chunk(a, s){
    for(var x, i = 0, c = -1, l = a.length, n = []; i < l; i++)
        (x = i % s) ? n[c][x] = a[i] : n[++c] = [a[i]];
    return n;
}
console.log(chunk([1,2,3,4,5,6,7,8,9,10], 3));
It's a functional-style recursive solution: no index variable, no loop, no counter, because it reads more clearly.
var chunk = function(arr, n){
    if (arr.length == 0) return [];
    var head = arr.slice(0, n), rest = arr.slice(n);
    return [head].concat( chunk(rest, n) );
};
console.log(chunk([1,2,3,4,5,6,7,8,9,10], 3));
Not really, using reduce looks like this:
var array = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11];
var splitArrays = array.reduce(function(arr, cur, i) {
    if (i % 3 === 0) arr.push([]);
    arr[i / 3 | 0].push(cur);
    return arr;
}, []);
//splitArrays looks like:
//[[1,2,3],[4,5,6],[7,8,9],[10,11]]
A more generic function:
function splitArray(array, num) {
    return array.reduce(function(arr, cur, i) {
        if (i % num === 0) arr.push([]);
        arr[i / num | 0].push(cur);
        return arr;
    }, []);
}
Make your doSomethingWithSubArray function accept a starting index and return the next unprocessed index, or null if there's no more work. Call this "iterator" in a while loop's condition, and do the rest of the work you want to do between chunks (update the UI?) in the loop body, right after the condition calls the "iterator".
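A minimal sketch of that approach, assuming a 100-item chunk size; process and updateUI are placeholders for your own per-chunk and between-chunk work, not functions from the question:
// Processes one chunk starting at `start` and returns the next
// unprocessed index, or null when the whole array has been handled.
function doSomethingWithSubArray(arr, start) {
    var end = Math.min(start + 100, arr.length);
    process(arr.slice(start, end));        // placeholder: per-chunk work
    return end < arr.length ? end : null;  // next index, or null if done
}
var index = 0;
while ((index = doSomethingWithSubArray(arr, index)) !== null) {
    updateUI();                            // placeholder: work between chunks
}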