I have an array:
var data = [0,1,2,3,4,5];
that I would like to split into [0,1,2] and [3,4,5] and then average each chunk, so that the final result would be:
var dataOptimised = [1,4];
this is what I have found so far:
function chunk (arr, len) {
var chunks = [];
var i = 0;
var n = arr.length;
while (i < n) {
chunks.push(arr.slice(i, i += len)); // gives [[0,1,2], [3,4,5]]
}
return chunks;
};
How do I reduce each chunk to its average?
Thanks
Sum each chunk using Array.reduce() and divide by the chunk's length.
var data = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30];
function chunkAverage(arr, len) {
var chunks = [];
var i = 0;
var n = arr.length;
var chunk;
while (i < n) {
chunk = arr.slice(i, i += len);
chunks.push(
chunk.reduce((sum, val) => sum + val) / chunk.length
);
}
return chunks;
};
console.log(chunkAverage(data, 3));
You can chunk the array, then map each chunk and reduce it to its average.
function chunk(arr, len) {
var chunks = [];
var i = 0;
var n = arr.length;
while (i < n) {
chunks.push(arr.slice(i, i += len)); // gives [[0,1,2], [3,4,5]]
}
return chunks;
};
var data = [0, 1, 2, 3, 4, 5];
var result = chunk(data, 3).map(o => (o.reduce((c, v) => c + v, 0)) / o.length);
console.log(result);
Split the array in half using splice, then use .reduce to sum each half and compute the averages.
var arrR = [0, 1, 2, 3, 4, 5],
arrL = arrR.splice(0, Math.ceil(arrR.length / 2)),
results = [getAverage(arrL), getAverage(arrR)];
console.log(results)
function getAverage(arr) {
return arr.reduce(function(a, b) {
return a + b;
}) / arr.length;
}
Here is a very short answer for this question; n is the index you want to split at.
function chunk(arr, n) {
// Math.sum does not exist in JavaScript, so sum each half with reduce,
// then divide by its length to get the averages the question asks for
var avg = function (xs) { return xs.reduce(function (a, b) { return a + b; }, 0) / xs.length; };
return [avg(arr.slice(0, n)), avg(arr.slice(n))];
}
If you don't mind using underscore.js, you can use the _.chunk() function to easily chunk your array, then map() each chunk through a reduce() call that averages it.
When importing the underscore.js library, you can reference the library using the _ symbol.
const arr = [0, 1, 2, 3, 4, 5];
const len = 3;
const result = _.chunk(arr, len).map(chunk => chunk.reduce((a, b) => a + b, 0) / chunk.length);
console.log(result); // Outputs [1, 4]
If you have an odd-length array, say arr = [0, 1, 2, 3, 4, 5, 6], then the result would be [1, 4, 6].
In HTML, you can include the library in a <script> tag:
<script src="http://underscorejs.org/underscore.js"></script>
Agreeing with both Ori Drori and Eddie, but I thought I might also provide some additional minor changes to your chunk function for consistency and maintainability's sake...
When writing JavaScript, I would recommend using function names that won't collide with common/expected variable names. For example, with a function like chunk() that returns a "chunk", it's likely you would want to create a variable called chunk to hold its return value. A line of code like var chunk = chunk() is an obvious problem, but if it gets any less direct it can easily wreak havoc down the line. Using the const var = function pattern (see snippet) helps you avoid writing over the original function by throwing an error on the correct line, but I would argue it's also still good to get in the habit of using a naming convention that doesn't have this drawback just in case you can't use something like const. My approach is to always include a verb in the function name. In your case, "chunk" can also be considered a verb, but it conflicts. So, I prefixed it with "get".
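To make the collision concrete, here is a small sketch (hypothetical code, not from the question) of what can go wrong when the function and its result share a name:
function chunk(arr, len) {
  // ...chunking logic as in the question...
  return [arr.slice(0, len), arr.slice(len)];
}
var chunk = chunk([0, 1, 2, 3, 4, 5], 3); // works once, but `chunk` is now an array
// chunk([6, 7, 8], 2); // would now throw TypeError: chunk is not a function
Declaring the function with const would at least surface the clash immediately as an error, but a distinct, verb-prefixed name avoids the problem altogether: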
const getChunk = (arr, len) => {
const chunks = []
const n = arr.length
let i = 0
while (i < n) {
chunks.push(arr.slice(i, i += len))
}
return chunks
}
const data = [0,1,2,3,4,5]
const optimizedData =
getChunk(data, 3).map(chunk =>
chunk.reduce((total, val) => total + val) / chunk.length
)
console.log(optimizedData)
Related
I have made a basic implementation of selection sort using JavaScript's Math.min(). Can anyone point out ways to make this more efficient or more elegant? Something I could have avoided doing, etc.? Thanks everyone, the code is below:
let arr = [2, 0, 5, 1, 3, 2, 6, 4, 9, 0, 10, 2, 14, 8];
function selectionSort(array) {
let workingArr = [...array]; //don't want to modify original array
let sortedArr = []; //this will be returned as result
for (let i = 0; i < array.length; i++) {
let sliced = workingArr.slice(0);
let min = Math.min(...sliced); //minimum of the slice
sortedArr[i] = min;
let index = workingArr.indexOf(min);
workingArr.splice(index, 1);
}
return sortedArr;
}
let x = selectionSort(arr);
console.log(x);
document.body.innerHTML = x;
I am not sure about the exact definition of selection sort being used here, but below are two versions of your code: 1) one that removes the unnecessary copies of arrays (which waste space), and 2) a more elegant solution.
Your original solution optimised
function selectionSort(array) {
const localArr = [...array];
const res = [];
for (let i = 0; i < localArr.length; i++) {
const min = Math.min(...localArr);
localArr.splice(localArr.indexOf(min), 1);
i--; // keep i at 0: localArr shrinks by one element each pass, so the loop runs once per original element
res.push(min);
}
return res;
}
Use Array.prototype.reduce
function selectionSort(array) {
const localArr = [...array];
return array.reduce((acc) => {
const min = Math.min(...localArr);
localArr.splice(localArr.indexOf(min), 1);
return acc.concat(min);
}, []);
}
Note: in your original version of the function you seemed to care about immutability, yet in the body of the function you use Array.prototype.splice and Array.prototype.push, which both contravene the FP principle of immutability. I am not using a pure FP approach here, just for brevity, but you should look into other array methods that are more 'reliable', so to speak.
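For illustration only (this is a sketch, not part of the answer above, and selectionSortImmutable is a hypothetical name), a splice- and push-free variant could look like this:
function selectionSortImmutable(array) {
  // each step builds new arrays instead of mutating existing ones
  const step = (remaining, sorted) => {
    if (remaining.length === 0) return sorted;
    const min = Math.min(...remaining);
    const i = remaining.indexOf(min);
    // drop the minimum without splice by concatenating the parts around it
    return step([...remaining.slice(0, i), ...remaining.slice(i + 1)], [...sorted, min]);
  };
  return step(array, []);
}
console.log(selectionSortImmutable([2, 0, 5, 1, 3, 2, 6, 4, 9, 0, 10, 2, 14, 8]));
// [0, 0, 1, 2, 2, 2, 3, 4, 5, 6, 8, 9, 10, 14]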
It seems nobody found anything else here, but I finally found something that could have been avoided in the original code: there is no need to take slices of the array named workingArr in the code above (in the question). Here is the modified code, which is simpler.
let arr = [2, 0, 5, 1, 3, 2, 6, 4, 9, 0, 10, 2, 14, 8];
function selectionSort(array) {
let workingArr = [...array]; //don't want to modify original array
let sortedArr = []; //this will be returned as result
for (let i = 0; i < array.length; i++) {
//run upto full length of original array
let min = Math.min(...workingArr); //minimum of the remaining values
sortedArr[i] = min; //insert the minimum found into sortedArr
let index = workingArr.indexOf(min); //find the element's position in the input array's copy so it can be removed (otherwise it would come out as the minimum again in the next pass)
workingArr.splice(index, 1);
}
return sortedArr; //return resulting array
}
let x = selectionSort(arr);
console.log(x);
console.log(x.reverse()); //for descending sort
I was trying to write an algorithm in JavaScript that returns all the possible 3-digit numbers from a given array of length 6.
For Example
var arr = [1, 2, 3, 4, 5, 6];
I have already got the combinations with the same sets of numbers in different positions in the 2D array (using the code from the answer I linked).
If I have the same numbers in different combinations, I would like to remove them from the array. For example, if I have [1, 2, 3] at index i of the array containing all the possible combinations, I would like to remove the other combinations with the same numbers, like [2, 1, 3], [1, 3, 2] and so on.
Note: the array also contains combinations with repeated numbers, like [3, 3, 3], [2, 2, 2], [3, 2, 3] and so on.
I expect a 2D array with values like [[1,2,3],[1,2,4],[1,2,5],[1,2,6],[1,3,4]] and so on (24 possibilities).
Is there any way to do this?
Extending the answer you linked, just filter out the results with the help of a Set.
Sort each individual result, convert it into a string using join(), check whether it is already present in the set, and if not, store it in the final result.
function cartesian_product(xs, ys) {
var result = [];
for (var i = 0; i < xs.length; i++) {
for (var j = 0; j < ys.length; j++) {
// transform [ [1, 2], 3 ] => [ 1, 2, 3 ] and append it to result []
result.push([].concat.apply([], [xs[i], ys[j]]));
}
}
return result;
}
function cartesian_power(xs, n) {
var result = xs;
for (var i = 1; i < n; i++) {
result = cartesian_product(result, xs)
}
return result;
}
function unique_cartesian_power(xs, n) {
var result = cartesian_power(xs, n);
var unique_result = [];
const set = new Set();
result.forEach(function(value) {
var representation = value.sort().join(' ');
if (!set.has(representation)) {
set.add(representation);
unique_result.push(value);
}
});
return unique_result;
}
console.log(unique_cartesian_power([1, 2, 3, 4, 5, 6], 3));
const arr = [1, 2, 3, 4, 5, 6];
const result = arr.reduce((a, v) => arr.reduce((a, v2) => {
arr.reduce((a, v3) => {
const current = [v, v2, v3].sort().join(",");
!a.find(_ => _.sort().join() === current) && a.push([v, v2, v3]);
return a;
}, a);
return a;
}, a), []);
console.log(result.length);
console.log(...result.map(JSON.stringify));
You could take an iterative and recursive approach, carrying the current index and a temporary array for the collected values.
Because the index only ever moves upwards, no duplicate sets are created.
function getCombination(array, length) {
function iter(index, right) {
if (right.length === length) return result.push(right);
if (index === array.length) return;
for (let i = index, l = array.length - length + right.length + 1; i < l; i++) {
iter(i + 1, [...right, array[i]]);
}
}
var result = [];
iter(0, []);
return result;
}
var array = [1, 2, 3, 4, 5, 6],
result = getCombination(array, 3);
console.log(result.length);
result.forEach(a => console.log(...a));
.as-console-wrapper { max-height: 100% !important; top: 0; }
This is a good example of why it is usually worthwhile not to ask for a specific answer to a generic problem illustrated with a specific question; however, as you've requested it - if you really have the above constraints, which kind of don't make much sense to me - you could do it like this:
function combine(firstDigits, secondDigits, thirdDigits) {
let result = [];
firstDigits.forEach(firstDigit => {
// combine with all secondDigitPermutations
secondDigits.forEach(secondDigit => {
// combine with all thirdDigitPermutations
thirdDigits.forEach(thirdDigit => {
result.push([firstDigit, secondDigit, thirdDigit])
})
})
});
// now we have all permutations and simply need to filter them
// [1,2,3] is the same as [2,3,1]; so we need to sort them
// and check them for equality (by using a hash) and memoize them
// [1,2,3] => '123'
function hashCombination(combination) {
return combination.join('ಠ_ಠ');
}
return result
// sort individual combinations to make them equal
.map(combination => combination.sort())
.reduce((acc, currentCombination) => {
// transform the currentCombination into a "hash"
let hash = hashCombination(currentCombination);
// and look it up; if it is not there, add it to cache and result
if (!(hash in acc.cache)) {
acc.cache[hash] = true;
acc.result.push(currentCombination);
}
return acc;
}, {result: [], cache: {}})
.result;
}
console.log(combine([1,2,3,4,5,6],[1,2,3,4,5,6],[1,2,3,4,5,6]).length);
console.log(...combine([1,2,3,4,5,6],[1,2,3,4,5,6],[1,2,3,4,5,6]).map(JSON.stringify));
This does not rely on any super-clever assumptions about indices, but it does exploit the fact that it's all about numbers. It deliberately uses no recursion, because recursion would easily blow up if the number of combinations grows, and because recursion in itself is not very readable.
For a real-world problem™ you'd employ a somewhat similar strategy though: generate all combinations and then filter them. Doing both at the same time is left as an exercise for the astute reader. For finding combinations that look different but are considered to be the same, you'd also use some kind of hashing and memoization.
let arr1 = [1,2,3,4,5,6];
function getCombination(arr){
let arr2 = [];
for(let i=0; i<arr.length; i++){
for(let j=i; j<arr.length; j++){
for(let k=j; k<arr.length; k++){
arr2.push([arr[i],arr[j],arr[k]]);
}
}
}
return arr2;
}
console.log(getCombination(arr1));
I need to get all possible subsets of an array.
Say I have this:
[1, 2, 3]
How do I get this?
[], [1], [2], [3], [1, 2], [2, 3], [1, 3], [1, 2, 3]
I am interested in all subsets. For subsets of specific length, refer to the following questions:
Finding subsets of size n: 1, 2
Finding subsets of size > 1: 1
Here is one more very elegant solution with no loops or recursion, using only the native Array map and reduce functions.
const getAllSubsets =
theArray => theArray.reduce(
(subsets, value) => subsets.concat(
subsets.map(set => [value,...set])
),
[[]]
);
console.log(getAllSubsets([1,2,3]));
We can solve this problem for a subset of the input array, starting from offset. Then we recurse back to get a complete solution.
Using a generator function allows us to iterate through subsets with constant memory usage:
// Generate all array subsets:
function* subsets(array, offset = 0) {
while (offset < array.length) {
let first = array[offset++];
for (let subset of subsets(array, offset)) {
subset.push(first);
yield subset;
}
}
yield [];
}
// Example:
for (let subset of subsets([1, 2, 3])) {
console.log(subset);
}
Runtime complexity is proportional to the number of solutions (2ⁿ) times the average length per solution (n/2), i.e. O(n·2ⁿ).
Simple solution without recursion:
function getAllSubsets(array) {
const subsets = [[]];
for (const el of array) {
const last = subsets.length-1;
for (let i = 0; i <= last; i++) {
subsets.push( [...subsets[i], el] );
}
}
return subsets;
}
How does it work?
If we have some subsets generated from the input numbers and we want to add one more number to our input array, we can take all already existing subsets and generate new ones by appending the new number to each of them (see the quick check after the steps below).
Here is an example for [1, 2, 3]
Start with an empty subset: []
Create new subsets by adding "1" to each existing subset. It will be: [], [1]
Create new subsets by adding "2" to each existing subset. It will be: [], [1], [2], [1, 2]
Create new subsets by adding "3" to each existing subset. It will be: [], [1], [2], [1, 2], [3], [1, 3], [2, 3], [1, 2, 3]
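A quick usage check of the getAllSubsets function above, matching the steps just listed:
console.log(getAllSubsets([1, 2, 3]));
// [[], [1], [2], [1, 2], [3], [1, 3], [2, 3], [1, 2, 3]]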
Another simple solution.
function getCombinations(array) {
function fork(i, t) {
if (i === array.length) {
result.push(t);
return;
}
fork(i + 1, t.concat([array[i]]));
fork(i + 1, t);
}
var result = [];
fork(0, []);
return result;
}
var data = [1, 2, 3],
result = getCombinations(data);
console.log(result);
.as-console-wrapper { max-height: 100% !important; top: 0; }
You can easily generate the powerset from an array, using something like the following:
var arr = [1, 2, 3];
function generatePowerSet(array) {
var result = [];
result.push([]);
for (var i = 1; i < (1 << array.length); i++) {
var subset = [];
for (var j = 0; j < array.length; j++)
if (i & (1 << j))
subset.push(array[j]);
result.push(subset);
}
return result;
}
console.log(generatePowerSet(arr));
Throughout the main loop of the function, subsets are created and then pushed into the result array.
I set out to understand what is happening with the examples in this post. While the generator-function example, the bit-wise operator example, and the example using the array map and reduce functions are very elegant and impressive, I found it tough to mentally visualize what precisely was happening. I have 2 examples below, a non-recursive and a recursive solution, that I believe are easy to visualize. I hope this helps others attempting to wrap their heads around the process of finding all subsets.
NON-RECURSIVE:
For each value of the array clone all existing subsets (including the empty set) and add the new value to each of the clones, pushing the clones back to the results.
const PowerSet = array => {
const result = [[]] // Starting with empty set
for (let value of array) { // For each value of the array
const length = result.length // Can't use result.length in loop since
// results length is increased in loop
for (let i = 0; i < length; i++){
let temp = result[i].slice(0) // Make a clone of the value at index i
temp.push(value) // Add current value to clone
result.push(temp) // Add clone back to results array
}
}
return result;
}
console.log(PowerSet([1,2,3]))
RECURSIVELY:
Build the powerset by recursively pushing a combination of the current index value concatenated with an ever increasing prefix array of values.
const powerSetRecursive = (arr, prefix=[], set=[[]]) => {
if (arr.length === 0) return // Base case, end recursion
for (let i = 0; i < arr.length; i++) {
set.push(prefix.concat(arr[i])) // If a prefix comes through, concatenate value
powerSetRecursive(arr.slice(i + 1), prefix.concat(arr[i]), set)
// Call function recursively removing values at or before i and adding
// value at i to prefix
}
return set
}
console.log(powerSetRecursive([1,2,3]))
function subSets(num){
/*
example given number : [1,3]
[]
1: copy push 1
[] [1]
3: copy push 3
[] [1] [3] [1,3]
*/
let result = [];
result.push([]);
for(let i=0; i < num.length;i++){
let currentNum = num[i];
let len = result.length;
for(let j=0; j < len; j++){
let cloneData = result[j].slice();
cloneData.push(currentNum);
result.push(cloneData)
}
}
return result;
}
let test = [1,3];
console.log(subSets(test))//[ [], [ 1 ], [ 3 ], [ 1, 3 ] ]
let subsets = (n) => {
let result = [];
result.push([]);
n.forEach(a => {
//array length
let length = result.length;
let i =0;
while(i < length){
let temp = result[i].slice(0);
temp.push(a);
result.push(temp);
i++;
}
})
return result;
}
Using flatMap and rest/spread, this can be fairly elegant:
const subsets = ([x, ...xs]) =>
x == undefined
? [[]]
: subsets (xs) .flatMap (ss => [ss, [x, ...ss]])
console .log (subsets ([1, 2, 3]))
.as-console-wrapper {max-height: 100% !important; top: 0}
This version does not return them in the requested order. Doing that seems slightly less elegant, and there's probably a better version:
const subset = (xs = []) => {
if (xs.length == 0) {return [[]]}
const ys = subset (xs .slice (0, -1))
const x = xs .slice (-1) [0]
return [... ys, ... ys .map (y => [... y, x])]
}
Or, the same algorithm in a different style,
const subsets = (
xs = [],
x = xs .slice (-1) [0],
ys = xs.length && subsets (xs .slice (0, -1))
) =>
xs .length == 0
? [[]]
: [... ys, ... ys .map (y => [... y, x])]
A shorter version of @koorchik's answer.
var getAllSubsets = (nums) => {
const subsets = [[]];
for (const n of nums) {
subsets.forEach((el) => {
subsets.push([...el, n]);
});
}
return subsets;
};
console.log(getAllSubsets([1, 2, 3]));
// [[],[1],[2],[1,2],[3],[1,3],[2,3],[1,2,3]]
For loop:
function powerSet(numbers) {
const subsets = [[]]
for (const number of numbers) {
subsets.forEach(subset => subsets.push([...subset, number]))
}
return subsets
}
Recursion:
function powerSet(numbers) {
const subsets = [[]]
if (numbers.length === 0) return subsets
for (let i = 0; i < numbers.length; i++) {
subsets.push(...powerSet(numbers.slice(i + 1)).map(subset => [numbers[i], ...subset]))
// Or step by step:
// const number = numbers[i]
// const otherNumbers = numbers.slice(i + 1)
// const otherNumbersSubsets = powerSet(otherNumbers)
// const otherNumbersSubsetsWithNumber = otherNumbersSubsets.map(subset => [number, ...subset])
// subsets.push(...otherNumbersSubsetsWithNumber)
}
return subsets
}
Using reduceRight:
const subsets = array =>
array.reduceRight(
(accumulator, a) => [...accumulator, ...accumulator.map(b => [a, ...b])],
[[]]
);
console.log(subsets([1, 2, 3])); // [[], [3], [2], [2, 3], [1], [1, 3], [1, 2], [1, 2, 3]]
This one uses recursion:
var subsets = function(s){
if(s.length === 0) {
return [[]]
}
var h,t,ss_excl_h;
var ss_incl_h = [];
[h,...t] = s;
ss_excl_h = subsets(t)
for (const ss of ss_excl_h) {
let hArr = [];
hArr.push(h);
let temp = hArr.concat(ss)
ss_incl_h.push(temp);
}
return ss_incl_h.concat(ss_excl_h)
}
console.log(subsets([1,2,3])) // returns distinct subsets
Update ES2020
With ES2020 BigInts have become available.
Bigints don’t have a fixed storage size in bits; their sizes adapt to the integers they represent.
- Dr. Axel Rauschmayer; JavaScript for impatient programmers - Chapter 18.2 BigInts
See source.
Using BigInts we can use a binary counter to calculate the power set and are no longer limited by the maximum integer size.
Using a generator, we can additionally loop over a power set with constant memory requirements, which is important if you want to generate a huge power set.
Here is an example using your original array [1, 2, 3].
/**
* Generate power set from a given array
* @param {Array<any>} array array to create power set from
*/
function* powerSet(array){
// use a BigInt counter and BigInt bit masks so we are not limited by
// Number's 53-bit integers (or the 32-bit range of its bitwise operators)
const size = 2n ** BigInt(array.length);
for (let i = 0n; i < size; i++) {
const cur = [];
for(let j = 0; j < array.length; j++){
// check if the j-th bit of the counter is set to 1
if((i & (1n << BigInt(j))) > 0n){
// push array value (represented by that 1-bit) to result
cur.push(array[j]);
}
}
// generate next result
yield cur;
}
}
// generate power set for [1, 2, 3] and print results
console.log([...powerSet([1, 2, 3])]);
.as-console-wrapper { max-height: 100% !important; top: 0; }
Here is how you could loop over a very large power set with constant memory and with no upper bound on the array length (theoretically; in practice compute time imposes one).
/**
* Generate power set from a given array
* @param {Array<any>} array array to create power set from
*/
function* powerSet(array){
// use a BigInt counter and BigInt bit masks so we are not limited by
// Number's 53-bit integers (or the 32-bit range of its bitwise operators)
const size = 2n ** BigInt(array.length);
for (let i = 0n; i < size; i++) {
const cur = [];
for(let j = 0; j < array.length; j++){
// check if the j-th bit of the counter is set to 1
if((i & (1n << BigInt(j))) > 0n){
// push array value (represented by that 1-bit) to result
cur.push(array[j]);
}
}
// generate next result
yield cur;
}
}
/**
* Helper function to generate an array containing more than 53 elements
* @param {number} start
* @param {number} end
*/
function* range(start, end){
for (let i = start; i < end; i++) {
yield i;
}
}
// create an array containing elements 1 through 60 ([1, 2, 3, ..., 60])
const oneToSixty = [...range(1, 61)];
let i = 0;
const max = 1000;
// loop over whole powerSet with constant memory requirement
// abort after 1000 subsets, otherwise this will take a very long time to complete
for(const subset of powerSet(oneToSixty)){
console.log(subset);
if(i++ === max) break;
}
.as-console-wrapper { max-height: 100% !important; top: 0; }
Given a rotate function like the one below, which rotates an array by a set number of slots, is there an equivalent Ramda.js function or composition that will do this rotation?
var test = [1,2,3,4,5,6,7,8,9];
function rotate(arr, count) {
arr = arr.slice();
while (count < 0) {
count += arr.length;
}
count %= arr.length;
if (count) {
arr.splice.apply(arr, [0, 0].concat([].slice.call(arr.splice(arr.length - count, count))));
}
return arr;
}
Example:
rotate(test, 2) // -> [8, 9, 1, 2, 3, 4, 5, 6, 7]
Here's a point-free one-liner which takes the count first and the data second, consistent with Ramda's composable style:
const rotate = pipe(splitAt, reverse, flatten);
Of course you can always flip(rotate) to get a data-first version.
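For example, a usage sketch (assuming Ramda is loaded as R):
const rotate = R.pipe(R.splitAt, R.reverse, R.flatten);
console.log(rotate(2, [1, 2, 3, 4, 5, 6, 7, 8, 9])); // [3, 4, 5, 6, 7, 8, 9, 1, 2] (left rotation)
const rotateData = R.flip(rotate); // data-first version
console.log(rotateData([1, 2, 3, 4, 5, 6, 7, 8, 9], 2)); // [3, 4, 5, 6, 7, 8, 9, 1, 2]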
UPDATE
Sorry, I read too fast and assumed the normal, left-wise direction for the rotation (e.g., as it is in Ruby). Here's a variation of the idea that does what your original does:
const rotate = pipe(useWith(splitAt, [negate, identity]), reverse, flatten);
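As a quick check (again assuming Ramda is loaded as R), this variant rotates in the same direction as the rotate function in the question:
const rotate = R.pipe(R.useWith(R.splitAt, [R.negate, R.identity]), R.reverse, R.flatten);
console.log(rotate(2, [1, 2, 3, 4, 5, 6, 7, 8, 9])); // [8, 9, 1, 2, 3, 4, 5, 6, 7], same as rotate(test, 2) above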
This is similar to @donnut's answer, but includes modulo arithmetic to handle counts which exceed the length of the given list:
var rotate2 = function(xs, count) {
var n = -(count % xs.length);
return R.concat(R.slice(n, Infinity, xs),
R.slice(0, n, xs));
};
Here's a mutation-free equivalent which doesn't use Ramda at all:
var rotate3 = function(xs, count) {
var n = -(count % xs.length);
return xs.slice(n).concat(xs.slice(0, n));
};
Both solutions are significantly more declarative than the solution in the original post.
You could try:
function rotate(arr, count) {
return R.concat(R.slice(arr.length-count, arr.length, arr), R.slice(0, arr.length-count, arr));
}
See http://bit.ly/1G90ny8