Find the minimum cost combination algorithm - javascript

Given an array N which contains at least 5 items, I want to find two indices (P and Q) such that 0 < P < Q < N.length - 1 (i.e. neither the first nor the last element is used).
Suppose we have the following array:
const N = [1, 9, 4, 5, 8];
if P = 1 , Q = 2 , the cost will be N[P] + N[Q] = N[1] + N[2] = 9 + 4 = 13
if P = 1, Q = 3 , the cost will be N[P] + N[Q] = N[1] + N[3] = 9 + 5 = 14
if P = 2, Q = 3 , the cost will be N[P] + N[Q] = N[2] + N[3] = 4 + 5 = 9
From here the combination which gives the minimum cost is P = 2 and Q = 3.
Here is the solution that I found; I am looking for your help to see whether I can improve its time complexity:
function solution(N) {
// since 0 < P < Q < N - 1
const sliced = N.slice(1, N.length - 1);
const sorted = sliced.sort((a, b) => a - b);
// the minimum should be from the start since we have sorted the array
const P = 0;
const Q = 1;
return getCost(P, Q, sorted);
}
function getCost(P, Q, N) {
return N[P] + N[Q];
}
// output should be 9
console.log(solution([1, 9, 4, 5, 8]))
Right now it's O(n log(n)) because of the sort, but I am wondering if we can improve it to O(n), for example.
Thanks for your help

function twoSmallest(arr) {
let [first, second] = [arr[1], arr[2]]
for (let i = 3; i < arr.length - 1; i++) {
const el = arr[i]
if (el < first && el < second) {
[first, second] = [Math.min(first, second), el]
} else if (el < first) {
[first, second] = [second, el]
} else if (el < second) {
second = el
}
}
return first + second
}
This is an O(n) time and O(1) space solution. It also makes sure that the element with the smaller index is kept in first, in case you need the indices and they are of interest for some reason.
The algorithm is clear, IMO, but the JS code is probably not the best implementation. I haven't written JS for some time.
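For example, with the sample array from the question this should print 9:
console.log(twoSmallest([1, 9, 4, 5, 8])) // 9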

What do you think of this solution?
function solution([_, ...n]) { // destructuring drops the first element
n.pop() // drop the last element as well, since 0 < P < Q < N.length - 1
n.sort((a, b) => a - b);
return n[0] + n[1];
}
// output should be 9
console.log(solution([1, 9, 4, 5, 8]))
The logic is the same as you outlined - only using some other approach that JS offers.

I'm pretty sure this is O(n):
const solution = (arr) => {
// find smallest that's not at either end
let idx = 1;
let P = arr[1];
for(let i = 2; i < arr.length-1; i++) {
if(arr[i] < P) {
idx = i;
P = arr[i];
}
}
// find second smallest that's not at either end
let Q = Infinity;
for(let i = 1; i < arr.length-1; i++) {
if(i == idx) continue;
if(arr[i] < Q) Q = arr[i];
}
return P + Q;
}
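For example, with the sample array from the question this should print 9:
console.log(solution([1, 9, 4, 5, 8])); // 9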

Here is the fastest way to find k smallest numbers in a list with Python. The rest is trivial
fastest method of getting k smallest numbers in unsorted list of size N in python?


HeapSort implementation in Javascript

I am learning about heaps and I wanted to implement the heap sort algorithm in Javascript using MinHeap.
The issue is that I keep getting a non-sorted array.
I even tried to just translate a working algorithm from C++ to Javascript.
Original algorithm link: https://www.geeksforgeeks.org/heap-sort-for-decreasing-order-using-min-heap/
C++:
// To heapify a subtree rooted with node i which is
// an index in arr[]. n is size of heap
void heapify(int arr[], int n, int i)
{
int smallest = i; // Initialize smallest as root
int l = 2 * i + 1; // left = 2*i + 1
int r = 2 * i + 2; // right = 2*i + 2
// If left child is smaller than root
if (l < n && arr[l] < arr[smallest])
smallest = l;
// If right child is smaller than smallest so far
if (r < n && arr[r] < arr[smallest])
smallest = r;
// If smallest is not root
if (smallest != i) {
swap(arr[i], arr[smallest]);
// Recursively heapify the affected sub-tree
heapify(arr, n, smallest);
}
}
// main function to do heap sort
void heapSort(int arr[], int n)
{
// Build heap (rearrange array)
for (int i = n / 2 - 1; i >= 0; i--)
heapify(arr, n, i);
// One by one extract an element from heap
for (int i = n - 1; i >= 0; i--) {
// Move current root to end
swap(arr[0], arr[i]);
// call max heapify on the reduced heap
heapify(arr, i, 0);
}
}
Javascript (translated code):
function swap(arr, i, j){
const c = arr[i];
arr[i] = arr[j];
arr[j] = c;
}
function heapify(arr, n, i)
{
let smallest = i; // Initialize smallest as root
let l = 2 * i + 1; // left = 2*i + 1
let r = 2 * i + 2; // right = 2*i + 2
// If left child is smaller than root
if (l < n && arr[l] < arr[smallest])
smallest = l;
// If right child is smaller than smallest so far
if (r < n && arr[r] < arr[smallest])
smallest = r;
// If smallest is not root
if (smallest != i) {
swap(arr[i], arr[smallest]);
// Recursively heapify the affected sub-tree
heapify(arr, n, smallest);
}
}
// main function to do heap sort
function heapSort(arr, n)
{
// Build heap (rearrange array)
for (let i = n / 2 - 1; i >= 0; i--)
heapify(arr, n, i);
// One by one extract an element from heap
for (let i = n - 1; i >= 0; i--) {
// Move current root to end
swap(arr[0], arr[i]);
// call max heapify on the reduced heap
heapify(arr, i, 0);
}
}
When I try it with the array arr = [1,2,7,3,5], the heapSort algorithm returns this array: [ 1, 2, 7, 3, 5 ];
Can you please help me figure out what's wrong with the JS implementation?
thank you in advance!
This code should work fine:
const heapify = (arr, length, i) => {
let largest = i
const left = i * 2 + 1
const right = left + 1
if (left < length && arr[left] > arr[largest]) {
largest = left
}
if (right < length && arr[right] > arr[largest]) {
largest = right
}
if (largest !== i) {
[arr[i], arr[largest]] = [arr[largest], arr[i]]
heapify(arr, length, largest)
}
return arr
}
const heapSort = arr => {
const length = arr.length
let i = Math.floor(length / 2 - 1)
let k = length - 1
while (i >= 0) {
heapify(arr, length, i)
i--
}
while (k >= 0) {
[arr[0], arr[k]] = [arr[k], arr[0]]
heapify(arr, k, 0)
k--
}
return arr
}
const arr = [4, 6, 3, 2, 9];
sortedArr = heapSort(arr);
console.log("Sorted array is \n", sortedArr)
I took it from here. Take a look at the post if you are more interested in how it's implemented. It's very well explained.
UPDATE
OK, so about your code, I see exactly two problems:
You are using the swap function incorrectly. Just change swap(arr[i], arr[smallest]) to swap(arr, i, smallest) and swap(arr[0], arr[i]) to swap(arr, 0, i). Also, if you want to use the latest ES6 features, you can swap elements in an array without implementing that swap function at all, like this: [arr[0], arr[2]] = [arr[2], arr[0]] (this swaps the element at position 0 with the element at position 2). This is called destructuring assignment.
In the first for loop of your heapSort function, initialize the i variable to an integer (notice that n / 2 could be a float). You can do it like this: let i = Math.floor(n / 2 - 1).
Here I leave you the fixed code. I've executed it by myself and it works:
function swap(arr, i, j){
const c = arr[i];
arr[i] = arr[j];
arr[j] = c;
}
function heapify(arr, n, i)
{
let smallest = i; // Initialize smallest as root
let l = 2 * i + 1; // left = 2*i + 1
let r = 2 * i + 2; // right = 2*i + 2
// If left child is smaller than root
if (l < n && arr[l] < arr[smallest])
smallest = l;
// If right child is smaller than smallest so far
if (r < n && arr[r] < arr[smallest])
smallest = r;
// If smallest is not root
if (smallest != i) {
swap(arr, i, smallest);
// Recursively heapify the affected sub-tree
heapify(arr, n, smallest);
}
}
// main function to do heap sort
function heapSort(arr, n)
{
// Build heap (rearrange array)
for (let i = Math.floor(n / 2 - 1); i >= 0; i--)
heapify(arr, n, i);
// One by one extract an element from heap
for (let i = n - 1; i >= 0; i--) {
// Move current root to end
swap(arr, 0, i);
// call max heapify on the reduced heap
heapify(arr, i, 0);
}
}
const arr = [4, 6, 3, 2, 9];
heapSort(arr, arr.length);
console.log("Sorted array is \n", arr)
Here is my version of heapsort.
This is a non-recursive solution and it modifies the original array in place.
function swap(arr, i, j) {
const tmp = arr[i];
arr[i] = arr[j];
arr[j] = tmp;
}
function heapify(arr, start = 0) {
for(let i = start;i < arr.length; i++) {
let j = i;
let root = start + Math.floor((j-start)/2);
while(( root >= start ) && (arr[root] < arr[j])) {
swap(arr, root, j);
j = root;
root = start + Math.floor((j-start)/2);
}
}
}
function heapSort(arr) {
for(let i = 0; i < arr.length;i++)
heapify(arr, i);
}
const arr = [1,2,8000,3,4,5,-1,200000,8000,-1,20000];
heapSort(arr);
console.log(arr);
const HeapSort = (arg) => {
const Income_arr = [...arg];
const Output_arr = [];
const InnerSort = () => {
const length = Income_arr.length;
for (let i = 0; i < Income_arr.length - 1; i++) {
let max = i;
const left = i + 1;
const right = i + 2;
// change '>' to '<' depending on which order we want (descending or ascending)
if (i <= length && Income_arr[i] > Income_arr[left]) {
// swapping the array
[Income_arr[i], Income_arr[left]] = [Income_arr[left], Income_arr[i]];
}
if (i <= length && Income_arr[i] > Income_arr[right]) {
// swapping the array
[Income_arr[i], Income_arr[right]] = [Income_arr[right], Income_arr[i]];
}
}
Output_arr.push(Income_arr.pop()); // Add the largest number to Output_arr and remove it from Income_arr
return Income_arr;
};
for (let i = arg.length - 1; i >= 0; i--) {
// Run once for every element of the array
InnerSort(); // To Find the largest number
}
console.log(Output_arr)
return Output_arr;
};
HeapSort([16, 20, 99, 34, 17, 15]);
HeapSort([16, 20, -99, 34, 17, 15]);
HeapSort([4, 20, 9, 34, 2, 15]);
const HeapSort = (arg) => {
const Income_arr = [...arg];
const Output_arr = [];
const InnerSort = () => {
const length = Income_arr.length;
for (let i = 0; i < Income_arr.length - 1; i++) {
let max = i;
const left = i + 1;
const right = i + 2;
// change '>' to '<' depending on which order we want (descending or ascending)
if (i <= length && Income_arr[i] > Income_arr[left]) {
// swapping the array
[Income_arr[i], Income_arr[left]] = [Income_arr[left], Income_arr[i]];
}
if (i <= length && Income_arr[i] > Income_arr[right]) {
// swapping the array
[Income_arr[i], Income_arr[right]] = [Income_arr[right], Income_arr[i]];
}
}
Output_arr.push(Income_arr[length - 1]); // Add the largest Number in output_arr
Income_arr.pop(); // Remove the largest Number
return Income_arr;
};
for (let i = arg.length - 1; i >= 0; i--) {
// Run once for every element of the array
InnerSort(); // To Find the largest number
}
console.log(Output_arr)
return Output_arr;
};
HeapSort([16, 20, 99, 34, 17, 15]);
HeapSort([16, 20, -99, 34, 17, 15]);
HeapSort([4, 20, 9, 34, 2, 15]);

How do I compare with the rest of the elements in the array

I am working on a leetcode question and I can't quite think of a way to compare the rest of the elements in the array with one another. I figured it out for the biggest and smallest numbers, but comparing with the rest of them is something I am having trouble with. Below you will find the question and my work on it:
How Many Numbers Are Smaller Than the Current Number?
Given the array nums, for each nums[i] find out how many numbers in the array are smaller than it. That is, for each nums[i] you have to count the number of valid j's such that j != i and nums[j] < nums[i].
Return the answer in an array.
Example 1:
Input: nums = [8,1,2,2,3]
Output: [4,0,1,1,3]
Explanation:
For nums[0]=8 there exist four smaller numbers than it (1, 2, 2 and 3).
For nums[1]=1 there does not exist any smaller number than it.
For nums[2]=2 there exists one smaller number than it (1).
For nums[3]=2 there exists one smaller number than it (1).
For nums[4]=3 there exist three smaller numbers than it (1, 2 and 2).
My work:
var smallerNumbersThanCurrent = (nums) => {
const output = []
const max = nums.reduce(function(a, b) {
return Math.max(a, b);
});
const min = nums.reduce(function(a, b) {
return Math.min(a, b);
});
for(let i = 0; i < nums.length; i++){
if(nums[i] === max){
output.push(nums.length - 1)
} else if (nums[i] === min){
output.push(0)
}
else if (nums[i] < max && nums[i] > min){
//how do i compare with rest of the elements in the array?
}
}
}
Use a nested loop.
nums = [8,1,2,2,3];
answer = [];
for (let i = 0; i < nums.length; i++) {
let count = 0;
for (let j = 0; j < nums.length; j++) {
if (nums[j] < nums[i]) {
count++;
}
}
answer.push(count);
console.log(`For nums[${i}]=${nums[i]} there are ${count} lower numbers`);
}
console.log(`Answer: ${answer}`);
It's not necessary to test i != j since a number will never be lower than itself.
A much easier way would be to simply sort the array, and then the index of the element will tell you how many are less than it:
const nums = [8,1,2,2,3]
const sorted = [...nums].sort((a, b) => a - b); // numeric comparator (the default sort is lexicographic)
const result = nums.map((i) => {
return sorted.findIndex(s => s === i);
});
console.log(result);
This has the added benefit that you don't have to search the entire array for each number.
I'd do like:
function rankZero(array){
const s = [...array], r = [];
s.sort((a, b)=>{
return a - b;
});
for(let n of array){
r.push(s.indexOf(n));
}
return r;
}
console.log(rankZero([8, 1, 2, 2, 3]));
One way to do this is to filter the array on the condition that the value is less than the current one and then count the number of values in the filtered array:
const nums = [8,1,2,2,3];
const smallerNums = nums.map(v => nums.filter(n => n < v).length);
console.log(smallerNums); // [4,0,1,1,3]
Alternatively you can do a count in reduce, which should be significantly faster:
const nums = [8, 1, 2, 2, 3];
const smallerNums = nums.map(v => nums.reduce((c, n) => c += (n < v), 0));
console.log(smallerNums); // [4,0,1,1,3]
Inspired by @tao I did performance testing of each solution. On my computer (an Intel Core i9-9900 with 64GB RAM) @StackSlave's solution is consistently the fastest, followed by the other sorting solution, the reduce solution, the basic iteration and the filter. You can run the tests yourself below:
const datalength = 1000;
const iterations = 100;
const getRandom = (min, max) => Math.random() * (max - min) + min;
const data = Array.from({
length: datalength
}, () => getRandom(1, 100));
const mapper = arr => arr.map(i => arr.filter(n => n < i).length);
const sorter = nums => {
const sorted = [...nums].sort();
const result = nums.map((i) => {
return sorted.findIndex(s => s === i);
});
};
const iterator = arr => {
const answer = [];
for (let i = 0; i < arr.length; i++) {
let count = 0;
for (let j = 0; j < arr.length; j++) {
if (arr[j] < arr[i]) {
count++;
}
}
answer.push(count);
}
return answer;
};
const rankZero = array => {
const s = [...array],
r = [];
s.sort((a, b) => {
return a - b;
});
for (let n of array) {
r.push(s.indexOf(n));
}
return r;
}
const reducer = arr => arr.map(v => arr.reduce((c, n) => c += (n < v), 0));
let fns = {
'iterator': iterator,
'mapper': mapper,
'sorter': sorter,
'reducer': reducer,
'rankZero': rankZero
}
for (let [name, fn] of Object.entries(fns)) {
let total = 0;
for (i = 0; i < iterations; i++) {
let t0 = performance.now();
fn(data);
let t1 = performance.now();
total += t1 - t0;
}
console.log(name, (total / iterations).toFixed(2));
}

Recursive algorithm fails to complete tests in allotted time

I was doing a test that required an algorithm for Binary Tomography. A set of 38 test values are supplied that test correctness, but there is also a time limit of 1 CPU sec to complete all the tests. The problem is as follows:
Output "Yes" if there exists an m-by-n matrix A, with each element either being 0 or 1, such that the elements of row i sum to ri for every row i (1 <= i <= m) and the elements of column j sum to cj for every column j (1 <= j <= n).
Otherwise output "No".
For each test, 2 arrays are provided:
r (the sum of each row in the matrix)
c (the sum of each column in the matrix)
In the equation:
m is the length of the r array, where 1 <= m
n is the length of the c array, where n <= 1000
ri is an element of r, where 0 <= ri <= n
cj is an element of c, where 0 <= cj <= m
A "Yes" example
m = 3;
n = 4;
r = [2, 3, 2];
c = [1, 1, 3, 2];
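For example, one matrix that produces these sums is:
0 0 1 1   (row sum 2)
1 0 1 1   (row sum 3)
0 1 1 0   (row sum 2)
column sums: 1 1 3 2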
A "No" example
m = 3;
n = 3;
r = [0, 0, 3];
c = [0, 0, 3];
I have a solution that appears to give correct answers, however it only manages 12 / 38 tests before the 1 second of CPU time is exceeded.
I originally wrote the code in ES5 and then went back and converted it to ES3 to try to get more performance out of it (it originally managed 9 tests as ES5). There doesn't seem to be a great deal left that I can do to the current algorithm to improve the performance (unless I am mistaken). This leads me to believe that my algorithm is at fault and that there must be a faster algorithm for doing this. I did a ton of reading trying to find one and ended up with a headache :)
So I'm turning to the community to see if anyone can suggest a faster algorithm than I am currently using.
'use strict';
const ZEROS = (function (seed) {
let string = seed;
for (let i = 0; i < 19; i += 1) {
string += seed;
}
return string;
}('00000000000000000000000000000000000000000000000000'));
const ZEROSLEN = ZEROS.length;
const permutate = function (n, ri) {
const result = [];
const memoize = {};
let count = 0;
do {
const bin = count.toString(2);
if (ZEROSLEN + bin.length > ZEROSLEN + n) {
break;
}
if (!memoize[bin] && (bin.split('1').length - 1) === ri) {
const string = (ZEROS + bin).slice(-n);
const sLen = string.length;
const perm = new Array(sLen);
for (let i = sLen - 1; i >= 0; i -= 1) {
perm[i] = +string[i];
}
memoize[bin] = result.push(perm);
}
count += 1;
} while (count);
return result;
};
const getMatrixSum = function (n, matrix) {
const mLength = matrix.length;
const rows = new Array(mLength);
const a = new Array(n);
const last = mLength - 1;
for (let x = n - 1; x >= 0; x -= 1) {
for (let y = last; y >= 0; y -= 1) {
rows[y] = matrix[y][x];
}
let sum = 0;
for (let i = rows.length - 1; i >= 0; i -= 1) {
sum += rows[i];
}
a[x] = sum;
}
return a;
};
const isEqual = function (a, b) {
const length = a.length;
if (length !== b.length) {
return false;
}
for (let i = length - 1; i >= 0; i -= 1) {
if (a[i] !== b[i]) {
return false;
}
}
return true;
};
const addRow = function (i, prev, r, c, result) {
if (result) {
return result;
}
const n = c.length;
const ri = r[i];
if (ri < 0 || ri > n) {
throw new RangeError('ri out of range');
}
const p = permutate(n, ri);
const m = r.length;
const rsLast = m - 1;
const nextI = i + 1;
for (let x = p.length - 1; x >= 0; x -= 1) {
const permutation = p[x];
const next = prev.slice();
next.push(permutation);
const sums = getMatrixSum(n, next);
if (i < rsLast) {
let memo = 0;
for (let j = sums.length - 1; j >= 0; j -= 1) {
if (sums[j] > c[j]) {
memo += 1;
}
}
if (!memo && addRow(nextI, next, r, c, result)) {
return true;
}
} else if (isEqual(sums, c)) {
return true;
}
}
return false;
};
const isSolvable = function (r, c) {
const m = r.length;
const n = c.length;
if (m < 1 || n > 1000) {
throw new Error('Bad data');
}
for (let j = n - 1; j >= 0; j -= 1) {
const cj = c[j];
if (cj < 0 || cj > m) {
throw new RangeError('cj out of range');
}
}
return addRow(0, [], r, c, false) ? 'Yes' : 'No';
};
console.log(isSolvable([2, 3, 2], [1, 1, 3, 2]));
console.log(isSolvable([0, 0, 3], [0, 0, 3]));
It may be worth noting that the tests are being run on SpiderMonkey version JavaScript-C24.2.0
Refs:
https://en.wikipedia.org/wiki/Discrete_tomography
https://open.kattis.com/problems/tomography
Since permutations amount to brute force, they should be the last resort when developing algorithms like this one. Most of the time they are not needed.
As I commented above, I have a feeling that one strategy could be to first sort the r and c arrays descending and start with the bigger ones. I haven't had time to implement JS code to work this out, so I haven't had a chance to test it thoroughly. Please have a look, and if you discover a flaw please mention it.
In the visual representation of the algorithm (image not reproduced here) we try r = [1,3,1,3] and c = [3,2,1,2]. X denotes an occupied cell and a red dot denotes an untouchable cell, while the empty ones are obviously the free cells. So in the real algorithm, to represent a cell we need a data type like {value: false, avail: false} for a red dot, while {value: false, avail: true} would mean a free space. Or, to save space and speed, you may use a data type like 0b00 for a red dot, 0b01 for a free space and 0b1X for occupied cells (X here means don't care).
Note: It's worth mentioning Step 3 where we process c[0]. After we insert the three Xs we have to check the rows occupied by the Xs to update the status of the empty cells in those rows. In this case for r[2], all empty cells become untouchable.
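For illustration, a minimal sketch of that cell encoding (the constant and function names here are just placeholders, not code from the post):
// sketch of the 2-bit cell states described above (hypothetical names)
const UNTOUCHABLE = 0b00; // "red dot": may not receive an X anymore
const FREE = 0b01;        // empty cell that can still receive an X
const OCCUPIED = 0b10;    // holds an X (the low bit is "don't care")
const isOccupied = cell => (cell & 0b10) !== 0;
const isFree = cell => cell === FREE;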
Edit:
Well, OK: since we don't need to construct the solution in a 2D-array-like structure but only need an answer on whether the supplied data is meaningful or not, I have come up with another, simpler idea which is essentially based on the above approach. I really don't think it can get any faster than this. It solves a 999 by 1000 board in something like 50ms.
Let's get into it.
The input is r = [2, 3, 2]; c = [1, 1, 3, 2]; However, one important condition here is that both the c and r arrays should sum up to the same number. We can simply check this at the beginning of our code, or leave it out, go through the following steps and, if they pass, just check whether c is full of 0s at the end. The following code prefers the latter approach.
Sort r descending, so r = [3, 2, 2]; c = [1, 1, 3, 2];
Try to reduce r[0] (3 in the first case) of the non-zero elements of c by 1 each. Now c becomes [0, 0, 2, 2]. If this fails, try no more and return false.
Now that we have finished with row r[0], recursively call the function with r = [2, 2]; c = [0, 0, 2, 2]; while r.length is greater than 0 and the bool argument b is true. The next call will be r = [2]; c = [0, 0, 1, 1]; and finally r = []; c = [0, 0, 0, 0];
If finally a recursive call with an empty r is invoked, check that b is true and all items of c are 0 (b && cs.every(n => !n)).
I believe this is just fine, but as I don't have your test cases it's for you to try. I am sure it will pass the time test though. Here is the code in its simplest form. Here I am testing rs = [7,3,5,4,6,2,8] and cs = [7,1,6,3,4,5,2,7]. It looks like this:
71634527
7 x xxxxxx
3 x x x
5 x x xx x
4 x x x x
6 x xxxx x
2 x x
8 xxxxxxxx
function nonogram(rs,cs){
function runner(rs,cs, b = true){//console.log(rs,cs,b)
return b && rs.length ? runner(rs.slice(1), // rows argument
cs.map(e => rs[0] ? e ? (b = !--rs[0], e-1) // cols argument
: e
: e),
b) // bool argument
: b && cs.every(n => !n);
}
return runner(rs.sort((a,b) => b-a), cs);
}
var rs = [7,3,5,4,6,2,8],
cs = [7,1,6,3,4,5,2,7],
result;
console.time("test");
result = nonogram(rs,cs);
console.timeEnd("test");
console.log(result);
I didn't have this ready for my test, but I found a far more efficient algorithm after the event.
'use strict';
const sortNumber = function (a, b) {
return b - a;
};
const isSolvable = function (r, c) {
const m = r.length;
const n = c.length;
if (m < 1 || n > 1000) {
throw new Error('Bad data');
}
for (let j = n - 1; j >= 0; j -= 1) {
const cj = c[j];
if (cj < 0 || cj > m) {
throw new RangeError('cj out of range');
}
}
while (r.length) {
c.sort(sortNumber);
const ri = r.pop();
if (ri < 0 || ri > n) {
throw new RangeError('ri out of range');
}
if (ri) {
if (!c[ri - 1]) {
return 'No';
}
for (let j = ri - 1; j >= 0; j -= 1) {
c[j] -= 1;
}
}
}
for (let j = n - 1; j >= 0; j -= 1) {
if (c[j]) {
return 'No';
}
}
return 'Yes';
};
console.log(isSolvable([2, 3, 2], [1, 1, 3, 2]));
console.log(isSolvable([0, 0, 3], [0, 0, 3]));

Javascript least common multiple algorithm

I'm trying to write a function that takes two numbers and returns the smallest common multiple that is also divisible by all the numbers between those numbers. What I've got only works for 1,1 through 1,12, but for some reason stops working at 1,13. Other sets like 12,14 work, but I can't figure out why or what the pattern is.
function smallestCommons(arr) {
arr.sort(function(a, b) {
return a-b;
});
var arr1 = [];
var arr2 = [];
for (var k = arr[0]; k<=arr[1]; k++) {
arr1.push(k);
}
function remainder(val1, val2) {
return val1%val2;
}
var b = arr1.reduce(function(a, b) {
return a*b;
});
var i = arr1[arr1.length-1]*arr1[arr1.length-2];
while (i<=b) {
for (var m = 0; m<arr1.length; m++) {
var a = remainder(i, arr1[m]);
arr2.push(a);
}
var answer = arr2.reduce(function(c, d) {
return c+d;
});
if (answer === 0) {
return i;
} else {
arr2 = [];
i++;
}
}
}
I guess you can do it as follows in JavaScript; it can calculate the common LCM of up to a 216-item array, such as [1,2,3,...,216], in less than 0.25 ms.
function gcd(a,b){
var t = 0;
a < b && (t = b, b = a, a = t); // swap them if a < b
t = a%b;
return t ? gcd(b,t) : b;
}
function lcm(a,b){
return a/gcd(a,b)*b;
}
var arr = [1,2,3,4,5,6,7,8,9,10,11,12,13],
brr = Array(216).fill().map((_,i) => i+1), // limit before Infinity
result = arr.reduce(lcm);
console.log(result);
console.time("limit");
result = brr.reduce(lcm);
console.timeEnd("limit");
console.log(result);
A way is to keep multiplying the largest number in your range by an increasing counter and check whether all the other numbers divide the product. If yes, return it; otherwise continue the loop.
Here is my solution in typescript...
function findLowestCommonMultipleBetween(start: number, end: number): number {
let numbers: number[] = [];
for (let i = start; i <= end; i++) {
numbers.push(i);
}
for (let i = 1; true; i++) {
let divisor = end * i;
if (numbers.every((number) => divisor % number == 0)) {
return divisor;
}
}
}
...but for larger ranges, this is a more efficient answer :)
As far as I can tell your algorithm is giving you a correct answer.
I am far from being a professional programmer, so please feel free to suggest improvements to my code or its style :)
If you want to be able to check for the answer yourself you can check this fiddle:
https://jsfiddle.net/cowCrazy/Ld8khrx7/
function multiplyDict(arr) {
arr.sort(function (a, b) {
return a - b;
});
if (arr[0] === 1) {
arr[0] = 2;
}
var currentArr = [];
for (var i = arr[0]; i <= arr[1]; i++) {
currentArr.push(i);
}
var primeDivs = allPrimes(arr[1]);
var divsDict = {};
for (var j = currentArr[0]; j <= currentArr[currentArr.length -1]; j++){
divsDict[j] = [];
if (primeDivs.indexOf(j) > -1) {
divsDict[j].push(j);
} else {
var x = j;
for (var n = 2; n <= Math.floor(j / 2); n++) {
if (x % n === 0) {
divsDict[j].push(n);
x = x / n;
n--;
continue;
}
}
}
}
return divsDict;
}
function allPrimes(num) {
var primeArr = [];
var smallestDiv = 2;
loopi:
for (var i = 2; i <= num; i++) {
loopj:
for (var j = smallestDiv; j <= largestDiv(i); j++) {
if (i % j === 0) {
continue loopi;
}
}
primeArr.push(i);
}
return primeArr;
}
function largestDiv (a) {
return Math.floor(Math.sqrt(a));
}
multiplyDict([1,13]);
It gives a dictionary of the requested array and the divisors of each element.
From there you can go on your own to check that your algorithm is doing the right job, or you can check it here:
https://jsfiddle.net/cowCrazy/kr04mas7/
I hope it helps
It is true! The result of [1, 13] is 360360, and after this we have [1, 14].
14 = 2 * 7, and we know 360360 is divisible by 2 and 7, so the answer is 360360 again.
[1, 15]: 15 = 3 * 5 and the result is the same.
[1, 16]: the result is 720720.
[1, 17]: the result is 12252240.
[1, 18]: 18 = 2 * 9 and the result is 12252240, the same as for 17.
[1, 19]: on my computer this process is too heavy to finish, but on a stronger machine it will work, I promise. Your code is just not good performance-wise.
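For comparison, a gcd-based reduction over the range (the same idea as in the answer above, shown only as a sketch) handles the [1, 19] case instantly:
// illustrative sketch: LCM of the whole range via gcd, not the OP's algorithm
function gcd(a, b) { return b ? gcd(b, a % b) : a; }
function smallestCommonsRange([a, b]) {
  const [lo, hi] = a < b ? [a, b] : [b, a];
  let result = 1;
  for (let k = lo; k <= hi; k++) result = result / gcd(result, k) * k;
  return result;
}
console.log(smallestCommonsRange([1, 19])); // 232792560, computed instantly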
To find the LCM of N numbers.
It is compatible with ES6; note that there is no bounds checking, in case we need to handle large numbers.
var a = [10, 40, 50, 7];
console.log(GetMinMultiple(a));
function GetMinMultiple(data) {
var maxOf = data.reduce((max, p) => p > max ? p : max, 0);
var incremental = maxOf;
var found = false;
do {
for (var j = 0; j < data.length; j++) {
if (maxOf % data[j] !== 0) {
maxOf += incremental;
break;
}
else {
if (j === data.length - 1) {
found = true;
break;
}
}
}
} while (!found);
return maxOf;
}
https://jsfiddle.net/djp30gfz/
Here is my solution in Typescript
function greatestCommonDivider(x: number, y: number): number {
if (y === 0) {
return x;
}
return greatestCommonDivider(y, x % y);
}
function singleLowestCommonMultiply(x: number, y: number): number {
return (x * y) / greatestCommonDivider(x, y);
}
function lowestCommonMultiply(...numbers: number[]): number {
/**
* For each number, get its lowest common multiple with the next number.
*
* Then, using the new number, compute the new lowest common multiple.
*/
return numbers.reduce((a, b) => {
return singleLowestCommonMultiply(a, b);
});
}
lowestCommonMultiply(2, 3); // Outputs 6
lowestCommonMultiply(2, 3, 5); // Outputs 30
Playground - click here

Partial sort in JavaScript

Is there any built-in JavaScript function to do a partial sort? If not, what is a good way to implement it?
Given an unsorted array of N elements, I would like to find K elements that are minimal with respect to some weighting function. K is much smaller than N, so it would be inefficient to sort the whole array and take the first K elements.
I would be happy even if there was something non-standard, browser-dependent. I could still fallback to the custom JavaScript implementation.
PS: This is my current custom implementation (without taking a weighting function into account, just sorting the elements as they are for simplicity):
function bisect(items, x, lo, hi) {
var mid;
if (typeof(lo) == 'undefined') lo = 0;
if (typeof(hi) == 'undefined') hi = items.length;
while (lo < hi) {
mid = Math.floor((lo + hi) / 2);
if (x < items[mid]) hi = mid;
else lo = mid + 1;
}
return lo;
}
function insort(items, x) {
items.splice(bisect(items, x), 0, x);
}
function partialSort(items, k) {
var smallest = [];
for (var i = 0, len = items.length; i < len; ++i) {
var item = items[i];
if (smallest.length < k || item < smallest[smallest.length - 1]) {
insort(smallest, item);
if (smallest.length > k)
smallest.splice(k, 1);
}
}
return smallest;
}
console.log(partialSort([5, 4, 3, 2, 1, 6, 7, 8, 1, 9], 3));
The algorithm walks through the given array one single time, keeping track of a sorted list of the k smallest items so far, using binary search to insert new elements.
Please post alternative solutions if you think they might be faster or more elegant. Timings are very welcome.
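For reference, if the weighting function were included, the same idea could look roughly like this sketch (partialSortBy is just a made-up name, and items are kept as [weight, item] pairs):
// rough sketch: keep the k items with the smallest weight(item)
function partialSortBy(items, k, weight) {
  const smallest = []; // [w, item] pairs, kept sorted by w ascending
  for (const item of items) {
    const w = weight(item);
    if (smallest.length < k || w < smallest[smallest.length - 1][0]) {
      // binary search for the insertion point by weight
      let lo = 0, hi = smallest.length;
      while (lo < hi) {
        const mid = (lo + hi) >> 1;
        if (w < smallest[mid][0]) hi = mid; else lo = mid + 1;
      }
      smallest.splice(lo, 0, [w, item]);
      if (smallest.length > k) smallest.length = k;
    }
  }
  return smallest.map(([, item]) => item);
}
console.log(partialSortBy([5, -4, 3, -2, 1], 2, Math.abs)); // [1, -2]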
No. There's only the full array sort, so you will need to use your own implementation.
Little improvement on your code (I had thought of exactly the same algorithm :-)):
function partialSort(items, k) {
var smallest = items.slice(0, k).sort(),
max = smallest[k-1];
for (var i = k, len = items.length; i < len; ++i) {
var item = items[i];
if (item < max) {
insort(smallest, item);
smallest.length = k;
max = smallest[k-1];
}
}
return smallest;
}
(Even seems to be a little faster, I guess due to caching the max variable)
For relatively small k it can be worth it to implement a Max Heap (for lack of a native one in JavaScript):
Create a Max Heap of the first k values
For each remaining value:
If it is less than the root of the heap, replace the root with this value. Otherwise ignore the value. Note that the heap never changes in size.
Finally sort the heap and return it.
This is in fact an improvement on another idea using a Min Heap, but that one needs to heapify the whole array, and so will not run as fast. After heapifying the whole array, you just extract k times a value from that heap, and return those values.
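For completeness, a minimal sketch of that Min Heap variant (my own illustration: heapify the whole array as a min-heap, then extract k values):
function minSiftDown(arr, n, i) {
  const value = arr[i];
  while (true) {
    let j = i * 2 + 1;
    if (j + 1 < n && arr[j + 1] < arr[j]) j++;
    if (j >= n || value <= arr[j]) break;
    arr[i] = arr[j];
    i = j;
  }
  arr[i] = value;
}
function partialSortWithMinHeap(items, k) {
  const heap = items.slice(); // heapify a copy of the whole array
  for (let i = heap.length >> 1; i--; ) minSiftDown(heap, heap.length, i);
  const result = [];
  let n = heap.length;
  for (let c = 0; c < k && n > 0; c++) {
    result.push(heap[0]);    // smallest remaining value
    heap[0] = heap[--n];     // move the last element to the root
    minSiftDown(heap, n, 0); // restore the heap property
  }
  return result;
}
console.log(partialSortWithMinHeap([5, 4, 3, 2, 1, 6, 7, 8, 1, 9], 3)); // [1, 1, 2]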
I have added both solutions to Bergi's jsperf.com performance tests (copied to jsbench.me). For that particular test (5000 array values, k = 10), the Max Heap solution is faster. But this advantage will shrink as k is increased.
Here is the code for the Max Heap solution:
// A few Heap-functions that operate on an array
function maxSiftDown(arr, i=0, value=arr[i]) {
if (i >= arr.length) return;
while (true) {
var j = i*2+1;
if (j+1 < arr.length && arr[j] < arr[j+1]) j++;
if (j >= arr.length || value >= arr[j]) break;
arr[i] = arr[j];
i = j;
}
arr[i] = value;
}
function maxHeapify(arr) {
for (var i = arr.length>>1; i--; ) maxSiftDown(arr, i);
return arr;
}
// The main algorithm
function partialSortWithMaxHeap(items, k) {
var heap = maxHeapify(items.slice(0, k));
for (var i = k, len = items.length; i < len; ++i) {
var item = items[i];
if (item < heap[0]) maxSiftDown(heap, 0, item);
}
return heap.sort((a,b) => a-b);
}
// Sample data & call
var arr = Array.from({length:5000}, () => Math.floor(Math.random() * 1e5));
console.log(partialSortWithMaxHeap(arr, 10));
There's no native partial sort function. The closest thing to what you want is Array.filter.
function isSmallEnough(element, index, array) {
return (element <= 10);
}
var filtered = [12, 5, 8, 130, 44].filter(isSmallEnough);
// filtered is [5, 8]
The example was borrowed (and slightly modified) from the above link.
I made a version that works with objects, like Array.sort(f):
function partialSort(items, k,f) {
function bisect(items, x, lo, hi) {
var mid;
if (typeof(lo) == 'undefined') lo = 0;
if (typeof(hi) == 'undefined') hi = items.length;
while (lo < hi) {
mid = Math.floor((lo + hi) / 2);
if (0>f(x,items[mid])) hi = mid;
else lo = mid + 1;
}
return lo;
}
function insort(items, x) {
items.splice(bisect(items, x), 0, x);
}
var smallest = items.slice(0, k).sort(f),
max = smallest[k-1];
for (var i = k, len = items.length; i < len; ++i) {
var item = items[i];
if (0>f(item,max)) {
insort(smallest, item);
smallest.length = k;
max = smallest[k-1];
}
}
return smallest;
}
// [ { e: 1 }, { e: 1 }, { e: 2 } ]
console.log(partialSort([{e:4},{e:6},{e:1},{e:8},{e:3},{e:1},{e:6},{e:2}],3,(a,b)=>a.e-b.e))
console.log()
In the code block below, the nisetamafibo function keeps an array of the smallest items found so far. The array is sorted and truncated to length K after a certain number of new items have been added to the array, where the number is taken from the Fibonacci sequence so that it is first 1, next 1, then 2, then 3, then 5, and so on. The nisetamadouble method doubles the interval after which the array is sorted instead, so that it is first 1, then 2, then 4, and so on. (I also tried the approach that I sorted the array each time after a fixed number of new items like 10 had been added, but it was slower. And I also tried to initialize the array at the start of the function so that I took in a fixed number of the first items of the input and sorted them, but I found that initializing the array with 1 or 0 items was the fastest, so I removed the initialization step.)
The nisetamainsertion function uses insertion sort to sort the items. It's very slow at high K-values because insertion sort has quadratic time complexity, but it's fast at K-values of around 10 to 100 or lower, because insertion sort is fast for short arrays. The nisetamachoose method chooses nisetamainsertion for K-values of 100 or less but nisetamafibo otherwise. (In the Java JDK, the file DualPivotQuicksort.java uses insertion sort instead of quicksort for arrays with less than 47 items. A presentation about sorting algorithms in R said that "fastest for < 30 items is insert sort".)
I also tried to implement the quickselect algorithm with and without recursion. The version that didn't use recursion was a bit faster, but both versions were still slow compared to other methods especially in cases where N was high and K was low.
On another Stack Exchange site, someone came up with new variants of the Floyd-Rivest algorithm which were faster than the regular Floyd-Rivest algorithm in C: https://softwareengineering.stackexchange.com/questions/284767/kth-selection-routine-floyd-algorithm-489. I tried to implement the variant called select7MO3 in JavaScript, but it ended up being one of the slowest options in my benchmark.
function nisetamafibo(a,k=1){
let found=[],len=a.length,unsorted=0,biggestfound=Infinity,nextsort=1,prevsort=1,oldsort
for(let i=0;i<len;i++){
if(a[i]<biggestfound||i<k){
found.push(a[i])
if(++unsorted==nextsort){
found.sort((l,r)=>l<r?-1:l>r?1:0)
found=found.slice(0,k)
biggestfound=found[found.length-1]
oldsort=nextsort;nextsort+=prevsort;prevsort=oldsort
unsorted=0
}
}
}
found.sort((l,r)=>l<r?-1:l>r?1:0)
return found.slice(0,k)
}
function nisetamadouble(a,k=1){
let found=[],len=a.length,unsorted=0,biggestfound=Infinity,nextsort=1
for(let i=0;i<len;i++){
if(a[i]<biggestfound||i<k){
found.push(a[i])
if(++unsorted==nextsort){
found.sort((l,r)=>l<r?-1:l>r?1:0)
found=found.slice(0,k)
biggestfound=found[found.length-1]
nextsort*=2
unsorted=0
}
}
}
found.sort((l,r)=>l<r?-1:l>r?1:0)
return found.slice(0,k)
}
function nisetamainsertion(a,k=1){
let found=a.slice(0,k),l=a.length
found.sort((l,r)=>l<r?-1:l>r?1:0)
let biggestfound=found[k-1]
for(let i=0;i<l;i++){
let v=a[i]
if(v<biggestfound){
let insertat=k-1
for(let j=0;j<k-1;j++)if(v<found[j]||j==i){insertat=j;break}
for(let j=k-1;j>insertat;j--)found[j]=found[j-1]
found[insertat]=v
biggestfound=found[k-1]
}
}
return found
}
function nisetamachoose(a,k=1){
return k<=100?nisetamainsertion(a,k):nisetamafibo(a,k)
}
function quickselect(a,k,l,r){
l=l||0
r=r||a.length-1
while(true){
let pivot=a[r],pos=l
for(let i=l;i<=r;i++)if(a[i]<pivot){let temp=a[i];a[i]=a[pos];a[pos++]=temp}
let temp=a[r];a[r]=a[pos];a[pos]=temp
if(pos==k)break
pos<k?l=pos+1:r=pos-1
}
}
function quickselectrecursive(a,k,l,r){
l=l||0
r=r||a.length-1
let pivot=a[r],pos=l
for(let i=l;i<=r;i++)if(a[i]<pivot){let temp=a[i];a[i]=a[pos];a[pos++]=temp}
let temp=a[r];a[r]=a[pos];a[pos]=temp
if(pos<k)quickselectrecursive(a,pos+1,r,k)
if(pos>k)quickselectrecursive(a,l,pos-1,k)
}
function sortslice(a,k){
a.sort((l,r)=>l<r?-1:l>r?1:0)
return a.slice(0,k)
}
// https://softwareengineering.stackexchange.com/questions/284767/kth-selection-routine-floyd-algorithm-489
function select7MO3(a,k){
let l=0,i,r=a.length-1,rr=r,ll=l
while(r>l){
if(a[k]<a[l]){let t=a[l];a[l]=a[k];a[k]=t}
if(a[r]<a[l]){let t=a[l];a[l]=a[r];a[r]=t}
if(a[r]<a[k]){let t=a[k];a[k]=a[r];a[r]=t}
if((r-l)>k){
let n=r-l+1
i=k-l+1
let s=(2*n/3)
let div=i-n
let sd=(n*s*(n-s)/n)*(div<0?-1:div>0?1:0)
ll=Math.max(l,k-i*s/n+sd)
rr=Math.min(r,k+(n-i)*s/n+sd)
}
let pivot=a[k]
i=l
let j=r
let t=a[l];a[l]=a[k];a[k]=t
if(a[r]>pivot){t=a[r];a[r]=a[l];a[l]=t}
while(i<j){
let t=a[i];a[i]=a[j];a[j]=t
i++
j--
while(a[i]<pivot)i++
while(a[j]>pivot)j--
}
if(a[l]==pivot){i--;let t=a[l];a[l]=a[j];a[j]=t}
else{j++;let t=a[j];a[j]=a[r];a[r]=t}
if(j<=k)l=j+1
else if(k<=j)r=j-1
}
let out=a.slice(0,k)
out.sort((l,r)=>l<r?-1:l>r?1:0)
return out
}
// OP and Bergi
function bisect(items, x, lo, hi) {
var mid;
if (typeof(lo) == 'undefined') lo = 0;
if (typeof(hi) == 'undefined') hi = items.length;
while (lo < hi) {
mid = Math.floor((lo + hi) / 2);
if (x < items[mid]) hi = mid;
else lo = mid + 1;
}
return lo;
}
function insort(items, x) {
items.splice(bisect(items, x), 0, x);
}
function OP(items, k) {
var smallest = [];
for (var i = 0, len = items.length; i < len; ++i) {
var item = items[i];
if (smallest.length < k || item < smallest[smallest.length - 1]) {
insort(smallest, item);
if (smallest.length > k)
smallest.splice(k, 1);
}
}
return smallest;
}
function OP_Bergi(items, k) {
var smallest = items.slice(0, k).sort(),
max = smallest[k-1];
for (var i = k, len = items.length; i < len; ++i) {
var item = items[i];
if (item < max) {
insort(smallest, item);
smallest.length = k;
max = smallest[k-1];
}
}
return smallest;
}
// trincot
function maxSiftDown(arr, i=0, value=arr[i]) {
if (i >= arr.length) return;
while (true) {
var j = i*2+1;
if (j+1 < arr.length && arr[j] < arr[j+1]) j++;
if (j >= arr.length || value >= arr[j]) break;
arr[i] = arr[j];
i = j;
}
arr[i] = value;
}
function maxHeapify(arr) {
for (var i = arr.length>>1; i--; ) maxSiftDown(arr, i);
return arr;
}
function trincot_MaxHeap(items, k) {
var heap = maxHeapify(items.slice(0, k));
for (var i = k, len = items.length; i < len; ++i) {
var item = items[i];
if (item < heap[0]) maxSiftDown(heap, 0, item);
}
return heap.sort((a,b) => a-b);
}
// DiazJara
function DiazJara(items, k,f) {
function bisect(items, x, lo, hi) {
var mid;
if (typeof(lo) == 'undefined') lo = 0;
if (typeof(hi) == 'undefined') hi = items.length;
while (lo < hi) {
mid = Math.floor((lo + hi) / 2);
if (0>f(x,items[mid])) hi = mid;
else lo = mid + 1;
}
return lo;
}
function insort(items, x) {
items.splice(bisect(items, x), 0, x);
}
var smallest = items.slice(0, k).sort(f),
max = smallest[k-1];
for (var i = k, len = items.length; i < len; ++i) {
var item = items[i];
if (0>f(item,max)) {
insort(smallest, item);
smallest.length = k;
max = smallest[k-1];
}
}
return smallest;
}
// benchmark
for(let nk of'31 33 40 42 44 51 53 55 60 62 64 66 71 73 75'.split(' ')){
let n=parseInt(nk[0]),k0=parseInt(nk[1]),k=10**k0
let opt=[
'OP(a,k)',
'OP_Bergi(a,k)',
'trincot_MaxHeap(a,k)',
'DiazJara(a,k,(l,r)=>l-r)',
'DiazJara(a,k,(l,r)=>l<r?-1:l>r?1:0)',
'nisetamafibo(a,k)',
'nisetamadouble(a,k)',
// 'nisetamainsertion(a,k)', // this would've taken too long to run at K=1e6
'nisetamachoose(a,k)',
'quickselect(a,k);a=a.slice(0,k);a.sort((l,r)=>l<r?-1:l>r?1:0)',
'quickselectrecursive(a,k);a=a.slice(0,k);a.sort((l,r)=>l<r?-1:l>r?1:0)',
'select7MO3(a,k);a=a.slice(0,k);a.sort((l,r)=>l<r?-1:l>r?1:0)',
'sortslice(a,k)'
]
let ord=Array.from({length:100},()=>Array(opt.length).fill().map((_,i)=>i)).flat()
ord.sort(()=>Math.random()-.5)
for(let x of ord){
let o=opt[x]
let a=Array.from({length:10**n},()=>Math.random())
let t1=process.hrtime.bigint()
eval(o)
let t2=process.hrtime.bigint()-t1
console.log(n+' '+k0+' '+o+' '+t2)
}
}
The benchmark reports the median time of a hundred runs in ms and the average rank of each method (where, for example, 7/4 means that N was 1e7 and K was 1e4); the table of results is not reproduced here.
For most combinations of N and K, Bergi's modified version of the OP's code was actually slower than the OP's code, even though the OP's code was extremely slow in the case where N and K were both 1e6.
(l,r)=>l<r?-1:l>r?1:0 is faster than (l,r)=>l-r as you can see by comparing the two versions of Díaz-Jara's method above.
Here are also versions of my nisetamadouble and nisetamainsertion methods which return the indexes of the smallest items in addition to the values:
let a=Array.from({length:1e5},()=>Math.random())
let k=10
let l=a.length
let biggestfound=Infinity,foundind=[],foundval=[]
for(let i=0;i<l;i++){
let v=a[i]
if(i<k||v<biggestfound){
let insertat=k-1
for(let j=0;j<k-1;j++)if(v<foundval[j]||j==i){insertat=j;break}
for(let j=k-1;j>insertat;j--){foundind[j]=foundind[j-1];foundval[j]=foundval[j-1]}
foundind[insertat]=i
foundval[insertat]=v
biggestfound=foundval[k-1]
}
}
console.log(foundind)
console.log(foundval)
function nisetama(a,k=1){
let found=[],len=a.length,unsorted=0,biggestfound=Infinity,nextsort=1
for(let i=0;i<len;i++){
if(a[i]<biggestfound||i<k){
found.push(a[i])
if(++unsorted==nextsort){
found.sort((l,r)=>l<r?-1:l>r?1:0)
found=found.slice(0,k)
biggestfound=found[found.length-1]
nextsort*=2
unsorted=0
}
}
}
found.sort((l,r)=>l<r?-1:l>r?1:0)
return found.slice(0,k)
}
let smallest=nisetama(a,k) // capture the k smallest values returned by nisetama
biggestfound=smallest[k-1],foundind=[]
for(let i=0;i<l;i++)if(a[i]<=biggestfound)foundind.push(i)
foundind.sort((l,r)=>a[l]<a[r]?-1:a[l]>a[r]?1:0)
foundind=foundind.slice(0,k)
console.log(foundind)
console.log(foundval)
