Optimize looping through 2 arrays in a javascript canvas game

I'm working on my first javascript canvas game, and I wonder whether there is a better way to check collisions between objects in two arrays. For example, I have an array of rockets and an array of enemies. The code works, but I think that once the arrays get much larger it will hurt performance: 100 rockets against 100 enemies is already 10,000 iterations per frame.
for (i in rockets) {
    rockets[i].x += projectile_speed;
    for (j in enemies) {
        if (collision(rockets[i], enemies[j])) {
            enemies[j].health -= 5;
            sound_hit[hit_counter - 1].play();
            hit_counter--;
            if (hit_counter == 0) {
                hit_counter = 5;
            }
            rockets.splice(i, 1);
            if (enemies[j].health <= 0) {
                score += enemies[j].score;
                sound_explode[Math.floor(Math.random() * 25)].play();
                enemies[j].isDead = true;
            }
        } else if (rockets[i].x >= width) {
            rockets.splice(i, 1);
        }
    }
}

If you want to test every rocket against every enemy, there is no way around the nested loop without knowing more about the positions of the rockets and enemies.
As long as the collision function itself stays cheap, this should not be a problem at all.
I can only think of two easy improvements:
when a collision is found, stop the inner loop (break), since checking the remaining enemies for that rocket is unnecessary (unless a rocket is allowed to hit more than one enemy);
instead of splicing the rockets array multiple times, build a new one that excludes all "dead" rockets.
You should also consider using forEach, map and filter to make the code a bit easier to read:
rockets = rockets.filter(function(rocket) {
    rocket.x += projectile_speed;
    if (rocket.x >= width) {
        return false;
    }
    var enemy = enemies.find(function(enemy) { return collision(rocket, enemy); });
    if (enemy) {
        enemy.health -= 5;
        sound_hit[--hit_counter].play();
        if (hit_counter == 0) {
            hit_counter = 5;
        }
        if (enemy.health <= 0) {
            score += enemy.score;
            sound_explode[Math.floor(Math.random() * 25)].play();
            enemy.isDead = true;
        }
        return false;
    }
    return true;
});

What you could try instead is to reduce the number of tests by grouping the enemies and rockets, so that you only have to test elements that fall into the same group.
Here is a simple implementation to show what I mean. It only partitions in the X direction, because your rockets only seem to travel horizontally:
var groupWidth = 100; // do some experiments to find a suitable value
var rocketGroups = [];
var enemyGroups = [];
// initialize groups, not shown (array of arrays of rockets/enemies),
// but here are some other function examples...
function addToGroups(element, groups) {
    groups[Math.floor(element.x / groupWidth)].push(element);
}
function move(element, groups, distance) {
    if (Math.floor(element.x / groupWidth) != Math.floor((element.x + distance) / groupWidth)) {
        // remove element from the old group and put it in the new one
    }
    element.x += distance;
}
// Note: this is only to show the idea, see comments about length
function checkCollisions() {
    var i, j, k;
    for (i = 0; i < rocketGroups.length; i++) {
        for (j = 0; j < rocketGroups[i].length; j++) {
            for (k = 0; k < enemyGroups[i].length; k++) {
                // only compares elements in the same group
                checkPossibleCollision(rocketGroups[i], enemyGroups[i], j, k);
            }
        }
    }
}
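The group initialization that is "not shown" above might look something like the following sketch; it assumes the canvas width from the question (width) and the groupWidth chosen above:
// create one empty bucket per horizontal slice of the playfield
var groupCount = Math.ceil(width / groupWidth);
for (var g = 0; g < groupCount; g++) {
    rocketGroups.push([]);
    enemyGroups.push([]);
}
// distribute the existing objects into their buckets
rockets.forEach(function(rocket) { addToGroups(rocket, rocketGroups); });
enemies.forEach(function(enemy) { addToGroups(enemy, enemyGroups); });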

Related

Finding the total count of the maximum number in an array fails only for big data

I'm doing some HackerRank challenges to improve my problem-solving skills. One of the challenges is about finding how many times the maximum number appears in an array of numbers. For example, if we have 3 2 1 3 1 3 it should return 3.
This is what I did :
function birthdayCakeCandles(ar) {
    let total = 0;
    let sortedArray = ar.sort((cur, next) => {
        return cur < next;
    });
    ar.map(item => {
        if (item === sortedArray[0]) {
            total++;
        }
    });
    return total;
}
So I sorted the given array, then mapped over it, checked how many of the numbers are equal to the maximum number in the array, and counted the total.
This passes 8/9 test cases. One test case has an array of length 100,000, and it fails for that one (this is the given data for that test case).
I really can't work out why it fails on this test. Is it possible that this happens because JavaScript is synchronous and single-threaded?
I tried using Promise and async/await, but HackerRank considers the first return as the output (which is the Promise itself) and does not use the resolved value, so I can't really test this.
Is something wrong with my logic?
The sorting approach is too slow (O(n log n) time complexity). For algorithmic challenges on HackerRank, it's unlikely that features particular to your language choice, like promises/async, are going to rescue you.
You can do this in one pass using an object to keep track of how many times you've "seen" each number and the array's maximum number, then simply index into the object to get your answer:
function birthdayCakeCandles(ar) {
    let best = -Infinity;
    const seen = {};
    for (let i = 0; i < ar.length; i++) {
        if (ar[i] > best) {
            best = ar[i];
        }
        seen[ar[i]] = ++seen[ar[i]] || 1;
    }
    return seen[best];
}
Time and space complexity: O(n).
Edit:
This answer is even better, with constant space (here it is in JS):
function birthdayCakeCandles(ar) {
    let best = -Infinity;
    let count = 0;
    for (const n of ar) {
        if (n > best) {
            best = n;
            count = 1;
        }
        else if (n === best) {
            count++;
        }
    }
    return count;
}
In your case, the built-in sort function is using resources heavily. Maybe that's why it fails on time/space limits.
By the way, this problem can be solved easily with a single for loop. The idea:
Pseudocode
var maxNum = -999999; // start below the lowest possible value of the data type
int count = 0;
for (x in arr)
{
    if (x > maxNum)
    {
        maxNum = x;
        count = 1;
    }
    else if (x == maxNum) count++;
}
Here count will be the output.
The full code is
function birthdayCakeCandles(ar) {
    var maxNum = -1;
    var count = 0;
    for (var i = 0; i < ar.length; i++) {
        var x = ar[i];
        if (x < maxNum) continue;
        if (x > maxNum) {
            maxNum = x;
            count = 1;
        }
        else {
            count++;
        }
    }
    return count;
}
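For instance, with the sample input from the question, either single-pass version returns the expected count:
console.log(birthdayCakeCandles([3, 2, 1, 3, 1, 3])); // 3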

All distributions of items between buckets

I recently ran into a problem where I need to figure out how to distribute items into buckets, and I need to find all the ways to distribute them.
The input is an array of integers that tells you the maximum each column can hold, and there must be N items distributed across the array in total.
for example:
maxItems = 3
maximums = [4,2,1] # The order of maximums DOES matter meaning
# This means that the results of maximums = [2,4,1] are different from maximums = [1,2,4]
outputs = [[3,0,0],[2,1,0],[1,1,1],[2,0,1],[0,2,1]] # results are in no particular order
# notice how the sum of each result is equal to maxItems and each value in each of the rows are less than the value inside of maximums
I attempted to solve this problem in javascript but I am unable to figure out how to approach it. I wanted to start by filling the first columns with as many items as possible and then move to the right, but as the maximums array gets bigger this method gets more inaccurate, and I don't know how to generalize it.
If you have any more questions or don't understand the problem, please feel free to ask.
The code I started off with in javascript was
var all_combinations = function(N, maximums) {
    var empty = maximums.map(function() { return 0; }); // create empty array the size of maximums, filled with 0s
    var s = 0;
    for (var i = 0; i < empty.length && s < N;) {
        if (empty[i] >= maximums[i]) { i++; continue; }
        empty[i]++;
        s++;
    } // fill the left side with as many items as possible
    // Then I would proceed to move one item at a time to the right side, but somehow I would need
    // to do it for the whole array, and this is where I get stuck.
};
I tried searching for this problem, but I never found out how to do it the way it is set up here. I tried finding similar problems but they were always unrelated. Maybe I am searching for the problem the wrong way. If someone can link a helpful resource, that would be great.
If you have any questions please ask them. I will answer to the best of my abilities.
You could use a recursive approach that checks all parts of the constraints.
It works with an index and a temporary array that keeps the count of the items.
At the start, the index is zero and the array is empty. Each call of fork first checks the exit conditions: if any count exceeds its maximum, the index runs past the end of the array, or the sum exceeds the wanted count, the recursion stops.
The second exit option is when the sum of the items reaches the wanted count; then the temporary array is pushed to the result set and the recursion ends.
In all other cases, fork is called again with either
the same index i and an incremented value of the temporary array at that index, or
an incremented index and the temporary array unchanged.
function getCombination(max, count) {
    function fork(index, temp) {
        var sum = temp.reduce((a, b) => a + b, 0);
        if (max.some((a, i) => (temp[i] || 0) > a) || index === max.length || sum > count) {
            return;
        }
        if (sum === count) {
            result.push(temp);
            return;
        }
        fork(index, max.map((a, i) => (temp[i] || 0) + (i === index)));
        fork(index + 1, temp);
    }
    var result = [];
    fork(0, []);
    return result;
}
console.log(getCombination([4, 2, 1], 3));
A second recursive approach, iterating over the possible counts for each bucket, with an up-front check that the running sum plus the value does not exceed the wanted count:
function getCombination(max, count) {
    function iter(index, sum, temp) {
        var i;
        if (index === max.length) {
            if (count === sum) {
                result.push(temp);
            }
            return;
        }
        for (i = max[index]; i >= 0; i--) {
            if (sum + i <= count) {
                iter(index + 1, sum + i, temp.concat(i));
            }
        }
    }
    var result = [];
    iter(0, 0, []);
    return result;
}
console.log(getCombination([4, 2, 1], 3));
An easy-to-understand recursive solution using ES6 generators:
for each i, place i items into the first slot if they fit, then distribute the rest among the remaining slots.
function* bucket_distributions(capacities, nItems) {
    if (capacities.length == 1) {
        if (capacities[0] >= nItems)
            yield [nItems];
    }
    else if (capacities.length > 1) {
        for (var i = Math.min(capacities[0], nItems); i >= 0; i--) {
            for (const subdist of bucket_distributions(capacities.slice(1), nItems - i))
                yield [i].concat(subdist);
        }
    }
}
console.log(Array.from(bucket_distributions([4,2,1],3)))
Here's a well-commented iterative solution with an interactive demo:
// reducer function for calculating the sum of an array
function sum(prev, next) {
    return prev + next;
}

// returns the contextual constraints of a bucket
function bucketMinMax(maxItems, otherItems, bucketMax) {
    return {
        // minimum values in bucket to meet maxItems
        min: Math.max(0, maxItems - otherItems),
        // maximum values in bucket to meet maxItems
        max: Math.min(maxItems, bucketMax),
    };
}

// takes an incomplete combination and expands it with the next bucket,
// starting from the left
function expandCombination(maxItems, maximums, combinations) {
    // get next combo group to expand
    var comboGroup = combinations.shift();
    // get index of expansion bucket
    var index = comboGroup.length;
    // calculate maximum possible otherItems
    var otherItems = maximums.slice(index + 1).reduce(sum, 0);
    // remove already used spaces from maxItems in the combination group being expanded
    maxItems -= comboGroup.reduce(sum, 0);
    // get constraints for expansion bucket
    var {min, max} = bucketMinMax(maxItems, otherItems, maximums[index]);
    for (var i = min; i <= max; i++) {
        // add combo group expansions to output
        combinations.push(comboGroup.concat([i]));
    }
}

// main function
function allCombinations(maxItems, maximums) {
    // will eventually contain all combinations
    var output = [[]];
    // loop through the array of combinations, expanding each one iteratively
    while (output.length > 0 && output[0].length < maximums.length) {
        // take an incomplete combination group and expand it with possible values
        // for the next bucket, starting from the left
        expandCombination(maxItems, maximums, output);
    }
    return output;
}
document.addEventListener('change', () => {
    var maxes = JSON.parse(maximums.value);
    var items = JSON.parse(maxItems.value);
    console.log(JSON.stringify(allCombinations(items, maxes)));
});
document.dispatchEvent(new Event('change'));
<label>maxItems
    <input id="maxItems" value="3">
</label>
<label>maximums
    <input id="maximums" value="[4,2,1]">
</label>

Javascript - For loop vs Linked List vs ES6 Set to find two matching integers

I have prepared 2 Javascript functions that find a pair of integers adding up to a given sum and return a boolean.
The first function uses a binary search, like this:
function find2PairsBySumLog(arr, sum) {
    for (var i = 0; i < arr.length; i++) {
        for (var x = i + 1; x < arr.length; x++) {
            if (arr[i] + arr[x] == sum) {
                return true;
            }
        }
    }
    return false;
}
For the second function I implemented my own singly linked list, where I add the complement of each integer (sum minus value) and search for the current value in the linked list. If the value is found in the linked list, we know there is a match.
function find2PairsBySumLin(arr, sum) {
    var complementList = new LinkedList();
    for (var i = 0; i < arr.length; i++) {
        if (complementList.find(arr[i])) {
            return true;
        } else {
            complementList.add(sum - arr[i]);
        }
    }
    return false;
}
When I run both functions I clearly see that the Linked List search executes ~75% faster
var arr = [9,2,4,1,3,2,2,8,1,1,6,1,2,8,7,8,2,9];

console.time('For loop search');
console.log(find2PairsBySumLog(arr, 18));
console.timeEnd('For loop search');

console.time('Linked List search');
console.log(find2PairsBySumLin(arr, 18));
console.timeEnd('Linked List search');
true
For loop search: 4.590ms
true
Linked List search: 0.709ms
Here is my question: is the linked list approach a real linear search? After all, I loop through all the nodes of the list while my outer loop iterates through the initial array.
Here is my LinkedList search function:
LinkedList.prototype.find = function(data) {
    var headNode = this.head;
    if (headNode === null) {
        return false;
    }
    while (headNode !== null) {
        if (headNode.data === data) {
            return true;
        } else {
            headNode = headNode.next;
        }
    }
    return false;
}
UPDATE:
It was a good idea to go back and have another think about the problem based on the comments so far.
Thanks to #nem035's comment about small datasets, I ran another test, this time with 100,000 integers between 1 and 8. I assigned 9 to the first and last position and searched for 18 to make sure the entire array would be searched.
I also included the relatively new ES6 Set for comparison, thanks to #Oriol.
By the way, #Oriol and #Deepak, you are right. The first function is not a binary search but rather an O(n*n) search, which has no logarithmic complexity.
It turns out my linked list implementation was the slowest of all the searches. I ran 10 iterations for each function individually. Here are the results:
For loop search: 24.36 ms (avg)
Linked List search: 64328.98 ms (avg)
Set search: 35.63 ms (avg)
Here is the same test for a dataset of 10,000,000 integers:
For loop search: 30.78 ms (avg)
Set search: 1557.98 ms (avg)
Summary:
So it seems the linked list is really fast for smaller datasets up to ~1,000 items, while the ES6 Set is great for larger datasets.
Nevertheless, the for loop is the clear winner in all tests.
All 3 methods will scale linearly with the amount of data.
Please note: ES6 Set is not backward compatible with old browsers, in case this operation has to be done client-side.
Don't use this. Use a set.
function find2PairsBySum(arr, sum) {
    var set = new Set();
    for (var num of arr) {
        if (set.has(num)) return true;
        set.add(sum - num);
    }
    return false;
}
That's all. Both add and has are guaranteed to be sublinear (probably constant) on average.
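For example, with the array and target sum from the question, the set-based version returns true because of the 9 + 9 pair:
var arr = [9, 2, 4, 1, 3, 2, 2, 8, 1, 1, 6, 1, 2, 8, 7, 8, 2, 9];
console.log(find2PairsBySum(arr, 18)); // true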
You can optimize this substantially, by pre-sorting the array and then using a real binary search.
// Find an element in a sorted array.
function includesBinary(arr, elt) {
    if (!arr.length) return false;
    const middle = Math.floor(arr.length / 2);
    switch (Math.sign(elt - arr[middle])) {
        // slice's end index is exclusive, so the lower half is [0, middle)
        case -1: return includesBinary(arr.slice(0, middle), elt);
        case 0: return true;
        case +1: return includesBinary(arr.slice(middle + 1), elt);
    }
}
// Given an array, pre-sort it and return a function to detect pairs adding up to a sum.
function makeFinder(arr) {
    arr = arr.slice().sort((a, b) => a - b);
    return function(sum) {
        for (let i = 0; i < arr.length; i++) {
            const remaining = sum - arr[i];
            if (remaining < 0) return false;
            if (includesBinary(arr, remaining)) return true;
        }
        return false;
    };
}
// Test data: 100 random elements between 0 and 99.
const arr = Array.from(Array(100), _ => Math.floor(Math.random() * 100));
const finder = makeFinder(arr);

console.time('test');
for (let i = 0; i < 1000; i++) finder(100);
console.timeEnd('test');
According to this rough benchmark, one lookup into an array of 100 elements costs a few microseconds.
Rewriting includesBinary to avoid recursion would probably provide a further performance win.
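For what it's worth, a non-recursive version might look like the sketch below; it narrows index bounds instead of slicing, which also avoids the array copies that slice creates:
// find an element in a sorted array, without recursion or copying
function includesBinaryIterative(arr, elt) {
    let lo = 0, hi = arr.length - 1;
    while (lo <= hi) {
        const middle = Math.floor((lo + hi) / 2);
        if (arr[middle] === elt) return true;
        if (arr[middle] < elt) lo = middle + 1;
        else hi = middle - 1;
    }
    return false;
}
It would be a drop-in replacement for includesBinary inside makeFinder.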
First of all, find2PairsBySumLog is not a binary search; it's a brute-force method that compares every pair of elements in the array, so its worst-case time complexity is O(n*n). The second function is a linear search, which is why it runs faster. For the first function, what you can do instead is initialize a boolean map and, for every integer, check whether its complement has already been seen, much like you are doing in the second function. Probably like:
bool isPairsPresent(int arr[], int arr_size, int sum)
{
    int i, temp;
    bool binMap[MAX] = {0}; // MAX must exceed the largest value that can appear in arr
    for (i = 0; i < arr_size; i++)
    {
        temp = sum - arr[i];
        if (temp >= 0 && binMap[temp] == 1)
            return true;
        binMap[arr[i]] = 1;
    }
    return false;
}
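In JavaScript the same idea can be sketched with a plain object serving as the boolean map (the function name here is just illustrative; this is essentially what the Set-based answer above does):
function find2PairsBySumMap(arr, sum) {
    var seen = {}; // marks every value encountered so far
    for (var i = 0; i < arr.length; i++) {
        if (seen[sum - arr[i]]) return true; // the complement was seen earlier
        seen[arr[i]] = true;
    }
    return false;
}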

How to sort and slice an array of objects

I have an array of shots. I have been able to take that array, loop through it to get all shots that occurred on hole #1, and rearrange them in order based on "shot_number". I now need to do this for every hole and create an array for each hole (e.g. holeArray1, holeArray2). I have attempted a number of solutions that increment x, but when I do, I end up missing some shots that occurred on certain holes.
How can I refactor this function to create such an array for every hole without copying and pasting the code and changing the variable x myself? Thank you for your help. I know I should be able to figure this one out but am struggling.
$scope.createHoleShotsArrays = function () {
    var i = 0;
    var x = 1;
    var holeArray = [];
    var len = $scope.shots.length;
    for (; i < len; i++) {
        if ($scope.shots[i].attributes.hole == x) {
            holeArray.push($scope.shots[i]);
            holeArray.sort(function (a, b) {
                if (a.attributes.shot_number > b.attributes.shot_number) {
                    return 1;
                }
                if (a.attributes.shot_number < b.attributes.shot_number) {
                    return -1;
                }
                // a must be equal to b
                return 0;
            });
        }
    }
    console.log(holeArray);
};
Push the items into one array per hole, then sort each array once. I don't have test cases for this code, so you may need to modify it a little if something goes wrong.
$scope.createHoleShotsArrays = function() {
    var holeArrays = [];
    $scope.shots.forEach(function(shot) {
        // create the array for this hole the first time a shot for it appears
        if (!holeArrays[shot.attributes.hole - 1]) {
            holeArrays[shot.attributes.hole - 1] = [];
        }
        holeArrays[shot.attributes.hole - 1].push(shot);
    });
    holeArrays.forEach(function(arr) {
        arr.sort(function(a, b) {
            return a.attributes.shot_number - b.attributes.shot_number;
        });
    });
    console.log(holeArrays);
};

Blackjack javascript game infinite loop

I have created an utterly simple blackjack game that deals hands by moving the first value of a shuffled array of cards into each player's own array. For some odd reason, I can't seem to find a way to execute the core part of the code multiple times without getting an infinite loop. So far I have only tried wrapping it in the commonplace "for" loop, which is meant to run statements multiple times but just doesn't seem to work here.
The program in its primitive form is as follows:
var dealerCards = [];
var playerCards = [];
var firstDeck = [];

function shuffle(o) {
    for (var j, x, i = o.length; i; j = Math.floor(Math.random() * i), x = o[--i], o[i] = o[j], o[j] = x);
    return o;
}

function createShuffledDeckNumber(array, x) {
    for (i = 0; i < 4 * x; i++) {
        array.push(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13);
    }
    shuffle(array);
}

function drawCard(playersHand, playerSoft, playerHard) {
    playersHand.push(firstDeck[0]);
    firstDeck.shift();
}

function checkDeckDrawOne(playersHand) {
    if (firstDeck.length === 0) {
        createShuffledDeckNumber(firstDeck, 1);
        drawCard(playersHand);
    } else {
        for (i = 0; i < 1; i++) {
            drawCard(playersHand);
        }
    }
}

for (i = 0; i < 4; i++) {
    dealerCards = [];
    playerCards = [];
    checkDeckDrawOne(dealerCards);
    checkDeckDrawOne(dealerCards);
    checkDeckDrawOne(playerCards);
    checkDeckDrawOne(playerCards);
    console.log("dealerCards", dealerCards, "playerCards", playerCards);
    console.log("firstDeckDrawn", firstDeck, "Number", firstDeck.length);
}
Additional notes:
The objective is to perform calculations that figure out the winner by imitating consecutive rounds of play, based on the finite number of values stored in each player's array. Although I've tried a series of different things to emulate the real-life circumstances of actually playing blackjack, this version seems to do just that, and it also gives the programmer the ability to introduce counting systems like KO or Hi-Lo. The main logic behind the whole thing is fairly simple: shuffle in x fresh decks whenever a card needs to be drawn and the deck does not have at least one card left.
It's fair to wonder why I would bother creating multiple rounds in such a game. The reason is that I want to create an autoplay application that provides me with percentages on processed data.
Your variable i in function checkDeckDrawOne() has global scope, meaning it alters the value of i in the main loop:
for (i = 0; i < 4; i++) {
    dealerCards = [];
    playerCards = [];
    checkDeckDrawOne(dealerCards);
    checkDeckDrawOne(dealerCards);
    checkDeckDrawOne(playerCards);
    checkDeckDrawOne(playerCards);
    console.log("dealerCards", dealerCards, "playerCards", playerCards);
    console.log("firstDeckDrawn", firstDeck, "Number", firstDeck.length);
}
Change this:
for (i = 0; i < 1; i++) {
    drawCard(playersHand);
}
to this:
for (var i = 0; i < 1; i++) {
    drawCard(playersHand);
}
although why you need a loop here anyway is baffling.
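As a sketch (assuming the rest of the code stays unchanged), giving every loop its own locally declared counter removes the interference entirely, and the single-iteration loop can simply be dropped; note that createShuffledDeckNumber and the main loop in the question also use an undeclared i, so the same pattern applies there:
function createShuffledDeckNumber(array, x) {
    // i is now local to this function
    for (var i = 0; i < 4 * x; i++) {
        array.push(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13);
    }
    shuffle(array);
}

function checkDeckDrawOne(playersHand) {
    if (firstDeck.length === 0) {
        createShuffledDeckNumber(firstDeck, 1);
    }
    drawCard(playersHand);
}

// a differently named, declared counter keeps the main loop safe from stray globals
for (var round = 0; round < 4; round++) {
    dealerCards = [];
    playerCards = [];
    checkDeckDrawOne(dealerCards);
    checkDeckDrawOne(dealerCards);
    checkDeckDrawOne(playerCards);
    checkDeckDrawOne(playerCards);
    console.log("dealerCards", dealerCards, "playerCards", playerCards);
    console.log("firstDeckDrawn", firstDeck, "Number", firstDeck.length);
}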
