javascript, function, mathrandom, array - javascript

I'm supposed to do a math exercise with subtractions for a basic JavaScript course. The exercise has to contain 5 problems shown in 5 prompt windows (e.g. "10 - 7 =", "5 - 1 =", etc.), and the first number has to be bigger than the second one.
Therefore, I need to create a function that generates two random numbers and returns them in an array. Before it returns, it has to make sure that the number in position 0 is bigger than the number in position 1, and the random numbers have to be between 1 and 10. If the person answers the question right, an alert window should pop up and say "Right!", and likewise it should say "Wrong!" if the answer is wrong.
My code so far is below, and I'm aware that it's not completely right. What's wrong? How can I make it right?
function number ()
{
var array = [a, b];
var numbers = Math.floor(Math.random()*10)+1;
array[0] = a ;
array[1] = b ;
if (a <= b)
a = a+b;
b = a-b;
a = a-b;
return a + b;
}
var mathQuestion = a + " - " + b +" = ";
var answer = a - b;
for(var i =0; i<6; i++) {
var yourAnswer = parseInt(prompt(mathQuestion));
if (yourAnswer === answer) {
alert("Right!");
}
else {
alert("Wrong!");
}
}

I'm not entirely sure what you are trying to achieve, but I had a look at your code to improve it a bit.
The code below works fine, so please have a look :)
function getTask() {
  var array = [];
  var numberA = getRandomNumber();
  var numberB = getRandomNumber();
  if (numberA > numberB) {
    array[0] = numberA;
    array[1] = numberB;
  } else {
    array[0] = numberB;
    array[1] = numberA;
  }
  return array;
}
function getRandomNumber() {
  return Math.floor(Math.random() * 10) + 1;
}
let questionCount = 5; // the exercise asks for 5 questions
for (var i = 0; i < questionCount; i++) {
  let numbers = getTask();
  var mathQuestion = numbers[0] + " - " + numbers[1] + " = ";
  var answer = numbers[0] - numbers[1];
  var yourAnswer = parseInt(prompt(mathQuestion));
  if (yourAnswer === answer)
    alert("Right!");
  else
    alert("Wrong!");
}
Note: The problem was that you were missing the declarations of a and b. Put the keyword var in front of them before assigning values.
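For completeness, here is a minimal sketch of the original number() function with those declarations added and the array actually returned (just an illustration, not the only way to do it):
function number() {
  // declare the variables before using them
  var a = Math.floor(Math.random() * 10) + 1;
  var b = Math.floor(Math.random() * 10) + 1;
  if (a < b) {
    // swap so that position 0 holds the bigger number
    var temp = a;
    a = b;
    b = temp;
  }
  // note: if a === b the two numbers are equal; re-roll if you need a strict difference
  return [a, b];
}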

You can impress your teacher with ternary operators, the join and sort array methods, and arrow functions:
Codepen demo
const getRandom = () => Math.floor(Math.random() * 10) + 1
const ask = () => {
const numbers = [getRandom(), getRandom()].sort((a, b) => b - a)
alert(
prompt(numbers.join(' - ') + ' = ') == numbers[0] - numbers[1] ?
'Right!' : 'Wrong!'
)
}
for (var i = 0; i < 5; i++) {
ask();
}
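One thing to be aware of: prompt() returns a string (or null if the user cancels), which is why this version compares with == so the string is coerced to a number; the parseInt plus === combination used in the answer above is the stricter alternative.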

Adding asterisks to array?

I'm trying to add elements as asterisks inside an array based on the number of elements. Basically, if numberOfRows is 3 then I want this output:
[
' * ',
' *** ',
'*****'
]
I'm struggling with setting the asterisks using the index. Can anyone point me in the right direction? Thanks a lot!
Here's my code:
function myFunction(numberOfRows) {
var arr = [];
var value = "";
var asterisk = "*"; // Need to update this based on number of rows
for (var i = 1; i <= numberOfRows; i++) {
value += asterisk;
arr.push(value);
}
return arr;
}
Got it working! Here's a perfect solution.
function myFunction(n) {
let arr = [];
for(let f = 1; f <= n; f++) {
arr.push(' '.repeat(n - f) + '*'.repeat(f + f - 1) + ' '.repeat(n - f));
}
return arr;
}
console.log(myFunction(3));
Try something like this:
function myFunction(numberOfRows) {
var arr = [];
var value = "";
var slots = numberOfRows * 2 - 1;
var spaceSlots, asteriskSlots, spaces;
for (var i = 0; i < numberOfRows; i++) {
asteriskSlots = i * 2 + 1;
spaceSlots = Math.floor((slots - asteriskSlots)/2);
spaces = new Array(spaceSlots).fill(' ').join('');
value = spaces + '*'.repeat(asteriskSlots) + spaces;
arr.push(value);
}
return arr;
}
console.log(myFunction(20));
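If your environment supports ES2017 string padding, roughly the same idea can also be written with the standard padStart and padEnd methods (a sketch of an alternative, not a correction of the above):
function myFunction(numberOfRows) {
  var arr = [];
  var width = numberOfRows * 2 - 1; // total width of each row
  for (var i = 1; i <= numberOfRows; i++) {
    var stars = '*'.repeat(i * 2 - 1); // 1, 3, 5, ... asterisks
    // centre the asterisks: pad the left half, then pad the right side to full width
    arr.push(stars.padStart(stars.length + (width - stars.length) / 2).padEnd(width));
  }
  return arr;
}
console.log(myFunction(3)); // ['  *  ', ' *** ', '*****']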

Least Common Multiple - for loop breaks down - javascript

I'm taking a course on FreeCodeCamp.org and the assignment is to find the "Smallest Common Multiple". I came up with a solution I think works, and it does up to a certain point. Then the code just seems to break down. Here is my code:
function smallestCommons(arr) {
arr = arr.sort((a,b) => {return a - b;});
console.log(arr);
var truesec = false;
for(var a = arr[1]; truesec != true; a++){
for(var e = 1; e <= arr[1]; e++){
//console.log(a % e + " " + e);
if(a % e != 0){
truesec = false;
break;
}else{
truesec = true;
}
}
//console.log(truesec + " " + a);
if(truesec == true){
return a;
}
}
return a;
}
console.log(smallestCommons([23,18]));
This should return 6056820 according to their checklist, but every time I check I get a different result; I've gotten both 114461 and 122841 from the same code. Can somebody please tell me what is wrong with this?
Here is the assignment if it helps:
Intermediate Algorithm Scripting: Smallest Common Multiple
What your algorithm is trying to do is find the common multiple of every number between 1 and the greater number in the array, which can take a very long time. However, the FreeCodeCamp question asks you to find the common multiple of the numbers between the two values in the array, so the result calculated by your algorithm does not match the tests.
To make your solution work, you can change
from for (var e = 1; e <= arr[1]; e++)
to for (var e = arr[0]; e <= arr[1]; e++)
in order to loop between two numbers in the array.
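Putting that one change into your function (and tidying the flag logic slightly), a sketch of the fixed brute-force version looks like this; it returns 6056820 for [23, 18], although it stays slow for large ranges:
function smallestCommons(arr) {
  arr = arr.sort((a, b) => a - b);
  for (var a = arr[1]; ; a++) { // count up from the larger number
    var divisibleByAll = true;
    for (var e = arr[0]; e <= arr[1]; e++) { // check the whole range, not 1..arr[1]
      if (a % e !== 0) {
        divisibleByAll = false;
        break;
      }
    }
    if (divisibleByAll) {
      return a;
    }
  }
}
console.log(smallestCommons([23, 18])); // 6056820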
I would take a different approach to this problem:
create a function that gets all the prime factors of a number
build an array of the prime factors of every number between a[0] and a[1]
reduce that array, keeping the biggest power of each prime factor
multiply all the prime factors left in the array
Your approach takes O(k * a[1]) where k is the answer - and k can be very high... This approach takes O((a[1])^2).
Consider the following code:
function smallestCommons2(arr) {
arr.sort((a,b) => {return a - b;});
let factors = [];
for(let i = arr[0]; i <= arr[1]; i++)
factors.push(findPrimeFactors(i));
let reduced = reduceFactors(factors);
let ans = 1;
for (let i in reduced)
ans *= Math.pow(i, reduced[i]);
return ans;
}
function reduceFactors(factorsArr) {
let factorObject = {};
for (let i in factorsArr) {
for(let key in factorsArr[i]) {
if (!(key in factorObject) || factorObject[key] < factorsArr[i][key])
factorObject[key] = factorsArr[i][key];
}
}
return factorObject;
}
function findPrimeFactors (num) {
var primeFactors = [];
while (num % 2 === 0) {
primeFactors.push(2);
num = num / 2;
}
var sqrtNum = Math.sqrt(num);
for (var i = 3; i <= sqrtNum; i++) {
while (num % i === 0) {
primeFactors.push(i);
num = num / i;
}
}
if (num > 2)
primeFactors.push(num);
let factorObject = {};
for (let item of primeFactors) {
if (item in factorObject)
factorObject[item] += 1;
else factorObject[item] = 1;
}
return factorObject;
}
console.log(smallestCommons2([23,18]));
This code outputs 6056820 in a fraction of a second.
Edit: I found a post that does the same thing in a better way.
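For reference, the usual compact approach (independent of whichever post that was) is to reduce the whole range with lcm(a, b) = a * b / gcd(a, b):
function smallestCommons(arr) {
  const [min, max] = [...arr].sort((a, b) => a - b);
  const gcd = (a, b) => (b === 0 ? a : gcd(b, a % b)); // Euclid's algorithm
  const lcm = (a, b) => (a * b) / gcd(a, b);
  let result = min;
  for (let i = min + 1; i <= max; i++) {
    result = lcm(result, i);
  }
  return result;
}
console.log(smallestCommons([23, 18])); // 6056820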

Counting duplicate random numbers from a for loop

I am trying to create a score predictor based on a team's goal difference (football). I am new to JavaScript, and I have managed to get this far.
I want it to be like spinning a ten-sided dice 20 times plus the team's goal difference, and I think I have that bit sorted. With my code as it stands I get a list of random numbers logged in the console, which is what I wanted. Now I would like to choose a number (e.g., 2) and see how many times it occurs in the list, and save that count in a new variable called homeFinalScore (so if 2 occurs three times in the list of random numbers, homeFinalScore should be 3). I've tried several things but haven't been able to sort it yet!
Any help would be extremely helpful. Thank you in advance!
var homeFinalScore = 0;
function calculateScore(){
var homeTeam = document.getElementById("HomeTeam").value;
var awayTeam = document.getElementById("AwayTeam").value;
var homeGd = parseInt(document.getElementById("HomeGD").value);
var awayGd = parseInt(document.getElementById("AwayGD").value);
var homeGd = 20 + homeGd;
var awayGd = 15 + awayGd;
for (i = 0; i < homeGd; i++) {
var randNum = Math.floor(Math.random() * 11);
console.log(randNum);
}
}
You can create an array, use Array.prototype.push() to push randNum into the array, then use Array.prototype.filter() together with .length to determine how many occurrences of a value are present in the array.
var homeGd = 20 + 2;
var awayGd = 15 + 2;
var arr = [];
function countOccurrences(n, arr) {
return arr.filter(function(value) {
return value === n
}).length;
}
for (i = 0; i < homeGd; i++) {
var randNum = Math.floor(Math.random() * 11);
arr.push(randNum);
}
console.log(arr);
console.log(countOccurrences(2, arr));
Alternatively, you can increment a variable when randNum is equal to a value.
var homeGd = 20 + 2;
var awayGd = 15 + 2;
var n = 0;
var num = 2;
for (i = 0; i < homeGd; i++) {
var randNum = Math.floor(Math.random() * 11);
console.log(randNum);
if (randNum === num) {
++n
}
}
console.log("occurrences of 2:", n);
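Another option is to build a frequency map of every rolled value with Array.prototype.reduce() and then read off the count you need: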
const homeGd = 10;
const randomNumbers = []; // array of random numbers
for (i = 0; i < homeGd; i++) {
randomNumbers.push(Math.floor(Math.random() * 11));
}
const countBy = randomNumbers.reduce((acc, current) => {
acc[current] = (acc[current] || 0) + 1;
return acc;
}, {});
console.log(countBy);
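With that map, the homeFinalScore for a particular number (say 2) is simply countBy[2] || 0; the || 0 covers the case where 2 never comes up.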

How can I get the sum of all odd Fibonacci values in JavaScript?

I am working through this Free Code Camp exercise.
Return the sum of all odd Fibonacci numbers up to and including the passed number if it is a Fibonacci number.
The first few numbers of the Fibonacci sequence are 1, 1, 2, 3, 5 and 8, and each subsequent number is the sum of the previous two numbers.
And here is what I have so far...
Any suggestions?
function sumFibs(num) {
var arr, isFibVal, isEvenVal, sum, i = 0, fibonacci = function (num){
var a, b, result, fibArr = [1];
a=0;
b=1;
result=b;
for(var j = 0; j < num; j++){
result = a + b;
a = b;
b = result;
fibArr.push(result);
}
return fibArr;
},
isFib = function (val){
var prev = 0;
var curr = 1;
while(prev<=val){
if(prev == val){
return true;
} else {
return false;
}
curr = prev + curr;
prev = curr - prev;
}
},
isEven = function(someNumber){
return (someNumber % 2 === 0) ? true : false;
};
function sumArray(array) {
for (
var
index = 0, // The iterator
length = array.length, // Cache the array length
sum = 0; // The total amount
index < length; // The "for"-loop condition
sum += array[index++] // Add number on each iteration
);
return sum;
}
arr = fibonacci(num);
isFibVal = isFib(num);
isEvenVal = isEven(num);
if (isFibVal && !isEvenVal){
sum += sumArray(arr);
}
return sum;
}
All I get back is undefined, which seems weird because I thought this part of my code was pretty neat: storing the helper functions' return values and checking those in the if statement instead of calling the functions inline.
arr = fibonacci(num);
isFibVal = isFib(num);
isEvenVal = isEven(num);
if (isFibVal && !isEvenVal){
sum += sumArray(arr);
}
I won't give you the answer outright since you're going through FCC, but I'll provide you with some hints as to where to look:
See this segment:
for(var j = 0; j < num; j++){
result = a + b;
a = b;
b = result;
fibArr.push(result);
}
And this one:
function sumArray(array) {
for (
var
index = 0, // The iterator
length = array.length, // Cache the array length
sum = 0; // The total amount
index < length; // The "for"-loop condition
sum += array[index++] // Add number on each iteration
);
return sum;
}
Also, you probably don't need this segment at all:
isFibVal = isFib(num);
isEvenVal = isEven(num);
if (isFibVal && !isEvenVal){
sum += sumArray(arr);
Good luck. As someone who has finished a good chunk of the curriculum, I can say that Free Code Camp is the real deal.
You're pretty close, and the other answer is good for nudging you in the right direction. I'll post a different way that does this using native JS functions:
Example of the code below in JSBin
function fibs(n) {
  var f = [0, 1];
  // keep generating Fibonacci numbers while the next one is still <= n,
  // so the passed-in number is included when it is itself a Fibonacci number
  while (f[f.length - 1] + f[f.length - 2] <= n) {
    f.push(f[f.length - 1] + f[f.length - 2]);
  }
  console.log(f); // just to check, we can cut all the logs later...
  var filtered = f.filter(function(num) {
    // filter out the even numbers
    return num % 2 === 1;
  });
  console.log(filtered);
  var sum = filtered.reduce(function(a, b) {
    // add up what's left
    return a + b;
  });
  console.log(sum);
  return sum;
}
Here's my solution, and I find it to be pretty readable:
function sumOddFibs(num) {
// initialize with 2 because
// fib sequence starts with 1 and 1
var sum = 2;
var prev = 1;
var curr = 1;
var next = 2;
while (next <= num) {
prev = curr;
curr = next;
next = prev + curr;
if (curr % 2 !== 0) {
sum += curr;
}
}
return sum;
}
You could start by defining variables for the previous number, the current number, and the running Fibonacci total.
To check for odd numbers, you could use an if statement with the % operator:
if (currNumber % 2 !== 0){ }
If the current number is odd, add it to the total:
fibTotal += currNumber;
To determine the next Fibonacci number, add the previous and current numbers:
var nextNumber = prevNumber + currNumber;
Then update the previous number to the current one:
prevNumber = currNumber;
and set the current number to the next Fibonacci number in the sequence:
currNumber = nextNumber;
Hope this helps.

Javascript rounding issue when summing values [duplicate]

Possible Duplicate:
Is JavaScript's Math broken?
The user enters values in the first two text boxes and, as they type, JavaScript (sorry, no jQuery, I'm not up to it yet) is used to calculate the precise sum and the sum rounded to 2 digits.
Why am I getting a rounding error, and what can I do to correct it?
Many thanks.
Hmmm... parseFloat? Wrong data type?
What I would like to see is the precise answer, as if the numbers were added on a calculator. Is there a parseDecimal or some other data type that I can use?
(Screenshot of the form: http://i.stack.imgur.com/5Otrm.png)
function SumValues() {
//debugger;
var txtSubsContrRbtAmt = document.getElementById("<%=txtSubsContrRbtAmt.ClientID%>");
var txtDeMinAmt = document.getElementById("<%=txtDeMinAmt.ClientID%>");
var txtTotRbtAmt = document.getElementById("<%=txtTotRbtAmt.ClientID%>");
var txtRndRbtAmt = document.getElementById("<%=txtRndRbtAmt.ClientID%>");
var total = Add(txtSubsContrRbtAmt.value, txtDeMinAmt.value);
txtTotRbtAmt.value = total;
txtRndRbtAmt.value = RoundToTwoDecimalPlaces(total);
}
function Add() {
var sum = 0;
for (var i = 0, j = arguments.length; i < j; i++) {
var currentValue;
if (isNumber(arguments[i])) {
currentValue = parseFloat(arguments[i]);
}
else {
currentValue = 0;
}
sum += currentValue;
}
return sum;
}
function RoundToTwoDecimalPlaces(input) {
return Math.round(input * 100) / 100
}
function IsNumeric(input) {
return (input - 0) == input && input.length > 0;
}
function isNumber(n) {
return !isNaN(parseFloat(n)) && isFinite(n);
}
Update: I am evaluating something like this:
function AddWithPrecision(a, b, precision) {
var x = Math.pow(10, precision || 2);
return (Math.round(a * x) + Math.round(b * x)) / x;
}
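For what it's worth, that does give the clean result in simple cases; for example, AddWithPrecision(0.1, 0.2) returns 0.3 rather than 0.30000000000000004, as long as the inputs fit within the chosen precision.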
There is a golden rule for anyone writing software in the financial sector (or any software dealing with money): never use floats. Accordingly, most software dealing with money uses only integers and represents decimal numbers with a dedicated data structure.
Here's one way of doing it:
(Note: this function adds two strings that look like numbers.)
(Additional note: no error checking is done, to aid clarity, and negative numbers are not handled.)
function addNumberStrings(a, b) {
  a = a.split('.');
  b = b.split('.');
  var a_decimal = a[1] || '0';
  var b_decimal = b[1] || '0';
  // pad the shorter decimal part with zeroes so both have the same length
  var diff = a_decimal.length - b_decimal.length;
  while (diff > 0) {
    b_decimal += '0';
    diff--;
  }
  while (diff < 0) {
    a_decimal += '0';
    diff++;
  }
  var decimal_position = a_decimal.length;
  a = a[0] + a_decimal;
  b = b[0] + b_decimal;
  var result = (parseInt(a, 10) + parseInt(b, 10)) + '';
  if (result.length <= decimal_position) {
    // the sum is smaller than 1: left-pad with zeroes and prefix "0."
    for (var x = result.length; x < decimal_position; x++) {
      result = '0' + result;
    }
    result = '0.' + result;
  } else {
    var p = result.length - decimal_position;
    result = result.substring(0, p) + '.' + result.substring(p);
  }
  return result;
}
*Note: the code is simplified; additional features are left out as homework.
To fix your addition the way you want, I'd suggest counting the decimal places in each number (with something like the Decimals function included below), then passing the larger count to toFixed and trimming any leftover zeroes.
function AddTwo(n1, n2) {
var n3 = parseFloat(n1) + parseFloat(n2);
var count1 = Decimals(n1, '.');
var count2 = Decimals(n2, '.');
var decimals = Math.max(count1, count2);
var result = n3.toFixed(decimals)
var resultDecimals = Decimals(result, '.');
if (resultDecimals > 0) {
return result.replace(/\.?0*$/,'');
}
else {
return result;
}
}
// Included for reference - I didn't write this
function Decimals(x, dec_sep)
{
var tmp=new String();
tmp=x;
if (tmp.indexOf(dec_sep)>-1)
return tmp.length-tmp.indexOf(dec_sep)-1;
else
return 0;
}
Here's a JSFiddle of that
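For example, AddTwo('1.1', '2.2') returns "3.3" rather than the raw 3.3000000000000003, and AddTwo('1.25', '2.75') returns "4" with the trailing zeroes trimmed.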
