I tried to create the Diffie Hellman key exchange system in javascript without plugins. My code unfortunately doesn't work and often creates 2 different secret keys.
Code:
var g = next_Prime_num(Math.ceil(Math.random() * 50));
var n = next_Prime_num(Math.ceil(Math.random() * 50) + 50);
var a = Math.ceil(Math.random() * (n - 1));
var b = Math.ceil(Math.random() * (n - 1));
var A = mod(Math.pow(g, a), n);
var B = mod(Math.pow(g, b), n);
var Ka = mod(Math.pow(B, a), n);
var Kb = mod(Math.pow(A, b), n);
function next_Prime_num(num) {
for (var i = num + 1;; i++) {
var isPrime = true;
for (var d = 2; d * d <= i; d++) {
if (i % d === 0) {
isPrime = false;
break;
}
}
if (isPrime) {
return i;
}
}
}
function mod(n, m) {
return n%m;
}
n: must be a prime number yes, but
g: must be a primitive root of n, and not a just prime number
this is your mistake, you have to add another function to get a primitive root from the givin prim number
The Math.pow() in there is definitely going to have integer overflows every now and then. In javascript the maximum integer you can have without loosing precision or maximum safe integer is (253 - 1) or 9007199254740991 .
What you can do is create a power function which uses modular exponentiation.
Check out this similar question - Diffie-Hellman Key Exchange with Javascript sometimes wrong
Related
I built an application to suggest email addresses fixes, and I need to detect email addresses that are basically not real existing email addresses, like the following:
14370afcdc17429f9e418d5ffbd0334a#magic.com
ce06e817-2149-6cfd-dd24-51b31e93ea1a#stackoverflow.org.il
87c0d782-e09f-056f-f544-c6ec9d17943c#microsoft.org.il
root#ns3160176.ip-151-106-35.eu
ds4-f1g-54-h5-dfg-yk-4gd-htr5-fdg5h#outlook.com
h-rt-dfg4-sv6-fg32-dsv5-vfd5-ds312#gmail.com
test#454-fs-ns-dff4-xhh-43d-frfs.com
I could do multi regex checks, but I don't think I will hit the good rate % of the suspected 'not-real' email addresses, as I go to a specific regex pattern each time.
I looked in:
Javascript script to find gibberish words in form inputs
Translate this JavaScript Gibberish please?
Detect keyboard mashed email addresses
Finally I looked over this:
Unable to detect gibberish names using Python
And It seems to fit my needs, I think. A script that will give me some score about the possibility of the each part of the email address to be a Gibberish (or not real) email address.
So what I want is the output to be:
const strings = ["14370afcdc17429f9e418d5ffbd0334a", "gmail", "ce06e817-2149-6cfd-dd24-51b31e93ea1a",
"87c0d782-e09f-056f-f544-c6ec9d17943c", "space-max", "ns3160176.ip-151-106-35",
"ds4-f1g-54-h5-dfg-yk-4gd-htr5-fdg5h", "outlook", "h-rt-dfg4-sv6-fg32-dsv5-vfd5-
ds312", "system-analytics", "454-fs-ns-dff4-xhh-43d-frfs"];
for (let i = 0; i < strings.length; i++) {
validateGibbrish(strings[i]);
}
And this validateGibberish function logic will be similar to this python code:
from nltk.corpus import brown
from collections import Counter
import numpy as np
text = '\n'.join([' '.join([w for w in s]) for s in brown.sents()])
unigrams = Counter(text)
bigrams = Counter(text[i:(i+2)] for i in range(len(text)-2))
trigrams = Counter(text[i:(i+3)] for i in range(len(text)-3))
weights = [0.001, 0.01, 0.989]
def strangeness(text):
r = 0
text = ' ' + text + '\n'
for i in range(2, len(text)):
char = text[i]
context1 = text[(i-1):i]
context2 = text[(i-2):i]
num = unigrams[char] * weights[0] + bigrams[context1+char] * weights[1] + trigrams[context2+char] * weights[2]
den = sum(unigrams.values()) * weights[0] + unigrams[char] + weights[1] + bigrams[context1] * weights[2]
r -= np.log(num / den)
return r / (len(text) - 2)
So in the end I will loop on all the strings and get something like this:
"14370afcdc17429f9e418d5ffbd0334a" -> 8.9073
"gmail" -> 1.0044
"ce06e817-2149-6cfd-dd24-51b31e93ea1a" -> 7.4261
"87c0d782-e09f-056f-f544-c6ec9d17943c" -> 8.3916
"space-max" -> 1.3553
"ns3160176.ip-151-106-35" -> 6.2584
"ds4-f1g-54-h5-dfg-yk-4gd-htr5-fdg5h" -> 7.1796
"outlook" -> 1.6694
"h-rt-dfg4-sv6-fg32-dsv5-vfd5-ds312" -> 8.5734
"system-analytics" -> 1.9489
"454-fs-ns-dff4-xhh-43d-frfs" -> 7.7058
Does anybody have a hint how to do it and can help?
Thanks a lot :)
UPDATE (12-22-2020)
I manage to write some code based on #Konstantin Pribluda answer, the Shannon entropy calculation:
const getFrequencies = str => {
let dict = new Set(str);
return [...dict].map(chr => {
return str.match(new RegExp(chr, 'g')).length;
});
};
// Measure the entropy of a string in bits per symbol.
const entropy = str => getFrequencies(str)
.reduce((sum, frequency) => {
let p = frequency / str.length;
return sum - (p * Math.log(p) / Math.log(2));
}, 0);
const strings = ['14370afcdc17429f9e418d5ffbd0334a', 'or', 'sdf', 'test', 'dave coperfield', 'gmail', 'ce06e817-2149-6cfd-dd24-51b31e93ea1a',
'87c0d782-e09f-056f-f544-c6ec9d17943c', 'space-max', 'ns3160176.ip-151-106-35',
'ds4-f1g-54-h5-dfg-yk-4gd-htr5-fdg5h', 'outlook', 'h-rt-dfg4-sv6-fg32-dsv5-vfd5-ds312', 'system-analytics', '454-fs-ns-dff4-xhh-43d-frfs'];
for (let i = 0; i < strings.length; i++) {
const str = strings[i];
let result = 0;
try {
result = entropy(str);
}
catch (error) { result = 0; }
console.log(`Entropy of '${str}' in bits per symbol:`, result);
}
The output is:
Entropy of '14370afcdc17429f9e418d5ffbd0334a' in bits per symbol: 3.7417292966721747
Entropy of 'or' in bits per symbol: 1
Entropy of 'sdf' in bits per symbol: 1.584962500721156
Entropy of 'test' in bits per symbol: 1.5
Entropy of 'dave coperfield' in bits per symbol: 3.4565647621309536
Entropy of 'gmail' in bits per symbol: 2.3219280948873626
Entropy of 'ce06e817-2149-6cfd-dd24-51b31e93ea1a' in bits per symbol: 3.882021446536749
Entropy of '87c0d782-e09f-056f-f544-c6ec9d17943c' in bits per symbol: 3.787301737252941
Entropy of 'space-max' in bits per symbol: 2.94770277922009
Entropy of 'ns3160176.ip-151-106-35' in bits per symbol: 3.1477803284561103
Entropy of 'ds4-f1g-54-h5-dfg-yk-4gd-htr5-fdg5h' in bits per symbol: 3.3502926596166693
Entropy of 'outlook' in bits per symbol: 2.1280852788913944
Entropy of 'h-rt-dfg4-sv6-fg32-dsv5-vfd5-ds312' in bits per symbol: 3.619340871812292
Entropy of 'system-analytics' in bits per symbol: 3.327819531114783
Entropy of '454-fs-ns-dff4-xhh-43d-frfs' in bits per symbol: 3.1299133176846836
It's still not working as expected, as 'dave coperfield' gets about the same points as other gibberish results.
Anyone else have better logic or ideas on how to do it?
This is what I come up with:
// gibberish detector js
(function (h) {
function e(c, b, a) { return c < b ? (a = b - c, Math.log(b) / Math.log(a) * 100) : c > a ? (b = c - a, Math.log(100 - a) / Math.log(b) * 100) : 0 } function k(c) { for (var b = {}, a = "", d = 0; d < c.length; ++d)c[d] in b || (b[c[d]] = 1, a += c[d]); return a } h.detect = function (c) {
if (0 === c.length || !c.trim()) return 0; for (var b = c, a = []; a.length < b.length / 35;)a.push(b.substring(0, 35)), b = b.substring(36); 1 <= a.length && 10 > a[a.length - 1].length && (a[a.length - 2] += a[a.length - 1], a.pop()); for (var b = [], d = 0; d < a.length; d++)b.push(k(a[d]).length); a = 100 * b; for (d = b =
0; d < a.length; d++)b += parseFloat(a[d], 10); a = b / a.length; for (var f = d = b = 0; f < c.length; f++) { var g = c.charAt(f); g.match(/^[a-zA-Z]+$/) && (g.match(/^(a|e|i|o|u)$/i) && b++, d++) } b = 0 !== d ? b / d * 100 : 0; c = c.split(/[\W_]/).length / c.length * 100; a = Math.max(1, e(a, 45, 50)); b = Math.max(1, e(b, 35, 45)); c = Math.max(1, e(c, 15, 20)); return Math.max(1, (Math.log10(a) + Math.log10(b) + Math.log10(c)) / 6 * 100)
}
})("undefined" === typeof exports ? this.gibberish = {} : exports)
// email syntax validator
function validateSyntax(email) {
return /^(([^<>()[\]\\.,;:\s#"]+(\.[^<>()[\]\\.,;:\s#"]+)*)|(".+"))#((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/.test(email.toLowerCase());
}
// shannon entropy
function entropy(str) {
return Object.values(Array.from(str).reduce((freq, c) => (freq[c] = (freq[c] || 0) + 1) && freq, {})).reduce((sum, f) => sum - f / str.length * Math.log2(f / str.length), 0)
}
// vowel counter
function countVowels(word) {
var m = word.match(/[aeiou]/gi);
return m === null ? 0 : m.length;
}
// dummy function
function isTrue(value){
return value
}
// validate string by multiple tests
function detectGibberish(str){
var strWithoutPunct = str.replace(/[.,\/#!$%\^&\*;:{}=\-_`~()]/g,"");
var entropyValue = entropy(str) < 3.5;
var gibberishValue = gibberish.detect(str) < 50;
var vovelValue = 30 < 100 / strWithoutPunct.length * countVowels(strWithoutPunct) && 100 / strWithoutPunct.length * countVowels(str) < 35;
return [entropyValue, gibberishValue, vovelValue].filter(isTrue).length > 1
}
// main function
function validateEmail(email) {
return validateSyntax(email) ? detectGibberish(email.split("#")[0]) : false
}
// tests
document.write(validateEmail("dsfghjdhjs#gmail.com") + "<br/>")
document.write(validateEmail("jhon.smith#gmail.com"))
I have combined multiple tests: gibberish-detector.js, Shannon entropy and counting vowels (between 30% and 35%). You can adjust some values for more accurate result.
A thing you may consider doing is checking each time how random each string is, then sort the results according to their score and given a threshold exclude the ones with high randomness. It is inevitable that you will miss some.
There are some implementations for checking the randomness of strings, for example:
https://en.wikipedia.org/wiki/Diehard_tests
http://www.cacert.at/random/
You may have to create a hash (to map chars and symbols to sequences of integers) before you apply some of these because some work only with integers, since they test properties of random numbers generators.
Also a stack exchange link that can be of help is this:
https://stats.stackexchange.com/questions/371150/check-if-a-character-string-is-not-random
PS. I am having a similar problem in a service since robots create accounts with these type of fake emails. After years of dealing with this issue (basically deleting manually from the DB the fake emails) I am now considering introducing a visual check (captcha) in the signup page to avoid the frustration.
Given a non-negative number say 1213, it should return 12 because there are 12 possible integers similar to 1213 i.e., 1123,1132,1213,1231,1312,1321,2113,2131,2311,312,3121 and 3211. Same with 10, it should return 1 and 12 should return 2 and if the number is 120 it should return 4 as combinations are 120,102,210,201.
You can use this formula to get the total number of unique permutations excluding permutations with leading zero.
Lets define some symbols:
n = Total Number of digits
z = Number of zeros
r1, r2, ..., rn = repetition count of digits with count > 1
fact(p) = factorial of number of p
Total permutations = (n - z) * fact(n - 1) / fact(r1) * fact(r2) * .... * fact(rn)
For example, for 1213,
n = 4, z = 0, r1 (digit 1) = 2
permutations = (4 - 0) * fact(4 - 1) / fact(2) = 4 * 6 / 2 = 12
You can easily convert this to program.
function factorial(n) {
if (n <=1)
return 1;
return n * factorial(n-1);
}
function getPermutations(number) {
var n = number.toString().split('').length;
var r = {};
number.toString().split('').forEach(function(digit){
r[digit] = r[digit] || 0;
r[digit] += 1;
});
var z = number.toString().split('').reduce(function(count, digit) {
return (digit === '0') ? count + 1 : count;
}, 0);
var denominator = Object.keys(r).map(function (key) { return r[key]; }).reduce(function(result, curr) {
return result * factorial(curr);
}, 1);
//console.log(n, r, z);
return (n - z) * factorial(n - 1) / denominator;
}
var result = getPermutations(1216);
console.log(result);
Note : This is basic implementation and would not be the most optimum. Also, factorial calculation involves large numbers and would probably fail for large inputs.
You are looking for an anagram algorithm :
This script find every anagram of a string then delete every number starting with zero :
var allAnagrams = function(arr) {
var anagrams = {};
arr.forEach(function(str) {
var recurse = function(ana, str) {
if (str === '')
anagrams[ana] = 1;
for (var i = 0; i < str.length; i++)
recurse(ana + str[i], str.slice(0, i) + str.slice(i + 1));
};
recurse('', str);
});
return Object.keys(anagrams);
}
var arr = ['120']; //declare your number
var anag = allAnagrams(arr); //send it to the function
for (var i in anag) { //delete leading 0
if((anag[i].charAt(0)) === '0' ) {
anag.splice(i);
}
}
console.log(anag); //print array
console.log(anag.length); // print length
Here the output will be :
["102", "120", "201", "210"]
4
I am trying to set a function that creates a random number between a range
I need to make it working with negative values so I can do
randomBetweenRange( 10, 20)
randomBetweenRange(-10, 10)
randomBetweenRange(-20, -10)
This is what I am trying, it is a bit confusing and at the moment randomBetweenRange(-20, -10) is not working..
function randomBetweenRange(a, b){
var neg;
var pos;
if(a < 0){
neg = Math.abs(a) + 1;
pos = (b * 2) - 1;
}else{
neg = -Math.abs(a) + 1;
var pos = b;
}
var includeZero = true;
var result;
do result = Math.ceil(Math.random() * (pos + neg)) - neg;
while (includeZero === false && result === 0);
return result;
}
How can I make it working?
ASSUMING you will always have the little value on first, this code will do the tricks, see the comment below and don't hesitate to ask !
var a=parseInt(prompt("First value"));
var b=parseInt(prompt("Second value"));
var result = 0;
// Here, b - a will get the interval for any pos+neg value.
result = Math.floor(Math.random() * (b - a)) + a;
/* First case is we got two neg value
* We make the little one pos to get the intervale
* Due to this, we use - a to set the start
*/
if(a < 0) {
if(b < 0) {
a = Math.abs(a);
result = Math.floor(Math.random() * (a + b)) - a;
}
/* Second case is we got two neg value
* We make the little one neg to get the intervale
* Due to this, we use - a to set the start
*/
} else {
if(b > 0) {
a = a*-1;
result = Math.floor(Math.random() * (a + b)) - a;
}
}
console.log("A : "+a+" | B : "+b+" | Int : "+(a+b)+"/"+Math.abs((a-b)));
console.log(result);
You have declared the variable 'pos' in the beginning itself. Then why do you declare it in the 'else' part? ( var pos = b;)
Hence, for this statement,
do result = Math.ceil(Math.random() * (pos + neg)) - neg;
'pos' will not have any value.
do result = Math.ceil(Math.random() * (pos + neg)) - neg;
Specifically Math.random() * (pos + neg) returns the wrong range. If pos = -20 and neg = -30, the range between pos and neg should be 10, but your operation returns -50. You should also add one to the range because its technically the amount of possibilities (ex: if you want to generate your function to return {0,1}, the range between pos and neg is 1, but there are two possibilities of numbers to return) and subtract another 1 from result because you're using Math.ceil
Your else clause also redeclares var pos
If you want to generate a number between -50 and 50 - Get a random number between 0 and 100 then subtract 50
var randomNumber = Math.floor(Math.random() * 101) - 50;
console.log(randomNumber);
With a simple JS code I calculate the sum of the following series:
b is a constant, could be any value.
The JS code tries to find for which minimum value r (given it 1000 attempts in this example), this inequality is valid:
If r goes below 1.50000, results get silly.
var pre = 0.0;
var r = 1.50010;
var b = 0.01;
for (var p = 0; p < 1000; p++) {
var sum = 0;
for (var i = 0; i <= 33; i++) {
sum += Math.pow(r, i);
}
sum *= b;
if ((2 * b * Math.pow(r, 34)) > sum) {
pre = r;
r -= 0.00001;
r = parseFloat(r.toFixed(5));
} else {
console.log(pre);
console.log(((2 * b * Math.pow(r + 0.00001, 34)) - sum).toFixed(8));
break;
}
}
The code breaks at pre == 1.5, and if I were to force r = 1.49999, console.log(pre) returns 0. Why?
The code stops when r = 1.5 because that's the minimum value for which your inequality is valid (within the accuracy you're using, anyway). If you start r off at less than that, it's going to break on the first time through the loop since that if statement is never true, so you never set pre to be r.
Here's a graph showing what happens with the two sides of the inequality near r = 1.5:
Code for the above graph:
import numpy as np
import matplotlib.pyplot as plt
x = np.linspace(1.4, 1.6, 100)
plt.plot(x , 2 * x ** 34, label = '$2r^{34}$')
plt.plot(x , sum([x ** i for i in xrange(34)]), label = '$\sum_{i = 0}^{33}\/ r^i$')
plt.yscale('log')
plt.legend()
plt.show()
Also, if b is positive, you don't need to do anything with b in your code, since you're multiplying both sides of the inequality by b. And if b is negative, then you need to reverse the inequality.
Oh, and another thing: for algorithms of this type, you may want to consider something more like the bisection method, which halves the search space each time through the iteration. You could use 1 and 2 as the endpoints, since 1 is definitely too low and 2 is definitely too high. You'd stop when the difference between the sides of the inequality fell below some threshold.
I am assuming that when the loop breaks, you want to display the difference between the left and right of the inequality. The problem is because "sum" is a running sum that is from the previous loop, the calculation is not correct.
Now when you force r = 1.49999, the if clause is never executed so "pre" stays at zero as initiated on the first line.
Your full solution should be like this:
var pre = 0.0;
var r = 1.50010;
var b = 0.01;
for (var p = 0; p < 1000; p++) {
var sum = 0;
for (var i = 0; i <= 33; i++) {
sum += Math.pow(r, i);
}
sum *= b;
var diff = (2 * b * Math.pow(r, 34) - sum).toFixed(8);
console.log('diff: ' + diff);
if ((2 * b * Math.pow(r, 34)) > sum) {
pre = r;
r -= 0.00001;
r = parseFloat(r.toFixed(5));
} else {
console.log('--breaking--');
console.log(pre);
//console.log(((2 * b * Math.pow(r + 0.00001, 34)) - sum).toFixed(8));
break;
}
}
and the output is:
diff: 3.91098781
diff: 3.52116542
diff: 3.13150396
diff: 2.74200338
diff: 2.35266364
diff: 1.96348468
diff: 1.57446646
diff: 1.18560893
diff: 0.79691205
diff: 0.40837575
diff: 0.02000000
diff: -0.36821526
--breaking--
1.5
In math how do I obtain the closest number of a number that is divisible by 16?
For example I get the random number 100 and I want to turn that number (using a math function) into the closest number to 100 that is divisible by 16 (In this case its 96)
I'm trying to do this in JavaScript but if I knew the math formula for it I would easily do it in any language.
Thank you,
Regards
Generate a random integer. Multiply it by 16.
Divide by 16, round, and multiply by 16:
n = Math.round(n / 16) * 16;
function GetRandomNumberBetween(lo, hi) {
return Math.floor(lo + Math.random() * (hi - lo));
}
Number.prototype.FindClosestNumberThatIsDivisibleBy = function(n) {
return Math.round(this / n) * n; //simplify as per Guffa
/* originally:
var c = Math.ceil(n);
var f = Math.floor(n);
var m = num % n;
var r = f * n;
if (m > (n / 2))
r = c * n;
return r;
*/
};
var r = GetRandomNumberBetween(10, 100);
var c = r.FindClosestNumberThatIsDivisibleBy(16);
function closest(n) {
var r = 0, ans = 0;
r = n % 16
if r < 8 {
ans = n - r
} else {
ans = n + (16 - r)
}
return ans;
}
Here's how I understand your question. You're given a number A, and you have to find a number B that is the closest possible multiple of 16 to A.
Take the number given, "A" and divide it by 16
Round the answer from previous step to the nearest whole number
multiply the answer from previous step by 16
there's the pseudocode, hope it's what you're looking for ;-)
A general JS solution
var divisor = 16;
var lower = 0;
var upper = 100;
var randDivisible = (Math.floor(Math.random()*(upper-lower))+lower)*divisor;
alert(randDivisible);