I have a function in JavaScript that is supposed to calculate the nth prime:
function largestPrime(n) {
    // const start = Date.now(); // uncomment for performance testing
    if (n < 1) {
        return -1; // no nth prime for n < 1
    }
    if (n === 1) {
        return 2; // the first prime
    }
    let arr = []; // odd primes found so far (2 is kept out of the array)
    let i = 3;
    while (arr.length < n) {
        if (helper(i, arr)) {
            arr.push(i);
        }
        i += 2; // only odd candidates need testing
    }
    // let retVal; // uncomment for performance testing
    // if (Date.now() - start > 1000) {
    //     retVal = `${((Date.now() - start) / 1000).toFixed(2)}s`;
    // } else {
    //     retVal = `${(Date.now() - start)}ms`;
    // }
    // console.log(`Calculated after ${retVal}.`);
    return ([2, ...arr])[n - 1]; // put 2 back in front, then take the nth prime
}
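Calling it looks like this (the second expected value matches my test notes further down):

console.log(largestPrime(6));       // 13
console.log(largestPrime(1000000)); // 15485863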
It uses a helper function to check whether a candidate is prime, dividing only by the primes found so far:
function helper(n, arr) {
    // arr holds the odd primes found so far; arr[i] is used as a trial divisor
    let i = 0;
    while (10 * i * i < n) {
        if (n % arr[i] === 0) {
            return false;
        }
        i++;
    }
    return true;
}
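For example (the second argument is the list of odd primes found so far):

helper(25, [3, 5, 7, 11, 13, 17, 19, 23]); // false, caught at arr[1] = 5
helper(23, [3, 5, 7, 11, 13, 17, 19]);     // true, the loop stops after checking arr[0] and arr[1]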
My question is about this line: while (10 * i * i < n). Why does it work? It cuts down the number of checks a lot, but there doesn't seem to be any reasoning behind the constant. After realising that I never need to test candidates for divisibility by 2 (they only go up by i += 2), I stopped storing 2 in the prime array, which shifts every prime one index to the left. Before that shift, the largest constant that still gave accurate results was 5, i.e. while (5 * i * i < n); after the shift, 10 works. Does anyone have a way to generalize this? If it can be generalized, I can try shifting the array to the left once more and see how large the constant can get.
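For reference, the cutoff I actually understand is the square root: an odd composite n must have an odd prime factor p with p * p <= n, so a check like this one (helperSqrt is just a name I'm using for the comparison) has to be correct:

function helperSqrt(n, arr) {
    // classic bound: stop once the trial prime squared exceeds n
    let i = 0;
    while (i < arr.length && arr[i] * arr[i] <= n) {
        if (n % arr[i] === 0) {
            return false;
        }
        i++;
    }
    return true;
}

So what I'm really asking is why 10 * i * i can stand in for arr[i] * arr[i] once 2 is no longer stored in the array.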
I tested the constants 5 through 10 all the way up to the 100,000,000th prime (which is 2038074743, for anyone curious) and they all still produce correct results. My notes from the smaller test runs:
// f(1000000)=15485863 and f(2000000)=32452843 and f(3000000)=49979687: 5 works up to 4000000
// f(1000000)=15485863 and f(2000000)=32452843 and f(3000000)=49979687: 6 works up to 4000000
// f(1000000)=15485863 and f(2000000)=32452843 and f(3000000)=49979687: 7 works up to 4000000
// f(1000000)=15485863 and f(2000000)=32452843 and f(3000000)=49979687: 8 works up to 4000000
// f(1000000)=15485863 and f(2000000)=32452843 and f(3000000)=49979687: 9 works up to 4000000
// f(1000000)=15485863 and f(2000000)=32452843 and f(3000000)=49979687: 10 works up to 4000000
// f(1000000)=15485843 and f(2000000)=32452789: 11 only worked for smaller numbers
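Something like the sketch below is what I have in mind for probing a constant c without running the full nth-prime search: build the prime list with the shortcut cutoff and cross-check every verdict against plain trial division up to the square root. The function name firstDisagreement and the variable names are just made up for this sketch.

function firstDisagreement(c, limit) {
    const arr = []; // odd primes accepted so far by the shortcut check
    for (let cand = 3; cand <= limit; cand += 2) {
        // shortcut check: same shape as helper, but with constant c
        // (the i < arr.length guard only avoids reading past the array,
        //  it does not change any verdict)
        let shortcutSaysPrime = true;
        let i = 0;
        while (i < arr.length && c * i * i < cand) {
            if (cand % arr[i] === 0) {
                shortcutSaysPrime = false;
                break;
            }
            i++;
        }
        // reference check: plain trial division up to sqrt(cand)
        let reallyPrime = true;
        for (let d = 3; d * d <= cand; d += 2) {
            if (cand % d === 0) {
                reallyPrime = false;
                break;
            }
        }
        if (shortcutSaysPrime !== reallyPrime) {
            return cand; // first odd number the shortcut gets wrong
        }
        if (reallyPrime) {
            arr.push(cand);
        }
    }
    return null; // no disagreement found below limit
}

console.log(firstDisagreement(10, 1000000)); // does 10 ever break below a million?
console.log(firstDisagreement(11, 1000000)); // and where does 11 first break?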