I want to populate an array with all of the possible integers between 1 000 000 and 10 000 000. When I run the loop below, it crashes the chrome tab. How can I accomplish this?
// Fill arrList with every integer in [1000000, 10000000) — 9,000,000 values.
var arrList = [];
var list;
// Appends the 9 million integers to the module-level arrList.
function gen() {
for (var i = 1000000; i < 10000000; i++) {
arrList.push(i);
}
}
gen();
list = arrList.join(' '); // This line causes the crash
// NOTE(review): per the answer below, the crash is actually triggered here —
// the console cannot render a ~72 MB string; join() itself completes.
console.log(list);
It's the console.log(list) that causes the crash. The console can't handle trying to display a string that's 72 MB long.
When I take that line out, the script runs successfully, although it takes several seconds. This alerts 71999999 after 2-3 seconds.
// Same generator without console.log: runs in a few seconds and alerts the
// length of the joined string.
var arrList = [];
var list;
// Appends every integer in [1000000, 10000000) to arrList.
function gen() {
for (var i = 1000000; i < 10000000; i++) {
arrList.push(i);
}
}
gen();
list = arrList.join(' ');
// 9,000,000 seven-digit numbers + 8,999,999 separating spaces = 71,999,999 chars.
alert(list.length);
Related
I'm using Adobe LiveCycle Designer ES4 to create a report. Based on XML, I try to fill a table. I have a problem with an array: I push data into the array in a for loop. Below are examples of my code:
Results - blank textbox
// Example 1 — rawValue is assigned once, AFTER the loop.
var print_data = xfa.record.containerPrintingData;
var sfcArray = [];
for (var i = 0; i < 10; i++) {
// NOTE(review): resolveNode presumably throws once i exceeds the number of
// sfcPrintingData records (see the answer below) — confirm the record count.
// `sfc` is an implicit global: there is no `var` before it.
sfc = print_data.resolveNode("sfcPrintingData["+ i +"]").sfc.value;
sfcArray.push(sfc);
};
// Never reached if an iteration above threw, which is why the textbox is blank.
this.rawValue = sfcArray.toString();
Results - get all items
// Example 2 — rawValue is reassigned on EVERY iteration.
var print_data = xfa.record.containerPrintingData;
var sfcArray = [];
for (var i = 0; i < 10; i++) {
sfc = print_data.resolveNode("sfcPrintingData["+ i +"]").sfc.value;
sfcArray.push(sfc);
// Updating rawValue each pass preserves the items fetched so far even if a
// later iteration fails — that is why this variant shows all available items.
this.rawValue = sfcArray.toString();
}
Results - get 2nd item x 10
// Example 3 — the record index is hard-coded to 1, so the 2nd record is
// pushed on every pass: the result is "2nd item x 10".
var print_data = xfa.record.containerPrintingData;
var sfcArray = [];
for (var i = 0; i < 10; i++) {
sfc = print_data.resolveNode("sfcPrintingData[1]").sfc.value;
sfcArray.push(sfc);
this.rawValue = sfcArray.toString();
}
Why doesn't the 1st example work while the 2nd works correctly? I need to use this array in other loops as well. How can I solve this?
That happens because the data source has only 2 items, but you loop over it 10 times.
When this.rawValue = sfcArray.toString(); is inside the loop, this.rawValue gets updated on each pass: after the first iteration it holds one item, after the second it holds two.
On the third iteration there is no 3rd record, so the code breaks with an error — but this.rawValue still holds the 2 items already assigned.
When this.rawValue = sfcArray.toString(); is outside the loop, the code breaks with the same error before the assignment ever runs, so this.rawValue is left empty.
Java Code sample 1:
// Sample 1: allocates a FRESH inner ArrayList for each of the 5000 outer
// slots — 5001 distinct ArrayList instances in total.
ArrayList<ArrayList<Integer>> outerList = new ArrayList<ArrayList<Integer>>();
for(i=0; i<5000; i++){
outerList.add(new ArrayList<Integer>());
// Fill the just-added inner list with 5000 copies of the value 1.
for(j=0; j<5000; j++){
outerList.get(i).add(1);
}
}
Java Code sample 2:
// Sample 2: ONE shared inner list referenced 5000 times — only 2 ArrayList
// instances exist, so mutating innerList_n would change every "row" at once.
ArrayList<ArrayList<Integer>> outerList_n = new ArrayList<ArrayList<Integer>>();
ArrayList<Integer> innerList_n = new ArrayList<Integer>();
for(j=0; j<5000; j++){
innerList_n.add(1);
}
for(i=0; i<5000; i++){
outerList_n.add(innerList_n);
}
Description:
Code sample 1 creates a new ArrayList every time it adds to the outerList, although every entry of the innerList is always 1.
Code sample 2 creates one array with the all entries as 1 and then adds it to the outerList in the for loop.
Questions:
1) Do they both have different memory footprints or same?
I used jdk.nashorn.internal.ir.debug.ObjectSizeCalculator to calculate it and the following are the results:
Size of outerList: 514907072
Size of outerList_n: 130048
2) I do not understand why the above difference in memory. Isn't the data structure same in both cases?
3) Is this the same case in javascript? When I tried the respective codes for javascript for the above 2 scenarios, I got the same memory footprint in both cases.
Javascript Code:
/**
 * Rough, engine-independent estimate of an object graph's size in bytes.
 *
 * Counting rules: booleans 4 bytes, numbers 8, strings 2 bytes per UTF-16
 * unit. Plain (non-array) objects are also charged 2 bytes per character of
 * each key name; array index keys are "free". Each distinct value (by ===)
 * is counted exactly once, so shared references and cycles are not
 * double-counted — note this also collapses equal primitive strings/numbers
 * found in different places. Functions, symbols, undefined and null add 0.
 *
 * @param {*} object - root of the graph to measure
 * @returns {number} estimated size in bytes
 */
function sizeof(object){
  // Worklist of every distinct value discovered so far; index 0 is the root.
  // New values are appended as they are found, so this doubles as the
  // "already seen" set for de-duplication.
  var objects = [object];
  var size = 0;
  for (var index = 0; index < objects.length; index++){
    switch (typeof objects[index]){
      case 'boolean': size += 4; break;
      case 'number': size += 8; break;
      case 'string': size += 2 * objects[index].length; break;
      case 'object':
        // Plain objects pay for their key names (Array.isArray is the
        // idiomatic and cross-realm-safe array check).
        if (!Array.isArray(objects[index])){
          for (var key in objects[index]) size += 2 * key.length;
        }
        // Enqueue each property value, skipping any value already seen
        // anywhere in the graph (identity/strict-equality de-duplication).
        for (var key in objects[index]){
          var processed = false;
          for (var search = 0; search < objects.length; search++){
            if (objects[search] === objects[index][key]){
              processed = true;
              break;
            }
          }
          if (!processed) objects.push(objects[index][key]);
        }
    }
  }
  return size;
}
// TWO SEPARATE FOR LOOPS
var start = new Date().getTime();
var i,j ;
var inner_arr1 = [];
var outer_arr1 = [];
// Build ONE inner array of 100 strings...
for (i = 0; i < 100; i++) {
inner_arr1.push("abcdefghijklmnopqrstuvwxyz");
}
// ...then push the SAME array 100 times: outer_arr1 holds 100 references to a
// single shared inner array (2 arrays in total — the Java "sample 2" shape).
for (j = 0; j < 100; j++) {
outer_arr1.push(inner_arr1);
}
var end = new Date().getTime();
// `print` is a Nashorn/jjs builtin, not a browser or Node API.
print("size of outer_arr1: "+sizeof(outer_arr1));
print("time of outer_arr1 (in ms): "+(end-start))
// NESTED FOR LOOPS
var start = new Date().getTime();
var outer_arr2 = [];
for (j = 0; j < 100; j++) {
  // A fresh inner array per outer slot — 101 distinct arrays in total
  // (the Java "sample 1" shape this experiment is meant to reproduce).
  var inner_arr2 = [];
  for (i = 0; i < 100; i++) {
    inner_arr2.push("abcdefghijklmnopqrstuvwxyz");
  }
  // BUG FIX: the original pushed inner_arr1 (left over from the previous
  // experiment), so both benchmarks measured the same shared-array structure —
  // which is one reason the two "memory footprints" came out identical.
  // Note: sizeof() also de-duplicates by ===, so the 100 equal string values
  // are still only counted once; that independently flattens the numbers.
  outer_arr2.push(inner_arr2);
}
var end = new Date().getTime();
print("size of outer_arr2: "+sizeof(outer_arr2));
print("time of outer_arr2 (in ms): "+(end-start))
/*
COMMAND:
jjs findingSize.js
OUTPUT:
size of outer_arr1: 52
time of outer_arr1 (in ms): 45
size of outer_arr2: 52
time of outer_arr2 (in ms): 58
*/
Do they both have different memory footprints or same?
Different. In the first case you are creating 5001 instances of ArrayList. In the second you are creating only two instance of ArrayList
Isn't the data structure same in both cases?
The structure of data is the same in both cases - you have array of arrays. The only difference is that in the second case all your innner arrays are same - they share the common memory and when you change value in some row, all rows will be changed
Code sample 1 : it will create 5000 different Lists filled with 5000 times the value 1, all saved in outerList : 5000+1 = 5001 different List
Code sample 2 : outerList contains 5000 times the same list (filled with 5000 times values 1) : 1+1 = 2 different Lists
Seems to me the evaluation is correct. Your first code sample has an inner loop, so it does quadratically more work (5000 × 5000 iterations) than the second sample.
To put it plainly, the 2nd sample will iterate the first loop 5000 times, then the second loop 5000 times.
The first example will iterate the inner loop 5000 times for each pass of the outer loop (another 5000 times), so you get 5000 * 5000 iterations of outerList.get(i).add(1).
I hope this answers your question.
I have a JavaScript Object as:
// A plain object literal with two fixed string keys (note: assigned without
// `var`, so object1 becomes an implicit global in sloppy mode).
object1 = {
"abc" : "def",
"ghi" : "jkl"
}
Now, when I write object1["abc"]. Is this search a linear time search, i.e., O(n) or constant time search, i.e., O(1)?
Accessing an array element or object property in JavaScript is, on average, an O(1) operation, though engine-level details (hidden classes, dictionary mode, caching) mean real-world timings are not perfectly constant.
Well, I've made a simple program testing this, and it doesn't show constant access time. Most likely there are some other things involved - optimization or memory management or something, but it clearly shows dependence on amount of attributes.
For object with 10 attributes, it takes around 160 ms to do the 100 million accesses, for object with 100k attributes, it takes around 650 ms (on my PC with Chrome). So it doesn't look constant at all, but it is true, that for "normal" amounts of attributes it will not probably matter.
JS:
// Builds an object with `amount` numeric properties ("id0".."id<amount-1>"),
// then times 100 million reads of the fixed key 'id3' and writes the elapsed
// milliseconds into the #result element.
function go(amount) {
  var object1 = {};
  var i = 0;
  while (i < amount) {
    object1['id' + i] = i;
    i++;
  }
  var start = new Date().getTime();
  var total = 0;
  for (var n = 0; n < 100000000; n++) {
    total += object1['id3'];
  }
  var end = new Date().getTime();
  // Logging the accumulator keeps a data dependency on the loop so the
  // engine cannot optimize the reads away.
  console.log(total);
  document.getElementById('result').innerHTML = end - start;
}
HTML:
<button onclick="go(10);">Run with 10 attributes</button>
<button onclick="go(100000);">Run with 100 000 attributes</button>
<br>
The result is <span id="result">0</span> ms
Here is the link of Fiddle
This question already has answers here:
How to append something to an array?
(30 answers)
Why is array.push sometimes faster than array[n] = value?
(5 answers)
Closed 9 years ago.
I was wondering if there is a reason to choose
array.push(element)
over
array[array.length] = element
or vice-versa.
Here's a simple example where I have an array of numbers and I want to make a new array of those numbers multiplied by 2:
// Variant 1: build the doubled array with push().
var numbers = [5, 7, 20, 3, 13];
var arr1 = [];
var len = numbers.length;
for(var i = 0; i < len; i++){
arr1.push(numbers[i] * 2);
}
alert(arr1);
// Variant 2: append by assigning to the current length instead of push().
// Both variants produce the same array; only the append idiom differs.
var arr2 = [];
for(var i = 0; i < len; i++){
arr2[arr2.length] = numbers[i] * 2;
}
alert(arr2);
The fastest way to do it with current JavaScript technology, while also using minimal code, is to store the last element first, thereby allocating the full set of array indices, and then counting backwards to 0 while storing the elements, thereby taking advantage of nearby memory storage positions and minimizing cache misses.
// Variant 3: declare the target array, then fill it backwards from the last
// index so the full index range is allocated on the first store.
var arr3 = [];
for (var i = len; i>0;){
i--;
// BUG FIX: the original filled (and alerted) arr2, leaving arr3 unused;
// the backwards-fill demonstration needs its own array.
arr3[i] = numbers[i] * 2;
}
alert(arr3);
Note that if the number of elements being stored is "big enough" in the view of the JavaScript engine, then the array will be created as a "sparse" array and never converted to a regular flat array.
Yes, I can back this up. The only problem is that JavaScript optimizers are extremely aggressive in throwing away calculations that aren't used. So in order for the results to be calculated fairly, all the results have to be stored (temporarily). One further optimization that I believed to be obsolete, but which actually improves the speed even further, is to pre-initialize the array using new Array(*length*). That's an old-hat trick that for a while made no difference, but now, in these days of extreme JavaScript engine optimization, it appears to make a difference again.
<script>
// Copies `set` front-to-back by assigning each element at the result's
// current tail index (the array[array.length] append idiom).
function arrayFwd(set) {
  var out = [];
  var count = set.length;
  for (var idx = 0; idx < count; idx++) {
    out[out.length] = set[idx];
  }
  return out;
}
// Copies `set` by pre-sizing the result and filling it back-to-front, so the
// full index range is allocated up front.
function arrayRev(set) {
  var out = new Array(set.length);
  for (var idx = set.length - 1; idx >= 0; idx--) {
    out[idx] = set[idx];
  }
  return out;
}
// Copies `set` using Array.prototype.push for each element.
function arrayPush(set) {
  var out = [];
  for (var idx = 0, count = set.length; idx < count; idx++) {
    out.push(set[idx]);
  }
  return out;
}
// Global sink: every benchmark run's output is stored here so optimizers
// cannot detect that the results are unused and skip the work entirely.
// BUG FIX: the original assigned `results` without declaring it — an implicit
// global that throws a ReferenceError in strict mode / ES modules.
var results = [];
/**
 * Wraps `f` in a benchmark driver.
 * @param {Function} f - function under test
 * @param {number} n - repetition count (f actually runs n+1 times: do/while)
 * @returns {Function} a function (x) => average elapsed ms per repetition
 */
function timer(f, n) {
  return function(x) {
    var t0 = new Date();
    var remaining = n;
    do { results.push(f(x)); } while (remaining-- > 0);
    return (new Date() - t0) / n;
  };
}
// Fixture: the first 4096 triangular numbers (implicit global `set`).
set = [];
for (i=0; i<4096; i++)
set[i] = (i)*(i+1)/2;
// 500 timed repetitions of each copying strategy.
timers = {
forward: timer(arrayFwd, 500),
backward: timer(arrayRev, 500),
push: timer(arrayPush, 500)
};
// NOTE(review): document.write assumes this runs while the page is parsing.
for (k in timers) {
document.write(k, ' = ', timers[k](set), ' ms<br />');
}
</script>
Opera 12.15:
forward = 0.12 ms
backward = 0.04 ms
push = 0.09 ms
Chrome (latest, v27):
forward = 0.07 ms
backward = 0.022 ms
push = 0.064 ms
(for comparison, when results are not stored, Chrome produces these numbers:
forward = 0.032 ms
backward = 0.008 ms
push = 0.022 ms
This is almost four times faster versus doing the array forwards, and almost three times faster versus doing push.)
IE 10:
forward = 0.028 ms
backward = 0.012 ms
push = 0.038 ms
Strangely, Firefox still shows push as faster. There must be some code re-writing going on under the hood with Firefox when push is used, because accessing a property and invoking a function are both slower than using an array index in terms of pure, un-enhanced JavaScript performance.
How can you, in using a random number generator, stop a number from appearing if it has already appeared once?
Here is the current code:
var random = Math.ceil(Math.random() * 24);
But the numbers appear more than once.
You can use an array of possible values ( I think in your case it will be 24 ) :
// Pool of the 24 candidate values (1..24); picked numbers are removed from
// the pool so they can never repeat.
var values = [];
var i = 1;
while (i <= 24) {
  values.push(i);
  i += 1;
}
When you want to pick a random number you just do:
var random = values.splice(Math.random()*values.length,1)[0];
If you know how many numbers you want then it's easy, first create an array:
// Build the candidate pool 0..24 (25 values, matching the i <= 24 bound).
var arr = [];
for (var k = 0; k < 25; k++) {
  arr.push(k);
}
Then you can shuffle it with this little function:
/**
 * Returns a uniformly shuffled copy of `arr` (Fisher–Yates on a copy; the
 * input array is not mutated).
 *
 * BUG FIX: the original mapped each element to [Math.random(), i] and
 * returned val[1] — i.e. it returned shuffled INDICES, not the array's
 * elements (it only appeared to work because the caller's array was 0..24).
 * It also relied on comparator-less sort() of pairs, which compares their
 * string forms and does not produce an unbiased shuffle.
 *
 * @param {Array} arr - array to shuffle
 * @returns {Array} new array containing arr's elements in random order
 */
function shuffle(arr) {
  var out = arr.slice();
  for (var i = out.length - 1; i > 0; i--) {
    // Pick a random slot in [0, i] and swap it into position i.
    var j = Math.floor(Math.random() * (i + 1));
    var tmp = out[i];
    out[i] = out[j];
    out[j] = tmp;
  }
  return out;
}
And use it like so:
console.log(shuffle(arr)); //=> [2,10,15..] random array from 0 to 24
You can always use an hashtable and before using the new number, check if it is in there or not. That would work for bigger numbers. Now for 24, you can always shuffle an array.
You could put the numbers you generate in an array and then check against that. If the value is found, try again:
// Module that hands out random integers in [1, 24] without repeats.
var RandomNumber = (function()
{
    // Numbers handed out so far; cleared once all 24 values have been used.
    var _used = [];
    return {
        /**
         * Returns a random integer in [1, 24] not returned since the last
         * reset. FIXES vs the original: (1) once all 24 values are exhausted
         * the history is cleared (the reset the original comment suggested),
         * removing the guaranteed stack overflow; (2) floor(...)+1 replaces
         * ceil(...), which could (rarely) yield 0; (3) indexOf replaces the
         * manual linear scan.
         */
        get: function()
        {
            if (_used.length >= 24)
            {
                // Every value has been seen: start a fresh cycle.
                _used.length = 0;
            }
            var random = Math.floor(Math.random() * 24) + 1;
            if (_used.indexOf(random) !== -1)
            {
                // Collision: retry. Depth is probabilistically tiny because
                // at least one of the 24 values is always still available.
                return this.get();
            }
            _used.push(random);
            return random;
        }
    }
})();
You can test being able to get all unique values like so:
// Drawing exactly 24 times should log each value 1..24 once, in random order.
for(var i = 0; i < 24; i++)
{
console.log( RandomNumber.get() );
}
The only issue currently is that you will get a stack overflow error if you try and get a random number more times than the amount of possible numbers you can get (in this case 24).