// experiment to test whether the common for-loop length-caching optimization is worth the effort
const arraySize = 1000;

// fill an array with (crap) data:
const a = [];
for (let n = 0; n < arraySize; ++n) {
  a.push(Math.random());
}

const trials = 1000;
let timeTotal1 = 0;
let timeTotal2 = 0;

for (let s = 0; s < trials; s++) {
  // method1: reading a.length on every iteration:
  const start = performance.now();
  for (let i = 0; i < a.length; ++i) {}
  const end = performance.now();
  timeTotal1 += end - start;

  // method2: recording a.length once at the start:
  const start2 = performance.now();
  for (let j = 0, l = a.length; j < l; ++j) {}
  const end2 = performance.now();
  timeTotal2 += end2 - start2;
}

console.log('method1 total time: ' + timeTotal1 + ' milliseconds');
console.log('method2 total time: ' + timeTotal2 + ' milliseconds');
console.log('method1 average: ' + timeTotal1 / trials + ' milliseconds');
console.log('method2 average: ' + timeTotal2 / trials + ' milliseconds');
From my observations, this common optimization is indeed faster, even for tiny arrays. However, the practical time saving is likely insignificant for small arrays, while it may be worthwhile for large ones (more than 1000 items).
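For reference, here is a minimal sketch of what method2's cached-length pattern looks like when the loop body actually does some work. The function name sumCached is hypothetical, chosen for illustration, and is not part of the benchmark above.

// sketch: the cached-length pattern (method2) applied to a real loop body
function sumCached(arr) {
  let total = 0;
  // read arr.length once instead of on every iteration:
  for (let i = 0, len = arr.length; i < len; ++i) {
    total += arr[i];
  }
  return total;
}

console.log(sumCached([1, 2, 3])); // 6

Note that caching the length this way is only safe if the loop body does not add or remove elements from the array.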