// Micro-benchmark: two triple-nested empty loops with the SAME total number of
// innermost iterations (100 * 1000 * 10000 === 10000 * 1000 * 100 === 1e9).
// The second arrangement puts the large count on the OUTER loop, so its inner
// loops are (re)initialized millions more times — that overhead is what this
// snippet measures.
const t1 = Date.now();
for (let i = 0; i < 100; i++) {
  for (let j = 0; j < 1000; j++) {
    for (let k = 0; k < 10000; k++) {
      // intentionally empty: we are timing loop overhead only
    }
  }
}
const t2 = Date.now();
console.log('first time', t2 - t1);
for (let i = 0; i < 10000; i++) {
  for (let j = 0; j < 1000; j++) {
    for (let k = 0; k < 100; k++) {
      // intentionally empty: we are timing loop overhead only
    }
  }
}
const t3 = Date.now();
console.log('second time', t3 - t2);
At first glance the two loop nests look equivalent: both execute the same 100 × 1000 × 10000 = 10⁹ innermost iterations, so you might expect the same execution time. In practice, however, the second loop takes noticeably longer than the first. What makes the difference under the hood?
In the first loop, the `let ... = 0` initializer of each loop runs once per entry into that loop statement: 1 initialization of `i`, 100 of `j` (once per iteration of `i`), and 100 × 1,000 = 100,000 of `k` — about 100,101 variable initializations in total.
In the second loop, the same counting gives: 1 initialization of `i`, 10,000 of `j`, and 10,000 × 1,000 = 10,000,000 of `k` — about 10,010,001 variable initializations in total.
The second loop therefore performs 10,010,001 − 100,101 = 9,909,900 more variable initializations (plus correspondingly more loop-setup work), and is expected to run longer — even though both nests execute exactly the same number of innermost iterations.