querySelectorAll vs getElementsByClassName - performance

When I loop through a NodeList obtained by querySelectorAll and add a class to each element, it takes much less time (3ms) than doing the same with the collection obtained by getElementsByClassName (100ms). Why?
var container = document.getElementById('box-container');
var button = document.getElementById('button');

for (var i = 0; i < 3000; i++) {
  var div = document.createElement('div');
  div.classList.add('box');
  div.index = i;
  container.appendChild(div);
}

button.addEventListener('click', function() {
  var box1 = container.getElementsByClassName('box');
  for (var i = 1; i < box1.length; i += 2) {
    box1[i].classList.toggle('gray');
  }
  var box2 = container.querySelectorAll('.box');
  for (var i = 1; i < box2.length; i += 2) {
    box2[i - 1].classList.toggle('gray');
  }
});

The difference is the type of collection you are iterating over.
box1 is a live HTMLCollection, which is updated automatically whenever the DOM changes. box2 is a static NodeList (querySelectorAll does not return a live list), so changing the DOM doesn't affect it.
When you iterate over box1, every class toggle invalidates the live collection, which has to be re-checked as you keep iterating, and that is where the overhead comes from.
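That is also what the deadBox1 snapshot in the test below demonstrates. If you need to start from getElementsByClassName, a minimal sketch of the workaround is to copy the live collection into a plain array before touching the DOM (Array.from, or slice.call for older browsers):
// Snapshot the live HTMLCollection into a plain array first, so the
// classList changes below can no longer affect what we iterate over.
var liveBoxes = container.getElementsByClassName('box');
var boxes = Array.prototype.slice.call(liveBoxes); // or Array.from(liveBoxes)
for (var i = 1; i < boxes.length; i += 2) {
  boxes[i].classList.toggle('gray');
}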
Here's a test you can easily run:
var container = document.getElementById('box-container');
var button = document.getElementById('button');

for (var i = 0; i < 6000; i++) { // added 3000 more to challenge modern browsers...
  var div = document.createElement('div');
  div.classList.add('box');
  div.index = i;
  container.appendChild(div);
}

button.addEventListener('click', function () {
  var box1 = container.getElementsByClassName('box');
  for (var i = 1; i < box1.length; i += 2) {
    box1[i].classList.toggle('gray');
  }

  var deadBox1 = [];
  for (i = 0; i < box1.length; i++) {
    deadBox1[i] = box1[i];
  }
  for (var i = 1; i < deadBox1.length; i += 2) {
    deadBox1[i].classList.toggle('gray');
  }

  var box2 = container.querySelectorAll('.box');
  for (i = 1; i < box2.length; i += 2) {
    box2[i - 1].classList.toggle('gray');
  }
});
Now run the Chrome Performance (or Timeline) tool; the difference between the two approaches shows up clearly in the recording.
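If you would rather see numbers in the console than a Performance recording, a quick sketch (not part of the original answer) is to wrap each loop in console.time / console.timeEnd inside the click handler:
console.time('live HTMLCollection');
var box1 = container.getElementsByClassName('box');
for (var i = 1; i < box1.length; i += 2) {
  box1[i].classList.toggle('gray');
}
console.timeEnd('live HTMLCollection');

console.time('static NodeList');
var box2 = container.querySelectorAll('.box');
for (i = 1; i < box2.length; i += 2) {
  box2[i - 1].classList.toggle('gray');
}
console.timeEnd('static NodeList');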

Related

Is there a way to speed up this for loop in Google Script?

I have a for loop which works on my Google Sheet, but it takes around 5 minutes to filter through the 2100 rows of data. I have read about using filters and getting rid of the for loop altogether, but I'm fairly new to coding in Google Apps Script and haven't been able to get my head around the syntax for this. Any advice greatly appreciated.
Code below:
function Inspect() {
  var sSheet = SpreadsheetApp.getActiveSpreadsheet();
  var srcSheet = sSheet.getSheetByName("Inventory");
  var tarSheet = sSheet.getSheetByName("Inspections");
  var lastRow = srcSheet.getLastRow();
  for (var i = 2; i <= lastRow; i++) {
    var cell = srcSheet.getRange("A" + i);
    var val = cell.getValue();
    if (val == true) {
      var srcRange = srcSheet.getRange("B" + i + ":I" + i);
      var clrRange = srcSheet.getRange("A" + i);
      var tarRow = tarSheet.getLastRow();
      tarSheet.insertRowAfter(tarRow);
      var tarRange = tarSheet.getRange("A" + (tarRow + 1) + ":H" + (tarRow + 1));
      var now = new Date();
      var timeRange = tarSheet.getRange("I" + (tarRow + 1));
      timeRange.setValue(now);
      srcRange.copyTo(tarRange);
      clrRange.clear();
      //tarRange.activate();
      timeRange.offset(0, 1).activate();
    }
  }
}
Yes. To speed this up you need to read all the values first, apply your logic to the resulting 2D arrays instead of individual cells, and at the end use setValues to write everything back to the sheet. I would go for something like this:
function Inspect() {
  var sSheet = SpreadsheetApp.getActiveSpreadsheet();
  var srcSheet = sSheet.getSheetByName("Inventory");
  var tarSheet = sSheet.getSheetByName("Inspections");
  var srcLastRow = srcSheet.getLastRow();
  var tarLastRow = tarSheet.getLastRow();
  var srcArray = srcSheet.getRange(1, 1, srcLastRow, 9).getValues(); // (A1:I(lastrow))
  var tarArray = tarSheet.getRange(1, 1, tarLastRow, 9).getValues(); // (A1:I(lastrow))
  for (var i = 1; i < srcArray.length; i++) {
    var val = srcArray[i][0];
    if (val == true) {
      var copyValues = srcArray[i].slice(1); // all elements of the row except the first column (srcSheet.getRange("B" + i + ":I" + i))
      var now = new Date();
      copyValues[8] = now; // set the time on column 9 (array starts at position 0!)
      var tarNewLine = copyValues;
      tarArray.push(tarNewLine);
      // clear values on source (except column A):
      for (var j = 1; j < srcArray[i].length; j++) {
        srcArray[i][j] = "";
      }
    }
  }
  tarSheet.clear();
  tarSheet.getRange(1, 1, tarArray.length, tarArray[0].length).setValues(tarArray);
  srcSheet.clear();
  srcSheet.getRange(1, 1, srcArray.length, srcArray[0].length).setValues(srcArray);
}
You cannot get around a loop, but you should reduce the number of calls to the SpreadsheetApp service to a minimum; see the Apps Script Best Practices.
It is not the for loop but those service calls that make your code slow. Instead, work with arrays as much as you can. Loops only become a real problem when they are nested, which is something else you should avoid.
Sample how to perform most calls to SpreadsheetApp outside of the loop and work with arrays:
function Inspect() {
  var sSheet = SpreadsheetApp.getActiveSpreadsheet();
  var srcSheet = sSheet.getSheetByName("Inventory");
  var tarSheet = sSheet.getSheetByName("Inspections");
  var lastRow = srcSheet.getLastRow();
  var Acolumn = srcSheet.getRange("A2:A" + lastRow);
  var Avalues = Acolumn.getValues();
  var srcRange = srcSheet.getRange("B2:I" + lastRow);
  var srcValues = srcRange.getValues();
  var array = [];
  var now = new Date();
  for (var i = 0; i < lastRow - 1; i++) {
    var val = Avalues[i][0];
    if (val == true) {
      srcValues[i].push(now);
      array.push(srcValues[i]);
      var clrRange = Acolumn.getCell(i + 1, 1);
      clrRange.clear();
    }
  }
  var tarRow = tarSheet.getLastRow();
  tarSheet.insertRowAfter(tarRow);
  if (array.length != 0) {
    var tarRange = tarSheet.getRange("A" + (tarRow + 1) + ":I" + (tarRow + array.length));
    tarRange.setValues(array);
  }
}

Grouping multiple moving averages in dc.js

I have a dc.js example going where I'd like to calculate moving averages over two different windows of the dataset and group them. Ultimately, I'd like to also group the ratio between the two moving averages with them so I can access them using a key.
I've got a handle on how to compute a single moving average using reductio, but I'm not sure how to do two of them concurrently, compute their ratio, and show all three (MA1, MA2, Ratio) in the graph.
Here is how I'm doing each independent MA:
var date_array = [];
var mapped_date_array = [];
var activities_infinity = activityDistanceByDayGroup.top(Infinity);
var i = 0;

for (i = 0; i < activities_infinity.length; i++) {
  date_array.push(activities_infinity[i].key);
}
date_array.sort(function (date1, date2) {
  if (date1 > date2) return 1;
  if (date1 < date2) return -1;
});
mapped_date_array = date_array.map(function (e) {
  return e.toDateString();
});

// For Chronic Load
var cLoadMovingAvg = activityByDay.groupAll();
cReducer = reductio().groupAll(function (record) {
  var idx = mapped_date_array.indexOf(record.dtg.toDateString());
  if (record.dtg < date_array[9]) {
    return [date_array[idx]];
  } else {
    var i = 0;
    var return_array = [];
    for (i = 9; i >= 0; i--) {
      return_array.push(date_array[idx - i]);
    }
    return return_array;
  }
}).count(true).sum(dc.pluck('Distance')).avg(true)(cLoadMovingAvg);

// For Acute Load
var aLoadMovingAvg = activityByDay.groupAll();
aReducer = reductio().groupAll(function (record) {
  var idx = mapped_date_array.indexOf(record.dtg.toDateString());
  if (record.dtg < date_array[3]) {
    return [date_array[idx]];
  } else {
    var i = 0;
    var return_array = [];
    for (i = 3; i >= 0; i--) {
      return_array.push(date_array[idx - i]);
    }
    return return_array;
  }
}).count(true).sum(dc.pluck('Distance')).avg(true)(aLoadMovingAvg);
jsFiddle is here: http://jsfiddle.net/gasteps/hLh5frc8/2/
Thanks so much for any help on this!
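One common dc.js pattern that could combine them, sketched here as an assumption rather than a tested answer, is a "fake group" that joins the two groupAll values by date key and adds the ratio. It assumes each reductio groupAll's value() returns an array of {key, value} records with an avg field, as configured above:
// Hypothetical "fake group": exposes chronic MA, acute MA and their ratio
// under one key so a chart can read all three values from a single object.
var combinedLoadGroup = {
  all: function () {
    var acuteByKey = {};
    aLoadMovingAvg.value().forEach(function (d) {
      acuteByKey[d.key] = d.value;
    });
    return cLoadMovingAvg.value().map(function (d) {
      var acute = acuteByKey[d.key];
      return {
        key: d.key,
        value: {
          chronicMA: d.value.avg,
          acuteMA: acute ? acute.avg : 0,
          ratio: d.value.avg ? (acute ? acute.avg : 0) / d.value.avg : 0
        }
      };
    });
  }
};
// A chart could then use e.g. .group(combinedLoadGroup, 'Ratio') with a
// valueAccessor of function (d) { return d.value.ratio; }.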

Loading A world from external file webgl and HTML5

I am trying to load a world (kind of..) into WebGL from an external file that contains the vertex positions and face positions, but the file holding the data is very large (about 100 MB). In my approach I use the file as a buffer and have a single buffer in initBuffers that is overwritten again and again: I read the values for one object from the file, draw it on the canvas, then overwrite the buffer with the data of the next object in my scene and add that to the scene. In short, I am not keeping the vertex and face information around. While animating, I read the entire file again and redraw. This works fine with a file of around 20 MB, but for larger files I cannot maintain a high frame rate while animating, which does not look good.
My question is: should I put all the vertex information into buffers once, draw the graphics from those, and forget about the file, or can my approach be optimized? Also, if you can suggest any other method, that would be really helpful.
try {
  var fileInput = document.getElementById('fileInput');
  var file = fileInput.files[0];
  // read from filename
  var reader = new FileReader();
  reader.onload = function (e) {
    var count = 0;
    var lastline = 0;
    var i;
    var j;
    var text = reader.result;
    var lines = text.split("\r\n");
    while (lastline < lines.length) {
      var vertices = [];
      var VertexIndices = [];
      var vertexNormals = [];
      ///// Position of the objects
      for (i = lastline; i < lines.length; i++) {
        if (lines[i] == "MESH_FACE_POSITION_LIST {") {
          break;
        }
      }
      for (j = i + 1; j < lines.length; j++) {
        if (lines[j] == "}") {
          break;
        } else {
          var currentvertices = lines[j].split(" ");
          for (var k = 0; k < currentvertices.length; k++) {
            VertexIndices.push(parseInt(currentvertices[k])); // Check for ","
          }
        }
      }
      noOfVerticesForTriangles = VertexIndices.length;
      for (i = j; i < lines.length; i++) {
        if (lines[i] == "MODEL_POSITION_LIST {") {
          break;
        }
      }
      for (j = i + 1; j < lines.length; j++) {
        if (lines[j] == "}") {
          break;
        } else {
          var currentvertices = lines[j].split(" ");
          for (var k = 0; k < currentvertices.length; k++) {
            vertices.push(parseFloat(currentvertices[k])); // Check for ","
          }
        }
      }
      noOfVertices = vertices.length / 3;
      lastline = j;
      // this is where I call the function to draw the graphics after reading the data for an object
      initBuffers(vertices, VertexIndices);
      drawScene();
    }
  };
  reader.readAsText(file);
} catch (e) {
}
}
Code for initBuffers:
function initBuffers(vertices, VertexIndices) {
  vertexPositionBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, vertexPositionBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
  vertexPositionBuffer.itemSize = 3;
  vertexPositionBuffer.numItems = noOfVertices;

  vertexIndexBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, vertexIndexBuffer);
  gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(VertexIndices), gl.STATIC_DRAW);
  vertexIndexBuffer.itemSize = 1;
  vertexIndexBuffer.numItems = noOfVerticesForTriangles;
}
My question is should I put all the vertex information into buffer and then draw the graphics and forget about the file…
Yes, this is pretty much how 3d works :)
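A minimal sketch of that idea, under the assumption that the parsing loop above stays the same: parse the file once, create one pair of buffers per object, keep them in an array, and only bind and draw each frame (vertexPositionAttribute here stands in for whatever attribute location your shader setup uses):
var objects = []; // filled once, after the file has been read and parsed

// Hypothetical replacement for initBuffers: called once per parsed object.
function createObjectBuffers(vertices, vertexIndices) {
  var positionBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);

  var indexBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
  gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(vertexIndices), gl.STATIC_DRAW);

  objects.push({
    positionBuffer: positionBuffer,
    indexBuffer: indexBuffer,
    numIndices: vertexIndices.length
  });
}

// Called every frame: no file reading, no bufferData, just bind and draw.
function drawAllObjects() {
  for (var i = 0; i < objects.length; i++) {
    var obj = objects[i];
    gl.bindBuffer(gl.ARRAY_BUFFER, obj.positionBuffer);
    gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, obj.indexBuffer);
    gl.drawElements(gl.TRIANGLES, obj.numIndices, gl.UNSIGNED_SHORT, 0);
  }
}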

Why Is My Genetic Algorithm Terrible (Why Doesn't It Converge)?

I wrote a quick experiment with a genetic algorithm. It simply takes a grid of squares and tries to mutate their color to make them all yellow. It fails miserably and I can't seem to figure out why. I've included a link to JSFiddle that demonstrates working code, as well as a copy of the code in its entirety.
http://jsfiddle.net/mankyd/X6x9L/
<!DOCTYPE html>
<html lang="en">
<head>
</head>
<body>
  <div class="container">
    <h1>The randomly flashing squares <i>should</i> be turning yellow</h1>
    <div class="row">
      <canvas id="input_canvas" width="100" height="100"></canvas>
      <canvas id="output_canvas" width="100" height="100"></canvas>
    </div>
    <div class="row">
      <span id="generation"></span>
      <span id="best_fitness"></span>
      <span id="avg_fitness"></span>
    </div>
  </div>
</body>
</html>
Note that the JavaScript below relies on jQuery in a few places.
// A bit of code that draws several squares in a canvas
// and then attempts to use a genetic algorithm to slowly
// make those squares all yellow.

// Knobs that can be tweaked
var mutation_rate = 0.1; // how often should we mutate something
var crossover_rate = 0.6; // how often should we crossover two parents
var fitness_influence = 1; // affects the fitness's influence over mutation
var elitism = 1; // how many of the parent's generation to carry over
var num_offspring = 20; // how many spawns per generation
var use_rank_selection = true; // false == roulette_selection

// Global variables for easy tracking
var children = []; // current generation
var best_spawn = null; // keeps track of our best so far
var best_fitness = null; // keeps track of our best so far
var generation = 0; // global generation counter
var clear_color = 'rgb(0,0,0)';

// used for output
var $gen_span = $('#generation');
var $best_fit = $('#best_fitness');
var $avg_fit = $('#avg_fitness');
var $input_canvas = $('#input_canvas');
var input_ctx = $input_canvas[0].getContext('2d');
var $output_canvas = $('#output_canvas');
var output_ctx = $output_canvas[0].getContext('2d');

// A spawn represents a genome - a collection of colored
// squares.
var Spawn = function(nodes) {
  var _fitness = null; // a cache of our fitness
  this.nodes = nodes; // the squares that make up our image

  this.fitness = function() {
    // fitness is simply a function of how close to yellow we are.
    // This is defined through euclidian distance. Smaller fitnesses
    // are better.
    if (_fitness === null) {
      _fitness = 0;
      for (var i = 0; i < nodes.length; i++) {
        _fitness += Math.pow(-nodes[i].color[0], 2) +
          Math.pow(255 - nodes[i].color[1], 2) +
          Math.pow(255 - nodes[i].color[2], 2);
      }
      _fitness /= 255 * 255 * 3 * nodes.length; // divide by the worst possible distance
    }
    return _fitness;
  };

  this.mutate = function() {
    // reset our cached fitness to unknown
    _fitness = null;
    var health = this.fitness() * fitness_influence;
    var width = $output_canvas[0].width;
    var height = $output_canvas[0].height;
    for (var i = 0; i < nodes.length; i++) {
      // Sometimes (most times) we don't mutate
      if (Math.random() > mutation_rate) {
        continue;
      }
      // Mutate the colors.
      for (var j = 0; j < 3; j++) {
        // colors can move by up to 32 in either direction
        nodes[i].color[j] += 64 * (.5 - Math.random()) * health;
        // make sure that our colors stay between 0 and 255
        nodes[i].color[j] = Math.max(0, Math.min(255, nodes[i].color[j]));
      }
    }
  };

  this.draw = function(ctx) {
    // This draw function is a little overly generic in that it supports
    // arbitrary polygons.
    ctx.save();
    ctx.fillStyle = clear_color;
    ctx.fillRect(0, 0, ctx.canvas.width, ctx.canvas.height);
    for (var i = 0; i < nodes.length; i++) {
      ctx.fillStyle = 'rgba(' + Math.floor(nodes[i].color[0]) + ',' + Math.floor(nodes[i].color[1]) + ',' + Math.floor(nodes[i].color[2]) + ',' + nodes[i].color[3] + ')';
      ctx.beginPath();
      ctx.moveTo(nodes[i].points[0][0], nodes[i].points[0][1]);
      for (var j = 1; j < nodes[i].points.length; j++) {
        ctx.lineTo(nodes[i].points[j][0], nodes[i].points[j][1]);
      }
      ctx.fill();
      ctx.closePath();
    }
    ctx.restore();
  };
};

Spawn.from_parents = function(parents) {
  // Given two parents, mix them together to get another spawn
  var nodes = [];
  for (var i = 0; i < parents[0].nodes.length; i++) {
    if (Math.random() > 0.5) {
      nodes.push($.extend({}, parents[0].nodes[i]));
    } else {
      nodes.push($.extend({}, parents[1].nodes[i]));
    }
  }
  var s = new Spawn(nodes);
  s.mutate();
  return s;
};

Spawn.random = function(width, height) {
  // Return a completely random spawn.
  var nodes = [];
  for (var i = 0; i < width * height; i += 10) {
    var n = {
      color: [Math.random() * 256, Math.random() * 256, Math.random() * 256, 1],
      points: [
        [i % width, Math.floor(i / width) * 10],
        [(i % width) + 10, Math.floor(i / width) * 10],
        [(i % width) + 10, Math.floor(i / width + 1) * 10],
        [i % width, Math.floor(i / width + 1) * 10],
      ]
    };
    nodes.push(n);
  }
  return new Spawn(nodes);
};

var select_parents = function(gene_pool) {
  if (use_rank_selection) {
    return rank_selection(gene_pool);
  }
  return roulette_selection(gene_pool);
};

var roulette_selection = function(gene_pool) {
  var mother = null;
  var father = null;
  gene_pool = gene_pool.slice(0);
  var sum_fitness = 0;
  var i = 0;
  for (i = 0; i < gene_pool.length; i++) {
    sum_fitness += gene_pool[i].fitness();
  }
  var choose = Math.floor(Math.random() * sum_fitness);
  for (i = 0; i < gene_pool.length; i++) {
    if (choose <= gene_pool[i].fitness()) {
      mother = gene_pool[i];
      break;
    }
    choose -= gene_pool[i].fitness();
  }
  // now remove the mother and repeat for the father
  sum_fitness -= mother.fitness();
  gene_pool.splice(i, 1);
  choose = Math.floor(Math.random() * sum_fitness);
  for (i = 0; i < gene_pool.length; i++) {
    if (choose <= gene_pool[i].fitness()) {
      father = gene_pool[i];
      break;
    }
    choose -= gene_pool[i].fitness();
  }
  return [mother, father];
};

var rank_selection = function(gene_pool) {
  gene_pool = gene_pool.slice(0);
  gene_pool.sort(function(a, b) {
    return b.fitness() - a.fitness();
  });

  var choose_one = function() {
    var sum_fitness = (gene_pool.length + 1) * (gene_pool.length / 2);
    var choose = Math.floor(Math.random() * sum_fitness);
    for (var i = 0; i < gene_pool.length; i++) {
      // figure out the sum of the records up to this point. if we exceed
      // our chosen spot, we've found our spawn.
      if ((i + 1) * (i / 2) >= choose) {
        return gene_pool.splice(i, 1)[0];
      }
    }
    return gene_pool.pop(); // last element, if for some reason we get here
  };

  var mother = choose_one();
  var father = choose_one();
  return [mother, father];
};

var start = function() {
  // Initialize our first generation
  var width = $output_canvas[0].width;
  var height = $output_canvas[0].height;
  generation = 0;
  children = [];
  for (var j = 0; j < num_offspring; j++) {
    children.push(Spawn.random(width, height));
  }
  // sort by fitness so that our best comes first
  children.sort(function(a, b) {
    return a.fitness() - b.fitness();
  });
  best_spawn = children[0];
  best_fitness = best_spawn.fitness();
  best_spawn.draw(output_ctx);
};

var generate = function(spawn_pool) {
  // generate a new set of offspring
  var offspring = [];
  for (var i = 0; i < num_offspring; i++) {
    var parents = select_parents(spawn_pool);
    // odds of crossover decrease as we get closer
    if (Math.random() * best_fitness < crossover_rate) {
      var s = Spawn.from_parents(parents);
    } else {
      // quick hack to copy our mother, with possible mutation
      var s = Spawn.from_parents([parents[0], parents[0]]);
    }
    offspring.push(s);
  }
  // select a number of best from the parent pool (elitism)
  for (var i = 0; i < elitism; i++) {
    offspring.push(spawn_pool[i]);
  }
  // sort our offspring by fitness (this includes the parents from elitism). Fittest first.
  offspring.sort(function(a, b) {
    return a.fitness() - b.fitness();
  });
  // pick off the number that we want
  offspring = offspring.slice(0, num_offspring);
  best_spawn = offspring[0];
  best_fitness = best_spawn.fitness();
  best_spawn.draw(output_ctx);
  generation++;
  return offspring;
};

var average_fitness = function(generation) {
  debugger;
  var a = 0;
  for (var i = 0; i < generation.length; i++) {
    a += generation[i].fitness();
  }
  return a / generation.length;
};

// Draw yellow and then initialize our first generation
input_ctx.fillStyle = 'yellow';
input_ctx.fillRect(0, 0, input_ctx.canvas.width, input_ctx.canvas.height);
start();

// Our loop function. Use setTimeout to prevent things from freezing
var gen = function() {
  children = generate(children);
  $gen_span.text('Generation: ' + generation);
  $best_fit.text('Best Fitness: ' + best_fitness);
  $avg_fit.text('Avg. Fitness: ' + average_fitness(children));
  if (generation % 100 === 0) {
    console.log('Generation', generation);
    console.log('Fitness', best_fitness);
  }
  setTimeout(gen, 1);
};
gen();
I've commented the code to try to make parsing it easy. The basic idea is quite simple:
Select 1 or 2 parents from the current generation
Mix those one or two parents together
Mutate the result slightly and add it to the next generation
Select the best few parents (1 in the example) and add them to the next generation
Sort and slice off N results and use them for the next generation (potentially a mix of parents and offspring)
Rinse and repeat
The output never gets anywhere near yellow. It quickly falls into a steady state of a sort that looks awful. Where have I gone wrong?
Solved it. It was in the "from_parents" method:
if (Math.random() > 0.5) {
  nodes.push($.extend({}, parents[0].nodes[i]));
} else {
  nodes.push($.extend({}, parents[1].nodes[i]));
}
The $.extend() was doing a shallow copy. The obvious fix was to pass true as the first argument, which forces a deep copy. That, however, is incredibly slow performance-wise. The better solution was to remove the $.extend() from that chunk of code entirely and instead move it up into the mutate() method, calling $.extend() only when a node is actually about to be changed. In other words, it becomes copy-on-write.
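A minimal sketch of that copy-on-write mutate(), assuming the same node structure as above; cloning the color array as well (the slice call) is an extra step added here so the child's colors really are independent of the parent's:
this.mutate = function() {
  _fitness = null; // reset our cached fitness to unknown
  var health = this.fitness() * fitness_influence;
  for (var i = 0; i < nodes.length; i++) {
    // Sometimes (most times) we don't mutate, and the node keeps
    // sharing the parent's object.
    if (Math.random() > mutation_rate) {
      continue;
    }
    // Copy-on-write: clone the node (and its color array) only now,
    // when we are actually about to change it.
    nodes[i] = $.extend({}, nodes[i]);
    nodes[i].color = nodes[i].color.slice(0);
    for (var j = 0; j < 3; j++) {
      nodes[i].color[j] += 64 * (.5 - Math.random()) * health;
      nodes[i].color[j] = Math.max(0, Math.min(255, nodes[i].color[j]));
    }
  }
};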
Also, the color I put in the fitness function was wrong :P
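For reference, the corrected fitness term would measure the distance to yellow (255, 255, 0); as posted, the first term effectively measured the distance of red from 0, i.e. the distance to cyan. A sketch of just that accumulation:
// Euclidean distance to yellow (255, 255, 0); smaller is still better.
_fitness += Math.pow(255 - nodes[i].color[0], 2) +
            Math.pow(255 - nodes[i].color[1], 2) +
            Math.pow(0 - nodes[i].color[2], 2);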

Set visibility of MarkerClusterer

Trying to toggle the visibility of MarkerClusterer (V3):
var hydrantsShowing = true;

function ToggleHydrants() {
  var markers = hydrantsClusterer.getMarkers();
  for (var i = 0; i < markers.length; i++) {
    markers[i].setVisible(!hydrantsShowing);
  }
  hydrantsShowing = !hydrantsShowing;
}
The markers do toggle, but with two problems:
1. The map must be panned a bit before the change takes effect.
2. The MarkerClusterer icons (with the numbers) stay on the map, even after the markers are no longer visible.
I've also tried using the setMap approach, but with similar behavior:
var hydrantsShowing = true;

function ToggleHydrants() {
  var markers = hydrantsClusterer.getMarkers();
  if (hydrantsShowing) {
    for (var i = 0; i < markers.length; i++) {
      markers[i].setMap(null);
    }
  } else {
    for (var i = 0; i < markers.length; i++) {
      markers[i].setMap(gmap);
    }
  }
  hydrantsShowing = !hydrantsShowing;
}
Solved it by using MarkerClustererPlus instead.
var hydrantsShowing = true;

function ToggleHydrants() {
  var markers = hydrantsClusterer.getMarkers();
  for (var i = 0; i < markers.length; i++) {
    markers[i].setVisible(!hydrantsShowing);
  }
  hydrantsClusterer.repaint();
  hydrantsShowing = !hydrantsShowing;
}
Calling repaint() after setting the visibility sorted out both issues.
The original MarkerClusterer doesn't have such a function.
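If switching libraries is not an option, one alternative sketch (not from the original answer) for the original MarkerClusterer is to remove the markers from the clusterer instead of toggling their visibility, since clearMarkers() also removes the cluster icons; hydrantMarkers is a hypothetical cache variable:
var hydrantMarkers = null; // cache of the markers while they are hidden

function ToggleHydrants() {
  if (hydrantsShowing) {
    // Copy the list first, in case the clusterer reuses its internal array.
    hydrantMarkers = hydrantsClusterer.getMarkers().slice(0);
    hydrantsClusterer.clearMarkers(); // removes markers and cluster icons
  } else {
    hydrantsClusterer.addMarkers(hydrantMarkers);
  }
  hydrantsShowing = !hydrantsShowing;
}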

Resources