model.fit() never ends or shows me the loss - p5.js

I'm trying to train a model, but it never gets past fit(). The console doesn't show the loss result; it just gets stuck there. I already changed the async function to a promise, but it's the same.
function train() {
  trainModel().then(result => {
    console.log(result.history.loss[0]);
    setTimeout(train, 100);
  });
}
// trains the model ~ params = train_xs (input) and train_ys (output)
async function trainModel() {
  // Create the input data
  for (let i = 0; i < 5; i++) {
    train_xs = tf.tensor2d(ins.pixels[i], [28, 28], 'int32');
    train_ys = tf.tensor2d(outs.coords[i], [3, 2], 'int32');
    const h = await model.fit(train_xs, train_ys, {
      epochs: 1
    });
    console.log("Loss after Epoch " + i + " : " + h.history.loss[0]);
  }
  console.log('end fitness model');
}
// 'end fitness model' is never logged
There are no error messages; the console just stays clean.

There are a couple of issues (the console stays clean because the errors were not being logged):
The shape of xs and ys (built from ins.pixels[i] and outs.coords[i]) does not match the input and output shapes of the model.
xs and ys should have the same batch size. Since each iteration of the for loop uses only one feature and one label, the batch size is 1.
Here is a fix for the model:
let model;
let xs;
let train_xs;
let train_ys;
let inAndOut;
let resolution = 20;
let cols;
let rows;
var ins;
var outs;
function setup() {
  createCanvas(400, 400);
  /// visualization
  ins = new Inputs13(); // ins.pixels;
  outs = new Outputs13(); // outs.coords;
  inAndOut = new InputsAndOutputsToTest();
  /// create the model
  model = tf.sequential();
  let hidden = tf.layers.dense({
    inputShape: [784],
    units: 28,
    activation: 'sigmoid'
  });
  let output = tf.layers.dense({
    units: 6,
    activation: 'sigmoid'
  });
  model.add(hidden);
  model.add(output);
  const optimizer = tf.train.adam(0.1);
  model.compile({
    optimizer: optimizer,
    loss: 'meanSquaredError'
  });
  xs = tf.tensor2d(inAndOut.pixelsToTest[0], [28, 28]);
  //console.log('xs');
  //console.log(xs);
  //xs.print();
  // train the model
  setTimeout(train, 10);
}
// promise: calls trainModel and logs the loss
function train() {
  console.log("im in train!");
  trainModel().then(result => {
    console.log(result.history.loss[0]);
    setTimeout(train, 100);
  });
}
// trains the model ~ params = train_xs (input) and train_ys (output)
async function trainModel() {
  let h;
  // Create the input data
  for (let i = 0; i < 5; i++) {
    train_xs = tf.tensor(ins.pixels[i], [1, 784]); // [tensor], [shape]
    console.log('xs.shape', train_xs.shape);
    train_ys = tf.tensor(outs.coords[i]).reshape([1, 6]);
    console.log('ys.shape', train_ys.shape);
    /* console.log('train_xs');
    train_xs.print();
    console.log("train_ys");
    train_ys.print(); */
    h = await model.fit(train_xs, train_ys, {
      // shuffle: true,
      epochs: 1
    });
    console.log("Loss after Epoch " + i + " : " + h.history.loss[0]);
  }
  console.log('end fitness model');
  return h;
}
// visual display
function draw() {
  background(220);
  // Get the predictions; params: xs = inputs for testing
  tf.tidy(() => {
    let ys = model.predict(xs);
    //console.log("ys");
    //console.log(ys);
    let y_values = ys.dataSync();
    // console.log("y_values");
    // console.log(y_values);
  });
}
However, it is possible to use all 13 features and 13 labels at once, in which case the for loop is no longer needed:
train_xs = tf.tensor(ins.pixels, [13, 784]);
console.log('xs.shape', train_xs.shape)
train_ys = tf.tensor(outs.coords).reshape([13, 6]);
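For completeness, here is a minimal sketch of what trainModel could look like without the loop, assuming ins.pixels and outs.coords hold all 13 samples (the tensor shapes and the model.fit call are taken from the answer above; the rest is illustrative):
async function trainModel() {
  // One batch containing all 13 samples.
  const train_xs = tf.tensor(ins.pixels, [13, 784]);
  const train_ys = tf.tensor(outs.coords).reshape([13, 6]);
  // A single fit call now trains on the whole batch.
  const h = await model.fit(train_xs, train_ys, {
    epochs: 1
  });
  console.log("Loss: " + h.history.loss[0]);
  return h;
}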

Related

Google App Script Data Validation inserts too many dropdowns

I have a script that works mostly the way I want. It looks at a cell, compares it to a column in another tab, finds the matching items, returns them, and creates a dropdown on the cell. This moves down the column until it reaches the end. The problem is that it continues past the last row for about 20 rows. The starting row is row 24.
function getInventoryItems() {
  var jobSummaryInventoryItems = jobSummary.getRange(24, 8, jobSummary.getLastRow(), 1);
  var jobSummaryInventoryItemsValues = jobSummaryInventoryItems.getValues();
  var inventoryItems = inventory.getRange(4, 3, inventory.getLastRow(), 1);
  var inventoryItemsValues = inventoryItems.getValues();
  jobSummary.getRange(24, 8, jobSummary.getLastRow(), 1).setDataValidation(null);
  for (z = 0; z < jobSummaryInventoryItemsValues.length; z++) {
    if (jobSummaryInventoryItemsValues[z].toString().length > 1) {
      var listOfInventory = [];
      for (i = 0; i < inventoryItems.getLastRow() - 4; i++) {
        if (inventoryItemsValues[i].toString() == jobSummaryInventoryItemsValues[z]) {
          break;
        }
        var w = jobSummaryInventoryItemsValues[z];
        if (inventoryItemsValues[i].toString().includes(jobSummaryInventoryItemsValues[z])) {
          listOfInventory.push(inventoryItemsValues[i].toString());
        }
      }
    }
    if (listOfInventory.length > 0) {
      var rangeRule = SpreadsheetApp.newDataValidation().requireValueInList(listOfInventory).build();
      jobSummary.getRange(z + 24, 8).setDataValidation(rangeRule);
    }
  }
}
Get Inventory Items
function getInventoryItems() {
  const ss = SpreadsheetApp.getActive();
  const jobSummary = ss.getSheetByName('Job Summary');
  const inventory = ss.getSheetByName('Inventory');
  const jobSummaryInventoryItems = jobSummary.getRange(24, 8, jobSummary.getLastRow() - 23, 1);
  const jobSummaryInventoryItemsValues = jobSummaryInventoryItems.getValues();
  const inventoryItems = inventory.getRange(4, 3, inventory.getLastRow() - 3, 1);
  const inventoryItemsValues = inventoryItems.getValues();
  jobSummary.getRange(24, 8, jobSummary.getLastRow() - 23, 1).setDataValidation(null);
  for (z = 0; z < jobSummaryInventoryItemsValues.length; z++) {
    let listOfInventory = [];
    if (jobSummaryInventoryItemsValues[z].toString().length > 1) {
      for (i = 0; i < inventoryItemsValues.length; i++) {
        if (inventoryItemsValues[i].toString() == jobSummaryInventoryItemsValues[z]) {
          break;
        }
        let w = jobSummaryInventoryItemsValues[z];
        if (inventoryItemsValues[i].toString().includes(jobSummaryInventoryItemsValues[z])) {
          listOfInventory.push(inventoryItemsValues[i].toString());
        }
      }
    }
    if (listOfInventory.length > 0) {
      let rangeRule = SpreadsheetApp.newDataValidation().requireValueInList(listOfInventory).build();
      jobSummary.getRange(z + 24, 8).setDataValidation(rangeRule);
    }
  }
}
Sheet.getRange(row, column, numberOfRows, numberOfColumns)
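In other words, the third argument is a row count, not an end row, so it has to be offset by the starting row. A small illustrative sketch with hypothetical numbers (startRow, lastRow, and numRows are not names from the code above):
// If data starts at row 24 and the sheet's last row is, say, 43,
// the range must cover 43 - 24 + 1 = 20 rows, i.e. getLastRow() - 23.
const startRow = 24;
const lastRow = jobSummary.getLastRow();   // e.g. 43
const numRows = lastRow - startRow + 1;    // 20, not 43
const range = jobSummary.getRange(startRow, 8, numRows, 1);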

Is adding the new data point not at the end of array a bad idea?

This involves a design decision. An interviewer asked me to write something to plot data assuming there are 100 data points, and a new data point comes in (and is given to the program) every 0.1, 0.3, 0.5, or 1 second (this can change in the future, and I think the smallest granularity on a common web browser is about 0.03 seconds).
I proceeded to think about adding the new data point at the Nth position in the array. For example, add the data at array entry 36, then plot the data from entry 37 to 99 and then from 0 to 36. Next time, add the data at entry 37, then plot from 38 to 99 and then from 0 to 37.
This way, we don't need to shift out the data at entry 0, which requires moving entries 1 through 99 down one place, then adding the new data point at entry 99 and plotting entries 0 to 99.
For some reason, the interviewer gave a big frown and said, "Why would we do that? It is not heavy to shift 99 data points over." I said that if there are 500 or 1,000 data points to plot in the future, we might want to avoid shifting data about 500 or 1,000 times each time a new data point comes in.
He said, "Let's say we just shift them anyway."
Is shifting the data actually not an issue or concern? If we have 10 or 15 such widgets, apps, or web pages on screen to monitor 15 types of data, we might want to avoid shifting 15,000 data entries constantly.
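For reference, the circular-index idea described above can be sketched as a small ring buffer; the class below is illustrative and is not part of the benchmark code that follows:
// Fixed-capacity ring buffer: new values overwrite the oldest slot in place,
// so no elements are ever shifted.
class RingBuffer {
  constructor (capacity) {
    this.buf = new Array(capacity)
    this.capacity = capacity
    this.start = 0 // index of the oldest value once the buffer is full
    this.size = 0
  }
  push (value) {
    this.buf[(this.start + this.size) % this.capacity] = value
    if (this.size < this.capacity) {
      this.size++
    } else {
      this.start = (this.start + 1) % this.capacity
    }
  }
  // Returns values oldest-to-newest, which is the order used for plotting.
  toArray () {
    const out = []
    for (let i = 0; i < this.size; i++) {
      out.push(this.buf[(this.start + i) % this.capacity])
    }
    return out
  }
}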
I naively tried to load n canvases on the page and compared the time needed to plot against the time taken to shift the array.
tl;dr: Whichever method is used to shift the points, its cost is negligible compared with the time needed to plot the points (~3%).
I only ran the code in Firefox 70 with casual JS.
(For instance, I am storing an array of objects even though an optimization may be available by storing only floats.)
There are two kinds of measures: push and refresh.
push measures the time needed to shift a point and add a new one
refresh measures the time needed to replot the canvas
Below are three approaches for pushing: plain push (Array.prototype.shift/push), appending to the tail of a queue (and moving the head), or nopole's approach.
Every 10 ms I plot the time spent in the push method. At the top of the picture is the cumulative time spent. I stop once a run has reached 100 points and reload the page for another run.
The y axis is the same across all runs.
Push
push avg: (838+886+864)/3 = 862ms
Queue
push avg: (625+760+825)/3 = 736ms
refresh avg: (40554+39934+40915+39194+39264+30480)/6 = 38390ms
Nopole
push avg: (792+861+871)/3 = 841ms
Notice that for one sample (625/30480) we seem to have benefited from a CPU more willing to work, so the shifting method feels even more irrelevant.
It is hard to tell which approach is better, because of the few samples drawn for each kind of method, and it is likely more an issue of the CPU's overall workload than of the page itself.
To reproduce
let timerPush = 0
let timerRefresh = 0
class Canvas {
  constructor (f, el, period) {
    this.w = 300
    this.h = 300
    this.points = []
    const canvas = document.createElement('canvas')
    canvas.style.cssText = 'background:#eeeeee; margin:10px;'
    canvas.width = this.w
    canvas.height = this.h
    this.ctx = canvas.getContext('2d')
    this.ctx.transform(1, 0, 0, -1, 0, this.h / 2)
    this.ctx.lineWidth = 1
    this.dw = this.w / this.MAX_POINTS
    this.dh = this.h / 2
    el.appendChild(canvas)
    let x = 0
    this.timer = setInterval(_ => {
      x += period
      this.push({ x, y: f(x) })
      this.refresh()
    }, period * 1000)
  }
  refresh () {
    const now = performance.now()
    this.ctx.clearRect(0, -this.h / 2, this.w, this.h)
    this.ctx.beginPath()
    this._plot()
    this.ctx.stroke()
    this.ctx.closePath()
    timerRefresh += performance.now() - now
  }
  push (p) {
    const now = performance.now()
    this._push(p)
    timerPush += performance.now() - now
  }
  _plot () {
    if (!this.points.length) { return }
    this.ctx.moveTo(0 * this.dw, this.points[0].y * this.dh)
    for (let i = 1; i < this.points.length; ++i) {
      const p = this.points[i]
      this.ctx.lineTo(i * this.dw, p.y * this.dh)
    }
  }
  _push (p) {
    if (this.points.length == this.MAX_POINTS) {
      this.points.shift()
    }
    this.points.push(p)
  }
  MAX_POINTS = 100
}
class CanvasQueue extends Canvas {
  constructor () {
    super(...arguments)
    this.tail = {}
    this.head = this.tail
    this.n = 0
  }
  _plot () {
    if (!this.head.next.p) return
    let node = this.head.next
    this.ctx.moveTo(0 * this.dw, node.p.y * this.dh)
    let i = 1
    node = node.next
    while (node) {
      this.ctx.lineTo(i * this.dw, node.p.y * this.dh)
      ++i
      node = node.next
    }
  }
  _push (p) {
    if (this.n === this.MAX_POINTS) {
      this.head = this.head.next
    } else {
      this.n++
    }
    const node = { p }
    this.tail.next = node
    this.tail = node
  }
}
class CanvasNopole extends Canvas {
  constructor () {
    super(...arguments)
    this.start = 0
  }
  _plot () {
    if (!this.points.length) { return }
    const s = this.start
    let z = 1
    let startBack = 0
    if (this.points[s]) {
      this.ctx.moveTo(0 * this.dw, this.points[s].y * this.dh)
      for (let i = s + 1; i < this.points.length; ++i) {
        const p = this.points[i]
        this.ctx.lineTo(z++ * this.dw, p.y * this.dh)
      }
    } else {
      this.ctx.moveTo(0 * this.dw, this.points[0].y * this.dh)
      startBack = 1
    }
    for (let i = startBack; i < s; ++i) {
      const p = this.points[i]
      this.ctx.lineTo(z++ * this.dw, p.y * this.dh)
    }
  }
  _push (p) {
    this.points[this.start] = p
    this.start = (this.start + 1) % this.MAX_POINTS
  }
}
class CanvasSummary extends Canvas {
  constructor () {
    super(...arguments)
    this.ctx.resetTransform()
    this.ctx.transform(1, 0, 0, -1, 0, this.h)
    // we know beforehand that timer should not grow bigger
    const deltaTimer = 50
    this.dh = this.h / deltaTimer
    this.old = timerPush
  }
  refresh () {
    this.ctx.clearRect(0, 0, this.w, this.h)
    this.ctx.beginPath()
    this.ctx.resetTransform()
    this.ctx.fillText(`push: ${timerPush} plot: ${timerRefresh}`, 5, 20)
    this.ctx.transform(1, 0, 0, -1, 0, this.h)
    this._plot()
    this.ctx.stroke()
    this.ctx.closePath()
  }
  push (p) {
    this._push(p)
  }
}
function run () {
  const $summary = document.querySelector('.summary')
  const $bench = document.querySelector('.bench')
  const cs = new CanvasSummary(x => {
    if (cs.points.length === cs.MAX_POINTS) {
      clearInterval(cs.timer)
    }
    const y = timerPush - cs.old
    cs.old = timerPush
    return y
  }, $summary, 1)
  //const canvas = Array(30).fill(0).map(x => new Canvas(Math.sin, $bench, 0.01))
  //const canvas = Array(30).fill(0).map(x => new CanvasQueue(Math.sin, $bench, 0.01))
  const canvas = Array(30).fill(0).map(x => new CanvasNopole(Math.sin, $bench, 0.01))
}
run()
<section class="summary"></section>
<hr/>
<div class="bench"></div>

Animation doesn't show steps-in-between

I have created a simple function that "animates" the cell's background color on a tap, and it works perfectly fine:
Color nOldColor = _grid.BackgroundColor;
for (int i = 0; i <= 100; i += 5)
{
    double f = (double)i / (double)100;
    Color nNewColor = PCLHelper.BlendColors(nOldColor, Color.Red, f);
    _grid.BackgroundColor = nNewColor;
    _label1.BackgroundColor = nNewColor;
    await Task.Delay(5);
}
_grid.BackgroundColor = nOldColor;
_label1.BackgroundColor = nOldColor;
Now I wanted to do the same with an Animation, but the animation doesn't show the in-between steps; rather (as it looks to me) it switches straight to the final color:
private async void animateButtonTouched()
{
    int repeatCountMax = 100;
    Color nOldColor = _grid.BackgroundColor;
    var repeatCount = 0;
    _grid.Animate("changeBG", new Animation((val) =>
    {
        double f = (double)repeatCount / (double)100;
        Color nNewColor = PCLHelper.BlendColors(nOldColor, Color.Red, f);
        _grid.BackgroundColor = nNewColor;
        _label1.BackgroundColor = nNewColor;
    }),
    5, // duration. I've also tried it with 100. Nothing helped
    finished: (val, b) =>
    {
        repeatCount++;
    }, repeat: () =>
    {
        return repeatCount < repeatCountMax;
    });
}
What am I doing wrong?
"You are making it more difficult than it needs to be." Trademark pending 🍣
The Animate callback provides the stepping value (or keyframe value). This is a double from 0 to 1 that is passed every N milliseconds (the length of a single animation frame, 16 by default) over the course of the animation's duration (250 ms by default).
So in this example, ShiftToColor gets called 125 times (2000 / 16) with values evenly spaced from 0 to 1, i.e. in steps of 0.008.
var orgColor = aFormsElementInstance.BackgroundColor;
aFormsElementInstance.Animate("changeBG", new Animation((val) =>
{
    Color ShiftToColor(Color From, Color To, double pct)
    {
        var r = From.R + ((To.R - From.R) * val);
        var g = From.G + ((To.G - From.G) * val);
        var b = From.B + ((To.B - From.B) * val);
        return new Color(r, g, b);
    }
    Device.BeginInvokeOnMainThread(() =>
    {
        aFormsElementInstance.BackgroundColor = ShiftToColor(orgColor, Color.Red, val);
    });
}), 16, 2000);
Results in:

Invalid Array Width without declaring new dimension

My web app is generating an "Invalid Array Width" error at line 462 of Crossfilter.js v1.3.12. This error seems to tell me I have >32 dimensions. The puzzle is that I am not knowingly declaring a new dimension when the error occurs.
I have 10 slider bars, which act as numeric filters on my dataset. At the end of a drag event on the second slider bar, a dimension is declared if none already exists at the second location within the numericDims array. (Edit: even when I declare all 10 dimensions in advance and remove the dynamic declaration, the problem still occurs.) About 10 dimensions already exist in the app for other graphics and filters.
The first time I move a slider handle, "new dimension" is logged. After that, every time I move a handle on the same slider, "new dimension" is not logged. This is expected behaviour. But if I move the handles enough times, I get the "Invalid Array Width" error. So, I think I must be accidentally declaring a new dimension every time I move a handle. Can anyone see how I am unwittingly declaring a new dimension? The most relevant code:
if (!numericDims[tempIndex]) {
  console.log('new dimension');
  numericDims[tempIndex] = facts.dimension(function(p){ return p[d]; });
}
if (flag==0) {
  prt.classed("activeFilter",true);
  numericDims[tempIndex].filterFunction(function(p){ return p>=min && p<=max; });
} else {
  prt.classed("activeFilter",false);
  numericDims[tempIndex].filterAll();
  // numericDims[tempIndex].dispose(); ***I figure it's quicker to store them instead of disposing/deleting. Even when I dispose/delete, the problem still happens.
  // delete numericDims[tempIndex];
  // numericDims.splice(tempIndex,1);
  prt.selectAll("g.handle.left").attr("title",null);
  prt.selectAll("g.handle.right").attr("title",null);
}
console.log(numericDims);
Full function:
function dragended(d) {
  let transformation = {
    Y: Math.pow(10, 24),
    Z: Math.pow(10, 21),
    E: Math.pow(10, 18),
    P: Math.pow(10, 15),
    T: Math.pow(10, 12),
    G: Math.pow(10, 9),
    M: Math.pow(10, 6),
    k: Math.pow(10, 3),
    h: Math.pow(10, 2),
    da: Math.pow(10, 1),
    d: Math.pow(10, -1),
    c: Math.pow(10, -2),
    m: Math.pow(10, -3),
    μ: Math.pow(10, -6),
    n: Math.pow(10, -9),
    p: Math.pow(10, -12),
    f: Math.pow(10, -15),
    a: Math.pow(10, -18),
    z: Math.pow(10, -21),
    y: Math.pow(10, -24)
  }
  let reverse = s => {
    let returnValue;
    Object.keys(transformation).some(k => {
      if (s.indexOf(k) > 0) {
        returnValue = parseFloat(s.split(k)[0]) * transformation[k];
        return true;
      }
    })
    return returnValue;
  }
  var facts = window.facts;
  if (d3.select(this).attr("class").indexOf("left")==-1) { var otherHandle = 'left'; } else { var otherHandle = 'right'; }
  d3.select(this).classed("dragging",false);
  var filterFields = window.filterFields;
  var tempIndex = filterFields[0].indexOf(d);
  var min = filterFields[2][tempIndex];
  var max = filterFields[3][tempIndex];
  //console.log(min+', '+max);
  var scale = filterFields[4][tempIndex];
  var t = d3.transform(d3.select(this).attr("transform"));
  var thisX = t.translate[0];
  var flag=0;
  var prt = d3.select("g#f_"+tempIndex);
  var leftHandleX = d3.transform(prt.selectAll("g.handle.left").attr("transform")).translate[0];
  var rightHandleX = d3.transform(prt.selectAll("g.handle.right").attr("transform")).translate[0];
  var wid = prt.selectAll("g.axis").select("rect.numFilterBox").attr("width");
  prt.selectAll("g.axis").select("rect.numFilterBox").attr("x",leftHandleX).attr("width",rightHandleX - leftHandleX);
  var num = -1;
  var pFlag = 0;
  if (filterFields[3][tempIndex]<=1) { var fmt = d3.format('%'); pFlag=1; } else { var fmt = d3.format('4.3s'); }
  if (otherHandle=='left') {
    if (thisX>=300 && scale(min)==0) { flag=1; }
    max = scale.invert(thisX);
    if (isNaN(+fmt(max).trim())) {
      if (pFlag==1) {
        max = +fmt(max).substr(0,fmt(max).length-1)/100
      } else {
        max = reverse(fmt(max));
      }
    } else {
      max = +fmt(max).trim();
    }
    prt.selectAll("g.handle.right").attr("title",function(d){ return 'The filtered maximum for '+filterFields[1][tempIndex]+' is '+max; });
  } else {
    if (thisX<=0 && scale(max)==300) { flag=1; }
    min = scale.invert(thisX);
    if (isNaN(+fmt(min).trim())) {
      if (pFlag==1) {
        min = +fmt(min).substr(0,fmt(min).length-1)/100
      } else {
        min = reverse(fmt(min));
      }
    } else {
      min = +fmt(min).trim();
    }
    prt.selectAll("g.handle.left").attr("title",function(d){ return 'The filtered minimum for '+filterFields[1][tempIndex]+' is '+min; });
  }
  filterFields[2][tempIndex] = min;
  filterFields[3][tempIndex] = max;
  window.filterFields = filterFields;
  if (!numericDims[tempIndex]) {
    console.log('new dimension');
    numericDims[tempIndex] = facts.dimension(function(p){ return p[d]; });
  }
  if (flag==0) {
    prt.classed("activeFilter",true);
    numericDims[tempIndex].filterFunction(function(p){ return p>=min && p<=max; });
  } else {
    prt.classed("activeFilter",false);
    numericDims[tempIndex].filterAll();
    // numericDims[tempIndex].dispose();
    // delete numericDims[tempIndex];
    // numericDims.splice(tempIndex,1);
    prt.selectAll("g.handle.left").attr("title",null);
    prt.selectAll("g.handle.right").attr("title",null);
  }
  console.log(numericDims);
  update();
  doHighlight();
  window.dragFlag=1;
}

Why Is My Genetic Algorithm Terrible (Why Doesn't It Converge)?

I wrote a quick experiment with a genetic algorithm. It simply takes a grid of squares and tries to mutate their color to make them all yellow. It fails miserably and I can't seem to figure out why. I've included a link to JSFiddle that demonstrates working code, as well as a copy of the code in its entirety.
http://jsfiddle.net/mankyd/X6x9L/
<!DOCTYPE html>
<html lang="en">
<head>
</head>
<body>
  <div class="container">
    <h1>The randomly flashing squares <i>should</i> be turning yellow</h1>
    <div class="row">
      <canvas id="input_canvas" width="100" height="100"></canvas>
      <canvas id="output_canvas" width="100" height="100"></canvas>
    </div>
    <div class="row">
      <span id="generation"></span>
      <span id="best_fitness"></span>
      <span id="avg_fitness"></span>
    </div>
  </div>
</body>
</html>
Note that the JavaScript below relies on jQuery in a few places.
// A bit of code that draws several squares in a canvas
// and then attempts to use a genetic algorithm to slowly
// make those squares all yellow.
// Knobs that can be tweaked
var mutation_rate = 0.1; // how often should we mutate something
var crossover_rate = 0.6; // how often should we crossover two parents
var fitness_influence = 1; // affects the fitness's influence over mutation
var elitism = 1; // how many of the parent's generation to carry over
var num_offspring = 20; // how many spawn's per generation
var use_rank_selection = true; // false == roulette_selection
// Global variables for easy tracking
var children = []; // current generation
var best_spawn = null; // keeps track of our best so far
var best_fitness = null; // keeps track of our best so far
var generation = 0; // global generation counter
var clear_color = 'rgb(0,0,0)';
// used for output
var $gen_span = $('#generation');
var $best_fit = $('#best_fitness');
var $avg_fit = $('#avg_fitness');
var $input_canvas = $('#input_canvas');
var input_ctx = $input_canvas[0].getContext('2d');
var $output_canvas = $('#output_canvas');
var output_ctx = $output_canvas[0].getContext('2d');
// A spawn represents a genome - a collection of colored
// squares.
var Spawn = function(nodes) {
  var _fitness = null; // a cache of our fitness
  this.nodes = nodes; // the squares that make up our image
  this.fitness = function() {
    // fitness is simply a function of how close to yellow we are.
    // This is defined through euclidian distance. Smaller fitnesses
    // are better.
    if (_fitness === null) {
      _fitness = 0;
      for (var i = 0; i < nodes.length; i++) {
        _fitness += Math.pow(-nodes[i].color[0], 2) +
                    Math.pow(255 - nodes[i].color[1], 2) +
                    Math.pow(255 - nodes[i].color[2], 2);
      }
      _fitness /= 255*255*3*nodes.length; // divide by the worst possible distance
    }
    return _fitness;
  };
  this.mutate = function() {
    // reset our cached fitness to unknown
    _fitness = null;
    var health = this.fitness() * fitness_influence;
    var width = $output_canvas[0].width;
    var height = $output_canvas[0].height;
    for (var i = 0; i < nodes.length; i++) {
      // Sometimes (most times) we don't mutate
      if (Math.random() > mutation_rate) {
        continue;
      }
      // Mutate the colors.
      for (var j = 0; j < 3; j++) {
        // colors can move by up to 32 in either direction
        nodes[i].color[j] += 64 * (.5 - Math.random()) * health;
        // make sure that our colors stay between 0 and 255
        nodes[i].color[j] = Math.max(0, Math.min(255, nodes[i].color[j]));
      }
    }
  };
  this.draw = function(ctx) {
    // This draw function is a little overly generic in that it supports
    // arbitrary polygons.
    ctx.save();
    ctx.fillStyle = clear_color;
    ctx.fillRect(0, 0, ctx.canvas.width, ctx.canvas.height);
    for (var i = 0; i < nodes.length; i++) {
      ctx.fillStyle = 'rgba(' + Math.floor(nodes[i].color[0]) + ',' + Math.floor(nodes[i].color[1]) + ',' + Math.floor(nodes[i].color[2]) + ',' + nodes[i].color[3] + ')';
      ctx.beginPath();
      ctx.moveTo(nodes[i].points[0][0], nodes[i].points[0][1]);
      for (var j = 1; j < nodes[i].points.length; j++) {
        ctx.lineTo(nodes[i].points[j][0], nodes[i].points[j][1]);
      }
      ctx.fill();
      ctx.closePath();
    }
    ctx.restore();
  };
};
Spawn.from_parents = function(parents) {
  // Given two parents, mix them together to get another spawn
  var nodes = [];
  for (var i = 0; i < parents[0].nodes.length; i++) {
    if (Math.random() > 0.5) {
      nodes.push($.extend({}, parents[0].nodes[i]));
    }
    else {
      nodes.push($.extend({}, parents[1].nodes[i]));
    }
  }
  var s = new Spawn(nodes);
  s.mutate();
  return s;
};
Spawn.random = function(width, height) {
  // Return a complete random spawn.
  var nodes = [];
  for (var i = 0; i < width * height; i += 10) {
    var n = {
      color: [Math.random() * 256, Math.random() * 256, Math.random() * 256, 1],
      points: [
        [i % width, Math.floor(i / width) * 10],
        [(i % width) + 10, Math.floor(i / width) * 10],
        [(i % width) + 10, Math.floor(i / width + 1) * 10],
        [i % width, Math.floor(i / width + 1) * 10],
      ]
    };
    nodes.push(n);
  }
  return new Spawn(nodes);
};
var select_parents = function(gene_pool) {
  if (use_rank_selection) {
    return rank_selection(gene_pool);
  }
  return roulette_selection(gene_pool);
};
var roulette_selection = function(gene_pool) {
  var mother = null;
  var father = null;
  gene_pool = gene_pool.slice(0);
  var sum_fitness = 0;
  var i = 0;
  for (i = 0; i < gene_pool.length; i++) {
    sum_fitness += gene_pool[i].fitness();
  }
  var choose = Math.floor(Math.random() * sum_fitness);
  for (i = 0; i < gene_pool.length; i++) {
    if (choose <= gene_pool[i].fitness()) {
      mother = gene_pool[i];
      break;
    }
    choose -= gene_pool[i].fitness();
  }
  // now remove the mother and repeat for the father
  sum_fitness -= mother.fitness();
  gene_pool.splice(i, 1);
  choose = Math.floor(Math.random() * sum_fitness);
  for (i = 0; i < gene_pool.length; i++) {
    if (choose <= gene_pool[i].fitness()) {
      father = gene_pool[i];
      break;
    }
    choose -= gene_pool[i].fitness();
  }
  return [mother, father];
};
var rank_selection = function(gene_pool) {
  gene_pool = gene_pool.slice(0);
  gene_pool.sort(function(a, b) {
    return b.fitness() - a.fitness();
  });
  var choose_one = function() {
    var sum_fitness = (gene_pool.length + 1) * (gene_pool.length / 2);
    var choose = Math.floor(Math.random() * sum_fitness);
    for (var i = 0; i < gene_pool.length; i++) {
      // figure out the sum of the records up to this point. if we exceed
      // our chosen spot, we've found our spawn.
      if ((i + 1) * (i / 2) >= choose) {
        return gene_pool.splice(i, 1)[0];
      }
    }
    return gene_pool.pop(); // last element, if for some reason we get here
  };
  var mother = choose_one();
  var father = choose_one();
  return [mother, father];
};
var start = function() {
  // Initialize our first generation
  var width = $output_canvas[0].width;
  var height = $output_canvas[0].height;
  generation = 0;
  children = [];
  for (var j = 0; j < num_offspring; j++) {
    children.push(Spawn.random(width, height));
  }
  // sort by fitness so that our best comes first
  children.sort(function(a, b) {
    return a.fitness() - b.fitness();
  });
  best_spawn = children[0];
  best_fitness = best_spawn.fitness();
  best_spawn.draw(output_ctx);
};
var generate = function(spawn_pool) {
  // generate a new set of offspring
  var offspring = [];
  for (var i = 0; i < num_offspring; i++) {
    var parents = select_parents(spawn_pool);
    // odds of crossover decrease as we get closer
    if (Math.random() * best_fitness < crossover_rate) {
      var s = Spawn.from_parents(parents);
    }
    else {
      // quick hack to copy our mother, with possible mutation
      var s = Spawn.from_parents([parents[0], parents[0]]);
    }
    offspring.push(s);
  }
  // select a number of best from the parent pool (elitism)
  for (var i = 0; i < elitism; i++) {
    offspring.push(spawn_pool[i]);
  }
  // sort our offspring by fitness (this includes the parents from elitism). Fittest first.
  offspring.sort(function(a, b) {
    return a.fitness() - b.fitness();
  });
  // pick off the number that we want
  offspring = offspring.slice(0, num_offspring);
  best_spawn = offspring[0];
  best_fitness = best_spawn.fitness();
  best_spawn.draw(output_ctx);
  generation++;
  return offspring;
};
var average_fitness = function(generation) {
  debugger;
  var a = 0;
  for (var i = 0; i < generation.length; i++) {
    a += generation[i].fitness();
  }
  return a / generation.length;
};
//Draw yellow and then initialize our first generation
input_ctx.fillStyle = 'yellow';
input_ctx.fillRect(0, 0, input_ctx.canvas.width, input_ctx.canvas.height);
start();
// Our loop function. Use setTimeout to prevent things from freezing
var gen = function() {
  children = generate(children);
  $gen_span.text('Generation: ' + generation);
  $best_fit.text('Best Fitness: ' + best_fitness);
  $avg_fit.text('Avg. Fitness: ' + average_fitness(children));
  if (generation % 100 === 0) {
    console.log('Generation', generation);
    console.log('Fitness', best_fitness);
  }
  setTimeout(gen, 1);
};
gen();
I've commented the code to try to make parsing it easy. The basic idea is quite simple:
Select 1 or 2 parents from the current generation
Mix those one or two parents together
Mutate the result slightly and add it to the next generation
Select the best few parents (1 in the example) and add them to the next generation
Sort and slice off N results and use them for the next generation (potentially a mix of parents and offspring)
Rinse and repeat
The output never gets anywhere near yellow. It quickly falls into a steady state of a sort that looks awful. Where have I gone wrong?
Solved it. It was in the "from_parents" method:
if (Math.random() > 0.5) {
  nodes.push($.extend({}, parents[0].nodes[i]));
}
else {
  nodes.push($.extend({}, parents[1].nodes[i]));
}
The $.extend() was doing a shallow copy. The obvious solution was to pass true as the first argument, which causes a deep copy. This, however, is incredibly slow performance-wise. The better solution was to remove the $.extend() from that chunk of code entirely and move it into the mutate() method, calling $.extend() only when a node is actually about to be changed. In other words, it becomes copy-on-write.
Also, the color I put in the fitness function was wrong :P
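For reference, here is a minimal sketch of how those two fixes could look inside the Spawn constructor above (the answer does not show its final code, so the details are illustrative):
this.mutate = function() {
  _fitness = null; // reset the cached fitness
  var health = this.fitness() * fitness_influence;
  for (var i = 0; i < nodes.length; i++) {
    if (Math.random() > mutation_rate) {
      continue;
    }
    // Copy-on-write: deep-copy only the node that is about to change, so
    // the parents' node objects (and their color arrays) are never mutated.
    nodes[i] = $.extend(true, {}, nodes[i]);
    for (var j = 0; j < 3; j++) {
      nodes[i].color[j] += 64 * (.5 - Math.random()) * health;
      nodes[i].color[j] = Math.max(0, Math.min(255, nodes[i].color[j]));
    }
  }
};
And since yellow is rgb(255, 255, 0), the per-node distance in fitness() would presumably become:
_fitness += Math.pow(255 - nodes[i].color[0], 2) +
            Math.pow(255 - nodes[i].color[1], 2) +
            Math.pow(0 - nodes[i].color[2], 2);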
