## Simple Numerical Optimization Using an Evolutionary Algorithm with C#

The goal of a numerical optimization problem is to find a vector of values that minimize some cost function. The most fundamental example is minimizing the Sphere Function f(x0, x1, .. xn) = x0^2 + x1^2 + .. + xn^2. The optimal solution is X = (0, 0, .. 0) when f(X) = 0.0.

An evolutionary algorithm loosely mimics biological crossover (combining two existing parent solutions to create a new child solution) and mutation (slightly modifying a solution in a random way). In high level pseudo-code:

```create population of random solutions
loop many times
pick two parent solutions
create a child solution
mutate child
evaluate child
replace weak solution in pop with child
end-loop
return best solution found
```

An evolutionary approach is a meta-heuristic, meaning there are dozens of ways to implement a specific algorithm. I set out to design and implement the simplest possible example for the Sphere Function with dim = 6.

For parent selection I keep the population of solutions sorted from smallest error to largest and pick one parent from the top/best half of the population and one parent from the bottom/worst half.

For crossover, I pick a random location and produce a child that’s the left half of parent1 and the right half of parent2.

For mutation, I walk through each value in the child solution and flip a virtual coin. If the coin is heads I do nothing but if the coin is tails, I add or subtract a random value between -0.25 and +0.25.

I set up a population with 8 solutions and ran the evolutionary algorithm for 1,000 generations. The algorithm found a very good solution of X = (0.02, 0.05, 0.01, -0.00, 0.15, 0.04) with an error of 0.0297.

Evolutionary algorithms aren’t practical for most numerical optimization problems. The primary example is finding the best set of weights for a deep neural network. For NNs, you can use clever Calculus gradients, called back-propagation. But for problems where the error function doesn’t have gradients, evolutionary optimization can be useful.

Someday, when quantum computing becomes practical, evolutionary optimization might be useful for training huge neural networks that have trillions (or more) of weights.

Left: In “Forbidden Planet” (1956), the crew of the United Planets Cruiser C-57D used a spherical “astrogator” for navigation. An excellent movie. Center: In the original “Star Wars” (1977), the spherical Death Star was a giant weaponized space station. A good movie. Right: In “It Came From Outer Space” (1953), benign aliens in a spherical spaceship crash into the Arizona desert and take the form of humans until they can repair their ship. An excellent movie.

Demo code. Replace “lt”, “gt” with Boolean operator symbols.

```namespace EvoOptSimple
// .NET 6.0
{
internal class EvoOptSimpleProgram
{
    // Demo entry point: build a small population, show it, run the
    // evolutionary search for 1,000 generations, then show the result.
    static void Main(string[] args)
    {
        Console.WriteLine("\nBegin evo optimization demo ");
        Console.WriteLine("Goal is Sphere function dim = 6 ");
        Console.WriteLine("Opt sln [0, 0, 0, 0, 0, 0], err = 0 ");

        // dim = 6 genes per solution, population of 8, fixed seed
        // so the run is reproducible.
        Solver evo = new(6, 8, seed: 0);
        Console.WriteLine("\nInitial population: ");
        evo.Show();

        Console.WriteLine("\nBegin search ");
        evo.Solve(1000);
        Console.WriteLine("Done ");

        Console.WriteLine("\nFinal population: ");
        evo.Show();

        Console.WriteLine("\nEnd demo ");
    } // Main
} // Program

// -----------------------------------------------------------

public class Solver
{
    // Simple evolutionary optimizer for the Sphere function
    // f(x) = sum(x[i]^2), minimum at the zero vector.
    // The population is kept sorted by error, best (smallest) first.

    public int popSize;                // number of solutions in the population
    public int dim;                    // number of genes (values) per solution

    public double minGene, maxGene;    // range for randomly generated gene values
    public Random rnd;

    public double[][] pop;             // pop[i] is a candidate solution
    public double[] errs;              // errs[i] is the error of pop[i] (parallel array)

    public double[] bestSoln;          // best solution seen at any point during search
    public double bestErr;             // error of bestSoln

    // Create a population of popSize random solutions, each with dim
    // genes in [-5.0, +5.0], evaluate them, and sort best-first.
    public Solver(int dim, int popSize, int seed)
    {
        this.minGene = -5.0; this.maxGene = 5.0;
        this.rnd = new Random(seed);   // seeded for reproducibility
        this.dim = dim;
        this.popSize = popSize;

        this.pop = new double[popSize][];
        for (int i = 0; i < popSize; ++i)
            this.pop[i] = new double[dim];

        this.errs = new double[popSize];
        for (int i = 0; i < popSize; ++i)
        {
            for (int j = 0; j < dim; ++j)
                this.pop[i][j] = (this.maxGene - this.minGene) *
                    this.rnd.NextDouble() + this.minGene;
            this.errs[i] = this.ComputeError(this.pop[i]);
        }

        // Parallel sort: errs are the keys, pop rows follow them, so
        // pop[0] is the best (smallest-error) solution.
        Array.Sort(this.errs, this.pop);

        this.bestSoln = new double[dim];
        for (int j = 0; j < dim; ++j)
            this.bestSoln[j] = this.pop[0][j];
        this.bestErr = this.errs[0];
    } // ctor()

    // Sphere function: sum of squared gene values. Zero at the origin.
    public double ComputeError(double[] soln)
    {
        double result = 0.0;
        for (int j = 0; j < soln.Length; ++j)
            result += soln[j] * soln[j];
        return result;
    }

    // Display each solution with its error, then the best solution found.
    public void Show()
    {
        for (int i = 0; i < this.popSize; ++i)
        {
            for (int j = 0; j < this.dim; ++j)
                Console.Write(this.pop[i][j].ToString("F4") + " ");
            Console.WriteLine(" | " + this.errs[i].ToString("F4"));
        }
        Console.WriteLine("-----");
        for (int j = 0; j < this.dim; ++j)
            Console.Write(this.bestSoln[j].ToString("F4") + " ");
        Console.WriteLine(" | " + this.bestErr.ToString("F4"));
    } // Show

    // Pick one parent index from the better half of the (sorted)
    // population and one from the worse half; randomize their order
    // so neither position is always the stronger parent.
    public int[] PickParents()
    {
        int first = rnd.Next(0, this.popSize / 2);              // top half
        int second = rnd.Next(this.popSize / 2, this.popSize);  // bottom half
        int flip = rnd.Next(0, 2);  // 0 or 1
        if (flip == 0)
            return new int[] { first, second };
        else
            return new int[] { second, first };
    }

    // One-point crossover: child gets genes [0, idx) from parent1 and
    // genes [idx, dim) from parent2, where idx is in [1, dim-1) so each
    // parent always contributes at least one gene.
    public double[] CrossOver(double[] parent1,
        double[] parent2)
    {
        int idx = this.rnd.Next(1, this.dim - 1);
        double[] child = new double[this.dim];
        for (int k = 0; k < idx; ++k)
            child[k] = parent1[k];
        for (int k = idx; k < this.dim; ++k)
            child[k] = parent2[k];
        return child;
    }

    // With probability 0.5 per gene, add a random delta in [-0.25, +0.25).
    // Mutates soln in place.
    public void Mutate(double[] soln)
    {
        double lo = -0.25;
        double hi = 0.25;
        for (int j = 0; j < soln.Length; ++j)
        {
            int flip = this.rnd.Next(0, 2);  // 0 or 1
            if (flip == 1)
            {
                double delta = (hi - lo) *
                    this.rnd.NextDouble() + lo;
                soln[j] += delta;
            }
        }
    } // Mutate

    // Run the evolutionary search for maxGen generations. Each
    // generation creates one child (crossover + mutation) and one
    // random immigrant, each of which replaces a random solution in
    // the worse half of the population.
    public void Solve(int maxGen)
    {
        for (int gen = 0; gen < maxGen; ++gen)
        {
            // 1. make a child
            int[] parentIdxs = this.PickParents();
            double[] parent1 = this.pop[parentIdxs[0]];
            double[] parent2 = this.pop[parentIdxs[1]];
            double[] child = this.CrossOver(parent1, parent2);

            // 2. mutate and evaluate
            this.Mutate(child);
            double childErr = this.ComputeError(child);

            // 2b. new best?
            if (childErr < this.bestErr)
            {
                // progress messages are throttled to early and late gens
                if (gen < 20 || gen > 700)
                    Console.WriteLine("New best soln found at gen " +
                        gen);
                for (int i = 0; i < child.Length; ++i)
                    this.bestSoln[i] = child[i];
                this.bestErr = childErr;
            }

            // 3. replace a weak soln with child
            int idx = this.rnd.Next(this.popSize / 2, this.popSize);
            for (int j = 0; j < this.dim; ++j)
                this.pop[idx][j] = child[j];
            this.errs[idx] = childErr;

            // 4. create immigrant (a brand-new random solution, to
            // maintain population diversity)
            double[] imm = new double[this.dim];
            for (int j = 0; j < this.dim; ++j)
                imm[j] = (this.maxGene - this.minGene) *
                    this.rnd.NextDouble() + this.minGene;
            double immErr = this.ComputeError(imm);

            // 4b. new best?
            if (immErr < this.bestErr)
            {
                if (gen < 20 || gen > 700)
                    Console.WriteLine("New best soln (imm) at gen " +
                        gen);
                // BUG FIX: original copied child/childErr here, so an
                // immigrant new-best was recorded with the wrong data.
                for (int i = 0; i < imm.Length; ++i)
                    this.bestSoln[i] = imm[i];
                this.bestErr = immErr;
            }

            // replace another weak soln with the immigrant
            idx = this.rnd.Next(this.popSize / 2, this.popSize);
            this.pop[idx] = imm;
            this.errs[idx] = immErr;

            // 5. re-sort so the halves stay meaningful next generation
            Array.Sort(this.errs, this.pop);

            if (gen == 500) Console.WriteLine(". . . ");
        } // each gen
    } // Solve()
} // Solver
} // ns
```
This entry was posted in Machine Learning. Bookmark the permalink.