Example #1
func TestAckley(t *testing.T) {
	fmt.Printf("Minimize the Ackley function with n=%d\n", dim)

	// Setup:
	// We initialize a set of 40 random solutions,
	// then add them to a generational population.
	seed := make([]evo.Genome, 40)
	for i := range seed {
		seed[i] = &ackley{
			gene:  real.Random(dim, 30),
			steps: real.Random(dim, 1),
		}
	}
	var pop gen.Population
	pop.Evolve(seed, Evolve)

	// Continuously print statistics while the optimization runs.
	pop.Poll(0, func() bool {
		count.Lock()
		n := count.n
		count.Unlock()
		stats := pop.Stats()

		// "\x1b[2K" is the xterm escape code to clear the line.
		// Because this is a minimization problem, the fitness is the negated
		// objective value, so the printed statistics are negated and min/max swap.
		fmt.Printf("\x1b[2K\rCount: %7d | Max: %8.3g | Mean: %8.3g | Min: %8.3g | RSD: %9.2e",
			n,
			-stats.Min(),
			-stats.Mean(),
			-stats.Max(),
			-stats.RSD())

		return false
	})

	// Terminate after 200,000 fitness evaluations.
	pop.Poll(0, func() bool {
		count.Lock()
		n := count.n
		count.Unlock()
		return n > 200000
	})

	// Terminate once the standard deviation of fitness falls below the target precision.
	pop.Poll(0, func() bool {
		stats := pop.Stats()
		return stats.SD() < precision
	})

	pop.Wait()
	selector.Close()
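	// Scan the final candidates for the fittest genome and report it.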
	best := seed[0]
	bestFit := seed[0].Fitness()
	for i := range seed {
		fit := seed[i].Fitness()
		if fit > bestFit {
			best = seed[i]
			bestFit = fit
		}
	}
	fmt.Println("\nSolution:", best)
}
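This example relies on an ackley genome type, an epoch function Evolve, a shared evaluation counter count, and the package-level dim, precision, and selector values, all defined elsewhere in the test suite. The objective being minimized is the Ackley function, whose global minimum is 0 at the origin; since fitness is maximized, the genome presumably reports the negated objective. A minimal, hypothetical sketch of such an evaluation (standard library only, not the package's actual code) could look like this:

package ackley

import "math"

// Fitness returns the negated Ackley value of a candidate vector, so that
// maximizing fitness minimizes the objective. The optimum fitness is 0 at the
// origin, which is why the polling output above negates the statistics and
// swaps Min and Max.
func Fitness(x []float64) float64 {
	const a, b, c = 20.0, 0.2, 2 * math.Pi
	n := float64(len(x))
	var sumSq, sumCos float64
	for _, xi := range x {
		sumSq += xi * xi
		sumCos += math.Cos(c * xi)
	}
	value := -a*math.Exp(-b*math.Sqrt(sumSq/n)) - math.Exp(sumCos/n) + a + math.E
	return -value
}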
Example #2
func TestQueens(t *testing.T) {
	fmt.Printf("Find a solution to %d-queens\n", dim)

	// Setup:
	// We create an initial set of random candidates and divide them into "islands".
	// Each island is evolved independently in a generational population.
	// The islands are then linked together into a graph population with a ring
	// topology, and individuals migrate between adjacent islands at a regular interval.
	seed := make([]evo.Genome, size)
	for i := range seed {
		seed[i] = &queens{gene: perm.New(dim)}
	}
	islands := make([]evo.Genome, isl)
	islSize := size / isl
	for i := range islands {
		var island gen.Population
		island.Evolve(seed[i*islSize:(i+1)*islSize], Evolution)
		islands[i] = &island
	}
	pop := graph.Ring(isl)
	pop.Evolve(islands, gen.Migrate(migration, delay))

	// Continuously print statistics while the optimization runs.
	pop.Poll(0, func() bool {
		count.Lock()
		n := count.n
		count.Unlock()
		stats := pop.Stats()

		// "\x1b[2K" is the xterm escape code to clear the line.
		// Because this is a minimization problem, the fitness is negative,
		// so the printed statistics are negated and min/max swap.
		fmt.Printf("\x1b[2K\rCount: %7d | Max: %3.0f | Mean: %3.0f | Min: %3.0f | RSD: %9.2e",
			n,
			-stats.Min(),
			-stats.Mean(),
			-stats.Max(),
			-stats.RSD())

		return false
	})

	// Terminate when we've found a solution (a max fitness of 0).
	pop.Poll(0, func() bool {
		stats := pop.Stats()
		return stats.Max() == 0
	})

	// Terminate if we've converged, i.e. the standard deviation is less than 0.01.
	pop.Poll(0, func() bool {
		stats := pop.Stats()
		return stats.SD() < 1e-2
	})

	// Terminate after 2,000,000 fitness evaluations.
	pop.Poll(0, func() bool {
		count.Lock()
		n := count.n
		count.Unlock()
		return n > 2e6
	})

	pop.Wait()
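	// Scan the final candidates for the fittest genome and report it.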
	best := seed[0]
	bestFit := seed[0].Fitness()
	for i := range seed {
		fit := seed[i].Fitness()
		if fit > bestFit {
			best = seed[i]
			bestFit = fit
		}
	}
	fmt.Println("\nSolution:", best)
}
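As in the first example, the queens genome type, the epoch function, and the package-level dim, size, isl, migration, delay, and count values are defined elsewhere in the test suite. The permutation from perm.New(dim) presumably maps each row to a column, so row and column clashes are impossible by construction and fitness is the negated number of attacking pairs, which is why the run stops once stats.Max() reaches 0. A hypothetical, self-contained sketch of that conflict count (not the package's actual code) might be:

package queens

// Conflicts counts attacking pairs of queens under a permutation encoding in
// which queen i sits at row i, column gene[i]. Only diagonal attacks can occur,
// and using -Conflicts(gene) as the fitness makes a solved board score exactly 0.
func Conflicts(gene []int) int {
	conflicts := 0
	for i := 0; i < len(gene); i++ {
		for j := i + 1; j < len(gene); j++ {
			// Two queens share a diagonal when their row distance equals
			// their column distance.
			if j-i == abs(gene[j]-gene[i]) {
				conflicts++
			}
		}
	}
	return conflicts
}

func abs(x int) int {
	if x < 0 {
		return -x
	}
	return x
}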