// firstBitTest builds a neural network to: // - output 0 for inputs starting with a 1 // - output 1 for inputs starting with a 0. func firstBitTest() { trainingSamples := make([]linalg.Vector, FirstBitTrainingSize) trainingOutputs := make([]linalg.Vector, FirstBitTrainingSize) for i := range trainingSamples { trainingSamples[i] = make(linalg.Vector, FirstBitInputSize) for j := range trainingSamples[i] { trainingSamples[i][j] = float64(rand.Intn(2)) } trainingOutputs[i] = []float64{1 - trainingSamples[i][0]} } samples := neuralnet.VectorSampleSet(trainingSamples, trainingOutputs) network := neuralnet.Network{ &neuralnet.DenseLayer{ InputCount: FirstBitInputSize, OutputCount: FirstBitHiddenSize, }, &neuralnet.Sigmoid{}, &neuralnet.DenseLayer{ InputCount: FirstBitHiddenSize, OutputCount: 1, }, &neuralnet.Sigmoid{}, } network.Randomize() batcher := &neuralnet.SingleRGradienter{ Learner: network, CostFunc: neuralnet.MeanSquaredCost{}, } sgd.SGD(batcher, samples, 0.2, 100000, 1) var totalError float64 var maxPossibleError float64 for i := 0; i < 50; i++ { sample := make([]float64, FirstBitInputSize) for j := range sample { sample[j] = float64(rand.Intn(2)) } result := network.Apply(&autofunc.Variable{sample}) output := result.Output()[0] amountError := math.Abs(output - (1 - sample[0])) totalError += amountError maxPossibleError += 1.0 } fmt.Printf("firstBitTest() error rate: %f\n", totalError/maxPossibleError) }
func runHorizontalLineTest(name string, network neuralnet.Network) { trainingSamples := make([]linalg.Vector, GridTrainingSize) trainingOutputs := make([]linalg.Vector, GridTrainingSize) for i := range trainingSamples { trainingSamples[i] = randomBitmap() if bitmapHasHorizontal(trainingSamples[i]) { trainingOutputs[i] = []float64{1} } else { trainingOutputs[i] = []float64{0} } } samples := neuralnet.VectorSampleSet(trainingSamples, trainingOutputs) network.Randomize() batcher := &neuralnet.SingleRGradienter{ Learner: network, CostFunc: neuralnet.MeanSquaredCost{}, } sgd.SGD(batcher, samples, 0.1, 1000, 100) var trainingError float64 var maxTrainingError float64 for i, sample := range trainingSamples { result := network.Apply(&autofunc.Variable{sample}) output := result.Output()[0] amountError := math.Abs(output - trainingOutputs[i][0]) trainingError += amountError maxTrainingError += 1.0 } var totalError float64 var maxPossibleError float64 for i := 0; i < 50; i++ { sample := randomBitmap() var expected float64 if bitmapHasHorizontal(sample) { expected = 1 } result := network.Apply(&autofunc.Variable{sample}) output := result.Output()[0] amountError := math.Abs(output - expected) totalError += amountError maxPossibleError += 1.0 } fmt.Printf("%s() training error: %f; cross error: %f\n", name, trainingError/maxTrainingError, totalError/maxPossibleError) }
func trainClassifier(n neuralnet.Network, d mnist.DataSet) { log.Println("Training classifier (ctrl+C to finish)...") killChan := make(chan struct{}) go func() { c := make(chan os.Signal, 1) signal.Notify(c, os.Interrupt) <-c signal.Stop(c) fmt.Println("\nCaught interrupt. Ctrl+C again to terminate.") close(killChan) }() inputs := make([]linalg.Vector, len(d.Samples)) outputs := make([]linalg.Vector, len(d.Samples)) for i, x := range d.IntensityVectors() { inputs[i] = x } for i, x := range d.LabelVectors() { outputs[i] = x } samples := neuralnet.VectorSampleSet(inputs, outputs) batcher := &neuralnet.BatchRGradienter{ Learner: n.BatchLearner(), CostFunc: neuralnet.MeanSquaredCost{}, } crossValidation := mnist.LoadTestingDataSet() sgd.SGDInteractive(batcher, samples, ClassifierStepSize, ClassifierBatchSize, func() bool { printScore("Training", n, d) printScore("Cross", n, crossValidation) return true }) }
// Autoencode trains a deep autoencoder on the images read from the
// channel. All images must match the dimensions of the first image
// received; mismatched images are logged and skipped. Training uses
// Hessian-free optimization and runs until the user stops it (ctrl+c).
// The returned network ends with a Sigmoid layer, so its outputs lie
// in (0, 1).
func Autoencode(images <-chan image.Image) (neuralnet.Network, error) {
	firstImage := <-images
	if firstImage == nil {
		return nil, errors.New("no readable images")
	}
	// The first image fixes the dimensions expected of all the rest.
	width := firstImage.Bounds().Dx()
	height := firstImage.Bounds().Dy()

	log.Print("Reading images...")

	tensors := []*neuralnet.Tensor3{ImageTensor(firstImage)}
	for img := range images {
		if img.Bounds().Dx() != width || img.Bounds().Dy() != height {
			// Skip mismatched images rather than aborting the run.
			log.Printf("Image size %d,%d does not match %d,%d",
				img.Bounds().Dx(), img.Bounds().Dy(), width, height)
		} else {
			tensors = append(tensors, ImageTensor(img))
		}
	}

	log.Print("Training network (ctrl+c to finish)...")

	// An autoencoder learns the identity mapping, so each tensor's
	// raw data is used as both the input and the expected output.
	tensorSlices := make([]linalg.Vector, len(tensors))
	for i, tensor := range tensors {
		tensorSlices[i] = tensor.Data
	}
	samples := neuralnet.VectorSampleSet(tensorSlices, tensorSlices)

	average, stddev := statisticalInfo(tensorSlices)
	// Symmetric bottleneck architecture:
	// normalize -> HiddenSize1 -> HiddenSize2 -> HiddenSize1 -> output.
	// The rescale layer normalizes inputs to zero mean / unit scale
	// using the data's own statistics.
	network := neuralnet.Network{
		&neuralnet.RescaleLayer{
			Bias:  -average,
			Scale: 1 / stddev,
		},
		&neuralnet.DenseLayer{
			InputCount:  width * height * 3,
			OutputCount: HiddenSize1,
		},
		neuralnet.Sigmoid{},
		&neuralnet.DenseLayer{
			InputCount:  HiddenSize1,
			OutputCount: HiddenSize2,
		},
		neuralnet.Sigmoid{},
		&neuralnet.DenseLayer{
			InputCount:  HiddenSize2,
			OutputCount: HiddenSize1,
		},
		neuralnet.Sigmoid{},
		&neuralnet.DenseLayer{
			InputCount:  HiddenSize1,
			OutputCount: width * height * 3,
		},
	}
	network.Randomize()

	// Hessian-free training with damping; the full sample set forms
	// a single batch.
	ui := hessfree.NewConsoleUI()
	learner := &hessfree.DampingLearner{
		WrappedLearner: &hessfree.NeuralNetLearner{
			Layers:         network,
			Output:         nil,
			Cost:           neuralnet.SigmoidCECost{},
			MaxSubBatch:    MaxSubBatch,
			MaxConcurrency: 2,
		},
		DampingCoeff: 2,
		UI:           ui,
	}
	trainer := hessfree.Trainer{
		Learner:   learner,
		Samples:   samples,
		BatchSize: samples.Len(),
		UI:        ui,
		Convergence: hessfree.ConvergenceCriteria{
			MinK: 5,
		},
	}
	trainer.Train()

	// NOTE(review): the final Sigmoid is appended only after training
	// — presumably SigmoidCECost operates on pre-sigmoid activations,
	// so training the net with the Sigmoid attached would double-apply
	// it; confirm against the neuralnet package.
	network = append(network, neuralnet.Sigmoid{})

	return network, nil
}
func dataSetSamples(d mnist.DataSet) sgd.SampleSet { labelVecs := d.LabelVectors() inputVecs := d.IntensityVectors() return neuralnet.VectorSampleSet(vecVec(inputVecs), vecVec(labelVecs)) }