Code example #1
File: training_test.go Project: unixpickle/weakai
// testTrainingXOR trains a small two-layer network on XOR with either a
// SingleRGradienter or a BatchRGradienter and checks that every training
// sample is reproduced within a 0.08 tolerance.
func testTrainingXOR(t *testing.T, maxBatch, maxGos, batchSize int, single bool) {
	if testing.Short() {
		t.Skip("skipping test in short mode.")
	}
	net := Network{
		&DenseLayer{
			InputCount:  2,
			OutputCount: 4,
		},
		&Sigmoid{},
		&DenseLayer{
			InputCount:  4,
			OutputCount: 1,
		},
		&Sigmoid{},
	}
	rand.Seed(123123)
	net.Randomize()

	samples := VectorSampleSet([]linalg.Vector{
		{0, 0},
		{0, 1},
		{1, 0},
		{1, 1},
	}, []linalg.Vector{{0}, {1}, {1}, {0}})

	var gradienter sgd.Gradienter
	if single {
		gradienter = &SingleRGradienter{
			Learner:  net,
			CostFunc: MeanSquaredCost{},
		}
	} else {
		gradienter = &BatchRGradienter{
			Learner:       net.BatchLearner(),
			CostFunc:      MeanSquaredCost{},
			MaxGoroutines: maxGos,
			MaxBatchSize:  maxBatch,
		}
	}
	sgd.SGD(gradienter, samples, 0.9, 1000, batchSize)

	for i := 0; i < samples.Len(); i++ {
		sample := samples.GetSample(i)
		vs := sample.(VectorSample)
		output := net.Apply(&autofunc.Variable{vs.Input}).Output()
		expected := vs.Output[0]
		actual := output[0]
		if math.Abs(expected-actual) > 0.08 {
			t.Errorf("expected %f for input %v but got %f", expected, sample, actual)
		}
	}
}
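testTrainingXOR above is a helper rather than a test entry point. Below is a minimal sketch of how it might be driven from ordinary Go test functions in the same package; the wrapper names and parameter values are illustrative assumptions, not taken from training_test.go.

func TestTrainingXORSingle(t *testing.T) {
	// Exercise the SingleRGradienter path; the batch parameters are ignored.
	testTrainingXOR(t, 0, 0, 1, true)
}

func TestTrainingXORBatch(t *testing.T) {
	// Exercise the BatchRGradienter path with small batches and two goroutines.
	testTrainingXOR(t, 2, 2, 2, false)
}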
Code example #2
File: main.go Project: unixpickle/weakai
// firstBitTest builds a neural network to:
// - output 0 for inputs starting with a 1
// - output 1 for inputs starting with a 0.
func firstBitTest() {
	trainingSamples := make([]linalg.Vector, FirstBitTrainingSize)
	trainingOutputs := make([]linalg.Vector, FirstBitTrainingSize)
	for i := range trainingSamples {
		trainingSamples[i] = make(linalg.Vector, FirstBitInputSize)
		for j := range trainingSamples[i] {
			trainingSamples[i][j] = float64(rand.Intn(2))
		}
		trainingOutputs[i] = []float64{1 - trainingSamples[i][0]}
	}
	samples := neuralnet.VectorSampleSet(trainingSamples, trainingOutputs)

	network := neuralnet.Network{
		&neuralnet.DenseLayer{
			InputCount:  FirstBitInputSize,
			OutputCount: FirstBitHiddenSize,
		},
		&neuralnet.Sigmoid{},
		&neuralnet.DenseLayer{
			InputCount:  FirstBitHiddenSize,
			OutputCount: 1,
		},
		&neuralnet.Sigmoid{},
	}
	network.Randomize()

	batcher := &neuralnet.SingleRGradienter{
		Learner:  network,
		CostFunc: neuralnet.MeanSquaredCost{},
	}
	sgd.SGD(batcher, samples, 0.2, 100000, 1)

	var totalError float64
	var maxPossibleError float64
	for i := 0; i < 50; i++ {
		sample := make([]float64, FirstBitInputSize)
		for j := range sample {
			sample[j] = float64(rand.Intn(2))
		}
		result := network.Apply(&autofunc.Variable{sample})
		output := result.Output()[0]
		amountError := math.Abs(output - (1 - sample[0]))
		totalError += amountError
		maxPossibleError += 1.0
	}

	fmt.Printf("firstBitTest() error rate: %f\n", totalError/maxPossibleError)
}
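firstBitTest refers to package-level constants defined elsewhere in main.go. The declaration below is only a sketch of what they could look like; the values are illustrative assumptions, not the project's actual settings.

const (
	FirstBitTrainingSize = 1000 // number of random training vectors
	FirstBitInputSize    = 15   // bits per input vector
	FirstBitHiddenSize   = 10   // width of the hidden layer
)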
Code example #3
File: main.go Project: unixpickle/weakai
// runHorizontalLineTest trains the given network to detect horizontal lines
// in random bitmaps, then prints its error on the training set and on a
// fresh cross-validation set.
func runHorizontalLineTest(name string, network neuralnet.Network) {
	trainingSamples := make([]linalg.Vector, GridTrainingSize)
	trainingOutputs := make([]linalg.Vector, GridTrainingSize)
	for i := range trainingSamples {
		trainingSamples[i] = randomBitmap()
		if bitmapHasHorizontal(trainingSamples[i]) {
			trainingOutputs[i] = []float64{1}
		} else {
			trainingOutputs[i] = []float64{0}
		}
	}
	samples := neuralnet.VectorSampleSet(trainingSamples, trainingOutputs)

	network.Randomize()
	batcher := &neuralnet.SingleRGradienter{
		Learner:  network,
		CostFunc: neuralnet.MeanSquaredCost{},
	}
	sgd.SGD(batcher, samples, 0.1, 1000, 100)

	var trainingError float64
	var maxTrainingError float64
	for i, sample := range trainingSamples {
		result := network.Apply(&autofunc.Variable{sample})
		output := result.Output()[0]
		amountError := math.Abs(output - trainingOutputs[i][0])
		trainingError += amountError
		maxTrainingError += 1.0
	}

	var totalError float64
	var maxPossibleError float64
	for i := 0; i < 50; i++ {
		sample := randomBitmap()
		var expected float64
		if bitmapHasHorizontal(sample) {
			expected = 1
		}
		result := network.Apply(&autofunc.Variable{sample})
		output := result.Output()[0]
		amountError := math.Abs(output - expected)
		totalError += amountError
		maxPossibleError += 1.0
	}

	fmt.Printf("%s() training error: %f; cross error: %f\n", name,
		trainingError/maxTrainingError, totalError/maxPossibleError)
}
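Because runHorizontalLineTest receives the network as an argument, the same harness can compare different architectures. The caller sketched below uses only the layer types shown above; the function name, the gridInputSize constant, and the layer sizes are illustrative assumptions (randomBitmap and bitmapHasHorizontal are defined elsewhere in main.go).

// gridInputSize stands in for whatever bitmap length randomBitmap()
// actually produces; the value here is a placeholder.
const gridInputSize = 9

func runDenseHorizontalLineTest() {
	network := neuralnet.Network{
		&neuralnet.DenseLayer{
			InputCount:  gridInputSize,
			OutputCount: 10,
		},
		&neuralnet.Sigmoid{},
		&neuralnet.DenseLayer{
			InputCount:  10,
			OutputCount: 1,
		},
		&neuralnet.Sigmoid{},
	}
	runHorizontalLineTest("runDenseHorizontalLineTest", network)
}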
Code example #4
File: main.go Project: unixpickle/weakai
func main() {
	rand.Seed(time.Now().UnixNano())

	sampleSet := sgd.SliceSampleSet{}
	for i := 0; i < TrainingCount; i++ {
		inSeq, outSeq := genEvenOddSeq(rand.Intn(MaxSeqLen-MinSeqLen) + MinSeqLen)
		sampleSet = append(sampleSet, seqtoseq.Sample{
			Inputs:  inSeq,
			Outputs: outSeq,
		})
	}

	// outNet maps the LSTM's hidden state down to two output values.
	outNet := neuralnet.Network{
		&neuralnet.DenseLayer{
			InputCount:  HiddenSize,
			OutputCount: 2,
		},
	}
	outNet.Randomize()
	outBlock := rnn.NewNetworkBlock(outNet, 0)
	lstm := rnn.NewLSTM(2, HiddenSize)
	net := rnn.StackedBlock{lstm, outBlock}

	gradienter := &sgd.RMSProp{
		Gradienter: &seqtoseq.Gradienter{
			SeqFunc:  &rnn.BlockSeqFunc{B: net},
			Learner:  net,
			CostFunc: neuralnet.SigmoidCECost{},
			MaxLanes: 1,
		},
	}

	sgd.SGD(gradienter, sampleSet, StepSize, Epochs, BatchSize)

	// Append a sigmoid for evaluation; SigmoidCECost operates on the
	// pre-sigmoid outputs during training.
	outNet = append(outNet, &neuralnet.Sigmoid{})

	var scoreSum float64
	var scoreTotal float64
	for i := 0; i < TestingCount; i++ {
		size := rand.Intn(MaxSeqLen-MinSeqLen) + MinSeqLen
		ins, outs := genEvenOddSeq(size)
		score := runTestSample(ins, outs, net)
		scoreSum += score
		scoreTotal += 1
	}

	fmt.Println("Testing success rate:", scoreSum/scoreTotal)
}
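main depends on several hyperparameter constants declared elsewhere in the demo's main.go. The block below is only a sketch of such a declaration; the values are illustrative assumptions.

const (
	TrainingCount = 1000 // training sequences to generate
	TestingCount  = 100  // held-out sequences for evaluation
	MinSeqLen     = 2    // shortest generated sequence
	MaxSeqLen     = 20   // longest generated sequence
	HiddenSize    = 20   // LSTM state size
	StepSize      = 0.01 // SGD step size
	Epochs        = 100  // passes over the sample set
	BatchSize     = 1    // sequences per mini-batch
)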
Code example #5
File: training_test.go Project: unixpickle/weakai
// benchmarkTrainingBig benchmarks SGD on a network with 1000-dimensional
// inputs and the given hidden layer size, timing only the training loop.
func benchmarkTrainingBig(b *testing.B, hiddenSize, batchSize int) {
	runtime.GC()

	inputs := make([]linalg.Vector, 100)
	outputs := make([]linalg.Vector, len(inputs))
	for i := range inputs {
		inputs[i] = make(linalg.Vector, 1000)
		outputs[i] = make(linalg.Vector, len(inputs[i]))
		for j := range inputs[i] {
			inputs[i][j] = rand.Float64()
			outputs[i][j] = rand.Float64()
		}
	}

	samples := VectorSampleSet(inputs, outputs)
	network := Network{
		&DenseLayer{
			InputCount:  len(inputs[0]),
			OutputCount: hiddenSize,
		},
		&Sigmoid{},
		&DenseLayer{
			InputCount:  hiddenSize,
			OutputCount: 10,
		},
		&Sigmoid{},
	}
	network.Randomize()
	batcher := &BatchRGradienter{
		Learner:  network.BatchLearner(),
		CostFunc: MeanSquaredCost{},
	}

	b.ResetTimer()
	sgd.SGD(batcher, samples, 0.01, b.N, batchSize)
}
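benchmarkTrainingBig is parameterized on the hidden layer and batch sizes. A sketch of Benchmark wrappers that could drive it follows; the names and sizes are illustrative assumptions, not the repository's actual benchmarks.

func BenchmarkTrainingBigSmallBatches(b *testing.B) {
	benchmarkTrainingBig(b, 300, 10)
}

func BenchmarkTrainingBigLargeBatches(b *testing.B) {
	benchmarkTrainingBig(b, 300, 100)
}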