Example #1
func init() {
	mat64.Register(cblas.Blas{})
	// Set up all of the test sinks
	for i, initer := range sinkIniters {
		s := NewTrainer(initer.inputDim, initer.outputDim, initer.nFeatures, initer.kernel)
		s.features = randomMat(initer.nFeatures, initer.inputDim)
		s.featureWeights = randomMat(initer.nFeatures, initer.outputDim)
		s.b = randomSlice(initer.nFeatures)
		testSinks = append(testSinks, s)
		if initer.name == "" {
			initer.name = strconv.Itoa(i)
		}
	}

}
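The randomMat and randomSlice helpers used above are not shown in this example. A minimal sketch of what they might look like, assuming they simply fill a mat64.Dense and a plain slice with uniform random values (math/rand and the mat64 package assumed imported):

// Hypothetical helpers assumed by the test setup above; the originals are not
// part of this example, so these are only plausible sketches.
func randomMat(rows, cols int) *mat64.Dense {
	data := make([]float64, rows*cols)
	for i := range data {
		data[i] = rand.Float64()
	}
	return mat64.NewDense(rows, cols, data)
}

func randomSlice(n int) []float64 {
	s := make([]float64, n)
	for i := range s {
		s[i] = rand.Float64()
	}
	return s
}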
Example #2
// Activate propagates the given input matrix (with) across the network
// a fixed number of times (maxIterations).
//
// The with matrix should be size * size elements, with only the values
// of input neurons set (everything else should be zero).
//
// If the network is conceptually organised into layers, maxIterations
// should be set to the number of layers.
//
// This function overwrites whatever's stored in its first argument.
func (n *Network) Activate(with *mat64.Dense, maxIterations int) {

	// Add bias and feed to activation
	biasFunc := func(r, c int, v float64) float64 {
		return v + n.biases[r]
	}
	activFunc := func(r, c int, v float64) float64 {
		return n.funcs[r].Forward(v)
	}

	tmp := new(mat64.Dense)
	tmp.Clone(with)

	mat64.Register(cblas.Blas{})

	// Main loop
	for i := 0; i < maxIterations; i++ {
		with.Mul(n.weights, with)
		with.Apply(biasFunc, with)
		with.Apply(activFunc, with)
	}
}
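A hedged sketch of how Activate might be called, following the doc comment above; newNetwork and size are placeholders for illustration, since the construction of the Network type is not shown in this example:

// Hypothetical usage of Activate. newNetwork and size are assumptions;
// only the shape of the with matrix follows the doc comment above.
n := newNetwork(size)                  // assumed constructor returning a *Network
with := mat64.NewDense(size, size, nil) // zero-valued size*size matrix
with.Set(0, 0, 1.0)                     // set only the input neuron values
n.Activate(with, 3)                     // e.g. a network conceptually organised into 3 layers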
Example #3
func init() {
	mat64.Register(goblas.Blas{})
}
Example #4
func init() {
	mat64.Register(goblas.Blas{}) // use a Go-based BLAS implementation
	dbw.Register(goblas.Blas{})   // register the same implementation with the dbw wrapper
}
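Once a BLAS implementation has been registered as above, subsequent mat64 operations are carried out through it. A minimal sketch, with matrix values chosen arbitrarily for illustration:

// Hedged sketch: after Register, ordinary mat64 operations such as Mul
// are backed by the registered BLAS implementation.
a := mat64.NewDense(2, 2, []float64{1, 2, 3, 4})
b := mat64.NewDense(2, 2, []float64{5, 6, 7, 8})
c := mat64.NewDense(2, 2, nil)
c.Mul(a, b) // runs on the goblas backend registered in init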