func forward(x *mat64.Dense) (yHat mat64.Dense) {
	z2 := mat64.Dense{} // 3x3
	a2 := mat64.Dense{} // 3x3
	z3 := mat64.Dense{} // 3x1
	z2.Mul(x, w1)
	a2.Apply(sigmoidApplyable, &z2)
	z3.Mul(&a2, w2)
	// yHat is 3x1
	yHat.Apply(sigmoidApplyable, &z3)
	return
}
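The forward pass relies on package-level names that are not shown here: sigmoidApplyable, w1 and w2. Below is a minimal sketch of what they could look like, assuming they sit in the same file as forward and that x is a 3x2 input so that z2 = x*w1 comes out 3x3 as the comments state. The weight values are arbitrary placeholders, not the original's.

import (
	"math"

	"github.com/gonum/matrix/mat64"
)

// sigmoidApplyable matches the callback signature mat64's Apply expects.
func sigmoidApplyable(r, c int, v float64) float64 {
	return 1 / (1 + math.Exp(-v))
}

// Assumed shapes: with a 3x2 input x, w1 must be 2x3 (so z2 = x*w1 is 3x3)
// and w2 must be 3x1 (so z3 = a2*w2 is 3x1). The values are placeholders.
var (
	w1 = mat64.NewDense(2, 3, []float64{
		0.1, 0.4, 0.7,
		0.2, 0.5, 0.8,
	})
	w2 = mat64.NewDense(3, 1, []float64{0.3, 0.6, 0.9})
)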
// Activate propagates the given input matrix (with) across the network
// a certain number of times (up to maxIterations).
//
// The with matrix should be size * size elements, with only the values
// of input neurons set (everything else should be zero).
//
// If the network is conceptually organised into layers, maxIterations
// should be set to the number of layers.
//
// This function overwrites whatever's stored in its first argument.
func (n *Network) Activate(with *mat64.Dense, maxIterations int) {
	// Add bias and feed to activation
	biasFunc := func(r, c int, v float64) float64 {
		return v + n.biases[r]
	}
	activFunc := func(r, c int, v float64) float64 {
		return n.funcs[r].Forward(v)
	}
	tmp := new(mat64.Dense)
	tmp.Clone(with)

	// Main loop
	for i := 0; i < maxIterations; i++ {
		with.Mul(n.weights, with)
		with.Apply(biasFunc, with)
		with.Apply(activFunc, with)
	}
}
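Stripped of the Network plumbing, each iteration of that loop computes state <- f(W*state + b). The following self-contained sketch runs the same update with plain mat64 calls; the sizes, weights and sigmoid activation are assumptions for illustration, and a column vector stands in for the size*size state matrix the method actually expects.

package main

import (
	"fmt"
	"math"

	"github.com/gonum/matrix/mat64"
)

func main() {
	weights := mat64.NewDense(3, 3, []float64{
		0, 0.5, 0,
		0.5, 0, 0.5,
		0, 0.5, 0,
	})
	biases := []float64{0.1, 0.1, 0.1}
	state := mat64.NewDense(3, 1, []float64{1, 0, 0}) // only the input neuron set

	// Propagate across three conceptual layers: state <- sigmoid(W*state + b).
	for i := 0; i < 3; i++ {
		next := new(mat64.Dense)
		next.Mul(weights, state)
		next.Apply(func(r, c int, v float64) float64 {
			return 1 / (1 + math.Exp(-(v + biases[r]))) // add bias, then squash
		}, next)
		state = next
	}
	fmt.Println(mat64.Formatted(state))
}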
func nnlsSubproblem(V, W, Ho *mat64.Dense, tol float64, outer, inner int) (H, G *mat64.Dense, i int, ok bool) {
	H = new(mat64.Dense)
	H.Clone(Ho)

	var WtV, WtW mat64.Dense
	WtV.Mul(W.T(), V)
	WtW.Mul(W.T(), W)

	alpha, beta := 1., 0.1

	decFilt := func(r, c int, v float64) float64 {
		// decFilt is applied to G, so v = G.At(r, c).
		if v < 0 || H.At(r, c) > 0 {
			return v
		}
		return 0
	}

	G = new(mat64.Dense)

	for i = 0; i < outer; i++ {
		// Projected gradient of 0.5*||V - W*H||^2: G = WtW*H - WtV, zeroed
		// by decFilt where the non-negativity constraint is active.
		G.Mul(&WtW, H)
		G.Sub(G, &WtV)
		G.Apply(decFilt, G)

		if mat64.Norm(G, 2) < tol {
			break
		}

		var (
			reduce bool
			Hp     *mat64.Dense
			d, dQ  mat64.Dense
		)
		for j := 0; j < inner; j++ {
			// Candidate step: Hn = max(H - alpha*G, 0).
			var Hn mat64.Dense
			Hn.Scale(alpha, G)
			Hn.Sub(H, &Hn)
			Hn.Apply(posFilt, &Hn)

			d.Sub(&Hn, H)
			dQ.Mul(&WtW, &d)
			dQ.MulElem(&dQ, &d)
			d.MulElem(G, &d)

			// Sufficient-decrease condition on the quadratic model.
			sufficient := 0.99*mat64.Sum(&d)+0.5*mat64.Sum(&dQ) < 0

			if j == 0 {
				reduce = !sufficient
				Hp = H
			}

			if reduce {
				if sufficient {
					H = &Hn
					ok = true
					break
				} else {
					alpha *= beta
				}
			} else {
				if !sufficient || mat64.Equal(Hp, &Hn) {
					H = Hp
					break
				} else {
					alpha /= beta
					Hp = &Hn
				}
			}
		}
	}

	return H, G, i, ok
}
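posFilt is referenced above but not defined in this excerpt. Since the candidate Hn = H - alpha*G has to be projected back onto the non-negative orthant, a plausible definition (an assumption, not necessarily the original's) is:

// posFilt clamps negative entries to zero, keeping Hn feasible for the
// non-negativity constraint (assumed definition).
func posFilt(r, c int, v float64) float64 {
	if v < 0 {
		return 0
	}
	return v
}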
func denormalize(matrix *mat64.Dense, max float64) {
	matrix.Apply(func(i, j int, elem float64) float64 {
		return elem * max
	}, matrix)
}
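denormalize undoes a max-based scaling, so it pairs with a normalize step applied to the data before training. That counterpart isn't shown in the original; a sketch of what it would look like:

// normalize is the assumed counterpart: divide every element by the data's
// maximum so values land in [0, 1] before training.
func normalize(matrix *mat64.Dense, max float64) {
	matrix.Apply(func(i, j int, elem float64) float64 {
		return elem / max
	}, matrix)
}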