// sumParameters adds up the rows of this.Xs over the half-open range [i0, i1),
// splitting the work across two goroutines while the range is at least
// THRESHOLD rows wide. The partial sum is written to *Res and completion is
// signalled on done.
func (this *TrainingSet) sumParameters(i0, i1 int, Res **Matrix.Matrix, done chan<- bool) {
	di := i1 - i0
	if di >= THRESHOLD {
		done2 := make(chan bool, 2) // two sub-tasks, so a buffer of 2 suffices
		mi := i0 + di/2
		res1 := Matrix.NullMatrixP(1, this.Xs.GetNColumns())
		res2 := Matrix.NullMatrixP(1, this.Xs.GetNColumns())
		go this.sumParameters(i0, mi, &res1, done2)
		go this.sumParameters(mi, i1, &res2, done2)
		<-done2
		<-done2
		SP, _ := Matrix.Sum(res1, res2)
		*Res = SP
	} else {
		// The range is half-open: iterating with i <= i1 would process the
		// midpoint row twice after a split, double-counting it in the sum.
		for i := i0; i < i1; i++ {
			xsi := this.Xs.GetRow(i)
			SP, _ := Matrix.Sum(*Res, xsi)
			*Res = SP
		}
	}
	done <- true
}
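// Usage sketch (an assumption, not part of the original source): computing the
// mean row of the training set with the parallel sum above. The helper name
// MeanSketch and the 1-based, half-open row range [1, m+1) are hypothetical;
// they follow the 1-based indexing suggested by GetValue(1, 1) elsewhere in
// this package.
func (this *TrainingSet) MeanSketch() *Matrix.Matrix {
	m := this.Xs.GetMRows()
	res := Matrix.NullMatrixP(1, this.Xs.GetNColumns())
	// Buffered so the synchronous top-level call can signal completion
	// without blocking on its own channel.
	done := make(chan bool, 1)
	this.sumParameters(1, m+1, &res, done) // sum all rows of Xs
	<-done
	return res.Scalar(complex(1.0/float64(m), 0)) // divide by m to get the mean
}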
// Variance_sum accumulates the element-wise squared deviations
// (mean - x_i) ∘ (mean - x_i) over the rows of this.Xs in the half-open range
// [i0, i1), recursively splitting into goroutines. Each row's deviation
// (mean - x_i) is also stored in the corresponding row of sustract.
func (this *TrainingSet) Variance_sum(i0, i1 int, mean *Matrix.Matrix, res **Matrix.Matrix, sustract *Matrix.Matrix, done chan<- bool) {
	di := i1 - i0
	if di >= THRESHOLD {
		mi := i0 + di/2
		done2 := make(chan bool, 2)
		res1 := Matrix.NullMatrixP(1, this.Xs.GetNColumns())
		res2 := Matrix.NullMatrixP(1, this.Xs.GetNColumns())
		go this.Variance_sum(i0, mi, mean, &res1, sustract, done2)
		// The second half accumulates into res2; writing both halves into
		// res1 would race and lose one partial result.
		go this.Variance_sum(mi, i1, mean, &res2, sustract, done2)
		<-done2
		<-done2
		SP, _ := Matrix.Sum(res1, res2)
		*res = SP
	} else {
		for i := i0; i < i1; i++ {
			xsi := this.Xs.GetRow(i)
			Sustract, _ := Matrix.Sustract(mean, xsi)
			Square := Matrix.DotMultiplication(Sustract, Sustract)
			sustract.SetRow(i, Sustract)
			SP, _ := Matrix.Sum(Square, *res)
			*res = SP
		}
	}
	done <- true
}
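// Usage sketch (hypothetical, following the same conventions as MeanSketch
// above): the population variance of the training set, assuming mean is a
// 1×n row vector, such as the one MeanSketch returns.
func (this *TrainingSet) VarianceSketch(mean *Matrix.Matrix) *Matrix.Matrix {
	m := this.Xs.GetMRows()
	res := Matrix.NullMatrixP(1, this.Xs.GetNColumns())
	sustract := Matrix.NullMatrixP(m, this.Xs.GetNColumns()) // receives the per-row deviations
	done := make(chan bool, 1)
	this.Variance_sum(1, m+1, mean, &res, sustract, done)
	<-done
	return res.Scalar(complex(1.0/float64(m), 0)) // Σ (mean - x_i)² / m
}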
// UpdateWeights applies one momentum step to every weight matrix:
// W += -η·Δ + α·Δ1, where Δ holds the gradients accumulated by
// BackPropagation and Δ1 the previous step's gradients. When
// changeBestWeights is set, the current weights are recorded as the best
// found so far before being updated.
func (this *ANN) UpdateWeights(length float64, changeBestWeights bool) {
	for i := 0; i < len(this.Weights); i++ {
		if changeBestWeights {
			this.BestWeightsFound[i] = this.Weights[i]
		}
		D, _ := Matrix.Sum(this.Δ[i].Scalar(complex(-this.η, 0)), this.Δ1[i].Scalar(complex(this.α, 0)))
		this.Weights[i], _ = Matrix.Sum(this.Weights[i], D)
	}
}
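// Bookkeeping sketch (an assumption: this section does not show how Δ and Δ1
// are rotated between steps). For the momentum term α·Δ1 in UpdateWeights to
// refer to the previous step's gradient, the accumulators would be swapped
// and cleared after each update, roughly like this:
func RotateDeltasSketch(net *ANN) {
	for i := range net.Δ {
		net.Δ1[i] = net.Δ[i] // current gradient becomes "previous" for the next step
		net.Δ[i] = Matrix.NullMatrixP(net.Δ[i].GetMRows(), net.Δ[i].GetNColumns())
	}
}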
// BackPropagation propagates the error of one training example backwards
// through the network, accumulating the cost in AcumatedError and the
// per-layer weight gradients in Δ.
func (this *ANN) BackPropagation(As, AsDerviate *[](*Matrix.Matrix), ForwardOutput *Matrix.Matrix, Y *Matrix.Matrix, flen float64) {
	// Error term of the output layer.
	ð := this.DerviateCostFunction(ForwardOutput, Y)
	this.ð[len(this.ð)-1] = ð
	this.AcumatedError, _ = Matrix.Sum(this.CostFunction(ForwardOutput, Y), this.AcumatedError)

	for i := len(this.Weights) - 1; i >= 0; i-- {
		A := (*As)[i]
		Aderviate := (*AsDerviate)[i]

		// For hidden layers, drop the row that corresponds to the bias unit
		// before propagating the error term; the output layer has none.
		var ðtemp *Matrix.Matrix
		if i == len(this.Weights)-1 {
			ðtemp = this.ð[i+1].Transpose()
		} else {
			ðtemp = this.ð[i+1].MatrixWithoutLastRow().Transpose()
		}

		// ð(i) = (W(i)·ð(i+1)) ∘ A'(i), with a bias row appended to A'(i).
		Product := Matrix.Product(this.Weights[i], ðtemp.Transpose())
		this.ð[i] = Matrix.DotMultiplication(Product, Aderviate.AddRowsToDown(Matrix.I(1)))

		// Gradient with respect to the weights: Δ(i) += A(i)·ð(i+1)ᵀ.
		Dw := Matrix.Product(A, ðtemp)
		this.Δ[i], _ = Matrix.Sum(this.Δ[i], Dw)
	}
}
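// Epoch sketch (an assumption about the surrounding API, not original code):
// one pass over the training set, accumulating gradients with BackPropagation
// and then applying them with UpdateWeights. The ForwardPropagation method
// and its return values are hypothetical, inferred from the parameter shapes
// of BackPropagation above.
func TrainEpochSketch(net *ANN, inputs, targets []*Matrix.Matrix) {
	flen := float64(len(inputs))
	for k := range inputs {
		// Hypothetical forward pass returning the activations (As), their
		// derivatives (AsDerviate) and the network output for one example.
		As, AsDerviate, out := net.ForwardPropagation(inputs[k])
		net.BackPropagation(&As, &AsDerviate, out, targets[k], flen)
	}
	// Apply the accumulated Δ with momentum; true records the current
	// weights as the best found so far.
	net.UpdateWeights(flen, true)
}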
// GradientDescent fits the hypothesis f to the training set ts by batch
// gradient descent, iterating until the change between consecutive parameter
// vectors (measured by the Frobenius norm) falls below Tolerance.
func GradientDescent(alpha complex128, Tolerance complex128, ts *TrainingSet, f func(x complex128) complex128) *Hypothesis {
	n := ts.Xs.GetNColumns()
	m := ts.Xs.GetMRows()

	ts.AddX0() // add the bias feature x0, with value 1, to every training example

	thetaP := Matrix.NullMatrixP(1, n+1) // initialize the parameters theta to 0

	var h1 Hypothesis
	h1.H = f
	h1.ThetaParameters = thetaP

	Error := complex(1.0, 0)
	it := 1

	// Use the initial cost J(θ) = (h(x)-y)ᵀ(h(x)-y) / 2m to pick a starting
	// learning rate, overriding the alpha that was passed in.
	diferencia, diferenciaT := h1.Parallel_DiffH1Ys(ts)
	jt := Matrix.Product(diferenciaT, diferencia).Scalar(1 / complex(2.0*float64(m), 0.0)).GetValue(1, 1)
	alpha = 1 / jt

	for cmplx.Abs(Error) >= cmplx.Abs(Tolerance) { // until the parameters converge
		ThetaPB := h1.ThetaParameters.Copy() // previous theta, kept for the error calculation

		_, diffT := h1.Parallel_DiffH1Ys(ts)    // h(x) - y
		product := Matrix.Product(diffT, ts.Xs) // Σ (h(x_i)-y_i)·x_ij in matrix form
		h1.Sum = product

		alpha_it := alpha / cmplx.Sqrt(complex(float64(it), 0.0)) // decay the learning rate as α/√it

		// θ = θ - (α/m)·Σ (h(x_i)-y_i)·x_ij : update the parameters
		scalar := product.Scalar(-alpha_it / complex(float64(m), 0.0))
		ThetaTemp, _ := Matrix.Sum(h1.ThetaParameters, scalar)
		h1.ThetaParameters = ThetaTemp

		// The error is the Frobenius norm of the difference between
		// consecutive theta vectors.
		diffError, _ := Matrix.Sustract(ThetaPB, h1.ThetaParameters)
		Error = complex(diffError.FrobeniusNorm(), 0)

		it++
	}
	h1.M = m
	return &h1
}
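// Usage sketch (hypothetical): fitting a plain linear hypothesis with
// GradientDescent. Only the GradientDescent signature is taken from the code
// above; the helper name FitLinearSketch and the chosen tolerance are
// illustrative.
func FitLinearSketch(ts *TrainingSet) *Hypothesis {
	identity := func(x complex128) complex128 { return x } // h(θᵀx) = θᵀx, i.e. linear regression
	// alpha is recomputed from the initial cost inside GradientDescent, so
	// the value passed here serves only as a placeholder.
	return GradientDescent(complex(0.01, 0), complex(1e-6, 0), ts, identity)
}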