Example #1
File: ANN.go Project: eddytrex/AIgo
//TODO: the activation function and its derivative should be more general, e.g. to implement soft-max
func (this *ANN) ForwardPropagation(In *Matrix.Matrix) (As, AsDerviate *([]*Matrix.Matrix), Output *Matrix.Matrix) {
	if In.GetMRows() == this.Inputs && In.GetNColumns() == 1 {
		As1 := make([]*Matrix.Matrix, len(this.Weights)+1)
		AsDerviate1 := make([]*Matrix.Matrix, len(this.Weights)+1)

		As = &As1
		AsDerviate = &AsDerviate1

		sTemp := In.Transpose()

		// add a new column for the bias weight
		sTemp = sTemp.AddColumn(Matrix.I(1))

		wholeInput := sTemp.Copy()
		As1[0] = sTemp.Transpose()

		// derivative of the activation over the raw input
		//sutract, _ := Matrix.Sustract(Matrix.OnesMatrix(As1[0].GetMRows(), 1), As1[0])
		//derivate := Matrix.DotMultiplication(As1[0], sutract)

		//derivate := wholeInput.Apply(this.Derivate)
		derivate := this.DarivateActivationLayer(wholeInput)

		AsDerviate1[0] = derivate.Transpose()

		for i := 0; i < len(this.Weights); i++ {
			sTemp = Matrix.Product(sTemp, (this.Weights[i]))

			// apply the activation function
			wholeInput := sTemp.Copy()
			sTemp = this.ActivationLayer(sTemp)

			//sTemp = sTemp.Apply(this.Activation)

			// add a new column for the bias weight
			sTemp = sTemp.AddColumn(Matrix.I(1))
			(*As)[i+1] = sTemp.Transpose()

			// derivative of the activation over the raw (pre-activation) input
			//sutract, _ := Matrix.Sustract(Matrix.OnesMatrix((*As)[i+1].GetMRows(), 1), (*As)[i+1])
			//derivate := Matrix.DotMultiplication((*As)[i+1], sutract)

			derivate := this.DarivateActivationLayer(wholeInput)
			//derivate := wholeInput.Apply(this.Derivate)

			(*AsDerviate)[i+1] = derivate.Transpose()

		}
		Asf := sTemp.Copy()

		//Asf = Asf.AddColumn(Matrix.I(1))
		(*As)[len(As1)-1] = Asf.Transpose()
		Output = sTemp.Transpose().MatrixWithoutLastRow()
		return As, AsDerviate, Output
	}
	return nil, nil, nil
}
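For context: ForwardPropagation pushes the input through each weight matrix, applies the activation, appends a bias column, and records both the activations (As) and their derivatives (AsDerviate) for later use in backpropagation. Below is a minimal, self-contained sketch of a single such layer using plain float64 slices; the sigmoid and all names here are illustrative assumptions, since the project's ActivationLayer implementation is not shown.

package main

import (
	"fmt"
	"math"
)

// sigmoid and its derivative; placeholders for the project's activation.
func sigmoid(z float64) float64  { return 1.0 / (1.0 + math.Exp(-z)) }
func sigmoidD(z float64) float64 { return sigmoid(z) * (1.0 - sigmoid(z)) }

// forwardLayer computes one dense layer, returning both the activation and
// its derivative, mirroring how ForwardPropagation keeps As and AsDerviate.
func forwardLayer(x []float64, w [][]float64) (a, aD []float64) {
	in := append(append([]float64{}, x...), 1.0) // bias input, like AddColumn(Matrix.I(1))
	a = make([]float64, len(w))
	aD = make([]float64, len(w))
	for j, row := range w {
		var z float64
		for k, v := range row {
			z += v * in[k]
		}
		a[j] = sigmoid(z)
		aD[j] = sigmoidD(z) // derivative of the raw input, as DarivateActivationLayer does
	}
	return a, aD
}

func main() {
	w := [][]float64{{0.5, -0.3, 0.1}, {0.8, 0.2, -0.4}} // 2 units, 2 inputs + bias
	a, aD := forwardLayer([]float64{1.0, 2.0}, w)
	fmt.Println(a, aD)
}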
Example #2
func NormalEquation(ts *TrainingSet) *Hypothesis {
	ts.AddX0() // add the x0 = 1 bias column to every training example

	Xst := ts.Xs.Transpose()
	mult := Matrix.Product(Xst, ts.Xs) // XᵀX

	pinv := mult.PInverse() // (XᵀX)⁺, the pseudoinverse, robust to a singular XᵀX

	xT := Matrix.Product(pinv, Xst)   // (XᵀX)⁺·Xᵀ
	theta := Matrix.Product(xT, ts.Y) // θ = (XᵀX)⁺·Xᵀ·y

	var h1 Hypothesis

	h1.ThetaParameters = theta
	return &h1

}
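NormalEquation computes the closed-form least-squares solution θ = (XᵀX)⁺·Xᵀ·y, using the pseudoinverse so a singular XᵀX still yields a solution. A minimal sketch of the same algebra for one feature plus the x0 bias column, with the 2×2 inverse written out by hand (an assumption for brevity; the real code handles any dimension through PInverse):

package main

import "fmt"

func main() {
	X := [][]float64{{1, 1}, {1, 2}, {1, 3}} // x0 column plus one feature
	y := []float64{2, 3, 4}                  // exact fit: θ = (1, 1)

	// accumulate XᵀX (2×2, entries a b / c d) and Xᵀy (entries e, f)
	var a, b, c, d, e, f float64
	for i := range X {
		a += X[i][0] * X[i][0]
		b += X[i][0] * X[i][1]
		d += X[i][1] * X[i][1]
		e += X[i][0] * y[i]
		f += X[i][1] * y[i]
	}
	c = b // XᵀX is symmetric

	// θ = (XᵀX)⁻¹·Xᵀy via the explicit 2×2 inverse
	det := a*d - b*c
	theta0 := (d*e - b*f) / det
	theta1 := (-c*e + a*f) / det
	fmt.Println(theta0, theta1) // 1 1
}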
Example #3
File: ANN.go Project: eddytrex/AIgo
func (this *ANN) BackPropagation(As, AsDerviate *[](*Matrix.Matrix), ForwardOutput *Matrix.Matrix, Y *Matrix.Matrix, flen float64) {
	// ð of the output layer: the derivative of the cost with respect to the network output
	ð := this.DerviateCostFunction(ForwardOutput, Y)

	this.ð[len(this.ð)-1] = ð

	this.AcumatedError, _ = Matrix.Sum(this.CostFunction(ForwardOutput, Y), this.AcumatedError)

	for i := len(this.Weights) - 1; i >= 0; i-- {
		A := (*As)[i]
		Aderviate := (*AsDerviate)[i]

		var ðtemp *Matrix.Matrix
		if i == len(this.Weights)-1 {
			ðtemp = this.ð[i+1].Transpose()
		} else {
			// drop the bias row before propagating ð backwards
			ðtemp = this.ð[i+1].MatrixWithoutLastRow().Transpose()
		}

		// ð for layer i: (W_i · ð_{i+1}) ⊙ A'_i, with a bias row appended to the derivative
		Product := Matrix.Product(this.Weights[i], ðtemp.Transpose())
		this.ð[i] = Matrix.DotMultiplication(Product, Aderviate.AddRowsToDown(Matrix.I(1)))

		// accumulate the gradient with respect to the weights: Δ_i += A_i · ð_{i+1}
		Dw := Matrix.Product(A, ðtemp)

		this.Δ[i], _ = Matrix.Sum(this.Δ[i], Dw)
	}

}
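BackPropagation is the standard ð recursion: the output-layer ð is the cost derivative, each earlier ð is (W_i · ð_{i+1}) multiplied element-wise with the stored activation derivative, and the weight gradient accumulates as Δ_i += A_i · ð_{i+1}ᵀ. A minimal sketch of one backward step with plain slices; all names are illustrative, not from the project:

package main

import "fmt"

// backLayer propagates ð one layer back: ð_i = (W · ð_next) ⊙ a′,
// mirroring the Matrix.Product followed by Matrix.DotMultiplication above.
func backLayer(w [][]float64, deltaNext, aDeriv []float64) []float64 {
	delta := make([]float64, len(aDeriv))
	for j := range delta {
		var s float64
		for k := range deltaNext {
			s += w[j][k] * deltaNext[k] // W · ð_next
		}
		delta[j] = s * aDeriv[j] // element-wise product with the stored derivative
	}
	return delta
}

func main() {
	w := [][]float64{{0.5, -0.3}, {0.8, 0.2}} // 2 units feeding 2 outputs
	deltaNext := []float64{0.1, -0.2}
	aDeriv := []float64{0.25, 0.19}
	fmt.Println(backLayer(w, deltaNext, aDeriv))
}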
Example #4
func (this *Hypothesis) Evaluate(x *Matrix.Matrix) (complex128, error) {
	// prepend the bias entry x0 = 1 to the feature row
	x0 := Matrix.NullMatrixP(1, 1)
	x0.SetValue(1, 1, 1)
	x0 = x0.AddColumn(x)
	if x0.GetNColumns() == this.ThetaParameters.GetNColumns() {

		xt := x0.Transpose()

		res := Matrix.Product(this.ThetaParameters, xt)

		return this.H(res.GetValue(1, 1)), nil
	}
	return 0, errors.New("the number of input features does not match the parameters of the hypothesis")
}
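Evaluate prepends the bias entry x0 = 1 and returns h(θᵀx). A minimal sketch of the same computation with complex parameters; the identity function used for h is a placeholder assumption, since a real Hypothesis carries whatever activation was supplied:

package main

import "fmt"

func main() {
	theta := []complex128{0.5, 2.0} // θ0 (bias) and θ1
	x := []complex128{3.0}
	h := func(z complex128) complex128 { return z } // placeholder hypothesis function

	z := theta[0] // x0 = 1 prepended, as Evaluate does with AddColumn
	for i, xi := range x {
		z += theta[i+1] * xi
	}
	fmt.Println(h(z)) // (6.5+0i)
}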
Example #5
func (this *Hypothesis) DiffH1Ys(Ts TrainingSet) *Matrix.Matrix {

	m := Ts.Xs.GetMRows()

	hx := Matrix.NullMatrixP(m, 1)

	if this.ThetaParameters.GetNColumns() == Ts.Xs.GetNColumns() {
		for i := 1; i <= m; i++ {
			xi := Ts.Xs.GetRow(i)

			Thi := Matrix.Product(xi, this.ThetaParameters.Transpose())

			// residual h(θᵀxᵢ) − yᵢ, applying the hypothesis function this.H
			hx.SetValue(i, 1, this.H(Thi.GetValue(1, 1))-Ts.Y.GetValue(1, i))

		}
		return hx
	}
	return nil
}
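DiffH1Ys builds the residual vector rᵢ = h(θᵀxᵢ) − yᵢ over all m training rows. The same computation as a self-contained sketch with plain slices (names illustrative):

package main

import "fmt"

// residuals computes rᵢ = h(θᵀxᵢ) − yᵢ for every training row,
// the quantity DiffH1Ys stores in its m×1 result matrix.
func residuals(X [][]float64, y, theta []float64, h func(float64) float64) []float64 {
	r := make([]float64, len(X))
	for i, xi := range X {
		var z float64
		for j, v := range xi {
			z += theta[j] * v
		}
		r[i] = h(z) - y[i]
	}
	return r
}

func main() {
	X := [][]float64{{1, 1}, {1, 2}} // x0 column plus one feature
	y := []float64{2, 3}
	theta := []float64{1, 1}
	id := func(z float64) float64 { return z } // placeholder hypothesis
	fmt.Println(residuals(X, y, theta, id))    // [0 0]
}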
Example #6
func (this *Hypothesis) part_DiffH1Ys(i0, i1 int, Ts *TrainingSet, Ret *Matrix.Matrix, RetT *Matrix.Matrix, done chan<- bool) {
	di := i1 - i0

	if di >= THRESHOLD && runtime.NumGoroutine() < maxGoRoutines {
		done2 := make(chan bool, 2) // one signal per half

		mi := i0 + di/2
		go this.part_DiffH1Ys(i0, mi, Ts, Ret, RetT, done2)
		go this.part_DiffH1Ys(mi+1, i1, Ts, Ret, RetT, done2) // mi+1: the row ranges are inclusive
		<-done2 // join both halves
		<-done2
	} else {
		for i := i0; i <= i1; i++ {
			xi := Ts.Xs.GetRow(i)

			Thi := Matrix.Product(xi, this.ThetaParameters.Transpose())
			temp := this.H(Thi.GetValue(1, 1)) - Ts.Y.GetValue(1, i)
			Ret.SetValue(i, 1, temp)  // column vector of residuals
			RetT.SetValue(1, i, temp) // and its transpose, filled in the same pass
		}
	}
	done <- true
}
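part_DiffH1Ys uses a recursive fork/join pattern: while the row range is large and goroutines are cheap, split it in half and recurse in two goroutines; otherwise do the work serially; either way, signal completion on the channel. A generic sketch of the same pattern, with illustrative constants and a half-open range:

package main

import (
	"fmt"
	"runtime"
)

const threshold = 4
const maxGoroutines = 64

// parallelFor applies work(i) for i in [i0, i1), splitting the range across
// goroutines while it is large and the goroutine count stays reasonable.
func parallelFor(i0, i1 int, work func(i int), done chan<- bool) {
	if i1-i0 >= threshold && runtime.NumGoroutine() < maxGoroutines {
		done2 := make(chan bool, 2)
		mid := i0 + (i1-i0)/2
		go parallelFor(i0, mid, work, done2)
		go parallelFor(mid, i1, work, done2)
		<-done2 // join both halves before signalling our own caller
		<-done2
	} else {
		for i := i0; i < i1; i++ {
			work(i)
		}
	}
	done <- true
}

func main() {
	out := make([]int, 16)
	done := make(chan bool, 1)
	parallelFor(0, len(out), func(i int) { out[i] = i * i }, done)
	<-done
	fmt.Println(out)
}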
Example #7
func GradientDescent(alpha complex128, Tolerance complex128, ts *TrainingSet, f func(x complex128) complex128) *Hypothesis {
	n := ts.Xs.GetNColumns()
	m := ts.Xs.GetMRows()

	ts.AddX0() // add the parameter x0, with value 1, to every training example

	thetaP := Matrix.NullMatrixP(1, n+1) // initialize the parameters θ to 0
	//thetaP := Matrix.RandomMatrix(1, n+1) // alternatively, start from random θ

	var h1 Hypothesis

	h1.H = f
	h1.ThetaParameters = thetaP

	Error := complex(1.0, 0)

	it := 1

	// scale the step size by the initial cost; note this overwrites the alpha argument
	diferencia, diferenciaT := h1.Parallel_DiffH1Ys(ts)
	jt := Matrix.Product(diferenciaT, diferencia).Scalar(1 / complex(2.0*float64(m), 0.0)).GetValue(1, 1)

	alpha = 1 / jt

	for cmplx.Abs(Error) >= cmplx.Abs(Tolerance) { // iterate until the update falls below Tolerance

		ThetaPB := h1.ThetaParameters.Copy() // keep a copy for the convergence check

		//diff:=h1.DiffH1Ys(ts)
		_, diffT := h1.Parallel_DiffH1Ys(ts) // h(x) − y

		product := Matrix.Product(diffT, ts.Xs) // Σ (h(xᵢ)−yᵢ)·xᵢⱼ in matrix form

		h1.Sum = product

		alpha_it := alpha / (cmplx.Sqrt(complex(float64(it), 0.0))) // shrink the step size as 1/√it

		scalar := product.Scalar(-alpha_it / complex(float64(m), 0.0))

		ThetaTemp, _ := Matrix.Sum(h1.ThetaParameters, scalar) // θ = θ − (α/m)·Σ(h(xᵢ)−yᵢ)·xᵢⱼ

		h1.ThetaParameters = ThetaTemp

		diffError, _ := Matrix.Sustract(ThetaPB, h1.ThetaParameters) // change in θ between iterations

		Error = complex(diffError.FrobeniusNorm(), 0) // Frobenius norm of the change
		//Error=diffError.InfinityNorm()              // infinity norm, as an alternative
		it++
	}
	h1.M = m
	return &h1
}
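GradientDescent repeats the update θ ← θ − (α/m)·Σ(h(xᵢ)−yᵢ)·xᵢⱼ with the step size shrunk by 1/√it each iteration, stopping when the change in θ falls below Tolerance. A minimal sketch of the same loop for a real-valued one-parameter fit (data and constants illustrative):

package main

import (
	"fmt"
	"math"
)

func main() {
	xs := []float64{1, 2, 3, 4}
	ys := []float64{2, 4, 6, 8} // generated by θ = 2
	theta, alpha, tol := 0.0, 0.1, 1e-9
	m := float64(len(xs))

	for it := 1; ; it++ {
		var grad float64
		for i := range xs {
			grad += (theta*xs[i] - ys[i]) * xs[i] // (h(xᵢ)−yᵢ)·xᵢ
		}
		step := alpha / math.Sqrt(float64(it)) // decaying step, as alpha_it above
		delta := -step / m * grad
		theta += delta
		if math.Abs(delta) < tol { // ‖Δθ‖, the Frobenius norm in one dimension
			break
		}
	}
	fmt.Printf("theta ≈ %.6f\n", theta)
}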