Example #1
// Variance_sum accumulates Σ (mean − xᵢ)² over rows i0..i1 (inclusive, 1-based)
// into *res, storing each centered row in sustract, and splits the range across
// goroutines while it spans at least THRESHOLD rows.
func (this *TrainingSet) Variance_sum(i0, i1 int, mean *Matrix.Matrix, res **Matrix.Matrix, sustract *Matrix.Matrix, done chan<- bool) {
	di := i1 - i0

	if di >= THRESHOLD {
		mi := i0 + di/2
		done2 := make(chan bool, THRESHOLD)

		res1 := Matrix.NullMatrixP(1, this.Xs.GetNColumns())
		res2 := Matrix.NullMatrixP(1, this.Xs.GetNColumns())

		// Split inclusively at mi/mi+1 so row mi is processed exactly once,
		// and accumulate the two halves into separate result matrices.
		go this.Variance_sum(i0, mi, mean, &res1, sustract, done2)
		go this.Variance_sum(mi+1, i1, mean, &res2, sustract, done2) // was &res1 for both halves

		<-done2
		<-done2

		SP, _ := Matrix.Sum(res1, res2)
		*res = SP

	} else {
		for i := i0; i <= i1; i++ {
			xsi := this.Xs.GetRow(i)
			Sustract, _ := Matrix.Sustract(mean, xsi)
			Square := Matrix.DotMultiplication(Sustract, Sustract)

			sustract.SetRow(i, Sustract)

			SP, _ := Matrix.Sum(Square, *res)
			*res = SP
		}
	}
	done <- true
}
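The fork-join recursion above is independent of the Matrix types. Here is a self-contained sketch of the same pattern on a plain []float64 (illustrative names; it uses a half-open range, which sidesteps the double-counted midpoint the inclusive version has to guard against):

package main

import "fmt"

const threshold = 4 // split ranges at least this wide (illustrative value)

// parallelSum accumulates the sum of xs[i0:i1) into *res,
// halving the range across goroutines while it is wide enough.
func parallelSum(xs []float64, i0, i1 int, res *float64, done chan<- bool) {
	if i1-i0 >= threshold {
		mi := i0 + (i1-i0)/2
		var left, right float64
		done2 := make(chan bool, 2)
		go parallelSum(xs, i0, mi, &left, done2)
		go parallelSum(xs, mi, i1, &right, done2)
		<-done2
		<-done2
		*res = left + right
	} else {
		for i := i0; i < i1; i++ {
			*res += xs[i]
		}
	}
	done <- true
}

func main() {
	xs := []float64{1, 2, 3, 4, 5, 6, 7, 8}
	var sum float64
	done := make(chan bool)
	go parallelSum(xs, 0, len(xs), &sum, done)
	<-done
	fmt.Println(sum) // 36
}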
Example #2
// sumParameters accumulates the row-wise sum of Xs over rows i0..i1
// (inclusive, 1-based) into *Res, forking while the range is wide enough.
func (this *TrainingSet) sumParameters(i0, i1 int, Res **Matrix.Matrix, done chan<- bool) {
	di := i1 - i0

	if di >= THRESHOLD {
		done2 := make(chan bool, THRESHOLD)
		mi := i0 + di/2

		res1 := Matrix.NullMatrixP(1, this.Xs.GetNColumns())
		res2 := Matrix.NullMatrixP(1, this.Xs.GetNColumns())

		// Split at mi/mi+1 so row mi is summed exactly once by the inclusive loops.
		go this.sumParameters(i0, mi, &res1, done2)
		go this.sumParameters(mi+1, i1, &res2, done2)

		<-done2
		<-done2

		SP, _ := Matrix.Sum(res1, res2)

		*Res = SP

	} else {
		for i := i0; i <= i1; i++ {

			xsi := this.Xs.GetRow(i)
			SP, _ := Matrix.Sum(*Res, xsi)
			*Res = SP
		}
	}

	done <- true

}
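The done-channel bookkeeping can also be written with sync.WaitGroup, the more idiomatic Go signal for "wait for these goroutines to finish". A self-contained sketch of the same divide-and-conquer shape (illustrative names, not part of the library):

package main

import (
	"fmt"
	"sync"
)

const threshold = 4 // illustrative split width

// sumRange sums xs[i0:i1) into *res, forking two workers per wide range.
func sumRange(xs []float64, i0, i1 int, res *float64) {
	if i1-i0 >= threshold {
		mi := i0 + (i1-i0)/2
		var left, right float64
		var wg sync.WaitGroup
		wg.Add(2)
		go func() { defer wg.Done(); sumRange(xs, i0, mi, &left) }()
		go func() { defer wg.Done(); sumRange(xs, mi, i1, &right) }()
		wg.Wait()
		*res = left + right
		return
	}
	for i := i0; i < i1; i++ {
		*res += xs[i]
	}
}

func main() {
	xs := []float64{1, 2, 3, 4, 5}
	var sum float64
	sumRange(xs, 0, len(xs), &sum)
	fmt.Println(sum) // 15
}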
Example #3
// FFT_ct computes the N-point FFT of this matrix's rows by delegating to the
// helper FFT_aux; tf holds the precomputed twiddle factors.
func FFT_ct(this *Matrix.Matrix, N, skip int, tf *[]complex128) *Matrix.Matrix {

	Xr := Matrix.NullMatrixP(N, this.GetNColumns())
	RowTemp := Matrix.NullMatrixP(1, this.GetNColumns())

	FFT_aux(this, Xr, RowTemp, N, skip, tf)
	return Xr
}
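FFT_ct leaves the twiddle factors to the caller. Assuming the usual Cooley-Tukey convention, tf[j] = e^(−2πi·j/N) for j up to N/2 − 1 (the index range the skip·k lookups in the FFT examples below address), they can be generated like this (the helper name is ours):

package main

import (
	"fmt"
	"math"
	"math/cmplx"
)

// twiddleFactors returns tf[j] = exp(−2πi·j/N) for j = 0..N/2−1,
// the range addressed by the (*tf)[skip*k] lookups in the FFT examples.
func twiddleFactors(N int) []complex128 {
	tf := make([]complex128, N/2)
	for j := range tf {
		tf[j] = cmplx.Exp(complex(0, -2*math.Pi*float64(j)/float64(N)))
	}
	return tf
}

func main() {
	tf := twiddleFactors(8)
	fmt.Println(tf[1]) // ≈ (0.7071-0.7071i)
}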
Example #4
// Parallel_DiffH1Ys computes the residuals h(xᵢ) − yᵢ for every row of the
// training set, returning them as an m×1 column (hx) and its 1×m transpose (hxt).
func (this *Hypothesis) Parallel_DiffH1Ys(Ts *TrainingSet) (*Matrix.Matrix, *Matrix.Matrix) {
	m := Ts.Xs.GetMRows()
	hx := Matrix.NullMatrixP(m, 1)
	hxt := Matrix.NullMatrixP(1, m)

	if this.ThetaParameters.GetNColumns() == Ts.Xs.GetNColumns() {
		done := make(chan bool)
		go this.part_DiffH1Ys(1, m, Ts, hx, hxt, done)
		<-done
	}
	return hx, hxt
}
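GradientDescent (Example #11) consumes both return values to form the scalar cost, multiplying hxt by hx and scaling by 1/(2m). In the usual notation the quantity computed there is:

$$J(\theta) = \frac{1}{2m}\sum_{i=1}^{m}\bigl(h_\theta(x_i) - y_i\bigr)^2$$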
Example #5
// Variance returns the elementwise sample variance of the rows of Xs,
// together with the matrix of centered rows and the mean that was used.
func (this *TrainingSet) Variance() (V, Sustract, Mean *Matrix.Matrix) {
	mean := this.Mean()

	sum := Matrix.NullMatrixP(1, this.Xs.GetNColumns())
	sustract := Matrix.NullMatrixP(this.Xs.GetMRows(), this.Xs.GetNColumns())

	done := make(chan bool)
	go this.Variance_sum(1, this.Xs.GetMRows(), mean, &sum, sustract, done) // must run as a goroutine: the final done <- true would otherwise deadlock on the unbuffered channel
	<-done

	return sum.Scalar(1 / (complex(float64(this.Xs.GetMRows()), 0) - 1.0)), sustract, mean
}
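The final Scalar call applies Bessel's correction, so per column the method returns the sample variance:

$$V_j = \frac{1}{m-1}\sum_{i=1}^{m}\bigl(\bar{x}_j - x_{ij}\bigr)^2$$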
Example #6
// FFT_ct2 is an iterative (breadth-first) Cooley-Tukey FFT over the rows of
// this matrix, ping-ponging partial results between Xr and Scratch.
func FFT_ct2(this *Matrix.Matrix, N, skip int, tf *[]complex128) *Matrix.Matrix {

	Xr := Matrix.NullMatrixP(N, this.GetNColumns())
	Scratch := Matrix.NullMatrixP(N, this.GetNColumns())

	var E, D, Xp, Xstart *Matrix.Matrix
	evenIteration := N%2 == 0

	if N == 1 {
		Xr.SetRow(1, this.GetReferenceRow(1))
		return Xr // a 1-point FFT is just the input row; Scratch would still be zero
	}

	E = this

	for n := 1; n < N; n *= 2 {

		if evenIteration {
			Xstart = Scratch
		} else {
			Xstart = Xr
		}

		skip := N / (2 * n) // shadows the parameter: the stride halves as n doubles
		Xp = Xstart

		for k := 0; k != n; k++ {
			for m := 0; m != skip; m++ {
				D = E.MatrixWithoutFirstRows(skip)
				D.ScalarRow(1, (*tf)[skip*k])

				sr, rr, _ := Matrix.Sum_Sustract(E.GetReferenceRow(1), D.GetReferenceRow(1))

				Xp.SetRow(1, sr)
				Xp.SetRow(N/2+1, rr)

				Xp = Xp.MatrixWithoutFirstRows(1)
				E = E.MatrixWithoutFirstRows(1)
			}
			E = E.MatrixWithoutFirstRows(skip)
		}
		E = Xstart
		evenIteration = !evenIteration
	}
	return Scratch
}
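Each SetRow pair in the inner loop is one Cooley-Tukey butterfly: with E the current even-half row, D the twiddled odd-half row, and t = tf[skip·k], Sum_Sustract produces both outputs at once:

$$X_k = E_k + t\,D_k, \qquad X_{k+N/2} = E_k - t\,D_k$$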
Example #7
// Mean returns the 1×n row-wise mean of the training inputs Xs.
func (this *TrainingSet) Mean() *Matrix.Matrix {
	sum := Matrix.NullMatrixP(1, this.Xs.GetNColumns())

	done := make(chan bool)

	go this.sumParameters(1, this.Xs.GetMRows(), &sum, done)

	<-done

	return sum.Scalar(1.0 / (complex(float64(this.Xs.GetMRows()), 0.0)))
}
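So per column, Mean computes the average of the m training rows via one parallel sum followed by a single scalar multiply:

$$\bar{x}_j = \frac{1}{m}\sum_{i=1}^{m} x_{ij}$$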
Example #8
// Evaluate prepends the bias term x0 = 1 to the sample x and returns H(θ·xᵀ).
func (this *Hypothesis) Evaluate(x *Matrix.Matrix) (complex128, error) {
	x0 := Matrix.NullMatrixP(1, 1)
	x0.SetValue(1, 1, 1)
	x0 = x0.AddColumn(x)
	if x0.GetNColumns() == this.ThetaParameters.GetNColumns() {

		xt := x0.Transpose()

		res := Matrix.Product(this.ThetaParameters, xt)

		return this.H(res.GetValue(1, 1)), nil
	}
	return 0, errors.New("the number of parameters is not equal to the parameters of the hypothesis")
}
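A minimal usage sketch (a fragment, since the package's import path isn't shown in these examples); the logistic sigmoid is our illustrative choice of H, not something the library prescribes:

// Assumes the Matrix/Hypothesis APIs shown in these examples,
// plus "fmt" and "math/cmplx" from the standard library.
var h Hypothesis
h.H = func(z complex128) complex128 { return 1 / (1 + cmplx.Exp(-z)) } // logistic sigmoid
h.ThetaParameters = Matrix.NullMatrixP(1, 3) // θ0, θ1, θ2, all zero here

x := Matrix.NullMatrixP(1, 2) // one sample with two features
x.SetValue(1, 1, 2)
x.SetValue(1, 2, -1)

y, err := h.Evaluate(x) // Evaluate prepends the bias term x0 = 1 itself
if err == nil {
	fmt.Println(y) // σ(0) = 0.5 with all-zero θ
}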
Example #9
// DiffH1Ys is the serial counterpart of Parallel_DiffH1Ys: it returns the m×1
// vector of residuals h(xᵢ) − yᵢ, or nil when θ and Xs have mismatched widths.
func (this *Hypothesis) DiffH1Ys(Ts TrainingSet) *Matrix.Matrix {

	m := Ts.Xs.GetMRows()

	hx := Matrix.NullMatrixP(m, 1)

	if this.ThetaParameters.GetNColumns() == Ts.Xs.GetNColumns() {
		for i := 1; i <= m; i++ {
			xi := Ts.Xs.GetRow(i)

			Thi := Matrix.Product(xi, this.ThetaParameters.Transpose())

			hx.SetValue(i, 1, Thi.GetValue(1, 1)-Ts.Y.GetValue(1, i))

		}
		return hx
	}
	return nil
}
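Each loop iteration stores one residual; with xᵢ the i-th input row and θ the 1×n parameter row, the returned column is:

$$hx_i = x_i\,\theta^{\mathsf T} - y_i, \qquad i = 1,\dots,m$$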
Example #10
// CreateANN builds a feed-forward network: one weight matrix per layer (with an
// extra row for the bias), plus the learning-rate and accumulator matrices the
// trainer needs. Act/Derivate and Cost/DCost supply the activation and cost
// functions and their derivatives; path is where weights are persisted as CSV.
func CreateANN(Inputs int, NeuronsByLayer []int, Act func(*Matrix.Matrix) *Matrix.Matrix, Derivate func(*Matrix.Matrix) *Matrix.Matrix, Cost func(*Matrix.Matrix, *Matrix.Matrix) *Matrix.Matrix, DCost func(*Matrix.Matrix, *Matrix.Matrix) *Matrix.Matrix, path string) ANN {

	var out ANN

	out.Weights = make([]*Matrix.Matrix, len(NeuronsByLayer))
	out.BestWeightsFound = make([]*Matrix.Matrix, len(NeuronsByLayer))
	out.LearningRates = make([]*Matrix.Matrix, len(NeuronsByLayer))

	out.Δ = make([]*Matrix.Matrix, len(NeuronsByLayer))
	out.Δ1 = make([]*Matrix.Matrix, len(NeuronsByLayer))

	out.ð = make([]*Matrix.Matrix, len(NeuronsByLayer)+1)

	out.Inputs = Inputs
	out.Outputs = NeuronsByLayer[len(NeuronsByLayer)-1]

	out.ActivationLayer = Act
	out.DarivateActivationLayer = Derivate

	out.CostFunction = Cost
	out.DerviateCostFunction = DCost
	out.PathWeightsInCSV = path
	m := Inputs
	for i := 0; i < (len(NeuronsByLayer)); i++ {

		n := NeuronsByLayer[i]

		// one extra row for the bias weights; these matrices need random initial values

		out.Weights[i] = Matrix.RandomRealMatrix(m+1, n, 1.2)
		out.BestWeightsFound[i] = Matrix.NullMatrixP(m+1, n)
		out.LearningRates[i] = Matrix.FixValueMatrix(m+1, n, 0.0001)

		out.ð[i] = Matrix.NullMatrixP(m+1, n) // NullMatrixP, to match the []*Matrix.Matrix element type

		out.Δ[i] = Matrix.NullMatrixP(m+1, n)
		out.Δ1[i] = Matrix.NullMatrixP(m+1, n)
		m = n

	}

	out.AcumatedError = Matrix.NullMatrixP(m, 1)
	out.MinimumErrorFound = Matrix.NullMatrixP(m, 1)
	out.AcumatedError1 = Matrix.NullMatrixP(m, 1)
	return out
}
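A sketch of wiring the constructor, with placeholder closures standing in for real activation and cost functions (a real caller would pass an elementwise sigmoid, its derivative, and e.g. a squared-error cost; the library's elementwise helpers aren't shown in these examples):

identity := func(m *Matrix.Matrix) *Matrix.Matrix { return m } // placeholder Act/Derivate
residual := func(a, b *Matrix.Matrix) *Matrix.Matrix { // placeholder Cost/DCost: a − b
	d, _ := Matrix.Sustract(a, b)
	return d
}

// 4 inputs, one hidden layer of 5 neurons, 2 outputs; weights persist to the CSV path.
nn := CreateANN(4, []int{5, 2}, identity, identity, residual, residual, "weights.csv")
fmt.Println(nn.Outputs) // 2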
Example #11
// GradientDescent fits the parameters θ of hypothesis f to the training set by
// batch gradient descent, iterating until the change in θ drops below Tolerance.
func GradientDescent(alpha complex128, Tolerance complex128, ts *TrainingSet, f func(x complex128) complex128) *Hypothesis {
	n := ts.Xs.GetNColumns()
	m := ts.Xs.GetMRows()

	//Xsc:=ts.Xs.Copy()

	ts.AddX0() // add the parameter x0, with value 1, to every element of the training set

	t := Matrix.NullMatrixP(1, n+1) // initialize the theta parameters to 0
	thetaP := t

	//thetaP:=Matrix.RandomMatrix(1,n+1)  // alternative: random initial theta values

	var h1 Hypothesis

	h1.H = f
	h1.ThetaParameters = thetaP

	Error := complex(1.0, 0)

	it := 1

	diferencia, diferenciaT := h1.Parallel_DiffH1Ys(ts)
	jt := Matrix.Product(diferenciaT, diferencia).Scalar(1/complex(2.0*float64(m), 0.0)).GetValue(1, 1)

	alpha = 1 / jt // overrides the caller's alpha: the initial cost sets the base step size

	for cmplx.Abs(Error) >= cmplx.Abs(Tolerance) { // until it converges

		ThetaPB := h1.ThetaParameters.Copy() // saved to measure this iteration's update

		//diff:=h1.DiffH1Ys(ts)
		_, diffT := h1.Parallel_DiffH1Ys(ts) //h(x)-y

		product := Matrix.Product(diffT, ts.Xs) //Sum( (hi(xi)-yi)*xij)  in matrix form

		h1.Sum = product

		alpha_it := alpha / cmplx.Sqrt(complex(float64(it), 0.0)) // decay the step size as 1/√it

		scalar := product.Scalar(-alpha_it / complex(float64(m), 0.0))

		//println("Delta", scalar.ToString())
		ThetaTemp, _ := Matrix.Sum(h1.ThetaParameters, scalar) // theta = theta − (alpha/m)·Sum((h(xᵢ)−yᵢ)·xᵢⱼ): update the parameters

		h1.ThetaParameters = ThetaTemp

		diffError, _ := Matrix.Sustract(ThetaPB, h1.ThetaParameters) // difference between successive theta vectors: the error measure

		Error = complex(diffError.FrobeniusNorm(), 0) // Frobenius norm
		//Error=diffError.InfinityNorm()              // infinity norm

		//println("->", h1.ThetaParameters.ToString())
		//println("Error", Error)
		/*if it > 10 {
			break
		}*/
		it++
	}
	h1.M = m
	return &h1
}
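Written out, the loop implements the batch update from the inline comments, with a step size decaying as 1/√it:

$$\theta_j \leftarrow \theta_j - \frac{\alpha_{it}}{m}\sum_{i=1}^{m}\bigl(h_\theta(x_i) - y_i\bigr)x_{ij}, \qquad \alpha_{it} = \frac{\alpha}{\sqrt{it}}$$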
Example #12
// FFT_ct3 is an experimental, partially parallel variant of FFT_ct2: the inner
// butterfly loop is extracted into Aux so halves of each block can run as
// goroutines. As written only the first half is launched (the second call is
// still commented out), so the transform it returns is incomplete.
func FFT_ct3(this *Matrix.Matrix, N, skip int, tf *[]complex128) *Matrix.Matrix {

	Xr := Matrix.NullMatrixP(N, this.GetNColumns())
	Scratch := Matrix.NullMatrixP(N, this.GetNColumns())

	var E, D, Xp, Xstart *Matrix.Matrix
	evenIteration := N%2 == 0

	if N == 1 {
		Xr.SetRow(1, this.GetReferenceRow(1))
		return Xr // a 1-point FFT is just the input row
	}

	E = this

	for n := 1; n < N; n *= 2 {

		if evenIteration {
			Xstart = Scratch
		} else {
			Xstart = Xr
		}

		skip := N / (2 * n) // shadows the parameter: the stride halves as n doubles
		Xp = Xstart

		for k := 0; k != n; k++ {

			var Aux = func(m0, m1 int, Xp, E, D *Matrix.Matrix, done chan<- bool) {

				println("-", m0)
				Xp = Xp.MatrixWithoutFirstRows(m0)
				E = E.MatrixWithoutFirstRows(m0)
				//D = E.MatrixWithoutFirstRows(skip)

				for m := m0; m < m1; m++ {
					D = E.MatrixWithoutFirstRows(skip)
					D.ScalarRow(1, (*tf)[skip*k])

					sr, rr, _ := Matrix.Sum_Sustract(E.GetReferenceRow(1), D.GetReferenceRow(1))

					Xp.SetRow(1, sr)
					Xp.SetRow(N/2+1, rr)

					Xp = Xp.MatrixWithoutFirstRows(1)

					println("E", E.ToString())
					E = E.MatrixWithoutFirstRows(1)

				}
				done <- true
			}

			mm := skip / 2
			m0 := 0
			//m1 := skip

			// Receive before moving on: without it the outer loop advances E
			// while Aux is still reading and writing it (a data race).
			doneAux := make(chan bool, 2)
			go Aux(m0, mm, Xp, E, D, doneAux)
			<-doneAux
			//go Aux(mm, m1, Xp, E, D, doneAux)
			//<-doneAux
			//println("->E", E.ToString(), ">XP", Xp.ToString())

			//for m := 0; m != skip; m++ {
			//	D = E.MatrixWithoutFirstRows(skip)
			//	D.ScalarRow(1, (*tf)[skip*k])

			//	sr, rr, _ := Matrix.Sum_Sustract(E.GetReferenceRow(1), D.GetReferenceRow(1))

			//	Xp.SetRow(1, sr)
			//	Xp.SetRow(N/2+1, rr)

			//	Xp = Xp.MatrixWithoutFirstRows(1)
			//	E = E.MatrixWithoutFirstRows(1)
			//}
			E = E.MatrixWithoutFirstRows(skip)

		}
		E = Xstart
		evenIteration = !evenIteration
	}
	return Scratch
}
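When both halves are enabled, the handshake generalizes to "launch two, wait for two". A self-contained sketch of that completion pattern with sync.WaitGroup (plain slices and illustrative names, not the Matrix API):

package main

import (
	"fmt"
	"sync"
)

// scaleHalf doubles xs[i0:i1); two of these cover the whole slice.
func scaleHalf(xs []float64, i0, i1 int, wg *sync.WaitGroup) {
	defer wg.Done()
	for i := i0; i < i1; i++ {
		xs[i] *= 2
	}
}

func main() {
	xs := []float64{1, 2, 3, 4}
	var wg sync.WaitGroup
	wg.Add(2)
	mid := len(xs) / 2
	go scaleHalf(xs, 0, mid, &wg)
	go scaleHalf(xs, mid, len(xs), &wg)
	wg.Wait() // both halves finish before the result is read
	fmt.Println(xs) // [2 4 6 8]
}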