Example #1
//TODO: the activation function and its derivative have to be made more general, e.g. to implement soft-max
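// ForwardPropagation feeds the Inputs x 1 column vector In through every weight
// layer of the network. It returns the per-layer activations (each with the bias
// unit appended), the per-layer derivatives of the activation function, and the
// network output with the bias entry stripped. If In does not have the expected
// dimensions, all three results are nil.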
func (this *ANN) ForwardPropagation(In *Matrix.Matrix) (As, AsDerviate *([]*Matrix.Matrix), Output *Matrix.Matrix) {
	if In.GetMRows() == this.Inputs && In.GetNColumns() == 1 {
		As1 := make([]*Matrix.Matrix, len(this.Weights)+1)
		AsDerviate1 := make([]*Matrix.Matrix, len(this.Weights)+1)

		As = &As1
		AsDerviate = &AsDerviate1

		sTemp := In.Transpose()

		//Add a new column for the bias weight
		sTemp = sTemp.AddColumn(Matrix.I(1))

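		// Layer 0: the biased input itself serves as the first activation.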
		holeInput := sTemp.Copy()
		As1[0] = sTemp.Transpose()

		//Derivative of the activation for the input layer
		//sutract, _ := Matrix.Sustract(Matrix.OnesMatrix(As1[0].GetMRows(), 1), As1[0])
		//derivate := Matrix.DotMultiplication(As1[0], sutract)

		//derivate := holeInput.Apply(this.Derivate)
		derivate := this.DarivateActivationLayer(holeInput)

		AsDerviate1[0] = derivate.Transpose()

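		// Propagate through each weight layer: multiply the running activation by
		// the layer's weights, apply the activation function, append the bias unit,
		// and store both the activation and the derivative of the activation taken
		// at the pre-activation values.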
		for i := 0; i < len(this.Weights); i++ {
			sTemp = Matrix.Product(sTemp, this.Weights[i])

			//apply the activation function
			holeInput := sTemp.Copy()
			sTemp = this.ActivationLayer(sTemp)

			//sTemp = sTemp.Apply(this.Activation)

			//Add a new column for the bias weight
			sTemp = sTemp.AddColumn(Matrix.I(1))
			(*As)[i+1] = sTemp.Transpose()

			//Derivative
			//sutract, _ := Matrix.Sustract(Matrix.OnesMatrix((*As)[i+1].GetMRows(), 1), (*As)[i+1])
			//derivate := Matrix.DotMultiplication((*As)[i+1], sutract)

			derivate := this.DarivateActivationLayer(holeInput)
			//derivate := holeInput.Apply(this.Derivate)

			(*AsDerviate)[i+1] = derivate.Transpose()

		}
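		// The final loop iteration has already stored the last activation; the copy
		// below stores it again, and the output is that activation as a column
		// vector with the trailing bias row removed.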
		Asf := sTemp.Copy()

		//Asf = Asf.AddColumn(Matrix.I(1))
		(*As)[len(As1)-1] = Asf.Transpose()
		Output = sTemp.Transpose().MatrixWithoutLastRow()
		return As, AsDerviate, Output
	}
	return nil, nil, nil
}
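A minimal usage sketch, assuming an *ANN value that has already been created and trained elsewhere; newColumnVector is a hypothetical helper standing in for whatever constructor the Matrix package actually provides for building an Inputs x 1 matrix, and is not part of this example's code.

// Usage sketch. Assumptions: net is an *ANN initialized elsewhere, and
// newColumnVector is a hypothetical stand-in for the Matrix package's real
// constructor (not shown here) that builds an n x 1 *Matrix.Matrix from values.
func predict(net *ANN, features []float64) *Matrix.Matrix {
	in := newColumnVector(features) // hypothetical constructor, Inputs x 1

	as, asDerivate, out := net.ForwardPropagation(in)
	if out == nil {
		// the input did not match the net.Inputs x 1 shape expected above
		return nil
	}

	// as and asDerivate hold one entry per layer (input layer included); they are
	// what a back-propagation step would consume. Only the output is used here.
	_ = as
	_ = asDerivate
	return out
}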