Example #1
0
// DSoftmax computes the element-wise derivative of the L1-normalized
// activation y_i = x_i / ||X||_1, returned as Y ⊙ (1 − X).
// Returns nil if the element-wise subtraction fails (shape mismatch),
// which should not occur since the ones matrix is built to match X.
func DSoftmax(X *Matrix.Matrix) *Matrix.Matrix {
	// Scale X by the reciprocal of its taxicab (L1) norm.
	Total := 1 / X.TaxicabNorm()
	Y := X.Scalar(complex(Total, 0))

	// Build a ones matrix with the SAME shape as X for (1 − X).
	// BUG FIX: the original passed GetNColumns() for both dimensions,
	// which is only correct for square matrices; activations here are
	// typically column vectors, so rows must come from GetNRows().
	S, err := Matrix.Sustract(Matrix.FixValueMatrix(X.GetNRows(), X.GetNColumns(), 1.0), X)
	if err != nil {
		// Previously the error was silently discarded; surface the
		// failure as nil instead of propagating a bad operand.
		return nil
	}

	// Hadamard (element-wise) product Y ⊙ (1 − X).
	return Matrix.DotMultiplication(Y, S)
}
Example #2
0
// CreateANN builds a fully-connected feed-forward network description.
//
// Parameters:
//   - Inputs: number of input features (size of the first layer's input).
//   - NeuronsByLayer: neuron count per layer; the last entry is the
//     output size. NOTE(review): an empty slice panics on the
//     NeuronsByLayer[layers-1] access — callers must pass >= 1 layer.
//   - Act / Derivate: layer activation function and its derivative.
//   - Cost / DCost: cost function and its derivative.
//   - path: CSV file path used to persist weights.
//
// Weights are randomly initialized (scale 1.2), per-weight learning
// rates start at 0.0001, and all accumulator matrices start at zero.
func CreateANN(Inputs int, NeuronsByLayer []int, Act func(*Matrix.Matrix) *Matrix.Matrix, Derivate func(*Matrix.Matrix) *Matrix.Matrix, Cost func(*Matrix.Matrix, *Matrix.Matrix) *Matrix.Matrix, DCost func(*Matrix.Matrix, *Matrix.Matrix) *Matrix.Matrix, path string) ANN {

	var out ANN

	layers := len(NeuronsByLayer)

	// make([]T, n) — the explicit capacity equal to the length was redundant.
	out.Weights = make([]*Matrix.Matrix, layers)
	out.BestWeightsFound = make([]*Matrix.Matrix, layers)
	out.LearningRates = make([]*Matrix.Matrix, layers)

	out.Δ = make([]*Matrix.Matrix, layers)
	out.Δ1 = make([]*Matrix.Matrix, layers)

	// ð carries one extra slot; presumably for the output/input stage —
	// TODO confirm against the training code that fills it.
	out.ð = make([]*Matrix.Matrix, layers+1)

	out.Inputs = Inputs
	out.Outputs = NeuronsByLayer[layers-1]

	out.ActivationLayer = Act
	out.DarivateActivationLayer = Derivate

	out.CostFunction = Cost
	out.DerviateCostFunction = DCost
	out.PathWeightsInCSV = path

	// m tracks the fan-in of the current layer, starting at the input size.
	m := Inputs
	for i, n := range NeuronsByLayer {
		// One extra row holds the bias weights for each neuron.
		out.Weights[i] = Matrix.RandomRealMatrix(m+1, n, 1.2)
		out.BestWeightsFound[i] = Matrix.NullMatrixP(m+1, n)
		out.LearningRates[i] = Matrix.FixValueMatrix(m+1, n, 0.0001)

		out.ð[i] = Matrix.NullMatrix(m+1, n)

		out.Δ[i] = Matrix.NullMatrixP(m+1, n)
		out.Δ1[i] = Matrix.NullMatrixP(m+1, n)

		// This layer's output size is the next layer's fan-in.
		m = n
	}

	// Error accumulators are sized by the output layer (m == last width here).
	out.AcumatedError = Matrix.NullMatrixP(m, 1)
	out.MinimumErrorFound = Matrix.NullMatrixP(m, 1)
	out.AcumatedError1 = Matrix.NullMatrixP(m, 1)
	return out
}