func HypothesisHistory(thetas Parameters, trainingEx TrainingExample) []*matrix.Matrix {
	// Describes the current working values (a_1, a_2, ...)
	curValues := trainingEx.Input

	// Is simply a 1 in a 1x1 matrix to be inserted into a vector as the bias unit
	biasValueMatrix := matrix.Ones(1, 1)

	// The input layer, with the bias unit prepended, is the first entry in the history
	history := make([]*matrix.Matrix, 0, len(thetas)+1)
	history = append(history, curValues.InsertRows(biasValueMatrix, 0))

	for i, theta := range thetas {
		// Multiply with theta and apply the sigmoid function to get the next layer's activations
		curValues = theta.Mul(history[len(history)-1]).Apply(sigmoidMatrix)

		// Prepend the bias unit to every layer except the output layer
		if i != len(thetas)-1 {
			history = append(history, curValues.InsertRows(biasValueMatrix, 0))
		} else {
			history = append(history, curValues)
		}
	}

	return history
}
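// sigmoidMatrix is referenced above but not shown in this section. Below is a
// minimal sketch, assuming the matrix package's Apply expects an element-wise
// func(float64) float64; that signature is an assumption, not confirmed by the
// original source. Requires the standard library "math" package.
func sigmoidMatrix(x float64) float64 {
	// Logistic sigmoid: 1 / (1 + e^(-x)), which squashes any real input into (0, 1)
	return 1.0 / (1.0 + math.Exp(-x))
}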
func DeltaTerms(thetas Parameters, trainingEx TrainingExample) Deltas {
	deltas := make(Deltas, len(thetas))
	biasValueMatrix := matrix.Ones(1, 1)

	// The delta term of the output layer is the difference between the
	// network's prediction and the expected output
	deltas[len(deltas)-1], _ = Hypothesis(thetas, trainingEx).Sub(trainingEx.ExpectedOutput)

	// Propagate the error backwards through the hidden layers
	for i := len(deltas) - 2; i >= 0; i-- {
		workingTheta := thetas[i+1]

		// Activations of the hidden layer, with the bias unit prepended
		levelPrediction := Hypothesis(thetas[:i+1], trainingEx).InsertRows(biasValueMatrix, 0)

		// Sigmoid gradient: a .* (1 - a), evaluated element-wise
		tmp, _ := matrix.Ones(levelPrediction.R(), 1).Sub(levelPrediction)
		levelGradient := levelPrediction.EWProd(tmp)

		// Back-propagate the next layer's delta through theta, weight it by the
		// sigmoid gradient, and drop the bias row
		deltas[i] = workingTheta.Transpose().Mul(deltas[i+1]).EWProd(levelGradient).RemoveRow(1)
	}

	return deltas
}
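// A hedged usage sketch (not part of the original source): pairing the delta terms
// with the activation history yields one training example's contribution to the
// gradient, grad_l = delta_(l+1) * a_l^T. GradientContributions is a hypothetical
// helper name, and it only uses methods already seen above (Mul, Transpose).
func GradientContributions(thetas Parameters, trainingEx TrainingExample) []*matrix.Matrix {
	history := HypothesisHistory(thetas, trainingEx)
	deltas := DeltaTerms(thetas, trainingEx)

	grads := make([]*matrix.Matrix, len(thetas))
	for l := range thetas {
		// Outer product of the next layer's delta with this layer's activations
		// (including the bias unit stored in history[l]); the result has the same
		// shape as thetas[l]
		grads[l] = deltas[l].Mul(history[l].Transpose())
	}
	return grads
}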
func Hypothesis(thetas Parameters, trainingEx TrainingExample) *matrix.Matrix {
	// Describes the current working values (a_1, a_2, ...)
	curValues := trainingEx.Input

	// Is simply a 1 in a 1x1 matrix to be inserted into a vector as the bias unit
	biasValueMatrix := matrix.Ones(1, 1)

	for _, theta := range thetas {
		// Insert the bias unit, multiply with theta and apply the sigmoid function
		curValues = theta.Mul(curValues.InsertRows(biasValueMatrix, 0)).Apply(sigmoidMatrix)
	}

	return curValues
}
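// The types Parameters, Deltas, and TrainingExample are used but not defined in
// this section. The following definitions are a sketch inferred from how the
// functions above use them, not copied from the original source:
type Parameters []*matrix.Matrix // one weight matrix (theta) per layer transition

type Deltas []*matrix.Matrix // one delta (error) vector per layer transition

type TrainingExample struct {
	Input          *matrix.Matrix // column vector of input features
	ExpectedOutput *matrix.Matrix // column vector of expected outputs (labels)
}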