func TestChebyshev(t *testing.T) {
	var vectorX, vectorY *mat64.Dense
	chebyshev := NewChebyshev()

	Convey("Given two vectors", t, func() {
		vectorX = mat64.NewDense(4, 1, []float64{1, 2, 3, 4})
		vectorY = mat64.NewDense(4, 1, []float64{-5, -6, 7, 8})

		Convey("When calculating distance with two vectors", func() {
			result := chebyshev.Distance(vectorX, vectorY)

			Convey("The result should be 8", func() {
				So(result, ShouldEqual, 8)
			})
		})

		Convey("When calculating distance with row vectors", func() {
			vectorX.Copy(vectorX.T())
			vectorY.Copy(vectorY.T())
			result := chebyshev.Distance(vectorX, vectorY)

			Convey("The result should be 8", func() {
				So(result, ShouldEqual, 8)
			})
		})

		Convey("When calculating distance with different dimension matrices", func() {
			vectorX.Clone(vectorX.T())
			So(func() { chebyshev.Distance(vectorX, vectorY) }, ShouldPanic)
		})
	})
}
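// For reference, the expected value of 8 above is the Chebyshev (L-infinity)
// distance max_i |x_i - y_i| = max(|1+5|, |2+6|, |3-7|, |4-8|) = max(6, 8, 4, 4).
// The helper below is only an illustrative sketch over plain slices -- it is not
// part of the library under test -- and assumes "math" from the standard
// library is imported.
func chebyshevReference(x, y []float64) float64 {
	// Track the largest absolute component-wise difference.
	max := 0.0
	for i := range x {
		if d := math.Abs(x[i] - y[i]); d > max {
			max = d
		}
	}
	// chebyshevReference([]float64{1, 2, 3, 4}, []float64{-5, -6, 7, 8}) == 8
	return max
}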
func TestCranberra(t *testing.T) {
	var vectorX, vectorY *mat64.Dense
	cranberra := NewCranberra()

	Convey("Given two vectors that are same", t, func() {
		vec := mat64.NewDense(7, 1, []float64{0, 1, -2, 3.4, 5, -6.7, 89})
		distance := cranberra.Distance(vec, vec)

		Convey("The result should be 0", func() {
			So(distance, ShouldEqual, 0)
		})
	})

	Convey("Given two vectors", t, func() {
		vectorX = mat64.NewDense(5, 1, []float64{1, 2, 3, 4, 9})
		vectorY = mat64.NewDense(5, 1, []float64{-5, -6, 7, 4, 3})

		Convey("When calculating distance with two vectors", func() {
			result := cranberra.Distance(vectorX, vectorY)

			Convey("The result should be 2.9", func() {
				So(result, ShouldEqual, 2.9)
			})
		})

		Convey("When calculating distance with row vectors", func() {
			vectorX.Copy(vectorX.T())
			vectorY.Copy(vectorY.T())
			result := cranberra.Distance(vectorX, vectorY)

			Convey("The result should be 2.9", func() {
				So(result, ShouldEqual, 2.9)
			})
		})

		Convey("When calculating distance with different dimension matrices", func() {
			vectorX.Clone(vectorX.T())
			So(func() { cranberra.Distance(vectorX, vectorY) }, ShouldPanic)
		})
	})
}
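// For reference, the Canberra distance (spelled "Cranberra" in this package)
// sums |x_i - y_i| / (|x_i| + |y_i|) term by term; for the vectors above the
// terms are 1 + 1 + 0.4 + 0 + 0.5, giving the asserted 2.9. The helper below is
// an illustrative sketch only (not the package implementation) and assumes the
// common convention that a 0/0 term contributes nothing.
func canberraReference(x, y []float64) float64 {
	sum := 0.0
	for i := range x {
		num := math.Abs(x[i] - y[i])
		den := math.Abs(x[i]) + math.Abs(y[i])
		if den != 0 {
			sum += num / den
		}
	}
	// For the test vectors above this returns 2.9 (up to floating-point rounding).
	return sum
}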
func TestManhattan(t *testing.T) {
	var vectorX, vectorY *mat64.Dense
	manhattan := NewManhattan()

	Convey("Given two vectors that are same", t, func() {
		vec := mat64.NewDense(7, 1, []float64{0, 1, -2, 3.4, 5, -6.7, 89})
		distance := manhattan.Distance(vec, vec)

		Convey("The result should be 0", func() {
			So(distance, ShouldEqual, 0)
		})
	})

	Convey("Given two vectors", t, func() {
		vectorX = mat64.NewDense(3, 1, []float64{2, 2, 3})
		vectorY = mat64.NewDense(3, 1, []float64{1, 4, 5})

		Convey("When calculating distance with column vectors", func() {
			result := manhattan.Distance(vectorX, vectorY)

			Convey("The result should be 5", func() {
				So(result, ShouldEqual, 5)
			})
		})

		Convey("When calculating distance with row vectors", func() {
			vectorX.Copy(vectorX.T())
			vectorY.Copy(vectorY.T())
			result := manhattan.Distance(vectorX, vectorY)

			Convey("The result should be 5", func() {
				So(result, ShouldEqual, 5)
			})
		})

		Convey("When calculating distance with different dimension matrices", func() {
			vectorX.Clone(vectorX.T())
			So(func() { manhattan.Distance(vectorX, vectorY) }, ShouldPanic)
		})
	})
}
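// For reference, the Manhattan (L1) distance for the vectors above is
// |2-1| + |2-4| + |3-5| = 1 + 2 + 2 = 5, which is the value the tests assert.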
// LinearSolve trains a linear algorithm.
// It assumes the inputs and outputs are already scaled.
// If features is nil, FeaturizeTrainable is called to build the feature matrix.
// Returns nil if the regularizer is not a linear-solve regularizer.
// Is destructive if any of the weights are zero.
// The Losser is always the two-norm.
// Does not set the value of the parameters (in case this is called in parallel with a different routine).
func LinearSolve(linearTrainable LinearTrainable, features *mat64.Dense, inputs, trueOutputs common.RowMatrix,
	weights []float64, regularizer regularize.Regularizer) (parameters []float64) {
	// TODO: Allow Tikhonov regularization
	// TODO: Add test for weights
	// TODO: Need to do something about returning a []float64
	if !IsLinearSolveRegularizer(regularizer) {
		return nil
	}

	if features == nil {
		features = FeaturizeTrainable(linearTrainable, inputs, features)
	}

	_, nFeatures := features.Dims()

	var weightedFeatures, weightedOutput *mat64.Dense

	fmt.Println("In linear solve")

	if weights != nil {
		panic("Need functionality to be better. Either banded special case in matrix or do the multiplication by hand")
		// NOTE: everything below this panic, including the weighted solve at the
		// end of the function, is unreachable until the TODO above is resolved.
		scaledWeight := make([]float64, len(weights))
		for i, weight := range weights {
			scaledWeight[i] = math.Sqrt(weight)
		}
		diagWeight := diagonal.NewDiagonal(len(scaledWeight), scaledWeight)

		nSamples, outputDim := trueOutputs.Dims()
		weightedOutput = mat64.NewDense(nSamples, outputDim, nil)
		weightedFeatures = mat64.NewDense(nSamples, nFeatures, nil)

		weightedOutput.Copy(trueOutputs)
		weightedFeatures.Copy(features)

		// TODO: Replace this with something better than a full matrix multiply
		weightedOutput.Mul(diagWeight, weightedOutput)
		weightedFeatures.Mul(diagWeight, weightedFeatures)
	}

	switch regularizer.(type) {
	case nil:
	case regularize.None:
	default:
		panic("Shouldn't be here. Must be error in IsLinearSolveRegularizer")
	}

	if weights == nil {
		parameterMat, err := mat64.Solve(features, trueOutputs)
		if err != nil {
			panic(err)
		}
		return parameterMat.RawMatrix().Data
	}

	parameterMat, err := mat64.Solve(weightedFeatures, weightedOutput)
	if err != nil {
		panic(err)
	}
	return parameterMat.RawMatrix().Data
}
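// A minimal sketch of what the unweighted, unregularized branch of LinearSolve
// reduces to: an ordinary least-squares solve of features * parameters ≈
// trueOutputs. It reuses only the mat64 calls that appear in LinearSolve itself
// (mat64.NewDense, mat64.Solve, RawMatrix) and is an illustration, not part of
// the package API; the function name and the toy data are invented for the example.
func exampleUnweightedSolve() []float64 {
	// Three samples, two features: column 0 is a constant bias term, column 1 is x.
	features := mat64.NewDense(3, 2, []float64{
		1, 0,
		1, 1,
		1, 2,
	})
	// Outputs generated by y = 3 + 2x, so the solve should recover {3, 2}.
	trueOutputs := mat64.NewDense(3, 1, []float64{3, 5, 7})

	parameterMat, err := mat64.Solve(features, trueOutputs)
	if err != nil {
		panic(err)
	}
	// Approximately {3, 2}, the coefficients of the generating line.
	return parameterMat.RawMatrix().Data
}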