Example #1
0
// Train fits the regressor to trainingData via gradient descent using the
// package-level default learning rate, precision, and iteration cap.
//
// The dataset must contain only float-valued features and targets, exactly
// one target column, and at least one feature column; any violation yields
// the corresponding linearerrors error. Estimator construction, training,
// and estimation failures are likewise wrapped in linearerrors types so
// callers can distinguish the failing stage. On success the learned
// coefficients are stored on the receiver and nil is returned.
func (regressor *linearRegressor) Train(trainingData dataset.Dataset) error {
	// Validate the dataset shape before doing any estimation work.
	switch {
	case !trainingData.AllFeaturesFloats():
		return linearerrors.NewNonFloatFeaturesError()
	case !trainingData.AllTargetsFloats():
		return linearerrors.NewNonFloatTargetsError()
	case trainingData.NumTargets() != 1:
		return linearerrors.NewInvalidNumberOfTargetsError(trainingData.NumTargets())
	case trainingData.NumFeatures() == 0:
		return linearerrors.NewNoFeaturesError()
	}

	gdEstimator, constructionErr := gradientdescentestimator.NewGradientDescentParameterEstimator(
		defaultLearningRate,
		defaultPrecision,
		defaultMaxIterations,
		gradientdescentestimator.LinearModelLeastSquaresLossGradient,
	)
	if constructionErr != nil {
		return linearerrors.NewEstimatorConstructionError(constructionErr)
	}

	if trainErr := gdEstimator.Train(trainingData); trainErr != nil {
		return linearerrors.NewEstimatorTrainingError(trainErr)
	}

	// Start the descent from the package's default initial coefficient guess,
	// sized to the number of features in the dataset.
	fitted, estimationErr := gdEstimator.Estimate(defaultInitialCoefficientEstimate(trainingData.NumFeatures()))
	if estimationErr != nil {
		return linearerrors.NewEstimatorEstimationError(estimationErr)
	}

	regressor.coefficients = fitted
	return nil
}
	"math"
	"math/rand"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

var _ = Describe("Gradient Descent Parameter Estimation", func() {
	var lossGradient gradientdescentestimator.ParameterizedLossGradient

	Describe("NewGradientDescentParameterEstimator", func() {
		Context("Given negative learning rate", func() {
			It("Returns an error", func() {
				_, err := gradientdescentestimator.NewGradientDescentParameterEstimator(
					-0.3,
					0.3,
					100,
					lossGradient,
				)

				Ω(err).Should(BeAssignableToTypeOf(gdeErrors.InvalidGDPEInitializationValuesError{}))
			})
		})

		Context("Given zero learning rate", func() {
			It("Returns an error", func() {
				_, err := gradientdescentestimator.NewGradientDescentParameterEstimator(
					0,
					0.3,
					100,
					lossGradient,
				)
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

var _ = Describe("Linear Model Least Squares Parameter Estimation", func() {
	var estimator parameterestimator.ParameterEstimator
	var trueParameters []float64

	Context("Given reasonable learning rate, precision, maximum number of iterations, and training set", func() {
		BeforeEach(func() {
			trueParameters = []float64{2, -3, 4}

			var err error
			estimator, err = gradientdescentestimator.NewGradientDescentParameterEstimator(
				0.001,
				0.000005,
				1000,
				gradientdescentestimator.LinearModelLeastSquaresLossGradient,
			)
			Ω(err).ShouldNot(HaveOccurred())

			columnTypes, err := columntype.StringsToColumnTypes([]string{"1.0", "1.0", "1.0"})
			Ω(err).ShouldNot(HaveOccurred())

			trainingSet := dataset.NewDataset([]int{0, 1}, []int{2}, columnTypes)
			for i := 0; i < 20; i++ {
				for j := 0; j < 20; j++ {
					x0 := -1.9 + 0.2*float64(i)
					x1 := -1.9 + 0.2*float64(j)
					y := trueParameters[0]*x0 + trueParameters[1]*x1 + trueParameters[2] + 0.1*rand.NormFloat64()

					err = trainingSet.AddRowFromStrings([]string{