Example #1
// Cost returns the cross-entropy cost between the expected
// vector x and the sigmoid of the actual output a.
func (_ SigmoidCECost) Cost(x linalg.Vector, a autofunc.Result) autofunc.Result {
	logsig := autofunc.LogSigmoid{}
	log := logsig.Apply(a)
	invLog := logsig.Apply(autofunc.Scale(a, -1))

	xVar := &autofunc.Variable{x}
	oneMinusX := autofunc.AddScaler(autofunc.Scale(xVar, -1), 1)

	sums := autofunc.Add(autofunc.Mul(xVar, log), autofunc.Mul(oneMinusX, invLog))
	return autofunc.Scale(autofunc.SumAll(sums), -1)
}
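This cost takes pre-sigmoid activations: it computes -sum(x*log(sigmoid(a)) + (1-x)*log(1-sigmoid(a))), using LogSigmoid applied to -a for the log(1-sigmoid(a)) term instead of forming 1-sigmoid(a) directly. As a rough reference, the same formula on plain float64 slices might look like the sketch below (the helper name is invented for illustration, it assumes the standard "math" package is imported, and the direct math.Log form is less numerically stable than the LogSigmoid version above):

// sigmoidCEPlain is a hypothetical reference implementation of the
// same sigmoid cross-entropy formula on raw slices.
func sigmoidCEPlain(x, a []float64) float64 {
	var cost float64
	for i, ai := range a {
		sig := 1 / (1 + math.Exp(-ai)) // sigmoid of the activation
		cost -= x[i]*math.Log(sig) + (1-x[i])*math.Log(1-sig)
	}
	return cost
}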
Example #2
// Cost returns the cross-entropy cost between the expected
// vector x and the actual output probabilities a.
func (_ CrossEntropyCost) Cost(x linalg.Vector, a autofunc.Result) autofunc.Result {
	return autofunc.Pool(a, func(a autofunc.Result) autofunc.Result {
		xVar := &autofunc.Variable{x}
		logA := autofunc.Log{}.Apply(a)
		oneMinusA := autofunc.AddScaler(autofunc.Scale(a, -1), 1)
		oneMinusX := autofunc.AddScaler(autofunc.Scale(xVar, -1), 1)
		log1A := autofunc.Log{}.Apply(oneMinusA)

		errorVec := autofunc.Add(autofunc.Mul(xVar, logA),
			autofunc.Mul(oneMinusX, log1A))
		return autofunc.Scale(autofunc.SumAll(errorVec), -1)
	})
}
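Unlike Example #1, this version expects a to already be probabilities in (0, 1), so it calls Log directly rather than LogSigmoid. The autofunc.Pool wrapper is presumably there because a feeds into both the log(a) term and the 1-a term, letting back-propagation treat the pooled value as a single variable instead of re-deriving a for each use (a reading based on how Pool is used here, not on its documentation). As a worked example, with x = (1, 0) and a = (0.8, 0.3), the cost is -(log 0.8 + log 0.7) ≈ 0.58.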
Example #3
// Apply computes the log of the softmax of the input vector.
func (s *LogSoftmaxLayer) Apply(in autofunc.Result) autofunc.Result {
	return autofunc.Pool(in, func(in autofunc.Result) autofunc.Result {
		// Compute the log of the sum of the exponents by
		// factoring out the largest exponent so that all
		// the exponentials fit nicely inside floats.
		maxIdx := maxVecIdx(in.Output())
		maxValue := autofunc.Slice(in, maxIdx, maxIdx+1)
		exponents := autofunc.AddFirst(in, autofunc.Scale(maxValue, -1))
		expSum := autofunc.SumAll(autofunc.Exp{}.Apply(exponents))
		expLog := autofunc.Log{}.Apply(expSum)
		denomLog := autofunc.Add(expLog, maxValue)
		return autofunc.AddFirst(in, autofunc.Scale(denomLog, -1))
	})
}
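The max-subtraction here is the standard log-sum-exp trick: log(sum(exp(v_i))) = m + log(sum(exp(v_i - m))) with m the largest component, so no exponential can overflow. A minimal plain-float sketch of the same computation (the helper name is invented for illustration and assumes the standard "math" package is imported):

// logSoftmaxPlain is a hypothetical reference version of the
// numerically stable log-softmax used above.
func logSoftmaxPlain(in []float64) []float64 {
	maxVal := in[0]
	for _, v := range in {
		if v > maxVal {
			maxVal = v
		}
	}
	var expSum float64
	for _, v := range in {
		expSum += math.Exp(v - maxVal) // every exponent is <= 0, so no overflow
	}
	logDenom := math.Log(expSum) + maxVal
	out := make([]float64, len(in))
	for i, v := range in {
		out[i] = v - logDenom
	}
	return out
}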
Example #4
// Loss returns the weighted exponential loss.
// It determines which samples are positive vs. negative
// by checking the sign of the element in the expected
// vector.
func (w *WeightedExpLoss) Loss(actual autofunc.Result, expected linalg.Vector) autofunc.Result {
	expVar := &autofunc.Variable{Vector: expected.Copy().Scale(-1)}
	dots := autofunc.Mul(actual, expVar)
	exps := autofunc.Exp{}.Apply(dots)

	weightVec := &autofunc.Variable{Vector: make(linalg.Vector, len(expected))}
	for i, x := range expected {
		if x > 0 {
			weightVec.Vector[i] = w.PosWeight
		} else {
			weightVec.Vector[i] = 1
		}
	}

	return autofunc.SumAll(autofunc.Mul(exps, weightVec))
}
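The weighting simply scales each per-sample term exp(-actual*expected) by PosWeight when the expected label is positive and by 1 otherwise; for example, with expected = (+1, -1), actual = (2, -1) and PosWeight = 2, the loss is 2*exp(-2) + exp(-1) ≈ 0.64. A plain-float sketch of the same formula (hypothetical helper, assuming "math" is imported):

// weightedExpLossPlain mirrors the weighted exponential loss on raw slices.
func weightedExpLossPlain(actual, expected []float64, posWeight float64) float64 {
	var loss float64
	for i, e := range expected {
		w := 1.0
		if e > 0 {
			w = posWeight // positive samples get the extra weight
		}
		loss += w * math.Exp(-actual[i]*e)
	}
	return loss
}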
Example #5
// Loss returns the exponential loss, as given by
// exp(-actual*expected).
func (_ ExpLoss) Loss(actual autofunc.Result, expected linalg.Vector) autofunc.Result {
	expVar := &autofunc.Variable{Vector: expected.Copy().Scale(-1)}
	dots := autofunc.Mul(actual, expVar)
	exps := autofunc.Exp{}.Apply(dots)
	return autofunc.SumAll(exps)
}
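For a single output this behaves like the usual margin-based exponential loss: a prediction that agrees with the label, say actual = 2 with expected = 1, contributes exp(-2) ≈ 0.14, while the same output against expected = -1 contributes exp(2) ≈ 7.4, so confident mistakes dominate the loss.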
Example #6
// Loss returns the squared magnitude of the difference
// between actual and expected.
func (_ SquareLoss) Loss(actual autofunc.Result, expected linalg.Vector) autofunc.Result {
	expVar := &autofunc.Variable{Vector: expected.Copy().Scale(-1)}
	return autofunc.SumAll(autofunc.Square(autofunc.Add(actual, expVar)))
}
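For instance, with actual = (1, 2) and expected = (0, 1) the loss is (1-0)^2 + (2-1)^2 = 2; the expected vector is negated and then added, which is the same as subtracting it from actual.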
Example #7
// Cost returns the negated dot product of the expected
// vector x and the actual output a.
func (_ DotCost) Cost(x linalg.Vector, a autofunc.Result) autofunc.Result {
	xVar := &autofunc.Variable{x}
	return autofunc.Scale(autofunc.SumAll(autofunc.Mul(xVar, a)), -1)
}
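This is just the negated dot product -sum(x*a). When a holds log-probabilities (for example, the output of the LogSoftmaxLayer from Example #3) and x is a one-hot expected vector, it reduces to the negative log-likelihood: with x = (0, 1, 0) and a = (log 0.2, log 0.5, log 0.3), the cost is -log 0.5 ≈ 0.69.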