Example #1
// Apply adds noise to the input when the layer is in
// training mode; otherwise it returns the input unchanged.
func (g *GaussNoiseLayer) Apply(in autofunc.Result) autofunc.Result {
	if g.Training {
		return autofunc.Add(in, g.noise(len(in.Output())))
	}
	return in
}
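A minimal usage sketch (hypothetical, not from the source): with Training left false the layer acts as the identity, so its noise-related fields can safely stay at their zero values.

layer := &GaussNoiseLayer{Training: false}
in := &autofunc.Variable{Vector: linalg.Vector{1, 2, 3}}
out := layer.Apply(in)
// out is in itself, so out.Output() is [1 2 3].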
Example #2
// Cost evaluates the wrapped cost function and adds the
// squared norm of each regularized variable, scaled by Penalty.
func (r *RegularizingCost) Cost(a linalg.Vector, x autofunc.Result) autofunc.Result {
	regFunc := autofunc.SquaredNorm{}
	cost := r.CostFunc.Cost(a, x)
	for _, variable := range r.Variables {
		norm := regFunc.Apply(variable)
		cost = autofunc.Add(cost, autofunc.Scale(norm, r.Penalty))
	}
	return cost
}
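A hypothetical composition sketch, reusing SigmoidCECost from Example #3 below and assuming Variables is declared as []*autofunc.Variable and CostFunc as the Cost interface these examples implement:

weights := &autofunc.Variable{Vector: linalg.Vector{0.5, -0.3}}
reg := &RegularizingCost{
	CostFunc:  SigmoidCECost{},
	Penalty:   1e-3,
	Variables: []*autofunc.Variable{weights},
}
// Total cost: the sigmoid cross-entropy plus 1e-3 times ||weights||².
cost := reg.Cost(linalg.Vector{1, 0}, &autofunc.Variable{Vector: linalg.Vector{2, -1.5}})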
Example #3
// Cost computes the sigmoid cross-entropy between the
// logits a and the expected outputs x.
func (_ SigmoidCECost) Cost(x linalg.Vector, a autofunc.Result) autofunc.Result {
	logsig := autofunc.LogSigmoid{}
	log := logsig.Apply(a)
	invLog := logsig.Apply(autofunc.Scale(a, -1))

	xVar := &autofunc.Variable{Vector: x}
	oneMinusX := autofunc.AddScaler(autofunc.Scale(xVar, -1), 1)

	sums := autofunc.Add(autofunc.Mul(xVar, log), autofunc.Mul(oneMinusX, invLog))
	return autofunc.Scale(autofunc.SumAll(sums), -1)
}
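Two details are easy to miss here: a is expected to hold raw logits rather than probabilities, and the second term uses the identity 1 − σ(a) = σ(−a), so log(1 − σ(a)) is evaluated as LogSigmoid of the negated input. That keeps the computation numerically stable even for large-magnitude logits.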
Example #4
// Cost computes the cross-entropy between the predicted
// probabilities a and the expected outputs x.
func (_ CrossEntropyCost) Cost(x linalg.Vector, a autofunc.Result) autofunc.Result {
	return autofunc.Pool(a, func(a autofunc.Result) autofunc.Result {
		xVar := &autofunc.Variable{Vector: x}
		logA := autofunc.Log{}.Apply(a)
		oneMinusA := autofunc.AddScaler(autofunc.Scale(a, -1), 1)
		oneMinusX := autofunc.AddScaler(autofunc.Scale(xVar, -1), 1)
		log1A := autofunc.Log{}.Apply(oneMinusA)

		errorVec := autofunc.Add(autofunc.Mul(xVar, logA),
			autofunc.Mul(oneMinusX, log1A))
		return autofunc.Scale(autofunc.SumAll(errorVec), -1)
	})
}
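Unlike Example #3, this version expects a to already contain probabilities, since it applies Log to a directly. The autofunc.Pool wrapper hands the closure a pooled stand-in for a, so although a feeds several sub-expressions (logA, oneMinusA, and the final product), gradients only have to propagate through it once.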
Example #5
// Apply computes the log-softmax of the input vector.
func (s *LogSoftmaxLayer) Apply(in autofunc.Result) autofunc.Result {
	return autofunc.Pool(in, func(in autofunc.Result) autofunc.Result {
		// Compute the log of the sum of the exponents by
		// factoring out the largest exponent so that all
		// the exponentials fit nicely inside floats.
		maxIdx := maxVecIdx(in.Output())
		maxValue := autofunc.Slice(in, maxIdx, maxIdx+1)
		exponents := autofunc.AddFirst(in, autofunc.Scale(maxValue, -1))
		expSum := autofunc.SumAll(autofunc.Exp{}.Apply(exponents))
		expLog := autofunc.Log{}.Apply(expSum)
		denomLog := autofunc.Add(expLog, maxValue)
		return autofunc.AddFirst(in, autofunc.Scale(denomLog, -1))
	})
}
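This is the standard log-sum-exp trick: log Σᵢ exp(xᵢ) = m + log Σᵢ exp(xᵢ − m) with m the largest component, so no exponential ever overflows. A hypothetical sketch with inputs that would break a naive implementation (the receiver's fields are unused by Apply, so a zero-value layer suffices):

layer := &LogSoftmaxLayer{}
in := &autofunc.Variable{Vector: linalg.Vector{1000, 1001, 1002}}
out := layer.Apply(in)
// math.Exp(1000) would be +Inf, but out.Output() holds finite
// log-probabilities whose exponentials sum to 1.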
Example #6
// ApplyBlock applies the LSTM to a batch of inputs.
func (l *LSTM) ApplyBlock(s []State, in []autofunc.Result) BlockResult {
	var internalPool, lastOutPool []*autofunc.Variable
	res := autofunc.PoolAll(in, func(in []autofunc.Result) autofunc.Result {
		var weavedInputs []autofunc.Result
		var internalResults []autofunc.Result
		for i, sObj := range s {
			state := sObj.(lstmState)
			internalVar := &autofunc.Variable{Vector: state.Internal}
			lastOutVar := &autofunc.Variable{Vector: state.Output}

			internalPool = append(internalPool, internalVar)
			lastOutPool = append(lastOutPool, lastOutVar)

			weavedInputs = append(weavedInputs, in[i], lastOutVar, internalVar)
			internalResults = append(internalResults, internalVar)
		}

		gateIn := autofunc.Concat(weavedInputs...)
		inValue := l.inputValue.Batch(gateIn, len(in))
		inGate := l.inputGate.Batch(gateIn, len(in))
		rememberGate := l.rememberGate.Batch(gateIn, len(in))

		lastState := autofunc.Concat(internalResults...)
		newState := autofunc.Add(autofunc.Mul(rememberGate, lastState),
			autofunc.Mul(inValue, inGate))

		return autofunc.Pool(newState, func(newState autofunc.Result) autofunc.Result {
			var newWeaved []autofunc.Result
			for i, state := range autofunc.Split(len(in), newState) {
				newWeaved = append(newWeaved, in[i], lastOutPool[i], state)
			}
			newGateIn := autofunc.Concat(newWeaved...)
			outGate := l.outputGate.Batch(newGateIn, len(in))
			outValues := neuralnet.HyperbolicTangent{}.Apply(newState)
			return autofunc.Concat(newState, autofunc.Mul(outGate, outValues))
		})
	})

	states, outs := splitLSTMOutput(len(in), res.Output())
	return &lstmResult{
		CellStates:   states,
		OutputVecs:   outs,
		InternalPool: internalPool,
		LastOutPool:  lastOutPool,
		JoinedOut:    res,
	}
}
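The layout is the key detail here: for each batch entry the gates see the triple (input, previous output, previous internal state) concatenated in that order, and the pooled variables recorded in internalPool and lastOutPool are what lstmResult later uses to route gradients back to the states. The inner Pool around newState ensures the fresh cell state is back-propagated through only once, even though it feeds both the output gate's input and the tanh output.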
Example #7
// Batch applies the gate to a batch of n input vectors
// concatenated into in.
func (l *lstmGate) Batch(in autofunc.Result, n int) autofunc.Result {
	if l.Peephole == nil {
		return l.Activation.Apply(l.Dense.Batch(in, n))
	}
	return autofunc.Pool(in, func(in autofunc.Result) autofunc.Result {
		vecSize := len(in.Output()) / n
		var weightedInputs []autofunc.Result
		var peepholed []autofunc.Result
		for i := 0; i < n; i++ {
			start := vecSize * i
			weightedEnd := start + vecSize - len(l.Peephole.Vector)
			weightedInputs = append(weightedInputs, autofunc.Slice(in, start, weightedEnd))
			peepholeMe := autofunc.Slice(in, weightedEnd, (i+1)*vecSize)
			peepholed = append(peepholed, autofunc.Mul(l.Peephole, peepholeMe))
		}
		weighted := l.Dense.Batch(autofunc.Concat(weightedInputs...), n)
		return l.Activation.Apply(autofunc.Add(autofunc.Concat(peepholed...), weighted))
	})
}
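The slicing encodes a fixed per-vector layout: each of the n joined vectors ends with len(l.Peephole.Vector) state entries. The leading portion of every vector goes through the dense layer as usual, while the trailing portion is multiplied elementwise by the peephole weights and added to the dense output before the activation.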
Example #8
// ApplyBlock applies the GRU to a batch of inputs.
func (g *GRU) ApplyBlock(s []State, in []autofunc.Result) BlockResult {
	stateVars, stateRes := PoolVecStates(s)
	var gateInputs []autofunc.Result
	for i, x := range stateRes {
		gateInputs = append(gateInputs, in[i], x)
	}
	n := len(in)

	gateInput := autofunc.Concat(gateInputs...)
	stateIn := autofunc.Concat(stateRes...)

	resetMask := g.resetGate.Batch(gateInput, n)
	updateMask := g.updateGate.Batch(gateInput, n)

	maskedByReset := autofunc.Mul(resetMask, stateIn)
	inputValue := autofunc.PoolSplit(n, maskedByReset,
		func(newStates []autofunc.Result) autofunc.Result {
			var newGateInputs []autofunc.Result
			for i, input := range in {
				newGateInputs = append(newGateInputs, input, newStates[i])
			}
			newIn := autofunc.Concat(newGateInputs...)
			return g.inputValue.Batch(newIn, n)
		})

	newState := autofunc.Pool(updateMask, func(umask autofunc.Result) autofunc.Result {
		updateComplement := autofunc.AddScaler(autofunc.Scale(umask, -1), 1)
		return autofunc.Add(autofunc.Mul(umask, stateIn),
			autofunc.Mul(updateComplement, inputValue))
	})

	return &gruResult{
		InStates: stateVars,
		Output:   newState,
	}
}
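The final Pool computes the usual GRU blend, newState = z ⊙ oldState + (1 − z) ⊙ candidate, where z is the update mask and the candidate inputValue is computed from the reset-masked state. Pooling updateMask lets it appear in both terms while only being back-propagated through once.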
Example #9
// Apply adds the biases to the input and scales the result,
// all component-wise.
func (v *VecRescaleLayer) Apply(in autofunc.Result) autofunc.Result {
	return autofunc.Mul(autofunc.Add(in, &autofunc.Variable{Vector: v.Biases}),
		&autofunc.Variable{Vector: v.Scales})
}
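A hypothetical numeric sketch of the rescaling:

layer := &VecRescaleLayer{
	Biases: linalg.Vector{-1, -2},
	Scales: linalg.Vector{2, 3},
}
out := layer.Apply(&autofunc.Variable{Vector: linalg.Vector{1, 2}})
// Component-wise (in + bias) * scale: out.Output() is [0 0].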
Example #10
// Loss returns the squared magnitude of the difference
// between actual and expected.
func (_ SquareLoss) Loss(actual autofunc.Result, expected linalg.Vector) autofunc.Result {
	expVar := &autofunc.Variable{Vector: expected.Copy().Scale(-1)}
	return autofunc.SumAll(autofunc.Square(autofunc.Add(actual, expVar)))
}
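A hypothetical numeric sketch:

actual := &autofunc.Variable{Vector: linalg.Vector{3, 4}}
loss := SquareLoss{}.Loss(actual, linalg.Vector{0, 0})
// ||(3,4) − (0,0)||² = 3² + 4² = 25, so loss.Output() is [25].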