Example 1
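ConvLayer.BatchR evaluates the convolution for each of the n batch entries and, in the same pass, computes the R-operator output: the directional derivative of the layer's output along the direction stored in the autofunc.RVector.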
// BatchR is like Batch, but for RResults.
func (c *ConvLayer) BatchR(rv autofunc.RVector, in autofunc.RResult,
	n int) autofunc.RResult {
	if c.Filters == nil || c.Biases == nil || c.FilterVar == nil {
		panic(uninitPanicMessage)
	}
	outSize := c.OutputWidth() * c.OutputHeight() * c.OutputDepth()
	inSize := c.InputWidth * c.InputHeight * c.InputDepth
	if len(in.Output()) != n*inSize {
		panic("invalid input size")
	}
	res := &convLayerRResult{
		OutputVec:  make(linalg.Vector, outSize*n),
		ROutputVec: make(linalg.Vector, outSize*n),
		Input:      in,
		FiltersR:   rv[c.FilterVar],
		N:          n,
		Layer:      c,
	}
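	// Plain forward pass and R (directional derivative) pass for
	// each batch entry.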
	for i := 0; i < n; i++ {
		subIn := in.Output()[i*inSize : (i+1)*inSize]
		subOut := res.OutputVec[i*outSize : (i+1)*outSize]
		c.convolve(subIn, c.outputToTensor(subOut))

		subInR := in.ROutput()[i*inSize : (i+1)*inSize]
		subOutR := res.ROutputVec[i*outSize : (i+1)*outSize]
		c.convolveR(rv, subIn, subInR, c.outputToTensor(subOutR))
	}
	return res
}
Example 2
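A test-style helper that evaluates a recurrent network by hand: each timestep packs the input and the previous state into one vector (with a matching R-component vector), applies the step function f, and splits the result into a timestep output and the next state.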
func manualNetworkSeq(rv autofunc.RVector, f autofunc.RFunc, start *autofunc.Variable,
	ins [][]*autofunc.Variable, stateSize int) (out, outR [][]linalg.Vector) {
	out = make([][]linalg.Vector, len(ins))
	outR = make([][]linalg.Vector, len(ins))
	for seqIdx, inSeq := range ins {
		var state autofunc.RResult = autofunc.NewRVariable(start, rv)
		for _, in := range inSeq {
			// Derivative direction for this input; assumes every input
			// variable has an entry in rv.
			inR := rv[in]

			// Pack the timestep input followed by the previous state;
			// the R-components are packed in the same layout.
			packedIn := append(linalg.Vector{}, in.Output()...)
			packedIn = append(packedIn, state.Output()...)
			packedInR := append(linalg.Vector{}, inR...)
			packedInR = append(packedInR, state.ROutput()...)

			stepOut := f.ApplyR(rv, &autofunc.RVariable{
				Variable:   &autofunc.Variable{Vector: packedIn},
				ROutputVec: packedInR,
			})
			// The step output is the timestep output followed by the
			// new state of stateSize entries.
			outSize := len(stepOut.Output()) - stateSize
			out[seqIdx] = append(out[seqIdx], stepOut.Output()[:outSize])
			outR[seqIdx] = append(outR[seqIdx], stepOut.ROutput()[:outSize])
			state = &autofunc.RVariable{
				Variable:   &autofunc.Variable{Vector: stepOut.Output()[outSize:]},
				ROutputVec: stepOut.ROutput()[outSize:],
			}
		}
	}
	return
}
Example 3
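MaxPoolingLayer.BatchR follows the same pattern as ConvLayer.BatchR: pool each batch entry, record which inputs were selected, and push the R-components through those same selections.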
// BatchR is like Batch, but for RResults.
func (m *MaxPoolingLayer) BatchR(rv autofunc.RVector, in autofunc.RResult,
	n int) autofunc.RResult {
	outSize := m.OutputWidth() * m.OutputHeight() * m.InputDepth
	inSize := m.InputWidth * m.InputHeight * m.InputDepth
	if len(in.Output()) != n*inSize {
		panic("invalid input size")
	}
	res := &maxPoolingRResult{
		OutputVec:  make(linalg.Vector, outSize*n),
		ROutputVec: make(linalg.Vector, outSize*n),
		Input:      in,
		Layer:      m,
	}
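	// Pool each entry, recording which inputs were chosen so the
	// R pass can be routed through the same selections.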
	for i := 0; i < n; i++ {
		outTensor := m.outputTensor(res.OutputVec[i*outSize : (i+1)*outSize])
		inTensor := m.inputTensor(in.Output()[i*inSize : (i+1)*inSize])
		choices := m.evaluate(inTensor, outTensor)
		res.Choices = append(res.Choices, choices)

		outTensorR := m.outputTensor(res.ROutputVec[i*outSize : (i+1)*outSize])
		inTensorR := m.inputTensor(in.ROutput()[i*inSize : (i+1)*inSize])
		choices.ForwardPropagate(inTensorR, outTensorR)
	}
	return res
}
Example 4
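UnstackLayer.ApplyR relies on the unstack operation being linear: the identical routine is applied to the output and to its R-component.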
func (u *UnstackLayer) ApplyR(v autofunc.RVector, in autofunc.RResult) autofunc.RResult {
	return &unstackLayerRResult{
		OutputVector:  u.unstack(in.Output()),
		ROutputVector: u.unstack(in.ROutput()),
		Input:         in,
		Layer:         u,
	}
}
Example 5
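BorderLayer.ApplyR is linear in the same way: addBorder is applied independently to the output and to the R-output.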
func (b *BorderLayer) ApplyR(rv autofunc.RVector, in autofunc.RResult) autofunc.RResult {
	return &borderRResult{
		OutputVec:  b.addBorder(in.Output()),
		ROutputVec: b.addBorder(in.ROutput()),
		Input:      in,
		Info:       b,
	}
}
Example 6
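ReLU.ApplyR uses the fact that ReLU is piecewise linear: where an input is positive, both the value and its directional derivative pass through unchanged; elsewhere both are zero.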
func (_ ReLU) ApplyR(v autofunc.RVector, r autofunc.RResult) autofunc.RResult {
	outVec := r.Output()
	outVecR := r.ROutput()
	vec := make(linalg.Vector, len(outVec))
	vecR := make(linalg.Vector, len(outVec))
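	// Positive inputs pass through along with their R-derivatives;
	// all other entries stay zero.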
	for i, x := range outVec {
		if x > 0 {
			vec[i] = x
			vecR[i] = outVecR[i]
		}
	}
	return &reLURResult{
		OutputVec:  vec,
		ROutputVec: vecR,
		Input:      r,
	}
}
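For reference, here is a minimal driver sketch showing how an ApplyR implementation like the ReLU one above is typically invoked. The import paths (github.com/unixpickle/autofunc, github.com/unixpickle/num-analysis/linalg, github.com/unixpickle/weakai/neuralnet) and the neuralnet.ReLU literal are assumptions about where this code lives, not something stated in the examples themselves.

package main

import (
	"fmt"

	"github.com/unixpickle/autofunc"
	"github.com/unixpickle/num-analysis/linalg"
	"github.com/unixpickle/weakai/neuralnet"
)

func main() {
	// A variable holding the input vector.
	v := &autofunc.Variable{Vector: linalg.Vector{-1, 0.5, 2}}

	// An RVector maps each variable to a derivative direction;
	// here the input is differentiated along (1, 1, 1).
	rv := autofunc.RVector{v: linalg.Vector{1, 1, 1}}

	in := autofunc.NewRVariable(v, rv)
	out := neuralnet.ReLU{}.ApplyR(rv, in)

	fmt.Println(out.Output())  // [0 0.5 2]
	fmt.Println(out.ROutput()) // [0 1 1]
}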