Example 1
0
// Rejection generates len(batch) samples using the rejection sampling algorithm
// and stores them in place into batch. Sampling continues until batch is
// filled. Rejection returns the total number of proposed locations and a boolean
// indicating if the rejection sampling assumption is violated (see details
// below). If the returned boolean is false, all elements of batch are set to
// NaN. If src is not nil, it will be used to generate random numbers, otherwise
// rand.Float64 will be used.
//
// Rejection sampling generates points from the target distribution by using
// the proposal distribution. At each step of the algorithm, the proposed point
// is accepted with probability
//  p = target(x) / (proposal(x) * c)
// where target(x) is the probability of the point according to the target distribution
// and proposal(x) is the probability according to the proposal distribution.
// The constant c must be chosen such that target(x) < proposal(x) * c for all x.
// The expected number of proposed samples is len(batch) * c.
//
// Target may return the true (log of) the probability of the location, or it may return
// a value that is proportional to the probability (logprob + constant). This is
// useful for cases where the probability distribution is only known up to a normalization
// constant.
func Rejection(batch []float64, target distuv.LogProber, proposal distuv.RandLogProber, c float64, src *rand.Rand) (nProposed int, ok bool) {
	if c < 1 {
		// A constant below 1 cannot satisfy target(x) <= proposal(x)*c for a
		// normalized target, so it is a programmer error.
		panic("rejection: acceptance constant must be at least 1")
	}
	f64 := rand.Float64
	if src != nil {
		f64 = src.Float64
	}
	var idx int
	for {
		nProposed++
		v := proposal.Rand()
		qx := proposal.LogProb(v)
		px := target.LogProb(v)
		// Acceptance probability computed in log space for numerical stability.
		accept := math.Exp(px-qx) / c
		if accept > 1 {
			// The rejection sampling assumption target(x) <= proposal(x)*c is
			// violated. Invalidate the whole result and report failure.
			for i := range batch {
				batch[i] = math.NaN()
			}
			return nProposed, false
		}
		if accept > f64() {
			batch[idx] = v
			idx++
			if idx == len(batch) {
				break
			}
		}
	}
	return nProposed, true
}
Example 2
0
// Importance generates len(batch) samples from the proposal distribution,
// and stores the locations and importance sampling weights in place.
//
// Importance sampling is a variance reduction technique where samples are
// generated from a proposal distribution, q(x), instead of the target distribution
// p(x). This allows relatively unlikely samples in p(x) to be generated more frequently.
//
// The importance sampling weight at x is given by p(x)/q(x). To reduce variance,
// a good proposal distribution will bound this sampling weight. This implies the
// support of q(x) should be at least as broad as p(x), and q(x) should be "fatter tailed"
// than p(x).
//
// If weights is nil, the weights are not stored. Otherwise, the length of weights
// must equal the length of batch or Importance will panic.
func Importance(batch, weights []float64, target distuv.LogProber, proposal distuv.RandLogProber) {
	// A nil weights slice means the caller does not want the weights; only
	// panic on a length mismatch when weights are actually requested.
	if weights != nil && len(batch) != len(weights) {
		panic(badLengthMismatch)
	}
	for i := range batch {
		v := proposal.Rand()
		batch[i] = v
		if weights != nil {
			// Weight p(x)/q(x), computed in log space for numerical stability.
			weights[i] = math.Exp(target.LogProb(v) - proposal.LogProb(v))
		}
	}
}