Example 1
// TestTrainBasic runs two training passes (Clear, UpdateOne, Estimate) on a
// single observation sequence and logs the transition probabilities before and
// after training.
func TestTrainBasic(t *testing.T) {

	data := [][]float64{{0.1}, {0.3}, {1.1}, {5.5}, {7.8}, {10.0}, {5.2}, {4.1}, {3.3}, {6.2}, {8.3}}

	m := makeHMM(t)
	h := m.Set.Nets[0]
	tp0 := narray.Exp(nil, h.A.Copy())
	obs := model.NewFloatObsSequence(data, model.SimpleLabel(""), "")

	m.Clear()
	m.UpdateOne(obs, 1.0)
	m.Estimate()

	m.Clear()
	m.UpdateOne(obs, 1.0)
	m.Estimate()

	tp := narray.Exp(nil, h.A.Copy())
	ns := tp.Shape[0]
	for i := 0; i < ns; i++ {
		for j := 0; j < ns; j++ {
			p0 := tp0.At(i, j)
			p := tp.At(i, j)
			if p > smallNumber || p0 > smallNumber {
				t.Logf("TP: %d=>%d, p0:%5.2f, p:%5.2f", i, j, p0, p)
			}
		}
	}

	t.Log("")
	t.Logf("hmm  g1: %+v, g2:%+v", h.B[1], h.B[2])
}
Example 2
// Score implements the graph.Viterbier interface.
// The argument x must be of type []float64.
func (nv nodeValue) Score(x interface{}) float64 {
	if nv.scorer == nil {
		return 0 // non-emitting node.
	}
	o := model.NewFloatObs(x.([]float64), model.SimpleLabel(""))
	return nv.scorer.LogProb(o)
}
Example 3
func TestLRAssign(t *testing.T) {

	initChainFB(t)
	ms2, e := NewSet(hmm0, hmm1)
	fatalIf(t, e)
	testScorer := func() scorer {
		return scorer{op: []float64{math.Log(0.4), math.Log(0.2), math.Log(0.4)}}
	}

	_, err := ms2.makeLeftToRight("model 2", 4, 0.4, 0.1,
		[]model.Modeler{nil, testScorer(), testScorer(), nil})
	fatalIf(t, err)
	_, errr := ms2.makeLeftToRight("model 3", 6, 0.3, 0.2,
		[]model.Modeler{nil, testScorer(), testScorer(), testScorer(), testScorer(), nil})
	fatalIf(t, errr)

	// A label is required to use the assigner; reuse the same sequence as TestLR(), labeled with the model names.
	simplelab := model.SimpleLabel("model 0,model 2,model 3,model 0,model 0,model 0,model 0,model 2")
	oo := model.NewFloatObsSequence(xobs.Value().([][]float64), simplelab, "")

	// read it back
	sl := oo.Label().(model.SimpleLabel)
	glog.V(5).Infoln("read label: ", sl)
	var assigner DirectAssigner
	hmms2, err := ms2.chainFromAssigner(oo, assigner)
	if err != nil {
		t.Fatal(err)
	}

	hmms2.update()
	nq := hmms2.nq
	alpha2 := hmms2.alpha.At(nq-1, hmms2.ns[nq-1]-1, nobs-1)
	beta2 := hmms2.beta.At(0, 0, 0)

	t.Logf("alpha2:%f", alpha2)
	t.Logf("beta2:%f", beta2)

	// check log prob per obs calculated with alpha and beta
	delta := math.Abs(alpha2-beta2) / float64(nobs)
	if delta > smallNumber {
		t.Fatalf("alphaLogProb:%f does not match betaLogProb:%f", alpha2, beta2)
	}
	if math.Abs(alpha2-expectedProb) > smallNumber {
		t.Fatalf("alphaLogProb:%f, expected:%f", alpha2, expectedProb)
	}
}
Example 4
// TestHMMModel builds a model from the model set and a DirectAssigner and runs
// four training passes on the labeled observation sequence.
func TestHMMModel(t *testing.T) {

	initChainFB(t)
	modelSet, e := NewSet(hmm0, hmm1)
	fatalIf(t, e)
	testScorer := func() scorer {
		return scorer{op: []float64{math.Log(0.4), math.Log(0.2), math.Log(0.4)}}
	}

	_, err := modelSet.makeLeftToRight("model 2", 4, 0.4, 0.1,
		[]model.Modeler{nil, testScorer(), testScorer(), nil})
	fatalIf(t, err)
	_, errr := modelSet.makeLeftToRight("model 3", 6, 0.3, 0.2,
		[]model.Modeler{nil, testScorer(), testScorer(), testScorer(), testScorer(), nil})
	fatalIf(t, errr)

	// A label is required to use the assigner; reuse the same sequence as TestLR(), labeled with the model names.
	simplelab := model.SimpleLabel("model 0,model 2,model 3,model 0,model 0,model 0,model 0,model 2")
	oo := model.NewFloatObsSequence(xobs.Value().([][]float64), simplelab, "")

	// Build the model with the set and the assigner, then run several training passes.
	var assigner DirectAssigner
	hmm := NewModel(OSet(modelSet), OAssign(assigner))
	hmm.UpdateOne(oo, model.NoWeight(oo))
	hmm.Estimate()

	hmm.Clear()
	hmm.UpdateOne(oo, model.NoWeight(oo))
	hmm.Estimate()

	hmm.Clear()
	hmm.UpdateOne(oo, model.NoWeight(oo))
	hmm.Estimate()

	hmm.Clear()
	hmm.UpdateOne(oo, model.NoWeight(oo))
	hmm.Estimate()
}
Example 5
// next returns the next generated observation sequence and the corresponding
// sequence of state names.
func (gen *generator) next(id string) (*model.FloatObsSequence, []string) {

	var data [][]float64
	name := gen.hmm.Name
	states := []string{name + "-0"}
	r := gen.r
	seq := model.NewFloatObsSequence(data, model.SimpleLabel(name), id).(model.FloatObsSequence)
	s := gen.hmm.nextState(0, r)
	states = append(states, name+"-"+strconv.FormatInt(int64(s), 10))
	for {
		if s == gen.hmm.ns-1 {
			// Reached exit state.
			break
		}
		glog.V(8).Infof("start loop for hmm: %s, state: %d, num states: %d", name, s, gen.hmm.ns)
		g := gen.hmm.B[s]
		if g == nil {
			glog.Infof("hmm name: %s, state: %d, num states: %d", name, s, gen.hmm.ns)
			panic("output PDF is nil - can't generate data")
		}
		gs, ok := g.(model.Sampler)
		if !ok {
			glog.Info(gen.hmm.A)
			glog.Infof("hmm name: %s, state: %d, num states: %d", name, s, gen.hmm.ns)
			panic("output PDF does not implement the sampler interface")
		}
		x := gs.Sample(r).(model.FloatObs)
		seq.Add(x, "")
		s = gen.hmm.nextState(s, r)
		states = append(states, name+"-"+strconv.FormatInt(int64(s), 10))
	}
	if gen.noNull {
		states = states[1 : len(states)-1]
	}
	return &seq, states
}
Example 6
// logProb returns the log probability of the observation vector x for state s.
func (m *Net) logProb(s int, x []float64) float64 {
	o := model.NewFloatObs(x, model.SimpleLabel(""))
	return m.B[s].LogProb(o)
}
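
Since logProb is unexported it can only be exercised from inside the hmm package. The sketch below is not from the source: it reuses the set/net construction pattern shown in the other examples (NewSet, NewNet, a three-state left-to-right transition matrix), and the mean, standard deviation, and frame values are hypothetical, just to illustrate how the helper might be called from a package test.

// TestLogProbSketch is a hypothetical in-package test; all numeric values are illustrative only.
func TestLogProbSketch(t *testing.T) {
	g1 := gm.NewModel(1, gm.Name("g1"), gm.Mean([]float64{0}), gm.StdDev([]float64{1}))
	h := narray.New(3, 3)
	h.Set(1, 0, 1)
	h.Set(.8, 1, 1)
	h.Set(.2, 1, 2)
	h = narray.Log(nil, h.Copy())
	ms, err := NewSet()
	fatalIf(t, err)
	net, e := ms.NewNet("hmm", h, []model.Modeler{nil, g1, nil})
	fatalIf(t, e)
	// Log probability of the single frame {0.5} under the emitting state (state 1).
	t.Logf("logProb: %f", net.logProb(1, []float64{0.5}))
}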
Example 7
// TestSingleState trains an HMM with a single emitting state; the result should
// be equivalent to training a single Gaussian directly, which makes it useful
// for debugging.
func TestSingleState(t *testing.T) {

	// HMM to generate data.
	g01 := gm.NewModel(1, gm.Name("g01"), gm.Mean([]float64{0}), gm.StdDev([]float64{1}))

	h0 := narray.New(3, 3)
	h0.Set(1, 0, 1)
	h0.Set(.8, 1, 1)
	h0.Set(.2, 1, 2)
	h0 = narray.Log(nil, h0.Copy())

	ms0, _ := NewSet()
	net0, e0 := ms0.NewNet("hmm", h0,
		[]model.Modeler{nil, g01, nil})
	fatalIf(t, e0)
	hmm0 := NewModel(OSet(ms0))
	_ = hmm0 // hmm0 is not used directly; only net0 is needed to generate data.

	// Create gaussian to estimate without using the HMM code.
	g := gm.NewModel(1, gm.Name("g1"), gm.Mean([]float64{-1}), gm.StdDev([]float64{2}))

	// Create initial HMM and estimate params from generated data.
	g1 := gm.NewModel(1, gm.Name("g1"), gm.Mean([]float64{-1}), gm.StdDev([]float64{2}))

	h := narray.New(3, 3)
	h.Set(1, 0, 1)
	h.Set(.5, 1, 1)
	h.Set(.5, 1, 2)
	h = narray.Log(nil, h.Copy())

	ms, _ = NewSet()
	net, e := ms.NewNet("hmm", h,
		[]model.Modeler{nil, g1, nil})
	fatalIf(t, e)
	hmm := NewModel(OSet(ms), UpdateTP(true), UpdateOP(true))

	iter := 5
	// number of sequences
	m := 1000
	numFrames := 0
	t0 := time.Now() // Start timer.
	for i := 0; i < iter; i++ {
		t.Logf("iter [%d]", i)

		// Make sure we generate the same data in each iteration.
		r := rand.New(rand.NewSource(33))
		gen := newGenerator(r, false, net0)

		// Reset all counters.
		hmm.Clear()
		g.Clear()

		// Generate m sequences and accumulate sufficient statistics.
		for j := 0; j < m; j++ {
			obs, states := gen.next("oid-" + fi(j))
			numFrames += len(states) - 2
			hmm.UpdateOne(obs, 1.0)

			// Update Gaussian
			for _, o := range obs.ValueAsSlice() {
				vec := o.([]float64)
				gobs := model.NewFloatObs(vec, model.SimpleLabel(""))
				g.UpdateOne(gobs, 1.0)
			}
		}
		hmm.Estimate()
		g.Estimate()
		t.Logf("iter:%d, hmm g1:   %+v", i, net.B[1])
		t.Logf("iter:%d, direct g1:%+v", i, g)
	}
	dur := time.Now().Sub(t0)
	tp0 := narray.Exp(nil, h0.Copy())
	tp := narray.Exp(nil, net.A.Copy())
	ns := tp.Shape[0]
	for i := 0; i < ns; i++ {
		for j := 0; j < ns; j++ {
			p0 := tp0.At(i, j)
			logp0 := h0.At(i, j)
			p := tp.At(i, j)
			logp := h.At(i, j)
			if p > smallNumber || p0 > smallNumber {
				t.Logf("TP: %d=>%d, p0:%5.2f, p:%5.2f, logp0:%8.5f, logp:%8.5f", i, j, p0, p, logp0, logp)
			}
		}
	}

	t.Log("")
	t.Logf("hmm0 g1:%+v", net0.B[1])
	t.Logf("hmm  g1: %+v", net.B[1])

	t.Log("")
	t.Logf("direct g1:%+v", g)

	// Print time stats.
	t.Log("")
	t.Logf("Total time: %v", dur)
	t.Logf("Time per iteration: %v", dur/time.Duration(iter))
	t.Logf("Time per frame: %v", dur/time.Duration(iter*numFrames*m))

	gjoa.CompareSliceFloat(t, tp0.Data, tp.Data,
		"error in Trans Probs [0]", .03)

	CompareGaussians(t, net0.B[1].(*gm.Model), net.B[1].(*gm.Model), 0.03)

	if t.Failed() {
		t.FailNow()
	}

	// Recognize.
	sg := ms.SearchGraph()

	dec, e := graph.NewDecoder(sg)
	if e != nil {
		t.Fatal(e)
	}

	r := rand.New(rand.NewSource(5151))
	gen := newGenerator(r, true, net0)
	//	testDecoder(t, gen, dec, 1000)
	testDecoder(t, gen, dec, 10)
}
Example 8
// TestMain configures logging and builds the shared fixtures (transition,
// output, alpha, and beta arrays) used by the tests in this package.
func TestMain(m *testing.M) {

	// Configure glog. Example: set debug level 6 for file viterbi.go and 3 for everything else:
	// export GLOG_LEVEL=3
	// go test -v  -run TestTrainHmmGau -vmodule=viterbi=6 > /tmp/zzz
	flag.Set("alsologtostderr", "true")
	flag.Set("log_dir", "/tmp/log")
	level := os.Getenv("GLOG_LEVEL")
	if len(level) == 0 {
		level = "0"
	}
	flag.Set("v", level)
	glog.Info("glog debug level is: ", level)

	ns = 5 // max num states in a model
	nstates[0] = 5
	nstates[1] = 4
	nobs = len(obs)
	a = narray.New(nq, ns, ns)
	b = narray.New(nq, ns, nobs)
	alpha = narray.New(nq, ns, nobs)
	beta = narray.New(nq, ns, nobs)

	// a.Set(p, q, i, j) sets the transition probability p from state i to state j in model q.
	a.Set(1, 0, 0, 1)
	a.Set(.5, 0, 1, 1)
	a.Set(.5, 0, 1, 2)
	a.Set(.3, 0, 2, 2)
	a.Set(.6, 0, 2, 3)
	a.Set(.1, 0, 2, 4)
	a.Set(.7, 0, 3, 3)
	a.Set(.3, 0, 3, 4)

	a.Set(1, 1, 0, 1)
	a.Set(.3, 1, 1, 1)
	a.Set(.2, 1, 1, 2)
	a.Set(.5, 1, 1, 3)
	a.Set(.6, 1, 2, 2)
	a.Set(.4, 1, 2, 3)

	dist := [][]float64{{.4, .5, .1}, {.3, .5, .2}} // prob dist for states in model 0 and 1

	// output probs as a function of model,state,time
	for q := 0; q < nq; q++ {
		for i := 1; i < nstates[q]-1; i++ {
			for t := 0; t < nobs; t++ {
				p := dist[q][obs[t]]
				//				k := r.Intn(len(someProbs))
				//				b.Set(someProbs[k], q, i, t)
				b.Set(p, q, i, t)
			}
		}
	}

	// same output probs but as a function of model,state,symbol
	// we need this to test the network implementation.
	outputProbs = narray.New(nq, ns, nsymb)
	for q := 0; q < nq; q++ {
		for i := 1; i < nstates[q]-1; i++ {
			for k := 0; k < nsymb; k++ {
				p := math.Log(dist[q][k])
				outputProbs.Set(p, q, i, k)
			}
		}
	}

	loga = narray.Log(loga, a.Copy())
	logb = narray.Log(logb, b.Copy())

	data := make([][]float64, len(obs), len(obs))
	for k, v := range obs {
		data[k] = []float64{float64(v)}
	}
	xobs = model.NewFloatObsSequence(data, model.SimpleLabel(""), "")

	os.Exit(m.Run())
}
Example 9
// testLogProb returns the log probability of the discrete symbol o for state s.
func (m *Net) testLogProb(s, o int) float64 {
	return m.B[s].LogProb(model.NewIntObs(o, model.SimpleLabel(""), ""))
}
Example 10
// Sample returns a Gaussian sample.
func (g *Model) Sample(r *rand.Rand) model.Obs {
	obs := model.RandNormalVector(r, g.Mean, g.StdDev)
	return model.NewFloatObs(obs, model.SimpleLabel(""))
}
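
As a usage note, the standalone sketch below (not from the source) draws a few samples from a Gaussian model using the constructor options shown in the tests above; the import paths assume the akualab/gjoa package layout, and the mean and standard-deviation values are made up for illustration.

package main

import (
	"fmt"
	"math/rand"

	"github.com/akualab/gjoa/model"
	gm "github.com/akualab/gjoa/model/gaussian"
)

func main() {
	// Deterministic source so the output is reproducible.
	r := rand.New(rand.NewSource(42))
	g := gm.NewModel(1, gm.Name("g"), gm.Mean([]float64{0}), gm.StdDev([]float64{1}))
	for i := 0; i < 3; i++ {
		// Sample returns a model.Obs; the examples above assert it to model.FloatObs.
		x := g.Sample(r).(model.FloatObs)
		fmt.Printf("sample %d: %+v\n", i, x)
	}
}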