func TestTermVectorMisc(t *testing.T) { positions := []int32{0, 3} startOffsets := []int32{0, 20} endOffsets := []int32{2, 22} tv := NewTermVector("content", "red yellow green red blue", positions, startOffsets, endOffsets) if got := tv.GetPositions(); !reflect.DeepEqual(got, positions) { t.Errorf("GetPositions: %v", got) } if got := tv.GetStartOffsets(); !reflect.DeepEqual(got, startOffsets) { t.Errorf("GetStartOffsets: %v", got) } if got := tv.GetEndOffsets(); !reflect.DeepEqual(got, endOffsets) { t.Errorf("GetEndOffsets: %v", got) } folder := NewRAMFolder("") out, _ := folder.OpenOut("dump") tv.serialize(out) out.Close() in, _ := folder.OpenIn("dump") dupe := clownfish.GetClass(tv).MakeObj().(TermVector).deserialize(in) if !tv.Equals(dupe) { t.Errorf("Unsuccessful serialization round trip") } }
func TestDocVectorMisc(t *testing.T) { schema := NewSchema() spec := NewFullTextType(NewStandardTokenizer()) spec.SetHighlightable(true) schema.SpecField("content", spec) folder := NewRAMFolder("") indexer, _ := OpenIndexer(&OpenIndexerArgs{Index: folder, Schema: schema, Create: true}) indexer.AddDoc(&testDoc{Content: "foo bar baz"}) indexer.Commit() searcher, _ := OpenIndexSearcher(folder) dv := searcher.fetchDocVec(1) fieldBuf := dv.fieldBuf("content") if fieldBuf == nil { t.Errorf("fieldBuf returned nil") } dv.addFieldBuf("content", fieldBuf) if got := dv.termVector("content", "bar"); got == nil { t.Errorf("termVector returned nil") } out, _ := folder.OpenOut("dump") dv.serialize(out) out.Close() in, _ := folder.OpenIn("dump") dupe := clownfish.GetClass(dv).MakeObj().(DocVector).deserialize(in) in.Close() if _, ok := dupe.(DocVector); !ok { t.Errorf("serialize/deserialize") } }
func TestTopDocsBasics(t *testing.T) { matchDocs := []MatchDoc{ NewMatchDoc(42, 2.0, nil), NewMatchDoc(100, 3.0, nil), } td := NewTopDocs(matchDocs, 50) td.setTotalHits(20) if totalHits := td.getTotalHits(); totalHits != 20 { t.Errorf("Expected 20 total hits, got %d", totalHits) } td.SetMatchDocs(matchDocs) fetched := td.GetMatchDocs() if docID := fetched[0].getDocID(); docID != 42 { t.Errorf("Set/Get MatchDocs expected 42, got %d", docID) } folder := NewRAMFolder("") outstream, _ := folder.OpenOut("foo") td.serialize(outstream) outstream.Close() inStream, _ := folder.OpenIn("foo") dupe := clownfish.GetClass(td).MakeObj().(TopDocs).deserialize(inStream) if dupe.getTotalHits() != td.getTotalHits() { t.Errorf("Failed round-trip serializetion of TopDocs") } }
func checkQueryDumpLoad(t *testing.T, query Query) { dupe := clownfish.GetClass(query).MakeObj().(Query) dupe = dupe.Load(query.Dump()).(Query) if !query.Equals(dupe) { t.Errorf("Unsuccessful Dump/Load round trip -- expected '%v', got '%v'", query.ToString(), dupe.ToString()) } }
// checkdocDumpLoad would verify a Doc dump/load round trip, but the
// Dump/Load implementations are still TODO, so the body is skipped
// unconditionally.  The code after the Skip is deliberately kept as a
// template to re-enable once Dump/Load land; it never executes.
func checkdocDumpLoad(t *testing.T, doc Doc) {
	t.Skip("Dump/Load are TODO")
	return // unreachable: t.Skip stops the test goroutine
	dupe := clownfish.GetClass(doc).MakeObj().(Doc)
	dupe = dupe.load(doc.dump()).(Doc)
	if !doc.Equals(dupe) {
		t.Errorf("Unsuccessful dump/load round trip -- expected '%v', got '%v'", doc.ToString(), dupe.ToString())
	}
}
func (d *DocReaderIMP) ReadDoc(docID int32, doc interface{}) error { self := (*C.lucy_DocReader)(clownfish.Unwrap(d, "d")) class := clownfish.GetClass(d) classC := ((*C.cfish_Class)(clownfish.Unwrap(class, "class"))) if classC == C.LUCY_DEFAULTDOCREADER { return doReadDocData((*C.lucy_DefaultDocReader)(self), docID, doc) } else if classC == C.LUCY_POLYDOCREADER { return readDocPolyDR((*C.lucy_PolyDocReader)(self), docID, doc) } else { panic(clownfish.NewErr(fmt.Sprintf("Unexpected type: %s", class.GetName))) } }
func checkDocSerialize(t *testing.T, doc Doc) { folder := NewRAMFolder("") outStream, _ := folder.OpenOut("foo") doc.serialize(outStream) outStream.Close() inStream, _ := folder.OpenIn("foo") dupe := clownfish.GetClass(doc).MakeObj().(Doc).deserialize(inStream) if !doc.Equals(dupe) { t.Errorf("Unsuccessful serialization round trip -- expected '%v', got '%v'", doc.ToString(), dupe.ToString()) } }
func TestMatchDocSerialization(t *testing.T) { values := []interface{}{"foo", int64(42)} matchDoc := NewMatchDoc(100, 1.5, values) folder := NewRAMFolder("") outstream, _ := folder.OpenOut("foo") matchDoc.serialize(outstream) outstream.Close() inStream, _ := folder.OpenIn("foo") dupe := clownfish.GetClass(matchDoc).MakeObj().(MatchDoc).deserialize(inStream) if got := dupe.getValues(); !reflect.DeepEqual(got, values) { t.Errorf("Failed round-trip serializetion of MatchDoc") } }
func TestSimilarityRoundTrip(t *testing.T) { sim := NewSimilarity() dupe := sim.load(sim.dump()) if !sim.Equals(dupe) { t.Errorf("Dump/Load round-trip") } folder := NewRAMFolder("") out, _ := folder.OpenOut("dump") sim.serialize(out) out.Close() in, _ := folder.OpenIn("dump") dupe = clownfish.GetClass(sim).MakeObj().(Similarity).deserialize(in) if !sim.Equals(dupe) { t.Errorf("serialize/deserialize round-trip") } }
func TestSortSpecBasics(t *testing.T) { folder := NewRAMFolder("") schema := NewSchema() fieldType := NewFullTextType(NewStandardTokenizer()) fieldType.SetSortable(true) schema.SpecField("content", fieldType) args := &OpenIndexerArgs{Index: folder, Schema: schema, Create: true} indexer, err := OpenIndexer(args) if err != nil { panic(err) } for _, fieldVal := range []string{"a b", "a a"} { indexer.AddDoc(&simpleTestDoc{fieldVal}) } indexer.Commit() rules := []SortRule{ NewFieldSortRule("content", false), } sortSpec := NewSortSpec(rules) searcher, _ := OpenIndexSearcher(folder) hits, _ := searcher.Hits("a", 0, 1, sortSpec) var doc simpleTestDoc hits.Next(&doc) if doc.Content != "a a" { t.Error("Sort by field value") } outstream, _ := folder.OpenOut("foo") sortSpec.serialize(outstream) outstream.Close() inStream, _ := folder.OpenIn("foo") dupe := clownfish.GetClass(sortSpec).MakeObj().(SortSpec).deserialize(inStream) if len(dupe.GetRules()) != len(rules) { t.Errorf("Failed round-trip serializetion of SortSpec") } }