func BenchmarkTokenizeEnglishText(b *testing.B) { tokenizer := regexp_tokenizer.NewRegexpTokenizer(whitespaceTokenizerRegexp) b.ResetTimer() for i := 0; i < b.N; i++ { tokenizer.Tokenize(sampleLargeInput) } }
func TestBoundary(t *testing.T) { tests := []struct { input []byte output analysis.TokenStream }{ { []byte("Hello World."), analysis.TokenStream{ { Start: 0, End: 5, Term: []byte("Hello"), Position: 1, Type: analysis.AlphaNumeric, }, { Start: 6, End: 11, Term: []byte("World"), Position: 2, Type: analysis.AlphaNumeric, }, }, }, { []byte("こんにちは世界"), analysis.TokenStream{ { Start: 0, End: 3, Term: []byte("こ"), Position: 1, Type: analysis.Ideographic, }, { Start: 3, End: 6, Term: []byte("ん"), Position: 2, Type: analysis.Ideographic, }, { Start: 6, End: 9, Term: []byte("に"), Position: 3, Type: analysis.Ideographic, }, { Start: 9, End: 12, Term: []byte("ち"), Position: 4, Type: analysis.Ideographic, }, { Start: 12, End: 15, Term: []byte("は"), Position: 5, Type: analysis.Ideographic, }, { Start: 15, End: 18, Term: []byte("世"), Position: 6, Type: analysis.Ideographic, }, { Start: 18, End: 21, Term: []byte("界"), Position: 7, Type: analysis.Ideographic, }, }, }, { []byte(""), analysis.TokenStream{}, }, { []byte("abc界"), analysis.TokenStream{ { Start: 0, End: 3, Term: []byte("abc"), Position: 1, Type: analysis.AlphaNumeric, }, { Start: 3, End: 6, Term: []byte("界"), Position: 2, Type: analysis.Ideographic, }, }, }, } for _, test := range tests { tokenizer := regexp_tokenizer.NewRegexpTokenizer(whitespaceTokenizerRegexp) actual := tokenizer.Tokenize(test.input) if !reflect.DeepEqual(actual, test.output) { t.Errorf("Expected %v, got %v for %s", test.output, actual, string(test.input)) } } }
	"sync"
	"testing"
	"time"

	"github.com/blevesearch/bleve/analysis"
	"github.com/blevesearch/bleve/analysis/analyzers/standard_analyzer"
	"github.com/blevesearch/bleve/analysis/tokenizers/regexp_tokenizer"
	"github.com/blevesearch/bleve/document"
	"github.com/blevesearch/bleve/index"
	"github.com/blevesearch/bleve/index/store/boltdb"
	"github.com/blevesearch/bleve/index/store/null"
	"github.com/blevesearch/bleve/registry"
)

// testAnalyzer is a minimal analyzer used by the index tests: it only
// tokenizes on word-character runs (`\w+`), with no token filters.
var testAnalyzer = &analysis.Analyzer{
	Tokenizer: regexp_tokenizer.NewRegexpTokenizer(regexp.MustCompile(`\w+`)),
}

// TestIndexOpenReopen exercises opening an upside-down index backed by
// boltdb. (The remainder of this test continues beyond this chunk.)
func TestIndexOpenReopen(t *testing.T) {
	// Clean up the on-disk test index regardless of how the test exits.
	defer func() {
		err := DestroyTest()
		if err != nil {
			t.Fatal(err)
		}
	}()
	analysisQueue := index.NewAnalysisQueue(1)
	idx, err := NewUpsideDownCouch(boltdb.Name, boltTestConfig, analysisQueue)
	if err != nil {
		t.Fatal(err)
	}
// TokenizerConstructor builds the whitespace tokenizer for registration
// with the analysis registry. The config and cache arguments are unused;
// the returned error is always nil.
func TokenizerConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.Tokenizer, error) {
	tokenizer := regexp_tokenizer.NewRegexpTokenizer(whitespaceTokenizerRegexp)
	return tokenizer, nil
}