// getTokenFilters resolves each named token filter through the registry
// cache, failing on the first name that cannot be resolved.
func getTokenFilters(tokenFilterNames []string, cache *registry.Cache) ([]analysis.TokenFilter, error) {
	tokenFilters := make([]analysis.TokenFilter, len(tokenFilterNames))
	for i, tokenFilterName := range tokenFilterNames {
		tokenFilter, err := cache.TokenFilterNamed(tokenFilterName)
		if err != nil {
			return nil, err
		}
		tokenFilters[i] = tokenFilter
	}
	return tokenFilters, nil
}
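// A minimal sketch of how getTokenFilters might be used: a hypothetical
// constructor that reads a list of filter names from its config map and
// resolves them through the cache. The "token_filters" key, the []string
// assertion, and customAnalyzerFromConfig itself are illustrative
// assumptions, not part of the code above.
func customAnalyzerFromConfig(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	names, _ := config["token_filters"].([]string) // assumed config key
	tokenFilters, err := getTokenFilters(names, cache)
	if err != nil {
		return nil, err
	}
	tokenizer, err := cache.TokenizerNamed(unicode.Name)
	if err != nil {
		return nil, err
	}
	return &analysis.Analyzer{
		Tokenizer:    tokenizer,
		TokenFilters: tokenFilters,
	}, nil
}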
// AnalyzerConstructor builds the English analyzer: unicode tokenization,
// then possessive stripping, lowercasing, English stop-word removal, and
// Porter stemming, applied in that order.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	tokenizer, err := cache.TokenizerNamed(unicode.Name)
	if err != nil {
		return nil, err
	}
	possEnFilter, err := cache.TokenFilterNamed(PossessiveName)
	if err != nil {
		return nil, err
	}
	toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
	if err != nil {
		return nil, err
	}
	stopEnFilter, err := cache.TokenFilterNamed(StopName)
	if err != nil {
		return nil, err
	}
	stemmerEnFilter, err := cache.TokenFilterNamed(porter.Name)
	if err != nil {
		return nil, err
	}
	rv := analysis.Analyzer{
		Tokenizer: tokenizer,
		TokenFilters: []analysis.TokenFilter{
			possEnFilter,
			toLowerFilter,
			stopEnFilter,
			stemmerEnFilter,
		},
	}
	return &rv, nil
}
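// Constructors like the one above are typically wired into the registry at
// package load time. A sketch of that pattern, assuming the package follows
// bleve's convention of an AnalyzerName constant (the constant and its "en"
// value are assumptions here; registry.RegisterAnalyzer is the registration
// entry point in the v1-style API this code uses):
const AnalyzerName = "en" // assumed registration name

func init() {
	// Makes the analyzer resolvable by name through a registry cache.
	registry.RegisterAnalyzer(AnalyzerName, AnalyzerConstructor)
}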
// AnalyzerConstructor builds the Italian analyzer: unicode tokenization,
// then lowercasing, elision (dropping elided articles such as "l'"),
// Italian stop-word removal, and light stemming.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	tokenizer, err := cache.TokenizerNamed(unicode.Name)
	if err != nil {
		return nil, err
	}
	elisionFilter, err := cache.TokenFilterNamed(ElisionName)
	if err != nil {
		return nil, err
	}
	toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
	if err != nil {
		return nil, err
	}
	stopItFilter, err := cache.TokenFilterNamed(StopName)
	if err != nil {
		return nil, err
	}
	stemmerItFilter, err := cache.TokenFilterNamed(LightStemmerName)
	if err != nil {
		return nil, err
	}
	rv := analysis.Analyzer{
		Tokenizer: tokenizer,
		TokenFilters: []analysis.TokenFilter{
			toLowerFilter,
			elisionFilter,
			stopItFilter,
			stemmerItFilter,
		},
	}
	return &rv, nil
}
// AnalyzerConstructor builds the Sorani Kurdish (ckb) analyzer: unicode
// tokenization, then Sorani normalization, lowercasing, stop-word removal,
// and stemming.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	unicodeTokenizer, err := cache.TokenizerNamed(unicode.Name)
	if err != nil {
		return nil, err
	}
	normCkbFilter, err := cache.TokenFilterNamed(NormalizeName)
	if err != nil {
		return nil, err
	}
	toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
	if err != nil {
		return nil, err
	}
	stopCkbFilter, err := cache.TokenFilterNamed(StopName)
	if err != nil {
		return nil, err
	}
	stemmerCkbFilter, err := cache.TokenFilterNamed(StemmerName)
	if err != nil {
		return nil, err
	}
	rv := analysis.Analyzer{
		Tokenizer: unicodeTokenizer,
		TokenFilters: []analysis.TokenFilter{
			normCkbFilter,
			toLowerFilter,
			stopCkbFilter,
			stemmerCkbFilter,
		},
	}
	return &rv, nil
}
// AnalyzerConstructor builds the Arabic analyzer: unicode tokenization,
// then lowercasing, NFKC unicode normalization, Arabic stop-word removal,
// Arabic normalization, and Arabic stemming.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	tokenizer, err := cache.TokenizerNamed(unicode.Name)
	if err != nil {
		return nil, err
	}
	toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
	if err != nil {
		return nil, err
	}
	// Constructed directly rather than resolved through the registry cache.
	normalizeFilter := unicode_normalize.MustNewUnicodeNormalizeFilter(unicode_normalize.NFKC)
	stopArFilter, err := cache.TokenFilterNamed(StopName)
	if err != nil {
		return nil, err
	}
	normalizeArFilter, err := cache.TokenFilterNamed(NormalizeName)
	if err != nil {
		return nil, err
	}
	stemmerArFilter, err := cache.TokenFilterNamed(StemmerName)
	if err != nil {
		return nil, err
	}
	rv := analysis.Analyzer{
		Tokenizer: tokenizer,
		TokenFilters: []analysis.TokenFilter{
			toLowerFilter,
			normalizeFilter,
			stopArFilter,
			normalizeArFilter,
			stemmerArFilter,
		},
	}
	return &rv, nil
}
// AnalyzerConstructor builds the web analyzer: web tokenization (which keeps
// constructs such as URLs and email addresses intact as single tokens), then
// lowercasing and English stop-word removal.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	tokenizer, err := cache.TokenizerNamed(webt.Name)
	if err != nil {
		return nil, err
	}
	toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
	if err != nil {
		return nil, err
	}
	stopEnFilter, err := cache.TokenFilterNamed(en.StopName)
	if err != nil {
		return nil, err
	}
	rv := analysis.Analyzer{
		Tokenizer: tokenizer,
		TokenFilters: []analysis.TokenFilter{
			toLowerFilter,
			stopEnFilter,
		},
	}
	return &rv, nil
}
// AnalyzerConstructor builds a bigram analyzer for CJK text: whitespace
// tokenization, NFKD unicode normalization, lowercasing, then the bigram
// filter, which pairs adjacent CJK characters into overlapping two-character
// tokens.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	whitespaceTokenizer, err := cache.TokenizerNamed(whitespace_tokenizer.Name)
	if err != nil {
		return nil, err
	}
	// Constructed directly rather than resolved through the registry cache.
	normalizeFilter := unicode_normalize.MustNewUnicodeNormalizeFilter(unicode_normalize.NFKD)
	toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
	if err != nil {
		return nil, err
	}
	bigramFilter, err := cache.TokenFilterNamed(BigramName)
	if err != nil {
		return nil, err
	}
	rv := analysis.Analyzer{
		Tokenizer: whitespaceTokenizer,
		TokenFilters: []analysis.TokenFilter{
			normalizeFilter,
			toLowerFilter,
			bigramFilter,
		},
	}
	return &rv, nil
}
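// A quick usage sketch for any of these constructors: build against a fresh
// registry cache and run Analyze over some input. registry.NewCache and
// Analyzer.Analyze are real bleve API; the sample string, the fmt printing,
// and exampleAnalyze itself are illustrative. This also assumes the packages
// providing the named tokenizer and filters have been imported, so their
// init-time registrations have already run.
func exampleAnalyze() error {
	cache := registry.NewCache()
	analyzer, err := AnalyzerConstructor(nil, cache)
	if err != nil {
		return err
	}
	// For the bigram analyzer above, CJK runs come back as overlapping pairs.
	tokens := analyzer.Analyze([]byte("こんにちは世界"))
	for _, token := range tokens {
		fmt.Printf("%s\n", token.Term)
	}
	return nil
}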
// AnalyzerConstructor builds the Persian (Farsi) analyzer: a zero-width
// non-joiner char filter applied to the raw input, unicode tokenization,
// then lowercasing, Arabic normalization, Persian normalization, and
// Persian stop-word removal.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	zFilter, err := cache.CharFilterNamed(zero_width_non_joiner.Name)
	if err != nil {
		return nil, err
	}
	unicodeTokenizer, err := cache.TokenizerNamed(unicode.Name)
	if err != nil {
		return nil, err
	}
	normArFilter, err := cache.TokenFilterNamed(ar.NormalizeName)
	if err != nil {
		return nil, err
	}
	normFaFilter, err := cache.TokenFilterNamed(NormalizeName)
	if err != nil {
		return nil, err
	}
	toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
	if err != nil {
		return nil, err
	}
	stopFaFilter, err := cache.TokenFilterNamed(StopName)
	if err != nil {
		return nil, err
	}
	rv := analysis.Analyzer{
		CharFilters: []analysis.CharFilter{
			zFilter,
		},
		Tokenizer: unicodeTokenizer,
		TokenFilters: []analysis.TokenFilter{
			toLowerFilter,
			normArFilter,
			normFaFilter,
			stopFaFilter,
		},
	}
	return &rv, nil
}
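// Design note: CharFilters run on the raw input bytes before the tokenizer,
// while TokenFilters run on the token stream after it. The zero-width
// non-joiner filter is a CharFilter rather than a TokenFilter because U+200C
// occurs inside Persian words; rewriting it before tokenization lets the
// unicode tokenizer place word boundaries correctly.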