Example 1
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	// Look up the standard Unicode (segmenter-based) tokenizer from the registry cache.
	tokenizer, err := cache.TokenizerNamed(unicode.Name)
	if err != nil {
		return nil, err
	}
	// Lowercase filter, applied before the language-specific filters.
	toLowerFilter, err := cache.TokenFilterNamed(lowercase.Name)
	if err != nil {
		return nil, err
	}
	// Unicode NFKC normalization; constructed directly rather than looked up in the cache.
	normalizeFilter := unicodenorm.MustNewUnicodeNormalizeFilter(unicodenorm.NFKC)
	// Arabic stop-word removal.
	stopArFilter, err := cache.TokenFilterNamed(StopName)
	if err != nil {
		return nil, err
	}
	// Arabic-specific normalization.
	normalizeArFilter, err := cache.TokenFilterNamed(NormalizeName)
	if err != nil {
		return nil, err
	}
	// Arabic stemmer.
	stemmerArFilter, err := cache.TokenFilterNamed(StemmerName)
	if err != nil {
		return nil, err
	}
	// Assemble the analyzer: tokenize first, then apply the token filters in order.
	rv := analysis.Analyzer{
		Tokenizer: tokenizer,
		TokenFilters: []analysis.TokenFilter{
			toLowerFilter,
			normalizeFilter,
			stopArFilter,
			normalizeArFilter,
			stemmerArFilter,
		},
	}
	return &rv, nil
}
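
For context, constructors like this are normally registered under the analyzer's name in the package's init function (via registry.RegisterAnalyzer) and then resolved by name through a registry cache. The sketch below is not part of the original snippet: exampleAnalyze is a hypothetical helper, the input string is a placeholder, imports are omitted as in the snippets above, and it assumes the bleve API generation implied by the snippet, where analysis.Analyzer is a struct with an Analyze method.

func exampleAnalyze() {
	// Build a fresh registry cache; the tokenizer and token filters referenced by
	// name are expected to have been registered by their packages' init functions.
	cache := registry.NewCache()
	analyzer, err := AnalyzerConstructor(nil, cache)
	if err != nil {
		panic(err)
	}
	// Run the tokenizer plus filter chain and print the resulting terms.
	tokens := analyzer.Analyze([]byte("placeholder input"))
	for _, token := range tokens {
		fmt.Println(string(token.Term))
	}
}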
Example 2
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	// Look up the kagome morphological tokenizer (Japanese segmentation) from the registry cache.
	kagomeTokenizer, err := cache.TokenizerNamed(TokenizerName)
	if err != nil {
		return nil, err
	}
	// Unicode NFKD normalization, applied to every token the tokenizer produces.
	normalizeFilter := unicodenorm.MustNewUnicodeNormalizeFilter(unicodenorm.NFKD)
	// Assemble the analyzer: kagome tokenization followed by NFKD normalization.
	rv := analysis.Analyzer{
		Tokenizer: kagomeTokenizer,
		TokenFilters: []analysis.TokenFilter{
			normalizeFilter,
		},
	}
	return &rv, nil
}
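
The analyzer assembled here is usually consumed indirectly: the package registers the constructor under a name, and an index mapping refers to that name, letting bleve resolve the constructor through the registry at indexing time. The sketch below is hypothetical and not from the original snippet: exampleMapping is an assumed helper, the analyzer name "ja" is an assumption about how this package registers itself, and imports are omitted as in the snippets above.

func exampleMapping() error {
	indexMapping := bleve.NewIndexMapping()
	// The name "ja" is an assumption about the name this analyzer is registered under.
	indexMapping.DefaultAnalyzer = "ja"
	// Validate fails if no analyzer can be built under that name.
	return indexMapping.Validate()
}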