Example #1
// getIndexMapping builds the index mapping (data model) used for message documents.
func getIndexMapping() *bv.IndexMapping {
	indexMapping := bv.NewIndexMapping()
	messageMapping := bv.NewDocumentStaticMapping()

	// Exact-match field: the keyword analyzer indexes the whole value as a single term,
	// e.g. «hostname.example.org» matches only «hostname.example.org»
	mappingKeyword := getTextFieldMapping()
	mappingKeyword.Analyzer = bvKeywordAnalyzer.Name

	// Tokenized field: the standard analyzer splits the value into terms,
	// e.g. «hostname example org» matches «hostname», «example» or «org»
	mappingText := getTextFieldMapping()
	mappingText.Analyzer = bvStandardAnalyzer.Name

	messageMapping.AddFieldMappingsAt("version", mappingKeyword)
	messageMapping.AddFieldMappingsAt("host", mappingKeyword)
	messageMapping.AddFieldMappingsAt("short_message", mappingText)
	messageMapping.AddFieldMappingsAt("full_message", mappingText)
	messageMapping.AddFieldMappingsAt("timestamp", bv.NewDateTimeFieldMapping())
	messageMapping.AddFieldMappingsAt("level", bv.NewNumericFieldMapping())
	messageMapping.AddFieldMappingsAt("facility", mappingKeyword)
	messageMapping.AddSubDocumentMapping("extra", bv.NewDocumentMapping())

	indexMapping.AddDocumentMapping(DOC_TYPE, messageMapping)

	return indexMapping
}
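A minimal usage sketch for this mapping, under two assumptions: bv aliases the bleve package itself (so bv.New is available), and the mapping keeps bleve's default "_type" type field. The sample message values are illustrative only.

func indexSampleMessage(path string) error {
	// Create an on-disk index that uses the mapping built above.
	index, err := bv.New(path, getIndexMapping())
	if err != nil {
		return err
	}
	defer index.Close()

	// "_type" (bleve's default type field, assumed unchanged here) routes the
	// document to the mapping registered under DOC_TYPE. "host" is indexed as a
	// single keyword term; "short_message" is tokenized by the standard analyzer.
	msg := map[string]interface{}{
		"_type":         DOC_TYPE,
		"version":       "1.1",
		"host":          "hostname.example.org",
		"short_message": "disk usage above threshold",
		"level":         4,
	}
	return index.Index("msg-1", msg)
}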
Example #2
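// NewOfferIndex wipes any existing index under dir and rebuilds it with
// custom French analysis for offer documents.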
func NewOfferIndex(dir string) (bleve.Index, error) {
	err := os.RemoveAll(dir)
	if err != nil && !os.IsNotExist(err) {
		return nil, err
	}

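	// Join the exception terms into one case-insensitive alternation so the
	// tokenizer registered below emits each of them as a single token.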
	parts := []string{}
	for _, exc := range indexExceptions {
		parts = append(parts, regexp.QuoteMeta(exc))
	}
	pattern := strings.Join(parts, "|")
	pattern = "(?i)(?:" + pattern + ")"

	m := bleve.NewIndexMapping()
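
	// Register a tokenizer that keeps spans matching the exception pattern intact
	// and hands everything else to the standard unicode tokenizer.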
	apecTokenizer := "apec"
	err = m.AddCustomTokenizer(apecTokenizer, map[string]interface{}{
		"type":       exception.Name,
		"exceptions": []string{pattern},
		"tokenizer":  bleveuni.Name,
	})
	if err != nil {
		return nil, err
	}

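	// Register the project-specific stop words as a named token map.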
	apecTokens := "apec_tokens"
	err = m.AddCustomTokenMap(apecTokens, map[string]interface{}{
		"type":   tokenmap.Name,
		"tokens": stopWords,
	})
	if err != nil {
		return nil, err
	}

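	// Stop filter that drops any token found in the token map above.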
	apecStop := "apec_stop"
	err = m.AddCustomTokenFilter(apecStop, map[string]interface{}{
		"type":           stop.Name,
		"stop_token_map": apecTokens,
	})
	if err != nil {
		return nil, err
	}

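	// Token filters are applied in order: lowercase, French elision and stop
	// words, light French stemming, then the custom stop list.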
	frTokens := []string{
		lowercase.Name,
		fr.ElisionName,
		fr.StopName,
		fr.LightStemmerName,
		apecStop,
	}
	// Analyzer definitions; frDef avoids shadowing the imported fr language package.
	frDef := map[string]interface{}{
		"type":          custom.Name,
		"tokenizer":     apecTokenizer,
		"token_filters": frTokens,
	}
	frHtml := map[string]interface{}{
		"type": custom.Name,
		"char_filters": []string{
			html.Name,
		},
		"tokenizer":     apecTokenizer,
		"token_filters": frTokens,
	}
	err = m.AddCustomAnalyzer("fr", frDef)
	if err != nil {
		return nil, fmt.Errorf("failed to register analyzer fr: %s", err)
	}
	err = m.AddCustomAnalyzer("fr_html", frHtml)
	if err != nil {
		return nil, fmt.Errorf("failed to register analyzer fr_html: %s", err)
	}

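	// Text field mappings: values are analyzed but not stored, and term vectors
	// are disabled to keep the index small; only textAll is left in the composite
	// _all field.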
	htmlFr := bleve.NewTextFieldMapping()
	htmlFr.Store = false
	htmlFr.IncludeInAll = false
	htmlFr.IncludeTermVectors = false
	htmlFr.Analyzer = "fr_html"

	textFr := bleve.NewTextFieldMapping()
	textFr.Store = false
	textFr.IncludeInAll = false
	textFr.IncludeTermVectors = false
	textFr.Analyzer = "fr"

	textAll := bleve.NewTextFieldMapping()
	textAll.Store = false
	textAll.IncludeInAll = true
	textAll.IncludeTermVectors = false

	date := bleve.NewDateTimeFieldMapping()
	date.Index = false
	date.Store = true
	date.IncludeInAll = false
	date.IncludeTermVectors = false

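	// Static mapping: only the explicitly mapped fields are indexed; unknown
	// fields in the document are ignored.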
	offer := bleve.NewDocumentStaticMapping()
	offer.Dynamic = false
	offer.AddFieldMappingsAt("html", htmlFr)
	offer.AddFieldMappingsAt("title", textFr)
	offer.AddFieldMappingsAt("date", date)

	m.AddDocumentMapping("offer", offer)
	m.DefaultMapping = offer

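	// Build the index with the upsidedown scheme on a BoltDB store; "nosync"
	// trades durability for faster bulk indexing.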
	index, err := bleve.NewUsing(dir, m, upsidedown.Name, boltdb.Name,
		map[string]interface{}{
			"nosync": true,
		})
	if err != nil {
		return nil, err
	}
	return index, nil
}
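A hedged caller sketch for NewOfferIndex. The offer payload here is a plain map using the field names from the mapping above (the real project may index a struct), and the query simply shows that match queries on "title" go through the custom "fr" analyzer.

func searchOffers(dir string) error {
	index, err := NewOfferIndex(dir)
	if err != nil {
		return err
	}
	defer index.Close()

	// Documents with no type field use DefaultMapping, which is the offer mapping.
	offer := map[string]interface{}{
		"title": "Développeur Go expérimenté",
		"html":  "<p>Poste basé à Paris</p>",
		"date":  "2017-05-01T00:00:00Z",
	}
	if err := index.Index("offer-1", offer); err != nil {
		return err
	}

	// Match queries analyze the query text with the field's analyzer, so the term
	// below is lowercased, stop-filtered and stemmed like the indexed title.
	q := bleve.NewMatchQuery("développeur")
	q.SetField("title")
	res, err := index.Search(bleve.NewSearchRequest(q))
	if err != nil {
		return err
	}
	fmt.Println(res.Total)
	return nil
}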