// BuildFrom renders the FROM clause from the tables registered with From().
func (b *SqlBuilder) BuildFrom() string {
	sql := " FROM "
	idx := 0
	for table := range b.from {
		log.Println(table)
		if idx > 0 {
			sql += ", "
		}
		sql += table
		idx++
	}
	log.Println(sql)
	return sql
}
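// A minimal sketch (not the project's actual definition) of the SqlBuilder
// state that BuildFrom and the query-building code below appear to assume:
// "from" is iterated as a map of table names, the Where helpers accumulate
// conditions, and "args" holds the arguments later passed to db.Query. The
// field types are assumptions for illustration only.
//
//	type SqlBuilder struct {
//		from  map[string]bool // table names; keys iterated by BuildFrom
//		where []string        // accumulated WHERE conditions
//		args  []interface{}   // positional arguments bound by WhereEquals, etc.
//	}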
// FindNetflow returns netflow events matching the given query options. When
// a sort field is provided the results are wrapped in a top-hits
// aggregation; otherwise a plain sized query is used.
func (s *EventService) FindNetflow(options core.EventQueryOptions, sortBy string, order string) (interface{}, error) {
	size := int64(10)
	if options.Size > 0 {
		size = options.Size
	}

	if order == "" {
		order = "desc"
	}

	query := NewEventQuery()
	query.AddFilter(TermQuery("event_type", "netflow"))

	if options.TimeRange != "" {
		query.AddTimeRangeFilter(options.TimeRange)
	}

	if options.QueryString != "" {
		query.AddFilter(QueryString(options.QueryString))
	}

	if sortBy != "" {
		query.Aggs["agg"] = TopHitsAgg(sortBy, order, size)
	} else {
		query.Size = size
	}

	log.Println(util.ToJsonPretty(query))

	response, err := s.es.Search(query)
	if err != nil {
		return nil, err
	}

	// Unwrap response.
	hits := response.Aggregations.GetMap("agg").GetMap("hits").Get("hits")

	return map[string]interface{}{
		"data": hits,
	}, nil
}
// Search runs a query against the configured event search index.
func (es *ElasticSearch) Search(query interface{}) (*SearchResponse, error) {
	if es.keyword == "" {
		log.Warning("Search keyword not known, trying again.")
		es.InitKeyword()
	}
	path := fmt.Sprintf("%s/_search", es.EventSearchIndex)
	response, err := es.HttpClient.PostJson(path, query)
	if err != nil {
		return nil, errors.WithStack(&DatastoreError{
			Message: "Failed to connect to Elastic Search",
			Cause:   err,
		})
	}
	result := SearchResponse{}
	if err := es.Decode(response, &result); err != nil {
		log.Println("Failed to decode response...")
		return nil, err
	}
	return &result, nil
}
// SubmitHandler reads a stream of JSON encoded events from the request body
// and returns the number of events decoded, or the decode error on failure.
func SubmitHandler(appContext AppContext, r *http.Request) interface{} {
	count := uint64(0)

	decoder := json.NewDecoder(r.Body)
	decoder.UseNumber()

	for {
		var event map[string]interface{}
		err := decoder.Decode(&event)
		if err != nil {
			if err == io.EOF {
				break
			}
			log.Println(err)
			return err
		}
		count++
	}

	return count
}
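// A hedged usage sketch, not part of the project: it shows SubmitHandler
// consuming a stream of JSON objects from a request body. The URL path and
// the zero-value AppContext are assumptions for illustration only; the
// handler itself only reads r.Body. Assumes "net/http/httptest" and
// "strings" are imported.
func exampleSubmit() {
	body := strings.NewReader(`{"event_type":"alert"}` + "\n" +
		`{"event_type":"netflow"}`)
	r := httptest.NewRequest("POST", "/submit", body)
	result := SubmitHandler(AppContext{}, r)
	log.Println(result) // 2 on success, or an error value on a decode failure
}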
// RemoveTagsFromAlertGroup removes the given tags from all alerts matching
// the provided parameters.
func (s *EventService) RemoveTagsFromAlertGroup(p core.AlertGroupQueryParams, tags []string) error {
	filter := []interface{}{
		ExistsQuery("event_type"),
		KeywordTermQuery("event_type", "alert", s.es.keyword),
		RangeQuery{
			Field: "timestamp",
			Gte:   p.MinTimestamp,
			Lte:   p.MaxTimestamp,
		},
		KeywordTermQuery("src_ip", p.SrcIP, s.es.keyword),
		KeywordTermQuery("dest_ip", p.DstIP, s.es.keyword),
		TermQuery("alert.signature_id", p.SignatureID),
	}

	for _, tag := range tags {
		filter = append(filter, TermQuery("tags", tag))
	}

	query := m{
		"query": m{
			"bool": m{
				"filter": filter,
			},
		},
		"_source": "tags",
		"sort": l{
			"_doc",
		},
		"size": 10000,
	}

	log.Println(util.ToJson(query))

	searchResponse, err := s.es.SearchScroll(query, "1m")
	if err != nil {
		log.Error("Failed to initialize scroll: %v", err)
		return err
	}

	scrollID := searchResponse.ScrollId

	for {
		log.Debug("Search response total: %d; hits: %d",
			searchResponse.Hits.Total, len(searchResponse.Hits.Hits))

		if len(searchResponse.Hits.Hits) == 0 {
			break
		}

		// We do this in a retry loop as some documents may fail to be
		// updated. Most likely rejected due to max thread count or
		// something.
		maxRetries := 5
		retries := 0
		for {
			retry, err := bulkUpdateTags(s.es, searchResponse.Hits.Hits,
				nil, tags)
			if err != nil {
				log.Error("bulkUpdateTags failed: %v", err)
				return err
			}
			if !retry {
				break
			}
			retries++
			if retries > maxRetries {
				log.Warning("Errors occurred while removing tags, not all events may have been updated.")
				break
			}
		}

		// Get the next set of events to update.
		searchResponse, err = s.es.Scroll(scrollID, "1m")
		if err != nil {
			log.Error("Failed to fetch from scroll: %v", err)
			return err
		}
	}

	response, err := s.es.DeleteScroll(scrollID)
	if err != nil {
		log.Error("Failed to delete scroll id: %v", err)
	} else {
		// Drain the response body so the connection can be reused.
		io.Copy(ioutil.Discard, response.Body)
	}

	s.es.Refresh()

	return nil
}
// EventQuery returns events from the SQLite datastore matching the given
// query options.
func (s *DataStore) EventQuery(options core.EventQueryOptions) (interface{}, error) {
	size := int64(500)
	if options.Size > 0 {
		size = options.Size
	}

	sql := `select events.id, events.timestamp, events.source`

	sqlBuilder := SqlBuilder{}
	sqlBuilder.From("events")

	if options.EventType != "" {
		sqlBuilder.WhereEquals("json_extract(events.source, '$.event_type')", options.EventType)
	}

	fts := []string{}

	if options.QueryString != "" {
		words, _ := shellwords.Parse(options.QueryString)
		for _, word := range words {
			log.Debug("Word: %s", word)
			parts := strings.SplitN(word, "=", 2)
			if len(parts) == 2 {
				field := parts[0]
				valuestr := parts[1]

				var arg interface{}
				valueint, err := strconv.ParseInt(valuestr, 0, 64)
				if err == nil {
					arg = valueint
				} else {
					arg = valuestr
				}
				sqlBuilder.WhereEquals(
					fmt.Sprintf(" json_extract(events.source, '$.%s')", field),
					arg)
			} else {
				fts = append(fts, fmt.Sprintf("\"%s\"", parts[0]))
			}
		}
	}

	if options.MaxTs != "" {
		maxTs, err := time.Parse("2006-01-02T15:04:05.999999", options.MaxTs)
		if err != nil {
			return nil, fmt.Errorf("Bad timestamp: %s", options.MaxTs)
		}
		sqlBuilder.WhereLte("datetime(events.timestamp)", maxTs)
	}

	if options.MinTs != "" {
		minTs, err := time.Parse("2006-01-02T15:04:05.999999", options.MinTs)
		if err != nil {
			return nil, fmt.Errorf("Bad timestamp: %s", options.MinTs)
		}
		sqlBuilder.WhereGte("datetime(events.timestamp)", minTs)
	}

	if len(fts) > 0 {
		sqlBuilder.From("events_fts")
		sqlBuilder.Where("events.id == events_fts.id")
		sqlBuilder.Where(fmt.Sprintf("events_fts MATCH '%s'", strings.Join(fts, " AND ")))
	}

	sql += sqlBuilder.BuildFrom()

	if sqlBuilder.HasWhere() {
		sql += sqlBuilder.BuildWhere()
	}

	sql += " ORDER BY timestamp DESC"
	sql += fmt.Sprintf(" LIMIT %d", size)

	log.Println(sql)

	rows, err := s.db.Query(sql, sqlBuilder.args...)
	if err != nil {
		return nil, err
	}

	events := []interface{}{}

	for rows.Next() {
		var rawSource []byte
		var id uuid.UUID
		var timestamp string
		err = rows.Scan(&id, &timestamp, &rawSource)
		if err != nil {
			return nil, err
		}

		source := map[string]interface{}{}
		decoder := json.NewDecoder(bytes.NewReader(rawSource))
		decoder.UseNumber()
		err = decoder.Decode(&source)
		if err != nil {
			return nil, err
		}

		source["@timestamp"] = timestamp

		events = append(events, map[string]interface{}{
			"_id":     id.String(),
			"_source": source,
		})
	}

	return map[string]interface{}{
		"data": events,
	}, nil
}
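// A hedged usage sketch of the query string handling above, not taken from
// the project's documentation: words of the form field=value become
// json_extract() equality filters, while bare words are collected into a
// full-text MATCH expression against the events_fts table. The field, value,
// and "dataStore" variable below are examples only.
//
//	options := core.EventQueryOptions{
//		QueryString: "event_type=alert 10.16.1.10",
//		Size:        100,
//	}
//	// "event_type=alert" -> json_extract(events.source, '$.event_type') = ?
//	// "10.16.1.10"       -> events_fts MATCH '"10.16.1.10"'
//	result, err := dataStore.EventQuery(options)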