func BenchmarkParsePointsTagsSorted2(b *testing.B) {
	line := `cpu,host=serverA,region=us-west value=1i 1000000000`
	for i := 0; i < b.N; i++ {
		models.ParsePoints([]byte(line))
		b.SetBytes(int64(len(line)))
	}
}
func BenchmarkParsePointsTagsSorted10(b *testing.B) {
	line := `cpu,env=prod,host=serverA,region=us-west,tag1=value1,tag2=value2,tag3=value3,tag4=value4,tag5=value5,target=servers,zone=1c value=1i 1000000000`
	for i := 0; i < b.N; i++ {
		models.ParsePoints([]byte(line))
		b.SetBytes(int64(len(line)))
	}
}
func TestParsePointToString(t *testing.T) {
	line := `cpu,host=serverA,region=us-east bool=false,float=11,float2=12.123,int=10i,str="string val" 1000000000`
	pts, err := models.ParsePoints([]byte(line))
	if err != nil {
		t.Fatalf(`ParsePoints() failed. got %s`, err)
	}
	if exp := 1; len(pts) != exp {
		t.Errorf("ParsePoint() len mismatch: got %v, exp %v", len(pts), exp)
	}
	pt := pts[0]

	got := pt.String()
	if line != got {
		t.Errorf("ParsePoint() to string mismatch:\n got %v\n exp %v", got, line)
	}

	pt = models.MustNewPoint("cpu",
		models.Tags{"host": "serverA", "region": "us-east"},
		models.Fields{"int": 10, "float": float64(11.0), "float2": float64(12.123), "bool": false, "str": "string val"},
		time.Unix(1, 0))
	got = pt.String()
	if line != got {
		t.Errorf("NewPoint() to string mismatch:\n got %v\n exp %v", got, line)
	}
}
func BenchmarkParsePointNoTags(b *testing.B) {
	line := `cpu value=1i 1000000000`
	for i := 0; i < b.N; i++ {
		models.ParsePoints([]byte(line))
		b.SetBytes(int64(len(line)))
	}
}
// parser() reads all incoming messages from the consumer, and parses them
// into influxdb metric points.
func (k *Kafka) parser() {
	for {
		select {
		case <-k.done:
			return
		case err := <-k.errs:
			log.Printf("Kafka Consumer Error: %s\n", err.Error())
		case msg := <-k.in:
			points, err := models.ParsePoints(msg.Value)
			if err != nil {
				log.Printf("Could not parse kafka message: %s, error: %s",
					string(msg.Value), err.Error())
			}

			for _, point := range points {
				select {
				case k.pointChan <- point:
					continue
				default:
					log.Printf("Kafka Consumer buffer is full, dropping a point." +
						" You may want to increase the point_buffer setting")
				}
			}

			if !k.doNotCommitMsgs {
				// TODO(cam) this locking can be removed if this PR gets merged:
				// https://github.com/wvanbergen/kafka/pull/84
				k.Lock()
				k.Consumer.CommitUpto(msg)
				k.Unlock()
			}
		}
	}
}
func (s *Service) processPackets() {
	defer s.wg.Done()
	for p := range s.packets {
		points, err := models.ParsePoints(p)
		if err != nil {
			s.statMap.Add(statPointsParseFail, 1)
			s.Logger.Printf("E! Failed to parse points: %s", err)
			continue
		}

		if err := s.PointsWriter.WritePoints(
			s.config.Database,
			s.config.RetentionPolicy,
			models.ConsistencyLevelAll,
			points,
		); err == nil {
			s.statMap.Add(statPointsTransmitted, int64(len(points)))
		} else {
			s.Logger.Printf("E! failed to write points to database %q: %s",
				s.config.Database, err)
			s.statMap.Add(statTransmitFail, 1)
		}

		s.statMap.Add(statPointsReceived, int64(len(points)))
	}
}
func BenchmarkParsePointsTagsUnSorted5(b *testing.B) {
	line := `cpu,region=us-west,host=serverA,env=prod,target=servers,zone=1c value=1i 1000000000`
	for i := 0; i < b.N; i++ {
		pt, _ := models.ParsePoints([]byte(line))
		b.SetBytes(int64(len(line)))
		pt[0].Key()
	}
}
func unmarshalWrite(b []byte) (uint64, []models.Point, error) {
	if len(b) < 8 {
		return 0, nil, fmt.Errorf("too short: len = %d", len(b))
	}
	ownerID := binary.BigEndian.Uint64(b[:8])
	points, err := models.ParsePoints(b[8:])
	return ownerID, points, err
}
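For context, a minimal sketch of the inverse framing that unmarshalWrite expects: an 8-byte big-endian owner ID followed by newline-separated line protocol. The marshalWrite function below is hypothetical (not taken from the source) and relies only on a point's String() method producing its line-protocol form.

// marshalWrite is a hypothetical counterpart to unmarshalWrite (not from the
// source): it prefixes the batch with an 8-byte big-endian owner ID and then
// appends each point in line-protocol form, one per line, which is exactly
// the layout unmarshalWrite decodes.
func marshalWrite(ownerID uint64, points []models.Point) []byte {
	b := make([]byte, 8, 8+len(points)*32) // rough capacity guess
	binary.BigEndian.PutUint64(b, ownerID)
	for _, p := range points {
		b = append(b, []byte(p.String())...)
		b = append(b, '\n')
	}
	return b
}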
// ParseMetrics returns a slice of Metrics from a text representation of a
// metric (in line-protocol format), with each metric separated by newlines.
// If any metrics fail to parse, a non-nil error will be returned in addition
// to the metrics that parsed successfully.
func ParseMetrics(buf []byte) ([]Metric, error) {
	points, err := models.ParsePoints(buf)
	metrics := make([]Metric, len(points))
	for i, point := range points {
		// Ignore error here because it's impossible that a model.Point
		// wouldn't parse into client.Point properly
		metrics[i], _ = NewMetric(point.Name(), point.Tags(), point.Fields(), point.Time())
	}
	return metrics, err
}
// ParseMetrics returns a slice of Metrics from a text representation of a
// metric (in line-protocol format), with each metric separated by newlines.
// If any metrics fail to parse, a non-nil error will be returned in addition
// to the metrics that parsed successfully.
func ParseMetrics(buf []byte) ([]Metric, error) {
	// parse even if the buffer begins with a newline
	buf = bytes.TrimPrefix(buf, []byte("\n"))
	points, err := models.ParsePoints(buf)
	metrics := make([]Metric, len(points))
	for i, point := range points {
		// Ignore error here because it's impossible that a model.Point
		// wouldn't parse into client.Point properly
		metrics[i], _ = NewMetric(point.Name(), point.Tags(), point.Fields(), point.Time())
	}
	return metrics, err
}
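A minimal usage sketch for ParseMetrics; the two-line buffer and the Example function itself are illustrative, not from the source. A non-nil error reports lines that failed to parse while the returned slice still holds the metrics that parsed.

// ExampleParseMetrics is an illustrative sketch (not from the source): it
// feeds a two-line batch to ParseMetrics and reports how much of it parsed.
func ExampleParseMetrics() {
	buf := []byte("cpu,host=serverA value=1i 1000000000\nmem,host=serverA used=42i 1000000000\n")
	metrics, err := ParseMetrics(buf)
	if err != nil {
		// A non-nil error means some lines failed; metrics still holds the
		// points that parsed successfully.
		log.Printf("partial parse: %s", err)
	}
	fmt.Printf("parsed %d metrics\n", len(metrics))
}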
func TestParsePointKeyUnsorted(t *testing.T) {
	pts, err := models.ParsePoints([]byte("cpu,last=1,first=2 value=1i"))
	if err != nil {
		t.Fatalf(`ParsePoints() failed. got %s`, err)
	}
	if exp := 1; len(pts) != exp {
		t.Errorf("ParsePoint() len mismatch: got %v, exp %v", len(pts), exp)
	}
	pt := pts[0]

	if exp := "cpu,first=2,last=1"; string(pt.Key()) != exp {
		t.Errorf("ParsePoint key not sorted. got %v, exp %v", string(pt.Key()), exp)
	}
}
// Parse returns a slice of Metrics from a text representation of a
// metric (in line-protocol format), with each metric separated by newlines.
// If any metrics fail to parse, a non-nil error will be returned in addition
// to the metrics that parsed successfully.
func (p *InfluxParser) Parse(buf []byte) ([]telegraf.Metric, error) {
	// parse even if the buffer begins with a newline
	buf = bytes.TrimPrefix(buf, []byte("\n"))
	points, err := models.ParsePoints(buf)
	metrics := make([]telegraf.Metric, len(points))
	for i, point := range points {
		for k, v := range p.DefaultTags {
			// only set the default tag if it doesn't already exist:
			if tmp := point.Tags().GetString(k); tmp == "" {
				point.AddTag(k, v)
			}
		}
		// Ignore error here because it's impossible that a model.Point
		// wouldn't parse into client.Point properly
		metrics[i] = telegraf.NewMetricFromPoint(point)
	}
	return metrics, err
}
// Parse returns a slice of Metrics from a text representation of a
// metric (in line-protocol format), with each metric separated by newlines.
// If any metrics fail to parse, a non-nil error will be returned in addition
// to the metrics that parsed successfully.
func (p *InfluxParser) Parse(buf []byte) ([]telegraf.Metric, error) {
	// parse even if the buffer begins with a newline
	buf = bytes.TrimPrefix(buf, []byte("\n"))
	points, err := models.ParsePoints(buf)
	metrics := make([]telegraf.Metric, len(points))
	for i, point := range points {
		tags := point.Tags()
		for k, v := range p.DefaultTags {
			// Only set tags not in parsed metric
			if _, ok := tags[k]; !ok {
				tags[k] = v
			}
		}
		// Ignore error here because it's impossible that a model.Point
		// wouldn't parse into client.Point properly
		metrics[i], _ = telegraf.NewMetric(point.Name(), tags, point.Fields(), point.Time())
	}
	return metrics, err
}
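A minimal usage sketch for the parser above, assuming InfluxParser exposes DefaultTags as an exported map[string]string field (as the range over p.DefaultTags suggests); the input line and the datacenter tag are illustrative, not from the source.

// exampleInfluxParserDefaultTags is a hypothetical sketch (not from the
// source) showing how DefaultTags fills in tags the parsed line does not
// already carry.
func exampleInfluxParserDefaultTags() {
	p := &InfluxParser{
		DefaultTags: map[string]string{"datacenter": "us-east-1"},
	}
	metrics, err := p.Parse([]byte("cpu,host=serverA value=1i 1000000000\n"))
	if err != nil {
		log.Fatal(err)
	}
	// host=serverA comes from the line itself; datacenter=us-east-1 is added
	// from DefaultTags because that key is absent from the parsed tags.
	fmt.Printf("parsed %d metric(s)\n", len(metrics))
}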
func TestBasicPointGenerator_Generate(t *testing.T) {
	ps, err := basicPG.Generate()
	if err != nil {
		t.Error(err)
	}

	var buf bytes.Buffer
	for p := range ps {
		b := p.Line()
		buf.Write(b)
		buf.Write([]byte("\n"))
	}

	bs := buf.Bytes()
	bs = bs[0 : len(bs)-1]

	_, err = models.ParsePoints(bs)
	if err != nil {
		t.Error(err)
	}
}
func TestParsePointIntsFloats(t *testing.T) {
	pts, err := models.ParsePoints([]byte(`cpu,host=serverA,region=us-east int=10i,float=11.0,float2=12.1 1000000000`))
	if err != nil {
		t.Fatalf(`ParsePoints() failed. got %s`, err)
	}
	if exp := 1; len(pts) != exp {
		t.Errorf("ParsePoint() len mismatch: got %v, exp %v", len(pts), exp)
	}
	pt := pts[0]

	if _, ok := pt.Fields()["int"].(int64); !ok {
		t.Errorf("ParsePoint() int field mismatch: got %T, exp %T", pt.Fields()["int"], int64(10))
	}

	if _, ok := pt.Fields()["float"].(float64); !ok {
		t.Errorf("ParsePoint() float field mismatch: got %T, exp %T", pt.Fields()["float"], float64(11.0))
	}

	if _, ok := pt.Fields()["float2"].(float64); !ok {
		t.Errorf("ParsePoint() float2 field mismatch: got %T, exp %T", pt.Fields()["float2"], float64(12.1))
	}
}
func TestParsePointWithDuplicateTags(t *testing.T) {
	_, err := models.ParsePoints([]byte(`cpu,host=serverA,host=serverB value=1i 1000000000`))
	if err == nil {
		t.Fatalf(`ParsePoint() expected error. got nil`)
	}
}