func (self *ClusterConfiguration) CreateContinuousQuery(db string, query string) error {
	self.continuousQueriesLock.Lock()
	defer self.continuousQueriesLock.Unlock()

	if self.continuousQueries == nil {
		self.continuousQueries = map[string][]*ContinuousQuery{}
	}

	if self.parsedContinuousQueries == nil {
		self.parsedContinuousQueries = map[string]map[uint32]*parser.SelectQuery{}
	}

	maxId := uint32(0)
	for _, q := range self.continuousQueries[db] {
		if q.Id > maxId {
			maxId = q.Id
		}
	}

	selectQuery, err := parser.ParseSelectQuery(query)
	if err != nil {
		return fmt.Errorf("Failed to parse continuous query: %s", query)
	}

	queryId := maxId + 1
	if self.parsedContinuousQueries[db] == nil {
		self.parsedContinuousQueries[db] = map[uint32]*parser.SelectQuery{queryId: selectQuery}
	} else {
		self.parsedContinuousQueries[db][queryId] = selectQuery
	}
	self.continuousQueries[db] = append(self.continuousQueries[db], &ContinuousQuery{queryId, query})

	return nil
}
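// ContinuousQuery itself is not part of this listing. A minimal sketch
// consistent with how it is used here (constructed positionally as
// &ContinuousQuery{queryId, query} above, read back via query.Id and
// query.Query in addContinuousQuery below) would be:
type ContinuousQuery struct {
	Id    uint32 // assigned as maxId+1 by CreateContinuousQuery
	Query string // the raw select statement as submitted
}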
func (self *FilteringSuite) TestEqualityFiltering(c *C) {
	queryStr := "select * from t where column_one = 100 and column_two <> 6;"
	query, err := parser.ParseSelectQuery(queryStr)
	c.Assert(err, IsNil)

	series, err := common.StringToSeriesArray(`
[
 {
   "points": [
     {"values": [{"int64_value": 100},{"int64_value": 5 }], "timestamp": 1381346631, "sequence_number": 1},
     {"values": [{"int64_value": 100},{"int64_value": 6 }], "timestamp": 1381346631, "sequence_number": 1},
     {"values": [{"int64_value": 90 },{"int64_value": 15}], "timestamp": 1381346632, "sequence_number": 1}
   ],
   "name": "t",
   "fields": ["column_one", "column_two"]
 }
]
`)
	c.Assert(err, IsNil)
	result, err := Filter(query, series[0])
	c.Assert(err, IsNil)
	c.Assert(result, NotNil)
	c.Assert(result.Points, HasLen, 1)
	c.Assert(*result.Points[0].Values[0].Int64Value, Equals, int64(100))
	c.Assert(*result.Points[0].Values[1].Int64Value, Equals, int64(5))
}
func (self *DatastoreSuite) TestPropagateErrorsProperly(c *C) {
	cleanup(nil)
	db := newDatastore(c)
	defer cleanup(db)
	mock := `
  {
    "points": [
      {
        "values": [
          {
            "int64_value": 3
          }
        ],
        "sequence_number": 1
      }
    ],
    "name": "foo",
    "fields": ["value"]
  }`
	pointTime := time.Now().Unix()
	series := stringToSeries(mock, pointTime, c)
	err := db.WriteSeriesData("test", series)
	c.Assert(err, IsNil)
	q, err := parser.ParseSelectQuery("select value from foo;")
	c.Assert(err, IsNil)
	yield := func(series *protocol.Series) error {
		return fmt.Errorf("Whatever")
	}
	user := &MockUser{}
	err = db.ExecuteQuery(user, "test", q, yield, nil)
	c.Assert(err, ErrorMatches, "Whatever")
}
func (self *FilteringSuite) TestFilteringWithGroupBy(c *C) {
	queryStr := "select sum(column_one) from t group by column_two where column_one = 85;"
	query, err := parser.ParseSelectQuery(queryStr)
	c.Assert(err, IsNil)

	series, err := common.StringToSeriesArray(`
[
 {
   "points": [
     {"values": [{"int64_value": 100},{"int64_value": 5 }], "timestamp": 1381346631, "sequence_number": 1},
     {"values": [{"int64_value": 85},{"int64_value": 6 }], "timestamp": 1381346631, "sequence_number": 1},
     {"values": [{"int64_value": 90 },{"int64_value": 15}], "timestamp": 1381346632, "sequence_number": 1}
   ],
   "name": "t",
   "fields": ["column_one", "column_two"]
 }
]
`)
	c.Assert(err, IsNil)
	result, err := Filter(query, series[0])
	c.Assert(err, IsNil)
	c.Assert(result, NotNil)
	c.Assert(result.Points, HasLen, 1)
	c.Assert(result.Fields, HasLen, 2)
}
func (self *ProtobufRequestHandler) handleQuery(request *protocol.Request, conn net.Conn) {
	nextPointMap := make(map[string]*NextPoint)
	assignNextPointTimesAndSend := func(series *protocol.Series) error {
		response := createResponse(nextPointMap, series, request.Id)
		return self.WriteResponse(conn, response)
	}
	// the query should always parse correctly since it was parsed at the originating server.
	query, _ := parser.ParseSelectQuery(*request.Query)
	var user common.User
	if *request.IsDbUser {
		user = self.clusterConfig.GetDbUser(*request.Database, *request.UserName)
	} else {
		user = self.clusterConfig.GetClusterAdmin(*request.UserName)
	}

	var response *protocol.Response
	var ringFilter func(database, series *string, time *int64) bool

	if user == nil {
		errorMsg := fmt.Sprintf("Cannot find user %s", *request.UserName)
		response = &protocol.Response{ErrorMessage: &errorMsg}
		goto response
	}

	if request.RingLocationsToQuery != nil {
		ringFilter = self.clusterConfig.GetRingFilterFunction(*request.Database, *request.RingLocationsToQuery)
	}
	self.db.ExecuteQuery(user, *request.Database, query, assignNextPointTimesAndSend, ringFilter)

	response = &protocol.Response{Type: &endStreamResponse, RequestId: request.Id}
response:
	self.WriteResponse(conn, response)
}
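// createResponse and the NextPoint type used above are not included in this
// listing. A later variant of handleQuery below inlines equivalent logic using
// *protocol.Point directly; the following is a hedged sketch along those
// lines. The name, signature, and use of *protocol.Point instead of a
// NextPoint wrapper are assumptions, not the original API.
func createResponseSketch(nextPointMap map[string]*protocol.Point, series *protocol.Series, requestId *uint32) *protocol.Response {
	pointCount := len(series.Points)
	if pointCount <= 1 {
		// too few points to withhold one; attach any previously withheld point and send
		if nextPoint := nextPointMap[*series.Name]; nextPoint != nil {
			series.Points = append(series.Points, nextPoint)
		}
		return &protocol.Response{Type: &queryResponse, Series: series, RequestId: requestId}
	}
	// withhold the last point of this batch; if a point was withheld from the
	// previous batch, put it at the front of this one
	oldNextPoint := nextPointMap[*series.Name]
	nextPoint := series.Points[pointCount-1]
	if oldNextPoint != nil {
		copy(series.Points[1:], series.Points[:pointCount-1])
		series.Points[0] = oldNextPoint
	} else {
		series.Points = series.Points[:pointCount-1]
	}
	response := &protocol.Response{Type: &queryResponse, Series: series, RequestId: requestId}
	// advertise the timestamp of the withheld point so the requester knows more data follows
	response.NextPointTime = nextPoint.Timestamp
	nextPointMap[*series.Name] = nextPoint
	return response
}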
func (s *RaftServer) CreateContinuousQuery(db string, query string) error {
	selectQuery, err := parser.ParseSelectQuery(query)
	if err != nil {
		return fmt.Errorf("Failed to parse continuous query: %s", query)
	}

	if !selectQuery.IsValidContinuousQuery() {
		return fmt.Errorf("Continuous queries with a group by clause must include time(...) as one of the elements")
	}

	duration, err := selectQuery.GetGroupByClause().GetGroupByTime()
	if err != nil {
		return fmt.Errorf("Couldn't get group by time for continuous query: %s", err)
	}

	// if there are already-running queries, we need to initiate a backfill
	if duration != nil && !s.clusterConfig.LastContinuousQueryRunTime().IsZero() {
		zeroTime := time.Time{}
		currentBoundary := time.Now().Truncate(*duration)
		go s.runContinuousQuery(db, selectQuery, zeroTime, currentBoundary)
	} else {
		// TODO: make continuous queries backfill for queries that don't have a group by time
	}

	command := NewCreateContinuousQueryCommand(db, query)
	_, err = s.doOrProxyCommand(command, "create_cq")
	return err
}
func (self *DatastoreSuite) TestCanWriteAndRetrievePoints(c *C) {
	cleanup(nil)
	db := newDatastore(c)
	defer cleanup(db)
	mock := `
  {
    "points": [
      {
        "values": [
          {
            "int64_value": 3
          }
        ],
        "sequence_number": 1
      },
      {
        "values": [
          {
            "int64_value": 2
          }
        ],
        "sequence_number": 2
      }
    ],
    "name": "foo",
    "fields": ["value"]
  }`
	pointTime := time.Now().Unix()
	series := stringToSeries(mock, pointTime, c)
	err := db.WriteSeriesData("test", series)
	c.Assert(err, IsNil)
	q, errQ := parser.ParseSelectQuery("select value from foo;")
	c.Assert(errQ, IsNil)
	resultSeries := []*protocol.Series{}
	yield := func(series *protocol.Series) error {
		resultSeries = append(resultSeries, series)
		return nil
	}
	user := &MockUser{}
	err = db.ExecuteQuery(user, "test", q, yield, nil)
	c.Assert(err, IsNil)
	// we should get the actual data and the end of series data
	// indicator, i.e. a series with no points
	c.Assert(resultSeries, HasLen, 2)
	c.Assert(resultSeries[0].Points, HasLen, 2)
	c.Assert(resultSeries[0].Fields, HasLen, 1)
	c.Assert(*resultSeries[0].Points[0].SequenceNumber, Equals, uint64(2))
	c.Assert(*resultSeries[0].Points[1].SequenceNumber, Equals, uint64(1))
	c.Assert(*resultSeries[0].Points[0].GetTimestampInMicroseconds(), Equals, pointTime*1000000)
	c.Assert(*resultSeries[0].Points[1].GetTimestampInMicroseconds(), Equals, pointTime*1000000)
	c.Assert(*resultSeries[0].Points[0].Values[0].Int64Value, Equals, int64(2))
	c.Assert(*resultSeries[0].Points[1].Values[0].Int64Value, Equals, int64(3))
	c.Assert(resultSeries[1].Points, HasLen, 0)
	c.Assert(resultSeries[1].Fields, HasLen, 0)
	c.Assert(resultSeries, Not(DeepEquals), series)
}
func (self *DatastoreSuite) TestCanSelectFromARegex(c *C) {
	cleanup(nil)
	db := newDatastore(c)
	defer cleanup(db)

	mock := `{
    "points":[
      {"values":[{"int64_value":3},{"string_value":"paul"}],"sequence_number":2},
      {"values":[{"int64_value":1},{"string_value":"todd"}],"sequence_number":1}
    ],
    "name":"user_things",
    "fields":["count", "name"]
  }`
	series := stringToSeries(mock, time.Now().Unix(), c)
	err := db.WriteSeriesData("foobar", series)
	c.Assert(err, IsNil)
	user := &MockUser{}
	results := executeQuery(user, "foobar", "select count, name from user_things;", db, c)
	c.Assert(results[0], DeepEquals, series)

	mock = `{
    "points":[{"values":[{"double_value":10.1}],"sequence_number":23}],
    "name":"response_times",
    "fields":["ms"]
  }`
	responseSeries := stringToSeries(mock, time.Now().Unix(), c)
	err = db.WriteSeriesData("foobar", responseSeries)
	c.Assert(err, IsNil)
	results = executeQuery(user, "foobar", "select ms from response_times;", db, c)
	c.Assert(results[0], DeepEquals, responseSeries)

	mock = `{
    "points":[{"values":[{"string_value":"NY"}],"sequence_number":23}, {"values":[{"string_value":"CO"}],"sequence_number":20}],
    "name":"other_things",
    "fields":["state"]
  }`
	otherSeries := stringToSeries(mock, time.Now().Unix(), c)
	err = db.WriteSeriesData("foobar", otherSeries)
	c.Assert(err, IsNil)
	results = executeQuery(user, "foobar", "select state from other_things;", db, c)
	c.Assert(results[0], DeepEquals, otherSeries)

	q, errQ := parser.ParseSelectQuery("select * from /.*things/;")
	c.Assert(errQ, IsNil)
	resultSeries := make([]*protocol.Series, 0)
	yield := func(series *protocol.Series) error {
		if len(series.Points) > 0 {
			resultSeries = append(resultSeries, series)
		}
		return nil
	}
	err = db.ExecuteQuery(user, "foobar", q, yield, nil)
	c.Assert(err, IsNil)
	c.Assert(resultSeries, HasLen, 2)
	c.Assert(resultSeries[0], DeepEquals, otherSeries)
	c.Assert(resultSeries[1], DeepEquals, series)
}
func (self *DatastoreSuite) TestCanWriteAndRetrievePointsWithAlias(c *C) {
	cleanup(nil)
	db := newDatastore(c)
	defer cleanup(db)
	mock := `
  {
    "points": [
      {
        "values": [
          {
            "int64_value": 3
          }
        ],
        "sequence_number": 1
      },
      {
        "values": [
          {
            "int64_value": 2
          }
        ],
        "sequence_number": 2
      }
    ],
    "name": "foo",
    "fields": ["value"]
  }`
	pointTime := time.Now().Unix()
	series := stringToSeries(mock, pointTime, c)
	err := db.WriteSeriesData("test", series)
	c.Assert(err, IsNil)
	q, errQ := parser.ParseSelectQuery("select * from foo as f1 inner join foo as f2;")
	c.Assert(errQ, IsNil)
	resultSeries := map[string][]*protocol.Series{}
	yield := func(series *protocol.Series) error {
		resultSeries[*series.Name] = append(resultSeries[*series.Name], series)
		return nil
	}
	user := &MockUser{}
	err = db.ExecuteQuery(user, "test", q, yield, nil)
	c.Assert(err, IsNil)
	// we should get the actual data and the end of series data
	// indicator, i.e. a series with no points
	c.Assert(resultSeries, HasLen, 2)
	c.Assert(resultSeries["f1"], HasLen, 2)
	c.Assert(resultSeries["f1"][0].Points, HasLen, 2)
	c.Assert(resultSeries["f1"][1].Points, HasLen, 0)
	c.Assert(resultSeries["f2"], HasLen, 2)
	c.Assert(resultSeries["f2"][0].Points, HasLen, 2)
	c.Assert(resultSeries["f2"][1].Points, HasLen, 0)
}
func executeQuery(user common.User, database, query string, db Datastore, c *C) []*protocol.Series {
	q, errQ := parser.ParseSelectQuery(query)
	c.Assert(errQ, IsNil)
	resultSeries := []*protocol.Series{}
	yield := func(series *protocol.Series) error {
		// ignore time series which have no data; this includes the
		// end of series indicator
		if len(series.Points) > 0 {
			resultSeries = append(resultSeries, series)
		}
		return nil
	}
	err := db.ExecuteQuery(user, database, q, yield, nil)
	c.Assert(err, IsNil)
	return resultSeries
}
func (self *ProtobufRequestHandler) handleQuery(request *protocol.Request, conn net.Conn) {
	// remembers, per series, the point withheld from the previous batch
	nextPointMap := make(map[string]*protocol.Point)
	assignNextPointTimesAndSend := func(series *protocol.Series) error {
		pointCount := len(series.Points)
		if pointCount <= 1 {
			// too few points to withhold one; attach any previously withheld
			// point and send the batch as-is
			if nextPoint := nextPointMap[*series.Name]; nextPoint != nil {
				series.Points = append(series.Points, nextPoint)
			}
			response := &protocol.Response{Type: &queryResponse, Series: series, RequestId: request.Id}

			self.WriteResponse(conn, response)
			return nil
		}
		// withhold the last point of this batch; if a point was withheld from
		// the previous batch, put it at the front of this one
		oldNextPoint := nextPointMap[*series.Name]
		nextPoint := series.Points[pointCount-1]
		series.Points[pointCount-1] = nil
		if oldNextPoint != nil {
			copy(series.Points[1:], series.Points[0:])
			series.Points[0] = oldNextPoint
		} else {
			series.Points = series.Points[:len(series.Points)-1]
		}

		response := &protocol.Response{Series: series, Type: &queryResponse, RequestId: request.Id}
		if nextPoint != nil {
			// advertise the timestamp of the withheld point so the requester
			// knows more data follows
			response.NextPointTime = nextPoint.Timestamp
			nextPointMap[*series.Name] = nextPoint
		}
		err := self.WriteResponse(conn, response)
		return err
	}
	// the query should always parse correctly since it was parsed at the originating server.
	query, _ := parser.ParseSelectQuery(*request.Query)
	user := self.clusterConfig.GetDbUser(*request.Database, *request.UserName)

	var ringFilter func(database, series *string, time *int64) bool
	if request.RingLocationsToQuery != nil {
		ringFilter = self.clusterConfig.GetRingFilterFunction(*request.Database, *request.RingLocationsToQuery)
	}
	self.db.ExecuteQuery(user, *request.Database, query, assignNextPointTimesAndSend, ringFilter)

	response := &protocol.Response{Type: &endStreamResponse, RequestId: request.Id}
	self.WriteResponse(conn, response)
}
func (self *DatastoreSuite) TestBreaksLargeResultsIntoMultipleBatches(c *C) {
	cleanup(nil)
	db := newDatastore(c)
	defer cleanup(db)

	mock := `{
    "points":[
      {"values":[{"double_value":23.1},{"string_value":"paul"}],"sequence_number":2},
      {"values":[{"double_value":56.8},{"string_value":"todd"}],"sequence_number":1}
    ],
    "name":"user_things",
    "fields":["response_time","name"]
  }`
	series := stringToSeries(mock, time.Now().Unix(), c)
	sequence := 0
	writtenPoints := 0
	for i := 0; i < 50000; i++ {
		for _, p := range series.Points {
			sequence += 1
			s := uint64(sequence)
			p.SequenceNumber = &s
		}
		writtenPoints += 2
		err := db.WriteSeriesData("foobar", series)
		c.Assert(err, IsNil)
	}

	q, errQ := parser.ParseSelectQuery("select * from user_things limit 0;")
	c.Assert(errQ, IsNil)
	resultSeries := make([]*protocol.Series, 0)
	yield := func(series *protocol.Series) error {
		resultSeries = append(resultSeries, series)
		return nil
	}
	user := &MockUser{}
	err := db.ExecuteQuery(user, "foobar", q, yield, nil)
	c.Assert(err, IsNil)
	c.Assert(len(resultSeries), InRange, 2, 20)
	pointCount := 0
	for _, s := range resultSeries {
		pointCount += len(s.Points)
	}
	c.Assert(pointCount, Equals, writtenPoints)
}
func (self *ProtobufRequestHandler) handleQuery(request *protocol.Request, conn net.Conn) {
	nextPointMap := make(map[string]*NextPoint)
	assignNextPointTimesAndSend := func(series *protocol.Series) error {
		response := createResponse(nextPointMap, series, request.Id)
		return self.WriteResponse(conn, response)
	}
	// the query should always parse correctly since it was parsed at the originating server.
	query, _ := parser.ParseSelectQuery(*request.Query)
	user := self.clusterConfig.GetDbUser(*request.Database, *request.UserName)

	var ringFilter func(database, series *string, time *int64) bool
	if request.RingLocationsToQuery != nil {
		ringFilter = self.clusterConfig.GetRingFilterFunction(*request.Database, *request.RingLocationsToQuery)
	}
	self.db.ExecuteQuery(user, *request.Database, query, assignNextPointTimesAndSend, ringFilter)

	response := &protocol.Response{Type: &endStreamResponse, RequestId: request.Id}
	self.WriteResponse(conn, response)
}
func (self *DatastoreSuite) TestCheckReadAccess(c *C) {
	cleanup(nil)
	db := newDatastore(c)
	defer cleanup(db)

	mock := `{
    "points":[
      {"values":[{"int64_value":3},{"string_value":"paul"}],"sequence_number":2},
      {"values":[{"int64_value":1},{"string_value":"todd"}],"sequence_number":1}
    ],
    "name":"user_things",
    "fields":["count", "name"]
  }`
	series := stringToSeries(mock, time.Now().Unix(), c)
	err := db.WriteSeriesData("foobar", series)
	c.Assert(err, IsNil)

	mock = `{
    "points":[{"values":[{"string_value":"NY"}],"sequence_number":23}, {"values":[{"string_value":"CO"}],"sequence_number":20}],
    "name":"other_things",
    "fields":["state"]
  }`
	otherSeries := stringToSeries(mock, time.Now().Unix(), c)
	err = db.WriteSeriesData("foobar", otherSeries)

	user := &MockUser{
		dbCannotRead: map[string]bool{"other_things": true},
	}
	q, errQ := parser.ParseSelectQuery("select * from /.*things/;")
	c.Assert(errQ, IsNil)
	resultSeries := make([]*protocol.Series, 0)
	yield := func(series *protocol.Series) error {
		if len(series.Points) > 0 {
			resultSeries = append(resultSeries, series)
		}
		return nil
	}
	err = db.ExecuteQuery(user, "foobar", q, yield, nil)
	c.Assert(err, ErrorMatches, ".*one or more.*")
	c.Assert(len(resultSeries), Equals, 1)
	c.Assert(*resultSeries[0].Name, Equals, "user_things")
	c.Assert(resultSeries[0], DeepEquals, series)
}
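// MockUser is a test double defined elsewhere in this suite; the tests above
// only show that a zero value grants access and that dbCannotRead blacklists
// specific series for reads. A minimal sketch consistent with that usage is
// below. The HasReadAccess method name and signature are assumptions about
// the common.User interface, and a real double would need to satisfy the rest
// of that interface as well.
type MockUser struct {
	dbCannotRead map[string]bool
}

// HasReadAccess denies only the series explicitly marked unreadable.
func (u *MockUser) HasReadAccess(name string) bool {
	return !u.dbCannotRead[name]
}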
func (self *DatastoreSuite) TestDeletingData(c *C) {
	cleanup(nil)
	db := newDatastore(c)
	defer cleanup(db)
	mock := `
  {
    "points": [
      {
        "values": [
          {
            "int64_value": 3
          }
        ],
        "sequence_number": 1
      }
    ],
    "name": "foo",
    "fields": ["value"]
  }`
	pointTime := time.Now().Unix()
	series := stringToSeries(mock, pointTime, c)
	err := db.WriteSeriesData("test", series)
	c.Assert(err, IsNil)
	q, err := parser.ParseSelectQuery("select value from foo;")
	c.Assert(err, IsNil)
	yield := func(series *protocol.Series) error {
		if len(series.Points) > 0 {
			panic("Series contains points")
		}
		return nil
	}
	c.Assert(db.DropDatabase("test"), IsNil)
	user := &MockUser{}
	err = db.ExecuteQuery(user, "test", q, yield, nil)

	// we don't have an error any more on query for fields that don't exist.
	// This is because of the clustering. Some servers could have some fields
	// while others don't. To be expected.
	// c.Assert(err, ErrorMatches, ".*Field value doesn't exist.*")
	c.Assert(err, IsNil)
}
func (self *FilteringSuite) TestFilteringNonExistentColumn(c *C) {
	queryStr := "select * from t where column_one = 100 and column_two <> 6"
	query, err := parser.ParseSelectQuery(queryStr)
	c.Assert(err, IsNil)

	series, err := common.StringToSeriesArray(`
[
 {
   "points": [
     {"values": [{"int64_value": 100}], "timestamp": 1381346631, "sequence_number": 1},
     {"values": [{"int64_value": 90 }], "timestamp": 1381346632, "sequence_number": 1}
   ],
   "name": "t",
   "fields": ["column_one"]
 }
]
`)
	c.Assert(err, IsNil)
	_, err = Filter(query, series[0])
	c.Assert(err, NotNil)
}
func (self *ClusterConfiguration) addContinuousQuery(db string, query *ContinuousQuery) error {
	if self.continuousQueries == nil {
		self.continuousQueries = map[string][]*ContinuousQuery{}
	}

	if self.ParsedContinuousQueries == nil {
		self.ParsedContinuousQueries = map[string]map[uint32]*parser.SelectQuery{}
	}

	selectQuery, err := parser.ParseSelectQuery(query.Query)
	if err != nil {
		return fmt.Errorf("Failed to parse continuous query: %s", query)
	}

	if self.ParsedContinuousQueries[db] == nil {
		self.ParsedContinuousQueries[db] = map[uint32]*parser.SelectQuery{query.Id: selectQuery}
	} else {
		self.ParsedContinuousQueries[db][query.Id] = selectQuery
	}
	self.continuousQueries[db] = append(self.continuousQueries[db], query)
	return nil
}
func (self *FilteringSuite) TestNotRegexFiltering(c *C) {
	queryStr := "select * from t where column_one !~ /.*foo.*/ and time > now() - 1d;"
	query, err := parser.ParseSelectQuery(queryStr)
	c.Assert(err, IsNil)
	series, err := common.StringToSeriesArray(`
[
 {
   "points": [
     {"values": [{"string_value": "100"}], "timestamp": 1381346631, "sequence_number": 1},
     {"values": [{"string_value": "foobar"}], "timestamp": 1381346631, "sequence_number": 1}
   ],
   "name": "t",
   "fields": ["column_one"]
 }
]
`)
	c.Assert(err, IsNil)
	result, err := Filter(query, series[0])
	c.Assert(err, IsNil)
	c.Assert(result, NotNil)
	c.Assert(result.Points, HasLen, 1)
	c.Assert(*result.Points[0].Values[0].StringValue, Equals, "100")
}
func (s *RaftServer) CreateContinuousQuery(db string, query string) error {
	// if there are already-running queries, we need to initiate a backfill
	if !s.clusterConfig.continuousQueryTimestamp.IsZero() {
		selectQuery, err := parser.ParseSelectQuery(query)
		if err != nil {
			return fmt.Errorf("Failed to parse continuous query: %s", query)
		}

		duration, err := selectQuery.GetGroupByClause().GetGroupByTime()
		if err != nil {
			return fmt.Errorf("Couldn't get group by time for continuous query: %s", err)
		}

		if duration != nil {
			zeroTime := time.Time{}
			currentBoundary := time.Now().Truncate(*duration)
			go s.runContinuousQuery(db, selectQuery, zeroTime, currentBoundary)
		}
	}

	command := NewCreateContinuousQueryCommand(db, query)
	_, err := s.doOrProxyCommand(command, "create_cq")
	return err
}
func (self *FilteringSuite) TestFilteringWithJoin(c *C) {
	queryStr := "select * from t as bar inner join t as foo where bar.column_one = 100 and foo.column_two <> 6;"
	query, err := parser.ParseSelectQuery(queryStr)
	c.Assert(err, IsNil)
	series, err := common.StringToSeriesArray(`
[
 {
   "points": [
     {"values": [{"int64_value": 100},{"int64_value": 5 }], "timestamp": 1381346631, "sequence_number": 1},
     {"values": [{"int64_value": 100},{"int64_value": 6 }], "timestamp": 1381346631, "sequence_number": 1},
     {"values": [{"int64_value": 90 },{"int64_value": 15}], "timestamp": 1381346632, "sequence_number": 1}
   ],
   "name": "foo_join_bar",
   "fields": ["bar.column_one", "foo.column_two"]
 }
]
`)
	c.Assert(err, IsNil)
	result, err := Filter(query, series[0])
	c.Assert(err, IsNil)
	c.Assert(result, NotNil)
	// no filtering should happen for join queries
	c.Assert(result.Points, HasLen, 1)
}