func (s *endpointServiceImpl) send(ctx context.Context, data []byte) error {
	ctx = log.SetField(ctx, "endpointURL", s.url)
	return retryCall(ctx, "endpoint.send", func() error {
		startTime := clock.Now(ctx)

		log.Debugf(ctx, "Pushing message to endpoint.")
		req, err := http.NewRequest("POST", s.url, bytes.NewReader(data))
		if err != nil {
			log.Errorf(log.SetError(ctx, err), "Failed to create HTTP request.")
			return err
		}
		req.Header.Add("content-type", protobufContentType)
		req.Header.Add("user-agent", monitoringEndpointUserAgent)

		resp, err := s.client.Do(req)
		if err != nil {
			// Treat a client error as transient.
			log.Warningf(log.SetError(ctx, err), "Failed proxy client request.")
			return errors.WrapTransient(err)
		}
		defer resp.Body.Close()

		// Read the full response body. This will enable us to re-use the
		// connection.
		bodyData, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			log.Errorf(log.SetError(ctx, err), "Error during endpoint connection.")
			return errors.WrapTransient(err)
		}

		log.Fields{
			"status":        resp.Status,
			"statusCode":    resp.StatusCode,
			"headers":       resp.Header,
			"contentLength": resp.ContentLength,
			"body":          string(bodyData),
			"duration":      clock.Now(ctx).Sub(startTime),
		}.Debugf(ctx, "Received HTTP response from endpoint.")

		if http.StatusOK <= resp.StatusCode && resp.StatusCode < http.StatusMultipleChoices {
			log.Debugf(ctx, "Message pushed successfully.")
			return nil
		}

		err = fmt.Errorf("http: server error (%d)", resp.StatusCode)
		if resp.StatusCode >= http.StatusInternalServerError {
			err = errors.WrapTransient(err)
		}
		log.Fields{
			log.ErrorKey: err,
			"status":     resp.Status,
			"statusCode": resp.StatusCode,
		}.Warningf(ctx, "Proxy error.")
		return err
	})
}
func (a *application) proxySingleMessage(ctx context.Context, msg *pubsub.Message) error {
	log.Debugf(ctx, "Sending data to monitoring endpoint.")

	// Refuse to transmit the message if it's too large.
	if len(msg.Data) > maxMessageSize {
		log.Fields{
			"size":    len(msg.Data),
			"maxSize": maxMessageSize,
		}.Errorf(ctx, "Message exceeds maximum size threshold; discarding.")
		return errors.New("main: message is too large")
	}

	// Execute the request.
	return a.endpoint.send(ctx, msg.Data)
}
// retryCall is a wrapper around the retry library that logs information about
// the retry attempts.
func retryCall(ctx context.Context, title string, f func() error) error {
	log.Debugf(ctx, "Executing retriable call %s.", title)
	err := retry.Retry(ctx, func() error {
		return f()
	}, func(err error, delay time.Duration) {
		log.Fields{
			log.ErrorKey: err,
			"delay":      delay,
		}.Warningf(ctx, "Transient error encountered during %s; retrying.", title)
	})
	if err != nil {
		log.Errorf(log.SetError(ctx, err), "Failed to %s.", title)
		return err
	}
	return nil
}
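// Illustrative usage sketch for retryCall (not part of the original source):
// wrapping a single operation so that its failures are retried with logging.
// "pingEndpoint" is a hypothetical helper standing in for any operation whose
// transient failures (e.g. errors wrapped with errors.WrapTransient, as in
// send above) should be retried.
func examplePing(ctx context.Context) error {
	return retryCall(ctx, "endpoint.ping", func() error {
		// pingEndpoint is assumed/hypothetical; retryCall logs each retry and
		// the final outcome around whatever this closure returns.
		return pingEndpoint(ctx)
	})
}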
// Main buffering function, which runs in a goroutine.
func (m *meter) consumeWork() {
	// Acknowledge when this goroutine finishes.
	defer close(m.closeAckC)

	ctx := log.SetFilter(m.ctx, "meter")

	timerRunning := false
	flushTimer := clock.NewTimer(ctx)
	defer func() {
		flushTimer.Stop()
	}()

	flushCount := m.Count
	flushTime := m.Delay

	// Build our work buffer.
	workBufferSize := initialWorkBufferSize
	if flushCount > 0 {
		// Will never buffer more than this much Work.
		workBufferSize = flushCount
	}
	bufferedWork := make([]interface{}, 0, workBufferSize)

	log.Debugf(ctx, "Starting work loop.")
	active := true
	for active {
		flushRound := false

		select {
		case work, ok := <-m.workC:
			if !ok {
				log.Debugf(ctx, "Received instance close signal; terminating.")
				// Don't immediately exit the loop, since there may still be buffered
				// Work to flush.
				active = false
				flushRound = true
				break
			}

			// Add this work to the buffer.
			bufferedWork = append(bufferedWork, work)

			// Handle work count threshold. We do this first, since it's trivial to
			// setup/compute.
			if flushCount > 0 && len(bufferedWork) >= flushCount {
				flushRound = true
			}

			// Start our flush timer, if it's not already ticking. Only waste time
			// doing this if we're not already flushing, since flushing will clear the
			// timer.
			if !flushRound && flushTime > 0 && !timerRunning {
				log.Infof(log.SetFields(ctx, log.Fields{
					"flushInterval": flushTime,
				}), "Starting flush timer.")
				flushTimer.Reset(flushTime)
				timerRunning = true
			}

			// Invoke work callback, if one is set.
			if m.IngestCallback != nil {
				flushRound = m.IngestCallback(work) || flushRound
			}

		case <-m.flushNowC:
			flushRound = true

		case <-flushTimer.GetC():
			log.Infof(ctx, "Flush timer has triggered.")
			timerRunning = false
			flushRound = true
		}

		// Should we flush?
		if flushRound {
			flushTimer.Stop()
			timerRunning = false

			if len(bufferedWork) > 0 {
				// Clone bufferedWork, since we re-use it.
				workToSend := make([]interface{}, len(bufferedWork))
				copy(workToSend, bufferedWork)

				// Clear our Work slice for re-use. This does not resize the underlying
				// array, since it's likely to fill again.
				for idx := range bufferedWork {
					bufferedWork[idx] = nil
				}
				bufferedWork = bufferedWork[:0]

				// Callback with this work.
				m.Callback(workToSend)
			}
		}
	}
}
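// flushPolicySketch is an illustrative, standalone restatement of the flush
// policy implemented by consumeWork above; it is an assumed helper, not part
// of the original package. It buffers items from workC and flushes when the
// buffer reaches flushCount, when flushTime elapses after buffering begins, or
// when an explicit flush is requested on flushNowC. It uses only the standard
// "time" package instead of the clock/logging abstractions, purely for
// illustration.
func flushPolicySketch(workC <-chan interface{}, flushNowC <-chan struct{},
	flushCount int, flushTime time.Duration, flush func([]interface{})) {

	var buffered []interface{}

	// Create the timer stopped; it is armed when the first item is buffered.
	timer := time.NewTimer(time.Hour)
	if !timer.Stop() {
		<-timer.C
	}
	timerRunning := false

	for {
		doFlush := false

		select {
		case work, ok := <-workC:
			if !ok {
				// Channel closed: flush whatever remains and exit.
				if len(buffered) > 0 {
					flush(buffered)
				}
				return
			}
			buffered = append(buffered, work)
			if flushCount > 0 && len(buffered) >= flushCount {
				doFlush = true
			} else if flushTime > 0 && !timerRunning {
				timer.Reset(flushTime)
				timerRunning = true
			}

		case <-flushNowC:
			doFlush = true

		case <-timer.C:
			timerRunning = false
			doFlush = true
		}

		if doFlush {
			// Stop the timer, draining it if it already fired, so a stale tick
			// can't trigger a spurious flush later.
			if timerRunning {
				if !timer.Stop() {
					<-timer.C
				}
				timerRunning = false
			}
			if len(buffered) > 0 {
				flush(buffered)
				buffered = buffered[:0]
			}
		}
	}
}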
func TestBasicDatastore(t *testing.T) {
	t.Parallel()

	Convey("basic", t, func() {
		inst, err := aetest.NewInstance(&aetest.Options{
			StronglyConsistentDatastore: true,
		})
		So(err, ShouldBeNil)
		defer inst.Close()

		req, err := inst.NewRequest("GET", "/", nil)
		So(err, ShouldBeNil)

		ctx := Use(context.Background(), req)
		ds := datastore.Get(ctx)
		mc := memcache.Get(ctx)
		inf := info.Get(ctx)

		Convey("logging allows you to tweak the level", func() {
			// You have to visually confirm that this actually happens in the stdout
			// of the test... yeah I know.
			logging.Debugf(ctx, "SHOULD NOT SEE")
			logging.Infof(ctx, "SHOULD SEE")

			ctx = logging.SetLevel(ctx, logging.Debug)
			logging.Debugf(ctx, "SHOULD SEE")
			logging.Infof(ctx, "SHOULD SEE (2)")
		})

		Convey("Can probe/change Namespace", func() {
			So(inf.GetNamespace(), ShouldEqual, "")

			ctx, err = inf.Namespace("wat")
			So(err, ShouldBeNil)

			inf = info.Get(ctx)
			So(inf.GetNamespace(), ShouldEqual, "wat")

			ds = datastore.Get(ctx)
			So(ds.MakeKey("Hello", "world").Namespace(), ShouldEqual, "wat")
		})

		Convey("Can get non-transactional context", func() {
			ctx, err := inf.Namespace("foo")
			So(err, ShouldBeNil)
			ds = datastore.Get(ctx)
			inf = info.Get(ctx)

			ds.RunInTransaction(func(ctx context.Context) error {
				So(ds.MakeKey("Foo", "bar").Namespace(), ShouldEqual, "foo")
				So(ds.Put(&TestStruct{ValueI: []int64{100}}), ShouldBeNil)

				err = datastore.GetNoTxn(ctx).RunInTransaction(func(ctx context.Context) error {
					ds = datastore.Get(ctx)
					So(ds.MakeKey("Foo", "bar").Namespace(), ShouldEqual, "foo")
					So(ds.Put(&TestStruct{ValueI: []int64{100}}), ShouldBeNil)
					return nil
				}, nil)
				So(err, ShouldBeNil)

				return nil
			}, nil)
		})

		Convey("Can Put/Get", func() {
			orig := TestStruct{
				ValueI: []int64{1, 7, 946688461000000, 996688461000000},
				ValueB: []bool{true, false},
				ValueS: []string{"hello", "world"},
				ValueF: []float64{1.0, 7.0, 946688461000000.0, 996688461000000.0},
				ValueBS: [][]byte{
					[]byte("allo"),
					[]byte("hello"),
					[]byte("world"),
					[]byte("zurple"),
				},
				ValueK: []*datastore.Key{
					ds.NewKey("Something", "Cool", 0, nil),
					ds.NewKey("Something", "", 1, nil),
					ds.NewKey("Something", "Recursive", 0,
						ds.NewKey("Parent", "", 2, nil)),
				},
				ValueBK: []blobstore.Key{"bellow", "hello"},
				ValueGP: []datastore.GeoPoint{
					{Lat: 120.7, Lng: 95.5},
				},
			}
			So(ds.Put(&orig), ShouldBeNil)

			ret := TestStruct{ID: orig.ID}
			So(ds.Get(&ret), ShouldBeNil)
			So(ret, ShouldResemble, orig)

			// can't be sure the indexes have caught up... so sleep
			time.Sleep(time.Second)

			Convey("Can query", func() {
				q := datastore.NewQuery("TestStruct")
				ds.Run(q, func(ts *TestStruct) {
					So(*ts, ShouldResemble, orig)
				})
				count, err := ds.Count(q)
				So(err, ShouldBeNil)
				So(count, ShouldEqual, 1)
			})

			Convey("Can project", func() {
				q := datastore.NewQuery("TestStruct").Project("ValueS")
				rslts := []datastore.PropertyMap{}
				So(ds.GetAll(q, &rslts), ShouldBeNil)
				So(rslts, ShouldResemble, []datastore.PropertyMap{
					{
						"$key":   {mpNI(ds.KeyForObj(&orig))},
						"ValueS": {mp("hello")},
					},
					{
						"$key":   {mpNI(ds.KeyForObj(&orig))},
						"ValueS": {mp("world")},
					},
				})

				q = datastore.NewQuery("TestStruct").Project("ValueBS")
				rslts = []datastore.PropertyMap{}
				So(ds.GetAll(q, &rslts), ShouldBeNil)
				So(rslts, ShouldResemble, []datastore.PropertyMap{
					{
						"$key":    {mpNI(ds.KeyForObj(&orig))},
						"ValueBS": {mp("allo")},
					},
					{
						"$key":    {mpNI(ds.KeyForObj(&orig))},
						"ValueBS": {mp("hello")},
					},
					{
						"$key":    {mpNI(ds.KeyForObj(&orig))},
						"ValueBS": {mp("world")},
					},
					{
						"$key":    {mpNI(ds.KeyForObj(&orig))},
						"ValueBS": {mp("zurple")},
					},
				})

				count, err := ds.Count(q)
				So(err, ShouldBeNil)
				So(count, ShouldEqual, 4)

				q = datastore.NewQuery("TestStruct").Lte("ValueI", 7).Project("ValueS").Distinct(true)
				rslts = []datastore.PropertyMap{}
				So(ds.GetAll(q, &rslts), ShouldBeNil)
				So(rslts, ShouldResemble, []datastore.PropertyMap{
					{
						"$key":   {mpNI(ds.KeyForObj(&orig))},
						"ValueI": {mp(1)},
						"ValueS": {mp("hello")},
					},
					{
						"$key":   {mpNI(ds.KeyForObj(&orig))},
						"ValueI": {mp(1)},
						"ValueS": {mp("world")},
					},
					{
						"$key":   {mpNI(ds.KeyForObj(&orig))},
						"ValueI": {mp(7)},
						"ValueS": {mp("hello")},
					},
					{
						"$key":   {mpNI(ds.KeyForObj(&orig))},
						"ValueI": {mp(7)},
						"ValueS": {mp("world")},
					},
				})

				count, err = ds.Count(q)
				So(err, ShouldBeNil)
				So(count, ShouldEqual, 4)
			})
		})

		Convey("Can Put/Get (time)", func() {
			// time comparisons in Go are wonky, so this is pulled out
			pm := datastore.PropertyMap{
				"$key": {mpNI(ds.NewKey("Something", "value", 0, nil))},
				"Time": {
					mp(time.Date(1938, time.January, 1, 1, 1, 1, 1, time.UTC)),
					mp(time.Time{}),
				},
			}
			So(ds.Put(&pm), ShouldBeNil)

			rslt := datastore.PropertyMap{}
			rslt.SetMeta("key", ds.KeyForObj(pm))
			So(ds.Get(&rslt), ShouldBeNil)

			So(pm["Time"][0].Value(), ShouldResemble, rslt["Time"][0].Value())

			q := datastore.NewQuery("Something").Project("Time")
			all := []datastore.PropertyMap{}
			So(ds.GetAll(q, &all), ShouldBeNil)
			So(len(all), ShouldEqual, 2)

			prop := all[0]["Time"][0]
			So(prop.Type(), ShouldEqual, datastore.PTInt)

			tval, err := prop.Project(datastore.PTTime)
			So(err, ShouldBeNil)
			So(tval, ShouldResemble, time.Time{}.UTC())

			tval, err = all[1]["Time"][0].Project(datastore.PTTime)
			So(err, ShouldBeNil)
			So(tval, ShouldResemble, pm["Time"][0].Value())

			ent := datastore.PropertyMap{
				"$key": {mpNI(ds.MakeKey("Something", "value"))},
			}
			So(ds.Get(&ent), ShouldBeNil)
			So(ent["Time"], ShouldResemble, pm["Time"])
		})

		Convey("memcache: Set (nil) is the same as Set ([]byte{})", func() {
			So(mc.Set(mc.NewItem("bob")), ShouldBeNil) // normally would panic because Value is nil
			bob, err := mc.Get("bob")
			So(err, ShouldBeNil)
			So(bob.Value(), ShouldResemble, []byte{})
		})
	})
}
func TestGoLogger(t *testing.T) {
	Convey(`A new Go Logger instance`, t, func() {
		// Regex to pull log information from "formatString".
		buf := bytes.Buffer{}
		l := New(&buf, gol.DEBUG)

		for _, entry := range []struct {
			L logging.Level
			F func(string, ...interface{})
			T string
		}{
			{logging.Debug, l.Debugf, "DEBU"},
			{logging.Info, l.Infof, "INFO"},
			{logging.Warning, l.Warningf, "WARN"},
			{logging.Error, l.Errorf, "ERRO"},
		} {
			Convey(fmt.Sprintf("Can log to: %s", entry.L), func() {
				entry.F("Test logging %s", entry.L)

				matches := lre.FindAllStringSubmatch(normalizeLog(buf.String()), -1)
				So(len(matches), ShouldEqual, 1)
				So(len(matches[0]), ShouldEqual, 4)
				So(matches[0][1], ShouldEqual, "gologger_test.go")
				So(matches[0][2], ShouldEqual, entry.T)
				So(matches[0][3], ShouldEqual, fmt.Sprintf("Test logging %s", entry.L))
			})
		}
	})

	Convey(`A Go Logger instance installed in a Context at Info.`, t, func() {
		buf := bytes.Buffer{}
		lc := &LoggerConfig{
			Format: standardConfig.Format,
			Out:    &buf,
			Level:  gol.DEBUG,
		}
		c := logging.SetLevel(lc.Use(context.Background()), logging.Info)

		Convey(`Should log through top-level Context methods.`, func() {
			for _, entry := range []struct {
				L logging.Level
				F func(context.Context, string, ...interface{})
				T string
			}{
				{logging.Info, logging.Infof, "INFO"},
				{logging.Warning, logging.Warningf, "WARN"},
				{logging.Error, logging.Errorf, "ERRO"},
			} {
				Convey(fmt.Sprintf("Can log to: %s", entry.L), func() {
					entry.F(c, "Test logging %s", entry.L)

					matches := lre.FindAllStringSubmatch(normalizeLog(buf.String()), -1)
					So(len(matches), ShouldEqual, 1)
					So(len(matches[0]), ShouldEqual, 4)
					So(matches[0][1], ShouldEqual, "gologger_test.go")
					So(matches[0][2], ShouldEqual, entry.T)
					So(matches[0][3], ShouldEqual, fmt.Sprintf("Test logging %s", entry.L))
				})
			}
		})

		Convey(`With Fields installed in the Context`, func() {
			c = logging.SetFields(c, logging.Fields{
				logging.ErrorKey: "An error!",
				"reason":         "test",
			})

			Convey(`Should log Fields.`, func() {
				logging.Infof(c, "Here is a %s", "log")

				matches := lre.FindAllStringSubmatch(normalizeLog(buf.String()), -1)
				So(len(matches), ShouldEqual, 1)
				So(len(matches[0]), ShouldEqual, 4)
				So(matches[0][1], ShouldEqual, "gologger_test.go")
				So(matches[0][2], ShouldEqual, "INFO")
				So(matches[0][3], ShouldEqual, `Here is a log {"error":"An error!", "reason":"test"}`)
			})

			Convey(`Should log fields installed immediately`, func() {
				logging.Fields{
					"foo":    "bar",
					"reason": "override",
				}.Infof(c, "Here is another %s", "log")

				matches := lre.FindAllStringSubmatch(normalizeLog(buf.String()), -1)
				So(len(matches), ShouldEqual, 1)
				So(len(matches[0]), ShouldEqual, 4)
				So(matches[0][1], ShouldEqual, "gologger_test.go")
				So(matches[0][2], ShouldEqual, "INFO")
				So(matches[0][3], ShouldEqual, `Here is another log {"error":"An error!", "foo":"bar", "reason":"override"}`)
			})
		})

		Convey(`Will not log to Debug, as it's below level.`, func() {
			logging.Debugf(c, "Hello!")
			So(buf.Len(), ShouldEqual, 0)
		})
	})
}