func initializeMetrics(batchIntervalMilliseconds uint) *metricbatcher.MetricBatcher {
    eventEmitter := dropsonde.AutowiredEmitter()
    metricSender := metric_sender.NewMetricSender(eventEmitter)
    metricBatcher := metricbatcher.New(metricSender, time.Duration(batchIntervalMilliseconds)*time.Millisecond)
    metrics.Initialize(metricSender, metricBatcher)
    return metricBatcher
}
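// Hypothetical usage sketch (not part of the original sources): once metrics.Initialize
// has been called as above, a caller holding the returned *metricbatcher.MetricBatcher
// can emit metrics either through the package-level dropsonde metrics API or by batching
// counter updates on the batcher itself. The metric names below are illustrative only.
func emitExampleMetrics(batcher *metricbatcher.MetricBatcher) {
    // Immediate emission via the globally initialized sender.
    metrics.SendValue("bytes.read", 1024, "bytes")
    metrics.IncrementCounter("requests.received")

    // Batched emission: deltas accumulate and are flushed on the configured batch interval.
    batcher.BatchIncrementCounter("requests.received")
    batcher.BatchAddCounter("bytes.read", 1024)
}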
func initialize() {
    sender := metric_sender.NewMetricSender(AutowiredEmitter())
    batcher := metricbatcher.New(sender, defaultBatchInterval)
    metrics.Initialize(sender, batcher)
    logs.Initialize(log_sender.NewLogSender(AutowiredEmitter(), statsInterval, gosteno.NewLogger("dropsonde/logs")))
    go runtime_stats.NewRuntimeStats(autowiredEmitter, statsInterval).Run(nil)
    http.DefaultTransport = InstrumentedRoundTripper(http.DefaultTransport)
}
func initializeMetrics(messageTagger *tagger.Tagger, config *config.Config, logger *gosteno.Logger) {
    metricsAggregator := messageaggregator.New(messageTagger, logger)
    eventWriter := eventwriter.New("MetronAgent", metricsAggregator)
    metricSender := metric_sender.NewMetricSender(eventWriter)
    metricBatcher := metricbatcher.New(metricSender, time.Duration(config.MetricBatchIntervalSeconds)*time.Second)
    metrics.Initialize(metricSender, metricBatcher)
}
func initializeMetrics(byteSigner *signer.Signer, config *config.Config, logger *gosteno.Logger) {
    metricsMarshaller := eventmarshaller.New(byteSigner, logger)
    metricsTagger := tagger.New(config.Deployment, config.Job, config.Index, metricsMarshaller)
    metricsAggregator := messageaggregator.New(metricsTagger, logger)
    eventWriter := eventwriter.New("MetronAgent", metricsAggregator)
    metricSender := metric_sender.NewMetricSender(eventWriter)
    metricBatcher := metricbatcher.New(metricSender, time.Duration(config.MetricBatchIntervalSeconds)*time.Second)
    metrics.Initialize(metricSender, metricBatcher)
}
func initializeMetrics(config *config.Config, stopChan chan struct{}, logger *gosteno.Logger) (*metricbatcher.MetricBatcher, *eventwriter.EventWriter) {
    eventWriter := eventwriter.New(origin)
    metricSender := metric_sender.NewMetricSender(eventWriter)
    metricBatcher := metricbatcher.New(metricSender, time.Duration(config.MetricBatchIntervalMilliseconds)*time.Millisecond)
    metrics.Initialize(metricSender, metricBatcher)

    stats := runtime_stats.NewRuntimeStats(eventWriter, time.Duration(config.RuntimeStatsIntervalMilliseconds)*time.Millisecond)
    go stats.Run(stopChan)

    return metricBatcher, eventWriter
}
func initializeMetrics(messageTagger *tagger.Tagger, config *config.Config, stopChan chan struct{}, logger *gosteno.Logger) {
    metricsAggregator := messageaggregator.New(messageTagger, logger)
    eventWriter := eventwriter.New("MetronAgent", metricsAggregator)
    metricSender := metric_sender.NewMetricSender(eventWriter)
    metricBatcher := metricbatcher.New(metricSender, time.Duration(config.MetricBatchIntervalMilliseconds)*time.Millisecond)
    metrics.Initialize(metricSender, metricBatcher)

    stats := runtime_stats.NewRuntimeStats(eventWriter, time.Duration(config.RuntimeStatsIntervalMilliseconds)*time.Millisecond)
    go stats.Run(stopChan)
}
func initializeMetrics(origin, destination string) (*metricbatcher.MetricBatcher, error) {
    err := setupDefaultEmitter(origin, destination)
    if err != nil {
        // Legacy holdover. We would prefer to panic, rather than just throwing our metrics
        // away and pretending we're running fine, but for now, we just don't want to break
        // anything.
        dropsonde.DefaultEmitter = &dropsonde.NullEventEmitter{}
    }

    // Copied from dropsonde.initialize(), since we stopped using dropsonde.Initialize
    // but needed it to continue operating the same.
    sender := metric_sender.NewMetricSender(dropsonde.DefaultEmitter)
    batcher := metricbatcher.New(sender, defaultBatchInterval)
    metrics.Initialize(sender, batcher)
    logs.Initialize(log_sender.NewLogSender(dropsonde.DefaultEmitter, gosteno.NewLogger("dropsonde/logs")))
    envelopes.Initialize(envelope_sender.NewEnvelopeSender(dropsonde.DefaultEmitter))
    go runtime_stats.NewRuntimeStats(dropsonde.DefaultEmitter, statsInterval).Run(nil)
    http.DefaultTransport = dropsonde.InstrumentedRoundTripper(http.DefaultTransport)

    return batcher, err
}
    fakeMetricSender *fake_metric_sender.FakeMetricSender
    logger           lager.Logger
    fakeDiegoClient  *fake_bbs.FakeClient
    fakeCcClient     *fakes.FakeCcClient
    fakeBackend      *fake_backend.FakeBackend
    responseRecorder *httptest.ResponseRecorder
    handler          handlers.StagingHandler
)

BeforeEach(func() {
    logger = lagertest.NewTestLogger("test")
    fakeMetricSender = fake_metric_sender.NewFakeMetricSender()
    metrics.Initialize(fakeMetricSender, nil)

    fakeCcClient = &fakes.FakeCcClient{}
    fakeBackend = &fake_backend.FakeBackend{}
    fakeBackend.BuildRecipeReturns(&models.TaskDefinition{}, "", "", nil)
    fakeDiegoClient = &fake_bbs.FakeClient{}

    responseRecorder = httptest.NewRecorder()
    handler = handlers.NewStagingHandler(logger, map[string]backend.Backend{"fake-backend": fakeBackend}, fakeCcClient, fakeDiegoClient)
})

Describe("Stage", func() {
    var (
        stagingRequestJson []byte
. "github.com/onsi/gomega" ) var _ = Describe("MetricsReporter", func() { var metricsReporter *metrics.MetricsReporter var req *http.Request var endpoint *route.Endpoint var sender *fake.FakeMetricSender BeforeEach(func() { metricsReporter = metrics.NewMetricsReporter() req, _ = http.NewRequest("GET", "https://example.com", nil) endpoint = route.NewEndpoint("someId", "host", 2222, "privateId", "2", map[string]string{}, 30, "", models.ModificationTag{}) sender = fake.NewFakeMetricSender() batcher := metricbatcher.New(sender, time.Millisecond) dropsondeMetrics.Initialize(sender, batcher) }) It("increments the bad_requests metric", func() { metricsReporter.CaptureBadRequest(req) Eventually(func() uint64 { return sender.GetCounter("rejected_requests") }).Should(BeEquivalentTo(1)) metricsReporter.CaptureBadRequest(req) Eventually(func() uint64 { return sender.GetCounter("rejected_requests") }).Should(BeEquivalentTo(2)) }) It("increments the bad_gateway metric", func() { metricsReporter.CaptureBadGateway(req) Eventually(func() uint64 { return sender.GetCounter("bad_gateways") }).Should(BeEquivalentTo(1)) metricsReporter.CaptureBadGateway(req)
package dropsonde_unmarshaller_test

import (
    "time"

    "github.com/cloudfoundry/dropsonde/emitter/fake"
    "github.com/cloudfoundry/dropsonde/metric_sender"
    "github.com/cloudfoundry/dropsonde/metricbatcher"
    "github.com/cloudfoundry/dropsonde/metrics"
    . "github.com/onsi/ginkgo"
    . "github.com/onsi/gomega"

    "testing"
)

func TestUnmarshaller(t *testing.T) {
    RegisterFailHandler(Fail)
    RunSpecs(t, "Dropsonde Unmarshaller Suite")
}

var fakeEventEmitter = fake.NewFakeEventEmitter("doppler")
var metricBatcher *metricbatcher.MetricBatcher

var _ = BeforeSuite(func() {
    sender := metric_sender.NewMetricSender(fakeEventEmitter)
    metricBatcher = metricbatcher.New(sender, 100*time.Millisecond)
    metrics.Initialize(sender, metricBatcher)
})
. "github.com/onsi/ginkgo" . "github.com/onsi/gomega" ) var _ = Describe("Deadlock", func() { var ( metricBatcher *metricbatcher.MetricBatcher done chan struct{} ) BeforeEach(func() { done = make(chan struct{}) metricSender := NewFakeMetricSender(&done) metricBatcher = metricbatcher.New(metricSender, 50*time.Millisecond) metrics.Initialize(metricSender, metricBatcher) }) It("doesn't deadlock when Batch functions are called while batch sending", func() { metricBatcher.BatchAddCounter("count1", 2) Eventually(done).Should(BeClosed()) }, 1) }) type FakeMetricSender struct { done *chan struct{} } func NewFakeMetricSender(done *chan struct{}) *FakeMetricSender { return &FakeMetricSender{ done: done,
var _ = Describe("Batch Writer", func() { var ( byteWriter *mockByteWriter messageBytes []byte prefixedMessage []byte batcher *batch.Writer timeout time.Duration logger *gosteno.Logger sender *fake.FakeMetricSender constructorErr error ) BeforeEach(func() { sender = fake.NewFakeMetricSender() metrics.Initialize(sender, metricbatcher.New(sender, time.Millisecond*10)) byteWriter = newMockByteWriter() close(byteWriter.WriteOutput.err) messageBytes = []byte("this is a log message") timeout = time.Second / 2 bufferSize = 1024 logger = loggertesthelper.Logger() // zero out the values that are assigned in the JustBeforeEach prefixedMessage = nil batcher = nil constructorErr = nil }) JustBeforeEach(func() { prefixedMessage = prefixWithLength(messageBytes)
"github.com/cloudfoundry/dropsonde/metrics" "github.com/cloudfoundry/sonde-go/events" "github.com/gogo/protobuf/proto" "github.com/gorilla/websocket" ) func TestWebsocketServer(t *testing.T) { RegisterFailHandler(Fail) RunSpecs(t, "WebsocketServer Suite") } var fakeMetricSender *fake.FakeMetricSender var _ = BeforeSuite(func() { fakeMetricSender = fake.NewFakeMetricSender() metrics.Initialize(fakeMetricSender, metricbatcher.New(fakeMetricSender, 10*time.Millisecond)) }) func AddSlowWSSink(receivedChan chan []byte, errChan chan error, timeout time.Duration, url string) { ws, _, err := websocket.DefaultDialer.Dial(url, http.Header{}) if err != nil { panic(err) } go func() { time.Sleep(timeout) _, reader, err := ws.NextReader() if err != nil { errChan <- err return }
. "github.com/onsi/gomega" ) const TTL = 0 var _ = Describe("Convergence of Tasks", func() { var ( sender *fake.FakeMetricSender kickTasksDurationInSeconds, expirePendingTaskDurationInSeconds uint64 kickTasksDuration, expirePendingTaskDuration, expireCompletedTaskDuration time.Duration ) BeforeEach(func() { sender = fake.NewFakeMetricSender() metrics.Initialize(sender, nil) kickTasksDurationInSeconds = 10 kickTasksDuration = time.Duration(kickTasksDurationInSeconds) * time.Second expirePendingTaskDurationInSeconds = 30 expirePendingTaskDuration = time.Duration(expirePendingTaskDurationInSeconds) * time.Second expireCompletedTaskDuration = time.Hour }) Describe("ConvergeTasks", func() { const ( taskGuid = "some-guid" taskGuid2 = "some-other-guid" domain = "some-domain" cellId = "cell-id" )
var _ = Describe("UDPWrapper", func() { var ( client *mockClient envelope *events.Envelope udpWrapper *dopplerforwarder.UDPWrapper logger *gosteno.Logger message []byte sharedSecret []byte mockBatcher *mockMetricBatcher ) BeforeEach(func() { sharedSecret = []byte("secret") mockBatcher = newMockMetricBatcher() metrics.Initialize(nil, mockBatcher) client = newMockClient() envelope = &events.Envelope{ Origin: proto.String("fake-origin-1"), EventType: events.Envelope_LogMessage.Enum(), LogMessage: factories.NewLogMessage(events.LogMessage_OUT, "message", "appid", "sourceType"), } logger = loggertesthelper.Logger() udpWrapper = dopplerforwarder.NewUDPWrapper(sharedSecret, logger) var err error message, err = proto.Marshal(envelope) Expect(err).NotTo(HaveOccurred()) })
func init() {
    sender = metrics_fakes.NewFakeMetricSender()
    metrics.Initialize(sender, nil)
}
"github.com/cloudfoundry-incubator/bbs/models" "github.com/cloudfoundry-incubator/rep" "github.com/cloudfoundry/dropsonde/metric_sender/fake" "github.com/cloudfoundry/dropsonde/metrics" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" ) var _ = Describe("Auction Metric Emitter Delegate", func() { var delegate auctiontypes.AuctionMetricEmitterDelegate var metricSender *fake.FakeMetricSender BeforeEach(func() { metricSender = fake.NewFakeMetricSender() metrics.Initialize(metricSender, nil) delegate = auctionmetricemitterdelegate.New() }) Describe("AuctionCompleted", func() { It("should adjust the metric counters", func() { resource := rep.NewResource(10, 10, "linux") delegate.AuctionCompleted(auctiontypes.AuctionResults{ SuccessfulLRPs: []auctiontypes.LRPAuction{ { LRP: rep.NewLRP(models.NewActualLRPKey("successful-start", 0, "domain"), resource), }, }, SuccessfulTasks: []auctiontypes.TaskAuction{ {
"github.com/cloudfoundry/loggregatorlib/loggertesthelper" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" ) var _ = Describe("LogSender", func() { var ( emitter *fake.FakeEventEmitter sender log_sender.LogSender ) BeforeEach(func() { emitter = fake.NewFakeEventEmitter("origin") metricSender := metric_sender.NewMetricSender(emitter) batcher := metricbatcher.New(metricSender, time.Millisecond) metrics.Initialize(metricSender, batcher) sender = log_sender.NewLogSender(emitter, loggertesthelper.Logger()) }) AfterEach(func() { emitter.Close() for !emitter.IsClosed() { time.Sleep(10 * time.Millisecond) } }) Describe("SendAppLog", func() { It("sends a log message event to its emitter", func() { err := sender.SendAppLog("app-id", "custom-log-message", "App", "0") Expect(err).NotTo(HaveOccurred())
    var (
        inputChan   chan *events.Envelope
        outputChan  chan []byte
        runComplete chan struct{}
        marshaller  dropsonde_marshaller.DropsondeMarshaller
        fakeSender  *fake.FakeMetricSender
    )

    BeforeEach(func() {
        inputChan = make(chan *events.Envelope, 100)
        outputChan = make(chan []byte, 10)
        runComplete = make(chan struct{})
        marshaller = dropsonde_marshaller.NewDropsondeMarshaller(loggertesthelper.Logger())
        fakeSender = fake.NewFakeMetricSender()
        batcher := metricbatcher.New(fakeSender, 200*time.Millisecond)
        metrics.Initialize(fakeSender, batcher)

        go func() {
            marshaller.Run(inputChan, outputChan)
            close(runComplete)
        }()
    })

    AfterEach(func() {
        close(inputChan)
        Eventually(runComplete).Should(BeClosed())
    })

    It("marshals envelopes into bytes", func() {
        envelope := &events.Envelope{
            Origin: proto.String("fake-origin-1"),
import (
    "github.com/cloudfoundry/dropsonde/metric_sender/fake"
    "github.com/cloudfoundry/dropsonde/metricbatcher"
    "github.com/cloudfoundry/dropsonde/metrics"
    . "github.com/onsi/ginkgo"
    . "github.com/onsi/gomega"

    "time"
)

var _ = Describe("Metrics", func() {
    var fakeMetricSender *fake.FakeMetricSender

    BeforeEach(func() {
        fakeMetricSender = fake.NewFakeMetricSender()
        metricBatcher := metricbatcher.New(fakeMetricSender, time.Millisecond)
        metrics.Initialize(fakeMetricSender, metricBatcher)
    })

    It("delegates SendValue", func() {
        metrics.SendValue("metric", 42.42, "answers")

        Expect(fakeMetricSender.GetValue("metric").Value).To(Equal(42.42))
        Expect(fakeMetricSender.GetValue("metric").Unit).To(Equal("answers"))
    })

    It("delegates IncrementCounter", func() {
        metrics.IncrementCounter("count")
        Expect(fakeMetricSender.GetCounter("count")).To(BeEquivalentTo(1))

        metrics.IncrementCounter("count")
var (
    fakeEventEmitter *fake.FakeEventEmitter
    uptimeMonitor    monitor.Monitor
)

const (
    interval = 100 * time.Millisecond
)

var _ = Describe("UptimeMonitor", func() {
    BeforeEach(func() {
        fakeEventEmitter = fake.NewFakeEventEmitter("MonitorTest")
        sender := metric_sender.NewMetricSender(fakeEventEmitter)
        batcher := metricbatcher.New(sender, 100*time.Millisecond)
        metrics.Initialize(sender, batcher)

        uptimeMonitor = monitor.NewUptimeMonitor(interval)
        go uptimeMonitor.Start()
    })

    AfterEach(func() {
        fakeEventEmitter.Close()
    })

    Context("stops automatically", func() {
        AfterEach(func() {
            uptimeMonitor.Stop()
        })
    } else if eventType == events.Envelope_CounterEvent {
        f.truncateChan <- struct{}{}
    }
    return true
}

var _ = Describe("Truncating Buffer", func() {
    var inMessageChan chan *events.Envelope
    var stopChannel chan struct{}
    var bufferSize uint
    var buffer *truncatingbuffer.TruncatingBuffer
    var context truncatingbuffer.BufferContext

    BeforeEach(func() {
        metrics.Initialize(nil, nil)
        inMessageChan = make(chan *events.Envelope)
        stopChannel = make(chan struct{})
        context = &FakeContext{}
        bufferSize = 3
    })

    JustBeforeEach(func() {
        buffer = truncatingbuffer.NewTruncatingBuffer(inMessageChan, bufferSize, context, loggertesthelper.Logger(), stopChannel)
    })

    AfterEach(func() {
        if inMessageChan != nil {
            close(inMessageChan)
        }
    })
"net" "net/http" "strings" "sync" "time" ) // these tests need to be invoked individually from an external script, // since environment variables need to be set/unset before starting the tests var _ = Describe("Autowire End-to-End", func() { Context("with standard initialization", func() { origin := []string{"test-origin"} BeforeEach(func() { dropsonde.Initialize("localhost:3457", origin...) metrics.Initialize(metric_sender.NewMetricSender(dropsonde.AutowiredEmitter())) }) It("emits HTTP client/server events and heartbeats", func() { udpListener, err := net.ListenPacket("udp4", ":3457") Expect(err).ToNot(HaveOccurred()) defer udpListener.Close() udpDataChan := make(chan []byte, 16) receivedEvents := make(map[string]bool) heartbeatUuidsChan := make(chan string, 1000) lock := sync.RWMutex{} heartbeatRequest := newHeartbeatRequest() marshalledHeartbeatRequest, _ := proto.Marshal(heartbeatRequest)
    sender         *fake.FakeMetricSender
    etcdOptions    etcd.ETCDOptions
    reportInterval time.Duration
    fakeClock      *fakeclock.FakeClock

    pmn ifrit.Process
)

BeforeEach(func() {
    reportInterval = 100 * time.Millisecond

    fakeClock = fakeclock.NewFakeClock(time.Unix(123, 456))

    sender = fake.NewFakeMetricSender()
    dropsonde_metrics.Initialize(sender, nil)
})

JustBeforeEach(func() {
    pmn = ifrit.Invoke(metrics.NewPeriodicMetronNotifier(
        lagertest.NewTestLogger("test"),
        reportInterval,
        &etcdOptions,
        fakeClock,
    ))
})

AfterEach(func() {
    pmn.Signal(os.Interrupt)
    Eventually(pmn.Wait(), 2*time.Second).Should(Receive())
})
    var fakeMetricSender *fake.FakeMetricSender

    BeforeEach(func() {
        port = 3456 + GinkgoParallelNode()
        address = net.JoinHostPort("127.0.0.1", strconv.Itoa(port))
        writer = mocks.MockByteArrayWriter{}
        reader = networkreader.New(address, "networkReader", &writer, loggertesthelper.Logger())
        readerStopped = make(chan struct{})
    })

    Context("with a reader running", func() {
        BeforeEach(func() {
            loggertesthelper.TestLoggerSink.Clear()

            fakeMetricSender = fake.NewFakeMetricSender()
            metricBatcher := metricbatcher.New(fakeMetricSender, time.Millisecond)
            metrics.Initialize(fakeMetricSender, metricBatcher)

            go func() {
                reader.Start()
                close(readerStopped)
            }()

            expectedLog := fmt.Sprintf("Listening on port %s", address)
            Eventually(loggertesthelper.TestLoggerSink.LogContents).Should(ContainSubstring(expectedLog))
        })

        AfterEach(func() {
            reader.Stop()
            <-readerStopped
        })
            Expect(mockWriter.Events[2].GetOrigin()).To(Equal("fake-origin-4"))
            expectCorrectCounterNameDeltaAndTotal(mockWriter.Events[2], "counter1", 4, 8)
        })
    })

    Context("metrics", func() {
        var (
            fakeSender  *fake.FakeMetricSender
            mockBatcher *mockMetricBatcher
        )

        BeforeEach(func() {
            fakeSender = fake.NewFakeMetricSender()
            mockBatcher = newMockMetricBatcher()
            metrics.Initialize(fakeSender, mockBatcher)
        })

        It("emits a counter for counter events", func() {
            messageAggregator.Write(createCounterMessage("counter1", "fake-origin-1", nil))
            Eventually(mockBatcher.BatchIncrementCounterInput).Should(BeCalled(
                With("MessageAggregator.counterEventReceived"),
            ))

            // since we're counting counters, let's make sure we're not adding their deltas
            messageAggregator.Write(createCounterMessage("counter1", "fake-origin-1", nil))
            Eventually(mockBatcher.BatchIncrementCounterInput).Should(BeCalled(
                With("MessageAggregator.counterEventReceived"),
            ))
        })
    })