// BuildPipelines wires the receive pipeline to the process pipeline and returns
// every stage that has to be closed on shutdown.
func BuildPipelines(store data.Nodeinfostore, receiver announced.AnnouncedPacketReceiver, pipeEnd func(response data.ParsedResponse)) ([]io.Closer, error) {
	closeables := make([]io.Closer, 0, 2)
	receivePipeline := pipeline.NewReceivePipeline(&pipeline.JsonParsePipe{}, &pipeline.DeflatePipe{})
	processPipe := pipeline.NewProcessPipeline(getProcessPipes(store)...)
	closeables = append(closeables, receivePipeline, processPipe)

	log.Printf("Adding process pipe end")
	go func() {
		processPipe.Dequeue(pipeEnd)
	}()

	log.Printf("Connecting requester to receive pipeline")
	go func() {
		receiver.Receive(func(response announced.Response) {
			receivePipeline.Enqueue(response)
		})
	}()

	log.Printf("Connecting receive to process pipeline")
	// Connect the receive pipeline to the process pipeline.
	go func() {
		receivePipeline.Dequeue(func(response data.ParsedResponse) {
			processPipe.Enqueue(response)
		})
	}()
	return closeables, nil
}
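// The following is a minimal usage sketch, not code from this repository: it
// shows how a caller might wire BuildPipelines together and tear the pipelines
// down again. The runCollector function and the stop channel are hypothetical;
// how the store and the announced packet receiver are constructed is assumed
// to happen elsewhere, so both are taken as parameters.
func runCollector(store data.Nodeinfostore, receiver announced.AnnouncedPacketReceiver, stop chan struct{}) error {
	closers, err := BuildPipelines(store, receiver, func(response data.ParsedResponse) {
		// Final stage of the pipeline: every fully parsed response ends up here.
		log.Printf("pipe end received a parsed response")
	})
	if err != nil {
		return err
	}
	// Wait until the caller signals shutdown, then close every pipeline stage.
	<-stop
	for _, closer := range closers {
		closer.Close()
	}
	return nil
}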
func TestPrometheusClientCounter(t *testing.T) {
	assert := assert.New(t)
	assert.True(true)
	var expectedClientCounts = []float64{13, 11, 15}
	finishChan := make(chan bool)
	store := data.NewSimpleInMemoryStore()
	processPipeline := pipeline.NewProcessPipeline(&prometheus.ClientCountPipe{Store: store}, &collectors.StatisticsCollector{Store: store})
	prometheus.TotalClientCounter.Set(10.0)
	packetCount := 0
	go processPipeline.Dequeue(func(response data.ParsedResponse) {
		value := collectGaugeValue(prometheus.TotalClientCounter)
		assert.Equal(expectedClientCounts[packetCount], value)
		packetCount = packetCount + 1
		if packetCount == len(expectedClientCounts) {
			finishChan <- true
			close(finishChan)
		}
	})

	feedClientsStat(processPipeline, 3)
	// Give the collector pipe a little time to execute its goroutine.
	// In production it is very unrealistic that we will have two statistics
	// responses from the same node in the channel at the same time.
	time.Sleep(time.Millisecond * 50)
	feedClientsStat(processPipeline, 1)
	time.Sleep(time.Millisecond * 50)
	feedClientsStat(processPipeline, 5)

	for range finishChan {
		processPipeline.Close()
	}
}
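// A sketch of the collectGaugeValue helper used in the test above; the real
// helper is not shown here, so this is an assumption, not the project's code.
// It assumes TotalClientCounter is a Gauge from
// github.com/prometheus/client_golang/prometheus (aliased here as promclient
// to avoid clashing with this project's own prometheus package) and reads the
// current value back through the metric's protobuf representation
// (dto "github.com/prometheus/client_model/go").
func collectGaugeValue(gauge promclient.Gauge) float64 {
	metric := &dto.Metric{}
	// Write serialises the gauge into its protobuf form, from which the
	// plain float64 value can be extracted.
	if err := gauge.Write(metric); err != nil {
		return 0
	}
	return metric.GetGauge().GetValue()
}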