Example #1
func CsvPipeline(levelDbManager, csvManager store.Manager) transformer.Pipeline {
	var experiment, node, filename string
	var receivedTimestamp, creationTimestamp, size int64
	csvStore := csvManager.Writer("stats.csv", []string{"experiment", "node", "filename"}, []string{"received_timestamp", "creation_timestamp", "size"}, &experiment, &node, &filename, &receivedTimestamp, &creationTimestamp, &size)
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:   "WriteStatsCsv",
			Reader: levelDbManager.Reader("stats"),
			Writer: csvStore,
		},
	}
}
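This single-stage pipeline writes the "stats" LevelDB table out as stats.csv. Pipelines built this way are run with transformer.RunPipeline, as example #5 below shows. A minimal driver sketch (the name runCsvPipeline is hypothetical, and the two store.Manager values are assumed to be constructed elsewhere):

func runCsvPipeline(levelDbManager, csvManager store.Manager) {
	// Execute the single WriteStatsCsv stage defined by CsvPipeline.
	transformer.RunPipeline(CsvPipeline(levelDbManager, csvManager))
}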
Example #2
func SummarizeHealthPipeline(levelDbManager, csvManager store.Manager) transformer.Pipeline {
	memoryStore := levelDbManager.Reader("memory")
	memoryUsageByDayStore := levelDbManager.ReadingWriter("memory-usage-by-day")
	memoryUsageByDaySummarizedStore := levelDbManager.ReadingWriter("memory-usage-by-day-summarized")
	filesystemStore := levelDbManager.Reader("filesystem")
	filesystemUsageByDayStore := levelDbManager.ReadingWriter("filesystem-usage-by-day")
	filesystemUsageByDaySummarizedStore := levelDbManager.ReadingWriter("filesystem-usage-by-day-summarized")

	var timestamp, usage int64
	var filesystem, node string
	memoryUsageSummaryCsv := csvManager.Writer("memory-usage-summary.csv", []string{"timestamp", "node"}, []string{"usage"}, &timestamp, &node, &usage)
	filesystemUsageSummaryCsv := csvManager.Writer("filesystem-usage-summary.csv", []string{"filesystem", "timestamp", "node"}, []string{"usage"}, &filesystem, &timestamp, &node, &usage)

	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "OrderMemoryUsageByTimestamp",
			Reader:      memoryStore,
			Transformer: transformer.MakeMapFunc(orderRecordsByDay),
			Writer:      memoryUsageByDayStore,
		},
		transformer.PipelineStage{
			Name:        "SummarizeMemoryUsage",
			Reader:      memoryUsageByDayStore,
			Transformer: transformer.TransformFunc(summarizeMemoryUsage),
			Writer:      memoryUsageByDaySummarizedStore,
		},
		transformer.PipelineStage{
			Name:   "WriteMemoryUsageSummaryCsv",
			Reader: memoryUsageByDaySummarizedStore,
			Writer: memoryUsageSummaryCsv,
		},
		transformer.PipelineStage{
			Name:        "OrderFilesystemUsageByTimestamp",
			Reader:      filesystemStore,
			Transformer: transformer.MakeMapFunc(orderFilesystemRecordsByDay),
			Writer:      filesystemUsageByDayStore,
		},
		transformer.PipelineStage{
			Name:        "SummarizeFilesystemUsage",
			Reader:      filesystemUsageByDayStore,
			Transformer: transformer.TransformFunc(summarizeFilesystemUsage),
			Writer:      filesystemUsageByDaySummarizedStore,
		},
		transformer.PipelineStage{
			Name:   "WriteFilesystemUsageSummaryCsv",
			Reader: filesystemUsageByDaySummarizedStore,
			Writer: filesystemUsageSummaryCsv,
		},
	}
}
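The map and transform functions referenced above (orderRecordsByDay, summarizeMemoryUsage, and so on) are not shown on this page. Judging from example #4 below, a function wrapped in transformer.TransformFunc takes an input and an output channel of *store.Record. A purely illustrative skeleton (the name exampleTransform and its body are placeholders, not the real summarizer):

func exampleTransform(inputChan, outputChan chan *store.Record) {
	for record := range inputChan {
		// A real transform would decode record.Key and record.Value (e.g. with
		// lex.DecodeOrDie as in example #4), aggregate, and emit new records;
		// this placeholder simply relays each record unchanged.
		outputChan <- record
	}
}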
Example #3
func FilterSessionsPipeline(sessionStartTime, sessionEndTime int64, levelDbManager store.Manager, outputName string) transformer.Pipeline {
	tracesStore := levelDbManager.Reader("traces")
	traceKeyRangesStore := levelDbManager.Reader("availability-done")
	filteredStore := levelDbManager.Writer(outputName)
	parameters := filterSessions{
		SessionStartTime: sessionStartTime * 1000000,
		SessionEndTime:   sessionEndTime * 1000000,
	}
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "FilterSessions",
			Reader:      store.NewDemuxingReader(traceKeyRangesStore, tracesStore),
			Transformer: parameters,
			Writer:      filteredStore,
		},
	}
}
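The multiplication by 1000000 presumably converts the second-granularity session bounds (startSecs and endSecs in example #5) into the microsecond timestamps used by the traces. The filterSessions type is defined elsewhere; only its exported fields can be read off the composite literal above, and the methods that let it act as a stage Transformer are not shown. A sketch of the parameter struct under those assumptions:

// Field layout inferred from the literal above; the methods required by the
// Transformer field are omitted because they are not shown on this page.
type filterSessions struct {
	SessionStartTime int64 // session window start, presumably in microseconds
	SessionEndTime   int64 // session window end, presumably in microseconds
}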
Example #4
func TimesCsvPipeline(levelDbManager store.Manager, csvRoot string) transformer.Pipeline {
	writeTimesCsv := func(inputChan, outputChan chan *store.Record) {
		var currentHandle *os.File
		var currentExperiment, currentNode string
		for record := range inputChan {
			var statsKey StatsKey
			lex.DecodeOrDie(record.Key, &statsKey)
			var statsValue StatsValue
			lex.DecodeOrDie(record.Value, &statsValue)

			if currentExperiment != statsKey.Experiment || currentNode != statsKey.Node {
				if currentHandle != nil {
					currentHandle.Close()
				}
				currentExperiment = statsKey.Experiment
				currentNode = statsKey.Node

				csvName := fmt.Sprintf("%s_%s.csv", currentExperiment, currentNode)
				newHandle, err := os.Create(filepath.Join(csvRoot, csvName))
				if err != nil {
					panic(err)
				}
				currentHandle = newHandle
			}

			if _, err := fmt.Fprintf(currentHandle, "%d,%d\n", statsValue.CreationTimestamp, statsValue.ReceivedTimestamp); err != nil {
				panic(err)
			}
		}
		if currentHandle != nil {
			currentHandle.Close()
		}
	}

	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "WriteTimesCsv",
			Reader:      levelDbManager.Reader("stats"),
			Transformer: transformer.TransformFunc(writeTimesCsv),
		},
	}
}
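Note that this stage has no Writer: writeTimesCsv emits its output directly as CSV files under csvRoot, one file per (experiment, node) pair. Because os.Create truncates an existing file, the transform relies on input records arriving grouped by experiment and node, which is presumably guaranteed by the key ordering of the "stats" store.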
Example #5
func runFilterSessionsPipeline(startSecs, endSecs int64, levelDbManager store.Manager) {

	transformer.RunPipeline(FilterSessionsPipeline(startSecs, endSecs, levelDbManager, "test"))

	filteredStore := levelDbManager.Reader("test")
	filteredStore.BeginReading()
	for {
		record, err := filteredStore.ReadRecord()
		if err != nil {
			panic(err)
		}
		if record == nil {
			break
		}
		var traceKey TraceKey
		lex.DecodeOrDie(record.Key, &traceKey)
		fmt.Printf("%s %d %d\n", traceKey.NodeId, traceKey.SessionId, traceKey.SequenceNumber)
	}
	filteredStore.EndReading()
}
Example #6
func SummarizePipeline(levelDbManager store.Manager, csvManager store.Manager) transformer.Pipeline {
	statsStore := levelDbManager.Reader("stats")
	statsWithHourStore := levelDbManager.ReadingDeleter("stats-with-hour")
	statsWithDayStore := levelDbManager.ReadingDeleter("stats-with-day")
	statsWithReceivedTimestampStore := levelDbManager.ReadingDeleter("stats-with-received-timestamp")
	interarrivalTimesStore := levelDbManager.ReadingDeleter("interarrival-times")
	sizeSummaryStore := levelDbManager.ReadingWriter("size-summary")
	sizeSummaryByHourStore := levelDbManager.ReadingWriter("size-summary-by-hour")
	sizeSummaryByDayStore := levelDbManager.ReadingWriter("size-summary-by-day")
	interarrivalTimesSummaryStore := levelDbManager.ReadingWriter("interarrival-times-summary")
	sizePerDayStore := levelDbManager.ReadingWriter("sizes-by-day")

	sizeSummaryWriter := makeSummaryCsvWriter(csvManager, "size-summary.csv")
	sizeSummaryByHourWriter := makeSummaryByTimestampCsvWriter(csvManager, "size-summary-by-hour.csv")
	sizeSummaryByDayWriter := makeSummaryByTimestampCsvWriter(csvManager, "size-summary-by-day.csv")
	interarrivalTimesSummaryWriter := makeSummaryCsvWriter(csvManager, "interarrival-times-summary.csv")
	sizesPerDayWriter := csvManager.Writer("sizes-per-day.csv", []string{"experiment", "node", "timestamp"}, []string{"count"}, new(string), new(string), new(int64), new(int64))

	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "SummarizeSizes",
			Reader:      statsStore,
			Transformer: transformer.TransformFunc(summarizeSizes),
			Writer:      sizeSummaryStore,
		},
		transformer.PipelineStage{
			Name:        "RekeyStatsByHour",
			Reader:      statsStore,
			Transformer: transformer.MakeMapFunc(rekeyStatsByHour),
			Writer:      store.NewTruncatingWriter(statsWithHourStore),
		},
		transformer.PipelineStage{
			Name:        "SummarizeSizesByHour",
			Reader:      statsWithHourStore,
			Transformer: transformer.TransformFunc(summarizeSizesByTimestamp),
			Writer:      sizeSummaryByHourStore,
		},
		transformer.PipelineStage{
			Name:        "RekeyStatsByDay",
			Reader:      statsStore,
			Transformer: transformer.MakeMapFunc(rekeyStatsByDay),
			Writer:      store.NewTruncatingWriter(statsWithDayStore),
		},
		transformer.PipelineStage{
			Name:        "SummarizeSizesByDay",
			Reader:      statsWithDayStore,
			Transformer: transformer.TransformFunc(summarizeSizesByTimestamp),
			Writer:      sizeSummaryByDayStore,
		},
		transformer.PipelineStage{
			Name:        "RekeyStatsByReceivedTimestamp",
			Reader:      statsStore,
			Transformer: transformer.MakeMapFunc(rekeyStatsByReceviedTimestamp),
			Writer:      store.NewTruncatingWriter(statsWithReceivedTimestampStore),
		},
		transformer.PipelineStage{
			Name:        "ComputeInterarrivalTimes",
			Reader:      statsWithReceivedTimestampStore,
			Transformer: transformer.TransformFunc(computeInterarrivalTimes),
			Writer:      store.NewTruncatingWriter(interarrivalTimesStore),
		},
		transformer.PipelineStage{
			Name:        "SummarizeInterarrival",
			Reader:      interarrivalTimesStore,
			Transformer: transformer.TransformFunc(summarizeInterarrivalTimes),
			Writer:      interarrivalTimesSummaryStore,
		},
		transformer.PipelineStage{
			Name:        "SummarizeSizesPerDay",
			Reader:      statsStore,
			Transformer: transformer.TransformFunc(summarizeSizesPerDay),
			Writer:      sizePerDayStore,
		},
		transformer.PipelineStage{
			Name:        "AggregateExperimentsPerDay",
			Reader:      sizePerDayStore,
			Transformer: transformer.TransformFunc(aggregateSizesPerDay),
			Writer:      sizePerDayStore,
		},
		transformer.PipelineStage{
			Name:   "WriteSizesSummary",
			Reader: sizeSummaryStore,
			Writer: sizeSummaryWriter,
		},
		transformer.PipelineStage{
			Name:   "WriteSizesSummaryByHour",
			Reader: sizeSummaryByHourStore,
			Writer: sizeSummaryByHourWriter,
		},
		transformer.PipelineStage{
			Name:   "WriteSizesSummaryByDay",
			Reader: sizeSummaryByDayStore,
			Writer: sizeSummaryByDayWriter,
		},
		transformer.PipelineStage{
			Name:   "WriteInterarrivalTimesSummary",
			Reader: interarrivalTimesSummaryStore,
			Writer: interarrivalTimesSummaryWriter,
		},
		transformer.PipelineStage{
			Name:   "WriteSizePerDaySummary",
			Reader: sizePerDayStore,
			Writer: sizesPerDayWriter,
		},
	}
}
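makeSummaryCsvWriter and makeSummaryByTimestampCsvWriter are helpers defined elsewhere. Judging from the direct csvManager.Writer calls in the other examples, each presumably wraps a call of the following shape; the column names, the variable layout, and the store.Writer return type in this sketch are all assumptions:

// Purely illustrative; the real helper and its column layout are not shown
// on this page.
func makeSummaryCsvWriterSketch(csvManager store.Manager, filename string) store.Writer {
	var experiment, node string
	var count int64
	// Key columns first, then value columns, then one pointer per column,
	// matching the csvManager.Writer calls in examples #1 and #2.
	return csvManager.Writer(filename, []string{"experiment", "node"}, []string{"count"}, &experiment, &node, &count)
}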