Example no. 1
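// PackagesPipeline extracts installed-package lists from opkg_list-installed logs,
// detects package version changes, and writes the detected changes to SQLite and CSV.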
func PackagesPipeline(levelDbManager, csvManager, sqliteManager store.Manager) transformer.Pipeline {
	logsStore := levelDbManager.Seeker("logs")
	installedPackagesStore := levelDbManager.ReadingWriter("installed-packages")
	versionChangesStore := levelDbManager.ReadingWriter("version-changes")
	var node, packageName string
	var timestamp int64
	var version string
	csvStore := csvManager.Writer("packages.csv", []string{"node", "package", "timestamp"}, []string{"version"}, &node, &packageName, &timestamp, &version)
	sqliteStore := sqliteManager.Writer("packages", []string{"node", "package", "timestamp"}, []string{"version"}, &node, &packageName, &timestamp, &version)
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "OpkgListInstalled",
			Reader:      ReadOnlySomeLogs(logsStore, "opkg_list-installed"),
			Transformer: transformer.MakeDoFunc(extractInstalledPackages),
			Writer:      installedPackagesStore,
		},
		transformer.PipelineStage{
			Name:        "DetectVersionChanges",
			Reader:      installedPackagesStore,
			Transformer: transformer.TransformFunc(detectChangedPackageVersions),
			Writer:      versionChangesStore,
		},
		transformer.PipelineStage{
			Name:   "WriteVersionChangesSqlite",
			Reader: versionChangesStore,
			Writer: sqliteStore,
		},
		transformer.PipelineStage{
			Name:   "WriteVersionChangesCsv",
			Reader: versionChangesStore,
			Writer: csvStore,
		},
	}
}
Example no. 2
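// IndexTarballsPipeline scans health tarballs under tarballsPath (both the all/ and
// by-date/ layouts), records their names, and indexes the logs they contain into the
// logs store, tracking already-indexed tarballs in tarnames-indexed.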
func IndexTarballsPipeline(tarballsPath string, levelDbManager store.Manager) transformer.Pipeline {
	allTarballsPattern := filepath.Join(tarballsPath, "all", "health", "*", "*", "health_*.tar.gz")
	dailyTarballsPattern := filepath.Join(tarballsPath, "by-date", "*", "health", "*", "health_*.tar.gz")
	tarnamesStore := levelDbManager.ReadingWriter("tarnames")
	tarnamesIndexedStore := levelDbManager.ReadingWriter("tarnames-indexed")
	logsStore := levelDbManager.Writer("logs")
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:   "ScanLogTarballs",
			Reader: store.NewGlobReader(allTarballsPattern),
			Writer: tarnamesStore,
		},
		transformer.PipelineStage{
			Name:   "ScanDailyLogTarballs",
			Reader: store.NewGlobReader(dailyTarballsPattern),
			Writer: tarnamesStore,
		},
		transformer.PipelineStage{
			Name:        "ReadLogTarballs",
			Reader:      store.NewDemuxingReader(tarnamesStore, tarnamesIndexedStore),
			Transformer: transformer.MakeMultipleOutputsGroupDoFunc(IndexTarballs, 2),
			Writer:      store.NewMuxingWriter(logsStore, tarnamesIndexedStore),
		},
	}
}
Example no. 3
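// UptimePipeline extracts uptime readings from uptime logs and writes them to CSV and SQLite.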
func UptimePipeline(levelDbManager, csvManager, sqliteManager store.Manager) transformer.Pipeline {
	logsStore := levelDbManager.Seeker("logs")
	uptimeStore := levelDbManager.ReadingWriter("uptime")
	var node string
	var timestamp, uptime int64
	csvStore := csvManager.Writer("uptime.csv", []string{"node", "timestamp"}, []string{"uptime"}, &node, &timestamp, &uptime)
	sqliteStore := sqliteManager.Writer("uptime", []string{"node", "timestamp"}, []string{"uptime"}, &node, &timestamp, &uptime)
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "Uptime",
			Reader:      ReadOnlySomeLogs(logsStore, "uptime"),
			Transformer: transformer.MakeMapFunc(extractUptime),
			Writer:      uptimeStore,
		},
		transformer.PipelineStage{
			Name:   "WriteUptimeCsv",
			Reader: uptimeStore,
			Writer: csvStore,
		},
		transformer.PipelineStage{
			Name:   "WriteUptimeSqlite",
			Reader: uptimeStore,
			Writer: sqliteStore,
		},
	}
}
Example no. 4
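// MemoryUsagePipeline extracts used and free memory from top logs and writes the readings to CSV and SQLite.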
func MemoryUsagePipeline(levelDbManager, csvManager, sqliteManager store.Manager) transformer.Pipeline {
	logsStore := levelDbManager.Seeker("logs")
	memoryUsageStore := levelDbManager.ReadingWriter("memory")
	var node string
	var timestamp, used, free int64
	csvStore := csvManager.Writer("memory.csv", []string{"node", "timestamp"}, []string{"used", "free"}, &node, &timestamp, &used, &free)
	sqliteStore := sqliteManager.Writer("memory", []string{"node", "timestamp"}, []string{"used", "free"}, &node, &timestamp, &used, &free)
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "Memory",
			Reader:      ReadOnlySomeLogs(logsStore, "top"),
			Transformer: transformer.MakeDoFunc(extractMemoryUsage),
			Writer:      memoryUsageStore,
		},
		transformer.PipelineStage{
			Name:   "WriteMemoryUsageCsv",
			Reader: memoryUsageStore,
			Writer: csvStore,
		},
		transformer.PipelineStage{
			Name:   "WriteMemoryUsageSqlite",
			Reader: memoryUsageStore,
			Writer: sqliteStore,
		},
	}
}
Example no. 5
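// RebootsPipeline infers boot timestamps from the uptime store and writes them to CSV and SQLite.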
func RebootsPipeline(levelDbManager, csvManager, sqliteManager store.Manager) transformer.Pipeline {
	uptimeStore := levelDbManager.Seeker("uptime")
	rebootsStore := levelDbManager.ReadingWriter("reboots")
	var node string
	var timestamp int64
	rebootsCsvStore := csvManager.Writer("reboots.csv", []string{"node", "boot_timestamp"}, []string{}, &node, &timestamp)
	rebootsSqliteStore := sqliteManager.Writer("reboots", []string{"node", "boot_timestamp"}, []string{}, &node, &timestamp)

	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "InferReboots",
			Reader:      uptimeStore,
			Transformer: transformer.TransformFunc(inferReboots),
			Writer:      rebootsStore,
		},
		transformer.PipelineStage{
			Name:   "WriteRebootsCsv",
			Reader: rebootsStore,
			Writer: rebootsCsvStore,
		},
		transformer.PipelineStage{
			Name:   "WriteRebootsSqlite",
			Reader: rebootsStore,
			Writer: rebootsSqliteStore,
		},
	}
}
Example no. 6
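// makeSummaryByTimestampCsvWriter builds a CSV writer keyed by experiment, node, and
// timestamp, with a count column followed by one column per percentile
// (0th, 1st, 5th, ..., 99th, 100th).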
func makeSummaryByTimestampCsvWriter(manager store.Manager, name string) store.Writer {
	keyNames := []string{
		"experiment",
		"node",
		"timestamp",
	}
	valueNames := []string{
		"count",
	}
	for _, i := range []int{0, 1, 5, 10, 25, 50, 75, 90, 95, 99, 100} {
		valueNames = append(valueNames, humanize.Ordinal(i))
	}
	arguments := []interface{}{
		name,
		keyNames,
		valueNames,
		new(string), // experiment
		new(string), // node
		new(int64),  // timestamp
		new(int64),  // count
	}
	for range []int{0, 1, 5, 10, 25, 50, 75, 90, 95, 99, 100} {
		arguments = append(arguments, new(int64))
	}
	return manager.Writer(arguments...)
}
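Example no. 7
// FilterNodesPipeline copies the traces belonging to a single node into a
// filtered-<nodeId> store.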
func FilterNodesPipeline(nodeId string, levelDbManager store.Manager) transformer.Pipeline {
	tracesStore := levelDbManager.Seeker("traces")
	filteredStore := levelDbManager.Writer(fmt.Sprintf("filtered-%s", nodeId))
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:   "FilterNode",
			Reader: FilterNodes(tracesStore, nodeId),
			Writer: filteredStore,
		},
	}
}
Example no. 8
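// CsvPipeline dumps the stats store to stats.csv.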
func CsvPipeline(levelDbManager, csvManager store.Manager) transformer.Pipeline {
	var experiment, node, filename string
	var receivedTimestamp, creationTimestamp, size int64
	csvStore := csvManager.Writer("stats.csv", []string{"experiment", "node", "filename"}, []string{"received_timestamp", "creation_timestamp", "size"}, &experiment, &node, &filename, &receivedTimestamp, &creationTimestamp, &size)
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:   "WriteStatsCsv",
			Reader: levelDbManager.Reader("stats"),
			Writer: csvStore,
		},
	}
}
Example no. 9
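// SummarizeHealthPipeline groups memory and filesystem usage records by day, summarizes
// each day's usage, and writes the summaries to CSV.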
func SummarizeHealthPipeline(levelDbManager, csvManager store.Manager) transformer.Pipeline {
	memoryStore := levelDbManager.Reader("memory")
	memoryUsageByDayStore := levelDbManager.ReadingWriter("memory-usage-by-day")
	memoryUsageByDaySummarizedStore := levelDbManager.ReadingWriter("memory-usage-by-day-summarized")
	filesystemStore := levelDbManager.Reader("filesystem")
	filesystemUsageByDayStore := levelDbManager.ReadingWriter("filesystem-usage-by-day")
	filesystemUsageByDaySummarizedStore := levelDbManager.ReadingWriter("filesystem-usage-by-day-summarized")

	var timestamp, usage int64
	var filesystem, node string
	memoryUsageSummaryCsv := csvManager.Writer("memory-usage-summary.csv", []string{"timestamp", "node"}, []string{"usage"}, &timestamp, &node, &usage)
	filesystemUsageSummaryCsv := csvManager.Writer("filesystem-usage-summary.csv", []string{"filesystem", "timestamp", "node"}, []string{"usage"}, &filesystem, &timestamp, &node, &usage)

	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "OrderMemoryUsageByTimestamp",
			Reader:      memoryStore,
			Transformer: transformer.MakeMapFunc(orderRecordsByDay),
			Writer:      memoryUsageByDayStore,
		},
		transformer.PipelineStage{
			Name:        "SummarizeMemoryUsage",
			Reader:      memoryUsageByDayStore,
			Transformer: transformer.TransformFunc(summarizeMemoryUsage),
			Writer:      memoryUsageByDaySummarizedStore,
		},
		transformer.PipelineStage{
			Name:   "WriteMemoryUsageSummaryCsv",
			Reader: memoryUsageByDaySummarizedStore,
			Writer: memoryUsageSummaryCsv,
		},
		transformer.PipelineStage{
			Name:        "OrderFilesystemUsageByTimestamp",
			Reader:      filesystemStore,
			Transformer: transformer.MakeMapFunc(orderFilesystemRecordsByDay),
			Writer:      filesystemUsageByDayStore,
		},
		transformer.PipelineStage{
			Name:        "SummarizeFilesystemUsage",
			Reader:      filesystemUsageByDayStore,
			Transformer: transformer.TransformFunc(summarizeFilesystemUsage),
			Writer:      filesystemUsageByDaySummarizedStore,
		},
		transformer.PipelineStage{
			Name:   "WriteFilesystemUsageSummaryCsv",
			Reader: filesystemUsageByDaySummarizedStore,
			Writer: filesystemUsageSummaryCsv,
		},
	}
}
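Example no. 10
// FilterSessionsPipeline writes the traces that fall between sessionStartTime and
// sessionEndTime (both scaled by 1e6) to the store named by outputName.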
func FilterSessionsPipeline(sessionStartTime, sessionEndTime int64, levelDbManager store.Manager, outputName string) transformer.Pipeline {
	tracesStore := levelDbManager.Reader("traces")
	traceKeyRangesStore := levelDbManager.Reader("availability-done")
	filteredStore := levelDbManager.Writer(outputName)
	parameters := filterSessions{
		SessionStartTime: sessionStartTime * 1000000,
		SessionEndTime:   sessionEndTime * 1000000,
	}
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "FilterSessions",
			Reader:      store.NewDemuxingReader(traceKeyRangesStore, tracesStore),
			Transformer: parameters,
			Writer:      filteredStore,
		},
	}
}
Example no. 11
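// DevicesCountPipeline counts connected devices: Ethernet counts from swconfig_ports
// logs and wireless station counts from iw_station_count logs.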
func DevicesCountPipeline(levelDbManager store.Manager) transformer.Pipeline {
	logsStore := levelDbManager.Seeker("logs")
	devicesCountStore := levelDbManager.Writer("devices-count")
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "ExtractEthernetCount",
			Reader:      ReadOnlySomeLogs(logsStore, "swconfig_ports"),
			Transformer: transformer.MakeDoFunc(extractEthernetCount),
			Writer:      devicesCountStore,
		},
		transformer.PipelineStage{
			Name:        "ExtractWirelessCount",
			Reader:      ReadOnlySomeLogs(logsStore, "iw_station_count"),
			Transformer: transformer.MakeDoFunc(extractWirelessCount),
			Writer:      devicesCountStore,
		},
	}
}
Example no. 12
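// IndexTarballsPipeline scans trace tarballs under tarballsPath, records their names,
// and indexes their contents into the traces store, tracking already-indexed tarballs
// in tarnames-indexed.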
func IndexTarballsPipeline(tarballsPath string, levelDbManager store.Manager) transformer.Pipeline {
	tarballsPattern := filepath.Join(tarballsPath, "*", "*", "*.tar.gz")
	tarnamesStore := levelDbManager.ReadingWriter("tarnames")
	tarnamesIndexedStore := levelDbManager.ReadingWriter("tarnames-indexed")
	tracesStore := levelDbManager.Writer("traces")
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:   "ScanTraceTarballs",
			Reader: store.NewGlobReader(tarballsPattern),
			Writer: tarnamesStore,
		},
		transformer.PipelineStage{
			Name:        "IndexTraces",
			Transformer: transformer.MakeMultipleOutputsGroupDoFunc(IndexTarballs, 2),
			Reader:      store.NewDemuxingReader(tarnamesStore, tarnamesIndexedStore),
			Writer:      store.NewMuxingWriter(tracesStore, tarnamesIndexedStore),
		},
	}
}
Example no. 13
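// DisjointPackagesPipeline scans logs for the disjoint-packages error and writes the
// affected filename, node, and timestamp to not-disjoint.csv.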
func DisjointPackagesPipeline(levelDbManager, csvManager store.Manager) transformer.Pipeline {
	logsStore := levelDbManager.Seeker("logs")
	disjointPackagesStore := levelDbManager.ReadingWriter("disjoint-packages")
	var filename, node string
	var timestamp int64
	csvStore := csvManager.Writer("not-disjoint.csv", []string{"filename", "node", "timestamp"}, []string{}, &filename, &node, &timestamp)
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "DisjointPackages",
			Reader:      logsStore,
			Transformer: transformer.MakeDoFunc(detectDisjointPackagesError),
			Writer:      disjointPackagesStore,
		},
		transformer.PipelineStage{
			Name:   "WriteDisjointPackagesCsv",
			Reader: disjointPackagesStore,
			Writer: csvStore,
		},
	}
}
Example no. 14
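// IpRoutePipeline extracts the default-route gateway from iproute logs and writes it
// to the defaultroutes SQLite table.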
func IpRoutePipeline(levelDbManager, sqliteManager store.Manager) transformer.Pipeline {
	logsStore := levelDbManager.Seeker("logs")
	defaultRoutesStore := levelDbManager.ReadingWriter("default-routes")
	var node string
	var timestamp int64
	var gateway string
	sqliteStore := sqliteManager.Writer("defaultroutes", []string{"node", "timestamp"}, []string{"gateway"}, &node, &timestamp, &gateway)
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "ExtractDefaultRoute",
			Reader:      ReadOnlySomeLogs(logsStore, "iproute"),
			Transformer: transformer.MakeDoFunc(extractDefaultRoute),
			Writer:      defaultRoutesStore,
		},
		transformer.PipelineStage{
			Name:   "WriteDefaultRoutesSqlite",
			Reader: defaultRoutesStore,
			Writer: sqliteStore,
		},
	}
}
Example no. 15
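// FilesystemUsagePipeline extracts per-mount used and free space from df logs and
// writes it to filesystem.csv.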
func FilesystemUsagePipeline(levelDbManager, csvManager store.Manager) transformer.Pipeline {
	logsStore := levelDbManager.Seeker("logs")
	filesystemUsageStore := levelDbManager.ReadingWriter("filesystem")
	var mount, node string
	var timestamp, used, free int64
	csvStore := csvManager.Writer("filesystem.csv", []string{"mount", "node", "timestamp"}, []string{"used", "free"}, &mount, &node, &timestamp, &used, &free)

	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "Filesystem",
			Reader:      ReadOnlySomeLogs(logsStore, "df"),
			Transformer: transformer.MakeDoFunc(extractFilesystemUsage),
			Writer:      filesystemUsageStore,
		},
		transformer.PipelineStage{
			Name:   "WriteFilesystemUsageCsv",
			Reader: filesystemUsageStore,
			Writer: csvStore,
		},
	}
}
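Example no. 16
// LookupsPerDevicePipeline maps consistent traces to per-address-ID domain lookups,
// joins them with the bytesperdomain address ID table to attach device MAC addresses,
// and flattens the result into lookups per device and lookups per device per hour.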
func LookupsPerDevicePipeline(levelDbManager store.Manager) transformer.Pipeline {
	tracesStore := levelDbManager.Seeker("traces")
	availabilityIntervalsStore := levelDbManager.Seeker("consistent-ranges")
	addressIdStore := levelDbManager.Seeker("bytesperdomain-address-id-table")
	addressIdToDomainStore := levelDbManager.SeekingWriter("lookupsperdevice-address-id-to-domain")
	lookupsPerDeviceSharded := levelDbManager.ReadingWriter("lookupsperdevice-sharded")
	lookupsPerDeviceStore := levelDbManager.Writer("lookupsperdevice-lookups-per-device")
	lookupsPerDevicePerHourStore := levelDbManager.Writer("lookupsperdevice-lookups-per-device-per-hour")
	consistentTracesStore := store.NewRangeIncludingReader(tracesStore, availabilityIntervalsStore)
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "LookupsPerDeviceMapper",
			Reader:      consistentTracesStore,
			Transformer: transformer.MakeDoFunc(lookupsPerDeviceMapper),
			Writer:      addressIdToDomainStore,
		},
		transformer.PipelineStage{
			Name:        "JoinMacWithLookups",
			Reader:      store.NewDemuxingSeeker(addressIdStore, addressIdToDomainStore),
			Transformer: transformer.TransformFunc(joinMacWithLookups),
			Writer:      lookupsPerDeviceSharded,
		},
		transformer.PipelineStage{
			Name:        "FlattenLookupsToNodeAndMac",
			Reader:      lookupsPerDeviceSharded,
			Transformer: transformer.TransformFunc(flattenLookupsToNodeAndMac),
			Writer:      lookupsPerDeviceStore,
		},
		transformer.PipelineStage{
			Name:        "FlattenLookupsToNodeMacAndTimestamp",
			Reader:      lookupsPerDeviceSharded,
			Transformer: transformer.TransformFunc(flattenLookupsToNodeMacAndTimestamp),
			Writer:      lookupsPerDevicePerHourStore,
		},
	}
}
Example no. 17
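// makeSummaryCsvWriter builds a CSV writer keyed by experiment and node, with a count
// column followed by one column per percentile from the 0th to the 100th in steps of five.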
func makeSummaryCsvWriter(manager store.Manager, name string) store.Writer {
	keyNames := []string{
		"experiment",
		"node",
	}
	valueNames := []string{
		"count",
	}
	for i := 0; i <= 100; i += 5 {
		valueNames = append(valueNames, humanize.Ordinal(i))
	}
	arguments := []interface{}{
		name,
		keyNames,
		valueNames,
		new(string), // experiment
		new(string), // node
		new(int64),  // count
	}
	for i := 0; i <= 100; i += 5 {
		arguments = append(arguments, new(int64))
	}
	return manager.Writer(arguments...)
}
Example no. 18
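// SummarizePipeline summarizes the stats store: size distributions overall, by hour,
// and by day; interarrival times computed from received timestamps; and sizes per day.
// Each summary is also written to CSV.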
func SummarizePipeline(levelDbManager store.Manager, csvManager store.Manager) transformer.Pipeline {
	statsStore := levelDbManager.Reader("stats")
	statsWithHourStore := levelDbManager.ReadingDeleter("stats-with-hour")
	statsWithDayStore := levelDbManager.ReadingDeleter("stats-with-day")
	statsWithReceivedTimestampStore := levelDbManager.ReadingDeleter("stats-with-received-timestamp")
	interarrivalTimesStore := levelDbManager.ReadingDeleter("interarrival-times")
	sizeSummaryStore := levelDbManager.ReadingWriter("size-summary")
	sizeSummaryByHourStore := levelDbManager.ReadingWriter("size-summary-by-hour")
	sizeSummaryByDayStore := levelDbManager.ReadingWriter("size-summary-by-day")
	interarrivalTimesSummaryStore := levelDbManager.ReadingWriter("interarrival-times-summary")
	sizePerDayStore := levelDbManager.ReadingWriter("sizes-by-day")

	sizeSummaryWriter := makeSummaryCsvWriter(csvManager, "size-summary.csv")
	sizeSummaryByHourWriter := makeSummaryByTimestampCsvWriter(csvManager, "size-summary-by-hour.csv")
	sizeSummaryByDayWriter := makeSummaryByTimestampCsvWriter(csvManager, "size-summary-by-day.csv")
	interarrivalTimesSummaryWriter := makeSummaryCsvWriter(csvManager, "interarrival-times-summary.csv")
	sizesPerDayWriter := csvManager.Writer("sizes-per-day.csv", []string{"experiment", "node", "timestamp"}, []string{"count"}, new(string), new(string), new(int64), new(int64))

	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "SummarizeSizes",
			Reader:      statsStore,
			Transformer: transformer.TransformFunc(summarizeSizes),
			Writer:      sizeSummaryStore,
		},
		transformer.PipelineStage{
			Name:        "RekeyStatsByHour",
			Reader:      statsStore,
			Transformer: transformer.MakeMapFunc(rekeyStatsByHour),
			Writer:      store.NewTruncatingWriter(statsWithHourStore),
		},
		transformer.PipelineStage{
			Name:        "SummarizeSizesByHour",
			Reader:      statsWithHourStore,
			Transformer: transformer.TransformFunc(summarizeSizesByTimestamp),
			Writer:      sizeSummaryByHourStore,
		},
		transformer.PipelineStage{
			Name:        "RekeyStatsByDay",
			Reader:      statsStore,
			Transformer: transformer.MakeMapFunc(rekeyStatsByDay),
			Writer:      store.NewTruncatingWriter(statsWithDayStore),
		},
		transformer.PipelineStage{
			Name:        "SummarizeSizesByDay",
			Reader:      statsWithDayStore,
			Transformer: transformer.TransformFunc(summarizeSizesByTimestamp),
			Writer:      sizeSummaryByDayStore,
		},
		transformer.PipelineStage{
			Name:        "RekeyStatsByReceivedTimestamp",
			Reader:      statsStore,
			Transformer: transformer.MakeMapFunc(rekeyStatsByReceviedTimestamp),
			Writer:      store.NewTruncatingWriter(statsWithReceivedTimestampStore),
		},
		transformer.PipelineStage{
			Name:        "ComputeInterarrivalTimes",
			Reader:      statsWithReceivedTimestampStore,
			Transformer: transformer.TransformFunc(computeInterarrivalTimes),
			Writer:      store.NewTruncatingWriter(interarrivalTimesStore),
		},
		transformer.PipelineStage{
			Name:        "SummarizeInterarrival",
			Reader:      interarrivalTimesStore,
			Transformer: transformer.TransformFunc(summarizeInterarrivalTimes),
			Writer:      interarrivalTimesSummaryStore,
		},
		transformer.PipelineStage{
			Name:        "SummarizeSizesPerDay",
			Reader:      statsStore,
			Transformer: transformer.TransformFunc(summarizeSizesPerDay),
			Writer:      sizePerDayStore,
		},
		transformer.PipelineStage{
			Name:        "AggregateExperimentsPerDay",
			Reader:      sizePerDayStore,
			Transformer: transformer.TransformFunc(aggregateSizesPerDay),
			Writer:      sizePerDayStore,
		},
		transformer.PipelineStage{
			Name:   "WriteSizesSummary",
			Reader: sizeSummaryStore,
			Writer: sizeSummaryWriter,
		},
		transformer.PipelineStage{
			Name:   "WriteSizesSummaryByHour",
			Reader: sizeSummaryByHourStore,
			Writer: sizeSummaryByHourWriter,
		},
		transformer.PipelineStage{
			Name:   "WriteSizesSummaryByDay",
			Reader: sizeSummaryByDayStore,
			Writer: sizeSummaryByDayWriter,
		},
		transformer.PipelineStage{
			Name:   "WriteInterarrivalTimesSummary",
			Reader: interarrivalTimesSummaryStore,
			Writer: interarrivalTimesSummaryWriter,
		},
		transformer.PipelineStage{
			Name:   "WriteSizePerDaySummary",
			Reader: sizePerDayStore,
			Writer: sizesPerDayWriter,
		},
	}
}