func main() {
	defer common.LogPanic()
	common.Init()
	frontend.MustInit()

	// Send start email.
	emailsArr := util.ParseEmails(*emails)
	emailsArr = append(emailsArr, util.CtAdmins...)
	if len(emailsArr) == 0 {
		glog.Error("At least one email address must be specified")
		return
	}
	skutil.LogErr(frontend.UpdateWebappTaskSetStarted(&admin_tasks.RecreateWebpageArchivesUpdateVars{}, *gaeTaskID))
	skutil.LogErr(util.SendTaskStartEmail(emailsArr, "Capture archives", util.GetMasterLogLink(*runID), ""))
	// Ensure webapp is updated and completion email is sent even if task fails.
	defer updateWebappTask()
	defer sendEmail(emailsArr)
	// Cleanup tmp files after the run.
	defer util.CleanTmpDir()
	// Finish with glog flush and how long the task took.
	defer util.TimeTrack(time.Now(), "Capture archives on Workers")
	defer glog.Flush()

	if *pagesetType == "" {
		glog.Error("Must specify --pageset_type")
		return
	}
	if *chromiumBuild == "" {
		glog.Error("Must specify --chromium_build")
		return
	}

	cmd := []string{
		fmt.Sprintf("cd %s;", util.CtTreeDir),
		"git pull;",
		"make all;",
		// The main command that runs capture_archives on all workers.
		fmt.Sprintf("DISPLAY=:0 capture_archives --worker_num=%s --log_dir=%s --log_id=%s --pageset_type=%s --chromium_build=%s;",
			util.WORKER_NUM_KEYWORD, util.GLogDir, *runID, *pagesetType, *chromiumBuild),
	}
	_, err := util.SSH(strings.Join(cmd, " "), util.Slaves, util.CAPTURE_ARCHIVES_TIMEOUT)
	if err != nil {
		glog.Errorf("Error while running cmd %s: %s", cmd, err)
		return
	}
	*taskCompletedSuccessfully = true
}

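// A quick illustration (not from the original source) of what the cmd slice
// above collapses into: each element already ends in a semicolon, so joining
// on a space yields one shell line that util.SSH runs on every worker. All
// concrete values below are hypothetical placeholders.
func exampleJoinedWorkerCmd() string {
	cmd := []string{
		"cd /b/skia-repo/go/src/go.skia.org/infra/ct;", // stands in for util.CtTreeDir
		"git pull;",
		"make all;",
		"DISPLAY=:0 capture_archives --worker_num=WORKER_NUM --log_dir=/b/storage/glog --log_id=example-run --pageset_type=10k --chromium_build=abc123-def456;",
	}
	// Produces: "cd ...; git pull; make all; DISPLAY=:0 capture_archives ...;"
	return strings.Join(cmd, " ")
}
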
func main() {
	common.Init()
	webhook.MustInitRequestSaltFromFile(util.WebhookRequestSaltPath)

	// Send start email.
	emailsArr := util.ParseEmails(*emails)
	emailsArr = append(emailsArr, util.CtAdmins...)
	if len(emailsArr) == 0 {
		glog.Error("At least one email address must be specified")
		return
	}
	skutil.LogErr(frontend.UpdateWebappTaskSetStarted(&frontend.RecreatePageSetsUpdateVars{}, *gaeTaskID))
	skutil.LogErr(util.SendTaskStartEmail(emailsArr, "Creating pagesets"))
	// Ensure webapp is updated and completion email is sent even if task fails.
	defer updateWebappTask()
	defer sendEmail(emailsArr)
	// Cleanup tmp files after the run.
	defer util.CleanTmpDir()
	// Finish with glog flush and how long the task took.
	defer util.TimeTrack(time.Now(), "Creating Pagesets on Workers")
	defer glog.Flush()

	if *pagesetType == "" {
		glog.Error("Must specify --pageset_type")
		return
	}

	cmd := []string{
		fmt.Sprintf("cd %s;", util.CtTreeDir),
		"git pull;",
		"make all;",
		// The main command that runs create_pagesets on all workers.
		fmt.Sprintf("create_pagesets --worker_num=%s --log_dir=%s --pageset_type=%s;",
			util.WORKER_NUM_KEYWORD, util.GLogDir, *pagesetType),
	}
	// Setting a 4 hour timeout since it may take a while to upload page sets to
	// Google Storage when doing 10k page sets per worker.
	if _, err := util.SSH(strings.Join(cmd, " "), util.Slaves, 4*time.Hour); err != nil {
		glog.Errorf("Error while running cmd %s: %s", cmd, err)
		return
	}
	*taskCompletedSuccessfully = true
}

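// updateWebappTask and sendEmail are defined elsewhere in each master script;
// only their deferred call sites appear above. A minimal sketch of the shape
// such a completion handler likely has, keyed off the same
// taskCompletedSuccessfully flag that main sets on its last line. The body
// here is illustrative, not the actual implementation.
func sendEmailSketch(recipients []string) {
	// Because this runs via defer, it reports failure for any early return.
	status := "completed successfully"
	if !*taskCompletedSuccessfully {
		status = "FAILED"
	}
	// A real implementation would build an HTML body and send it through the
	// shared email utility; logging stands in for that here.
	glog.Infof("Would email %v: task %s", recipients, status)
}
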
func main() {
	defer common.LogPanic()
	master_common.Init()

	// Send start email.
	emailsArr := util.ParseEmails(*emails)
	emailsArr = append(emailsArr, util.CtAdmins...)
	if len(emailsArr) == 0 {
		glog.Error("At least one email address must be specified")
		return
	}
	skutil.LogErr(frontend.UpdateWebappTaskSetStarted(&admin_tasks.RecreatePageSetsUpdateVars{}, *gaeTaskID))
	skutil.LogErr(util.SendTaskStartEmail(emailsArr, "Creating pagesets", util.GetMasterLogLink(*runID), ""))
	// Ensure webapp is updated and completion email is sent even if task fails.
	defer updateWebappTask()
	defer sendEmail(emailsArr)
	if !*master_common.Local {
		// Cleanup tmp files after the run.
		defer util.CleanTmpDir()
	}
	// Finish with glog flush and how long the task took.
	defer util.TimeTrack(time.Now(), "Creating Pagesets on Workers")
	defer glog.Flush()

	if *pagesetType == "" {
		glog.Error("Must specify --pageset_type")
		return
	}

	cmd := append(master_common.WorkerSetupCmds(),
		// The main command that runs create_pagesets on all workers.
		fmt.Sprintf(
			"create_pagesets --worker_num=%s --log_dir=%s --log_id=%s --pageset_type=%s --local=%t;",
			util.WORKER_NUM_KEYWORD, util.GLogDir, *runID, *pagesetType, *master_common.Local))
	_, err := util.SSH(strings.Join(cmd, " "), util.Slaves, util.CREATE_PAGESETS_TIMEOUT)
	if err != nil {
		glog.Errorf("Error while running cmd %s: %s", cmd, err)
		return
	}
	*taskCompletedSuccessfully = true
}

func main() {
	defer common.LogPanic()
	master_common.Init()

	// Send start email.
	emailsArr := util.ParseEmails(*emails)
	emailsArr = append(emailsArr, util.CtAdmins...)
	if len(emailsArr) == 0 {
		glog.Error("At least one email address must be specified")
		return
	}
	skutil.LogErr(frontend.UpdateWebappTaskSetStarted(&chromium_builds.UpdateVars{}, *gaeTaskID))
	skutil.LogErr(util.SendTaskStartEmail(emailsArr, "Build chromium", util.GetMasterLogLink(*runID), ""))
	// Ensure webapp is updated and completion email is sent even if task fails.
	defer updateWebappTask()
	defer sendEmail(emailsArr)
	if !*master_common.Local {
		// Cleanup tmp files after the run.
		defer util.CleanTmpDir()
	}
	// Finish with glog flush and how long the task took.
	defer util.TimeTrack(time.Now(), "Running build chromium")
	defer glog.Flush()

	if *chromiumHash == "" {
		glog.Error("Must specify --chromium_hash")
		return
	}
	if *skiaHash == "" {
		glog.Error("Must specify --skia_hash")
		return
	}

	if _, _, err := util.CreateChromiumBuild("", *targetPlatform, *chromiumHash, *skiaHash, *applyPatches); err != nil {
		glog.Errorf("Error while creating the Chromium build: %s", err)
		return
	}
	taskCompletedSuccessfully = true
}

func main() {
	common.Init()
	webhook.MustInitRequestSaltFromFile(util.WebhookRequestSaltPath)

	// Send start email.
	emailsArr := util.ParseEmails(*emails)
	emailsArr = append(emailsArr, util.CtAdmins...)
	if len(emailsArr) == 0 {
		glog.Error("At least one email address must be specified")
		return
	}
	skutil.LogErr(frontend.UpdateWebappTaskSetStarted(&frontend.ChromiumPerfUpdateVars{}, *gaeTaskID))
	skutil.LogErr(util.SendTaskStartEmail(emailsArr, "Chromium perf"))
	// Ensure webapp is updated and email is sent even if task fails.
	defer updateWebappTask()
	defer sendEmail(emailsArr)
	// Cleanup dirs after run completes.
	defer skutil.RemoveAll(filepath.Join(util.StorageDir, util.ChromiumPerfRunsDir))
	defer skutil.RemoveAll(filepath.Join(util.StorageDir, util.BenchmarkRunsDir))
	// Cleanup tmp files after the run.
	defer util.CleanTmpDir()
	// Finish with glog flush and how long the task took.
	defer util.TimeTrack(time.Now(), "Running chromium perf task on workers")
	defer glog.Flush()

	if *pagesetType == "" {
		glog.Error("Must specify --pageset_type")
		return
	}
	if *benchmarkName == "" {
		glog.Error("Must specify --benchmark_name")
		return
	}
	if *runID == "" {
		glog.Error("Must specify --run_id")
		return
	}

	// Instantiate GsUtil object.
	gs, err := util.NewGsUtil(nil)
	if err != nil {
		glog.Errorf("Could not instantiate gsutil object: %s", err)
		return
	}
	remoteOutputDir := filepath.Join(util.ChromiumPerfRunsDir, *runID)

	// Copy the patches to Google Storage.
	skiaPatchName := *runID + ".skia.patch"
	blinkPatchName := *runID + ".blink.patch"
	chromiumPatchName := *runID + ".chromium.patch"
	for _, patchName := range []string{skiaPatchName, blinkPatchName, chromiumPatchName} {
		if err := gs.UploadFile(patchName, os.TempDir(), remoteOutputDir); err != nil {
			glog.Errorf("Could not upload %s to %s: %s", patchName, remoteOutputDir, err)
			return
		}
	}
	skiaPatchLink = util.GS_HTTP_LINK + filepath.Join(util.GS_BUCKET_NAME, remoteOutputDir, skiaPatchName)
	blinkPatchLink = util.GS_HTTP_LINK + filepath.Join(util.GS_BUCKET_NAME, remoteOutputDir, blinkPatchName)
	chromiumPatchLink = util.GS_HTTP_LINK + filepath.Join(util.GS_BUCKET_NAME, remoteOutputDir, chromiumPatchName)

	// Create the two required chromium builds (with patch and without the patch).
	chromiumHash, skiaHash, err := util.CreateChromiumBuild(*runID, *targetPlatform, "", "", true)
	if err != nil {
		glog.Errorf("Could not create chromium build: %s", err)
		return
	}

	// Reboot all workers to start from a clean slate.
	util.RebootWorkers()

	// Run the run_chromium_perf script on all workers.
	runIDNoPatch := *runID + "-nopatch"
	runIDWithPatch := *runID + "-withpatch"
	chromiumBuildNoPatch := fmt.Sprintf("try-%s-%s-%s", chromiumHash, skiaHash, runIDNoPatch)
	chromiumBuildWithPatch := fmt.Sprintf("try-%s-%s-%s", chromiumHash, skiaHash, runIDWithPatch)
	runChromiumPerfCmdTemplate := "DISPLAY=:0 run_chromium_perf " +
		"--worker_num={{.WorkerNum}} --log_dir={{.LogDir}} --pageset_type={{.PagesetType}} " +
		"--chromium_build_nopatch={{.ChromiumBuildNoPatch}} --chromium_build_withpatch={{.ChromiumBuildWithPatch}} " +
		"--run_id_nopatch={{.RunIDNoPatch}} --run_id_withpatch={{.RunIDWithPatch}} " +
		"--benchmark_name={{.BenchmarkName}} --benchmark_extra_args=\"{{.BenchmarkExtraArgs}}\" " +
		"--browser_extra_args_nopatch=\"{{.BrowserExtraArgsNoPatch}}\" --browser_extra_args_withpatch=\"{{.BrowserExtraArgsWithPatch}}\" " +
		"--repeat_benchmark={{.RepeatBenchmark}} --target_platform={{.TargetPlatform}};"
	runChromiumPerfTemplateParsed := template.Must(template.New("run_chromium_perf_cmd").Parse(runChromiumPerfCmdTemplate))
	runChromiumPerfCmdBytes := new(bytes.Buffer)
	if err := runChromiumPerfTemplateParsed.Execute(runChromiumPerfCmdBytes, struct {
		WorkerNum                 string
		LogDir                    string
		PagesetType               string
		ChromiumBuildNoPatch      string
		ChromiumBuildWithPatch    string
		RunIDNoPatch              string
		RunIDWithPatch            string
		BenchmarkName             string
		BenchmarkExtraArgs        string
		BrowserExtraArgsNoPatch   string
		BrowserExtraArgsWithPatch string
		RepeatBenchmark           int
		TargetPlatform            string
	}{
		WorkerNum:                 util.WORKER_NUM_KEYWORD,
		LogDir:                    util.GLogDir,
		PagesetType:               *pagesetType,
		ChromiumBuildNoPatch:      chromiumBuildNoPatch,
		ChromiumBuildWithPatch:    chromiumBuildWithPatch,
		RunIDNoPatch:              runIDNoPatch,
		RunIDWithPatch:            runIDWithPatch,
		BenchmarkName:             *benchmarkName,
		BenchmarkExtraArgs:        *benchmarkExtraArgs,
		BrowserExtraArgsNoPatch:   *browserExtraArgsNoPatch,
		BrowserExtraArgsWithPatch: *browserExtraArgsWithPatch,
		RepeatBenchmark:           *repeatBenchmark,
		TargetPlatform:            *targetPlatform,
	}); err != nil {
		glog.Errorf("Failed to execute template: %s", err)
		return
	}
	cmd := []string{
		fmt.Sprintf("cd %s;", util.CtTreeDir),
		"git pull;",
		"make all;",
		// The main command that runs run_chromium_perf on all workers.
		runChromiumPerfCmdBytes.String(),
	}
	// Setting a 1 day timeout since it may take a while to run benchmarks with
	// many repeats.
	if _, err := util.SSH(strings.Join(cmd, " "), util.Slaves, 1*24*time.Hour); err != nil {
		glog.Errorf("Error while running cmd %s: %s", cmd, err)
		return
	}

	// If "--output-format=csv-pivot-table" was specified, merge all CSV files and upload.
	if strings.Contains(*benchmarkExtraArgs, "--output-format=csv-pivot-table") {
		for _, runID := range []string{runIDNoPatch, runIDWithPatch} {
			if err := mergeUploadCSVFiles(runID, gs); err != nil {
				glog.Errorf("Unable to merge and upload CSV files for %s: %s", runID, err)
			}
		}
	}

	// Compare the resultant CSV files using csv_comparer.py.
	noPatchCSVPath := filepath.Join(util.StorageDir, util.BenchmarkRunsDir, runIDNoPatch, runIDNoPatch+".output")
	withPatchCSVPath := filepath.Join(util.StorageDir, util.BenchmarkRunsDir, runIDWithPatch, runIDWithPatch+".output")
	htmlOutputDir := filepath.Join(util.StorageDir, util.ChromiumPerfRunsDir, *runID, "html")
	skutil.MkdirAll(htmlOutputDir, 0700)
	htmlRemoteDir := filepath.Join(remoteOutputDir, "html")
	htmlOutputLinkBase := util.GS_HTTP_LINK + filepath.Join(util.GS_BUCKET_NAME, htmlRemoteDir) + "/"
	htmlOutputLink = htmlOutputLinkBase + "index.html"
	noPatchOutputLink = util.GS_HTTP_LINK + filepath.Join(util.GS_BUCKET_NAME, util.BenchmarkRunsDir, runIDNoPatch, "consolidated_outputs", runIDNoPatch+".output")
	withPatchOutputLink = util.GS_HTTP_LINK + filepath.Join(util.GS_BUCKET_NAME, util.BenchmarkRunsDir, runIDWithPatch, "consolidated_outputs", runIDWithPatch+".output")

	// Construct path to the csv_comparer python script.
	_, currentFile, _, _ := runtime.Caller(0)
	pathToPyFiles := filepath.Join(
		filepath.Dir(filepath.Dir(filepath.Dir(filepath.Dir(currentFile)))),
		"py")
	pathToCsvComparer := filepath.Join(pathToPyFiles, "csv_comparer.py")
	args := []string{
		pathToCsvComparer,
		"--csv_file1=" + noPatchCSVPath,
		"--csv_file2=" + withPatchCSVPath,
		"--output_html=" + htmlOutputDir,
		"--variance_threshold=" + strconv.FormatFloat(*varianceThreshold, 'f', 2, 64),
		"--discard_outliers=" + strconv.FormatFloat(*discardOutliers, 'f', 2, 64),
		"--absolute_url=" + htmlOutputLinkBase,
		"--requester_email=" + *emails,
		"--skia_patch_link=" + skiaPatchLink,
		"--blink_patch_link=" + blinkPatchLink,
		"--chromium_patch_link=" + chromiumPatchLink,
		"--raw_csv_nopatch=" + noPatchOutputLink,
		"--raw_csv_withpatch=" + withPatchOutputLink,
		"--num_repeated=" + strconv.Itoa(*repeatBenchmark),
		"--target_platform=" + *targetPlatform,
		"--browser_args_nopatch=" + *browserExtraArgsNoPatch,
		"--browser_args_withpatch=" + *browserExtraArgsWithPatch,
		"--pageset_type=" + *pagesetType,
		"--chromium_hash=" + chromiumHash,
		"--skia_hash=" + skiaHash,
	}
	if err := util.ExecuteCmd("python", args, []string{}, 2*time.Hour, nil, nil); err != nil {
		glog.Errorf("Error running csv_comparer.py: %s", err)
		return
	}

	// Copy the HTML files to Google Storage.
	if err := gs.UploadDir(htmlOutputDir, htmlRemoteDir, true); err != nil {
		glog.Errorf("Could not upload %s to %s: %s", htmlOutputDir, htmlRemoteDir, err)
		return
	}
	taskCompletedSuccessfully = true
}

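// mergeUploadCSVFiles is defined elsewhere in this file and its body is not
// shown above. As a rough, self-contained sketch of the merge half of that
// job (assuming "encoding/csv" is imported and that every worker CSV shares
// a single header row; both assumptions and all names here are illustrative):
func mergeCSVsSketch(inputPaths []string, outputPath string) error {
	out, err := os.Create(outputPath)
	if err != nil {
		return err
	}
	defer skutil.Close(out)
	w := csv.NewWriter(out)
	defer w.Flush()
	wroteHeader := false
	for _, p := range inputPaths {
		f, err := os.Open(p)
		if err != nil {
			return err
		}
		rows, err := csv.NewReader(f).ReadAll()
		skutil.Close(f)
		if err != nil {
			return err
		}
		for i, row := range rows {
			// Keep the header row from the first file only.
			if i == 0 && wroteHeader {
				continue
			}
			if err := w.Write(row); err != nil {
				return err
			}
		}
		if len(rows) > 0 {
			wroteHeader = true
		}
	}
	return nil
}
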
func main() {
	common.Init()
	webhook.MustInitRequestSaltFromFile(util.WebhookRequestSaltPath)

	// Send start email.
	emailsArr := util.ParseEmails(*emails)
	emailsArr = append(emailsArr, util.CtAdmins...)
	if len(emailsArr) == 0 {
		glog.Error("At least one email address must be specified")
		return
	}
	skutil.LogErr(frontend.UpdateWebappTaskSetStarted(&frontend.CaptureSkpsUpdateVars{}, *gaeTaskID))
	skutil.LogErr(util.SendTaskStartEmail(emailsArr, "Capture SKPs"))
	// Ensure webapp is updated and completion email is sent even if task
	// fails.
	defer updateWebappTask()
	defer sendEmail(emailsArr)
	// Cleanup tmp files after the run.
	defer util.CleanTmpDir()
	// Finish with glog flush and how long the task took.
	defer util.TimeTrack(time.Now(), "Running capture skps task on workers")
	defer glog.Flush()

	if *pagesetType == "" {
		glog.Error("Must specify --pageset_type")
		return
	}
	if *chromiumBuild == "" {
		glog.Error("Must specify --chromium_build")
		return
	}
	if *runID == "" {
		glog.Error("Must specify --run_id")
		return
	}

	// Run the capture_skps script on all workers.
	captureSKPsCmdTemplate := "DISPLAY=:0 capture_skps --worker_num={{.WorkerNum}} --log_dir={{.LogDir}} " +
		"--pageset_type={{.PagesetType}} --chromium_build={{.ChromiumBuild}} --run_id={{.RunID}} " +
		"--target_platform={{.TargetPlatform}};"
	captureSKPsTemplateParsed := template.Must(template.New("capture_skps_cmd").Parse(captureSKPsCmdTemplate))
	captureSKPsCmdBytes := new(bytes.Buffer)
	if err := captureSKPsTemplateParsed.Execute(captureSKPsCmdBytes, struct {
		WorkerNum      string
		LogDir         string
		PagesetType    string
		ChromiumBuild  string
		RunID          string
		TargetPlatform string
	}{
		WorkerNum:      util.WORKER_NUM_KEYWORD,
		LogDir:         util.GLogDir,
		PagesetType:    *pagesetType,
		ChromiumBuild:  *chromiumBuild,
		RunID:          *runID,
		TargetPlatform: *targetPlatform,
	}); err != nil {
		glog.Errorf("Failed to execute template: %s", err)
		return
	}
	cmd := []string{
		fmt.Sprintf("cd %s;", util.CtTreeDir),
		"git pull;",
		"make all;",
		// The main command that runs capture_skps on all workers.
		captureSKPsCmdBytes.String(),
	}
	// Setting a 2 day timeout since it may take a while to capture 1M SKPs.
	if _, err := util.SSH(strings.Join(cmd, " "), util.Slaves, 2*24*time.Hour); err != nil {
		glog.Errorf("Error while running cmd %s: %s", cmd, err)
		return
	}
	taskCompletedSuccessfully = true
}

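// capture_skps, run_chromium_perf, and run_lua all build their worker command
// line the same way: render a text/template against an anonymous struct and
// splice the result into the ssh command list. A stripped-down, self-contained
// sketch of that pattern (the template text and values are placeholders):
func buildWorkerCmdSketch() (string, error) {
	tmpl := template.Must(template.New("cmd").Parse(
		"capture_skps --worker_num={{.WorkerNum}} --run_id={{.RunID}};"))
	buf := new(bytes.Buffer)
	err := tmpl.Execute(buf, struct {
		WorkerNum string
		RunID     string
	}{
		WorkerNum: "WORKER_NUM", // stands in for util.WORKER_NUM_KEYWORD
		RunID:     "example-run",
	})
	// buf now holds "capture_skps --worker_num=WORKER_NUM --run_id=example-run;"
	return buf.String(), err
}
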
func main() {
	defer common.LogPanic()
	master_common.Init()

	// Send start email.
	emailsArr := util.ParseEmails(*emails)
	emailsArr = append(emailsArr, util.CtAdmins...)
	if len(emailsArr) == 0 {
		glog.Error("At least one email address must be specified")
		return
	}
	skutil.LogErr(frontend.UpdateWebappTaskSetStarted(&capture_skps.UpdateVars{}, *gaeTaskID))
	skutil.LogErr(util.SendTaskStartEmail(emailsArr, "Capture SKPs", util.GetMasterLogLink(*runID), *description))
	// Ensure webapp is updated and completion email is sent even if task
	// fails.
	defer updateWebappTask()
	defer sendEmail(emailsArr)
	if !*master_common.Local {
		// Cleanup tmp files after the run.
		defer util.CleanTmpDir()
	}
	// Finish with glog flush and how long the task took.
	defer util.TimeTrack(time.Now(), "Running capture skps task on workers")
	defer glog.Flush()

	if *pagesetType == "" {
		glog.Error("Must specify --pageset_type")
		return
	}
	if *chromiumBuild == "" {
		glog.Error("Must specify --chromium_build")
		return
	}
	if *runID == "" {
		glog.Error("Must specify --run_id")
		return
	}

	// Run the capture_skps script on all workers.
	captureSKPsCmdTemplate := "DISPLAY=:0 capture_skps --worker_num={{.WorkerNum}} --log_dir={{.LogDir}} --log_id={{.RunID}} " +
		"--pageset_type={{.PagesetType}} --chromium_build={{.ChromiumBuild}} --run_id={{.RunID}} " +
		"--target_platform={{.TargetPlatform}} --local={{.Local}};"
	captureSKPsTemplateParsed := template.Must(template.New("capture_skps_cmd").Parse(captureSKPsCmdTemplate))
	captureSKPsCmdBytes := new(bytes.Buffer)
	if err := captureSKPsTemplateParsed.Execute(captureSKPsCmdBytes, struct {
		WorkerNum      string
		LogDir         string
		PagesetType    string
		ChromiumBuild  string
		RunID          string
		TargetPlatform string
		Local          bool
	}{
		WorkerNum:      util.WORKER_NUM_KEYWORD,
		LogDir:         util.GLogDir,
		PagesetType:    *pagesetType,
		ChromiumBuild:  *chromiumBuild,
		RunID:          *runID,
		TargetPlatform: *targetPlatform,
		Local:          *master_common.Local,
	}); err != nil {
		glog.Errorf("Failed to execute template: %s", err)
		return
	}
	cmd := append(master_common.WorkerSetupCmds(),
		// The main command that runs capture_skps on all workers.
		captureSKPsCmdBytes.String())
	_, err := util.SSH(strings.Join(cmd, " "), util.Slaves, util.CAPTURE_SKPS_TIMEOUT)
	if err != nil {
		glog.Errorf("Error while running cmd %s: %s", cmd, err)
		return
	}
	taskCompletedSuccessfully = true
}

func main() {
	common.Init()
	webhook.MustInitRequestSaltFromFile(util.WebhookRequestSaltPath)

	// Send start email.
	emailsArr := util.ParseEmails(*emails)
	emailsArr = append(emailsArr, util.CtAdmins...)
	if len(emailsArr) == 0 {
		glog.Error("At least one email address must be specified")
		return
	}
	skutil.LogErr(frontend.UpdateWebappTaskSetStarted(&frontend.LuaScriptUpdateVars{}, *gaeTaskID))
	skutil.LogErr(util.SendTaskStartEmail(emailsArr, "Lua script"))
	// Ensure webapp is updated and email is sent even if task fails.
	defer updateWebappTask()
	defer sendEmail(emailsArr)
	// Cleanup tmp files after the run.
	defer util.CleanTmpDir()
	// Finish with glog flush and how long the task took.
	defer util.TimeTrack(time.Now(), "Running Lua script on workers")
	defer glog.Flush()

	if *pagesetType == "" {
		glog.Error("Must specify --pageset_type")
		return
	}
	if *chromiumBuild == "" {
		glog.Error("Must specify --chromium_build")
		return
	}
	if *runID == "" {
		glog.Error("Must specify --run_id")
		return
	}

	// Instantiate GsUtil object.
	gs, err := util.NewGsUtil(nil)
	if err != nil {
		glog.Error(err)
		return
	}

	// Upload the lua script for this run to Google storage.
	luaScriptName := *runID + ".lua"
	defer skutil.Remove(filepath.Join(os.TempDir(), luaScriptName))
	luaScriptRemoteDir := filepath.Join(util.LuaRunsDir, *runID, "scripts")
	luaScriptRemoteLink = util.GS_HTTP_LINK + filepath.Join(util.GS_BUCKET_NAME, luaScriptRemoteDir, luaScriptName)
	if err := gs.UploadFile(luaScriptName, os.TempDir(), luaScriptRemoteDir); err != nil {
		glog.Errorf("Could not upload %s to %s: %s", luaScriptName, luaScriptRemoteDir, err)
		return
	}

	// Run the run_lua script on all workers.
	runLuaCmdTemplate := "DISPLAY=:0 run_lua --worker_num={{.WorkerNum}} --log_dir={{.LogDir}} --pageset_type={{.PagesetType}} --chromium_build={{.ChromiumBuild}} --run_id={{.RunID}};"
	runLuaTemplateParsed := template.Must(template.New("run_lua_cmd").Parse(runLuaCmdTemplate))
	luaCmdBytes := new(bytes.Buffer)
	if err := runLuaTemplateParsed.Execute(luaCmdBytes, struct {
		WorkerNum     string
		LogDir        string
		PagesetType   string
		ChromiumBuild string
		RunID         string
	}{
		WorkerNum:     util.WORKER_NUM_KEYWORD,
		LogDir:        util.GLogDir,
		PagesetType:   *pagesetType,
		ChromiumBuild: *chromiumBuild,
		RunID:         *runID,
	}); err != nil {
		glog.Errorf("Failed to execute template: %s", err)
		return
	}
	cmd := []string{
		fmt.Sprintf("cd %s;", util.CtTreeDir),
		"git pull;",
		"make all;",
		// The main command that runs run_lua on all workers.
		luaCmdBytes.String(),
	}
	if _, err := util.SSH(strings.Join(cmd, " "), util.Slaves, 2*time.Hour); err != nil {
		glog.Errorf("Error while running cmd %s: %s", cmd, err)
		return
	}

	// Copy outputs from all slaves locally and combine them into one file.
	consolidatedFileName := "lua-output"
	consolidatedLuaOutput := filepath.Join(os.TempDir(), consolidatedFileName)
	if err := ioutil.WriteFile(consolidatedLuaOutput, []byte{}, 0660); err != nil {
		glog.Errorf("Could not create %s: %s", consolidatedLuaOutput, err)
		return
	}
	for i := 0; i < util.NUM_WORKERS; i++ {
		workerNum := i + 1
		workerRemoteOutputPath := filepath.Join(util.LuaRunsDir, *runID, fmt.Sprintf("slave%d", workerNum), "outputs", *runID+".output")
		respBody, err := gs.GetRemoteFileContents(workerRemoteOutputPath)
		if err != nil {
			glog.Errorf("Could not fetch %s: %s", workerRemoteOutputPath, err)
			// TODO(rmistry): Should we instead return here? We can only return
			// here if all 100 slaves reliably run without any failures which they
			// really should.
			continue
		}
		defer skutil.Close(respBody)
		out, err := os.OpenFile(consolidatedLuaOutput, os.O_RDWR|os.O_APPEND, 0660)
		if err != nil {
			glog.Errorf("Unable to open file %s: %s", consolidatedLuaOutput, err)
			return
		}
		defer skutil.Close(out)
		defer skutil.Remove(consolidatedLuaOutput)
		if _, err = io.Copy(out, respBody); err != nil {
			glog.Errorf("Unable to write out %s to %s: %s", workerRemoteOutputPath, consolidatedLuaOutput, err)
			return
		}
	}

	// Copy the consolidated file into Google Storage.
	consolidatedOutputRemoteDir := filepath.Join(util.LuaRunsDir, *runID, "consolidated_outputs")
	luaOutputRemoteLink = util.GS_HTTP_LINK + filepath.Join(util.GS_BUCKET_NAME, consolidatedOutputRemoteDir, consolidatedFileName)
	if err := gs.UploadFile(consolidatedFileName, os.TempDir(), consolidatedOutputRemoteDir); err != nil {
		glog.Errorf("Unable to upload %s to %s: %s", consolidatedLuaOutput, consolidatedOutputRemoteDir, err)
		return
	}

	// Upload the lua aggregator (if specified) for this run to Google storage.
	luaAggregatorName := *runID + ".aggregator"
	luaAggregatorPath := filepath.Join(os.TempDir(), luaAggregatorName)
	defer skutil.Remove(luaAggregatorPath)
	luaAggregatorRemoteLink = util.GS_HTTP_LINK + filepath.Join(util.GS_BUCKET_NAME, luaScriptRemoteDir, luaAggregatorName)
	luaAggregatorFileInfo, err := os.Stat(luaAggregatorPath)
	// Only run the aggregator if the file exists and is non-trivial; any Stat
	// error leaves luaAggregatorFileInfo nil, so it must not be dereferenced.
	if err == nil && luaAggregatorFileInfo.Size() > 10 {
		if err := gs.UploadFile(luaAggregatorName, os.TempDir(), luaScriptRemoteDir); err != nil {
			glog.Errorf("Could not upload %s to %s: %s", luaAggregatorName, luaScriptRemoteDir, err)
			return
		}
		// Run the aggregator and save stdout.
		luaAggregatorOutputFileName := *runID + ".agg.output"
		luaAggregatorOutputFilePath := filepath.Join(os.TempDir(), luaAggregatorOutputFileName)
		luaAggregatorOutputFile, err := os.Create(luaAggregatorOutputFilePath)
		if err != nil {
			glog.Errorf("Could not create %s: %s", luaAggregatorOutputFilePath, err)
			return
		}
		defer skutil.Close(luaAggregatorOutputFile)
		defer skutil.Remove(luaAggregatorOutputFilePath)
		if err := util.ExecuteCmd(util.BINARY_LUA, []string{luaAggregatorPath}, []string{}, time.Hour, luaAggregatorOutputFile, nil); err != nil {
			glog.Errorf("Could not execute the lua aggregator %s: %s", luaAggregatorPath, err)
			return
		}
		// Copy the aggregator output into Google Storage.
		luaAggregatorOutputRemoteLink = util.GS_HTTP_LINK + filepath.Join(util.GS_BUCKET_NAME, consolidatedOutputRemoteDir, luaAggregatorOutputFileName)
		if err := gs.UploadFile(luaAggregatorOutputFileName, os.TempDir(), consolidatedOutputRemoteDir); err != nil {
			glog.Errorf("Unable to upload %s to %s: %s", luaAggregatorOutputFileName, consolidatedOutputRemoteDir, err)
			return
		}
	} else {
		glog.Info("A lua aggregator has not been specified.")
	}
	taskCompletedSuccessfully = true
}

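// The fetch-and-append loop above defers Close/Remove inside the loop body, so
// every handle stays open until main returns. A sketch of the usual fix: move
// one iteration into a helper whose defers run per worker. appendWorkerOutput
// is a hypothetical name, and it assumes util.NewGsUtil returns a *util.GsUtil
// with the GetRemoteFileContents method used above.
func appendWorkerOutput(gs *util.GsUtil, remotePath, localPath string) error {
	respBody, err := gs.GetRemoteFileContents(remotePath)
	if err != nil {
		return err
	}
	defer skutil.Close(respBody)
	out, err := os.OpenFile(localPath, os.O_RDWR|os.O_APPEND, 0660)
	if err != nil {
		return err
	}
	defer skutil.Close(out)
	_, err = io.Copy(out, respBody)
	return err
}
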
func main() {
	common.Init()
	webhook.MustInitRequestSaltFromFile(util.WebhookRequestSaltPath)

	// Send start email.
	emailsArr := util.ParseEmails(*emails)
	emailsArr = append(emailsArr, util.CtAdmins...)
	if len(emailsArr) == 0 {
		glog.Error("At least one email address must be specified")
		return
	}
	skutil.LogErr(frontend.UpdateWebappTaskSetStarted(&frontend.ChromiumBuildUpdateVars{}, *gaeTaskID))
	skutil.LogErr(util.SendTaskStartEmail(emailsArr, "Build chromium"))
	// Ensure webapp is updated and completion email is sent even if task fails.
	defer updateWebappTask()
	defer sendEmail(emailsArr)
	// Cleanup tmp files after the run.
	defer util.CleanTmpDir()
	// Finish with glog flush and how long the task took.
	defer util.TimeTrack(time.Now(), "Running build chromium")
	defer glog.Flush()

	if *chromiumHash == "" {
		glog.Error("Must specify --chromium_hash")
		return
	}
	if *skiaHash == "" {
		glog.Error("Must specify --skia_hash")
		return
	}

	if _, _, err := util.CreateChromiumBuild("", *targetPlatform, *chromiumHash, *skiaHash, *applyPatches); err != nil {
		glog.Errorf("Error while creating the Chromium build: %s", err)
		return
	}

	// Find when the requested Chromium revision was submitted.
	stdoutFileName := *runID + ".out"
	stdoutFilePath := filepath.Join(os.TempDir(), stdoutFileName)
	stdoutFile, err := os.Create(stdoutFilePath)
	if err != nil {
		glog.Errorf("Could not create %s: %s", stdoutFilePath, err)
		return
	}
	defer skutil.Close(stdoutFile)
	defer skutil.Remove(stdoutFilePath)
	var chromiumBuildDir string
	if *targetPlatform == "Android" {
		chromiumBuildDir = filepath.Join(util.ChromiumBuildsDir, "android_base")
	} else if *targetPlatform == "Linux" {
		chromiumBuildDir = filepath.Join(util.ChromiumBuildsDir, "linux_base")
	}
	if err := os.Chdir(filepath.Join(chromiumBuildDir, "src")); err != nil {
		glog.Errorf("Could not chdir to %s: %s", chromiumBuildDir, err)
		return
	}
	// Run "git log --pretty=format:%at -1" to get the commit timestamp.
	if err := util.ExecuteCmd(util.BINARY_GIT, []string{"log", "--pretty=format:%at", "-1"}, []string{}, 5*time.Minute, stdoutFile, nil); err != nil {
		glog.Errorf("Could not run git log cmd: %s", err)
		return
	}
	content, err := ioutil.ReadFile(stdoutFilePath)
	if err != nil {
		glog.Errorf("Could not read %s: %s", stdoutFilePath, err)
		return
	}
	chromiumBuildTimestamp = string(content)
	taskCompletedSuccessfully = true
}

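// git log --pretty=format:%at prints the author date as a unix timestamp, so
// chromiumBuildTimestamp above ends up holding a string like "1430754621". A
// small sketch of converting it to a time.Time, assuming strconv, strings,
// and time are imported (parseGitTimestamp is a hypothetical helper):
func parseGitTimestamp(s string) (time.Time, error) {
	secs, err := strconv.ParseInt(strings.TrimSpace(s), 10, 64)
	if err != nil {
		return time.Time{}, err
	}
	return time.Unix(secs, 0).UTC(), nil
}
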
func main() {
	common.Init()
	webhook.MustInitRequestSaltFromFile(util.WebhookRequestSaltPath)

	// Send start email.
	emailsArr := util.ParseEmails(*emails)
	emailsArr = append(emailsArr, util.CtAdmins...)
	if len(emailsArr) == 0 {
		glog.Error("At least one email address must be specified")
		return
	}
	skutil.LogErr(util.SendTaskStartEmail(emailsArr, "Skia correctness"))
	// Ensure webapp is updated and email is sent even if task fails.
	defer updateWebappTask()
	defer sendEmail(emailsArr)
	// Cleanup tmp files after the run.
	defer util.CleanTmpDir()
	// Finish with glog flush and how long the task took.
	defer util.TimeTrack(time.Now(), "Running skia correctness task on workers")
	defer glog.Flush()

	if *pagesetType == "" {
		glog.Error("Must specify --pageset_type")
		return
	}
	if *chromiumBuild == "" {
		glog.Error("Must specify --chromium_build")
		return
	}
	if *runID == "" {
		glog.Error("Must specify --run_id")
		return
	}

	// Instantiate GsUtil object.
	gs, err := util.NewGsUtil(nil)
	if err != nil {
		glog.Error(err)
		return
	}
	remoteOutputDir := filepath.Join(util.SkiaCorrectnessRunsDir, *runID)

	// Copy the patch to Google Storage.
	patchName := *runID + ".patch"
	patchRemoteDir := filepath.Join(remoteOutputDir, "patches")
	if err := gs.UploadFile(patchName, os.TempDir(), patchRemoteDir); err != nil {
		glog.Errorf("Could not upload %s to %s: %s", patchName, patchRemoteDir, err)
		return
	}
	skiaPatchLink = util.GS_HTTP_LINK + filepath.Join(util.GS_BUCKET_NAME, patchRemoteDir, patchName)

	// Run the run_skia_correctness script on all workers.
	runSkiaCorrCmdTemplate := "DISPLAY=:0 run_skia_correctness --worker_num={{.WorkerNum}} --log_dir={{.LogDir}} " +
		"--pageset_type={{.PagesetType}} --chromium_build={{.ChromiumBuild}} --run_id={{.RunID}} " +
		"--render_pictures_args=\"{{.RenderPicturesArgs}}\" --gpu_nopatch_run={{.GpuNoPatchRun}} " +
		"--gpu_withpatch_run={{.GpuWithPatchRun}};"
	runSkiaCorrTemplateParsed := template.Must(template.New("run_skia_correctness_cmd").Parse(runSkiaCorrCmdTemplate))
	runSkiaCorrCmdBytes := new(bytes.Buffer)
	if err := runSkiaCorrTemplateParsed.Execute(runSkiaCorrCmdBytes, struct {
		WorkerNum          string
		LogDir             string
		PagesetType        string
		ChromiumBuild      string
		RunID              string
		RenderPicturesArgs string
		GpuNoPatchRun      string
		GpuWithPatchRun    string
	}{
		WorkerNum:          util.WORKER_NUM_KEYWORD,
		LogDir:             util.GLogDir,
		PagesetType:        *pagesetType,
		ChromiumBuild:      *chromiumBuild,
		RunID:              *runID,
		RenderPicturesArgs: *renderPicturesArgs,
		GpuNoPatchRun:      strconv.FormatBool(*gpuNoPatchRun),
		GpuWithPatchRun:    strconv.FormatBool(*gpuWithPatchRun),
	}); err != nil {
		glog.Errorf("Failed to execute template: %s", err)
		return
	}
	cmd := []string{
		fmt.Sprintf("cd %s;", util.CtTreeDir),
		"git pull;",
		"make all;",
		// The main command that runs run_skia_correctness on all workers.
		runSkiaCorrCmdBytes.String(),
	}
	if _, err := util.SSH(strings.Join(cmd, " "), util.Slaves, 4*time.Hour); err != nil {
		glog.Errorf("Error while running cmd %s: %s", cmd, err)
		return
	}

	localOutputDir := filepath.Join(util.StorageDir, util.SkiaCorrectnessRunsDir, *runID)
	localSummariesDir := filepath.Join(localOutputDir, "summaries")
	skutil.MkdirAll(localSummariesDir, 0700)
	defer skutil.RemoveAll(filepath.Join(util.StorageDir, util.SkiaCorrectnessRunsDir))

	// Copy outputs from all slaves locally.
	for i := 0; i < util.NUM_WORKERS; i++ {
		workerNum := i + 1
		workerLocalOutputPath := filepath.Join(localSummariesDir, fmt.Sprintf("slave%d", workerNum)+".json")
		workerRemoteOutputPath := filepath.Join(remoteOutputDir, fmt.Sprintf("slave%d", workerNum), fmt.Sprintf("slave%d", workerNum)+".json")
		respBody, err := gs.GetRemoteFileContents(workerRemoteOutputPath)
		if err != nil {
			glog.Errorf("Could not fetch %s: %s", workerRemoteOutputPath, err)
			// TODO(rmistry): Should we instead return here? We can only return
			// here if all 100 slaves reliably run without any failures which they
			// really should.
			continue
		}
		defer skutil.Close(respBody)
		out, err := os.Create(workerLocalOutputPath)
		if err != nil {
			glog.Errorf("Unable to create file %s: %s", workerLocalOutputPath, err)
			return
		}
		defer skutil.Close(out)
		defer skutil.Remove(workerLocalOutputPath)
		if _, err = io.Copy(out, respBody); err != nil {
			glog.Errorf("Unable to copy to file %s: %s", workerLocalOutputPath, err)
			return
		}
	}

	// Call json_summary_combiner.py to merge all JSON summaries into the HTML report.
	_, currentFile, _, _ := runtime.Caller(0)
	pathToPyFiles := filepath.Join(
		filepath.Dir(filepath.Dir(filepath.Dir(filepath.Dir(currentFile)))),
		"py")
	pathToJsonCombiner := filepath.Join(pathToPyFiles, "json_summary_combiner.py")
	localHtmlDir := filepath.Join(localOutputDir, "html")
	remoteHtmlDir := filepath.Join(remoteOutputDir, "html")
	baseHtmlLink := util.GS_HTTP_LINK + filepath.Join(util.GS_BUCKET_NAME, remoteHtmlDir) + "/"
	htmlOutputLink = baseHtmlLink + "index.html"
	skutil.MkdirAll(localHtmlDir, 0700)
	args := []string{
		pathToJsonCombiner,
		"--json_summaries_dir=" + localSummariesDir,
		"--output_html_dir=" + localHtmlDir,
		"--absolute_url=" + baseHtmlLink,
		"--render_pictures_args=" + *renderPicturesArgs,
		"--nopatch_gpu=" + strconv.FormatBool(*gpuNoPatchRun),
		"--withpatch_gpu=" + strconv.FormatBool(*gpuWithPatchRun),
	}
	if err := util.ExecuteCmd("python", args, []string{}, 1*time.Hour, nil, nil); err != nil {
		glog.Errorf("Error running json_summary_combiner.py: %s", err)
		return
	}

	// Copy the HTML files to Google Storage.
	if err := gs.UploadDir(localHtmlDir, remoteHtmlDir, true); err != nil {
		glog.Errorf("Could not upload %s to %s: %s", localHtmlDir, remoteHtmlDir, err)
		return
	}
	taskCompletedSuccessfully = true
}

func main() {
	defer common.LogPanic()
	master_common.Init()

	// Send start email.
	emailsArr := util.ParseEmails(*emails)
	emailsArr = append(emailsArr, util.CtAdmins...)
	if len(emailsArr) == 0 {
		glog.Error("At least one email address must be specified")
		return
	}
	skutil.LogErr(util.SendTaskStartEmail(emailsArr, "Fix archives", util.GetMasterLogLink(*runID), ""))
	// Ensure completion email is sent even if task fails.
	defer sendEmail(emailsArr)
	if !*master_common.Local {
		// Cleanup tmp files after the run.
		defer util.CleanTmpDir()
	}
	// Finish with glog flush and how long the task took.
	defer util.TimeTrack(time.Now(), "Running fix archives task on workers")
	defer glog.Flush()

	if *pagesetType == "" {
		glog.Error("Must specify --pageset_type")
		return
	}
	if *chromiumBuild == "" {
		glog.Error("Must specify --chromium_build")
		return
	}
	if *runID == "" {
		glog.Error("Must specify --run_id")
		return
	}

	// Run the fix_archives script on all workers.
	fixArchivesCmdTemplate := "DISPLAY=:0 fix_archives --worker_num={{.WorkerNum}} --log_dir={{.LogDir}} " +
		"--pageset_type={{.PagesetType}} --chromium_build={{.ChromiumBuild}} --run_id={{.RunID}} " +
		"--repeat_benchmark={{.RepeatBenchmark}} --benchmark_name={{.BenchmarkName}} " +
		"--benchmark_header_to_check=\"{{.BenchmarkHeaderToCheck}}\" --delete_pageset={{.DeletePageset}} " +
		"--perc_change_threshold={{.PercentageChangeThreshold}} --res_missing_count_threshold={{.ResourceMissingCountThreshold}} " +
		"--local={{.Local}};"
	fixArchivesTemplateParsed := template.Must(template.New("fix_archives_cmd").Parse(fixArchivesCmdTemplate))
	fixArchivesCmdBytes := new(bytes.Buffer)
	if err := fixArchivesTemplateParsed.Execute(fixArchivesCmdBytes, struct {
		WorkerNum                     string
		LogDir                        string
		PagesetType                   string
		ChromiumBuild                 string
		RunID                         string
		RepeatBenchmark               int
		BenchmarkName                 string
		BenchmarkHeaderToCheck        string
		DeletePageset                 bool
		PercentageChangeThreshold     float64
		ResourceMissingCountThreshold int
		Local                         bool
	}{
		WorkerNum:                     util.WORKER_NUM_KEYWORD,
		LogDir:                        util.GLogDir,
		PagesetType:                   *pagesetType,
		ChromiumBuild:                 *chromiumBuild,
		RunID:                         *runID,
		RepeatBenchmark:               *repeatBenchmark,
		BenchmarkName:                 *benchmarkName,
		BenchmarkHeaderToCheck:        *benchmarkHeaderToCheck,
		DeletePageset:                 *deletePageset,
		PercentageChangeThreshold:     *percentageChangeThreshold,
		ResourceMissingCountThreshold: *resourceMissingCountThreshold,
		Local:                         *master_common.Local,
	}); err != nil {
		glog.Errorf("Failed to execute template: %s", err)
		return
	}
	cmd := append(master_common.WorkerSetupCmds(),
		// The main command that runs fix_archives on all workers.
		fixArchivesCmdBytes.String())
	if _, err := util.SSH(strings.Join(cmd, " "), util.Slaves, util.FIX_ARCHIVES_TIMEOUT); err != nil {
		glog.Errorf("Error while running cmd %s: %s", cmd, err)
		return
	}
	taskCompletedSuccessfully = true
}