// Deactivate any previously activated but undispatched // tasks for the same build variant + display name + project combination // as the task. func DeactivatePreviousTasks(taskId, caller string) error { t, err := task.FindOne(task.ById(taskId)) if err != nil { return err } statuses := []string{evergreen.TaskUndispatched} allTasks, err := task.Find(task.ByActivatedBeforeRevisionWithStatuses(t.RevisionOrderNumber, statuses, t.BuildVariant, t.DisplayName, t.Project)) if err != nil { return err } for _, t := range allTasks { err = SetActiveState(t.Id, caller, false) if err != nil { return err } event.LogTaskDeactivated(t.Id, caller) // update the cached version of the task, in its build document to be deactivated if err = build.SetCachedTaskActivated(t.BuildId, t.Id, false); err != nil { return err } } return nil }
func (uis *UIServer) spawnPage(w http.ResponseWriter, r *http.Request) { flashes := PopFlashes(uis.CookieStore, r, w) projCtx := MustHaveProjectContext(r) var spawnDistro *distro.Distro var spawnTask *task.Task var err error if len(r.FormValue("distro_id")) > 0 { spawnDistro, err = distro.FindOne(distro.ById(r.FormValue("distro_id"))) if err != nil { uis.LoggedError(w, r, http.StatusInternalServerError, fmt.Errorf("Error finding distro %v: %v", r.FormValue("distro_id"), err)) return } } if len(r.FormValue("task_id")) > 0 { spawnTask, err = task.FindOne(task.ById(r.FormValue("task_id"))) if err != nil { uis.LoggedError(w, r, http.StatusInternalServerError, fmt.Errorf("Error finding task %v: %v", r.FormValue("task_id"), err)) return } } uis.WriteHTML(w, http.StatusOK, struct { ProjectData projectContext User *user.DBUser Flashes []interface{} Distro *distro.Distro Task *task.Task MaxHostsPerUser int }{projCtx, GetUser(r), flashes, spawnDistro, spawnTask, spawn.MaxPerUser}, "base", "spawned_hosts.html", "base_angular.html", "menu.html") }
// MarkStart updates the task, build, version and if necessary, patch documents with the task start time func MarkStart(taskId string) error { t, err := task.FindOne(task.ById(taskId)) if err != nil { return err } startTime := time.Now() if err = t.MarkStart(startTime); err != nil { return err } event.LogTaskStarted(t.Id) // ensure the appropriate build is marked as started if necessary if err = build.TryMarkStarted(t.BuildId, startTime); err != nil { return err } // ensure the appropriate version is marked as started if necessary if err = MarkVersionStarted(t.Version, startTime); err != nil { return err } // if it's a patch, mark the patch as started if necessary if t.Requester == evergreen.PatchVersionRequester { if err = patch.TryMarkStarted(t.Version, startTime); err != nil { return err } } // update the cached version of the task, in its build document return build.SetCachedTaskStarted(t.BuildId, t.Id, startTime) }
// Returns true if the task should stepback upon failure, and false // otherwise. Note that the setting is obtained from the top-level // project, if not explicitly set on the task. func getStepback(taskId string, project *Project) (bool, error) { t, err := task.FindOne(task.ById(taskId)) if err != nil { return false, err } projectTask := project.FindProjectTask(t.DisplayName) // Check if the task overrides the stepback policy specified by the project if projectTask != nil && projectTask.Stepback != nil { return *projectTask.Stepback, nil } // Check if the build variant overrides the stepback policy specified by the project for _, buildVariant := range project.BuildVariants { if t.BuildVariant == buildVariant.Name { if buildVariant.Stepback != nil { return *buildVariant.Stepback, nil } break } } return project.Stepback, nil }
func TestProjectTaskExecTimeout(t *testing.T) { setupTlsConfigs(t) for tlsString, tlsConfig := range tlsConfigs { Convey("With agent running a slow test and live API server over "+tlsString, t, func() { testTask, _, err := setupAPITestData(testConfig, "project_exec_timeout_task", "linux-64", filepath.Join(testDirectory, "testdata/config_test_plugin/project/project-timeout-test.yml"), NoPatch, t) testutil.HandleTestingErr(err, t, "Failed to find test task") testServer, err := service.CreateTestServer(testConfig, tlsConfig, plugin.APIPlugins, Verbose) testutil.HandleTestingErr(err, t, "Couldn't create apiserver: %v", err) testAgent, err := New(testServer.URL, testTask.Id, testTask.Secret, "", testConfig.Api.HttpsCert, testPidFile) So(err, ShouldBeNil) So(testAgent, ShouldNotBeNil) Convey("after the slow test runs beyond the project timeout threshold", func() { // actually run the task. // this function won't return until the whole thing is done. testAgent.RunTask() testAgent.APILogger.Flush() time.Sleep(5 * time.Second) printLogsForTask(testTask.Id) Convey("the test should be marked as failed and timed out", func() { So(scanLogsForTask(testTask.Id, "", "executing the pre-run script"), ShouldBeTrue) So(scanLogsForTask(testTask.Id, "", "executing the post-run script!"), ShouldBeTrue) So(scanLogsForTask(testTask.Id, "", "executing the task-timeout script!"), ShouldBeTrue) testTask, err = task.FindOne(task.ById(testTask.Id)) So(testTask.Status, ShouldEqual, evergreen.TaskFailed) So(testTask.Details.TimedOut, ShouldBeTrue) So(testTask.Details.Description, ShouldEqual, "shell.exec") }) }) }) } }
func TestPatchPluginAPI(t *testing.T) { testConfig := evergreen.TestConfig() cwd := testutil.GetDirectoryOfFile() Convey("With a running api server and installed plugin", t, func() { registry := plugin.NewSimpleRegistry() gitPlugin := &GitPlugin{} err := registry.Register(gitPlugin) testutil.HandleTestingErr(err, t, "Couldn't register patch plugin") server, err := service.CreateTestServer(testConfig, nil, plugin.APIPlugins, false) testutil.HandleTestingErr(err, t, "Couldn't set up testing server") taskConfig, _ := plugintest.CreateTestConfig(filepath.Join(cwd, "testdata", "plugin_patch.yml"), t) testCommand := GitGetProjectCommand{Directory: "dir"} _, _, err = plugintest.SetupAPITestData("testTask", filepath.Join(cwd, "testdata", "testmodule.patch"), t) testutil.HandleTestingErr(err, t, "Couldn't set up test documents") testTask, err := task.FindOne(task.ById("testTaskId")) testutil.HandleTestingErr(err, t, "Couldn't set up test patch task") sliceAppender := &evergreen.SliceAppender{[]*slogger.Log{}} logger := agentutil.NewTestLogger(sliceAppender) Convey("calls to existing tasks with patches should succeed", func() { httpCom := plugintest.TestAgentCommunicator(testTask.Id, testTask.Secret, server.URL) pluginCom := &comm.TaskJSONCommunicator{gitPlugin.Name(), httpCom} patch, err := testCommand.GetPatch(taskConfig, pluginCom, logger) So(err, ShouldBeNil) So(patch, ShouldNotBeNil) testutil.HandleTestingErr(db.Clear(version.Collection), t, "unable to clear versions collection") }) Convey("calls to non-existing tasks should fail", func() { v := version.Version{Id: ""} testutil.HandleTestingErr(v.Insert(), t, "Couldn't insert dummy version") httpCom := plugintest.TestAgentCommunicator("BAD_TASK_ID", "", server.URL) pluginCom := &comm.TaskJSONCommunicator{gitPlugin.Name(), httpCom} patch, err := testCommand.GetPatch(taskConfig, pluginCom, logger) So(err.Error(), ShouldContainSubstring, "not found") So(err, ShouldNotBeNil) So(patch, ShouldBeNil) 
testutil.HandleTestingErr(db.Clear(version.Collection), t, "unable to clear versions collection") }) Convey("calls to existing tasks without patches should fail", func() { noPatchTask := task.Task{Id: "noPatchTask", BuildId: "a"} testutil.HandleTestingErr(noPatchTask.Insert(), t, "Couldn't insert patch task") noPatchVersion := version.Version{Id: "noPatchVersion", BuildIds: []string{"a"}} testutil.HandleTestingErr(noPatchVersion.Insert(), t, "Couldn't insert patch version") v := version.Version{Id: ""} testutil.HandleTestingErr(v.Insert(), t, "Couldn't insert dummy version") httpCom := plugintest.TestAgentCommunicator(noPatchTask.Id, "", server.URL) pluginCom := &comm.TaskJSONCommunicator{gitPlugin.Name(), httpCom} patch, err := testCommand.GetPatch(taskConfig, pluginCom, logger) So(err, ShouldNotBeNil) So(err.Error(), ShouldContainSubstring, "no patch found for task") So(patch, ShouldBeNil) testutil.HandleTestingErr(db.Clear(version.Collection), t, "unable to clear versions collection") }) }) }
func (as *APIServer) checkTask(checkSecret bool, next http.HandlerFunc) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { taskId := mux.Vars(r)["taskId"] if taskId == "" { as.LoggedError(w, r, http.StatusBadRequest, fmt.Errorf("missing task id")) return } t, err := task.FindOne(task.ById(taskId)) if err != nil { as.LoggedError(w, r, http.StatusInternalServerError, err) return } if t == nil { as.LoggedError(w, r, http.StatusNotFound, fmt.Errorf("task not found")) return } if checkSecret { secret := r.Header.Get(evergreen.TaskSecretHeader) // Check the secret - if it doesn't match, write error back to the client if secret != t.Secret { evergreen.Logger.Logf(slogger.ERROR, "Wrong secret sent for task %v: Expected %v but got %v", taskId, t.Secret, secret) http.Error(w, "wrong secret!", http.StatusConflict) return } } context.Set(r, apiTaskKey, t) // also set the task in the context visible to plugins plugin.SetTask(r, t) next(w, r) } }
// dBTestsWildcard are the database verification tests for globbed file execution func dBTestsWildcard() { task, err := task.FindOne(task.ById("mocktaskid")) So(err, ShouldBeNil) So(len(task.TestResults), ShouldEqual, TotalResultCount) Convey("along with the proper logs", func() { // junit_1.xml tl := dBFindOneTestLog("pkg1.test.test_things.test_params_func:2") So(tl.Lines[0], ShouldContainSubstring, "FAILURE") So(tl.Lines[6], ShouldContainSubstring, "AssertionError") tl = dBFindOneTestLog("pkg1.test.test_things.SomeTests.test_skippy") So(tl.Lines[0], ShouldContainSubstring, "SKIPPED") // junit_2.xml tl = dBFindOneTestLog("tests.ATest.fail") So(tl.Lines[0], ShouldContainSubstring, "FAILURE") So(tl.Lines[1], ShouldContainSubstring, "AssertionFailedError") // junit_3.xml tl = dBFindOneTestLog( "test.test_threads_replica_set_client.TestThreadsReplicaSet.test_safe_update", ) So(tl.Lines[0], ShouldContainSubstring, "SKIPPED") tl = dBFindOneTestLog("test.test_bson.TestBSON.test_basic_encode") So(tl.Lines[0], ShouldContainSubstring, "AssertionError") }) }
// reachedFailureLimit returns true if task for the previous failure transition alert // happened too long ago, as determined by some magic math. func reachedFailureLimit(taskId string) (bool, error) { t, err := task.FindOne(task.ById(taskId)) if err != nil { return false, err } if t == nil { return false, fmt.Errorf("task %v not found", taskId) } pr, err := model.FindOneProjectRef(t.Project) if err != nil { return false, err } if pr == nil { return false, fmt.Errorf("project ref %v not found", t.Project) } p, err := model.FindProject(t.Revision, pr) if err != nil { return false, err } if p == nil { return false, fmt.Errorf("project %v not found for revision %v", t.Project, t.Revision) } v := p.FindBuildVariant(t.BuildVariant) if v == nil { return false, fmt.Errorf("build variant %v does not exist in project", t.BuildVariant) } batchTime := pr.GetBatchTime(v) reached := time.Since(t.FinishTime) > (time.Duration(batchTime) * time.Minute * failureLimitMultiplier) return reached, nil }
func TestPatchTask(t *testing.T) { setupTlsConfigs(t) testConfig := evergreen.TestConfig() db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(testConfig)) patchModes := []patchTestMode{InlinePatch, ExternalPatch} testutil.ConfigureIntegrationTest(t, testConfig, "TestPatchTask") for tlsString, tlsConfig := range tlsConfigs { for _, testSetup := range testSetups { Convey(testSetup.testSpec, t, func() { Convey("With agent running a patched 'compile'"+tlsString, func() { for _, mode := range patchModes { Convey(fmt.Sprintf("Using patch mode %v", mode.String()), func() { testTask, b, err := setupAPITestData(testConfig, "compile", "linux-64", "testdata/config_test_plugin/project/evergreen-ci-render.yml", mode, t) githash := "1e5232709595db427893826ce19289461cba3f75" setupPatches(mode, b, t, patchRequest{"", "testdata/test.patch", githash}, patchRequest{"recursive", "testdata/testmodule.patch", githash}) testutil.HandleTestingErr(err, t, "Error setting up test data: %v", err) testServer, err := apiserver.CreateTestServer(testConfig, tlsConfig, plugin.APIPlugins, Verbose) testutil.HandleTestingErr(err, t, "Couldn't create apiserver: %v", err) testAgent, err := New(testServer.URL, testTask.Id, testTask.Secret, "", testConfig.Api.HttpsCert) // actually run the task. // this function won't return until the whole thing is done. 
testAgent.RunTask() time.Sleep(100 * time.Millisecond) testAgent.APILogger.FlushAndWait() printLogsForTask(testTask.Id) Convey("all scripts in task should have been run successfully", func() { So(scanLogsForTask(testTask.Id, "executing the pre-run script"), ShouldBeTrue) So(scanLogsForTask(testTask.Id, "executing the post-run script!"), ShouldBeTrue) So(scanLogsForTask(testTask.Id, "Cloning into") || // git 1.8 scanLogsForTask(testTask.Id, "Initialized empty Git repository"), // git 1.7 ShouldBeTrue) So(scanLogsForTask(testTask.Id, "i am patched!"), ShouldBeTrue) So(scanLogsForTask(testTask.Id, "i am a patched module"), ShouldBeTrue) So(scanLogsForTask(testTask.Id, "i am compiling!"), ShouldBeTrue) So(scanLogsForTask(testTask.Id, "i am sanity testing!"), ShouldBeTrue) testTask, err = task.FindOne(task.ById(testTask.Id)) testutil.HandleTestingErr(err, t, "Error finding test task: %v", err) So(testTask.Status, ShouldEqual, evergreen.TaskSucceeded) }) }) } }) }) } } }
func TestDeletingBuild(t *testing.T) { Convey("With a build", t, func() { testutil.HandleTestingErr(db.Clear(build.Collection), t, "Error clearing"+ " '%v' collection", build.Collection) b := &build.Build{ Id: "build", } So(b.Insert(), ShouldBeNil) Convey("deleting it should remove it and all its associated"+ " tasks from the database", func() { testutil.HandleTestingErr(db.ClearCollections(task.Collection), t, "Error"+ " clearing '%v' collection", task.Collection) // insert two tasks that are part of the build, and one that isn't matchingTaskOne := &task.Task{ Id: "matchingOne", BuildId: b.Id, } So(matchingTaskOne.Insert(), ShouldBeNil) matchingTaskTwo := &task.Task{ Id: "matchingTwo", BuildId: b.Id, } So(matchingTaskTwo.Insert(), ShouldBeNil) nonMatchingTask := &task.Task{ Id: "nonMatching", BuildId: "blech", } So(nonMatchingTask.Insert(), ShouldBeNil) // delete the build, make sure only it and its tasks are deleted So(DeleteBuild(b.Id), ShouldBeNil) b, err := build.FindOne(build.ById(b.Id)) So(err, ShouldBeNil) So(b, ShouldBeNil) matchingTasks, err := task.Find(task.ByBuildId("build")) So(err, ShouldBeNil) So(len(matchingTasks), ShouldEqual, 0) nonMatchingTask, err = task.FindOne(task.ById(nonMatchingTask.Id)) So(err, ShouldBeNil) So(nonMatchingTask, ShouldNotBeNil) }) }) }
func TestTaskEndEndpoint(t *testing.T) { setupTlsConfigs(t) for tlsString, tlsConfig := range tlsConfigs { testTask, _, err := setupAPITestData(testConfig, "random", "linux-64", filepath.Join(testDirectory, "testdata/config_test_plugin/project/evergreen-ci-render.yml"), NoPatch, t) testutil.HandleTestingErr(err, t, "Couldn't make test data: %v", err) Convey("With a live api server, agent, and test task over "+tlsString, t, func() { testServer, err := service.CreateTestServer(testConfig, tlsConfig, plugin.APIPlugins, Verbose) testutil.HandleTestingErr(err, t, "Couldn't create apiserver: %v", err) testAgent, err := createAgent(testServer, testTask) testutil.HandleTestingErr(err, t, "failed to create agent: %v") testAgent.heartbeater.Interval = 10 * time.Second testAgent.StartBackgroundActions(&NoopSignalHandler{}) Convey("calling end() should update task's/host's status properly "+ "and start running the next task", func() { subsequentTaskId := testTask.Id + "Two" details := &apimodels.TaskEndDetail{Status: evergreen.TaskSucceeded} taskEndResp, err := testAgent.End(details) time.Sleep(1 * time.Second) So(err, ShouldBeNil) taskUpdate, err := task.FindOne(task.ById(testTask.Id)) So(err, ShouldBeNil) So(taskUpdate.Status, ShouldEqual, evergreen.TaskSucceeded) testHost, err := host.FindOne(host.ById(testTask.HostId)) So(err, ShouldBeNil) So(testHost.RunningTask, ShouldEqual, subsequentTaskId) taskUpdate, err = task.FindOne(task.ById(subsequentTaskId)) So(err, ShouldBeNil) So(taskUpdate.Status, ShouldEqual, evergreen.TaskDispatched) So(taskEndResp, ShouldNotBeNil) So(taskEndResp.RunNext, ShouldBeTrue) So(taskEndResp.TaskId, ShouldEqual, subsequentTaskId) }) }) } }
func TestDeactivatePreviousTask(t *testing.T) { Convey("With two tasks and a build", t, func() { testutil.HandleTestingErr(db.ClearCollections(task.Collection, build.Collection), t, "Error clearing task and build collections") // create two tasks displayName := "testTask" userName := "******" b := &build.Build{ Id: "testBuild", } previousTask := &task.Task{ Id: "one", DisplayName: displayName, RevisionOrderNumber: 1, Priority: 1, Activated: true, ActivatedBy: "user", BuildId: b.Id, Status: evergreen.TaskUndispatched, Project: "sample", } currentTask := &task.Task{ Id: "two", DisplayName: displayName, RevisionOrderNumber: 2, Status: evergreen.TaskFailed, Priority: 1, Activated: true, BuildId: b.Id, Project: "sample", } tc := []build.TaskCache{ { DisplayName: displayName, Id: previousTask.Id, }, { DisplayName: displayName, Id: currentTask.Id, }, } b.Tasks = tc So(b.Insert(), ShouldBeNil) So(previousTask.Insert(), ShouldBeNil) So(currentTask.Insert(), ShouldBeNil) Convey("activating a previous task should set the previous task's active field to true", func() { So(DeactivatePreviousTasks(currentTask.Id, userName), ShouldBeNil) previousTask, err := task.FindOne(task.ById(previousTask.Id)) So(err, ShouldBeNil) So(previousTask.Activated, ShouldBeFalse) }) }) }
func uiGetTaskHistory(w http.ResponseWriter, r *http.Request) { t, err := task.FindOne(task.ById(mux.Vars(r)["task_id"])) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } if t == nil { http.Error(w, "{}", http.StatusNotFound) return } getTaskHistory(t, w, r) }
func TestMarkStart(t *testing.T) { Convey("With a task, build and version", t, func() { testutil.HandleTestingErr(db.ClearCollections(task.Collection, build.Collection, version.Collection), t, "Error clearing task and build collections") displayName := "testName" b := &build.Build{ Id: "buildtest", Status: evergreen.BuildCreated, Version: "abc", } v := &version.Version{ Id: b.Version, Status: evergreen.VersionCreated, } testTask := task.Task{ Id: "testTask", DisplayName: displayName, Activated: true, BuildId: b.Id, Project: "sample", Status: evergreen.TaskUndispatched, Version: b.Version, } b.Tasks = []build.TaskCache{ { Id: testTask.Id, Status: evergreen.TaskUndispatched, }, } So(b.Insert(), ShouldBeNil) So(testTask.Insert(), ShouldBeNil) So(v.Insert(), ShouldBeNil) Convey("when calling MarkStart, the task, version and build should be updated", func() { So(MarkStart(testTask.Id), ShouldBeNil) testTask, err := task.FindOne(task.ById(testTask.Id)) So(err, ShouldBeNil) So(testTask.Status, ShouldEqual, evergreen.TaskStarted) b, err := build.FindOne(build.ById(b.Id)) So(err, ShouldBeNil) So(b.Status, ShouldEqual, evergreen.BuildStarted) So(b.Tasks, ShouldNotBeNil) So(len(b.Tasks), ShouldEqual, 1) So(b.Tasks[0].Status, ShouldEqual, evergreen.TaskStarted) v, err := version.FindOne(version.ById(v.Id)) So(err, ShouldBeNil) So(v.Status, ShouldEqual, evergreen.VersionStarted) }) }) }
func TestAttachResults(t *testing.T) { resetTasks(t) testConfig := evergreen.TestConfig() cwd := testutil.GetDirectoryOfFile() Convey("With attachResults plugin installed into plugin registry", t, func() { registry := plugin.NewSimpleRegistry() attachPlugin := &AttachPlugin{} err := registry.Register(attachPlugin) testutil.HandleTestingErr(err, t, "Couldn't register plugin: %v") server, err := service.CreateTestServer(testConfig, nil, plugin.APIPlugins, true) testutil.HandleTestingErr(err, t, "Couldn't set up testing server") httpCom := plugintest.TestAgentCommunicator("mocktaskid", "mocktasksecret", server.URL) configFile := filepath.Join(cwd, "testdata", "plugin_attach_results.yml") resultsLoc := filepath.Join(cwd, "testdata", "plugin_attach_results.json") taskConfig, err := plugintest.CreateTestConfig(configFile, t) testutil.HandleTestingErr(err, t, "failed to create test config: %v") taskConfig.WorkDir = "." sliceAppender := &evergreen.SliceAppender{[]*slogger.Log{}} logger := agentutil.NewTestLogger(sliceAppender) Convey("all commands in test project should execute successfully", func() { for _, projTask := range taskConfig.Project.Tasks { So(len(projTask.Commands), ShouldNotEqual, 0) for _, command := range projTask.Commands { pluginCmds, err := registry.GetCommands(command, taskConfig.Project.Functions) testutil.HandleTestingErr(err, t, "Couldn't get plugin command: %v") So(pluginCmds, ShouldNotBeNil) So(err, ShouldBeNil) pluginCom := &comm.TaskJSONCommunicator{pluginCmds[0].Plugin(), httpCom} err = pluginCmds[0].Execute(logger, pluginCom, taskConfig, make(chan bool)) So(err, ShouldBeNil) testTask, err := task.FindOne(task.ById(httpCom.TaskId)) testutil.HandleTestingErr(err, t, "Couldn't find task") So(testTask, ShouldNotBeNil) // ensure test results are exactly as expected // attempt to open the file reportFile, err := os.Open(resultsLoc) testutil.HandleTestingErr(err, t, "Couldn't open report file: '%v'", err) results := &task.TestResults{} err = 
util.ReadJSONInto(reportFile, results) testutil.HandleTestingErr(err, t, "Couldn't read report file: '%v'", err) testResults := *results So(testTask.TestResults, ShouldResemble, testResults.Results) testutil.HandleTestingErr(err, t, "Couldn't clean up test temp dir") } } }) }) }
// dBTests are the database verification tests for standard one file execution func dBTests() { task, err := task.FindOne(task.ById("mocktaskid")) So(err, ShouldBeNil) So(len(task.TestResults), ShouldNotEqual, 0) Convey("along with the proper logs", func() { // junit_3.xml tl := dBFindOneTestLog( "test.test_threads_replica_set_client.TestThreadsReplicaSet.test_safe_update", ) So(tl.Lines[0], ShouldContainSubstring, "SKIPPED") tl = dBFindOneTestLog("test.test_bson.TestBSON.test_basic_encode") So(tl.Lines[0], ShouldContainSubstring, "AssertionError") }) }
func TestTaskFailures(t *testing.T) { setupTlsConfigs(t) testutil.ConfigureIntegrationTest(t, testConfig, "TestTaskFailures") for tlsString, tlsConfig := range tlsConfigs { for _, testSetup := range testSetups { Convey(testSetup.testSpec, t, func() { Convey("With agent running a failing test and live API server over "+tlsString, func() { testTask, _, err := setupAPITestData(testConfig, "failing_task", "linux-64", filepath.Join(testDirectory, "testdata/config_test_plugin/project/evergreen-ci-render.yml"), NoPatch, t) testutil.HandleTestingErr(err, t, "Couldn't create test data: %v", err) testServer, err := service.CreateTestServer(testConfig, tlsConfig, plugin.APIPlugins, Verbose) testutil.HandleTestingErr(err, t, "Couldn't create apiserver: %v", err) testAgent, err := createAgent(testServer, testTask) testutil.HandleTestingErr(err, t, "failed to create agent: %v") // actually run the task. // this function won't return until the whole thing is done. testAgent.RunTask() time.Sleep(100 * time.Millisecond) testAgent.APILogger.FlushAndWait() printLogsForTask(testTask.Id) Convey("the pre and post-run scripts should have run", func() { So(scanLogsForTask(testTask.Id, "", "executing the pre-run script"), ShouldBeTrue) So(scanLogsForTask(testTask.Id, "", "executing the post-run script!"), ShouldBeTrue) Convey("the task should have run up until its first failure", func() { So(scanLogsForTask(testTask.Id, "", "starting failing_task!"), ShouldBeTrue) So(scanLogsForTask(testTask.Id, "", "done with failing_task!"), ShouldBeFalse) }) Convey("the tasks's final status should be FAILED", func() { testTask, err = task.FindOne(task.ById(testTask.Id)) testutil.HandleTestingErr(err, t, "Failed to find test task") So(testTask.Status, ShouldEqual, evergreen.TaskFailed) So(testTask.Details.Status, ShouldEqual, evergreen.TaskFailed) So(testTask.Details.Description, ShouldEqual, "failing shell command") So(testTask.Details.TimedOut, ShouldBeFalse) So(testTask.Details.Type, ShouldEqual, 
model.SystemCommandType) }) }) }) }) } } }
func TestTaskAbortion(t *testing.T) { setupTlsConfigs(t) testutil.ConfigureIntegrationTest(t, testConfig, "TestTaskAbortion") for tlsString, tlsConfig := range tlsConfigs { for _, testSetup := range testSetups { Convey(testSetup.testSpec, t, func() { Convey("With agent running a slow test and live API server over "+tlsString, func() { testTask, _, err := setupAPITestData(testConfig, "very_slow_task", "linux-64", filepath.Join(testDirectory, "testdata/config_test_plugin/project/evergreen-ci-render.yml"), NoPatch, t) testutil.HandleTestingErr(err, t, "Failed to find test task") testServer, err := service.CreateTestServer(testConfig, tlsConfig, plugin.APIPlugins, Verbose) testutil.HandleTestingErr(err, t, "Couldn't create apiserver: %v", err) testAgent, err := createAgent(testServer, testTask) testutil.HandleTestingErr(err, t, "failed to create agent: %v") Convey("when the abort signal is triggered on the task", func() { go func() { // Wait for a few seconds, then switch the task to aborted! time.Sleep(3 * time.Second) err := model.AbortTask(testTask.Id, "") testutil.HandleTestingErr(err, t, "Failed to abort test task") fmt.Println("aborted task.") }() // actually run the task. // this function won't return until the whole thing is done. _, err := testAgent.RunTask() So(err, ShouldBeNil) testAgent.APILogger.Flush() time.Sleep(1 * time.Second) printLogsForTask(testTask.Id) Convey("the pre and post-run scripts should have run", func() { So(scanLogsForTask(testTask.Id, "", "executing the pre-run script"), ShouldBeTrue) So(scanLogsForTask(testTask.Id, "", "executing the post-run script!"), ShouldBeTrue) So(scanLogsForTask(testTask.Id, "", "Received abort signal - stopping."), ShouldBeTrue) So(scanLogsForTask(testTask.Id, "", "done with very_slow_task!"), ShouldBeFalse) testTask, err = task.FindOne(task.ById(testTask.Id)) testutil.HandleTestingErr(err, t, "Failed to find test task") So(testTask.Status, ShouldEqual, evergreen.TaskUndispatched) }) }) }) }) } } }
func TestAbortTask(t *testing.T) { Convey("With a task and a build", t, func() { testutil.HandleTestingErr(db.ClearCollections(task.Collection, build.Collection, version.Collection), t, "Error clearing task, build, and version collections") displayName := "testName" userName := "******" b := &build.Build{ Id: "buildtest", } testTask := task.Task{ Id: "testone", DisplayName: displayName, Activated: false, BuildId: b.Id, Status: evergreen.TaskStarted, } finishedTask := task.Task{ Id: "another", DisplayName: displayName, Activated: false, BuildId: b.Id, Status: evergreen.TaskFailed, } b.Tasks = []build.TaskCache{ { Id: testTask.Id, }, { Id: finishedTask.Id, }, } So(b.Insert(), ShouldBeNil) So(testTask.Insert(), ShouldBeNil) So(finishedTask.Insert(), ShouldBeNil) Convey("with a task that has started, aborting a task should work", func() { So(AbortTask(testTask.Id, userName), ShouldBeNil) testTask, err := task.FindOne(task.ById(testTask.Id)) So(err, ShouldBeNil) So(testTask.Activated, ShouldEqual, false) So(testTask.Aborted, ShouldEqual, true) }) Convey("a task that is finished should error when aborting", func() { So(AbortTask(finishedTask.Id, userName), ShouldNotBeNil) }) }) }
// DispatchTaskForHost assigns the task at the head of the task queue to the
// given host, dequeues the task and then marks it as dispatched for the host.
// Non-runnable tasks are skipped (but remain dequeued) and the next queue item
// is tried. Returns (nil, nil) when the queue is empty or exhausted.
func DispatchTaskForHost(taskQueue *model.TaskQueue, assignedHost *host.Host) (
	nextTask *task.Task, err error) {
	if assignedHost == nil {
		return nil, fmt.Errorf("can not assign task to a nil host")
	}

	// only proceed if there are pending tasks left
	for !taskQueue.IsEmpty() {
		queueItem := taskQueue.NextTask()
		// fetch the full task document from the database; the queue item only
		// carries the task's id
		nextTask, err = task.FindOne(task.ById(queueItem.Id))
		if err != nil {
			return nil, fmt.Errorf("error finding task with id %v: %v",
				queueItem.Id, err)
		}
		if nextTask == nil {
			return nil, fmt.Errorf("refusing to move forward because queued "+
				"task with id %v does not exist", queueItem.Id)
		}

		// dequeue the task from the queue. note this happens BEFORE the
		// runnability check below, so a skipped task is still removed from
		// the queue rather than being retried forever
		if err = taskQueue.DequeueTask(nextTask.Id); err != nil {
			return nil, fmt.Errorf("error pulling task with id %v from "+
				"queue for distro %v: %v", nextTask.Id,
				nextTask.DistroId, err)
		}

		// validate that the task can be run, if not fetch the next one in
		// the queue
		if shouldSkipTask(nextTask) {
			evergreen.Logger.Logf(slogger.WARN, "Skipping task %v, which was "+
				"picked up to be run but is not runnable - "+
				"status (%v) activated (%v)", nextTask.Id, nextTask.Status,
				nextTask.Activated)
			continue
		}

		// record that the task was dispatched on the host
		if err := model.MarkTaskDispatched(nextTask, assignedHost.Id,
			assignedHost.Distro.Id); err != nil {
			return nil, err
		}
		return nextTask, nil
	}
	// queue drained without finding a runnable task
	return nil, nil
}
// TryResetTask resets a task so it can be re-executed. It enforces the
// maximum-execution cap (which UI-originated requests are allowed to bypass),
// refuses UI requests to reset an unfinished task, records the task's end
// details when supplied, and logs the restart event on success.
//
//	taskId - id of the task to reset
//	user   - user requesting the reset (used for UI event attribution)
//	origin - requesting subsystem; evergreen.UIPackage gets special handling
//	p      - project the task belongs to, passed through to MarkEnd
//	detail - end details to record; may be nil only for UI requests
func TryResetTask(taskId, user, origin string, p *Project, detail *apimodels.TaskEndDetail) (err error) {
	t, err := task.FindOne(task.ById(taskId))
	if err != nil {
		return err
	}
	// if we've reached the max number of executions for this task, mark it
	// as finished and failed
	if t.Execution >= evergreen.MaxTaskExecution {
		// restarting from the UI bypasses the restart cap
		message := fmt.Sprintf("Task '%v' reached max execution (%v):", t.Id, evergreen.MaxTaskExecution)
		if origin == evergreen.UIPackage {
			evergreen.Logger.Logf(slogger.DEBUG, "%v allowing exception for %v", message, user)
		} else {
			evergreen.Logger.Logf(slogger.DEBUG, "%v marking as failed", message)
			if detail != nil {
				return MarkEnd(t.Id, origin, time.Now(), detail, p, false)
			} else {
				// non-UI callers are expected to always supply end details;
				// a nil detail here is treated as a programmer error
				panic(fmt.Sprintf("TryResetTask called with nil TaskEndDetail by %v", origin))
			}
		}
	}

	// only allow re-execution for failed or successful tasks
	if !task.IsFinished(*t) {
		// this is to disallow terminating running tasks via the UI
		if origin == evergreen.UIPackage {
			evergreen.Logger.Logf(slogger.DEBUG, "Unsatisfiable '%v' reset request on '%v' (status: '%v')",
				user, t.Id, t.Status)
			return fmt.Errorf("Task '%v' is currently '%v' - can not reset task in this status",
				t.Id, t.Status)
		}
	}

	// record the supplied end details before resetting
	if detail != nil {
		if err = t.MarkEnd(origin, time.Now(), detail); err != nil {
			return fmt.Errorf("Error marking task as ended: %v", err)
		}
	}

	// reset the task; attribute the restart to the user for UI requests and
	// to the requesting subsystem otherwise
	if err = resetTask(t.Id); err == nil {
		if origin == evergreen.UIPackage {
			event.LogTaskRestarted(t.Id, user)
		} else {
			event.LogTaskRestarted(t.Id, origin)
		}
	}
	return err
}
// NextTaskForHost the next task that should be run on the host. func NextTaskForHost(h *host.Host) (*task.Task, error) { taskQueue, err := FindTaskQueueForDistro(h.Distro.Id) if err != nil { return nil, err } if taskQueue == nil || taskQueue.IsEmpty() { return nil, nil } nextTaskId := taskQueue.Queue[0].Id fullTask, err := task.FindOne(task.ById(nextTaskId)) if err != nil { return nil, err } return fullTask, nil }
// SetActiveState sets the Activated state of the task with the given id on
// behalf of the given caller, keeping the task's cached copy in its build
// document in sync. Activating a task recursively activates all of its
// dependencies; a task that was previously dispatched is reset rather than
// simply re-activated. A deactivation request from a system activator is
// silently ignored when the task was activated by a non-system caller.
func SetActiveState(taskId string, caller string, active bool) error {
	t, err := task.FindOne(task.ById(taskId))
	if err != nil {
		return err
	}
	if active {
		// if the task is being activated, make sure to activate all of the
		// task's dependencies as well
		for _, dep := range t.DependsOn {
			if err = SetActiveState(dep.TaskId, caller, true); err != nil {
				return fmt.Errorf("error activating dependency for %v with id %v: %v",
					taskId, dep.TaskId, err)
			}
		}

		if t.DispatchTime != util.ZeroTime && t.Status == evergreen.TaskUndispatched {
			// the task has a dispatch time but is undispatched, i.e. it ran
			// before; reset it so it can run again
			err = resetTask(t.Id)
			if err != nil {
				return fmt.Errorf("error resetting task: %v:", err.Error())
			}
		} else {
			err = t.ActivateTask(caller)
			if err != nil {
				return fmt.Errorf("error while activating task: %v", err.Error())
			}
		}
		// if the caller is not evergreen or the task was activated by
		// evergreen, deactivate it
	} else if !evergreen.IsSystemActivator(caller) || evergreen.IsSystemActivator(t.ActivatedBy) {
		err = t.DeactivateTask(caller)
		if err != nil {
			return fmt.Errorf("error deactivating task : %v:", err.Error())
		}
	} else {
		// a system activator may not deactivate a user-activated task; this
		// is not an error
		return nil
	}

	if active {
		event.LogTaskActivated(taskId, caller)
	} else {
		event.LogTaskDeactivated(taskId, caller)
	}
	// keep the cached copy of the task in its build document in sync
	return build.SetCachedTaskActivated(t.BuildId, taskId, active)
}
// getTags finds TaskJSONs that have tags func getTags(w http.ResponseWriter, r *http.Request) { t, err := task.FindOne(task.ById(mux.Vars(r)["task_id"])) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } tags := []struct { Tag string `bson:"_id" json:"tag"` }{} err = db.Aggregate(collection, []bson.M{ {"$match": bson.M{ProjectIdKey: t.Project, TagKey: bson.M{"$exists": true, "$ne": ""}}}, {"$project": bson.M{TagKey: 1}}, bson.M{"$group": bson.M{"_id": "$tag"}}, }, &tags) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } plugin.WriteJSON(w, http.StatusOK, tags) }
func AbortTask(taskId, caller string) error { t, err := task.FindOne(task.ById(taskId)) if err != nil { return err } if !task.IsAbortable(*t) { return fmt.Errorf("Task '%v' is currently '%v' - cannot abort task"+ " in this status", t.Id, t.Status) } evergreen.Logger.Logf(slogger.DEBUG, "Aborting task %v", t.Id) // set the active state and then set the abort if err = SetActiveState(t.Id, caller, false); err != nil { return err } event.LogTaskAbortRequest(t.Id, caller) return t.SetAborted() }
// handleTaskTags will update the TaskJSON's tags depending on the request. func handleTaskTag(w http.ResponseWriter, r *http.Request) { t, err := task.FindOne(task.ById(mux.Vars(r)["task_id"])) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } if t == nil { http.Error(w, "{}", http.StatusNotFound) return } if r.Method == "DELETE" { if _, err = db.UpdateAll(collection, bson.M{VersionIdKey: t.Version, NameKey: mux.Vars(r)["name"]}, bson.M{"$unset": bson.M{TagKey: 1}}); err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } plugin.WriteJSON(w, http.StatusOK, "") } inTag := struct { Tag string `json:"tag"` }{} err = util.ReadJSONInto(r.Body, &inTag) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } if len(inTag.Tag) == 0 { http.Error(w, "tag must not be blank", http.StatusBadRequest) return } _, err = db.UpdateAll(collection, bson.M{VersionIdKey: t.Version, NameKey: mux.Vars(r)["name"]}, bson.M{"$set": bson.M{TagKey: inTag.Tag}}) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } plugin.WriteJSON(w, http.StatusOK, "") }
func (uis *UIServer) hostPage(w http.ResponseWriter, r *http.Request) { projCtx := MustHaveProjectContext(r) vars := mux.Vars(r) id := vars["host_id"] h, err := host.FindOne(host.ById(id)) if err != nil { uis.LoggedError(w, r, http.StatusInternalServerError, err) return } if h == nil { http.Error(w, "Host not found", http.StatusNotFound) return } events, err := event.Find(event.MostRecentHostEvents(id, 50)) if err != nil { uis.LoggedError(w, r, http.StatusInternalServerError, err) return } runningTask := &task.Task{} if h.RunningTask != "" { runningTask, err = task.FindOne(task.ById(h.RunningTask)) if err != nil { uis.LoggedError(w, r, http.StatusInternalServerError, err) return } } flashes := PopFlashes(uis.CookieStore, r, w) uis.WriteHTML(w, http.StatusOK, struct { Flashes []interface{} Events []event.Event Host *host.Host RunningTask *task.Task User *user.DBUser ProjectData projectContext }{flashes, events, h, runningTask, GetUser(r), projCtx}, "base", "host.html", "base_angular.html", "menu.html") }
// populateTaskBuildVersion takes a task, build, and version ID and populates a projectContext // with as many of the task, build, and version documents as possible. // If any of the provided IDs is blank, they will be inferred from the more selective ones. // Returns the project ID of the data found, which may be blank if the IDs are empty. func (pc *projectContext) populateTaskBuildVersion(taskId, buildId, versionId string) (string, error) { projectId := "" var err error // Fetch task if there's a task ID present; if we find one, populate build/version IDs from it if len(taskId) > 0 { pc.Task, err = task.FindOne(task.ById(taskId)) if err != nil { return "", err } if pc.Task != nil { // override build and version ID with the ones this task belongs to buildId = pc.Task.BuildId versionId = pc.Task.Version projectId = pc.Task.Project } } // Fetch build if there's a build ID present; if we find one, populate version ID from it if len(buildId) > 0 { pc.Build, err = build.FindOne(build.ById(buildId)) if err != nil { return "", err } if pc.Build != nil { versionId = pc.Build.Version projectId = pc.Build.Project } } if len(versionId) > 0 { pc.Version, err = version.FindOne(version.ById(versionId)) if err != nil { return "", err } if pc.Version != nil { projectId = pc.Version.Identifier } } return projectId, nil }
// reset task finds a task, attempts to archive it, and resets the task and resets the TaskCache in the build as well. func resetTask(taskId string) error { t, err := task.FindOne(task.ById(taskId)) if err != nil { return err } if err := t.Archive(); err != nil { return fmt.Errorf("Can't restart task because it can't be archived: %v", err) } if err = t.Reset(); err != nil { return err } // update the cached version of the task, in its build document if err = build.ResetCachedTask(t.BuildId, t.Id); err != nil { return err } return UpdateBuildAndVersionStatusForTask(t.Id) }