func init() { db.SetGlobalSessionProvider(db.SessionFactoryFromConfig( taskImportanceCmpTestConf)) if taskImportanceCmpTestConf.Scheduler.LogFile != "" { evergreen.SetLogger(taskImportanceCmpTestConf.Scheduler.LogFile) } }
func init() { db.SetGlobalSessionProvider( db.SessionFactoryFromConfig(hostAllocatorTestConf)) if hostAllocatorTestConf.Scheduler.LogFile != "" { evergreen.SetLogger(hostAllocatorTestConf.Scheduler.LogFile) } }
func init() { db.SetGlobalSessionProvider(db.SessionFactoryFromConfig( taskQueuePersisterTestConf)) if taskQueuePersisterTestConf.Scheduler.LogFile != "" { evergreen.SetLogger(taskQueuePersisterTestConf.Scheduler.LogFile) } }
func init() { db.SetGlobalSessionProvider(db.SessionFactoryFromConfig( taskDurationEstimatorTestConf)) if taskDurationEstimatorTestConf.Scheduler.LogFile != "" { evergreen.SetLogger(taskDurationEstimatorTestConf.Scheduler.LogFile) } }
// TestFindRunningSpawnedHosts exercises the IsRunningAndSpawned host query:
// with an empty collection it returns nothing; after inserting a running,
// user-started host it returns exactly that host. Integration test — talks
// to the test database configured via evergreen.TestConfig().
func TestFindRunningSpawnedHosts(t *testing.T) {
	testConfig := evergreen.TestConfig()
	db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(testConfig))
	testutil.HandleTestingErr(db.Clear(Collection), t, "Error"+
		" clearing '%v' collection", Collection)
	Convey("With calling FindRunningSpawnedHosts...", t, func() {
		Convey("if there are no spawned hosts, nothing should be returned", func() {
			spawnedHosts, err := Find(IsRunningAndSpawned)
			So(err, ShouldBeNil)
			// make sure we returned no documents
			So(len(spawnedHosts), ShouldEqual, 0)
		})
		Convey("if there are spawned hosts, they should be returned", func() {
			host := &Host{}
			host.Id = "spawned-1"
			host.Status = "running"
			host.StartedBy = "user1"
			testutil.HandleTestingErr(host.Insert(), t, "error from "+
				"FindRunningSpawnedHosts")
			spawnedHosts, err := Find(IsRunningAndSpawned)
			testutil.HandleTestingErr(err, t, "error from "+
				"FindRunningSpawnedHosts: %v", err)
			// make sure we returned exactly the one host inserted above
			So(len(spawnedHosts), ShouldEqual, 1)
		})
	})
}
// main is the entry point for the runner daemon. It loads settings (exiting
// on failure), optionally redirects logging, registers the global DB session
// provider, and then either runs a single named process (when a command-line
// argument is present) or starts all registered Runners on a repeating
// interval, shutting them down cleanly on SIGTERM.
func main() {
	settings := evergreen.GetSettingsOrExit()
	if settings.Runner.LogFile != "" {
		evergreen.SetLogger(settings.Runner.LogFile)
	}
	// dump goroutine stacks on SIGQUIT for live debugging
	go util.DumpStackOnSIGQUIT(os.Stdout)
	db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(settings))

	// just run one process if an argument was passed in
	// NOTE(review): assumes flag.Parse() was called elsewhere (e.g. in an
	// init or earlier setup) — confirm, otherwise flag.Arg(0) is always "".
	if flag.Arg(0) != "" {
		err := runProcessByName(flag.Arg(0), settings)
		if err != nil {
			evergreen.Logger.Logf(slogger.ERROR, "Error: %v", err)
			os.Exit(1)
		}
		return
	}

	// fall back to the default interval when the configured one is invalid
	if settings.Runner.IntervalSeconds <= 0 {
		evergreen.Logger.Logf(slogger.WARN, "Interval set to %vs (<= 0s) using %vs instead",
			settings.Runner.IntervalSeconds, runInterval)
	} else {
		runInterval = settings.Runner.IntervalSeconds
	}

	// start and schedule runners
	wg := &sync.WaitGroup{}
	ch := startRunners(wg, settings)
	go listenForSIGTERM(ch)

	// wait for all the processes to exit
	wg.Wait()
	evergreen.Logger.Logf(slogger.INFO, "Cleanly terminated all %v processes", len(Runners))
}
// main is the entry point for the UI server. It loads settings, wires up
// logging and the DB session provider, builds the UIServer (user manager,
// cookie store, router, template functions, renderer, plugins), and serves
// HTTP via negroni under graceful shutdown.
func main() {
	settings := evergreen.GetSettingsOrExit()
	if settings.Ui.LogFile != "" {
		evergreen.SetLogger(settings.Ui.LogFile)
	}
	db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(settings))
	home := evergreen.FindEvergreenHome()
	userManager, err := auth.LoadUserManager(settings.AuthConfig)
	if err != nil {
		fmt.Println("Failed to create user manager:", err)
		os.Exit(1)
	}
	cookieStore := sessions.NewCookieStore([]byte(settings.Ui.Secret))
	// positional struct literal — field meanings noted per line
	uis := ui.UIServer{
		nil,             // render
		settings.Ui.Url, // RootURL
		userManager,     // User Manager
		*settings,       // mci settings
		cookieStore,     // cookiestore
		nil,             // plugin panel manager
	}
	router, err := uis.NewRouter()
	if err != nil {
		fmt.Println("Failed to create router:", err)
		os.Exit(1)
	}
	webHome := filepath.Join(home, "public")
	functionOptions := ui.FuncOptions{webHome, settings.Ui.HelpUrl, true, router}
	functions, err := ui.MakeTemplateFuncs(functionOptions, settings.SuperUsers)
	if err != nil {
		fmt.Println("Failed to create template function map:", err)
		os.Exit(1)
	}
	uis.Render = render.New(render.Options{
		Directory:    filepath.Join(home, ui.WebRootPath, ui.Templates),
		DisableCache: !settings.Ui.CacheTemplates,
		Funcs:        functions,
	})
	err = uis.InitPlugins()
	if err != nil {
		// plugin failures are non-fatal: the server still starts
		fmt.Println("WARNING: Error initializing plugins:", err)
	}
	n := negroni.New()
	n.Use(negroni.NewStatic(http.Dir(webHome)))
	n.Use(ui.NewLogger())
	n.Use(negroni.HandlerFunc(ui.UserMiddleware(userManager)))
	n.UseHandler(router)
	graceful.Run(settings.Ui.HttpListenAddr, requestTimeout, n)
	evergreen.Logger.Logf(slogger.INFO, "UI server cleanly terminated")
}
func init() { dbutil.SetGlobalSessionProvider(dbutil.SessionFactoryFromConfig(testConfig)) testSetups = []testConfigPath{ {"With plugin mode test config", filepath.Join(testutil.GetDirectoryOfFile(), "testdata", "config_test_plugin")}, } }
// TestPatchTask is an end-to-end agent integration test: for every TLS
// configuration, test setup, and patch mode (inline and external), it sets up
// a patched 'compile' task, runs the agent against a test API server, and
// verifies via the task logs that the pre/post scripts, git clone, patch
// application (including a module patch), compile, and sanity-test steps all
// ran, and that the task finished with status "succeeded".
func TestPatchTask(t *testing.T) {
	setupTlsConfigs(t)
	testConfig := evergreen.TestConfig()
	db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(testConfig))
	patchModes := []patchTestMode{InlinePatch, ExternalPatch}
	testutil.ConfigureIntegrationTest(t, testConfig, "TestPatchTask")
	for tlsString, tlsConfig := range tlsConfigs {
		for _, testSetup := range testSetups {
			Convey(testSetup.testSpec, t, func() {
				Convey("With agent running a patched 'compile'"+tlsString, func() {
					for _, mode := range patchModes {
						Convey(fmt.Sprintf("Using patch mode %v", mode.String()), func() {
							testTask, b, err := setupAPITestData(testConfig, "compile", "linux-64",
								"testdata/config_test_plugin/project/evergreen-ci-render.yml", mode, t)
							// fixed revision the checked-in test patches were generated against
							githash := "1e5232709595db427893826ce19289461cba3f75"
							setupPatches(mode, b, t,
								patchRequest{"", "testdata/test.patch", githash},
								patchRequest{"recursive", "testdata/testmodule.patch", githash})
							testutil.HandleTestingErr(err, t, "Error setting up test data: %v", err)
							testServer, err := apiserver.CreateTestServer(testConfig, tlsConfig, plugin.APIPlugins, Verbose)
							testutil.HandleTestingErr(err, t, "Couldn't create apiserver: %v", err)
							// NOTE(review): the err returned by New is not checked before
							// RunTask is called — confirm whether that's intentional.
							testAgent, err := New(testServer.URL, testTask.Id, testTask.Secret, "", testConfig.Api.HttpsCert)
							// actually run the task.
							// this function won't return until the whole thing is done.
							testAgent.RunTask()
							time.Sleep(100 * time.Millisecond)
							testAgent.APILogger.FlushAndWait()
							printLogsForTask(testTask.Id)
							Convey("all scripts in task should have been run successfully", func() {
								So(scanLogsForTask(testTask.Id, "executing the pre-run script"), ShouldBeTrue)
								So(scanLogsForTask(testTask.Id, "executing the post-run script!"), ShouldBeTrue)
								So(scanLogsForTask(testTask.Id, "Cloning into") || // git 1.8
									scanLogsForTask(testTask.Id, "Initialized empty Git repository"), // git 1.7
									ShouldBeTrue)
								So(scanLogsForTask(testTask.Id, "i am patched!"), ShouldBeTrue)
								So(scanLogsForTask(testTask.Id, "i am a patched module"), ShouldBeTrue)
								So(scanLogsForTask(testTask.Id, "i am compiling!"), ShouldBeTrue)
								So(scanLogsForTask(testTask.Id, "i am sanity testing!"), ShouldBeTrue)
								testTask, err = task.FindOne(task.ById(testTask.Id))
								testutil.HandleTestingErr(err, t, "Error finding test task: %v", err)
								So(testTask.Status, ShouldEqual, evergreen.TaskSucceeded)
							})
						})
					}
				})
			})
		}
	}
}
// TestS3CopyPluginExecution is an integration test for the s3Copy plugin: it
// registers the s3Copy and S3 plugins in a SimpleRegistry, stands up a test
// API server, loads a test project config, and executes every command in the
// project, expecting each to succeed. Requires AWS credentials from the test
// configuration.
func TestS3CopyPluginExecution(t *testing.T) {
	testConfig := evergreen.TestConfig()
	db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(testConfig))
	testutil.ConfigureIntegrationTest(t, testConfig, "TestS3CopyPluginExecution")
	Convey("With a SimpleRegistry and test project file", t, func() {
		registry := plugin.NewSimpleRegistry()
		s3CopyPlugin := &S3CopyPlugin{}
		testutil.HandleTestingErr(registry.Register(s3CopyPlugin), t, "failed to register s3Copy plugin")
		testutil.HandleTestingErr(registry.Register(&s3Plugin.S3Plugin{}), t, "failed to register S3 plugin")
		testutil.HandleTestingErr(
			db.ClearCollections(model.PushlogCollection, version.Collection), t,
			"error clearing test collections")
		// NOTE: this local shadows the imported version package below
		version := &version.Version{
			Id: "",
		}
		So(version.Insert(), ShouldBeNil)
		server, err := apiserver.CreateTestServer(testConfig, nil, plugin.APIPlugins, false)
		testutil.HandleTestingErr(err, t, "Couldn't set up testing server")
		httpCom := plugintest.TestAgentCommunicator("mocktaskid", "mocktasksecret", server.URL)
		//server.InstallPlugin(s3CopyPlugin)
		taskConfig, err := plugintest.CreateTestConfig("testdata/plugin_s3_copy.yml", t)
		testutil.HandleTestingErr(err, t, "failed to create test config: %v", err)
		taskConfig.WorkDir = "."
		sliceAppender := &evergreen.SliceAppender{[]*slogger.Log{}}
		logger := agent.NewTestLogger(sliceAppender)
		// inject AWS credentials the s3 commands expand at runtime
		taskConfig.Expansions.Update(map[string]string{
			"aws_key":    testConfig.Providers.AWS.Id,
			"aws_secret": testConfig.Providers.AWS.Secret,
		})
		Convey("the s3 copy command should execute successfully", func() {
			for _, task := range taskConfig.Project.Tasks {
				So(len(task.Commands), ShouldNotEqual, 0)
				for _, command := range task.Commands {
					pluginCmds, err := registry.GetCommands(command, taskConfig.Project.Functions)
					testutil.HandleTestingErr(err, t, "Couldn't get plugin command: %v")
					So(pluginCmds, ShouldNotBeNil)
					So(err, ShouldBeNil)
					pluginCom := &agent.TaskJSONCommunicator{s3CopyPlugin.Name(), httpCom}
					err = pluginCmds[0].Execute(logger, pluginCom, taskConfig, make(chan bool))
					So(err, ShouldBeNil)
				}
			}
		})
	})
}
// TestPatchTask is an end-to-end agent integration test (older variant of the
// patched-'compile' test): for each TLS configuration and test setup it runs
// the agent against a test API server on a patched compile task and asserts,
// by scanning the task logs, that the pre/post scripts, git clone, patch and
// module-patch application, compile, and sanity-test steps all ran and the
// task succeeded.
func TestPatchTask(t *testing.T) {
	setupTlsConfigs(t)
	testConfig := evergreen.TestConfig()
	db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(testConfig))
	testutil.ConfigureIntegrationTest(t, testConfig, "TestPatchTask")
	for tlsString, tlsConfig := range tlsConfigs {
		for _, testSetup := range testSetups {
			Convey(testSetup.testSpec, t, func() {
				Convey("With agent running a patched 'compile'"+tlsString, func() {
					testTask, _, err := setupAPITestData(testConfig, "compile", "linux-64", true, t)
					testutil.HandleTestingErr(err, t, "Error setting up test data: %v", err)
					testServer, err := apiserver.CreateTestServer(testConfig, tlsConfig, plugin.Published, Verbose)
					testutil.HandleTestingErr(err, t, "Couldn't create apiserver: %v", err)
					// NOTE(review): err from New is not checked before RunTask — confirm.
					testAgent, err := New(testServer.URL, testTask.Id, testTask.Secret, "", testConfig.Expansions["api_httpscert"])
					// actually run the task.
					// this function won't return until the whole thing is done.
					testAgent.RunTask()
					time.Sleep(100 * time.Millisecond)
					testAgent.APILogger.FlushAndWait()
					printLogsForTask(testTask.Id)
					Convey("all scripts in task should have been run successfully", func() {
						So(scanLogsForTask(testTask.Id, "executing the pre-run script!"), ShouldBeTrue)
						So(scanLogsForTask(testTask.Id, "executing the post-run script!"), ShouldBeTrue)
						So(scanLogsForTask(testTask.Id, "Cloning into") || // git 1.8
							scanLogsForTask(testTask.Id, "Initialized empty Git repository"), // git 1.7
							ShouldBeTrue)
						So(scanLogsForTask(testTask.Id, "i am patched!"), ShouldBeTrue)
						So(scanLogsForTask(testTask.Id, "i am a patched module"), ShouldBeTrue)
						So(scanLogsForTask(testTask.Id, "i am compiling!"), ShouldBeTrue)
						So(scanLogsForTask(testTask.Id, "i am sanity testing!"), ShouldBeTrue)
						testTask, err = model.FindTask(testTask.Id)
						testutil.HandleTestingErr(err, t, "Error finding test task: %v", err)
						So(testTask.Status, ShouldEqual, evergreen.TaskSucceeded)
					})
				})
			})
		}
	}
}
// TestPatchPlugin is an integration test for the git patch plugin: it
// registers the plugin, stands up a test API server with patch test data,
// loads a patch-mode project config, and executes every command in the
// project, expecting each to succeed.
func TestPatchPlugin(t *testing.T) {
	cwd := testutil.GetDirectoryOfFile()
	testConfig := evergreen.TestConfig()
	db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(testConfig))
	Convey("With patch plugin installed into plugin registry", t, func() {
		registry := plugin.NewSimpleRegistry()
		gitPlugin := &GitPlugin{}
		err := registry.Register(gitPlugin)
		testutil.HandleTestingErr(err, t, "Couldn't register plugin %v")
		testutil.HandleTestingErr(db.Clear(version.Collection), t, "unable to clear versions collection")
		// NOTE: this local shadows the imported version package below
		version := &version.Version{
			Id: "",
		}
		So(version.Insert(), ShouldBeNil)
		server, err := service.CreateTestServer(testConfig, nil, plugin.APIPlugins, false)
		testutil.HandleTestingErr(err, t, "Couldn't set up testing server")
		httpCom := plugintest.TestAgentCommunicator("testTaskId", "testTaskSecret", server.URL)
		//sliceAppender := &evergreen.SliceAppender{[]*slogger.Log{}}
		sliceAppender := slogger.StdOutAppender()
		logger := agentutil.NewTestLogger(sliceAppender)
		Convey("all commands in test project should execute successfully", func() {
			taskConfig, err := plugintest.CreateTestConfig(filepath.Join(cwd, "testdata", "plugin_patch.yml"), t)
			testutil.HandleTestingErr(err, t, "could not create test config")
			// mark the task as a patch run so the plugin applies the patch
			taskConfig.Task.Requester = evergreen.PatchVersionRequester
			_, _, err = plugintest.SetupAPITestData("testTask", filepath.Join(cwd, "testdata", "testmodule.patch"), t)
			testutil.HandleTestingErr(err, t, "Couldn't set up test documents")
			for _, task := range taskConfig.Project.Tasks {
				So(len(task.Commands), ShouldNotEqual, 0)
				for _, command := range task.Commands {
					pluginCmds, err := registry.GetCommands(command, taskConfig.Project.Functions)
					testutil.HandleTestingErr(err, t, "Couldn't get plugin command: %v")
					So(pluginCmds, ShouldNotBeNil)
					So(err, ShouldBeNil)
					pluginCom := &comm.TaskJSONCommunicator{pluginCmds[0].Plugin(), httpCom}
					err = pluginCmds[0].Execute(logger, pluginCom, taskConfig, make(chan bool))
					So(err, ShouldBeNil)
				}
			}
		})
	})
}
// TestFlaggingProvisioningFailedHosts verifies that flagProvisioningFailedHosts
// selects only hosts in the HostProvisionFailed state, leaving running and
// uninitialized hosts untouched. Integration test against the test database.
func TestFlaggingProvisioningFailedHosts(t *testing.T) {
	testConfig := evergreen.TestConfig()
	db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(testConfig))
	Convey("When flagging hosts whose provisioning failed", t, func() {
		// reset the db
		testutil.HandleTestingErr(db.ClearCollections(host.Collection),
			t, "error clearing hosts collection")
		Convey("only hosts whose provisioning failed should be"+
			" picked up", func() {
			// running host — should not be flagged
			host1 := &host.Host{
				Id:     "h1",
				Status: evergreen.HostRunning,
			}
			testutil.HandleTestingErr(host1.Insert(), t, "error inserting host")
			// still provisioning — should not be flagged
			host2 := &host.Host{
				Id:     "h2",
				Status: evergreen.HostUninitialized,
			}
			testutil.HandleTestingErr(host2.Insert(), t, "error inserting host")
			// provisioning failed — the only host expected back
			host3 := &host.Host{
				Id:     "h3",
				Status: evergreen.HostProvisionFailed,
			}
			testutil.HandleTestingErr(host3.Insert(), t, "error inserting host")
			unprovisioned, err := flagProvisioningFailedHosts(nil, nil)
			So(err, ShouldBeNil)
			So(len(unprovisioned), ShouldEqual, 1)
			So(unprovisioned[0].Id, ShouldEqual, "h3")
		})
	})
}
func NewUIServer(settings *evergreen.Settings, home string) (*UIServer, error) { uis := &UIServer{} if settings.Ui.LogFile != "" { evergreen.SetLogger(settings.Ui.LogFile) } db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(settings)) uis.Settings = *settings uis.Home = home userManager, err := auth.LoadUserManager(settings.AuthConfig) if err != nil { return nil, err } uis.UserManager = userManager uis.CookieStore = sessions.NewCookieStore([]byte(settings.Ui.Secret)) uis.PluginTemplates = map[string]*htmlTemplate.Template{} return uis, nil }
func init() { db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(evergreen.TestConfig())) }
func init() { db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(taskRunnerTestConf)) if taskRunnerTestConf.TaskRunner.LogFile != "" { evergreen.SetLogger(taskRunnerTestConf.TaskRunner.LogFile) } }
// TestPushTask is an end-to-end agent integration test for the push stage:
// it runs the agent on a push task against a test API server, then verifies
// the task logs, the artifact entries created by s3.put and s3.copy, the
// final task status, and the actual file contents uploaded to S3 (both the
// staging and final locations). Requires AWS credentials from the test
// configuration.
func TestPushTask(t *testing.T) {
	testConfig := evergreen.TestConfig()
	setupTlsConfigs(t)
	db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(testConfig))
	testutil.ConfigureIntegrationTest(t, testConfig, "TestPushTask")
	for tlsString, tlsConfig := range tlsConfigs {
		for _, testSetup := range testSetups {
			Convey(testSetup.testSpec, t, func() {
				Convey("With agent running a push task "+tlsString, func() {
					testTask, _, err := setupAPITestData(testConfig, evergreen.PushStage, "linux-64", false, t)
					testutil.HandleTestingErr(err, t, "Error setting up test data: %v", err)
					testutil.HandleTestingErr(db.ClearCollections(artifact.Collection), t, "can't clear files collection")
					testServer, err := apiserver.CreateTestServer(testConfig, tlsConfig, plugin.Published, Verbose)
					testutil.HandleTestingErr(err, t, "Couldn't create apiserver: %v", err)
					testAgent, err := New(testServer.URL, testTask.Id, testTask.Secret, "", testConfig.Expansions["api_httpscert"])
					testutil.HandleTestingErr(err, t, "Error making test agent: %v", err)
					// actually run the task.
					// this function won't return until the whole thing is done.
					testAgent.RunTask()
					time.Sleep(100 * time.Millisecond)
					testAgent.APILogger.FlushAndWait()
					printLogsForTask(testTask.Id)
					// expansion written by the task; used below to verify S3 contents
					newDate := testAgent.taskConfig.Expansions.Get("new_date")
					Convey("all scripts in task should have been run successfully", func() {
						So(scanLogsForTask(testTask.Id, "executing the pre-run script!"), ShouldBeTrue)
						So(scanLogsForTask(testTask.Id, "executing the post-run script!"), ShouldBeTrue)
						So(scanLogsForTask(testTask.Id, "push task pre-run!"), ShouldBeTrue)
						So(scanLogsForTask(testTask.Id, "push task post-run!"), ShouldBeTrue)
						Convey("s3.put attaches task file properly", func() {
							entry, err := artifact.FindOne(artifact.ByTaskId(testTask.Id))
							So(err, ShouldBeNil)
							So(len(entry.Files), ShouldEqual, 2)
							for _, element := range entry.Files {
								So(element.Name, ShouldNotEqual, "")
							}
							So(entry.Files[0].Name, ShouldEqual, "push_file")
							link := "https://s3.amazonaws.com/build-push-testing/pushtest-stage/unittest-testTaskId-DISTRO_EXP-BUILDVAR_EXP-FILE_EXP.txt"
							So(entry.Files[0].Link, ShouldEqual, link)
						})
						Convey("s3.copy attached task file properly", func() {
							entry, err := artifact.FindOne(artifact.ByTaskId(testTask.Id))
							So(err, ShouldBeNil)
							So(len(entry.Files), ShouldNotEqual, 0)
							So(entry.Files[0].Name, ShouldEqual, "push_file")
							So(entry.Files[1].Name, ShouldEqual, "copy_file")
							So(entry.Files[0].Link, ShouldEqual, "https://s3.amazonaws.com/build-push-testing/pushtest-stage/unittest-testTaskId-DISTRO_EXP-BUILDVAR_EXP-FILE_EXP.txt")
							So(entry.Files[1].Link, ShouldEqual, "https://s3.amazonaws.com/build-push-testing/pushtest/unittest-DISTRO_EXP-BUILDVAR_EXP-FILE_EXP-latest.txt")
						})
						testTask, err = model.FindTask(testTask.Id)
						testutil.HandleTestingErr(err, t, "Error finding test task: %v", err)
						So(testTask.Status, ShouldEqual, evergreen.TaskSucceeded)
						// Check the file written to s3 is what we expected
						auth := &aws.Auth{
							AccessKey: testConfig.Providers.AWS.Id,
							SecretKey: testConfig.Providers.AWS.Secret,
						}
						// check the staging location first
						filebytes, err := getS3FileBytes(auth, "build-push-testing", "/pushtest-stage/unittest-testTaskId-DISTRO_EXP-BUILDVAR_EXP-FILE_EXP.txt")
						testutil.HandleTestingErr(err, t, "Failed to get file from s3: %v", err)
						So(string(filebytes), ShouldEqual, newDate+"\n")
						// now check remote location (after copy)
						filebytes, err = getS3FileBytes(auth, "build-push-testing", "/pushtest/unittest-DISTRO_EXP-BUILDVAR_EXP-FILE_EXP-latest.txt")
						testutil.HandleTestingErr(err, t, "Failed to get remote file from s3: %v", err)
						So(string(filebytes), ShouldEqual, newDate+"\n")
					})
				})
			})
		}
	}
}
// TestWarnSlowProvisioningHosts verifies slowProvisioningWarnings: hosts
// under the age threshold, hosts already notified, and terminated hosts
// produce no warning; a sufficiently old, un-notified host produces exactly
// one, and running its callback records the notification on the host.
func TestWarnSlowProvisioningHosts(t *testing.T) {
	testConfig := evergreen.TestConfig()
	db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(testConfig))
	Convey("When building warnings for hosts that are taking a long time to"+
		" provision", t, func() {
		// reset the db
		testutil.HandleTestingErr(db.ClearCollections(host.Collection),
			t, "error clearing hosts collection")
		Convey("hosts that have not hit the threshold should not trigger a"+
			" warning", func() {
			host1 := &host.Host{
				Id:           "h1",
				StartedBy:    evergreen.User,
				CreationTime: time.Now().Add(-10 * time.Minute),
			}
			testutil.HandleTestingErr(host1.Insert(), t, "error inserting host")
			warnings, err := slowProvisioningWarnings(testConfig)
			So(err, ShouldBeNil)
			So(len(warnings), ShouldEqual, 0)
		})
		Convey("hosts that have already triggered a notification should not"+
			" trigger another", func() {
			host1 := &host.Host{
				Id:           "h1",
				StartedBy:    evergreen.User,
				CreationTime: time.Now().Add(-1 * time.Hour),
				Notifications: map[string]bool{
					slowProvisioningWarning: true,
				},
			}
			testutil.HandleTestingErr(host1.Insert(), t, "error inserting host")
			warnings, err := slowProvisioningWarnings(testConfig)
			So(err, ShouldBeNil)
			So(len(warnings), ShouldEqual, 0)
		})
		Convey("terminated hosts should not trigger a warning", func() {
			host1 := &host.Host{
				Id:           "h1",
				StartedBy:    evergreen.User,
				Status:       evergreen.HostTerminated,
				CreationTime: time.Now().Add(-1 * time.Hour),
			}
			testutil.HandleTestingErr(host1.Insert(), t, "error inserting host")
			warnings, err := slowProvisioningWarnings(testConfig)
			So(err, ShouldBeNil)
			So(len(warnings), ShouldEqual, 0)
		})
		Convey("hosts that are at the threshold and have not previously"+
			" triggered a warning should trigger one", func() {
			host1 := &host.Host{
				Id:           "h1",
				StartedBy:    evergreen.User,
				CreationTime: time.Now().Add(-1 * time.Hour),
			}
			testutil.HandleTestingErr(host1.Insert(), t, "error inserting host")
			warnings, err := slowProvisioningWarnings(testConfig)
			So(err, ShouldBeNil)
			So(len(warnings), ShouldEqual, 1)
			// make sure running the callback sets the notification key
			So(warnings[0].callback(warnings[0].host, warnings[0].threshold), ShouldBeNil)
			host1, err = host.FindOne(host.ById("h1"))
			So(err, ShouldBeNil)
			So(host1.Notifications[slowProvisioningWarning], ShouldBeTrue)
		})
	})
}
func init() { db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(taskFinderTestConf)) if setupFuncsTestConf.Scheduler.LogFile != "" { evergreen.SetLogger(setupFuncsTestConf.Scheduler.LogFile) } }
func reset(t *testing.T) { db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(evergreen.TestConfig())) testutil.HandleTestingErr( db.ClearCollections(model.TasksCollection, model.TestLogCollection), t, "error clearing test collections") }
// TestWarnExpiringSpawnedHosts verifies spawnHostExpirationWarnings: hosts
// outside every warning threshold or already notified produce nothing; a host
// inside multiple thresholds yields one warning keyed by the most recent
// threshold crossed; quarantined, terminated, and already-expired hosts never
// warn.
func TestWarnExpiringSpawnedHosts(t *testing.T) {
	testConfig := evergreen.TestConfig()
	db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(testConfig))
	Convey("When building warnings for spawned hosts that will be expiring"+
		" soon", t, func() {
		// reset the db
		testutil.HandleTestingErr(db.ClearCollections(host.Collection),
			t, "error clearing hosts collection")
		Convey("any hosts not expiring within a threshold should not trigger"+
			" warnings", func() {
			// this host does not expire within the first notification
			// threshold
			host1 := &host.Host{
				Id:             "h1",
				ExpirationTime: time.Now().Add(time.Hour * 15),
			}
			testutil.HandleTestingErr(host1.Insert(), t, "error inserting host")
			warnings, err := spawnHostExpirationWarnings(testConfig)
			So(err, ShouldBeNil)
			So(len(warnings), ShouldEqual, 0)
		})
		Convey("any thresholds for which warnings have already been sent"+
			" should be ignored", func() {
			// this host meets the first notification warning threshold
			host1 := &host.Host{
				Id:             "h1",
				ExpirationTime: time.Now().Add(time.Hour * 10),
				Notifications: map[string]bool{
					"720": true, // the first threshold in minutes
				},
			}
			testutil.HandleTestingErr(host1.Insert(), t, "error inserting host")
			warnings, err := spawnHostExpirationWarnings(testConfig)
			So(err, ShouldBeNil)
			So(len(warnings), ShouldEqual, 0)
		})
		Convey("the most recent threshold crossed should be used to create"+
			" the warning", func() {
			// this host meets both notification warning thresholds
			host1 := &host.Host{
				Id:             "h1",
				ExpirationTime: time.Now().Add(time.Minute * 10),
			}
			testutil.HandleTestingErr(host1.Insert(), t, "error inserting host")
			warnings, err := spawnHostExpirationWarnings(testConfig)
			So(err, ShouldBeNil)
			So(len(warnings), ShouldEqual, 1)
			// execute the callback, make sure the correct threshold is set
			So(warnings[0].callback(warnings[0].host, warnings[0].threshold), ShouldBeNil)
			host1, err = host.FindOne(host.ById("h1"))
			So(err, ShouldBeNil)
			So(host1.Notifications["120"], ShouldBeTrue)
		})
		Convey("hosts that are quarantined or have already expired should not"+
			" merit warnings", func() {
			// quarantined host
			host1 := &host.Host{
				Id:             "h1",
				Status:         evergreen.HostQuarantined,
				ExpirationTime: time.Now().Add(time.Minute * 10),
			}
			testutil.HandleTestingErr(host1.Insert(), t, "error inserting host")
			// terminated host
			host2 := &host.Host{
				Id:             "h2",
				Status:         evergreen.HostTerminated,
				ExpirationTime: time.Now().Add(time.Minute * 10),
			}
			testutil.HandleTestingErr(host2.Insert(), t, "error inserting host")
			// past the expiration. no warning needs to be sent since this host
			// is theoretically about to be terminated, at which time a
			// notification will be sent
			host3 := &host.Host{
				Id:             "h3",
				ExpirationTime: time.Now().Add(-time.Minute * 10),
			}
			testutil.HandleTestingErr(host3.Insert(), t, "error inserting host")
			warnings, err := spawnHostExpirationWarnings(testConfig)
			So(err, ShouldBeNil)
			So(len(warnings), ShouldEqual, 0)
		})
	})
}
// init registers the global DB session provider for the task-queue tests.
func init() {
	sessionFactory := db.SessionFactoryFromConfig(taskQueueTestConf)
	db.SetGlobalSessionProvider(sessionFactory)
}
func init() { db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(evergreen.TestConfig())) evergreen.SetLogger("/tmp/version_test.log") }
// init registers the global DB session provider for this package's tests.
func init() {
	sessionFactory := dbutil.SessionFactoryFromConfig(testConfig)
	dbutil.SetGlobalSessionProvider(sessionFactory)
}
// TestNotify is an integration test for the notification subsystem. It
// exercises the build- and task-level handlers (failure, success, completion,
// success-to-failure) against fixture documents, then the end-to-end
// ProcessNotifications pipeline, and finally SendNotifications with a mock
// mailer, asserting on the exact email subjects produced.
func TestNotify(t *testing.T) {
	if evergreen.TestConfig().Notify.LogFile != "" {
		evergreen.SetLogger(evergreen.TestConfig().Notify.LogFile)
	}
	db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(TestConfig))
	// reset the captured-email accumulators used by MockMailer
	emailSubjects = make([]string, 0)
	emailBodies = make([]string, 0)
	Convey("When running notification handlers", t, func() {
		ae, err := createEnvironment(TestConfig, map[string]interface{}{})
		So(err, ShouldBeNil)
		Convey("Build-specific handlers should return the correct emails", func() {
			cleanupdb()
			timeNow := time.Now()
			// insert the test documents
			insertBuildDocs(timeNow)
			version := &version.Version{Id: "version"}
			So(version.Insert(), ShouldBeNil)
			Convey("BuildFailureHandler should return 1 email per failed build", func() {
				handler := BuildFailureHandler{}
				emails, err := handler.GetNotifications(ae, "config_test", &buildFailureNotificationKey)
				So(err, ShouldBeNil)
				// check that we only returned 2 failed notifications
				So(len(emails), ShouldEqual, 2)
				So(emails[0].GetSubject(), ShouldEqual,
					"[MCI-FAILURE ] Build #build1 failed on displayName")
				So(emails[1].GetSubject(), ShouldEqual,
					"[MCI-FAILURE ] Build #build9 failed on displayName")
			})
			Convey("BuildSuccessHandler should return 1 email per successful build", func() {
				handler := BuildSuccessHandler{}
				emails, err := handler.GetNotifications(ae, "config_test", &buildSucceessNotificationKey)
				So(err, ShouldBeNil)
				// check that we only returned 2 success notifications
				So(len(emails), ShouldEqual, 2)
				So(emails[0].GetSubject(), ShouldEqual,
					"[MCI-SUCCESS ] Build #build3 succeeded on displayName")
				So(emails[1].GetSubject(), ShouldEqual,
					"[MCI-SUCCESS ] Build #build8 succeeded on displayName")
			})
			Convey("BuildCompletionHandler should return 1 email per completed build", func() {
				handler := BuildCompletionHandler{}
				emails, err := handler.GetNotifications(ae, "config_test", &buildCompletionNotificationKey)
				So(err, ShouldBeNil)
				// check that we only returned 4 completed notifications
				So(len(emails), ShouldEqual, 4)
				So(emails[0].GetSubject(), ShouldEqual,
					"[MCI-COMPLETION ] Build #build1 completed on displayName")
				So(emails[1].GetSubject(), ShouldEqual,
					"[MCI-COMPLETION ] Build #build3 completed on displayName")
				So(emails[2].GetSubject(), ShouldEqual,
					"[MCI-COMPLETION ] Build #build8 completed on displayName")
				So(emails[3].GetSubject(), ShouldEqual,
					"[MCI-COMPLETION ] Build #build9 completed on displayName")
			})
			Convey("BuildSuccessToFailureHandler should return 1 email per "+
				"build success to failure transition", func() {
				handler := BuildSuccessToFailureHandler{}
				emails, err := handler.GetNotifications(ae, "config_test", &buildSuccessToFailureNotificationKey)
				So(err, ShouldBeNil)
				// check that we only returned 1 success_to_failure notifications
				So(len(emails), ShouldEqual, 1)
				So(emails[0].GetSubject(), ShouldEqual,
					"[MCI-FAILURE ] Build #build9 transitioned to failure on displayName")
			})
		})
		Convey("Task-specific handlers should return the correct emails", func() {
			cleanupdb()
			timeNow := time.Now()
			// insert the test documents
			insertTaskDocs(timeNow)
			v := &version.Version{Id: "version"}
			So(v.Insert(), ShouldBeNil)
			Convey("TaskFailureHandler should return 1 email per task failure", func() {
				handler := TaskFailureHandler{}
				emails, err := handler.GetNotifications(ae, "config_test", &taskFailureNotificationKey)
				So(err, ShouldBeNil)
				// check that we only returned 2 failed notifications
				So(len(emails), ShouldEqual, 2)
				So(emails[0].GetSubject(), ShouldEqual,
					"[MCI-FAILURE ] possible MCI failure in displayName (failed on build1)")
				So(emails[1].GetSubject(), ShouldEqual,
					"[MCI-FAILURE ] possible MCI failure in displayName (failed on build1)")
			})
			Convey("TaskSuccessHandler should return 1 email per task success", func() {
				handler := TaskSuccessHandler{}
				emails, err := handler.GetNotifications(ae, "config_test", &taskSucceessNotificationKey)
				So(err, ShouldBeNil)
				// check that we only returned 2 success notifications
				So(len(emails), ShouldEqual, 2)
				So(emails[0].GetSubject(), ShouldEqual,
					"[MCI-SUCCESS ] possible MCI failure in displayName (succeeded on build1)")
				So(emails[1].GetSubject(), ShouldEqual,
					"[MCI-SUCCESS ] possible MCI failure in displayName (succeeded on build1)")
			})
			Convey("TaskCompletionHandler should return 1 email per completed task", func() {
				handler := TaskCompletionHandler{}
				emails, err := handler.GetNotifications(ae, "config_test", &taskCompletionNotificationKey)
				So(err, ShouldBeNil)
				// check that we only returned 4 completion notifications
				So(len(emails), ShouldEqual, 4)
				So(emails[0].GetSubject(), ShouldEqual,
					"[MCI-COMPLETION ] possible MCI failure in displayName (completed on build1)")
				So(emails[1].GetSubject(), ShouldEqual,
					"[MCI-COMPLETION ] possible MCI failure in displayName (completed on build1)")
				So(emails[2].GetSubject(), ShouldEqual,
					"[MCI-COMPLETION ] possible MCI failure in displayName (completed on build1)")
				So(emails[3].GetSubject(), ShouldEqual,
					"[MCI-COMPLETION ] possible MCI failure in displayName (completed on build1)")
			})
			Convey("TaskSuccessToFailureHandler should return 1 email per "+
				"task success to failure transition", func() {
				handler := TaskSuccessToFailureHandler{}
				emails, err := handler.GetNotifications(ae, "config_test", &taskSuccessToFailureNotificationKey)
				So(err, ShouldBeNil)
				// check that we only returned 1 success to failure notifications
				So(len(emails), ShouldEqual, 1)
				So(emails[0].GetSubject(), ShouldEqual,
					"[MCI-FAILURE ] possible MCI failure in displayName (transitioned to "+
						"failure on build1)")
			})
		})
	})
	Convey("When running notifications pipeline", t, func() {
		cleanupdb()
		timeNow := time.Now()
		// insert the test documents
		insertTaskDocs(timeNow)
		v := &version.Version{Id: "version"}
		So(v.Insert(), ShouldBeNil)
		Convey("Should run the correct notification handlers for given "+
			"notification keys", func() {
			notificationSettings := &MCINotification{}
			notificationSettings.Notifications = []Notification{
				Notification{"task_failure", "project", []string{"user@mongodb"}, []string{}},
				Notification{"task_success_to_failure", "project", []string{"user@mongodb"}, []string{}},
			}
			notificationSettings.Teams = []Team{
				Team{
					"myteam",
					"*****@*****.**",
					[]Subscription{Subscription{"task", []string{}, []string{"task_failure"}}},
				},
			}
			notificationSettings.PatchNotifications = []Subscription{
				Subscription{"patch_project", []string{}, []string{}},
			}
			notificationKeyFailure := NotificationKey{"project", "task_failure", "task", "gitter_request"}
			notificationKeyToFailure := NotificationKey{"project", "task_success_to_failure", "task", "gitter_request"}
			ae, err := createEnvironment(TestConfig, map[string]interface{}{})
			So(err, ShouldBeNil)
			emails, err := ProcessNotifications(ae, "config_test", notificationSettings, false)
			So(err, ShouldBeNil)
			So(len(emails[notificationKeyFailure]), ShouldEqual, 2)
			So(emails[notificationKeyFailure][0].GetSubject(), ShouldEqual,
				"[MCI-FAILURE ] possible MCI failure in displayName (failed on build1)")
			So(emails[notificationKeyFailure][1].GetSubject(), ShouldEqual,
				"[MCI-FAILURE ] possible MCI failure in displayName (failed on build1)")
			So(len(emails[notificationKeyToFailure]), ShouldEqual, 1)
			So(emails[notificationKeyToFailure][0].GetSubject(), ShouldEqual,
				"[MCI-FAILURE ] possible MCI failure in displayName (transitioned to "+
					"failure on build1)")
		})
		Convey("SendNotifications should send emails correctly", func() {
			notificationSettings := &MCINotification{}
			notificationSettings.Notifications = []Notification{
				Notification{"task_failure", "project", []string{"user@mongodb"}, []string{}},
			}
			notificationSettings.Teams = []Team{
				Team{
					"myteam",
					"*****@*****.**",
					[]Subscription{Subscription{"task", []string{}, []string{"task_failure"}}},
				},
			}
			notificationSettings.PatchNotifications = []Subscription{
				Subscription{"patch_project", []string{}, []string{}},
			}
			// NOTE(review): err from FindOneTask is not checked before use —
			// confirm whether that's intentional.
			fakeTask, err := model.FindOneTask(bson.M{"_id": "task8"}, bson.M{}, []string{})
			notificationKey := NotificationKey{"project", "task_failure", "task", "gitter_request"}
			triggeredNotification := TriggeredTaskNotification{
				fakeTask,
				nil,
				[]ChangeInfo{},
				notificationKey,
				"[MCI-FAILURE]",
				"failed",
			}
			email := TaskEmail{
				EmailBase{
					"This is the email body",
					"This is the email subject",
					triggeredNotification.Info,
				},
				triggeredNotification,
			}
			m := make(map[NotificationKey][]Email)
			m[notificationKey] = []Email{&email}
			mailer := MockMailer{}
			mockSettings := evergreen.Settings{Notify: evergreen.NotifyConfig{}}
			err = SendNotifications(&mockSettings, notificationSettings, m, mailer)
			So(err, ShouldBeNil)
			So(len(emailSubjects), ShouldEqual, 1)
			So(emailSubjects[0], ShouldEqual,
				"This is the email subject")
			So(emailBodies[0], ShouldEqual,
				"This is the email body")
		})
	})
}
func init() { db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(testConfig)) if testConfig.RepoTracker.LogFile != "" { evergreen.SetLogger(testConfig.RepoTracker.LogFile) } }
// init points the global database session provider at the patch test
// configuration before any tests in this package run.
func init() {
	sessionFactory := db.SessionFactoryFromConfig(patchTestConfig)
	db.SetGlobalSessionProvider(sessionFactory)
}
func TestFlaggingExpiredHosts(t *testing.T) { testConfig := evergreen.TestConfig() db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(testConfig)) Convey("When flagging expired hosts to be terminated", t, func() { // reset the db testutil.HandleTestingErr(db.ClearCollections(host.Collection), t, "error clearing hosts collection") Convey("hosts started by the default user should be filtered"+ " out", func() { host1 := &host.Host{ Id: "h1", Status: evergreen.HostRunning, StartedBy: evergreen.User, } testutil.HandleTestingErr(host1.Insert(), t, "error inserting host") expired, err := flagExpiredHosts(nil, nil) So(err, ShouldBeNil) So(len(expired), ShouldEqual, 0) }) Convey("hosts that are terminated or quarantined should be filtered"+ " out", func() { host1 := &host.Host{ Id: "h1", Status: evergreen.HostQuarantined, } testutil.HandleTestingErr(host1.Insert(), t, "error inserting host") host2 := &host.Host{ Id: "h2", Status: evergreen.HostTerminated, } testutil.HandleTestingErr(host2.Insert(), t, "error inserting host") expired, err := flagExpiredHosts(nil, nil) So(err, ShouldBeNil) So(len(expired), ShouldEqual, 0) }) Convey("hosts should be returned if their expiration threshold has"+ " been reached", func() { // not expired host1 := &host.Host{ Id: "h1", Status: evergreen.HostRunning, ExpirationTime: time.Now().Add(time.Minute * 10), } testutil.HandleTestingErr(host1.Insert(), t, "error inserting host") // expired host2 := &host.Host{ Id: "h2", Status: evergreen.HostRunning, ExpirationTime: time.Now().Add(-time.Minute * 10), } testutil.HandleTestingErr(host2.Insert(), t, "error inserting host") expired, err := flagExpiredHosts(nil, nil) So(err, ShouldBeNil) So(len(expired), ShouldEqual, 1) So(expired[0].Id, ShouldEqual, "h2") }) }) }
func init() { db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(conf)) evergreen.SetLogger("/tmp/task_test.log") }
func TestFlaggingIdleHosts(t *testing.T) { testConfig := evergreen.TestConfig() db.SetGlobalSessionProvider(db.SessionFactoryFromConfig(testConfig)) Convey("When flagging idle hosts to be terminated", t, func() { // reset the db testutil.HandleTestingErr(db.ClearCollections(host.Collection), t, "error clearing hosts collection") Convey("hosts currently running a task should never be"+ " flagged", func() { // insert a host that is currently running a task - but whose // creation time would otherwise indicate it has been idle a while host1 := host.Host{ Id: "h1", Provider: mock.ProviderName, CreationTime: time.Now().Add(-30 * time.Minute), RunningTask: "t1", Status: evergreen.HostRunning, StartedBy: evergreen.User, } testutil.HandleTestingErr(host1.Insert(), t, "error inserting host") // finding idle hosts should not return the host idle, err := flagIdleHosts(nil, nil) So(err, ShouldBeNil) So(len(idle), ShouldEqual, 0) }) Convey("hosts not currently running a task should be flagged if they"+ " have been idle at least 15 minutes and will incur a payment in"+ " less than 10 minutes", func() { // insert two hosts - one whose last task was more than 15 minutes // ago, one whose last task was less than 15 minutes ago host1 := host.Host{ Id: "h1", Provider: mock.ProviderName, LastTaskCompleted: "t1", LastTaskCompletedTime: time.Now().Add(-time.Minute * 20), Status: evergreen.HostRunning, StartedBy: evergreen.User, } testutil.HandleTestingErr(host1.Insert(), t, "error inserting host") host2 := host.Host{ Id: "h2", Provider: mock.ProviderName, LastTaskCompleted: "t2", LastTaskCompletedTime: time.Now().Add(-time.Minute * 5), Status: evergreen.HostRunning, StartedBy: evergreen.User, } testutil.HandleTestingErr(host2.Insert(), t, "error inserting host") // finding idle hosts should only return the first host idle, err := flagIdleHosts(nil, nil) So(err, ShouldBeNil) So(len(idle), ShouldEqual, 1) So(idle[0].Id, ShouldEqual, "h1") }) }) }