// newTestHarness returns a harness starting a dev consul and vault server,
// building the appropriate config and creating a TaskTemplateManager
func newTestHarness(t *testing.T, templates []*structs.Template, consul, vault bool) *testHarness {
	harness := &testHarness{
		mockHooks: NewMockTaskHooks(),
		templates: templates,
		node:      mock.Node(),
		config:    &config.Config{},
	}

	// Build the task environment
	harness.taskEnv = env.NewTaskEnvironment(harness.node)

	// Make a tempdir
	d, err := ioutil.TempDir("", "")
	if err != nil {
		t.Fatalf("Failed to make tmpdir: %v", err)
	}
	harness.taskDir = d

	if consul {
		harness.consul = ctestutil.NewTestServer(t)
		harness.config.ConsulConfig = &sconfig.ConsulConfig{
			Addr: harness.consul.HTTPAddr,
		}
	}

	if vault {
		harness.vault = testutil.NewTestVault(t).Start()
		harness.config.VaultConfig = harness.vault.Config
		harness.vaultToken = harness.vault.RootToken
	}

	return harness
}
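// For context, a minimal sketch of the testHarness struct implied by
// newTestHarness above. Field names come straight from the function body;
// the field types are inferred from usage and are assumptions, since the
// real definition lives elsewhere in the package.
type testHarness struct {
	mockHooks  *MockTaskHooks
	templates  []*structs.Template
	node       *structs.Node
	config     *config.Config
	taskEnv    *env.TaskEnvironment
	taskDir    string
	consul     *ctestutil.TestServer
	vault      *testutil.TestVault
	vaultToken string
}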
// TestProcessHealthCheckWithKnownCheck verifies that a health check whose
// critical state is already recorded in Consul KV is memoized without
// triggering a fresh notification.
func TestProcessHealthCheckWithKnownCheck(t *testing.T) {
	consulKey := "notif/node1/check"
	srv := ctu.NewTestServer(t)
	defer srv.Stop()

	config := *consulapi.DefaultConfig()
	config.Address = srv.HTTPAddr
	cc, err := consulapi.NewClient(&config)
	if err != nil {
		t.Fatal(err)
	}

	drain := make(chan *consulapi.HealthCheck)
	notifier := &testNotifier{}
	pro := NewProcessor(drain, notifier, cc)

	// Seed the KV store so the processor already knows about this check.
	b, err := json.Marshal(check{Status: "critical", UpdatedAt: time.Now().UTC()})
	if err != nil {
		t.Fatal("failed to marshal check")
	}
	srv.SetKV(consulKey, b)

	hc := &consulapi.HealthCheck{
		CheckID: "check",
		Node:    "node1",
		Status:  "critical",
	}

	pro.processHealthCheck(hc)
	if notifier.notified {
		t.Fatal("should not have notified")
	}
	if notifier.resolved {
		t.Fatal("resolved when unexpected")
	}
	if len(pro.hcs) != 1 {
		t.Fatal("failed to memoize check")
	}

	// Processing the same check again must still be a no-op.
	notifier.reset()
	pro.processHealthCheck(hc)
	if notifier.notified {
		t.Fatal("should not have notified")
	}
	if notifier.resolved {
		t.Fatal("resolved when unexpected")
	}
	if len(pro.hcs) != 1 {
		t.Fatal("failed to memoize check")
	}
}
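// Hedged sketch of the check record marshaled into KV above. The fields are
// taken directly from the composite literal in the test; any JSON tags on the
// real type are unknown and omitted here.
type check struct {
	Status    string
	UpdatedAt time.Time
}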
// TestProcessHealthCheck verifies that an unknown critical check triggers a
// notification on first sight, is memoized, and does not notify again when
// reprocessed in the same state.
func TestProcessHealthCheck(t *testing.T) {
	srv := ctu.NewTestServer(t)
	defer srv.Stop()

	config := *consulapi.DefaultConfig()
	config.Address = srv.HTTPAddr
	cc, err := consulapi.NewClient(&config)
	if err != nil {
		t.Fatal(err)
	}

	drain := make(chan *consulapi.HealthCheck)
	notifier := &testNotifier{}
	pro := NewProcessor(drain, notifier, cc)

	hc := &consulapi.HealthCheck{
		CheckID: "check",
		Node:    "node1",
		Status:  "critical",
	}

	pro.processHealthCheck(hc)
	if !notifier.notified {
		t.Fatal("failed to notify")
	}
	if notifier.resolved {
		t.Fatal("resolved when unexpected")
	}
	if len(pro.hcs) != 1 {
		t.Fatal("failed to memoize check")
	}

	// A repeat of the same critical check must not notify a second time.
	notifier.reset()
	pro.processHealthCheck(hc)
	if notifier.notified {
		t.Fatal("should not have notified")
	}
	if notifier.resolved {
		t.Fatal("resolved when unexpected")
	}
}
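// Hedged sketch of the testNotifier used by both processor tests above. The
// notified/resolved flags and reset() are referenced directly by the tests;
// the Notify/Resolve method names are assumptions about the notifier
// interface NewProcessor expects.
type testNotifier struct {
	notified bool
	resolved bool
}

func (n *testNotifier) Notify(hc *consulapi.HealthCheck) error {
	n.notified = true
	return nil
}

func (n *testNotifier) Resolve(hc *consulapi.HealthCheck) error {
	n.resolved = true
	return nil
}

// reset clears the recorded flags between assertions.
func (n *testNotifier) reset() {
	n.notified = false
	n.resolved = false
}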
func TestConsulLocker(t *testing.T) {
	a := assert.New(t)

	server := consultestutil.NewTestServer(t)
	defer server.Stop()

	client, err := consul.NewClient(&consul.Config{
		Address: server.HTTPAddr,
	})
	a.NoError(err)

	locker := New(client)

	a.NoError(locker.LockUpload("one"))
	a.Equal(tusd.ErrFileLocked, locker.LockUpload("one"))
	a.NoError(locker.UnlockUpload("one"))
	a.Equal(consul.ErrLockNotHeld, locker.UnlockUpload("one"))
}
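// Hedged sketch of the locker exercised above, assuming each upload ID maps
// to a consul.Lock acquired with LockTryOnce so a second LockUpload fails
// fast with tusd.ErrFileLocked. Only New, LockUpload, UnlockUpload, and
// ConnectionName appear in the tests; the type name, fields, and lock-key
// scheme here are assumptions, and the same imports as the tests (plus sync)
// are assumed.
type ConsulLocker struct {
	Client *consul.Client
	// ConnectionName is presumably used to identify this client's lock
	// session; TestLockLost below sets it to the server address.
	ConnectionName string

	locks map[string]*consul.Lock
	mutex sync.Mutex
}

func New(client *consul.Client) *ConsulLocker {
	return &ConsulLocker{
		Client: client,
		locks:  make(map[string]*consul.Lock),
	}
}

func (l *ConsulLocker) LockUpload(id string) error {
	lock, err := l.Client.LockOpts(&consul.LockOptions{
		Key:         id + "/lock",
		LockTryOnce: true, // try once and fail fast instead of blocking
	})
	if err != nil {
		return err
	}

	ch, err := lock.Lock(nil)
	if err != nil {
		return err
	}
	if ch == nil {
		// LockTryOnce gave up: another holder owns the lock.
		return tusd.ErrFileLocked
	}

	l.mutex.Lock()
	defer l.mutex.Unlock()
	l.locks[id] = lock
	return nil
}

func (l *ConsulLocker) UnlockUpload(id string) error {
	l.mutex.Lock()
	lock, ok := l.locks[id]
	delete(l.locks, id)
	l.mutex.Unlock()

	if !ok {
		return consul.ErrLockNotHeld
	}
	return lock.Unlock()
}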
func TestLockLost(t *testing.T) {
	// This test will panic because the connection to Consul will be cut, which
	// is intended.
	// TODO: find a way to test this
	t.SkipNow()

	a := assert.New(t)

	server := consultestutil.NewTestServer(t)

	client, err := consul.NewClient(&consul.Config{
		Address: server.HTTPAddr,
	})
	a.NoError(err)

	locker := New(client)
	locker.ConnectionName = server.HTTPAddr

	a.NoError(locker.LockUpload("two"))

	// Cut the connection and keep the test alive long enough for the lost
	// lock to surface.
	server.Stop()
	time.Sleep(time.Hour)
}