func TestTreeFunc_missingData(t *testing.T) { d, err := dep.ParseStoreKeyPrefix("non-existing") if err != nil { t.Fatal(err) } brain := NewBrain() used := make(map[string]dep.Dependency) missing := make(map[string]dep.Dependency) f := treeFunc(brain, used, missing) result, err := f("non-existing") if err != nil { t.Fatal(err) } expected := []*dep.KeyPair{} if !reflect.DeepEqual(result, expected) { t.Errorf("expected %q to be %q", result, expected) } if _, ok := used[d.HashCode()]; !ok { t.Errorf("expected dep to be used") } if _, ok := missing[d.HashCode()]; !ok { t.Errorf("expected dep to be missing") } }
func TestParseFlags_prefixes(t *testing.T) { cli := NewCLI(ioutil.Discard, ioutil.Discard) config, _, _, err := cli.parseFlags([]string{ "-prefix", "global@nyc1:backup", }) if err != nil { t.Fatal(err) } if len(config.Prefixes) != 1 { t.Fatal("expected 1 prefix") } d, err := dep.ParseStoreKeyPrefix("global@nyc1") if err != nil { t.Fatal(err) } expected := &Prefix{ Source: d, SourceRaw: "global@nyc1", Destination: "backup", } if !reflect.DeepEqual(config.Prefixes[0], expected) { t.Errorf("expected %q to be %q", config.Prefixes[0], expected) } }
// lsFunc returns or accumulates keyPrefix dependencies. func lsFunc(brain *Brain, used, missing map[string]dep.Dependency) func(string) ([]*dep.KeyPair, error) { return func(s string) ([]*dep.KeyPair, error) { result := []*dep.KeyPair{} if len(s) == 0 { return result, nil } d, err := dep.ParseStoreKeyPrefix(s) if err != nil { return result, err } addDependency(used, d) // Only return non-empty top-level keys if value, ok := brain.Recall(d); ok { for _, pair := range value.([]*dep.KeyPair) { if pair.Key != "" && !strings.Contains(pair.Key, "/") { result = append(result, pair) } } return result, nil } addDependency(missing, d) return result, nil } }
func TestReceive_receivesData(t *testing.T) { prefix, err := dep.ParseStoreKeyPrefix("foo/bar") if err != nil { t.Fatal(err) } config := testConfig(` prefix { path = "foo/bar" } `, t) runner, err := NewRunner(config, []string{"env"}, true) if err != nil { t.Fatal(err) } runner.outStream, runner.errStream = ioutil.Discard, ioutil.Discard data := []*dep.KeyPair{&dep.KeyPair{Path: "foo/bar"}} runner.Receive(prefix, data) if !reflect.DeepEqual(runner.data[prefix.HashCode()], data) { t.Errorf("expected %#v to be %#v", runner.data[prefix.HashCode()], data) } }
// treeFunc returns or accumulates keyPrefix dependencies. func treeFunc(brain *Brain, used, missing map[string]dep.Dependency) func(string) ([]*dep.KeyPair, error) { return func(s string) ([]*dep.KeyPair, error) { result := make([]*dep.KeyPair, 0) if len(s) == 0 { return result, nil } d, err := dep.ParseStoreKeyPrefix(s) if err != nil { return result, err } addDependency(used, d) // Only return non-empty top-level keys if value, ok := brain.Recall(d); ok { for _, pair := range value.([]*dep.KeyPair) { parts := strings.Split(pair.Key, "/") if parts[len(parts)-1] != "" { result = append(result, pair) } } return result, nil } addDependency(missing, d) return result, nil } }
// ParsePrefix parses a prefix of the format "source@dc:destination" into the // Prefix component. func ParsePrefix(s string) (*Prefix, error) { if len(strings.TrimSpace(s)) < 1 { return nil, fmt.Errorf("cannot specify empty prefix declaration") } var sourceRaw, destination string parts := prefixRe.FindAllString(s, -1) switch len(parts) { case 1: sourceRaw = parts[0] case 2: sourceRaw, destination = parts[0], parts[1] default: return nil, fmt.Errorf("invalid prefix declaration format") } source, err := dep.ParseStoreKeyPrefix(sourceRaw) if err != nil { return nil, err } if destination == "" { destination = source.Prefix } // ensure destination prefix ends with "/" destination = strings.TrimSuffix(destination, "/") + "/" return &Prefix{ Source: source, SourceRaw: sourceRaw, Destination: destination, }, nil }
func TestRun_doublePass(t *testing.T) { in := test.CreateTempfile([]byte(` {{ range ls "services" }} {{ range service .Key }} {{.Node}} {{.Address}}:{{.Port}} {{ end }} {{ end }} `), t) defer test.DeleteTempfile(in, t) config := DefaultConfig() config.Merge(&Config{ ConfigTemplates: []*ConfigTemplate{ &ConfigTemplate{Source: in.Name()}, }, }) runner, err := NewRunner(config, true, false) if err != nil { t.Fatal(err) } if len(runner.dependencies) != 0 { t.Errorf("expected %d to be %d", len(runner.dependencies), 0) } if err := runner.Run(); err != nil { t.Fatal(err) } if len(runner.dependencies) != 1 { t.Errorf("expected %d to be %d", len(runner.dependencies), 1) } d, err := dep.ParseStoreKeyPrefix("services") if err != nil { t.Fatal(err) } data := []*dep.KeyPair{ &dep.KeyPair{Key: "service1"}, &dep.KeyPair{Key: "service2"}, &dep.KeyPair{Key: "service3"}, } runner.Receive(d, data) if err := runner.Run(); err != nil { t.Fatal(err) } if len(runner.dependencies) != 4 { t.Errorf("expected %d to be %d", len(runner.dependencies), 4) } }
func TestMerge_prefixes(t *testing.T) { global, err := dep.ParseStoreKeyPrefix("global/time") if err != nil { t.Fatal(err) } redis, err := dep.ParseStoreKeyPrefix("config/redis") if err != nil { t.Fatal(err) } config := testConfig(` prefixes = ["global/time"] `, t) config.Merge(testConfig(` prefixes = ["config/redis"] `, t)) expected := []*dep.StoreKeyPrefix{global, redis} if !reflect.DeepEqual(config.Prefixes, expected) { t.Errorf("expected \n\n%#v\n\n to be \n\n%#v\n\n", config.Prefixes, expected) } }
func TestParseFlags_prefix(t *testing.T) { cli := NewCLI(ioutil.Discard, ioutil.Discard) config, _, _, _, err := cli.parseFlags([]string{ "-prefix", "global", }) if err != nil { t.Fatal(err) } expected, err := dep.ParseStoreKeyPrefix("global") if err != nil { t.Fatal(err) } if !reflect.DeepEqual(config.Prefixes[0], expected) { t.Errorf("expected %#v to be %#v", config.Prefixes[0], expected) } }
func TestRun_sanitize(t *testing.T) { prefix, err := dep.ParseStoreKeyPrefix("foo/bar") if err != nil { t.Fatal(err) } config := testConfig(` sanitize = true prefixes = ["foo/bar"] `, t) runner, err := NewRunner(config, []string{"env"}, true) if err != nil { t.Fatal(err) } outStream, errStream := new(bytes.Buffer), new(bytes.Buffer) runner.outStream, runner.errStream = outStream, errStream pair := []*dep.KeyPair{ &dep.KeyPair{ Path: "foo/bar", Key: "b*a*r", Value: "baz", }, } runner.Receive(prefix, pair) exitCh, err := runner.Run() if err != nil { t.Fatal(err) } select { case err := <-runner.ErrCh: t.Fatal(err) case <-exitCh: expected := "b_a_r=baz" if !strings.Contains(outStream.String(), expected) { t.Fatalf("expected %q to include %q", outStream.String(), expected) } } }
func TestParsePrefix_source(t *testing.T) { source := "global" prefix, err := ParsePrefix(source) if err != nil { t.Fatal(err) } if prefix.SourceRaw != source { t.Errorf("expected %q to equal %q", prefix.SourceRaw, source) } d, err := dep.ParseStoreKeyPrefix("global") if err != nil { t.Fatal(err) } if !reflect.DeepEqual(prefix.Source, d) { t.Errorf("expected %#v to equal %#v", prefix.Source, d) } }
func TestTreeFunc_hasData(t *testing.T) { d, err := dep.ParseStoreKeyPrefix("existing") if err != nil { t.Fatal(err) } data := []*dep.KeyPair{ &dep.KeyPair{Key: "", Value: ""}, &dep.KeyPair{Key: "user/sethvargo", Value: "true"}, &dep.KeyPair{Key: "maxconns", Value: "11"}, &dep.KeyPair{Key: "minconns", Value: "2"}, } brain := NewBrain() brain.Remember(d, data) used := make(map[string]dep.Dependency) missing := make(map[string]dep.Dependency) f := treeFunc(brain, used, missing) result, err := f("existing") if err != nil { t.Fatal(err) } expected := []*dep.KeyPair{ &dep.KeyPair{Key: "user/sethvargo", Value: "true"}, &dep.KeyPair{Key: "maxconns", Value: "11"}, &dep.KeyPair{Key: "minconns", Value: "2"}, } if !reflect.DeepEqual(result, expected) { t.Errorf("expected %q to be %q", result, expected) } if len(missing) != 0 { t.Errorf("expected missing to have 0 elements, but had %d", len(missing)) } if _, ok := used[d.HashCode()]; !ok { t.Errorf("expected dep to be used") } }
func TestRun_exitCh(t *testing.T) { prefix, err := dep.ParseStoreKeyPrefix("foo/bar") if err != nil { t.Fatal(err) } config := testConfig(` prefixes = ["foo/bar"] `, t) runner, err := NewRunner(config, []string{"env"}, true) if err != nil { t.Fatal(err) } outStream, errStream := new(bytes.Buffer), new(bytes.Buffer) runner.outStream, runner.errStream = outStream, errStream pair := []*dep.KeyPair{ &dep.KeyPair{ Path: "foo/bar", Key: "bar", Value: "baz", }, } runner.Receive(prefix, pair) exitCh, err := runner.Run() if err != nil { t.Fatal(err) } select { case err := <-runner.ErrCh: t.Fatal(err) case <-exitCh: // Ok } }
// NewRunner accepts a JsonExport, and boolean value for once mode. func NewRunner(config *JsonExport, once bool) (*Runner, error) { var err error log.Printf("[INFO] (runner) creating new runner (once: %v)\n", once) runner := &Runner{ config: config, once: once, } s := strings.TrimPrefix(config.Prefix, "/") runner.Prefix, err = dep.ParseStoreKeyPrefix(s) if err != nil { return nil, err } if err := runner.init(); err != nil { return nil, err } return runner, nil }
func TestParsePrefix_destination(t *testing.T) { source, destination := "global@nyc4", "backup" prefix, err := ParsePrefix(fmt.Sprintf("%s:%s", source, destination)) if err != nil { t.Fatal(err) } if prefix.SourceRaw != source { t.Errorf("expected %q to equal %q", prefix.SourceRaw, source) } d, err := dep.ParseStoreKeyPrefix("global@nyc4") if err != nil { t.Fatal(err) } if !reflect.DeepEqual(prefix.Source, d) { t.Errorf("expected %#v to equal %#v", prefix.Source, d) } if prefix.Destination != destination { t.Errorf("expected %q to equal %q", prefix.Destination, destination) } }
// ParseConfig reads the configuration file at the given path and returns a new
// Config struct with the data populated.
//
// Non-fatal problems (a bad prefix, a bad wait string) are accumulated into a
// multierror and returned alongside the best-effort config; only I/O and
// decode failures abort immediately.
func ParseConfig(path string) (*Config, error) {
	var errs *multierror.Error

	// Read the contents of the file
	contents, err := ioutil.ReadFile(path)
	if err != nil {
		return nil, fmt.Errorf("error reading config at %q: %s", path, err)
	}

	// Parse the file (could be HCL or JSON)
	var shadow interface{}
	if err := hcl.Decode(&shadow, string(contents)); err != nil {
		return nil, fmt.Errorf("error decoding config at %q: %s", path, err)
	}

	// Convert to a map and flatten the keys we want to flatten
	parsed, ok := shadow.(map[string]interface{})
	if !ok {
		return nil, fmt.Errorf("error converting config at %q", path)
	}
	flattenKeys(parsed, []string{"auth", "ssl", "syslog"})

	// Parse the prefixes: convert the raw []interface{} of strings into
	// []*dep.StoreKeyPrefix before handing the map to mapstructure.
	if raw, ok := parsed["prefixes"]; ok {
		if typed, ok := raw.([]interface{}); !ok {
			err = fmt.Errorf("error converting prefixes to []interface{} at %q, was %T", path, raw)
			errs = multierror.Append(errs, err)
			delete(parsed, "prefixes")
		} else {
			prefixes := make([]*dep.StoreKeyPrefix, 0, len(typed))
			for _, p := range typed {
				if s, ok := p.(string); ok {
					if prefix, err := dep.ParseStoreKeyPrefix(s); err != nil {
						// A bad prefix is recorded but does not stop parsing
						// the remaining ones.
						err = fmt.Errorf("error parsing prefix %q at %q: %s", p, path, err)
						errs = multierror.Append(errs, err)
					} else {
						prefixes = append(prefixes, prefix)
					}
				} else {
					err = fmt.Errorf("error converting %T to string", p)
					errs = multierror.Append(errs, err)
					delete(parsed, "prefixes")
				}
			}
			parsed["prefixes"] = prefixes
		}
	}

	// Parse the wait component: "min:max" string becomes a min/max duration
	// map that mapstructure can decode into the Wait field.
	if raw, ok := parsed["wait"]; ok {
		if typed, ok := raw.(string); !ok {
			err = fmt.Errorf("error converting wait to string at %q", path)
			errs = multierror.Append(errs, err)
			delete(parsed, "wait")
		} else {
			// Note: the err here shadows the outer err deliberately.
			if wait, err := watch.ParseWait(typed); err != nil {
				err = fmt.Errorf("error parsing wait at %q: %s", path, err)
				errs = multierror.Append(errs, err)
				delete(parsed, "wait")
			} else {
				parsed["wait"] = map[string]time.Duration{
					"min": wait.Min,
					"max": wait.Max,
				}
			}
		}
	}

	// Create a new, empty config
	config := new(Config)

	// Use mapstructure to populate the basic config fields
	metadata := new(mapstructure.Metadata)
	decoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		DecodeHook: mapstructure.ComposeDecodeHookFunc(
			mapstructure.StringToSliceHookFunc(","),
			mapstructure.StringToTimeDurationHookFunc(),
		),
		ErrorUnused: true,
		Metadata:    metadata,
		Result:      config,
	})
	if err != nil {
		errs = multierror.Append(errs, err)
		return nil, errs.ErrorOrNil()
	}
	if err := decoder.Decode(parsed); err != nil {
		errs = multierror.Append(errs, err)
		return nil, errs.ErrorOrNil()
	}

	// Store a reference to the path where this config was read from
	config.Path = path

	// Update the list of set keys so Merge knows which fields were explicitly
	// provided by this file.
	if config.setKeys == nil {
		config.setKeys = make(map[string]struct{})
	}
	for _, key := range metadata.Keys {
		if _, ok := config.setKeys[key]; !ok {
			config.setKeys[key] = struct{}{}
		}
	}
	config.setKeys["path"] = struct{}{}

	// Layer the parsed values over the defaults so unset fields keep their
	// default values.
	d := DefaultConfig()
	d.Merge(config)
	config = d

	return config, errs.ErrorOrNil()
}
func TestParseConfig_correctValues(t *testing.T) { global, err := dep.ParseStoreKeyPrefix("config/global") if err != nil { t.Fatal(err) } redis, err := dep.ParseStoreKeyPrefix("config/redis") if err != nil { t.Fatal(err) } configFile := test.CreateTempfile([]byte(` consul = "nyc1.demo.consul.io" max_stale = "5s" token = "abcd1234" wait = "5s:10s" retry = "10s" log_level = "warn" prefixes = ["config/global", "config/redis"] auth { enabled = true username = "******" password = "******" } ssl { enabled = true verify = false cert = "c1.pem" ca_cert = "c2.pem" } syslog { enabled = true facility = "LOCAL5" } `), t) defer test.DeleteTempfile(configFile, t) config, err := ParseConfig(configFile.Name()) if err != nil { t.Fatal(err) } expected := &Config{ Path: configFile.Name(), Consul: "nyc1.demo.consul.io", MaxStale: time.Second * 5, Upcase: false, Sanitize: false, Timeout: 5 * time.Second, Auth: &AuthConfig{ Enabled: true, Username: "******", Password: "******", }, SSL: &SSLConfig{ Enabled: true, Verify: false, Cert: "c1.pem", CaCert: "c2.pem", }, Syslog: &SyslogConfig{ Enabled: true, Facility: "LOCAL5", }, Token: "abcd1234", Wait: &watch.Wait{ Min: time.Second * 5, Max: time.Second * 10, }, Retry: 10 * time.Second, LogLevel: "warn", Prefixes: []*dep.StoreKeyPrefix{global, redis}, KillSignal: "SIGTERM", setKeys: config.setKeys, } if !reflect.DeepEqual(config, expected) { t.Fatalf("expected \n%#v\n to be \n%#v\n", config, expected) } // if !reflect.DeepEqual(config, expected) { // t.Fatalf("expected \n%#v\n to be \n%#v\n", config, expected) // } }
func TestRun_merges(t *testing.T) { globalPrefix, err := dep.ParseStoreKeyPrefix("config/global") if err != nil { t.Fatal(err) } redisPrefix, err := dep.ParseStoreKeyPrefix("config/redis") if err != nil { t.Fatal(err) } config := testConfig(` upcase = true prefixes = ["config/global", "config/redis"] `, t) runner, err := NewRunner(config, []string{"env"}, true) if err != nil { t.Fatal(err) } outStream, errStream := new(bytes.Buffer), new(bytes.Buffer) runner.outStream, runner.errStream = outStream, errStream globalData := []*dep.KeyPair{ &dep.KeyPair{ Path: "config/global", Key: "address", Value: "1.2.3.4", }, &dep.KeyPair{ Path: "config/global", Key: "port", Value: "5598", }, } runner.Receive(globalPrefix, globalData) redisData := []*dep.KeyPair{ &dep.KeyPair{ Path: "config/redis", Key: "port", Value: "8000", }, } runner.Receive(redisPrefix, redisData) exitCh, err := runner.Run() if err != nil { t.Fatal(err) } select { case err := <-runner.ErrCh: t.Fatal(err) case <-exitCh: expected := "ADDRESS=1.2.3.4" if !strings.Contains(outStream.String(), expected) { t.Fatalf("expected %q to include %q", outStream.String(), expected) } expected = "PORT=8000" if !strings.Contains(outStream.String(), expected) { t.Fatalf("expected %q to include %q", outStream.String(), expected) } } }
// ParseConfig reads the configuration file at the given path and returns a new
// Config struct with the data populated. (Doc comment repaired: it previously
// began "// g reads", a truncated function name.)
//
// Prefix-parse failures are accumulated into a multierror and returned
// alongside the best-effort config; only I/O and decode failures abort.
func ParseConfig(path string) (*Config, error) {
	var errs *multierror.Error

	// Read the contents of the file
	contents, err := ioutil.ReadFile(path)
	if err != nil {
		return nil, fmt.Errorf("error reading config at %q: %s", path, err)
	}

	// Parse the file (could be HCL or JSON)
	var shadow interface{}
	if err := hcl.Decode(&shadow, string(contents)); err != nil {
		return nil, fmt.Errorf("error decoding config at %q: %s", path, err)
	}

	// Convert to a map and flatten the keys we want to flatten
	parsed, ok := shadow.(map[string]interface{})
	if !ok {
		return nil, fmt.Errorf("error converting config at %q", path)
	}
	flattenKeys(parsed, []string{"auth", "ssl", "syslog"})

	// Create a new, empty config
	config := new(Config)

	// Use mapstructure to populate the basic config fields
	metadata := new(mapstructure.Metadata)
	decoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		DecodeHook: mapstructure.ComposeDecodeHookFunc(
			watch.StringToWaitDurationHookFunc(),
			mapstructure.StringToSliceHookFunc(","),
			mapstructure.StringToTimeDurationHookFunc(),
		),
		ErrorUnused: true,
		Metadata:    metadata,
		Result:      config,
	})
	if err != nil {
		errs = multierror.Append(errs, err)
		return nil, errs.ErrorOrNil()
	}
	if err := decoder.Decode(parsed); err != nil {
		errs = multierror.Append(errs, err)
		return nil, errs.ErrorOrNil()
	}

	// Store a reference to the path where this config was read from
	config.Path = path

	// Parse the prefix sources. Errors are accumulated (not returned
	// immediately) so every bad prefix is reported in a single pass.
	for _, prefix := range config.Prefixes {
		parsed, err := dep.ParseStoreKeyPrefix(prefix.SourceRaw)
		if err != nil {
			errs = multierror.Append(errs, err)
			continue
		}
		prefix.Source = parsed

		// If no destination was given, default to the prefix
		if prefix.Destination == "" {
			prefix.Destination = parsed.Prefix
		}
	}

	// Update the list of set keys so Merge knows which fields were explicitly
	// provided by this file.
	if config.setKeys == nil {
		config.setKeys = make(map[string]struct{})
	}
	for _, key := range metadata.Keys {
		if _, ok := config.setKeys[key]; !ok {
			config.setKeys[key] = struct{}{}
		}
	}
	config.setKeys["path"] = struct{}{}

	// Layer the parsed values over the defaults so unset fields keep their
	// default values.
	d := DefaultConfig()
	d.Merge(config)
	config = d

	return config, errs.ErrorOrNil()
}
func TestStart_runsCommandOnChange(t *testing.T) { prefix, err := dep.ParseStoreKeyPrefix("foo/bar") if err != nil { t.Fatal(err) } config := testConfig(` prefixes = ["foo/bar"] `, t) runner, err := NewRunner(config, []string{"sh", "-c", "echo $BAR"}, true) if err != nil { t.Fatal(err) } outStream, errStream := new(bytes.Buffer), new(bytes.Buffer) runner.outStream, runner.errStream = outStream, errStream go runner.Start() defer runner.Stop() // Kind of hacky, but wait for the runner to return an error, indicating we // are all setup. select { case <-runner.watcher.ErrCh: } pair := []*dep.KeyPair{ &dep.KeyPair{ Path: "foo/bar", Key: "BAR", Value: "one", }, } runner.watcher.DataCh <- &watch.View{Dependency: prefix, Data: pair} select { case err := <-runner.ErrCh: t.Fatal(err) case <-time.After(200 * time.Millisecond): expected := "one\n" if outStream.String() != expected { t.Fatalf("expected %q to be %q", outStream.String(), expected) } } outStream.Reset() errStream.Reset() pair = []*dep.KeyPair{ &dep.KeyPair{ Path: "foo/bar", Key: "BAR", Value: "two", }, } runner.watcher.DataCh <- &watch.View{Dependency: prefix, Data: pair} select { case err := <-runner.ErrCh: t.Fatal(err) case <-time.After(200 * time.Millisecond): expected := "two\n" if outStream.String() != expected { t.Fatalf("expected %q to be %q", outStream.String(), expected) } } }
func TestExecute_multipass(t *testing.T) { in := test.CreateTempfile([]byte(` {{ range ls "services" }}{{.Key}}:{{ range service .Key }} {{.Node}} {{.Address}}:{{.Port}}{{ end }} {{ end }} `), t) defer test.DeleteTempfile(in, t) tmpl, err := NewTemplate(in.Name()) if err != nil { t.Fatal(err) } brain := NewBrain() used, missing, result, err := tmpl.Execute(brain) if err != nil { t.Fatal(err) } if num := len(missing); num != 1 { t.Errorf("expected 1 missing, got: %d", num) } if num := len(used); num != 1 { t.Errorf("expected 1 used, got: %d", num) } expected := bytes.TrimSpace([]byte("")) result = bytes.TrimSpace(result) if !bytes.Equal(result, expected) { t.Errorf("expected %q to be %q", result, expected) } // Receive data for the key prefix dependency d1, err := dep.ParseStoreKeyPrefix("services") brain.Remember(d1, []*dep.KeyPair{ &dep.KeyPair{Key: "webapp", Value: "1"}, &dep.KeyPair{Key: "database", Value: "1"}, }) used, missing, result, err = tmpl.Execute(brain) if err != nil { t.Fatal(err) } if num := len(missing); num != 2 { t.Errorf("expected 2 missing, got: %d", num) } if num := len(used); num != 3 { t.Errorf("expected 3 used, got: %d", num) } expected = bytes.TrimSpace([]byte(` webapp: database: `)) result = bytes.TrimSpace(result) if !bytes.Equal(result, expected) { t.Errorf("expected \n%q\n to be \n%q\n", result, expected) } // Receive data for the services d2, err := dep.ParseHealthServices("webapp") brain.Remember(d2, []*dep.HealthService{ &dep.HealthService{Node: "web01", Address: "1.2.3.4", Port: 1234}, }) d3, err := dep.ParseHealthServices("database") brain.Remember(d3, []*dep.HealthService{ &dep.HealthService{Node: "db01", Address: "5.6.7.8", Port: 5678}, }) used, missing, result, err = tmpl.Execute(brain) if err != nil { t.Fatal(err) } if num := len(missing); num != 0 { t.Errorf("expected 0 missing, got: %d", num) } if num := len(used); num != 3 { t.Errorf("expected 3 used, got: %d", num) } expected = bytes.TrimSpace([]byte(` webapp: web01 
1.2.3.4:1234 database: db01 5.6.7.8:5678 `)) result = bytes.TrimSpace(result) if !bytes.Equal(result, expected) { t.Errorf("expected \n%q\n to be \n%q\n", result, expected) } }
// TestExecute_renders renders a template exercising every API function,
// helper function, and math function against a fully pre-populated brain, and
// compares the rendered output byte-for-byte against a golden string.
func TestExecute_renders(t *testing.T) {
	// Stub out the time so timestamp-based functions render deterministically.
	now = func() time.Time { return time.Unix(0, 0).UTC() }

	in := test.CreateTempfile([]byte(`
API Functions
-------------
datacenters:{{ range datacenters }} {{.}}{{ end }}
file: {{ file "/path/to/file" }}
key: {{ key "config/redis/maxconns" }}
ls:{{ range ls "config/redis" }} {{.Key}}={{.Value}}{{ end }}
node:{{ with node }} {{.Node.Node}}{{ range .Services}} {{.Service}}{{ end }}{{ end }}
nodes:{{ range nodes }} {{.Node}}{{ end }}
service:{{ range service "webapp" }} {{.Address}}{{ end }}
service (any):{{ range service "webapp" "any" }} {{.Address}}{{ end }}
service (tag.Contains):{{ range service "webapp" }}{{ if .Tags.Contains "production" }} {{.Node}}{{ end }}{{ end }}
services:{{ range services }} {{.Name}}{{ end }}
tree:{{ range tree "config/redis" }} {{.Key}}={{.Value}}{{ end }}
vault: {{ with vault "secret/foo/bar" }}{{.Data.zip}}{{ end }}

Helper Functions
----------------
byKey:{{ range $key, $pairs := tree "config/redis" | byKey }} {{$key}}:{{ range $pairs }} {{.Key}}={{.Value}}{{ end }}{{ end }}
byTag (health service):{{ range $tag, $services := service "webapp" | byTag }} {{$tag}}:{{ range $services }} {{.Address}}{{ end }}{{ end }}
byTag (catalog services):{{ range $tag, $services := services | byTag }} {{$tag}}:{{ range $services }} {{.Name}}{{ end }}{{ end }}
contains:{{ range service "webapp" }}{{ if .Tags | contains "production" }} {{.Node}}{{ end }}{{ end }}
env: {{ env "foo" }}
explode:{{ range $k, $v := tree "config/redis" | explode }} {{$k}}{{$v}}{{ end }}
in:{{ range service "webapp" }}{{ if in .Tags "production" }} {{.Node}}{{ end }}{{ end }}
loop:{{ range loop 3 }} test{{ end }}
loop(i):{{ range $i := loop 5 8 }} test{{$i}}{{ end }}
join: {{ "a,b,c" | split "," | join ";" }}
parseBool: {{"true" | parseBool}}
parseFloat: {{"1.2" | parseFloat}}
parseInt: {{"-1" | parseInt}}
parseJSON (string):{{ range $key, $value := "{\"foo\": \"bar\"}" | parseJSON }} {{$key}}={{$value}}{{ end }}
parseJSON (file):{{ range $key, $value := file "/path/to/json/file" | parseJSON }} {{$key}}={{$value}}{{ end }}
parseJSON (env):{{ range $key, $value := env "json" | parseJSON }} {{$key}}={{$value}}{{ end }}
parseUint: {{"1" | parseUint}}
plugin: {{ file "/path/to/json/file" | plugin "echo" }}
timestamp: {{ timestamp }}
timestamp (formatted): {{ timestamp "2006-01-02" }}
regexMatch: {{ file "/path/to/file" | regexMatch ".*[cont][a-z]+" }}
regexMatch: {{ file "/path/to/file" | regexMatch "v[0-9]*" }}
regexReplaceAll: {{ file "/path/to/file" | regexReplaceAll "\\w" "x" }}
replaceAll: {{ file "/path/to/file" | replaceAll "some" "this" }}
split:{{ range "a,b,c" | split "," }} {{.}}{{end}}
toLower: {{ file "/path/to/file" | toLower }}
toJSON: {{ tree "config/redis" | explode | toJSON }}
toJSONPretty: {{ tree "config/redis" | explode | toJSONPretty }}
toTitle: {{ file "/path/to/file" | toTitle }}
toUpper: {{ file "/path/to/file" | toUpper }}
toYAML: {{ tree "config/redis" | explode | toYAML }}

Math Functions
--------------
add:{{ 2 | add 2 }}
subtract:{{ 2 | subtract 2 }}
multiply:{{ 2 | multiply 2 }}
divide:{{ 2 | divide 2 }}
`), t)
	defer test.DeleteTempfile(in, t)

	tmpl, err := NewTemplate(in.Name())
	if err != nil {
		t.Fatal(err)
	}

	// Seed the brain with data for every dependency the template uses.
	brain := NewBrain()

	var d dep.Dependency

	d, err = dep.ParseDatacenters()
	if err != nil {
		t.Fatal(err)
	}
	brain.Remember(d, []string{"dc1", "dc2"})

	d, err = dep.ParseFile("/path/to/file")
	if err != nil {
		t.Fatal(err)
	}
	brain.Remember(d, "some content")

	d, err = dep.ParseStoreKey("config/redis/maxconns")
	if err != nil {
		t.Fatal(err)
	}
	brain.Remember(d, "5")

	// The empty key is a "folder" entry; ls/tree are expected to filter it.
	d, err = dep.ParseStoreKeyPrefix("config/redis")
	if err != nil {
		t.Fatal(err)
	}
	brain.Remember(d, []*dep.KeyPair{
		&dep.KeyPair{Key: "", Value: ""},
		&dep.KeyPair{Key: "admin/port", Value: "1134"},
		&dep.KeyPair{Key: "maxconns", Value: "5"},
		&dep.KeyPair{Key: "minconns", Value: "2"},
	})

	d, err = dep.ParseCatalogNode()
	if err != nil {
		t.Fatal(err)
	}
	brain.Remember(d, &dep.NodeDetail{
		Node: &dep.Node{Node: "node1"},
		Services: dep.NodeServiceList([]*dep.NodeService{
			&dep.NodeService{
				Service: "service1",
			},
		}),
	})

	d, err = dep.ParseCatalogNodes("")
	if err != nil {
		t.Fatal(err)
	}
	brain.Remember(d, []*dep.Node{
		&dep.Node{Node: "node1"},
		&dep.Node{Node: "node2"},
	})

	d, err = dep.ParseHealthServices("webapp")
	if err != nil {
		t.Fatal(err)
	}
	brain.Remember(d, []*dep.HealthService{
		&dep.HealthService{
			Node:    "node1",
			Address: "1.2.3.4",
			Tags:    []string{"release"},
		},
		&dep.HealthService{
			Node:    "node2",
			Address: "5.6.7.8",
			Tags:    []string{"release", "production"},
		},
		&dep.HealthService{
			Node:    "node3",
			Address: "9.10.11.12",
			Tags:    []string{"production"},
		},
	})

	d, err = dep.ParseHealthServices("webapp", "any")
	if err != nil {
		t.Fatal(err)
	}
	brain.Remember(d, []*dep.HealthService{
		&dep.HealthService{Node: "node1", Address: "1.2.3.4"},
		&dep.HealthService{Node: "node2", Address: "5.6.7.8"},
	})

	d, err = dep.ParseCatalogServices("")
	if err != nil {
		t.Fatal(err)
	}
	brain.Remember(d, []*dep.CatalogService{
		&dep.CatalogService{
			Name: "service1",
			Tags: []string{"production"},
		},
		&dep.CatalogService{
			Name: "service2",
			Tags: []string{"release", "production"},
		},
	})

	d, err = dep.ParseVaultSecret("secret/foo/bar")
	if err != nil {
		t.Fatal(err)
	}
	brain.Remember(d, &dep.Secret{
		LeaseID:       "abcd1234",
		LeaseDuration: 120,
		Renewable:     true,
		Data:          map[string]interface{}{"zip": "zap"},
	})

	// Environment variables read by the env function.
	if err := os.Setenv("foo", "bar"); err != nil {
		t.Fatal(err)
	}

	d, err = dep.ParseFile("/path/to/json/file")
	if err != nil {
		t.Fatal(err)
	}
	brain.Remember(d, `{"foo": "bar"}`)

	if err := os.Setenv("json", `{"foo": "bar"}`); err != nil {
		t.Fatal(err)
	}

	_, _, result, err := tmpl.Execute(brain)
	if err != nil {
		t.Fatal(err)
	}

	// Golden output: each line below corresponds to one template line above.
	expected := []byte(`
API Functions
-------------
datacenters: dc1 dc2
file: some content
key: 5
ls: maxconns=5 minconns=2
node: node1 service1
nodes: node1 node2
service: 1.2.3.4 5.6.7.8 9.10.11.12
service (any): 1.2.3.4 5.6.7.8
service (tag.Contains): node2 node3
services: service1 service2
tree: admin/port=1134 maxconns=5 minconns=2
vault: zap

Helper Functions
----------------
byKey: admin: port=1134
byTag (health service): production: 5.6.7.8 9.10.11.12 release: 1.2.3.4 5.6.7.8
byTag (catalog services): production: service1 service2 release: service2
contains: node2 node3
env: bar
explode: adminmap[port:1134] maxconns5 minconns2
in: node2 node3
loop: test test test
loop(i): test5 test6 test7
join: a;b;c
parseBool: true
parseFloat: 1.2
parseInt: -1
parseJSON (string): foo=bar
parseJSON (file): foo=bar
parseJSON (env): foo=bar
parseUint: 1
plugin: {"foo": "bar"}
timestamp: 1970-01-01T00:00:00Z
timestamp (formatted): 1970-01-01
regexMatch: true
regexMatch: false
regexReplaceAll: xxxx xxxxxxx
replaceAll: this content
split: a b c
toLower: some content
toJSON: {"admin":{"port":"1134"},"maxconns":"5","minconns":"2"}
toJSONPretty: {
  "admin": {
    "port": "1134"
  },
  "maxconns": "5",
  "minconns": "2"
}
toTitle: Some Content
toUpper: SOME CONTENT
toYAML: admin:
  port: "1134"
maxconns: "5"
minconns: "2"

Math Functions
--------------
add:4
subtract:0
multiply:4
divide:1
`)

	if !bytes.Equal(result, expected) {
		t.Errorf("expected %s to be %s", result, expected)
	}
}
// init creates the Runner's underlying data structures and returns an error if // any problems occur. func (r *Runner) init() error { // Ensure we have defaults config := DefaultConfig() config.Merge(r.config) r.config = config // Print the final config for debugging result, err := json.MarshalIndent(r.config, "", " ") if err != nil { return err } log.Printf("[DEBUG] (runner) final config (tokens suppressed):\n\n%s\n\n", result) // Setup the kill signal signal, err := signals.Parse(r.config.KillSignal) if err != nil { return errors.Wrap(err, "runner") } r.killSignal = signal // Create the clientset clients, err := newClientSet(r.config) if err != nil { return fmt.Errorf("runner: %s", err) } // Create the watcher watcher, err := newWatcher(r.config, clients, r.once) if err != nil { return fmt.Errorf("runner: %s", err) } r.watcher = watcher r.data = make(map[string]interface{}) r.configPrefixMap = make(map[string]*ConfigPrefix) r.inStream = os.Stdin r.outStream = os.Stdout r.errStream = os.Stderr r.ErrCh = make(chan error) r.DoneCh = make(chan struct{}) r.ExitCh = make(chan int, 1) // Parse and add consul dependencies for _, p := range r.config.Prefixes { d, err := dep.ParseStoreKeyPrefix(p.Path) if err != nil { return err } r.dependencies = append(r.dependencies, d) r.configPrefixMap[d.HashCode()] = p } // Parse and add vault dependencies - it is important that this come after // consul, because consul should never be permitted to overwrite values from // vault; that would expose a security hole since access to consul is // typically less controlled than access to vault. for _, s := range r.config.Secrets { log.Printf("looking at vault %s", s.Path) d, err := dep.ParseVaultSecret(s.Path) if err != nil { return err } r.dependencies = append(r.dependencies, d) r.configPrefixMap[d.HashCode()] = s } return nil }
// Test that the config is parsed correctly func TestParseConfig_correctValues(t *testing.T) { configFile := test.CreateTempfile([]byte(` consul = "nyc1.demo.consul.io" max_stale = "5s" token = "abcd1234" wait = "5s:10s" retry = "10s" log_level = "warn" status_path = "global/statuses/replicators" auth { enabled = true username = "******" password = "******" } ssl { enabled = true verify = false } syslog { enabled = true facility = "LOCAL5" } prefix { source = "global/config@nyc1" } prefix { source = "redis/config@nyc1" destination = "redis/backup" } `), t) defer test.DeleteTempfile(configFile, t) config, err := ParseConfig(configFile.Name()) if err != nil { t.Fatal(err) } globalDep, err := dep.ParseStoreKeyPrefix("global/config@nyc1") if err != nil { t.Fatal(err) } global := &Prefix{ Source: globalDep, SourceRaw: "global/config@nyc1", Destination: "global/config", } redisDep, err := dep.ParseStoreKeyPrefix("redis/config@nyc1") if err != nil { t.Fatal(err) } redis := &Prefix{ Source: redisDep, SourceRaw: "redis/config@nyc1", Destination: "redis/backup", } expected := &Config{ Path: configFile.Name(), Consul: "nyc1.demo.consul.io", Token: "abcd1234", MaxStale: time.Second * 5, MaxStaleRaw: "5s", Prefixes: []*Prefix{global, redis}, Auth: &Auth{ Enabled: true, Username: "******", Password: "******", }, AuthRaw: []*Auth{ &Auth{ Enabled: true, Username: "******", Password: "******", }, }, SSL: &SSL{ Enabled: true, Verify: false, }, SSLRaw: []*SSL{ &SSL{ Enabled: true, Verify: false, }, }, Syslog: &Syslog{ Enabled: true, Facility: "LOCAL5", }, SyslogRaw: []*Syslog{ &Syslog{ Enabled: true, Facility: "LOCAL5", }, }, Wait: &watch.Wait{ Min: time.Second * 5, Max: time.Second * 10, }, WaitRaw: "5s:10s", Retry: 10 * time.Second, RetryRaw: "10s", LogLevel: "warn", StatusDir: "global/statuses/replicators", } if !reflect.DeepEqual(config, expected) { t.Fatalf("expected \n%#v\n\n, got \n\n%#v", expected.Prefixes[0], config.Prefixes[0]) } }
func TestStart_runsCommandOnChange(t *testing.T) { prefix, err := dep.ParseStoreKeyPrefix("foo/bar") if err != nil { t.Fatal(err) } config := testConfig(` prefix { path = "foo/bar" } `, t) f := test.CreateTempfile(nil, t) defer os.Remove(f.Name()) os.Remove(f.Name()) readFile := func(path string, ch chan string) { for { contents, err := ioutil.ReadFile(path) if err != nil { if os.IsNotExist(err) { time.Sleep(50 * time.Millisecond) continue } else { t.Fatal(err) return } } ch <- string(contents) return } } runner, err := NewRunner(config, []string{"sh", "-c", "echo $BAR > " + f.Name()}, true) if err != nil { t.Fatal(err) } runner.outStream, runner.errStream = ioutil.Discard, ioutil.Discard go runner.Start() defer runner.Stop() // Kind of hacky, but wait for the runner to return an error, indicating we // are all setup. select { case <-runner.watcher.ErrCh: } pair := []*dep.KeyPair{ &dep.KeyPair{ Path: "foo/bar", Key: "BAR", Value: "one", }, } runner.watcher.DataCh <- &watch.View{Dependency: prefix, Data: pair} contentCh := make(chan string) go readFile(f.Name(), contentCh) select { case err := <-runner.ErrCh: t.Fatal(err) case content := <-contentCh: expected := "one\n" if content != expected { t.Fatalf("expected %q to be %q", content, expected) } case <-time.After(1 * time.Second): t.Fatal("expected file to be rendered by now") } // Delete the file - otherwise the next read could have a false-positive since // the file already exists os.Remove(f.Name()) pair = []*dep.KeyPair{ &dep.KeyPair{ Path: "foo/bar", Key: "BAR", Value: "two", }, } runner.watcher.DataCh <- &watch.View{Dependency: prefix, Data: pair} contentCh = make(chan string) go readFile(f.Name(), contentCh) select { case err := <-runner.ErrCh: t.Fatal(err) case content := <-contentCh: expected := "two\n" if content != expected { t.Fatalf("expected %q to be %q", content, expected) } case <-time.After(1 * time.Second): t.Fatal("expected file to be rendered by now") } }
// parseFlags is a helper function for parsing command line flags using Go's // Flag library. This is extracted into a helper to keep the main function // small, but it also makes writing tests for parsing command line arguments // much easier and cleaner. func (cli *CLI) parseFlags(args []string) (*Config, []string, bool, bool, error) { var once, version bool var config = DefaultConfig() // Parse the flags and options flags := flag.NewFlagSet(Name, flag.ContinueOnError) flags.SetOutput(cli.errStream) flags.Usage = func() { fmt.Fprintf(cli.errStream, usage, Name) } flags.Var((funcVar)(func(s string) error { config.Consul = s config.set("consul") return nil }), "consul", "") flags.Var((funcVar)(func(s string) error { config.Token = s config.set("token") return nil }), "token", "") flags.Var((funcVar)(func(s string) error { s = strings.TrimPrefix(s, "/") p, err := dep.ParseStoreKeyPrefix(s) if err != nil { return err } if config.Prefixes == nil { config.Prefixes = make([]*dep.StoreKeyPrefix, 0, 1) } config.Prefixes = append(config.Prefixes, p) return nil }), "prefix", "") flags.Var((funcVar)(func(s string) error { config.Auth.Enabled = true config.set("auth.enabled") if strings.Contains(s, ":") { split := strings.SplitN(s, ":", 2) config.Auth.Username = split[0] config.set("auth.username") config.Auth.Password = split[1] config.set("auth.password") } else { config.Auth.Username = s config.set("auth.username") } return nil }), "auth", "") flags.Var((funcBoolVar)(func(b bool) error { config.SSL.Enabled = b config.set("ssl") config.set("ssl.enabled") return nil }), "ssl", "") flags.Var((funcBoolVar)(func(b bool) error { config.SSL.Verify = b config.set("ssl.verify") return nil }), "ssl-verify", "") flags.Var((funcVar)(func(s string) error { config.SSL.Cert = s config.set("ssl.cert") return nil }), "ssl-cert", "") flags.Var((funcVar)(func(s string) error { config.SSL.CaCert = s config.set("ssl.ca_cert") return nil }), "ssl-ca-cert", "") flags.Var((funcDurationVar)(func(d 
time.Duration) error { config.MaxStale = d config.set("max_stale") return nil }), "max-stale", "") flags.Var((funcBoolVar)(func(b bool) error { config.Syslog.Enabled = b config.set("syslog.enabled") return nil }), "syslog", "") flags.Var((funcVar)(func(s string) error { config.Syslog.Facility = s config.set("syslog.facility") return nil }), "syslog-facility", "") flags.Var((funcVar)(func(s string) error { w, err := watch.ParseWait(s) if err != nil { return err } config.Wait.Min = w.Min config.Wait.Max = w.Max config.set("wait") return nil }), "wait", "") flags.Var((funcDurationVar)(func(d time.Duration) error { config.Retry = d config.set("retry") return nil }), "retry", "") flags.Var((funcBoolVar)(func(b bool) error { config.Sanitize = b config.set("sanitize") return nil }), "sanitize", "") flags.Var((funcBoolVar)(func(b bool) error { config.Upcase = b config.set("upcase") return nil }), "upcase", "") flags.Var((funcVar)(func(s string) error { config.Path = s config.set("path") return nil }), "config", "") flags.Var((funcVar)(func(s string) error { config.KillSignal = s config.set("kill_signal") return nil }), "kill-signal", "") flags.Var((funcVar)(func(s string) error { config.LogLevel = s config.set("log_level") return nil }), "log-level", "") flags.Var((funcBoolVar)(func(b bool) error { config.Pristine = b config.set("pristine") return nil }), "pristine", "") flags.BoolVar(&once, "once", false, "") flags.BoolVar(&version, "v", false, "") flags.BoolVar(&version, "version", false, "") // If there was a parser error, stop if err := flags.Parse(args); err != nil { return nil, nil, false, false, err } return config, flags.Args(), once, version, nil }
// ParseConfig reads the configuration file at the given path and returns a new
// Config struct with the data populated.
//
// Errors are accumulated into a multierror: fatal decode failures return
// immediately, while per-field parse problems (prefixes, durations) are
// collected and returned alongside the partially-populated config.
func ParseConfig(path string) (*Config, error) {
	var errs *multierror.Error

	// Read the contents of the file
	contents, err := ioutil.ReadFile(path)
	if err != nil {
		errs = multierror.Append(errs, err)
		return nil, errs.ErrorOrNil()
	}

	// Parse the file (could be HCL or JSON)
	var parsed interface{}
	if err := hcl.Decode(&parsed, string(contents)); err != nil {
		errs = multierror.Append(errs, err)
		return nil, errs.ErrorOrNil()
	}

	// Create a new, empty config
	config := &Config{}

	// Use mapstructure to populate the basic config fields.
	// ErrorUnused makes unknown keys in the file a hard error.
	decoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		ErrorUnused: true,
		Metadata:    nil,
		Result:      config,
	})
	if err != nil {
		errs = multierror.Append(errs, err)
		return nil, errs.ErrorOrNil()
	}
	if err := decoder.Decode(parsed); err != nil {
		errs = multierror.Append(errs, err)
		return nil, errs.ErrorOrNil()
	}

	// Store a reference to the path where this config was read from
	config.Path = path

	// Parse the prefix sources: each raw source string becomes a structured
	// dependency. A bad prefix is recorded but does not abort the others.
	// (Note: this inner "parsed" intentionally shadows the HCL result above.)
	for _, prefix := range config.Prefixes {
		parsed, err := dep.ParseStoreKeyPrefix(prefix.SourceRaw)
		if err != nil {
			errs = multierror.Append(errs, err)
			continue
		}
		prefix.Source = parsed

		// If no destination was given, default to the prefix
		if prefix.Destination == "" {
			prefix.Destination = parsed.Prefix
		}
	}

	// Parse the MaxStale component (raw string -> time.Duration)
	if raw := config.MaxStaleRaw; raw != "" {
		stale, err := time.ParseDuration(raw)
		if err == nil {
			config.MaxStale = stale
		} else {
			errs = multierror.Append(errs, fmt.Errorf("max_stale invalid: %v", err))
		}
	}

	// Extract the last Auth block (later blocks in the file win)
	if len(config.AuthRaw) > 0 {
		config.Auth = config.AuthRaw[len(config.AuthRaw)-1]
	}

	// Extract the last SSL block (later blocks in the file win)
	if len(config.SSLRaw) > 0 {
		config.SSL = config.SSLRaw[len(config.SSLRaw)-1]
	}

	// Extract the last Syslog block (later blocks in the file win)
	if len(config.SyslogRaw) > 0 {
		config.Syslog = config.SyslogRaw[len(config.SyslogRaw)-1]
	}

	// Parse the Retry component (raw string -> time.Duration)
	if raw := config.RetryRaw; raw != "" {
		retry, err := time.ParseDuration(raw)
		if err == nil {
			config.Retry = retry
		} else {
			errs = multierror.Append(errs, fmt.Errorf("retry invalid: %v", err))
		}
	}

	// Parse the Wait component (raw "min:max" string -> watch.Wait)
	if raw := config.WaitRaw; raw != "" {
		wait, err := watch.ParseWait(raw)
		if err == nil {
			config.Wait = wait
		} else {
			errs = multierror.Append(errs, fmt.Errorf("wait invalid: %v", err))
		}
	}

	return config, errs.ErrorOrNil()
}