func (s *httpServer) indexHandler(w http.ResponseWriter, req *http.Request) {
	reqParams, err := http_api.NewReqParams(req)
	if err != nil {
		s.ctx.nsqadmin.logf("ERROR: failed to parse request params - %s", err)
		http.Error(w, "INVALID_REQUEST", 500)
		return
	}

	var topics []string
	if len(s.ctx.nsqadmin.opts.NSQLookupdHTTPAddresses) != 0 {
		topics, _ = lookupd.GetLookupdTopics(s.ctx.nsqadmin.opts.NSQLookupdHTTPAddresses)
	} else {
		topics, _ = lookupd.GetNSQDTopics(s.ctx.nsqadmin.opts.NSQDHTTPAddresses)
	}

	p := struct {
		Title        string
		GraphOptions *GraphOptions
		Topics       Topics
		Version      string
	}{
		Title:        "NSQ",
		GraphOptions: NewGraphOptions(w, req, reqParams, s.ctx),
		Topics:       TopicsFromStrings(topics),
		Version:      version.Binary,
	}

	err = templates.T.ExecuteTemplate(w, "index.html", p)
	if err != nil {
		s.ctx.nsqadmin.logf("Template Error %s", err)
		http.Error(w, "Template Error", 500)
	}
}
func (s *httpServer) indexHandler(w http.ResponseWriter, req *http.Request, ps httprouter.Params) (interface{}, error) {
	reqParams, err := http_api.NewReqParams(req)
	if err != nil {
		return nil, http_api.Err{400, "INVALID_REQUEST"}
	}

	var topics []string
	if len(s.ctx.nsqadmin.opts.NSQLookupdHTTPAddresses) != 0 {
		topics, _ = lookupd.GetLookupdTopics(s.ctx.nsqadmin.opts.NSQLookupdHTTPAddresses)
	} else {
		topics, _ = lookupd.GetNSQDTopics(s.ctx.nsqadmin.opts.NSQDHTTPAddresses)
	}

	p := struct {
		Title        string
		GraphOptions *GraphOptions
		Topics       Topics
		Version      string
	}{
		Title:        "NSQ",
		GraphOptions: NewGraphOptions(w, req, reqParams, s.ctx),
		Topics:       TopicsFromStrings(topics),
		Version:      version.Binary,
	}

	err = templates.T.ExecuteTemplate(w, "index.html", p)
	if err != nil {
		s.ctx.nsqadmin.logf("ERROR: executing template - %s", err)
		return nil, http_api.Err{500, "INTERNAL_ERROR"}
	}
	return nil, nil
}
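// A minimal sketch -- an assumption for illustration, not nsqadmin's actual
// http_api.Decorate machinery -- of how a handler with the
// (interface{}, error) signature above can be mounted on
// julienschmidt/httprouter: invoke the handler and, if it returns an error
// (e.g. an http_api.Err), report it to the client.
func adapt(f func(http.ResponseWriter, *http.Request, httprouter.Params) (interface{}, error)) httprouter.Handle {
	return func(w http.ResponseWriter, req *http.Request, ps httprouter.Params) {
		if _, err := f(w, req, ps); err != nil {
			// simplified: always respond 500 with the error text; the real
			// decorators presumably map http_api.Err codes to status codes
			http.Error(w, err.Error(), 500)
		}
	}
}

// hypothetical usage: router.GET("/", adapt(s.indexHandler))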
func (t *TopicDiscoverer) syncTopics(addrs []string, pattern string) {
	newTopics, err := lookupd.GetLookupdTopics(addrs)
	if err != nil {
		log.Printf("ERROR: could not retrieve topic list: %s", err)
	}
	for _, topic := range newTopics {
		if _, ok := t.topics[topic]; !ok {
			if !t.allowTopicName(pattern, topic) {
				log.Println("Skipping topic", topic, "as it didn't match required pattern:", pattern)
				continue
			}
			logger, err := newConsumerFileLogger(topic)
			if err != nil {
				log.Printf("ERROR: couldn't create logger for new topic %s: %s", topic, err)
				continue
			}
			t.topics[topic] = logger
			go t.startTopicRouter(logger)
		}
	}
}
func (s *httpServer) lookupHandler(w http.ResponseWriter, req *http.Request) {
	reqParams, err := http_api.NewReqParams(req)
	if err != nil {
		s.ctx.nsqadmin.logf("ERROR: failed to parse request params - %s", err)
		http.Error(w, "INVALID_REQUEST", 500)
		return
	}

	channels := make(map[string][]string)
	allTopics, _ := lookupd.GetLookupdTopics(s.ctx.nsqadmin.opts.NSQLookupdHTTPAddresses)
	for _, topicName := range allTopics {
		var producers []string
		producers, _ = lookupd.GetLookupdTopicProducers(topicName,
			s.ctx.nsqadmin.opts.NSQLookupdHTTPAddresses)
		// only list topics with no current producers, i.e. registered in
		// nsqlookupd but not present on any nsqd
		if len(producers) == 0 {
			topicChannels, _ := lookupd.GetLookupdTopicChannels(topicName,
				s.ctx.nsqadmin.opts.NSQLookupdHTTPAddresses)
			channels[topicName] = topicChannels
		}
	}

	p := struct {
		Title        string
		GraphOptions *GraphOptions
		TopicMap     map[string][]string
		Lookupd      []string
		Version      string
	}{
		Title:        "NSQ Lookup",
		GraphOptions: NewGraphOptions(w, req, reqParams, s.ctx),
		TopicMap:     channels,
		Lookupd:      s.ctx.nsqadmin.opts.NSQLookupdHTTPAddresses,
		Version:      version.Binary,
	}

	err = templates.T.ExecuteTemplate(w, "lookup.html", p)
	if err != nil {
		s.ctx.nsqadmin.logf("Template Error %s", err)
		http.Error(w, "Template Error", 500)
	}
}
func main() {
	flag.Parse()

	if *showVersion {
		fmt.Printf("nsq_to_file v%s\n", version.Binary)
		return
	}

	if *channel == "" {
		log.Fatal("--channel is required")
	}

	var topicsFromNSQLookupd bool

	if len(nsqdTCPAddrs) == 0 && len(lookupdHTTPAddrs) == 0 {
		log.Fatal("--nsqd-tcp-address or --lookupd-http-address required.")
	}
	if len(nsqdTCPAddrs) != 0 && len(lookupdHTTPAddrs) != 0 {
		log.Fatal("use --nsqd-tcp-address or --lookupd-http-address not both")
	}

	if *gzipLevel < 1 || *gzipLevel > 9 {
		log.Fatalf("invalid --gzip-level value (%d), should be 1-9", *gzipLevel)
	}

	// TODO: remove, deprecated
	if hasArg("gzip-compression") {
		log.Printf("WARNING: --gzip-compression is deprecated in favor of --gzip-level")
		switch *gzipCompression {
		case 1:
			*gzipLevel = gzip.BestSpeed
		case 2:
			*gzipLevel = gzip.BestCompression
		case 3:
			*gzipLevel = gzip.DefaultCompression
		default:
			log.Fatalf("invalid --gzip-compression value (%d), should be 1,2,3", *gzipCompression)
		}
	}

	discoverer := newTopicDiscoverer()

	signal.Notify(discoverer.hupChan, syscall.SIGHUP)
	signal.Notify(discoverer.termChan, syscall.SIGINT, syscall.SIGTERM)

	if len(topics) < 1 {
		if len(lookupdHTTPAddrs) < 1 {
			log.Fatal("use --topic to list at least one topic to subscribe to or specify at least one --lookupd-http-address to subscribe to all its topics")
		}
		topicsFromNSQLookupd = true
		var err error
		topics, err = lookupd.GetLookupdTopics(lookupdHTTPAddrs)
		if err != nil {
			log.Fatalf("ERROR: could not retrieve topic list: %s", err)
		}
	}

	for _, topic := range topics {
		if !discoverer.allowTopicName(*topicPattern, topic) {
			log.Println("Skipping topic", topic, "as it didn't match required pattern:", *topicPattern)
			continue
		}

		logger, err := newConsumerFileLogger(topic)
		if err != nil {
			log.Fatalf("ERROR: couldn't create logger for topic %s: %s", topic, err)
		}
		discoverer.topics[topic] = logger
		go discoverer.startTopicRouter(logger)
	}

	discoverer.watch(lookupdHTTPAddrs, topicsFromNSQLookupd, *topicPattern)
}
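// A minimal sketch of the watch loop invoked at the end of main above. Its
// shape is an assumption, not the actual nsq_to_file implementation: the 30s
// poll interval is illustrative, and only syncTopics and the hupChan/termChan
// fields are taken from the snippets in this listing.
func (t *TopicDiscoverer) watch(addrs []string, sync bool, pattern string) {
	ticker := time.NewTicker(30 * time.Second)
	defer ticker.Stop()
	for {
		select {
		case <-ticker.C:
			// re-discover topics only when they came from nsqlookupd
			if sync {
				t.syncTopics(addrs, pattern)
			}
		case <-t.hupChan:
			// SIGHUP: illustrative hook for reopening/rotating output files
		case <-t.termChan:
			// SIGINT/SIGTERM: stop watching and exit
			return
		}
	}
}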