func main() { // 支持根据参数打印版本信息 global.PrintVersion(os.Stdout) savePid() logger.Init(ROOT+"/log", ConfigFile.MustValue("global", "log_level", "DEBUG")) go ServeBackGround() e := echo.New() serveStatic(e) e.Use(thirdmw.EchoLogger()) e.Use(mw.Recover()) e.Use(pwm.Installed(filterPrefixs)) e.Use(pwm.HTTPError()) e.Use(pwm.AutoLogin()) frontG := e.Group("", thirdmw.EchoCache()) controller.RegisterRoutes(frontG) frontG.GET("/admin", echo.HandlerFunc(admin.AdminIndex), pwm.NeedLogin(), pwm.AdminAuth()) adminG := e.Group("/admin", pwm.NeedLogin(), pwm.AdminAuth()) admin.RegisterRoutes(adminG) std := standard.New(getAddr()) std.SetHandler(e) gracefulRun(std) }
func main() { logger.Init(config.ROOT+"/log", config.ConfigFile.MustValue("global", "log_level", "DEBUG"), "crawl") var ( needAll bool crawlConfFilename string whichSite string ) flag.BoolVar(&needAll, "all", false, "是否需要全量抓取,默认否") flag.StringVar(&crawlConfFilename, "config", "config/auto_crawl.json", "自动抓取配置文件") flag.StringVar(&whichSite, "site", "", "抓取配置中哪个站点(空表示所有配置站点)") flag.Parse() if !filepath.IsAbs(crawlConfFilename) { crawlConfFilename = config.ROOT + "/" + crawlConfFilename } go autocrawl(needAll, crawlConfFilename, whichSite) select {} }
func main() { if !flag.Parsed() { flag.Parse() } logger.Init(ROOT+"/log", ConfigFile.MustValue("global", "log_level", "DEBUG")) if *manualIndex { indexing(true) } c := cron.New() // 构建 solr 需要的索引数据 // 一天一次全量 c.AddFunc("@daily", func() { indexing(true) }) c.Start() select {} }
// init configures the package-level logger at import time with an empty
// directory argument and a fixed "INFO" level.
// NOTE(review): logger.Init is project-local; presumably "" means "use the
// default log location" — confirm against the logger package. Best practice
// discourages side effects in init(); consider explicit initialization if
// callers can be updated.
func init() {
	logger.Init("", "INFO")
}