Example No. 1
// Run Total Command Implementation
func (c *TotalCommand) Run(args []string) int {

	cmdFlags := flag.NewFlagSet("total", flag.ContinueOnError)
	cmdFlags.Usage = func() { c.UI.Output(c.Help()) }
	env.ConfigFlags(cmdFlags)

	if err := cmdFlags.Parse(args); err != nil {
		fmt.Println("Could not parse config: ", err)
		return 1
	}

	environment, err := env.GetEnvironment()
	if err != nil {
		fmt.Println("Could not parse config ", err)
		return 1
	}

	mysql, err = env.GetConnection(environment)
	if err != nil {
		fmt.Println("Could not Get DB Connection: ", err)
		return 1
	}

	fixPostsWithNoGroup()

	//loop users
	totalStatUsers()
	//loop groups
	totalStatGroups()

	return 0
}
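
The Run(args []string) int signature and the c.UI.Output(c.Help()) usage in these examples match the Command interface from github.com/mitchellh/cli. Below is a minimal, self-contained sketch of how a command of this shape could be wired into a CLI entry point; the demoCommand type, the "demo" subcommand, and the application name are illustrative assumptions, not taken from the original source.

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/mitchellh/cli"
)

// demoCommand is a hypothetical command; the real commands in these
// examples (TotalCommand, ServeCommand, ...) follow the same shape.
type demoCommand struct {
	UI cli.Ui
}

func (c *demoCommand) Help() string     { return "usage: app demo" }
func (c *demoCommand) Synopsis() string { return "A demo subcommand" }

// Run parses its arguments, does the work, and returns the process exit code.
func (c *demoCommand) Run(args []string) int {
	c.UI.Output(fmt.Sprintf("demo ran with args: %v", args))
	return 0
}

func main() {
	ui := &cli.BasicUi{Writer: os.Stdout, ErrorWriter: os.Stderr}

	app := cli.NewCLI("app", "0.1.0")
	app.Args = os.Args[1:]
	app.Commands = map[string]cli.CommandFactory{
		"demo": func() (cli.Command, error) { return &demoCommand{UI: ui}, nil },
	}

	status, err := app.Run()
	if err != nil {
		log.Println(err)
	}
	os.Exit(status)
}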
Example No. 2
// Run Serve Command Implementation
func (c *ServeCommand) Run(args []string) int {

	cmdFlags := flag.NewFlagSet("serve", flag.ContinueOnError)
	cmdFlags.Usage = func() { c.UI.Output(c.Help()) }
	env.ConfigFlags(cmdFlags)

	if err := cmdFlags.Parse(args); err != nil {
		fmt.Println("Could not parse config: ", err)
		return 1
	}

	environment, err := env.GetEnvironment()
	if err != nil {
		fmt.Println("Could not parse config ", err)
		return 1
	}

	mysql, err = env.GetConnection(environment)
	if err != nil {
		fmt.Println("Could not Get DB Connection: ", err)
		return 1
	}

	fs := http.FileServer(http.Dir("../public"))
	http.Handle("/public/", http.StripPrefix("/public/", fs))
	http.HandleFunc("/", homeEntry)
	http.HandleFunc("/search/do", doSearchHandler)
	http.HandleFunc("/rank/do", doRankHandler)
	err = http.ListenAndServe(environment.HTTP, nil)
	if err != nil {
		fmt.Println("Server Failed to Start, ", err.Error())
		return 1
	}
	return 0
}
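
For reference, here is a stripped-down, self-contained sketch of the same static-file serving pattern used above: http.FileServer behind http.StripPrefix plus a handler on "/". The ./public directory and the :8080 address are placeholder assumptions; in the example above the listen address comes from environment.HTTP.

package main

import (
	"fmt"
	"log"
	"net/http"
)

// home plays the same role as homeEntry in the example above.
func home(w http.ResponseWriter, r *http.Request) {
	fmt.Fprintln(w, "home")
}

func main() {
	// Serve files from ./public under the /public/ URL prefix.
	fs := http.FileServer(http.Dir("./public"))
	http.Handle("/public/", http.StripPrefix("/public/", fs))
	http.HandleFunc("/", home)

	// The example above reads this address from environment.HTTP.
	if err := http.ListenAndServe(":8080", nil); err != nil {
		log.Fatal("server failed to start: ", err)
	}
}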
Example No. 3
// Run Stat Command Implementation
func (c *StatCommand) Run(args []string) int {

	var startDate string
	var endDate string

	cmdFlags := flag.NewFlagSet("stat", flag.ContinueOnError)
	cmdFlags.Usage = func() { c.UI.Output(c.Help()) }
	env.ConfigFlags(cmdFlags)

	cmdFlags.StringVar(&startDate, "start", "", "Start Date.")
	cmdFlags.StringVar(&endDate, "end", "", "End Date.")

	if err := cmdFlags.Parse(args); err != nil {
		fmt.Println("Could not parse config: ", err)
		return 1
	}

	environment, err := env.GetEnvironment()
	if err != nil {
		fmt.Println("Could not parse config ", err)
		return 1
	}

	mysql, err = env.GetConnection(environment)
	if err != nil {
		fmt.Println("Could not Get DB Connection: ", err)
		return 1
	}

	// Both bounds default to 24 hours ago; the -start and -end flags override them.
	startTime := time.Now().Add(time.Hour * -24)
	endTime := time.Now().Add(time.Hour * -24)

	if startDate != "" {
		startTime = parseDate(startDate)
	}

	if endDate != "" {
		endTime = parseDate(endDate).Add(time.Hour * 24)
	}

	fixPostsWithNoGroup()

	for startTime.Before(endTime) {

		currentDate := times.Format("Y-m-d", startTime)

		fmt.Println("Running date: ", currentDate, " ...")

		//loop users
		statUsers(currentDate)
		//loop groups
		statGroups(currentDate)

		startTime = startTime.Add(time.Hour * 24)
	}

	return 0
}
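
The example above depends on helpers that are not shown here (parseDate and times.Format). As a rough illustration only, the sketch below walks a date range day by day using just the standard library, assuming dates arrive as YYYY-MM-DD; it is not the original parseDate implementation.

package main

import (
	"fmt"
	"time"
)

// parseDay is a hypothetical stand-in for the example's parseDate helper;
// the YYYY-MM-DD layout is an assumption.
func parseDay(s string) (time.Time, error) {
	return time.Parse("2006-01-02", s)
}

func main() {
	start, err := parseDay("2016-01-01")
	if err != nil {
		fmt.Println("bad start date:", err)
		return
	}
	end, err := parseDay("2016-01-03")
	if err != nil {
		fmt.Println("bad end date:", err)
		return
	}
	// Make the end date inclusive, mirroring the Add(time.Hour * 24) above.
	end = end.Add(24 * time.Hour)

	for t := start; t.Before(end); t = t.Add(24 * time.Hour) {
		fmt.Println("Running date:", t.Format("2006-01-02"))
	}
}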
Example No. 4
// Run Crawl Command Implementation
func (c *CrawlCommand) Run(args []string) int {

	cmdFlags := flag.NewFlagSet("crawl", flag.ContinueOnError)
	cmdFlags.Usage = func() { c.UI.Output(c.Help()) }
	env.ConfigFlags(cmdFlags)

	if err := cmdFlags.Parse(args); err != nil {
		fmt.Println("Could not parse config: ", err)
		return 1
	}

	environment, err := env.GetEnvironment()
	if err != nil {
		fmt.Println("Could not parse config ", err)
		return 1
	}

	db, err := env.GetConnection(environment)
	if err != nil {
		l4g.Error("Could not Get DB Connection: ", err)
		return 1
	}

	sharedHTTPClient = &http.Client{}
	messageChan = make(chan *schema.MessageBase)
	groupChan = make(chan *schema.GroupBase)
	quitChan = make(chan int)
	resumeChan = make(chan int)
	idChan = make(chan int)
	isRunning = false
	continuousFailedIds = []int{}

	go stopper()
	go scheduler()

	// Send the initial resume signal so the select loop below starts crawling.
	go func() {
		resumeChan <- 1
	}()

	for {
		select {
		case id := <-idChan:
			go crawler(id + 1)
		case m := <-messageChan:
			m.Message.Save(db)
			go next(m.ID)
		case g := <-groupChan:
			g.Group.Save(db)
			go next(g.ID)
		case <-quitChan:
			time.Sleep(1 * time.Second)
			l4g.Info("[crawl.scheduler] Goodbye!")
			time.Sleep(100 * time.Millisecond)
			return 0
		case status := <-resumeChan:
			// status = 1 will trigger crawling start.
			l4g.Info("[crawl.scheduler] Resume Signal %d", status)
			if status == 1 && !isRunning {
				//check db for max(id).
				var maxID int64
				statement, err := db.Prepare(`
					SELECT MAX(id) FROM
						(SELECT MAX(id) AS id FROM users u
						UNION
						SELECT MAX(id) AS id FROM threads t
						UNION
						SELECT MAX(id) AS id FROM posts p)
					utp
					`)
				if err != nil {
					l4g.Error("[mysql.getMax] %s", err)
				}
				err = statement.QueryRow().Scan(&maxID)
				if err != nil {
					l4g.Error("[mysql.getMax.scan] %s", err)
				}
				statement.Close()
				l4g.Info("[mysql.getMax] Get MaxId %d", maxID)
				l4g.Info("[crawl.scheduler] Resumed from %d", maxID)
				isRunning = true
				go next(int(maxID))
			}
			if status == 0 && isRunning {
				l4g.Info("[crawl.scheduler] Paused")
				isRunning = false
			}
		}

	}
}
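
As a side note, the crawl command's scheduling boils down to a common Go pattern: worker goroutines report back over channels while a single select loop owns the shared state and decides when to stop. The sketch below shows that pattern in isolation; the result type, timings, and stop condition are illustrative assumptions, not the original crawler.

package main

import (
	"fmt"
	"time"
)

// result stands in for the schema.MessageBase / schema.GroupBase values
// sent back over messageChan and groupChan in the example.
type result struct {
	ID int
}

func main() {
	results := make(chan result)
	quit := make(chan struct{})

	// crawl plays the role of the crawler(id) goroutine.
	crawl := func(id int) {
		time.Sleep(10 * time.Millisecond) // simulate network work
		results <- result{ID: id}
	}

	go crawl(1)

	// Close quit after a fixed budget, playing the role of quitChan.
	go func() {
		time.Sleep(200 * time.Millisecond)
		close(quit)
	}()

	for {
		select {
		case r := <-results:
			fmt.Println("saved item", r.ID)
			go crawl(r.ID + 1) // schedule the next ID, like next(m.ID)
		case <-quit:
			fmt.Println("Goodbye!")
			return
		}
	}
}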