// CreateAndStartFeedWatchers starts the crawler pool (sized from
// Config.Crawl.MaxCrawlers, fed by d.CrawlChan) and then begins polling
// the given feeds.
func (d *Daemon) CreateAndStartFeedWatchers(feeds []*db.FeedInfo) {
	// start crawler pool
	crawler.StartCrawlerPool(d.Config.Crawl.MaxCrawlers, d.CrawlChan)

	// Start Polling
	d.startPollers(feeds)
}
func (fc *feedCommand) runone(c *kingpin.ParseContext) error { fc.init() fc.Config.Mail.SendMail = true fc.Config.DB.UpdateDb = false mailer := mail.CreateAndStartMailer(fc.Config) feed, err := fc.DBH.GetFeedByURL(fc.FeedURL) if err != nil { return err } httpCrawlChannel := make(chan *feedwatcher.FeedCrawlRequest, 1) responseChannel := make(chan *feedwatcher.FeedCrawlResponse) // start crawler pool crawler.StartCrawlerPool(1, httpCrawlChannel) fw := feedwatcher.NewFeedWatcher( *feed, httpCrawlChannel, responseChannel, mailer.OutgoingMail, fc.DBH, []string{}, 10, 100, ) feeds := make(map[string]*feedwatcher.FeedWatcher) feeds[fw.FeedInfo.URL] = fw if fc.Loops == -1 { for { resp := fw.CrawlFeed() err := fw.UpdateFeed(resp) if err != nil { fmt.Printf("Error when updating feed: %v\n", err) } time.Sleep(time.Second * time.Duration(fc.Config.Crawl.MinInterval)) } } else if fc.Loops == 1 { resp := fw.CrawlFeed() err := fw.UpdateFeed(resp) if err != nil { fmt.Printf("Error when updating feed: %v\n", err) } } else { for i := 0; i < fc.Loops; i++ { resp := fw.CrawlFeed() err := fw.UpdateFeed(resp) if err != nil { fmt.Printf("Error when updating feed: %v\n", err) } time.Sleep(time.Second * time.Duration(fc.Config.Crawl.MinInterval)) } } return nil }