// Synthesise calItemRangeURLs for incremental scraping and queue them up
func (s *station) Scrape() (jobs []r.Scraper, results []r.Broadcaster, err error) {
	now := time.Now()
	for _, t0 := range r.IncrementalNows(now) {
		u, _ := s.calendarItemRangeURLForTime(t0)
		jobs = append(jobs, r.Scraper(u))
	}
	return
}
// Synthesise dayURLs for incremental scraping and queue them up
func (s *station) Scrape() (jobs []r.Scraper, results []r.Broadcaster, err error) {
	now := time.Now()
	for _, t0 := range r.IncrementalNows(now) {
		day, _ := s.dayURLForDate(t0)
		jobs = append(jobs, r.Scraper(*day))
	}
	return
}
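Both Scrape implementations lean on r.IncrementalNows to decide which points in time an incremental run should cover; each station then turns those instants into concrete URLs (calendar-item ranges or day pages) and hands them back as fresh Scraper jobs. As a rough illustration of the idea only, such a helper could look like the sketch below; the concrete offsets, return type, and signature are assumptions, not the real scrape package's implementation.

package scrape

import "time"

// IncrementalNows picks the instants an incremental scrape should cover:
// "now" itself plus a few look-ahead points, so near-future schedule
// changes get refreshed on every run.
// NOTE: illustrative sketch only; offsets and signature are assumed.
func IncrementalNows(now time.Time) []time.Time {
	return []time.Time{
		now,
		now.Add(time.Hour),   // catch changes within the next hour
		now.AddDate(0, 0, 1), // tomorrow's schedule
		now.AddDate(0, 0, 7), // one week ahead
	}
}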
func main() {
	jobs := make(chan scrape.Scraper, 15)    // concurrent
	results := make(chan scrape.Broadcaster) // sequential
	defer close(jobs)
	defer close(results)
	var wg_jobs sync.WaitGroup
	var wg_results sync.WaitGroup
	nows := scrape.IncrementalNows(time.Now())

	// scrape and write concurrently

	// scraper loop
	go func() {
		for jobb := range jobs {
			job := jobb // capture the loop variable for the goroutine below
			go func() {
				defer wg_jobs.Done()
				// fmt.Fprintf(os.Stderr, "jobs process %p %s\n", job, job)
				scrapers, bcs, err := job.Scrape()
				if nil != err {
					fmt.Fprintf(os.Stderr, "error %s %s\n", job, err)
				}
				for _, s := range scrapers {
					if s.Matches(nows) {
						wg_jobs.Add(1)
						// fmt.Fprintf(os.Stderr, "jobs queue %p %s\n", s, s)
						jobs <- s
					}
				}
				for _, b := range bcs {
					wg_results.Add(1)
					results <- b
				}
			}()
		}
	}()

	// write loop
	go func() {
		for bc := range results {
			func() {
				defer wg_results.Done()
				bc.WriteAsLuaTable(os.Stdout)
			}()
		}
	}()

	{ // seed all the radio stations to scrape
		for _, s := range []string{"b1", "b2", "b5", "b+", "brheimat", "puls"} {
			wg_jobs.Add(1)
			jobs <- br.Station(s)
		}
		wg_jobs.Add(1)
		jobs <- b3.Station("b3")
		wg_jobs.Add(1)
		jobs <- b4.Station("b4")
		wg_jobs.Add(1)
		jobs <- radiofabrik.Station("radiofabrik")
		wg_jobs.Add(1)
		jobs <- m945.Station("m945")
		for _, s := range []string{"dlf", "drk"} {
			wg_jobs.Add(1)
			jobs <- dlf.Station(s)
		}
		wg_jobs.Add(1)
		jobs <- wdr.Station("wdr5")
	}

	wg_jobs.Wait()
	wg_results.Wait()
}
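The main loop is a recursive fan-out/fan-in: scrapers pulled from jobs may queue further scrapers onto the same channel, while finished broadcasts are funnelled through the unbuffered results channel and written one at a time. The crucial detail is the accounting: wg_jobs.Add(1) and wg_results.Add(1) happen before the corresponding send, and a worker's Done only fires after all its follow-up work has been registered, so neither Wait() can return early. A stripped-down, self-contained sketch of the same pattern, with invented job and result types purely for illustration:

package main

import (
	"fmt"
	"sync"
)

// job and its Scrape method are hypothetical stand-ins for scrape.Scraper:
// scraping may discover follow-up jobs and/or yield final results.
type job int

func (j job) Scrape() (more []job, results []string) {
	if j > 0 {
		more = append(more, j-1) // each job discovers one follow-up job
	}
	results = append(results, fmt.Sprintf("result from job %d", j))
	return
}

func main() {
	jobs := make(chan job, 4)    // buffered: bursts of new work don't stall workers
	results := make(chan string) // unbuffered: written strictly one at a time
	defer close(jobs)
	defer close(results)
	var wgJobs, wgResults sync.WaitGroup

	// dispatcher: one goroutine per job, which may feed the jobs channel again
	go func() {
		for j := range jobs {
			j := j // capture loop variable (pre-Go 1.22 idiom)
			go func() {
				defer wgJobs.Done()
				more, res := j.Scrape()
				for _, m := range more {
					wgJobs.Add(1) // register follow-up work before sending it
					jobs <- m
				}
				for _, r := range res {
					wgResults.Add(1)
					results <- r
				}
			}()
		}
	}()

	// writer: consumes results sequentially
	go func() {
		for r := range results {
			fmt.Println(r)
			wgResults.Done()
		}
	}()

	wgJobs.Add(1) // seed the queue
	jobs <- job(3)

	wgJobs.Wait()    // all scraping finished
	wgResults.Wait() // all results written
}

Because every job runs in its own goroutine while a single loop keeps receiving from jobs, re-queueing never blocks the dispatcher; the buffer on jobs (capacity 15 above, 4 in the sketch) merely smooths bursts of freshly discovered work. The deferred close calls run only after both Wait()s return, when no sender can be left in flight.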