func (t Thumbnailer) FeedUpdated(feed content.Feed) error {
	t.logger.Debugln("Generating thumbnailer processors")

	processors := t.generateProcessors(feed.NewArticles())
	numProcessors := 20

	done := make(chan struct{})
	errc := make(chan error)
	defer close(done)

	var wg sync.WaitGroup
	wg.Add(numProcessors)

	for i := 0; i < numProcessors; i++ {
		go func() {
			err := t.process(done, processors)
			if err != nil {
				errc <- err
			}
			wg.Done()
		}()
	}

	go func() {
		wg.Wait()
		close(errc)
	}()

	for err := range errc {
		return err
	}

	return nil
}
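
// Unsubscribe validates the hubbub callback URL and the feed's hub link,
// then sends the unsubscribe request to the hub in a background goroutine.
// It returns ErrNotSubscribed if no subscription exists for the feed.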
func (h Hubbub) Unsubscribe(f content.Feed) error {
	if u, err := url.Parse(h.config.Hubbub.CallbackURL); err != nil {
		return ErrNotConfigured
	} else if !u.IsAbs() {
		return ErrNotConfigured
	}

	fdata := f.Data()
	if u, err := url.Parse(fdata.HubLink); err != nil {
		return ErrNoFeedHubLink
	} else if !u.IsAbs() {
		return ErrNoFeedHubLink
	}

	s := f.Subscription()
	if s.HasErr() {
		return s.Err()
	}

	if s.Data().FeedId != fdata.Id {
		h.logger.Infoln("Not subscribed to " + fdata.HubLink)
		return ErrNotSubscribed
	}

	go func() {
		h.subscribe(s, f, false)
	}()

	return nil
}
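
// startUpdatingFeed marks the feed as active and launches a background
// scheduler that re-fetches its content on every update interval, honouring
// the feed's TTL and its skip-hours/skip-days settings. A second goroutine
// handles content scoring.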
func (fm *FeedManager) startUpdatingFeed(f content.Feed) {
	if f == nil {
		fm.logger.Infoln("No feed provided")
		return
	}

	data := f.Data()
	if data.Id == 0 || fm.activeFeeds[data.Id] {
		fm.logger.Infoln("Feed " + data.Link + " already active")
		return
	}

	d := 30 * time.Minute
	if fm.config.FeedManager.Converted.UpdateInterval != 0 {
		if data.TTL != 0 && data.TTL > fm.config.FeedManager.Converted.UpdateInterval {
			d = data.TTL
		} else {
			d = fm.config.FeedManager.Converted.UpdateInterval
		}
	}

	fm.activeFeeds[data.Id] = true

	go func() {
		fm.requestFeedContent(f)

		ticker := time.After(d)

		fm.logger.Infof("Starting feed scheduler for %s and duration %d\n", f, d)
	TICKER:
		for {
			select {
			case now := <-ticker:
				if !fm.activeFeeds[data.Id] {
					fm.logger.Infof("Feed '%s' no longer active\n", data.Link)
					break TICKER
				}

				if !data.SkipHours[now.Hour()] && !data.SkipDays[now.Weekday().String()] {
					fm.requestFeedContent(f)
				}

				ticker = time.After(d)
				fm.logger.Infof("New feed ticker for '%s' after %d\n", data.Link, d)
			case <-fm.done:
				fm.stopUpdatingFeed(f)
				return
			}
		}
	}()

	go fm.scoreFeedContent(f)
}
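
// AddFeed tries a hubbub subscription first when the feed advertises a hub
// link; if that succeeds (or a subscription already exists), nothing more is
// needed. Otherwise the feed is queued for regular polling.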
func (fm *FeedManager) AddFeed(f content.Feed) {
	if f.Data().HubLink != "" && fm.hubbub != nil {
		err := fm.hubbub.Subscribe(f)

		if err == nil || err == ErrSubscribed {
			return
		}
	}

	fm.addFeed <- f
}
func (fm FeedManager) processFeedUpdateMonitors(f content.Feed) {
	if len(f.NewArticles()) > 0 {
		for _, m := range fm.feedMonitors {
			if err := m.FeedUpdated(f); err != nil {
				fm.logger.Printf("Error invoking monitor '%s' on updated feed '%s': %v\n",
					reflect.TypeOf(m), f, err)
			}
		}
	} else {
		fm.logger.Infoln("No new articles for " + f.String())
	}
}
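
// addFeed adds each of the given links to the user's feeds. A URL fragment,
// if present, is treated as a comma-separated list of tags; for example, a
// link such as "http://example.com/feed.xml#news,tech" (illustrative only)
// would tag the feed with "news" and "tech".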
func addFeed(user content.User, fm *readeef.FeedManager, links []string) (resp responseError) {
	resp = newResponse()

	success := false

	for _, link := range links {
		var u *url.URL
		if u, resp.err = url.Parse(link); resp.err != nil {
			// TODO: non-fatal error
			return
		} else if !u.IsAbs() {
			// TODO: non-fatal error
			resp.err = errors.New("Feed has no link")
			return
		} else {
			var f content.Feed
			if f, resp.err = fm.AddFeedByLink(link); resp.err != nil {
				return
			}

			uf := user.AddFeed(f)
			if uf.HasErr() {
				resp.err = uf.Err()
				return
			}

			tags := strings.Split(u.Fragment, ",")
			if u.Fragment != "" && len(tags) > 0 {
				repo := uf.Repo()
				tf := repo.TaggedFeed(user)
				tf.Data(uf.Data())

				t := make([]content.Tag, len(tags))
				for i := range tags {
					t[i] = repo.Tag(user)
					t[i].Value(data.TagValue(tags[i]))
				}

				tf.Tags(t)
				if tf.UpdateTags(); tf.HasErr() {
					resp.err = tf.Err()
					return
				}
			}

			success = true
		}
	}

	resp.val["Success"] = success
	return
}
func (i Index) FeedDeleted(feed content.Feed) error {
	i.logger.Infof("Deleting article search index for feed '%s'\n", feed)

	articles := feed.AllArticles()
	if feed.HasErr() {
		return fmt.Errorf("Error getting all articles of %s for removal from the search index: %v", feed, feed.Err())
	}

	return i.provider.BatchIndex(articles, data.BatchDelete)
}
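
// requestFeedContent fetches the feed's content over HTTP, skips processing
// when the body hash matches the previous update, parses the payload as
// RSS2, Atom or RSS1, and records any error in the feed's UpdateError before
// handing the feed off for a database update.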
func (fm *FeedManager) requestFeedContent(f content.Feed) {
	if f == nil {
		fm.logger.Infoln("No feed provided")
		return
	}

	data := f.Data()

	fm.logger.Infoln("Requesting feed content for " + f.String())

	resp, err := fm.client.Get(data.Link)
	if err != nil {
		data.UpdateError = err.Error()
	} else if resp.StatusCode != http.StatusOK {
		defer func() {
			// Drain the body so that the connection can be reused
			io.Copy(ioutil.Discard, resp.Body)
			resp.Body.Close()
		}()

		data.UpdateError = httpStatusPrefix + strconv.Itoa(resp.StatusCode)
	} else {
		defer resp.Body.Close()

		data.UpdateError = ""

		buf := util.BufferPool.GetBuffer()
		defer util.BufferPool.Put(buf)

		if _, err := buf.ReadFrom(resp.Body); err == nil {
			hash := md5.Sum(buf.Bytes())
			if b, ok := fm.lastUpdateHash[data.Id]; ok && bytes.Equal(b[:], hash[:]) {
				fm.logger.Infof("Content of feed %s is the same as the previous update\n", f)
				return
			}
			fm.lastUpdateHash[data.Id] = hash

			if pf, err := parser.ParseFeed(buf.Bytes(), parser.ParseRss2, parser.ParseAtom, parser.ParseRss1); err == nil {
				f.Refresh(fm.processParserFeed(pf))
			} else {
				data.UpdateError = err.Error()
			}
		} else {
			data.UpdateError = err.Error()
		}
	}

	if data.UpdateError != "" {
		fm.logger.Printf("Error updating feed '%s': %s\n", f, data.UpdateError)
	}

	f.Data(data)

	select {
	case <-fm.done:
		return
	default:
		fm.updateFeed(f)
	}
}
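
// TestImplements verifies that the repository constructors return values
// satisfying the corresponding content interfaces; the assignments fail to
// compile if any interface is not implemented.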
func TestImplements(t *testing.T) {
	r := NewRepo(nil, nil)

	var article content.Article
	article = r.Article()
	article.Data()

	var userArticle content.UserArticle
	userArticle = r.UserArticle(nil)
	userArticle.Data()

	var scoredArticle content.ScoredArticle
	scoredArticle = r.ScoredArticle()
	scoredArticle.Data()

	var feed content.Feed
	feed = r.Feed()
	feed.Data()

	var userFeed content.UserFeed
	userFeed = r.UserFeed(nil)
	userFeed.Data()

	var taggedFeed content.TaggedFeed
	taggedFeed = r.TaggedFeed(nil)
	taggedFeed.Data()

	r.HasErr()

	var subscription content.Subscription
	subscription = r.Subscription()
	subscription.Data()

	var tag content.Tag
	tag = r.Tag(nil)
	tag.Value()

	var user content.User
	user = r.User()
	user.Data()
}
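
// Subscribe validates the hubbub callback URL and the feed's hub link,
// records the pending subscription, and sends the subscribe request to the
// hub in a background goroutine. It returns ErrSubscribed if a subscription
// already exists for the feed.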
func (h *Hubbub) Subscribe(f content.Feed) error {
	if u, err := url.Parse(h.config.Hubbub.CallbackURL); err != nil {
		return ErrNotConfigured
	} else if !u.IsAbs() {
		return ErrNotConfigured
	}

	fdata := f.Data()
	if u, err := url.Parse(fdata.HubLink); err != nil {
		return ErrNoFeedHubLink
	} else if !u.IsAbs() {
		return ErrNoFeedHubLink
	}

	s := f.Subscription()
	if s.HasErr() {
		return s.Err()
	}

	data := s.Data()
	if data.FeedId == fdata.Id {
		h.logger.Infoln("Already subscribed to " + fdata.HubLink)
		return ErrSubscribed
	}

	data.Link = fdata.HubLink
	data.FeedId = fdata.Id
	data.SubscriptionFailure = true

	s.Data(data)
	s.Update()
	if s.HasErr() {
		return s.Err()
	}

	go func() {
		h.subscribe(s, f, true)
	}()

	return nil
}
func (si SearchIndex) UpdateFeed(feed content.Feed) {
	si.logger.Infof("Updating article search index for feed '%s'\n", feed)

	newArticleLinks := map[string]bool{}
	for _, a := range feed.NewArticles() {
		newArticleLinks[a.Data().Link] = true
	}

	var articles []content.Article
	for _, a := range feed.ParsedArticles() {
		if newArticleLinks[a.Data().Link] {
			articles = append(articles, a)
		}
	}

	si.batchIndex(articles)
}
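
// AddFeed associates the feed with the user, returning a UserFeed whose
// error state reflects any validation or database failure.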
func (u *User) AddFeed(f content.Feed) (uf content.UserFeed) {
	uf = u.Repo().UserFeed(u)
	if u.HasErr() {
		uf.Err(u.Err())
		return
	}

	if err := u.Validate(); err != nil {
		// Propagate the validation error through the returned user feed
		uf.Err(err)
		return
	}

	d := f.Data()
	if f.HasErr() {
		uf.Data(d)
		uf.Err(f.Err())
		return
	}

	if err := f.Validate(); err != nil {
		uf.Err(err)
		return
	}

	login := u.Data().Login
	u.logger.Infof("Getting user feed for user %s and feed %d\n", login, d.Id)

	tx, err := u.db.Beginx()
	if err != nil {
		uf.Err(err)
		return
	}
	defer tx.Rollback()

	stmt, err := tx.Preparex(u.db.SQL().User.CreateFeed)
	if err != nil {
		uf.Err(err)
		return
	}
	defer stmt.Close()

	if _, err = stmt.Exec(login, d.Id); err != nil {
		uf.Err(err)
		return
	}

	if err := tx.Commit(); err != nil {
		uf.Err(err)
	}

	uf.Data(d)
	return
}
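
// addFeeds adds each of the given links to the user's feeds, collecting
// per-link errors instead of aborting on the first failure. As in addFeed,
// a URL fragment such as "#news,tech" (illustrative only) is applied as a
// comma-separated list of tags.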
func addFeeds(user content.User, fm *readeef.FeedManager, links []string) (resp responseError) {
	resp = newResponse()

	var err error
	errs := make([]addFeedError, 0, len(links))

	for _, link := range links {
		var u *url.URL
		if u, err = url.Parse(link); err != nil {
			resp.err = err
			errs = append(errs, addFeedError{Link: link, Error: "Error parsing link"})
			continue
		} else if !u.IsAbs() {
			resp.err = errors.New("Feed has no link")
			errs = append(errs, addFeedError{Link: link, Error: resp.err.Error()})
			continue
		} else {
			var f content.Feed
			if f, err = fm.AddFeedByLink(link); err != nil {
				resp.err = err
				errs = append(errs, addFeedError{Link: link, Error: "Error adding feed to the database"})
				continue
			}

			uf := user.AddFeed(f)
			if uf.HasErr() {
				resp.err = uf.Err()
				errs = append(errs, addFeedError{Link: link, Title: f.Data().Title, Error: "Error adding feed to the database"})
				continue
			}

			tags := strings.Split(u.Fragment, ",")
			if u.Fragment != "" && len(tags) > 0 {
				repo := uf.Repo()
				tf := repo.TaggedFeed(user)
				tf.Data(uf.Data())

				t := make([]content.Tag, len(tags))
				for i := range tags {
					t[i] = repo.Tag(user)
					t[i].Data(data.Tag{Value: data.TagValue(tags[i])})
				}

				tf.Tags(t)
				if tf.UpdateTags(); tf.HasErr() {
					resp.err = tf.Err()
					errs = append(errs, addFeedError{Link: link, Title: f.Data().Title, Error: "Error updating feed tags in the database"})
					continue
				}
			}
		}
	}

	resp.val["Errors"] = errs
	resp.val["Success"] = len(errs) < len(links)
	return
}
func (fm FeedManager) updateFeed(f content.Feed) {
	f.Update()
	if f.HasErr() {
		fm.logger.Printf("Error updating feed '%s' database record: %v\n", f, f.Err())
	} else {
		fm.processFeedUpdateMonitors(f)
	}
}
func (i Unread) FeedUpdated(feed content.Feed) error {
	i.logger.Infof("Adding 'unread' states for all new articles of '%s' for all users\n", feed)

	feed.SetNewArticlesUnread()
	if feed.HasErr() {
		return feed.Err()
	}

	return nil
}
func (si SearchIndex) DeleteFeed(feed content.Feed) error {
	articles := feed.AllArticles()
	if feed.HasErr() {
		return feed.Err()
	}

	si.logger.Infof("Removing all articles from the search index for feed '%s'\n", feed)
	si.batchDelete(articles)

	return nil
}
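
// subscribe sends a hub.subscribe or hub.unsubscribe request to the feed's
// hub, expecting a 202 Accepted response. On failure the error is stored in
// the feed's SubscribeError field and the feed is queued for removal.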
func (h *Hubbub) subscribe(s content.Subscription, f content.Feed, subscribe bool) {
	var err error

	fdata := f.Data()
	u := callbackURL(h.config, h.pattern, fdata.Id)

	body := url.Values{}
	body.Set("hub.callback", u)
	if subscribe {
		h.logger.Infoln("Subscribing to hubbub for " + f.String() + " with url " + u)
		body.Set("hub.mode", "subscribe")
	} else {
		h.logger.Infoln("Unsubscribing from hubbub for " + f.String() + " with url " + u)
		body.Set("hub.mode", "unsubscribe")
	}
	body.Set("hub.topic", fdata.Link)

	buf := util.BufferPool.GetBuffer()
	defer util.BufferPool.Put(buf)

	buf.WriteString(body.Encode())

	req, _ := http.NewRequest("POST", s.Data().Link, buf)
	req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Add("From", h.config.Hubbub.From)

	resp, err := h.client.Do(req)
	if resp != nil {
		defer func() {
			// Drain the body so that the connection can be reused
			io.Copy(ioutil.Discard, resp.Body)
			resp.Body.Close()
		}()
	}

	if err != nil {
		err = SubscriptionError{error: err, Subscription: s}
	} else if resp.StatusCode != 202 {
		err = SubscriptionError{error: errors.New("Expected response status 202, got " + resp.Status), Subscription: s}
	}

	if err != nil {
		fdata.SubscribeError = err.Error()
		h.logger.Printf("Error subscribing to hub feed '%s': %s\n", f, err)

		f.Data(fdata)
		f.Update()
		if f.HasErr() {
			h.logger.Printf("Error updating feed database record for '%s': %s\n", f, f.Err())
		}

		h.removeFeed <- f
	}
}
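
// RemoveFeed unsubscribes from the feed's hub, if one is configured, and
// queues the feed for removal.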
func (fm *FeedManager) RemoveFeed(f content.Feed) {
	if f.Data().HubLink != "" && fm.hubbub != nil {
		fm.hubbub.Unsubscribe(f)
	}

	fm.removeFeed <- f
}
func (fm *FeedManager) stopUpdatingFeed(f content.Feed) {
	if f == nil {
		fm.logger.Infoln("No feed provided")
		return
	}

	data := f.Data()

	fm.logger.Infoln("Stopping feed update for " + data.Link)
	delete(fm.activeFeeds, data.Id)

	users := f.Users()
	if f.HasErr() {
		fm.logger.Printf("Error getting users for feed '%s': %v\n", f, f.Err())
		return
	}

	if len(users) == 0 {
		fm.logger.Infoln("Removing orphan feed " + f.String() + " from the database")

		for _, m := range fm.feedMonitors {
			if err := m.FeedDeleted(f); err != nil {
				fm.logger.Printf(
					"Error invoking monitor '%s' on deleted feed '%s': %v\n",
					reflect.TypeOf(m), f, err)
			}
		}

		f.Delete()
		if f.HasErr() {
			fm.logger.Printf("Error deleting feed '%s' from the repository: %v\n", f, f.Err())
		}
	}
}
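
// scoreFeedContent sends the feed's latest articles to the scoring channel
// and re-schedules itself every 30 minutes while the feed remains active and
// popularity providers are configured.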
func (fm *FeedManager) scoreFeedContent(f content.Feed) {
	if f == nil {
		fm.logger.Infoln("No feed provided")
		return
	}

	data := f.Data()

	if len(fm.config.Popularity.Providers) == 0 {
		fm.logger.Infoln("No popularity providers configured")
		return
	}

	if !fm.activeFeeds[data.Id] {
		fm.logger.Infof("Feed '%s' no longer active for scoring\n", f)
		return
	}

	fm.logger.Infoln("Scoring feed content for " + f.String())

	articles := f.LatestArticles()
	if f.HasErr() {
		fm.logger.Printf("Error getting latest feed articles for '%s': %v\n", f, f.Err())
		return
	}

	for i := range articles {
		sa := fm.repo.Article()
		sa.Data(articles[i].Data())
		fm.scoreArticle <- sa
	}

	fm.logger.Infoln("Done scoring feed content for " + f.String())

	select {
	case <-time.After(30 * time.Minute):
		go fm.scoreFeedContent(f)
	case <-fm.done:
		return
	}
}
func (i Index) FeedUpdated(feed content.Feed) error {
	i.logger.Infof("Updating article search index for feed '%s'\n", feed)

	return i.provider.BatchIndex(feed.NewArticles(), data.BatchAdd)
}