// requestFeedContent fetches the feed's link, skips processing when the
// payload is byte-identical to the previous fetch (md5 comparison), refreshes
// the feed from the parsed content, persists any update error on the feed
// data, and re-schedules the feed unless the manager is shutting down.
func (fm *FeedManager) requestFeedContent(f content.Feed) {
	if f == nil {
		fm.logger.Infoln("No feed provided")
		return
	}

	data := f.Data()

	fm.logger.Infoln("Requesting feed content for " + f.String())

	resp, err := fm.client.Get(data.Link)
	if err != nil {
		data.UpdateError = err.Error()
	} else if resp.StatusCode != http.StatusOK {
		defer func() {
			// Drain the body so that the connection can be reused
			io.Copy(ioutil.Discard, resp.Body)
			resp.Body.Close()
		}()
		data.UpdateError = httpStatusPrefix + strconv.Itoa(resp.StatusCode)
	} else {
		defer resp.Body.Close()

		data.UpdateError = ""

		buf := util.BufferPool.GetBuffer()
		defer util.BufferPool.Put(buf)

		if _, err := buf.ReadFrom(resp.Body); err == nil {
			hash := md5.Sum(buf.Bytes())
			// Identical payload to the last update: nothing to refresh.
			// NOTE(review): this early return also skips the re-scheduling
			// select at the bottom — confirm the feed is re-queued elsewhere.
			if b, ok := fm.lastUpdateHash[data.Id]; ok && bytes.Equal(b[:], hash[:]) {
				fm.logger.Infof("Content of feed %s is the same as the previous update\n", f)
				return
			}
			fm.lastUpdateHash[data.Id] = hash

			if pf, err := parser.ParseFeed(buf.Bytes(), parser.ParseRss2, parser.ParseAtom, parser.ParseRss1); err == nil {
				f.Refresh(fm.processParserFeed(pf))
			} else {
				data.UpdateError = err.Error()
			}
		} else {
			data.UpdateError = err.Error()
		}
	}

	if data.UpdateError != "" {
		fm.logger.Printf("Error updating feed '%s': %s\n", f, data.UpdateError)
	}

	// Write the (possibly error-carrying) data back onto the feed.
	f.Data(data)

	// Re-schedule the feed for its next update unless the manager is done.
	select {
	case <-fm.done:
		return
	default:
		fm.updateFeed(f)
	}
}
func (fm *FeedManager) requestFeedContent(f Feed) Feed { Debug.Println("Requesting feed content for " + f.Link) resp, err := fm.client.Get(f.Link) if err != nil { f.UpdateError = err.Error() } else if resp.StatusCode != http.StatusOK { f.UpdateError = httpStatusPrefix + strconv.Itoa(resp.StatusCode) } else { f.UpdateError = "" buf := util.BufferPool.GetBuffer() defer util.BufferPool.Put(buf) if _, err := buf.ReadFrom(resp.Body); err == nil { if pf, err := parser.ParseFeed(buf.Bytes(), parser.ParseRss2, parser.ParseAtom, parser.ParseRss1); err == nil { f = f.UpdateFromParsed(pf) } else { f.UpdateError = err.Error() } } else { f.UpdateError = err.Error() } } if f.UpdateError != "" { fm.logger.Printf("Error updating feed: %s\n", f.UpdateError) } select { case <-fm.done: return f default: f, newArticles, err := fm.db.UpdateFeed(f) if err != nil { fm.logger.Printf("Error updating feed database record: %v\n", err) } if newArticles { fm.updateFeed <- f } return f } }
func (con HubbubController) Handler(c context.Context) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { params := r.URL.Query() pathParams := webfw.GetParams(c, r) logger := webfw.GetLogger(c) feedId, err := strconv.ParseInt(pathParams["feed-id"], 10, 64) if err != nil { webfw.GetLogger(c).Print(err) return } repo := con.hubbub.repo f := repo.FeedById(data.FeedId(feedId)) s := f.Subscription() err = s.Err() if err != nil { webfw.GetLogger(c).Print(err) return } logger.Infoln("Receiving hubbub event " + params.Get("hub.mode") + " for " + f.String()) data := s.Data() switch params.Get("hub.mode") { case "subscribe": if lease, err := strconv.Atoi(params.Get("hub.lease_seconds")); err == nil { data.LeaseDuration = int64(lease) * int64(time.Second) } data.VerificationTime = time.Now() w.Write([]byte(params.Get("hub.challenge"))) case "unsubscribe": w.Write([]byte(params.Get("hub.challenge"))) case "denied": w.Write([]byte{}) webfw.GetLogger(c).Printf("Unable to subscribe to '%s': %s\n", params.Get("hub.topic"), params.Get("hub.reason")) default: w.Write([]byte{}) buf := util.BufferPool.GetBuffer() defer util.BufferPool.Put(buf) if _, err := buf.ReadFrom(r.Body); err != nil { webfw.GetLogger(c).Print(err) return } newArticles := false if pf, err := parser.ParseFeed(buf.Bytes(), parser.ParseRss2, parser.ParseAtom, parser.ParseRss1); err == nil { f.Refresh(pf) f.Update() if f.HasErr() { webfw.GetLogger(c).Print(f.Err()) return } newArticles = len(f.NewArticles()) > 0 } else { webfw.GetLogger(c).Print(err) return } if newArticles { con.hubbub.NotifyReceivers(f) } return } switch params.Get("hub.mode") { case "subscribe": data.SubscriptionFailure = false case "unsubscribe", "denied": data.SubscriptionFailure = true } s.Data(data) s.Update() if s.HasErr() { webfw.GetLogger(c).Print(s.Err()) return } if data.SubscriptionFailure { con.hubbub.removeFeed <- f } else { con.hubbub.addFeed <- f } }) }
// Handler returns the http.Handler processing PubSubHubbub callbacks for a
// feed identified by the "feed-id" path parameter. Subscription and
// unsubscribe requests answer the hub's challenge; denials are logged;
// anything else is treated as pushed feed content, parsed, and applied to
// the feed, after which each registered feed monitor is notified.
func (con HubbubController) Handler(c context.Context) http.Handler {
	logger := webfw.GetLogger(c)
	repo := readeef.GetRepo(c)

	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		params := r.URL.Query()
		pathParams := webfw.GetParams(c, r)

		feedId, err := strconv.ParseInt(pathParams["feed-id"], 10, 64)
		if err != nil {
			logger.Print(err)
			return
		}

		f := repo.FeedById(data.FeedId(feedId))
		s := f.Subscription()

		err = s.Err()
		if err != nil {
			logger.Print(err)
			return
		}

		logger.Infoln("Receiving hubbub event " + params.Get("hub.mode") + " for " + f.String())

		data := s.Data()
		switch params.Get("hub.mode") {
		case "subscribe":
			// Record the lease duration offered by the hub and echo the
			// verification challenge back, confirming the subscription.
			if lease, err := strconv.Atoi(params.Get("hub.lease_seconds")); err == nil {
				data.LeaseDuration = int64(lease) * int64(time.Second)
			}
			data.VerificationTime = time.Now()

			w.Write([]byte(params.Get("hub.challenge")))
		case "unsubscribe":
			// Nothing to do here, the subscription will be removed along with the feed by the manager
			w.Write([]byte(params.Get("hub.challenge")))
		case "denied":
			w.Write([]byte{})
			logger.Printf("Unable to subscribe to '%s': %s\n", params.Get("hub.topic"), params.Get("hub.reason"))
		default:
			// Content push: acknowledge immediately, then parse the request
			// body as a feed payload and refresh the stored feed.
			w.Write([]byte{})

			buf := util.BufferPool.GetBuffer()
			defer util.BufferPool.Put(buf)

			if _, err := buf.ReadFrom(r.Body); err != nil {
				logger.Print(err)
				return
			}

			newArticles := false

			if pf, err := parser.ParseFeed(buf.Bytes(), parser.ParseRss2, parser.ParseAtom, parser.ParseRss1); err == nil {
				f.Refresh(pf)
				f.Update()
				if f.HasErr() {
					logger.Print(f.Err())
					return
				}

				newArticles = len(f.NewArticles()) > 0
			} else {
				logger.Print(err)
				return
			}

			if newArticles {
				// Monitor errors are logged but do not stop the remaining
				// monitors from being invoked.
				for _, m := range con.hubbub.FeedMonitors() {
					if err := m.FeedUpdated(f); err != nil {
						logger.Printf("Error invoking monitor '%s' on updated feed '%s': %v\n", reflect.TypeOf(m), f, err)
					}
				}
			}

			// Content pushes never modify the subscription record below.
			return
		}

		switch params.Get("hub.mode") {
		case "subscribe":
			data.SubscriptionFailure = false
		case "unsubscribe", "denied":
			data.SubscriptionFailure = true
		}

		s.Data(data)
		s.Update()
		if s.HasErr() {
			logger.Print(fmt.Errorf("Error updating subscription %s: %v\n", s, s.Err()))
			return
		}

		// NOTE(review): these channels live on the controller here, unlike the
		// repo-based variant which sends on con.hubbub — confirm intentional.
		if data.SubscriptionFailure {
			con.removeFeed <- f
		} else {
			con.addFeed <- f
		}
	})
}
func discoverParserFeeds(link string) ([]Feed, error) { resp, err := http.Get(link) if err != nil { return []Feed{}, err } buf := util.BufferPool.GetBuffer() defer util.BufferPool.Put(buf) buf.ReadFrom(resp.Body) if parserFeed, err := parser.ParseFeed(buf.Bytes(), parser.ParseRss2, parser.ParseAtom, parser.ParseRss1); err == nil { feed := Feed{Link: link} feed = feed.UpdateFromParsed(parserFeed) return []Feed{feed}, nil } else { html := commentPattern.ReplaceAllString(buf.String(), "") links := linkPattern.FindAllStringSubmatch(html, -1) feeds := []Feed{} for _, l := range links { attrs := l[1] if strings.Contains(attrs, `"application/rss+xml"`) || strings.Contains(attrs, `'application/rss+xml'`) { index := strings.Index(attrs, "href=") attr := attrs[index+6:] index = strings.IndexByte(attr, attrs[index+5]) href := attr[:index] if u, err := url.Parse(href); err != nil { return []Feed{}, err } else { if !u.IsAbs() { l, _ := url.Parse(link) u.Scheme = l.Scheme if u.Host == "" { u.Host = l.Host } href = u.String() } Debug.Printf("Checking if '%s' is a valid feed link\n", href) fs, err := discoverParserFeeds(href) if err != nil { return []Feed{}, err } feeds = append(feeds, fs[0]) } } } if len(feeds) != 0 { return feeds, nil } } return []Feed{}, ErrNoFeed }
func (con HubbubController) Handler(c context.Context) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { defer r.Body.Close() params := r.URL.Query() pathParams := webfw.GetParams(c, r) feedId, err := strconv.ParseInt(pathParams["feed-id"], 10, 64) if err != nil { webfw.GetLogger(c).Print(err) return } s, err := con.hubbub.db.GetHubbubSubscription(feedId) if err != nil { webfw.GetLogger(c).Print(err) return } f, err := con.hubbub.db.GetFeed(s.FeedId) if err != nil { webfw.GetLogger(c).Print(err) return } Debug.Println("Receiving hubbub event " + params.Get("hub.mode") + " for " + f.Link) switch params.Get("hub.mode") { case "subscribe": if lease, err := strconv.Atoi(params.Get("hub.lease_seconds")); err == nil { s.LeaseDuration = int64(lease) * int64(time.Second) } s.VerificationTime = time.Now() w.Write([]byte(params.Get("hub.challenge"))) case "unsubscribe": w.Write([]byte(params.Get("hub.challenge"))) case "denied": w.Write([]byte{}) webfw.GetLogger(c).Printf("Unable to subscribe to '%s': %s\n", params.Get("hub.topic"), params.Get("hub.reason")) default: w.Write([]byte{}) buf := util.BufferPool.GetBuffer() defer util.BufferPool.Put(buf) if _, err := buf.ReadFrom(r.Body); err != nil { webfw.GetLogger(c).Print(err) return } newArticles := false if pf, err := parser.ParseFeed(buf.Bytes(), parser.ParseRss2, parser.ParseAtom, parser.ParseRss1); err == nil { f = f.UpdateFromParsed(pf) _, newArticles, err = con.hubbub.db.UpdateFeed(f) if err != nil { webfw.GetLogger(c).Print(err) return } } else { webfw.GetLogger(c).Print(err) return } if newArticles { con.hubbub.updateFeed <- f } return } switch params.Get("hub.mode") { case "subscribe": s.SubscriptionFailure = false case "unsubscribe", "denied": s.SubscriptionFailure = true } if err := con.hubbub.db.UpdateHubbubSubscription(s); err != nil { webfw.GetLogger(c).Print(err) return } if s.SubscriptionFailure { con.hubbub.removeFeed <- f } else { con.hubbub.addFeed <- f } } }
// discoverParserFeeds fetches link and tries to interpret the payload as a
// feed. If parsing fails, the payload is treated as HTML: comments are
// stripped, <link> tags with type "application/rss+xml" are collected, and
// each candidate href (made absolute relative to link when needed) is probed
// recursively. Returns ErrNoFeed when no feed can be found.
func (fm FeedManager) discoverParserFeeds(link string) ([]content.Feed, error) {
	fm.logger.Debugf("Fetching feed link body %s\n", link)

	resp, err := http.Get(link)
	if err != nil {
		return []content.Feed{}, err
	}
	defer resp.Body.Close()

	buf := util.BufferPool.GetBuffer()
	defer util.BufferPool.Put(buf)

	// Read error intentionally unchecked: an unreadable body leaves the
	// buffer empty, the parse below fails, and ErrNoFeed is returned.
	buf.ReadFrom(resp.Body)

	if parserFeed, err := parser.ParseFeed(buf.Bytes(), parser.ParseRss2, parser.ParseAtom, parser.ParseRss1); err == nil {
		fm.logger.Debugf("Discovering link %s contains feed data\n", link)

		feed := fm.repo.Feed()
		feed.Data(data.Feed{Link: link})
		feed.Refresh(fm.processParserFeed(parserFeed))

		return []content.Feed{feed}, nil
	} else {
		fm.logger.Debugf("Searching for html links within the discovering link %s\n", link)

		html := commentPattern.ReplaceAllString(buf.String(), "")
		links := linkPattern.FindAllStringSubmatch(html, -1)

		feeds := []content.Feed{}
		for _, l := range links {
			attrs := l[1]
			if strings.Contains(attrs, `"application/rss+xml"`) || strings.Contains(attrs, `'application/rss+xml'`) {
				// attrs[index+5] is the quote character opening the href
				// value; the value runs up to its matching quote.
				index := strings.Index(attrs, "href=")
				attr := attrs[index+6:]
				index = strings.IndexByte(attr, attrs[index+5])
				href := attr[:index]

				if u, err := url.Parse(href); err != nil {
					// NOTE(review): the parse error is discarded in favor of
					// ErrNoFeed here, unlike the older free-function variant
					// which propagated err — confirm this is intentional.
					return []content.Feed{}, ErrNoFeed
				} else {
					if !u.IsAbs() {
						// Resolve a relative href against the original link.
						l, _ := url.Parse(link)
						u.Scheme = l.Scheme
						if u.Host == "" {
							u.Host = l.Host
						}
						href = u.String()
					}

					fs, err := fm.discoverParserFeeds(href)
					if err != nil {
						return []content.Feed{}, err
					}
					feeds = append(feeds, fs[0])
				}
			}
		}

		if len(feeds) != 0 {
			return feeds, nil
		}
	}

	return []content.Feed{}, ErrNoFeed
}