func (f *Feed) Update() { if f.HasErr() { return } if err := f.Validate(); err != nil { f.Err(err) return } i := f.Data() id := i.Id s := f.db.SQL() f.logger.Infof("Updating feed %d\n", id) tx, err := f.db.Beginx() if err != nil { f.Err(err) return } defer tx.Rollback() stmt, err := tx.Preparex(s.Feed.Update) if err != nil { f.Err(err) return } defer stmt.Close() res, err := stmt.Exec(i.Link, i.Title, i.Description, i.HubLink, i.SiteLink, i.UpdateError, i.SubscribeError, id) if err != nil { f.Err(err) return } if num, err := res.RowsAffected(); err != nil || num == 0 { id, err := f.db.CreateWithId(tx, s.Feed.Create, i.Link, i.Title, i.Description, i.HubLink, i.SiteLink, i.UpdateError, i.SubscribeError) if err != nil { f.Err(err) return } i.Id = data.FeedId(id) f.Data(i) } articles := f.updateFeedArticles(tx, f.ParsedArticles()) if f.HasErr() { return } f.newArticles = articles tx.Commit() }
func readState(user content.User, id string, beforeId data.ArticleId, timestamp int64) (resp responseError) { resp = newResponse() var ar content.ArticleRepo o := data.ArticleUpdateStateOptions{} if timestamp > 0 { t := time.Unix(timestamp/1000, 0) o.BeforeDate = t } if beforeId > 0 { o.BeforeId = beforeId } switch { case id == "all": ar = user case id == "favorite": o.FavoriteOnly = true ar = user case strings.HasPrefix(id, "popular:"): // Can't bulk set state to popular articles case strings.HasPrefix(id, "tag:"): tag := user.Repo().Tag(user) tag.Data(data.Tag{Value: data.TagValue(id[4:])}) ar = tag default: var feedId int64 if feedId, resp.err = strconv.ParseInt(id, 10, 64); resp.err != nil { /* TODO: non-fatal error */ return } ar = user.FeedById(data.FeedId(feedId)) } if ar != nil { ar.ReadState(true, o) if e, ok := ar.(content.Error); ok && e.HasErr() { resp.err = e.Err() return } } resp.val["Success"] = true return }
func search(user content.User, searchIndex readeef.SearchIndex, query, highlight, feedId string) (resp responseError) { resp = newResponse() if strings.HasPrefix(feedId, "tag:") { tag := user.Repo().Tag(user) tag.Value(data.TagValue(feedId[4:])) tag.Highlight(highlight) resp.val["Articles"], resp.err = tag.Query(query, searchIndex.Index), tag.Err() } else { if id, err := strconv.ParseInt(feedId, 10, 64); err == nil { f := user.FeedById(data.FeedId(id)) resp.val["Articles"], resp.err = f.Query(query, searchIndex.Index), f.Err() } else { user.Highlight(highlight) resp.val["Articles"], resp.err = user.Query(query, searchIndex.Index), user.Err() } } return }
func performSearch(user content.User, sp content.SearchProvider, query, feedId string, limit, offset int) (ua []content.UserArticle, err error) { defer func() { if rec := recover(); rec != nil { err = fmt.Errorf("Error during search: %s", rec) } }() if strings.HasPrefix(feedId, "tag:") { tag := user.Repo().Tag(user) tag.Data(data.Tag{Value: data.TagValue(feedId[4:])}) ua, err = tag.Query(query, sp, limit, offset), tag.Err() } else { if id, err := strconv.ParseInt(feedId, 10, 64); err == nil { f := user.FeedById(data.FeedId(id)) ua, err = f.Query(query, sp, limit, offset), f.Err() } else { ua, err = user.Query(query, sp, limit, offset), user.Err() } } return }
func markFeedAsRead(user content.User, id string, timestamp int64) (resp responseError) { resp = newResponse() t := time.Unix(timestamp/1000, 0) switch { case id == "all": if user.ReadBefore(t, true); user.HasErr() { resp.err = user.Err() return } case id == "favorite" || strings.HasPrefix(id, "popular:"): // Favorites are assumbed to have been read already case strings.HasPrefix(id, "tag:"): tag := user.Repo().Tag(user) tag.Value(data.TagValue(id[4:])) if tag.ReadBefore(t, true); tag.HasErr() { resp.err = tag.Err() return } default: var feedId int64 if feedId, resp.err = strconv.ParseInt(id, 10, 64); resp.err != nil { /* TODO: non-fatal error */ return } feed := user.FeedById(data.FeedId(feedId)) if feed.ReadBefore(t, true); feed.HasErr() { resp.err = feed.Err() return } } resp.val["Success"] = true return }
// Handler returns an http.Handler that processes PubSubHubbub callbacks for a
// single feed, identified by the "feed-id" path parameter. It answers
// subscription verification challenges (subscribe/unsubscribe), logs denials,
// and treats any other request as pushed feed content to be parsed and
// ingested.
func (con HubbubController) Handler(c context.Context) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		params := r.URL.Query()
		pathParams := webfw.GetParams(c, r)

		logger := webfw.GetLogger(c)

		feedId, err := strconv.ParseInt(pathParams["feed-id"], 10, 64)

		if err != nil {
			webfw.GetLogger(c).Print(err)
			return
		}

		repo := con.hubbub.repo
		f := repo.FeedById(data.FeedId(feedId))
		s := f.Subscription()

		err = s.Err()
		if err != nil {
			webfw.GetLogger(c).Print(err)
			return
		}

		logger.Infoln("Receiving hubbub event " + params.Get("hub.mode") + " for " + f.String())

		data := s.Data()

		switch params.Get("hub.mode") {
		case "subscribe":
			// The hub confirms a subscription: record the lease duration (if
			// supplied) and echo the challenge to complete verification.
			if lease, err := strconv.Atoi(params.Get("hub.lease_seconds")); err == nil {
				data.LeaseDuration = int64(lease) * int64(time.Second)
			}
			data.VerificationTime = time.Now()

			w.Write([]byte(params.Get("hub.challenge")))
		case "unsubscribe":
			w.Write([]byte(params.Get("hub.challenge")))
		case "denied":
			w.Write([]byte{})
			webfw.GetLogger(c).Printf("Unable to subscribe to '%s': %s\n", params.Get("hub.topic"), params.Get("hub.reason"))
		default:
			// No recognized hub.mode: the request body is pushed feed
			// content. Parse it, refresh/update the feed, and notify
			// receivers when new articles appeared.
			w.Write([]byte{})

			buf := util.BufferPool.GetBuffer()
			defer util.BufferPool.Put(buf)

			if _, err := buf.ReadFrom(r.Body); err != nil {
				webfw.GetLogger(c).Print(err)
				return
			}

			newArticles := false

			if pf, err := parser.ParseFeed(buf.Bytes(), parser.ParseRss2, parser.ParseAtom, parser.ParseRss1); err == nil {
				f.Refresh(pf)
				f.Update()

				if f.HasErr() {
					webfw.GetLogger(c).Print(f.Err())
					return
				}

				newArticles = len(f.NewArticles()) > 0
			} else {
				webfw.GetLogger(c).Print(err)
				return
			}

			if newArticles {
				con.hubbub.NotifyReceivers(f)
			}

			// Content pushes never touch the subscription state below.
			return
		}

		// Only subscribe/unsubscribe/denied reach this point: persist the
		// resulting subscription state and (re)schedule or drop the feed.
		switch params.Get("hub.mode") {
		case "subscribe":
			data.SubscriptionFailure = false
		case "unsubscribe", "denied":
			data.SubscriptionFailure = true
		}

		s.Data(data)
		s.Update()
		if s.HasErr() {
			webfw.GetLogger(c).Print(s.Err())
			return
		}

		if data.SubscriptionFailure {
			con.hubbub.removeFeed <- f
		} else {
			con.hubbub.addFeed <- f
		}
	})
}
// getFeedArticles fetches a page of articles for the target denoted by id:
// "all", "favorite", "popular:..." (scored recent articles), "tag:<value>",
// "search:..." (full-text search via sp), or a numeric feed id. Results,
// paging information and (when minId > 0) unread/favorite id sets are placed
// in resp.val.
func getFeedArticles(user content.User, sp content.SearchProvider, id string, minId, maxId data.ArticleId, limit int, offset int, olderFirst bool, unreadOnly bool) (resp responseError) {
	resp = newResponse()

	// Hard cap on page size.
	if limit > 200 {
		limit = 200
	}

	var as content.ArticleSorting
	var ar content.ArticleRepo
	var ua []content.UserArticle

	o := data.ArticleQueryOptions{Limit: limit, Offset: offset, UnreadOnly: unreadOnly, UnreadFirst: true}

	if maxId > 0 {
		o.AfterId = maxId
		resp.val["MaxId"] = maxId
	}

	if id == "favorite" {
		o.FavoriteOnly = true
		ar = user
		as = user
	} else if id == "all" {
		ar = user
		as = user
	} else if strings.HasPrefix(id, "popular:") {
		// Popular targets: highest-scored articles from the last 5 days.
		o.IncludeScores = true
		o.HighScoredFirst = true
		o.BeforeDate = time.Now()
		o.AfterDate = time.Now().AddDate(0, 0, -5)

		if id == "popular:all" {
			ar = user
			as = user
		} else if strings.HasPrefix(id, "popular:tag:") {
			tag := user.Repo().Tag(user)
			tag.Data(data.Tag{Value: data.TagValue(id[12:])})

			ar = tag
			as = tag
		} else {
			var f content.UserFeed
			var feedId int64

			feedId, resp.err = strconv.ParseInt(id[8:], 10, 64)
			if resp.err != nil {
				resp.err = errors.New("Unknown feed id " + id)
				return
			}

			if f = user.FeedById(data.FeedId(feedId)); f.HasErr() {
				/* TODO: non-fatal error */
				resp.err = f.Err()
				return
			}

			ar = f
			as = f
		}
	} else if strings.HasPrefix(id, "search:") && sp != nil {
		// "search:tag:<v>:<query>" or "search:<feed-id>:<query>" - split the
		// target from the query, then delegate to performSearch.
		var query string
		id = id[7:]
		parts := strings.Split(id, ":")

		if parts[0] == "tag" {
			id = strings.Join(parts[:2], ":")
			query = strings.Join(parts[2:], ":")
		} else {
			id = strings.Join(parts[:1], ":")
			query = strings.Join(parts[1:], ":")
		}

		sp.SortingByDate()
		if olderFirst {
			sp.Order(data.AscendingOrder)
		} else {
			sp.Order(data.DescendingOrder)
		}

		ua, resp.err = performSearch(user, sp, query, id, limit, offset)
	} else if strings.HasPrefix(id, "tag:") {
		tag := user.Repo().Tag(user)
		tag.Data(data.Tag{Value: data.TagValue(id[4:])})

		as = tag
		ar = tag
	} else {
		var f content.UserFeed
		var feedId int64

		feedId, resp.err = strconv.ParseInt(id, 10, 64)
		if resp.err != nil {
			resp.err = errors.New("Unknown feed id " + id)
			return
		}

		if f = user.FeedById(data.FeedId(feedId)); f.HasErr() {
			/* TODO: non-fatal error */
			resp.err = f.Err()
			return
		}

		as = f
		ar = f
	}

	// Apply date sorting to whichever repo was selected (nil for searches).
	if as != nil {
		as.SortingByDate()
		if olderFirst {
			as.Order(data.AscendingOrder)
		} else {
			as.Order(data.DescendingOrder)
		}
	}

	if ar != nil {
		ua = ar.Articles(o)

		// When a lower bound is known, also report which ids in the
		// [minId, maxId] window are unread or favorite so the client can
		// reconcile its local state.
		if minId > 0 {
			qo := data.ArticleIdQueryOptions{BeforeId: maxId + 1, AfterId: minId - 1}

			qo.UnreadOnly = true
			resp.val["UnreadIds"] = ar.Ids(qo)

			qo.UnreadOnly = false
			qo.FavoriteOnly = true
			resp.val["FavoriteIds"] = ar.Ids(qo)

			resp.val["MinId"] = minId
		}

		if e, ok := ar.(content.Error); ok && e.HasErr() {
			resp.err = e.Err()
		}
	}

	resp.val["Articles"] = ua
	resp.val["Limit"] = limit
	resp.val["Offset"] = offset

	return
}
// Handler dispatches the feed API endpoints (list, discover, opml import and
// export, add/remove, tags, read-state, article paging) based on the matched
// route pattern, then serializes the resulting responseError as JSON. The
// sentinel errors ErrNoAbsolute/ErrNoFeed are converted into structured
// "Error"/"ErrorType" payloads instead of a 500.
func (con Feed) Handler(c context.Context) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		action := webfw.GetMultiPatternIdentifier(c, r)
		user := readeef.GetUser(c, r)

		r.ParseForm()

		var resp responseError
		var feedId int64

		params := webfw.GetParams(c, r)

		switch action {
		case "list":
			resp = listFeeds(user)
		case "discover":
			link := r.FormValue("url")
			resp = discoverFeeds(user, con.fm, link)
		case "opml-export":
			resp = exportOpml(user)
		case "opml":
			buf := util.BufferPool.GetBuffer()
			defer util.BufferPool.Put(buf)

			buf.ReadFrom(r.Body)

			resp = parseOpml(user, con.fm, buf.Bytes())
		case "add":
			links := r.Form["url"]
			resp = addFeeds(user, con.fm, links)
		case "remove":
			if feedId, resp.err = strconv.ParseInt(params["feed-id"], 10, 64); resp.err == nil {
				resp = removeFeed(user, con.fm, data.FeedId(feedId))
			}
		case "tags":
			// GET returns the feed's tags; POST replaces them with the JSON
			// array of tag values in the request body.
			if feedId, resp.err = strconv.ParseInt(params["feed-id"], 10, 64); resp.err == nil {
				if r.Method == "GET" {
					resp = getFeedTags(user, data.FeedId(feedId))
				} else if r.Method == "POST" {
					if b, err := ioutil.ReadAll(r.Body); err == nil {
						tags := []data.TagValue{}
						if err = json.Unmarshal(b, &tags); err != nil {
							resp.err = fmt.Errorf("Error decoding request body: %s", err)
							break
						}
						resp = setFeedTags(user, data.FeedId(feedId), tags)
					} else {
						resp.err = fmt.Errorf("Error reading request body: %s", err)
						break
					}
				}
			}
		case "read":
			// The read-state cutoff is either an article id ("before-id") or
			// a millisecond timestamp - never both.
			var timestamp, beforeId int64

			if bid, ok := params["before-id"]; ok {
				beforeId, resp.err = strconv.ParseInt(bid, 10, 64)
			} else {
				timestamp, resp.err = strconv.ParseInt(params["timestamp"], 10, 64)
			}

			if resp.err == nil {
				resp = readState(user, params["feed-id"], data.ArticleId(beforeId), timestamp)
			}
		case "articles":
			var limit, offset int
			if limit, resp.err = strconv.Atoi(params["limit"]); resp.err == nil {
				if offset, resp.err = strconv.Atoi(params["offset"]); resp.err == nil {
					// min-id/max-id are optional; parse failures leave 0.
					minId, _ := strconv.ParseInt(params["min-id"], 10, 64)
					maxId, _ := strconv.ParseInt(params["max-id"], 10, 64)

					resp = getFeedArticles(user, con.sp, params["feed-id"], data.ArticleId(minId), data.ArticleId(maxId), limit, offset, params["older-first"] == "true", params["unread-only"] == "true")
				}
			}
		}

		// Known sentinel errors become structured payloads, not a 500.
		switch resp.err {
		case readeef.ErrNoAbsolute:
			resp.val["Error"] = true
			resp.val["ErrorType"] = errTypeNoAbsolute
			resp.err = nil
		case readeef.ErrNoFeed:
			resp.val["Error"] = true
			resp.val["ErrorType"] = errTypeNoFeed
			resp.err = nil
		}

		var b []byte
		if resp.err == nil {
			b, resp.err = json.Marshal(resp.val)
		}

		if resp.err == nil {
			w.Write(b)
		} else {
			webfw.GetLogger(c).Print(resp.err)
			w.WriteHeader(http.StatusInternalServerError)
		}
	})
}
// Handler returns an http.Handler that processes PubSubHubbub callbacks for a
// single feed, identified by the "feed-id" path parameter. Verification
// challenges are answered for subscribe/unsubscribe, denials are logged, and
// any other request is treated as pushed feed content: it is parsed, the feed
// updated, and all registered feed monitors notified of new articles.
func (con HubbubController) Handler(c context.Context) http.Handler {
	logger := webfw.GetLogger(c)
	repo := readeef.GetRepo(c)

	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		params := r.URL.Query()
		pathParams := webfw.GetParams(c, r)

		feedId, err := strconv.ParseInt(pathParams["feed-id"], 10, 64)

		if err != nil {
			logger.Print(err)
			return
		}

		f := repo.FeedById(data.FeedId(feedId))
		s := f.Subscription()

		err = s.Err()
		if err != nil {
			logger.Print(err)
			return
		}

		logger.Infoln("Receiving hubbub event " + params.Get("hub.mode") + " for " + f.String())

		data := s.Data()

		switch params.Get("hub.mode") {
		case "subscribe":
			// The hub confirms a subscription: record the lease duration (if
			// supplied) and echo the challenge to complete verification.
			if lease, err := strconv.Atoi(params.Get("hub.lease_seconds")); err == nil {
				data.LeaseDuration = int64(lease) * int64(time.Second)
			}
			data.VerificationTime = time.Now()

			w.Write([]byte(params.Get("hub.challenge")))
		case "unsubscribe":
			// Nothing to do here, the subscription will be removed along with the feed by the manager
			w.Write([]byte(params.Get("hub.challenge")))
		case "denied":
			w.Write([]byte{})
			logger.Printf("Unable to subscribe to '%s': %s\n", params.Get("hub.topic"), params.Get("hub.reason"))
		default:
			// No recognized hub.mode: the request body is pushed feed
			// content. Parse it, update the feed, and invoke every feed
			// monitor when new articles appeared.
			w.Write([]byte{})

			buf := util.BufferPool.GetBuffer()
			defer util.BufferPool.Put(buf)

			if _, err := buf.ReadFrom(r.Body); err != nil {
				logger.Print(err)
				return
			}

			newArticles := false

			if pf, err := parser.ParseFeed(buf.Bytes(), parser.ParseRss2, parser.ParseAtom, parser.ParseRss1); err == nil {
				f.Refresh(pf)
				f.Update()

				if f.HasErr() {
					logger.Print(f.Err())
					return
				}

				newArticles = len(f.NewArticles()) > 0
			} else {
				logger.Print(err)
				return
			}

			if newArticles {
				// Monitor failures are logged but do not abort the others.
				for _, m := range con.hubbub.FeedMonitors() {
					if err := m.FeedUpdated(f); err != nil {
						logger.Printf("Error invoking monitor '%s' on updated feed '%s': %v\n", reflect.TypeOf(m), f, err)
					}
				}
			}

			// Content pushes never touch the subscription state below.
			return
		}

		// Only subscribe/unsubscribe/denied reach this point: persist the
		// resulting subscription state and (re)schedule or drop the feed.
		switch params.Get("hub.mode") {
		case "subscribe":
			data.SubscriptionFailure = false
		case "unsubscribe", "denied":
			data.SubscriptionFailure = true
		}

		s.Data(data)
		s.Update()
		if s.HasErr() {
			logger.Print(fmt.Errorf("Error updating subscription %s: %v\n", s, s.Err()))
			return
		}

		if data.SubscriptionFailure {
			con.removeFeed <- f
		} else {
			con.addFeed <- f
		}
	})
}
// Handler implements the Tiny Tiny RSS JSON API on top of the readeef
// repository. Each request carries an "op" selecting the operation; the
// session id ("sid") maps to a login in the global ttRssSessions table.
// Results are marshaled into resp.Content; failures set errType and/or err
// and produce a TTRSS_API_STATUS_ERR response with an error payload.
func (controller TtRss) Handler(c context.Context) http.Handler {
	repo := readeef.GetRepo(c)
	logger := webfw.GetLogger(c)
	config := readeef.GetConfig(c)

	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		action := webfw.GetMultiPatternIdentifier(c, r)

		if action == "redirecter" {
			// NOTE(review): there is no 'return' after the redirect, so the
			// handler keeps processing the request body - confirm whether the
			// fall-through is intentional.
			http.Redirect(w, r, "/", http.StatusMovedPermanently)
		}

		req := ttRssRequest{}

		resp := ttRssResponse{}

		var err error
		var errType string
		var user content.User
		var con interface{}

		// switch{default:} is used as a breakable block: any 'break' below
		// jumps straight to the response-marshaling code at the bottom.
		switch {
		default:
			var b []byte
			in := map[string]interface{}{}

			if b, err = ioutil.ReadAll(r.Body); err != nil {
				err = fmt.Errorf("reading request body: %s", err)
				break
			}

			if err = json.Unmarshal(b, &in); err != nil {
				err = fmt.Errorf("decoding JSON request: %s", err)
				break
			}

			req = ttRssConvertRequest(in)

			logger.Debugf("Request: %#v\n", req)

			resp.Seq = req.Seq

			// Every op except login/isLoggedIn requires a valid session.
			// NOTE(review): ttRssSessions is a package-level map read and
			// written from request goroutines - confirm it is synchronized
			// elsewhere.
			if req.Op != "login" && req.Op != "isLoggedIn" {
				if sess, ok := ttRssSessions[req.Sid]; ok {
					user = repo.UserByLogin(data.Login(sess.login))
					if repo.Err() != nil {
						errType = "NOT_LOGGED_IN"
					} else {
						sess.lastVisit = time.Now()
						ttRssSessions[req.Sid] = sess
					}
				} else {
					errType = "NOT_LOGGED_IN"
				}
			}

			if errType != "" {
				logger.Debugf("TT-RSS Sessions: %#v\n", ttRssSessions)
				break
			}

			logger.Debugf("TT-RSS OP: %s\n", req.Op)

			switch req.Op {
			case "getApiLevel":
				con = ttRssGenericContent{Level: TTRSS_API_LEVEL}
			case "getVersion":
				con = ttRssGenericContent{Version: TTRSS_VERSION}
			case "login":
				// Authenticate and reuse an existing session for this login,
				// creating a fresh UUID-based session id otherwise.
				user = repo.UserByLogin(data.Login(req.User))
				if repo.Err() != nil {
					errType = "LOGIN_ERROR"
					err = fmt.Errorf("getting TT-RSS user: %s", repo.Err())
					break
				}

				if !user.Authenticate(req.Password, []byte(config.Auth.Secret)) {
					errType = "LOGIN_ERROR"
					err = fmt.Errorf("authentication for TT-RSS user '%s'", user.Data().Login)
					break
				}

				var sessId string

				login := user.Data().Login

				for id, sess := range ttRssSessions {
					if sess.login == login {
						sessId = id
					}
				}

				if sessId == "" {
					sessId = strings.Replace(util.UUID(), "-", "", -1)
					ttRssSessions[sessId] = ttRssSession{login: login, lastVisit: time.Now()}
				}

				con = ttRssGenericContent{
					ApiLevel:  TTRSS_API_LEVEL,
					SessionId: sessId,
				}
			case "logout":
				delete(ttRssSessions, req.Sid)
				con = ttRssGenericContent{Status: "OK"}
			case "isLoggedIn":
				if _, ok := ttRssSessions[req.Sid]; ok {
					con = ttRssGenericContent{Status: true}
				} else {
					con = ttRssGenericContent{Status: false}
				}
			case "getUnread":
				// Resolve the unread counter source: a tag, the special
				// categories, or a single feed.
				var ar content.ArticleRepo
				o := data.ArticleCountOptions{UnreadOnly: true}

				if req.IsCat {
					tagId := data.TagId(req.FeedId)
					if tagId > 0 {
						ar = user.TagById(tagId)
					} else if tagId == TTRSS_CAT_UNCATEGORIZED {
						ar = user
						o.UntaggedOnly = true
					} else if tagId == TTRSS_CAT_SPECIAL {
						ar = user
						o.FavoriteOnly = true
					}
				} else {
					switch req.FeedId {
					case TTRSS_FAVORITE_ID:
						ar = user
						o.FavoriteOnly = true
					case TTRSS_FRESH_ID:
						ar = user
						o.AfterDate = time.Now().Add(TTRSS_FRESH_DURATION)
					case TTRSS_ALL_ID, 0:
						ar = user
					default:
						if req.FeedId > 0 {
							feed := user.FeedById(req.FeedId)
							if feed.HasErr() {
								err = feed.Err()
								break
							}

							ar = feed
						}
					}
				}

				if ar == nil {
					con = ttRssGenericContent{Unread: "0"}
				} else if con == nil {
					con = ttRssGenericContent{Unread: strconv.FormatInt(ar.Count(o), 10)}
				}
			case "getCounters":
				// Emit unread/favorite counters for the special ids, every
				// feed, every tag, and the uncategorized pseudo-category.
				if req.OutputMode == "" {
					req.OutputMode = "flc"
				}
				cContent := ttRssCountersContent{}

				o := data.ArticleCountOptions{UnreadOnly: true}
				unreadCount := user.Count(o)
				cContent = append(cContent, ttRssCounter{Id: "global-unread", Counter: unreadCount})

				feeds := user.AllFeeds()
				cContent = append(cContent, ttRssCounter{Id: "subscribed-feeds", Counter: int64(len(feeds))})

				cContent = append(cContent, ttRssCounter{Id: TTRSS_ARCHIVED_ID})

				cContent = append(cContent, ttRssCounter{Id: TTRSS_FAVORITE_ID, Counter: user.Count(data.ArticleCountOptions{UnreadOnly: true, FavoriteOnly: true}), AuxCounter: user.Count(data.ArticleCountOptions{FavoriteOnly: true})})

				cContent = append(cContent, ttRssCounter{Id: TTRSS_PUBLISHED_ID})

				freshTime := time.Now().Add(TTRSS_FRESH_DURATION)
				cContent = append(cContent, ttRssCounter{Id: TTRSS_FRESH_ID, Counter: user.Count(data.ArticleCountOptions{UnreadOnly: true, AfterDate: freshTime}), AuxCounter: 0})

				cContent = append(cContent, ttRssCounter{Id: TTRSS_ALL_ID, Counter: user.Count(), AuxCounter: 0})

				for _, f := range feeds {
					cContent = append(cContent,
						ttRssCounter{Id: int64(f.Data().Id), Counter: f.Count(o)},
					)
				}

				cContent = append(cContent, ttRssCounter{Id: TTRSS_CAT_LABELS, Counter: 0, Kind: "cat"})

				for _, t := range user.Tags() {
					cContent = append(cContent,
						ttRssCounter{
							Id:      int64(t.Data().Id),
							Counter: t.Count(o),
							Kind:    "cat",
						},
					)
				}

				cContent = append(cContent,
					ttRssCounter{
						Id:      TTRSS_CAT_UNCATEGORIZED,
						Counter: user.Count(data.ArticleCountOptions{UnreadOnly: true, UntaggedOnly: true}),
						Kind:    "cat",
					},
				)

				if user.HasErr() {
					err = fmt.Errorf("Error getting user counters: %v\n", user.Err())
				}

				con = cContent
			case "getFeeds":
				// List feeds for the requested category, prepending the
				// special virtual feeds (favorite/fresh/all) when applicable.
				fContent := ttRssFeedsContent{}

				if req.CatId == TTRSS_CAT_ALL || req.CatId == TTRSS_CAT_SPECIAL {
					unreadFav := user.Count(data.ArticleCountOptions{UnreadOnly: true, FavoriteOnly: true})

					if unreadFav > 0 || !req.UnreadOnly {
						fContent = append(fContent, ttRssFeed{
							Id:     TTRSS_FAVORITE_ID,
							Title:  ttRssSpecialTitle(TTRSS_FAVORITE_ID),
							Unread: unreadFav,
							CatId:  TTRSS_FAVORITE_ID,
						})
					}

					freshTime := time.Now().Add(TTRSS_FRESH_DURATION)
					unreadFresh := user.Count(data.ArticleCountOptions{UnreadOnly: true, AfterDate: freshTime})

					if unreadFresh > 0 || !req.UnreadOnly {
						fContent = append(fContent, ttRssFeed{
							Id:     TTRSS_FRESH_ID,
							Title:  ttRssSpecialTitle(TTRSS_FRESH_ID),
							Unread: unreadFresh,
							CatId:  TTRSS_FAVORITE_ID,
						})
					}

					unreadAll := user.Count(data.ArticleCountOptions{UnreadOnly: true})

					if unreadAll > 0 || !req.UnreadOnly {
						fContent = append(fContent, ttRssFeed{
							Id:     TTRSS_ALL_ID,
							Title:  ttRssSpecialTitle(TTRSS_ALL_ID),
							Unread: unreadAll,
							CatId:  TTRSS_FAVORITE_ID,
						})
					}
				}

				var feeds []content.UserFeed
				var catId int
				if req.CatId == TTRSS_CAT_ALL || req.CatId == TTRSS_CAT_ALL_EXCEPT_VIRTUAL {
					feeds = user.AllFeeds()
				} else {
					if req.CatId == TTRSS_CAT_UNCATEGORIZED {
						tagged := user.AllTaggedFeeds()

						for _, t := range tagged {
							if len(t.Tags()) == 0 {
								feeds = append(feeds, t)
							}
						}
					} else if req.CatId > 0 {
						catId = int(req.CatId)
						t := user.TagById(req.CatId)
						tagged := t.AllFeeds()

						if t.HasErr() {
							err = t.Err()
							break
						}

						for _, t := range tagged {
							feeds = append(feeds, t)
						}
					}
				}

				if len(feeds) > 0 {
					o := data.ArticleCountOptions{UnreadOnly: true}

					for i := range feeds {
						// Manual offset/limit windowing over the feed slice.
						if req.Limit > 0 {
							if i < req.Offset || i >= req.Limit+req.Offset {
								continue
							}
						}

						d := feeds[i].Data()
						unread := feeds[i].Count(o)

						if unread > 0 || !req.UnreadOnly {
							fContent = append(fContent, ttRssFeed{
								Id:          d.Id,
								Title:       d.Title,
								FeedUrl:     d.Link,
								CatId:       catId,
								Unread:      unread,
								LastUpdated: time.Now().Unix(),
								OrderId:     0,
							})
						}
					}
				}

				if user.HasErr() {
					err = fmt.Errorf("Error getting user feeds: %v\n", user.Err())
				}

				con = fContent
			case "getCategories":
				cContent := ttRssCategoriesContent{}
				o := data.ArticleCountOptions{UnreadOnly: true}

				for _, t := range user.Tags() {
					td := t.Data()
					count := t.Count(o)

					if count > 0 || !req.UnreadOnly {
						cContent = append(cContent,
							ttRssCat{Id: strconv.FormatInt(int64(td.Id), 10), Title: string(td.Value), Unread: count},
						)
					}
				}

				count := user.Count(data.ArticleCountOptions{UnreadOnly: true, UntaggedOnly: true})

				if count > 0 || !req.UnreadOnly {
					cContent = append(cContent,
						ttRssCat{Id: strconv.FormatInt(TTRSS_CAT_UNCATEGORIZED, 10), Title: "Uncategorized", Unread: count},
					)
				}

				o.FavoriteOnly = true
				count = user.Count(o)

				if count > 0 || !req.UnreadOnly {
					cContent = append(cContent,
						ttRssCat{Id: strconv.FormatInt(TTRSS_CAT_SPECIAL, 10), Title: "Special", Unread: count},
					)
				}

				con = cContent
			case "getHeadlines":
				// Page through headlines for a tag/category or a feed,
				// optionally filtered by view mode or a full-text search.
				if req.FeedId == 0 {
					errType = "INCORRECT_USAGE"
					break
				}

				limit := req.Limit
				if limit == 0 {
					limit = 200
				}

				var articles []content.UserArticle
				var articleRepo content.ArticleRepo
				var feedTitle string
				firstId := data.ArticleId(0)
				o := data.ArticleQueryOptions{Limit: limit, Offset: req.Skip, UnreadFirst: true, SkipSessionProcessors: true}

				if req.IsCat {
					if req.FeedId == TTRSS_CAT_UNCATEGORIZED {
						ttRssSetupSorting(req, user)
						articleRepo = user
						o.UntaggedOnly = true
						feedTitle = "Uncategorized"
					} else if req.FeedId > 0 {
						t := user.TagById(data.TagId(req.FeedId))
						ttRssSetupSorting(req, t)
						articleRepo = t
						feedTitle = string(t.Data().Value)
					}
				} else {
					if req.FeedId == TTRSS_FAVORITE_ID {
						ttRssSetupSorting(req, user)
						o.FavoriteOnly = true
						articleRepo = user
						feedTitle = "Starred articles"
					} else if req.FeedId == TTRSS_FRESH_ID {
						ttRssSetupSorting(req, user)
						o.AfterDate = time.Now().Add(TTRSS_FRESH_DURATION)
						articleRepo = user
						feedTitle = "Fresh articles"
					} else if req.FeedId == TTRSS_ALL_ID {
						ttRssSetupSorting(req, user)
						articleRepo = user
						feedTitle = "All articles"
					} else if req.FeedId > 0 {
						feed := user.FeedById(req.FeedId)
						ttRssSetupSorting(req, feed)
						articleRepo = feed
						feedTitle = feed.Data().Title
					}
				}

				if req.SinceId > 0 {
					o.AfterId = req.SinceId
				}

				if articleRepo != nil {
					if req.Search != "" {
						if controller.sp != nil {
							if as, ok := articleRepo.(content.ArticleSearch); ok {
								articles = as.Query(req.Search, controller.sp, limit, req.Skip)
							}
						}
					} else {
						var skip bool

						switch req.ViewMode {
						case "all_articles":
						case "adaptive":
						case "unread":
							o.UnreadOnly = true
						case "marked":
							o.FavoriteOnly = true
						default:
							skip = true
						}

						if !skip {
							articles = articleRepo.Articles(o)
						}
					}
				}

				if len(articles) > 0 {
					firstId = articles[0].Data().Id
				}

				headlines := ttRssHeadlinesFromArticles(articles, feedTitle, req.ShowContent, req.ShowExcerpt)

				if req.IncludeHeader {
					header := ttRssHeadlinesHeader{Id: req.FeedId, FirstId: firstId, IsCat: req.IsCat}
					hContent := ttRssHeadlinesHeaderContent{}
					hContent = append(hContent, header)
					hContent = append(hContent, headlines)

					con = hContent
				} else {
					con = headlines
				}
			case "updateArticle":
				// Field 0 toggles the starred flag, field 2 the unread flag;
				// mode 0 clears, 1 sets, 2 toggles.
				articles := user.ArticlesById(req.ArticleIds, data.ArticleQueryOptions{SkipSessionProcessors: true})
				updateCount := int64(0)

				switch req.Field {
				case 0, 2:
					for _, a := range articles {
						d := a.Data()
						updated := false

						switch req.Field {
						case 0:
							switch req.Mode {
							case 0:
								if d.Favorite {
									updated = true
									d.Favorite = false
								}
							case 1:
								if !d.Favorite {
									updated = true
									d.Favorite = true
								}
							case 2:
								updated = true
								d.Favorite = !d.Favorite
							}
							if updated {
								a.Favorite(d.Favorite)
							}
						case 2:
							switch req.Mode {
							case 0:
								if !d.Read {
									updated = true
									d.Read = true
								}
							case 1:
								if d.Read {
									updated = true
									d.Read = false
								}
							case 2:
								updated = true
								d.Read = !d.Read
							}
							if updated {
								a.Read(d.Read)
							}
						}

						if updated {
							if a.HasErr() {
								err = a.Err()
								break
							}

							updateCount++
						}
					}

					if err != nil {
						break
					}

					con = ttRssGenericContent{Status: "OK", Updated: updateCount}
				}
			case "getArticle":
				articles := user.ArticlesById(req.ArticleId, data.ArticleQueryOptions{SkipSessionProcessors: true})
				feedTitles := map[data.FeedId]string{}

				// Cache feed titles so each feed is fetched only once.
				for _, a := range articles {
					d := a.Data()
					if _, ok := feedTitles[d.FeedId]; !ok {
						f := repo.FeedById(d.FeedId)
						feedTitles[d.FeedId] = f.Data().Title
					}
				}

				cContent := ttRssArticlesContent{}

				for _, a := range articles {
					d := a.Data()
					title := feedTitles[d.FeedId]
					h := ttRssArticle{
						Id:        strconv.FormatInt(int64(d.Id), 10),
						Unread:    !d.Read,
						Marked:    d.Favorite,
						Updated:   d.Date.Unix(),
						Title:     d.Title,
						Link:      d.Link,
						FeedId:    strconv.FormatInt(int64(d.FeedId), 10),
						FeedTitle: title,
						Content:   d.Description,
					}

					cContent = append(cContent, h)
				}

				con = cContent
			case "getConfig":
				con = ttRssConfigContent{DaemonIsRunning: true, NumFeeds: len(user.AllFeeds())}
			case "updateFeed":
				con = ttRssGenericContent{Status: "OK"}
			case "catchupFeed":
				var ar content.ArticleRepo
				o := data.ArticleUpdateStateOptions{BeforeDate: time.Now()}

				if req.IsCat {
					tagId := data.TagId(req.FeedId)
					ar = user.TagById(tagId)

					if tagId == TTRSS_CAT_UNCATEGORIZED {
						o.UntaggedOnly = true
					}
				} else {
					ar = user.FeedById(req.FeedId)
				}

				if ar != nil {
					ar.ReadState(true, o)
					if e, ok := ar.(content.Error); ok {
						if e.HasErr() {
							err = e.Err()
							break
						}
					}

					con = ttRssGenericContent{Status: "OK"}
				}
			case "getPref":
				switch req.PrefName {
				case "DEFAULT_UPDATE_INTERVAL":
					con = ttRssGenericContent{Value: int(config.FeedManager.Converted.UpdateInterval.Minutes())}
				case "DEFAULT_ARTICLE_LIMIT":
					con = ttRssGenericContent{Value: 200}
				case "HIDE_READ_FEEDS":
					con = ttRssGenericContent{Value: user.Data().ProfileData["unreadOnly"]}
				case "FEEDS_SORT_BY_UNREAD", "ENABLE_FEED_CATS", "SHOW_CONTENT_PREVIEW":
					con = ttRssGenericContent{Value: true}
				case "FRESH_ARTICLE_MAX_AGE":
					con = ttRssGenericContent{Value: (-1 * TTRSS_FRESH_DURATION).Hours()}
				}
			case "getLabels":
				con = []interface{}{}
			case "setArticleLabel":
				con = ttRssGenericContent{Status: "OK", Updated: 0}
			case "shareToPublished":
				errType = "Publishing failed"
			case "subscribeToFeed":
				f := repo.FeedByLink(req.FeedUrl)
				for _, u := range f.Users() {
					if u.Data().Login == user.Data().Login {
						con = ttRssSubscribeContent{Status: struct {
							Code int `json:"code"`
						}{0}}
						// NOTE(review): this break only exits the loop; the
						// code below still re-adds the feed even though the
						// user is already subscribed - confirm intent.
						break
					}
				}

				if f.HasErr() {
					err = f.Err()
					break
				}

				f, err := controller.fm.AddFeedByLink(req.FeedUrl)
				if err != nil {
					errType = "INCORRECT_USAGE"
					break
				}

				uf := user.AddFeed(f)
				if uf.HasErr() {
					err = uf.Err()
					break
				}

				con = ttRssSubscribeContent{Status: struct {
					Code int `json:"code"`
				}{1}}
			case "unsubscribeFeed":
				f := user.FeedById(req.FeedId)
				f.Detach()
				users := f.Users()

				if f.HasErr() {
					err = f.Err()
					if err == content.ErrNoContent {
						errType = "FEED_NOT_FOUND"
					}
					break
				}

				// Drop the feed entirely once its last subscriber is gone.
				if len(users) == 0 {
					controller.fm.RemoveFeed(f)
				}

				con = ttRssGenericContent{Status: "OK"}
			case "getFeedTree":
				// Build the category tree: the special virtual feeds, then
				// each tag as a category, with untagged feeds under
				// "Uncategorized".
				items := []ttRssCategory{}

				special := ttRssCategory{Id: "CAT:-1", Items: []ttRssCategory{}, Name: "Special", Type: "category", BareId: -1}

				special.Items = append(special.Items, ttRssFeedListCategoryFeed(user, nil, TTRSS_ALL_ID, false))
				special.Items = append(special.Items, ttRssFeedListCategoryFeed(user, nil, TTRSS_FRESH_ID, false))
				special.Items = append(special.Items, ttRssFeedListCategoryFeed(user, nil, TTRSS_FAVORITE_ID, false))
				special.Items = append(special.Items, ttRssFeedListCategoryFeed(user, nil, TTRSS_PUBLISHED_ID, false))
				special.Items = append(special.Items, ttRssFeedListCategoryFeed(user, nil, TTRSS_ARCHIVED_ID, false))
				special.Items = append(special.Items, ttRssFeedListCategoryFeed(user, nil, TTRSS_RECENTLY_READ_ID, false))

				items = append(items, special)

				tf := user.AllTaggedFeeds()

				uncat := ttRssCategory{Id: "CAT:0", Items: []ttRssCategory{}, BareId: 0, Name: "Uncategorized", Type: "category"}
				tagCategories := map[content.Tag]ttRssCategory{}

				for _, f := range tf {
					tags := f.Tags()

					item := ttRssFeedListCategoryFeed(user, f, f.Data().Id, true)
					if len(tags) > 0 {
						for _, t := range tags {
							var c ttRssCategory
							if cached, ok := tagCategories[t]; ok {
								c = cached
							} else {
								c = ttRssCategory{
									Id:     "CAT:" + strconv.FormatInt(int64(t.Data().Id), 10),
									BareId: data.FeedId(t.Data().Id),
									Name:   string(t.Data().Value),
									Type:   "category",
									Items:  []ttRssCategory{},
								}
							}

							c.Items = append(c.Items, item)
							tagCategories[t] = c
						}
					} else {
						uncat.Items = append(uncat.Items, item)
					}
				}

				categories := []ttRssCategory{uncat}
				for _, c := range tagCategories {
					categories = append(categories, c)
				}

				for _, c := range categories {
					if len(c.Items) == 1 {
						c.Param = "(1 feed)"
					} else {
						c.Param = fmt.Sprintf("(%d feed)", len(c.Items))
					}
					items = append(items, c)
				}

				fl := ttRssCategory{Identifier: "id", Label: "name"}
				fl.Items = items

				if user.HasErr() {
					err = user.Err()
				} else {
					con = ttRssFeedTreeContent{Categories: fl}
				}
			default:
				errType = "UNKNOWN_METHOD"
				con = ttRssGenericContent{Method: req.Op}
			}
		}

		if err == nil && errType == "" {
			resp.Status = TTRSS_API_STATUS_OK
		} else {
			logger.Infof("Error processing TT-RSS API request: %s %v\n", errType, err)
			resp.Status = TTRSS_API_STATUS_ERR
			con = ttRssErrorContent{Error: errType}
		}

		var b []byte
		b, err = json.Marshal(con)
		if err == nil {
			resp.Content = json.RawMessage(b)
		}

		b, err = json.Marshal(&resp)

		if err == nil {
			w.Header().Set("Content-Type", "text/json")
			w.Header().Set("Api-Content-Length", strconv.Itoa(len(b)))
			w.Write(b)

			logger.Debugf("Output for %s: %s\n", req.Op, string(b))
		} else {
			logger.Print(fmt.Errorf("TT-RSS error %s: %v", req.Op, err))

			w.WriteHeader(http.StatusInternalServerError)
		}
	})
}
func ttRssConvertRequest(in map[string]interface{}) (req ttRssRequest) { for key, v := range in { switch key { case "op": req.Op = ttRssParseString(v) case "sid": req.Sid = ttRssParseString(v) case "seq": req.Seq = ttRssParseInt(v) case "user": req.User = ttRssParseString(v) case "password": req.Password = ttRssParseString(v) case "output_mode": req.OutputMode = ttRssParseString(v) case "unread_only": req.UnreadOnly = ttRssParseBool(v) case "include_empty": req.IncludeEmpty = ttRssParseBool(v) case "limit": req.Limit = ttRssParseInt(v) case "offset": req.Offset = ttRssParseInt(v) case "cat_id": req.CatId = data.TagId(ttRssParseInt64(v)) case "feed_id": req.FeedId = data.FeedId(ttRssParseInt64(v)) case "skip": req.Skip = ttRssParseInt(v) case "is_cat": req.IsCat = ttRssParseBool(v) case "show_content": req.ShowContent = ttRssParseBool(v) case "show_excerpt": req.ShowExcerpt = ttRssParseBool(v) case "view_mode": req.ViewMode = ttRssParseString(v) case "since_id": req.SinceId = data.ArticleId(ttRssParseInt64(v)) case "sanitize": req.Sanitize = ttRssParseBool(v) case "has_sandbox": req.HasSandbox = ttRssParseBool(v) case "include_header": req.IncludeHeader = ttRssParseBool(v) case "order_by": req.OrderBy = ttRssParseString(v) case "search": req.Search = ttRssParseString(v) case "article_ids": req.ArticleIds = ttRssParseArticleIds(v) case "mode": req.Mode = ttRssParseInt(v) case "field": req.Field = ttRssParseInt(v) case "data": req.Data = ttRssParseString(v) case "article_id": req.ArticleId = ttRssParseArticleIds(v) case "pref_name": req.PrefName = ttRssParseString(v) case "feed_url": req.FeedUrl = ttRssParseString(v) } } return }
func getFeedArticles(user content.User, id string, limit int, offset int, newerFirst bool, unreadOnly bool) (resp responseError) { resp = newResponse() if limit > 50 { limit = 50 } user.SortingByDate() if newerFirst { user.Order(data.DescendingOrder) } else { user.Order(data.AscendingOrder) } if id == "favorite" { resp.val["Articles"], resp.err = user.FavoriteArticles(limit, offset), user.Err() } else if id == "popular:all" { resp.val["Articles"], resp.err = user.ScoredArticles(time.Now().AddDate(0, 0, -5), time.Now(), limit, offset), user.Err() } else if id == "all" { if unreadOnly { resp.val["Articles"], resp.err = user.UnreadArticles(limit, offset), user.Err() } else { resp.val["Articles"], resp.err = user.Articles(limit, offset), user.Err() } } else if strings.HasPrefix(id, "popular:") { if strings.HasPrefix(id, "popular:tag:") { tag := user.Repo().Tag(user) tag.Value(data.TagValue(id[12:])) tag.SortingByDate() if newerFirst { tag.Order(data.DescendingOrder) } else { tag.Order(data.AscendingOrder) } resp.val["Articles"], resp.err = tag.ScoredArticles(time.Now().AddDate(0, 0, -5), time.Now(), limit, offset), tag.Err() } else { var f content.UserFeed var feedId int64 feedId, resp.err = strconv.ParseInt(id[8:], 10, 64) if resp.err != nil { resp.err = errors.New("Unknown feed id " + id) return } if f = user.FeedById(data.FeedId(feedId)); f.HasErr() { /* TODO: non-fatal error */ resp.err = f.Err() return } f.SortingByDate() if newerFirst { f.Order(data.DescendingOrder) } else { f.Order(data.AscendingOrder) } resp.val["Articles"], resp.err = f.ScoredArticles(time.Now().AddDate(0, 0, -5), time.Now(), limit, offset), f.Err() } } else if strings.HasPrefix(id, "tag:") { tag := user.Repo().Tag(user) tag.Value(data.TagValue(id[4:])) tag.SortingByDate() if newerFirst { tag.Order(data.DescendingOrder) } else { tag.Order(data.AscendingOrder) } if unreadOnly { resp.val["Articles"], resp.err = tag.UnreadArticles(limit, offset), tag.Err() } else { resp.val["Articles"], resp.err 
= tag.Articles(limit, offset), tag.Err() } } else { var f content.UserFeed var feedId int64 feedId, resp.err = strconv.ParseInt(id, 10, 64) if resp.err != nil { resp.err = errors.New("Unknown feed id " + id) return } if f = user.FeedById(data.FeedId(feedId)); f.HasErr() { /* TODO: non-fatal error */ resp.err = f.Err() return } if newerFirst { f.Order(data.DescendingOrder) } else { f.Order(data.AscendingOrder) } f.SortingByDate() if unreadOnly { resp.val["Articles"], resp.err = f.UnreadArticles(limit, offset), f.Err() } else { resp.val["Articles"], resp.err = f.Articles(limit, offset), f.Err() } } return }
func (con Feed) Handler(c context.Context) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { action := webfw.GetMultiPatternIdentifier(c, r) user := readeef.GetUser(c, r) r.ParseForm() var resp responseError var feedId int64 params := webfw.GetParams(c, r) if resp.err == nil { switch action { case "list": resp = listFeeds(user) case "discover": link := r.FormValue("url") resp = discoverFeeds(user, con.fm, link) case "opml": buf := util.BufferPool.GetBuffer() defer util.BufferPool.Put(buf) buf.ReadFrom(r.Body) resp = parseOpml(user, con.fm, buf.Bytes()) case "add": links := r.Form["url"] resp = addFeed(user, con.fm, links) case "remove": if feedId, resp.err = strconv.ParseInt(params["feed-id"], 10, 64); resp.err == nil { resp = removeFeed(user, con.fm, data.FeedId(feedId)) } case "tags": if feedId, resp.err = strconv.ParseInt(params["feed-id"], 10, 64); resp.err == nil { if r.Method == "GET" { resp = getFeedTags(user, data.FeedId(feedId)) } else if r.Method == "POST" { decoder := json.NewDecoder(r.Body) tags := []data.TagValue{} if resp.err = decoder.Decode(&tags); resp.err != nil && resp.err != io.EOF { break } resp.err = nil resp = setFeedTags(user, data.FeedId(feedId), tags) } } case "read": var timestamp int64 if timestamp, resp.err = strconv.ParseInt(params["timestamp"], 10, 64); resp.err == nil { resp = markFeedAsRead(user, params["feed-id"], timestamp) } case "articles": var limit, offset int if limit, resp.err = strconv.Atoi(params["limit"]); resp.err == nil { if offset, resp.err = strconv.Atoi(params["offset"]); resp.err == nil { resp = getFeedArticles(user, params["feed-id"], limit, offset, params["newer-first"] == "true", params["unread-only"] == "true") } } } } switch resp.err { case readeef.ErrNoAbsolute: resp.val["Error"] = true resp.val["ErrorType"] = errTypeNoAbsolute resp.err = nil case readeef.ErrNoFeed: resp.val["Error"] = true resp.val["ErrorType"] = errTypeNoFeed resp.err = nil } var b []byte if resp.err == 
nil { b, resp.err = json.Marshal(resp.val) } if resp.err == nil { w.Write(b) } else { webfw.GetLogger(c).Print(resp.err) w.WriteHeader(http.StatusInternalServerError) } }) }
func (con Fever) Handler(c context.Context) http.Handler { repo := readeef.GetRepo(c) return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { logger := webfw.GetLogger(c) var err error var user content.User err = r.ParseForm() if err == nil { user = getReadeefUser(repo, r.FormValue("api_key"), webfw.GetLogger(c)) } resp := map[string]interface{}{"api_version": FEVER_API_VERSION} var reqType string switch { default: if user == nil { resp["auth"] = 0 break } now := time.Now().Unix() resp["auth"] = 1 resp["last_refreshed_on_time"] = now if _, ok := r.Form["groups"]; ok { reqType = "groups" logger.Infoln("Fetching fever groups") resp["groups"], resp["feeds_groups"], err = getGroups(user) } if _, ok := r.Form["feeds"]; ok { reqType = "feeds" logger.Infoln("Fetching fever feeds") var feverFeeds []feverFeed feeds := user.AllFeeds() err = user.Err() if err != nil { break } for i := range feeds { in := feeds[i].Data() feed := feverFeed{ Id: in.Id, Title: in.Title, Url: in.Link, SiteUrl: in.SiteLink, UpdateTime: now, } feverFeeds = append(feverFeeds, feed) } resp["feeds"] = feverFeeds _, resp["feeds_groups"], err = getGroups(user) } if _, ok := r.Form["unread_item_ids"]; ok { reqType = "unread item ids" logger.Infoln("Fetching unread fever item ids") ids := user.Ids(data.ArticleIdQueryOptions{UnreadOnly: true}) err = user.Err() if err != nil { break } buf := util.BufferPool.GetBuffer() defer util.BufferPool.Put(buf) for i := range ids { if i != 0 { buf.WriteString(",") } buf.WriteString(strconv.FormatInt(int64(ids[i]), 10)) } resp["unread_item_ids"] = buf.String() } if _, ok := r.Form["saved_item_ids"]; ok { reqType = "saved item ids" logger.Infoln("Fetching saved fever item ids") ids := user.Ids(data.ArticleIdQueryOptions{FavoriteOnly: true}) err = user.Err() if err != nil { break } buf := util.BufferPool.GetBuffer() defer util.BufferPool.Put(buf) for i := range ids { if i != 0 { buf.WriteString(",") } buf.WriteString(strconv.FormatInt(int64(ids[i]), 10)) } 
resp["saved_item_ids"] = buf.String() } if _, ok := r.Form["items"]; ok { reqType = "items" logger.Infoln("Fetching fever items") var count, since, max int64 count, err = user.Count(), user.Err() if err != nil { err = fmt.Errorf("Error getting user article count: %v", err) break } items := []feverItem{} if count > 0 { if val, ok := r.Form["since_id"]; ok { since, err = strconv.ParseInt(val[0], 10, 64) if err != nil { err = nil since = 0 } } if val, ok := r.Form["max_id"]; ok { max, err = strconv.ParseInt(val[0], 10, 64) if err != nil { err = nil since = 0 } } var articles []content.UserArticle // Fever clients do their own paging o := data.ArticleQueryOptions{Limit: 50, Offset: 0, SkipSessionProcessors: true} if withIds, ok := r.Form["with_ids"]; ok { stringIds := strings.Split(withIds[0], ",") ids := make([]data.ArticleId, 0, len(stringIds)) for _, stringId := range stringIds { stringId = strings.TrimSpace(stringId) if id, err := strconv.ParseInt(stringId, 10, 64); err == nil { ids = append(ids, data.ArticleId(id)) } } articles, err = user.ArticlesById(ids, data.ArticleQueryOptions{SkipSessionProcessors: true}), user.Err() } else if max > 0 { user.Order(data.DescendingOrder) o.BeforeId = data.ArticleId(max) articles, err = user.Articles(o), user.Err() } else { user.Order(data.AscendingOrder) o.AfterId = data.ArticleId(since) articles, err = user.Articles(o), user.Err() } if err != nil { break } for i := range articles { in := articles[i].Data() item := feverItem{ Id: in.Id, FeedId: in.FeedId, Title: in.Title, Html: in.Description, Url: in.Link, CreatedOnTime: in.Date.Unix(), } if in.Read { item.IsRead = 1 } if in.Favorite { item.IsSaved = 1 } items = append(items, item) } } resp["total_items"] = count resp["items"] = items } if _, ok := r.Form["links"]; ok { reqType = "links" logger.Infoln("Fetching fever links") offset, _ := strconv.ParseInt(r.FormValue("offset"), 10, 64) rng, e := strconv.ParseInt(r.FormValue("range"), 10, 64) if e != nil { rng = 7 } page := 
int64(1) page, err = strconv.ParseInt(r.FormValue("page"), 10, 64) if e != nil { break } if page > 3 { resp["links"] = []feverLink{} break } var articles []content.UserArticle var from, to time.Time if offset == 0 { from = time.Now().AddDate(0, 0, int(-1*rng)) to = time.Now() } else { from = time.Now().AddDate(0, 0, int(-1*rng-offset)) to = time.Now().AddDate(0, 0, int(-1*offset)) } user.SortingByDate() user.Order(data.DescendingOrder) articles, err = user.Articles(data.ArticleQueryOptions{ BeforeDate: to, AfterDate: from, Limit: 50, Offset: 50 * int(page-1), IncludeScores: true, }), user.Err() if err != nil { break } links := make([]feverLink, len(articles)) for i := range articles { in := articles[i].Data() link := feverLink{ Id: in.Id, FeedId: in.FeedId, ItemId: in.Id, IsItem: 1, IsLocal: 1, Title: in.Title, Url: in.Link, ItemIds: fmt.Sprintf("%d", in.Id), } if in.Score == 0 { link.Temperature = 0 } else { link.Temperature = math.Log10(float64(in.Score)) / math.Log10(1.1) } if in.Favorite { link.IsSaved = 1 } links[i] = link } resp["links"] = links } if val := r.PostFormValue("unread_recently_read"); val == "1" { reqType = "unread and recently read" logger.Infoln("Marking recently read fever items as unread") t := time.Now().Add(-24 * time.Hour) user.ReadState(false, data.ArticleUpdateStateOptions{ BeforeDate: time.Now(), AfterDate: t, }) err = user.Err() if err != nil { break } } if val := r.PostFormValue("mark"); val != "" { if val == "item" { logger.Infof("Marking fever item '%s' as '%s'\n", r.PostFormValue("id"), r.PostFormValue("as")) var id int64 var article content.UserArticle id, err = strconv.ParseInt(r.PostFormValue("id"), 10, 64) if err != nil { break } article, err = user.ArticleById(data.ArticleId(id), data.ArticleQueryOptions{SkipSessionProcessors: true}), user.Err() if err != nil { break } switch r.PostFormValue("as") { case "read": article.Read(true) case "saved": article.Favorite(true) case "unsaved": article.Favorite(false) default: err = 
errors.New("Unknown 'as' action") } if err == nil { err = article.Err() } } else if val == "feed" || val == "group" { logger.Infof("Marking fever %s '%s' as '%s'\n", val, r.PostFormValue("id"), r.PostFormValue("as")) if r.PostFormValue("as") != "read" { err = errors.New("Unknown 'as' action") break } var id, timestamp int64 id, err = strconv.ParseInt(r.PostFormValue("id"), 10, 64) if err != nil { break } timestamp, err = strconv.ParseInt(r.PostFormValue("before"), 10, 64) if err != nil { break } t := time.Unix(timestamp, 0) if val == "feed" { var feed content.UserFeed feed, err = user.FeedById(data.FeedId(id)), feed.Err() if err != nil { break } feed.ReadState(true, data.ArticleUpdateStateOptions{ BeforeDate: t, }) err = feed.Err() } else if val == "group" { if id == 1 || id == 0 { user.ReadState(true, data.ArticleUpdateStateOptions{ BeforeDate: t, }) err = user.Err() } else { err = errors.New(fmt.Sprintf("Unknown group %d\n", id)) } } } } } var b []byte if err == nil { b, err = json.Marshal(resp) } if err == nil { w.Write(b) } else { if reqType == "" { reqType = "modifying fever data" } else { reqType = "getting " + reqType + " for fever" } webfw.GetLogger(c).Print(fmt.Errorf("Error %s: %v", reqType, err)) w.WriteHeader(http.StatusInternalServerError) } }) }