func TestArticle(t *testing.T) {
	a := Article{}
	a.data.Title = "Title"
	a.data.Id = data.ArticleId(1)

	tests.CheckString(t, "Title (1)", a.String())

	d := a.Data()
	tests.CheckString(t, "Title", d.Title)

	d = a.Data(data.Article{Title: "New title", Description: "Desc"})
	tests.CheckString(t, "New title", d.Title)
	tests.CheckString(t, "Desc", d.Description)

	tests.CheckBool(t, false, a.Validate() == nil)

	d.Link = "http://sugr.org/en/"
	d.FeedId = 42
	a.Data(d)
	tests.CheckBool(t, true, a.Validate() == nil)

	ejson, eerr := json.Marshal(d)
	tests.CheckBool(t, true, eerr == nil)

	ajson, aerr := json.Marshal(a)
	tests.CheckBool(t, true, aerr == nil)

	tests.CheckBytes(t, ejson, ajson)
}
func TestArticleScores(t *testing.T) {
	a := ArticleScores{}
	a.data.Score1 = 1
	a.data.Score2 = 2
	a.data.Score3 = 3
	a.data.Score4 = 4
	a.data.Score5 = 5

	d := a.Data()
	tests.CheckInt64(t, 2, a.data.Score2)

	d.Score2 = 10
	tests.CheckInt64(t, 2, a.data.Score2)

	d = a.Data(d)
	tests.CheckInt64(t, 10, d.Score2)

	tests.CheckBool(t, false, a.Validate() == nil)

	d.ArticleId = data.ArticleId(1)
	a.Data(d)
	tests.CheckBool(t, true, a.Validate() == nil)

	tests.CheckString(t, "Scores for article '1'", a.String())
}
// ttRssParseArticleIds extracts article ids from the representations the
// TT-RSS API may send: a comma-separated string, an array of numbers, or a
// single number.
func ttRssParseArticleIds(vv interface{}) (ids []data.ArticleId) {
	switch v := vv.(type) {
	case string:
		parts := strings.Split(v, ",")
		for _, p := range parts {
			if i, err := strconv.ParseInt(strings.TrimSpace(p), 10, 64); err == nil {
				ids = append(ids, data.ArticleId(i))
			}
		}
	case []float64:
		for _, p := range v {
			ids = append(ids, data.ArticleId(int64(p)))
		}
	case float64:
		ids = append(ids, data.ArticleId(int64(v)))
	}

	return
}
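// Usage sketch (illustrative values, not taken from the TT-RSS protocol).
// json.Unmarshal into an interface{} yields float64 for JSON numbers, which is
// why the float64 cases above exist.
//
//	ttRssParseArticleIds("1, 2,3")        // -> [1 2 3]
//	ttRssParseArticleIds([]float64{4, 5}) // -> [4 5]
//	ttRssParseArticleIds(float64(6))      // -> [6]
//	ttRssParseArticleIds(nil)             // -> nil (unhandled types are ignored)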
// updateArticle writes the article data via the prepared update statement and,
// if no row was affected, falls back to creating a new article record.
func updateArticle(a content.Article, tx *sqlx.Tx, db *db.DB, logger webfw.Logger) {
	if a.HasErr() {
		return
	}

	if err := a.Validate(); err != nil {
		a.Err(err)
		return
	}

	logger.Infof("Updating article %s\n", a)

	d := a.Data()
	s := db.SQL()

	stmt, err := tx.Preparex(s.Article.Update)
	if err != nil {
		a.Err(err)
		return
	}
	defer stmt.Close()

	res, err := stmt.Exec(d.Title, d.Description, d.Date, d.Guid, d.Link, d.FeedId)
	if err != nil {
		a.Err(err)
		return
	}

	if num, err := res.RowsAffected(); err != nil && err == sql.ErrNoRows || num == 0 {
		logger.Infof("Creating article %s\n", a)

		aId, err := db.CreateWithId(tx, s.Article.Create, d.FeedId, d.Link, d.Guid, d.Title, d.Description, d.Date)
		if err != nil {
			a.Err(fmt.Errorf("Error updating article %s (guid - %v, link - %s): %v", a, d.Guid, d.Link, err))
			return
		}

		d.Id = data.ArticleId(aId)
		d.IsNew = true

		a.Data(d)
	}
}
// Handler dispatches the article actions (fetch, read, favorite, format) based
// on the multi-pattern identifier and the article id route parameter.
func (con Article) Handler(c context.Context) http.Handler {
	logger := webfw.GetLogger(c)

	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		user := readeef.GetUser(c, r)

		params := webfw.GetParams(c, r)
		action := webfw.GetMultiPatternIdentifier(c, r)

		logger.Infof("Invoking Article controller with action '%s', article id '%s'\n", action, params["article-id"])

		var articleId int64
		var resp responseError

		articleId, resp.err = strconv.ParseInt(params["article-id"], 10, 64)
		if resp.err == nil {
			id := data.ArticleId(articleId)
			switch action {
			case "fetch":
				resp = fetchArticle(user, id)
			case "read":
				resp = articleReadState(user, id, params["value"] == "true")
			case "favorite":
				resp = articleFavoriteState(user, id, params["value"] == "true")
			case "format":
				resp = formatArticle(user, id, con.extractor, webfw.GetConfig(c), con.config)
			}
		}

		var b []byte
		if resp.err == nil {
			b, resp.err = json.Marshal(resp.val)
		}

		if resp.err == nil {
			w.Write(b)
		} else {
			webfw.GetLogger(c).Print(resp.err)
			w.WriteHeader(http.StatusInternalServerError)
		}
	})
}
// query runs the search term through the bleve index, optionally restricted to
// the given feeds and with highlighting, and resolves the hits back into the
// user's articles, attaching any highlight fragments to the article data.
func query(term, highlight string, index bleve.Index, u content.User, feedIds []data.FeedId, paging ...int) (ua []content.UserArticle, err error) {
	var query bleve.Query

	query = bleve.NewQueryStringQuery(term)

	if len(feedIds) > 0 {
		queries := make([]bleve.Query, len(feedIds))
		conjunct := make([]bleve.Query, 2)

		for i, id := range feedIds {
			q := bleve.NewTermQuery(strconv.FormatInt(int64(id), 10))
			q.SetField("FeedId")

			queries[i] = q
		}

		disjunct := bleve.NewDisjunctionQuery(queries)

		conjunct[0] = query
		conjunct[1] = disjunct

		query = bleve.NewConjunctionQuery(conjunct)
	}

	searchRequest := bleve.NewSearchRequest(query)

	if highlight != "" {
		searchRequest.Highlight = bleve.NewHighlightWithStyle(highlight)
	}

	limit, offset := pagingLimit(paging)
	searchRequest.Size = limit
	searchRequest.From = offset

	searchResult, err := index.Search(searchRequest)
	if err != nil {
		return
	}

	if len(searchResult.Hits) == 0 {
		return
	}

	articleIds := []data.ArticleId{}
	hitMap := map[data.ArticleId]*search.DocumentMatch{}

	for _, hit := range searchResult.Hits {
		if articleId, err := strconv.ParseInt(hit.ID, 10, 64); err == nil {
			id := data.ArticleId(articleId)
			articleIds = append(articleIds, id)
			hitMap[id] = hit
		}
	}

	ua = u.ArticlesById(articleIds)
	if u.HasErr() {
		return ua, u.Err()
	}

	for i := range ua {
		data := ua[i].Data()

		hit := hitMap[data.Id]
		if len(hit.Fragments) > 0 {
			data.Hit.Fragments = hit.Fragments
			ua[i].Data(data)
		}
	}

	return
}
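// Illustrative call (the index and user values are placeholders; the style
// string is one of bleve's built-in highlighters, e.g. "html", and the
// variadic paging values are assumed to be interpreted by pagingLimit as
// limit followed by offset):
//
//	ua, err := query("space exploration", "html", idx, user, []data.FeedId{1, 3}, 25, 0)
//
// When feed ids are present, the term query is AND-ed with an OR of per-feed
// "FeedId" term queries, so only hits from those feeds are returned.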
// Handler dispatches the feed actions: listing, discovery, OPML import/export,
// adding and removing feeds, tag management, read state and article listing.
func (con Feed) Handler(c context.Context) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		action := webfw.GetMultiPatternIdentifier(c, r)
		user := readeef.GetUser(c, r)

		r.ParseForm()

		var resp responseError
		var feedId int64

		params := webfw.GetParams(c, r)

		switch action {
		case "list":
			resp = listFeeds(user)
		case "discover":
			link := r.FormValue("url")
			resp = discoverFeeds(user, con.fm, link)
		case "opml-export":
			resp = exportOpml(user)
		case "opml":
			buf := util.BufferPool.GetBuffer()
			defer util.BufferPool.Put(buf)

			buf.ReadFrom(r.Body)

			resp = parseOpml(user, con.fm, buf.Bytes())
		case "add":
			links := r.Form["url"]
			resp = addFeeds(user, con.fm, links)
		case "remove":
			if feedId, resp.err = strconv.ParseInt(params["feed-id"], 10, 64); resp.err == nil {
				resp = removeFeed(user, con.fm, data.FeedId(feedId))
			}
		case "tags":
			if feedId, resp.err = strconv.ParseInt(params["feed-id"], 10, 64); resp.err == nil {
				if r.Method == "GET" {
					resp = getFeedTags(user, data.FeedId(feedId))
				} else if r.Method == "POST" {
					if b, err := ioutil.ReadAll(r.Body); err == nil {
						tags := []data.TagValue{}
						if err = json.Unmarshal(b, &tags); err != nil {
							resp.err = fmt.Errorf("Error decoding request body: %s", err)
							break
						}

						resp = setFeedTags(user, data.FeedId(feedId), tags)
					} else {
						resp.err = fmt.Errorf("Error reading request body: %s", err)
						break
					}
				}
			}
		case "read":
			var timestamp, beforeId int64

			if bid, ok := params["before-id"]; ok {
				beforeId, resp.err = strconv.ParseInt(bid, 10, 64)
			} else {
				timestamp, resp.err = strconv.ParseInt(params["timestamp"], 10, 64)
			}

			if resp.err == nil {
				resp = readState(user, params["feed-id"], data.ArticleId(beforeId), timestamp)
			}
		case "articles":
			var limit, offset int

			if limit, resp.err = strconv.Atoi(params["limit"]); resp.err == nil {
				if offset, resp.err = strconv.Atoi(params["offset"]); resp.err == nil {
					minId, _ := strconv.ParseInt(params["min-id"], 10, 64)
					maxId, _ := strconv.ParseInt(params["max-id"], 10, 64)

					resp = getFeedArticles(user, con.sp, params["feed-id"],
						data.ArticleId(minId), data.ArticleId(maxId), limit, offset,
						params["older-first"] == "true", params["unread-only"] == "true")
				}
			}
		}

		switch resp.err {
		case readeef.ErrNoAbsolute:
			resp.val["Error"] = true
			resp.val["ErrorType"] = errTypeNoAbsolute
			resp.err = nil
		case readeef.ErrNoFeed:
			resp.val["Error"] = true
			resp.val["ErrorType"] = errTypeNoFeed
			resp.err = nil
		}

		var b []byte
		if resp.err == nil {
			b, resp.err = json.Marshal(resp.val)
		}

		if resp.err == nil {
			w.Write(b)
		} else {
			webfw.GetLogger(c).Print(resp.err)
			w.WriteHeader(http.StatusInternalServerError)
		}
	})
}
func TestUserFeed(t *testing.T) {
	uf := repo.UserFeed(createUser(data.User{}))
	uf.Data(data.Feed{Link: "http://sugr.org"})
	tests.CheckBool(t, false, uf.Validate() == nil)

	u := createUser(data.User{Login: "******"})
	uf = repo.UserFeed(u)
	uf.Data(data.Feed{Link: "http://sugr.org", Title: "User feed 1"})
	tests.CheckBool(t, true, uf.Validate() == nil, uf.Validate())

	uf.Update()
	tests.CheckBool(t, false, uf.HasErr(), uf.Err())

	u.AddFeed(uf)

	id := uf.Data().Id
	uf2 := u.FeedById(id)
	tests.CheckBool(t, false, uf2.HasErr(), uf2.Err())
	tests.CheckString(t, uf.Data().Title, uf2.Data().Title)

	now := time.Now()

	uf.AddArticles([]content.Article{
		createArticle(data.Article{Title: "article1", Date: now, Link: "http://sugr.org/en/products/gearshift"}),
		createArticle(data.Article{Title: "article2", Date: now.Add(2 * time.Hour), Link: "http://sugr.org/en/products/readeef"}),
		createArticle(data.Article{Title: "article3", Date: now.Add(-3 * time.Hour), Link: "http://sugr.org/en/about/us"}),
	})
	tests.CheckBool(t, false, uf.HasErr(), uf.Err())

	uf.SortingById()
	ua := uf.Articles()
	tests.CheckBool(t, false, uf.HasErr(), uf.Err())
	tests.CheckInt64(t, 3, int64(len(ua)))

	var id1, id2, id3 data.ArticleId
	for i := range ua {
		d := ua[i].Data()
		switch d.Title {
		case "article1":
			id1 = d.Id
		case "article2":
			id2 = d.Id
		case "article3":
			id3 = d.Id
		default:
			tests.CheckBool(t, true, false, "Unknown article")
		}
	}

	tests.CheckInt64(t, int64(id1), int64(ua[0].Data().Id))
	tests.CheckString(t, "article2", ua[1].Data().Title)
	tests.CheckInt64(t, now.Add(-3*time.Hour).Unix(), ua[2].Data().Date.Unix())

	uf.SortingByDate()
	ua = uf.Articles()
	tests.CheckInt64(t, int64(id3), int64(ua[0].Data().Id))
	tests.CheckString(t, "article1", ua[1].Data().Title)
	tests.CheckInt64(t, now.Add(2*time.Hour).Unix(), ua[2].Data().Date.Unix())

	uf.Reverse()
	ua = uf.Articles()
	tests.CheckInt64(t, int64(id2), int64(ua[0].Data().Id))
	tests.CheckString(t, "article1", ua[1].Data().Title)
	tests.CheckInt64(t, now.Add(-3*time.Hour).Unix(), ua[2].Data().Date.Unix())

	ua[0].Read(true)
	uf.Reverse()
	uf.SortingById()

	ua = uf.UnreadArticles()
	tests.CheckBool(t, false, uf.HasErr(), uf.Err())
	tests.CheckInt64(t, 2, int64(len(ua)))
	tests.CheckInt64(t, int64(id1), int64(ua[0].Data().Id))
	tests.CheckString(t, "article3", ua[1].Data().Title)

	u.ArticleById(data.ArticleId(id2)).Read(false)
	ua = uf.UnreadArticles()
	tests.CheckInt64(t, 3, int64(len(ua)))

	uf.ReadBefore(now.Add(time.Minute), true)
	tests.CheckBool(t, false, uf.HasErr(), uf.Err())

	ua = uf.UnreadArticles()
	tests.CheckBool(t, false, uf.HasErr(), uf.Err())
	tests.CheckInt64(t, 1, int64(len(ua)))
	tests.CheckInt64(t, int64(id2), int64(ua[0].Data().Id))

	asc1 := createArticleScores(data.ArticleScores{ArticleId: id1, Score1: 2, Score2: 2})
	tests.CheckBool(t, false, asc1.HasErr(), asc1.Err())

	asc2 := createArticleScores(data.ArticleScores{ArticleId: id2, Score1: 1, Score2: 3})
	tests.CheckBool(t, false, asc2.HasErr(), asc2.Err())

	sa := uf.ScoredArticles(now.Add(-20*time.Hour), now.Add(20*time.Hour))
	tests.CheckBool(t, false, uf.HasErr(), uf.Err())
	tests.CheckInt64(t, 2, int64(len(sa)))

	for i := range sa {
		switch sa[i].Data().Id {
		case id1:
			tests.CheckInt64(t, asc1.Calculate(), sa[i].Data().Score)
		case id2:
			tests.CheckInt64(t, asc2.Calculate(), sa[i].Data().Score)
		}
	}

	uf.Detach()
	tests.CheckInt64(t, 0, int64(len(u.AllFeeds())))

	uf2 = u.FeedById(id)
	tests.CheckBool(t, true, uf2.Err() == content.ErrNoContent)
}
// Search runs the term through the Elasticsearch index, optionally restricted
// to the given feeds, and resolves the hits back into the user's articles,
// copying any title and description highlights into the article hit data.
func (e Elastic) Search(
	term string,
	u content.User,
	feedIds []data.FeedId,
	limit, offset int,
) (ua []content.UserArticle, err error) {
	search := e.client.Search().Index(elasticIndexName)

	var query elastic.Query

	if t, err := url.QueryUnescape(term); err == nil {
		term = t
	}

	query = elastic.NewCommonTermsQuery("_all", term)

	if len(feedIds) > 0 {
		idFilter := elastic.NewBoolQuery()

		for _, id := range feedIds {
			idFilter = idFilter.Should(elastic.NewTermQuery("feed_id", int64(id)))
		}

		query = elastic.NewBoolQuery().Must(query).Filter(idFilter)
	}

	search.Query(query)
	search.Highlight(elastic.NewHighlight().PreTags("<mark>").PostTags("</mark>").Field("title").Field("description"))
	search.From(offset).Size(limit)

	switch e.Field() {
	case data.SortByDate:
		search.Sort("date", e.Order() == data.AscendingOrder)
	case data.SortById, data.DefaultSort:
		search.Sort("article_id", e.Order() == data.AscendingOrder)
	}

	var res *elastic.SearchResult
	res, err = search.Do()
	if err != nil {
		return
	}

	if res.TotalHits() == 0 {
		return
	}

	articleIds := []data.ArticleId{}
	highlightMap := map[data.ArticleId]elastic.SearchHitHighlight{}

	if res.Hits != nil && res.Hits.Hits != nil {
		for _, hit := range res.Hits.Hits {
			a := indexArticle{}
			if err := json.Unmarshal(*hit.Source, &a); err == nil {
				if id, err := strconv.ParseInt(a.ArticleId, 10, 64); err == nil {
					articleId := data.ArticleId(id)

					articleIds = append(articleIds, articleId)
					highlightMap[articleId] = hit.Highlight
				}
			}
		}
	}

	ua = u.ArticlesById(articleIds)
	if u.HasErr() {
		return ua, u.Err()
	}

	for i := range ua {
		data := ua[i].Data()

		if highlight, ok := highlightMap[data.Id]; ok {
			data.Hit.Fragments = map[string][]string{}

			if len(highlight["title"]) > 0 {
				data.Hit.Fragments["Title"] = highlight["title"]
			}

			if len(highlight["description"]) > 0 {
				data.Hit.Fragments["Description"] = highlight["description"]
			}

			ua[i].Data(data)
		}
	}

	return
}
// Handler implements the Tiny Tiny RSS JSON API on top of the readeef content
// repository: it manages API sessions and dispatches the requested "op" to the
// corresponding logic, wrapping the result in a ttRssResponse.
func (controller TtRss) Handler(c context.Context) http.Handler {
	repo := readeef.GetRepo(c)
	logger := webfw.GetLogger(c)
	config := readeef.GetConfig(c)

	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		action := webfw.GetMultiPatternIdentifier(c, r)

		if action == "redirecter" {
			http.Redirect(w, r, "/", http.StatusMovedPermanently)
			return
		}

		req := ttRssRequest{}
		resp := ttRssResponse{}

		var err error
		var errType string
		var user content.User
		var con interface{}

		switch {
		default:
			var b []byte
			in := map[string]interface{}{}

			if b, err = ioutil.ReadAll(r.Body); err != nil {
				err = fmt.Errorf("reading request body: %s", err)
				break
			}

			if err = json.Unmarshal(b, &in); err != nil {
				err = fmt.Errorf("decoding JSON request: %s", err)
				break
			}

			req = ttRssConvertRequest(in)

			logger.Debugf("Request: %#v\n", req)

			resp.Seq = req.Seq

			if req.Op != "login" && req.Op != "isLoggedIn" {
				if sess, ok := ttRssSessions[req.Sid]; ok {
					user = repo.UserByLogin(data.Login(sess.login))
					if repo.Err() != nil {
						errType = "NOT_LOGGED_IN"
					} else {
						sess.lastVisit = time.Now()
						ttRssSessions[req.Sid] = sess
					}
				} else {
					errType = "NOT_LOGGED_IN"
				}
			}

			if errType != "" {
				logger.Debugf("TT-RSS Sessions: %#v\n", ttRssSessions)
				break
			}

			logger.Debugf("TT-RSS OP: %s\n", req.Op)

			switch req.Op {
			case "getApiLevel":
				con = ttRssGenericContent{Level: TTRSS_API_LEVEL}
			case "getVersion":
				con = ttRssGenericContent{Version: TTRSS_VERSION}
			case "login":
				user = repo.UserByLogin(data.Login(req.User))
				if repo.Err() != nil {
					errType = "LOGIN_ERROR"
					err = fmt.Errorf("getting TT-RSS user: %s", repo.Err())
					break
				}

				if !user.Authenticate(req.Password, []byte(config.Auth.Secret)) {
					errType = "LOGIN_ERROR"
					err = fmt.Errorf("authentication for TT-RSS user '%s'", user.Data().Login)
					break
				}

				var sessId string

				login := user.Data().Login
				for id, sess := range ttRssSessions {
					if sess.login == login {
						sessId = id
					}
				}

				if sessId == "" {
					sessId = strings.Replace(util.UUID(), "-", "", -1)
					ttRssSessions[sessId] = ttRssSession{login: login, lastVisit: time.Now()}
				}

				con = ttRssGenericContent{
					ApiLevel:  TTRSS_API_LEVEL,
					SessionId: sessId,
				}
			case "logout":
				delete(ttRssSessions, req.Sid)
				con = ttRssGenericContent{Status: "OK"}
			case "isLoggedIn":
				if _, ok := ttRssSessions[req.Sid]; ok {
					con = ttRssGenericContent{Status: true}
				} else {
					con = ttRssGenericContent{Status: false}
				}
			case "getUnread":
				var ar content.ArticleRepo
				o := data.ArticleCountOptions{UnreadOnly: true}

				if req.IsCat {
					tagId := data.TagId(req.FeedId)
					if tagId > 0 {
						ar = user.TagById(tagId)
					} else if tagId == TTRSS_CAT_UNCATEGORIZED {
						ar = user
						o.UntaggedOnly = true
					} else if tagId == TTRSS_CAT_SPECIAL {
						ar = user
						o.FavoriteOnly = true
					}
				} else {
					switch req.FeedId {
					case TTRSS_FAVORITE_ID:
						ar = user
						o.FavoriteOnly = true
					case TTRSS_FRESH_ID:
						ar = user
						o.AfterDate = time.Now().Add(TTRSS_FRESH_DURATION)
					case TTRSS_ALL_ID, 0:
						ar = user
					default:
						if req.FeedId > 0 {
							feed := user.FeedById(req.FeedId)
							if feed.HasErr() {
								err = feed.Err()
								break
							}

							ar = feed
						}
					}
				}

				if ar == nil {
					con = ttRssGenericContent{Unread: "0"}
				} else if con == nil {
					con = ttRssGenericContent{Unread: strconv.FormatInt(ar.Count(o), 10)}
				}
			case "getCounters":
				if req.OutputMode == "" {
					req.OutputMode = "flc"
				}

				cContent := ttRssCountersContent{}

				o := data.ArticleCountOptions{UnreadOnly: true}
				unreadCount := user.Count(o)
				cContent = append(cContent,
					ttRssCounter{Id: "global-unread", Counter: unreadCount})

				feeds := user.AllFeeds()
				cContent = append(cContent,
					ttRssCounter{Id: "subscribed-feeds", Counter: int64(len(feeds))})

				cContent = append(cContent, ttRssCounter{Id: TTRSS_ARCHIVED_ID})

				cContent = append(cContent,
					ttRssCounter{
						Id:         TTRSS_FAVORITE_ID,
						Counter:    user.Count(data.ArticleCountOptions{UnreadOnly: true, FavoriteOnly: true}),
						AuxCounter: user.Count(data.ArticleCountOptions{FavoriteOnly: true}),
					})

				cContent = append(cContent, ttRssCounter{Id: TTRSS_PUBLISHED_ID})

				freshTime := time.Now().Add(TTRSS_FRESH_DURATION)
				cContent = append(cContent,
					ttRssCounter{
						Id:         TTRSS_FRESH_ID,
						Counter:    user.Count(data.ArticleCountOptions{UnreadOnly: true, AfterDate: freshTime}),
						AuxCounter: 0,
					})

				cContent = append(cContent,
					ttRssCounter{
						Id:         TTRSS_ALL_ID,
						Counter:    user.Count(),
						AuxCounter: 0,
					})

				for _, f := range feeds {
					cContent = append(cContent,
						ttRssCounter{Id: int64(f.Data().Id), Counter: f.Count(o)},
					)
				}

				cContent = append(cContent, ttRssCounter{Id: TTRSS_CAT_LABELS, Counter: 0, Kind: "cat"})

				for _, t := range user.Tags() {
					cContent = append(cContent,
						ttRssCounter{
							Id:      int64(t.Data().Id),
							Counter: t.Count(o),
							Kind:    "cat",
						},
					)
				}

				cContent = append(cContent,
					ttRssCounter{
						Id:      TTRSS_CAT_UNCATEGORIZED,
						Counter: user.Count(data.ArticleCountOptions{UnreadOnly: true, UntaggedOnly: true}),
						Kind:    "cat",
					},
				)

				if user.HasErr() {
					err = fmt.Errorf("Error getting user counters: %v\n", user.Err())
				}

				con = cContent
			case "getFeeds":
				fContent := ttRssFeedsContent{}

				if req.CatId == TTRSS_CAT_ALL || req.CatId == TTRSS_CAT_SPECIAL {
					unreadFav := user.Count(data.ArticleCountOptions{UnreadOnly: true, FavoriteOnly: true})

					if unreadFav > 0 || !req.UnreadOnly {
						fContent = append(fContent, ttRssFeed{
							Id:     TTRSS_FAVORITE_ID,
							Title:  ttRssSpecialTitle(TTRSS_FAVORITE_ID),
							Unread: unreadFav,
							CatId:  TTRSS_FAVORITE_ID,
						})
					}

					freshTime := time.Now().Add(TTRSS_FRESH_DURATION)
					unreadFresh := user.Count(data.ArticleCountOptions{UnreadOnly: true, AfterDate: freshTime})

					if unreadFresh > 0 || !req.UnreadOnly {
						fContent = append(fContent, ttRssFeed{
							Id:     TTRSS_FRESH_ID,
							Title:  ttRssSpecialTitle(TTRSS_FRESH_ID),
							Unread: unreadFresh,
							CatId:  TTRSS_FAVORITE_ID,
						})
					}

					unreadAll := user.Count(data.ArticleCountOptions{UnreadOnly: true})

					if unreadAll > 0 || !req.UnreadOnly {
						fContent = append(fContent, ttRssFeed{
							Id:     TTRSS_ALL_ID,
							Title:  ttRssSpecialTitle(TTRSS_ALL_ID),
							Unread: unreadAll,
							CatId:  TTRSS_FAVORITE_ID,
						})
					}
				}

				var feeds []content.UserFeed
				var catId int

				if req.CatId == TTRSS_CAT_ALL || req.CatId == TTRSS_CAT_ALL_EXCEPT_VIRTUAL {
					feeds = user.AllFeeds()
				} else {
					if req.CatId == TTRSS_CAT_UNCATEGORIZED {
						tagged := user.AllTaggedFeeds()
						for _, t := range tagged {
							if len(t.Tags()) == 0 {
								feeds = append(feeds, t)
							}
						}
					} else if req.CatId > 0 {
						catId = int(req.CatId)
						t := user.TagById(req.CatId)
						tagged := t.AllFeeds()
						if t.HasErr() {
							err = t.Err()
							break
						}

						for _, t := range tagged {
							feeds = append(feeds, t)
						}
					}
				}

				if len(feeds) > 0 {
					o := data.ArticleCountOptions{UnreadOnly: true}
					for i := range feeds {
						if req.Limit > 0 {
							if i < req.Offset || i >= req.Limit+req.Offset {
								continue
							}
						}

						d := feeds[i].Data()
						unread := feeds[i].Count(o)

						if unread > 0 || !req.UnreadOnly {
							fContent = append(fContent, ttRssFeed{
								Id:          d.Id,
								Title:       d.Title,
								FeedUrl:     d.Link,
								CatId:       catId,
								Unread:      unread,
								LastUpdated: time.Now().Unix(),
								OrderId:     0,
							})
						}
					}
				}

				if user.HasErr() {
					err = fmt.Errorf("Error getting user feeds: %v\n", user.Err())
				}

				con = fContent
			case "getCategories":
				cContent := ttRssCategoriesContent{}
				o := data.ArticleCountOptions{UnreadOnly: true}

				for _, t := range user.Tags() {
					td := t.Data()
					count := t.Count(o)

					if count > 0 || !req.UnreadOnly {
						cContent = append(cContent,
							ttRssCat{Id: strconv.FormatInt(int64(td.Id), 10), Title: string(td.Value), Unread: count},
						)
					}
				}

				count := user.Count(data.ArticleCountOptions{UnreadOnly: true, UntaggedOnly: true})
				if count > 0 || !req.UnreadOnly {
					cContent = append(cContent,
						ttRssCat{Id: strconv.FormatInt(TTRSS_CAT_UNCATEGORIZED, 10), Title: "Uncategorized", Unread: count},
					)
				}

				o.FavoriteOnly = true
				count = user.Count(o)

				if count > 0 || !req.UnreadOnly {
					cContent = append(cContent,
						ttRssCat{Id: strconv.FormatInt(TTRSS_CAT_SPECIAL, 10), Title: "Special", Unread: count},
					)
				}

				con = cContent
			case "getHeadlines":
				if req.FeedId == 0 {
					errType = "INCORRECT_USAGE"
					break
				}

				limit := req.Limit
				if limit == 0 {
					limit = 200
				}

				var articles []content.UserArticle
				var articleRepo content.ArticleRepo
				var feedTitle string
				firstId := data.ArticleId(0)
				o := data.ArticleQueryOptions{Limit: limit, Offset: req.Skip, UnreadFirst: true, SkipSessionProcessors: true}

				if req.IsCat {
					if req.FeedId == TTRSS_CAT_UNCATEGORIZED {
						ttRssSetupSorting(req, user)
						articleRepo = user
						o.UntaggedOnly = true
						feedTitle = "Uncategorized"
					} else if req.FeedId > 0 {
						t := user.TagById(data.TagId(req.FeedId))
						ttRssSetupSorting(req, t)
						articleRepo = t
						feedTitle = string(t.Data().Value)
					}
				} else {
					if req.FeedId == TTRSS_FAVORITE_ID {
						ttRssSetupSorting(req, user)
						o.FavoriteOnly = true
						articleRepo = user
						feedTitle = "Starred articles"
					} else if req.FeedId == TTRSS_FRESH_ID {
						ttRssSetupSorting(req, user)
						o.AfterDate = time.Now().Add(TTRSS_FRESH_DURATION)
						articleRepo = user
						feedTitle = "Fresh articles"
					} else if req.FeedId == TTRSS_ALL_ID {
						ttRssSetupSorting(req, user)
						articleRepo = user
						feedTitle = "All articles"
					} else if req.FeedId > 0 {
						feed := user.FeedById(req.FeedId)
						ttRssSetupSorting(req, feed)
						articleRepo = feed
						feedTitle = feed.Data().Title
					}
				}

				if req.SinceId > 0 {
					o.AfterId = req.SinceId
				}

				if articleRepo != nil {
					if req.Search != "" {
						if controller.sp != nil {
							if as, ok := articleRepo.(content.ArticleSearch); ok {
								articles = as.Query(req.Search, controller.sp, limit, req.Skip)
							}
						}
					} else {
						var skip bool

						switch req.ViewMode {
						case "all_articles":
						case "adaptive":
						case "unread":
							o.UnreadOnly = true
						case "marked":
							o.FavoriteOnly = true
						default:
							skip = true
						}

						if !skip {
							articles = articleRepo.Articles(o)
						}
					}
				}

				if len(articles) > 0 {
					firstId = articles[0].Data().Id
				}

				headlines := ttRssHeadlinesFromArticles(articles, feedTitle, req.ShowContent, req.ShowExcerpt)

				if req.IncludeHeader {
					header := ttRssHeadlinesHeader{Id: req.FeedId, FirstId: firstId, IsCat: req.IsCat}
					hContent := ttRssHeadlinesHeaderContent{}
					hContent = append(hContent, header)
					hContent = append(hContent, headlines)

					con = hContent
				} else {
					con = headlines
				}
			case "updateArticle":
				articles := user.ArticlesById(req.ArticleIds, data.ArticleQueryOptions{SkipSessionProcessors: true})
				updateCount := int64(0)

				switch req.Field {
				case 0, 2:
					for _, a := range articles {
						d := a.Data()
						updated := false

						switch req.Field {
						case 0:
							switch req.Mode {
							case 0:
								if d.Favorite {
									updated = true
									d.Favorite = false
								}
							case 1:
								if !d.Favorite {
									updated = true
									d.Favorite = true
								}
							case 2:
								updated = true
								d.Favorite = !d.Favorite
							}

							if updated {
								a.Favorite(d.Favorite)
							}
						case 2:
							switch req.Mode {
							case 0:
								if !d.Read {
									updated = true
									d.Read = true
								}
							case 1:
								if d.Read {
									updated = true
									d.Read = false
								}
							case 2:
								updated = true
								d.Read = !d.Read
							}

							if updated {
								a.Read(d.Read)
							}
						}

						if updated {
							if a.HasErr() {
								err = a.Err()
								break
							}

							updateCount++
						}
					}

					if err != nil {
						break
					}

					con = ttRssGenericContent{Status: "OK", Updated: updateCount}
				}
			case "getArticle":
				articles := user.ArticlesById(req.ArticleId, data.ArticleQueryOptions{SkipSessionProcessors: true})
				feedTitles := map[data.FeedId]string{}

				for _, a := range articles {
					d := a.Data()
					if _, ok := feedTitles[d.FeedId]; !ok {
						f := repo.FeedById(d.FeedId)
						feedTitles[d.FeedId] = f.Data().Title
					}
				}

				cContent := ttRssArticlesContent{}

				for _, a := range articles {
					d := a.Data()
					title := feedTitles[d.FeedId]
					h := ttRssArticle{
						Id:        strconv.FormatInt(int64(d.Id), 10),
						Unread:    !d.Read,
						Marked:    d.Favorite,
						Updated:   d.Date.Unix(),
						Title:     d.Title,
						Link:      d.Link,
						FeedId:    strconv.FormatInt(int64(d.FeedId), 10),
						FeedTitle: title,
						Content:   d.Description,
					}

					cContent = append(cContent, h)
				}

				con = cContent
			case "getConfig":
				con = ttRssConfigContent{DaemonIsRunning: true, NumFeeds: len(user.AllFeeds())}
			case "updateFeed":
				con = ttRssGenericContent{Status: "OK"}
			case "catchupFeed":
				var ar content.ArticleRepo
				o := data.ArticleUpdateStateOptions{BeforeDate: time.Now()}

				if req.IsCat {
					tagId := data.TagId(req.FeedId)
					ar = user.TagById(tagId)
					if tagId == TTRSS_CAT_UNCATEGORIZED {
						o.UntaggedOnly = true
					}
				} else {
					ar = user.FeedById(req.FeedId)
				}

				if ar != nil {
					ar.ReadState(true, o)
					if e, ok := ar.(content.Error); ok {
						if e.HasErr() {
							err = e.Err()
							break
						}
					}

					con = ttRssGenericContent{Status: "OK"}
				}
			case "getPref":
				switch req.PrefName {
				case "DEFAULT_UPDATE_INTERVAL":
					con = ttRssGenericContent{Value: int(config.FeedManager.Converted.UpdateInterval.Minutes())}
				case "DEFAULT_ARTICLE_LIMIT":
					con = ttRssGenericContent{Value: 200}
				case "HIDE_READ_FEEDS":
					con = ttRssGenericContent{Value: user.Data().ProfileData["unreadOnly"]}
				case "FEEDS_SORT_BY_UNREAD", "ENABLE_FEED_CATS", "SHOW_CONTENT_PREVIEW":
					con = ttRssGenericContent{Value: true}
				case "FRESH_ARTICLE_MAX_AGE":
					con = ttRssGenericContent{Value: (-1 * TTRSS_FRESH_DURATION).Hours()}
				}
			case "getLabels":
				con = []interface{}{}
			case "setArticleLabel":
				con = ttRssGenericContent{Status: "OK", Updated: 0}
			case "shareToPublished":
				errType = "Publishing failed"
			case "subscribeToFeed":
				f := repo.FeedByLink(req.FeedUrl)
				for _, u := range f.Users() {
					if u.Data().Login == user.Data().Login {
						con = ttRssSubscribeContent{Status: struct {
							Code int `json:"code"`
						}{0}}
						break
					}
				}

				if f.HasErr() {
					err = f.Err()
					break
				}

				f, err := controller.fm.AddFeedByLink(req.FeedUrl)
				if err != nil {
					errType = "INCORRECT_USAGE"
					break
				}

				uf := user.AddFeed(f)
				if uf.HasErr() {
					err = uf.Err()
					break
				}

				con = ttRssSubscribeContent{Status: struct {
					Code int `json:"code"`
				}{1}}
			case "unsubscribeFeed":
				f := user.FeedById(req.FeedId)
				f.Detach()
				users := f.Users()

				if f.HasErr() {
					err = f.Err()
					if err == content.ErrNoContent {
						errType = "FEED_NOT_FOUND"
					}
					break
				}

				if len(users) == 0 {
					controller.fm.RemoveFeed(f)
				}

				con = ttRssGenericContent{Status: "OK"}
			case "getFeedTree":
				items := []ttRssCategory{}

				special := ttRssCategory{Id: "CAT:-1", Items: []ttRssCategory{}, Name: "Special", Type: "category", BareId: -1}

				special.Items = append(special.Items, ttRssFeedListCategoryFeed(user, nil, TTRSS_ALL_ID, false))
				special.Items = append(special.Items, ttRssFeedListCategoryFeed(user, nil, TTRSS_FRESH_ID, false))
				special.Items = append(special.Items, ttRssFeedListCategoryFeed(user, nil, TTRSS_FAVORITE_ID, false))
				special.Items = append(special.Items, ttRssFeedListCategoryFeed(user, nil, TTRSS_PUBLISHED_ID, false))
				special.Items = append(special.Items, ttRssFeedListCategoryFeed(user, nil, TTRSS_ARCHIVED_ID, false))
				special.Items = append(special.Items, ttRssFeedListCategoryFeed(user, nil, TTRSS_RECENTLY_READ_ID, false))

				items = append(items, special)

				tf := user.AllTaggedFeeds()

				uncat := ttRssCategory{Id: "CAT:0", Items: []ttRssCategory{}, BareId: 0, Name: "Uncategorized", Type: "category"}
				tagCategories := map[content.Tag]ttRssCategory{}

				for _, f := range tf {
					tags := f.Tags()

					item := ttRssFeedListCategoryFeed(user, f, f.Data().Id, true)

					if len(tags) > 0 {
						for _, t := range tags {
							var c ttRssCategory
							if cached, ok := tagCategories[t]; ok {
								c = cached
							} else {
								c = ttRssCategory{
									Id:     "CAT:" + strconv.FormatInt(int64(t.Data().Id), 10),
									BareId: data.FeedId(t.Data().Id),
									Name:   string(t.Data().Value),
									Type:   "category",
									Items:  []ttRssCategory{},
								}
							}

							c.Items = append(c.Items, item)
							tagCategories[t] = c
						}
					} else {
						uncat.Items = append(uncat.Items, item)
					}
				}

				categories := []ttRssCategory{uncat}
				for _, c := range tagCategories {
					categories = append(categories, c)
				}

				for _, c := range categories {
					if len(c.Items) == 1 {
						c.Param = "(1 feed)"
					} else {
						c.Param = fmt.Sprintf("(%d feeds)", len(c.Items))
					}

					items = append(items, c)
				}

				fl := ttRssCategory{Identifier: "id", Label: "name"}
				fl.Items = items

				if user.HasErr() {
					err = user.Err()
				} else {
					con = ttRssFeedTreeContent{Categories: fl}
				}
			default:
				errType = "UNKNOWN_METHOD"
				con = ttRssGenericContent{Method: req.Op}
			}
		}

		if err == nil && errType == "" {
			resp.Status = TTRSS_API_STATUS_OK
		} else {
			logger.Infof("Error processing TT-RSS API request: %s %v\n", errType, err)
			resp.Status = TTRSS_API_STATUS_ERR
			con = ttRssErrorContent{Error: errType}
		}

		var b []byte
		b, err = json.Marshal(con)
		if err == nil {
			resp.Content = json.RawMessage(b)
		}

		b, err = json.Marshal(&resp)

		if err == nil {
			w.Header().Set("Content-Type", "text/json")
			w.Header().Set("Api-Content-Length", strconv.Itoa(len(b)))
			w.Write(b)

			logger.Debugf("Output for %s: %s\n", req.Op, string(b))
		} else {
			logger.Print(fmt.Errorf("TT-RSS error %s: %v", req.Op, err))
			w.WriteHeader(http.StatusInternalServerError)
		}
	})
}
// ttRssConvertRequest maps the keys of a decoded TT-RSS JSON request onto the
// typed ttRssRequest fields; unrecognized keys are ignored.
func ttRssConvertRequest(in map[string]interface{}) (req ttRssRequest) {
	for key, v := range in {
		switch key {
		case "op":
			req.Op = ttRssParseString(v)
		case "sid":
			req.Sid = ttRssParseString(v)
		case "seq":
			req.Seq = ttRssParseInt(v)
		case "user":
			req.User = ttRssParseString(v)
		case "password":
			req.Password = ttRssParseString(v)
		case "output_mode":
			req.OutputMode = ttRssParseString(v)
		case "unread_only":
			req.UnreadOnly = ttRssParseBool(v)
		case "include_empty":
			req.IncludeEmpty = ttRssParseBool(v)
		case "limit":
			req.Limit = ttRssParseInt(v)
		case "offset":
			req.Offset = ttRssParseInt(v)
		case "cat_id":
			req.CatId = data.TagId(ttRssParseInt64(v))
		case "feed_id":
			req.FeedId = data.FeedId(ttRssParseInt64(v))
		case "skip":
			req.Skip = ttRssParseInt(v)
		case "is_cat":
			req.IsCat = ttRssParseBool(v)
		case "show_content":
			req.ShowContent = ttRssParseBool(v)
		case "show_excerpt":
			req.ShowExcerpt = ttRssParseBool(v)
		case "view_mode":
			req.ViewMode = ttRssParseString(v)
		case "since_id":
			req.SinceId = data.ArticleId(ttRssParseInt64(v))
		case "sanitize":
			req.Sanitize = ttRssParseBool(v)
		case "has_sandbox":
			req.HasSandbox = ttRssParseBool(v)
		case "include_header":
			req.IncludeHeader = ttRssParseBool(v)
		case "order_by":
			req.OrderBy = ttRssParseString(v)
		case "search":
			req.Search = ttRssParseString(v)
		case "article_ids":
			req.ArticleIds = ttRssParseArticleIds(v)
		case "mode":
			req.Mode = ttRssParseInt(v)
		case "field":
			req.Field = ttRssParseInt(v)
		case "data":
			req.Data = ttRssParseString(v)
		case "article_id":
			req.ArticleId = ttRssParseArticleIds(v)
		case "pref_name":
			req.PrefName = ttRssParseString(v)
		case "feed_url":
			req.FeedUrl = ttRssParseString(v)
		}
	}

	return
}
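// Illustrative request body and the fields ttRssConvertRequest derives from it
// (the values are made up):
//
//	{"op": "getHeadlines", "sid": "abc123", "feed_id": -4, "limit": 60, "is_cat": false}
//
// becomes req.Op == "getHeadlines", req.Sid == "abc123",
// req.FeedId == data.FeedId(-4), req.Limit == 60 and req.IsCat == false;
// keys outside the switch above are silently dropped.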
// Handler implements the Fever API: it authenticates the request via the
// api_key form value, fills the response with whichever data sets (groups,
// feeds, items, links, unread/saved ids) the client asked for, and handles the
// mark and unread_recently_read write operations.
func (con Fever) Handler(c context.Context) http.Handler {
	repo := readeef.GetRepo(c)

	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		logger := webfw.GetLogger(c)

		var err error
		var user content.User

		err = r.ParseForm()
		if err == nil {
			user = getReadeefUser(repo, r.FormValue("api_key"), webfw.GetLogger(c))
		}

		resp := map[string]interface{}{"api_version": FEVER_API_VERSION}

		var reqType string

		switch {
		default:
			if user == nil {
				resp["auth"] = 0
				break
			}

			now := time.Now().Unix()

			resp["auth"] = 1
			resp["last_refreshed_on_time"] = now

			if _, ok := r.Form["groups"]; ok {
				reqType = "groups"
				logger.Infoln("Fetching fever groups")

				resp["groups"], resp["feeds_groups"], err = getGroups(user)
			}

			if _, ok := r.Form["feeds"]; ok {
				reqType = "feeds"
				logger.Infoln("Fetching fever feeds")

				var feverFeeds []feverFeed

				feeds := user.AllFeeds()
				err = user.Err()
				if err != nil {
					break
				}

				for i := range feeds {
					in := feeds[i].Data()

					feed := feverFeed{
						Id: in.Id, Title: in.Title, Url: in.Link, SiteUrl: in.SiteLink, UpdateTime: now,
					}

					feverFeeds = append(feverFeeds, feed)
				}

				resp["feeds"] = feverFeeds

				_, resp["feeds_groups"], err = getGroups(user)
			}

			if _, ok := r.Form["unread_item_ids"]; ok {
				reqType = "unread item ids"
				logger.Infoln("Fetching unread fever item ids")

				ids := user.Ids(data.ArticleIdQueryOptions{UnreadOnly: true})
				err = user.Err()
				if err != nil {
					break
				}

				buf := util.BufferPool.GetBuffer()
				defer util.BufferPool.Put(buf)

				for i := range ids {
					if i != 0 {
						buf.WriteString(",")
					}

					buf.WriteString(strconv.FormatInt(int64(ids[i]), 10))
				}

				resp["unread_item_ids"] = buf.String()
			}

			if _, ok := r.Form["saved_item_ids"]; ok {
				reqType = "saved item ids"
				logger.Infoln("Fetching saved fever item ids")

				ids := user.Ids(data.ArticleIdQueryOptions{FavoriteOnly: true})
				err = user.Err()
				if err != nil {
					break
				}

				buf := util.BufferPool.GetBuffer()
				defer util.BufferPool.Put(buf)

				for i := range ids {
					if i != 0 {
						buf.WriteString(",")
					}

					buf.WriteString(strconv.FormatInt(int64(ids[i]), 10))
				}

				resp["saved_item_ids"] = buf.String()
			}

			if _, ok := r.Form["items"]; ok {
				reqType = "items"
				logger.Infoln("Fetching fever items")

				var count, since, max int64

				count, err = user.Count(), user.Err()
				if err != nil {
					err = fmt.Errorf("Error getting user article count: %v", err)
					break
				}

				items := []feverItem{}
				if count > 0 {
					if val, ok := r.Form["since_id"]; ok {
						since, err = strconv.ParseInt(val[0], 10, 64)
						if err != nil {
							err = nil
							since = 0
						}
					}

					if val, ok := r.Form["max_id"]; ok {
						max, err = strconv.ParseInt(val[0], 10, 64)
						if err != nil {
							err = nil
							max = 0
						}
					}

					var articles []content.UserArticle

					// Fever clients do their own paging
					o := data.ArticleQueryOptions{Limit: 50, Offset: 0, SkipSessionProcessors: true}

					if withIds, ok := r.Form["with_ids"]; ok {
						stringIds := strings.Split(withIds[0], ",")
						ids := make([]data.ArticleId, 0, len(stringIds))

						for _, stringId := range stringIds {
							stringId = strings.TrimSpace(stringId)
							if id, err := strconv.ParseInt(stringId, 10, 64); err == nil {
								ids = append(ids, data.ArticleId(id))
							}
						}

						articles, err = user.ArticlesById(ids, data.ArticleQueryOptions{SkipSessionProcessors: true}), user.Err()
					} else if max > 0 {
						user.Order(data.DescendingOrder)
						o.BeforeId = data.ArticleId(max)

						articles, err = user.Articles(o), user.Err()
					} else {
						user.Order(data.AscendingOrder)
						o.AfterId = data.ArticleId(since)

						articles, err = user.Articles(o), user.Err()
					}

					if err != nil {
						break
					}

					for i := range articles {
						in := articles[i].Data()

						item := feverItem{
							Id: in.Id, FeedId: in.FeedId, Title: in.Title, Html: in.Description,
							Url: in.Link, CreatedOnTime: in.Date.Unix(),
						}

						if in.Read {
							item.IsRead = 1
						}

						if in.Favorite {
							item.IsSaved = 1
						}

						items = append(items, item)
					}
				}

				resp["total_items"] = count
				resp["items"] = items
			}

			if _, ok := r.Form["links"]; ok {
				reqType = "links"
				logger.Infoln("Fetching fever links")

				offset, _ := strconv.ParseInt(r.FormValue("offset"), 10, 64)

				rng, e := strconv.ParseInt(r.FormValue("range"), 10, 64)
				if e != nil {
					rng = 7
				}

				page := int64(1)
				page, err = strconv.ParseInt(r.FormValue("page"), 10, 64)
				if e != nil {
					break
				}

				if page > 3 {
					resp["links"] = []feverLink{}
					break
				}

				var articles []content.UserArticle
				var from, to time.Time

				if offset == 0 {
					from = time.Now().AddDate(0, 0, int(-1*rng))
					to = time.Now()
				} else {
					from = time.Now().AddDate(0, 0, int(-1*rng-offset))
					to = time.Now().AddDate(0, 0, int(-1*offset))
				}

				user.SortingByDate()
				user.Order(data.DescendingOrder)

				articles, err = user.Articles(data.ArticleQueryOptions{
					BeforeDate:    to,
					AfterDate:     from,
					Limit:         50,
					Offset:        50 * int(page-1),
					IncludeScores: true,
				}), user.Err()
				if err != nil {
					break
				}

				links := make([]feverLink, len(articles))
				for i := range articles {
					in := articles[i].Data()

					link := feverLink{
						Id: in.Id, FeedId: in.FeedId, ItemId: in.Id, IsItem: 1, IsLocal: 1,
						Title: in.Title, Url: in.Link, ItemIds: fmt.Sprintf("%d", in.Id),
					}

					if in.Score == 0 {
						link.Temperature = 0
					} else {
						link.Temperature = math.Log10(float64(in.Score)) / math.Log10(1.1)
					}

					if in.Favorite {
						link.IsSaved = 1
					}

					links[i] = link
				}

				resp["links"] = links
			}

			if val := r.PostFormValue("unread_recently_read"); val == "1" {
				reqType = "unread and recently read"
				logger.Infoln("Marking recently read fever items as unread")

				t := time.Now().Add(-24 * time.Hour)
				user.ReadState(false, data.ArticleUpdateStateOptions{
					BeforeDate: time.Now(),
					AfterDate:  t,
				})

				err = user.Err()
				if err != nil {
					break
				}
			}

			if val := r.PostFormValue("mark"); val != "" {
				if val == "item" {
					logger.Infof("Marking fever item '%s' as '%s'\n", r.PostFormValue("id"), r.PostFormValue("as"))

					var id int64
					var article content.UserArticle

					id, err = strconv.ParseInt(r.PostFormValue("id"), 10, 64)
					if err != nil {
						break
					}

					article, err = user.ArticleById(data.ArticleId(id), data.ArticleQueryOptions{SkipSessionProcessors: true}), user.Err()
					if err != nil {
						break
					}

					switch r.PostFormValue("as") {
					case "read":
						article.Read(true)
					case "saved":
						article.Favorite(true)
					case "unsaved":
						article.Favorite(false)
					default:
						err = errors.New("Unknown 'as' action")
					}

					if err == nil {
						err = article.Err()
					}
				} else if val == "feed" || val == "group" {
					logger.Infof("Marking fever %s '%s' as '%s'\n", val, r.PostFormValue("id"), r.PostFormValue("as"))

					if r.PostFormValue("as") != "read" {
						err = errors.New("Unknown 'as' action")
						break
					}

					var id, timestamp int64

					id, err = strconv.ParseInt(r.PostFormValue("id"), 10, 64)
					if err != nil {
						break
					}

					timestamp, err = strconv.ParseInt(r.PostFormValue("before"), 10, 64)
					if err != nil {
						break
					}

					t := time.Unix(timestamp, 0)

					if val == "feed" {
						var feed content.UserFeed

						feed = user.FeedById(data.FeedId(id))
						err = feed.Err()
						if err != nil {
							break
						}

						feed.ReadState(true, data.ArticleUpdateStateOptions{
							BeforeDate: t,
						})
						err = feed.Err()
					} else if val == "group" {
						if id == 1 || id == 0 {
							user.ReadState(true, data.ArticleUpdateStateOptions{
								BeforeDate: t,
							})
							err = user.Err()
						} else {
							err = errors.New(fmt.Sprintf("Unknown group %d\n", id))
						}
					}
				}
			}
		}

		var b []byte
		if err == nil {
			b, err = json.Marshal(resp)
		}

		if err == nil {
			w.Write(b)
		} else {
			if reqType == "" {
				reqType = "modifying fever data"
			} else {
				reqType = "getting " + reqType + " for fever"
			}

			webfw.GetLogger(c).Print(fmt.Errorf("Error %s: %v", reqType, err))
			w.WriteHeader(http.StatusInternalServerError)
		}
	})
}
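// Illustrative Fever client exchanges against the handler above (the mount
// path is an assumption; the parameter names come from the code):
//
//	POST /api/fever?api&feeds             body: api_key=...
//	POST /api/fever?api&items&since_id=0  body: api_key=...
//	POST /api/fever?api                   body: api_key=...&mark=item&as=read&id=42
//
// Every response carries api_version and auth; auth stays 0 (and nothing else
// is filled in) when the api_key does not resolve to a user.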