// feedentry_unread returns up to count unread entries for the given feed
// source (an atom/rss URL), using 0-based paging.
//
// source: feed (atom/rss) url.
// count: must be larger than 0 (panics otherwise — programmer error).
// page: 0-based page index; when page is 0 the feed may be re-fetched
// online and the local store refreshed before the page query runs.
//
// Returns the page of entries, an error, and the HTTP status code of the
// online fetch (0 when page > 0, i.e. no fetch was attempted).
func feedentry_unread(source string, count int, page int) ([]ReadEntry, error, int) {
	if count <= 0 {
		panic("invalid arg count")
	}
	// sc carries the HTTP status of the online fetch; stays 0 for page > 0.
	var sc int
	if page == 0 {
		// First page: refresh the feed from the network before querying.
		log.Println("curl-get...")
		c := curl.NewCurlerDetail(backend_config().FeedEntryFolder, 0, 0, nil, backend_context.ruler)
		cache, err := c.GetUtf8(source)
		// NOTE(review): logged before the error check — assumes GetUtf8
		// returns a usable cache value even on error; verify.
		log.Println("curl-get", cache.LocalUtf8)
		if err != nil || cache.LocalUtf8 == "" {
			// NOTE(review): when LocalUtf8 is empty but err is nil this
			// returns a nil error with nil entries — confirm callers can
			// distinguish that case via the returned status code.
			return nil, err, cache.StatusCode
		}
		ext := curl.MimeToExt(cache.Mime)
		if ext != "xml" && ext != "atom+xml" && ext != "rss+xml" {
			return nil, new_backenderror(cache.StatusCode, "unsupported mime: "+cache.Mime), 0
		}
		f, err := os.Open(cache.LocalUtf8)
		if err != nil {
			return nil, err, cache.StatusCode
		}
		fs, v, err := feed.NewFeedMaker(f, source).MakeFeed()
		f.Close()
		rs := new_readsource(fs)
		// Persist refreshed source metadata only when parsing succeeded;
		// a parse error is otherwise swallowed and the previously stored
		// entries are served by the query below.
		if err == nil {
			new_feedsource_operator().update(rs)
			log.Println("feed-update", fs.Name)
		}
		// NOTE(review): the filtered entries are only logged here, never
		// returned — presumably readentry_filter persists/deduplicates them
		// as a side effect; verify against its implementation.
		rv := readentry_filter(new_readentries(v))
		log.Println("feedentries-filter", len(rv))
		sc = cache.StatusCode
	}
	// Serve the requested page from the local store, skipping count*page
	// entries (page is 0-based).
	rv, err := new_feedentry_operator().topn_by_feedsource(count*page, count, source)
	log.Println("unread-return(uri, page, count)", source, page, count, len(rv), err)
	return rv, err, sc
}
func feed_fetch(uri string) (v ReadSource, res []ReadEntry, err error) { cache, err := curl.NewCurl(backend_config().FeedSourceFolder).GetUtf8(uri) if err != nil { return } ext := curl.MimeToExt(cache.Mime) if ext != "xml" && ext != "atom+xml" && ext != "rss+xml" { return v, nil, new_backenderror(-1, "unsupported mime: "+cache.Mime) } else if cache.LocalUtf8 == "" { return v, nil, new_backenderror(-1, "unrecognized encoding: "+cache.Local) } f, err := os.Open(cache.LocalUtf8) if err != nil { return } var fv feed.FeedSource var fes []feed.FeedEntry fv, fes, err = feed.NewFeedMaker(f, uri).MakeFeed() f.Close() v = new_readsource(fv) res = new_readentries(fes) return }