Пример #1
0
// Query runs a full-text search for term, scoped to every feed belonging
// to the tag, via the given search provider. The optional paging values
// are forwarded to pagingLimit (presumably limit and offset — confirm).
// Errors are recorded on the tag via t.Err; on any error the returned
// slice is empty.
func (t *Tag) Query(term string, sp content.SearchProvider, paging ...int) (ua []content.UserArticle) {
	if t.HasErr() {
		return
	}

	if err := t.Validate(); err != nil {
		t.Err(err)
		return
	}

	var err error

	feeds := t.AllFeeds()
	if t.HasErr() {
		return
	}

	// Collect the ids of the tag's feeds to scope the search.
	// Use zero length with capacity len(feeds): the previous
	// make([]data.FeedId, len(feeds)) + append left len(feeds)
	// zero-valued ids at the front of the slice.
	ids := make([]data.FeedId, 0, len(feeds))
	for i := range feeds {
		ids = append(ids, feeds[i].Data().Id)
	}

	limit, offset := pagingLimit(paging)
	ua, err = sp.Search(term, t.User(), ids, limit, offset)
	t.Err(err)

	return
}
Пример #2
0
// Query searches the user's articles for term using the supplied search
// provider. The optional paging values are forwarded to pagingLimit.
// An empty feed-id list asks the provider to search across all feeds.
// Errors are recorded on the user via u.Err.
func (u *User) Query(term string, sp content.SearchProvider, paging ...int) (ua []content.UserArticle) {
	// Skip the search entirely if the user already carries an error.
	if u.HasErr() {
		return
	}

	if err := u.Validate(); err != nil {
		u.Err(err)
		return
	}

	limit, offset := pagingLimit(paging)

	result, err := sp.Search(term, u, []data.FeedId{}, limit, offset)
	u.Err(err)

	return result
}
Пример #3
0
// main reads the readeef config, connects to the database, constructs the
// configured search provider (Elastic, or Bleve by default) and indexes
// every feed's articles. Any failure exits the process with a message.
func main() {
	confpath := flag.String("config", "", "readeef config path")
	flag.Parse()

	cfg, err := readeef.ReadConfig(*confpath)
	if err != nil {
		exitWithError(fmt.Sprintf("Error reading config from path '%s': %v", *confpath, err))
	}

	logger := readeef.NewLogger(cfg)

	repo, err := repo.New(cfg.DB.Driver, cfg.DB.Connect, logger)
	if err != nil {
		exitWithError(fmt.Sprintf("Error connecting to database: %v", err))
	}

	// Pick the search provider; anything other than "elastic" (including
	// an explicit "bleve" or an empty value) falls back to Bleve.
	var sp content.SearchProvider
	if cfg.Content.SearchProvider == "elastic" {
		sp, err = search.NewElastic(cfg.Content.ElasticURL, cfg.Content.SearchBatchSize, logger)
		if err != nil {
			exitWithError(fmt.Sprintf("Error initializing Elastic search: %v\n", err))
		}
	} else {
		sp, err = search.NewBleve(cfg.Content.BlevePath, cfg.Content.SearchBatchSize, logger)
		if err != nil {
			exitWithError(fmt.Sprintf("Error initializing Bleve search: %v\n", err))
		}
	}

	logger.Infoln("Getting all articles")

	if err := sp.IndexAllFeeds(repo); err != nil {
		exitWithError(fmt.Sprintf("Error indexing all articles: %v", err))
	}
}
Пример #4
0
// Query searches the articles of this single feed for term using the
// supplied search provider. The optional paging values are forwarded to
// pagingLimit. A zero feed id is rejected as a validation error.
// Errors are recorded on the feed via uf.Err.
func (uf *UserFeed) Query(term string, sp content.SearchProvider, paging ...int) (ua []content.UserArticle) {
	// Do nothing if an earlier operation already failed.
	if uf.HasErr() {
		return
	}

	if err := uf.Validate(); err != nil {
		uf.Err(err)
		return
	}

	feedId := uf.Data().Id
	if feedId == 0 {
		uf.Err(content.NewValidationError(errors.New("Invalid feed id")))
		return
	}

	limit, offset := pagingLimit(paging)

	// Scope the search to exactly this feed.
	result, err := sp.Search(term, uf.User(), []data.FeedId{feedId}, limit, offset)
	uf.Err(err)

	return result
}
Пример #5
0
// RegisterControllers wires up the readeef web front-end on the given
// dispatcher: repository, article/feed processors, search provider,
// content extractor, thumbnailer, feed monitors, hubbub support and all
// HTTP controllers. It returns an error when a required component fails
// to initialize; search-provider failures are only logged, leaving sp
// nil and search disabled.
func RegisterControllers(config readeef.Config, dispatcher *webfw.Dispatcher, logger webfw.Logger) error {
	repo, err := repo.New(config.DB.Driver, config.DB.Connect, logger)
	if err != nil {
		return err
	}

	// Feature flags; several are flipped on later as the corresponding
	// components initialize successfully.
	capabilities := capabilities{
		I18N:       len(dispatcher.Config.I18n.Languages) > 1,
		Popularity: len(config.Popularity.Providers) > 0,
	}

	// Article processors transform stored article content before serving.
	var ap []content.ArticleProcessor
	for _, p := range config.Content.ArticleProcessors {
		switch p {
		case "relative-url":
			ap = append(ap, contentProcessor.NewRelativeUrl(logger))
		case "proxy-http":
			template := config.Content.ProxyHTTPURLTemplate

			// The proxy processor is only enabled when a URL template is set.
			if template != "" {
				p, err := contentProcessor.NewProxyHTTP(logger, template)
				if err != nil {
					return fmt.Errorf("Error initializing Proxy HTTP article processor: %v", err)
				}
				ap = append(ap, p)
				capabilities.ProxyHTTP = true
			}
		case "insert-thumbnail-target":
			ap = append(ap, contentProcessor.NewInsertThumbnailTarget(logger))
		}
	}

	repo.ArticleProcessors(ap)

	if err := initAdminUser(repo, []byte(config.Auth.Secret)); err != nil {
		return err
	}

	// Filter the configured middleware list. NOTE(review): the empty first
	// case silently drops "I18N", "Static", "Url" and "Sitemap", and
	// "Session" is kept only when ProxyHTTP is enabled — presumably these
	// are handled elsewhere; verify against the dispatcher setup.
	mw := make([]string, 0, len(dispatcher.Config.Dispatcher.Middleware))
	for _, m := range dispatcher.Config.Dispatcher.Middleware {
		switch m {
		case "I18N", "Static", "Url", "Sitemap":
		case "Session":
			if capabilities.ProxyHTTP {
				mw = append(mw, m)
			}
		default:
			mw = append(mw, m)
		}
	}

	dispatcher.Config.Dispatcher.Middleware = mw

	dispatcher.Context.SetGlobal(readeef.CtxKey("config"), config)
	dispatcher.Context.SetGlobal(context.BaseCtxKey("readeefConfig"), config)
	dispatcher.Context.SetGlobal(readeef.CtxKey("repo"), repo)

	fm := readeef.NewFeedManager(repo, config, logger)

	// Feed-parser processors run over freshly fetched feed content.
	var processors []parser.Processor
	for _, p := range config.FeedParser.Processors {
		switch p {
		case "relative-url":
			processors = append(processors, processor.NewRelativeUrl(logger))
		case "proxy-http":
			template := config.FeedParser.ProxyHTTPURLTemplate

			if template != "" {
				p, err := processor.NewProxyHTTP(logger, template)
				if err != nil {
					return fmt.Errorf("Error initializing Proxy HTTP processor: %v", err)
				}
				processors = append(processors, p)
				capabilities.ProxyHTTP = true
			}
		case "cleanup":
			processors = append(processors, processor.NewCleanup(logger))
		case "top-image-marker":
			processors = append(processors, processor.NewTopImageMarker(logger))
		}
	}

	fm.ParserProcessors(processors)

	// Search provider: "elastic", or Bleve for anything else. Failures are
	// non-fatal — they are logged and sp stays nil.
	var sp content.SearchProvider

	switch config.Content.SearchProvider {
	case "elastic":
		if sp, err = search.NewElastic(config.Content.ElasticURL, config.Content.SearchBatchSize, logger); err != nil {
			logger.Printf("Error initializing Elastic search: %v\n", err)
		}
	case "bleve":
		fallthrough
	default:
		if sp, err = search.NewBleve(config.Content.BlevePath, config.Content.SearchBatchSize, logger); err != nil {
			logger.Printf("Error initializing Bleve search: %v\n", err)
		}
	}

	if sp != nil {
		if sp.IsNewIndex() {
			// Index existing articles in the background for a brand-new index.
			// NOTE(review): the goroutine's error is discarded and nothing
			// waits for it — confirm that is acceptable here.
			go func() {
				sp.IndexAllFeeds(repo)
			}()
		}
	}

	// Content extractor: "readability" or (default) "goose". Unlike the
	// search provider, extractor failures abort registration.
	var ce content.Extractor

	switch config.Content.Extractor {
	case "readability":
		if ce, err = extractor.NewReadability(config.Content.ReadabilityKey); err != nil {
			return fmt.Errorf("Error initializing Readability extractor: %v\n", err)
		}
	case "goose":
		fallthrough
	default:
		if ce, err = extractor.NewGoose(dispatcher.Config.Renderer.Dir); err != nil {
			return fmt.Errorf("Error initializing Goose extractor: %v\n", err)
		}
	}

	if ce != nil {
		capabilities.Extractor = true
	}

	// Thumbnailer: "extract" uses the content extractor; anything else
	// falls back to description-based thumbnails.
	var t content.Thumbnailer
	switch config.Content.Thumbnailer {
	case "extract":
		if t, err = thumbnailer.NewExtract(ce, logger); err != nil {
			return fmt.Errorf("Error initializing Extract thumbnailer: %v\n", err)
		}
	case "description":
		fallthrough
	default:
		t = thumbnailer.NewDescription(logger)
	}

	// Feed monitors react to feed updates; unread tracking is always on,
	// the others are added only when their component is available.
	monitors := []content.FeedMonitor{monitor.NewUnread(repo, logger)}
	for _, m := range config.FeedManager.Monitors {
		switch m {
		case "index":
			if sp != nil {
				monitors = append(monitors, monitor.NewIndex(sp, logger))
				capabilities.Search = true
			}
		case "thumbnailer":
			if t != nil {
				monitors = append(monitors, monitor.NewThumbnailer(t, logger))
			}
		}
	}

	webSocket := NewWebSocket(fm, sp, ce, capabilities)
	dispatcher.Handle(webSocket)

	// The web socket also acts as a feed monitor to push updates to clients.
	monitors = append(monitors, webSocket)

	if config.Hubbub.CallbackURL != "" {
		hubbub := readeef.NewHubbub(repo, config, logger, dispatcher.Pattern,
			fm.RemoveFeedChannel())
		if err := hubbub.InitSubscriptions(); err != nil {
			return fmt.Errorf("Error initializing hubbub subscriptions: %v", err)
		}

		hubbub.FeedMonitors(monitors)
		fm.Hubbub(hubbub)
	}

	fm.FeedMonitors(monitors)

	fm.Start()

	nonce := readeef.NewNonce()

	controllers := []webfw.Controller{
		NewAuth(capabilities),
		NewFeed(fm, sp),
		NewArticle(config, ce),
		NewUser(),
		NewUserSettings(),
		NewNonce(nonce),
	}

	if fm.Hubbub() != nil {
		controllers = append(controllers, NewHubbubController(fm.Hubbub(), config.Hubbub.RelativePath,
			fm.AddFeedChannel(), fm.RemoveFeedChannel()))
	}

	// Optional API emulators for third-party clients.
	for _, e := range config.API.Emulators {
		switch e {
		case "tt-rss":
			controllers = append(controllers, NewTtRss(fm, sp))
		case "fever":
			controllers = append(controllers, NewFever())
		}
	}

	for _, c := range controllers {
		dispatcher.Handle(c)
	}

	middleware.InitializeDefault(dispatcher)
	dispatcher.RegisterMiddleware(readeef.Auth{Pattern: dispatcher.Pattern, Nonce: nonce, IgnoreURLPrefix: config.Auth.IgnoreURLPrefix})

	dispatcher.Renderer = renderer.NewRenderer(dispatcher.Config.Renderer.Dir,
		dispatcher.Config.Renderer.Base)

	// Custom delimiters so server templates don't clash with client-side {{ }}.
	dispatcher.Renderer.Delims("{%", "%}")

	// Periodically expire old nonces. NOTE(review): this goroutine never
	// terminates, and a 45-second max age swept every 5 minutes looks
	// inconsistent — confirm the intended values.
	go func() {
		for {
			select {
			case <-time.After(5 * time.Minute):
				nonce.Clean(45 * time.Second)
			}
		}
	}()

	return nil
}
Пример #6
0
// getFeedArticles resolves the pseudo-feed identifier id — "favorite",
// "all", "popular:all", "popular:tag:...", "popular:<feedId>",
// "search:...", "tag:..." or a plain numeric feed id — to an article
// repository/sorting pair for the given user, then loads the matching
// articles and paging metadata into resp. limit is capped at 200.
// sp may be nil; "search:" ids then skip the search branch and fall
// through (presumably ending up as an "Unknown feed id" error — verify).
func getFeedArticles(user content.User, sp content.SearchProvider,
	id string, minId, maxId data.ArticleId, limit int, offset int, olderFirst bool,
	unreadOnly bool) (resp responseError) {

	resp = newResponse()

	if limit > 200 {
		limit = 200
	}

	// as/ar are chosen per id below; ua receives the resulting articles.
	var as content.ArticleSorting
	var ar content.ArticleRepo
	var ua []content.UserArticle

	o := data.ArticleQueryOptions{Limit: limit, Offset: offset, UnreadOnly: unreadOnly, UnreadFirst: true}

	// NOTE(review): maxId is stored in AfterId here, while the id-range
	// query below uses BeforeId: maxId + 1 — confirm AfterId is the
	// intended bound for the article query.
	if maxId > 0 {
		o.AfterId = maxId
		resp.val["MaxId"] = maxId
	}

	if id == "favorite" {
		o.FavoriteOnly = true
		ar = user
		as = user
	} else if id == "all" {
		ar = user
		as = user
	} else if strings.HasPrefix(id, "popular:") {
		// Popular view: score-ordered articles from the last five days.
		o.IncludeScores = true
		o.HighScoredFirst = true
		o.BeforeDate = time.Now()
		o.AfterDate = time.Now().AddDate(0, 0, -5)

		if id == "popular:all" {
			ar = user
			as = user
		} else if strings.HasPrefix(id, "popular:tag:") {
			// id[12:] strips the "popular:tag:" prefix.
			tag := user.Repo().Tag(user)
			tag.Data(data.Tag{Value: data.TagValue(id[12:])})

			ar = tag
			as = tag
		} else {
			var f content.UserFeed

			// id[8:] strips the "popular:" prefix, leaving a numeric feed id.
			var feedId int64
			feedId, resp.err = strconv.ParseInt(id[8:], 10, 64)

			if resp.err != nil {
				resp.err = errors.New("Unknown feed id " + id)
				return
			}

			if f = user.FeedById(data.FeedId(feedId)); f.HasErr() {
				/* TODO: non-fatal error */
				resp.err = f.Err()
				return
			}

			ar = f
			as = f
		}
	} else if strings.HasPrefix(id, "search:") && sp != nil {
		var query string
		// id[7:] strips the "search:" prefix; the remainder is either
		// "tag:<value>:<query>" or "<feedId>:<query>" (colons in the query
		// are rejoined below).
		id = id[7:]
		parts := strings.Split(id, ":")

		if parts[0] == "tag" {
			id = strings.Join(parts[:2], ":")
			query = strings.Join(parts[2:], ":")
		} else {
			id = strings.Join(parts[:1], ":")
			query = strings.Join(parts[1:], ":")
		}

		sp.SortingByDate()
		if olderFirst {
			sp.Order(data.AscendingOrder)
		} else {
			sp.Order(data.DescendingOrder)
		}

		// Search results bypass ar/as; ua is filled directly.
		ua, resp.err = performSearch(user, sp, query, id, limit, offset)
	} else if strings.HasPrefix(id, "tag:") {
		// id[4:] strips the "tag:" prefix.
		tag := user.Repo().Tag(user)
		tag.Data(data.Tag{Value: data.TagValue(id[4:])})

		as = tag
		ar = tag
	} else {
		// Plain numeric feed id.
		var f content.UserFeed

		var feedId int64
		feedId, resp.err = strconv.ParseInt(id, 10, 64)

		if resp.err != nil {
			resp.err = errors.New("Unknown feed id " + id)
			return
		}

		if f = user.FeedById(data.FeedId(feedId)); f.HasErr() {
			/* TODO: non-fatal error */
			resp.err = f.Err()
			return
		}

		as = f
		ar = f
	}

	// Apply date ordering to whichever repo was selected (nil after the
	// search branch, which handles its own ordering).
	if as != nil {
		as.SortingByDate()
		if olderFirst {
			as.Order(data.AscendingOrder)
		} else {
			as.Order(data.DescendingOrder)
		}
	}

	if ar != nil {
		ua = ar.Articles(o)

		// When a minId is given, also report which ids in (minId-1, maxId+1)
		// are unread and which are favorites.
		if minId > 0 {
			qo := data.ArticleIdQueryOptions{BeforeId: maxId + 1, AfterId: minId - 1}

			qo.UnreadOnly = true
			resp.val["UnreadIds"] = ar.Ids(qo)

			qo.UnreadOnly = false
			qo.FavoriteOnly = true
			resp.val["FavoriteIds"] = ar.Ids(qo)

			resp.val["MinId"] = minId
		}

		// Surface any error the repo accumulated while loading.
		if e, ok := ar.(content.Error); ok && e.HasErr() {
			resp.err = e.Err()
		}
	}

	resp.val["Articles"] = ua
	resp.val["Limit"] = limit
	resp.val["Offset"] = offset

	return
}