// fetchBlog resolves the blog identified by the "blog_id" request parameter.
// When includePosts is true it also builds a paginated page of the blog's
// posts (excluding new and draft entries). On failure the 404/500 response is
// written to res before a non-nil error is returned.
func fetchBlog(res *wcg.Response, req *wcg.Request, includePosts bool) (*models.Blog, *supports.Page, error) {
	// Resolve the blog and the default query for its posts.
	id := req.Param("blog_id")
	driver := models.NewBlogDriver(gae.NewContext(req), req.Logger)
	blog, query, err := driver.PostQuery(id)
	if err != nil {
		if err == models.ErrBlogNotFound {
			res.WriteHeader(404)
			res.WriteString("Not found")
			res.End()
			return nil, nil, err
		}
		res.RenderInternalError(err.Error())
		return nil, nil, err
	}
	if !includePosts {
		return blog, supports.EmptyPage, nil
	}
	query = query.Filter("IsNew =", false).Filter("IsDraft =", false).Order("-PostDate").Order("-UpdatedAt")
	per_page := wcg.ParseInt(req.Query("num"), AppConfig.DefaultPostsFetched, 0, AppConfig.MaxPostsFetched)
	current := wcg.ParseInt(req.Query("p"), 0, 0, wcg.ParseIntMax)
	page, err := supports.NewPage(current, per_page, query)
	if err != nil {
		res.RenderInternalError("Post pagination error: %v", err)
		return nil, nil, err
	}
	return blog, page, nil
}
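
// The sketch below is not part of the original code; it only illustrates,
// under assumptions, how fetchBlog might be consumed. Because fetchBlog writes
// the 404/500 response itself, a caller only needs to stop on a non-nil error.
// The handler name getBlogWithPosts and the use of res.WriteJson for this
// payload are assumptions, not confirmed API usage.
func getBlogWithPosts(res *wcg.Response, req *wcg.Request) {
	blog, page, err := fetchBlog(res, req, true)
	if err != nil {
		// The error response has already been rendered by fetchBlog.
		return
	}
	res.WriteJson(map[string]interface{}{
		"blog":  blog,
		"posts": page,
	})
}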

// queryShows runs q against one page of shows (12 per page, selected by the
// "page" query parameter) and returns them together with the current,
// previous and next page URLs.
func queryShows(res *wcg.Response, req *wcg.Request, q *datastore.Query) (*showQueryResult, error) {
	var appCtx = lib.NewAppContextFromRequest(req)
	var showList []event.Show
	var basePath = req.HttpRequest().URL.Path
	per_page := 12 // Restrict to 12 due to view rendering.
	page := wcg.ParseInt(req.Query("page"), 0, 0, wcg.ParseIntMax)
	if _, err := q.Offset(page*per_page).Limit(per_page).GetAll(&showList); err != nil {
		return nil, err
	}
	if showList == nil {
		return &showQueryResult{
			Shows: make([]EventShow, 0),
		}, nil
	}
	list, err := NewEventShowList(appCtx, showList)
	if err != nil {
		return nil, err
	}
	p := &showQueryResult{
		Shows:   list,
		Current: fmt.Sprintf("%s?page=%d&n=%d", basePath, page, per_page),
	}
	// prev url
	if page > 0 {
		p.Previous = fmt.Sprintf("%s?page=%d&n=%d", basePath, page-1, per_page)
	}
	// next url
	if len(p.Shows) == per_page {
		p.Next = fmt.Sprintf("%s?page=%d&n=%d", basePath, page+1, per_page)
	}
	return p, nil
}
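
// Hypothetical sketch (not from the original code): a thin handler around
// queryShows. Unlike fetchBlog above, queryShows does not write the error
// response itself, so the caller is responsible for it; lib.InternalError and
// res.WriteJson are used here the same way they appear in indexSpecifiedMember
// below. How the *datastore.Query is constructed is codebase-specific and left
// out.
func listShows(res *wcg.Response, req *wcg.Request, q *datastore.Query) {
	result, err := queryShows(res, req, q)
	if err != nil {
		lib.InternalError(res, req, err)
		return
	}
	res.WriteJson(result)
}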

// indexSpecifiedMember crawls the Ameblo entry-list pages of the member named
// by the "member" request parameter (bounded by the "n" parameter), stores the
// discovered entries via updateIndexes, invalidates the member's cached
// history, and responds with the indexed entry URLs as JSON.
func indexSpecifiedMember(res *wcg.Response, req *wcg.Request, app *App) {
	var appCtx = lib.NewAppContextFromRequest(req)
	member, ok := app.Members[req.Param("member")]
	if !ok {
		lib.NotFound(res, req)
		return
	}
	num := wcg.ParseInt(req.Param("n"), 0, 0, wcg.ParseIntMax)
	if num == 0 {
		num = wcg.ParseIntMax
	}
	// Crawling
	crawler := ameblo.NewCrawler(appCtx.NewHttpClient())
	prefix := strings.TrimSuffix(member.BlogUrl, ".html") // xxxx.html => xxxx-{num}.html
	entries := make([]*ameblo.AmebloEntry, 0)
	for i := 1; i < num; i++ {
		url := fmt.Sprintf("%s-%d.html", prefix, i)
		req.Logger.Info("Indexing from %s ... ", url)
		newentries, err := crawler.CrawlEntryList(url)
		if err != nil {
			lib.InternalError(res, req, err)
			return
		}
		// Each entry-list page is expected to contain at most 20 entries.
		if len(newentries) > 20 {
			panic(fmt.Errorf("unexpected number of entries (%d) returned during indexing", len(newentries)))
		}
		if len(newentries) == 0 {
			break
		}
		// A short page is the last page.
		if len(newentries) < 20 {
			entries = append(entries, newentries...)
			break
		}
		// Stop when the newly crawled page repeats the previous one.
		if len(entries) > 0 && entries[len(entries)-1].Url == newentries[len(newentries)-1].Url {
			break
		}
		entries = append(entries, newentries...)
	}
	// Save and return results
	results := make([]string, 0)
	for _, ent := range entries {
		ent.Owner = member.Name
		results = append(results, ent.Url)
	}
	if err := updateIndexes(appCtx, entries); err != nil {
		req.Logger.Error("Failed to update the entry: %v", err)
		lib.InternalError(res, req, err) // stop here; the error response has been written.
	} else {
		time.Sleep(10 * time.Second) // TODO: wait until all indexes are updated in the datastore.
		mc := appCtx.NewMemcacheDriver()
		mckey := fmt.Sprintf(MC_KEY_HISTORY, app.Key, member.Name)
		mc.Delete(mckey)
		res.WriteJson(results)
	}
}

// queryPosts returns one page of blog posts ordered by publish time, paginated
// by the "page" and "n" query parameters (pass per_page == 0 to honor "n").
// Unless the requester is an admin with is_admin=true, only published,
// non-draft, non-hidden posts are returned. Previous/Next URLs are filled in
// when the neighbouring pages are expected to exist.
func queryPosts(res *wcg.Response, req *wcg.Request, per_page int) (*postQueryResult, error) {
	var list []blog.Post
	var basePath = req.HttpRequest().URL.Path
	is_admin := (lib.GetUserKind(req) == lib.Admin) && (req.Query("is_admin") == "true")
	if per_page == 0 {
		per_page = wcg.ParseInt(req.Query("n"), 5, 0, 20) // default 5, max 20
	}
	page := wcg.ParseInt(req.Query("page"), 0, 0, wcg.ParseIntMax)
	q := NewPostDriver(lib.NewAppContextFromRequest(req)).NewQuery()
	q = q.Order("-PublishAt").Order("-CreatedAt")
	if !is_admin {
		q = q.Filter("PublishAt <=", time.Now()).Filter("IsDraft =", false).Filter("IsHidden =", false)
	}
	_, err := q.Offset(page*per_page).Limit(per_page).GetAll(&list)
	if err != nil {
		return nil, err
	}
	if list == nil {
		list = make([]blog.Post, 0)
	}
	p := &postQueryResult{
		Posts: list,
	}
	// prev url
	if page > 0 {
		p.Previous = fmt.Sprintf("%s?page=%d&n=%d", basePath, page-1, per_page)
		if is_admin {
			p.Previous = fmt.Sprintf("%s&is_admin=true", p.Previous)
		}
	}
	// next url
	if len(list) == per_page {
		p.Next = fmt.Sprintf("%s?page=%d&n=%d", basePath, page+1, per_page)
		if is_admin {
			p.Next = fmt.Sprintf("%s&is_admin=true", p.Next)
		}
	}
	return p, nil
}
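
// Hypothetical sketch (not from the original code): rendering one page of
// posts as JSON. Passing a non-zero per_page (10 here) overrides the "n"
// query parameter inside queryPosts. The handler name and the error rendering
// via lib.InternalError are assumptions borrowed from the surrounding
// functions.
func listRecentPosts(res *wcg.Response, req *wcg.Request) {
	result, err := queryPosts(res, req, 10)
	if err != nil {
		lib.InternalError(res, req, err)
		return
	}
	res.WriteJson(result)
}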