func runTasksCrawlersAmebloPosts(req *wcg.Request, task *models.AsyncAPITask) (*models.AsyncAPITaskProgress, error) {
	// Tuning conditions to set proper `perThread` and `concurrency` values:
	//
	//   - The task should finish within 10 minutes.
	//   - We invoke the number of goroutines defined in `concurrency`.
	//   - Each goroutine accesses one URL per `wait` seconds
	//     (configured by `hplink.ameblo_crawler_url_wait`, default: 2).
	//
	// So each run updates at most perThread*concurrency URLs
	// (90 with the defaults), which fits within the 10-minute budget.
	concurrency := configs.GetIntValue(req, "hplink.ameblo_crawler_post_concurrency", 3, 1, 10)
	perThread := configs.GetIntValue(req, "hplink.ameblo_crawler_num_posts_per_thread", 30, 1, 100)
	wait := configs.GetIntValue(req, "hplink.ameblo_crawler_url_wait", 2, 0, 10)
	selected, err := _selectAmebloPosts(req, perThread*concurrency)
	if err != nil {
		return nil, err
	}
	// Prepare members for MemberKey detection.
	p, err := entities.Member.Query().Execute(req)
	if err != nil {
		return nil, err
	}
	members := p.Data.([]hplink.Member)
	req.Logger.Infof("[Task.Crawlers.AmebloPosts] Crawling %d URLs (concurrency: %d)", len(selected), concurrency)
	// Per-post errors are logged inside the closure; we still persist
	// whatever was crawled successfully.
	iterator.ParallelSliceWithMaxConcurrency(selected, concurrency, func(i int, post *hplink.AmebloPost) error {
		startTime := lib.Now()
		if err := _crawlAmebloPost(req, post, members); err != nil {
			req.Logger.Errorf("Crawler Failure: %v", err)
			return err
		}
		selected[i] = post
		// Pace this goroutine: ensure at least `wait` seconds per URL.
		lib.WaitAndEnsureAfter(startTime, time.Duration(wait)*time.Second)
		return nil
	})
	req.Logger.Infof("[Task.Crawlers.AmebloPosts] Updating datastore.")
	_, err = entities.AmebloPost.PutMulti().Update(req, selected)
	if err != nil {
		return nil, err
	}
	return nil, nil
}
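
// The pacing pattern above bounds per-goroutine throughput: each worker
// records a start time, crawls one URL, then blocks until at least `wait`
// seconds have elapsed since that start, so a fast crawl does not speed up
// the request rate. A minimal stdlib-only sketch of the assumed
// lib.WaitAndEnsureAfter semantics (hypothetical helper, not part of the
// lib package; kept here purely for illustration):
func _waitAndEnsureAfterSketch(start time.Time, d time.Duration) {
	// Sleep only for whatever portion of d has not already elapsed.
	if remaining := d - time.Since(start); remaining > 0 {
		time.Sleep(remaining)
	}
}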
func runTasksCrawlersAmebloEntryLists(req *wcg.Request, task *models.AsyncAPITask) (*models.AsyncAPITaskProgress, error) {
	const FollowLinkKey = "fl"
	const SettingsKey = "s"
	const URLKey = "u"
	var query = req.HTTPRequest().URL.Query()
	var settingsList []*hplink.CrawlerSettings
	var urlList []string
	if settingsKeys, ok := query[SettingsKey]; ok {
		_, _list := entities.CrawlerSettings.GetMulti().Keys(settingsKeys...).MustList(req)
		settingsList = _list.([]*hplink.CrawlerSettings)
	} else {
		query := entities.CrawlerSettings.Query().Filter("Type=", hplink.CrawlerSettingsTypeAmeblo)
		if pagination := query.MustExecute(req); pagination.Length() > 0 {
			list := pagination.Data.([]hplink.CrawlerSettings)
			settingsList = make([]*hplink.CrawlerSettings, len(list))
			for i := range list {
				settingsList[i] = &list[i]
			}
		}
	}
	var numList = len(settingsList)
	urlList = make([]string, numList)
	if urls, ok := query[URLKey]; ok {
		if numList != len(urls) {
			return nil, fmt.Errorf("list mismatch - found %d settings but %d urls are specified", numList, len(urls))
		}
		urlList = urls
	} else {
		for i := range settingsList {
			urlList[i] = (*hplink.AmebloCrawlerSettings)(settingsList[i]).GetEntryListURL()
		}
	}
	startTime := lib.Now()
	nextParamSettingsKeys := make([]string, numList)
	nextParamURLs := make([]string, numList)
	err := iterator.ParallelSlice(settingsList, func(i int, v *hplink.CrawlerSettings) error {
		next, err := _crawlAmebloEntryList(req, v, urlList[i])
		if err != nil {
			settingsList[i].Error = []byte(fmt.Sprintf("%v", err))
			settingsList[i].Status = hplink.CrawlerStatusFailure
			settingsList[i].LastRun = lib.Now()
			return err
		}
		settingsList[i].Error = nil
		settingsList[i].Status = hplink.CrawlerStatusSuccess
		settingsList[i].LastRun = lib.Now()
		if next != "" {
			nextParamSettingsKeys[i] = v.URL
			nextParamURLs[i] = next
		}
		return nil
	})
	// Persist per-settings status (including failures) before bailing out.
	entities.CrawlerSettings.PutMulti().MustUpdate(req, settingsList)
	if err != nil {
		return nil, err
	}
	if req.Query(FollowLinkKey) != "true" {
		return nil, nil
	}
	// fl=true makes a recursive call to follow next links.
	// Drop empty URLs from nextParam* and return the rest for the recursive call.
	var fixedNextParamSettingsKeys []string
	var fixedNextParamURLs []string
	var hasNext = false
	for i := range nextParamURLs {
		if nextParamURLs[i] != "" {
			hasNext = true
			fixedNextParamSettingsKeys = append(fixedNextParamSettingsKeys, nextParamSettingsKeys[i])
			fixedNextParamURLs = append(fixedNextParamURLs, nextParamURLs[i])
		}
	}
	var progress models.AsyncAPITaskProgress
	var lastProgress = task.LastProgress()
	if lastProgress == nil {
		progress.Current = len(urlList)
		progress.Total = 0
	} else {
		progress.Current = lastProgress.Current + len(urlList)
	}
	if hasNext {
		progress.Next = url.Values{
			FollowLinkKey: []string{"true"},
			SettingsKey:   fixedNextParamSettingsKeys,
			URLKey:        fixedNextParamURLs,
		}
		wait := configs.GetIntValue(req, "hplink.ameblo_crawler_url_wait", 2, 0, 10)
		lib.WaitAndEnsureAfter(startTime, time.Duration(wait)*time.Second)
	} else {
		req.Logger.Infof("No more URLs need to be crawled.")
	}
	return &progress, nil
}
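
// The follow-link mechanism assumes the async-task runner re-invokes this
// handler with progress.Next encoded as the next request's query string,
// so "s" and "u" arrive aligned index-by-index (the handler above rejects
// the run if their lengths differ). A sketch of that round trip
// (hypothetical helper for illustration only):
func _nextQuerySketch(settingsKeys, urls []string) url.Values {
	// _nextQuerySketch(k, u).Encode() yields "fl=true&s=...&u=...",
	// the same shape this handler parses on its next invocation.
	return url.Values{
		"fl": []string{"true"},
		"s":  settingsKeys,
		"u":  urls,
	}
}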