// _crawlAmebloPost crawls a single Ameblo post URL, caches its images, and
// updates the post entity in place; the caller is responsible for persisting it.
func _crawlAmebloPost(req *wcg.Request, post *hplink.AmebloPost, members []hplink.Member) error {
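	// Fetch and parse the post page with a URL-cache-aware client backed by entities.URLCache.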
	crawler := crawlers.NewAmebloPostCrawler(cacheutil.NewURLCacheAwareClient(req, entities.URLCache))
	req.Logger.Infof("[Task.Crawlers.AmebloPosts] crawling URL: %s", post.URL)
	crawled, err := crawler.RunOnPostURL(post.URL)
	if err != nil {
		return err
	}
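	// Copy the crawled metadata onto the post entity and mark its contents as crawled.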
	post.NumLikes = crawled.NumLikes
	post.NumComments = crawled.NumComments
	post.NumReblogs = crawled.NumReblogs
	post.PostAt = crawled.PostAt
	post.Title = crawled.Title
	post.Theme = crawled.Theme
	post.IsContentsCrawled = true
	post.Images = make([]models.Image, len(crawled.ImageURLs))
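	// Cache the post images in parallel when an image cacher is available. A slot
	// whose caching fails keeps its zero value; the aggregate error from
	// ParallelSlice is discarded, so a bad image does not fail the whole post.
	// Without a cacher, fall back to storing the raw image URLs.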
	if cacher, err := cacheutil.NewImageCacher(req, entities.ImageCache); err == nil {
		iterator.ParallelSlice(crawled.ImageURLs, func(i int, url string) error {
			req.Logger.Infof("[Task.Crawlers.AmebloPosts] caching image URL %s (on %s)", url, post.URL)
			const retries = 5
			var err error
			var cache *models.ImageCache
			for j := 0; j < retries; j++ {
				if j != 0 {
					req.Logger.Infof("[Task.Crawlers.AmebloPosts] Retry image URL caching: %s (on %s) (%d)", url, post.URL, j)
				}
				cache, err = cacher.Cache(url)
				if err == nil {
					post.Images[i] = *cache.ToImage()
					return nil
				}
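				// Back off for 3 seconds before the next attempt.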
				lib.WaitAndEnsureAfter(lib.Now(), 3*time.Second)
			}
			req.Logger.Warnf("[Task.Crawlers.AmebloPosts] Failed to cache image URL %s (on %s) - %v", url, post.URL, err)
			return err
		})
	} else {
		for j, url := range crawled.ImageURLs {
			req.Logger.Infof("[Task.Crawlers.AmebloPosts] skip caching image URL %s (on %s)", url, post.URL)
			post.Images[j] = models.Image{
				URL: url,
			}
		}
	}
	// Update MemberKey only if not set.
	if post.MemberKey == "" {
		if m := _guessMember(req, post, members); m != nil {
			post.MemberKey = m.Key
		}
	}
	req.Logger.Infof("[Task.Crawlers.AmebloPosts] finished crawling %s", post.URL)
	return nil
}
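
// runTasksCrawlersAmebloPosts is the async task handler that selects a batch of
// Ameblo posts, crawls them in parallel, and persists the results.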
func runTasksCrawlersAmebloPosts(req *wcg.Request, task *models.AsyncAPITask) (*models.AsyncAPITaskProgress, error) {
	// Tuning notes for the `concurrency` and `num_posts_per_thread` values:
	//
	//   - The task should finish within 10 minutes.
	//   - We invoke the number of goroutines defined in `concurrency`, and each
	//     goroutine crawls up to `num_posts_per_thread` URLs.
	//   - Each goroutine waits at least `url_wait` seconds between URLs.
	//
	// With the defaults (3 goroutines x 30 posts x 2 seconds), a run updates up
	// to 90 URLs and spends roughly a minute of wait time per goroutine.
	//
	concurrency := configs.GetIntValue(req, "hplink.ameblo_crawler_post_concurrency", 3, 1, 10)
	perThread := configs.GetIntValue(req, "hplink.ameblo_crawler_num_posts_per_thread", 30, 1, 100)
	wait := configs.GetIntValue(req, "hplink.ameblo_crawler_url_wait", 2, 0, 10)
	selected, err := _selectAmebloPosts(req, perThread*concurrency)
	if err != nil {
		return nil, err
	}
	// prepare members for MemberKey detection
	p, err := entities.Member.Query().Execute(req)
	if err != nil {
		return nil, err
	}
	members := p.Data.([]hplink.Member)
	req.Logger.Infof("[Task.Crawlers.AmebloPosts] Crawling %d URLs (concurrency: %d)", len(selected), concurrency)
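	// Crawl the selected posts in parallel, pacing each goroutine so that
	// consecutive fetches start at least `wait` seconds apart. Per-post errors
	// are logged and the aggregate result is discarded, so one failure does not
	// abort the batch.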
	iterator.ParallelSliceWithMaxConcurrency(selected, concurrency, func(i int, post *hplink.AmebloPost) error {
		startTime := lib.Now()
		if err := _crawlAmebloPost(req, post, members); err != nil {
			req.Logger.Errorf("[Task.Crawlers.AmebloPosts] crawler failure on %s: %v", post.URL, err)
			return err
		}
		selected[i] = post
		lib.WaitAndEnsureAfter(startTime, time.Duration(wait)*time.Second)
		return nil
	})
	req.Logger.Infof("[Task.Crawlers.AmebloPosts] Updating datastore.")
	_, err = entities.AmebloPost.PutMulti().Update(req, selected)
	if err != nil {
		return nil, err
	}
	return nil, nil
}
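
// runTasksCrawlersAmebloEntryLists is the async task handler that crawls the
// Ameblo entry list page for each CrawlerSettings record and, when fl=true,
// returns the next page URLs as task progress so that crawling continues.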
func runTasksCrawlersAmebloEntryLists(req *wcg.Request, task *models.AsyncAPITask) (*models.AsyncAPITaskProgress, error) {
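	// Query parameters:
	//   fl - when "true", follow the "next" links found on each entry list page
	//   s  - explicit CrawlerSettings keys to crawl
	//   u  - entry list URLs to crawl, one per key given in `s`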
	const FollowLinkKey = "fl"
	const SettingsKey = "s"
	const URLKey = "u"

	var query = req.HTTPRequest().URL.Query()
	var settingsList []*hplink.CrawlerSettings
	var urlList []string
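	// Resolve which CrawlerSettings records to crawl.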
	if settingsKeys, ok := query[SettingsKey]; ok {
		_, list := entities.CrawlerSettings.GetMulti().Keys(settingsKeys...).MustList(req)
		settingsList = list.([]*hplink.CrawlerSettings)
	} else {
		q := entities.CrawlerSettings.Query().Filter("Type=", hplink.CrawlerSettingsTypeAmeblo)
		if pagination := q.MustExecute(req); pagination.Length() > 0 {
			list := pagination.Data.([]hplink.CrawlerSettings)
			settingsList = make([]*hplink.CrawlerSettings, len(list))
			for i := range list {
				settingsList[i] = &list[i]
			}
		}
	}

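	// Resolve the URL to crawl for each settings record.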
	var numList = len(settingsList)
	if urls, ok := query[URLKey]; ok {
		if numList != len(urls) {
			return nil, fmt.Errorf("list mismatch: found %d settings but %d URLs were specified", numList, len(urls))
		}
		urlList = urls
	} else {
		urlList = make([]string, numList)
		for i := range settingsList {
			urlList[i] = (*hplink.AmebloCrawlerSettings)(settingsList[i]).GetEntryListURL()
		}
	}

	startTime := lib.Now()
	nextParamSettingsKeys := make([]string, numList)
	nextParamURLs := make([]string, numList)
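	// Crawl each entry list in parallel, recording success or failure on each
	// settings record and collecting the next page URL (if any) to follow.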
	err := iterator.ParallelSlice(settingsList, func(i int, v *hplink.CrawlerSettings) error {
		next, err := _crawlAmebloEntryList(req, v, urlList[i])
		if err != nil {
			settingsList[i].Error = []byte(fmt.Sprintf("%v", err))
			settingsList[i].Status = hplink.CrawlerStatusFailure
			settingsList[i].LastRun = lib.Now()
			return err
		}
		settingsList[i].Error = nil
		settingsList[i].Status = hplink.CrawlerStatusSuccess
		settingsList[i].LastRun = lib.Now()
		if next != "" {
			nextParamSettingsKeys[i] = v.URL
			nextParamURLs[i] = next
		}
		return nil
	})
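	// Persist the status on every settings record, even when some crawls failed.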
	entities.CrawlerSettings.PutMulti().MustUpdate(req, settingsList)
	if err != nil {
		return nil, err
	}
	if req.Query(FollowLinkKey) != "true" {
		return nil, nil
	}
	// fl=true makes a recursive call to follow the next links: filter the empty
	// URLs out of nextParam* and return the rest as the next task parameters.
	var fixedNextParamSettingsKeys []string
	var fixedNextParamURLs []string
	var hasNext = false
	for i := range nextParamURLs {
		if nextParamURLs[i] != "" {
			hasNext = true
			fixedNextParamSettingsKeys = append(fixedNextParamSettingsKeys, nextParamSettingsKeys[i])
			fixedNextParamURLs = append(fixedNextParamURLs, nextParamURLs[i])
		}
	}
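	// Accumulate the number of crawled URLs across recursive task invocations.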
	var progress models.AsyncAPITaskProgress
	var lastProgress = task.LastProgress()
	if lastProgress == nil {
		progress.Current = len(urlList)
		progress.Total = 0
	} else {
		progress.Current = lastProgress.Current + len(urlList)
	}
	if hasNext {
		progress.Next = url.Values{
			FollowLinkKey: []string{"true"},
			SettingsKey:   fixedNextParamSettingsKeys,
			URLKey:        fixedNextParamURLs,
		}
		wait := configs.GetIntValue(req, "hplink.ameblo_crawler_url_wait", 2, 0, 10)
		lib.WaitAndEnsureAfter(startTime, time.Duration(wait)*time.Second)
	} else {
		req.Logger.Infof("No more URLs need to be crawled.")
	}
	return &progress, nil
}