// CrawlTags walks one page of the people cursor for the given polarity,
// fetches each person's recent media, and records tag co-occurrence:
// for each usable tag, the other tags appearing alongside it are appended
// to that tag's Beneficiaries (positive == true) or Victims (positive ==
// false) list, and the hashtag is saved.
//
// Up to the first 3 media items per person are scanned. Initially the
// first 5 tags are usable; each duplicate tag found is blanked out and
// the window widened by one so duplicates do not consume a slot.
//
// If the cursor is exhausted before a full page (utils.MAXREQUESTS
// entries), the session advances to its next step. The number of hashtags
// saved is folded into the session's size counter for this polarity.
func CrawlTags(s *session.Session, positive bool) {
	keys := s.GetPeopleCursor(positive, utils.MAXREQUESTS*s.GetState())
	s.IncrementState()
	total := 0
	size := 0
	for {
		key, err := keys.Next(nil)
		total += 1
		if err != nil {
			// Short page: the query is exhausted, so move to the next step.
			if total < utils.MAXREQUESTS {
				s.IncrementStep()
			}
			break // No further entities match the query.
		}
		media := http.GetMedia(s, key.StringID()).Data
		// NOTE(review): the -1 makes the loop below skip the final media
		// item; confirm this is intentional and not an off-by-one.
		captured := len(media) - 1
		for i := 0; i < 3 && i < captured; i++ {
			tagCount := len(media[i].Tags)

			// lim grows by one per duplicate encountered so duplicates
			// do not use up one of the 5 usable tag slots.
			lim := 5
			for j, tag := range media[i].Tags {
				if tag == "" {
					// Blanked earlier as a duplicate of a previous tag.
					continue
				}
				if j >= lim {
					break
				}
				h := s.Hashtag(tag)
				for k := 0; k < lim && k < tagCount; k++ {
					if j == k {
						continue
					}
					if tag == media[i].Tags[k] {
						// Duplicate of the current tag: blank it and widen
						// the window so a fresh tag can still be considered.
						lim += 1
						media[i].Tags[k] = ""
						continue
					}
					if positive {
						h.Beneficiaries = append(h.Beneficiaries, media[i].Tags[k])
					} else {
						// BUG FIX: previously appended to h.Beneficiaries,
						// which dropped prior Victims and mixed the lists.
						h.Victims = append(h.Victims, media[i].Tags[k])
					}
				}
				s.SaveHashtag(h)
				size += 1
			}
		}
	}
	s.IncrementSize(size, positive)
	s.StopProcessing()
}