Example No. 1
func process(s *session.Session, posts *http.Posts, i int, grp chan *group) {
	for i >= 0 {

		// I don't really have to parse here, forgot kosher implementation.
		// See http/json
		id := strings.Split(posts.Data[i].Id, "_")[1]
		if posts.Data[i].User_has_liked || s.CheckCache(id) || http.IsFollowing(s, id) {
			// Try to add to channel and stop if done
			grp <- &group{
				id: "continue",
			}
			i--
			continue
		}

		// Fetch the full user profile for scoring
		user := http.GetUser(s, id)

		// Create person to get value
		person := session.Person{
			Followers: float64(user.Data.Counts.Follows),
			Following: float64(user.Data.Counts.Followed_by),
			Posts:     float64(user.Data.Counts.Media),
		}

		// Forget sigmoid for now
		grp <- &group{
			id:    posts.Data[i].Id,
			value: person.Followers / person.Following, // person.Sigmoid(session.GetTheta()) would be the ideal way
			user:  posts.Data[i].User.Username,
		}

		i--
	}
}
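
The comment above notes that person.Sigmoid(session.GetTheta()) would be the ideal scoring. As a rough, hypothetical sketch (the real session.Person.Sigmoid is not shown here), assuming theta is ordered [bias, following, followers, posts] like the start vector in the LogisticRegression example further down, the score could look like:

func sigmoidScore(p session.Person, theta []float64) float64 {
	// Weighted sum of the person's features plus the bias term
	z := theta[0] + theta[1]*p.Following + theta[2]*p.Followers + theta[3]*p.Posts
	// Squash into (0, 1); higher means a more promising account
	return 1.0 / (1.0 + math.Exp(-z))
}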
Example No. 2
func BasicDecision(s *session.Session, follows int, likes int, intervals int, done chan bool) {
	// Round robin the hashtags. Allows for manual weighting, e.g. [#dog,#dog,#cute]
	posts := http.GetPosts(s, s.GetHashtag(intervals))

	// Go from end to reduce collision
	// Doesn't bother checking
	i := 19
	for (likes > 0 || follows > 0) && i >= 0 {

		// Process likes
		if likes > 0 {
			go http.LikePosts(s, posts.Data[i].Id)
			likes--

			// Doing this separately reaches a larger audience
			// Never exceeds 12/11 at a given time
		} else if follows > 0 {
			go http.FollowUser(s, posts.Data[i].Id)
			follows--
		}

		// Decrement
		i--
	}

	// Indicate doneness
	done <- true
}
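
The round-robin comment above implies that s.GetHashtag(intervals) simply cycles through the configured hashtags, so repeating a tag weights it more heavily. A hypothetical illustration (the real session.GetHashtag may differ):

func roundRobin(hashtags []string, intervals int) string {
	// With [#dog, #dog, #cute], #dog is chosen twice as often as #cute
	return hashtags[intervals%len(hashtags)]
}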
Example No. 3
func processTags(s *session.Session, weight float64, tags []string) {
	weight /= float64(len(tags))
	for _, next := range tags {
		h := s.Hashtag(next)
		h.Value += weight
		s.SaveHashtag(h)
	}
}
Example No. 4
// Post actions
func LikePosts(s *session.Session, id string) {
	v := url.Values{}

	response, err := s.Post("https://api.instagram.com/v1/media/"+id+"/likes", v)
	if err != nil {
		panic(err)
	}
	s.Log(strconv.Itoa(response.StatusCode))
}
Example No. 5
func FollowUser(s *session.Session, id string) {
	v := url.Values{}
	v.Set("action", "follow")

	response, err := s.Post("https://api.instagram.com/v1/users/"+strings.Split(id, "_")[1]+"/relationship", v)
	if err != nil {
		panic(err)
	}
	s.Log(strconv.Itoa(response.StatusCode))
}
Example No. 6
// Authenticate
func Authenticate(s *session.Session, code string) {
	decoder := s.Auth(code)

	//Decode request
	var auth Auth
	err := decoder.Decode(&auth)
	if err != nil {
		panic(err)
	}

	s.SetAuth(auth.Access_token, auth.User.Id)

}
Example No. 7
func IntelligentDecision(s *session.Session, follows int, likes int, intervals int, done chan bool) {

	// Still do round robin, but this time the hashtags are smart
	// and our choice is educated
	posts := http.GetPosts(s, s.GetHashtag(intervals))

	// Set up channels for async download/processing from Instagram
	next := make(chan *http.Posts)
	grp := make(chan *group)
	count := 0
	calls := 0
	go sort(s, grp, follows, likes, &calls, &count, done)
	go listen(s, grp, next, &calls, &count)
	next <- &posts
}
Example No. 8
func IsFollowing(s *session.Session, id string) bool {
	response, err := s.Get("https://api.instagram.com/v1/users/" + id + "/relationship")
	if err != nil {
		panic(err)
	}

	var status Status
	decoder := json.NewDecoder(response.Body)
	err = decoder.Decode(&status)
	if err != nil {
		panic(err)
	}

	return status.Data.Outgoing_status == "follows"
}
Example No. 9
func GetMedia(s *session.Session, id string) Posts {
	params := map[string]string{"MIN_TIMESTAMP": utils.SixHoursAgo(), "COUNT": "3"}
	response, err := s.GetParamed("https://api.instagram.com/v1/users/"+id+"/media/recent/", params)
	if err != nil {
		panic(err)
	}

	//Decode request
	var posts Posts
	decoder := json.NewDecoder(response.Body)
	err = decoder.Decode(&posts)
	if err != nil {
		panic(err)
	}

	return posts
}
Example No. 10
func GetUser(s *session.Session, id string) User {
	response, err := s.Get("https://api.instagram.com/v1/users/" + id)
	if err != nil {
		panic(err)
	}

	//Decode request
	var user User
	decoder := json.NewDecoder(response.Body)
	err = decoder.Decode(&user)
	if err != nil {
		panic(err)
	}

	return user

}
Example No. 11
func getPeople(s *session.Session, url string) (users Users) {
	response, err := s.Get(url)
	if err != nil {
		panic(err)
	}

	data, err := ioutil.ReadAll(response.Body)
	if err == nil && data != nil {
		err = json.Unmarshal(data, &users)
	}
	if err != nil {
		s.Log(string(data[:]))
		panic(err)
	}

	return
}
Example No. 12
func GetPosts(s *session.Session, hashtag string) Posts {

	response, err := s.Get("https://api.instagram.com/v1/tags/" + hashtag + "/media/recent")
	if err != nil {
		panic(err)
	}

	//Decode request
	var posts Posts
	decoder := json.NewDecoder(response.Body)
	err = decoder.Decode(&posts)
	if err != nil {
		panic(err)
	}

	return posts
}
Example No. 13
func processHandle(w http.ResponseWriter, r *http.Request, s *session.Session) {

	// Grab intervals since day start
	intervals := utils.Intervals()

	// Had some fancy math for periodicity, but
	// we could just brute force 100 per hour
	likes := int(utils.LIKES / utils.CALLS)
	utils.Limit(&likes, intervals, utils.LIKES)

	if !s.Usable() {
		fmt.Fprint(w, "Please set hashtags and authorize")
		return
	}

	// Follow ratio function where target is the desired
	// amount of followers.
	// e^(x*ln(magic)/target)
	// I wish I could say there's some science behind why
	// we're doing this, but ultimately we just need a
	// decreasing function and some percentage of your
	// target feels right
	count := action.GetStatus(s)
	follows := int(utils.FollowerDecay(count.Followed_by, count.Follows, s.GetMagic(), s.GetTarget()))
	utils.Limit(&follows, intervals, utils.FOLLOWS)
	if follows < 0 {
		follows = 0
	}

	// Hang on the channel, otherwise jobs get cut out
	done := make(chan bool)

	// Save status at midnight
	if intervals == 0 {
		go s.SetRecords(count.Followed_by, count.Follows)
	}
	if s.GetLearnt() {
		IntelligentDecision(s, follows, likes, intervals, done)
	} else {
		BasicDecision(s, follows, likes, intervals, done)
	}
	// Wait for finish. Defeats the purpose of async, but it's the only way to run in prod
	<-done
	fmt.Fprint(w, "Processing")
}
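
The comment in processHandle gives the follow-ratio function as e^(x*ln(magic)/target). Assuming x is the current follower count, a minimal sketch of the decay factor that formula describes (the real utils.FollowerDecay takes more arguments and presumably scales this into an actual follow count):

func followerDecayFactor(followers, magic, target float64) float64 {
	// Equals 1 when followers == 0 and magic when followers == target,
	// so for magic < 1 it decreases as the target is approached
	return math.Exp(followers * math.Log(magic) / target)
}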
Example No. 14
func GetTag(s *session.Session, hashtag string) Tag {

	response, err := s.Get("https://api.instagram.com/v1/tags/" + hashtag)
	if err != nil {
		panic(err)
	}

	//Decode request
	var tag Tag
	decoder := json.NewDecoder(response.Body)
	err = decoder.Decode(&tag)
	if err != nil {
		panic(err)
	}

	return tag

}
Example No. 15
// Math looks good, but haven't rigorously tested it
func LogisticRegression(s *session.Session) {

	// Grab all people because of many iterations
	people := s.GetPeople()
	objective := Objective{
		People: people,
		Lambda: s.GetLambda(),
		Alpha:  s.GetAlpha(),
		Size:   float64(len(people)),
	}

	start := []float64{1, 1, 1, 0} // Bias, Following, Followers, Posts
	minimum := Minimize(objective, start)
	log.Println(minimum)

	s.SetTheta(minimum)
	s.StopProcessing()
}
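
The Objective above carries the mined people, a regularization weight Lambda, a step size Alpha, and the sample size, and the starting theta is [Bias, Following, Followers, Posts]. A minimal, hypothetical sketch of the regularized logistic cost such an objective would typically minimize (the project's actual Objective and Minimize implementations are not shown here):

func logisticCost(theta []float64, people []session.Person, lambda float64) float64 {
	m := float64(len(people))
	cost := 0.0
	for _, p := range people {
		// Feature vector matching the start vector: bias, following, followers, posts
		x := []float64{1, p.Following, p.Followers, p.Posts}
		z := 0.0
		for i := range theta {
			z += theta[i] * x[i]
		}
		h := 1.0 / (1.0 + math.Exp(-z))
		y := 0.0
		if p.Follows {
			y = 1
		}
		// Cross-entropy loss for this person
		cost += -y*math.Log(h) - (1-y)*math.Log(1-h)
	}
	// L2 regularization, conventionally skipping the bias term
	reg := 0.0
	for _, t := range theta[1:] {
		reg += t * t
	}
	return cost/m + lambda/(2*m)*reg
}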
Example No. 16
// Get actions
func GetStatus(s *session.Session) (count Counts) {

	response, err := s.Get("https://api.instagram.com/v1/users/" + s.GetId())
	if err != nil {
		panic(err)
	}

	//Decode request
	var status Status
	decoder := json.NewDecoder(response.Body)
	err = decoder.Decode(&status)
	if err != nil {
		panic(err)
	}

	count = status.Data.Counts
	return
}
Example No. 17
func updateHandle(w http.ResponseWriter, r *http.Request, s *session.Session) {
	// Probably should implement TOTP; potentially vulnerable to MITM
	if s.VerifiedUpdate(r.URL.Query()["hash"][0]) {
		s.SetHashtags(strings.Split(r.URL.Query()["hashtags"][0], " "))
		s.ParseTheta(strings.Split(r.URL.Query()["theta"][0], " "))
		s.SetLearnt()
		fmt.Fprint(w, "Updated")
	} else {
		fmt.Fprint(w, "Not Verified")
	}
}
Example No. 18
func GetNextPost(s *session.Session, url string) Posts {
	response, err := s.RawGet(url)
	if err != nil {
		panic(err)
	}

	//Decode request
	var bunch Posts
	data, err := ioutil.ReadAll(response.Body)
	if err == nil && data != nil {
		err = json.Unmarshal(data, &bunch)
	}
	if err != nil {
		s.Log(string(data[:]))
		panic(err)
	}

	return bunch
}
Example No. 19
func process(s *session.Session, users *http.Users, i int, follows float64) {
	for i >= 0 {

		id := users.Data[i].Id
		user := http.GetUser(s, id)

		log.Println(user)

		if user.Data.Counts.Followed_by+user.Data.Counts.Follows > 0 {
			// Check follower records; if following and the user is in other records, don't do anything
			person := session.Person{
				Followers: float64(user.Data.Counts.Follows),
				Following: float64(user.Data.Counts.Followed_by),
				Posts:     float64(user.Data.Counts.Media),
				Follows:   !s.CheckCache(id),
			}

			// Because unset properties won't change, this should be fine
			if int(follows) == utils.SCORN {
				person.Followed = true
				person.Follows = !person.Follows
			}

			// Add to variable and to Keys
			s.PutPerson(person, id)
		}

		// Decrement
		i--
	}

	// Catches up and thus done
	if s.FinishedCount() {
		s.SavePeople()
		s.StopProcessing()
	}
}
Example No. 20
// Build a little status bar for debug and ./toolbelt -l purposes
func StatusBar(s *session.Session, title string) (bar string) {
	bar = "    " + title + ":"
	BARSIZE := 100 - len(title)
	i := int(BARSIZE * s.GetState() / s.GetLimit())
	j := BARSIZE - i

	for i+j > 0 {
		if i > 0 {
			i--
			bar += "*"
		} else {
			j--
			bar += "-"
		}
	}
	bar += utils.IntToString(s.GetState()) + "/" + utils.IntToString(s.GetLimit())
	return
}
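
For illustration: with the title "Mining Followers" (16 characters), BARSIZE is 84; at state 42 of limit 84 the loop emits 42 asterisks followed by 42 dashes, so the bar is the indented title, 42 "*", 42 "-", and then "42/84".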
Example No. 21
func authHandle(w http.ResponseWriter, r *http.Request, s *session.Session) {
	s.SetHashtags(strings.Split(r.URL.Query()["hashtags"][0], " "))
	action.Authenticate(s, r.URL.Query()["code"][0])

	http.Redirect(w, r, "/", 302)
}
Example No. 22
func GetFollowers(s *session.Session) Users {
	return getPeople(s, "https://api.instagram.com/v1/users/"+s.GetId()+"/followed-by")
}
Example No. 23
// Simplified and altered page rank
func WeightTags(s *session.Session) {

	s.IncrementState()

	tags := s.GetHashtagCursor()
	goodWeight := 1.0 / s.GetHashtagSize(true)
	badWeight := -1.0 / s.GetHashtagSize(false)

	for {
		key, err := tags.Next(nil)
		if err != nil {
			log.Println(err)
			break // No further entities match the query.
		}
		hashtag := s.Hashtag(key.StringID())

		log.Println(hashtag.Name)

		processTags(s, goodWeight, hashtag.Beneficiaries)
		processTags(s, badWeight, hashtag.Victims)
	}

	s.SetTopTags()

	// Move on
	s.IncrementStep()
	s.StopProcessing()
}
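
To make the weighting concrete: if, say, 4 hashtags were collected on the positive pass, goodWeight is 1/4; processTags then divides that by the length of a tag's Beneficiaries list, so a hashtag with 2 beneficiaries adds (1/4)/2 = 0.125 to each of their values, and victims are debited the same way with the negative badWeight.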
Example No. 24
// Flush handle to kill all ML data
func flushHashtagHandle(w http.ResponseWriter, r *http.Request, s *session.Session) {
	go s.FlushEntity("Hashtag")
	fmt.Fprint(w, "Done Flushed")
}
Example No. 25
// Async setup for multiple paginated calls
func listen(s *session.Session, next chan *http.Users, calls int, follows float64) {
	for {
		select {
		case users := <-next:

			i := len(users.Data) - 1
			s.IncrementCount()
			go process(s, users, i, follows)

			close(next)
			if calls == utils.MAXPEOPLEGRAB {
				s.SetNext(users.Pagination.Next_url)
				return
			}

			var batch http.Users
			nxt := make(chan *http.Users)
			if users.Pagination.Next_url != "" {
				log.Println("Getting another batch")
				batch = http.GetNext(s, users.Pagination.Next_url)
			} else if follows == 0 { // follows == float64(s.GetLearningStep()) then have a array of functions
				log.Println("Proceeding to next Step")
				s.IncrementStep()
				s.IncrementState()
				batch = http.GetFollowing(s)
				follows = float64(s.GetLearningStep())
			} else {
				s.SetNext("")
				return
			}

			go listen(s, nxt, calls+1, follows)
			nxt <- &batch
			return
		}
	}
}
Example No. 26
func MinePeople(s *session.Session) {
	// Set up channel
	next := make(chan *http.Users)
	var batch http.Users

	if s.GetNext() == "" {
		if s.GetState() > 0 {
			s.IncrementStep()
		}
		if s.GetLearningStep() == utils.APPRAISE {
			batch = http.GetFollowers(s)
		} else {
			batch = http.GetFollowing(s)
		}
	} else {
		batch = http.GetNext(s, s.GetNext())
	}

	go listen(s, next, 0, s.IncrementState())
	next <- &batch
}
Example No. 27
// Async sort; keeps the top (follows+likes) groups by value, hope it works
func sort(s *session.Session, next chan *group, follows, likes int, calls, total *int, done chan bool) {
	var instances []group
	count := 0
	x := 0
	min := math.Inf(1)
	for {
		select {
		case instance := <-next:

			x++
			// Catches up and thus done
			if x == *total && *calls == utils.MAXPOSTGRAB {
				i := 0
				for likes > 0 || follows > 0 {

					// Highest value for follows then do likes
					if follows > 0 {
						go http.FollowUser(s, instances[i].id)
						follows--
					} else if likes > 0 {
						go http.LikePosts(s, instances[i].id)
						likes--
					}
					i++
				}
				s.FlushCache()
				done <- true
				close(next)
				return
			}

			// We already have our fill and this value won't contribute
			if instance.id == "continue" || (instance.value <= min && count == follows+likes) {
				continue
			}

			if min < instance.value {
				if count == follows+likes {
					min = instance.value
				}
			} else {
				if count < follows+likes {
					min = instance.value
				}
			}

			if count < follows+likes {
				instances = append(instances, *instance)
				count += 1
			} else {
				// Replace end
				instances[count-1] = *instance
			}

			// Bubble the new instance up to keep the list in descending order
			for i := count - 2; i >= 0; i-- {
				if instance.value > instances[i].value {
					holder := instances[i]
					instances[i] = *instance
					instances[i+1] = holder
				} else {
					break
				}
			}
		}
	}
}
Example No. 28
func CrawlTags(s *session.Session, positive bool) {
	keys := s.GetPeopleCursor(positive, utils.MAXREQUESTS*s.GetState())
	s.IncrementState()
	total := 0
	size := 0
	for {
		key, err := keys.Next(nil)
		total += 1
		if err != nil {
			if total < utils.MAXREQUESTS {
				s.IncrementStep()
			}
			break // No further entities match the query.
		}
		media := http.GetMedia(s, key.StringID()).Data
		captured := len(media) - 1
		for i := 0; i < 3 && i < captured; i++ {
			tagCount := len(media[i].Tags)

			lim := 5
			for j, tag := range media[i].Tags {
				if tag == "" {
					continue
				}
				if j >= lim {
					break
				}
				h := s.Hashtag(tag)
				for k := 0; k < lim && k < tagCount; k++ {
					if j == k {
						continue
					}
					if tag == media[i].Tags[k] {
						lim += 1
						media[i].Tags[k] = ""
						continue
					}
					if positive {
						h.Beneficiaries = append(h.Beneficiaries, media[i].Tags[k])
					} else {
						h.Victims = append(h.Victims, media[i].Tags[k])
					}
				}
				s.SaveHashtag(h)
				size += 1
			}
		}
	}
	s.IncrementSize(size, positive)
	s.StopProcessing()
}
Example No. 29
// Might be worth putting a defer in here in case a job breaks,
// i.e. to clean up the "is processing" state
func Learn(s *session.Session) string {

	if s.SetLearning() {
		// New
		log.Println("Set up learning")
		status := http.GetStatus(s)
		s.SetLimits(int(status.Follows), int(status.Followed_by))
	}

	if !s.SetProcessing() {
		// Show we're still working
		return "    *"
	}

	switch s.GetLearningStep() {

	case utils.APPRAISE:
		jobs.MinePeople(s)
		return StatusBar(s, "Mining Followers")

	case utils.SCORN:
		jobs.MinePeople(s)
		return StatusBar(s, "Mining Following")

	case utils.BUILD:
		// Logistic Regression
		// Get records and run
		go jobs.LogisticRegression(s)
		s.IncrementStep()
		return "* Running Logistic Regression"

	case utils.GOODTAGS:
		go jobs.CrawlTags(s, true)
		return StatusBar(s, "Finding Good Tags")

	case utils.BADTAGS:
		go jobs.CrawlTags(s, false)
		return StatusBar(s, "Finding Bad Tags")

	case utils.COMPUTETAGS:
		go jobs.WeightTags(s)
		return "* Ranking Tags"

	case utils.SHARE:
		go s.Share()
		s.IncrementStep()
		s.StopProcessing()
		return "Sharing"
	}

	return "Stop"
}
Example No. 30
// Flush handle to kill all ML data
func flushHandle(w http.ResponseWriter, r *http.Request, s *session.Session) {
	s.Flush()
	fmt.Fprint(w, "Done Flushed")
}