Example 1
func CustomComparatorExample() {
	set := treeset.NewWith(byID)

	set.Add(User{2, "Second"})
	set.Add(User{3, "Third"})
	set.Add(User{1, "First"})
	set.Add(User{4, "Fourth"})

	fmt.Println(set) // {1 First}, {2 Second}, {3 Third}, {4 Fourth}
}
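Example 1 assumes a User type and a byID comparator that are not shown. A minimal sketch of what they might look like, assuming the gods treeset package, whose comparators have the signature func(a, b interface{}) int; the field names are guesses based on the printed output:

type User struct {
	ID   int
	Name string
}

// byID orders Users by ascending ID, matching the output printed above.
func byID(a, b interface{}) int {
	ua := a.(User)
	ub := b.(User)
	switch {
	case ua.ID > ub.ID:
		return 1
	case ua.ID < ub.ID:
		return -1
	default:
		return 0
	}
}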
Example 2
func LoadDictionaries() {
	var newDictionaryMap = make(map[string]*patricia.Trie)
	var itemMap = ImportDictionaries()

	numPrefixes := 0
	numSuggestions := 0
	numDictionaries := 0

	for dictionaryName, suggestItems := range itemMap {
		numDictionaries++
		log.Print("Loading dictionary " + dictionaryName)
		// First see if the trie already exists
		var trie, ok = newDictionaryMap[dictionaryName]
		if !ok {
			trie = patricia.NewTrie()
		}

		// Great, we have a trie, now let's see if prefixes for the
		// suggestItems exist in the trie
		for _, suggestItem := range suggestItems {
			numSuggestions++
			//Tokenize the suggested term by whitespace.  Each token will become a prefix in the trie
			var tokens = strings.Fields(suggestItem.Term)
			tokens = append(tokens, suggestItem.Term)
			for _, token := range tokens {
				numPrefixes++
				//TODO: use ascii folding
				lowerToken := strings.ToLower(token)
				// The values in the trie are sorted sets of SuggestItems
				trieItem := trie.Get([]byte(lowerToken))
				if trieItem != nil {
					suggestItemSet := trieItem.(treeset.Set)
					//If the set already exists, add the new suggestion to the set
					suggestItemSet.Add(suggestItem)

				} else {
					// Otherwise create a new set, add the SuggestItem, and insert it into
					// the trie using the lowercase token for the prefix
					suggestItemSet := treeset.NewWith(models.SuggestItemComparator)
					//					log.Printf("Inserting suggestion item %s (%s)", lowerToken, suggestItem.Term)
					suggestItemSet.Add(suggestItem)
					trie.Insert(patricia.Prefix([]byte(lowerToken)), *suggestItemSet)
				}
			}
		}
		newDictionaryMap[dictionaryName] = trie
		log.Print("Dictionary " + dictionaryName + " loaded")
	}
	//Atomic swap
	DictionaryMap = newDictionaryMap
	log.Printf("All dictionaries updated")
}
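LoadDictionaries leans on models.SuggestItem and models.SuggestItemComparator, which are defined elsewhere in the project. A hedged sketch of what they could look like, with the field names and the weight-descending order inferred from the comments in Example 5 (uses strings.Compare for the tie-break):

type SuggestItem struct {
	Term   string
	Weight int
}

// SuggestItemComparator orders suggestions by descending weight and breaks
// ties alphabetically by term, so the heaviest item sits at the front of
// each tree set.
func SuggestItemComparator(a, b interface{}) int {
	ia := a.(*SuggestItem)
	ib := b.(*SuggestItem)
	if ia.Weight != ib.Weight {
		if ia.Weight > ib.Weight {
			return -1 // heavier first
		}
		return 1
	}
	return strings.Compare(ia.Term, ib.Term)
}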
Example 3
func (this *Api) Tile(w http.ResponseWriter, r *http.Request) {
	start := time.Now()

	vars := mux.Vars(r)
	z, _ := strconv.Atoi(vars["zoom"])
	x, _ := strconv.Atoi(vars["x"])
	y, _ := strconv.Atoi(vars["y"])

	p := path.Join(tileFolder, vars["zoom"], vars["x"], vars["y"]+".jpg")
	if _, err := os.Stat(p); os.IsNotExist(err) {
		/*
			if this.isRunning {
				http.Error(w, http.StatusText(500), 500)
				log.Printf("ERROR @GENERATION : %s", time.Since(start).String())
				return
			}
		*/
		qID := this.queueNb
		this.queueNb += 1
		for this.isRunning || this.queueId != qID {
			time.Sleep(150 * time.Millisecond)
		}
		//double-check that the tile was not generated while we were waiting in the queue
		// Note: os.IsExist(err) is always false when os.Stat succeeds (err is nil),
		// so check err == nil to detect that the tile now exists.
		if _, err := os.Stat(p); err == nil {
			b, _ := ioutil.ReadFile(p)
			w.Header().Set("Content-Type", "image/jpeg")
			w.Header().Set("Content-Length", strconv.Itoa(len(b)))
			if _, err := w.Write(b); err != nil {
				log.Println("unable to write image.")
			}
			log.Printf("Serving tile cache after waiting in : %s", time.Since(start).String())
			return
		}

		this.isRunning = true
		//fmt.Fprintln(w, "Generating file zoom :", vars["zoom"], "x,y :", vars["x"], ",", vars["y"])
		tile := geo.NewTileFromZXY(z, x, y)
		//fmt.Fprintf(w, "\nResulting tile : %v", tile)
		//fmt.Fprintf(w, "\nNb tile at level %d : %.0f", tile.Z, geo.NbAtZLevel(tile.Z))
		plat, plon := geo.PrecisionAtZLevel(tile.Z)
		log.Printf("Resulting precision : %v %v", plat, plon)
		bbox := tile.GetBBOX()
		//fmt.Fprintf(w, "\nResulting bbox : %v", bbox)

		//TODO
		log.Printf("Starting way findind in bbox %v", bbox)
		ways, _ := this.db.WayIndex.GetWayInBBox(bbox, "natural")
		//fmt.Fprintf(w, "\nResulting ways : %d", len(ways))
		//log.Printf("List %v", ways)
		wanted := treeset.NewWith(db.CompareInt64)
		for _, way := range ways {
			wanted.Add(way)
		}

		//TODO add function to filter by zoom rendering (features)
		log.Printf("Searching for %d ways", wanted.Size())
		var found map[int64]*osmpbf.Way
		found, _ = this.db.GetWays(wanted)
		log.Printf("%d ways found", len(found))
		log.Printf("TIME ELAPSED @WaysFound : %s", time.Since(start).String())

		wanted_node := treeset.NewWith(db.CompareInt64)
		var nodeId int64
		for _, way := range found {
			for _, nodeId = range way.NodeIDs {
				wanted_node.Add(nodeId)
			}
		}
		log.Printf("Searching for %d nodes", wanted_node.Size())
		//founded_nodes, _ := this.db.GetNodes(&wanted_node)
		//TODO
		founded_nodes, _ := this.db.GetNodes(wanted_node, nil)
		log.Printf("%d nodes founded", len(founded_nodes))
		log.Printf("TIME ELAPSED @NodesFound : %s", time.Since(start).String())
		img, err := tile.DrawTile(found, founded_nodes, true)
		if err != nil {
			// Release the queue before returning, otherwise requests waiting
			// in the loop above would never be unblocked.
			this.queueId++
			this.isRunning = false
			http.Error(w, http.StatusText(500), 500)
			log.Printf("ERROR @GENERATION : %s", time.Since(start).String())
			return
		}
		this.saveImageJpeg(tile, &img)
		this.writeImageJpeg(w, &img)
		log.Printf("TIME ELAPSED @END : %s", time.Since(start).String())
		this.queueId++
		this.isRunning = false
	} else {
		b, _ := ioutil.ReadFile(p)
		w.Header().Set("Content-Type", "image/jpeg")
		w.Header().Set("Content-Length", strconv.Itoa(len(b)))
		if _, err := w.Write(b); err != nil {
			log.Println("unable to write image.")
		}

		log.Printf("Serving tile cache in : %s", time.Since(start).String())
	}
}
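Api.Tile reads zoom, x and y from gorilla/mux route variables, so it has to be mounted on a route that provides them. A possible wiring, under the assumption that an Api value is built elsewhere with its db, tile folder and queue fields; registerTileRoute and the path pattern are illustrative, not the project's actual ones:

func registerTileRoute(api *Api) {
	r := mux.NewRouter()
	// The variable names {zoom}, {x} and {y} must match the keys read by mux.Vars in Tile.
	r.HandleFunc("/tiles/{zoom}/{x}/{y}", api.Tile)
	log.Fatal(http.ListenAndServe(":8080", r))
}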
Example 4
// Scan needs to be called first (after start), before processing.
func (this *Db) ParseWays() error {
	//The last entry is a dummy

	start := time.Now()

	//Pull the last batch and sync in order to "close" the database at the end.
	defer this.WayIndex.PullBatch(true)
	//	defer this.WayIndex.db.Close()

	var bb geo.Bbox
	var ways []*osmpbf.Way
	wanted := treeset.NewWith(CompareInt64)
	//wanted := make(map[int64]*osmpbf.Node)

	var cw, cow, cn, last int64
	cow = this.WayIndex.Size()
	last = this.WayIndex.Last()
	found := make(map[int64]*osmpbf.Node, 0)
	//TODO find nodes by block of ways in order not to re-decode the start of a block
	for i := 0; i < len(this.Descriptor.Ways)-1; i++ {
		if this.Descriptor.WaysId[i+1] < last {
			//There is always an i+1 element because of the loop condition
			continue
		}
		//log.Printf("Parsing block : %d", i)
		objects, err := this.Decoder.DecodeBlocAt(this.Descriptor.Ways[i])
		if err != nil {
			return err
		}
		for _, v := range objects {
			switch v := v.(type) {
			case *osmpbf.Way:
				//TODO a per-ID check takes too long and may be slower than the small advantage resuming gives
				//if has, _ := this.WayIndex.db.Has(Int64bytes(v.ID), nil); has
				if v.ID <= last {
					//log.Printf("Passing %d", v.ID)
					//cow++
					continue
				}

				cw++
				//log.Printf("Adding to search %d", v.ID)
				ways = ExtendWays(ways, v)
				//TODO check ways with no nodes maybe ?
				//TODO use an ordered (inverted) list
				for _, nodeId := range v.NodeIDs {
					/*
						for i := 0; i < len(wanted); i++ {
							if nodeId <= wanted[i] {
								break
							}
						}
						//We don't insert if nodeId == wanted[i] (duplicate)
						if i == len(wanted) || nodeId < wanted[i] {
							wanted = append(wanted[:i], append([]int64{nodeId}, wanted[i:]...)...)
						}

						//wanted = append(wanted, nodeId)
						//wanted[nodeId] = nil
					*/
					wanted.Add(nodeId)
				}
			}
		}
		if wanted.Size() > CacheSize || i == len(this.Descriptor.Ways)-2 {
			log.Printf("On parse les points pour %d ways soit %d nodes recherchés", len(ways), wanted.Size())
			//TODO reused allready found on previous round
			found, _ = this.GetNodes(wanted, found)
			//On reset wanted to save space (not obligated but in case it not clear by getNodes)
			wanted.Clear()

			for _, way := range ways {
				for id, nodeId := range way.NodeIDs {
					cn++
					node := found[nodeId]
					p := geo.Point{node.Lon, node.Lat}

					if id == 0 {
						bb = geo.Bbox{p, p}
					} else {
						//TODO
						//Will enlarge bb if needed
						bb.AddInnerPoint(p)
					}
				}
				// roughly 15% extra time
				tag := "other"
				if _, ok := way.Tags["natural"]; ok {
					tag = "natural"
				}
				this.WayIndex.Add(way.ID, tag, bb)
			}
			//TODO check for updates of the file
			//this.WayIndex.db.Sync()

			//For testing purpose
			//log.Printf("%v %v", ways[0].ID, ways[0])
			//a, e := this.WayIndex.Get(ways[0].ID)
			//log.Printf("%v %v", a, e)
			/* //TODO batch insertion
			log.Println("Starting db insertion")
			this.WayIndex.AddBatch(ways, bb)
			log.Println("db insertion ended")
			*/
			ways = make([]*osmpbf.Way, 0)

			estimation := time.Since(start).Minutes() * (float64(this.Descriptor.TotalWay()-cow) / float64(cw))

			time_esti, _ := time.ParseDuration(fmt.Sprintf("%.4fm", estimation))
			log.Printf("%dk/%dk %.2f/100 TIME ELAPSED : %s ESTIMATION : %s\r", (cw+cow)/1000, this.Descriptor.TotalWay()/1000, float64((cw+cow)*100)/float64(this.Descriptor.TotalWay()), time.Since(start).String(), time_esti.String())
		}
	}

	found = make(map[int64]*osmpbf.Node, 0)
	wanted.Clear()
	return nil
}
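Examples 3 and 4 both build their tree sets with CompareInt64 (exported from the db package in Example 3). Its definition is not shown; a minimal version, equivalent in spirit to the Int64Comparator shipped with gods' utils package, could look like this:

// CompareInt64 orders int64 keys in ascending order; treeset.NewWith expects
// a func(a, b interface{}) int comparator such as this one.
func CompareInt64(a, b interface{}) int {
	ia := a.(int64)
	ib := b.(int64)
	switch {
	case ia > ib:
		return 1
	case ia < ib:
		return -1
	default:
		return 0
	}
}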
Example 5
func TwoCentsHandlerV1(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	dictionaryName := vars["dictionary"]
	limit := 10
	limitParam := vars["limit"]
	if limitParam != "" {
		limit, _ = strconv.Atoi(limitParam)
	}

	//case-insensitive filter
	filter := vars["filter"]
	if filter != "" {
		filter = strings.ToLower(filter)
	}

	dictionaryTrie, found := DictionaryMap[dictionaryName]
	if !found {
		http.NotFound(w, r)
		return
	}
	//TODO: use ascii folding
	query := vars["query"]

	/*
		The values in the patricia-trie are sets of SuggestItems.  patricia-trie won't return the list of nodes
		for you, but will invoke a function on all visited nodes.  This []treeset.Set will hold the results of the
		visited nodes.  visitorFunc will actually add those sets to that array.
	*/
	trieItems := []treeset.Set{}
	visitorFunc := func(prefix patricia.Prefix, item patricia.Item) error {
		trieItems = append(trieItems, item.(treeset.Set))
		return nil
	}
	dictionaryTrie.VisitSubtree(patricia.Prefix([]byte(strings.ToLower(query))), visitorFunc)

	/*
		This set will hold the SuggestItems we pull from the front of every set retrieved from the patricia-trie.  Since
		it's a tree set, the items are sorted using the SuggestItemComparator, which compares by weight and string,
		guaranteeing that the items within a set are ordered
	*/
	collatedSuggestionSet := treeset.NewWith(models.SuggestItemComparator)

	//If there were fewer suggestions than the requested limit, lower the limit
	totalSuggestions := 0
	for _, suggestionSetItem := range trieItems {
		totalSuggestions += suggestionSetItem.Size()
	}
	if totalSuggestions < limit {
		limit = totalSuggestions
	}

	/*
		The results from the patricia-trie visit are all sorted sets.  However, they're only sorted within each set.  Since
		we know that they're in weight-descending order, we can reliably pick the first element from each set, and insert
		them into another sorted result set.  After <limit> iterations, we're guaranteed to have the top weighted items
		in weight-descending order, and we only need to slice the array
	*/
	finalSuggestionSetPosition := 0
	for finalSuggestionSetPosition < limit && collatedSuggestionSet.Size() < limit {
		for _, suggestionSetItem := range trieItems {
			if suggestionSetItem.Size() > finalSuggestionSetPosition {
				thisItem := suggestionSetItem.Values()[finalSuggestionSetPosition].(*models.SuggestItem)
				//case-insensitive filter
				if filter != "" {
					if strings.Contains(strings.ToLower(thisItem.Term), filter) {
						collatedSuggestionSet.Add(thisItem)
					}
				} else {
					collatedSuggestionSet.Add(thisItem)
				}

			}
		}
		finalSuggestionSetPosition++
	}

	if collatedSuggestionSet.Size() < limit {
		limit = collatedSuggestionSet.Size()
	}
	suggestions := []string{}
	for _, suggestion := range collatedSuggestionSet.Values()[0:limit] {
		suggestions = append(suggestions, suggestion.(*models.SuggestItem).Term)
	}

	t := TwoCentsV1{
		Suggestions: suggestions,
	}

	j, _ := json.Marshal(t)
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	if AllowedOrigin != "" {
		w.Header().Set("Access-Control-Allow-Origin", AllowedOrigin)
	}
	w.Write(j)

}
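TwoCentsHandlerV1 pulls dictionary, query, limit and filter out of mux.Vars and marshals a TwoCentsV1 value. A hedged sketch of the response type and one way the routes could be registered with gorilla/mux; registerTwoCentsRoutes and the exact path patterns are assumptions:

type TwoCentsV1 struct {
	Suggestions []string `json:"suggestions"`
}

func registerTwoCentsRoutes(r *mux.Router) {
	// The optional {limit} and {filter} variables are simply absent from
	// mux.Vars when one of the shorter routes matches.
	r.HandleFunc("/v1/twocents/{dictionary}/{query}", TwoCentsHandlerV1)
	r.HandleFunc("/v1/twocents/{dictionary}/{query}/{limit}", TwoCentsHandlerV1)
	r.HandleFunc("/v1/twocents/{dictionary}/{query}/{limit}/{filter}", TwoCentsHandlerV1)
}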