// processSort does either a coarse or a fine sort.
func processSort(ts *task.Sort) (*task.SortResult, error) {
	attr := ts.Attr
	x.AssertTruef(ts.Count > 0,
		("We do not yet support negative or infinite count with sorting: %s %d. " +
			"Try flipping order and return first few elements instead."),
		attr, ts.Count)

	n := len(ts.UidMatrix)
	out := make([]intersectedList, n)
	for i := 0; i < n; i++ {
		// offsets[i] is the offset for i-th posting list. It gets decremented as we
		// iterate over buckets.
		out[i].offset = int(ts.Offset)
		out[i].ulist = &task.List{Uids: []uint64{}}
	}

	// Iterate over every bucket in TokensTable.
	t := posting.GetTokensTable(attr)
	var token string
	if ts.Desc {
		token = t.GetLast()
	} else {
		token = t.GetFirst()
	}

BUCKETS:
	for len(token) > 0 {
		err := intersectBucket(ts, attr, token, out)
		switch err {
		case errDone:
			break BUCKETS
		case errContinue:
			// Continue iterating over tokens.
		default:
			return &emptySortResult, err
		}
		if ts.Desc {
			token = t.GetPrev(token)
		} else {
			token = t.GetNext(token)
		}
	}

	r := new(task.SortResult)
	for _, il := range out {
		r.UidMatrix = append(r.UidMatrix, il.ulist)
	}
	return r, nil
}
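// exampleProcessSort is a minimal usage sketch, not part of the original logic.
// It shows how a caller might assemble a task.Sort request and run processSort
// over two UID lists. Only the fields referenced above (Attr, Count, Offset,
// Desc, UidMatrix) are assumed; the "name" attribute, the UIDs, and the helper
// itself are hypothetical, and the attribute is assumed to have a TokensTable.
func exampleProcessSort() (*task.SortResult, error) {
	ts := &task.Sort{
		Attr:   "name",
		Count:  10, // must be > 0; see the assertion in processSort.
		Offset: 0,
		Desc:   false,
		UidMatrix: []*task.List{
			{Uids: []uint64{0x01, 0x02, 0x03}},
			{Uids: []uint64{0x02, 0x04}},
		},
	}
	// The result has one sorted, paginated UID list per input list.
	return processSort(ts)
}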
// getInequalityTokens returns the tokens that are greater than or equal to
// (geq=true) or less than or equal to (geq=false) the given token.
func getInequalityTokens(attr, ineqValueToken string, geq bool) ([]string, error) {
	tt := posting.GetTokensTable(attr)
	if tt == nil {
		return nil, x.Errorf("Attribute %s is not indexed", attr)
	}

	var s string
	if geq {
		s = tt.GetNextOrEqual(ineqValueToken)
	} else {
		s = tt.GetPrevOrEqual(ineqValueToken)
	}
	out := make([]string, 0, 10)
	for s != "" {
		out = append(out, s)
		if geq {
			s = tt.GetNext(s)
		} else {
			s = tt.GetPrev(s)
		}
	}
	return out, nil
}
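// exampleGeqTokens is a minimal usage sketch, not part of the original logic.
// It shows how an inequality filter might collect every index token at or above
// a comparison value's token. The "age" attribute, the ineqToken argument, and
// the helper itself are hypothetical; only getInequalityTokens comes from this file.
func exampleGeqTokens(ineqToken string) ([]string, error) {
	// geq=true walks the TokensTable forward from ineqToken; geq=false would
	// walk backward and yield the less-than-or-equal tokens instead.
	tokens, err := getInequalityTokens("age", ineqToken, true)
	if err != nil {
		return nil, err // e.g. "age" is not indexed.
	}
	// Each token names one index bucket; a caller would typically look up the
	// posting list behind every token and combine the resulting UIDs.
	return tokens, nil
}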