func (rg *Registrar) process(seqn uint64, memberSet, calSet map[string]string) {
	memberSets := make(map[uint64]map[string]string)
	memberSets[seqn] = dup(memberSet)
	calSets := make(map[uint64][]string)
	calSets[seqn] = nonEmpty(values(calSet))
	sort.SortStrings(calSets[seqn])

	for {
		select {
		case l := <-rg.lookupCh:
			heap.Push(rg.lookups, l)
		case ev := <-rg.evs:
			dir, name := path.Split(ev.Path)
			switch dir {
			case membersDir:
				memberSet[name] = ev.Body, ev.IsSet()
			case slotDir:
				calSet[name] = ev.Body, ev.IsSet()
			}
			seqn = ev.Seqn
			memberSets[seqn] = dup(memberSet)
			calSets[seqn] = nonEmpty(values(calSet))
			sort.SortStrings(calSets[seqn])
		}

		// If we have any lookups that can be satisfied, do them.
		for l := rg.lookups.peek(); seqn >= l.cver; l = rg.lookups.peek() {
			heap.Pop(rg.lookups)
			l.memberSet = memberSets[l.cver]
			l.calSet = calSets[l.cver]
			l.done <- 1
		}
	}
}
func expectContentsEqual(t *testing.T, set *set.StringSet, expected []string) {
	var contents vector.StringVector
	for val := range set.Iter() {
		contents.Push(val)
	}
	sort.SortStrings(contents)
	sort.SortStrings(expected)
	if !reflect.DeepEqual(contents.Data(), expected) {
		t.Errorf("Expected:%v\nGot: %v", expected, contents)
	}
}
func listCommands(conn core.Conn) {
	// Sort command names alphabetically
	names := make([]string, len(cmds))
	i := 0
	for name := range cmds {
		names[i] = name
		i++
	}
	sort.SortStrings(names)

	// Print command names
	lines := make([]string, (len(names)+3)/4)
	for i = 0; i < len(names); i += 4 {
		a, b, c, d := names[i], "", "", ""
		if i+1 < len(names) {
			b = names[i+1]
		}
		if i+2 < len(names) {
			c = names[i+2]
		}
		if i+3 < len(names) {
			d = names[i+3]
		}
		lines[i/4] = fmt.Sprintf("%20s%20s%20s%20s", a, b, c, d)
	}
	sendLines(conn, lines)
}
// writeCookies writes the wire representation of the cookies
// to w. Each cookie is written on a separate "Cookie: " line.
// This choice is made because HTTP parsers tend to have a limit on
// line-length, so it seems safer to place cookies on separate lines.
func writeCookies(w io.Writer, kk []*http.Cookie) os.Error {
	lines := make([]string, 0, len(kk))
	var b bytes.Buffer
	for _, c := range kk {
		b.Reset()
		n := c.Name
		// TODO(petar): c.Value (below) should be unquoted if it is recognized as quoted
		fmt.Fprintf(&b, "%s=%s", http.CanonicalHeaderKey(n), c.Value)
		if len(c.Path) > 0 {
			fmt.Fprintf(&b, "; $Path=%s", http.URLEscape(c.Path))
		}
		if len(c.Domain) > 0 {
			fmt.Fprintf(&b, "; $Domain=%s", http.URLEscape(c.Domain))
		}
		if c.HttpOnly {
			fmt.Fprintf(&b, "; $HttpOnly")
		}
		lines = append(lines, "Cookie: "+b.String()+"\r\n")
	}
	sort.SortStrings(lines)
	for _, l := range lines {
		if _, err := io.WriteString(w, l); err != nil {
			return err
		}
	}
	return nil
}
// Parse a text string, words separated by whitespace, and create a
// Document instance. In order to initialize topic_histogram, this
// function requires num_topics.
func NewDocument(text string, num_topics int) (doc *Document, err os.Error) {
	if num_topics <= 1 {
		return nil, os.NewError("num_topics must be >= 2")
	}
	words := strings.Fields(text)
	if len(words) <= 1 {
		return nil, os.NewError("Document less than 2 words:" + text)
	}
	sort.SortStrings(words)

	doc = new(Document)
	doc.wordtopics = make([]int, len(words))
	doc.unique_words = make([]string, 0)
	doc.wordtopics_indices = make([]int, 0)
	doc.topic_histogram = make([]int, num_topics)
	doc.topic_histogram[0] = len(words)

	prev_word := ""
	for i := 0; i < len(words); i++ {
		if words[i] != prev_word {
			prev_word = words[i]
			doc.unique_words = append(doc.unique_words, prev_word)
			doc.wordtopics_indices = append(doc.wordtopics_indices, i)
		}
	}
	if !doc.IsValid() {
		return nil, os.NewError("Document is invalid")
	}
	return
}
func writeCanonicalizedAmzHeaders(buf *bytes.Buffer, req *http.Request) {
	amzHeaders := make([]string, 0)
	vals := make(map[string][]string)
	for k, vv := range req.Header {
		if hasPrefixCaseInsensitive(k, "x-amz-") {
			lk := strings.ToLower(k)
			amzHeaders = append(amzHeaders, lk)
			vals[lk] = vv
		}
	}
	sort.SortStrings(amzHeaders)
	for _, k := range amzHeaders {
		buf.WriteString(k)
		buf.WriteByte(':')
		for idx, v := range vals[k] {
			if idx > 0 {
				buf.WriteByte(',')
			}
			if strings.Contains(v, "\n") {
				// TODO: "Unfold" long headers that
				// span multiple lines (as allowed by
				// RFC 2616, section 4.2) by replacing
				// the folding white-space (including
				// new-line) by a single space.
				buf.WriteString(v)
			} else {
				buf.WriteString(v)
			}
		}
		buf.WriteByte('\n')
	}
}
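// Illustrative usage sketch (not from the original sources); the header
// names and values below are hypothetical. It shows the form the function
// above produces: one "name:value[,value...]\n" line per x-amz-* header,
// lower-cased and sorted by name, which is the canonical block S3 signing
// expects.
func exampleCanonicalizedAmzHeaders() string {
	req := &http.Request{Header: http.Header{
		"X-Amz-Date": []string{"Tue, 27 Mar 2007 19:36:42 +0000"},
		"X-Amz-Acl":  []string{"public-read"},
	}}
	var buf bytes.Buffer
	writeCanonicalizedAmzHeaders(&buf, req)
	// buf now holds:
	//   x-amz-acl:public-read
	//   x-amz-date:Tue, 27 Mar 2007 19:36:42 +0000
	return buf.String()
}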
func TestDoozerWalkWithRev(t *testing.T) {
	l := mustListen()
	defer l.Close()
	u := mustListenPacket(l.Addr().String())
	defer u.Close()

	go Main("a", "X", "", nil, u, l, nil, 1e9, 2e9, 3e9)

	cl := doozer.New("foo", l.Addr().String())
	rev, _ := cl.Set("/test/foo", store.Clobber, []byte("bar"))
	cl.Set("/test/fun", store.Clobber, []byte("house"))
	cl.Set("/test/fab", store.Clobber, []byte("ulous"))

	w, err := cl.Walk("/test/**", &rev, nil, nil)
	assert.Equal(t, nil, err, err)

	ls := []string{}
	for e := range w.C {
		ls = append(ls, e.Path)
	}
	sort.SortStrings(ls)
	assert.Equal(t, []string{"/test/foo"}, ls)
}
// glob searches for files matching pattern in the directory dir
// and appends them to matches. If the directory cannot be
// opened, it returns the existing matches. New matches are
// added in lexicographical order.
// The only possible error return occurs when the pattern is malformed.
func glob(dir, pattern string, matches []string) (m []string, e os.Error) {
	m = matches
	fi, err := os.Stat(dir)
	if err != nil {
		return
	}
	if !fi.IsDirectory() {
		return
	}
	d, err := os.Open(dir)
	if err != nil {
		return
	}
	defer d.Close()
	names, err := d.Readdirnames(-1)
	if err != nil {
		return
	}
	sort.SortStrings(names)

	for _, n := range names {
		matched, err := Match(pattern, n)
		if err != nil {
			return m, err
		}
		if matched {
			m = append(m, Join(dir, n))
		}
	}
	return
}
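// Illustrative usage sketch (not from the original sources); the directory
// and pattern are hypothetical. Because glob sorts the directory entries
// before matching, each call appends its matches in lexicographical order,
// and previously accumulated matches can be passed back in.
func exampleGlob() ([]string, os.Error) {
	matches, err := glob("/tmp/src", "*.go", nil)
	if err != nil {
		return nil, err
	}
	// Accumulate matches from a second directory onto the same slice.
	return glob("/tmp/src/pkg", "*.go", matches)
}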
func TestWalkStop(t *testing.T) {
	exp := map[string]string{
		"/d/x":   "1",
		"/d/y":   "2",
		"/d/z/a": "3",
	}

	var expPaths []string
	for p := range exp {
		expPaths = append(expPaths, p)
	}
	sort.SortStrings(expPaths)

	st := New()
	st.Ops <- Op{1, MustEncodeSet("/d/x", "1", Clobber)}
	st.Ops <- Op{2, MustEncodeSet("/d/y", "2", Clobber)}
	st.Ops <- Op{3, MustEncodeSet("/d/z/a", "3", Clobber)}
	st.Ops <- Op{4, MustEncodeSet("/m/y", "", Clobber)}
	st.Ops <- Op{5, MustEncodeSet("/n", "", Clobber)}

	glob, err := CompileGlob("/d/**")
	assert.Equal(t, nil, err)

	var c int
	b := Walk(st, glob, func(path, body string, rev int64) bool {
		assert.Equal(t, expPaths[0], path)
		assert.Equal(t, exp[path], body)
		c++
		expPaths = expPaths[1:]
		return true
	})
	assert.Equal(t, true, b)
	assert.Equal(t, 1, c)
}
func TestDoozerGetdirOnDir(t *testing.T) {
	l := mustListen()
	defer l.Close()
	u := mustListenPacket(l.Addr().String())
	defer u.Close()

	go Main("a", "X", "", nil, u, l, nil, 1e9, 2e9, 3e9)

	cl := doozer.New("foo", l.Addr().String())
	cl.Set("/test/a", store.Clobber, []byte("1"))
	cl.Set("/test/b", store.Clobber, []byte("2"))
	cl.Set("/test/c", store.Clobber, []byte("3"))

	w, err := cl.Getdir("/test", 0, 0, nil)
	assert.Equal(t, nil, err)

	got := make([]string, 0)
	for e := range w.C {
		got = append(got, e.Path)
	}
	sort.SortStrings(got)
	assert.Equal(t, []string{"a", "b", "c"}, got)
}
// glob searches for files matching pattern in the directory dir
// and appends them to matches.
func glob(dir, pattern string, matches []string) []string {
	fi, err := os.Stat(dir)
	if err != nil {
		return nil
	}
	if !fi.IsDirectory() {
		return matches
	}
	d, err := os.Open(dir, os.O_RDONLY, 0666)
	if err != nil {
		return nil
	}
	defer d.Close()
	names, err := d.Readdirnames(-1)
	if err != nil {
		return nil
	}
	sort.SortStrings(names)

	for _, n := range names {
		matched, err := Match(pattern, n)
		if err != nil {
			return matches
		}
		if matched {
			matches = append(matches, Join(dir, n))
		}
	}
	return matches
}
func signatureBase(httpMethod string, base_uri string, params map[string]string) string {
	var buf bytes.Buffer
	buf.WriteString(httpMethod)
	buf.WriteString("&")
	buf.WriteString(URLEscape(base_uri))
	buf.WriteString("&")

	var keys vector.StringVector
	for k, _ := range params {
		keys.Push(k)
	}
	sort.SortStrings(keys)

	for i, k := range keys {
		v := params[k]
		buf.WriteString(URLEscape(k))
		buf.WriteString("%3D")
		buf.WriteString(URLEscape(v))
		// don't include the dangling %26
		if i < len(params)-1 {
			buf.WriteString("%26")
		}
	}
	return buf.String()
}
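// Illustrative usage sketch (not from the original sources); the URL and
// parameter values are hypothetical. The parameters are emitted in sorted
// key order, joined with pre-escaped "=" (%3D) and "&" (%26), which is the
// shape an OAuth 1.0 signature base string takes.
func exampleSignatureBase() string {
	params := map[string]string{
		"oauth_nonce":     "abc123",
		"oauth_timestamp": "1300000000",
		"status":          "hello world",
	}
	return signatureBase("POST", "http://api.example.com/1/statuses/update.json", params)
	// => "POST&" + <escaped base URI> + "&" + <escaped, sorted parameter string>
}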
// PrefixList returns a list of all prefixes, with duplicates removed.
// For instance, for the mapping:
//
//	user   -> /home/user
//	public -> /home/user/public
//	public -> /home/build/public
//
// the prefix list is:
//
//	user, public
//
func (m *Mapping) PrefixList() []string {
	// compute the list lazily
	if m.prefixes == nil {
		list := make([]string, len(m.list))

		// populate list
		for i, e := range m.list {
			list[i] = e.prefix
		}

		// sort the list and remove duplicate entries
		sort.SortStrings(list)
		i := 0
		prev := ""
		for _, path := range list {
			if path != prev {
				list[i] = path
				i++
				prev = path
			}
		}
		m.prefixes = list[0:i]
	}
	return m.prefixes
}
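// Illustrative sketch (not from the original sources) of the same
// sort-then-compact idiom PrefixList uses, applied to a plain []string:
// sort the slice, copy each element that differs from its predecessor down
// to a write index, and reslice to drop the duplicates. Like PrefixList,
// this drops empty strings, since prev starts out as "".
func uniqueSorted(list []string) []string {
	sort.SortStrings(list)
	i := 0
	prev := ""
	for _, s := range list {
		if s != prev {
			list[i] = s
			i++
			prev = s
		}
	}
	return list[0:i]
}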
// getdir replies with the entry at t.Offset in the sorted listing of the
// directory at t.Path, or with an error response if the path is missing,
// is not a directory, or the offset is out of range.
func (c *conn) getdir(t *T, tx txn) {
	path := pb.GetString(t.Path)
	if g := c.getterFor(t); g != nil {
		ents, rev := g.Get(path)
		if rev == store.Missing {
			c.respond(t, Valid|Done, nil, noEnt)
			return
		}
		if rev != store.Dir {
			c.respond(t, Valid|Done, nil, notDir)
			return
		}

		sort.SortStrings(ents)
		offset := int(pb.GetInt32(t.Offset))
		if offset < 0 || offset >= len(ents) {
			c.respond(t, Valid|Done, nil, erange)
			return
		}

		e := ents[offset]
		c.respond(t, Valid|Done, tx.cancel, &R{Path: &e})
	}
}
// loadCodewalk reads a codewalk from the named XML file.
func loadCodewalk(file string) (*Codewalk, os.Error) {
	f, err := os.Open(file)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	cw := new(Codewalk)
	p := xml.NewParser(f)
	p.Entity = xml.HTMLEntity
	err = p.Unmarshal(cw, nil)
	if err != nil {
		return nil, &os.PathError{"parsing", file, err}
	}

	// Compute file list, evaluate line numbers for addresses.
	m := make(map[string]bool)
	for _, st := range cw.Step {
		i := strings.Index(st.Src, ":")
		if i < 0 {
			i = len(st.Src)
		}
		file := st.Src[0:i]
		data, err := ioutil.ReadFile(absolutePath(file, *goroot))
		if err != nil {
			st.Err = err
			continue
		}
		if i < len(st.Src) {
			lo, hi, err := addrToByteRange(st.Src[i+1:], 0, data)
			if err != nil {
				st.Err = err
				continue
			}
			// Expand match to line boundaries.
			for lo > 0 && data[lo-1] != '\n' {
				lo--
			}
			for hi < len(data) && (hi == 0 || data[hi-1] != '\n') {
				hi++
			}
			st.Lo = byteToLine(data, lo)
			st.Hi = byteToLine(data, hi-1)
		}
		st.Data = data
		st.File = file
		m[file] = true
	}

	// Make list of files
	cw.File = make([]string, len(m))
	i := 0
	for f := range m {
		cw.File[i] = f
		i++
	}
	sort.SortStrings(cw.File)

	return cw, nil
}
func main() {
	parse_files()

	fmt.Println("!_TAG_FILE_SORTED\t1\t")

	sort.SortStrings(tags)
	for _, s := range tags {
		fmt.Println(s)
	}
}
func getSorted(set *StringSet) []string {
	var sorted vector.StringVector
	for val := range set.Iter() {
		sorted.Push(val)
	}
	sort.SortStrings(sorted)
	return sorted.Data()
}
func TestGetWithDir(t *testing.T) {
	st := New()
	st.Ops <- Op{1, MustEncodeSet("/x", "a", Clobber)}
	st.Ops <- Op{2, MustEncodeSet("/y", "b", Clobber)}
	st.Sync(2)

	dents, cas := st.Get("/")
	assert.Equal(t, Dir, cas)
	sort.SortStrings(dents)
	assert.Equal(t, []string{"x", "y"}, dents)
}
// filterAndSort drops the entries of names rejected by inFormat (swapping
// each rejected entry with the last element and shrinking the slice, which
// is safe because the survivors are sorted afterwards) and returns the
// remaining names in sorted order.
func filterAndSort(names []string, prefix string) []string {
	for i := 0; i < len(names); i++ {
		if !inFormat(names[i], prefix) {
			names[i] = names[len(names)-1]
			names = names[0 : len(names)-1]
			i--
		}
	}
	sort.SortStrings(names)
	return names
}
func main() {
	m := map[string]string{"b": "15", "z": "123123", "x": "sdf", "a": "12"}
	mk := make([]string, len(m))
	i := 0
	for k, _ := range m {
		mk[i] = k
		i++
	}
	sort.SortStrings(mk)
	fmt.Println(mk)
}
func callGetImages(argc int, argv []string) (result []string) {
	if len(argv) == 0 {
		return []string{"#konnte rechnerliste nicht zusammenstellen"}
	} else {
		layoutNames := runSystemCommand([]string{"ls", "basesystems/"}, RENDERFARMPATH)
		fmt.Printf("\n%s baselayouts gefunden", layoutNames)
		result = getRidOfDummies(strings.Split(layoutNames, "\n", 0))
		sort.SortStrings(result)
	}
	return
}
func (me *LatencyMap) TopArgs(name string) []string {
	me.Mutex.Lock()
	defer me.Mutex.Unlock()

	counts := me.secondaryStats[name]
	results := make([]string, 0, len(counts))
	for k, v := range counts {
		results = append(results, fmt.Sprintf("% 9d %s", v, k))
	}
	sort.SortStrings(results)
	return results
}
func main() {
	master := flag.String("m", "", "specify a master!")
	flag.Parse()
	client.Initialize(*master)

	ls, err := client.ReadDir("/")
	if err != 0 {
		panic("readdir should work")
	}
	if len(ls) != 0 {
		panic("readdir returned wrong number of results")
	}

	if client.MakeDir("/a") != 0 {
		panic("makedir should work")
	}
	fd := client.Open("/b", client.O_WRONLY|client.O_CREATE)
	if fd < 0 {
		panic("open failed")
	}

	// do read dir
	ls, err = client.ReadDir("/")
	if err != 0 {
		panic("readdir failed")
	}
	lsExpected := []string{"a/", "b"}
	sort.SortStrings(lsExpected)
	sort.SortStrings(ls)
	if !ArrEquals(ls, lsExpected) {
		fmt.Printf("Actual:\t%+v\nExpected:\t%+v\n", ls, lsExpected)
		panic("readdir results differ")
	}

	fmt.Printf("\n{{{{{pass}}}}}\n")
	os.Exit(0)
}
func uiHandler(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	vars := map[string]string{
		"checkboxes":         "",
		"transplantws":       "/transplantdata",
		"survivaldatasource": "/sampledata",
		"genedatasource":     "/genedata",
		"jsdir":              "/js",
		"loadergif":          "/images/loader.gif",
	}
	lastfile := ""

	q := datastore.NewQuery("fileNameToKey")
	blobs := make([]fileNameToKey, 0, 100)
	c.Logf("ui handler entered, vars declared")
	if _, err := q.GetAll(c, &blobs); err != nil {
		c.Logf("%v", err)
		fmt.Fprint(w, err.String())
		//http.Error(w, err.String(), http.StatusInternalServerError)
		return
	}

	blobmap := map[string]bool{}
	for _, blob := range blobs {
		//fmt.Fprint(w, blob.Filename)
		blobmap[blob.Filename[:28]] = true
	}
	count := len(blobmap)
	files := make([]string, 0, count)
	for fn, _ := range blobmap {
		files = append(files, fn)
	}
	sort.SortStrings(files)
	c.Logf("filenames found and sorted")

	for _, basename := range files {
		if lastfile != "" && basename[:12] != lastfile[:12] {
			vars["checkboxes"] += "<br/><br/>" + basename[:12] + ":<br/>"
		}
		vars["checkboxes"] += "<input type='checkbox' id='" + basename + "' name='" + basename + "'/>" + basename + " "
		lastfile = basename
	}
	vars["files"] = "['" + strings.Join(files, "','") + "']"

	// fake Python-style string formatting (poorly)
	pageTemplate.SetDelims("%(", ")s")
	pageTemplate.Parse(pageTemplateConst)
	if err := pageTemplate.Execute(w, vars); err != nil {
		serveError(c, w, err)
	}
}
// writeCookies writes the wire representation of the cookies
// to w. Each cookie is written on a separate "Cookie: " line.
// This choice is made because HTTP parsers tend to have a limit on
// line-length, so it seems safer to place cookies on separate lines.
func writeCookies(w io.Writer, kk []*Cookie) os.Error {
	lines := make([]string, 0, len(kk))
	for _, c := range kk {
		lines = append(lines, fmt.Sprintf("Cookie: %s=%s\r\n", sanitizeName(c.Name), sanitizeValue(c.Value)))
	}
	sort.SortStrings(lines)
	for _, l := range lines {
		if _, err := io.WriteString(w, l); err != nil {
			return err
		}
	}
	return nil
}
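// Illustrative usage sketch (not from the original sources); the cookie
// names and values are hypothetical. Each cookie becomes its own
// "Cookie: name=value\r\n" line, and the lines are sorted before being
// written, so the output order is deterministic.
func exampleWriteCookies(w io.Writer) os.Error {
	return writeCookies(w, []*Cookie{
		&Cookie{Name: "session", Value: "abc123"},
		&Cookie{Name: "lang", Value: "en"},
	})
	// writes:
	//   Cookie: lang=en\r\n
	//   Cookie: session=abc123\r\n
}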
func (me *TimingPathFilesystem) HotPaths(operation string) (paths []string, uniquePaths int) {
	me.statisticsLock.Lock()
	defer me.statisticsLock.Unlock()

	counts := me.pathCounts[operation]
	results := make([]string, 0, len(counts))
	for k, v := range counts {
		results = append(results, fmt.Sprintf("% 9d %s", v, k))
	}
	sort.SortStrings(results)
	return results, len(counts)
}
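// Illustrative sketch (not from the original sources). TopArgs and HotPaths
// above sort formatted strings rather than numbers: because "% 9d" pads the
// count to a fixed width with leading spaces, lexicographic order of the
// lines matches ascending numeric order for non-negative counts up to nine
// digits, so a plain string sort ranks entries by count.
func formatCounts(counts map[string]int) []string {
	lines := make([]string, 0, len(counts))
	for name, n := range counts {
		lines = append(lines, fmt.Sprintf("% 9d %s", n, name))
	}
	// After sorting, the smallest counts come first and the largest last,
	// purely because of the fixed-width padding.
	sort.SortStrings(lines)
	return lines
}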
// For printing latency data.
func PrintMap(m map[string]float64) {
	keys := make([]string, 0, len(m))
	for k, _ := range m {
		keys = append(keys, k)
	}
	sort.SortStrings(keys)
	for _, k := range keys {
		if m[k] > 0 {
			fmt.Println(k, m[k])
		}
	}
}
func TestLookupHost(t *testing.T) {
	// Can't depend on this to return anything in particular,
	// but if it does return something, make sure it doesn't
	// duplicate addresses (a common bug due to the way
	// getaddrinfo works).
	addrs, _ := LookupHost("localhost")
	sort.SortStrings(addrs)
	for i := 0; i+1 < len(addrs); i++ {
		if addrs[i] == addrs[i+1] {
			t.Fatalf("LookupHost(\"localhost\") = %v, has duplicate addresses", addrs)
		}
	}
}
func Usage() {
	fmt.Fprintf(os.Stderr, "Use: %s [options] <command> [options] [args]\n", self)
	fmt.Fprint(os.Stderr, usage1)
	flag.PrintDefaults()
	fmt.Fprintln(os.Stderr)
	fmt.Fprintln(os.Stderr, "Exit Status (not an exhaustive list):")
	fmt.Fprintf(os.Stderr, "  %3d: %s\n", 0, "success")

	var ns []int
	for n := range proto.Response_Err_name {
		ns = append(ns, int(n))
	}
	sort.SortInts(ns)
	for _, n := range ns {
		name := proto.Response_Err_name[int32(n)]
		switch name {
		// These errors should never be exposed to the user,
		// so don't show them in the usage output.
		case "TAG_IN_USE", "UNKNOWN_VERB", "REDIRECT", "INVALID_SNAP":
			continue
		}

		var s string
		switch name {
		case "NOTDIR":
			s = "not a directory"
		case "ISDIR":
			s = "is a directory"
		default:
			s = strings.Replace(strings.ToLower(name), "_", " ", -1)
		}
		fmt.Fprintf(os.Stderr, "  %3d: %s\n", n, s)
	}

	fmt.Fprint(os.Stderr, usage2)

	var max int
	var names []string
	us := make(map[string]string)
	for k := range cmds {
		u := k + " " + cmds[k].a
		if len(u) > max {
			max = len(u)
		}
		names = append(names, k)
		us[k] = u
	}
	sort.SortStrings(names)
	for _, k := range names {
		fmt.Fprintf(os.Stderr, "  %-*s - %s\n", max, us[k], cmds[k].d)
	}
}
func FloatMapToBytes(m map[string]float64) []byte {
	keys := make([]string, 0, len(m))
	for k, _ := range m {
		keys = append(keys, k)
	}
	sort.SortStrings(keys)

	var r []string
	for _, k := range keys {
		r = append(r, fmt.Sprintf("%v %v", k, m[k]))
	}
	return []byte(strings.Join(r, "\n"))
}
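// Illustrative usage sketch (not from the original sources); the map
// contents are hypothetical. Sorting the keys first makes the serialized
// form deterministic regardless of map iteration order, which keeps the
// output diffable and easy to test.
func exampleFloatMapToBytes() {
	m := map[string]float64{"write": 1.5, "open": 0.2, "read": 0.7}
	fmt.Printf("%s\n", FloatMapToBytes(m))
	// prints:
	//   open 0.2
	//   read 0.7
	//   write 1.5
}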