// Today this function creates a route for the
// RouteService.RouteGetUserDataRequest method - which is CLARITY SPECIFIC -
// and adds it to Doozer.
func CreateInitialRoute() {
	r := &skylib.Route{}
	r.Name = sName
	r.LastUpdated = time.Seconds()
	r.Revision = 1
	rpcScore := &skylib.RpcCall{Service: "GetUserDataService.GetUserData", Async: false, OkToRetry: false, ErrOnFail: true}
	rl := new(vector.Vector)
	r.RouteList = rl
	rl.Push(rpcScore)
	b, err := json.Marshal(r)
	if err != nil {
		log.Panic(err.String())
	}
	rev, err := skylib.DC.Rev()
	if err != nil {
		log.Panic(err.String())
	}
	_, err = skylib.DC.Set("/routes/RouteService.RouteGetUserDataRequest", rev, b)
	if err != nil {
		log.Panic(err.String())
	}
	return
}
func (p *parser) parseInterfaceType() *ast.InterfaceType {
	if p.trace {
		defer un(trace(p, "InterfaceType"))
	}

	pos := p.expect(token.INTERFACE)
	lbrace := p.expect(token.LBRACE)
	list := new(vector.Vector)
	for p.tok == token.IDENT {
		m := p.parseMethodSpec()
		if p.tok != token.RBRACE {
			p.expect(token.SEMICOLON)
		}
		m.Comment = p.lineComment
		list.Push(m)
	}
	rbrace := p.expect(token.RBRACE)
	p.optSemi = true

	// convert vector
	methods := make([]*ast.Field, list.Len())
	for i := list.Len() - 1; i >= 0; i-- {
		methods[i] = list.At(i).(*ast.Field)
	}

	return &ast.InterfaceType{pos, lbrace, methods, rbrace, false}
}
func populateServer(serv *server) []sfs.ReplicateChunkArgs {
	str := fmt.Sprintf("%s:%d", serv.addr.IP.String(), serv.addr.Port)
	log.Printf("master: PopulateServer: populating %s\n", str)
	log.Printf("master: PopulateServer: server heap state:\n%s\n", sHeap.printPresent())

	if len(chunks) == 0 {
		return nil
	}

	thisVec := new(vector.Vector)
	for _, chunk := range chunks {
		//log.Printf("master: PopulateServer: examining chunk %+v, nservers %d\n", *chunk, chunk.servers.Len())
		if chunk.servers.Len() < sfs.NREPLICAS {
			// populate chunk location list
			chunklist := make([]net.TCPAddr, chunk.servers.Len())
			for cnt1 := 0; cnt1 < chunk.servers.Len(); cnt1++ {
				chunklist[cnt1] = chunk.servers.At(cnt1).(*server).addr
			}
			// send rpc call off
			thisVec.Push(sfs.ReplicateChunkArgs{chunk.chunkID, chunklist})
		}
	}

	cnt := thisVec.Len()
	thisSlice := make([]sfs.ReplicateChunkArgs, cnt)
	for i := 0; i < cnt; i++ {
		thisSlice[i] = thisVec.Pop().(sfs.ReplicateChunkArgs) // horribly inefficient but what can you do...
	}
	return thisSlice
}
func (p *parser) parseLiteral() literal {
	s := []byte(p.parseString())

	// A string literal may contain %-format specifiers. To simplify
	// and speed up printing of the literal, split it into segments
	// that start with "%" possibly followed by a last segment that
	// starts with some other character.
	var list vector.Vector
	i0 := 0
	for i := 0; i < len(s); i++ {
		if s[i] == '%' && i+1 < len(s) {
			// the next segment starts with a % format
			if i0 < i {
				// the current segment is not empty, split it off
				list.Push(s[i0:i])
				i0 = i
			}
			i++ // skip %; let loop skip over char after %
		}
	}
	// the final segment may start with any character
	// (it is empty iff the string is empty)
	list.Push(s[i0:])

	// convert list into a literal
	lit := make(literal, list.Len())
	for i := 0; i < list.Len(); i++ {
		lit[i] = list.At(i).([]byte)
	}

	return lit
}
// arrayInterface is like array but returns []interface{}.
func (d *decodeState) arrayInterface() []interface{} {
	var v vector.Vector
	for {
		// Look ahead for ] - can only happen on first iteration.
		op := d.scanWhile(scanSkipSpace)
		if op == scanEndArray {
			break
		}

		// Back up so d.value can have the byte we just read.
		d.off--
		d.scan.undo(op)

		v.Push(d.valueInterface())

		// Next token must be , or ].
		op = d.scanWhile(scanSkipSpace)
		if op == scanEndArray {
			break
		}
		if op != scanArrayValue {
			d.error(errPhase)
		}
	}
	return v
}
func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
	if p.trace {
		defer un(trace(p, keyword.String()+"Decl"))
	}

	doc := p.leadComment
	pos := p.expect(keyword)
	var lparen, rparen token.Position
	var list vector.Vector
	if p.tok == token.LPAREN {
		lparen = p.pos
		p.next()
		for p.tok != token.RPAREN && p.tok != token.EOF {
			list.Push(f(p, p.leadComment))
		}
		rparen = p.expect(token.RPAREN)
		p.expectSemi()
	} else {
		list.Push(f(p, nil))
	}

	// convert vector
	specs := make([]ast.Spec, len(list))
	for i, x := range list {
		specs[i] = x.(ast.Spec)
	}

	return &ast.GenDecl{doc, pos, keyword, lparen, specs, rparen}
}
// parseGroups is used to parse a list of group states.
func parseGroups(lines []string) ([]Group, os.Error) {
	var res vector.Vector
	for _, line := range lines {
		ss := strings.Split(strings.TrimSpace(line), " ", 4)
		if len(ss) < 4 {
			return nil, ProtocolError("short group info line: " + line)
		}
		high, err := strconv.Atoi(ss[1])
		if err != nil {
			return nil, ProtocolError("bad number in line: " + line)
		}
		low, err := strconv.Atoi(ss[2])
		if err != nil {
			return nil, ProtocolError("bad number in line: " + line)
		}
		res.Push(&Group{ss[0], high, low, ss[3]})
	}
	realres := make([]Group, res.Len())
	i := 0
	for v := range res.Iter() {
		realres[i] = *v.(*Group)
		i++
	}
	return realres, nil
}
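// A minimal usage sketch for parseGroups, assuming NNTP LIST-style input
// lines of the form "<name> <high> <low> <flag>". The sample values and the
// helper name exampleParseGroups are illustrative only; the Group field
// order (name, high, low, status) is inferred from the Push call above.
func exampleParseGroups() ([]Group, os.Error) {
	lines := []string{
		"comp.lang.go 125 3 y",
		"misc.test 3002322 3000234 y",
	}
	// Each element of the result corresponds to one input line.
	return parseGroups(lines)
}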
func TestGroupBy(t *testing.T) {
	in := IntArray{1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5}
	exp := [][]int{[]int{1}, []int{2, 2}, []int{3, 3, 3}, []int{4, 4, 4, 4}, []int{5, 5, 5, 5, 5}}
	i := 0
	for x := range GroupBy(in, intkey{}).Iter() {
		gr := x.(Group)
		if gr.Key.(int) != i+1 {
			t.Fatal("group key wrong; expected", i+1, "but got", gr.Key.(int))
		}
		vals := Data(gr.Vals)
		assertArraysAreEqual(t, vals, exp[i])
		i++
	}
	if i != 5 {
		t.Fatal("did not return expected number of groups")
	}

	// test 0 length Iterable
	for _ = range GroupBy(IntArray([]int{}), &intkey{}).Iter() {
		t.Fatal("iterator should be empty")
	}

	// test case with only uniques
	var out vector.Vector
	for x := range GroupBy(elevenToTwenty, intkey{}).Iter() {
		out.Push(x.(Group).Key)
	}
	assertArraysAreEqual(t, out.Data(), elevenToTwenty)
}
func chooseNextEdgeByGeneration(T *quadratic.Map) *quadratic.Edge {
	ActiveFaces := new(vector.Vector)
	onGeneration := -1
	T.Faces.Do(func(F interface{}) {
		Fac := F.(*quadratic.Face)
		if Fac.Value.(string) != "active" {
			return
		}
		ActiveFaces.Push(Fac)
		Fac.DoEdges(func(e *quadratic.Edge) {
			//fmt.Fprintf(os.Stderr,"edge generation: %v onGen: %v\n",e.Generation,onGeneration)
			if onGeneration < 0 || e.Generation < onGeneration {
				onGeneration = e.Generation
			}
		})
	})

	activeEdges := new(generationOrderedEdges)
	ActiveFaces.Do(func(F interface{}) {
		Fac := F.(*quadratic.Face)
		Fac.DoEdges(func(e *quadratic.Edge) {
			if e.Generation != onGeneration {
				return
			}
			activeEdges.Push(e)
		})
	})
	//fmt.Fprintf(os.Stderr,"onGen: %v have %v active edges\n",onGeneration,activeEdges.Len())

	sort.Sort(activeEdges)
	return activeEdges.At(0).(*quadratic.Edge)
}
// Data returns a slice containing the elements of iter.
func Data(iter Iterable) []interface{} {
	vec := new(vector.Vector)
	for e := range iter.Iter() {
		vec.Push(e)
	}
	return vec.Data()
}
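// A minimal usage sketch for Data. It assumes the package's Iterable
// interface is satisfied by an Iter() <-chan interface{} method; the intList
// type and exampleData helper below are hypothetical, for illustration only.
type intList []int

func (l intList) Iter() <-chan interface{} {
	ch := make(chan interface{})
	go func() {
		for _, v := range l {
			ch <- v
		}
		close(ch)
	}()
	return ch
}

func exampleData() []interface{} {
	// Collects the iterated elements into a slice: []interface{}{1, 2, 3}.
	return Data(intList{1, 2, 3})
}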
// Consume a group of adjacent comments, add it to the parser's
// comments list, and return the line on which the last comment
// in the group ends. An empty line or non-comment token terminates
// a comment group.
//
func (p *parser) consumeCommentGroup() int {
	list := new(vector.Vector)
	endline := p.pos.Line
	for p.tok == token.COMMENT && endline+1 >= p.pos.Line {
		var comment *ast.Comment
		comment, endline = p.consumeComment()
		list.Push(comment)
	}

	// convert list
	group := make([]*ast.Comment, list.Len())
	for i := 0; i < list.Len(); i++ {
		group[i] = list.At(i).(*ast.Comment)
	}

	// add comment group to the comments list
	g := &ast.CommentGroup{group, nil}
	if p.lastComment != nil {
		p.lastComment.Next = g
	} else {
		p.comments = g
	}
	p.lastComment = g

	return endline
}
func (p *parser) parseStructType() *ast.StructType {
	if p.trace {
		defer un(trace(p, "StructType"))
	}

	pos := p.expect(token.STRUCT)
	lbrace := p.expect(token.LBRACE)
	list := new(vector.Vector)
	for p.tok == token.IDENT || p.tok == token.MUL {
		f := p.parseFieldDecl()
		if p.tok != token.RBRACE {
			p.expect(token.SEMICOLON)
		}
		f.Comment = p.lineComment
		list.Push(f)
	}
	rbrace := p.expect(token.RBRACE)
	p.optSemi = true

	// convert vector
	fields := make([]*ast.Field, list.Len())
	for i := list.Len() - 1; i >= 0; i-- {
		fields[i] = list.At(i).(*ast.Field)
	}

	return &ast.StructType{pos, lbrace, fields, rbrace, false}
}
// codewalkDir serves the codewalk directory listing.
// It scans the directory for subdirectories or files named *.xml
// and prepares a table.
func codewalkDir(w http.ResponseWriter, r *http.Request, relpath, abspath string) {
	type elem struct {
		Name  string
		Title string
	}

	dir, err := ioutil.ReadDir(abspath)
	if err != nil {
		log.Print(err)
		serveError(w, r, relpath, err)
		return
	}
	var v vector.Vector
	for _, fi := range dir {
		if fi.IsDirectory() {
			v.Push(&elem{fi.Name + "/", ""})
		} else if strings.HasSuffix(fi.Name, ".xml") {
			cw, err := loadCodewalk(abspath + "/" + fi.Name)
			if err != nil {
				continue
			}
			v.Push(&elem{fi.Name[0 : len(fi.Name)-len(".xml")], cw.Title})
		}
	}

	b := applyTemplate(codewalkdirHTML, "codewalkdir", v)
	servePage(w, "Codewalks", "", "", b)
}
func (p *parser) parseSwitchStmt() ast.Stmt {
	if p.trace {
		defer un(trace(p, "SwitchStmt"))
	}

	pos := p.expect(token.SWITCH)
	s1, s2, _ := p.parseControlClause(false)

	if isExprSwitch(s2) {
		lbrace := p.expect(token.LBRACE)
		cases := new(vector.Vector)
		for p.tok == token.CASE || p.tok == token.DEFAULT {
			cases.Push(p.parseCaseClause())
		}
		rbrace := p.expect(token.RBRACE)
		p.optSemi = true
		body := &ast.BlockStmt{lbrace, makeStmtList(cases), rbrace}
		return &ast.SwitchStmt{pos, s1, p.makeExpr(s2), body}
	}

	// type switch
	// TODO(gri): do all the checks!
	lbrace := p.expect(token.LBRACE)
	cases := new(vector.Vector)
	for p.tok == token.CASE || p.tok == token.DEFAULT {
		cases.Push(p.parseTypeCaseClause())
	}
	rbrace := p.expect(token.RBRACE)
	p.optSemi = true
	body := &ast.BlockStmt{lbrace, makeStmtList(cases), rbrace}
	return &ast.TypeSwitchStmt{pos, s1, s2, body}
}
func findRand(t *testing.T, conn *db.Connection, ch chan *vector.Vector) {
	stmt, sErr := conn.Prepare(
		"SELECT * FROM t WHERE i != ? ORDER BY RAND()")
	if sErr != nil {
		error(t, sErr, "Couldn't prepare")
	}
	rs, cErr := conn.Execute(stmt, rand.Int())
	if cErr != nil {
		error(t, cErr, "Couldn't select")
	}
	vout := new(vector.Vector)
	for res := range rs.Iter() {
		if res.Error() != nil {
			error(t, res.Error(), "Couldn't fetch")
		}
		vout.Push(res.Data())
	}
	if vout.Len() != len(tableT) {
		t.Error("Invalid length")
	}
	stmt.Close()
	ch <- vout
}
// use win-rate distribution of node to play a legal move in tracker
func (node *Node) seed(t Tracker, path []int) bool {
	if node.parent == nil {
		return false
	}
	dist := new(vector.Vector)
	sum := 0.0
	for sibling := node.parent.Child; sibling != nil; sibling = sibling.Sibling {
		for i := 0; i < len(path); i++ {
			if sibling.Vertex == path[i] {
				continue
			}
		}
		dist.Push(sibling.blendedMean)
		sum += sibling.blendedMean
	}
	node.totalseeds++
	r := rand.Float64() * sum
	for i := 0; i < dist.Len(); i++ {
		r -= dist.At(i).(float64)
		if r <= 0 {
			if t.Legal(node.Color, i) {
				t.Play(node.Color, i)
				node.seeds++
				return true
			}
			return false
		}
	}
	return false
}
func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
	if p.trace {
		defer un(trace(p, "CallOrConversion"))
	}

	lparen := p.expect(token.LPAREN)
	p.exprLev++
	var list vector.Vector
	var ellipsis token.Position
	for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() {
		list.Push(p.parseExpr())
		if p.tok == token.ELLIPSIS {
			ellipsis = p.pos
			p.next()
		}
		if p.tok != token.COMMA {
			break
		}
		p.next()
	}
	p.exprLev--
	rparen := p.expect(token.RPAREN)

	return &ast.CallExpr{fun, lparen, makeExprList(&list), ellipsis, rparen}
}
// ClassicFetchAll fetches all remaining results. If no results are fetched
// at all, any error encountered is returned; otherwise an error that
// occurred along the way is hidden.
func ClassicFetchAll(rs ClassicResultSet) (data [][]interface{}, error os.Error) {
	var v vector.Vector
	var d interface{}
	var e os.Error

	for rs.More() {
		r := rs.Fetch()
		d = r.Data()
		if d != nil {
			v.Push(d)
		}
		e = r.Error()
		if e != nil {
			break
		}
	}

	l := v.Len()
	if l > 0 {
		// TODO: how can this be done better?
		data = make([][]interface{}, l)
		for i := 0; i < l; i++ {
			data[i] = v.At(i).([]interface{})
		}
	} else {
		// no results at all, return the error
		error = e
	}

	return
}
func makeVector(n int) *vector.Vector {
	v := new(vector.Vector)
	for i := 0; i < n; i++ {
		v.Push(&User{"Mike", 1})
	}
	return v
}
// Next will choose a random point to put a piece.
func (ap *RandomPlayer) Next(m *match.Match) *match.Response {
	color := ap.teban.Color()
	candidates := new(vector.Vector)
	size := m.Board.Size()
	for y := 1; y <= size; y++ {
		for x := 1; x <= size; x++ {
			if m.Board.CanPutAt(color, x, y, m.History) {
				if !m.Board.IsEye(color, x, y) {
					candidates.Push(&point.Point{x, y})
				}
			}
		}
	}
	if candidates.Len() != 0 {
		bs := make([]byte, 1)
		_, err := rand.Read(bs)
		if err == nil {
			// Note: a single random byte can only address the first 256
			// candidates, and the modulo introduces a slight bias.
			p := candidates.At(int(bs[0]) % candidates.Len()).(*point.Point)
			ts, resp := m.Board.PutAt(color, p.X(), p.Y(), m.History)
			if resp == board.OK {
				fmt.Printf("[random] put %d,%d\n", p.X(), p.Y())
				return match.NewPutResponse(p.X(), p.Y(), ts)
			}
		}
	}
	return match.NewPassResponse()
}
func CountColor(pngR io.Reader) int { /* modify here */
	// uniq := new(vector.Vector[uint32])
	var colorVector vector.Vector
	var rVector vector.IntVector
	var gVector vector.IntVector
	var bVector vector.IntVector

	im, _ := png.Decode(pngR)
	for y := 0; y < im.Bounds().Dy(); y++ {
		for x := 0; x < im.Bounds().Dx(); x++ {
			color := im.At(x, y)
			unique := true
			r, g, b, _ := color.RGBA()
			for i := 0; i < colorVector.Len(); i++ {
				if r == uint32(rVector.At(i)) && g == uint32(gVector.At(i)) && b == uint32(bVector.At(i)) {
					unique = false
				}
			}
			if unique {
				colorVector.Push(color)
				rVector.Push(int(r))
				gVector.Push(int(g))
				bVector.Push(int(b))
			}
		}
	}
	return colorVector.Len()
}
func (p *parser) parseExpression() Expression {
	var list vector.Vector
	for {
		x := p.parseSequence()
		if x != nil {
			list.Push(x)
		}
		if p.tok != token.OR {
			break
		}
		p.next()
	}

	// no need for an Alternative node if list.Len() < 2
	switch list.Len() {
	case 0:
		return nil
	case 1:
		return list.At(0).(Expression)
	}

	// convert list into an Alternative node
	alt := make(Alternative, list.Len())
	for i := 0; i < list.Len(); i++ {
		alt[i] = list.At(i).(Expression)
	}
	return alt
}
// Consume a group of adjacent comments, add it to the parser's
// comments list, and return the line on which the last comment
// in the group ends. An empty line or non-comment token terminates
// a comment group.
//
func (p *parser) consumeCommentGroup() int {
	var list vector.Vector
	endline := p.pos.Line
	for p.tok == token.COMMENT && endline+1 >= p.pos.Line {
		var comment *ast.Comment
		comment, endline = p.consumeComment()
		list.Push(comment)
	}

	// convert list
	group := make([]*ast.Comment, len(list))
	for i, x := range list {
		group[i] = x.(*ast.Comment)
	}

	// add comment group to the comments list
	g := &ast.CommentGroup{group, nil}
	if p.lastComment != nil {
		p.lastComment.Next = g
	} else {
		p.comments = g
	}
	p.lastComment = g

	return endline
}
func (tmpl *Template) Render(context ...interface{}) string {
	var buf bytes.Buffer
	var contextChain vector.Vector
	for _, c := range context {
		val := reflect.ValueOf(c)
		contextChain.Push(val)
	}
	tmpl.renderTemplate(&contextChain, &buf)
	return buf.String()
}
// Durp. Why this originally created its own buffer and then returned the
// string is beyond me... it's much more efficient to just ask for an
// io.Writer to begin with.
func (tmpl *Template) Render(wr io.Writer, context ...interface{}) (e os.Error) {
	//var buf bytes.Buffer
	var contextChain vector.Vector
	for _, c := range context {
		val := reflect.ValueOf(c)
		contextChain.Push(val)
	}
	tmpl.renderTemplate(&contextChain, wr)
	return
}
// visit walks dag depth-first from index n, recursing into each node's Cl
// relations before pushing the node's Id onto L (a post-order traversal).
func visit(dag vec.Vector, n int, L *vec.Vector, visited []bool) {
	if !visited[n] {
		visited[n] = true
		for i := 0; i < len(dag.At(n).(*par.Node).Cl); i++ {
			visit(dag, par.GetIndexById(dag, dag.At(n).(*par.Node).Cl.At(i).(*par.Rel).Id), L, visited)
		}
		L.Push(dag.At(n).(*par.Node).Id)
	}
}
// takeOff removes the connected group of cells of kind c that contains the
// given point, recording each removed point in takenOffs.
func (b *Board) takeOff(x int, y int, c cell.Cell, takenOffs *vector.Vector) {
	if b.At(x, y) == c {
		b.TakeAt(x, y)
		takenOffs.Push(point.Point{x, y})
		b.takeOff(x-1, y, c, takenOffs)
		b.takeOff(x+1, y, c, takenOffs)
		b.takeOff(x, y-1, c, takenOffs)
		b.takeOff(x, y+1, c, takenOffs)
	}
}
func (r *Redis) getSetKeys(index, scope, field string, tc tokenizer.TokenChan) (keys vector.Vector) {
	var ts vector.Vector
	for token := range tc {
		ts.Push(buildKey(index, scope, field, token.Backing()))
	}
	rs := r.redis.Command("keys", ts...)
	rs.ValuesDo(func(rv rdc.ResultValue) {
		keys.Push(rv.String())
	})
	return keys
}
func nsRecords(domain string, asection *vector.Vector) {
	for i := 0; i < len(as112nameServers); i++ {
		asection.Push(types.RR{
			Name:  domain,
			TTL:   defaultTTL,
			Type:  types.NS,
			Class: types.IN,
			Data:  types.Encode(as112nameServers[i]),
		})
	}
}
func TypeDecl(decl *ast.GenDecl, vec *vector.Vector) {
	for _, spec := range decl.Specs {
		ts := spec.(*ast.TypeSpec)
		name := ts.Name.String()
		vec.Push(&Declaration{
			ts.Name.IsExported(),
			Type,
			name,
			fmt.Sprintf("type %s %s", name, TypeExpr(ts.Type)),
		})
	}
}
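// A sketch of how TypeDecl might be driven, assuming decls is the Decls
// slice of an *ast.File obtained elsewhere; collectTypeDecls and its
// variable names are illustrative only, not part of the original package.
func collectTypeDecls(decls []ast.Decl) *vector.Vector {
	vec := new(vector.Vector)
	for _, d := range decls {
		// Only type declarations (GenDecl with the TYPE token) are handed
		// to TypeDecl.
		if gd, ok := d.(*ast.GenDecl); ok && gd.Tok == token.TYPE {
			TypeDecl(gd, vec)
		}
	}
	return vec
}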