func SaveStringsInPo(printer PrinterInterface, options Options, stringInfos map[string]StringInfo, outputDirname string, fileName string) error {
	if len(stringInfos) != 0 {
		printer.Println("Creating and saving i18n strings to .po file:", fileName)
	}

	if !options.DryRunFlag && len(stringInfos) != 0 {
		err := CreateOutputDirsIfNeeded(outputDirname)
		if err != nil {
			printer.Println(err)
			return err
		}

		file, err := os.Create(filepath.Join(outputDirname, filepath.Base(fileName)))
		if err != nil {
			printer.Println(err)
			return err
		}
		// Defer the Close only after Create has succeeded.
		defer file.Close()

		for _, stringInfo := range stringInfos {
			file.Write([]byte("# filename: " + strings.Split(fileName, ".en.po")[0] +
				", offset: " + strconv.Itoa(stringInfo.Offset) +
				", line: " + strconv.Itoa(stringInfo.Line) +
				", column: " + strconv.Itoa(stringInfo.Column) + "\n"))
			file.Write([]byte("msgid " + strconv.Quote(stringInfo.Value) + "\n"))
			file.Write([]byte("msgstr " + strconv.Quote(stringInfo.Value) + "\n"))
			file.Write([]byte("\n"))
		}
	}

	return nil
}
func (wr *TSWrite) OpenTSDBSingle() []byte {
	mJson := bytes.NewBuffer(make([]byte, 0))
	mJson.Write([]byte(`{`))
	mJson.Write([]byte(`"metric" : `))
	mJson.Write([]byte(strconv.Quote(wr.single.metric)))
	mJson.Write([]byte(`,`))
	mJson.Write([]byte(`"timestamp" : `))
	mJson.Write([]byte(strconv.FormatInt(wr.single.stamp/int64(time.Millisecond), 10)))
	mJson.Write([]byte(`,`))
	mJson.Write([]byte(`"value" : `))
	mJson.Write([]byte(strconv.FormatFloat(wr.single.value, 'f', -1, 64)))
	mJson.Write([]byte(`, `))
	mJson.Write([]byte(`"tags" : `))
	mJson.Write([]byte(`{`))

	var cnt int = 0
	for idx, value := range wr.single.tags {
		mJson.Write([]byte(strconv.Quote(idx) + " : "))
		mJson.Write([]byte(strconv.Quote(value)))
		if cnt < (len(wr.single.tags) - 1) {
			mJson.Write([]byte(`,`))
		}
		cnt++
	}

	mJson.Write([]byte(`}`))
	mJson.Write([]byte(`}`))

	return mJson.Bytes()
}
func (source *Translations) toJsonWebStatic(template *Translations) []byte {
	var buf bytes.Buffer
	var k, t string
	var order []string

	next := false
	hastemplate := template != nil && len((*template).Order) > 0
	if hastemplate {
		order = (*template).Order
	} else {
		order = (*source).Order
	}

	buf.WriteString("{\n")
	for i := 0; i < len(order); i++ {
		k = order[i]
		t = (*source).Data[k].Translation
		if len(t) == 0 && hastemplate {
			t = (*template).Data[k].Translation
		}
		if next {
			buf.WriteString(",\n")
		} else {
			next = true
		}
		buf.WriteString(fmt.Sprintf("%v%v: %v", indent, strconv.Quote(k), strconv.Quote(t)))
	}
	buf.WriteString("\n}\n")

	return buf.Bytes()
}
func (source *Translations) ToPO(target *Translations, template bool) []byte {
	var buf bytes.Buffer
	buf.WriteString((*source).Header)

	hastarget := target != nil && len((*target).Data) > 0

	var k, t string
	var po PO
	for i := 0; i < len((*source).Order); i = i + 1 {
		k = (*source).Order[i]
		po = (*source).Data[k]
		if hastarget {
			t = strconv.Quote((*target).Data[k].Translation) // translation in target language
		} else {
			t = strconv.Quote(po.Translation) // translation in source language (en)
		}
		buf.WriteString(fmt.Sprintln())
		buf.WriteString(fmt.Sprintf("#: %v\n", po.Localization))
		buf.WriteString(fmt.Sprintln("msgctxt", strconv.Quote(k)))
		buf.WriteString(fmt.Sprintln("msgid", strconv.Quote(po.Original)))
		if template {
			buf.WriteString(fmt.Sprintln("msgstr", `""`))
		} else {
			buf.WriteString(fmt.Sprintln("msgstr", t))
		}
	}

	return buf.Bytes()
}
func (g *Generator) genStructFieldEncoder(t reflect.Type, f reflect.StructField) error {
	jsonName := g.namer.GetJSONFieldName(t, f)
	tags := parseFieldTags(f)

	if tags.omit {
		return nil
	}

	if !tags.omitEmpty && !g.omitEmpty || tags.noOmitEmpty {
		fmt.Fprintln(g.out, " if !first { out.RawByte(',') }")
		fmt.Fprintln(g.out, " first = false")
		fmt.Fprintf(g.out, " out.RawString(%q)\n", strconv.Quote(jsonName)+":")
		return g.genTypeEncoder(f.Type, "in."+f.Name, tags, 1)
	}

	fmt.Fprintln(g.out, " if", g.notEmptyCheck(f.Type, "in."+f.Name), "{")
	fmt.Fprintln(g.out, " if !first { out.RawByte(',') }")
	fmt.Fprintln(g.out, " first = false")
	fmt.Fprintf(g.out, " out.RawString(%q)\n", strconv.Quote(jsonName)+":")
	if err := g.genTypeEncoder(f.Type, "in."+f.Name, tags, 2); err != nil {
		return err
	}
	fmt.Fprintln(g.out, " }")

	return nil
}
func syncDir(bucket, keyPrefix, dir string) int {
	keyPrefix = strings.TrimPrefix(keyPrefix, "/")

	// errCount is updated from several upload goroutines, so guard it with a mutex.
	var mu sync.Mutex
	errCount := 0
	wg := &sync.WaitGroup{}

	filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if info.IsDir() {
			return nil
		}
		rel, _ := filepath.Rel(dir, path)
		key := filepath.Join(keyPrefix, rel)

		wg.Add(1)
		go func() {
			defer wg.Done()
			log.Printf("Upload %v ...", strconv.Quote(key))
			if err := uploadFile(bucket, key, path); err != nil {
				mu.Lock()
				errCount++
				mu.Unlock()
				log.Printf("Failed %v, %v", strconv.Quote(path), err)
			} else {
				log.Printf("Done %v", strconv.Quote(key))
			}
		}()
		return nil
	})
	wg.Wait()

	return errCount
}
// PrepareForTemplate uses the Columns slice to transform the rows so that correct Go code can be printed.
// int/Float values won't be touched. Bools or IntBools will be converted to true/false. Strings will be quoted.
// And if there is an entry in the AttributeModelMap then the Go code from the map will be used.
// Returns a slice containing all the import paths. Import paths which are equal to pkg will be filtered out.
func PrepareForTemplate(cols Columns, rows []StringEntities, amm AttributeModelDefMap, targetPkg string) []string {
	ip := make([]string, 0, 10) // import_path container
	for _, row := range rows {
		for colName, colValue := range row {
			var c = cols.GetByName(colName)
			if !c.Field.Valid {
				continue
			}

			goType, hasModel := amm[colValue]
			_, isAllowedInterfaceChange := EavAttributeColumnNameToInterface[colName]

			switch {
			case hasModel:
				row[colName] = "nil"
				if goType.GoFunc != "" {
					row[colName] = goType.Func()
					if validImportPath(goType, ip, targetPkg) {
						ip = append(ip, goType.Import())
					}
				}
			case isAllowedInterfaceChange:
				// if there is no defined model but column is (backend|frontend|data|source)_model then nil it
				row[colName] = "nil"
			case c.IsBool():
				row[colName] = "false"
				if colValue == "1" {
					row[colName] = "true"
				}
			case c.IsInt():
				if colValue == "" {
					row[colName] = "0"
				}
			case c.IsString():
				row[colName] = strconv.Quote(colValue)
			case c.IsFloat():
				if colValue == "" {
					row[colName] = "0.0"
				}
			case c.IsDate():
				if colValue == "" {
					row[colName] = "nil"
				} else {
					row[colName] = "time.Parse(`2006-01-02 15:04:05`," + strconv.Quote(colValue) + ")" // @todo timezone
				}
			default:
				panic(fmt.Sprintf("\nERROR cannot detect SQL type: %s -> %s\n%#v\n", colName, colValue, c))
			}
		}
	}
	return ip
}
func (e *goof) getMessage(includeFields bool) string {
	if !includeFields {
		return e.msg
	}

	buf := &bytes.Buffer{}
	fmt.Fprintf(buf, "msg=%q", e.msg)

	for k, v := range e.data {
		sv := fmt.Sprintf("%v", v)

		// Strip one level of surrounding double, single, or back quotes.
		if m := dublQuoteRX.FindStringSubmatch(sv); len(m) > 0 {
			sv = m[1]
		} else if m := snglQuoteRX.FindStringSubmatch(sv); len(m) > 0 {
			sv = m[1]
		} else if m := backQuoteRX.FindStringSubmatch(sv); len(m) > 0 {
			sv = m[1]
		}

		// Quote keys and values that contain whitespace.
		if containsWS.MatchString(k) {
			k = strconv.Quote(k)
		}
		if containsWS.MatchString(sv) {
			sv = strconv.Quote(sv)
		}

		fmt.Fprintf(buf, " %s=%s", k, sv)
	}

	return buf.String()
}
// RewriteImports rewrites imports in the passed AST (in-place).
// It returns changed set to true if any changes were made,
// and a non-nil err on error.
func RewriteImports(f *ast.File, prefix string, remove bool) (changed bool, err error) {
	for _, impNode := range f.Imports {
		imp, err := strconv.Unquote(impNode.Path.Value)
		if err != nil {
			log.Printf("Error unquoting import value %v - %s\n", impNode.Path.Value, err)
			return false, err
		}

		// skip standard library imports and relative references
		if !strings.Contains(imp, ".") || strings.HasPrefix(imp, ".") {
			continue
		}

		if remove {
			if strings.HasPrefix(imp, prefix) {
				changed = true
				impNode.Path.Value = strconv.Quote(imp[len(prefix):])
			}
		} else {
			// if import does not start with the prefix already, add it
			if !strings.HasPrefix(imp, prefix) {
				changed = true
				impNode.Path.Value = strconv.Quote(prefix + imp)
			}
		}
	}
	return
}
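// A minimal usage sketch for RewriteImports, assuming only the standard
// library (go/token, go/parser, go/printer, os): parse a Go source file,
// prefix its non-stdlib import paths, and print the rewritten file. The
// helper name rewriteFileImports and its arguments are hypothetical and not
// part of the package above.
func rewriteFileImports(filename, prefix string) error {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
	if err != nil {
		return err
	}
	changed, err := RewriteImports(f, prefix, false)
	if err != nil {
		return err
	}
	if changed {
		// Emit the modified AST; no gofmt-style reformatting is applied here.
		return printer.Fprint(os.Stdout, fset, f)
	}
	return nil
}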
// Renders the coordinates of the ElasticSearch document as JSON.
func (e *ElasticSearchCoordinates) PopulateBuffer(m *message.Message, buf *bytes.Buffer) {
	buf.WriteString(`{"index":{"_index":`)

	var (
		err         error
		interpIndex string
		interpType  string
		interpId    string
	)

	// Interpolation errors for the index and type are not checked; only the
	// _id interpolation is verified below.
	interpIndex, err = interpolateFlag(e, m, e.Index)
	buf.WriteString(strconv.Quote(interpIndex))

	buf.WriteString(`,"_type":`)
	interpType, err = interpolateFlag(e, m, e.Type)
	buf.WriteString(strconv.Quote(interpType))

	// Interpolate the Id flag.
	interpId, err = interpolateFlag(e, m, e.Id)

	// Check that Id successfully interpolated. If not then do not specify id
	// at all and default to auto-generated one.
	if len(e.Id) > 0 && err == nil {
		buf.WriteString(`,"_id":`)
		buf.WriteString(strconv.Quote(interpId))
	}
	buf.WriteString(`}}`)
}
func outputCsv(writer io.Writer, dependency *Dependency) {
	for _, module := range dependency.modules {
		for _, to := range keys(dependency.relation[module]) {
			fmt.Fprintf(writer, "%s,%s\n", strconv.Quote(module), strconv.Quote(to))
		}
	}
}
/* run parses a line and runs the appropriate command. Source should be
something that identifies the caller, like an IP address. Ding should be a
"\a" for a bell every command, or the empty string. */
func run(line, source, ding string) error {
	commandsL.Lock()
	defer commandsL.Unlock()

	/* Make sure we actually have a command */
	if 0 == len(line) {
		return nil
	}

	/* Split into fields */
	a := strings.Fields(line)
	if 0 == len(a) {
		return nil
	}

	/* Get the function to call */
	f, ok := commands[a[0]]
	if !ok {
		fmt.Printf("Nice Try!\n")
		log.Printf(
			"%v!: Unable to find command %v",
			source,
			strconv.Quote(line),
		)
		return nil
	}

	/* Comms channel */
	pr, pw := io.Pipe()
	defer pr.Close()
	defer pw.Close()
	go io.Copy(os.Stdout, pr)

	/* Start the command */
	log.Printf("%v%v: %v", ding, source, strconv.Quote(line))
	f(a[0], a[1:], pw)

	return nil
}
func checkWarningExpectation(prog *ssa.Program, e *expectation, warnings []pointer.Warning) bool {
	// TODO(adonovan): check the position part of the warning too?
	re, err := regexp.Compile(e.args[0])
	if err != nil {
		e.errorf("invalid regular expression in @warning expectation: %s", err.Error())
		return false
	}

	if len(warnings) == 0 {
		e.errorf("@warning %s expectation, but no warnings", strconv.Quote(e.args[0]))
		return false
	}

	for _, w := range warnings {
		if re.MatchString(w.Message) {
			return true
		}
	}

	e.errorf("@warning %s expectation not satisfied; found these warnings though:", strconv.Quote(e.args[0]))
	for _, w := range warnings {
		fmt.Printf("%s: warning: %s\n", prog.Fset.Position(w.Pos), w.Message)
	}
	return false
}
func printExportFunction(ctx context.Context, env *envctx.Env, g *builder.Builder, export *sysctx.SysExportInfo) error {
	g.Println("func ", system.GoName(export.Name), "() *", g.SprintRef(export.TypePackage, system.GoName(export.TypeName)), " {")
	{
		g.Print("ctx := ")
		g.PrintFunctionCall("kego.io/system", "NewContext",
			g.SprintFunctionCall("context", "Background"),
			strconv.Quote(env.Path),
			fmt.Sprintf("%#v", env.Aliases),
		)
		g.Println()
		g.Println("o := new(", g.SprintRef(export.TypePackage, system.GoName(export.TypeName)), ")")
		g.Print("err := ")
		g.PrintMethodCall("o", "Unpack", "ctx",
			g.SprintFunctionCall("kego.io/system", "MustPackString", strconv.Quote(string(export.JsonContents))),
			"false",
		)
		g.Println()
		g.Println("if err != nil {")
		{
			g.Println("panic(err.Error())")
		}
		g.Println("}")
		g.Println("return o")
	}
	g.Println("}")
	return nil
}
func (k *KeywordRule) Parse(p *Parser) Result {
	p.Enter("KeywordRule(%s)", k.Value)
	defer p.Exit()

	p.PushTransaction()
	tok := p.Next()
	if tok.Type != TokenKeyword {
		p.Discard()
		return Result{Type: NoMatch, Message: Message(k.RuleName(), tok.Position,
			"Expected keyword got %s (at %s)", tok.Type, tok.Position)}
	}
	if tok.Value != k.Value {
		p.Discard()
		return Result{Type: NoMatch, Message: Message(k.RuleName(), tok.Position,
			"Expected %s, got %s (at %s)", strconv.Quote(k.Value), strconv.Quote(tok.Value), tok.Position)}
	}
	p.Commit()
	return Result{Type: Match, Value: tok}
}
func (s *SymbolRule) Parse(p *Parser) Result {
	p.Enter("SymbolRule(%s)", s.Value)
	defer p.Exit()

	p.PushTransaction()
	tok := p.Next()
	if tok.Type != TokenSymbol {
		p.Discard()
		return Result{Type: NoMatch, Message: Message(s.RuleName(), tok.Position,
			"Expected symbol got %s (at %s)", tok.Type, tok.Position)}
	}
	if tok.Value != s.Value {
		p.Discard()
		return Result{Type: NoMatch, Message: Message(s.RuleName(), tok.Position,
			"Expected %s, got %s (at %s)", strconv.Quote(s.Value), strconv.Quote(tok.Value), tok.Position)}
	}
	p.Commit()
	return Result{Type: Match, Value: tok}
}
func TestCanonicalize(t *testing.T) {
	var successCases = []struct {
		in  string
		out string
	}{
		{"select * from t", "select * from t"},
		{" select * from t\n", "select * from t"},
		{"/*!90620 set interpreter_mode=llvm*/", "/*!90620 set interpreter_mode=llvm*/"},
	}
	for _, c := range successCases {
		actual, err := canonicalizeQuery(c.in)
		if err != nil {
			t.Errorf("Error canonicalizing query %s: %v", strconv.Quote(c.in), err)
		} else if actual != c.out {
			t.Errorf("Failure canonicalizing query %s:\ngot\t\t%s\nbut expected\t%s",
				strconv.Quote(c.in), strconv.Quote(actual), strconv.Quote(c.out))
		}
	}

	var failCases = []string{
		"select * from t; select 1",
		"use db",
		"begin",
	}
	for _, c := range failCases {
		_, err := canonicalizeQuery(c)
		if err == nil {
			t.Errorf("Unexpected successful canonicalization of query %s", strconv.Quote(c))
		}
	}
}
func (r *Rewriter) Visit(node ast.Node) ast.Visitor {
	switch n := node.(type) {
	case *ast.AssignStmt:
		for idx, stmt := range n.Lhs {
			if sel, ok := stmt.(*ast.SelectorExpr); ok {
				if expr, ok := sel.X.(*ast.Ident); ok && expr.Name == "ctx" {
					attr := sel.Sel.Name
					fname := ""
					if attr == strings.ToUpper(attr) {
						fname = "Setenv"
					} else if attr == strings.ToLower(attr) {
						fname = "Set"
					}
					if fname != "" {
						nexpr := &ast.CallExpr{
							Args: make([]ast.Expr, 2),
							Fun: &ast.SelectorExpr{
								X:   &ast.Ident{Name: "ctx"},
								Sel: &ast.Ident{Name: fname},
							},
						}
						nexpr.Args[0] = asExpr(strconv.Quote(attr))
						nexpr.Args[1] = n.Rhs[idx]
						n.Lhs[idx] = asExpr("_")
						n.Rhs[idx] = nexpr
					}
				}
			}
		}
	case *ast.CallExpr:
		switch c := n.Fun.(type) {
		case *ast.SelectorExpr:
			if expr, ok := c.X.(*ast.Ident); ok && expr.Name == "ctx" {
				attr := c.Sel.Name
				if attr == strings.ToUpper(attr) {
					c.Sel.Name = "Setenv"
					n.Args = append(n.Args, nil)
					copy(n.Args[1:], n.Args[:])
					n.Args[0] = asExpr(strconv.Quote(attr))
				}
			}
		case *ast.Ident:
			if c.Name == "ctx" {
				c.Name = "ctx.GetSettings"
			}
		}
	case *ast.SelectorExpr:
		if expr, ok := n.X.(*ast.Ident); ok && expr.Name == "ctx" {
			attr := n.Sel.Name
			if attr == strings.ToUpper(attr) {
				n.Sel.Name = fmt.Sprintf("Getenv(%q)", attr)
			} else if attr == strings.ToLower(attr) {
				n.Sel.Name = fmt.Sprintf("Get(%q)", attr)
			}
		}
	}
	return r
}
func write(service string, description string, command string, param string) {
	service = strings.TrimSpace(service)
	description = strings.TrimSpace(description)
	command = strings.TrimSpace(command)
	command = strconv.Quote(command)
	param = strings.TrimSpace(param)
	param = strconv.Quote(param)

	var buffer bytes.Buffer
	buffer.WriteString("#! /bin/sh\n")
	buffer.WriteString("NAME=\"" + service + "\"\n")
	buffer.WriteString("DESC=\"" + description + "\"\n")
	buffer.WriteString("PIDFILE=\"/var/run/${NAME}.pid\"\n")
	buffer.WriteString("LOGFILE=\"/var/log/${NAME}.log\"\n")
	buffer.WriteString("COMMAND=" + command + "\n")
	buffer.WriteString("COMMAND_OPT=" + param + "\n")
	buffer.WriteString("START_OPTS=\"--start --background --make-pidfile --pidfile ${PIDFILE} --name $NAME --exec $COMMAND -- $COMMAND_OPT\"\n")
	buffer.WriteString("STOP_OPTS=\"--stop --pidfile ${PIDFILE}\"\n")
	buffer.WriteString("case \"$1\" in\n")
	buffer.WriteString("start)\n")
	buffer.WriteString(" start-stop-daemon $START_OPTS >> $LOGFILE\n")
	buffer.WriteString(" echo \"start $NAME completed\"\n")
	buffer.WriteString(";;\n")
	buffer.WriteString("stop)\n")
	buffer.WriteString(" start-stop-daemon $STOP_OPTS\n")
	buffer.WriteString(" rm -f $PIDFILE\n")
	buffer.WriteString(" echo \"stop $NAME completed\"\n")
	buffer.WriteString(";;\n")
	buffer.WriteString("restart)\n")
	buffer.WriteString(" start-stop-daemon $STOP_OPTS\n")
	buffer.WriteString(" sleep 1\n")
	buffer.WriteString(" start-stop-daemon $START_OPTS >> $LOGFILE\n")
	buffer.WriteString(" echo \"restart $NAME completed\"\n")
	buffer.WriteString(";;\n")
	buffer.WriteString("*)\n")
	buffer.WriteString(" N=/etc/init.d/$NAME\n")
	buffer.WriteString(" echo \"Usage: $N {start|stop|restart}\" >&2\n")
	buffer.WriteString(" exit 1\n")
	buffer.WriteString(" ;;\n")
	buffer.WriteString("esac\n")
	buffer.WriteString("exit 0")

	err := ioutil.WriteFile(service, buffer.Bytes(), 0755)
	if err != nil {
		panic(err)
	}
}
func (self *StructFieldRepr) PossibleJSONKeys() string {
	if t := self.getJSONFieldTagName(); t != "" {
		return strconv.Quote(t)
	}
	return strings.Join([]string{
		strconv.Quote(self.Name),
		strconv.Quote(strings.ToLower(string(self.Name[0])) + self.Name[1:]),
	}, ", ")
}
func outputDot(writer io.Writer, dependency *Dependency) {
	fmt.Fprintf(writer, "digraph \"graph\" {\n")
	for _, module := range dependency.modules {
		for _, to := range keys(dependency.relation[module]) {
			fmt.Fprintf(writer, " %s -> %s;\n", strconv.Quote(module), strconv.Quote(to))
		}
	}
	fmt.Fprintf(writer, "}\n")
}
func objfmt(x interface{}) string {
	switch x := x.(type) {
	default:
		return fmt.Sprint(x)
	case string:
		if isPDFDocEncoded(x) {
			return strconv.Quote(pdfDocDecode(x))
		}
		if isUTF16(x) {
			return strconv.Quote(utf16Decode(x[2:]))
		}
		return strconv.Quote(x)
	case name:
		return "/" + string(x)
	case dict:
		var keys []string
		for k := range x {
			keys = append(keys, string(k))
		}
		sort.Strings(keys)
		var buf bytes.Buffer
		buf.WriteString("<<")
		for i, k := range keys {
			elem := x[name(k)]
			if i > 0 {
				buf.WriteString(" ")
			}
			buf.WriteString("/")
			buf.WriteString(k)
			buf.WriteString(" ")
			buf.WriteString(objfmt(elem))
		}
		buf.WriteString(">>")
		return buf.String()
	case array:
		var buf bytes.Buffer
		buf.WriteString("[")
		for i, elem := range x {
			if i > 0 {
				buf.WriteString(" ")
			}
			buf.WriteString(objfmt(elem))
		}
		buf.WriteString("]")
		return buf.String()
	case stream:
		return fmt.Sprintf("%v@%d", objfmt(x.hdr), x.offset)
	case objptr:
		return fmt.Sprintf("%d %d R", x.id, x.gen)
	case objdef:
		return fmt.Sprintf("{%d %d obj}%v", x.ptr.id, x.ptr.gen, objfmt(x.obj))
	}
}
func (p *Parser) ParseType() (Node, bool) {
	typeNode := &TypeNode{BaseNode: FromToken(p.Lookahead(0))}

	// An optional leading "$" marks a type parameter.
	_, ok := p.Symbol("$")
	typeNode.IsParameter = ok

	if _, ok := p.Symbol("*"); ok {
		typeNode.IsPointer = true
		subtype, ok := p.ParseType()
		if !ok {
			p.Fail(p.Lookahead(0), "Invalid type after '*' in type")
		}
		typeNode.Subtype = subtype.(*TypeNode)
	} else if _, ok := p.Symbol("["); ok {
		typeNode.IsArray = true
		if _, ok := p.Keyword(".."); ok {
			typeNode.ArrayLength = -1
		} else if _, ok := p.Numeral(); ok {
			panic("UNIMPLEMENTED")
		}
		end, ok := p.Symbol("]")
		if !ok {
			p.Fail(end, "Expected ']' in array type, got %s (%s)", strconv.Quote(end.Value), end.Type)
		}
		subtype, ok := p.ParseType()
		if !ok {
			p.Fail(p.Lookahead(0), "Invalid type after array header in type")
		}
		typeNode.Subtype = subtype.(*TypeNode)
	}

	/*
		// Type stuff
		type       = ["$"] type_body ident .
		type_body  = { "*" | type_array } .
		type_array = "[" [".." | expr] "]" .
	*/

	// Consume the trailing base identifier that closes the type.
	if _, ok := p.Ident(); !ok {
		tok := p.Lookahead(0)
		p.Fail(tok, "Expected ident as end of type, got %s (%s)", strconv.Quote(tok.Value), tok.Type)
	}

	return typeNode, true
}
func LogstashRefreshApplicationsFilters() error {
	// Get all node
	apps, err := ApplicationMapper.FetchAll()
	if err != nil {
		return err
	}

	filters := logstash.NewTagFilters()
	for _, app := range apps {
		// Fetch logfiles
		logfiles, err := LogfileMapper.FetchAllEnabled(app)
		if err != nil {
			return err
		}
		if len(logfiles) == 0 {
			continue
		}

		var filePaths []string
		for _, lf := range logfiles {
			filePaths = append(filePaths, strconv.Quote(lf.Path))
		}

		// Forge application tags
		var conds []string
		if len(filePaths) > 1 {
			conds = append(conds, fmt.Sprintf("[source] in [ %s ]", strings.Join(filePaths, ", ")))
		} else {
			conds = append(conds, fmt.Sprintf("[source] == %s", filePaths[0]))
		}
		for _, t := range app.Tags {
			conds = append(conds, fmt.Sprintf("%s in [karhu_tags]", strconv.Quote(t)))
		}

		filters.AddFilter(
			logstash.NewFilter(strings.Join(conds, " and ")).
				Mutate(logstash.NewMutate("karhu_app", strconv.Quote(app.Name))))
	}

	data, err := filters.Marshal()
	if err != nil {
		return err
	}
	log.Println(string(data))

	if err := ioutil.WriteFile(env.GetDefault("LOGSTASH_APPS_FILTERS", "./logstash/conf.d/11-apps-filters.conf"), data, 0644); err != nil {
		return err
	}
	return nil
}
func gocov_test(ir *gothic.Interpreter) {
	var buf bytes.Buffer
	cmd := exec.Command("gocov", "test")
	cmd.Stdout = &buf
	err := cmd.Run()
	if err != nil {
		gocov_test_error(ir, err)
		return
	}

	result := struct{ Packages []*gocov.Package }{}
	err = json.Unmarshal(buf.Bytes(), &result)
	if err != nil {
		gocov_test_error(ir, err)
		return
	}

	sel := ""
	current = result.Packages
	for pi, p := range result.Packages {
		for fi, f := range p.Functions {
			r := reached(f)
			n := len(f.Statements)
			fun := fmt.Sprintf("%s.%s", p.Name, f.Name)
			cov := fmt.Sprintf("%.2f%% (%d/%d)", percentage(r, n), r, n)
			file := fmt.Sprintf("%s/%s", p.Name, filepath.Base(f.File))
			id := fmt.Sprintf("f_%d_%d", pi, fi)
			if prevsel != "" && prevsel == fun {
				sel = id
			}
			ir.Eval(`.f2.funcs insert {} end -id `, id, ` -values {`,
				strconv.Quote(fun), ` `, strconv.Quote(file), ` `, strconv.Quote(cov), `}`)
		}
	}

	dir := filepath.Dir(current[0].Functions[0].File)
	ir.Set("pathtext", dir)

	done := 0
	total := 0
	for _, p := range result.Packages {
		for _, f := range p.Functions {
			done += reached(f)
			total += len(f.Statements)
		}
	}
	ir.Set("covtext", fmt.Sprintf("Overall coverage: %.2f%% (%d/%d)",
		percentage(done, total), done, total))

	if sel == "" {
		sel = "f_0_0"
	}
	ir.Eval(".f2.funcs selection set ", sel)
}
// GenerateImports generates the import declaration for this file.
func (g *grpc) GenerateImports(file *generator.FileDescriptor) {
	if len(file.FileDescriptorProto.Service) == 0 {
		return
	}
	g.P("import (")
	g.P(contextPkg, " ", strconv.Quote(path.Join(g.gen.ImportPrefix, contextPkgPath)))
	g.P(grpcPkg, " ", strconv.Quote(path.Join(g.gen.ImportPrefix, grpcPkgPath)))
	g.P(")")
	g.P()
}
func (ent Entry) GoSet() {
	ago.Import("github.com/droundy/goadmin/hosts")
	code := "e = hosts.Entry{" + strconv.Quote(ent.IpAddress) + "," +
		strconv.Quote(ent.CanonicalName) + ", []string{"
	as := make([]string, len(ent.Aliases))
	for i := range ent.Aliases {
		as[i] = strconv.Quote(ent.Aliases[i])
	}
	code += strings.Join(as, ", ") + " } }.Set()"
	ago.Code(code)
}
func marsh(t *testing.T, v interface{}, sv string) {
	out, err := Marshal(v)
	if err != nil {
		t.FailNow()
	}
	if string(out) != sv {
		fmt.Println("Marshal", v, "->", strconv.Quote(string(out)), " expected:", strconv.Quote(sv))
		t.Fail()
	}
}
// String is the string representation of a ParseError.
func (e *ParseError) String() string {
	if e.Message == "" {
		return "parsing time " + strconv.Quote(e.Value) + " as " +
			strconv.Quote(e.Layout) + ": cannot parse " +
			strconv.Quote(e.ValueElem) + " as " + strconv.Quote(e.LayoutElem)
	}
	return "parsing time " + strconv.Quote(e.Value) + e.Message
}
func TestMatrix4fString(t *testing.T) {
	test := func(m Matrix4f, s string) {
		if x := m.String(); x != s {
			t.Errorf("m.String() = %v, want %v", strconv.Quote(x), strconv.Quote(s))
		}
	}
	test(Ident4f(), "Matrix4f(\n 1, 0, 0, 0,\n 0, 1, 0, 0,\n 0, 0, 1, 0,\n 0, 0, 0, 1,\n)")
	test(Matrix4f{1, 5, 9, 13, 2, 6, 10, 14, 3, 7, 11, 15, 4, 8, 12, 16},
		"Matrix4f(\n 1, 2, 3, 4,\n 5, 6, 7, 8,\n 9, 10, 11, 12,\n 13, 14, 15, 16,\n)")
}