func ExampleTrimPrefix() {
	var b = []byte("Goodbye,, world!")
	b = bytes.TrimPrefix(b, []byte("Goodbye,"))
	b = bytes.TrimPrefix(b, []byte("See ya,"))
	fmt.Printf("Hello%s", b)
	// Output: Hello, world!
}
// CommitV000
// TreeV000
// XAttrSetV000
func ReadHeader(p *bytes.Buffer) (header *Header, err error) {
	header = &Header{}
	prefix := p.Next(4)
	var version []byte
	if bytes.Equal(prefix, []byte("Comm")) {
		header.Data = append(prefix, p.Next(6)...)
		header.Type = BLOB_TYPE_COMMIT
		version = bytes.TrimPrefix(header.Data, []byte("CommitV"))
	} else if bytes.Equal(prefix, []byte("Tree")) {
		header.Data = append(prefix, p.Next(4)...)
		header.Type = BLOB_TYPE_TREE
		version = bytes.TrimPrefix(header.Data, []byte("TreeV"))
	} else if bytes.Equal(prefix, []byte("XAtt")) {
		header.Data = append(prefix, p.Next(8)...)
		header.Type = BLOB_TYPE_X_ATTR_SET
		version = bytes.TrimPrefix(header.Data, []byte("XAttrSetV"))
	} else {
		err = fmt.Errorf("ReadHeader header %s has unknown type", header.Data)
		return
	}
	if header.Version, err = strconv.Atoi(string(version)); err != nil {
		err = fmt.Errorf("ReadHeader header %s has non-integer version", header.Data)
		return
	}
	return
}
func detectLicense(filepath string) (LicenseType, error) {
	fh, err := os.Open(filepath)
	if err != nil {
		return UNKNOWN, err
	}
	defer fh.Close()

	var buf bytes.Buffer
	scanner := bufio.NewScanner(fh)
	for scanner.Scan() {
		if bytes.HasPrefix(scanner.Bytes(), []byte("package ")) {
			break
		}
		line := bytes.TrimSuffix(bytes.TrimPrefix(bytes.TrimPrefix(scanner.Bytes(), []byte("//")), []byte("/*")), []byte("*/"))
		if len(line) > 0 && (line[0] == '+' || bytes.HasPrefix(bytes.TrimSpace(line), []byte("Copyright"))) {
			continue
		}
		buf.Write(bytes.TrimSpace(line))
		buf.WriteByte('\n')
	}
	//fmt.Fprintf(os.Stderr, "DETECT %q\n", strings.TrimSpace(buf.String()))
	l := license.New("", strings.TrimSpace(buf.String()))
	l.File = filepath
	if err = l.GuessType(); err != nil {
		if err.Error() == license.ErrUnrecognizedLicense {
			return UNKNOWN, scanner.Err()
		}
		return UNKNOWN, err
	}
	err = scanner.Err()
	switch l.Type {
	case license.LicenseMIT:
		return MIT, err
	case license.LicenseNewBSD:
		return NewBSD, err
	case license.LicenseFreeBSD:
		return Freebsd, err
	case license.LicenseApache20:
		return Apache2, err
	case license.LicenseMPL20:
		return MPL2, err
	case license.LicenseGPL20:
		return GPL2, err
	case license.LicenseGPL30:
		return GPL3, err
	case license.LicenseLGPL21:
		return LGPL2, err
	case license.LicenseLGPL30:
		return LGPL2, err
	case license.LicenseCDDL10:
		return CDDL, err
	case license.LicenseEPL10:
		return EPL, err
	}
	return UNKNOWN, err
}
func DetectCode(first, second Line, detectors Detectors) Handler {
	if !first.hasFourSpacePrefix() {
		return nil
	}
	block := md.CodeBlock{}
	var paused *Line
	return HandlerFunc(func(next Line, ctx Context) (bool, error) {
		if next.EOF() {
			ctx.Emit(block)
			ctx.Emit(md.End{})
			return maybeNull(paused, ctx)
		}
		// TODO(akavel): verify it's coded ok, it was converted from a different approach
		fourspace := []byte("    ")
		switch {
		// previous was blank, next is not tab-indented. Reject both.
		case paused != nil && !next.hasFourSpacePrefix():
			ctx.Emit(block)
			ctx.Emit(md.End{})
			return maybeNull(paused, ctx)
		case next.isBlank():
			if paused != nil {
				block.Raw = append(block.Raw, md.Run(*paused))
				block.Prose = append(block.Prose, md.Run{
					paused.Line, bytes.TrimPrefix(paused.Bytes, fourspace)})
			}
			paused = &next // note: only case where we pause a line
			return true, nil
		case next.hasFourSpacePrefix():
			if paused != nil {
				block.Raw = append(block.Raw, md.Run(*paused))
				block.Prose = append(block.Prose, md.Run{
					paused.Line, bytes.TrimPrefix(paused.Bytes, fourspace)})
				paused = nil
			}
			block.Raw = append(block.Raw, md.Run(next))
			block.Prose = append(block.Prose, md.Run{
				next.Line, bytes.TrimPrefix(next.Bytes, fourspace)})
			return true, nil
		// next not blank & not indented. End the block.
		default:
			if paused != nil {
				block.Raw = append(block.Raw, md.Run(*paused))
				block.Prose = append(block.Prose, md.Run{
					paused.Line, bytes.TrimPrefix(paused.Bytes, fourspace)})
			}
			ctx.Emit(block)
			ctx.Emit(md.End{})
			return false, nil
		}
	})
}
func TrimBOM(b []byte, encoding string) []byte {
	bom := boms[encoding]
	if bom != nil {
		b = bytes.TrimPrefix(b, bom)
	}
	return b
}
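// The boms lookup table used by TrimBOM above is not part of the snippet; the
// following is a minimal sketch of what such a table could contain (the map
// name, keys, and entries are assumptions, not the original definitions):
var boms = map[string][]byte{
	"utf-8":    {0xEF, 0xBB, 0xBF},
	"utf-16be": {0xFE, 0xFF},
	"utf-16le": {0xFF, 0xFE},
}

// Assumed usage: TrimBOM([]byte("\xef\xbb\xbfhello"), "utf-8") returns "hello".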
func (d *Driver) getMACAdress() (string, error) {
	args := append(d.xhyveArgs(), "-M")
	stdout := bytes.Buffer{}

	cmd := exec.Command(os.Args[0], args...) // TODO: Should be possible without exec
	log.Debugf("Running command: %s %s", os.Args[0], args)
	cmd.Stdout = &stdout
	if err := cmd.Run(); err != nil {
		if exitErr, ok := err.(*exec.ExitError); ok {
			log.Debugf("Stderr: %s", exitErr.Stderr)
		}
		return "", err
	}

	mac := bytes.TrimPrefix(stdout.Bytes(), []byte("MAC: "))
	mac = bytes.TrimSpace(mac)

	hw, err := net.ParseMAC(string(mac))
	if err != nil {
		return "", err
	}
	return hw.String(), nil
}
// Keeps reading shallows until a flush-pkt is found
func decodeShallow(p *Decoder) decoderStateFn {
	if !bytes.HasPrefix(p.line, shallow) {
		p.error("malformed shallow prefix, found %q... instead", p.line[:len(shallow)])
		return nil
	}
	p.line = bytes.TrimPrefix(p.line, shallow)
	if len(p.line) != hashSize {
		p.error(fmt.Sprintf(
			"malformed shallow hash: wrong length, expected 40 bytes, read %d bytes",
			len(p.line)))
		return nil
	}

	text := p.line[:hashSize]
	var h plumbing.Hash
	if _, err := hex.Decode(h[:], text); err != nil {
		p.error("invalid hash text: %s", err)
		return nil
	}

	p.data.Shallows = append(p.data.Shallows, h)

	if ok := p.nextLine(); !ok {
		return nil
	}

	if len(p.line) == 0 {
		return nil // successful parse of the advertised-refs message
	}

	return decodeShallow
}
func walkAndFindJsonFile(path string, info os.FileInfo, err error) error {
	if !info.IsDir() {
		if strings.Contains(info.Name(), "json") {
			fmt.Println("Processing file:::" + path)
			data, err := ioutil.ReadFile(path)
			if err != nil {
				panic(err)
			}
			// Strip a UTF-8 BOM, if present, before decoding.
			data = bytes.TrimPrefix(data, []byte("\xef\xbb\xbf"))
			data, _ = GbkToUtf8(data)
			var book Book
			if err := json.Unmarshal(data, &book); err != nil {
				panic(err)
			}
			var bookName = book.Title
			var fileName = strings.Split(info.Name(), ".")[0]
			var zipFileName = strings.Split(book.ImageZipfile, ".")[0]
			if !strings.EqualFold(fileName, bookName) || !strings.EqualFold(zipFileName, bookName) {
				fmt.Println(info.Name() + ":::file name: [" + bookName + "], zip file name: [" + zipFileName + "] is wrong")
				fmt.Println(fileName + ":::file name")
				fmt.Println(bookName + ":::book title")
				fmt.Println(zipFileName + ":::zip file name")
				panic(info.Name())
			}
			fmt.Printf("%s is well-formed\n", path)
		}
		if _, err := CopyFile(destDir+string(os.PathSeparator)+info.Name(), path); err != nil {
			panic(err)
		}
	} else {
		fmt.Printf("%s\n", path)
	}
	return nil
}
func stockticker(w http.ResponseWriter, r *http.Request) {
	// Use http://finance.google.com/finance/info?client=ig&q=NASDAQ:GOOG to get a JSON response
	response, err := http.Get("http://finance.google.com/finance/info?client=ig&q=NASDAQ:GOOG,NASDAQ:AAPL,NASDAQ:MSFT")
	if err != nil {
		fmt.Println(err)
		return
	}
	defer response.Body.Close()

	// Read the data into a byte slice
	body, err := ioutil.ReadAll(response.Body)
	if err != nil {
		fmt.Println(err)
		return
	}

	// Remove whitespace from the response
	data := bytes.TrimSpace(body)

	// Remove the leading "// " so the remainder can be unmarshaled as JSON
	data = bytes.TrimPrefix(data, []byte("// "))

	// Unmarshal the JSON byte slice into the predefined struct
	err = json.Unmarshal(data, &stocks)
	if err != nil {
		fmt.Println(err)
	}

	// Render the struct data with the template
	tempErr := stockTemplate.Execute(w, stocks)
	if tempErr != nil {
		http.Error(w, tempErr.Error(), http.StatusInternalServerError)
	}
}
// Expected format: want <hash>
func (d *ulReqDecoder) decodeOtherWants() stateFn {
	if ok := d.nextLine(); !ok {
		return nil
	}

	if bytes.HasPrefix(d.line, shallow) {
		return d.decodeShallow
	}

	if bytes.HasPrefix(d.line, deepen) {
		return d.decodeDeepen
	}

	if len(d.line) == 0 {
		return nil
	}

	if !bytes.HasPrefix(d.line, want) {
		d.error("unexpected payload while expecting a want: %q", d.line)
		return nil
	}
	d.line = bytes.TrimPrefix(d.line, want)

	hash, ok := d.readHash()
	if !ok {
		return nil
	}
	d.data.Wants = append(d.data.Wants, hash)

	return d.decodeOtherWants
}
func (p *GraphiteParser) Parse(buf []byte) ([]telegraf.Metric, error) {
	// parse even if the buffer begins with a newline
	buf = bytes.TrimPrefix(buf, []byte("\n"))

	metrics := make([]telegraf.Metric, 0)

	buffer := bytes.NewBuffer(buf)
	reader := bufio.NewReader(buffer)
	for {
		// Read up to the next newline.
		buf, err := reader.ReadBytes('\n')
		if err == io.EOF {
			return metrics, nil
		}
		if err != nil && err != io.EOF {
			return metrics, err
		}

		// Trim the buffer, even though there should be no padding
		line := strings.TrimSpace(string(buf))
		if metric, err := p.ParseLine(line); err == nil {
			metrics = append(metrics, metric)
		}
	}
}
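// For reference, a hedged sketch of the newline-separated Graphite line
// protocol the Parse method above is fed (metric path, value, Unix timestamp);
// the metric names below are made up for illustration:
//
//	cpu.usage.idle 98.3 1422568543
//	mem.used 1024 1422568543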
func getRepos(URL *url.URL) (map[string]string, error) {
	URL.RawQuery = "format=JSON"
	resp, err := http.Get(URL.String())
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	content, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}

	// Strip the anti-XSS prefix the server prepends before decoding the JSON.
	const xssTag = ")]}'\n"
	content = bytes.TrimPrefix(content, []byte(xssTag))

	m := map[string]*Project{}
	if err := json.Unmarshal(content, &m); err != nil {
		return nil, err
	}

	result := map[string]string{}
	for k, v := range m {
		result[k] = v.CloneURL
	}
	return result, nil
}
func (fs *hgFSCmd) ReadDir(path string) ([]os.FileInfo, error) {
	path = filepath.Clean(internal.Rel(path))
	// This combination of --include and --exclude opts gets all the files in
	// the dir specified by path, plus all files one level deeper (but no
	// deeper). This lets us list the files *and* subdirs in the dir without
	// needlessly listing recursively.
	cmd := exec.Command("hg", "locate", "--rev="+string(fs.at), "--include="+path, "--exclude="+filepath.Clean(path)+"/*/*/*")
	cmd.Dir = fs.dir
	out, err := cmd.CombinedOutput()
	if err != nil {
		return nil, fmt.Errorf("exec `hg locate` failed: %s. Output was:\n\n%s", err, out)
	}

	subdirs := make(map[string]struct{})
	prefix := []byte(path + "/")
	files := bytes.Split(out, []byte{'\n'})
	var fis []os.FileInfo
	for _, nameb := range files {
		nameb = bytes.TrimPrefix(nameb, prefix)
		if len(nameb) == 0 {
			continue
		}
		if bytes.Contains(nameb, []byte{'/'}) {
			subdir := strings.SplitN(string(nameb), "/", 2)[0]
			if _, seen := subdirs[subdir]; !seen {
				fis = append(fis, &util.FileInfo{Name_: subdir, Mode_: os.ModeDir})
				subdirs[subdir] = struct{}{}
			}
			continue
		}
		fis = append(fis, &util.FileInfo{Name_: filepath.Base(string(nameb))})
	}

	return fis, nil
}
func getGoroutineID() (uint64, error) {
	b := make([]byte, 64)
	b = b[:runtime.Stack(b, false)]
	b = bytes.TrimPrefix(b, []byte("goroutine "))
	b = b[:bytes.IndexByte(b, ' ')]
	return strconv.ParseUint(string(b), 10, 64)
}
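// runtime.Stack for the current goroutine begins with a header line such as
// "goroutine 18 [running]:", which is why trimming the "goroutine " prefix and
// cutting at the first space yields the numeric ID. A minimal usage sketch
// (the log message is illustrative only):
//
//	if id, err := getGoroutineID(); err == nil {
//		log.Printf("running on goroutine %d", id)
//	}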
// checkVersionMatch makes sure that the go command in the path matches
// the GOROOT that will be used for building the cross compiler.
//
// This is typically not a problem when using a release version, but
// it is easy for development environments to drift, causing unexpected
// errors.
//
// checkVersionMatch is run after the tmpGoroot is built, so the dist
// command is available to call.
func checkVersionMatch(tmpGoroot string, version []byte) error {
	if buildN {
		return nil
	}
	version = bytes.TrimPrefix(version, []byte("go version "))
	version = bytes.Trim(version, "\n")

	dist := filepath.Join(tmpGoroot, "pkg/tool/"+goEnv("GOOS")+"_"+goEnv("GOARCH")+"/dist")
	if goos == "windows" {
		dist += ".exe"
	}

	cmd := exec.Command(dist, "version")
	cmd.Dir = tmpGoroot
	cmd.Env = []string{
		"GOROOT=" + tmpGoroot,
		`PATH=` + os.Getenv("PATH"),
	}
	cmd.Env = appendCommonEnv(cmd.Env)
	out, err := cmd.CombinedOutput()
	if err != nil {
		return fmt.Errorf("cannot get cmd/dist version: %v (%s)", err, out)
	}
	out = bytes.Trim(out, "\n")

	if !bytes.HasPrefix(version, out) {
		return fmt.Errorf("Go command out of sync with GOROOT. The command `go version` reports:\n\t%s\nbut the GOROOT %q is version:\n\t%s\nRebuild Go.", version, goEnv("GOROOT"), out)
	}
	return nil
}
func (pr *protocolReader) Read(buf []byte) (int, error) {
	if len(pr.buf) > 0 {
		n := copy(buf, pr.buf)
		pr.buf = pr.buf[n:]
		return n, nil
	}

	for pr.scanner.Scan() {
		cmd := pr.scanner.Bytes()
		switch {
		default:
			statErrors.Add("type=InvalidCommand", 1)
			fmt.Fprintf(pr.conn, "error: invalid command\n")
		case bytes.Equal(cmd, versionCmd):
			fmt.Fprintf(pr.conn, "Built on ... (tsp-aggregator)\n")
		case bytes.HasPrefix(cmd, putCmd):
			line := bytes.TrimPrefix(cmd, putCmd)
			line = append(line, '\n')
			n := copy(buf, line)
			if n < len(line) {
				pr.buf = append(pr.buf, line[n:]...)
			}
			return n, nil
		}
	}
	err := pr.scanner.Err()
	if err == nil {
		err = io.EOF
	}
	return 0, err
}
func stockticker() {
	// Use http://finance.google.com/finance/info?client=ig&q=NASDAQ:GOOG to get a JSON response
	response, err := http.Get("http://finance.google.com/finance/info?client=ig&q=NASDAQ:GOOG,NASDAQ:AAPL,NASDAQ:MSFT")
	if err != nil {
		fmt.Println(err)
		return
	}
	defer response.Body.Close()

	// Read the data into a byte slice
	body, err := ioutil.ReadAll(response.Body)
	if err != nil {
		fmt.Println(err)
		return
	}

	// Remove whitespace from the response
	data := bytes.TrimSpace(body)

	// Remove the leading "// " so the remainder can be unmarshaled as JSON
	data = bytes.TrimPrefix(data, []byte("// "))

	// Unmarshal the JSON byte slice into the predefined struct
	err = json.Unmarshal(data, &stocks)
	if err != nil {
		fmt.Println(err)
	}

	fmt.Println(stocks)
}
func (r *Reader) Read() ([]byte, error) {
	buf := []byte{}
	var isErr bool
	for {
		line, err := r.ReadBytes('\n')
		if err != nil {
			return nil, err
		}
		if bytes.HasPrefix(line, []byte("event: error")) {
			isErr = true
		}
		if bytes.HasPrefix(line, []byte("data: ")) {
			data := bytes.TrimSuffix(bytes.TrimPrefix(line, []byte("data: ")), []byte("\n"))
			buf = append(buf, data...)
		}
		// peek ahead one byte to see if we have a double newline (terminator)
		if peek, err := r.Peek(1); err == nil && string(peek) == "\n" {
			break
		}
	}
	if isErr {
		return nil, Error(string(buf))
	}
	return buf, nil
}
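// A hedged sketch of the server-sent-event style framing the Read method above
// consumes; the payloads are illustrative only. Each "data: " line's payload is
// appended to the result, an "event: error" line turns the result into an Error
// value, and a blank line terminates the event:
//
//	event: message
//	data: {"id": 1}
//	data: {"name": "x"}
//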
// ReadAll reads a .dockerignore file and returns the list of file patterns
// to ignore. Note this will trim whitespace from each line as well
// as use GO's "clean" func to get the shortest/cleanest path for each.
func ReadAll(reader io.Reader) ([]string, error) {
	if reader == nil {
		return nil, nil
	}
	scanner := bufio.NewScanner(reader)
	var excludes []string
	currentLine := 0

	utf8bom := []byte{0xEF, 0xBB, 0xBF}
	for scanner.Scan() {
		scannedBytes := scanner.Bytes()
		// We trim UTF8 BOM
		if currentLine == 0 {
			scannedBytes = bytes.TrimPrefix(scannedBytes, utf8bom)
		}
		pattern := string(scannedBytes)
		currentLine++
		// Lines starting with # (comments) are ignored before processing
		if strings.HasPrefix(pattern, "#") {
			continue
		}
		pattern = strings.TrimSpace(pattern)
		if pattern == "" {
			continue
		}
		pattern = filepath.Clean(pattern)
		pattern = filepath.ToSlash(pattern)
		excludes = append(excludes, pattern)
	}
	if err := scanner.Err(); err != nil {
		return nil, fmt.Errorf("Error reading .dockerignore: %v", err)
	}
	return excludes, nil
}
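// A minimal usage sketch for ReadAll above (the surrounding error handling is
// illustrative, not part of the original snippet):
//
//	f, err := os.Open(".dockerignore")
//	if err != nil {
//		return err
//	}
//	defer f.Close()
//	excludes, err := ReadAll(f)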
// GetServiceDescriptions returns descriptions of all available services.
func (ds *DataStorage) GetServiceDescriptions() []*common.ServiceDescription {
	sdList := make(common.ServiceDescriptionList, 0, 1000)
	sdPrefix := []byte(serviceDescriptionPrefix)
	iter := ds.db.NewIterator(defaultReadOptions)
	iter.Seek(sdPrefix)
	for iter.Valid() {
		svcKey := iter.Key()
		svcName := bytes.TrimPrefix(svcKey, sdPrefix)
		if len(svcKey) == len(svcName) {
			break
		}
		serviceDesc, err := common.NewServiceDescriptionFromBinary(iter.Value())
		if err != nil {
			log.Error("Couldn't read service '%s' description: %s", svcName, err.Error())
			iter.Next()
			continue
		}
		sdList = append(sdList, serviceDesc)
		iter.Next()
	}
	sort.Sort(sdList)
	return sdList
}
func (p *planner) getTableNames(dbDesc *DatabaseDescriptor) (parser.QualifiedNames, *roachpb.Error) {
	prefix := MakeNameMetadataKey(dbDesc.ID, "")
	sr, pErr := p.txn.Scan(prefix, prefix.PrefixEnd(), 0)
	if pErr != nil {
		return nil, pErr
	}

	var qualifiedNames parser.QualifiedNames
	for _, row := range sr {
		_, tableName, err := encoding.DecodeStringAscending(
			bytes.TrimPrefix(row.Key, prefix), nil)
		if err != nil {
			return nil, roachpb.NewError(err)
		}
		qname := &parser.QualifiedName{
			Base:     parser.Name(dbDesc.Name),
			Indirect: parser.Indirection{parser.NameIndirection(tableName)},
		}
		if err := qname.NormalizeTableName(""); err != nil {
			return nil, roachpb.NewError(err)
		}
		qualifiedNames = append(qualifiedNames, qname)
	}
	return qualifiedNames, nil
}
// getConfig retrieves the configuration for the specified key. If the
// key is empty, all configurations are returned. Otherwise, the
// leading "/" path delimiter is stripped and the configuration
// matching the remainder is retrieved. Note that this will retrieve
// the default config if "key" is equal to "/", and will list all
// configs if "key" is equal to "". The body result contains a listing
// of keys and retrieval of a config. The output format is determined
// by the request header.
func getConfig(db *client.DB, configPrefix proto.Key, config gogoproto.Message,
	path string, r *http.Request) (body []byte, contentType string, err error) {
	// Scan all configs if the key is empty.
	if len(path) == 0 {
		var rows []client.KeyValue
		if rows, err = db.Scan(configPrefix, configPrefix.PrefixEnd(), maxGetResults); err != nil {
			return
		}
		if len(rows) == maxGetResults {
			log.Warningf("retrieved maximum number of results (%d); some may be missing", maxGetResults)
		}
		var prefixes []string
		for _, row := range rows {
			trimmed := bytes.TrimPrefix(row.Key, configPrefix)
			prefixes = append(prefixes, url.QueryEscape(string(trimmed)))
		}
		// Encode the response.
		body, contentType, err = util.MarshalResponse(r, prefixes, util.AllEncodings)
	} else {
		configkey := keys.MakeKey(configPrefix, proto.Key(path[1:]))
		if err = db.GetProto(configkey, config); err != nil {
			return
		}
		body, contentType, err = util.MarshalResponse(r, config, util.AllEncodings)
	}
	return
}
// Entry decodes the key (assuming it was encoded by EncodeKey) into an Entry
// and populates its value field.
func Entry(key []byte, val []byte) (*spb.Entry, error) {
	if !bytes.HasPrefix(key, entryKeyPrefixBytes) {
		return nil, fmt.Errorf("key is not prefixed with entry prefix %q", entryKeyPrefix)
	}
	keyStr := string(bytes.TrimPrefix(key, entryKeyPrefixBytes))
	keyParts := strings.SplitN(keyStr, entryKeySepStr, 4)
	if len(keyParts) != 4 {
		return nil, fmt.Errorf("invalid key[%d]: %q", len(keyParts), string(key))
	}

	srcVName, err := decodeVName(keyParts[0])
	if err != nil {
		return nil, fmt.Errorf("error decoding source VName: %v", err)
	}
	targetVName, err := decodeVName(keyParts[3])
	if err != nil {
		return nil, fmt.Errorf("error decoding target VName: %v", err)
	}

	return &spb.Entry{
		Source:    srcVName,
		FactName:  keyParts[2],
		EdgeKind:  keyParts[1],
		Target:    targetVName,
		FactValue: val,
	}, nil
}
// ShowTables returns all the tables.
func (s *Server) ShowTables(session *Session, p *parser.ShowTables, args []sqlwire.Datum, resp *sqlwire.Response) error {
	if p.Name == nil {
		if session.Database == "" {
			return errNoDatabase
		}
		p.Name = append(p.Name, session.Database)
	}
	dbID, err := s.lookupDatabase(p.Name.String())
	if err != nil {
		return err
	}
	prefix := keys.MakeNameMetadataKey(dbID, "")
	sr, err := s.db.Scan(prefix, prefix.PrefixEnd(), 0)
	if err != nil {
		return err
	}
	var rows []sqlwire.Result_Row
	for _, row := range sr {
		name := string(bytes.TrimPrefix(row.Key, prefix))
		rows = append(rows, sqlwire.Result_Row{
			Values: []sqlwire.Datum{
				{StringVal: &name},
			},
		})
	}
	resp.Results = []sqlwire.Result{
		{
			Columns: []string{"tables"},
			Rows:    rows,
		},
	}
	return nil
}
// getTableNames implements the SchemaAccessor interface.
func (p *planner) getTableNames(dbDesc *sqlbase.DatabaseDescriptor) (parser.TableNames, error) {
	if e, ok := p.session.virtualSchemas.getVirtualSchemaEntry(dbDesc.Name); ok {
		return e.tableNames(), nil
	}
	prefix := sqlbase.MakeNameMetadataKey(dbDesc.ID, "")
	sr, err := p.txn.Scan(prefix, prefix.PrefixEnd(), 0)
	if err != nil {
		return nil, err
	}

	var tableNames parser.TableNames
	for _, row := range sr {
		_, tableName, err := encoding.DecodeUnsafeStringAscending(
			bytes.TrimPrefix(row.Key, prefix), nil)
		if err != nil {
			return nil, err
		}
		tn := parser.TableName{
			DatabaseName: parser.Name(dbDesc.Name),
			TableName:    parser.Name(tableName),
		}
		tableNames = append(tableNames, tn)
	}
	return tableNames, nil
}
func substPatternBytes(pat, repl, str []byte) (pre, subst, post []byte) {
	i := bytes.IndexByte(pat, '%')
	if i < 0 {
		if bytes.Equal(str, pat) {
			return repl, nil, nil
		}
		return str, nil, nil
	}

	in := str
	trimmed := str
	if i > 0 {
		trimmed = bytes.TrimPrefix(in, pat[:i])
		if bytes.Equal(trimmed, in) {
			return str, nil, nil
		}
	}

	in = trimmed
	if i < len(pat)-1 {
		trimmed = bytes.TrimSuffix(in, pat[i+1:])
		if bytes.Equal(trimmed, in) {
			return str, nil, nil
		}
	}

	i = bytes.IndexByte(repl, '%')
	if i < 0 {
		return repl, nil, nil
	}

	return repl[:i], trimmed, repl[i+1:]
}
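// Illustrative sketch of the make-style "%" pattern substitution performed by
// substPatternBytes above: the concatenation pre+subst+post is the rewritten
// word (the inputs below are made-up examples):
//
//	pre, subst, post := substPatternBytes([]byte("%.c"), []byte("%.o"), []byte("foo.c"))
//	// pre == "", subst == "foo", post == ".o"  ->  "foo.o"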
// markdownify renders a given string from Markdown to HTML.
func markdownify(in interface{}) template.HTML {
	text := cast.ToString(in)
	m := helpers.RenderBytes(&helpers.RenderingContext{Content: []byte(text), PageFmt: "markdown"})
	m = bytes.TrimPrefix(m, markdownTrimPrefix)
	m = bytes.TrimSuffix(m, markdownTrimSuffix)
	return template.HTML(m)
}
// Expected format: shallow <hash>
func decodeShallow(d *Decoder) decoderStateFn {
	if bytes.HasPrefix(d.line, deepen) {
		return decodeDeepen
	}

	if len(d.line) == 0 {
		return nil
	}

	if !bytes.HasPrefix(d.line, shallow) {
		d.error("unexpected payload while expecting a shallow: %q", d.line)
		return nil
	}
	d.line = bytes.TrimPrefix(d.line, shallow)

	hash, ok := d.readHash()
	if !ok {
		return nil
	}
	d.data.Shallows = append(d.data.Shallows, hash)

	if ok := d.nextLine(); !ok {
		return nil
	}

	return decodeShallow
}
func (options *Html) TitleBlock(out *bytes.Buffer, text []byte) {
	text = bytes.TrimPrefix(text, []byte("% "))
	text = bytes.Replace(text, []byte("\n% "), []byte("\n"), -1)
	out.WriteString("<h1 class=\"title\">")
	out.Write(text)
	out.WriteString("\n</h1>")
}
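// Illustrative input/output sketch for TitleBlock above, assuming text arrives
// as the raw Pandoc-style "%"-prefixed title lines:
//
//	% My Title
//	% Second Line
//
// is written to out as:
//
//	<h1 class="title">My Title
//	Second Line
//	</h1>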
func decodeDeepenReference(d *Decoder) decoderStateFn {
	d.line = bytes.TrimPrefix(d.line, deepenReference)

	d.data.Depth = DepthReference(string(d.line))

	return decodeFlush
}