// lineDiff returns b with all lines added or changed from a highlighted.
// It discards spaces within lines when comparing lines, so subtle
// gofmt-induced alignment changes are not flagged as changes.
// It also handles single-line diffs specially, highlighting only the
// changes within those lines.
func lineDiff(a, b []byte) []byte {
	l := byteLines{bytes.Split(a, []byte("\n")), bytes.Split(b, []byte("\n"))}
	cs := diff.Diff(len(l.a), len(l.b), diff.Data(l))
	var buf bytes.Buffer
	n := 0
	for _, c := range cs {
		for _, b := range l.b[n:c.B] {
			buf.Write(b)
			buf.WriteByte('\n')
		}
		if c.Ins > 0 {
			if c.Ins == 1 && c.Del == 1 {
				buf.Write(byteDiff(l.a[c.A], l.b[c.B]))
				buf.WriteByte('\n')
			} else {
				for _, b := range l.b[c.B : c.B+c.Ins] {
					buf.Write(colorize(b))
					buf.WriteByte('\n')
				}
			}
		}
		n = c.B + c.Ins
	}
	for i, b := range l.b[n:] {
		if i > 0 {
			buf.WriteByte('\n')
		}
		buf.Write(b)
	}
	return buf.Bytes()
}

func runDatCase(c []byte) int {
	var counter int
	defer func() {
		if e := recover(); e != nil {
			fmt.Println("ERROR while running test case:", e)
			counter++
		}
	}()
	parts := bytes.Split(c, []byte("#"))
	if len(parts) != 4 {
		counter++
	}
	if len(parts) != 4 && *verbose {
		fmt.Printf("Malformed test case: %d, %q\n", len(parts), string(c))
		return counter
	}
	fmt.Println("Running test case:", string(c))
	testData := make(map[string]string)
	for _, p := range parts[1:] {
		t := bytes.Split(p, []byte("\n"))
		testData[string(t[0])] = string(t[1])
	}
	p := h5.NewParserFromString(string(testData["data"]))
	err := p.Parse()
	if err != nil {
		fmt.Println("Test case:", string(c))
		fmt.Println("ERROR parsing: ", err)
		counter++
	} else {
		if *verbose {
			fmt.Println("SUCCESS!!!")
		}
	}
	return counter
}

func LoadMap() error {
	contents, err := ioutil.ReadFile(fileMapPath)
	if err != nil {
		return fmt.Errorf("Could not read file %s: %s", fileMapPath, err.Error())
	}
	lines := bytes.Split(contents, []byte("\n"))
	newMap := make(map[string]string)
	for i, line := range lines {
		lineParts := bytes.Split(bytes.TrimSpace(line), []byte(" "))
		if len(lineParts[0]) == 0 {
			continue
		}
		user := string(lineParts[:1][0])
		host := string(lineParts[len(lineParts)-1:][0])
		if _, alreadyExists := newMap[user]; alreadyExists {
			return fmt.Errorf("User %s was defined more than once on line %d", user, i)
		}
		newMap[user] = host
	}
	if len(newMap) < minEntries {
		return fmt.Errorf("New Map only contains %d entries, which is less than the set minimum %d", len(newMap), minEntries)
	}
	fileMapLock.Lock()
	defer fileMapLock.Unlock()
	fileMap = newMap
	return nil
}

func (fl *FriendsList) Parse(buf []byte) (f map[string]string, err error) {
	f = make(map[string]string)
	for _, l := range bytes.Split(buf, []byte("\n")) {
		if len(l) < 3 {
			continue
		}
		parts := bytes.Split(l, []byte(" "))
		if len(parts) != 2 {
			return f, fmt.Errorf("format error. too many parts. %s", parts)
		}
		user := string(parts[0])
		if len(user) < 1 {
			return f, fmt.Errorf("invalid user: %s", user)
		}
		perm := string(parts[1])
		if !validPerm(perm) {
			return f, fmt.Errorf("invalid perm: %s", perm)
		}
		f[user] = perm
	}
	// ok everything seems good
	return f, nil
}

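// A minimal usage sketch for Parse, assuming the "<user> <perm>" line format
// implied above; the buffer contents are hypothetical and the perm strings
// "rw" and "r" are only valid if validPerm accepts them.
func friendsParseSketch() {
	var fl FriendsList
	friends, err := fl.Parse([]byte("alice rw\nbob r\n"))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(friends) // e.g. map[alice:rw bob:r] if both perms are valid
}
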
func loadPerms(path string) (mperms PermissionsList) {
	file, e := ioutil.ReadFile(path)
	if e != nil {
		fmt.Println("Could not get group permissions for", path, ":", e)
		return
	}
	lines := bytes.Split(file, []byte("\n"))
	mperms = make(PermissionsList, len(lines))
	for i, line := range lines {
		parts := bytes.SplitN(line, []byte(" "), 2)
		perms := mperms[i]
		for _, perm := range parts[0] {
			switch perm {
			case 'r':
				perms.Read = true
			case 'w':
				perms.Write = true
			default:
				fmt.Println("WARNING: Unrecognized permission", perm)
			}
			perms.Path = string(parts[1])
			mperms[i] = perms
		}
	}
	sort.Sort(mperms)
	if !sort.IsSorted(mperms) {
		fmt.Println("Failed to sort!")
	}
	return
}

func parseGcovCoverageResults(target *core.BuildTarget, coverage *core.TestCoverage, data []byte) error {
	// The data we have is a sequence of .gcov files smashed together.
	lines := bytes.Split(data, []byte{'\n'})
	if len(lines) == 0 {
		return fmt.Errorf("Empty coverage file")
	}
	currentFilename := ""
	for lineno, line := range lines {
		fields := bytes.Split(line, []byte{':'})
		if len(fields) < 3 {
			continue
		}
		if bytes.Equal(fields[2], []byte("Source")) {
			if len(fields) < 4 {
				return fmt.Errorf("Bad source on line %d: %s", lineno, string(line))
			}
			currentFilename = string(fields[3])
			continue
		}
		covLine, err := strconv.Atoi(strings.TrimSpace(string(fields[1])))
		if err != nil {
			return fmt.Errorf("Bad line number on line %d: %s", lineno, string(line))
		} else if covLine > 0 {
			coverage.Files[currentFilename] = append(coverage.Files[currentFilename], translateGcovCount(bytes.TrimSpace(fields[0])))
		}
	}
	return nil
}

// Read reads a .dat file where the entries are matrices of numbers
// separated by empty lines, with whitespace between row entries and
// newlines between rows.
func Read(file []byte) []matrix.Matrix {
	data := make([]matrix.Matrix, 0)
	for _, mat := range bytes.Split(file, []byte("\n\n")) {
		temp := make(matrix.Matrix, 0)
		didParse := true
	Element:
		for _, row := range bytes.Split(mat, []byte("\n")) {
			floatsAsStrings := strings.Fields(string(row))
			elms := make([]matrix.Element, 0)
			for _, s := range floatsAsStrings {
				f, err := strconv.ParseFloat(s, 64)
				if err != nil {
					didParse = false
					break Element
				}
				elms = append(elms, matrix.Element(f))
			}
			temp = append(temp, elms)
		}
		if didParse && len(temp[0]) > 0 {
			data = append(data, temp)
		}
	}
	return data
}

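// A minimal usage sketch for Read with a hypothetical two-matrix .dat payload;
// the matrix package types are assumed to be the ones used above.
func readDatSketch() {
	file := []byte("1 2\n3 4\n\n5.0 6.0\n7.0 8.0")
	for _, m := range Read(file) {
		fmt.Println(m) // each m is a matrix.Matrix of parsed rows
	}
}
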
func getWikiList() []wikiListItem {
	if len(WikiList) == 0 {
		body, err := ioutil.ReadFile("wiki5000.csv")
		if err != nil {
			log.Printf("Error loading wiki5000.csv: %s\n", err)
		}
		currentItem := new(wikiListItem)
		lines := bytes.Split(body, []byte{'\n'})
		for _, line := range lines {
			parts := bytes.Split(line, []byte{','})
			for i, p := range parts {
				switch i {
				case 0:
					currentItem.Rank, _ = strconv.Atoi(string(p))
				case 1:
					currentItem.Link = string(p)
				case 2:
					currentItem.Title = string(p)
				case 3:
					currentItem.Views, _ = strconv.Atoi(string(p))
				}
			}
			WikiList = append(WikiList, *currentItem)
			currentItem = new(wikiListItem)
		}
		return WikiList
	} else {
		return WikiList
	}
}

func (v otrV3) parseFragmentPrefix(c *Conversation, data []byte) (rest []byte, ignore bool, ok bool) {
	if len(data) < 23 {
		return data, false, false
	}
	header := data[:23]
	headerPart := bytes.Split(header, fragmentSeparator)[0]
	itagParts := bytes.Split(headerPart, fragmentItagsSeparator)
	if len(itagParts) < 3 {
		return data, false, false
	}
	senderInstanceTag, err1 := parseItag(itagParts[1])
	if err1 != nil {
		return data, false, false
	}
	receiverInstanceTag, err2 := parseItag(itagParts[2])
	if err2 != nil {
		return data, false, false
	}
	if err := v.verifyInstanceTags(c, senderInstanceTag, receiverInstanceTag); err != nil {
		switch err {
		case errInvalidOTRMessage:
			return data, false, false
		case errReceivedMessageForOtherInstance:
			return data, true, true
		}
	}
	return data[23:], false, true
}

func (blp *Blp) parseRotateEvent(line []byte) {
	blp.RotateEventCount++
	rem := bytes.Split(line, BINLOG_ROTATE_TO)
	rem2 := bytes.Split(rem[1], POS)
	rotateFilename := strings.TrimSpace(string(rem2[0]))
	rotatePos, err := strconv.ParseUint(string(rem2[1]), 10, 64)
	if err != nil {
		panic(NewBinlogParseError(CODE_ERROR, fmt.Sprintf("Error in extracting rotate pos %v from line %s", err, string(line))))
	}
	if !blp.globalState.usingRelayLogs {
		// If the file being parsed is a binlog,
		// then the rotate events only correspond to itself.
		blp.currentPosition.Position.MasterFilename = rotateFilename
		blp.currentPosition.Position.MasterPosition = rotatePos
	} else {
		// For relay logs, the rotate events could be that of the relay log or the binlog;
		// the prefix of rotateFilename is used to test which case it is.
		logsDir, relayFile := path.Split(blp.currentPosition.Position.RelayFilename)
		currentPrefix := strings.Split(relayFile, ".")[0]
		rotatePrefix := strings.Split(rotateFilename, ".")[0]
		if currentPrefix == rotatePrefix {
			// relay log rotated
			blp.currentPosition.Position.RelayFilename = path.Join(logsDir, rotateFilename)
		} else {
			// master file rotated
			blp.currentPosition.Position.MasterFilename = rotateFilename
			blp.currentPosition.Position.MasterPosition = rotatePos
		}
	}
}

func TestIterWords(t *testing.T) {
	tests := []struct {
		in  []byte
		out [][]byte
	}{
		{[]byte("hello world"), bytes.Split([]byte("hello:world"), []byte(":"))},
		{[]byte(" hello world "), bytes.Split([]byte("hello:world"), []byte(":"))},
	}
	for i, test := range tests {
		out := [][]byte{}
		f := func(word []byte) {
			out = append(out, word)
		}
		IterWords(test.in, f)
		if len(out) != len(test.out) {
			t.Logf("%d: wrong output length: got %d want %d", i, len(out), len(test.out))
		}
		for j := range out {
			if !bytes.Equal(out[j], test.out[j]) {
				t.Logf("%d:%d: don't match. got %q want %q", i, j, out[j], test.out[j])
				t.Fail()
			}
		}
	}
}

func DiffBytes(origb, endb []byte) []Diff {
	origl := bytes.Split(origb, []byte("\n"))
	endl := bytes.Split(endb, []byte("\n"))
	// Check if the streams are at all different.
	// Do the length check first for efficiency reasons.
	if len(origl) == len(endl) {
		if bytes.Equal(origb, endb) {
			// Bytes are equal!
			return nil
		}
	}
	for i := range origl {
		if i >= len(endl) {
			fmt.Println("Out of range panic coming up!")
			fmt.Println(origl, endl, i)
		}
		if bytes.Equal(origl[i], endl[i]) {
			continue
		}
		// Search forward for the line
		for j := i; j < len(endl); j++ {
			if bytes.Equal(origl[i], endl[j]) {
				fmt.Println("Found match for line", i, "at line", j)
			}
		}
		for j := i; j >= 0; j-- {
			if bytes.Equal(origl[i], endl[j]) {
				fmt.Println("Found match for line", i, "at line", j)
			}
		}
	}
	return nil
}

// NewRequestHeader returns a RequestHeader parsed from bytes.
func NewRequestHeader(b []byte) (*RequestHeader, error) {
	lines := bytes.Split(b, eol)
	reqline := bytes.Split(lines[0], []byte{' '})
	headers := [][][]byte{}
	bodySize := 0
	headerLines := lines[1:]
	for _, l := range headerLines {
		tokens := bytes.SplitN(l, []byte{':', ' '}, 2)
		if len(tokens) == 2 {
			headers = append(headers, tokens)
		}
		if len(tokens) == 2 && bytes.Equal(bytes.ToLower(tokens[0]), []byte("content-length")) {
			size, err := strconv.Atoi(string(tokens[1]))
			if err != nil {
				return nil, err
			}
			bodySize = size
		}
	}
	r := &RequestHeader{
		ReqLineTokens: reqline,
		Headers:       headers,
		BodySize:      bodySize,
		BodyRead:      0,
	}
	return r, nil
}

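// A minimal usage sketch for NewRequestHeader, assuming eol is CRLF; the raw
// request bytes below are hypothetical.
func requestHeaderSketch() {
	raw := []byte("POST /upload HTTP/1.1\r\nHost: example.com\r\nContent-Length: 5\r\n\r\n")
	r, err := NewRequestHeader(raw)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(r.ReqLineTokens[0]), r.BodySize) // POST 5
}
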
// agentsMap returns a mapping of agents used by the web app to their
// descriptions.
//
// Returns an error if they can't be retrieved correctly.
func agentsMap() ([]map[string]string, error) {
	command := "mancalai_cli"
	args := []string{"-tool", "compare", "-agents"}
	output, err := exec.Command(command, args...).Output()
	if err != nil {
		return nil, fmt.Errorf(
			"Couldn't complete command '%s'", command,
		)
	}
	lines := bytes.Split(output, []byte{'\n'})
	var agents []map[string]string
	separator := []byte{':', ' '}
	for _, line := range lines {
		if len(line) > 0 {
			nameAndDescription := bytes.Split(line, separator)
			name := nameAndDescription[0]
			description := nameAndDescription[1]
			agents = append(agents, map[string]string{
				"name":        string(name),
				"description": string(description),
			})
		}
	}
	return agents, nil
}

// equivalent does a linewise comparison of got and want.
// For each line, one of the following must hold:
//   got exactly equals want, OR
//   want ends in " //substr" and is a substring of got, OR
//   want ends in " //slashes" and got equals want with its slashes swapped
//   for the platform's path separator (backslashes on Windows).
// Otherwise equivalent returns false.
func equivalent(got, want []byte) bool {
	var (
		gotLines  = bytes.Split(got, newline)
		wantLines = bytes.Split(want, newline)
		substr    = []byte(" //substr")
		slashes   = []byte(" //slashes")
		slash     = []byte{'/'}
		gg, ww    []byte
	)
	if len(gotLines) != len(wantLines) {
		return false
	}
	for i := range gotLines {
		gg, ww = gotLines[i], wantLines[i]
		if bytes.HasSuffix(ww, slashes) {
			ww = bytes.Replace(ww[:len(ww)-len(slashes)], slash, []byte{filepath.Separator}, -1)
		}
		if !(bytes.Equal(gg, ww) || bytes.HasSuffix(ww, substr) && bytes.Contains(gg, ww[:len(ww)-len(substr)])) {
			return false
		}
	}
	return true
}

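// A minimal usage sketch for equivalent, assuming newline is []byte("\n");
// the compared outputs are hypothetical.
func equivalentSketch() {
	got := []byte("build finished in 42ms\nwrote out/app.js")
	want := []byte("build finished //substr\nwrote out/app.js")
	fmt.Println(equivalent(got, want)) // true: line 1 matches via //substr, line 2 exactly
}
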
func compare(want, have io.Reader) ([]int, bool, error) {
	w, err := ioutil.ReadAll(want)
	if err != nil {
		return nil, false, err
	}
	h, err := ioutil.ReadAll(have)
	if err != nil {
		return nil, false, err
	}
	wb := bytes.Split(w, []byte("\n"))
	hb := bytes.Split(h, []byte("\n"))
	if len(wb) != len(hb) {
		return nil, false, nil
	}
	var diff []int
	ok := true
	for i := 0; i < len(wb); i++ {
		if bytes.Compare(wb[i], hb[i]) != 0 {
			diff = append(diff, i)
			ok = false
		}
	}
	return diff, ok, nil
}

func netshInterfaceIPv6ShowAddress(name string) ([]string, error) {
	// TODO: need to test ipv6 netmask too, but netsh does not output it
	out, err := runCmd("netsh", "interface", "ipv6", "show", "address", "interface=\""+name+"\"")
	if err != nil {
		return nil, err
	}
	addrs := make([]string, 0)
	lines := bytes.Split(out, []byte{'\r', '\n'})
	for _, line := range lines {
		if !bytes.HasPrefix(line, []byte("Address")) {
			continue
		}
		if !bytes.HasSuffix(line, []byte("Parameters")) {
			continue
		}
		f := bytes.Split(line, []byte{' '})
		if len(f) != 3 {
			continue
		}
		// remove scope ID if present
		f = bytes.Split(f[1], []byte{'%'})
		addrs = append(addrs, string(bytes.ToLower(bytes.TrimSpace(f[0]))))
	}
	return addrs, nil
}

// TestHostKeyFile tests reading from and writing to a HostKeyFile.
func TestHostKeyFile(t *testing.T) {
	os.Remove(hostFileBackup)
	defer os.Remove(hostFileBackup)
	in := NewHostKeyFile(hostFile)
	out := NewHostKeyFile(hostFileBackup)
	hostKeys, err := in.GetHostKeys()
	if err != nil {
		t.Fatal("reading host file error:", err)
	}
	for i, v := range hostKeys {
		if err = out.PutHostKey(i, v); err != nil {
			t.Fatal("append error:", err)
		}
	}
	keysByte, _ := ioutil.ReadFile(hostFile)
	keysByteBackup, _ := ioutil.ReadFile(hostFileBackup)
	keyBytes := bytes.Split(keysByte, []byte{'\n'})
	keyBytesBackup := bytes.Split(keysByteBackup, []byte{'\n'})
	for _, keyByte := range keyBytes {
		find := false
		for _, keyByteBackup := range keyBytesBackup {
			find = bytes.Compare(keyByte, keyByteBackup) == 0
			if find {
				break
			}
		}
		if !find {
			t.Fatalf("host file difference")
		}
	}
}

func saveHandler(w http.ResponseWriter, r *http.Request, title string) {
	page, _ := loadPage(title)
	boder := bytes.Split(page.Body, []byte("~![META]!~"))
	confer := bytes.Split(boder[0], []byte("::"))
	if strings.TrimSpace(r.FormValue("password_")) == strings.TrimSpace(string(confer[2])) {
		body := r.FormValue("body")
		public_edit := "false"
		public_view := "false"
		if r.FormValue("public_view") == "on" {
			public_view = "true"
		}
		if r.FormValue("public_edit") == "on" {
			public_edit = "true"
		}
		new_password := r.FormValue("new_password")
		body = public_view + "::" + public_edit + "::" + new_password + "\n~![META]!~\n" + body
		p := &Page{Title: title, Body: []byte(body)}
		err := p.save()
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		http.Redirect(w, r, "/view/"+title+"?password="******"&saved=succes", http.StatusFound)
	} else {
		http.Redirect(w, r, "/view/"+title+"?password="******"password_"))+"&saved=fail", http.StatusFound)
	}
}

func (server *CollectionServer) handlePacket(packet []byte) {
	server.metrics.UpdateCounter("statsd.packets_received", 1, 1)
	metrics := bytes.Split(packet, []byte("\n"))
	for _, metric := range metrics {
		parts := bytes.Split(metric, []byte(":"))
		key := string(parts[0])
		for _, bit := range parts[1:] {
			fields := bytes.Split(bit, []byte("|"))
			if len(fields) == 1 {
				server.metrics.UpdateCounter("statsd.bad_lines_seen", 1, 1)
				continue
			}
			sampleRate := float64(1)
			value, _ := strconv.ParseFloat(string(fields[0]), 64)
			if len(fields) == 3 {
				sampleRate, _ = strconv.ParseFloat(string(fields[2]), 64)
			}
			switch {
			case string(fields[1]) == "ms":
				server.metrics.UpdateTimer(key, value)
			case string(fields[1]) == "g":
				server.metrics.UpdateGauge(key, value)
			default:
				server.metrics.UpdateCounter(key, value, sampleRate)
			}
		}
	}
}

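// A minimal illustration of the line format handlePacket expects, using the
// same bytes.Split steps; the sample packet and metric names are hypothetical,
// and the optional third field is shown as a bare number because the handler
// above parses it directly with strconv.ParseFloat.
func splitStatsdPacketSketch() {
	packet := []byte("gorets:1|c\nglork:320|ms\nload:0.5|g\nhits:3|c|0.1")
	for _, metric := range bytes.Split(packet, []byte("\n")) {
		parts := bytes.Split(metric, []byte(":"))
		fields := bytes.Split(parts[1], []byte("|"))
		// parts[0] is the key, fields[0] the value, fields[1] the metric type
		// ("c", "ms", or "g"), and an optional fields[2] the sample rate.
		fmt.Printf("key=%s value=%s type=%s\n", parts[0], fields[0], fields[1])
	}
}
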
// Records uses Jaro-Winkler distance to parse WHOIS queries.
// Domains other than .com may not be supported.
func Records(data []byte) record {
	lines := bytes.Split(data, []byte("\n"))
	query := make(map[string]string)
	var record record
	for _, line := range lines {
		if jwd.Calculate(strings.Split(string(line), ":")[0], "Referral") > 0.7 && bytes.Contains(line, []byte(":")) {
			record.Referral = strings.TrimSpace(strings.Split(string(line), ": ")[1])
		}
		if len(line) > 0 && bytes.Contains(line, []byte(":")) && len(bytes.TrimSpace(bytes.Split(line, []byte(":"))[1])) > 0 {
			this := string(line)
			if len(query[strings.TrimSpace(strings.Split(this, ":")[0])]) != 0 {
				n := query[strings.TrimSpace(strings.Split(this, ":")[0])]
				query[strings.TrimSpace(strings.Split(this, ":")[0])] = n + "," + strings.TrimSpace(strings.Split(this, ":")[1])
			} else {
				query[strings.TrimSpace(strings.Split(this, ":")[0])] = strings.TrimSpace(strings.Split(this, ":")[1])
			}
		}
	}
	record.Updated = find(query, "Updated")
	record.Created = find(query, "Created")
	record.Nameservers = strings.Split(find(query, "Nameservers"), ",")
	record.Status = strings.Split(find(query, "Status"), ",")
	record.Expiration = find(query, "Expiration")
	return record
}

func (bl *Malwaredomains) Init(cacheDir string) (err error) {
	bl.domains = make(map[string]*MalwareDomain)
	domainsTxtFile, err := CacheGet(cacheDir, "http://mirror1.malwaredomains.com/files/domains.txt", 24)
	if err != nil {
		return err
	}
	b, _ := ioutil.ReadFile(domainsTxtFile)
	lines := bytes.Split(b, []byte("\n"))
	for _, line := range lines {
		if bytes.HasPrefix(line, []byte("#")) || len(line) == 0 {
			continue
		}
		f := bytes.Split(line, []byte("\t"))
		if len(f) < 5 {
			continue
		}
		fDomain := string(bytes.ToLower(f[2]))
		fType := string(f[3])
		fRef := string(f[4])
		bl.domains[fDomain] = &MalwareDomain{fDomain, fType, fRef}
	}
	return nil
}

func decodeChangesetState(data []byte) (State, error) {
	// example:
	// ---
	// last_run: 2016-07-02 22:46:01.422137422 Z
	// sequence: 1912325
	lines := bytes.Split(data, []byte("\n"))
	parts := bytes.Split(lines[1], []byte(":"))
	timeString := string(bytes.TrimSpace(bytes.Join(parts[1:], []byte(":"))))
	t, err := time.Parse(
		"2006-01-02 15:04:05.999999999 Z",
		timeString)
	if err != nil {
		return State{}, err
	}
	parts = bytes.Split(lines[2], []byte(":"))
	n, err := strconv.ParseUint(string(bytes.TrimSpace(parts[1])), 10, 64)
	if err != nil {
		return State{}, err
	}
	return State{
		SeqNum:    n,
		Timestamp: t,
	}, nil
}

func computeInsertedDeletedLines(oldCodeR, newCodeR io.Reader) (id InsDel, err error) {
	var i, d int
	// TODO(flowlo): get rid of ReadAll
	var oldCode, newCode []byte
	if oldCode, err = ioutil.ReadAll(oldCodeR); err != nil {
		return
	}
	if newCode, err = ioutil.ReadAll(newCodeR); err != nil {
		return
	}
	currentFields := bytes.Split(newCode, []byte("\n"))
	oldFields := bytes.Split(oldCode, []byte("\n"))
	// Lines in the new code that do not occur in the old code count as inserted.
	for _, val := range currentFields {
		if !bytes.Contains(oldCode, val) {
			i++
		}
	}
	// Lines in the old code that do not occur in the new code count as deleted.
	for _, val := range oldFields {
		if !bytes.Contains(newCode, val) {
			d++
		}
	}
	return InsDel{i, d}, nil
}

// ParseMaps parses /proc/$$/maps into a usable data structure.
func (p Process) ParseMaps() (maps Mapping, err error) {
	// TODO: slurp or use a reader? /proc/$$/maps shouldn't be large...
	buf, err := ioutil.ReadFile(p.maps)
	if err != nil {
		return nil, err
	}
	lines := bytes.Split(buf, []byte{'\n'})
	var m Map
	for _, line := range lines {
		if len(line) == 0 {
			continue
		}
		parts := bytes.Split(line, []byte{' '})
		// 6 parts minimum, but no max since sometimes
		// there's a big space between inode and path.
		// Prior to 2.0 there was only 5, but I doubt anybody
		// has a kernel from ~2004 that runs Go.
		if len(parts) < 6 {
			return maps, errors.New("proc.ParseMaps not enough portions.")
		}
		// Convert the address ranges from hex to uintptr.
		addr := bytes.Split(parts[0], []byte{'-'})
		m.Start = hexToUintptr(addr[0])
		m.End = hexToUintptr(addr[1])
		// Convert 'rwxp' to permissions bitmask.
		for _, c := range parts[1] {
			switch c {
			case 'r':
				m.Perms |= Read
			case 'w':
				m.Perms |= Write
			case 'x':
				m.Perms |= Exec
			case 'p':
				m.Perms |= Priv
			case 's':
				m.Perms |= Shared
			}
		}
		m.Offset = hexToUintptr(parts[2])
		// Split dev into Major:Minor parts.
		dev := bytes.Split(parts[3], []byte{':'})
		m.Maj = parseUint(dev[0])
		m.Min = parseUint(dev[1])
		m.Inode = parseUint(parts[4])
		m.Path = string(parts[len(parts)-1])
		m.Type = p.ParseType(m.Path)
		maps = append(maps, m)
	}
	return maps, nil
}

func ParseRoute(line []byte) (*Route, error) {
	route := &Route{}
	chunks := bytes.Split(line, []byte(","))
	urlParams := bytes.Split(chunks[0], []byte(" "))
	route.Method = strings.ToUpper(string(urlParams[0]))
	route.Path = string(urlParams[1])
	for i, chunk := range chunks {
		if i != 0 {
			chunkParts := bytes.Split(chunk, []byte(":"))
			if len(chunkParts) != 2 {
				return nil, fmt.Errorf("unexpected route parameters: %v", string(line))
			}
			name := string(bytes.TrimSpace(chunkParts[0]))
			value := string(bytes.TrimSpace(chunkParts[1]))
			if value[0] == '\'' && value[len(value)-1] == '\'' {
				value = value[1 : len(value)-1]
			}
			if name == "name" {
				route.Name = value
			}
			if name == "collection" && value == "true" {
				route.Collection = true
			}
			if name == "custom" && value == "true" {
				route.Custom = true
			}
		}
	}
	return route, nil
}

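// A minimal usage sketch for ParseRoute; the route line below is a
// hypothetical example of the "METHOD /path, key: value, ..." format that the
// parser above expects.
func parseRouteSketch() {
	route, err := ParseRoute([]byte("get /users, name: 'users', collection: true"))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(route.Method, route.Path, route.Name, route.Collection) // GET /users users true
}
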
func netshInterfaceIPv6ShowAddress(name string) ([]string, error) {
	// TODO: need to test ipv6 netmask too, but netsh does not output it
	out, err := runCmd("netsh", "interface", "ipv6", "show", "address", "interface=\""+name+"\"")
	if err != nil {
		return nil, err
	}
	addrs := make([]string, 0)
	lines := bytes.Split(out, []byte{'\r', '\n'})
	for _, line := range lines {
		if !bytes.HasPrefix(line, []byte("Address")) {
			continue
		}
		if !bytes.HasSuffix(line, []byte("Parameters")) {
			continue
		}
		f := bytes.Split(line, []byte{' '})
		if len(f) != 3 {
			continue
		}
		// remove scope ID if present
		f = bytes.Split(f[1], []byte{'%'})
		// netsh can create IPv4-embedded IPv6 addresses, like fe80::5efe:192.168.140.1.
		// Convert these to all-hexadecimal fe80::5efe:c0a8:8c01 for later string comparisons.
		ipv4Tail := regexp.MustCompile(`:\d+\.\d+\.\d+\.\d+$`)
		if ipv4Tail.Match(f[0]) {
			f[0] = []byte(ParseIP(string(f[0])).String())
		}
		addrs = append(addrs, string(bytes.ToLower(bytes.TrimSpace(f[0]))))
	}
	return addrs, nil
}

func (s *S) TestSimpleJsonMessageFormatterJsonInJson(c *check.C) {
	buf := bytes.Buffer{}
	encoder := json.NewEncoder(&buf)
	writer := SimpleJsonMessageEncoderWriter{encoder}
	for _, l := range bytes.Split([]byte(mockPullOutput), []byte("\n")) {
		writer.Write(l)
	}
	parts := bytes.Split(buf.Bytes(), []byte("\n"))
	parts = append([][]byte{[]byte(`{"message":"no json 1\n"}`)}, parts...)
	parts = append(parts, []byte(`{"message":"no json 2\n"}`))
	outBuf := bytes.Buffer{}
	streamWriter := NewStreamWriter(&outBuf, nil)
	streamWriter.Write(bytes.Join(parts, []byte("\n")))
	c.Assert(outBuf.String(), check.Equals, "no json 1\n"+
		"latest: Pulling from tsuru/static\n"+
		"a6aa3b66376f: Already exists\n"+
		"106572778bf7: Pulling fs layer\n"+
		"bac681833e51: Pulling fs layer\n"+
		"7302e23ef08a: Pulling fs layer\n"+
		"bac681833e51: Verifying Checksum\n"+
		"bac681833e51: Download complete\n"+
		"106572778bf7: Verifying Checksum\n"+
		"106572778bf7: Download complete\n"+
		"7302e23ef08a: Verifying Checksum\n"+
		"7302e23ef08a: Download complete\n"+
		"106572778bf7: Pull complete\n"+
		"bac681833e51: Pull complete\n"+
		"7302e23ef08a: Pull complete\n"+
		"Digest: sha256:b754472891aa7e33fc0214e3efa988174f2c2289285fcae868b7ec8b6675fc77\n"+
		"Status: Downloaded newer image for 192.168.50.4:5000/tsuru/static\n"+
		"no json 2\n")
}

func decodeIntervalState(data []byte, interval string) (State, error) {
	// example:
	// ---
	// #Sat Jul 16 06:14:03 UTC 2016
	// txnMaxQueried=836439235
	// sequenceNumber=2010580
	// timestamp=2016-07-16T06\:14\:02Z
	// txnReadyList=
	// txnMax=836439235
	// txnActiveList=836439008
	var (
		state State
		n     int
		err   error
	)
	for _, l := range bytes.Split(data, []byte("\n")) {
		parts := bytes.Split(l, []byte("="))
		if bytes.Equal(parts[0], []byte("sequenceNumber")) {
			n, err = strconv.Atoi(string(bytes.TrimSpace(parts[1])))
			if err != nil {
				return State{}, err
			}
			switch interval {
			case "minute":
				state.SeqNum = uint64(n)
			case "hour":
				state.SeqNum = uint64(n)
			case "day":
				state.SeqNum = uint64(n)
			default:
				panic("unsupported interval")
			}
		} else if bytes.Equal(parts[0], []byte("txnMax")) {
			state.TxnMax, err = strconv.Atoi(string(bytes.TrimSpace(parts[1])))
			if err != nil {
				return State{}, err
			}
		} else if bytes.Equal(parts[0], []byte("txnMaxQueried")) {
			state.TxnMaxQueried, err = strconv.Atoi(string(bytes.TrimSpace(parts[1])))
			if err != nil {
				return State{}, err
			}
		} else if bytes.Equal(parts[0], []byte("timestamp")) {
			timeString := string(bytes.TrimSpace(parts[1]))
			state.Timestamp, err = time.Parse(
				"2006-01-02T15\\:04\\:05Z",
				timeString)
			if err != nil {
				return State{}, err
			}
		}
	}
	return state, nil
}

func (t *HTTPSTranslator) isConnectSucceeded(resp []byte) bool {
	lines := bytes.Split(resp, []byte("\r\n"))
	tokens := bytes.Split(lines[0], []byte(" "))
	if bytes.Equal(tokens[1], []byte("200")) {
		return true
	}
	return false
}

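// A minimal illustration of the status-line check above, applied to a
// hypothetical proxy CONNECT response.
func connectStatusSketch() {
	resp := []byte("HTTP/1.1 200 Connection established\r\n\r\n")
	tokens := bytes.Split(bytes.Split(resp, []byte("\r\n"))[0], []byte(" "))
	fmt.Println(bytes.Equal(tokens[1], []byte("200"))) // true
}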