// BazaarDiffBranches returns a Delta between the bazaar branch at // oldPath and the one at newPath. func BazaarDiffBranches(oldPath, newPath string) (Delta, error) { output1, _, err := run("bzr", "log", "-l1", "--show-ids", "-r", "ancestor:"+oldPath, newPath) if err != nil { return nil, err } output2, _, err := run("bzr", "log", "-l1", "--show-ids", newPath) if err != nil { return nil, err } i1 := bytes.Index(output1, logRevId) i2 := bytes.Index(output2, logRevId) if i1 < 0 || i2 < 0 { return nil, errors.New("no revision-id in bzr log output") } output1 = output1[i1+len(logRevId):] output2 = output2[i2+len(logRevId):] i1 = bytes.Index(output1, []byte{'\n'}) i2 = bytes.Index(output2, []byte{'\n'}) if i1 < 0 || i2 < 0 { return nil, errors.New("bad revision-id in bzr log output") } oldRevision := string(output1[:i1]) newRevision := string(output2[:i2]) return &bzrBranches{oldPath, newPath, oldRevision, newRevision}, nil }
// GetJsonFromHttp extracts the embedded "ytplayer.config" JSON object
// from YouTube watch-page HTML (httpData) and returns it decoded as a
// map. It errors on age-restricted pages, when the config pattern is
// absent, or when the object's braces cannot be balanced.
func GetJsonFromHttp(httpData []byte) (map[string]interface{}, error) {
	//Find out if this page is age-restricted
	if bytes.Index(httpData, []byte("og:restrictions:age")) != -1 {
		return nil, errors.New("this page is age-restricted")
	}
	//Find begining of json data
	jsonBeg := "ytplayer.config = {"
	beg := bytes.Index(httpData, []byte(jsonBeg))
	if beg == -1 { //pattern not found
		return nil, PatternNotFoundError{_pattern: jsonBeg}
	}
	beg += len(jsonBeg) //len(jsonBeg) returns the number of bytes in jsonBeg
	//Find offset of json data: walk "}" by "}", crediting every "{" seen
	//before each one, until the count balances. NOTE(review): a brace
	//inside a JSON string value would skew this count — presumably such
	//braces don't occur in this payload; confirm.
	unmatchedBrackets := 1
	offset := 0
	for unmatchedBrackets > 0 {
		nextRight := bytes.Index(httpData[beg+offset:], []byte("}"))
		if nextRight == -1 {
			return nil, errors.New("unmatched brackets")
		}
		unmatchedBrackets -= 1
		unmatchedBrackets += bytes.Count(httpData[beg+offset:beg+offset+nextRight], []byte("{"))
		offset += nextRight + 1
	}
	//Load json data; beg-1 re-includes the opening "{" that is part of
	//the jsonBeg pattern.
	var f interface{}
	err := json.Unmarshal(httpData[beg-1:beg+offset], &f)
	if err != nil {
		return nil, err
	}
	//NOTE(review): this assertion panics if the top-level JSON value is
	//not an object.
	return f.(map[string]interface{}), nil
}
// skipSpaceOrComment returns data with any leading spaces or comments removed. func skipSpaceOrComment(data []byte) []byte { for len(data) > 0 { switch data[0] { case ' ', '\t', '\r', '\n': data = data[1:] continue case '/': if bytes.HasPrefix(data, slashSlash) { i := bytes.Index(data, newline) if i < 0 { return nil } data = data[i+1:] continue } if bytes.HasPrefix(data, slashStar) { data = data[2:] i := bytes.Index(data, starSlash) if i < 0 { return nil } data = data[i+2:] continue } } break } return data }
func extractEBNF(src []byte) []byte { var buf bytes.Buffer for { // i = beginning of EBNF text i := bytes.Index(src, open) if i < 0 { break // no EBNF found - we are done } i += len(open) // write as many newlines as found in the excluded text // to maintain correct line numbers in error messages for _, ch := range src[0:i] { if ch == '\n' { buf.WriteByte('\n') } } // j = end of EBNF text (or end of source) j := bytes.Index(src[i:], close) // close marker if j < 0 { j = len(src) - i } j += i // copy EBNF text buf.Write(src[i:j]) // advance src = src[j:] } return buf.Bytes() }
func ExampleIndex() { fmt.Println(bytes.Index([]byte("chicken"), []byte("ken"))) fmt.Println(bytes.Index([]byte("chicken"), []byte("dmr"))) // Output: // 4 // -1 }
// split implements bufio.SplitFunc for splitting front matter from the
// body text: the text between two delimiter occurrences is emitted once
// (space-trimmed via dropSpace), and at EOF the remainder is emitted as
// the body token.
func (m *Matter) split(data []byte, atEOF bool) (advance int, token []byte, err error) {
	if atEOF && len(data) == 0 {
		return 0, nil, nil
	}
	// Lazily sniff the delimiter from the first data seen.
	if m.delim == "" {
		delim, err := sniffDelim(data)
		if err != nil {
			return 0, nil, err
		}
		m.delim = delim
	}
	if _, ok := m.handlers[m.delim]; !ok {
		return 0, nil, ErrUnknownDelim
	}
	if x := bytes.Index(data, []byte(m.delim)); x >= 0 {
		// check the next delim index
		// NOTE(review): "next" is an offset into data[x+len(m.delim):],
		// but below it is used as if it were an offset into data — this
		// is only consistent when the first delimiter sits at x == 0;
		// confirm against callers.
		if next := bytes.Index(data[x+len(m.delim):], []byte(m.delim)); next > 0 {
			if !m.lastDelim {
				m.lastDelim = true
				m.lastIndex = next + len(m.delim)
				return next + len(m.delim)*2, dropSpace(data[x : next+len(m.delim)]), nil
			}
		}
	}
	if atEOF {
		// No (further) front matter: hand everything left to the caller.
		return len(data), data, nil
	}
	// Request more data from the Scanner.
	return 0, nil, nil
}
// extractComments removes every /* ... */ comment from c.Css, saving
// each comment body in c.comments and leaving a numbered
// ___YUICSSMIN_PRESERVE_CANDIDATE_COMMENT_N___ placeholder in its place
// so a later pass can decide which comments to restore.
func (c *CssCompressor) extractComments() {
	var sb bytes.Buffer
	startIndex := 0
	endIndex := 0
	tmpCss := c.Css
	for startIndex = bytes.Index(tmpCss, []byte("/*")); startIndex >= 0; {
		// Copy the text before the comment verbatim.
		sb.WriteString(string(tmpCss[:startIndex]))
		// endIndex is relative to the byte just after "/*".
		endIndex = bytes.Index(tmpCss[startIndex+2:], []byte("*/"))
		if endIndex < 0 {
			// NOTE(review): for an unterminated comment this sets
			// endIndex to len(tmpCss), which makes the two slices below
			// run past the end of tmpCss — looks like an out-of-range
			// panic; confirm with a trailing "/*" input.
			endIndex = len(tmpCss)
		}
		// Save the comment body (between "/*" and "*/").
		c.comments = append(c.comments, string(tmpCss[startIndex+2:endIndex+startIndex+2]))
		// Emit the placeholder carrying this comment's index.
		sb.WriteString(string("/*___YUICSSMIN_PRESERVE_CANDIDATE_COMMENT_" + (strconv.Itoa(len(c.comments) - 1)) + "___*/"))
		// Advance past "/*", the body, and the closing "*/".
		tmpCss = tmpCss[startIndex+2+endIndex+2:]
		startIndex = bytes.Index(tmpCss, []byte("/*"))
	}
	// Copy whatever follows the last comment.
	sb.WriteString(string(tmpCss))
	c.Css = sb.Bytes()
}
func LoadFioJsonData(filename string) (fdata FioJsonData) { dataBytes, err := ioutil.ReadFile(filename) if os.IsNotExist(err) { log.Fatalf("Could not read file %s: %s", filename, err) } // data loaded OK fdata.Filename = filename // fio writes a bunch of crap out to the output file before the JSON so for // now do the easy thing and find the first { after a \n and call it good offset := bytes.Index(dataBytes, []byte("\n{")) // bytes.Index will return -1 for not found, in which case we assume that it // been trimmed from the input file and start at index 0 if offset == -1 { offset = 0 } // sometimes it also puts junk at the end of the file eof := bytes.Index(dataBytes, []byte("\n}")) if eof < offset { eof = len(dataBytes) } err = json.Unmarshal(dataBytes[offset:eof+2], &fdata) if err != nil { log.Fatalf("Could not parse fio --output=json JSON in file '%s': %s", filename, err) } fdata.HeaderGarbage = string(dataBytes[0:offset]) fdata.FooterGarbage = string(dataBytes[eof+1:]) return }
// fetchTitle attempts to retrieve the title element for a given url. func fetchTitle(c *proto.Client, m *proto.Message, url string) { resp, err := http.Get(url) if err != nil { return } body, err := ioutil.ReadAll(resp.Body) resp.Body.Close() if err != nil { return } body = bytes.ToLower(body) s := bytes.Index(body, []byte("<title>")) if s == -1 { return } body = body[s+7:] e := bytes.Index(body, []byte("</title>")) if e == -1 { e = len(body) - 1 } body = bytes.TrimSpace(body[:e]) c.PrivMsg(m.Receiver, "%s's link shows: %s", m.SenderName, html.UnescapeString(string(body))) }
func TestGolfsWithJsonFormatter(t *testing.T) { p := &Person{ Name: "Bruce", Alias: "Batman", Hideout: &Hideout{ Name: "JLU Tower", DimensionId: 52, }, } jf := newJsonFormatter() b, err := jf.Format(&log.Entry{ Message: "the dark knight", Data: log.Fields{"hero": p}}) if err != nil { t.Fatal("Unable to format entry: ", err) } if bytes.Index(b, ([]byte)(`"hero.name":"Bruce"`)) < 0 { t.Fatalf(`missing "hero.name":"Bruce"`) } if bytes.Index(b, ([]byte)(`"hero.alias":"Batman"`)) < 0 { t.Fatalf(`missing "hero.alias":"Batman"`) } if bytes.Index(b, ([]byte)(`"hero.hideout.name":"JLU Tower"`)) < 0 { t.Fatalf(`missing "hero.hideout.name":"JLU Tower"`) } if bytes.Index(b, ([]byte)(`"hero.hideout.dimensionId":52`)) < 0 { t.Fatalf(`missing "hero.hideout.dimensionId":52`) } }
// linkify writes src to out, running each region delimited by
// openTag/closeTag through an ebnfParser and copying everything else
// through unchanged.
func linkify(out io.Writer, src []byte) {
	for len(src) > 0 {
		n := len(src)
		// i: beginning of EBNF text (or end of source). When no openTag
		// remains, i is forced to n (via n-len+len) so the whole
		// remainder is copied verbatim and the loop terminates.
		i := bytes.Index(src, openTag)
		if i < 0 {
			i = n - len(openTag)
		}
		i += len(openTag)
		// j: end of EBNF text (or end of source)
		j := bytes.Index(src[i:n], closeTag) // close marker
		if j < 0 {
			j = n - i
		}
		j += i
		// write text before EBNF (including the openTag itself)
		out.Write(src[0:i])
		// parse and write EBNF
		var p ebnfParser
		p.parse(out, src[i:j])
		// advance; the closeTag, if present, is emitted verbatim on the
		// next iteration
		src = src[j:n]
	}
}
func (self *Interp) Read() int64 { // stdout b := self.stdout_buffer.Bytes() i := bytes.Index(b, []byte{'\x04'}) if i != -1 { s, _ := self.stdout_buffer.ReadBytes('\x04') self.stdout_read_time = time.Now().UnixNano() if i > 0 { self.Stdout = string(s[0:i]) } } // stderr b = self.stderr_buffer.Bytes() i = bytes.Index(b, []byte{'\x04'}) if i != -1 { s, _ := self.stderr_buffer.ReadBytes('\x04') self.stderr_read_time = time.Now().UnixNano() if i > 0 { self.Stderr = string(s[0:i]) } } if self.stdout_read_time != 0 && self.stderr_read_time != 0 { return imax64(self.stdout_read_time, self.stderr_read_time) } return self.exit_read_time }
func (p *Org_sslproxies) Load() ([]*ProxyItem, error) { b, err := httpGet(SSLPROXIES_URL, p.client) if err != nil { return nil, errors.New("Failed to read stream") } startBytes := []byte("<tbody>") endBytes := []byte("</tbody>") tbodyStart := bytes.Index(b, startBytes) tbodyEnd := bytes.Index(b, endBytes) if tbodyEnd <= tbodyStart { return nil, errors.New("Failed to parse stream") } bytes := b[tbodyStart : tbodyEnd+len(endBytes)] tbl := Tbody{} err = xml.Unmarshal(bytes, &tbl) if err != nil { return nil, err } ret := make([]*ProxyItem, len(tbl.Tr)) cnt := 0 for _, tr := range tbl.Tr { item := p.convert(&tr) if item != nil { ret[cnt] = item cnt++ } } return ret, nil }
func TestGolfsWithTextFormatter(t *testing.T) { p := &Person{ Name: "Bruce", Alias: "Batman", Hideout: &Hideout{ Name: "JLU Tower", DimensionId: 52, }, } tf := newTextFormatter() b, _ := tf.Format(&log.Entry{ Message: "the dark knight", Data: log.Fields{"hero": p}}) if bytes.Index(b, ([]byte)("hero.name=Bruce")) < 0 { t.Fatalf("missing hero.name=Bruce") } if bytes.Index(b, ([]byte)("hero.alias=Batman")) < 0 { t.Fatalf("missing hero.alias=Batman") } if bytes.Index(b, ([]byte)(`hero.hideout.name="JLU Tower"`)) < 0 { t.Fatalf(`missing hero.hideout.name="JLU Tower"`) } if bytes.Index(b, ([]byte)("hero.hideout.dimensionId=52")) < 0 { t.Fatalf("missing hero.hideout.dimensionId=52") } }
// fixHtml parses wild as HTML and returns the re-rendered <body>
// contents when the parse and render succeed, or the fully escaped
// original bytes when anything fails (parse error, render error,
// buffer-growth panic, or a render without a <body> element). Image
// references are rewritten against linkUrl via fixImgs.
func fixHtml(linkUrl string, wild []byte) (well []byte) {
	n, err := html.Parse(bytes.NewReader(wild))
	if err != nil {
		return []byte(html.EscapeString(string(wild)))
	}
	fixImgs(linkUrl, n)
	// bytes.Buffer panics with bytes.ErrTooLarge when it cannot grow;
	// recover exactly that panic and fall back to escaping. Any other
	// panic value is re-raised.
	defer func() {
		if err := recover(); err == bytes.ErrTooLarge {
			well = []byte(html.EscapeString(string(wild)))
		} else if err != nil {
			panic(err)
		}
	}()
	buf := bytes.NewBuffer(make([]byte, 0, len(wild)*2))
	if err := html.Render(buf, n); err != nil {
		return []byte(html.EscapeString(string(wild)))
	}
	well = buf.Bytes()
	// The parser wraps content in <html><head><body>; keep only what
	// lies between <body> and </body>.
	openBody := []byte("<body>")
	i := bytes.Index(well, openBody)
	if i < 0 {
		return []byte(html.EscapeString(string(wild)))
	}
	well = well[i+len(openBody):]
	closeBody := []byte("</body>")
	i = bytes.Index(well, closeBody)
	if i < 0 {
		return []byte(html.EscapeString(string(wild)))
	}
	return well[:i]
}
// Remove export, if exportsFile is an empty string /etc/exports is used func Remove(exportsFile string, identifier string) ([]byte, error) { if exportsFile == "" { exportsFile = defaultExportsFile } exports, err := ioutil.ReadFile(exportsFile) if err != nil { return nil, err } beginMark := []byte(fmt.Sprintf("# BEGIN: %s", identifier)) endMark := []byte(fmt.Sprintf("# END: %s\n", identifier)) begin := bytes.Index(exports, beginMark) end := bytes.Index(exports, endMark) if begin == -1 || end == -1 { return nil, fmt.Errorf("Couldn't not find export %s in %s", identifier, exportsFile) } newExports := append(exports[:begin], exports[end+len(endMark):]...) newExports = append(bytes.TrimSpace(newExports), '\n') if err := ioutil.WriteFile(exportsFile, newExports, 0644); err != nil { return nil, err } return newExports, nil }
// Next returns the next message. At the end of the iteration,
// io.EOF is returned as the error. Errors are sticky: once r.err is
// set, every later call returns it.
func (r *moReader) Next() (*Message, error) {
	r.init()
	if r.err != nil {
		return nil, r.err
	}
	if r.pos >= r.header.MsgCount {
		r.err = io.EOF
		return nil, r.err
	}
	msg := Message{}
	var err error
	// Read msgid and msgstr; each table entry appears to be 8 bytes
	// wide, hence pos*8 (confirm against readMessage/the MO layout).
	if msg.Id, err = r.readMessage(r.header.IdTableOffset + r.pos*8); err != nil {
		r.err = err
		return nil, err
	}
	if msg.Str, err = r.readMessage(r.header.StrTableOffset + r.pos*8); err != nil {
		r.err = err
		return nil, err
	}
	// Is this a context message? A context is joined to the id with an
	// EOT separator (eotBytes).
	if idx := bytes.Index(msg.Id, eotBytes); idx != -1 {
		msg.Ctxt = msg.Id[:idx]
		msg.Id = msg.Id[idx+1:]
	}
	// Is this a plural message? Singular and plural ids are
	// NUL-separated (nulBytes); the translations split the same way.
	if idx := bytes.Index(msg.Id, nulBytes); idx != -1 {
		msg.IdPlural = msg.Id[idx+1:]
		msg.Id = msg.Id[:idx]
		msg.StrPlural = bytes.Split(msg.Str, nulBytes)
		msg.Str = nil
	}
	r.pos += 1
	return &msg, nil
}
func (i *ImageIndex) FetchPage(data []byte, deep int) { // openTag: <a openTag := []byte{0x3c, 0x61} openPos := 0 closePos := 0 // hrefTag: href hrefTag := []byte{0x68, 0x72, 0x65, 0x66} hrefPos := 0 // quote: " (0x22) quoteOpenPos := 0 quoteClosePos := 0 found := bytes.Index(data[openPos:], openTag) var tmpSlice []byte var url string for found = bytes.Index(data[openPos:], openTag); found != -1; found = bytes.Index(data[openPos:], openTag) { openPos = openPos + found + 3 closePos = bytes.IndexByte(data[openPos:], 0x3e) tmpSlice = data[openPos : openPos+closePos] hrefPos = bytes.Index(tmpSlice, hrefTag) if hrefPos != -1 { quoteOpenPos = bytes.IndexByte(tmpSlice[hrefPos+5:], 0x22) if quoteOpenPos != -1 { quoteClosePos = bytes.IndexByte(tmpSlice[hrefPos+5+quoteOpenPos+1:], 0x22) if quoteClosePos != -1 { url, _ = FullURL(i.rootURL, string(tmpSlice[hrefPos+5+quoteOpenPos+1:hrefPos+5+quoteOpenPos+quoteClosePos+1])) i.pageList.PushBack(pageInfo{url, deep}) } } } } }
func hasModifiedPlayground(fname string) (mod string, ok bool) { var start, stop, content, playground []byte var startOffset, stopOffset int start = []byte("// PLAYGROUND START\n") stop = []byte("// PLAYGROUND STOP\n") content, err := ioutil.ReadFile(fname) if err != nil { log.Fatalf("[ERROR] %s\n", err) } startOffset = bytes.Index(content, start) stopOffset = bytes.Index(content, stop) if startOffset == -1 || stopOffset == -1 { return "", false } playground = content[startOffset+len(start) : stopOffset] ok = (string(playground) != PlaygroundTmpl) return string(playground), ok }
// parseCRILog parses logs in CRI log format. CRI Log format example: // 2016-10-06T00:17:09.669794202Z stdout log content 1 // 2016-10-06T00:17:09.669794203Z stderr log content 2 func parseCRILog(log []byte, msg *logMessage) error { var err error // Parse timestamp idx := bytes.Index(log, delimiter) if idx < 0 { return fmt.Errorf("timestamp is not found") } msg.timestamp, err = time.Parse(timeFormat, string(log[:idx])) if err != nil { return fmt.Errorf("unexpected timestamp format %q: %v", timeFormat, err) } // Parse stream type log = log[idx+1:] idx = bytes.Index(log, delimiter) if idx < 0 { return fmt.Errorf("stream type is not found") } msg.stream = streamType(log[:idx]) if msg.stream != stdoutType && msg.stream != stderrType { return fmt.Errorf("unexpected stream type %q", msg.stream) } // Get log content msg.log = log[idx+1:] return nil }
// EncodeKey returns a canonical encoding of an Entry (minus its value). func EncodeKey(source *spb.VName, factName string, edgeKind string, target *spb.VName) ([]byte, error) { if source == nil { return nil, errors.New("invalid Entry: missing source VName for key encoding") } else if (edgeKind == "" || target == nil) && (edgeKind != "" || target != nil) { return nil, errors.New("invalid Entry: edgeKind and target Ticket must be both non-empty or empty") } else if strings.Index(edgeKind, entryKeySepStr) != -1 { return nil, errors.New("invalid Entry: edgeKind contains key separator") } else if strings.Index(factName, entryKeySepStr) != -1 { return nil, errors.New("invalid Entry: factName contains key separator") } keySuffix := []byte(entryKeySepStr + edgeKind + entryKeySepStr + factName + entryKeySepStr) srcEncoding, err := encodeVName(source) if err != nil { return nil, fmt.Errorf("error encoding source VName: %v", err) } else if bytes.Index(srcEncoding, entryKeySepBytes) != -1 { return nil, fmt.Errorf("invalid Entry: source VName contains key separator %v", source) } targetEncoding, err := encodeVName(target) if err != nil { return nil, fmt.Errorf("error encoding target VName: %v", err) } else if bytes.Index(targetEncoding, entryKeySepBytes) != -1 { return nil, errors.New("invalid Entry: target VName contains key separator") } return bytes.Join([][]byte{ entryKeyPrefixBytes, srcEncoding, keySuffix, targetEncoding, }, nil), nil }
func extractInfo(buffer []byte, begin int, end int, out chan<- InfoRecord) error { buf := buffer[begin:] n := begin for true { b := bytes.Index(buf, []byte("<info ")) e := bytes.Index(buf, []byte("</info>")) if b < 0 { return nil } if e < 0 { return fmt.Errorf("Can't parse %s", "file") } e += 7 var res InfoRecord err := xml.Unmarshal(buf[b:e], &res) if err != nil { return err } out <- res buf = buf[e:] n += e if n > end { return nil } } return nil }
// LoadTable loads a batch-status PDF from a URL or local path, inflates
// each zlib-compressed stream it contains, joins the extracted text
// fragments into records, and builds a BatchTable keyed by the 11-digit
// ids found in those records. Returns nil when the PDF cannot be loaded.
// NOTE(review): the meaning of the two records following an id depends
// on BTETRE/TextRE (defined elsewhere) — confirm before relying on this.
func LoadTable(url string) BatchTable {
	var pdf []byte
	if strings.HasPrefix(url, "http") {
		pdf = loadFromUrl(url)
	} else {
		pdf = loadFile(url)
	}
	if pdf == nil {
		log.Printf("PDF file wasn't loaded")
		return nil
	}
	table := make(BatchTable)
	for {
		// Locate the next compressed stream section.
		begin := bytes.Index(pdf, []byte(StreamStartMarker))
		if begin == -1 {
			break
		}
		pdf = pdf[begin+len(StreamStartMarker):]
		end := bytes.Index(pdf, []byte(StreamEndMarker))
		if end == -1 {
			break
		}
		section := pdf[0:end]
		pdf = pdf[end+len(StreamEndMarker):]
		// Inflate the stream; sections that fail are logged and skipped.
		buf := bytes.NewBuffer(section)
		unzipReader, err := zlib.NewReader(buf)
		if err != nil {
			log.Printf("Unzip initialization failed, %v", err)
			continue
		}
		unzipped, err := ioutil.ReadAll(unzipReader)
		if err != nil {
			log.Printf("Unzip failed, %v", err)
			continue
		}
		// Join the text fragments of each matched block into one record.
		records := make([]string, 0)
		for _, group := range BTETRE.FindAllSubmatch(unzipped, -1) {
			lines := make([][]byte, 0)
			for _, group := range TextRE.FindAllSubmatch(group[1], -1) {
				lines = append(lines, group[1])
			}
			records = append(records, string(bytes.Join(lines, []byte{})))
		}
		// A record parsing as an integer in [20000000000, 29000000000)
		// is treated as an id; the next two records form its update.
		for i := 0; i < len(records)-2; i++ {
			v, err := strconv.ParseInt(records[i], 10, 64)
			if err == nil && v >= 20000000000 && v < 29000000000 {
				id := records[i]
				if _, exists := table[id]; !exists {
					table[id] = make([]BatchUpdate, 0)
				}
				table[id] = append(table[id], BatchUpdate{records[i+1], records[i+2]})
				i += 2
			}
		}
	}
	return table
}
func ParseMessage(message string) *Message { // :<prefix> <command> <params> :<trailing> msg := []byte(message) var prefix, params, trailing []byte if bytes.HasPrefix(msg, []byte(":")) { index := bytes.Index(msg, []byte(" ")) prefix = msg[1:index] msg = msg[index+1:] } cmdEndIndex := bytes.Index(msg, []byte(" ")) command := msg[:cmdEndIndex] msg = msg[cmdEndIndex+1:] trailingStartIndex := bytes.Index(msg, []byte(":")) if trailingStartIndex < 0 { params = msg } else { params = bytes.TrimRight(msg[:trailingStartIndex], " ") trailing = msg[trailingStartIndex+1:] } return &Message{ raw: message, Prefix: prefix, Command: command, Params: params, Trailing: trailing, } }
// extractTOC splits rendered HTML into the content without its table of
// contents and the TOC itself, re-tagged with id="TableOfContents".
// When no TOC nav is present, the empty nav stub is stripped instead.
func extractTOC(content []byte) (newcontent []byte, toc []byte) {
	// Keep a pristine copy: the append near the bottom overwrites
	// content's backing array, and toc is built from this copy.
	origContent := make([]byte, len(content))
	copy(origContent, content)
	first := []byte(`<nav> <ul>`)
	last := []byte(`</ul> </nav>`)
	replacement := []byte(`<nav id="TableOfContents"> <ul>`)

	startOfTOC := bytes.Index(content, first)

	// Only peek up to 70 bytes past the nav opening when verifying that
	// this nav is the TOC.
	peekEnd := len(content)
	if peekEnd > 70+startOfTOC {
		peekEnd = 70 + startOfTOC
	}

	if startOfTOC < 0 {
		return stripEmptyNav(content), toc
	}
	// Need to peek ahead to see if this nav element is actually the right one.
	correctNav := bytes.Index(content[startOfTOC:peekEnd], []byte(`#toc_0`))
	if correctNav < 0 { // no match found
		return content, toc
	}
	// NOTE(review): if the closing marker is absent, bytes.Index yields
	// -1 and lengthOfTOC/endOfTOC are wrong — confirm the renderer
	// always emits the closing tags.
	lengthOfTOC := bytes.Index(content[startOfTOC:], last) + len(last)
	endOfTOC := startOfTOC + lengthOfTOC

	// This append writes into content's backing array — which is why
	// toc is sliced from origContent below.
	newcontent = append(content[:startOfTOC], content[endOfTOC:]...)
	toc = append(replacement, origContent[startOfTOC+len(first):endOfTOC]...)
	return
}
func (blp *Blp) parsePositionData(line []byte) { if bytes.HasPrefix(line, mysqlctl.BINLOG_POSITION_PREFIX) { //Master Position if blp.nextStmtPosition == 0 { return } } else if bytes.Index(line, mysqlctl.BINLOG_ROTATE_TO) != -1 { blp.parseRotateEvent(line) } else if bytes.Index(line, mysqlctl.BINLOG_END_LOG_POS) != -1 { //Ignore the position data that appears at the start line of binlog. if bytes.Index(line, mysqlctl.BINLOG_START) != -1 { return } blp.parseMasterPosition(line) if blp.nextStmtPosition != 0 { blp.currentPosition.Position.MasterPosition = blp.nextStmtPosition } } if bytes.Index(line, mysqlctl.BINLOG_XID) != -1 { blp.parseXid(line) } // FIXME(shrutip): group_id is most relevant for commit events // check how group_id is set for ddls and possibly move this block // in parseXid if bytes.Index(line, mysqlctl.BINLOG_GROUP_ID) != -1 { blp.parseGroupId(line) } }
/* * Parse the http data of the page get from url and retrieve the id list */ func GetVideoIdsFromSearch(searchUrl string) (idList []string, err error) { //Get the http code of the page get from url body, err := GetHttpFromUrl(searchUrl) if err != nil { return } //Retrive id list idBeg := []byte("class=\"yt-lockup yt-lockup-tile yt-lockup-video vve-check clearfix\" data-context-item-id=\"") beg := 0 for { //Find the index of begin pattern offset := bytes.Index(body[beg:], idBeg) if offset < 0 { return } beg += offset + len(idBeg) //Find the index of closing parenthesis offset = bytes.Index(body[beg:], []byte("\"")) if offset < 0 { err = errors.New("unmatched parenthesis") return } end := beg + offset idList = append(idList, string(body[beg:end])) } return }
// parseIndex extracts the index name, index id and user id from the
// keyspace-id / index comment trailer appended to a binlog SQL
// statement. It panics with a BinlogParseError when the keyspace-id
// comment is missing or a numeric field fails to parse; when no index
// comment is present it returns zero values.
func parseIndex(sql []byte) (indexName string, indexId interface{}, userId uint64) {
	var err error
	keyspaceIndex := bytes.Index(sql, KEYSPACE_ID_COMMENT)
	if keyspaceIndex == -1 {
		panic(NewBinlogParseError(fmt.Sprintf("Error parsing index comment, doesn't contain keyspace id %v", string(sql))))
	}
	keyspaceIdComment := sql[keyspaceIndex+len(KEYSPACE_ID_COMMENT):]
	indexCommentStart := bytes.Index(keyspaceIdComment, INDEX_COMMENT)
	if indexCommentStart != -1 {
		// Split once on the colon; the user id is the first
		// space-delimited token after it. NOTE(review): missing colon or
		// missing fields would index out of range here — confirm the
		// comment writer always emits the full form.
		indexCommentParts := bytes.SplitN(keyspaceIdComment[indexCommentStart:], COLON_BYTE, 2)
		userId, err = strconv.ParseUint(string(bytes.SplitN(indexCommentParts[1], mysqlctl.SPACE, 2)[0]), 10, 64)
		if err != nil {
			panic(NewBinlogParseError(fmt.Sprintf("Error converting user_id %v", string(sql))))
		}
		// Dot-split the left side: element 1 is the index name,
		// element 2 the (colon-terminated) index id.
		indexNameId := bytes.Split(indexCommentParts[0], DOT_BYTE)
		indexName = string(indexNameId[1])
		if indexName == "username" {
			// username indexes keep the id as a string.
			indexId = string(bytes.TrimRight(indexNameId[2], COLON))
		} else {
			// Every other index carries a numeric id.
			indexId, err = strconv.ParseUint(string(bytes.TrimRight(indexNameId[2], COLON)), 10, 64)
			if err != nil {
				panic(NewBinlogParseError(fmt.Sprintf("Error converting index id %v %v", string(bytes.TrimRight(indexNameId[2], COLON)), string(sql))))
			}
		}
	}
	return
}
// sections returns a collection of file sections,
// each of which begins with a line satisfying prefix.
// text before the first instance of such a line is
// returned separately.
func sections(text []byte, prefix string) ([]byte, [][]byte) {
	// First pass: count section-start lines so the result slice can be
	// sized exactly.
	n := 0
	for b := text; ; {
		if hasPrefix(b, prefix) {
			n++
		}
		nl := bytes.Index(b, newline)
		if nl < 0 {
			break
		}
		b = b[nl+1:]
	}
	// sect[0] holds the leading text; sect[1..n] hold the sections.
	sect := make([][]byte, n+1)
	n = 0
	// Second pass: when a section-start line is seen, close the current
	// piece as text[0 : len(text)-len(b)] — i.e. everything consumed so
	// far — and restart the window at the current position b.
	for b := text; ; {
		if hasPrefix(b, prefix) {
			sect[n] = text[0 : len(text)-len(b)]
			n++
			text = b
		}
		nl := bytes.Index(b, newline)
		if nl < 0 {
			// Final (possibly newline-less) remainder closes the last
			// section.
			sect[n] = text
			break
		}
		b = b[nl+1:]
	}
	return sect[0], sect[1:]
}
//Data ouput includes the left and right string. func (h *Cut) Between(left, right string) Paste { c := make(chan Data) go func() { s := *h startPat, endPat := []byte(left), []byte(right) newline := []byte("\n") space := []byte(" ") for { startPos := bytes.Index(s, startPat) if startPos == -1 || len(s) == 0 { close(c) break } s = s[startPos:] endPos := bytes.Index(s, endPat) + len(endPat) c <- bytes.Replace(s[:endPos], newline, space, -1) s = s[endPos:] } }() return Paste(c) }