// contextAfterText starts in context c, consumes some tokens from the front of
// s, then returns the context after those tokens and the unprocessed suffix.
func contextAfterText(c context, s []byte) (context, int) {
	if c.delim == delimNone {
		c1, i := tSpecialTagEnd(c, s)
		if i == 0 {
			// A special end tag (`</script>`) has been seen and
			// all content preceding it has been consumed.
			return c1, 0
		}
		// Consider all content up to any end tag.
		return transitionFunc[c.state](c, s[:i])
	}

	// We are at the beginning of an attribute value.

	i := bytes.IndexAny(s, delimEnds[c.delim])
	if i == -1 {
		i = len(s)
	}
	if c.delim == delimSpaceOrTagEnd {
		// http://www.w3.org/TR/html5/syntax.html#attribute-value-(unquoted)-state
		// lists the runes below as error characters.
		// Error out because HTML parsers may differ on whether
		// "<a id= onclick=f("             ends inside id's or onclick's value,
		// "<a class=`foo "                ends inside a value,
		// "<a style=font:'Arial'"         needs open-quote fixup.
		// IE treats '`' as a quotation character.
		if j := bytes.IndexAny(s[:i], "\"'<=`"); j >= 0 {
			return context{
				state: stateError,
				err:   errorf(ErrBadHTML, nil, 0, "%q in unquoted attr: %q", s[j:j+1], s[:i]),
			}, len(s)
		}
	}
	if i == len(s) {
		// Remain inside the attribute.
		// Decode the value so non-HTML rules can easily handle
		//     <button onclick="alert(&quot;Hi!&quot;)">
		// without having to entity decode token boundaries.
		for u := []byte(html.UnescapeString(string(s))); len(u) != 0; {
			c1, i1 := transitionFunc[c.state](c, u)
			c, u = c1, u[i1:]
		}
		return c, len(s)
	}

	element := c.element

	// If this is a non-JS "type" attribute inside "script" tag, do not treat the contents as JS.
	if c.state == stateAttr &&
		c.element == elementScript &&
		c.attr == attrScriptType &&
		!isJSType(string(s[:i])) {
		element = elementNone
	}

	if c.delim != delimSpaceOrTagEnd {
		// Consume any quote.
		i++
	}

	// On exiting an attribute, we discard all state information
	// except the state and element.
	return context{state: stateTag, element: element}, i
}
func ExampleIndexAny() {
	fmt.Println(bytes.IndexAny([]byte("chicken"), "aeiouy"))
	fmt.Println(bytes.IndexAny([]byte("crwth"), "aeiouy"))
	// Output:
	// 2
	// -1
}
func tabsToSpaces(in []byte, tabsize int) []byte {
	if bytes.IndexByte(in, '\t') == -1 {
		return in
	}
	spaces := bytes.Repeat([]byte(" "), tabsize)
	var out []byte
	i := bytes.IndexAny(in, "\n\r\f\t")
	col := 0
	for i != -1 {
		out = append(out, in[:i]...)
		col += utf8.RuneCount(in[:i])
		if in[i] == '\t' {
			nspaces := tabsize - (col % tabsize)
			out = append(out, spaces[:nspaces]...)
			col += nspaces
		} else { // line feed
			out = append(out, in[i])
			col = 0
		}
		in = in[i+1:]
		i = bytes.IndexAny(in, "\n\r\f\t")
	}
	return append(out, in...)
}
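// Usage sketch (added for illustration, not from the original source): assumes
// tabsToSpaces above is in scope in the same package and that "fmt" is
// imported. A tab stop of 4 is used purely as an example value.
func demoTabsToSpaces() {
	out := tabsToSpaces([]byte("a\tbb\tccc\tend"), 4)
	fmt.Printf("%q\n", out) // prints "a   bb  ccc end"
}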
func parserank(barr []byte) (int, error) {
	ndx1 := bytes.IndexAny(barr, ":")
	if ndx1 == -1 {
		return 0, errors.New("Malformed rank line, could not find rank")
	}
	barr = barr[ndx1+1:]
	ndx1 = bytes.IndexAny(barr, ":")
	if ndx1 == -1 {
		return 0, errors.New("Malformed rank line, could not find rank")
	}
	barr = barr[ndx1+1:]

	// Find rows
	ndx1 = bytes.Index(barr, []byte("row"))
	if ndx1 == -1 {
		return 0, errors.New("Malformed rank line, could not find row")
	}

	// Attempt to parse the row
	nobj, err := strconv.ParseInt(string(bytes.TrimSpace(barr[:ndx1])), 10, 32)
	if err != nil {
		return 0, err
	}
	return int(nobj), nil
}
func singleline(w *writer, prefix string, buf []byte) {
	w.WriteString(prefix)
	idx := bytes.IndexAny(buf, "\r\n")
	for idx != -1 {
		w.Write(buf[:idx])
		w.WriteByte(' ')
		buf = buf[idx+1:]
		idx = bytes.IndexAny(buf, "\r\n")
	}
	w.Write(buf)
}
func process_data(data []byte) (result []byte, err error) {
	fmt.Println(len(data), string(data[:10]))
	current_start := 0
	switch data[current_start] {
	case '{':
		// Find the closing delimiter for a dictionary: '}'
		{
			delimiter_count := 1
			position := 6
			for ; position < len(data); position++ {
				switch data[position] {
				case '}':
					delimiter_count--
				case '{':
					delimiter_count++
				}
				if delimiter_count == 0 {
					// found the closing delimiter
					break
				}
			}
		}
	case '[':
		// Starting a list: get first list element
		result, err = process_data(data[1:])
	default:
		// process series of one or more comma separated quoted strings or integers followed by '}' or ']'
		{
			series := []byte{}
			series_end := bytes.IndexAny(data, "]}")
			current_start := 0
			for current_start < series_end {
				// process string and integer tokens
				token := []byte{}
				switch data[current_start] {
				case '"': // string
					token_end := bytes.IndexByte(data[1:], '"')
					token = data[1:token_end]
					current_start = bytes.IndexAny(data, ",]}")
				default: // integer
					next_start := bytes.IndexAny(data, ",]}")
					token = data[current_start:next_start]
					current_start = next_start
				}
				series = append(series, token...)
				if data[current_start] == ',' {
					// another token to process
					current_start++
					series = append(series, byte(' '))
				}
				fmt.Println(string(series))
			}
			err = nil
			result = append([]byte(nil), series...)
		}
	}
	return result, err
}
func EnvSub(r io.Reader) (io.Reader, error) {
	bufIn := bufio.NewReader(r)
	bufOut := new(bytes.Buffer)
	for {
		chunk, err := bufIn.ReadBytes(byte('%'))
		if err != nil {
			if err == io.EOF {
				// We're done.
				bufOut.Write(chunk)
				break
			}
			return nil, err
		}
		bufOut.Write(chunk[:len(chunk)-1])

		tmp := make([]byte, 4)
		tmp, err = bufIn.Peek(4)
		if err != nil {
			if err == io.EOF {
				// End of file, write the last few bytes out and exit.
				bufOut.WriteRune('%')
				bufOut.Write(tmp)
				break
			}
			return nil, err
		}

		if string(tmp) == "ENV[" {
			// Found opening delimiter, advance the read cursor and look for
			// closing delimiter.
			tmp, err = bufIn.ReadBytes(byte('['))
			if err != nil {
				// This shouldn't happen, since the Peek succeeded.
				return nil, err
			}
			chunk, err = bufIn.ReadBytes(byte(']'))
			if err != nil {
				if err == io.EOF {
					// No closing delimiter, return an error
					return nil, ErrMissingCloseDelim
				}
				return nil, err
			}
			// `chunk` is now holding the var name + closing delimiter.
			// If the var name contains invalid characters, return an error.
			if bytes.IndexAny(chunk, invalidEnvChars) != -1 ||
				bytes.Index(chunk, invalidEnvPrefix) != -1 {
				return nil, ErrInvalidChars
			}
			varName := string(chunk[:len(chunk)-1])
			varVal := os.Getenv(varName)
			bufOut.WriteString(varVal)
		} else {
			// Just a random '%', not an opening delimiter, write it out and
			// keep going.
			bufOut.WriteRune('%')
		}
	}
	return bufOut, nil
}
// Parse parses a file, based on lines. Note that arr may be modified even
// if an error occurred. arr.Add is called with a single line in it, with
// any comments and leading and trailing whitespace removed.
// Subsequent parsing is left up to arr.Add.
func (l LineIOParams) Parse(fn string, arr LineIOType) error {
	// Open the file
	ff, err := os.Open(fn)
	if err != nil {
		return err
	}
	defer ff.Close()

	scan := bufio.NewScanner(ff)
	scan.Split(bufio.ScanLines)

	var bb []byte
	var n int
	for scan.Scan() {
		// Trim out leading and trailing whitespace
		bb = scan.Bytes()
		bb = bytes.TrimSpace(bb)
		if n = bytes.IndexAny(bb, l.Comment); n != -1 {
			bb = bb[0:n]
		}
		if len(bb) > 0 {
			err = arr.Add(bb)
			if err != nil {
				return err
			}
		}
	}
	return scan.Err()
}
// validate validates the options.
func (opts *PUBLISHOptions) validate() error {
	// Check the QoS.
	if !mqtt.ValidQoS(opts.QoS) {
		return ErrInvalidQoS
	}

	// Check the length of the Topic Name.
	if len(opts.TopicName) > maxStringsLen {
		return ErrTopicNameExceedsMaxStringsLen
	}

	// Check if the Topic Name contains the wildcard characters.
	if bytes.IndexAny(opts.TopicName, wildcards) != -1 {
		return ErrTopicNameContainsWildcards
	}

	// Check the length of the Application Message.
	if len(opts.Message) > maxStringsLen {
		return ErrMessageExceedsMaxStringsLen
	}

	// End the validation if the QoS equals QoS 0.
	if opts.QoS == mqtt.QoS0 {
		return nil
	}

	// Check the Packet Identifier.
	if opts.PacketID == 0 {
		return ErrInvalidPacketID
	}

	return nil
}
func parsehdr(barr []byte, fn string) (int, error) {
	barr = bytes.TrimSpace(barr)

	// First character is a #
	if barr[0] != '#' {
		return 0, errors.New("Missing # to start header")
	}
	barr = barr[1:]

	// Second elt is the filename
	ndx1 := bytes.IndexAny(barr, ":")
	if ndx1 == -1 {
		return 0, errors.New("Malformed header, couldn't find filename")
	}
	if bytes.Compare(bytes.TrimSpace(barr[:ndx1]), []byte(fn)) != 0 {
		return 0, errors.New("Malformed header, unable to find filename in header")
	}

	// Find ranks
	barr = barr[ndx1+1:]
	ndx1 = bytes.Index(barr, []byte("rank"))
	if ndx1 == -1 {
		return 0, errors.New("Malformed header, could not find rank")
	}

	// Attempt to parse the rank
	rank64, err := strconv.ParseInt(string(bytes.TrimSpace(barr[:ndx1])), 10, 32)
	if err != nil {
		return 0, err
	}
	return int(rank64), nil
}
func (r *Reader) header(line []byte) (seqio.SequenceAppender, error) {
	s := r.t.Clone().(seqio.SequenceAppender)
	fieldMark := bytes.IndexAny(line, " \t")
	var err error
	if fieldMark < 0 {
		err = s.SetName(string(line[len(r.IDPrefix):]))
		return s, err
	} else {
		err = s.SetName(string(line[len(r.IDPrefix):fieldMark]))
		_err := s.SetDescription(string(line[fieldMark+1:]))
		if err != nil || _err != nil {
			switch {
			case err == _err:
				return s, err
			case err != nil && _err != nil:
				return s, fmt.Errorf("fasta: multiple errors: name: %s, desc:%s", err, _err)
			case err != nil:
				return s, err
			case _err != nil:
				return s, _err
			}
		}
	}

	return s, nil
}
func GetInfo(pid int) (info Info, err error) {
	info.Pid = pid

	var bs []byte
	bs, err = ioutil.ReadFile(fmt.Sprintf("/proc/%d/cmdline", pid))
	if err != nil {
		return
	}
	var comm = string(bs)
	if strings.HasSuffix(comm, "\x00") {
		comm = comm[:len(comm)-1]
	}
	info.Comm = strings.Replace(comm, "\x00", " ", -1)

	bs, err = ioutil.ReadFile(fmt.Sprintf("/proc/%d/smaps", pid))
	if err != nil {
		return
	}
	var total int64
	for _, line := range bytes.Split(bs, []byte("\n")) {
		if bytes.HasPrefix(line, []byte("Swap:")) {
			start := bytes.IndexAny(line, "0123456789")
			end := bytes.Index(line[start:], []byte(" "))
			size, err := strconv.ParseInt(string(line[start:start+end]), 10, 0)
			if err != nil {
				continue
			}
			total += size
		}
	}
	info.Size = total
	return
}
func (l *lexer) lex() {
	var err error
	defer func() {
		close(l.optchan)
		close(l.errchan)
	}()
	next := l.lexNextSection
	for next != nil {
		if l.buf.Buffered() >= SYSTEMD_LINE_MAX {
			// systemd truncates lines longer than LINE_MAX
			// https://bugs.freedesktop.org/show_bug.cgi?id=85308
			// Rather than allowing this to pass silently, let's
			// explicitly gate people from encountering this.
			line, err := l.buf.Peek(SYSTEMD_LINE_MAX)
			if err != nil {
				l.errchan <- err
				return
			}
			if bytes.IndexAny(line, SYSTEMD_NEWLINE) == -1 {
				l.errchan <- ErrLineTooLong
				return
			}
		}
		next, err = next()
		if err != nil {
			l.errchan <- err
			return
		}
	}
}
func ParseMatches(r io.Reader) ([]Match, error) {
	// Scan the supplied reader (the original scanned os.Stdin, ignoring r).
	s := bufio.NewScanner(r)
	s.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		i := bytes.IndexAny(data, "\n \t\r")
		if len(data) == 0 {
			return 0, nil, nil
		} else if i == -1 && !atEOF {
			return 0, nil, nil
		} else if i == -1 && atEOF {
			return len(data), bytes.TrimSpace(data), nil
		}
		token = bytes.TrimSpace(data[:i])
		if len(token) == 0 {
			token = nil
		}
		return i + 1, token, nil
	})

	matches := []Match{}
	for {
		if !s.Scan() {
			break
		}
		winner := s.Text()
		if !s.Scan() {
			return nil, errors.New("odd number of players causes opponentless match")
		}
		loser := s.Text()
		matches = append(matches, Match{winner, loser})
	}
	return matches, s.Err()
}
func censorPasswords(m *HttpMessage, msg []byte) {
	keywords := _Config.Passwords.Hide_keywords

	if m.IsRequest && m.ContentLength > 0 &&
		strings.Contains(m.ContentType, "urlencoded") {
		for _, keyword := range keywords {
			index := bytes.Index(msg[m.bodyOffset:], []byte(keyword))
			if index > 0 {
				start_index := m.bodyOffset + index + len(keyword)
				end_index := bytes.IndexAny(msg[m.bodyOffset+index+len(keyword):], "& \r\n")
				if end_index > 0 {
					end_index += m.bodyOffset + index
					if end_index > m.end {
						end_index = m.end
					}
				} else {
					end_index = m.end
				}

				if end_index-start_index < 120 {
					for i := start_index; i < end_index; i++ {
						msg[i] = byte('*')
					}
				}
			}
		}
	}
}
// tLineCmt is the context transition function for //comment states.
func tLineCmt(c context, s []byte) (context, int) {
	var lineTerminators string
	var endState state
	switch c.state {
	case stateJSLineCmt:
		lineTerminators, endState = "\n\r\u2028\u2029", stateJS
	case stateCSSLineCmt:
		lineTerminators, endState = "\n\f\r", stateCSS
		// Line comments are not part of any published CSS standard but
		// are supported by the 4 major browsers.
		// This defines line comments as
		//     LINECOMMENT ::= "//" [^\n\f\d]*
		// since http://www.w3.org/TR/css3-syntax/#SUBTOK-nl defines
		// newlines:
		//     nl ::= #xA | #xD #xA | #xD | #xC
	default:
		panic(c.state.String())
	}

	i := bytes.IndexAny(s, lineTerminators)
	if i == -1 {
		return c, len(s)
	}
	c.state = endState
	// Per section 7.4 of EcmaScript 5 : http://es5.github.com/#x7.4
	// "However, the LineTerminator at the end of the line is not
	// considered to be part of the single-line comment; it is
	// recognized separately by the lexical grammar and becomes part
	// of the stream of input elements for the syntactic grammar."
	return c, i
}
func indexAny(s []byte, chars string) {
	if i := bytes.IndexAny(s, chars); i == -1 {
		log.Printf("No unicode characters in %q appear in %s", chars, s)
	} else {
		log.Printf("A unicode character in %q appears at index %d in %s", chars, i, s)
	}
}
func (s *Statements) Scan() {
	go s.Parser.Parse()
	scanner := bufio.NewScanner(s.Parser)
	depth := 0
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if i := bytes.IndexAny(data, ";{}"); i >= 0 {
			if string(data[i]) == "{" {
				depth++
			} else if string(data[i]) == "}" {
				depth--
			}
			return i + 1, data[0:i], nil
		} else if atEOF {
			return len(data), nil, nil
		}
		return 0, nil, nil
	})
	for scanner.Scan() {
		stmt := string(bytes.Join(bytes.Fields(scanner.Bytes()), []byte{' '}))
		s.stmts <- &stmtMsg{stmt, depth}
	}
}
// tJS is the context transition function for the JS state.
func tJS(c context, s []byte) (context, int) {
	i := bytes.IndexAny(s, `"'/`)
	if i == -1 {
		// Entire input is non string, comment, regexp tokens.
		c.jsCtx = nextJSCtx(s, c.jsCtx)
		return c, len(s)
	}
	c.jsCtx = nextJSCtx(s[:i], c.jsCtx)
	switch s[i] {
	case '"':
		c.state, c.jsCtx = stateJSDqStr, jsCtxRegexp
	case '\'':
		c.state, c.jsCtx = stateJSSqStr, jsCtxRegexp
	case '/':
		switch {
		case i+1 < len(s) && s[i+1] == '/':
			c.state, i = stateJSLineCmt, i+1
		case i+1 < len(s) && s[i+1] == '*':
			c.state, i = stateJSBlockCmt, i+1
		case c.jsCtx == jsCtxRegexp:
			c.state = stateJSRegexp
		case c.jsCtx == jsCtxDivOp:
			c.jsCtx = jsCtxRegexp
		default:
			return context{
				state: stateError,
				err:   errorf(ErrSlashAmbig, 0, "'/' could start a division or regexp: %.32q", s[i:]),
			}, len(s)
		}
	default:
		panic("unreachable")
	}
	return c, i + 1
}
func main() {
	s := []byte("大家好大家早")
	index := bytes.IndexAny(s, "好早")
	if index >= 0 {
		fmt.Printf("%d: %s\n", index, string(s[index:]))
	}
}
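// Follow-up sketch (added for illustration, not from the original source):
// bytes.IndexAny returns a byte offset, not a rune index, so with multi-byte
// UTF-8 input the offset can exceed the number of preceding runes. Assumes
// "bytes", "fmt", and "unicode/utf8" are imported.
func demoByteVsRuneIndex() {
	s := []byte("大家好大家早")
	byteIndex := bytes.IndexAny(s, "好早")      // 6: the two preceding runes are 3 bytes each
	runeIndex := utf8.RuneCount(s[:byteIndex]) // 2
	fmt.Println(byteIndex, runeIndex)
}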
func scanLines(data []byte, atEOF bool) (advance int, token []byte, err error) {
	if i := bytes.IndexAny(data, "\r\n"); i >= 0 {
		if data[i] == '\r' {
			if i == len(data)-1 {
				if atEOF {
					// final line
					return len(data), data[:len(data)-1], nil
				}
				return 0, nil, nil // LF may follow, request more data
			}
			if data[i+1] == '\n' {
				return i + 2, data[:i], nil
			}
			return i + 1, data[:i], nil
		}
		// data[i] == '\n'
		return i + 1, data[:i], nil
	}
	if atEOF {
		// final line
		return len(data), data, nil
	}
	// request more data
	return 0, nil, nil
}
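// Usage sketch (added for illustration, not from the original source): assumes
// scanLines above is in scope and that "bufio", "fmt", and "strings" are
// imported. It splits input with mixed CRLF, lone CR, and lone LF line endings.
func demoScanLines() {
	sc := bufio.NewScanner(strings.NewReader("first\r\nsecond\rthird\nlast"))
	sc.Split(scanLines)
	for sc.Scan() {
		fmt.Printf("%q\n", sc.Text()) // "first", "second", "third", "last"
	}
	if err := sc.Err(); err != nil {
		fmt.Println("scan error:", err)
	}
}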
func cleanComment(in []byte) ([]byte, bool) {
	i := bytes.IndexAny(in, "#;")
	if i == -1 {
		return nil, false
	}
	return in[i:], true
}
func (cmd *UpdateUserProvidedService) Execute(c flags.FlagContext) {
	serviceInstance := cmd.serviceInstanceReq.GetServiceInstance()
	if !serviceInstance.IsUserProvided() {
		cmd.ui.Failed(T("Service Instance is not user provided"))
		return
	}

	drainUrl := c.String("l")
	credentials := strings.Trim(c.String("p"), `'"`)
	routeServiceUrl := c.String("r")

	credentialsMap := make(map[string]interface{})
	if c.IsSet("p") {
		jsonBytes, err := util.GetContentsFromFlagValue(credentials)
		if err != nil && strings.HasPrefix(credentials, "@") {
			cmd.ui.Failed(err.Error())
		}

		if bytes.IndexAny(jsonBytes, "[{") != -1 {
			err = json.Unmarshal(jsonBytes, &credentialsMap)
			if err != nil {
				cmd.ui.Failed(T("JSON is invalid: {{.ErrorDescription}}",
					map[string]interface{}{"ErrorDescription": err.Error()}))
			}
		} else {
			for _, param := range strings.Split(credentials, ",") {
				param = strings.Trim(param, " ")
				credentialsMap[param] = cmd.ui.Ask("%s", param)
			}
		}
	}

	cmd.ui.Say(T("Updating user provided service {{.ServiceName}} in org {{.OrgName}} / space {{.SpaceName}} as {{.CurrentUser}}...",
		map[string]interface{}{
			"ServiceName": terminal.EntityNameColor(serviceInstance.Name),
			"OrgName":     terminal.EntityNameColor(cmd.config.OrganizationFields().Name),
			"SpaceName":   terminal.EntityNameColor(cmd.config.SpaceFields().Name),
			"CurrentUser": terminal.EntityNameColor(cmd.config.Username()),
		}))

	serviceInstance.Params = credentialsMap
	serviceInstance.SysLogDrainUrl = drainUrl
	serviceInstance.RouteServiceUrl = routeServiceUrl

	apiErr := cmd.userProvidedServiceInstanceRepo.Update(serviceInstance.ServiceInstanceFields)
	if apiErr != nil {
		cmd.ui.Failed(apiErr.Error())
		return
	}

	cmd.ui.Ok()
	cmd.ui.Say(T("TIP: Use '{{.CFRestageCommand}}' for any bound apps to ensure your env variable changes take effect",
		map[string]interface{}{
			"CFRestageCommand": terminal.CommandColor(cf.Name() + " restage"),
		}))

	if routeServiceUrl == "" && credentials == "" && drainUrl == "" {
		cmd.ui.Warn(T("No flags specified. No changes were made."))
	}
}
func readLine(data *[]byte, pattern *regexp.Regexp) ([]string, error) {
	if len(*data) == 0 {
		return nil, io.EOF
	}
	indexes := pattern.FindSubmatchIndex(*data)
	if indexes == nil {
		log.Println(len(*data), string((*data)[:200]))
		return nil, ErrLineNotFound
	} else if indexes[0] != 0 {
		log.Println(indexes)
		// fmt.Println(string((*data)[:indexes[len(indexes)-1]]))
		nl := bytes.IndexAny(*data, "\r\n")
		if nl == -1 && nl+2 < len(*data) {
			return nil, ErrGarbageData
		}
		*data = (*data)[nl+1:]
		return readLine(data, pattern)
	}
	parts := make([]string, len(indexes)/2-1)
	for i := 2; i < len(indexes); i += 2 {
		parts[i/2-1] = string((*data)[indexes[i]:indexes[i+1]])
	}
	*data = (*data)[indexes[1]:]
	return parts, nil
}
// htmlEscape makes sure input is HTML clean, if necessary.
func htmlEscape(input []byte) []byte {
	if !*html || bytes.IndexAny(input, `&"<>`) < 0 {
		return input
	}
	var b bytes.Buffer
	template.HTMLEscape(&b, input)
	return b.Bytes()
}
/*
	Accept a buffer of json, read (we assume) from openstack, and validate it
	for bad things: leading '<' before '{' is found (suggesting html), null
	bytes etc.
*/
func scanj4gook(buf []byte) (err error) {
	jloc := bytes.IndexAny(buf, "{")
	if jloc < 0 {
		dump_array("scan4gook", 30, buf)
		return fmt.Errorf("invalid json: no opening curly brace found")
	}

	hloc := bytes.IndexAny(buf, "<>")
	if hloc > 0 && hloc < jloc {
		// html tag seems to be found before possible json
		dump_array("scan4gook", 30, buf)
		return fmt.Errorf("invalid json: HTML tag, or similar, found outside of json")
	}

	return nil
}
func getType(line []byte) (typ, split int) {
	if len(line) == 0 {
		return
	}

	if line[0] == '-' {
		typ = typSequence
		split = 1
		return
	}

	typ = typScalar

	if line[0] == ' ' || line[0] == '"' {
		return
	}

	// the first character is real
	// need to iterate past the first word
	// things like "foo:" and "foo :" are mappings
	// everything else is a scalar

	idx := bytes.IndexAny(line, " \":")
	if idx < 0 {
		return
	}

	if line[idx] == '"' {
		return
	}

	if line[idx] == ':' {
		typ = typMapping
		split = idx
	} else if line[idx] == ' ' {
		// we have a space
		// need to see if its all spaces until a :
		for i := idx; i < len(line); i++ {
			switch ch := line[i]; ch {
			case ' ':
				continue
			case ':':
				typ = typMapping
				split = i
				break
			default:
				break
			}
		}
	}

	if typ == typMapping && split+1 < len(line) && line[split+1] != ' ' {
		typ = typScalar
		split = 0
	}

	return
}
// ParseKnownHosts parses an entry in the format of the known_hosts file.
//
// The known_hosts format is documented in the sshd(8) manual page. This
// function will parse a single entry from in. On successful return, marker
// will contain the optional marker value (i.e. "cert-authority" or "revoked")
// or else be empty, hosts will contain the hosts that this entry matches,
// pubKey will contain the public key and comment will contain any trailing
// comment at the end of the line. See the sshd(8) manual page for the various
// forms that a host string can take.
//
// The unparsed remainder of the input will be returned in rest. This function
// can be called repeatedly to parse multiple entries.
//
// If no entries were found in the input then err will be io.EOF. Otherwise a
// non-nil err value indicates a parse error.
func ParseKnownHosts(in []byte) (marker string, hosts []string, pubKey PublicKey, comment string, rest []byte, err error) {
	for len(in) > 0 {
		end := bytes.IndexByte(in, '\n')
		if end != -1 {
			rest = in[end+1:]
			in = in[:end]
		} else {
			rest = nil
		}

		end = bytes.IndexByte(in, '\r')
		if end != -1 {
			in = in[:end]
		}

		in = bytes.TrimSpace(in)
		if len(in) == 0 || in[0] == '#' {
			in = rest
			continue
		}

		i := bytes.IndexAny(in, " \t")
		if i == -1 {
			in = rest
			continue
		}

		// Strip out the beginning of the known_host key.
		// This is either an optional marker or a (set of) hostname(s).
		keyFields := bytes.Fields(in)
		if len(keyFields) < 3 || len(keyFields) > 5 {
			return "", nil, nil, "", nil, errors.New("ssh: invalid entry in known_hosts data")
		}

		// keyFields[0] is either "@cert-authority", "@revoked" or a comma separated
		// list of hosts
		marker := ""
		if keyFields[0][0] == '@' {
			marker = string(keyFields[0][1:])
			keyFields = keyFields[1:]
		}

		hosts := string(keyFields[0])
		// keyFields[1] contains the key type (e.g. "ssh-rsa").
		// However, that information is duplicated inside the
		// base64-encoded key and so is ignored here.

		key := bytes.Join(keyFields[2:], []byte(" "))
		if pubKey, comment, err = parseAuthorizedKey(key); err != nil {
			return "", nil, nil, "", nil, err
		}

		return marker, strings.Split(hosts, ","), pubKey, comment, rest, nil
	}

	return "", nil, nil, "", nil, io.EOF
}
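// Usage sketch (added for illustration, not from the original source): assuming
// this ParseKnownHosts is the one exported by golang.org/x/crypto/ssh, a caller
// can iterate over a known_hosts file entry by entry until io.EOF. Assumes
// "fmt", "io", "io/ioutil", "log", and "golang.org/x/crypto/ssh" are imported;
// the file path is purely illustrative.
func demoParseKnownHosts() {
	rest, err := ioutil.ReadFile("/etc/ssh/ssh_known_hosts")
	if err != nil {
		log.Fatal(err)
	}
	for {
		marker, hosts, pubKey, comment, remaining, err := ssh.ParseKnownHosts(rest)
		if err == io.EOF {
			break // no more entries
		}
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(marker, hosts, pubKey.Type(), comment)
		rest = remaining
	}
}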
func FindBytes(s, p []byte, plain bool) (bool, int, int, [][]byte) {
	if plain || bytes.IndexAny(p, SPECIALS) == -1 {
		if index := lmemfind(s, p); index != -1 {
			return true, index, index + len(p), nil
		} else {
			return false, -1, -1, nil
		}
	}

	// Perform a find and capture, looping to potentially find a match later in
	// the string
	var anchor bool = false
	if p[0] == '^' {
		p = p[1:]
		anchor = true
	}

	ms := new(matchState)
	ms.capture = make([]capture, LUA_MAXCAPTURES, LUA_MAXCAPTURES)

	var init int = 0
	for {
		res := match(ms, s[init:], p)
		if res != nil {
			//debug(fmt.Sprintf("match res: %q", res))

			// Determine the start and end indices of the match
			var start int = init
			var end int = len(s) - len(res)

			// Fetch the captures
			captures := new([LUA_MAXCAPTURES][]byte)
			var i int
			var nlevels int
			if ms.level == 0 && len(s) > 0 {
				nlevels = 1
			} else {
				nlevels = ms.level
			}
			for i = 0; i < nlevels; i++ {
				captures[i] = get_onecapture(ms, i, s, res)
			}

			return true, start, end, captures[0:nlevels]
		} else if len(s)-init == 0 || anchor {
			break
		}
		init = init + 1
	}

	// No match found
	return false, -1, -1, nil
}
func writeSuffixLines(w io.Writer, prefix, suffix, str string) error {
	count, err := startLine(w, prefix, suffix, false)
	if err != nil {
		return err
	}
	sl := len(suffix)
	bs := []byte(str)
	t := len(bs)
	nl := true
	ii := 0
	for ii < t {
		b := bs[ii]
		if nl {
			if b == ' ' || b == '\t' {
				ii++
				continue
			}
			nl = false
		}
		slice := bs[ii:]
		next := bytes.IndexAny(slice, " \n")
		if next == 0 {
			ii++
			continue
		}
		if next == -1 {
			next = len(slice) - 1
		}
		if count+sl+next >= maxLineLength {
			count, err = startLine(w, prefix, suffix, true)
			if err != nil {
				return err
			}
			nl = true
		}
		c, err := w.Write(slice[:next+1])
		if err != nil {
			return err
		}
		count += c
		ii += c
		if slice[next] == '\n' {
			count, err = startLine(w, prefix, suffix, false)
			if err != nil {
				return err
			}
			nl = true
		}
	}
	if suffix != "" {
		if _, err := io.WriteString(w, suffix); err != nil {
			return err
		}
	}
	_, err = w.Write(newLine)
	return err
}