func ExampleCount() {
	fmt.Println(bytes.Count([]byte("cheese"), []byte("e")))
	fmt.Println(bytes.Count([]byte("five"), []byte(""))) // before & after each rune
	// Output:
	// 3
	// 5
}
func solve(input io.Reader) {
	scanner := bufio.NewScanner(input)
	scanner.Scan()
	A := []byte(scanner.Text())
	scanner.Scan()
	B := []byte(scanner.Text())
	if bytes.Count(A, []byte("1")) != bytes.Count(B, []byte("1")) {
		fmt.Println("-1")
		return
	}
	swaps := 0
	for i := 0; i < len(A); i++ {
		if A[i] != B[i] {
			for j := i + 1; j < len(A); j++ {
				if A[j] != B[j] && B[i] != B[j] {
					swaps++
					B[i], B[j] = B[j], B[i]
					break
				}
			}
		}
	}
	fmt.Println(swaps)
}
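// A possible invocation of solve, shown only as a sketch: it assumes the two
// binary strings arrive on separate lines, with strings.NewReader standing in
// for os.Stdin.
func main() {
	// A=1100, B=1010: positions 1 and 2 differ, one swap in B fixes both, so this prints 1.
	solve(strings.NewReader("1100\n1010\n"))
}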
func init() {
	Unix.DecodeLine = func(p []byte) (depth int, name []byte, err error) {
		var n int // TODO(rjeczalik): Count up to first non-box character.
		depth = (bytes.Count(p, boxSpace) + bytes.Count(p, boxHardSpace) + bytes.Count(p, boxVertical)) / 4
		if n = bytes.LastIndex(p, boxHorizontal); n == -1 {
			err = errors.New("invalid syntax: " + string(p))
			return
		}
		name = p[n:]
		if n = bytes.Index(name, boxSpace); n == -1 {
			err = errors.New("invalid syntax: " + string(p))
			return
		}
		name = name[n+1:]
		return
	}
	Unix.EncodeState = func(st EncodingState) []byte {
		return box[st]
	}
	Tab.DecodeLine = func(p []byte) (depth int, name []byte, err error) {
		depth = bytes.Count(p, []byte{'\t'})
		name = p[depth:]
		return
	}
	Tab.EncodeState = func(st EncodingState) []byte {
		return []byte{'\t'}
	}
}
func TestList(t *testing.T) {
	// use buffer instead of Stdout so we can inspect the results
	var b bytes.Buffer
	setOutput(&b)
	defer revertOutput()

	// use custom name so test won't interfere with a real _gen.go
	setCustomName("_gen_test.go")
	defer revertCustomName()

	// remove existing files, start fresh
	os.Remove(customName)

	// standard
	if err := runMain([]string{"gen", "list"}); err != nil {
		t.Error(err)
	}

	// one line for title, 2 standard typewriters
	if lines := bytes.Count(b.Bytes(), []byte("\n")); lines != 3 {
		t.Errorf("standard list should output 3 lines, got %v", lines)
	}

	// clear out the output buffer
	b.Reset()

	// create a custom typewriter import file
	w, err := os.Create(customName)
	if err != nil {
		t.Error(err)
	}
	defer os.Remove(customName)

	p := pkg{
		Name: "main",
		Imports: []string{
			// non-standard typewriter
			`_ "github.com/clipperhouse/gen/typewriters/foowriter"`,
			`_ "github.com/clipperhouse/gen/typewriters/genwriter"`,
			`_ "github.com/clipperhouse/gen/typewriters/container"`,
		},
	}

	if err := tmpl.Execute(w, p); err != nil {
		t.Error(err)
	}

	// custom file now exists
	if err := runMain([]string{"gen", "list"}); err != nil {
		t.Error(err)
	}

	// one line for title, 3 custom typewriters
	if lines := bytes.Count(b.Bytes(), []byte("\n")); lines != 4 {
		t.Errorf("custom list should output 4 lines, got %v", lines)
	}
}
func slicecount() {
	s := []byte("banana")
	sep1 := []byte("ban")
	sep2 := []byte("na")
	sep3 := []byte("a")
	fmt.Println(bytes.Count(s, sep1))
	fmt.Println(bytes.Count(s, sep2))
	fmt.Println(bytes.Count(s, sep3))
}
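// A minimal sketch of what slicecount prints, written as a runnable example.
// It only restates documented bytes.Count behaviour: matches are counted
// non-overlapping, left to right.
func ExampleCount_banana() {
	s := []byte("banana")
	fmt.Println(bytes.Count(s, []byte("ban"))) // 1
	fmt.Println(bytes.Count(s, []byte("na")))  // 2
	fmt.Println(bytes.Count(s, []byte("a")))   // 3
	// Output:
	// 1
	// 2
	// 3
}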
func Check(path string) {
	File, err := os.Open(path)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer File.Close()
	buf := bufio.NewReader(File)
	num := 0      // current line number
	errornum := 0 // number of malformed lines
	s := []byte("{{")
	e := []byte("}}")
	for {
		line, _, err := buf.ReadLine()
		if err != nil {
			if err.Error() == "EOF" {
				break
			}
			return
		}
		num++
		if bytes.Count(line, s) != bytes.Count(line, e) {
			fmt.Printf("Line%d: %s\n", num, string(line))
			errornum++
			continue
		}
		if bytes.Count(line, []byte("{{.}}")) != 0 {
			fmt.Printf("Line%d: %s\n", num, string(line))
			errornum++
			continue
		}
		for i := 0; i < bytes.Count(line, s); i++ {
			first := bytes.Index(line, s)
			last := bytes.Index(line, e)
			if first == -1 || last == -1 {
				continue
			}
			if bytes.Index(line[first:last], []byte("{{.")) != 0 {
				fmt.Printf("Error Line %d: %s\n", num, string(line))
				errornum++
				break
			}
			line = line[last:]
		}
	}
	if errornum != 0 {
		fmt.Printf("Error num %d From %s\n", errornum, path)
		return
	}
	return
}
func (d *datagram) validate() (err error) {
	switch {
	case d.offset < 2:
		err = errors.New("Datagram has no opcode")
	case d.opcode() > 6:
		err = errors.New("Invalid opcode")
	default:
		switch d.opcode() {
		case opCodeRRQ, opCodeWRQ:
			switch {
			case len(d.filename()) < 1:
				err = errors.New("No filename provided")
			case d.buf[d.offset-1] != 0x0: // End with NULL
				err = fmt.Errorf("Corrupt %v datagram", d.opcode())
			case bytes.Count(d.buf[2:d.offset], []byte{0x0})%2 != 0: // Number of NULL chars is not even
				err = fmt.Errorf("Corrupt %v datagram", d.opcode())
			default:
				switch d.mode() {
				case ModeNetASCII, ModeOctet:
					break
				case modeMail:
					err = errors.New("MAIL transfer mode is unsupported")
				default:
					err = errors.New("Invalid transfer mode")
				}
			}
		case opCodeACK, opCodeDATA:
			if d.offset < 4 {
				err = errors.New("Corrupt block number")
			}
		case opCodeERROR:
			switch {
			case d.offset < 5:
				err = errors.New("Corrupt ERROR datagram")
			case d.buf[d.offset-1] != 0x0:
				err = errors.New("Corrupt ERROR datagram")
			case bytes.Count(d.buf[4:d.offset], []byte{0x0}) > 1:
				err = errors.New("Corrupt ERROR datagram")
			}
		case opCodeOACK:
			switch {
			case d.buf[d.offset-1] != 0x0:
				err = errors.New("Corrupt OACK datagram")
			case bytes.Count(d.buf[2:d.offset], []byte{0x0})%2 != 0: // Number of NULL chars is not even
				err = errors.New("Corrupt OACK datagram")
			}
		}
	}
	return
}
func numTCP() (ntcp, nopen, nclose int, err error) {
	lsof, err := exec.Command("lsof", "-n", "-p", strconv.Itoa(os.Getpid())).Output()
	if err != nil {
		return 0, 0, 0, err
	}
	ntcp += bytes.Count(lsof, []byte("TCP"))
	for _, state := range []string{"LISTEN", "SYN_SENT", "SYN_RECEIVED", "ESTABLISHED"} {
		nopen += bytes.Count(lsof, []byte(state))
	}
	for _, state := range []string{"CLOSED", "CLOSE_WAIT", "LAST_ACK", "FIN_WAIT_1", "FIN_WAIT_2", "CLOSING", "TIME_WAIT"} {
		nclose += bytes.Count(lsof, []byte(state))
	}
	return ntcp, nopen, nclose, nil
}
func shortText(t []byte) []byte {
	if t == nil {
		return nil
	}

	// Cut signature.
	i := bytes.LastIndex(t, sigDash)
	j := bytes.LastIndex(t, quote)
	if i > j && bytes.Count(t[i+1:], nl) <= 10 {
		t = t[:i+1]
	}

	// Cut trailing quoted text.
	for {
		rest, last := lastLine(t)
		trim := bytes.TrimSpace(last)
		if len(rest) < len(t) && (len(trim) == 0 || trim[0] == '>') {
			t = rest
			continue
		}
		break
	}

	// Cut 'On foo.*wrote:' line.
	rest, last := lastLine(t)
	if onwrote.Match(last) {
		t = rest
	}

	// Cut trailing blank lines.
	for {
		rest, last := lastLine(t)
		trim := bytes.TrimSpace(last)
		if len(rest) < len(t) && len(trim) == 0 {
			t = rest
			continue
		}
		break
	}

	// Cut signature again.
	i = bytes.LastIndex(t, sigDash)
	j = bytes.LastIndex(t, quote)
	if i > j && bytes.Count(t[i+1:], nl) <= 10 {
		t = t[:i+1]
	}

	return t
}
func Write(ch chan []byte, k, v string) error {
	data := []byte(k + "=" + v + "\n")
	if bytes.Count(data, []byte("=")) > 1 {
		return errors.New("Cannot have '=' in stream key or value")
	}
	if bytes.Count(data, []byte("\n")) > 1 {
		return errors.New("Cannot have newline in stream key or value")
	}
	select {
	case ch <- data:
	default:
		return ErrOverflow
	}
	return nil
}
func loadConfig() *Config {
	var config *Config
	file, err := ioutil.ReadFile("mipples.json")
	if err != nil {
		panic(err)
	}
	if err := json.Unmarshal(file, &config); err != nil {
		syntaxErr, ok := err.(*json.SyntaxError)
		if !ok {
			log.Fatalf("Cannot read config: %s", err)
		}

		// We have a syntax error. Extract out the line number and friends.
		// https://groups.google.com/forum/#!topic/golang-nuts/fizimmXtVfc
		newline := []byte{'\x0a'}

		// Calculate the start/end position of the line where the error is
		start := bytes.LastIndex(file[:syntaxErr.Offset], newline) + 1
		end := len(file)
		if idx := bytes.Index(file[start:], newline); idx >= 0 {
			end = start + idx
		}

		// Count the line number we're on plus the offset in the line
		line := bytes.Count(file[:start], newline) + 1
		pos := int(syntaxErr.Offset) - start - 1

		log.Fatalf("Cannot read config. Error in line %d, char %d: %s\n%s",
			line, pos, syntaxErr, file[start:end])
	}
	return config
}
func TestMessagePlainPGPSingleKey(t *testing.T) {
	const caddyFile = `mailout {
		to [email protected]
		cc "*****@*****.**"
		subject "Encrypted contact 🔑"
		body testdata/mail_plainTextMessage.txt
		[email protected] testdata/B06469EE_nopw.pub.asc
	}`

	buf := new(bytes.Buffer)
	srv := testMessageServer(t, caddyFile, buf, 2)
	defer srv.Close()

	data := make(url.Values)
	data.Set("firstname", "Ken")
	data.Set("lastname", "Thompson")
	data.Set("email", "*****@*****.**")
	data.Set("name", "Ken Thompson")

	testDoPost(t, srv.URL, data)

	assert.Len(t, buf.String(), 2710) // whenever you change the template, change also here
	assert.Contains(t, buf.String(), "Subject: =?UTF-8?q?Encrypted_contact_=F0=9F=94=91?=")
	assert.Contains(t, buf.String(), "Cc: [email protected]")
	assert.Exactly(t, 1, bytes.Count(buf.Bytes(), maillog.MultiMessageSeparator))
	assert.Contains(t, buf.String(), `This shows the content of a text template.`)
	//t.Log(buf.String())
}
func (p *parser) processDefine(line []byte) {
	line = concatline(line)
	if glog.V(1) {
		glog.Infof("concatline:%q", line)
	}
	if !p.isEndef(line) {
		if p.inDef != nil {
			p.inDef = append(p.inDef, '\n')
		}
		p.inDef = append(p.inDef, line...)
		if p.inDef == nil {
			p.inDef = []byte{}
		}
		return
	}
	glog.V(1).Infof("multilineAssign %q %q", p.defineVar, p.inDef)
	aast, err := newAssignAST(p, p.defineVar, p.inDef, "=")
	if err != nil {
		p.err = p.srcpos().errorf("assign error %q=%q: %v", p.defineVar, p.inDef, err)
		return
	}
	aast.srcpos = p.srcpos()
	aast.srcpos.lineno -= bytes.Count(p.inDef, []byte{'\n'})
	p.addStatement(aast)
	p.defineVar = nil
	p.inDef = nil
	return
}
func parseCover(fn string) []*SourceFile {
	profs, err := parseProfiles(fn)
	if err != nil {
		log.Fatalf("Error parsing coverage: %v", err)
	}

	var rv []*SourceFile
	for _, prof := range profs {
		path, err := findFile(prof.FileName)
		if err != nil {
			log.Fatalf("Can't find %v", err)
		}
		fb, err := ioutil.ReadFile(path)
		if err != nil {
			log.Fatalf("Error reading %v: %v", path, err)
		}
		sf := &SourceFile{
			Name:     prof.FileName,
			Source:   string(fb),
			Coverage: make([]interface{}, 1+bytes.Count(fb, []byte{'\n'})),
		}
		for _, block := range prof.Blocks {
			for i := block.StartLine; i <= block.EndLine; i++ {
				sf.Coverage[i-1] = block.Count
			}
		}
		rv = append(rv, sf)
	}

	return rv
}
func TestTrie_compact(t *testing.T) {
	trie := NewTrie()

	trie.Insert(Prefix("a"), 0)
	trie.Insert(Prefix("ab"), 0)
	trie.Insert(Prefix("abc"), 0)
	trie.Insert(Prefix("abcd"), 0)
	trie.Insert(Prefix("abcde"), 0)
	trie.Insert(Prefix("abcdef"), 0)
	trie.Insert(Prefix("abcdefg"), 0)
	trie.Insert(Prefix("abcdefgi"), 0)
	trie.Insert(Prefix("abcdefgij"), 0)
	trie.Insert(Prefix("abcdefgijk"), 0)

	trie.Delete(Prefix("abcdef"))
	trie.Delete(Prefix("abcde"))
	trie.Delete(Prefix("abcdefg"))
	trie.Delete(Prefix("a"))
	trie.Delete(Prefix("abc"))
	trie.Delete(Prefix("ab"))

	trie.Visit(func(prefix Prefix, item Item) error {
		// 97 ~~ 'a'
		for ch := byte(97); ch <= 107; ch++ {
			if c := bytes.Count(prefix, []byte{ch}); c > 1 {
				t.Errorf("%q appeared in %q %v times", ch, prefix, c)
			}
		}
		return nil
	})
}
// InitialValidationB is like InitialValidation but for byte array inputs.
func InitialValidationB(metric_id []byte, version metricVersion) error {
	if version == Legacy {
		if bytes.Contains(metric_id, doubleDot) {
			return fmt.Errorf("metric '%s' has an empty node", metric_id)
		}
		return ValidateSensibleCharsB(metric_id)
	}
	if version == M20 {
		if bytes.Contains(metric_id, m20Is) {
			return fmt.Errorf("metric '%s' has both = and _is_", metric_id)
		}
		if !bytes.HasPrefix(metric_id, m20UnitPre) && !bytes.Contains(metric_id, m20UnitMid) {
			return fmt.Errorf("metric '%s' has no unit tag", metric_id)
		}
		if !bytes.HasPrefix(metric_id, m20TTPre) && !bytes.Contains(metric_id, m20TTMid) {
			return fmt.Errorf("metric '%s' has no target_type tag", metric_id)
		}
	} else { // version == M20NoEquals
		if bytes.Contains(metric_id, m20NEIS) {
			return fmt.Errorf("metric '%s' has both = and _is_", metric_id)
		}
		if !bytes.HasPrefix(metric_id, m20NEUnitPre) && !bytes.Contains(metric_id, m20NEUnitMid) {
			return fmt.Errorf("metric '%s' has no unit tag", metric_id)
		}
		if !bytes.HasPrefix(metric_id, m20NETTPre) && !bytes.Contains(metric_id, m20NETTMid) {
			return fmt.Errorf("metric '%s' has no target_type tag", metric_id)
		}
	}
	if bytes.Count(metric_id, dot) < 2 {
		return fmt.Errorf("metric '%s': must have at least one tag_k/tag_v pair beyond unit and target_type", metric_id)
	}
	return nil
}
func grep(re *Regexp, b []byte) []int {
	var m []int
	lineno := 1
	for {
		i := re.Match(b, true, true)
		if i < 0 {
			break
		}
		start := bytes.LastIndex(b[:i], nl) + 1
		end := i + 1
		if end > len(b) {
			end = len(b)
		}
		lineno += bytes.Count(b[:start], nl)
		m = append(m, lineno)
		if start < end && b[end-1] == '\n' {
			lineno++
		}
		b = b[end:]
		if len(b) == 0 {
			break
		}
	}
	return m
}
func (gas *GameServer) acceptConn(c net.Conn) {
	fmt.Println("acceptConn")
	buf := make([]byte, common.BUF_SIZE)
	var data bytes.Buffer
	for {
		n, _ := c.Read(buf)
		if n == 0 {
			fmt.Println("close by peer")
			break
		}
		data.Write(buf[:n])
		cn := bytes.Count(data.Bytes(), []byte{common.DELIMITER})
		for ; cn > 0; cn-- {
			jn, err := data.ReadString(common.DELIMITER)
			fmt.Println(time.Now().String()[:19], jn)
			if err != nil {
				fmt.Println("err", err)
				continue
			}
			var unknow interface{}
			err = json.Unmarshal([]byte(jn), &unknow)
			if err != nil {
				fmt.Println("Unmarshal error")
				continue
			}
			switch unknow.(type) {
			case map[string]interface{}: //?
				gas.dispatchOp(unknow, c)
			}
		}
	}
}
// need access to the original query contents in order to print it out properly,
// unfortunately.
func formatPgErr(contents *[]byte, pgerr *pq.Error) string {
	pos, _ := strconv.Atoi(pgerr.Position)
	lineNo := bytes.Count((*contents)[:pos], []byte("\n")) + 1
	columnNo := pos - bytes.LastIndex((*contents)[:pos], []byte("\n")) - 1

	return fmt.Sprint("PGERROR: line ", lineNo, " pos ", columnNo, ": ", pgerr.Message, ". ", pgerr.Detail)
}
// TestTemplateBytes tests that TemplateBytes works.
func TestTemplateBytes(t *testing.T) {
	template := NewTemplateBytes([]byte("<html></html>"))
	content := template.Content()
	if bytes.Count(content, []byte("<html>")) != 1 {
		t.Error("Cannot read the template")
	}
}
func (w *fileLogWriter) lines() (int, error) {
	fd, err := os.Open(w.Filename)
	if err != nil {
		return 0, err
	}
	defer fd.Close()

	buf := make([]byte, 32768) // 32k
	count := 0
	lineSep := []byte{'\n'}

	for {
		c, err := fd.Read(buf)
		if err != nil && err != io.EOF {
			return count, err
		}
		count += bytes.Count(buf[:c], lineSep)
		if err == io.EOF {
			break
		}
	}

	return count, nil
}
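// A generalization of the chunked counting in lines() to any io.Reader; this
// is only a sketch, not part of the original logger, but it uses the same
// idea: read fixed-size chunks and let bytes.Count tally the newlines.
func countLines(r io.Reader) (int, error) {
	buf := make([]byte, 32*1024)
	count := 0
	for {
		n, err := r.Read(buf)
		count += bytes.Count(buf[:n], []byte{'\n'})
		if err == io.EOF {
			return count, nil
		}
		if err != nil {
			return count, err
		}
	}
}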
// Parse a *json.SyntaxError into a pretty error message
func (c *Config) parseSyntaxError(js []byte, err error) error {
	json_err, ok := err.(*json.SyntaxError)
	if !ok {
		return err
	}

	start := bytes.LastIndex(js[:json_err.Offset], []byte("\n")) + 1
	end := bytes.Index(js[start:], []byte("\n"))
	if end >= 0 {
		end += start
	} else {
		end = len(js)
	}

	line, pos := bytes.Count(js[:start], []byte("\n")), int(json_err.Offset)-start-1

	var posStr string
	if pos > 0 {
		posStr = strings.Repeat(" ", pos)
	} else {
		posStr = ""
	}

	return fmt.Errorf("%s on line %d\n%s\n%s^", err, line, js[start:end], posStr)
}
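// A minimal, standard-library-only sketch of the offset-to-position arithmetic
// shared by loadConfig and parseSyntaxError above: find the start of the line
// containing the byte offset, count the newlines before it for the line
// number, and take the remaining distance as the column. The helper name is
// ours, not from either original.
func lineAndColumn(data []byte, offset int64) (line, col int) {
	start := bytes.LastIndex(data[:offset], []byte("\n")) + 1
	line = bytes.Count(data[:start], []byte("\n")) + 1
	col = int(offset) - start
	return line, col
}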
func countOpenFiles(t *testing.T) int {
	out, err := exec.Command("/bin/sh", "-c", fmt.Sprintf("lsof -p %v", os.Getpid())).Output()
	if err != nil {
		t.Fatal(err)
	}
	return bytes.Count(out, []byte("\n"))
}
// parses:
//   "<size_u32> <big-blobref> <big-offset>"
func parseMetaRow(v []byte) (m meta, err error) {
	row := v
	sp := bytes.IndexByte(v, ' ')
	if sp < 1 || sp == len(v)-1 {
		return meta{}, fmt.Errorf("invalid metarow %q", v)
	}
	m.exists = true
	size, err := strutil.ParseUintBytes(v[:sp], 10, 32)
	if err != nil {
		return meta{}, fmt.Errorf("invalid metarow size %q", v)
	}
	m.size = uint32(size)
	v = v[sp+1:]

	// remains: "<big-blobref> <big-offset>"
	if bytes.Count(v, singleSpace) != 1 {
		return meta{}, fmt.Errorf("invalid metarow %q: wrong number of spaces", row)
	}
	sp = bytes.IndexByte(v, ' ')
	largeRef, ok := blob.ParseBytes(v[:sp])
	if !ok {
		return meta{}, fmt.Errorf("invalid metarow %q: bad blobref %q", row, v[:sp])
	}
	m.largeRef = largeRef
	off, err := strutil.ParseUintBytes(v[sp+1:], 10, 32)
	if err != nil {
		return meta{}, fmt.Errorf("invalid metarow %q: bad offset: %v", row, err)
	}
	m.largeOff = uint32(off)
	return m, nil
}
/*
 * Get json data from the http page data.
 */
func GetJsonFromHttp(httpData []byte) (map[string]interface{}, error) {
	// Find out if this page is age-restricted
	if bytes.Index(httpData, []byte("og:restrictions:age")) != -1 {
		return nil, errors.New("this page is age-restricted")
	}

	// Find beginning of json data
	jsonBeg := "ytplayer.config = {"
	beg := bytes.Index(httpData, []byte(jsonBeg))
	if beg == -1 { // pattern not found
		return nil, PatternNotFoundError{_pattern: jsonBeg}
	}
	beg += len(jsonBeg) // len(jsonBeg) returns the number of bytes in jsonBeg

	// Find offset of json data
	unmatchedBrackets := 1
	offset := 0
	for unmatchedBrackets > 0 {
		nextRight := bytes.Index(httpData[beg+offset:], []byte("}"))
		if nextRight == -1 {
			return nil, errors.New("unmatched brackets")
		}
		unmatchedBrackets -= 1
		unmatchedBrackets += bytes.Count(httpData[beg+offset:beg+offset+nextRight], []byte("{"))
		offset += nextRight + 1
	}

	// Load json data
	var f interface{}
	err := json.Unmarshal(httpData[beg-1:beg+offset], &f)
	if err != nil {
		return nil, err
	}
	return f.(map[string]interface{}), nil
}
func TestWideChar(t *testing.T) {
	input := []byte(`タイトル
==
サブタイトル
---
aaa/あああ
----------
`)
	expected := []byte(`タイトル
========
サブタイトル
------------
aaa/あああ
----------
`)

	output, err := markdown.Process("", input, nil)
	if err != nil {
		log.Fatalln(err)
	}

	diff, err := diff(expected, output)
	if err != nil {
		log.Fatalln(err)
	}

	if len(diff) != 0 {
		t.Errorf("Difference of %d lines:\n%s", bytes.Count(diff, []byte("\n")), string(diff))
	}
}
func getFileSegment(file string, start, end uint32, header bool) string {
	f, err := ioutil.ReadFile(file)
	if err != nil {
		return ""
	}
	if header {
		startLine := bytes.Count(f[:start], []byte{'\n'}) + 1

		// Roll 'start' back and 'end' forward to the nearest
		// newline.
		for ; start-1 > 0 && f[start-1] != '\n'; start-- {
		}
		for ; end < uint32(len(f)) && f[end] != '\n'; end++ {
		}

		var out []string
		onLine := startLine
		for _, line := range bytes.Split(f[start:end], []byte{'\n'}) {
			var marker string
			if startLine == onLine {
				marker = ":"
			} else {
				marker = "-"
			}
			out = append(out, fmt.Sprintf("%s:%d%s%s",
				file,
				onLine,
				marker,
				string(line),
			))
			onLine++
		}
		return strings.Join(out, "\n")
	}
	return string(f[start:end])
}
func openDescriptors() int {
	out, err := lsof(os.Getpid())
	if err != nil {
		return 0
	}
	return bytes.Count(out, []byte("\n"))
}
func numOpenFDS(t *testing.T) (n int, lsof []byte) {
	lsof, err := exec.Command("lsof", "-b", "-n", "-p", strconv.Itoa(os.Getpid())).Output()
	if err != nil {
		t.Skip("skipping test; error finding or running lsof")
	}
	return bytes.Count(lsof, []byte("\n")), lsof
}
// render code chunks using verbatim, or listings if we have a language
func (options *xml2) BlockCode(out *bytes.Buffer, text []byte, lang string, caption []byte, subfigure, callout bool) {
	ial := options.inlineAttr()
	ial.GetOrDefaultAttr("align", "center")

	prefix := ial.Value("prefix")
	ial.DropAttr("prefix")  // it's a fake attribute, so drop it
	ial.DropAttr("callout") // it's a fake attribute, so drop it

	// subfigure stuff. TODO(miek): check
	if len(caption) > 0 {
		ial.GetOrDefaultAttr("title", string(sanitizeXML(caption)))
	}
	ial.DropAttr("type")

	s := ial.String()
	out.WriteString("\n<figure" + s + "><artwork" + ial.Key("align") + ">\n")
	if prefix != "" {
		nl := bytes.Count(text, []byte{'\n'})
		text = bytes.Replace(text, []byte{'\n'}, []byte("\n"+prefix), nl-1)
		// add prefix at the start as well
		text = append([]byte(prefix), text...)
	}
	if callout {
		attrEscapeInCode(options, out, text)
	} else {
		writeEntity(out, text)
	}
	out.WriteString("</artwork></figure>\n")
}