package main

import (
	"bufio"
	"fmt"
)

func main() {
	var str = "hello world\ni am jemy\r"

	var buf = []byte(str)
	fmt.Println("----------ScanBytes----------")
	for {
		advance, token, err := bufio.ScanBytes(buf, true)
		if advance == 0 {
			break
		}
		fmt.Println(advance, token, err)
		if advance <= len(buf) {
			buf = buf[advance:]
		}
	}

	fmt.Println("----------ScanLines----------")
	buf = []byte(str)
	for {
		advance, token, err := bufio.ScanLines(buf, true)
		if advance == 0 {
			break
		}
		fmt.Print(advance, string(token), err)
		fmt.Println()
		if advance <= len(buf) {
			buf = buf[advance:]
		}
	}

	fmt.Println("----------ScanRunes----------")
	buf = []byte(str)
	for {
		advance, token, err := bufio.ScanRunes(buf, true)
		if advance == 0 {
			break
		}
		fmt.Print(advance, string(token), len(token), err)
		fmt.Println()
		if advance <= len(buf) {
			buf = buf[advance:]
		}
	}

	fmt.Println("----------ScanWords----------")
	buf = []byte(str)
	for {
		advance, token, err := bufio.ScanWords(buf, true)
		if advance == 0 {
			break
		}
		fmt.Print(advance, string(token), len(token), err)
		fmt.Println()
		if advance <= len(buf) {
			buf = buf[advance:]
		}
	}
}
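All four helpers exercised above (ScanBytes, ScanLines, ScanRunes, ScanWords) share the bufio.SplitFunc signature, which is why the same manual loop drives each of them: the whole input is already in memory, so atEOF is passed as true, and advance == 0 means no further tokens remain. For reference, the SplitFunc contract from the standard library is:

type SplitFunc func(data []byte, atEOF bool) (advance int, token []byte, err error)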
Code example #2
File: unix_reader.go Project: prodigeni/packer
// scanUnixLine is a bufio.Scanner SplitFunc. It tokenizes on lines, but
// only returns unix-style lines. So even if the line is "one\r\n", the
// token returned will be "one\n".
func scanUnixLine(data []byte, atEOF bool) (advance int, token []byte, err error) {
	advance, token, err = bufio.ScanLines(data, atEOF)
	if advance == 0 {
		return
	}
	return advance, append(token, "\n"...), err
}
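In normal use a SplitFunc like this is installed on a bufio.Scanner rather than called by hand. A minimal usage sketch (imports omitted as in the snippets above; the input literal is made up):

scanner := bufio.NewScanner(strings.NewReader("one\r\ntwo"))
scanner.Split(scanUnixLine)
for scanner.Scan() {
	fmt.Printf("%q\n", scanner.Text()) // prints "one\n", then "two\n"
}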
Code example #3
File: utils.go Project: husobee/sudoku
// puzzleScanSplit - This is the custom scanner split function used to both
// parse and validate the stdin representation of the puzzle
func puzzleScanSplit(data []byte, atEOF bool) (advance int, token []byte, err error) {
	// based on scanlines, we will validate each line at a time
	advance, token, err = bufio.ScanLines(data, atEOF)
	if err == nil && token != nil {
		if len(token) != maxInputRowLength {
			// line length is incorrect, error
			err = ErrParseInvalidLineLength
			return
		}
		// check that each line is correct format
		for i, b := range token {
			if isEvenNumber(i) {
				// even, should be either a Number or Blank
				if !isNumber(b) && !isBlank(b) {
					//error
					err = ErrParseInvalidCharacter
					return
				}
			} else {
				// odd, should be space
				if !isSpace(b) {
					err = ErrParseInvalidCharacter
					return
				}
			}
		}
	}
	return
}
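When a SplitFunc returns a non-nil error, bufio.Scanner stops scanning and reports it via Err(). A minimal sketch of how the validation above would surface (the input literal is made up and would presumably fail the length check):

scanner := bufio.NewScanner(strings.NewReader("bad row\n"))
scanner.Split(puzzleScanSplit)
for scanner.Scan() {
	// process scanner.Text()
}
if err := scanner.Err(); err != nil {
	fmt.Println(err) // e.g. ErrParseInvalidLineLength or ErrParseInvalidCharacter
}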
Code example #4
File: context.go Project: xenonn/docker-gen
func GetCurrentContainerID() string {
	file, err := os.Open("/proc/self/cgroup")

	if err != nil {
		return ""
	}
	defer file.Close()

	reader := bufio.NewReader(file)
	scanner := bufio.NewScanner(reader)
	scanner.Split(bufio.ScanLines)

	regex := "/docker/([[:alnum:]]{64})$"
	re := regexp.MustCompilePOSIX(regex)

	for scanner.Scan() {
		_, lines, err := bufio.ScanLines([]byte(scanner.Text()), true)
		if err == nil {
			if re.MatchString(string(lines)) {
				submatches := re.FindStringSubmatch(string(lines))
				containerID := submatches[1]

				return containerID
			}
		}
	}

	return ""
}
Code example #5
File: parse.go Project: hwaf/hwaf-cmt2yml
func scan_line(data []byte, atEOF bool) (advance int, token []byte, err error) {
	advance, token, err = bufio.ScanLines(data, atEOF)
	return
	// sz := len(token)
	// if sz > 0 && token[sz-1] == '\\' {
	// 	return
	// }
}
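The commented-out lines hint at joining lines that end with a backslash. One possible way to complete that idea, purely as a sketch and not the hwaf project's code, is to keep consuming physical lines until one does not end in a backslash:

// scanContinuedLine is a hypothetical SplitFunc that joins backslash-continued
// lines into a single token. Returning (0, nil, nil) requests more data; the
// scanner then re-invokes the function from scratch once more bytes arrive.
func scanContinuedLine(data []byte, atEOF bool) (advance int, token []byte, err error) {
	for {
		a, t, e := bufio.ScanLines(data[advance:], atEOF)
		if e != nil {
			return 0, nil, e
		}
		if a == 0 {
			if atEOF && len(token) > 0 {
				// EOF in the middle of a continuation: return what we have.
				return advance, token, nil
			}
			return 0, nil, nil // need more data
		}
		advance += a
		if n := len(t); n > 0 && t[n-1] == '\\' {
			token = append(token, t[:n-1]...) // drop the backslash, keep joining
			continue
		}
		return advance, append(token, t...), nil
	}
}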
Code example #6
File: n2n2.go Project: deepglint/nsqelastic
func GetTopic(m *nsq.Message) string {
	body := m.Body
	a, b, _ := bufio.ScanLines(body, false)
	//log.Println(a, string(b), c)
	m.Body = m.Body[a:]
	//bytes.TrimPrefix(m.Body, b)
	return string(b)
}
Code example #7
File: rodrego.go Project: jamesr/go-rodrego
/* Replace any Mac newlines with Unix newlines. Because, y'know, there are Mac
 * newlines in the example programs. DOS newlines are handled by the golang
 * standard bufio.ScanLines function. */
func magicSplit(data []byte, atEOF bool) (advance int, token []byte, err error) {
	innerline := regexp.MustCompile("\r([^\n])")
	endline := regexp.MustCompile("\r$")
	replaced := innerline.ReplaceAll(data, []byte("\n$1"))
	replaced = endline.ReplaceAll(replaced, []byte("\n"))
	advance, token, err = bufio.ScanLines(replaced, atEOF)
	return
}
Code example #8
File: mmap.go Project: johnsiilver/golib
// ReadLine returns each line of text, stripped of any trailing end-of-line marker. The returned line may be empty.
// The end-of-line marker is one optional carriage return followed by one mandatory newline.
// In regular expression notation, it is `\r?\n`. The last non-empty line of input will be returned even if it has no newline.
func (s *stringer) ReadLine() (string, error) {
	if s.ptr == s.len {
		return "", io.EOF
	}

	i, t, _ := bufio.ScanLines(s.data[s.ptr:], false)
	if i == 0 {
		i, t, _ = bufio.ScanLines(s.data[s.ptr:], true)
		if i == 0 {
			return "", io.EOF
		}
	}

	s.ptr += i
	if s.ptr == s.len {
		return string(t), io.EOF
	}
	return string(t), nil
}
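The receiver's fields are not part of this snippet; a plausible layout, inferred only from how ReadLine uses them, would be:

// Hypothetical receiver inferred from the method above: data holds the
// memory-mapped bytes, ptr is the current read offset, and len caches len(data).
type stringer struct {
	data []byte
	ptr  int
	len  int
}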
Code example #9
File: unix_reader.go Project: JNPRAutomate/packer
// scanUnixLine is a bufio.Scanner SplitFunc. It tokenizes on lines, but
// only returns unix-style lines. So even if the line is "one\r\n", the
// token returned will be "one\n".
func scanUnixLine(data []byte, atEOF bool) (advance int, token []byte, err error) {
	advance, token, err = bufio.ScanLines(data, atEOF)
	if advance == 0 {
		// If we reached the end of a line without a newline, then
		// just return as it is. Otherwise the Scanner will keep trying
		// to scan, blocking forever.
		return
	}

	return advance, append(token, '\n'), err
}
Code example #10
File: data.go Project: theseusyang/occult
func writeStore(f *zip.File, nodeID int) (dbName string) {

	rc, e := f.Open()
	if e != nil {
		glog.Fatal(e)
	}
	defer rc.Close()
	scanner := bufio.NewScanner(rc)
	// Create a custom split function by wrapping the existing ScanLines function.
	split := func(data []byte, atEOF bool) (advance int, line []byte, err error) {
		advance, line, err = bufio.ScanLines(data, atEOF)
		if err == nil && line != nil {
			// can validate here and return error.
		}
		return
	}
	// Set the split function for the scanning operation.
	scanner.Split(split)

	// create store
	name := path.Base(f.Name) + "-" + strconv.Itoa(nodeID)
	dbName = path.Join(OutDir, name)

	// Return if db exists.
	if _, err := os.Stat(dbName); err == nil {
		glog.Infof("db %s already exist, skipping...", dbName)
		return dbName
	}

	glog.Infof("creating store %s", dbName)
	db, err := store.NewStore(dbName)
	fatalIf(err)
	defer db.Close()

	var key uint64
	for scanner.Scan() {
		newObs := Obs{}
		fields := strings.Fields(scanner.Text())
		newObs.User, e = strconv.Atoi(fields[0])
		fatalIf(e)
		newObs.Item, e = strconv.Atoi(fields[1])
		fatalIf(e)
		newObs.Rating, e = strconv.Atoi(fields[2])
		fatalIf(e)
		var io interface{} = newObs
		fatalIf(db.Put(key, &io))
		key++
	}
	if err = scanner.Err(); err != nil {
		glog.Fatalf("Invalid input: %s", err)
	}
	glog.Infof("wrote %d records", key)
	return dbName
}
Code example #11
File: split.go Project: ww24/gopham
func split() (f bufio.SplitFunc) {
	// current read offset
	offset := 0
	f = func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		// grab one line
		d := data[offset:]
		advance, token, err = bufio.ScanLines(d, atEOF)

		// less than a full line: return as is
		if advance == 0 && token == nil {
			offset = 0
			return
		}

		// at least one full line
		if token != nil {
			offset += advance

			// a newline was found
			if advance == 1 && len(token) == 0 {
				// consecutive newlines
				if offset > 1 {
					advance = offset
					token = []byte(strings.Trim(string(data[:offset-2]), "\n"))
					offset = 0
					return
				}

				token = nil
				if len(data) <= offset {
					offset = 0
				}
				return
			}

			// end of the input
			if atEOF && advance == len(d) {
				advance = offset
				token = []byte(strings.Trim(string(data[:offset]), "\n"))
				offset = 0
				return
			}

			advance, token, err = f(data, atEOF)
			return
		}

		offset = 0
		return
	}
	return
}
Code example #12
File: set1.go Project: juniorz/cryptopals
func NewHexEncodingScanner(r io.Reader) *bufio.Scanner {
	s := bufio.NewScanner(r)
	split := func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		advance, token, err = bufio.ScanLines(data, atEOF)
		if err == nil && token != nil {
			token, err = hex.DecodeString(string(token))
		}
		return
	}

	s.Split(split)

	return s
}
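A short usage sketch for this scanner (the hex literals are made-up sample input, one encoded value per line):

s := NewHexEncodingScanner(strings.NewReader("68656c6c6f\n776f726c64\n"))
for s.Scan() {
	fmt.Println(s.Text()) // prints "hello", then "world"
}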
Code example #13
func (f *Automatic) automaticScannerSplit(data []byte, atEOF bool) (advance int, token []byte, err error) {
	if atEOF && len(data) == 0 {
		return 0, nil, nil
	}

	switch format, err := detect(data); format {
	case detectedRFC6587:
		return rfc6587ScannerSplit(data, atEOF)
	case detectedRFC3164, detectedRFC5424:
		// the default
		return bufio.ScanLines(data, atEOF)
	default:
		if err != nil {
			return 0, nil, err
		}
		// Request more data
		return 0, nil, nil
	}
}
Code example #14
File: Parser.go Project: jw56578/nand2tetris-2
func consumeLine(data []byte, atEOF bool) (advance int, token []byte, err error) {
	advance, token, err = bufio.ScanLines(data, atEOF)
	if advance == 0 && len(token) == 0 {
		return
	}
	tokenString := string(token)
	//Skip empty lines
	if advance == 2 {
		advance, token, err = consumeLine(data[advance:len(data)], atEOF)
		advance = advance + 2
	}
	//Drop lines with comments OR drop line ending with comments
	if strings.IndexAny(tokenString, "/") == 0 {
		storedAdvance := advance
		advance, token, err = consumeLine(data[storedAdvance:len(data)], atEOF)
		advance = advance + storedAdvance
	} else if commentIndex := strings.IndexAny(tokenString, "/"); commentIndex != -1 {
		token = token[0:commentIndex]
	}
	return
}
Code example #15
File: rewrite.go Project: Kubuxu/gx-lua
func fixCanonicalImports(buf []byte) (bool, error) {
	var i int
	var changed bool
	for {
		n, tok, err := bufio.ScanLines(buf[i:], true)
		if err != nil {
			return false, err
		}
		if n == 0 {
			return changed, nil
		}
		i += n

		stripped := stripImportComment(tok)
		if stripped != nil {
			nstr := copy(tok, stripped)
			copy(tok[nstr:], bytes.Repeat([]byte(" "), len(tok)-nstr))
			changed = true
		}
	}
}
Code example #16
File: netrc.go Project: shaunduncan/hk
func getToken(b []byte, pos *filePos) ([]byte, *token, error) {
	adv, wordb, err := bufio.ScanWords(b, true)
	if err != nil {
		return b, nil, err // should never happen
	}
	b = b[adv:]
	word := string(wordb)
	if word == "" {
		return b, nil, nil // EOF reached
	}

	t := new(token)
	var ok bool
	t.kind, ok = keywords[word]
	if !ok {
		return b, nil, &Error{pos.name, pos.line, "keyword expected; got " + word}
	}
	if t.kind == tkDefault {
		return b, t, nil
	}
	if t.kind == tkComment {
		t.value = word + " "
		adv, wordb, err = bufio.ScanLines(b, true)
		if err != nil {
			return b, nil, err // should never happen
		}
		t.value = t.value + string(wordb)
		return b[adv:], t, nil
	}

	if word == "" {
		return b, nil, &Error{pos.name, pos.line, "word expected"}
	}
	if t.kind == tkMacdef {
		adv, lineb, err := bufio.ScanLines(b, true)
		if err != nil {
			return b, nil, err // should never happen
		}
		b = b[adv:]
		adv, wordb, err = bufio.ScanWords(lineb, true)
		if err != nil {
			return b, nil, err // should never happen
		}
		word = string(wordb)
		t.macroName = word
		lineb = lineb[adv:]

		// Macro value starts on next line. The rest of current line
		// should contain nothing but whitespace
		i := 0
		for i < len(lineb) {
			r, size := utf8.DecodeRune(lineb[i:])
			if r == '\n' {
				i += size
				pos.line++
				break
			}
			if !unicode.IsSpace(r) {
				return b, nil, &Error{pos.name, pos.line, "unexpected word"}
			}
			i += size
		}

		// Find end of macro value
		i = bytes.Index(b, []byte("\n\n"))
		if i < 0 { // EOF reached
			i = len(b)
		}
		t.value = string(b[0:i])

		return b[i:], t, nil
	} else {
		adv, wordb, err = bufio.ScanWords(b, true)
		if err != nil {
			return b, nil, err // should never happen
		}
		word = string(wordb)
		b = b[adv:]
	}
	t.value = word
	return b, t, nil
}
Code example #17
File: assembler.go Project: jw56578/nand2tetris-2
func consumeLine(data []byte, atEOF bool) (advance int, token []byte, err error) {
	advance, token, err = bufio.ScanLines(data, atEOF)
	if advance == 0 && len(token) == 0 {
		return
	}
	tokenString := string(token)
	//Skip empty lines
	if advance == 2 {
		advance, token, err = consumeLine(data[advance:len(data)], atEOF)
		advance = advance + 2
	}
	//Drop lines with comments OR drop line ending with comments
	if strings.IndexAny(tokenString, "/") == 0 {
		storedAdvance := advance
		advance, token, err = consumeLine(data[storedAdvance:len(data)], atEOF)
		advance = advance + storedAdvance
	} else if commentIndex := strings.IndexAny(tokenString, "/"); commentIndex != -1 {
		token = token[0:commentIndex]
	}

	//Remove all spaces
	token = bytes.Map(func(r rune) (newR rune) {
		if unicode.IsSpace(r) {
			newR = -1
		} else {
			newR = r
		}
		return
	}, token)
	return
}