Example 1
func ExampleScanner_Scan() {
	// src is the input that we want to tokenize.
	src := []byte(`[profile "A"]
color = blue ; Comment`)

	// Initialize the scanner.
	var s scanner.Scanner
	fset := token.NewFileSet()                      // positions are relative to fset
	file := fset.AddFile("", fset.Base(), len(src)) // register input "file"
	s.Init(file, src, nil /* no error handler */, scanner.ScanComments)

	// Repeated calls to Scan yield the token sequence found in the input.
	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		fmt.Printf("%s\t%q\t%q\n", fset.Position(pos), tok, lit)
	}

	// output:
	// 1:1	"["	""
	// 1:2	"IDENT"	"profile"
	// 1:10	"STRING"	"\"A\""
	// 1:13	"]"	""
	// 1:14	"\n"	""
	// 2:1	"IDENT"	"color"
	// 2:7	"="	""
	// 2:9	"STRING"	"blue"
	// 2:14	"COMMENT"	"; Comment"
}
Example 2
File: read.go Project: ruizu/gcfg
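// readInto scans src token by token, parsing section headers and variable
// assignments in the gcfg format, and stores each parsed value into config via set.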
func readInto(config interface{}, fset *token.FileSet, file *token.File, src []byte) error {
	var s scanner.Scanner
	var errs scanner.ErrorList
	s.Init(file, src, func(p token.Position, m string) { errs.Add(p, m) }, 0)
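	// sect and sectsub track the section and subsection currently being read.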
	sect, sectsub := "", ""
	pos, tok, lit := s.Scan()
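	// errfn builds an error prefixed with the position of the most recently scanned token.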
	errfn := func(msg string) error {
		return fmt.Errorf("%s: %s", fset.Position(pos), msg)
	}
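	// Scan until EOF, returning on the first scanner or syntax error.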
	for {
		if errs.Len() > 0 {
			return errs.Err()
		}
		switch tok {
		case token.EOF:
			return nil
		case token.EOL, token.COMMENT:
			pos, tok, lit = s.Scan()
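		// '[' opens a section header: [section] or [section "subsection"].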
		case token.LBRACK:
			pos, tok, lit = s.Scan()
			if errs.Len() > 0 {
				return errs.Err()
			}
			if tok != token.IDENT {
				return errfn("expected section name")
			}
			sect, sectsub = lit, ""
			pos, tok, lit = s.Scan()
			if errs.Len() > 0 {
				return errs.Err()
			}
			if tok == token.STRING {
				sectsub = unquote(lit)
				if sectsub == "" {
					return errfn("empty subsection name")
				}
				pos, tok, lit = s.Scan()
				if errs.Len() > 0 {
					return errs.Err()
				}
			}
			if tok != token.RBRACK {
				if sectsub == "" {
					return errfn("expected subsection name or right bracket")
				}
				return errfn("expected right bracket")
			}
			pos, tok, lit = s.Scan()
			if tok != token.EOL && tok != token.EOF && tok != token.COMMENT {
				return errfn("expected EOL, EOF, or comment")
			}
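		// An identifier begins a variable declaration, which is only valid inside a section.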
		case token.IDENT:
			if sect == "" {
				return errfn("expected section header")
			}
			n := lit
			pos, tok, lit = s.Scan()
			if errs.Len() > 0 {
				return errs.Err()
			}
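			// A name with no '=' is recorded as a blank (valueless) variable.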
			blank, v := tok == token.EOF || tok == token.EOL || tok == token.COMMENT, ""
			if !blank {
				if tok != token.ASSIGN {
					return errfn("expected '='")
				}
				pos, tok, lit = s.Scan()
				if errs.Len() > 0 {
					return errs.Err()
				}
				if tok != token.STRING {
					return errfn("expected value")
				}
				v = unquote(lit)
				pos, tok, lit = s.Scan()
				if errs.Len() > 0 {
					return errs.Err()
				}
				if tok != token.EOL && tok != token.EOF && tok != token.COMMENT {
					return errfn("expected EOL, EOF, or comment")
				}
			}
			err := set(config, sect, sectsub, n, blank, v)
			if err != nil {
				return err
			}
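		// Any other token at this point is a syntax error.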
		default:
			if sect == "" {
				return errfn("expected section header")
			}
			return errfn("expected section header or variable declaration")
		}
	}
	panic("never reached")
}