func ExampleScanner_Scan() {
	// src is the input that we want to tokenize.
	src := []byte(`[profile "A"]
color = blue ; Comment`)

	// Initialize the scanner.
	var s scanner.Scanner
	fset := token.NewFileSet()                      // positions are relative to fset
	file := fset.AddFile("", fset.Base(), len(src)) // register input "file"
	s.Init(file, src, nil /* no error handler */, scanner.ScanComments)

	// Repeated calls to Scan yield the token sequence found in the input.
	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		fmt.Printf("%s\t%q\t%q\n", fset.Position(pos), tok, lit)
	}

	// output:
	// 1:1	"["	""
	// 1:2	"IDENT"	"profile"
	// 1:10	"STRING"	"\"A\""
	// 1:13	"]"	""
	// 1:14	"\n"	""
	// 2:1	"IDENT"	"color"
	// 2:7	"="	""
	// 2:9	"STRING"	"blue"
	// 2:14	"COMMENT"	"; Comment"
}
// BenchmarkScan measures the cost of tokenizing source from start to EOF.
func BenchmarkScan(b *testing.B) {
	b.StopTimer() // keep file registration out of the timed section
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(source)) // source is a package-level input defined elsewhere in the tests
	var s Scanner
	b.StartTimer()
	for i := b.N - 1; i >= 0; i-- {
		s.Init(file, source, nil, ScanComments)
		for {
			_, tok, _ := s.Scan()
			if tok == token.EOF {
				break
			}
		}
	}
}
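The benchmark above reads a package-level `source` variable that is not part of this excerpt. A minimal stand-in, assuming gcfg-style input along the lines of the example above (the variable contents here are hypothetical), might look like this:

	// Hypothetical stand-in for the package-level source consumed by
	// BenchmarkScan; the real test file defines its own input elsewhere.
	var source = []byte(`[profile "A"]
	color = blue ; Comment

	[profile "B"]
	color = red
	`)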
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package scanner

import (
	"os"
	"strings"
	"testing"

	"github.com/cenkalti/dalga/dalga/Godeps/_workspace/src/code.google.com/p/gcfg/token"
)

var fset = token.NewFileSet()

const /* class */ (
	special = iota
	literal
	operator
)

// tokenclass reports the class of a token: literal, operator, or
// special (anything else, such as EOF).
func tokenclass(tok token.Token) int {
	switch {
	case tok.IsLiteral():
		return literal
	case tok.IsOperator():
		return operator
	}
	return special
}
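For completeness, here is a minimal sketch of how tokenclass could be exercised in a table-driven test. The token constants used below (token.EOF, token.IDENT, token.STRING, token.ASSIGN) are assumed to mirror go/token in the gcfg token package; only EOF, IDENT, and STRING appear verbatim in the excerpt above.

	// A sketch of a table-driven test for tokenclass; the token constants
	// are assumed, not taken from this excerpt.
	func TestTokenclass(t *testing.T) {
		cases := []struct {
			tok  token.Token
			want int
		}{
			{token.EOF, special},     // neither literal nor operator
			{token.IDENT, literal},   // identifiers are literals
			{token.STRING, literal},  // quoted and unquoted values
			{token.ASSIGN, operator}, // '=' between key and value
		}
		for _, c := range cases {
			if got := tokenclass(c.tok); got != c.want {
				t.Errorf("tokenclass(%s) = %d, want %d", c.tok, got, c.want)
			}
		}
	}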