// #line processing.
func (in *Input) line() {
	// Only need to handle Plan 9 format: #line 337 "filename"
	tok := in.Stack.Next()
	if tok != scanner.Int {
		in.expectText("expected line number after #line")
	}
	line, err := strconv.Atoi(in.Stack.Text())
	if err != nil {
		in.Error("error parsing #line (cannot happen):", err)
	}
	tok = in.Stack.Next()
	if tok != scanner.String {
		in.expectText("expected file name in #line")
	}
	file, err := strconv.Unquote(in.Stack.Text())
	if err != nil {
		in.Error("unquoting #line file name: ", err)
	}
	tok = in.Stack.Next()
	if tok != '\n' {
		in.Error("unexpected token at end of #line: ", tok)
	}
	obj.Linklinehist(linkCtxt, histLine, file, line)
	in.Stack.SetPos(line, file)
}
// Close closes the underlying file, if any, and pops it from the line history.
func (t *Tokenizer) Close() {
	if t.file != nil {
		t.file.Close()
		// It's an open file, so pop the line history.
		obj.Linklinehist(linkCtxt, histLine, "<pop>", 0)
	}
}
// newfile pushes the named file onto the input stack and records it in the
// line history; if f is nil, the file is opened here.
func newfile(s string, f *os.File) {
	i := ionext
	i.Link = iostack
	iostack = i
	i.F = f
	if f == nil {
		var err error
		i.F, err = os.Open(s)
		if err != nil {
			Yyerror("%ca: %v", Thechar, err)
			errorexit()
		}
	}
	fi.P = nil
	obj.Linklinehist(Ctxt, int(Lineno), s, 0)
}
// filbuf returns the next byte of input, refilling the buffer from the
// current file or popping the include stack when a file is exhausted.
// It returns EOF when no input remains.
func filbuf() int {
	var n int

loop:
	i := iostack
	if i == nil {
		return EOF
	}
	if i.F == nil {
		goto pop
	}
	n, _ = i.F.Read(i.B[:])
	if n == 0 {
		i.F.Close()
		obj.Linklinehist(Ctxt, int(Lineno), "<pop>", 0)
		goto pop
	}
	fi.P = i.B[1:n]
	return int(i.B[0]) & 0xff

pop:
	iostack = i.Link
	i.Link = iofree
	iofree = i
	i = iostack
	if i == nil {
		return EOF
	}
	fi.P = i.P
	if len(fi.P) == 0 {
		goto loop
	}
	tmp8 := fi.P
	fi.P = fi.P[1:]
	return int(tmp8[0]) & 0xff
}
// NewTokenizer returns a Tokenizer that reads tokens from r, reporting
// positions under the given file name. If file is non-nil, it is pushed
// onto the line history and is later closed by Close.
func NewTokenizer(name string, r io.Reader, file *os.File) *Tokenizer {
	var s scanner.Scanner
	s.Init(r)
	// Newline is like a semicolon; other space characters are fine.
	s.Whitespace = 1<<'\t' | 1<<'\r' | 1<<' '
	// Don't skip comments: we need to count newlines.
	s.Mode = scanner.ScanChars |
		scanner.ScanFloats |
		scanner.ScanIdents |
		scanner.ScanInts |
		scanner.ScanStrings |
		scanner.ScanComments
	s.Position.Filename = name
	s.IsIdentRune = isIdentRune
	if file != nil {
		obj.Linklinehist(linkCtxt, histLine, name, 0)
	}
	return &Tokenizer{
		s:        &s,
		line:     1,
		fileName: name,
		file:     file,
	}
}
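// Illustrative sketch (not part of the assembler sources): every function
// above feeds obj.Linklinehist, which keeps a push/pop history of input
// files — a real file name pushes an entry, and the sentinel "<pop>"
// returns to the enclosing file. The hypothetical type below models only
// that push/pop behavior, with invented names, to show the intended flow:
// NewTokenizer or newfile notes the outer file, an included file notes a
// nested one, and Close or filbuf notes "<pop>" when a file ends.

type fileHist struct {
	stack []string // innermost open file is last
}

// note records that, from the given line on, input comes from the named
// file; the sentinel "<pop>" instead marks the end of the innermost file.
func (h *fileHist) note(line int, name string) {
	if name == "<pop>" {
		if n := len(h.stack); n > 0 {
			h.stack = h.stack[:n-1]
		}
		return
	}
	h.stack = append(h.stack, name)
}

// current returns the file currently being read, or "" if none is open.
func (h *fileHist) current() string {
	if len(h.stack) == 0 {
		return ""
	}
	return h.stack[len(h.stack)-1]
}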