func fileTokens(tf *token.File) (toks []string, err error) {
	src, err := ioutil.ReadFile(tf.Name())
	if err != nil {
		return nil, err
	}
	s := &scanner.Scanner{}
	s.Init(tf, src, nil, 0)
	tokmap := make(TokenSet)
	for {
		_, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		if tok == token.STRING {
			// XXX: what if strings are misspelled?
			lit = lit[1 : len(lit)-1]
		}
		tokmap[lit] = struct{}{}
	}
	for k := range tokmap {
		toks = append(toks, k)
	}
	return toks, nil
}
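A minimal driver sketch for fileTokens above, assuming it lives in a package where TokenSet is map[string]struct{} (as the snippet implies); the file name "main.go" is illustrative only. The key point it shows is that the *token.File registered with the FileSet must report the same size as the file on disk, or scanner.Init will panic when fileTokens re-reads it.

// Hypothetical usage of fileTokens; file name and setup are assumptions,
// not part of the original snippet.
package main

import (
	"fmt"
	"go/token"
	"io/ioutil"
	"log"
)

func main() {
	const name = "main.go"
	src, err := ioutil.ReadFile(name)
	if err != nil {
		log.Fatal(err)
	}
	fset := token.NewFileSet()
	// The registered size must equal len(src); fileTokens re-reads the
	// file by name and passes the bytes to scanner.Init.
	tf := fset.AddFile(name, fset.Base(), len(src))
	toks, err := fileTokens(tf)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(toks)
}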
func (d *DIBuilder) getFile(file *token.File) llvm.Metadata {
	if diFile := d.files[file]; diFile.C != nil {
		return diFile
	}
	diFile := d.builder.CreateFile(d.remapFilePath(file.Name()), "")
	d.files[file] = diFile
	return diFile
}
func (d *DIBuilder) getFile(file *token.File) llvm.Value {
	if diFile := d.files[file]; !diFile.IsNil() {
		return diFile
	}
	diFile := d.builder.CreateFile(d.remapFilePath(file.Name()), "")
	d.files[file] = diFile
	return diFile
}
// createCompileUnit creates and returns debug metadata for the compile
// unit as a whole, using the first file in the file set as a representative
// (the choice of file is arbitrary).
func (d *DIBuilder) createCompileUnit() llvm.Metadata {
	var file *token.File
	d.fset.Iterate(func(f *token.File) bool {
		file = f
		return false
	})
	dir, err := os.Getwd()
	if err != nil {
		panic("could not get current directory: " + err.Error())
	}
	return d.builder.CreateCompileUnit(llvm.DICompileUnit{
		Language: llvm.DW_LANG_Go,
		File:     d.remapFilePath(file.Name()),
		Dir:      dir,
		Producer: "llgo",
	})
}
// Init prepares the scanner s to tokenize the text src by setting the
// scanner at the beginning of src. The scanner uses the file set file
// for position information and it adds line information for each line.
// It is ok to re-use the same file when re-scanning the same file as
// line information which is already present is ignored. Init causes a
// panic if the file size does not match the src size.
//
// Calls to Scan will invoke the error handler err if they encounter a
// syntax error and err is not nil. Also, for each error encountered,
// the Scanner field ErrorCount is incremented by one. The mode parameter
// determines how comments are handled.
//
// Note that Init may call err if there is an error in the first character
// of the file.
//
func (s *Scanner) Init(file *token.File, src []byte, err ErrorHandler, mode Mode) {
	// Explicitly initialize all fields since a scanner may be reused.
	if file.Size() != len(src) {
		panic(fmt.Sprintf("file size (%d) does not match src len (%d)", file.Size(), len(src)))
	}
	s.file = file
	s.dir, _ = filepath.Split(file.Name())
	s.src = src
	s.err = err
	s.mode = mode

	s.ch = ' '
	s.offset = 0
	s.rdOffset = 0
	s.lineOffset = 0
	s.insertSemi = false
	s.ErrorCount = 0

	s.next()
}
// Init prepares the scanner S to tokenize the text src by setting the
// scanner at the beginning of src. The scanner uses the file set file
// for position information and it adds line information for each line.
// It is ok to re-use the same file when re-scanning the same file as
// line information which is already present is ignored. Init causes a
// panic if the file size does not match the src size.
//
// Calls to Scan will use the error handler err if they encounter a
// syntax error and err is not nil. Also, for each error encountered,
// the Scanner field ErrorCount is incremented by one. The mode parameter
// determines how comments, illegal characters, and semicolons are handled.
//
// Note that Init may call err if there is an error in the first character
// of the file.
//
func (S *Scanner) Init(file *token.File, src []byte, err ErrorHandler, mode uint) {
	// Explicitly initialize all fields since a scanner may be reused.
	if file.Size() != len(src) {
		panic("file size does not match src len")
	}
	S.file = file
	S.dir, _ = filepath.Split(file.Name())
	S.src = src
	S.err = err
	S.mode = mode

	S.ch = ' '
	S.offset = 0
	S.rdOffset = 0
	S.lineOffset = 0
	S.insertSemi = false
	S.ErrorCount = 0

	S.next()
}
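A minimal sketch of how the standard go/scanner Init documented above (the Mode-typed variant) is typically driven: register a file of the right size with a FileSet, call Init with an optional error handler, then loop over Scan until EOF. The source literal and handler here are illustrative only.

// Hedged usage sketch for go/scanner; the example source is an assumption.
package main

import (
	"fmt"
	"go/scanner"
	"go/token"
)

func main() {
	src := []byte("package main\n\nfunc main() { println(\"hi\") }\n")

	fset := token.NewFileSet()
	// The registered file size must equal len(src), otherwise Init panics
	// as documented above.
	file := fset.AddFile("example.go", fset.Base(), len(src))

	var s scanner.Scanner
	eh := func(pos token.Position, msg string) {
		fmt.Printf("error %s: %s\n", pos, msg)
	}
	s.Init(file, src, eh, scanner.ScanComments)

	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		fmt.Printf("%s\t%s\t%q\n", fset.Position(pos), tok, lit)
	}
}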
func (s *Scanner) Init(file *token.File, src []byte, err ErrorHandler, mode Mode) {
	if file.Size() != len(src) {
		panic(fmt.Sprintf("file size (%d) does not match src len (%d)", file.Size(), len(src)))
	}

	s.tokScanner = scan.Scanner{Matcher: getTokenMatcher()}
	s.errScanner = scan.Scanner{Matcher: getErrorMatcher()}
	s.src = skipBOM(src)
	s.tokScanner.SetSource(s.src)
	s.errScanner.SetSource(s.src)

	s.file = file
	s.fileBase = s.file.Base()
	s.dir, _ = filepath.Split(file.Name())
	s.err = err
	s.mode = mode
	s.ErrorCount = 0
	s.preSemi = false
	s.semiPos = 0
}
func (s *Scanner) Init(file *token.File, src []byte, err ErrorHandler, mode Mode) {
	//fmt.Println("Init src", strconv.Quote(string(src)), mode)
	if file.Size() != len(src) {
		panic(fmt.Sprintf("file size (%d) does not match src len (%d)", file.Size(), len(src)))
	}

	s.gombiScanner = newGombiScanner()
	s.SetSource(skipBOM(src))

	s.file = file
	s.dir, _ = filepath.Split(file.Name())
	s.err = err
	s.mode = mode

	s.ErrorCount = 0

	s.lastIsPreSemi = false
	s.commentAfterPreSemi = false
	s.endOfLinePos = 0
	s.endOfLine = 0
	s.commentQueue.reset()
}