func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode uint) {
	p.file = fset.AddFile(filename, fset.Base(), len(src))
	p.scanner.Init(p.file, src, p, scannerMode(mode))
	p.mode = mode
	p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)
	p.next()
}
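
Every variant in this listing follows the same go/token and go/scanner plumbing: register the source with a token.FileSet, hand the resulting *token.File to a scanner, and prime the first token. Below is a minimal, self-contained sketch of that underlying standard-library API; the file name and source text are illustrative only, not taken from any of the projects listed here.

package main

import (
	"fmt"
	"go/scanner"
	"go/token"
)

func main() {
	src := []byte("package main // a comment")

	// Register the source with a FileSet; fset.Base() yields the next free base offset.
	fset := token.NewFileSet()
	file := fset.AddFile("example.go", fset.Base(), len(src))

	// A nil error handler means scan errors are counted but not reported.
	var s scanner.Scanner
	s.Init(file, src, nil, scanner.ScanComments)

	// Repeated calls to Scan yield the token sequence, ending with token.EOF.
	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		fmt.Printf("%s\t%s\t%q\n", fset.Position(pos), tok, lit)
	}
}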
Example #2
func (p *parser) init(fset *token.FileSet, filename string, src []byte) {
	p.ErrorVector.Reset()
	p.file = fset.AddFile(filename, fset.Base(), len(src))
	p.scanner.Init(p.file, src, p, scanner.AllowIllegalChars) // return '@' as token.ILLEGAL w/o error message
	p.next()                                                  // initializes pos, tok, lit
	p.packs = make(map[string]string)
	p.rules = make(map[string]expr)
}
Example #3
func (p *parser) init(fset *token.FileSet, filename string, src []byte) {
	p.file = fset.AddFile(filename, -1, len(src))
	var m scanner.Mode
	eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
	p.scanner.Init(p.file, src, eh, m)

	p.next()
}
Example #4
File: parser.go Project: h12w/gombi
func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mode) {
	p.file = fset.AddFile(filename, -1, len(src))
	var m scanner.Mode
	if mode&ParseComments != 0 {
		m = scanner.ScanComments
	}
	eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
	p.scanner.Init(p.file, src, eh, m)

	p.mode = mode
	p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)

	//p.next()
}
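
The eh closures in Examples #3 and #4 typically collect diagnostics in a scanner.ErrorList. Here is a hedged, standalone sketch of that error-collection pattern; scanAll and the sample input are assumptions for illustration, not code from either project.

package main

import (
	"fmt"
	"go/scanner"
	"go/token"
)

// scanAll tokenizes src, collecting every scanner diagnostic into a
// scanner.ErrorList and returning the list as a single error.
func scanAll(filename string, src []byte) error {
	fset := token.NewFileSet()
	file := fset.AddFile(filename, -1, len(src)) // -1: use the next available base

	var errs scanner.ErrorList
	eh := func(pos token.Position, msg string) { errs.Add(pos, msg) }

	var s scanner.Scanner
	s.Init(file, src, eh, 0) // mode 0: skip comments

	for {
		_, tok, _ := s.Scan()
		if tok == token.EOF {
			break
		}
	}

	errs.Sort()
	return errs.Err() // nil when no errors were recorded
}

func main() {
	fmt.Println(scanAll("bad.go", []byte("package main\nvar x = @\n")))
}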
Example #5
File: spec.go Project: jnwhiteh/go
func (p *ebnfParser) parse(fset *token.FileSet, out io.Writer, src []byte) {
	// initialize ebnfParser
	p.out = out
	p.src = src
	p.file = fset.AddFile("", fset.Base(), len(src))
	p.scanner.Init(p.file, src, p, scanner.AllowIllegalChars)
	p.next() // initializes pos, tok, lit

	// process source
	for p.tok != token.EOF {
		p.parseProduction()
	}
	p.flush()
}
Example #6
func (p *parser) parse(fset *token.FileSet, filename string, src []byte) Grammar {
	// initialize parser
	p.fset = fset
	p.ErrorVector.Reset()
	p.scanner.Init(fset.AddFile(filename, fset.Base(), len(src)), src, p, scanner.AllowIllegalChars)
	p.next() // initializes pos, tok, lit

	grammar := make(Grammar)
	for p.tok != token.EOF {
		prod := p.parseProduction()
		name := prod.Name.String
		if _, found := grammar[name]; !found {
			grammar[name] = prod
		} else {
			p.error(prod.Pos(), name+" declared already")
		}
	}

	return grammar
}
Example #7
// Parse parses src as a single yacc source file fname and returns the
// corresponding Specification. If the source couldn't be read, the returned
// Specification is nil and the error indicates all of the specific failures.
func Parse(fset *token.FileSet, fname string, src []byte) (s *Specification, err error) {
	r := bytes.NewBuffer(src)
	file := fset.AddFile(fname, -1, len(src))
	lx, err := newLexer(file, r)
	if err != nil {
		return nil, err
	}

	y := yyParse(lx)
	n := len(lx.errors)
	if y != 0 || n != 0 {
		if n == 0 {
			panic("internal error")
		}

		return nil, lx.errors
	}

	return lx.spec, nil
}
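
A hypothetical call site for the Parse function above; the grammar.y file name, the loadSpec wrapper, and the fatal error handling are assumptions for illustration (imports of go/token, io/ioutil, and log are implied).

// loadSpec is a hypothetical caller of Parse above.
func loadSpec() *Specification {
	src, err := ioutil.ReadFile("grammar.y") // hypothetical input file
	if err != nil {
		log.Fatal(err)
	}
	fset := token.NewFileSet()
	spec, err := Parse(fset, "grammar.y", src)
	if err != nil {
		log.Fatal(err) // err aggregates all scan/parse failures, per the doc comment
	}
	return spec
}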
Example #8
func ParseFile(fset *token.FileSet, filename string) ATree {

	fmt.Println("dbg 1")

	// get source
	text, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil
	}

	var lexer scanner.Scanner

	fileSet := fset.AddFile(filename, -1, len(text))

	errorHandler := func(pos token.Position, msg string) {
		// FIXME this happened for ILLEGAL tokens, for example '?'
		panic("SUCCESS in scanner errorHandler")
	}

	var m scanner.Mode
	lexer.Init(fileSet, text, errorHandler, m)

	fmt.Println("dbg 2.1")

	// Repeated calls to Scan yield the token sequence found in the input.
	//	for {
	//		_, tok, lit := lexer.Scan()
	//		if tok == token.EOF {
	//			break
	//		}
	//		fmt.Printf("\t%s    %q\n", tok, lit)
	//	}
	//

	fmt.Println("dbg 2.2")

	return parseCode(&lexer, []ATree{})
}
Example #9
func printDecl(decl ast.Node, fset *token.FileSet, buf []byte) (Code, []byte) {
	v := &annotationVisitor{}
	ast.Walk(v, decl)

	buf = buf[:0]
	err := (&printer.Config{Mode: printer.UseSpaces, Tabwidth: 4}).Fprint(sliceWriter{&buf}, fset, decl)
	if err != nil {
		return Code{Text: err.Error()}, buf
	}

	var annotations []Annotation
	var s scanner.Scanner
	fset = token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(buf))
	s.Init(file, buf, nil, scanner.ScanComments)
loop:
	for {
		pos, tok, lit := s.Scan()
		switch tok {
		case token.EOF:
			break loop
		case token.COMMENT:
			p := file.Offset(pos)
			e := p + len(lit)
			if p > math.MaxInt16 || e > math.MaxInt16 {
				break loop
			}
			annotations = append(annotations, Annotation{Kind: CommentAnnotation, Pos: int16(p), End: int16(e)})
		case token.IDENT:
			if len(v.annotations) == 0 {
				// Oops!
				break loop
			}
			annotation := v.annotations[0]
			v.annotations = v.annotations[1:]
			if annotation.Kind == -1 {
				continue
			}
			p := file.Offset(pos)
			e := p + len(lit)
			if p > math.MaxInt16 || e > math.MaxInt16 {
				break loop
			}
			annotation.Pos = int16(p)
			annotation.End = int16(e)
			if len(annotations) > 0 && annotation.Kind == ExportLinkAnnotation {
				prev := annotations[len(annotations)-1]
				if prev.Kind == PackageLinkAnnotation &&
					prev.ImportPath == annotation.ImportPath &&
					prev.End+1 == annotation.Pos {
					// merge with previous
					annotation.Pos = prev.Pos
					annotations[len(annotations)-1] = annotation
					continue loop
				}
			}
			annotations = append(annotations, annotation)
		}
	}
	return Code{Text: string(buf), Annotations: annotations}, buf
}
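
The re-printing step in printDecl relies on go/printer's Config.Fprint before re-scanning the output for annotation offsets. Below is a minimal standalone sketch of that call; the source snippet and the choice of f.Decls[0] are illustrative assumptions.

package main

import (
	"bytes"
	"fmt"
	"go/parser"
	"go/printer"
	"go/token"
)

func main() {
	src := "package p\nfunc   Add(a,b int)int{ return a+b }\n"

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}

	// Re-print the first declaration with spaces instead of tabs, as
	// printDecl does before scanning the output.
	var buf bytes.Buffer
	cfg := &printer.Config{Mode: printer.UseSpaces, Tabwidth: 4}
	if err := cfg.Fprint(&buf, fset, f.Decls[0]); err != nil {
		panic(err)
	}
	fmt.Println(buf.String())
}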