Example #1
func NewToken(src string, cursor int) tokenIterator {
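	// Splice a ";" in at the cursor (an assumption about intent: it forces a
	// token boundary there, so no token straddles the cursor offset).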
	src = src[:cursor] + ";" + src[cursor:]
	tokens := make([]tokenItem, 0, 1000)
	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(src))
	s.Init(file, []byte(src), nil, 0)
	index := 0
	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		off := fset.Position(pos).Offset

		tokens = append(tokens, tokenItem{
			off: off,
			tok: tok,
			lit: lit,
		})
		if cursor > off {
			index++
		}
	}
	return tokenIterator{
		tokens: tokens,
		index:  index,
	}
}
Example #2
func formatCode(src []byte, annotations []doc.TypeAnnotation) string {

	// Collect comment positions as doc.TypeAnnotation values with Name == "".
	var (
		comments []doc.TypeAnnotation
		s        scanner.Scanner
	)
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(src))
	s.Init(file, src, nil, scanner.ScanComments)
commentLoop:
	for {
		pos, tok, lit := s.Scan()
		switch tok {
		case token.EOF:
			break commentLoop
		case token.COMMENT:
			p := file.Offset(pos)
			comments = append(comments, doc.TypeAnnotation{Pos: p, End: p + len(lit)})
		}
	}

	// Merge type annotations and comments without modifying the caller's slice
	// of annotations.
	switch {
	case len(comments) == 0:
		// nothing to do
	case len(annotations) == 0:
		annotations = comments
	default:
		annotations = append(comments, annotations...)
		sort.Sort(sortByPos(annotations))
	}

	var buf bytes.Buffer
	last := 0
	for _, a := range annotations {
		template.HTMLEscape(&buf, src[last:a.Pos])
		if a.Name != "" {
			p := a.ImportPath
			if p != "" {
				p = "/" + p
			}
			buf.WriteString(`<a href="`)
			buf.WriteString(urlFn(p))
			buf.WriteByte('#')
			buf.WriteString(urlFn(a.Name))
			buf.WriteString(`">`)
			template.HTMLEscape(&buf, src[a.Pos:a.End])
			buf.WriteString(`</a>`)
		} else {
			buf.WriteString(`<span class="com">`)
			template.HTMLEscape(&buf, src[a.Pos:a.End])
			buf.WriteString(`</span>`)
		}
		last = a.End
	}
	template.HTMLEscape(&buf, src[last:])
	return buf.String()
}
Example #3
func ExampleScanner_Scan() {
	// src is the input that we want to tokenize.
	src := []byte("cos(x) + 1i*sin(x) // Euler")

	// Initialize the scanner.
	var s scanner.Scanner
	fset := token.NewFileSet()                      // positions are relative to fset
	file := fset.AddFile("", fset.Base(), len(src)) // register input "file"
	s.Init(file, src, nil /* no error handler */, scanner.ScanComments)

	// Repeated calls to Scan yield the token sequence found in the input.
	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		fmt.Printf("%s\t%s\t%q\n", fset.Position(pos), tok, lit)
	}

	// output:
	// 1:1	IDENT	"cos"
	// 1:4	(	""
	// 1:5	IDENT	"x"
	// 1:6	)	""
	// 1:8	+	""
	// 1:10	IMAG	"1i"
	// 1:12	*	""
	// 1:13	IDENT	"sin"
	// 1:16	(	""
	// 1:17	IDENT	"x"
	// 1:18	)	""
	// 1:20	;	"\n"
	// 1:20	COMMENT	"// Euler"
}
Example #4
File: cmd.go Project: richlowe/gcc
// newScanner creates a new scanner that scans the given input bytes.
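// (Note: scanner.ErrorVector is part of the pre-Go 1 scanner API and no
// longer exists in modern go/scanner.)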
func newScanner(input []byte) (*scanner.Scanner, *scanner.ErrorVector) {
	sc := new(scanner.Scanner)
	ev := new(scanner.ErrorVector)
	file := fset.AddFile("input", fset.Base(), len(input))
	sc.Init(file, input, ev, 0)
	return sc, ev
}
Example #5
// formatHistory reformats the provided Go source by collapsing all lines
// and adding semicolons where required, suitable for adding to line history.
func formatHistory(input []byte) string {
	var buf bytes.Buffer
	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(input))
	s.Init(file, input, nil, 0)
	pos, tok, lit := s.Scan()
	for tok != token.EOF {
		if int(pos)-1 > buf.Len() {
			n := int(pos) - 1 - buf.Len()
			buf.WriteString(strings.Repeat(" ", n))
		}
		var semicolon bool
		if tok == token.SEMICOLON {
			semicolon = true
		} else if lit != "" {
			buf.WriteString(lit)
		} else {
			buf.WriteString(tok.String())
		}
		pos, tok, lit = s.Scan()
		if semicolon {
			switch tok {
			case token.RBRACE, token.RPAREN, token.EOF:
			default:
				buf.WriteRune(';')
			}
		}
	}
	return buf.String()
}
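A minimal usage sketch, assuming formatHistory above is in scope together with fmt; two input lines collapse into one semicolon-joined line:

func main() {
	fmt.Println(formatHistory([]byte("a := 1\nb := 2")))
	// prints roughly: a := 1;b := 2
}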
Example #6
func (this *tokenizedGoContent) Update() {
	content := this.GetSources()[0].(caret.MultilineContentI)

	this.segments = nil

	src := []byte(content.Content())
	//src := []byte(w.Content.Content()[w.Content.Line(beginLineIndex).Start:w.Content.Line(endLineIndex).Start])

	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(src))
	s.Init(file, src, nil, scanner.ScanComments)

	// Repeated calls to Scan yield the token sequence found in the input.
	// TODO: Perhaps include whitespace in between tokens?
	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}

		offset := uint32(fset.Position(pos).Offset)

		this.segments = append(this.segments, tokLit{offset: offset, tok: tok, lit: lit})
	}

	// HACK: Fake last element.
	this.segments = append(this.segments, tokLit{offset: uint32(content.LenContent())})
}
Example #7
// PipeInit initializes a pipeline; input will be read from iReader.
func PipeInit(iReader io.Reader) *Pipe {
	input := make(chan Token)
	output := make(chan string)
	sync := make(chan interface{})
	p := &Pipe{input, output, sync}

	src, err := ioutil.ReadAll(iReader)
	if err != nil {
		panic(err)
	}

	fset := token.NewFileSet()
	file := fset.AddFile("<stdin>", fset.Base(), len(src))

	s := scanner.Scanner{}
	s.Init(file, src, nil, scanner.ScanComments)

	go func() {
		pos, tok, str := s.Scan()
		for tok != token.EOF {
			if len(str) == 0 {
				str = tok.String()
			}
			if tok == token.COMMENT {
				str = str + "\n"
			}
			input <- Token{fset.Position(pos), tok, str}
			<-sync // wait for sent token to land
			pos, tok, str = s.Scan()
		}
		close(input)
	}()

	return p
}
Example #8
// Scan scans the specified Go source file and returns a channel of *Token values.
//
// The EOF token is not returned, and the last token does not contain the "\n"
// character.
func Scan(name string, input []byte) chan *Token {
	var s scanner.Scanner

	fset := token.NewFileSet()
	file := fset.AddFile(name, fset.Base(), len(input))
	tokens := make(chan *Token)
	out := make(chan *Token)

	s.Init(file, input, nil, scanner.ScanComments)
	l := lexer{
		input:  string(input),
		file:   file,
		s:      s,
		tokens: tokens,
		out:    out,
	}

	// In the first stage we collect tokens, their literal code and their
	// offset in the source code.
	go l.run1()

	// In the second stage we add white space after each token.
	go l.run2()

	return out
}
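A hedged consumer sketch; it assumes the second stage closes the out channel once the input is exhausted, which is what lets the range loop terminate:

for t := range Scan("example.go", []byte("package main\n")) {
	fmt.Printf("%+v\n", t)
}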
Example #9
func injectImport(src string) string {
	const inj = `

import __yyfmt__ "fmt"
`
	fset := token.NewFileSet()
	file := fset.AddFile("", -1, len(src))
	var s scanner.Scanner
	s.Init(
		file,
		[]byte(src),
		nil,
		scanner.ScanComments,
	)
	for {
		switch _, tok, _ := s.Scan(); tok {
		case token.EOF:
			return inj + src
		case token.PACKAGE:
			s.Scan() // ident
			pos, _, _ := s.Scan()
			ofs := file.Offset(pos)
			return src[:ofs] + inj + src[ofs:]
		}
	}
}
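For illustration (based on a trace of the code above, so the exact blank lines are approximate): with a package clause present, the injection point is the automatically inserted semicolon right after "package <name>":

fmt.Println(injectImport("package main\n\nfunc main() {}\n"))
// Output (roughly):
// package main
//
// import __yyfmt__ "fmt"
//
//
// func main() {}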
Example #10
// expectedErrors collects the regular expressions of ERROR comments found
// in files and returns them as a map of error positions to error messages.
//
func expectedErrors(t *testing.T, filename string, src []byte) map[token.Pos]string {
	errors := make(map[token.Pos]string)

	var s scanner.Scanner
	// file was parsed already - do not add it again to the file
	// set otherwise the position information returned here will
	// not match the position information collected by the parser
	s.Init(getFile(filename), src, nil, scanner.ScanComments)
	var prev token.Pos // position of last non-comment, non-semicolon token

	for {
		pos, tok, lit := s.Scan()
		switch tok {
		case token.EOF:
			return errors
		case token.COMMENT:
			s := errRx.FindStringSubmatch(lit)
			if len(s) == 2 {
				errors[prev] = string(s[1])
			}
		default:
			prev = pos
		}
	}

	panic("unreachable")
}
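The ERROR comments it collects sit on the same line as the offending token in the test sources; an illustrative line with a hypothetical message (the actual errRx pattern is defined elsewhere in the test file):

var x int = "foo" /* ERROR "mismatched types" */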
Example #11
func main2() {

	filename := "/Users/obaskakov/IdeaProjects/goCrazy/code.scm"
	src, err := ioutil.ReadFile(filename)
	check(err)

	//	src := []byte("cos(x) + 1i*sin(x) // Euler")

	// Initialize the scanner.
	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(src)) // register input "file"
	s.Init(file, src, nil /* no error handler */, scanner.ScanComments)

	// Repeated calls to Scan yield the token sequence found in the input.
	for {
		_, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		fmt.Printf("\t%s    %q\n", tok, lit)
		//		fmt.Printf("%s\t%s\t%q\n", fset.Position(pos), tok, lit)
	}

}
Example #12
// expectedErrors collects the regular expressions of ERROR comments found
// in files and returns them as a map of error positions to error messages.
//
func expectedErrors(t *testing.T, testname string, files map[string]*ast.File) map[token.Pos]string {
	errors := make(map[token.Pos]string)
	for filename := range files {
		src, err := ioutil.ReadFile(filename)
		if err != nil {
			t.Fatalf("%s: could not read %s", testname, filename)
		}

		var s scanner.Scanner
		// file was parsed already - do not add it again to the file
		// set otherwise the position information returned here will
		// not match the position information collected by the parser
		s.Init(getFile(filename), src, nil, scanner.ScanComments)
		var prev token.Pos // position of last non-comment token

	scanFile:
		for {
			pos, tok, lit := s.Scan()
			switch tok {
			case token.EOF:
				break scanFile
			case token.COMMENT:
				s := errRx.FindStringSubmatch(lit)
				if len(s) == 2 {
					errors[prev] = string(s[1])
				}
			default:
				prev = pos
			}
		}
	}
	return errors
}
Example #13
func new_token_iterator(src []byte, cursor int) token_iterator {
	tokens := make([]token_item, 0, 1000)
	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(src))
	s.Init(file, src, nil, 0)
	token_index := 0
	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		off := fset.Position(pos).Offset
		tokens = append(tokens, token_item{
			off: off,
			tok: tok,
			lit: lit,
		})
		if cursor > off {
			token_index++
		}
	}
	return token_iterator{
		tokens:      tokens,
		token_index: token_index,
	}
}
Example #14
// expectedErrors collects the regular expressions of ERROR comments
// found in the package files of pkg and returns them in sorted order
// (by filename and position).
func expectedErrors(t *testing.T, pkg *ast.Package) (list scanner.ErrorList) {
	// scan all package files
	for filename := range pkg.Files {
		src, err := ioutil.ReadFile(filename)
		if err != nil {
			t.Fatalf("expectedErrors(%s): %v", pkg.Name, err)
		}

		var s scanner.Scanner
		file := fset.AddFile(filename, fset.Base(), len(src))
		s.Init(file, src, nil, scanner.ScanComments)
		var prev token.Pos // position of last non-comment token
	loop:
		for {
			pos, tok, lit := s.Scan()
			switch tok {
			case token.EOF:
				break loop
			case token.COMMENT:
				s := errRx.FindStringSubmatch(lit)
				if len(s) == 2 {
					list = append(list, &scanner.Error{fset.Position(prev), string(s[1])})
				}
			default:
				prev = pos
			}
		}
	}
	sort.Sort(list) // multiple files may not be sorted
	return
}
Example #15
func newTokenIterator(src []byte, cursor int) (tokenIterator, int) {
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(src))
	cursorPos := file.Pos(cursor)

	var s scanner.Scanner
	s.Init(file, src, nil, 0)
	tokens := make([]tokenItem, 0, 1000)
	lastPos := token.NoPos
	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF || pos >= cursorPos {
			break
		}
		tokens = append(tokens, tokenItem{
			tok: tok,
			lit: lit,
		})
		lastPos = pos
	}
	return tokenIterator{
		tokens: tokens,
		pos:    len(tokens) - 1,
	}, int(cursorPos - lastPos)
}
Example #16
func (this *highlightedGoContent) Update() {
	content := this.GetSources()[0].(caret.MultilineContentI)

	this.segments = nil

	src := []byte(content.Content())
	//src := []byte(w.Content.Content()[w.Content.Line(beginLineIndex).Start:w.Content.Line(endLineIndex).Start])

	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(src))
	s.Init(file, src, nil, scanner.ScanComments)

	// Repeated calls to Scan yield the token sequence found in the input.
	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}

		offset := uint32(fset.Position(pos).Offset)
		kind := highlight_go.TokenKind(tok, lit)
		s := highlightGoStyle[kind]
		this.segments = append(this.segments, highlightSegment{offset: offset, color: s.color, fontOptions: s.fontOptions})
	}

	// HACK: Fake last element.
	this.segments = append(this.segments, highlightSegment{offset: uint32(content.LenContent())})
}
Example #17
// firstComment returns the contents of the first comment in
// the given file, assuming there's one within the first KB.
func firstComment(filename string) string {
	f, err := os.Open(filename)
	if err != nil {
		return ""
	}
	defer f.Close()

	var src [1 << 10]byte // read at most 1KB
	n, _ := f.Read(src[:])

	var s scanner.Scanner
	s.Init(fset.AddFile("", fset.Base(), n), src[:n], nil, scanner.ScanComments)
	for {
		_, tok, lit := s.Scan()
		switch tok {
		case token.COMMENT:
			// remove trailing */ of multi-line comment
			if lit[1] == '*' {
				lit = lit[:len(lit)-2]
			}
			return strings.TrimSpace(lit[2:])
		case token.EOF:
			return ""
		}
	}
}
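A minimal usage sketch; note that firstComment relies on a package-level fset, as in the example above:

if doc := firstComment("main.go"); doc != "" {
	fmt.Println("leading comment:", doc)
}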
Example #18
// newScanner creates a new scanner that scans the given input bytes.
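// (This variant predates token.FileSet: the filename-string Init and
// scanner.ErrorVector were removed before Go 1.)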
func newScanner(input []byte) (*scanner.Scanner, *scanner.ErrorVector) {
	sc := new(scanner.Scanner)
	ev := new(scanner.ErrorVector)
	sc.Init("input", input, ev, 0)

	return sc, ev
}
Example #19
func initScanner(in string) scanner.Scanner {
	var s scanner.Scanner
	src := []byte(in)
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(src))
	s.Init(file, src, nil, 0)
	return s
}
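A usage sketch, assuming initScanner above plus fmt and go/token in scope; literals carry their text in lit, while operators report an empty lit:

s := initScanner("x + y")
for {
	_, tok, lit := s.Scan()
	if tok == token.EOF {
		break
	}
	fmt.Println(tok, lit)
}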
Example #20
func isIdentifier(s string) bool {
	var S scanner.Scanner
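	// Pre-Go 1 API: Init took a filename rather than a *token.File, and
	// strings.Bytes predates the []byte(s) conversion.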
	S.Init("", strings.Bytes(s), nil, 0)
	if _, tok, _ := S.Scan(); tok == token.IDENT {
		_, tok, _ := S.Scan()
		return tok == token.EOF
	}
	return false
}
Example #21
File: code.go Project: golang/gddo
func (b *builder) printExample(e *doc.Example) (code Code, output string) {
	output = e.Output

	b.buf = b.buf[:0]
	var n interface{}
	if _, ok := e.Code.(*ast.File); ok {
		n = e.Play
	} else {
		n = &printer.CommentedNode{Node: e.Code, Comments: e.Comments}
	}
	err := (&printer.Config{Mode: printer.UseSpaces, Tabwidth: 4}).Fprint(sliceWriter{&b.buf}, b.fset, n)
	if err != nil {
		return Code{Text: err.Error()}, output
	}

	// additional formatting if this is a function body
	if i := len(b.buf); i >= 2 && b.buf[0] == '{' && b.buf[i-1] == '}' {
		// remove surrounding braces
		b.buf = b.buf[1 : i-1]
		// unindent
		b.buf = bytes.Replace(b.buf, []byte("\n    "), []byte("\n"), -1)
		// remove output comment
		if j := exampleOutputRx.FindIndex(b.buf); j != nil {
			b.buf = bytes.TrimSpace(b.buf[:j[0]])
		}
	} else {
		// drop output, as the output comment will appear in the code
		output = ""
	}

	var annotations []Annotation
	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(b.buf))
	s.Init(file, b.buf, nil, scanner.ScanComments)
	prevTok := token.ILLEGAL
scanLoop:
	for {
		pos, tok, lit := s.Scan()
		switch tok {
		case token.EOF:
			break scanLoop
		case token.COMMENT:
			p := file.Offset(pos)
			e := p + len(lit)
			if prevTok == token.COMMENT {
				annotations[len(annotations)-1].End = int32(e)
			} else {
				annotations = append(annotations, Annotation{Kind: CommentAnnotation, Pos: int32(p), End: int32(e)})
			}
		}
		prevTok = tok
	}

	return Code{Text: string(b.buf), Annotations: annotations}, output
}
Example #22
func isIdentifier(s string) bool {
	var S scanner.Scanner
	fset := token.NewFileSet()
	S.Init(fset.AddFile("", fset.Base(), len(s)), []byte(s), nil, 0)
	if _, tok, _ := S.Scan(); tok == token.IDENT {
		_, tok, _ := S.Scan()
		return tok == token.EOF
	}
	return false
}
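On Go 1.13 and later, go/token can answer the same question directly, without constructing a scanner; a minimal sketch:

package main

import (
	"fmt"
	"go/token"
)

func main() {
	fmt.Println(token.IsIdentifier("foo"))  // true
	fmt.Println(token.IsIdentifier("for"))  // false: keywords are not identifiers
	fmt.Println(token.IsIdentifier("1abc")) // false
}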
Example #23
// errMap collects the regular expressions of ERROR comments found
// in files and returns them as a map of error positions to error messages.
//
func errMap(t *testing.T, testname string, files []*ast.File) map[string][]string {
	// map of position strings to lists of error message patterns
	errmap := make(map[string][]string)

	for _, file := range files {
		filename := fset.Position(file.Package).Filename
		src, err := ioutil.ReadFile(filename)
		if err != nil {
			t.Fatalf("%s: could not read %s", testname, filename)
		}

		var s scanner.Scanner
		s.Init(fset.AddFile(filename, -1, len(src)), src, nil, scanner.ScanComments)
		var prev token.Pos // position of last non-comment, non-semicolon token
		var here token.Pos // position immediately after the token at position prev

	scanFile:
		for {
			pos, tok, lit := s.Scan()
			switch tok {
			case token.EOF:
				break scanFile
			case token.COMMENT:
				if lit[1] == '*' {
					lit = lit[:len(lit)-2] // strip trailing */
				}
				if s := errRx.FindStringSubmatch(lit[2:]); len(s) == 3 {
					pos := prev
					if s[1] == "HERE" {
						pos = here
					}
					p := fset.Position(pos).String()
					errmap[p] = append(errmap[p], strings.TrimSpace(s[2]))
				}
			case token.SEMICOLON:
				// ignore automatically inserted semicolon
				if lit == "\n" {
					continue scanFile
				}
				fallthrough
			default:
				prev = pos
				var l int // token length
				if tok.IsLiteral() {
					l = len(lit)
				} else {
					l = len(tok.String())
				}
				here = prev + token.Pos(l)
			}
		}
	}

	return errmap
}
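Illustrative ERROR comments in the style this function parses (hypothetical messages; the actual errRx is defined elsewhere in the test file). A plain ERROR attaches to the preceding token, while ERROR HERE attaches to the position immediately after it:

_ = undeclared /* ERROR "undeclared name" */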
Example #24
func New(fname string, src []byte, wr io.Writer) *Temple {
	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile(fname, fset.Base(), len(src))
	s.Init(file, src, nil, scanner.ScanComments)
	return &Temple{
		scn:  &s,
		fset: fset,
		prnt: &printer_{wr: wr},
	}
}
Example #25
File: code.go Project: AlekSi/gddo
func (b *builder) printDecl(decl ast.Decl) (d Code) {
	v := &annotationVisitor{pathIndex: make(map[string]int)}
	ast.Walk(v, decl)
	b.buf = b.buf[:0]
	err := (&printer.Config{Mode: printer.UseSpaces, Tabwidth: 4}).Fprint(sliceWriter{&b.buf}, b.fset, decl)
	if err != nil {
		return Code{Text: err.Error()}
	}

	var annotations []Annotation
	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(b.buf))
	s.Init(file, b.buf, nil, scanner.ScanComments)
loop:
	for {
		pos, tok, lit := s.Scan()
		switch tok {
		case token.EOF:
			break loop
		case token.COMMENT:
			p := file.Offset(pos)
			e := p + len(lit)
			annotations = append(annotations, Annotation{Kind: CommentAnnotation, Pos: int32(p), End: int32(e)})
		case token.IDENT:
			if len(v.annotations) == 0 {
				// Oops!
				break loop
			}
			annotation := v.annotations[0]
			v.annotations = v.annotations[1:]
			if annotation.Kind == -1 {
				continue
			}
			p := file.Offset(pos)
			e := p + len(lit)
			annotation.Pos = int32(p)
			annotation.End = int32(e)
			if len(annotations) > 0 && annotation.Kind == ExportLinkAnnotation {
				prev := annotations[len(annotations)-1]
				if prev.Kind == PackageLinkAnnotation &&
					prev.PathIndex == annotation.PathIndex &&
					prev.End+1 == annotation.Pos {
					// merge with previous
					annotation.Pos = prev.Pos
					annotations[len(annotations)-1] = annotation
					continue loop
				}
			}
			annotations = append(annotations, annotation)
		}
	}
	return Code{Text: string(b.buf), Annotations: annotations, Paths: v.paths}
}
Example #26
func main() {

	// read
	flag.Parse()
	filename := flag.Arg(0)
	fptr, err := os.Open(filename)
	if err != nil {
		panic(err)
	}
	frdr := bufio.NewReaderSize(fptr, 0x1000000)
	src, err := frdr.ReadSlice('\x7F')
	if err != nil {
		if err != io.EOF {
			panic(err)
		}
	}

	// scan
	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile(filename, fset.Base(), len(src))
	s.Init(file, src, nil, scanner.ScanComments)

	for {
		_, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}

		switch tok {
		case token.SEMICOLON:
			fmt.Print(";")

		case token.COMMENT:
			continue

		case token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
			fmt.Printf("%s ", lit)
		default:
			fmt.Printf("%s ", tok.String())
		}
	}
}
Example #27
func Print(src []byte, w io.Writer, p syntaxhighlight.Printer) error {
	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(src))
	s.Init(file, src, nil, scanner.ScanComments)

	var lastOffset int

	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}

		var tokString string
		if lit != "" {
			tokString = lit
		} else {
			tokString = tok.String()
		}

		// TODO: Clean this up.
		//if tok == token.SEMICOLON {
		if tok == token.SEMICOLON && lit == "\n" {
			continue
		}

		// Whitespace between previous and current tokens.
		offset := fset.Position(pos).Offset
		if whitespace := string(src[lastOffset:offset]); whitespace != "" {
			err := p.Print(w, syntaxhighlight.Whitespace, whitespace)
			if err != nil {
				return err
			}
		}
		lastOffset = offset + len(tokString)

		err := p.Print(w, TokenKind(tok, lit), tokString)
		if err != nil {
			return err
		}
	}

	// Print final whitespace after the last token.
	if whitespace := string(src[lastOffset:]); whitespace != "" {
		err := p.Print(w, syntaxhighlight.Whitespace, whitespace)
		if err != nil {
			return err
		}
	}

	return nil
}
Example #28
func (this *go_highlighter) highlight_file() {
	var s scanner.Scanner
	fset := token.NewFileSet()
	s.Init(fset.AddFile(current_file, fset.Base(), len(this.data)), this.data, nil, 0)
	for {
		pos, tok, str := s.Scan()
		if tok == token.EOF {
			break
		}

		if tok.IsKeyword() {
			this.highlight("keyword", pos, pos+token.Pos(len(str)))
		}
	}

	ast.Inspect(this.file, func(node ast.Node) bool {
		switch n := node.(type) {
		case *ast.BasicLit:
			switch n.Kind {
			case token.STRING, token.CHAR:
				this.highlight("string", n.Pos(), n.End())
			case token.INT, token.FLOAT, token.IMAG:
				this.highlight("number", n.Pos(), n.End())
			}
		case *ast.Ident:
			if tag, ok := go_highlighter_idents[n.Name]; ok {
				this.highlight(tag, n.Pos(), n.End())
				break
			}

			if n.Obj != nil && n.Obj.Pos() == n.Pos() {
				if n.Obj.Kind == ast.Fun {
					this.highlight("function", n.Pos(), n.End())
				} else {
					this.highlight("declaration", n.Pos(), n.End())
				}
			}
		case *ast.CallExpr:
			switch f := n.Fun.(type) {
			case *ast.Ident:
				this.highlight("funcall", f.Pos(), f.End())
			case *ast.SelectorExpr:
				this.highlight("funcall", f.Sel.Pos(), f.Sel.End())
			}
		}

		return true
	})

	for _, cg := range this.file.Comments {
		this.highlight("comment", cg.Pos(), cg.End())
	}
}
Example #29
File: code.go Project: golang/gddo
func (b *builder) printDecl(decl ast.Decl) (d Code) {
	v := &declVisitor{pathIndex: make(map[string]int)}
	ast.Walk(v, decl)
	b.buf = b.buf[:0]
	err := (&printer.Config{Mode: printer.UseSpaces, Tabwidth: 4}).Fprint(
		sliceWriter{&b.buf},
		b.fset,
		&printer.CommentedNode{Node: decl, Comments: v.comments})
	if err != nil {
		return Code{Text: err.Error()}
	}

	var annotations []Annotation
	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(b.buf))
	s.Init(file, b.buf, nil, scanner.ScanComments)
	prevTok := token.ILLEGAL
loop:
	for {
		pos, tok, lit := s.Scan()
		switch tok {
		case token.EOF:
			break loop
		case token.COMMENT:
			p := file.Offset(pos)
			e := p + len(lit)
			if prevTok == token.COMMENT {
				annotations[len(annotations)-1].End = int32(e)
			} else {
				annotations = append(annotations, Annotation{Kind: CommentAnnotation, Pos: int32(p), End: int32(e)})
			}
		case token.IDENT:
			if len(v.annotations) == 0 {
				// Oops!
				break loop
			}
			annotation := v.annotations[0]
			v.annotations = v.annotations[1:]
			if annotation.Kind == -1 {
				continue
			}
			p := file.Offset(pos)
			e := p + len(lit)
			annotation.Pos = int32(p)
			annotation.End = int32(e)
			annotations = append(annotations, annotation)
		}
		prevTok = tok
	}
	return Code{Text: string(b.buf), Annotations: annotations, Paths: v.paths}
}
Example #30
// Benchmarks the lexer from the standard library for comparison.
func BenchmarkStandardLibraryLexer(b *testing.B) {
	var lex scanner.Scanner
	src := []byte(test.Prog)
	fileSet := token.NewFileSet()
	file := fileSet.AddFile("", fileSet.Base(), len(src))
	for i := 0; i < b.N; i++ {
		lex.Init(file, src, nil, scanner.ScanComments)
		tok := token.ILLEGAL
		for tok != token.EOF {
			_, tok, _ = lex.Scan()
		}
	}
}