// intersperseComments consumes all comments that appear before the next token
// tok and prints them together with the buffered whitespace (i.e., the whitespace
// that needs to be written before the next token). A heuristic is used to mix
// the comments and whitespace. intersperseComments returns true if a pending
// formfeed was dropped from the whitespace buffer.
//
func (p *printer) intersperseComments(next token.Position, tok token.Token) (droppedFF bool) {
	var last *ast.Comment
	for ; p.commentBefore(next); p.cindex++ {
		for _, c := range p.comments[p.cindex].List {
			p.writeCommentPrefix(p.fset.Position(c.Pos()), next, last, tok.IsKeyword())
			p.writeComment(c)
			last = c
		}
	}

	if last != nil {
		if last.Text[1] == '*' && p.fset.Position(last.Pos()).Line == next.Line {
			// the last comment is a /*-style comment and the next item
			// follows on the same line: separate with an extra blank
			p.write([]byte{' '})
		}
		// ensure that there is a line break after a //-style comment,
		// before a closing '}' unless explicitly disabled, or at eof
		needsLinebreak := last.Text[1] == '/' ||
			tok == token.RBRACE && p.mode&noExtraLinebreak == 0 ||
			tok == token.EOF
		return p.writeCommentSuffix(needsLinebreak)
	}

	// no comment was written - we should never reach here since
	// intersperseComments should not be called in that case
	p.internalError("intersperseComments called without pending comments")
	return false
}
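// The function above is internal to go/printer; its comment-interspersing
// behavior is exercised through the package's public entry points. A minimal
// sketch, assuming only the standard go/parser, go/printer, and go/token APIs:
// parse a snippet with parser.ParseComments so the comments stay in the AST,
// then reprint the file, which routes each comment through the printer's
// comment/whitespace interspersing logic.
package main

import (
	"go/parser"
	"go/printer"
	"go/token"
	"os"
)

func main() {
	const src = `package demo

// Answer is documented by a //-style comment.
const Answer = 42 /* a trailing general comment */
`
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "demo.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	// Printing an *ast.File gives the printer access to the file's comments.
	if err := printer.Fprint(os.Stdout, fset, file); err != nil {
		panic(err)
	}
}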
func (p *parser) expect(tok token.Token) token.Pos {
	pos := p.pos
	if p.tok != tok {
		p.errorExpected(pos, "'"+tok.String()+"'")
	}
	p.next() // make progress
	return pos
}
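// expect is the parser's error-recovery primitive: it records the current
// position, reports an error if the token is not the expected one, and
// advances unconditionally so the parser always makes progress. A hedged
// sketch of the observable effect, using only the public go/parser API: a
// single ParseFile call on malformed input can report several "expected ..."
// errors instead of stopping at the first one.
package main

import (
	"fmt"
	"go/parser"
	"go/token"
)

func main() {
	// Missing ')' after the parameter list and missing '}' at the end.
	const src = "package demo\n\nfunc f(x int {\n\treturn x\n"
	fset := token.NewFileSet()
	_, err := parser.ParseFile(fset, "bad.go", src, 0)
	fmt.Println(err) // an error list, typically one entry per failed expectation
}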
func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
	if p.trace {
		defer un(trace(p, "GenDecl("+keyword.String()+")"))
	}

	doc := p.leadComment
	pos := p.expect(keyword)
	var lparen, rparen token.Pos
	var list []ast.Spec
	if p.tok == token.LPAREN {
		lparen = p.pos
		p.next()
		for p.tok != token.RPAREN && p.tok != token.EOF {
			list = append(list, f(p, p.leadComment))
		}
		rparen = p.expect(token.RPAREN)
		p.expectSemi()
	} else {
		list = append(list, f(p, nil))
	}

	return &ast.GenDecl{Doc: doc, TokPos: pos, Tok: keyword, Lparen: lparen, Specs: list, Rparen: rparen}
}
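// A small sketch of what parseGenDecl builds, assuming only the public
// go/parser and go/ast APIs: parse a parenthesized const declaration and
// inspect the resulting *ast.GenDecl, whose Tok, Lparen, Specs, and Rparen
// fields correspond to the values assembled above (Lparen/Rparen are only
// set for the grouped form).
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

func main() {
	const src = `package demo

const (
	a = 1
	b = 2
)
`
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}
	decl := file.Decls[0].(*ast.GenDecl)
	fmt.Println(decl.Tok)                   // const
	fmt.Println(decl.Lparen.IsValid())      // true for the parenthesized form
	fmt.Println(len(decl.Specs))            // 2: one *ast.ValueSpec per line
	fmt.Println(fset.Position(decl.Rparen)) // position of the closing ')'
}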
func tokenclass(tok token.Token) int {
	switch {
	case tok.IsLiteral():
		return literal
	case tok.IsOperator():
		return operator
	case tok.IsKeyword():
		return keyword
	}
	return special
}
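// tokenclass depends only on token.Token's IsLiteral, IsOperator, and
// IsKeyword predicates. A hedged, self-contained sketch of driving that
// classification from a real token stream, using only the public go/scanner
// and go/token APIs (the source snippet is arbitrary example input):
package main

import (
	"fmt"
	"go/scanner"
	"go/token"
)

func main() {
	src := []byte("if x == 42 { return }")
	fset := token.NewFileSet()
	file := fset.AddFile("example.go", fset.Base(), len(src))

	var s scanner.Scanner
	s.Init(file, src, nil, 0)
	for {
		_, tok, _ := s.Scan()
		if tok == token.EOF {
			break
		}
		fmt.Printf("%-8v literal=%v operator=%v keyword=%v\n",
			tok, tok.IsLiteral(), tok.IsOperator(), tok.IsKeyword())
	}
}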