func (parser *Parser) parseFieldDecl(table *ast.Table) bool { token := parser.peek() if token.Type != lexer.TokenType('}') { for parser.parseAnnotation() { } typeDecl := parser.expectTypeDecl("expect table(%s) field type declare", table) tokenName := parser.expectf(lexer.TokenID, "expect table(%s) field name", table) parser.expectf(lexer.TokenType(';'), "expect table(%s) field end tag ;", table) name := tokenName.Value.(string) field, ok := table.NewField(name, typeDecl) if !ok { parser.errorf(token.Start, "duplicate table(%s) field(%s)", table, name) } parser.attachAnnotation(field) _setNodePos(field, token.Start, tokenName.End) parser.attachComment(field) return true } return false }
func (parser *Parser) parseSeq(component ast.Type) (typeDecl ast.Type, ok bool) { token := parser.peek() if token.Type != lexer.TokenType('[') { return nil, false } ok = true parser.next() token = parser.peek() if token.Type == lexer.TokenINT { parser.next() typeDecl = ast.NewSeq(component, int(token.Value.(int64))) } else { typeDecl = ast.NewSeq(component, -1) } end := parser.expectf(lexer.TokenType(']'), "seq type must end with ]").End start, _ := Pos(component) _setNodePos(typeDecl, start, end) return }
func (parser *Parser) parseExceptions(method *ast.Method) { token := parser.peek() if token.Type != lexer.KeyThrows { return } parser.next() parser.expectf(lexer.TokenType('('), "method exception table must start with (") for { typeDecl := parser.expectTypeDecl("expect exception type") exception := method.NewException(typeDecl) start, end := Pos(typeDecl) _setNodePos(exception, start, end) token := parser.peek() if token.Type != lexer.TokenType(',') { break } parser.next() } parser.expectf(lexer.TokenType(')'), "method exception table must end with )") }
func (parser *Parser) expectContract(fmtstring string, args ...interface{}) *ast.Contract { msg := fmt.Sprintf(fmtstring, args...) start := parser.expectf(lexer.KeyContract, "expect keyword contract").Start token := parser.expectf(lexer.TokenID, "expect contract name") name := token.Value.(string) parser.expectf(lexer.TokenType('{'), "contract body must start with {") contract, ok := parser.script.NewContract(name) parser.D("parse contract %s", name) parser.attachAnnotation(contract) if !ok { parser.errorf(token.Start, "%s\n\tduplicate contract(%s) defined", msg, name) } for parser.parseMethodDecl(contract.(*ast.Contract)) { } end := parser.expectf(lexer.TokenType('}'), "contract body must end with }").End _setNodePos(contract, start, end) parser.attachComment(contract) parser.D("parse contract %s -- success", name) return contract.(*ast.Contract) }
func (parser *Parser) parseImport() bool { for parser.parseComment() { } token := parser.peek() //the import instructions must be typed at the beginning of script if token.Type != lexer.KeyImport { return false } // move next token parser.next() usingNamePath, start, end := parser.expectFullName("expect using name path") using := parser.script.Using(usingNamePath) parser.expectf(lexer.TokenType(';'), "import name path must end with ';'") parser.D("parse using :%s", using) _setNodePos(using, start, end) parser.attachComment(using) return true }
func (parser *Parser) expectFullName(fmtstring string, args ...interface{}) (string, lexer.Position, lexer.Position) { msg := fmt.Sprintf(fmtstring, args...) var buff bytes.Buffer token := parser.expectf(lexer.TokenID, msg) buff.WriteString(token.Value.(string)) start := token.Start end := token.End for { token = parser.peek() if token.Type != lexer.TokenType('.') { break } buff.WriteRune('.') parser.next() token = parser.expectf(lexer.TokenID, msg) buff.WriteString(token.Value.(string)) end = token.End } return buff.String(), start, end }
func (parser *Parser) parseParams(method *ast.Method) { parser.expectf(lexer.TokenType('('), "method param table must start with (") for { token := parser.peek() if token.Type == lexer.TokenType(')') { break } for parser.parseAnnotation() { } typeDecl := parser.expectTypeDecl("expect method param type declare") nameToken := parser.expectf(lexer.TokenID, "expect method param name") name := nameToken.Value.(string) param, ok := method.NewParam(name, typeDecl) parser.attachAnnotation(param) if !ok { parser.errorf(token.Start, "duplicate method(%s) param(%s)", method, name) } _setNodePos(param, token.Start, nameToken.End) token = parser.peek() if token.Type != lexer.TokenType(',') { break } parser.next() } parser.expectf(lexer.TokenType(')'), "method param table must end with )") }
func (parser *Parser) parseMethodDecl(contract *ast.Contract) bool { for parser.parseAnnotation() { } token := parser.peek() if token.Type != lexer.TokenType('}') { returnVal := parser.expectTypeDecl("expect method return type") tokenName := parser.expectf(lexer.TokenID, "expect method name") name := tokenName.Value.(string) parser.D("parse method %s", name) method, ok := contract.NewMethod(name) if !ok { parser.errorf(token.Start, "duplicate contract(%s) field(%s)", contract, name) } parser.attachAnnotation(method) method.Return = returnVal parser.parseParams(method) parser.parseExceptions(method) end := parser.expectf(lexer.TokenType(';'), "expect method name").End _setNodePos(method, token.Start, end) parser.attachComment(method) return true } return false }
func (parser *Parser) parseAnnotation() bool { for parser.parseComment() { } token := parser.peek() if token.Type != lexer.TokenType('@') { return false } start := token.Start parser.next() name, start, end := parser.expectFullName("expect annotation name") annotation := ast.NewAnnotation(name) parser.D("annotation [%s]", name) token = parser.peek() if token.Type == lexer.TokenType('(') { args := parser.expectArgsTable("expect annotation arg table") _, end = Pos(args) annotation.Args = args } _setNodePos(annotation, start, end) parser.annotationStack = append(parser.annotationStack, annotation) return true }
func (parser *Parser) expectTable(fmtstring string, args ...interface{}) *ast.Table { msg := fmt.Sprintf(fmtstring, args...) start := parser.expectf(lexer.KeyTable, "expect keyword table").Start token := parser.expectf(lexer.TokenID, "expect table name") name := token.Value.(string) parser.expectf(lexer.TokenType('{'), "table body must start with {") table, ok := parser.script.NewTable(name) parser.attachAnnotation(table) parser.D("parse table %s", name) if !ok { parser.errorf(token.Start, "%s\n\tduplicate table(%s) defined", msg, name) } for parser.parseFieldDecl(table.(*ast.Table)) { } end := parser.expectf(lexer.TokenType('}'), "table body must end with }").End _setNodePos(table, start, end) parser.attachComment(table) parser.D("parse table %s -- success", name) return table.(*ast.Table) }
func (parser *Parser) parsePackage() { parser.D("[] parse script's package line") for parser.parseComment() { } parser.expectf(lexer.KeyPackage, "script must start with package keyword") parser.script.Package, _, _ = parser.expectFullName("expect script's package name") parser.expectf(lexer.TokenType(';'), "package name must end with ';'") parser.D("package [%s]", parser.script.Package) parser.D("parse script's package line -- success") }
// expectExpr parses a constant expression: an int/float/string/boolean
// literal, a (dotted) constant reference, a table-instance construction
// "Name(args)", or a '|' / '&' binary combination of those. On an
// unexpected token it reports msg at that token's position, skips the
// token, and retries the loop.
func (parser *Parser) expectExpr(fmtStr string, args ...interface{}) ast.Expr {
	msg := fmt.Sprintf(fmtStr, args...)
	for {
		token := parser.peek()
		var expr ast.Expr
		switch token.Type {
		case lexer.TokenINT:
			// Integer literals are widened to the Numeric node's float64.
			parser.next()
			expr = ast.NewNumeric(float64(token.Value.(int64)))
			_setNodePos(expr, token.Start, token.End)
		case lexer.TokenFLOAT:
			parser.next()
			expr = ast.NewNumeric(token.Value.(float64))
			_setNodePos(expr, token.Start, token.End)
		case lexer.TokenSTRING:
			parser.next()
			expr = ast.NewString(token.Value.(string))
			_setNodePos(expr, token.Start, token.End)
		case lexer.TokenTrue:
			parser.next()
			expr = ast.NewBoolean(true)
			_setNodePos(expr, token.Start, token.End)
		case lexer.TokenFalse:
			parser.next()
			expr = ast.NewBoolean(false)
			_setNodePos(expr, token.Start, token.End)
		case lexer.TokenID:
			// Either a constant reference or, when '(' follows, a table
			// instance with an init args table. The NewObj case returns
			// immediately and is never the lhs of a binary op below.
			name, start, end := parser.expectFullName("expect constant reference or table instance")
			token = parser.peek()
			if token.Type == lexer.TokenType('(') {
				initargs := parser.expectArgsTable("expect table instance init args table")
				newObj := ast.NewNewObj(name, initargs)
				_setNodePos(newObj, start, end)
				return newObj
			}
			expr = ast.NewConstantRef(name)
			_setNodePos(expr, start, end)
		}
		if expr != nil {
			token := parser.peek()
			switch token.Type {
			case lexer.OpBitOr, lexer.OpBitAnd:
				// Right-recursive parse: "a | b | c" groups as a | (b | c).
				parser.next()
				rhs := parser.expectExpr("expect binary op(%s) rhs", token.Type)
				binaryOp := ast.NewBinaryOp(token.Type, expr, rhs)
				start, _ := Pos(expr)
				// NOTE(review): end is read from binaryOp before its position
				// has been set — presumably this should be Pos(rhs); confirm
				// what Pos returns for a freshly created node.
				_, end := Pos(binaryOp)
				_setNodePos(binaryOp, start, end)
				expr = binaryOp
			}
			return expr
		}
		// Unrecognized token: report the caller's message, skip it, retry.
		parser.errorf(token.Start, msg)
		parser.next()
	}
}
func (parser *Parser) parseArgsTable() *ast.ArgsTable { token := parser.peek() if token.Type != lexer.TokenType('(') { return nil } parser.next() token = parser.peek() if token.Value == lexer.TokenType(')') { return ast.NewArgsTable(true) } token = parser.peek() start := token.Start end := token.End var args *ast.ArgsTable // this is named args table if token.Type == lexer.TokenLABEL { args = ast.NewArgsTable(true) for { token := parser.expectf(lexer.TokenLABEL, "expect arg label") label := token.Value.(string) arg := parser.expectArg("expect label(%s) value", label) parser.D("lable:%s", label) namedArg := ast.NewNamedArg(label, arg) _, end = Pos(arg) _setNodePos(namedArg, token.Start, end) args.Append(namedArg) if parser.peek().Type != lexer.TokenType(',') { break } parser.next() } } else { args = ast.NewArgsTable(false) for { arg := parser.expectArg("expect arg") args.Append(arg) if parser.peek().Type != lexer.TokenType(',') { break } parser.next() } } _setNodePos(args, start, end) parser.expectf(lexer.TokenType(')'), "arg table must end with ')'") return args }
func (parser *Parser) expectEnum(fmtstring string, args ...interface{}) *ast.Enum { msg := fmt.Sprintf(fmtstring, args...) start := parser.expectf(lexer.KeyEnum, "expect keyword enum").Start token := parser.expectf(lexer.TokenID, "expect contract name") name := token.Value.(string) enum, ok := parser.script.NewEnum(name) parser.D("parse enum %s", name) parser.attachAnnotation(enum) if !ok { parser.errorf(token.Start, "%s\n\tduplicate enum(%s) defined", msg, name) } parser.expectf(lexer.TokenType('{'), "contract body must start with {") for { for parser.parseComment() { } token = parser.peek() if token.Type == lexer.TokenType('}') { break } constantName := parser.expectf(lexer.TokenID, "expect enum constant name") name = constantName.Value.(string) constant, ok := enum.(*ast.Enum).NewConstant(name) if !ok { parser.errorf(token.Start, "%s\n\tduplicate enum(%s) contract(%s) defined", msg, enum, name) } end := constantName.End token = parser.peek() if token.Type == lexer.TokenType('(') { parser.next() val := int32(parser.expectf(lexer.TokenINT, "expect constant value").Value.(int64)) constant.Value = val end = parser.expectf(lexer.TokenType(')'), "enum constant val must end with )").End } _setNodePos(constant, constantName.Start, end) parser.attachComment(constant) token = parser.peek() if token.Type != lexer.TokenType(',') { break } parser.next() } end := parser.expectf(lexer.TokenType('}'), "contract body must end with }").End _setNodePos(enum, start, end) parser.attachComment(enum) return enum.(*ast.Enum) }