// parseMatchStat parses a match statement. A hedged sketch of the surface
// syntax, inferred from the token checks below (patterns and arm bodies are
// whatever parseExpr/parseStat/parseBlock accept):
//
//	match value {
//	    pattern => stat,
//	    _ => { ... },
//	}
func (v *parser) parseMatchStat() *MatchStatNode {
	defer un(trace(v, "matchstat"))

	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_MATCH) {
		return nil
	}
	startToken := v.consumeToken()

	value := v.parseExpr()
	if value == nil {
		v.err("Expected valid expression as value in match statement")
	}

	v.expect(lexer.TOKEN_SEPARATOR, "{")

	var cases []*MatchCaseNode
	for {
		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
			break
		}

		var pattern ParseNode
		if v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, "_") {
			patTok := v.consumeToken()
			pattern = &DefaultPatternNode{}
			pattern.SetWhere(patTok.Where)
		} else {
			pattern = v.parseExpr()
		}
		if pattern == nil {
			v.err("Expected valid expression as pattern in match statement")
		}

		v.expect(lexer.TOKEN_OPERATOR, "=>")

		var body ParseNode
		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "{") {
			body = v.parseBlock()
		} else {
			body = v.parseStat()
		}
		if body == nil {
			v.err("Expected valid arm statement in match clause")
		}

		v.expect(lexer.TOKEN_SEPARATOR, ",")

		caseNode := &MatchCaseNode{Pattern: pattern, Body: body}
		caseNode.SetWhere(lexer.NewSpan(pattern.Where().Start(), body.Where().End()))
		cases = append(cases, caseNode)
	}
	endToken := v.expect(lexer.TOKEN_SEPARATOR, "}")

	res := &MatchStatNode{Value: value, Cases: cases}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

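// parseTypeParameter reads one generic type parameter, optionally restricted
// to a `&`-separated list of names. A hedged sketch of the accepted surface
// syntax, inferred from the token checks below (the trait names are
// hypothetical):
//
//	T
//	T: Comparable & Printable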
func (v *parser) parseTypeParameter() *TypeParameterNode {
	name := v.expect(lexer.TOKEN_IDENTIFIER, "")

	var restrictions []*NameNode
	if v.tokenMatches(0, lexer.TOKEN_OPERATOR, ":") {
		v.consumeToken()
		for {
			restriction := v.parseName()
			if restriction == nil {
				v.err("Expected valid name in type restriction")
			}
			restrictions = append(restrictions, restriction)

			if !v.tokenMatches(0, lexer.TOKEN_OPERATOR, "&") {
				break
			}
			v.consumeToken()
		}
	}

	res := &TypeParameterNode{Name: NewLocatedString(name), Restrictions: restrictions}
	if idx := len(restrictions) - 1; idx >= 0 {
		res.SetWhere(lexer.NewSpan(name.Where.Start(), restrictions[idx].Where().End()))
	} else {
		res.SetWhere(lexer.NewSpanFromTokens(name, name))
	}
	return res
}

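// parseSizeofExpr tries an expression operand first and falls back to a type.
// A sketch of the two accepted forms, inferred from that fallback (the operand
// names are hypothetical):
//
//	sizeof(some_expr)
//	sizeof(SomeType)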
func (v *parser) parseSizeofExpr() *SizeofExprNode {
	defer un(trace(v, "sizeofexpr"))

	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_SIZEOF) {
		return nil
	}
	startToken := v.consumeToken()

	v.expect(lexer.TOKEN_SEPARATOR, "(")

	var typ ParseNode
	value := v.parseExpr()
	if value == nil {
		typ = v.parseType(true)
		if typ == nil {
			v.err("Expected valid expression or type in sizeof expression")
		}
	}

	endToken := v.expect(lexer.TOKEN_SEPARATOR, ")")

	res := &SizeofExprNode{Value: value, Type: typ}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

func (v *parser) parseBlock() *BlockNode {
	defer un(trace(v, "block"))

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "{") {
		return nil
	}
	startToken := v.consumeToken()

	var nodes []ParseNode
	for {
		node, is_cond := v.parseNode()
		if node == nil {
			break
		}
		if !is_cond {
			v.expect(lexer.TOKEN_SEPARATOR, ";")
		}
		nodes = append(nodes, node)
	}
	endToken := v.expect(lexer.TOKEN_SEPARATOR, "}")

	res := &BlockNode{Nodes: nodes}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

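// parseEnumType accepts a brace-delimited, comma-separated list of entries.
// A sketch of the shape it consumes; the entry syntax comes from
// parseEnumEntry (not shown here), so the entry forms are assumptions:
//
//	enum {
//	    EntryOne,
//	    EntryTwo,
//	}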
func (v *parser) parseEnumType() *EnumTypeNode {
	defer un(trace(v, "enumtype"))

	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_ENUM) {
		return nil
	}
	startToken := v.consumeToken()

	v.expect(lexer.TOKEN_SEPARATOR, "{")

	var members []*EnumEntryNode
	for {
		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
			break
		}

		member := v.parseEnumEntry()
		if member == nil {
			v.err("Expected valid enum entry in enum")
		}
		members = append(members, member)

		if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ",") {
			break
		}
		v.consumeToken()
	}
	endToken := v.expect(lexer.TOKEN_SEPARATOR, "}")

	res := &EnumTypeNode{Members: members}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

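// parseToplevelDirective recognises exactly two directives, `link` and `use`.
// A hedged sketch of the accepted input (the library and module names are
// hypothetical examples):
//
//	#link "somelib"
//	#use some_module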
func (v *parser) parseToplevelDirective() ParseNode {
	defer un(trace(v, "toplevel-directive"))

	if !v.tokensMatch(lexer.TOKEN_OPERATOR, "#", lexer.TOKEN_IDENTIFIER, "") {
		return nil
	}
	start := v.expect(lexer.TOKEN_OPERATOR, "#")
	directive := v.expect(lexer.TOKEN_IDENTIFIER, "")

	switch directive.Contents {
	case "link":
		library := v.expect(lexer.TOKEN_STRING, "")
		res := &LinkDirectiveNode{Library: NewLocatedString(library)}
		res.SetWhere(lexer.NewSpanFromTokens(start, library))
		return res

	case "use":
		module := v.parseName()
		if module == nil {
			v.errPosSpecific(directive.Where.End(), "Expected name after use directive")
		}
		v.deps = append(v.deps, module)

		res := &UseDirectiveNode{Module: module}
		res.SetWhere(lexer.NewSpan(start.Where.Start(), module.Where().End()))
		return res

	default:
		v.errTokenSpecific(directive, "No such directive `%s`", directive.Contents)
		return nil
	}
}

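// parseTypeDecl parses a named, optionally generic type declaration terminated
// by a semicolon. A sketch inferred from the calls below; the shape of the
// right-hand side type is an assumption:
//
//	type Name<T> SomeType;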
func (v *parser) parseTypeDecl() *TypeDeclNode {
	defer un(trace(v, "typedecl"))

	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, "type") {
		return nil
	}
	startToken := v.consumeToken()

	name := v.expect(lexer.TOKEN_IDENTIFIER, "")
	if isReservedKeyword(name.Contents) {
		v.err("Cannot use reserved keyword `%s` as type name", name.Contents)
	}

	genericSigil := v.parseGenericSigil()
	typ := v.parseType(true)

	endToken := v.expect(lexer.TOKEN_SEPARATOR, ";")

	res := &TypeDeclNode{
		Name:         NewLocatedString(name),
		GenericSigil: genericSigil,
		Type:         typ,
	}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

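// parseGenericSigil parses the angle-bracketed, comma-separated parameter list
// used by generic declarations, e.g. (a sketch; the restriction names are
// hypothetical):
//
//	<T>
//	<T, U: Ordered & Printable>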
func (v *parser) parseGenericSigil() *GenericSigilNode {
	defer un(trace(v, "genericsigil"))

	if !v.tokenMatches(0, lexer.TOKEN_OPERATOR, "<") {
		return nil
	}
	startToken := v.consumeToken()

	var parameters []*TypeParameterNode
	for {
		parameter := v.parseTypeParameter()
		if parameter == nil {
			v.err("Expected valid type parameter in generic sigil")
		}
		parameters = append(parameters, parameter)

		if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ",") {
			break
		}
		v.consumeToken()
	}
	endToken := v.expect(lexer.TOKEN_OPERATOR, ">")

	res := &GenericSigilNode{Parameters: parameters}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

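// parseArrayLit accepts a bracketed, comma-separated expression list, with a
// trailing comma allowed (the loop re-checks for `]` after each comma).
// Sketch (the element expressions are hypothetical):
//
//	[1, 2, 3]
//	[a, b,]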
func (v *parser) parseArrayLit() *ArrayLiteralNode {
	defer un(trace(v, "arraylit"))

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "[") {
		return nil
	}
	startToken := v.consumeToken()

	var values []ParseNode
	for {
		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "]") {
			break
		}

		value := v.parseExpr()
		if value == nil {
			v.err("Expected valid expression in array literal")
		}
		values = append(values, value)

		if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ",") {
			break
		}
		v.consumeToken()
	}
	endToken := v.expect(lexer.TOKEN_SEPARATOR, "]")

	res := &ArrayLiteralNode{Values: values}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

func (v *parser) parseTupleType() *TupleTypeNode {
	defer un(trace(v, "tupletype"))

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "(") {
		return nil
	}
	startToken := v.consumeToken()

	var members []ParseNode
	for {
		memberType := v.parseType(true)
		if memberType == nil {
			v.err("Expected valid type in tuple type")
		}
		members = append(members, memberType)

		if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ",") {
			break
		}
		v.consumeToken()
	}
	endToken := v.expect(lexer.TOKEN_SEPARATOR, ")")

	res := &TupleTypeNode{MemberTypes: members}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

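// parseCastExpr expects the keyword form of a cast. A sketch of the accepted
// syntax, inferred from the token checks below (the type and operand names
// are hypothetical):
//
//	cast(TargetType, some_expr)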
func (v *parser) parseCastExpr() *CastExprNode {
	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_CAST) {
		return nil
	}
	startToken := v.consumeToken()

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "(") {
		v.err("Expected opening `(` in cast expression, got `%s`", v.peek(0).Contents)
	}
	v.consumeToken()

	typ := v.parseType(true)
	if typ == nil {
		v.err("Expected valid type in cast expression")
	}

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ",") {
		v.err("Expected `,` in cast expression, got `%s`", v.peek(0).Contents)
	}
	v.consumeToken()

	value := v.parseExpr()
	if value == nil {
		v.err("Expected valid expression in cast expression")
	}

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ")") {
		v.err("Expected closing `)` after cast expression, got `%s`", v.peek(0).Contents)
	}
	endToken := v.consumeToken()

	res := &CastExprNode{Type: typ, Value: value}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

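// parseImplDecl parses an implementation block, optionally bound to a trait
// via `for`. A hedged sketch (the names are hypothetical; the member syntax
// comes from parseDecl):
//
//	impl SomeStruct { <function decls> }
//	impl SomeStruct for SomeTrait { <function decls> }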
func (v *parser) parseImplDecl() *ImplDeclNode {
	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_IMPL) {
		return nil
	}
	startToken := v.consumeToken()

	if !v.nextIs(lexer.TOKEN_IDENTIFIER) {
		v.err("Expected struct name after `impl` keyword, got `%s`", v.peek(0).Contents)
	}
	structName := v.consumeToken()
	if isReservedKeyword(structName.Contents) {
		v.err("Cannot use reserved keyword `%s` as struct name", structName.Contents)
	}

	var traitName *lexer.Token
	if v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_FOR) {
		v.consumeToken()

		if !v.nextIs(lexer.TOKEN_IDENTIFIER) {
			v.err("Expected trait name after `for` in impl declaration, got `%s`", v.peek(0).Contents)
		}
		traitName = v.consumeToken()
		if isReservedKeyword(traitName.Contents) {
			v.err("Cannot use reserved keyword `%s` as trait name", traitName.Contents)
		}
	}

	// `{` is a separator token, not an identifier
	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "{") {
		v.err("Expected starting `{` after impl start, got `%s`", v.peek(0).Contents)
	}
	v.consumeToken()

	var members []*FunctionDeclNode
	for {
		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
			break
		}

		member, ok := v.parseDecl().(*FunctionDeclNode)
		if member == nil || !ok {
			v.err("Expected valid function declaration in impl declaration")
		}
		members = append(members, member)
	}

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
		v.err("Expected closing `}` after impl declaration, got `%s`", v.peek(0).Contents)
	}
	endToken := v.consumeToken()

	res := &ImplDeclNode{StructName: NewLocatedString(structName), Members: members}
	if traitName != nil {
		res.TraitName = NewLocatedString(traitName)
	}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

func (v *parser) parseReturnStat() *ReturnStatNode {
	defer un(trace(v, "returnstat"))

	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_RETURN) {
		return nil
	}
	startToken := v.consumeToken()

	value := v.parseExpr()
	endToken := v.expect(lexer.TOKEN_SEPARATOR, ";")

	res := &ReturnStatNode{Value: value}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

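// parseEnumDecl is the declaration counterpart of parseEnumType: the same
// comma-separated entry list, but with a leading name. Sketch (the entry
// forms are assumptions, since parseEnumEntry is not shown):
//
//	enum Name {
//	    EntryOne,
//	    EntryTwo,
//	}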
func (v *parser) parseEnumDecl() *EnumDeclNode {
	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_ENUM) {
		return nil
	}
	startToken := v.consumeToken()

	if !v.nextIs(lexer.TOKEN_IDENTIFIER) {
		v.err("Expected enum name after `enum` keyword, got `%s`", v.peek(0).Contents)
	}
	name := v.consumeToken()
	if isReservedKeyword(name.Contents) {
		v.err("Cannot use reserved keyword `%s` as name for enum", name.Contents)
	}

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "{") {
		v.err("Expected starting `{` after enum name, got `%s`", v.peek(0).Contents)
	}
	v.consumeToken()

	var members []*EnumEntryNode
	for {
		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
			break
		}

		member := v.parseEnumEntry()
		if member == nil {
			v.err("Expected valid enum entry in enum")
		}
		members = append(members, member)

		if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ",") {
			break
		}
		v.consumeToken()
	}

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
		v.err("Expected closing `}` after enum, got `%s`", v.peek(0).Contents)
	}
	endToken := v.consumeToken()

	res := &EnumDeclNode{Name: NewLocatedString(name), Members: members}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

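// parseStructDecl parses a named struct whose members are read by
// parseVarDeclBody (not shown), so the field syntax in this sketch is an
// assumption:
//
//	struct Name {
//	    x: int,
//	    y: int,
//	}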
func (v *parser) parseStructDecl() *StructDeclNode {
	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_STRUCT) {
		return nil
	}
	startToken := v.consumeToken()

	if !v.nextIs(lexer.TOKEN_IDENTIFIER) {
		v.err("Expected name after struct keyword, got `%s`", v.peek(0).Contents)
	}
	name := v.consumeToken()
	if isReservedKeyword(name.Contents) {
		v.err("Cannot use reserved keyword `%s` as name for struct", name.Contents)
	}

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "{") {
		v.err("Expected starting `{` after struct name, got `%s`", v.peek(0).Contents)
	}
	v.consumeToken()

	var members []*VarDeclNode
	for {
		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
			break
		}

		member := v.parseVarDeclBody()
		if member == nil {
			v.err("Expected valid variable declaration in struct")
		}
		members = append(members, member)

		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ",") {
			v.consumeToken()
		}
	}

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
		v.err("Expected closing `}` after struct, got `%s`", v.peek(0).Contents)
	}
	endToken := v.consumeToken()

	res := &StructDeclNode{Name: NewLocatedString(name), Members: members}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

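// parseStructLit parses a struct literal with or without a leading name,
// backtracking if no `{` follows the name. Sketch (the name and fields are
// hypothetical):
//
//	Vec { x: 1, y: 2 }
//	{ x: 1, y: 2 }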
func (v *parser) parseStructLit() *StructLiteralNode {
	defer un(trace(v, "structlit"))

	startPos := v.currentToken
	name := v.parseName()
	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "{") {
		v.currentToken = startPos
		return nil
	}
	startToken := v.consumeToken()

	var members []LocatedString
	var values []ParseNode
	for {
		if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, "") {
			break
		}
		member := v.expect(lexer.TOKEN_IDENTIFIER, "")
		members = append(members, NewLocatedString(member))

		v.expect(lexer.TOKEN_OPERATOR, ":")

		value := v.parseExpr()
		if value == nil {
			v.err("Expected valid expression in struct literal")
		}
		values = append(values, value)

		if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ",") {
			break
		}
		v.consumeToken()
	}
	endToken := v.expect(lexer.TOKEN_SEPARATOR, "}")

	res := &StructLiteralNode{Name: name, Members: members, Values: values}
	if name != nil {
		res.SetWhere(lexer.NewSpan(name.Where().Start(), endToken.Where.End()))
	} else {
		res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	}
	return res
}

func (v *parser) parseUseDecl() *UseDeclNode {
	defer un(trace(v, "usedecl"))

	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_USE) {
		return nil
	}
	startToken := v.consumeToken()

	module := v.parseName()
	if module == nil {
		v.err("Expected valid module name after `use` keyword")
	}
	endToken := v.expect(lexer.TOKEN_SEPARATOR, ";")

	res := &UseDeclNode{Module: module}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

func (v *parser) parseDeferStat() *DeferStatNode {
	defer un(trace(v, "deferstat"))

	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_DEFER) {
		return nil
	}
	startToken := v.consumeToken()

	call, ok := v.parseExpr().(*CallExprNode)
	if !ok {
		v.err("Expected valid call expression in defer statement")
	}
	endToken := v.expect(lexer.TOKEN_SEPARATOR, ";")

	res := &DeferStatNode{Call: call}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

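// parseTraitDecl parses a named trait whose members must all be function
// declarations. Hedged sketch (the member syntax comes from parseDecl, not
// shown):
//
//	trait Name { <function decls> }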
func (v *parser) parseTraitDecl() *TraitDeclNode {
	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_TRAIT) {
		return nil
	}
	startToken := v.consumeToken()

	if !v.nextIs(lexer.TOKEN_IDENTIFIER) {
		v.err("Expected trait name after `trait` keyword, got `%s`", v.peek(0).Contents)
	}
	name := v.consumeToken()
	if isReservedKeyword(name.Contents) {
		v.err("Cannot use reserved keyword `%s` as name for trait", name.Contents)
	}

	// `{` is a separator token, not an identifier
	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "{") {
		v.err("Expected starting `{` after trait name, got `%s`", v.peek(0).Contents)
	}
	v.consumeToken()

	var members []*FunctionDeclNode
	for {
		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
			break
		}

		member, ok := v.parseDecl().(*FunctionDeclNode)
		if member == nil || !ok {
			v.err("Expected valid function declaration in trait declaration")
		}
		members = append(members, member)
	}

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
		v.err("Expected closing `}` after trait declaration, got `%s`", v.peek(0).Contents)
	}
	endToken := v.consumeToken()

	res := &TraitDeclNode{Name: NewLocatedString(name), Members: members}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

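// parseStructType parses a struct type, with the `struct` keyword required or
// not depending on the caller. The field syntax comes from parseVarDeclBody
// (not shown), so the member forms in this sketch are assumptions:
//
//	struct {
//	    x: int,
//	    y: int,
//	}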
func (v *parser) parseStructType(requireKeyword bool) *StructTypeNode {
	defer un(trace(v, "structtype"))

	var startToken *lexer.Token
	if requireKeyword {
		if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_STRUCT) {
			return nil
		}
		startToken = v.consumeToken()

		v.expect(lexer.TOKEN_SEPARATOR, "{")
	} else {
		if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "{") {
			return nil
		}
		startToken = v.consumeToken()
	}

	var members []*VarDeclNode
	for {
		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
			break
		}

		member := v.parseVarDeclBody()
		if member == nil {
			v.err("Expected valid variable declaration in struct")
		}
		members = append(members, member)

		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ",") {
			v.consumeToken()
		}
	}
	endToken := v.expect(lexer.TOKEN_SEPARATOR, "}")

	res := &StructTypeNode{Members: members}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

func (v *parser) parseDefaultStat() *DefaultStatNode {
	defer un(trace(v, "defaultstat"))

	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_DEFAULT) {
		return nil
	}
	startToken := v.consumeToken()

	v.expect(lexer.TOKEN_SEPARATOR, "(")

	target := v.parseExpr()
	if target == nil {
		v.err("Expected valid expression in default statement")
	}
	endToken := v.expect(lexer.TOKEN_SEPARATOR, ")")

	res := &DefaultStatNode{Target: target}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

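// parseTupleLit accepts a parenthesised, comma-separated expression list, then
// backtracks if a `.` follows (see the comment inside the function). Sketch
// (the element expressions are hypothetical):
//
//	(1, 2, 3)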
func (v *parser) parseTupleLit() *TupleLiteralNode {
	defer un(trace(v, "tuplelit"))

	startPos := v.currentToken
	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "(") {
		return nil
	}
	startToken := v.consumeToken()

	var values []ParseNode
	for {
		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ")") {
			break
		}

		value := v.parseExpr()
		if value == nil {
			v.err("Expected valid expression in tuple literal")
		}
		values = append(values, value)

		if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ",") {
			break
		}
		v.consumeToken()
	}
	endToken := v.expect(lexer.TOKEN_SEPARATOR, ")")

	// Dirty hack: if the parenthesised expression is followed by `.`,
	// rewind and return nil, presumably so another production can parse
	// the whole `(...).x` access instead.
	if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ".") {
		v.currentToken = startPos
		return nil
	}

	res := &TupleLiteralNode{Values: values}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}

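// parseInterfaceType reads comma-terminated function headers until it sees `}`
// followed by `;`. Hedged sketch; the header syntax comes from parseFuncHeader
// (not shown):
//
//	interface {
//	    <func header>,
//	    <func header>,
//	};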
func (v *parser) parseInterfaceType() *InterfaceTypeNode {
	defer un(trace(v, "interfacetype"))

	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_INTERFACE) {
		return nil
	}
	startToken := v.consumeToken()

	v.expect(lexer.TOKEN_SEPARATOR, "{")

	// when we hit a `};`, this means our interface is done
	var functions []*FunctionHeaderNode
	for {
		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") && v.tokenMatches(1, lexer.TOKEN_SEPARATOR, ";") {
			break
		}

		function := v.parseFuncHeader(false)
		if function != nil {
			// TODO trailing comma
			v.expect(lexer.TOKEN_SEPARATOR, ",")
			functions = append(functions, function)
		} else {
			v.err("Failed to parse function in interface")
		}
	}
	endToken := v.expect(lexer.TOKEN_SEPARATOR, "}")

	res := &InterfaceTypeNode{
		Functions: functions,
	}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}