Example #1
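// parseIfStat parses an if statement: a condition and block, any number of `else if` branches, and an optional trailing else block.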
func (v *parser) parseIfStat() *IfStatNode {
	defer un(trace(v, "ifstat"))

	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_IF) {
		return nil
	}
	startToken := v.consumeToken()

	var parts []*ConditionBodyNode
	var lastPart *ConditionBodyNode
	for {
		condition := v.parseExpr()
		if condition == nil {
			v.err("Expected valid expression as condition in if statement")
		}

		body := v.parseBlock()
		if body == nil {
			v.err("Expected valid block after condition in if statement")
		}

		lastPart = &ConditionBodyNode{Condition: condition, Body: body}
		lastPart.SetWhere(lexer.NewSpan(condition.Where().Start(), body.Where().End()))
		parts = append(parts, lastPart)

		if !v.tokensMatch(lexer.TOKEN_IDENTIFIER, KEYWORD_ELSE, lexer.TOKEN_IDENTIFIER, KEYWORD_IF) {
			break
		}
		v.consumeTokens(2)
	}

	var elseBody *BlockNode
	if v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_ELSE) {
		v.consumeToken()

		elseBody = v.parseBlock()
		if elseBody == nil {
			v.err("Expected valid block after `else` keyword in if statement")
		}
	}

	res := &IfStatNode{Parts: parts, ElseBody: elseBody}
	if elseBody != nil {
		res.SetWhere(lexer.NewSpan(startToken.Where.Start(), elseBody.Where().End()))
	} else {
		res.SetWhere(lexer.NewSpan(startToken.Where.Start(), lastPart.Where().End()))
	}
	return res
}
Example #2
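// parseName parses a possibly module-qualified name of the form `a::b::c`, rejecting reserved keywords; the last segment becomes the name and the preceding segments the module path.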
func (v *parser) parseName() *NameNode {
	if !v.nextIs(lexer.TOKEN_IDENTIFIER) {
		return nil
	}

	var parts []LocatedString
	for {
		if !v.nextIs(lexer.TOKEN_IDENTIFIER) {
			v.err("Expected identifier after `::` in name, got `%s`", v.peek(0).Contents)
		}

		part := NewLocatedString(v.consumeToken())
		if isReservedKeyword(part.Value) {
			v.err("Cannot use reserved keyword `%s` as name", part.Value)
		}
		parts = append(parts, part)

		if !v.tokenMatches(0, lexer.TOKEN_OPERATOR, "::") {
			break
		}
		v.consumeToken()
	}

	name, parts := parts[len(parts)-1], parts[:len(parts)-1]
	res := &NameNode{Modules: parts, Name: name}
	if len(parts) > 0 {
		res.SetWhere(lexer.NewSpan(parts[0].Where.Start(), name.Where.End()))
	} else {
		res.SetWhere(name.Where)
	}
	return res
}
Example #3
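// parseMatchStat parses a match statement: a value expression followed by comma-terminated `pattern => body` arms inside braces, where `_` is the default pattern.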
func (v *parser) parseMatchStat() *MatchStatNode {
	defer un(trace(v, "matchstat"))

	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_MATCH) {
		return nil
	}
	startToken := v.consumeToken()

	value := v.parseExpr()
	if value == nil {
		v.err("Expected valid expresson as value in match statement")
	}

	v.expect(lexer.TOKEN_SEPARATOR, "{")

	var cases []*MatchCaseNode
	for {
		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
			break
		}

		var pattern ParseNode
		if v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, "_") {
			patTok := v.consumeToken()

			pattern = &DefaultPatternNode{}
			pattern.SetWhere(patTok.Where)
		} else {
			pattern = v.parseExpr()
		}

		if pattern == nil {
			v.err("Expected valid expression as pattern in match statement")
		}

		v.expect(lexer.TOKEN_OPERATOR, "=>")

		var body ParseNode
		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "{") {
			body = v.parseBlock()
		} else {
			body = v.parseStat()
		}
		if body == nil {
			v.err("Expected valid arm statement in match clause")
		}

		v.expect(lexer.TOKEN_SEPARATOR, ",")

		caseNode := &MatchCaseNode{Pattern: pattern, Body: body}
		caseNode.SetWhere(lexer.NewSpan(pattern.Where().Start(), body.Where().End()))
		cases = append(cases, caseNode)
	}

	endToken := v.expect(lexer.TOKEN_SEPARATOR, "}")

	res := &MatchStatNode{Value: value, Cases: cases}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}
Example #4
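// parseArrayType parses the remainder of an array type `[length]Type`; the opening `[` is presumably checked by the caller, since the first token is consumed unconditionally.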
func (v *parser) parseArrayType() *ArrayTypeNode {
	startToken := v.consumeToken()

	length := v.parseNumberLit()
	if length != nil && length.IsFloat {
		v.err("Expected integer length for array type")
	}

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "]") {
		v.err("Expected closing `]` in array type, got `%s`", v.peek(0).Contents)
	}
	v.consumeToken()

	memberType := v.parseType()
	if memberType == nil {
		v.err("Expected valid type in array type")
	}

	res := &ArrayTypeNode{MemberType: memberType}
	if length != nil {
		// TODO: Defend against overflow
		res.Length = int(length.IntValue)
	}
	res.SetWhere(lexer.NewSpan(startToken.Where.Start(), memberType.Where().End()))
	return res
}
Example #5
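// parseEnumEntry parses a single enum entry: an identifier, optionally followed by `= value`.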
func (v *parser) parseEnumEntry() *EnumEntryNode {
	if !v.nextIs(lexer.TOKEN_IDENTIFIER) {
		return nil
	}
	name := v.consumeToken()

	if isReservedKeyword(name.Contents) {
		v.err("Cannot use reserved keyword `%s` as name for enum entry", name.Contents)
	}

	var value ParseNode
	if v.tokenMatches(0, lexer.TOKEN_OPERATOR, "=") {
		v.consumeToken()

		value = v.parseExpr()
		if value == nil {
			v.err("Expected valid expression after `=` in enum entry")
		}
	}

	res := &EnumEntryNode{Name: NewLocatedString(name), Value: value}
	if value != nil {
		res.SetWhere(lexer.NewSpan(name.Where.Start(), value.Where().End()))
	} else {
		res.SetWhere(name.Where)
	}
	return res
}
Example #6
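// parseAssignStat parses an assignment statement `target = value;`, rewinding to the start position if the input turns out not to be an assignment.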
func (v *parser) parseAssignStat() ParseNode {
	startPos := v.currentToken

	accessExpr := v.parseAccessExpr()
	if accessExpr == nil || !v.tokenMatches(0, lexer.TOKEN_OPERATOR, "=") {
		v.currentToken = startPos
		return nil
	}

	// consume '='
	v.consumeToken()

	value := v.parseExpr()
	if value == nil {
		v.err("Expected valid expression in assignment statement")
	}

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ";") {
		v.err("Expected `;` after assignment statement, got `%s`", v.peek(0).Contents)
	}
	endToken := v.consumeToken()

	res := &AssignStatNode{Target: accessExpr, Value: value}
	res.SetWhere(lexer.NewSpan(accessExpr.Where().Start(), endToken.Where.End()))
	return res
}
Example #7
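// parseToplevelDirective parses a top-level `#directive`; `link` takes a string literal and `use` takes a module name, which is also recorded as a dependency.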
func (v *parser) parseToplevelDirective() ParseNode {
	defer un(trace(v, "toplevel-directive"))

	if !v.tokensMatch(lexer.TOKEN_OPERATOR, "#", lexer.TOKEN_IDENTIFIER, "") {
		return nil
	}
	start := v.expect(lexer.TOKEN_OPERATOR, "#")

	directive := v.expect(lexer.TOKEN_IDENTIFIER, "")
	switch directive.Contents {
	case "link":
		library := v.expect(lexer.TOKEN_STRING, "")
		res := &LinkDirectiveNode{Library: NewLocatedString(library)}
		res.SetWhere(lexer.NewSpanFromTokens(start, library))
		return res

	case "use":
		module := v.parseName()
		if module == nil {
			v.errPosSpecific(directive.Where.End(), "Expected name after use directive")
		}

		v.deps = append(v.deps, module)

		res := &UseDirectiveNode{Module: module}
		res.SetWhere(lexer.NewSpan(start.Where.Start(), module.Where().End()))
		return res

	default:
		v.errTokenSpecific(directive, "No such directive `%s`", directive.Contents)
		return nil
	}
}
Example #8
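// parseBinopAssignStat parses a compound assignment such as `target += value`, rejecting comparison operators and rewinding if the lookahead is not `<op>=`.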
func (v *parser) parseBinopAssignStat() ParseNode {
	defer un(trace(v, "binopassignstat"))

	startPos := v.currentToken

	accessExpr := v.parseExpr()
	if accessExpr == nil || !v.tokensMatch(lexer.TOKEN_OPERATOR, "", lexer.TOKEN_OPERATOR, "=") {
		v.currentToken = startPos
		return nil
	}

	typ := stringToBinOpType(v.peek(0).Contents)
	if typ == BINOP_ERR || typ.Category() == OP_COMPARISON {
		v.err("Invalid binary operator `%s`", v.peek(0).Contents)
	}
	v.consumeToken()

	// consume '='
	v.consumeToken()

	var value ParseNode
	value = v.parseCompositeLiteral()
	if value == nil {
		value = v.parseExpr()
	}

	// no composite and no expr = err
	if value == nil {
		v.err("Expected valid expression in assignment statement")
	}

	res := &BinopAssignStatNode{Target: accessExpr, Operator: typ, Value: value}
	res.SetWhere(lexer.NewSpan(accessExpr.Where().Start(), value.Where().End()))
	return res
}
Example #9
func (v *parser) parseArrayType() *ArrayTypeNode {
	defer un(trace(v, "arraytype"))

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "[") {
		return nil
	}
	startToken := v.consumeToken()

	length := v.parseNumberLit()
	if length != nil && length.IsFloat {
		v.err("Expected integer length for array type")
	}

	v.expect(lexer.TOKEN_SEPARATOR, "]")

	memberType := v.parseType(true)
	if memberType == nil {
		v.err("Expected valid type in array type")
	}

	res := &ArrayTypeNode{MemberType: memberType}
	if length != nil {
		// TODO: Defend against overflow
		res.Length = int(length.IntValue.Int64())
	}
	res.SetWhere(lexer.NewSpan(startToken.Where.Start(), memberType.Where().End()))
	return res
}
Example #10
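// parseArrayLenExpr parses a `len(...)` expression whose argument may be a composite literal or any other expression.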
func (v *parser) parseArrayLenExpr() *ArrayLenExprNode {
	defer un(trace(v, "arraylenexpr"))

	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_LEN) {
		return nil
	}
	startToken := v.consumeToken()

	v.expect(lexer.TOKEN_SEPARATOR, "(")

	var array ParseNode
	array = v.parseCompositeLiteral()
	if array == nil {
		array = v.parseExpr()
	}
	if array == nil {
		v.err("Expected valid expression in array length expression")
	}

	endToken := v.expect(lexer.TOKEN_SEPARATOR, ")")

	res := &ArrayLenExprNode{ArrayExpr: array}
	res.SetWhere(lexer.NewSpan(startToken.Where.Start(), endToken.Where.End()))
	return res
}
Example #11
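// parseTypeDecl parses a `type` declaration: a name, an optional generic sigil, the underlying type, and a terminating `;`.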
func (v *parser) parseTypeDecl() *TypeDeclNode {
	defer un(trace(v, "typdecl"))

	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, "type") {
		return nil
	}

	startToken := v.consumeToken()

	name := v.expect(lexer.TOKEN_IDENTIFIER, "")
	if isReservedKeyword(name.Contents) {
		v.err("Cannot use reserved keyword `%s` as type name", name.Contents)
	}

	genericSigil := v.parseGenericSigil()

	typ := v.parseType(true)

	endToken := v.expect(lexer.TOKEN_SEPARATOR, ";")

	res := &TypeDeclNode{
		Name:         NewLocatedString(name),
		GenericSigil: genericSigil,
		Type:         typ,
	}
	res.SetWhere(lexer.NewSpan(startToken.Where.Start(), endToken.Where.End()))

	return res
}
Example #12
func (v *parser) parseAssignStat() ParseNode {
	defer un(trace(v, "assignstat"))

	startPos := v.currentToken

	accessExpr := v.parseExpr()
	if accessExpr == nil || !v.tokenMatches(0, lexer.TOKEN_OPERATOR, "=") {
		v.currentToken = startPos
		return nil
	}

	// consume '='
	v.consumeToken()

	var value ParseNode
	value = v.parseCompositeLiteral()
	if value == nil {
		value = v.parseExpr()
	}

	// not a composite or expr = error
	if value == nil {
		v.err("Expected valid expression in assignment statement")
	}

	res := &AssignStatNode{Target: accessExpr, Value: value}
	res.SetWhere(lexer.NewSpan(accessExpr.Where().Start(), value.Where().End()))
	return res
}
Example #13
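// parseStringLit parses a string literal, optionally prefixed with `c` to mark a C string, and unescapes its contents.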
func (v *parser) parseStringLit() *StringLitNode {
	defer un(trace(v, "stringlit"))

	var cstring bool
	var firstToken, stringToken *lexer.Token

	if v.tokenMatches(0, lexer.TOKEN_STRING, "") {
		cstring = false
		firstToken = v.consumeToken()
		stringToken = firstToken
	} else if v.tokensMatch(lexer.TOKEN_IDENTIFIER, "c", lexer.TOKEN_STRING, "") {
		cstring = true
		firstToken = v.consumeToken()
		stringToken = v.consumeToken()
	} else {
		return nil
	}

	unescaped, err := UnescapeString(stringToken.Contents)
	if err != nil {
		v.errTokenSpecific(stringToken, "Invalid string literal: %s", err)
	}

	res := &StringLitNode{Value: unescaped, IsCString: cstring}
	res.SetWhere(lexer.NewSpan(firstToken.Where.Start(), stringToken.Where.End()))
	return res
}
Example #14
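// parseTypeParameter parses a type parameter: a name with an optional `:`-introduced list of restrictions separated by `&`.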
func (v *parser) parseTypeParameter() *TypeParameterNode {
	name := v.expect(lexer.TOKEN_IDENTIFIER, "")

	var restrictions []*NameNode
	if v.tokenMatches(0, lexer.TOKEN_OPERATOR, ":") {
		v.consumeToken()
		for {
			restriction := v.parseName()
			if restriction == nil {
				v.err("Expected valid name in type restriction")
			}
			restrictions = append(restrictions, restriction)

			if !v.tokenMatches(0, lexer.TOKEN_OPERATOR, "&") {
				break
			}
			v.consumeToken()
		}
	}

	res := &TypeParameterNode{Name: NewLocatedString(name), Restrictions: restrictions}
	if idx := len(restrictions) - 1; idx >= 0 {
		res.SetWhere(lexer.NewSpan(name.Where.Start(), restrictions[idx].Where().End()))
	} else {
		res.SetWhere(lexer.NewSpanFromTokens(name, name))
	}
	return res
}
Example #15
func (v *parser) parseName() *NameNode {
	defer un(trace(v, "name"))

	if !v.nextIs(lexer.TOKEN_IDENTIFIER) {
		return nil
	}

	var parts []LocatedString
	for {
		part := v.expect(lexer.TOKEN_IDENTIFIER, "")
		parts = append(parts, NewLocatedString(part))

		if !v.tokenMatches(0, lexer.TOKEN_OPERATOR, "::") {
			break
		}
		v.consumeToken()
	}

	name, parts := parts[len(parts)-1], parts[:len(parts)-1]
	res := &NameNode{Modules: parts, Name: name}
	if len(parts) > 0 {
		res.SetWhere(lexer.NewSpan(parts[0].Where.Start(), name.Where.End()))
	} else {
		res.SetWhere(name.Where)
	}
	return res
}
Example #16
func (v *parser) parseMatchStat() *MatchStatNode {
	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_MATCH) {
		return nil
	}
	startToken := v.consumeToken()

	value := v.parseExpr()
	if value == nil {
		v.err("Expected valid expresson as value in match statement")
	}

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "{") {
		v.err("Expected starting `{` after value in match statement, got `%s`", v.peek(0).Contents)
	}
	v.consumeToken()

	var cases []*MatchCaseNode
	for {
		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
			break
		}

		var pattern ParseNode
		if v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, "_") {
			patTok := v.consumeToken()

			pattern = &DefaultPatternNode{}
			pattern.SetWhere(patTok.Where)
		} else {
			pattern = v.parseExpr()
		}

		if pattern == nil {
			v.err("Expected valid expression as pattern in match statement")
		}

		if !v.tokenMatches(0, lexer.TOKEN_OPERATOR, "->") {
			v.err("Expected `->` after match pattern, got `%s`", v.peek(0).Contents)
		}
		v.consumeToken()

		body := v.parseStat()
		if body == nil {
			v.err("Expected valid statement as body in match statement")
		}

		caseNode := &MatchCaseNode{Pattern: pattern, Body: body}
		caseNode.SetWhere(lexer.NewSpan(pattern.Where().Start(), body.Where().End()))
		cases = append(cases, caseNode)
	}

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
		v.err("Expected closing `}` after match statement, got `%s`", v.peek(0).Contents)
	}
	endToken := v.consumeToken()

	res := &MatchStatNode{Value: value, Cases: cases}
	res.SetWhere(lexer.NewSpanFromTokens(startToken, endToken))
	return res
}
Example #17
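// parseFuncDecl parses a function declaration: a header followed by either a block body, a `->` shorthand body (statement or expression), or a terminating `;` for a body-less declaration.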
func (v *parser) parseFuncDecl() *FunctionDeclNode {
	funcHeader := v.parseFuncHeader()
	if funcHeader == nil {
		return nil
	}

	var body *BlockNode
	var stat, expr ParseNode
	var endPosition lexer.Position
	if v.tokenMatches(0, lexer.TOKEN_OPERATOR, "->") {
		v.consumeToken()

		if stat = v.parseStat(); stat != nil {
			endPosition = stat.Where().End()
		} else if expr = v.parseExpr(); expr != nil {
			if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ";") {
				v.err("Expected `;` after function declaration, got `%s`", v.peek(0).Contents)
			}
			v.consumeToken()
			endPosition = expr.Where().End()
		} else {
			v.err("Expected valid statement or expression after `->` in function declaration")
		}
	} else {
		body = v.parseBlock()
		if body != nil {
			endPosition = body.Where().End()
		}
	}

	var maybeEndToken *lexer.Token
	if body == nil && stat == nil && expr == nil {
		if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ";") {
			v.err("Expected `;` after body-less function declaration, got `%s`", v.peek(0).Contents)
		}
		maybeEndToken = v.consumeToken()
	}

	res := &FunctionDeclNode{Header: funcHeader, Body: body, Stat: stat, Expr: expr}
	if body != nil || stat != nil || expr != nil {
		res.SetWhere(lexer.NewSpan(funcHeader.Where().Start(), endPosition))
	} else {
		res.SetWhere(lexer.NewSpan(funcHeader.Where().Start(), maybeEndToken.Where.End()))
	}
	return res
}
Example #18
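// parseVarDeclBody parses `[mut] name: Type [= value]` without consuming any trailing `;` (the type may be omitted when an initializer follows); it rewinds if the input is not a declaration.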
func (v *parser) parseVarDeclBody() *VarDeclNode {
	defer un(trace(v, "vardeclbody"))

	startPos := v.currentToken

	var mutable *lexer.Token
	if v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_MUT) {
		mutable = v.consumeToken()
	}

	if !v.tokensMatch(lexer.TOKEN_IDENTIFIER, "", lexer.TOKEN_OPERATOR, ":") {
		v.currentToken = startPos
		return nil
	}

	name := v.consumeToken()

	// consume ':'
	v.consumeToken()

	varType := v.parseType(true)
	if varType == nil && !v.tokenMatches(0, lexer.TOKEN_OPERATOR, "=") {
		v.err("Expected valid type in variable declaration")
	}

	var value ParseNode
	if v.tokenMatches(0, lexer.TOKEN_OPERATOR, "=") {
		v.consumeToken()

		value = v.parseCompositeLiteral()
		if value == nil {
			value = v.parseExpr()
		}

		if value == nil {
			v.err("Expected valid expression after `=` in variable declaration")
		}
	}

	res := &VarDeclNode{Name: NewLocatedString(name), Type: varType}
	start := name.Where.Start()
	if mutable != nil {
		res.Mutable = NewLocatedString(mutable)
		start = mutable.Where.Start()
	}

	var end lexer.Position
	if value != nil {
		res.Value = value
		end = value.Where().End()
	} else {
		end = varType.Where().End()
	}

	res.SetWhere(lexer.NewSpan(start, end))
	return res
}
Example #19
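// parsePrimaryExpr tries each primary expression form in turn; a bare name may additionally be followed by a `<...>` list of type arguments, which is abandoned if no closing `>` is found.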
func (v *parser) parsePrimaryExpr() ParseNode {
	defer un(trace(v, "primaryexpr"))

	var res ParseNode

	if sizeofExpr := v.parseSizeofExpr(); sizeofExpr != nil {
		res = sizeofExpr
	} else if arrayLenExpr := v.parseArrayLenExpr(); arrayLenExpr != nil {
		res = arrayLenExpr
	} else if defaultExpr := v.parseDefaultExpr(); defaultExpr != nil {
		res = defaultExpr
	} else if addrofExpr := v.parseAddrofExpr(); addrofExpr != nil {
		res = addrofExpr
	} else if litExpr := v.parseLitExpr(); litExpr != nil {
		res = litExpr
	} else if lambdaExpr := v.parseLambdaExpr(); lambdaExpr != nil {
		res = lambdaExpr
	} else if unaryExpr := v.parseUnaryExpr(); unaryExpr != nil {
		res = unaryExpr
	} else if castExpr := v.parseCastExpr(); castExpr != nil {
		res = castExpr
	} else if name := v.parseName(); name != nil {
		startPos := v.currentToken

		var parameters []ParseNode
		if v.tokenMatches(0, lexer.TOKEN_OPERATOR, "<") {
			v.consumeToken()

			for {
				typ := v.parseType(true)
				if typ == nil {
					break
				}
				parameters = append(parameters, typ)

				if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ",") {
					break
				}
				v.consumeToken()
			}

			if !v.tokenMatches(0, lexer.TOKEN_OPERATOR, ">") {
				v.currentToken = startPos
				parameters = nil
			} else {
				endToken := v.consumeToken()
				_ = endToken // TODO: Do something with the end token?
			}
		}

		res = &VariableAccessNode{Name: name, Parameters: parameters}
		res.SetWhere(lexer.NewSpan(name.Where().Start(), name.Where().End()))
	}

	return res
}
Example #20
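// parseCompositeLiteral parses `Type{ [field:] value, ... }`, rewinding if no `{` follows the parsed type.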
func (v *parser) parseCompositeLiteral() ParseNode {
	defer un(trace(v, "complit"))

	startPos := v.currentToken
	typ := v.parseType(true, true)

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "{") {
		v.currentToken = startPos
		return nil
	}
	v.consumeToken() // eat opening bracket

	res := &CompositeLiteralNode{
		Type: typ,
	}

	var lastToken *lexer.Token

	for {
		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
			lastToken = v.consumeToken()
			break
		}

		var field LocatedString

		if v.tokensMatch(lexer.TOKEN_IDENTIFIER, "", lexer.TOKEN_OPERATOR, ":") {
			field = NewLocatedString(v.consumeToken())
			v.consumeToken()
		}

		val := v.parseExpr()
		if val == nil {
			v.err("Expected value in composite literal, found `%s`", v.peek(0).Contents)
		}

		res.Fields = append(res.Fields, field)
		res.Values = append(res.Values, val)

		if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ",") {
			v.consumeToken()
			continue
		} else if v.tokenMatches(0, lexer.TOKEN_SEPARATOR, "}") {
			lastToken = v.consumeToken()
			break
		} else {
			v.err("Unexpected `%s`", v.peek(0).Contents)
		}
	}

	res.SetWhere(lexer.NewSpan(typ.Where().Start(), lastToken.Where.End()))

	return res
}
Example #21
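// parsePointerType parses a pointer type `^Type`; the leading `^` is presumably matched by the caller, since the first token is consumed unconditionally.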
func (v *parser) parsePointerType() *PointerTypeNode {
	startToken := v.consumeToken()

	target := v.parseType()
	if target == nil {
		v.err("Expected valid type after `^` in pointer type")
	}

	res := &PointerTypeNode{TargetType: target}
	res.SetWhere(lexer.NewSpan(startToken.Where.Start(), target.Where().End()))

	return res
}
Example #22
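// parseReturnStat parses a return statement with an optional value expression.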
func (v *parser) parseReturnStat() *ReturnStatNode {
	defer un(trace(v, "returnstat"))

	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_RETURN) {
		return nil
	}
	startToken := v.consumeToken()

	value := v.parseExpr()

	res := &ReturnStatNode{Value: value}
	res.SetWhere(lexer.NewSpan(startToken.Where.Start(), value.Where().End()))
	return res
}
Example #23
func (v *parser) parseVarDecl() *VarDeclNode {
	defer un(trace(v, "vardecl"))

	body := v.parseVarDeclBody()
	if body == nil {
		return nil
	}

	endToken := v.expect(lexer.TOKEN_SEPARATOR, ";")

	res := body
	res.SetWhere(lexer.NewSpan(body.Where().Start(), endToken.Where.End()))
	return res
}
Example #24
func (v *parser) parseVarDecl() *VarDeclNode {
	body := v.parseVarDeclBody()
	if body == nil {
		return nil
	}

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ";") {
		v.err("Expected `;` after variable declaration, got `%s`", v.peek(0).Contents)
	}
	endToken := v.consumeToken()

	res := body
	res.SetWhere(lexer.NewSpan(body.Where().Start(), endToken.Where.End()))
	return res
}
Example #25
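// parseAddrofExpr parses an address-of expression `&expr`.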
func (v *parser) parseAddrofExpr() *AddrofExprNode {
	if !v.tokenMatches(0, lexer.TOKEN_OPERATOR, "&") {
		return nil
	}
	startToken := v.consumeToken()

	value := v.parseExpr()
	if value == nil {
		v.err("Expected valid expression after addrof expression")
	}

	res := &AddrofExprNode{Value: value}
	res.SetWhere(lexer.NewSpan(startToken.Where.Start(), value.Where().End()))
	return res
}
Example #26
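// parseCallStat parses a call expression used as a statement, requiring a terminating `;`.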
func (v *parser) parseCallStat() *CallStatNode {
	callExpr := v.parseCallExpr()
	if callExpr == nil {
		return nil
	}

	if !v.tokenMatches(0, lexer.TOKEN_SEPARATOR, ";") {
		v.err("Expected `;` after call statement")
	}
	endToken := v.consumeToken()

	res := &CallStatNode{Call: callExpr}
	res.SetWhere(lexer.NewSpan(callExpr.Where().Start(), endToken.Where.End()))
	return res
}
Example #27
func (v *parser) parseVarDecl(isTopLevel bool) *VarDeclNode {
	defer un(trace(v, "vardecl"))

	body := v.parseVarDeclBody()
	if body == nil {
		return nil
	}
	if isTopLevel {
		v.expect(lexer.TOKEN_SEPARATOR, ";")
	}

	res := body
	res.SetWhere(lexer.NewSpan(body.Where().Start(), body.Where().End()))
	return res
}
Example #28
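// parseCallStat parses an expression statement, rewinding unless the parsed expression is a call expression.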
func (v *parser) parseCallStat() *CallStatNode {
	defer un(trace(v, "callstat"))

	startPos := v.currentToken

	callExpr, ok := v.parseExpr().(*CallExprNode)
	if !ok {
		v.currentToken = startPos
		return nil
	}

	res := &CallStatNode{Call: callExpr}
	res.SetWhere(lexer.NewSpan(callExpr.Where().Start(), callExpr.Where().End()))
	return res
}
Example #29
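// parseBinaryOperator folds operators at or above upperPrecedence into left-associated BinaryExprNodes using precedence climbing, starting from lhand.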
func (v *parser) parseBinaryOperator(upperPrecedence int, lhand ParseNode) ParseNode {
	defer un(trace(v, "binop"))

	// TODO: I have a suspicion this might break with some combinations of operators
	startPos := v.currentToken

	tok := v.peek(0)
	if tok.Type != lexer.TOKEN_OPERATOR || v.peek(1).Contents == ";" {
		return nil
	}

	for {
		tokPrecedence := v.getPrecedence(stringToBinOpType(v.peek(0).Contents))
		if tokPrecedence < upperPrecedence {
			return lhand
		}

		typ := stringToBinOpType(v.peek(0).Contents)
		if typ == BINOP_ERR {
			v.err("Invalid binary operator `%s`", v.peek(0).Contents)
		}
		v.consumeToken()

		rhand := v.parsePostfixExpr()
		if rhand == nil {
			v.currentToken = startPos
			return nil
		}

		nextPrecedence := v.getPrecedence(stringToBinOpType(v.peek(0).Contents))
		if tokPrecedence < nextPrecedence {
			rhand = v.parseBinaryOperator(tokPrecedence+1, rhand)
			if rhand == nil {
				v.currentToken = startPos
				return nil
			}
		}

		temp := &BinaryExprNode{
			Lhand:    lhand,
			Rhand:    rhand,
			Operator: typ,
		}
		temp.SetWhere(lexer.NewSpan(lhand.Where().Start(), rhand.Where().End()))
		lhand = temp
	}
}
Example #30
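// parseDeferStat parses a defer statement whose deferred body must be a call expression.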
func (v *parser) parseDeferStat() *DeferStatNode {
	defer un(trace(v, "deferstat"))

	if !v.tokenMatches(0, lexer.TOKEN_IDENTIFIER, KEYWORD_DEFER) {
		return nil
	}
	startToken := v.consumeToken()

	call, ok := v.parseExpr().(*CallExprNode)
	if !ok {
		v.err("Expected valid call expression in defer statement")
	}

	res := &DeferStatNode{Call: call}
	res.SetWhere(lexer.NewSpan(startToken.Where.Start(), call.Where().End()))
	return res
}