Example #1
func parseDefine(node *ast.Tuple) ast.Node {
	fmt.Println("#parseDefine")
	nNode := len(node.Nodes)
	if nNode != 3 {
		panic("unexpeced define expression")
	}
	// implicit lambda expressions, e.g., (define (add x y) (+ x y))
	var vars ast.Node
	var expr ast.Node

	if node.Nodes[1].Type() == const_.TUPLE {
		t := node.Nodes[1].(*ast.Tuple)
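		// the head of the tuple is the name being defined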
		vars = t.Nodes[0]

		// the remaining elements are the formal parameters
		args := append([]ast.Node(nil), t.Nodes[1:]...)

		body := parseNode(node.Nodes[2])
		expr = ast.NewLambda(ast.NewTuple(args), body)
	} else {
		// 1. normal definition, e.g. (define a 1)
		// 2. explicit lambda expressions, e.g. (define inc (lambda (x) (+ x 1)))
		vars = node.Nodes[1]
		expr = parseNode(node.Nodes[2])
	}

	return ast.NewDefine(vars, expr)
}
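
To see the rewrite concretely, here is a minimal usage sketch. It is hypothetical: it assumes it sits in the same package as parseDefine, and that the value built by ast.NewTuple satisfies ast.Node with a *ast.Tuple underneath, as the type assertion inside parseDefine suggests.

func demoImplicitLambda() ast.Node {
	// build the tuple for (define (add x y) (+ x y)) by hand
	var expr ast.Node = ast.NewTuple([]ast.Node{
		ast.NewIdent("define"),
		ast.NewTuple([]ast.Node{ast.NewIdent("add"), ast.NewIdent("x"), ast.NewIdent("y")}),
		ast.NewTuple([]ast.Node{ast.NewIdent("+"), ast.NewIdent("x"), ast.NewIdent("y")}),
	})
	// parseDefine desugars this to (define add (lambda (x y) (+ x y)))
	return parseDefine(expr.(*ast.Tuple))
}
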
Example #2
func (self *Parser) parse(nodes *[]ast.Node, dep int, typ lexer.TokenType) {
	for token := self.l.NextToken(); token.Type != lexer.TokenEOF; token = self.l.NextToken() {

		var node ast.Node
		switch token.Type {
		case lexer.TokenBool:
			node = ast.NewBool(token.Name)

		case lexer.TokenChar:
			node = ast.NewChar(token.Name)

		case lexer.TokenString:
			node = ast.NewString(token.Name)

		case lexer.TokenQuote:
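			// 'x is read as (quote x); passing typ makes the recursive call
			// stop after one datum (the same pattern covers the cases below)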
			quoteExpr := []ast.Node{ast.NewIdent(const_.QUOTE)}
			self.parse(&quoteExpr, dep+1, lexer.TokenQuote)
			node = ast.NewTuple(quoteExpr)

		case lexer.TokenQuasiQuote:
			qqExpr := []ast.Node{ast.NewIdent(const_.QUASIQUOTE)}
			self.parse(&qqExpr, dep+1, lexer.TokenQuasiQuote)
			node = ast.NewTuple(qqExpr)

		case lexer.TokenUnQuote:
			expr := []ast.Node{ast.NewIdent(const_.UNQUOTE)}
			self.parse(&expr, dep+1, lexer.TokenUnQuote)
			node = ast.NewTuple(expr)

		case lexer.TokenUnQuoteSplicing:
			expr := []ast.Node{ast.NewIdent(const_.UNQUOTE_SPLICING)}
			self.parse(&expr, dep+1, lexer.TokenUnQuoteSplicing)
			node = ast.NewTuple(expr)

		case lexer.TokenIdent:
			node = ast.NewIdent(token.Name)

		case lexer.TokenNumber:
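			// a decimal point or exponent marker (e/E) marks the literal as a float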
			if strings.ContainsAny(token.Name, "Ee.") {
				node = ast.NewFloat(token.Name)
			} else {
				node = ast.NewInt(token.Name)
			}

		case lexer.TokenLParen:
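			// '(' opens a sub-expression: recurse until the matching ')'
			// returns, then wrap the collected nodes in a tuple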
			sub := make([]ast.Node, 0)
			self.parse(&sub, dep+1, -1)
			node = ast.NewTuple(sub)

		case lexer.TokenRParen:
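			// the matching ')' ends this recursion level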
			return

		case lexer.TokenLVect:
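			// a vector literal is represented as a (vect ...) tuple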
			vect := []ast.Node{ast.NewIdent(const_.VECT)}
			self.parse(&vect, dep+1, -1)
			node = ast.NewTuple(vect)

		case lexer.TokenRVect:
			return

		default:
			panic("unexpected token")
		}
		*nodes = append(*nodes, node)

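		// a quote-family prefix applies to exactly one datum, so stop after reading it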
		if typ == lexer.TokenQuote || typ == lexer.TokenQuasiQuote ||
			typ == lexer.TokenUnQuote || typ == lexer.TokenUnQuoteSplicing {
			break
		}
	}
}
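
Each quote-family case pushes the matching identifier and recurses with typ set, and the final check makes that recursion stop after a single datum, so 'x is read as (quote x), `x as (quasiquote x), and so on. A plausible top-level driver is sketched below; it is hypothetical, since NewParser and the rest of the package are not part of this excerpt.

func parseProgram(p *Parser) []ast.Node {
	var roots []ast.Node
	// depth 0; -1 means "no enclosing quote token", matching the recursive
	// calls in the LParen and LVect cases above
	p.parse(&roots, 0, -1)
	return roots
}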