func PreParser(l *lexer.Lexer, elements []ast.Node, delimiter string) []ast.Node {
	for token := l.NextToken(); token.Type != lexer.TokenEOF; token = l.NextToken() {
		switch token.Type {
		case lexer.TokenIdentifier:
			elements = append(elements, ast.NewName(token.Value))
		case lexer.TokenIntegerLiteral:
			elements = append(elements, ast.NewInt(token.Value))
		case lexer.TokenFloatLiteral:
			elements = append(elements, ast.NewFloat(token.Value))
		case lexer.TokenStringLiteral:
			elements = append(elements, ast.NewString(token.Value))
		case lexer.TokenOpenParen:
			// Recurse to collect everything up to the matching `)'.
			tuple := ast.NewTuple(PreParser(l, make([]ast.Node, 0), "("))
			elements = append(elements, tuple)
		case lexer.TokenCloseParen:
			if delimiter != "(" {
				panic(fmt.Sprint("read: unexpected `)'"))
			}
			return elements
		case lexer.TokenQuote:
			// 'x expands to (quote x); the recursive call reads exactly one datum.
			quote := []ast.Node{ast.NewName(constants.QUOTE)}
			quote = append(quote, PreParser(l, make([]ast.Node, 0), "'")...)
			elements = append(elements, ast.NewTuple(quote))
		case lexer.TokenQuasiquote:
			quasiquote := []ast.Node{ast.NewName(constants.QUASIQUOTE)}
			quasiquote = append(quasiquote, PreParser(l, make([]ast.Node, 0), "`")...)
			elements = append(elements, ast.NewTuple(quasiquote))
		case lexer.TokenUnquote:
			unquote := []ast.Node{ast.NewName(constants.UNQUOTE)}
			unquote = append(unquote, PreParser(l, make([]ast.Node, 0), ",")...)
			elements = append(elements, ast.NewTuple(unquote))
		case lexer.TokenUnquoteSplicing:
			unquoteSplicing := []ast.Node{ast.NewName(constants.UNQUOTE_SPLICING)}
			unquoteSplicing = append(unquoteSplicing, PreParser(l, make([]ast.Node, 0), ",@")...)
			elements = append(elements, ast.NewTuple(unquoteSplicing))
		case lexer.TokenError:
			panic(fmt.Errorf("token error: %s", token.Value))
		default:
			panic(fmt.Errorf("unexpected token type: %v", token.Type))
		}

		// A quote-style delimiter consumes exactly one datum, so return after
		// the first element has been read.
		switch delimiter {
		case "'", "`", ",", ",@":
			return elements
		}
	}

	if delimiter != " " {
		panic(fmt.Errorf("unclosed delimiter, expected: `%s'", delimiter))
	}
	return elements
}
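// Hypothetical top-level driver for PreParser, shown only as a sketch: the
// lexer.NewLexer constructor is an assumption and may not match the real
// lexer package. The " " delimiter marks the top level, which is the only
// delimiter PreParser accepts when it reaches EOF.
func Read(source string) []ast.Node {
	l := lexer.NewLexer(source)
	return PreParser(l, make([]ast.Node, 0), " ")
}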
func ParseNestedQuasiquote(tuple *ast.Tuple, level int) ast.Node {
	// tuple can be:
	//   (unquote <datum>)
	//   (unquote-splicing <datum>)
	//   (quasiquote <datum>)
	// or an ordinary tuple containing such forms, e.g.
	//   (var1 var2 (unquote <datum>))
	// We handle each of these cases below.
	elements := tuple.Elements
	if len(elements) == 0 {
		return tuple
	}

	if name, ok := elements[0].(*ast.Name); ok {
		switch name.Identifier {
		case constants.UNQUOTE:
			return ParseUnquote(tuple, level-1)
		case constants.UNQUOTE_SPLICING:
			return ParseUnquoteSplicing(tuple, level-1)
		case constants.QUASIQUOTE:
			return ParseQuasiquote(tuple, level+1)
		}
	}

	// Not a special form: recurse into nested tuples at the same level.
	slice := make([]ast.Node, 0, len(elements))
	for _, node := range elements {
		if nested, ok := node.(*ast.Tuple); ok {
			node = ParseNestedQuasiquote(nested, level)
		}
		slice = append(slice, node)
	}
	return ast.NewTuple(slice)
}
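// A minimal, hypothetical illustration of the level bookkeeping above. The
// symbols `a` and `b` are made up for the example, and what ParseUnquote
// actually returns at level 0 is defined by functions not shown in this
// section.
//
// The AST built below corresponds to the body of `(a ,(b)), i.e.
//   (a (unquote (b)))
// At level 1 the unquote drops to level 0, so its datum is the part that gets
// evaluated; under a second enclosing quasiquote it would only drop to level 1
// and remain quoted.
func exampleNestedQuasiquoteLevels() ast.Node {
	inner := ast.NewTuple([]ast.Node{
		ast.NewName(constants.UNQUOTE),
		ast.NewTuple([]ast.Node{ast.NewName("b")}),
	})
	body := ast.NewTuple([]ast.Node{ast.NewName("a"), inner})

	// body's head is an ordinary name, so ParseNestedQuasiquote walks its
	// elements and dispatches the nested tuple into ParseUnquote(inner, 0).
	return ParseNestedQuasiquote(body, 1)
}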