Example #1
func TestQuoteList(t *testing.T) {
	r := strings.NewReader("'(1 2)")
	parser := New(lexer.New(r))
	expected := types.NewList(types.Number(1), types.Number(2))
	exps, err := parser.Parse()
	if err != nil {
		t.Fatalf("parser failed: %s", err)
	}
	if !reflect.DeepEqual(exps, expected) {
		t.Fatalf("unexpected expressions: got %v, want %v", exps, expected)
	}
}
Example #2
// Parse scans the lexer's tokens and returns the resulting expression.
func (p *Parser) Parse() (exps types.Expression, err error) {
	token, err := p.lex.Next()
	if err != nil {
		return nil, err
	}

	switch token {
	case "'":
		// A quote followed by '(' starts a quoted list: scan the
		// following tokens recursively until the matching ")".
		if p.lex.Peek() == '(' {
			// Consume the '(' itself.
			if _, err := p.lex.Next(); err != nil {
				return nil, err
			}
			tokens, err := p.parseList()
			if err != nil {
				return nil, err
			}
			return types.NewList(tokens...), nil
		}
		// Otherwise the quote precedes a single token; return it as a symbol.
		t, err := p.lex.Next()
		if err != nil {
			return nil, err
		}
		return types.Symbol(t), nil
	case "(":
		// Start of an S-expression; parse it as a list.
		return p.parseList()
	case ")":
		return nil, errors.New("unexpected ')'")
	case "#t":
		return types.Boolean(true), nil
	case "#f":
		return types.Boolean(false), nil
	default:
		// If the token is a quoted string literal, unquote it
		// (e.g. \"test\" becomes test).
		if p.lex.IsTokenString() {
			return strconv.Unquote(p.lex.TokenText())
		}
		// Try to parse the token as a number; if that fails, treat it as a symbol.
		if n, err := strconv.ParseFloat(token, 64); err == nil {
			return types.Number(n), nil
		}
		return types.Symbol(token), nil
	}
}
Example #3
// List is a builtin that returns its arguments as a list expression.
func List(args ...types.Expression) (types.Expression, error) {
	return types.NewList(args...), nil
}
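
All three examples build on the project's types package (Expression, Number, Symbol, Boolean, NewList), which is not shown here. The following self-contained sketch illustrates one way such definitions could look, assuming a slice-backed list; it is an assumption for context, not the project's actual implementation.

// A minimal sketch of the types package the examples rely on. The names
// (Expression, Number, Symbol, Boolean, NewList) come from the examples;
// the slice-backed representation below is an assumption, not the
// project's actual code.
package main

import "fmt"

// Expression is any value the interpreter can handle.
type Expression interface{}

// Number, Symbol and Boolean are the atomic expression types seen above.
type Number float64
type Symbol string
type Boolean bool

// NewList collects its arguments into a slice-backed list expression.
func NewList(args ...Expression) []Expression {
	return append([]Expression{}, args...)
}

// List mirrors Example #3: a builtin that returns its arguments as a list.
func List(args ...Expression) (Expression, error) {
	return NewList(args...), nil
}

func main() {
	exp, err := List(Number(1), Number(2), Symbol("x"), Boolean(true))
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(exp) // [1 2 x true]
}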