Example 1
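// TestItemString exercises Token.String for each formatting rule shown
// in the cases below: plain values are quoted, EOF and errors print
// verbatim, and values longer than ten runes are truncated with "...".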
func TestItemString(t *testing.T) {
	type TestCase struct {
		Input    Token
		Expected string
	}
	tests := []TestCase{
		{item(A), `"a"`},
		{item(TokenEOF), "EOF"},
		{Token{TokenError, "some error"}, "some error"},
		{Token{token.New("long"), "test string output for long values"}, `"test strin"...`},
	}

	for idx, tt := range tests {
		output := tt.Input.String()
		if output != tt.Expected {
			t.Errorf("%d. Failed!\n\tExpected: %v\n\tGot: %v\n", idx+1, tt.Expected, output)
		}
	}
}
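The item helper used in the cases above is not included in the snippet. A minimal sketch consistent with the expected outputs follows; its body, and the assumption that token.Token has a String method, are guesses rather than the repository's actual code:

// item wraps a bare token.Token in a lexer Token, reusing the token's
// own text as the value, so that item(A).String() yields `"a"`.
// Hypothetical helper: the original implementation is not shown.
func item(t token.Token) Token {
	return Token{Token: t, Value: t.String()}
}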
Example 2
package lexer

import (
	"reflect"
	"strings"
	"testing"

	"github.com/toqueteos/langtulia/token"
)

var (
	LPAREN = token.New("(")
	RPAREN = token.New(")")
	A      = token.New("a")
	B      = token.New("b")
	X      = token.New("x")
	Y      = token.New("y")
	ZOO    = token.New("zoo")
	OR     = token.New("|")
	SPACE  = token.New("space")
)

// lexText is a single-function grammar used just for testing all the public API methods.
func lexText(l *Lexer) StateFn {
	switch r := l.Next(); {
	case r == EOF:
		l.Emit(TokenEOF)
		return nil
	case r == '\n':
		l.Ignore()
	case isSpace(r):
		// The original snippet is truncated at this case; emitting the
		// SPACE token declared above is an assumed continuation.
		l.Emit(SPACE)
	}
	return lexText
}
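The isSpace helper and the loop that drives the StateFn machine also sit outside the snippet. The sketches below follow the text/template/parse/lex.go design this lexer references; both bodies are assumptions, not the repository's code:

// isSpace reports whether r is a space or tab, mirroring the helper of
// the same name in text/template/parse/lex.go (assumed here).
func isSpace(r rune) bool {
	return r == ' ' || r == '\t'
}

// run drives the lexer: each StateFn returns the next state, and a nil
// state stops the machine. A sketch of the usual Pike-style driver.
func (l *Lexer) run() {
	for state := StateFn(lexText); state != nil; {
		state = state(l)
	}
}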
Example 3
// Package lexer is modeled on the text/template lexer:
// http://golang.org/src/text/template/parse/lex.go
//
// More info in Rob Pike's talk "Lexical Scanning in Go":
// https://www.youtube.com/watch?v=HxaD_trXwRE
package lexer

import (
	"fmt"
	"strings"
	"unicode/utf8"

	"github.com/toqueteos/langtulia/token"
)

var (
	TokenError = token.New("err")
	TokenEOF   = token.New("EOF")
)

// Token pairs a token.Token with its string value.
type Token struct {
	Token token.Token
	Value string
}

func (t Token) String() string {
	switch t.Token {
	case TokenError:
		return t.Value
	case TokenEOF:
		return "EOF"