Example #1
// KeywordOrIdent scans a run of letters and returns either the matching
// keyword token or an IDENT token.
func (S *Lexer) KeywordOrIdent() *Token {
	buf := util.NewStringBuffer()
	for S.isLetter() {
		buf.Append(S.ch)
		// LETTER consumes the current letter and advances the lexer
		// (assumed from its use here; the method is defined elsewhere).
		S.LETTER()
	}
	str := buf.String()
	switch str {
	case "import":
		return &Token{tokenType: IMPORT, text: str}
	case "static":
		return &Token{tokenType: STATIC, text: str}
	case "package":
		return &Token{tokenType: PACKAGE, text: str}
	case "class":
		return &Token{tokenType: CLASS, text: str}
	case "case":
		return &Token{tokenType: CASE, text: str}
	case "match":
		return &Token{tokenType: MATCH, text: str}
	case "return":
		return &Token{tokenType: RETURN, text: str}
	default:
		return &Token{tokenType: IDENT, text: str}
	}
}
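
The switch above maps each spelled-out keyword to its token type and falls back to IDENT for anything else. As the keyword set grows, a table-driven lookup keeps the method short. The sketch below is only an illustration of that alternative: it reuses the Token struct, the tokenType field, and the keyword constants from the example, while TokenType is an assumed name for the constants' type, which is not visible here.

// A sketch of a table-driven alternative to the switch in KeywordOrIdent.
// TokenType is an assumed name for the type of the tokenType field; the
// keyword constants are taken from the example above.
var keywords = map[string]TokenType{
	"import":  IMPORT,
	"static":  STATIC,
	"package": PACKAGE,
	"class":   CLASS,
	"case":    CASE,
	"match":   MATCH,
	"return":  RETURN,
}

// keywordOrIdentToken builds the token for a scanned word: a keyword token
// if the word is in the table, otherwise an IDENT token.
func keywordOrIdentToken(str string) *Token {
	if tt, ok := keywords[str]; ok {
		return &Token{tokenType: tt, text: str}
	}
	return &Token{tokenType: IDENT, text: str}
}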
Example #2
// TestStringBuffer checks that Append handles multi-byte runes (Thai
// characters here) and that AppendStr concatenates a whole string.
func TestStringBuffer(t *testing.T) {
	b := util.NewStringBuffer()
	s := "abcกขค"
	for _, ch := range s {
		b.Append(ch)
	}
	if b.String() != s {
		t.Fatalf("String() = %q, want %q", b.String(), s)
	}

	b.AppendStr("xyz")
	if b.String() != "abcกขคxyz" {
		t.Fatalf("String() = %q, want %q", b.String(), "abcกขคxyz")
	}
}
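
The util package itself is not shown in these examples. A minimal sketch of a StringBuffer satisfying the API used above (NewStringBuffer, Append taking a rune, AppendStr, String) could be rune-backed so that multi-byte characters like the Thai letters in the test round-trip correctly; this is an assumption about the implementation, not the project's actual code.

package util

// StringBuffer is a sketch of a rune-based buffer satisfying the API used
// in the examples above (an assumption, not the project's actual code).
type StringBuffer struct {
	runes []rune
}

// NewStringBuffer returns an empty buffer.
func NewStringBuffer() *StringBuffer {
	return &StringBuffer{}
}

// Append adds a single rune, so multi-byte characters are kept intact.
func (b *StringBuffer) Append(ch rune) {
	b.runes = append(b.runes, ch)
}

// AppendStr adds every rune of s.
func (b *StringBuffer) AppendStr(s string) {
	b.runes = append(b.runes, []rune(s)...)
}

// String returns the accumulated runes as a UTF-8 string.
func (b *StringBuffer) String() string {
	return string(b.runes)
}

A strings.Builder would also work for pure appending; the rune slice simply keeps the sketch minimal and matches the per-rune Append seen in the test.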