// The Token2* helpers narrow a generic common.Token to its concrete type;
// they flag an error on the token itself if the type does not match.

func Token2eof(tok common.Token) *EofTok {
	et, ok := tok.(*EofTok)
	if !ok {
		tok.Error("Not an EOF token")
	}
	return et
}

func Token2int(tok common.Token) *IntTok {
	it, ok := tok.(*IntTok)
	if !ok {
		tok.Error("Not an integer token")
	}
	return it
}

func Token2operator(tok common.Token) *OperatorTok {
	ot, ok := tok.(*OperatorTok)
	if !ok {
		tok.Error("Not an operator token")
	}
	return ot
}

func Token2simple(tok common.Token) *SimpleToken {
	st, ok := tok.(*SimpleToken)
	if !ok {
		tok.Error("Not a simple token")
	}
	return st
}

func Token2id(tok common.Token) *IdTok {
	it, ok := tok.(*IdTok)
	if !ok {
		tok.Error("Not an ID token")
	}
	return it
}

func Token2string(tok common.Token) *StringTok {
	st, ok := tok.(*StringTok)
	if !ok {
		tok.Error("Not a string token")
	}
	return st
}

func Token2space(tok common.Token) *SpaceTok {
	st, ok := tok.(*SpaceTok)
	if !ok {
		tok.Error("Not a space token")
	}
	return st
}

func Token2char(tok common.Token) *CharTok {
	ct, ok := tok.(*CharTok)
	if !ok {
		tok.Error("Not a character token")
	}
	return ct
}
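// Illustrative sketch only, not part of the original sources: the converters
// above pair with a Type() check, narrowing the generic token once its kind
// is known. The helper name firstSpaceWidth is hypothetical, and it is an
// assumption that strings, srcbuf and common are imported in this file.
func firstSpaceWidth(src string) int64 {
	lx := NewLexer(srcbuf.NewSourceFromBuffer(strings.Bytes(src)))
	for tok := lx.GetToken(); tok.Type() != common.TOK_EOF; tok = lx.GetToken() {
		if tok.Type() == common.TOK_SPACE {
			// safe to narrow: the type tag was checked first
			return int64(Token2space(tok).Space())
		}
	}
	return 0
}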
// testStringVsTokens lexes str and compares the resulting token stream
// against the expected tokens in toks.
func testStringVsTokens(t *testing.T, str string, toks []*tstTok) {
	lx := NewLexer(srcbuf.NewSourceFromBuffer(strings.Bytes(str)))
	var tok common.Token
	var i int
	for tok, i = lx.GetToken(), 0; tok.Type() != common.TOK_EOF && i < len(toks); tok, i = lx.GetToken(), i+1 {
		switch typ := tok.(type) {
		case *IntTok:
			if toks[i].typ != typ.Type() {
				t.Errorf("Expected token type %v, but got: %v.\n", toks[i].typ, typ.Type())
			}
			if toks[i].numVal != typ.Value() {
				t.Errorf("Expected value %v, but got: %v.\n", toks[i].numVal, typ.Value())
			}
		case *CharTok:
			if toks[i].typ != typ.Type() {
				t.Errorf("Expected token type %v, but got: %v.\n", toks[i].typ, typ.Type())
			}
			if toks[i].numVal != int64(typ.Value()) {
				t.Errorf("Expected value %v, but got: %v.\n", toks[i].numVal, typ.Value())
			}
		case *StringTok:
			if toks[i].typ != typ.Type() {
				t.Errorf("Expected token type %v, but got: %v.\n", toks[i].typ, typ.Type())
			}
			if toks[i].strVal != typ.Value() {
				t.Errorf("Expected value %v, but got: %v.\n", toks[i].strVal, typ.Value())
			}
		case *SpaceTok:
			if toks[i].typ != typ.Type() {
				t.Errorf("Expected token type %v, but got: %v.\n", toks[i].typ, typ.Type())
			}
			space := int64(typ.Space())
			if typ.AtStartOfLine() {
				space += 1000
			}
			if toks[i].numVal != space {
				t.Errorf("Expected space %v, but got: %v.\n", toks[i].numVal, space)
			}
		default:
			if toks[i].typ != typ.Type() {
				t.Errorf("Expected token type %v, but got: %v.\n", toks[i].typ, typ.Type())
			}
			if toks[i].checkContent && toks[i].content != typ.Content() {
				t.Errorf("Expected content %v, but got: %v.\n", toks[i].content, typ.Content())
			}
		}
	}
	if i != len(toks) {
		t.Error("Got too few tokens!")
	}
	if tok.Type() != common.TOK_EOF {
		t.Error("Got too many tokens!")
	}
}
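// The tstTok type is defined elsewhere; a minimal sketch consistent with the
// fields read above (an assumption, the real declaration may differ):
//
//	type tstTok struct {
//		typ          common.TokEnum // expected token type
//		numVal       int64          // expected numeric value (ints, chars, space width)
//		strVal       string         // expected string value for string tokens
//		content      string         // expected raw content for other tokens
//		checkContent bool           // compare content in the default case?
//	}
//
// A test would then call, for instance (expected tokens elided):
//
//	testStringVsTokens(t, "a: b", []*tstTok{ /* expected tokens */ })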
// special handling of EOF (so we have valid code if possible)
func handleEof(tok common.Token, tb *tokBuf) bool {
	if tok.Type() == common.TOK_EOF {
		if tb.indentLevel > 0 {
			handleIndent(tb.lx.NewSpaceTok(tok, 0, true), tok, tb)
		} else {
			tb.curTok = tb.tokBuf.PushBack(tok)
		}
		return true
	}
	return false
}
// handleSpace buffers space tokens; a space at the start of a line may
// introduce an indentation change.
func handleSpace(tok common.Token, tb *tokBuf) bool {
	if tok.Type() == common.TOK_SPACE {
		spaceTok := lexer.Token2space(tok)
		if spaceTok.AtStartOfLine() {
			handlePossibleIndent(tok, tb)
		} else {
			tb.curTok = tb.tokBuf.PushBack(tok)
		}
		return true
	}
	return false
}
// recordIndent raises the indentation level and emits the matching
// indentation token.
func recordIndent(indent int, tok common.Token, tb *tokBuf) {
	// we can have half indentations (2 spaces) and
	// full indentations (4 spaces)
	switch indent {
	case 2:
		tb.indentLevel++
		tb.curTok = tb.tokBuf.PushBack(tb.lx.NewCopyTok(common.TOK_HALF_INDENT, tok))
	case 4:
		tb.indentLevel += 2
		tb.curTok = tb.tokBuf.PushBack(tb.lx.NewCopyTok(common.TOK_INDENT, tok))
	default:
		tok.Error("Indentation error")
	}
}
// handleColon buffers a colon token; a colon followed directly by a newline
// is merged into a single TOK_BLOCK_START token.
func handleColon(tok common.Token, tb *tokBuf) bool {
	if tok.Type() == common.TOK_COLON {
		curTok := tb.tokBuf.PushBack(tok)
		tok2 := tb.getFilteredToken() // what's behind the colon?
		if tok2.Type() != common.TOK_NL {
			// no TOK_NL: just 2 normal tokens then
			tb.tokBuf.PushBack(tok2)
		} else {
			curTok.Value = tb.lx.NewAnyTok(common.TOK_BLOCK_START,
				tok.SourcePiece().Start(), tok2.SourcePiece().End())
		}
		tb.curTok = curTok
		return true
	}
	return false
}
// NewCopyTok creates a SimpleToken of the given type that covers the same
// source piece as tok.
func (lx *Lexer) NewCopyTok(typ common.TokEnum, tok common.Token) common.Token {
	return &SimpleToken{typ, tok.SourcePiece()}
}
// remove old lines from the underlying source buffer
func (tb *tokBuf) ClearUpTo(tok common.Token) {
	tb.lx.ClearUpTo(tok.SourcePiece().Start())
}
// recordDedent checks that the dedentation width is even and then records
// the first and any further dedent tokens.
func recordDedent(dedent int, tok common.Token, tb *tokBuf) {
	if dedent&1 != 0 {
		tok.Error("Uneven dedentation error")
	}
	recordOtherDedents(recordFirstDedent(dedent/2, tok, tb), tok, tb)
}