// lexFileGocc lexes the given file and pretty-prints the n first tokens to // standard output, using the Gocc generated lexer. func lexFileGocc(path string, n int) (err error) { var s scanner.Scanner if path == "-" { fmt.Fprintln(os.Stderr, "Lexing from standard input") s, err = scanner.New(os.Stdin) } else { fmt.Fprintf(os.Stderr, "Lexing %q\n", path) s, err = scanner.Open(path) } if err != nil { return errutil.Err(err) } for i := 0; ; i++ { if n != 0 && i == n { break } tok := s.Scan() if tok.Type == gocctoken.INVALID { elog.Printf("ERROR %d: %#v\n", i, tok) fmt.Printf(" lit: %q\n", string(tok.Lit)) } else { fmt.Printf("token %d: %#v\n", i, tok) fmt.Printf(" lit: %q\n", string(tok.Lit)) } if tok.Type == gocctoken.EOF { break } } fmt.Fprintln(os.Stderr) return nil }
// TestLexer checks the Gocc generated lexer against a set of golden test
// files: each file is scanned from start to EOF and every produced token —
// type, literal and byte offset — is compared against the expected token
// stream recorded below.
func TestLexer(t *testing.T) {
	// golden records, for each test file path, the complete token stream the
	// lexer is expected to produce, including INVALID tokens for malformed
	// input and the terminating "$" (EOF) token.
	var golden = []struct {
		path string
		toks []*token.Token
	}{
		{ path: "../../testdata/incorrect/lexer/bad.c", toks: []*token.Token{ { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 114}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("main"), Pos: token.Pos{Offset: 118}, }, { Type: token.TokMap.Type("("), Lit: []byte("("), Pos: token.Pos{Offset: 122}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("void"), Pos: token.Pos{Offset: 123}, }, { Type: token.TokMap.Type(")"), Lit: []byte(")"), Pos: token.Pos{Offset: 127}, }, { Type: token.TokMap.Type("{"), Lit: []byte("{"), Pos: token.Pos{Offset: 129}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("42"), Pos: token.Pos{Offset: 133}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 135}, }, { Type: token.TokMap.Type("}"), Lit: []byte("}"), Pos: token.Pos{Offset: 137}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("/*\n\tIt is not legal to end the code like this, \n\twithout a comment terminator\n"), Pos: token.Pos{Offset: 143}, }, { Type: token.TokMap.Type("$"), Lit: []byte(""), Pos: token.Pos{Offset: 221}, }, }, }, { path: "../../testdata/incorrect/lexer/good.c", toks: []*token.Token{ { Type: token.TokMap.Type("{"), Lit: []byte("{"), Pos: token.Pos{Offset: 220}, }, { Type: token.TokMap.Type("}"), Lit: []byte("}"), Pos: token.Pos{Offset: 222}, }, { Type: token.TokMap.Type("if"), Lit: []byte("if"), Pos: token.Pos{Offset: 277}, }, { Type: token.TokMap.Type("else"), Lit: []byte("else"), Pos: token.Pos{Offset: 280}, }, { Type: token.TokMap.Type("while"), Lit: []byte("while"), Pos: token.Pos{Offset: 285}, }, { Type: token.TokMap.Type("return"), Lit: []byte("return"), Pos: token.Pos{Offset: 332}, }, { Type: token.TokMap.Type("&&"), Lit: []byte("&&"), Pos: token.Pos{Offset: 339}, }, { Type: token.TokMap.Type("=="), Lit: []byte("=="), Pos: token.Pos{Offset: 342}, }, { Type: token.TokMap.Type("!="), Lit: []byte("!="), Pos: 
		token.Pos{Offset: 345}, }, { Type: token.TokMap.Type("<="), Lit: []byte("<="), Pos: token.Pos{Offset: 348}, }, { Type: token.TokMap.Type(">="), Lit: []byte(">="), Pos: token.Pos{Offset: 351}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("char"), Pos: token.Pos{Offset: 354}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 359}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("void"), Pos: token.Pos{Offset: 363}, }, { Type: token.TokMap.Type("+"), Lit: []byte("+"), Pos: token.Pos{Offset: 371}, }, { Type: token.TokMap.Type("-"), Lit: []byte("-"), Pos: token.Pos{Offset: 373}, }, { Type: token.TokMap.Type("*"), Lit: []byte("*"), Pos: token.Pos{Offset: 375}, }, { Type: token.TokMap.Type("/"), Lit: []byte("/"), Pos: token.Pos{Offset: 377}, }, { Type: token.TokMap.Type("<"), Lit: []byte("<"), Pos: token.Pos{Offset: 380}, }, { Type: token.TokMap.Type(">"), Lit: []byte(">"), Pos: token.Pos{Offset: 382}, }, { Type: token.TokMap.Type("="), Lit: []byte("="), Pos: token.Pos{Offset: 384}, }, { Type: token.TokMap.Type("("), Lit: []byte("("), Pos: token.Pos{Offset: 387}, }, { Type: token.TokMap.Type(","), Lit: []byte(","), Pos: token.Pos{Offset: 388}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 389}, }, { Type: token.TokMap.Type(")"), Lit: []byte(")"), Pos: token.Pos{Offset: 390}, }, { Type: token.TokMap.Type("["), Lit: []byte("["), Pos: token.Pos{Offset: 392}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("was"), Pos: token.Pos{Offset: 393}, }, { Type: token.TokMap.Type("-"), Lit: []byte("-"), Pos: token.Pos{Offset: 396}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("colon"), Pos: token.Pos{Offset: 397}, }, { Type: token.TokMap.Type("]"), Lit: []byte("]"), Pos: token.Pos{Offset: 402}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("17"), Pos: token.Pos{Offset: 522}, }, { Type: token.TokMap.Type("-"), Lit: []byte("-"), Pos: token.Pos{Offset: 525}, }, { Type: token.TokMap.Type("int_lit"), 
		Lit: []byte("17"), Pos: token.Pos{Offset: 526}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("ponderosa"), Pos: token.Pos{Offset: 568}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("Black"), Pos: token.Pos{Offset: 578}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("Steel"), Pos: token.Pos{Offset: 584}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("PUMPKIN"), Pos: token.Pos{Offset: 590}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("AfterMath"), Pos: token.Pos{Offset: 598}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("aBBaoN"), Pos: token.Pos{Offset: 608}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("faT"), Pos: token.Pos{Offset: 615}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("TRacKs"), Pos: token.Pos{Offset: 619}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("K9"), Pos: token.Pos{Offset: 628}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("R23"), Pos: token.Pos{Offset: 631}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("B52"), Pos: token.Pos{Offset: 635}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("Track15"), Pos: token.Pos{Offset: 639}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("not4money"), Pos: token.Pos{Offset: 647}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("378"), Pos: token.Pos{Offset: 657}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("WHOIS666999SIOHM"), Pos: token.Pos{Offset: 661}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("was"), Pos: token.Pos{Offset: 687}, }, { Type: token.TokMap.Type("-"), Lit: []byte("-"), Pos: token.Pos{Offset: 690}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("floating"), Pos: token.Pos{Offset: 691}, }, { Type: token.TokMap.Type("-"), Lit: []byte("-"), Pos: token.Pos{Offset: 699}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("point"), Pos: token.Pos{Offset: 700}, }, { Type: token.TokMap.Type("-"), Lit: []byte("-"), Pos: token.Pos{Offset: 705}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("number"), 
		Pos: token.Pos{Offset: 706}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("2"), Pos: token.Pos{Offset: 852}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("die4U"), Pos: token.Pos{Offset: 853}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("Function"), Pos: token.Pos{Offset: 920}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("PrOceDuRE"), Pos: token.Pos{Offset: 929}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("begIN"), Pos: token.Pos{Offset: 939}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("eNd"), Pos: token.Pos{Offset: 945}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("PrinT"), Pos: token.Pos{Offset: 949}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("rEad"), Pos: token.Pos{Offset: 955}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("iF"), Pos: token.Pos{Offset: 960}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("THen"), Pos: token.Pos{Offset: 963}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("StaTic"), Pos: token.Pos{Offset: 968}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("ElSe"), Pos: token.Pos{Offset: 976}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("wHilE"), Pos: token.Pos{Offset: 981}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("Do"), Pos: token.Pos{Offset: 987}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("reTurN"), Pos: token.Pos{Offset: 990}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("noT"), Pos: token.Pos{Offset: 997}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("AnD"), Pos: token.Pos{Offset: 1001}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("OR"), Pos: token.Pos{Offset: 1005}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("TrUE"), Pos: token.Pos{Offset: 1008}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("bOOl"), Pos: token.Pos{Offset: 1013}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("FalsE"), Pos: token.Pos{Offset: 1018}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("sizE"), Pos: token.Pos{Offset: 
		1024}, }, { Type: token.TokMap.Type("$"), Lit: []byte(""),
		// Note, a new line character has been inserted to ensure that
		// the file ends with a new line; thus the actual offset of the
		// EOF is 1099.
		Pos: token.Pos{Offset: 1100}, }, }, }, { path: "../../testdata/incorrect/lexer/long-char.c", toks: []*token.Token{ { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 0}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("main"), Pos: token.Pos{Offset: 4}, }, { Type: token.TokMap.Type("("), Lit: []byte("("), Pos: token.Pos{Offset: 8}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("void"), Pos: token.Pos{Offset: 9}, }, { Type: token.TokMap.Type(")"), Lit: []byte(")"), Pos: token.Pos{Offset: 13}, }, { Type: token.TokMap.Type("{"), Lit: []byte("{"), Pos: token.Pos{Offset: 15}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("char"), Pos: token.Pos{Offset: 19}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("c"), Pos: token.Pos{Offset: 24}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 25}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("c"), Pos: token.Pos{Offset: 29}, }, { Type: token.TokMap.Type("="), Lit: []byte("="), Pos: token.Pos{Offset: 31}, }, { Type: token.TokMap.Type("char_lit"), Lit: []byte("'c'"), Pos: token.Pos{Offset: 33}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 36}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("c"), Pos: token.Pos{Offset: 46}, }, { Type: token.TokMap.Type("="), Lit: []byte("="), Pos: token.Pos{Offset: 48}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("'cc"), Pos: token.Pos{Offset: 50}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("'; "), Pos: token.Pos{Offset: 53}, }, { Type: token.TokMap.Type("}"), Lit: []byte("}"), Pos: token.Pos{Offset: 66}, }, { Type: token.TokMap.Type("$"), Lit: []byte(""), Pos: token.Pos{Offset: 68}, }, }, }, { path: "../../testdata/incorrect/lexer/ugly.c", toks: []*token.Token{ { Type: 
		token.TokMap.Type("INVALID"), Lit: []byte("|"), Pos: token.Pos{Offset: 0}, }, { Type: token.TokMap.Type("+"), Lit: []byte("+"), Pos: token.Pos{Offset: 1}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x01"), Pos: token.Pos{Offset: 2}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x02"), Pos: token.Pos{Offset: 3}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x03"), Pos: token.Pos{Offset: 4}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x04"), Pos: token.Pos{Offset: 5}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x05"), Pos: token.Pos{Offset: 6}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x06"), Pos: token.Pos{Offset: 7}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x07"), Pos: token.Pos{Offset: 8}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x08"), Pos: token.Pos{Offset: 9}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x0E"), Pos: token.Pos{Offset: 15}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x0F"), Pos: token.Pos{Offset: 16}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x10"), Pos: token.Pos{Offset: 17}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x11"), Pos: token.Pos{Offset: 18}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x12"), Pos: token.Pos{Offset: 19}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x13"), Pos: token.Pos{Offset: 20}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x14"), Pos: token.Pos{Offset: 21}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x15"), Pos: token.Pos{Offset: 22}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x16"), Pos: token.Pos{Offset: 23}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x17"), Pos: token.Pos{Offset: 24}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x18"), Pos: token.Pos{Offset: 25}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x19"), Pos: token.Pos{Offset: 26}, }, { Type: token.TokMap.Type("INVALID"), Lit: 
		[]byte("\x1A"), Pos: token.Pos{Offset: 27}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x1B"), Pos: token.Pos{Offset: 28}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x1C"), Pos: token.Pos{Offset: 29}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x1D"), Pos: token.Pos{Offset: 30}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x1E"), Pos: token.Pos{Offset: 31}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte("\x1F"), Pos: token.Pos{Offset: 32}, }, { Type: token.TokMap.Type("!"), Lit: []byte("!"), Pos: token.Pos{Offset: 34}, }, { Type: token.TokMap.Type("INVALID"), Lit: []byte(`"`), Pos: token.Pos{Offset: 35}, }, { Type: token.TokMap.Type("$"), Lit: []byte(""),
		// Note, a new line character has been inserted to ensure that
		// the file ends with a new line; thus the actual offset of the
		// EOF is 533.
		Pos: token.Pos{Offset: 534}, }, }, }, { path: "../../testdata/quiet/lexer/l01.c", toks: []*token.Token{ { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 0}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("main"), Pos: token.Pos{Offset: 4}, }, { Type: token.TokMap.Type("("), Lit: []byte("("), Pos: token.Pos{Offset: 9}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("void"), Pos: token.Pos{Offset: 10}, }, { Type: token.TokMap.Type(")"), Lit: []byte(")"), Pos: token.Pos{Offset: 14}, }, { Type: token.TokMap.Type("{"), Lit: []byte("{"), Pos: token.Pos{Offset: 16}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 18}, }, { Type: token.TokMap.Type("}"), Lit: []byte("}"), Pos: token.Pos{Offset: 20}, }, { Type: token.TokMap.Type("$"), Lit: []byte(""), Pos: token.Pos{Offset: 23}, }, }, }, { path: "../../testdata/quiet/lexer/l02.c", toks: []*token.Token{ { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 0}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("foo"), Pos: token.Pos{Offset: 4}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), 
		Pos: token.Pos{Offset: 7}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 10}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("BarBara"), Pos: token.Pos{Offset: 14}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 21}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 24}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("bar_bara"), Pos: token.Pos{Offset: 28}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 36}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 39}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("bar4711"), Pos: token.Pos{Offset: 43}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 50}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 53}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("b4rb4r4"), Pos: token.Pos{Offset: 57}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 64}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 67}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("abcdefghijklmnopqrstuvwxyz_ABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789"), Pos: token.Pos{Offset: 71}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 135}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 138}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("main"), Pos: token.Pos{Offset: 142}, }, { Type: token.TokMap.Type("("), Lit: []byte("("), Pos: token.Pos{Offset: 146}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("void"), Pos: token.Pos{Offset: 147}, }, { Type: token.TokMap.Type(")"), Lit: []byte(")"), Pos: token.Pos{Offset: 151}, }, { Type: token.TokMap.Type("{"), Lit: []byte("{"), Pos: token.Pos{Offset: 153}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 155}, }, { Type: 
		token.TokMap.Type("}"), Lit: []byte("}"), Pos: token.Pos{Offset: 157}, }, { Type: token.TokMap.Type("$"), Lit: []byte(""), Pos: token.Pos{Offset: 159}, }, }, }, { path: "../../testdata/quiet/lexer/l03.c", toks: []*token.Token{ { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 0}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("main"), Pos: token.Pos{Offset: 4}, }, { Type: token.TokMap.Type("("), Lit: []byte("("), Pos: token.Pos{Offset: 8}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("void"), Pos: token.Pos{Offset: 9}, }, { Type: token.TokMap.Type(")"), Lit: []byte(")"), Pos: token.Pos{Offset: 13}, }, { Type: token.TokMap.Type("{"), Lit: []byte("{"), Pos: token.Pos{Offset: 15}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 19}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("i"), Pos: token.Pos{Offset: 23}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 24}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("i"), Pos: token.Pos{Offset: 28}, }, { Type: token.TokMap.Type("="), Lit: []byte("="), Pos: token.Pos{Offset: 30}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("123456789"), Pos: token.Pos{Offset: 32}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 41}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("i"), Pos: token.Pos{Offset: 71}, }, { Type: token.TokMap.Type("="), Lit: []byte("="), Pos: token.Pos{Offset: 73}, }, { Type: token.TokMap.Type("char_lit"), Lit: []byte("'0'"), Pos: token.Pos{Offset: 75}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 78}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("i"), Pos: token.Pos{Offset: 82}, }, { Type: token.TokMap.Type("="), Lit: []byte("="), Pos: token.Pos{Offset: 84}, }, { Type: token.TokMap.Type("char_lit"), Lit: []byte("'a'"), Pos: token.Pos{Offset: 86}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 89}, }, { 
		Type: token.TokMap.Type("ident"), Lit: []byte("i"), Pos: token.Pos{Offset: 93}, }, { Type: token.TokMap.Type("="), Lit: []byte("="), Pos: token.Pos{Offset: 95}, }, { Type: token.TokMap.Type("char_lit"), Lit: []byte("' '"), Pos: token.Pos{Offset: 97}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 100}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("i"), Pos: token.Pos{Offset: 104}, }, { Type: token.TokMap.Type("="), Lit: []byte("="), Pos: token.Pos{Offset: 106}, }, { Type: token.TokMap.Type("char_lit"), Lit: []byte(`'\n'`), Pos: token.Pos{Offset: 108}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 112}, }, { Type: token.TokMap.Type("}"), Lit: []byte("}"), Pos: token.Pos{Offset: 114}, }, { Type: token.TokMap.Type("$"), Lit: []byte(""), Pos: token.Pos{Offset: 117}, }, }, }, { path: "../../testdata/quiet/lexer/l04.c", toks: []*token.Token{ { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 0}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("main"), Pos: token.Pos{Offset: 4}, }, { Type: token.TokMap.Type("("), Lit: []byte("("), Pos: token.Pos{Offset: 8}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("void"), Pos: token.Pos{Offset: 9}, }, { Type: token.TokMap.Type(")"), Lit: []byte(")"), Pos: token.Pos{Offset: 13}, }, { Type: token.TokMap.Type("{"), Lit: []byte("{"), Pos: token.Pos{Offset: 15}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 19}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("i"), Pos: token.Pos{Offset: 23}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 24}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("char"), Pos: token.Pos{Offset: 28}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("j"), Pos: token.Pos{Offset: 33}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 34}, }, { Type: token.TokMap.Type("if"), Lit: []byte("if"), Pos: token.Pos{Offset: 
		38}, }, { Type: token.TokMap.Type("("), Lit: []byte("("), Pos: token.Pos{Offset: 41}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("1"), Pos: token.Pos{Offset: 42}, }, { Type: token.TokMap.Type("=="), Lit: []byte("=="), Pos: token.Pos{Offset: 43}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("0"), Pos: token.Pos{Offset: 45}, }, { Type: token.TokMap.Type(")"), Lit: []byte(")"), Pos: token.Pos{Offset: 46}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("i"), Pos: token.Pos{Offset: 48}, }, { Type: token.TokMap.Type("="), Lit: []byte("="), Pos: token.Pos{Offset: 50}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("0"), Pos: token.Pos{Offset: 52}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 53}, }, { Type: token.TokMap.Type("else"), Lit: []byte("else"), Pos: token.Pos{Offset: 58}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("i"), Pos: token.Pos{Offset: 63}, }, { Type: token.TokMap.Type("="), Lit: []byte("="), Pos: token.Pos{Offset: 65}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("1"), Pos: token.Pos{Offset: 67}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 68}, }, { Type: token.TokMap.Type("while"), Lit: []byte("while"), Pos: token.Pos{Offset: 72}, }, { Type: token.TokMap.Type("("), Lit: []byte("("), Pos: token.Pos{Offset: 78}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("1"), Pos: token.Pos{Offset: 79}, }, { Type: token.TokMap.Type("=="), Lit: []byte("=="), Pos: token.Pos{Offset: 80}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("0"), Pos: token.Pos{Offset: 82}, }, { Type: token.TokMap.Type(")"), Lit: []byte(")"), Pos: token.Pos{Offset: 83}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("i"), Pos: token.Pos{Offset: 85}, }, { Type: token.TokMap.Type("="), Lit: []byte("="), Pos: token.Pos{Offset: 87}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("0"), Pos: token.Pos{Offset: 89}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: 
		token.Pos{Offset: 90}, }, { Type: token.TokMap.Type("return"), Lit: []byte("return"), Pos: token.Pos{Offset: 94}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("42"), Pos: token.Pos{Offset: 101}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 103}, }, { Type: token.TokMap.Type("}"), Lit: []byte("}"), Pos: token.Pos{Offset: 105}, }, { Type: token.TokMap.Type("$"), Lit: []byte(""), Pos: token.Pos{Offset: 107}, }, }, }, { path: "../../testdata/quiet/lexer/l05.c", toks: []*token.Token{ { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 0}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("main"), Pos: token.Pos{Offset: 4}, }, { Type: token.TokMap.Type("("), Lit: []byte("("), Pos: token.Pos{Offset: 8}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("void"), Pos: token.Pos{Offset: 9}, }, { Type: token.TokMap.Type(")"), Lit: []byte(")"), Pos: token.Pos{Offset: 13}, }, { Type: token.TokMap.Type("{"), Lit: []byte("{"), Pos: token.Pos{Offset: 15}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 19}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("i"), Pos: token.Pos{Offset: 23}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 24}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("1"), Pos: token.Pos{Offset: 28}, }, { Type: token.TokMap.Type("!="), Lit: []byte("!="), Pos: token.Pos{Offset: 29}, }, { Type: token.TokMap.Type("!"), Lit: []byte("!"), Pos: token.Pos{Offset: 31}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("3"), Pos: token.Pos{Offset: 32}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 33}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("4"), Pos: token.Pos{Offset: 37}, }, { Type: token.TokMap.Type("&&"), Lit: []byte("&&"), Pos: token.Pos{Offset: 38}, }, { Type: token.TokMap.Type("("), Lit: []byte("("), Pos: token.Pos{Offset: 40}, }, { Type: token.TokMap.Type("int_lit"), Lit: 
		[]byte("6"), Pos: token.Pos{Offset: 41}, }, { Type: token.TokMap.Type(")"), Lit: []byte(")"), Pos: token.Pos{Offset: 42}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 43}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("7"), Pos: token.Pos{Offset: 47}, }, { Type: token.TokMap.Type("*"), Lit: []byte("*"), Pos: token.Pos{Offset: 48}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("8"), Pos: token.Pos{Offset: 50}, }, { Type: token.TokMap.Type("+"), Lit: []byte("+"), Pos: token.Pos{Offset: 51}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("10"), Pos: token.Pos{Offset: 52}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 54}, }, { Type: token.TokMap.Type("("), Lit: []byte("("), Pos: token.Pos{Offset: 58}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("11"), Pos: token.Pos{Offset: 59}, }, { Type: token.TokMap.Type("-"), Lit: []byte("-"), Pos: token.Pos{Offset: 61}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("12"), Pos: token.Pos{Offset: 62}, }, { Type: token.TokMap.Type(")"), Lit: []byte(")"), Pos: token.Pos{Offset: 64}, }, { Type: token.TokMap.Type("+"), Lit: []byte("+"), Pos: token.Pos{Offset: 65}, }, { Type: token.TokMap.Type("("), Lit: []byte("("), Pos: token.Pos{Offset: 66}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("12"), Pos: token.Pos{Offset: 67}, }, { Type: token.TokMap.Type("/"), Lit: []byte("/"), Pos: token.Pos{Offset: 69}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("16"), Pos: token.Pos{Offset: 70}, }, { Type: token.TokMap.Type(")"), Lit: []byte(")"), Pos: token.Pos{Offset: 72}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 73}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("17"), Pos: token.Pos{Offset: 77}, }, { Type: token.TokMap.Type("<="), Lit: []byte("<="), Pos: token.Pos{Offset: 79}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("18"), Pos: token.Pos{Offset: 81}, }, { Type: 
		token.TokMap.Type("<"), Lit: []byte("<"), Pos: token.Pos{Offset: 84}, }, { Type: token.TokMap.Type("-"), Lit: []byte("-"), Pos: token.Pos{Offset: 85}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("20"), Pos: token.Pos{Offset: 86}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 88}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("i"), Pos: token.Pos{Offset: 92}, }, { Type: token.TokMap.Type("="), Lit: []byte("="), Pos: token.Pos{Offset: 93}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("21"), Pos: token.Pos{Offset: 94}, }, { Type: token.TokMap.Type("=="), Lit: []byte("=="), Pos: token.Pos{Offset: 96}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("22"), Pos: token.Pos{Offset: 98}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 100}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("25"), Pos: token.Pos{Offset: 104}, }, { Type: token.TokMap.Type(">="), Lit: []byte(">="), Pos: token.Pos{Offset: 107}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("27"), Pos: token.Pos{Offset: 109}, }, { Type: token.TokMap.Type(">"), Lit: []byte(">"), Pos: token.Pos{Offset: 111}, }, { Type: token.TokMap.Type("int_lit"), Lit: []byte("28"), Pos: token.Pos{Offset: 112}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 114}, }, { Type: token.TokMap.Type("}"), Lit: []byte("}"), Pos: token.Pos{Offset: 116}, }, { Type: token.TokMap.Type("$"), Lit: []byte(""), Pos: token.Pos{Offset: 118}, }, }, }, { path: "../../testdata/quiet/lexer/l06.c", toks: []*token.Token{ { Type: token.TokMap.Type("ident"), Lit: []byte("int"), Pos: token.Pos{Offset: 259}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("main"), Pos: token.Pos{Offset: 263}, }, { Type: token.TokMap.Type("("), Lit: []byte("("), Pos: token.Pos{Offset: 267}, }, { Type: token.TokMap.Type("ident"), Lit: []byte("void"), Pos: token.Pos{Offset: 297}, }, { Type: token.TokMap.Type(")"), Lit: []byte(")"), Pos: 
		token.Pos{Offset: 302}, }, { Type: token.TokMap.Type("{"), Lit: []byte("{"), Pos: token.Pos{Offset: 304}, }, { Type: token.TokMap.Type(";"), Lit: []byte(";"), Pos: token.Pos{Offset: 306}, }, { Type: token.TokMap.Type("}"), Lit: []byte("}"), Pos: token.Pos{Offset: 308}, }, { Type: token.TokMap.Type("$"), Lit: []byte(""), Pos: token.Pos{Offset: 313}, }, }, }, }
	// Lex each golden file in turn and verify the produced tokens one by one.
	for _, g := range golden {
		log.Println("path:", g.path)
		s, err := scanner.Open(g.path)
		if err != nil {
			t.Error(err)
			continue
		}
		for j := 0; ; j++ {
			got := s.Scan()
			// The lexer produced more tokens than expected.
			if j >= len(g.toks) {
				t.Errorf("%s: invalid number of tokens; expected %d tokens, got > %d", g.path, len(g.toks), j)
				break
			}
			if want := g.toks[j]; !tokenEqual(got, want) {
				t.Errorf("%s: token %d mismatch; expected %#v, got %#v", g.path, j, want, got)
			}
			// Stop at EOF; reaching it before the last expected token means
			// the lexer produced too few tokens.
			if got.Type == token.EOF {
				if j != len(g.toks)-1 {
					t.Errorf("%s: invalid number of tokens; expected %d tokens, got %d", g.path, len(g.toks), j+1)
				}
				break
			}
		}
	}
}
// TestParser parses a set of syntactically valid uC source files and checks
// that each resulting abstract syntax tree is identical to a hand-written
// golden AST, including the recorded source offsets of every node (NamePos,
// OpPos, ValPos, Lparen, Rbrace, etc.).
func TestParser(t *testing.T) {
	// Golden test cases; want is the exact AST expected when parsing path.
	var golden = []struct {
		path string
		want *ast.File
	}{
		{
			// l05.c: expression statements exercising unary and binary
			// operator parsing.
			path: "../../testdata/quiet/lexer/l05.c",
			want: &ast.File{
				Decls: []ast.Decl{
					&ast.FuncDecl{
						FuncType: &ast.FuncType{
							Result: &ast.Ident{
								NamePos: 0,
								Name:    "int",
							},
							Lparen: 8,
							Params: []*ast.VarDecl{
								{
									VarType: &ast.Ident{
										NamePos: 9,
										Name:    "void",
									},
								},
							},
							Rparen: 13,
						},
						FuncName: &ast.Ident{
							NamePos: 4,
							Name:    "main",
						},
						Body: &ast.BlockStmt{
							Lbrace: 15,
							Items: []ast.BlockItem{
								&ast.VarDecl{
									VarType: &ast.Ident{
										NamePos: 19,
										Name:    "int",
									},
									VarName: &ast.Ident{
										NamePos: 23,
										Name:    "i",
									},
								},
								&ast.ExprStmt{
									X: &ast.BinaryExpr{
										X: &ast.BasicLit{
											ValPos: 28,
											Kind:   token.IntLit,
											Val:    "1",
										},
										OpPos: 29,
										Op:    token.Ne,
										Y: &ast.UnaryExpr{
											OpPos: 31,
											Op:    token.Not,
											X: &ast.BasicLit{
												ValPos: 32,
												Kind:   token.IntLit,
												Val:    "3",
											},
										},
									},
								},
								&ast.ExprStmt{
									X: &ast.BinaryExpr{
										X: &ast.BasicLit{
											ValPos: 37,
											Kind:   token.IntLit,
											Val:    "4",
										},
										OpPos: 38,
										Op:    token.Land,
										Y: &ast.ParenExpr{
											Lparen: 40,
											X: &ast.BasicLit{
												ValPos: 41,
												Kind:   token.IntLit,
												Val:    "6",
											},
											Rparen: 42,
										},
									},
								},
								&ast.ExprStmt{
									X: &ast.BinaryExpr{
										X: &ast.BinaryExpr{
											X: &ast.BasicLit{
												ValPos: 47,
												Kind:   token.IntLit,
												Val:    "7",
											},
											OpPos: 48,
											Op:    token.Mul,
											Y: &ast.BasicLit{
												ValPos: 50,
												Kind:   token.IntLit,
												Val:    "8",
											},
										},
										OpPos: 51,
										Op:    token.Add,
										Y: &ast.BasicLit{
											ValPos: 52,
											Kind:   token.IntLit,
											Val:    "10",
										},
									},
								},
								&ast.ExprStmt{
									X: &ast.BinaryExpr{
										X: &ast.ParenExpr{
											Lparen: 58,
											X: &ast.BinaryExpr{
												X: &ast.BasicLit{
													ValPos: 59,
													Kind:   token.IntLit,
													Val:    "11",
												},
												OpPos: 61,
												Op:    token.Sub,
												Y: &ast.BasicLit{
													ValPos: 62,
													Kind:   token.IntLit,
													Val:    "12",
												},
											},
											Rparen: 64,
										},
										OpPos: 65,
										Op:    token.Add,
										Y: &ast.ParenExpr{
											Lparen: 66,
											X: &ast.BinaryExpr{
												X: &ast.BasicLit{
													ValPos: 67,
													Kind:   token.IntLit,
													Val:    "12",
												},
												OpPos: 69,
												Op:    token.Div,
												Y: &ast.BasicLit{
													ValPos: 70,
													Kind:   token.IntLit,
													Val:    "16",
												},
											},
											Rparen: 72,
										},
									},
								},
								&ast.ExprStmt{
									X: &ast.BinaryExpr{
										X: &ast.BinaryExpr{
											X: &ast.BasicLit{
												ValPos: 77,
												Kind:   token.IntLit,
												Val:    "17",
											},
											OpPos: 79,
											Op:    token.Le,
											Y: &ast.BasicLit{
												ValPos: 81,
												Kind:   token.IntLit,
												Val:    "18",
											},
										},
										OpPos: 84,
										Op:    token.Lt,
										Y: &ast.UnaryExpr{
											OpPos: 85,
											Op:    token.Sub,
											X: &ast.BasicLit{
												ValPos: 86,
												Kind:   token.IntLit,
												Val:    "20",
											},
										},
									},
								},
								&ast.ExprStmt{
									X: &ast.BinaryExpr{
										X: &ast.Ident{
											NamePos: 92,
											Name:    "i",
										},
										OpPos: 93,
										Op:    token.Assign,
										Y: &ast.BinaryExpr{
											X: &ast.BasicLit{
												ValPos: 94,
												Kind:   token.IntLit,
												Val:    "21",
											},
											OpPos: 96,
											Op:    token.Eq,
											Y: &ast.BasicLit{
												ValPos: 98,
												Kind:   token.IntLit,
												Val:    "22",
											},
										},
									},
								},
								&ast.ExprStmt{
									X: &ast.BinaryExpr{
										X: &ast.BinaryExpr{
											X: &ast.BasicLit{
												ValPos: 104,
												Kind:   token.IntLit,
												Val:    "25",
											},
											OpPos: 107,
											Op:    token.Ge,
											Y: &ast.BasicLit{
												ValPos: 109,
												Kind:   token.IntLit,
												Val:    "27",
											},
										},
										OpPos: 111,
										Op:    token.Gt,
										Y: &ast.BasicLit{
											ValPos: 112,
											Kind:   token.IntLit,
											Val:    "28",
										},
									},
								},
							},
							Rbrace: 116,
						},
					},
				},
			},
		},
		{
			// p01.c: variable declarations and (chained) assignment
			// expressions.
			path: "../../testdata/quiet/parser/p01.c",
			want: &ast.File{
				Decls: []ast.Decl{
					&ast.FuncDecl{
						FuncType: &ast.FuncType{
							Result: &ast.Ident{
								NamePos: 0,
								Name:    "int",
							},
							Lparen: 8,
							Params: []*ast.VarDecl{
								{
									VarType: &ast.Ident{
										NamePos: 9,
										Name:    "void",
									},
								},
							},
							Rparen: 13,
						},
						FuncName: &ast.Ident{
							NamePos: 4,
							Name:    "main",
						},
						Body: &ast.BlockStmt{
							Lbrace: 15,
							Items: []ast.BlockItem{
								&ast.VarDecl{
									VarType: &ast.Ident{
										NamePos: 19,
										Name:    "int",
									},
									VarName: &ast.Ident{
										NamePos: 23,
										Name:    "x",
									},
								},
								&ast.VarDecl{
									VarType: &ast.Ident{
										NamePos: 28,
										Name:    "int",
									},
									VarName: &ast.Ident{
										NamePos: 32,
										Name:    "y",
									},
								},
								&ast.ExprStmt{
									X: &ast.BinaryExpr{
										X: &ast.Ident{
											NamePos: 37,
											Name:    "x",
										},
										OpPos: 39,
										Op:    token.Assign,
										Y: &ast.BasicLit{
											ValPos: 41,
											Kind:   token.IntLit,
											Val:    "42",
										},
									},
								},
								&ast.ExprStmt{
									X: &ast.BinaryExpr{
										X: &ast.Ident{
											NamePos: 47,
											Name:    "x",
										},
										OpPos: 48,
										Op:    token.Assign,
										Y: &ast.BinaryExpr{
											X: &ast.Ident{
												NamePos: 49,
												Name:    "y",
											},
											OpPos: 50,
											Op:    token.Assign,
											Y: &ast.BasicLit{
												ValPos: 51,
												Kind:   token.IntLit,
												Val:    "4711",
											},
										},
									},
								},
							},
							Rbrace: 57,
						},
					},
				},
			},
		},
		{
			// p02.c: empty statement, while loop and if statement.
			path: "../../testdata/quiet/parser/p02.c",
			want: &ast.File{
				Decls: []ast.Decl{
					&ast.FuncDecl{
						FuncType: &ast.FuncType{
							Result: &ast.Ident{
								NamePos: 1,
								Name:    "int",
							},
							Lparen: 9,
							Params: []*ast.VarDecl{
								{
									VarType: &ast.Ident{
										NamePos: 10,
										Name:    "void",
									},
								},
							},
							Rparen: 14,
						},
						FuncName: &ast.Ident{
							NamePos: 5,
							Name:    "main",
						},
						Body: &ast.BlockStmt{
							Lbrace: 16,
							Items: []ast.BlockItem{
								&ast.VarDecl{
									VarType: &ast.Ident{
										NamePos: 20,
										Name:    "int",
									},
									VarName: &ast.Ident{
										NamePos: 24,
										Name:    "x",
									},
								},
								&ast.EmptyStmt{
									Semicolon: 29,
								},
								&ast.WhileStmt{
									While: 33,
									Cond: &ast.BinaryExpr{
										X: &ast.Ident{
											NamePos: 40,
											Name:    "x",
										},
										OpPos: 41,
										Op:    token.Lt,
										Y: &ast.BasicLit{
											ValPos: 42,
											Kind:   token.IntLit,
											Val:    "10",
										},
									},
									Body: &ast.ExprStmt{
										X: &ast.BinaryExpr{
											X: &ast.Ident{
												NamePos: 46,
												Name:    "x",
											},
											OpPos: 48,
											Op:    token.Assign,
											Y: &ast.BinaryExpr{
												X: &ast.Ident{
													NamePos: 50,
													Name:    "x",
												},
												OpPos: 52,
												Op:    token.Add,
												Y: &ast.BasicLit{
													ValPos: 54,
													Kind:   token.IntLit,
													Val:    "3",
												},
											},
										},
									},
								},
								&ast.IfStmt{
									If: 60,
									Cond: &ast.BasicLit{
										ValPos: 64,
										Kind:   token.IntLit,
										Val:    "1",
									},
									Body: &ast.ExprStmt{
										X: &ast.BinaryExpr{
											X: &ast.Ident{
												NamePos: 67,
												Name:    "x",
											},
											OpPos: 69,
											Op:    token.Assign,
											Y: &ast.BinaryExpr{
												X: &ast.Ident{
													NamePos: 71,
													Name:    "x",
												},
												OpPos: 73,
												Op:    token.Add,
												Y: &ast.BasicLit{
													ValPos: 75,
													Kind:   token.IntLit,
													Val:    "3",
												},
											},
										},
									},
								},
							},
							Rbrace: 78,
						},
					},
				},
			},
		},
		{
			// p03.c: if-else statement.
			path: "../../testdata/quiet/parser/p03.c",
			want: &ast.File{
				Decls: []ast.Decl{
					&ast.FuncDecl{
						FuncType: &ast.FuncType{
							Result: &ast.Ident{
								NamePos: 1,
								Name:    "int",
							},
							Lparen: 9,
							Params: []*ast.VarDecl{
								{
									VarType: &ast.Ident{
										NamePos: 10,
										Name:    "void",
									},
								},
							},
							Rparen: 14,
						},
						FuncName: &ast.Ident{
							NamePos: 5,
							Name:    "main",
						},
						Body: &ast.BlockStmt{
							Lbrace: 16,
							Items: []ast.BlockItem{
								&ast.VarDecl{
									VarType: &ast.Ident{
										NamePos: 20,
										Name:    "int",
									},
									VarName: &ast.Ident{
										NamePos: 24,
										Name:    "x",
									},
								},
								&ast.IfStmt{
									If: 29,
									Cond: &ast.BinaryExpr{
										X: &ast.BasicLit{
											ValPos: 33,
											Kind:   token.IntLit,
											Val:    "1",
										},
										OpPos: 34,
										Op:    token.Lt,
										Y: &ast.BasicLit{
											ValPos: 35,
											Kind:   token.IntLit,
											Val:    "2",
										},
									},
									Body: &ast.ExprStmt{
										X: &ast.BinaryExpr{
											X: &ast.Ident{
												NamePos: 38,
												Name:    "x",
											},
											OpPos: 40,
											Op:    token.Assign,
											Y: &ast.BasicLit{
												ValPos: 42,
												Kind:   token.IntLit,
												Val:    "1",
											},
										},
									},
									Else: &ast.ExprStmt{
										X: &ast.BinaryExpr{
											X: &ast.Ident{
												NamePos: 53,
												Name:    "x",
											},
											OpPos: 55,
											Op:    token.Assign,
											Y: &ast.BasicLit{
												ValPos: 57,
												Kind:   token.IntLit,
												Val:    "2",
											},
										},
									},
								},
							},
							Rbrace: 60,
						},
					},
				},
			},
		},
		{
			// p04.c: left-associative arithmetic and deeply nested
			// comparison/logical expressions.
			path: "../../testdata/quiet/parser/p04.c",
			want: &ast.File{
				Decls: []ast.Decl{
					&ast.FuncDecl{
						FuncType: &ast.FuncType{
							Result: &ast.Ident{
								NamePos: 0,
								Name:    "int",
							},
							Lparen: 8,
							Params: []*ast.VarDecl{
								{
									VarType: &ast.Ident{
										NamePos: 9,
										Name:    "void",
									},
								},
							},
							Rparen: 13,
						},
						FuncName: &ast.Ident{
							NamePos: 4,
							Name:    "main",
						},
						Body: &ast.BlockStmt{
							Lbrace: 15,
							Items: []ast.BlockItem{
								&ast.VarDecl{
									VarType: &ast.Ident{
										NamePos: 19,
										Name:    "int",
									},
									VarName: &ast.Ident{
										NamePos: 23,
										Name:    "x",
									},
								},
								&ast.VarDecl{
									VarType: &ast.Ident{
										NamePos: 28,
										Name:    "int",
									},
									VarName: &ast.Ident{
										NamePos: 32,
										Name:    "y",
									},
								},
								&ast.VarDecl{
									VarType: &ast.Ident{
										NamePos: 37,
										Name:    "int",
									},
									VarName: &ast.Ident{
										NamePos: 41,
										Name:    "z",
									},
								},
								&ast.ExprStmt{
									X: &ast.BinaryExpr{
										X: &ast.BinaryExpr{
											X: &ast.BinaryExpr{
												X: &ast.Ident{
													NamePos: 49,
													Name:    "x",
												},
												OpPos: 50,
												Op:    token.Sub,
												Y: &ast.Ident{
													NamePos: 51,
													Name:    "y",
												},
											},
											OpPos: 52,
											Op:    token.Sub,
											Y: &ast.Ident{
												NamePos: 53,
												Name:    "z",
											},
										},
										OpPos: 54,
										Op:    token.Sub,
										Y: &ast.BasicLit{
											ValPos: 55,
											Kind:   token.IntLit,
											Val:    "42",
										},
									},
								},
								&ast.ExprStmt{
									X: &ast.BinaryExpr{
										X: &ast.BinaryExpr{
											X: &ast.BinaryExpr{
												X: &ast.BinaryExpr{
													X: &ast.UnaryExpr{
														OpPos: 90,
														Op:    token.Not,
														X: &ast.Ident{
															NamePos: 91,
															Name:    "x",
														},
													},
													OpPos: 93,
													Op:    token.Mul,
													Y: &ast.Ident{
														NamePos: 95,
														Name:    "y",
													},
												},
												OpPos: 97,
												Op:    token.Add,
												Y: &ast.Ident{
													NamePos: 99,
													Name:    "z",
												},
											},
											OpPos: 101,
											Op:    token.Lt,
											Y: &ast.Ident{
												NamePos: 103,
												Name:    "x",
											},
										},
										OpPos: 105,
										Op:    token.Ne,
										Y: &ast.BinaryExpr{
											X: &ast.BasicLit{
												ValPos: 108,
												Kind:   token.IntLit,
												Val:    "42",
											},
											OpPos: 111,
											Op:    token.Lt,
											Y: &ast.BinaryExpr{
												X: &ast.Ident{
													NamePos: 113,
													Name:    "x",
												},
												OpPos: 115,
												Op:    token.Add,
												Y: &ast.BinaryExpr{
													X: &ast.Ident{
														NamePos: 117,
														Name:    "y",
													},
													OpPos: 119,
													Op:    token.Mul,
													Y: &ast.UnaryExpr{
														OpPos: 121,
														Op:    token.Not,
														X: &ast.Ident{
															NamePos: 122,
															Name:    "x",
														},
													},
												},
											},
										},
									},
								},
							},
							Rbrace: 164,
						},
					},
				},
			},
		},
		{
			// p05.c: array declarations and array parameters.
			path: "../../testdata/quiet/parser/p05.c",
			want: &ast.File{
				Decls: []ast.Decl{
					&ast.VarDecl{
						VarType: &ast.ArrayType{
							Elem: &ast.Ident{
								NamePos: 0,
								Name:    "int",
							},
							Lbracket: 5,
							Len:      10,
							Rbracket: 8,
						},
						VarName: &ast.Ident{
							NamePos: 4,
							Name:    "c",
						},
					},
					&ast.VarDecl{
						VarType: &ast.ArrayType{
							Elem: &ast.Ident{
								NamePos: 11,
								Name:    "char",
							},
							Lbracket: 17,
							Len:      10,
							Rbracket: 20,
						},
						VarName: &ast.Ident{
							NamePos: 16,
							Name:    "d",
						},
					},
					&ast.FuncDecl{
						FuncType: &ast.FuncType{
							Result: &ast.Ident{
								NamePos: 24,
								Name:    "void",
							},
							Lparen: 30,
							Params: []*ast.VarDecl{
								{
									VarType: &ast.ArrayType{
										Elem: &ast.Ident{
											NamePos: 31,
											Name:    "int",
										},
										Lbracket: 36,
										Rbracket: 37,
									},
									VarName: &ast.Ident{
										NamePos: 35,
										Name:    "h",
									},
								},
								{
									VarType: &ast.ArrayType{
										Elem: &ast.Ident{
											NamePos: 40,
											Name:    "char",
										},
										Lbracket: 46,
										Rbracket: 47,
									},
									VarName: &ast.Ident{
										NamePos: 45,
										Name:    "i",
									},
								},
							},
							Rparen: 48,
						},
						FuncName: &ast.Ident{
							NamePos: 29,
							Name:    "f",
						},
						Body: &ast.BlockStmt{
							Lbrace: 50,
							Rbrace: 52,
						},
					},
					&ast.FuncDecl{
						FuncType: &ast.FuncType{
							Result: &ast.Ident{
								NamePos: 56,
								Name:    "int",
							},
							Lparen: 64,
							Params: []*ast.VarDecl{
								{
									VarType: &ast.Ident{
										NamePos: 65,
										Name:    "void",
									},
								},
							},
							Rparen: 69,
						},
						FuncName: &ast.Ident{
							NamePos: 60,
							Name:    "main",
						},
						Body: &ast.BlockStmt{
							Lbrace: 71,
							Items: []ast.BlockItem{
								&ast.EmptyStmt{
									Semicolon: 75,
								},
							},
							Rbrace: 77,
						},
					},
				},
			},
		},
		{
			// p06.c: return statements and function calls.
			path: "../../testdata/quiet/parser/p06.c",
			want: &ast.File{
				Decls: []ast.Decl{
					&ast.FuncDecl{
						FuncType: &ast.FuncType{
							Result: &ast.Ident{
								NamePos: 0,
								Name:    "void",
							},
							Lparen: 6,
							Params: []*ast.VarDecl{
								{
									VarType: &ast.Ident{
										NamePos: 7,
										Name:    "void",
									},
								},
							},
							Rparen: 11,
						},
						FuncName: &ast.Ident{
							NamePos: 5,
							Name:    "f",
						},
						Body: &ast.BlockStmt{
							Lbrace: 13,
							Items: []ast.BlockItem{
								&ast.ReturnStmt{
									Return: 17,
								},
							},
							Rbrace: 25,
						},
					},
					&ast.FuncDecl{
						FuncType: &ast.FuncType{
							Result: &ast.Ident{
								NamePos: 28,
								Name:    "int",
							},
							Lparen: 33,
							Params: []*ast.VarDecl{
								{
									VarType: &ast.Ident{
										NamePos: 34,
										Name:    "void",
									},
								},
							},
							Rparen: 38,
						},
						FuncName: &ast.Ident{
							NamePos: 32,
							Name:    "g",
						},
						Body: &ast.BlockStmt{
							Lbrace: 40,
							Items: []ast.BlockItem{
								&ast.ReturnStmt{
									Return: 44,
									Result: &ast.BasicLit{
										ValPos: 51,
										Kind:   token.IntLit,
										Val:    "42",
									},
								},
							},
							Rbrace: 55,
						},
					},
					&ast.FuncDecl{
						FuncType: &ast.FuncType{
							Result: &ast.Ident{
								NamePos: 58,
								Name:    "int",
							},
							Lparen: 66,
							Params: []*ast.VarDecl{
								{
									VarType: &ast.Ident{
										NamePos: 67,
										Name:    "void",
									},
								},
							},
							Rparen: 71,
						},
						FuncName: &ast.Ident{
							NamePos: 62,
							Name:    "main",
						},
						Body: &ast.BlockStmt{
							Lbrace: 72,
							Items: []ast.BlockItem{
								&ast.ExprStmt{
									X: &ast.CallExpr{
										Name: &ast.Ident{
											NamePos: 76,
											Name:    "f",
										},
										Lparen: 77,
										Rparen: 78,
									},
								},
								&ast.ExprStmt{
									X: &ast.CallExpr{
										Name: &ast.Ident{
											NamePos: 83,
											Name:    "g",
										},
										Lparen: 84,
										Rparen: 85,
									},
								},
							},
							Rbrace: 88,
						},
					},
				},
			},
		},
		{
			// p07.c: nested if/while statements.
			path: "../../testdata/quiet/parser/p07.c",
			want: &ast.File{
				Decls: []ast.Decl{
					&ast.FuncDecl{
						FuncType: &ast.FuncType{
							Result: &ast.Ident{
								NamePos: 0,
								Name:    "int",
							},
							Lparen: 8,
							Params: []*ast.VarDecl{
								{
									VarType: &ast.Ident{
										NamePos: 9,
										Name:    "void",
									},
								},
							},
							Rparen: 13,
						},
						FuncName: &ast.Ident{
							NamePos: 4,
							Name:    "main",
						},
						Body: &ast.BlockStmt{
							Lbrace: 14,
							Items: []ast.BlockItem{
								&ast.VarDecl{
									VarType: &ast.Ident{
										NamePos: 18,
										Name:    "int",
									},
									VarName: &ast.Ident{
										NamePos: 22,
										Name:    "x",
									},
								},
								&ast.VarDecl{
									VarType: &ast.Ident{
										NamePos: 27,
										Name:    "int",
									},
									VarName: &ast.Ident{
										NamePos: 31,
										Name:    "y",
									},
								},
								&ast.IfStmt{
									If: 37,
									Cond: &ast.Ident{
										NamePos: 40,
										Name:    "x",
									},
									Body: &ast.WhileStmt{
										While: 43,
										Cond: &ast.Ident{
											NamePos: 50,
											Name:    "y",
										},
										Body: &ast.ExprStmt{
											X: &ast.BinaryExpr{
												X: &ast.Ident{
													NamePos: 53,
													Name:    "x",
												},
												OpPos: 54,
												Op:    token.Assign,
												Y: &ast.BasicLit{
													ValPos: 55,
													Kind:   token.IntLit,
													Val:    "42",
												},
											},
										},
									},
								},
								&ast.WhileStmt{
									While: 64,
									Cond: &ast.Ident{
										NamePos: 70,
										Name:    "x",
									},
									Body: &ast.IfStmt{
										If: 73,
										Cond: &ast.Ident{
											NamePos: 76,
											Name:    "y",
										},
										Body: &ast.ExprStmt{
											X: &ast.BinaryExpr{
												X: &ast.Ident{
													NamePos: 79,
													Name:    "x",
												},
												OpPos: 80,
												Op:    token.Assign,
												Y: &ast.BasicLit{
													ValPos: 81,
													Kind:   token.IntLit,
													Val:    "42",
												},
											},
										},
									},
								},
							},
							Rbrace: 85,
						},
					},
				},
			},
		},
		{
			// p08.c: dangling else; the Else branch is attached to the
			// innermost if (If: 118), not the outer one (If: 107).
			path: "../../testdata/quiet/parser/p08.c",
			want: &ast.File{
				Decls: []ast.Decl{
					&ast.FuncDecl{
						FuncType: &ast.FuncType{
							Result: &ast.Ident{
								NamePos: 70,
								Name:    "int",
							},
							Lparen: 78,
							Params: []*ast.VarDecl{
								{
									VarType: &ast.Ident{
										NamePos: 79,
										Name:    "void",
									},
								},
							},
							Rparen: 83,
						},
						FuncName: &ast.Ident{
							NamePos: 74,
							Name:    "main",
						},
						Body: &ast.BlockStmt{
							Lbrace: 84,
							Items: []ast.BlockItem{
								&ast.VarDecl{
									VarType: &ast.Ident{
										NamePos: 88,
										Name:    "int",
									},
									VarName: &ast.Ident{
										NamePos: 92,
										Name:    "x",
									},
								},
								&ast.VarDecl{
									VarType: &ast.Ident{
										NamePos: 97,
										Name:    "int",
									},
									VarName: &ast.Ident{
										NamePos: 101,
										Name:    "y",
									},
								},
								&ast.IfStmt{
									If: 107,
									Cond: &ast.Ident{
										NamePos: 110,
										Name:    "x",
									},
									Body: &ast.IfStmt{
										If: 118,
										Cond: &ast.Ident{
											NamePos: 122,
											Name:    "y",
										},
										Body: &ast.ExprStmt{
											X: &ast.BinaryExpr{
												X: &ast.Ident{
													NamePos: 125,
													Name:    "x",
												},
												OpPos: 127,
												Op:    token.Assign,
												Y: &ast.BasicLit{
													ValPos: 129,
													Kind:   token.IntLit,
													Val:    "4711",
												},
											},
										},
										Else: &ast.ExprStmt{
											X: &ast.BinaryExpr{
												X: &ast.Ident{
													NamePos: 144,
													Name:    "x",
												},
												OpPos: 145,
												Op:    token.Assign,
												Y: &ast.BasicLit{
													ValPos: 146,
													Kind:   token.IntLit,
													Val:    "42",
												},
											},
										},
									},
								},
							},
							Rbrace: 150,
						},
					},
				},
			},
		},
	}
	for _, g := range golden {
		log.Println("path:", g.path)
		s, err := scanner.Open(g.path)
		if err != nil {
			t.Error(err)
			continue
		}
		p := parser.NewParser()
		file, err := p.Parse(s)
		if err != nil {
			t.Error(err)
			continue
		}
		got := file.(*ast.File)
		// Deep structural comparison against the golden AST; on mismatch,
		// also print a field-by-field diff to ease debugging.
		if !reflect.DeepEqual(got, g.want) {
			t.Errorf("%q: AST mismatch; expected %#v, got %#v", g.path, g.want, got)
			fmt.Println(pretty.Diff(g.want, got))
		}
	}
}
// TestParserError parses a set of (mostly) syntactically invalid uC source
// files and checks that the parser reports the expected error message for
// each. An empty want string means the file is expected to parse without
// error.
func TestParserError(t *testing.T) {
	// Golden test cases; want holds the expected error string, or "" when no
	// error is expected.
	var golden = []struct {
		path string
		want string
	}{
		{
			path: "../../testdata/incorrect/parser/pe01.c",
			want: `102: unexpected ")", expected ["!" "(" "-" "char_lit" "ident" "int_lit"]`,
		},
		{
			path: "../../testdata/incorrect/parser/pe02.c",
			want: `112: unexpected "}", expected ["!=" "&&" "*" "+" "-" "/" ";" "<" "<=" "=" "==" ">" ">="]`,
		},
		{
			path: "../../testdata/incorrect/parser/pe03.c",
			want: `129: unexpected "}", expected ["!" "(" "-" ";" "char_lit" "ident" "if" "int_lit" "return" "while" "{"]`,
		},
		{
			path: "../../testdata/incorrect/parser/pe04.c",
			want: `111: unexpected "a", expected ["!=" "&&" "(" "*" "+" "-" "/" ";" "<" "<=" "=" "==" ">" ">=" "["]`,
		},
		{
			path: "../../testdata/incorrect/parser/pe05.c",
			want: `71: unexpected "else", expected ["ident"]`,
		},
		{
			path: "../../testdata/incorrect/parser/pe06.c",
			want: `73: unexpected "b", expected ["(" ";" "["]`,
		},
		{
			path: "../../testdata/incorrect/parser/pe07.c",
			want: `72: unexpected ",", expected ["(" ";" "["]`,
		},
		{
			path: "../../testdata/incorrect/parser/pe08.c",
			want: `86: unexpected "42", expected [";" "{"]`,
		},
		{
			// TODO: The ';' at offset 80 in pe09.c should probably be a '{', as
			// indicated by the comment "// '}' missing "
			//
			// Update this test case if the test file is fixed.
			path: "../../testdata/incorrect/parser/pe09.c",
			want: `87: unexpected ";", expected ["$" "ident" "typedef"]`,
		},
		{
			path: "../../testdata/incorrect/parser/pe10.c",
			want: `135: unexpected ")", expected ["!" "(" "-" "char_lit" "ident" "int_lit"]`,
		},
		{
			path: "../../testdata/incorrect/parser/pe11.c",
			want: `70: unexpected "(", expected ["ident"]`,
		},
		{
			path: "../../testdata/incorrect/parser/pe12.c",
			want: `77: unexpected "{", expected ["(" ";" "["]`,
		},
		{
			path: "../../testdata/incorrect/parser/pe13.c",
			// Note, empty parameter list is explicitly allowed by the parser,
			// and acts the same as a void parameter, as per design decisions.
			//
			// References.
			// https://github.com/mewmew/uc/issues/46
			// https://github.com/mewmew/uc/issues/50
			// https://github.com/mewmew/uc/issues/33
			want: "",
		},
		{
			path: "../../testdata/incorrect/parser/pe14.c",
			// Note, nested procedures are explicitly allowed by the parser, as
			// the validation is postponed to the semantic analysis checker.
			//
			// References.
			// https://github.com/mewmew/uc/issues/38
			// https://github.com/mewmew/uc/issues/43
			want: "",
		},
	}
	for _, g := range golden {
		log.Println("path:", g.path)
		s, err := scanner.Open(g.path)
		if err != nil {
			t.Error(err)
			continue
		}
		p := parser.NewParser()
		_, err = p.Parse(s)
		// Normalize the parse outcome to a string; "" means no error.
		got := ""
		if err != nil {
			if e, ok := err.(*errors.Error); ok {
				// Unwrap Gocc error.
				err = parser.NewError(e)
			}
			got = err.Error()
		}
		if got != g.want {
			t.Errorf("%q: error mismatch; expected `%v`, got `%v`", g.path, g.want, got)
		}
	}
}