示例#1
0
// Driver runs the lexer over input and parses the token stream as a Module.
// It returns whether the parse succeeded along with a diagnostic message.
func Driver(input string) (bool, string) {
	// Buffered channel lets the lexer goroutine run ahead of the parser.
	ch := make(chan lexer.Token, 100)
	go mylexer.Lexer(input, ch)
	// Prime the parser state with the first token from the stream.
	lex := LexType{ch, <-ch}
	return Module(&lex)
}
示例#2
0
// tozama_test checks that lexing the empty string produces exactly one
// EOF token at line 1, column 1.
// NOTE(review): the name does not match Go's TestXxx convention, so
// `go test` will not discover it — confirm whether it should be renamed
// to TestTozama. Name kept here to avoid changing the external interface.
func tozama_test(t *testing.T) {
	c := make(chan lexer.Token, 100)
	s := ``
	// Use TestToken with the {type, text, 0, line, col} field order, matching
	// every other test in this file; the original mylexer.Token literal had
	// the fields in a different order.
	v := []lexer.Token{
		TestToken{lexer.EOF, "<EOF>", 0, 1, 1},
	}
	go mylexer.Lexer(s, c)
	lexer.CheckTest(s, c, v, t)
}
示例#3
0
// A real literal whose exponent marker has no digits ("123.E") must be
// reported as a single ERROR token covering the malformed lexeme.
func Test_2e8b519f3621f75765c979941913e5b5_20130120120713(t *testing.T) {
	input := `123.E
`
	expected := []lexer.Token{
		TestToken{lexer.ERROR, "123.E", 0, 1, 1},
	}
	tokens := make(chan lexer.Token, 100)
	go mylexer.Lexer(input, tokens)
	lexer.CheckTest(input, tokens, expected, t)
}
示例#4
0
// A single-quoted input ("'a'") is not a valid token in this language;
// the lexer is expected to flag the opening quote as an ERROR.
func Test_d1aefb1bfa9a074d43b1315f35c41bed_20130120120713(t *testing.T) {
	input := `'a'
`
	expected := []lexer.Token{
		TestToken{lexer.ERROR, "'", 0, 1, 1},
	}
	tokens := make(chan lexer.Token, 100)
	go mylexer.Lexer(input, tokens)
	lexer.CheckTest(input, tokens, expected, t)
}
示例#5
0
// An unterminated string literal (opening quote with no closing quote
// before end of line) must yield an ERROR token with text "string error".
func Test_3aa87855bc07809d26729c80731b2bf5_20130120120713(t *testing.T) {
	input := `"ab
`
	expected := []lexer.Token{
		TestToken{lexer.ERROR, "string error", 0, 1, 1},
	}
	tokens := make(chan lexer.Token, 100)
	go mylexer.Lexer(input, tokens)
	lexer.CheckTest(input, tokens, expected, t)
}
示例#6
0
// An exponent sign with no following digits ("123.E+") is malformed and
// must be reported as one ERROR token spanning the whole lexeme.
func Test_808479e2fef3695589dc6d22d906aac9_20130120120713(t *testing.T) {
	input := `123.E+
`
	expected := []lexer.Token{
		TestToken{lexer.ERROR, "123.E+", 0, 1, 1},
	}
	tokens := make(chan lexer.Token, 100)
	go mylexer.Lexer(input, tokens)
	lexer.CheckTest(input, tokens, expected, t)
}
示例#7
0
// Same as the E-exponent case but with the D exponent marker:
// "123.D+" with no digits after the sign is a single ERROR token.
func Test_6e64b75f984ced67dac2a627d85e3867_20130120120713(t *testing.T) {
	input := `123.D+
`
	expected := []lexer.Token{
		TestToken{lexer.ERROR, "123.D+", 0, 1, 1},
	}
	tokens := make(chan lexer.Token, 100)
	go mylexer.Lexer(input, tokens)
	lexer.CheckTest(input, tokens, expected, t)
}
示例#8
0
// main reads a fixed test-input file and runs the lexer over its contents.
func main() {
	// NOTE(review): nothing ever receives from this channel, so Lexer will
	// block once the 1000-token buffer fills on large inputs — confirm this
	// driver is only used with small files.
	output := make(chan lexer.Token, 1000)

	dat, err := ioutil.ReadFile("/home/wallace/test_strings.txt")
	if err != nil {
		// Original code silently ignored read errors and lexed an empty
		// string; report the failure and stop instead.
		fmt.Println("could not read test input:", err)
		return
	}
	input := string(dat)
	fmt.Println("Value of input is ", input)
	mylexer.Lexer(input, output)
}
示例#9
0
// Test_de5303bd2e4cd113654e807f6361d0d0_20130122210446 lexes a minimal
// "Hello, World!" module and checks the complete token stream, including
// each token's (line, column) position and the trailing EOF token.
// The source is a raw string literal, so its whitespace is significant.
func Test_de5303bd2e4cd113654e807f6361d0d0_20130122210446(t *testing.T) {
	c := make(chan lexer.Token, 100)
	s := `MODULE Sample1;

  IMPORT c := Console;

BEGIN
  c.String("Hello, World!"); c.Ln;
END Sample1.`

	// Expected tokens in order: TestToken{type, text, 0, line, col}.
	v := []lexer.Token{
		TestToken{lexer.MODULE, "MODULE", 0, 1, 1},
		TestToken{lexer.IDENT, "Sample1", 0, 1, 8},
		TestToken{lexer.SEMICOLON, ";", 0, 1, 15},
		TestToken{lexer.IMPORT, "IMPORT", 0, 3, 3},
		TestToken{lexer.IDENT, "c", 0, 3, 10},
		TestToken{lexer.COLONEQUAL, ":=", 0, 3, 12},
		TestToken{lexer.IDENT, "Console", 0, 3, 15},
		TestToken{lexer.SEMICOLON, ";", 0, 3, 22},
		TestToken{lexer.BEGIN, "BEGIN", 0, 5, 1},
		TestToken{lexer.IDENT, "c", 0, 6, 3},
		TestToken{lexer.DOT, ".", 0, 6, 4},
		TestToken{lexer.IDENT, "String", 0, 6, 5},
		TestToken{lexer.LPAREN, "(", 0, 6, 11},
		TestToken{lexer.STRING, "\"Hello, World!\"", 0, 6, 12},
		TestToken{lexer.RPAREN, ")", 0, 6, 27},
		TestToken{lexer.SEMICOLON, ";", 0, 6, 28},
		TestToken{lexer.IDENT, "c", 0, 6, 30},
		TestToken{lexer.DOT, ".", 0, 6, 31},
		TestToken{lexer.IDENT, "Ln", 0, 6, 32},
		TestToken{lexer.SEMICOLON, ";", 0, 6, 34},
		TestToken{lexer.END, "END", 0, 7, 1},
		TestToken{lexer.IDENT, "Sample1", 0, 7, 5},
		TestToken{lexer.DOT, ".", 0, 7, 12},
		TestToken{lexer.EOF, "<EOF>", 0, 7, 13},
	}
	go mylexer.Lexer(s, c)
	lexer.CheckTest(s, c, v, t)
}
示例#10
0
// Test_b40ed0109d32f2a975a7f5260ca12947_20130122210446 lexes a larger module
// exercising VAR declarations, ARRAY/POINTER TO types, the caret dereference
// operator, and procedure calls, verifying the entire token stream with exact
// (line, column) positions. The source is a raw string literal, so its
// whitespace (including a line containing only a space) is significant.
func Test_b40ed0109d32f2a975a7f5260ca12947_20130122210446(t *testing.T) {
	c := make(chan lexer.Token, 100)
	s := `MODULE Sample2;

  IMPORT Memory, c := Console;

  VAR arr: ARRAY 20 OF CHAR;
    fixarr: POINTER TO ARRAY 20 OF CHAR;
    dynarr: POINTER TO ARRAY OF CHAR;

BEGIN
  arr := "Hello, World!";
  c.String(arr); c.Ln;

  NEW(fixarr);
  fixarr^ := "Hello, World!";
  c.String(fixarr^); c.Ln;
 
  NEW(dynarr, 20);
  dynarr^ := "Hello, World!";
  c.String(dynarr^); c.Ln;
END Sample2.`
	// Expected tokens in order: TestToken{type, text, 0, line, col}.
	v := []lexer.Token{
		TestToken{lexer.MODULE, "MODULE", 0, 1, 1},
		TestToken{lexer.IDENT, "Sample2", 0, 1, 8},
		TestToken{lexer.SEMICOLON, ";", 0, 1, 15},
		TestToken{lexer.IMPORT, "IMPORT", 0, 3, 3},
		TestToken{lexer.IDENT, "Memory", 0, 3, 10},
		TestToken{lexer.COMMA, ",", 0, 3, 16},
		TestToken{lexer.IDENT, "c", 0, 3, 18},
		TestToken{lexer.COLONEQUAL, ":=", 0, 3, 20},
		TestToken{lexer.IDENT, "Console", 0, 3, 23},
		TestToken{lexer.SEMICOLON, ";", 0, 3, 30},
		TestToken{lexer.VAR, "VAR", 0, 5, 3},
		TestToken{lexer.IDENT, "arr", 0, 5, 7},
		TestToken{lexer.COLON, ":", 0, 5, 10},
		TestToken{lexer.ARRAY, "ARRAY", 0, 5, 12},
		TestToken{lexer.INTEGER, "20", 0, 5, 18},
		TestToken{lexer.OF, "OF", 0, 5, 21},
		TestToken{lexer.IDENT, "CHAR", 0, 5, 24},
		TestToken{lexer.SEMICOLON, ";", 0, 5, 28},
		TestToken{lexer.IDENT, "fixarr", 0, 6, 5},
		TestToken{lexer.COLON, ":", 0, 6, 11},
		TestToken{lexer.POINTER, "POINTER", 0, 6, 13},
		TestToken{lexer.TO, "TO", 0, 6, 21},
		TestToken{lexer.ARRAY, "ARRAY", 0, 6, 24},
		TestToken{lexer.INTEGER, "20", 0, 6, 30},
		TestToken{lexer.OF, "OF", 0, 6, 33},
		TestToken{lexer.IDENT, "CHAR", 0, 6, 36},
		TestToken{lexer.SEMICOLON, ";", 0, 6, 40},
		TestToken{lexer.IDENT, "dynarr", 0, 7, 5},
		TestToken{lexer.COLON, ":", 0, 7, 11},
		TestToken{lexer.POINTER, "POINTER", 0, 7, 13},
		TestToken{lexer.TO, "TO", 0, 7, 21},
		TestToken{lexer.ARRAY, "ARRAY", 0, 7, 24},
		TestToken{lexer.OF, "OF", 0, 7, 30},
		TestToken{lexer.IDENT, "CHAR", 0, 7, 33},
		TestToken{lexer.SEMICOLON, ";", 0, 7, 37},
		TestToken{lexer.BEGIN, "BEGIN", 0, 9, 1},
		TestToken{lexer.IDENT, "arr", 0, 10, 3},
		TestToken{lexer.COLONEQUAL, ":=", 0, 10, 7},
		TestToken{lexer.STRING, "\"Hello, World!\"", 0, 10, 10},
		TestToken{lexer.SEMICOLON, ";", 0, 10, 25},
		TestToken{lexer.IDENT, "c", 0, 11, 3},
		TestToken{lexer.DOT, ".", 0, 11, 4},
		TestToken{lexer.IDENT, "String", 0, 11, 5},
		TestToken{lexer.LPAREN, "(", 0, 11, 11},
		TestToken{lexer.IDENT, "arr", 0, 11, 12},
		TestToken{lexer.RPAREN, ")", 0, 11, 15},
		TestToken{lexer.SEMICOLON, ";", 0, 11, 16},
		TestToken{lexer.IDENT, "c", 0, 11, 18},
		TestToken{lexer.DOT, ".", 0, 11, 19},
		TestToken{lexer.IDENT, "Ln", 0, 11, 20},
		TestToken{lexer.SEMICOLON, ";", 0, 11, 22},
		TestToken{lexer.IDENT, "NEW", 0, 13, 3},
		TestToken{lexer.LPAREN, "(", 0, 13, 6},
		TestToken{lexer.IDENT, "fixarr", 0, 13, 7},
		TestToken{lexer.RPAREN, ")", 0, 13, 13},
		TestToken{lexer.SEMICOLON, ";", 0, 13, 14},
		TestToken{lexer.IDENT, "fixarr", 0, 14, 3},
		TestToken{lexer.CARAT, "^", 0, 14, 9},
		TestToken{lexer.COLONEQUAL, ":=", 0, 14, 11},
		TestToken{lexer.STRING, "\"Hello, World!\"", 0, 14, 14},
		TestToken{lexer.SEMICOLON, ";", 0, 14, 29},
		TestToken{lexer.IDENT, "c", 0, 15, 3},
		TestToken{lexer.DOT, ".", 0, 15, 4},
		TestToken{lexer.IDENT, "String", 0, 15, 5},
		TestToken{lexer.LPAREN, "(", 0, 15, 11},
		TestToken{lexer.IDENT, "fixarr", 0, 15, 12},
		TestToken{lexer.CARAT, "^", 0, 15, 18},
		TestToken{lexer.RPAREN, ")", 0, 15, 19},
		TestToken{lexer.SEMICOLON, ";", 0, 15, 20},
		TestToken{lexer.IDENT, "c", 0, 15, 22},
		TestToken{lexer.DOT, ".", 0, 15, 23},
		TestToken{lexer.IDENT, "Ln", 0, 15, 24},
		TestToken{lexer.SEMICOLON, ";", 0, 15, 26},
		TestToken{lexer.IDENT, "NEW", 0, 17, 3},
		TestToken{lexer.LPAREN, "(", 0, 17, 6},
		TestToken{lexer.IDENT, "dynarr", 0, 17, 7},
		TestToken{lexer.COMMA, ",", 0, 17, 13},
		TestToken{lexer.INTEGER, "20", 0, 17, 15},
		TestToken{lexer.RPAREN, ")", 0, 17, 17},
		TestToken{lexer.SEMICOLON, ";", 0, 17, 18},
		TestToken{lexer.IDENT, "dynarr", 0, 18, 3},
		TestToken{lexer.CARAT, "^", 0, 18, 9},
		TestToken{lexer.COLONEQUAL, ":=", 0, 18, 11},
		TestToken{lexer.STRING, "\"Hello, World!\"", 0, 18, 14},
		TestToken{lexer.SEMICOLON, ";", 0, 18, 29},
		TestToken{lexer.IDENT, "c", 0, 19, 3},
		TestToken{lexer.DOT, ".", 0, 19, 4},
		TestToken{lexer.IDENT, "String", 0, 19, 5},
		TestToken{lexer.LPAREN, "(", 0, 19, 11},
		TestToken{lexer.IDENT, "dynarr", 0, 19, 12},
		TestToken{lexer.CARAT, "^", 0, 19, 18},
		TestToken{lexer.RPAREN, ")", 0, 19, 19},
		TestToken{lexer.SEMICOLON, ";", 0, 19, 20},
		TestToken{lexer.IDENT, "c", 0, 19, 22},
		TestToken{lexer.DOT, ".", 0, 19, 23},
		TestToken{lexer.IDENT, "Ln", 0, 19, 24},
		TestToken{lexer.SEMICOLON, ";", 0, 19, 26},
		TestToken{lexer.END, "END", 0, 20, 1},
		TestToken{lexer.IDENT, "Sample2", 0, 20, 5},
		TestToken{lexer.DOT, ".", 0, 20, 12},
		TestToken{lexer.EOF, "<EOF>", 0, 20, 13},
	}
	go mylexer.Lexer(s, c)
	lexer.CheckTest(s, c, v, t)
}
示例#11
0
// Test_4e93b7421a260953d822d103467435c4_20130120105815 exhaustively checks
// numeric-literal lexing across many forms, per the expectations below:
//   - digits with an H suffix (e.g. "123H", "123ABH") lex as INTEGER;
//   - digits with an X suffix (e.g. "123X", "3ABX") lex as STRING
//     (presumably a character-code literal — verify against the lexer);
//   - reals with E or D exponent markers, with optional sign and optional
//     fraction digits (e.g. "123.E+456", "3.9D5") lex as REAL;
//   - a lowercase suffix (e.g. "123abH", "3abh") is NOT part of the number:
//     it splits into an INTEGER followed by an IDENT.
// The same inputs are repeated in four groups to cover long/short digit runs.
// The source is a raw string literal, so its whitespace is significant.
func Test_4e93b7421a260953d822d103467435c4_20130120105815(t *testing.T) {
	c := make(chan lexer.Token, 100)
	s := `123H
123ABH
123X
123ABX
123.E+456
123.D+456
123.E-456
123.D-456
123.E456
123.D456
123.89E+456
123.89D+456
123.89E-456
123.89D-456
123.89E456
123.89D456
123.9E+456
123.9D+456
123.9E-456
123.9D-456
123.9E456
123.9D456
123abH
123abh

3H
3ABH
3X
3ABX
3.E+5
3.D+5
3.E-5
3.D-5
3.E5
3.D5
3.89E+5
3.89D+5
3.89E-5
3.89D-5
3.89E5
3.89D5
3.9E+5
3.9D+5
3.9E-5
3.9D-5
3.9E5
3.9D5
3abH
3abh

123H
123ABH
123X
123ABX
123.E+5
123.D+5
123.E-5
123.D-5
123.E5
123.D5
123.89E+5
123.89D+5
123.89E-5
123.89D-5
123.89E5
123.89D5
123.9E+5
123.9D+5
123.9E-5
123.9D-5
123.9E5
123.9D5
123abH
123abh

3H
3ABH
3X
3ABX
3.E+5
3.D+5
3.E-5
3.D-5
3.E5
3.D5
3.89E+5
3.89D+5
3.89E-5
3.89D-5
3.89E5
3.89D5
3.9E+5
3.9D+5
3.9E-5
3.9D-5
3.9E5
3.9D5
3abH
3abh
`
	// Expected tokens in order: TestToken{type, text, 0, line, col}.
	v := []lexer.Token{
		TestToken{lexer.INTEGER, "123H", 0, 1, 1},
		TestToken{lexer.INTEGER, "123ABH", 0, 2, 1},
		TestToken{lexer.STRING, "123X", 0, 3, 1},
		TestToken{lexer.STRING, "123ABX", 0, 4, 1},
		TestToken{lexer.REAL, "123.E+456", 0, 5, 1},
		TestToken{lexer.REAL, "123.D+456", 0, 6, 1},
		TestToken{lexer.REAL, "123.E-456", 0, 7, 1},
		TestToken{lexer.REAL, "123.D-456", 0, 8, 1},
		TestToken{lexer.REAL, "123.E456", 0, 9, 1},
		TestToken{lexer.REAL, "123.D456", 0, 10, 1},
		TestToken{lexer.REAL, "123.89E+456", 0, 11, 1},
		TestToken{lexer.REAL, "123.89D+456", 0, 12, 1},
		TestToken{lexer.REAL, "123.89E-456", 0, 13, 1},
		TestToken{lexer.REAL, "123.89D-456", 0, 14, 1},
		TestToken{lexer.REAL, "123.89E456", 0, 15, 1},
		TestToken{lexer.REAL, "123.89D456", 0, 16, 1},
		TestToken{lexer.REAL, "123.9E+456", 0, 17, 1},
		TestToken{lexer.REAL, "123.9D+456", 0, 18, 1},
		TestToken{lexer.REAL, "123.9E-456", 0, 19, 1},
		TestToken{lexer.REAL, "123.9D-456", 0, 20, 1},
		TestToken{lexer.REAL, "123.9E456", 0, 21, 1},
		TestToken{lexer.REAL, "123.9D456", 0, 22, 1},
		TestToken{lexer.INTEGER, "123", 0, 23, 1},
		TestToken{lexer.IDENT, "abH", 0, 23, 4},
		TestToken{lexer.INTEGER, "123", 0, 24, 1},
		TestToken{lexer.IDENT, "abh", 0, 24, 4},
		TestToken{lexer.INTEGER, "3H", 0, 26, 1},
		TestToken{lexer.INTEGER, "3ABH", 0, 27, 1},
		TestToken{lexer.STRING, "3X", 0, 28, 1},
		TestToken{lexer.STRING, "3ABX", 0, 29, 1},
		TestToken{lexer.REAL, "3.E+5", 0, 30, 1},
		TestToken{lexer.REAL, "3.D+5", 0, 31, 1},
		TestToken{lexer.REAL, "3.E-5", 0, 32, 1},
		TestToken{lexer.REAL, "3.D-5", 0, 33, 1},
		TestToken{lexer.REAL, "3.E5", 0, 34, 1},
		TestToken{lexer.REAL, "3.D5", 0, 35, 1},
		TestToken{lexer.REAL, "3.89E+5", 0, 36, 1},
		TestToken{lexer.REAL, "3.89D+5", 0, 37, 1},
		TestToken{lexer.REAL, "3.89E-5", 0, 38, 1},
		TestToken{lexer.REAL, "3.89D-5", 0, 39, 1},
		TestToken{lexer.REAL, "3.89E5", 0, 40, 1},
		TestToken{lexer.REAL, "3.89D5", 0, 41, 1},
		TestToken{lexer.REAL, "3.9E+5", 0, 42, 1},
		TestToken{lexer.REAL, "3.9D+5", 0, 43, 1},
		TestToken{lexer.REAL, "3.9E-5", 0, 44, 1},
		TestToken{lexer.REAL, "3.9D-5", 0, 45, 1},
		TestToken{lexer.REAL, "3.9E5", 0, 46, 1},
		TestToken{lexer.REAL, "3.9D5", 0, 47, 1},
		TestToken{lexer.INTEGER, "3", 0, 48, 1},
		TestToken{lexer.IDENT, "abH", 0, 48, 2},
		TestToken{lexer.INTEGER, "3", 0, 49, 1},
		TestToken{lexer.IDENT, "abh", 0, 49, 2},
		TestToken{lexer.INTEGER, "123H", 0, 51, 1},
		TestToken{lexer.INTEGER, "123ABH", 0, 52, 1},
		TestToken{lexer.STRING, "123X", 0, 53, 1},
		TestToken{lexer.STRING, "123ABX", 0, 54, 1},
		TestToken{lexer.REAL, "123.E+5", 0, 55, 1},
		TestToken{lexer.REAL, "123.D+5", 0, 56, 1},
		TestToken{lexer.REAL, "123.E-5", 0, 57, 1},
		TestToken{lexer.REAL, "123.D-5", 0, 58, 1},
		TestToken{lexer.REAL, "123.E5", 0, 59, 1},
		TestToken{lexer.REAL, "123.D5", 0, 60, 1},
		TestToken{lexer.REAL, "123.89E+5", 0, 61, 1},
		TestToken{lexer.REAL, "123.89D+5", 0, 62, 1},
		TestToken{lexer.REAL, "123.89E-5", 0, 63, 1},
		TestToken{lexer.REAL, "123.89D-5", 0, 64, 1},
		TestToken{lexer.REAL, "123.89E5", 0, 65, 1},
		TestToken{lexer.REAL, "123.89D5", 0, 66, 1},
		TestToken{lexer.REAL, "123.9E+5", 0, 67, 1},
		TestToken{lexer.REAL, "123.9D+5", 0, 68, 1},
		TestToken{lexer.REAL, "123.9E-5", 0, 69, 1},
		TestToken{lexer.REAL, "123.9D-5", 0, 70, 1},
		TestToken{lexer.REAL, "123.9E5", 0, 71, 1},
		TestToken{lexer.REAL, "123.9D5", 0, 72, 1},
		TestToken{lexer.INTEGER, "123", 0, 73, 1},
		TestToken{lexer.IDENT, "abH", 0, 73, 4},
		TestToken{lexer.INTEGER, "123", 0, 74, 1},
		TestToken{lexer.IDENT, "abh", 0, 74, 4},
		TestToken{lexer.INTEGER, "3H", 0, 76, 1},
		TestToken{lexer.INTEGER, "3ABH", 0, 77, 1},
		TestToken{lexer.STRING, "3X", 0, 78, 1},
		TestToken{lexer.STRING, "3ABX", 0, 79, 1},
		TestToken{lexer.REAL, "3.E+5", 0, 80, 1},
		TestToken{lexer.REAL, "3.D+5", 0, 81, 1},
		TestToken{lexer.REAL, "3.E-5", 0, 82, 1},
		TestToken{lexer.REAL, "3.D-5", 0, 83, 1},
		TestToken{lexer.REAL, "3.E5", 0, 84, 1},
		TestToken{lexer.REAL, "3.D5", 0, 85, 1},
		TestToken{lexer.REAL, "3.89E+5", 0, 86, 1},
		TestToken{lexer.REAL, "3.89D+5", 0, 87, 1},
		TestToken{lexer.REAL, "3.89E-5", 0, 88, 1},
		TestToken{lexer.REAL, "3.89D-5", 0, 89, 1},
		TestToken{lexer.REAL, "3.89E5", 0, 90, 1},
		TestToken{lexer.REAL, "3.89D5", 0, 91, 1},
		TestToken{lexer.REAL, "3.9E+5", 0, 92, 1},
		TestToken{lexer.REAL, "3.9D+5", 0, 93, 1},
		TestToken{lexer.REAL, "3.9E-5", 0, 94, 1},
		TestToken{lexer.REAL, "3.9D-5", 0, 95, 1},
		TestToken{lexer.REAL, "3.9E5", 0, 96, 1},
		TestToken{lexer.REAL, "3.9D5", 0, 97, 1},
		TestToken{lexer.INTEGER, "3", 0, 98, 1},
		TestToken{lexer.IDENT, "abH", 0, 98, 2},
		TestToken{lexer.INTEGER, "3", 0, 99, 1},
		TestToken{lexer.IDENT, "abh", 0, 99, 2},
		TestToken{lexer.EOF, "<EOF>", 0, 100, 1},
	}
	go mylexer.Lexer(s, c)
	lexer.CheckTest(s, c, v, t)
}