Example #1
func testLexer(input string, t *testing.T) {
	l := new(json.Lexer)
	l.Init(input, PanicOnError)
	// Only whitespace may appear between two consecutive tokens.
	spacesRE := regexp.MustCompile(`^\s+$`)

	next := l.Next()
	offset := 0
	for next != json.EOI {
		s, e := l.Pos()
		if s > offset && !spacesRE.MatchString(input[offset:s]) {
			t.Errorf("Spaces expected: %s", input[offset:s])
		}
		offset = e
		token := input[s:e]
		switch next {
		case json.LBRACE, json.RBRACE, json.LBRACK, json.RBRACK,
			json.COLON, json.COMMA, json.NULL, json.TRUE, json.FALSE:
			// Tokens with fixed text must match their canonical spelling.
			if token != next.String() {
				t.Errorf("Bad token %v: %s", next, token)
			}
		case json.JSONSTRING:
			// String literals must keep their surrounding double quotes.
			if !strings.HasPrefix(token, `"`) || !strings.HasSuffix(token, `"`) {
				t.Errorf("Bad string literal: %s", token)
			}
		}
		next = l.Next()
	}
}
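The lexer examples rely on a PanicOnError helper that is not shown above. A minimal sketch, assuming the generated lexer reports errors through a callback of the form func(line, offset, len int, msg string) (the same shape Example #2 passes to Init); the name and the message format here are placeholders, and the sketch only needs the standard fmt package:

func PanicOnError(line, offset, len int, msg string) {
	// Abort on the first lexer error; position and message come from the lexer.
	panic(fmt.Sprintf("lexer error at line %d, offset %d: %s", line, offset, msg))
}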
Example #2
func BenchmarkParser(b *testing.B) {
	l := new(json.Lexer)
	p := new(json.Parser)
	onError := func(line, offset, len int, msg string) {
		b.Errorf("%d, %d: %s", line, offset, msg)
	}

	// Discard node events: this benchmark measures raw parsing speed only.
	p.Init(onError, func(t json.NodeType, offset, endoffset int) {})
	for i := 0; i < b.N; i++ {
		// Re-initialize the lexer so every iteration parses the full input.
		l.Init(jsonExample, onError)
		p.Parse(l)
	}
	b.SetBytes(int64(len(jsonExample)))
}
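Both benchmarks read a package-level jsonExample value that is not included above. A stand-in for illustration only, assuming any syntactically valid JSON document works; real measurements would use a larger, more representative payload:

// Hypothetical benchmark input; a bigger document gives more stable throughput numbers.
const jsonExample = `{"id": 1, "name": "example", "tags": ["a", "b"], "extra": {"ok": true, "missing": null}}`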
Example #3
func BenchmarkLexer(b *testing.B) {
	l := new(json.Lexer)
	for i := 0; i < b.N; i++ {
		l.Init(jsonExample, PanicOnError)
		// Drain the whole token stream; EOI marks the end of input.
		next := l.Next()
		for next != json.EOI {
			next = l.Next()
		}
	}
	b.SetBytes(int64(len(jsonExample)))
}
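Because both benchmarks call b.SetBytes with the input length, running them with go test -bench . reports throughput in MB/s alongside ns/op, which makes it easy to compare the cost of lexing alone against a full parse over the same input.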
Example #4
func TestParser(t *testing.T) {
	l := new(json.Lexer)
	p := new(json.Parser)

	// Track which node types are covered by the test table.
	seen := map[json.NodeType]bool{}
	for _, tc := range jsParseTests {
		seen[tc.nt] = true
		for _, input := range tc.inputs {
			test := pt.NewParserTest(tc.nt.String(), input, t)
			l.Init(test.Source(), test.Error)
			p.Init(test.Error, func(t json.NodeType, offset, endoffset int) {
				// Report only nodes of the type this test case is checking.
				if t == tc.nt {
					test.Consume(offset, endoffset)
				}
			})
			test.Done(p.Parse(l))
		}
	}
	// Every node type must be exercised by at least one test case.
	for n := json.NodeType(1); n < json.NodeTypeMax; n++ {
		if !seen[n] {
			t.Errorf("%v is not tested", n)
		}
	}
}
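TestParser depends on a jsParseTests table and a pt test helper that are defined elsewhere. From the way they are used above, each table entry pairs a node type with the inputs that should produce it; the sketch below shows only that shape. The node-type names and inputs are placeholders, and how expected node ranges are marked up in the input (and verified through test.Consume) is left to whatever convention the pt package implements:

// Hypothetical test table: each entry names the node type under test and
// inputs expected to produce a node of that type.
var jsParseTests = []struct {
	nt     json.NodeType
	inputs []string
}{
	{json.JSONObject, []string{`{}`, `{"a": 1}`}},
	{json.JSONArray, []string{`[]`, `[1, 2, 3]`}},
}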