func TestStringScanner1(t *testing.T) {
	testData := ` "teststring" `
	reader := strings.NewReader(testData)
	jsonScanner := scanner.NewJSONScanner(reader)
	stringScanner := tokenscanner.NewStringScanner(jsonScanner).(*tokenscanner.StringScanner)
	if !stringScanner.CanProcess() {
		t.Error("StringScanner must be able to process testData")
	}
	if stringScanner.RuneOfToken() != '"' {
		t.Error("StringScanner.RuneOfToken must be '\"'")
	}
	if !stringScanner.CanProcessRune('"') {
		t.Error("StringScanner.CanProcessRune must accept rune '\"'")
	}
	expectedToken := token.NewToken(token.KindString, "teststring")
	actualToken, err := stringScanner.Scan()
	if err != nil {
		t.Error(err)
	}
	if !expectedToken.IsEqualTo(actualToken) {
		t.Errorf("expected=%#v, actual=%#v", expectedToken, actualToken)
	}
}
func TestMultiCharacterScannerScan(t *testing.T) {
	testCaseList := []testCaseForTestMultiCharacterScannerScan{
		{data: " null ", tokenKind: token.KindNull, expectedError: false, expected: token.NewToken(token.KindNull, string(token.KindNull))},
		{data: " true ", tokenKind: token.KindTrue, expectedError: false, expected: token.NewToken(token.KindTrue, string(token.KindTrue))},
		{data: "false ", tokenKind: token.KindFalse, expectedError: false, expected: token.NewToken(token.KindFalse, string(token.KindFalse))},
		{data: "f alse ", tokenKind: token.KindFalse, expectedError: true, expected: token.NewToken(token.KindEmpty, "")},
		{data: "tru e", tokenKind: token.KindTrue, expectedError: true, expected: token.NewToken(token.KindEmpty, "")},
		{data: "xnull", tokenKind: token.KindNull, expectedError: true, expected: token.NewToken(token.KindEmpty, "")},
	}
	for testIndex, testCase := range testCaseList {
		stringReader := strings.NewReader(testCase.data)
		jsonScanner := scanner.NewJSONScanner(stringReader)
		multiCharacterScanner := tokenscanner.NewMultiCharacterScanner(testCase.tokenKind, jsonScanner).(*tokenscanner.MultiCharacterScanner)
		actual, err := multiCharacterScanner.Scan()
		if err == nil && testCase.expectedError {
			t.Errorf("testCase[%d] must return error", testIndex)
		}
		if err != nil && !testCase.expectedError {
			t.Error(err)
		}
		expected := testCase.expected
		if !actual.IsEqualTo(expected) {
			t.Errorf("testCase[%d] expected=%#v, actual=%#v", testIndex, expected, actual)
		}
	}
}
func TestSingleCharacterScannerMultiScanner(t *testing.T) {
	testData := ` { [ ], : } `
	expectedString := `{[],:}`
	reader := strings.NewReader(testData)
	jsonScanner := scanner.NewJSONScanner(reader)
	scanners := []tokenscanner.TokenScanner{
		tokenscanner.NewSingleCharacterScanner(token.KindBeginObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindBeginArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindSeparator, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindComma, jsonScanner),
	}
	var buffer bytes.Buffer
	for {
		_, err := jsonScanner.PeekNonWhiteSpaceRune()
		if err == io.EOF {
			break
		}
		scanned := false
		for scannerIndex, singleCharacterScanner := range scanners {
			if singleCharacterScanner.CanProcess() {
				scanned = true
				currentToken, err := singleCharacterScanner.Scan()
				if err != nil {
					t.Log(scannerIndex)
					t.Fatal(err)
				}
				buffer.WriteString(currentToken.Value)
			}
		}
		if !scanned {
			peek, err := jsonScanner.PeekNonWhiteSpaceRune()
			if err != nil {
				t.Log("err:", err)
			} else {
				t.Log("current rune:", string(peek))
			}
			t.Fatal("can't scan")
		}
	}
	if buffer.String() != expectedString {
		t.Errorf("Scan result is wrong: expected=%#v, actual=%#v\n", expectedString, buffer.String())
	}
}
// queueTokensTo fills the token buffer until it contains a token at the given
// index, scanning tokens from the underlying JSON scanner as needed. For
// well-formed input, at least one registered token scanner is expected to be
// able to process the next non-whitespace rune on each iteration.
func (tokenizer *Tokenizer) queueTokensTo(index int) error {
	for tokenizer.buffer.length() <= index {
		if _, err := tokenizer.scanner.PeekNonWhiteSpaceRune(); err != nil {
			return err
		}
		for _, tokenScanner := range tokenizer.tokenScanners {
			if tokenScanner.CanProcess() {
				nextToken, err := tokenScanner.Scan()
				if err != nil {
					return err
				}
				tokenizer.buffer.enqueue(nextToken)
				break
			}
		}
	}
	return nil
}
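// As a usage illustration only (not part of the package): queueTokensTo can
// back a lookahead accessor on Tokenizer along the lines of the sketch below.
// The buffer's at(index) accessor is hypothetical here and may not match the
// real buffer API.
//
//	func (tokenizer *Tokenizer) peekToken(index int) (token.Token, error) {
//		// Lazily fill the buffer up to the requested lookahead position.
//		if err := tokenizer.queueTokensTo(index); err != nil {
//			return token.NewToken(token.KindEmpty, ""), err
//		}
//		// at(index) would return the buffered token without dequeuing it.
//		return tokenizer.buffer.at(index), nil
//	}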
func TestNumber1(t *testing.T) {
	testData := ` -2.8e-10 `
	reader := strings.NewReader(testData)
	jsonScanner := scanner.NewJSONScanner(reader)
	numberScanner := tokenscanner.NewNumberScanner(jsonScanner)
	if !numberScanner.CanProcess() {
		t.Error("NumberScanner must be able to process testData")
	}
	scannedToken, err := numberScanner.Scan()
	if err != nil {
		t.Error(err)
	}
	if scannedToken.Kind != token.KindNumber {
		t.Error("token must be a number")
	}
	if scannedToken.Value != "-2.8e-10" {
		t.Errorf("token value is wrong: %#v", scannedToken.Value)
	}
}
func TestStringScannerScan(t *testing.T) {
	testCaseList := []testCaseForTestStringScannerScan{
		{
			data:     ` "abcdefg" `,
			expected: token.NewToken(token.KindString, "abcdefg"),
		},
		{
			data:     ` "\t\n\r\b\f\\\/" `,
			expected: token.NewToken(token.KindString, "\t\n\r\b\f\\/"),
		},
		{
			data:     ` "\u65E5\u672C\u8A9E\uD867\uDE3D" `,
			expected: token.NewToken(token.KindString, "日本語𩸽"),
		},
	}
	for testIndex, testCase := range testCaseList {
		reader := strings.NewReader(testCase.data)
		jsonScanner := scanner.NewJSONScanner(reader)
		stringScanner := tokenscanner.NewStringScanner(jsonScanner).(*tokenscanner.StringScanner)
		actual, err := stringScanner.Scan()
		if err != nil {
			t.Error(err)
		}
		if !testCase.expected.IsEqualTo(actual) {
			t.Errorf("testCase[%d] expected=%#v, actual=%#v", testIndex, testCase.expected, actual)
		}
	}
}
func TestSingleCharacterScannerScan(t *testing.T) {
	testCaseList := []testCaseForTestSingleCharacterScannerScan{
		{data: "{", tokenKind: token.KindBeginObject, expected: token.NewToken(token.KindBeginObject, string(token.KindBeginObject))},
		{data: "}", tokenKind: token.KindEndObject, expected: token.NewToken(token.KindEndObject, string(token.KindEndObject))},
		{data: "[", tokenKind: token.KindBeginArray, expected: token.NewToken(token.KindBeginArray, string(token.KindBeginArray))},
		{data: "]", tokenKind: token.KindEndArray, expected: token.NewToken(token.KindEndArray, string(token.KindEndArray))},
		{data: ",", tokenKind: token.KindComma, expected: token.NewToken(token.KindComma, string(token.KindComma))},
		{data: ":", tokenKind: token.KindSeparator, expected: token.NewToken(token.KindSeparator, string(token.KindSeparator))},
	}
	for testIndex, testCase := range testCaseList {
		reader := strings.NewReader(testCase.data)
		jsonScanner := scanner.NewJSONScanner(reader)
		singleCharacterScanner := tokenscanner.NewSingleCharacterScanner(testCase.tokenKind, jsonScanner)
		actual, err := singleCharacterScanner.Scan()
		if err != nil {
			t.Logf("testCase[%d]", testIndex)
			t.Error(err)
		}
		if actual != testCase.expected {
			t.Errorf("testCase[%d] expected=%#v, actual=%#v\n", testIndex, testCase.expected, actual)
		}
	}
}
func TestCompositeCase(t *testing.T) {
	testData := ` { false: [true, null, { null: true }] } `
	expectedString := "{false:[true,null,{null:true}]}"
	expectedTokens := []token.Token{
		token.NewToken(token.KindBeginObject, string(token.KindBeginObject)),
		token.NewToken(token.KindFalse, string(token.KindFalse)),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindBeginArray, string(token.KindBeginArray)),
		token.NewToken(token.KindTrue, string(token.KindTrue)),
		token.NewToken(token.KindComma, string(token.KindComma)),
		token.NewToken(token.KindNull, string(token.KindNull)),
		token.NewToken(token.KindComma, string(token.KindComma)),
		token.NewToken(token.KindBeginObject, string(token.KindBeginObject)),
		token.NewToken(token.KindNull, string(token.KindNull)),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindTrue, string(token.KindTrue)),
		token.NewToken(token.KindEndObject, string(token.KindEndObject)),
		token.NewToken(token.KindEndArray, string(token.KindEndArray)),
		token.NewToken(token.KindEndObject, string(token.KindEndObject)),
	}
	reader := strings.NewReader(testData)
	jsonScanner := scanner.NewJSONScanner(reader)
	scanners := []tokenscanner.TokenScanner{
		tokenscanner.NewSingleCharacterScanner(token.KindBeginObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindBeginArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindSeparator, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindComma, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindNull, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindTrue, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindFalse, jsonScanner),
	}
	var buffer bytes.Buffer
	expectedTokenIndex := 0
	for {
		_, err := jsonScanner.PeekNonWhiteSpaceRune()
		if err == io.EOF {
			break
		}
		for scannerIndex, tokenScanner := range scanners {
			if tokenScanner.CanProcess() {
				scannedToken, err := tokenScanner.Scan()
				if err != nil {
					t.Log(scannerIndex)
					t.Error(err)
				}
				if !expectedTokens[expectedTokenIndex].IsEqualTo(scannedToken) {
					t.Errorf("expected=%#v, actual=%#v", expectedTokens[expectedTokenIndex], scannedToken)
				}
				expectedTokenIndex++
				buffer.WriteString(scannedToken.Value)
			}
		}
	}
	if buffer.String() != expectedString {
		t.Errorf("expected=%#v, actual=%#v", expectedString, buffer.String())
	}
}
func TestTokenizer1(t *testing.T) {
	testData := ` { "key": [ { "inner1": "stringValue", "inner2": true, "inner3": false, "inner4": null, "inner5": { "nested": -2.5e-3 } } ] } `
	reader := strings.NewReader(testData)
	jsonScanner := scanner.NewJSONScanner(reader)
	scanners := []tokenscanner.TokenScanner{
		tokenscanner.NewSingleCharacterScanner(token.KindBeginObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindBeginArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindSeparator, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindComma, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindTrue, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindFalse, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindNull, jsonScanner),
		tokenscanner.NewStringScanner(jsonScanner),
		tokenscanner.NewNumberScanner(jsonScanner),
	}
	expectedTokens := []token.Token{
		token.NewToken(token.KindBeginObject, string(token.KindBeginObject)),
		token.NewToken(token.KindString, "key"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindBeginArray, string(token.KindBeginArray)),
		token.NewToken(token.KindBeginObject, string(token.KindBeginObject)),
		token.NewToken(token.KindString, "inner1"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindString, "stringValue"),
		token.NewToken(token.KindComma, string(token.KindComma)),
		token.NewToken(token.KindString, "inner2"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindTrue, string(token.KindTrue)),
		token.NewToken(token.KindComma, string(token.KindComma)),
		token.NewToken(token.KindString, "inner3"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindFalse, string(token.KindFalse)),
		token.NewToken(token.KindComma, string(token.KindComma)),
		token.NewToken(token.KindString, "inner4"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindNull, string(token.KindNull)),
		token.NewToken(token.KindComma, string(token.KindComma)),
		token.NewToken(token.KindString, "inner5"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindBeginObject, string(token.KindBeginObject)),
		token.NewToken(token.KindString, "nested"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindNumber, "-2.5e-3"),
		token.NewToken(token.KindEndObject, string(token.KindEndObject)),
		token.NewToken(token.KindEndObject, string(token.KindEndObject)),
		token.NewToken(token.KindEndArray, string(token.KindEndArray)),
		token.NewToken(token.KindEndObject, string(token.KindEndObject)),
	}
	for {
		if _, err := jsonScanner.PeekNonWhiteSpaceRune(); err == io.EOF {
			break
		}
		for expectedIndex, expected := range expectedTokens {
			for scannerIndex, tokenScanner := range scanners {
				if tokenScanner.CanProcess() {
					actual, err := tokenScanner.Scan()
					if err != nil {
						t.Logf("scanners[%d]: ", scannerIndex)
						t.Error(err)
					}
					if !expected.IsEqualTo(actual) {
						t.Errorf("expected[%d] = %#v, actual = %#v\n", expectedIndex, expected, actual)
					}
					break
				}
			}
		}
	}
}