func TestStringScanner1(t *testing.T) {
	testData := ` "teststring" `
	reader := strings.NewReader(testData)
	jsonScanner := scanner.NewJSONScanner(reader)

	stringScanner := tokenscanner.NewStringScanner(jsonScanner).(*tokenscanner.StringScanner)
	if !stringScanner.CanProcess() {
		t.Error("StringScanner must be able to process testData")
	}
	if stringScanner.RuneOfToken() != '"' {
		t.Error("StringScanner.RuneOfToken must be '\"'")
	}
	if !stringScanner.CanProcessRune('"') {
		t.Error("StringScanner.CanProcessRune must accept '\"'")
	}

	expectedToken := token.NewToken(token.KindString, "teststring")
	scannedToken, err := stringScanner.Scan()
	if err != nil {
		t.Error(err)
	}
	if !expectedToken.IsEqualTo(scannedToken) {
		t.Errorf("expected=%#v, actual=%#v", expectedToken, scannedToken)
	}
}
func TestSingleCharacterScannerCanProcess(t *testing.T) {
	testCaseList := []testCaseForTestSingleCharacterScannerCanProcess{
		{data: "{", tokenKind: token.KindBeginObject, expected: true},
		{data: "}", tokenKind: token.KindEndObject, expected: true},
		{data: "[", tokenKind: token.KindBeginArray, expected: true},
		{data: "]", tokenKind: token.KindEndArray, expected: true},
		{data: ",", tokenKind: token.KindComma, expected: true},
		{data: ":", tokenKind: token.KindSeparator, expected: true},
		{data: "", tokenKind: token.KindBeginObject, expected: false},
		{data: "a", tokenKind: token.KindEndObject, expected: false},
		{data: "}", tokenKind: token.KindBeginArray, expected: false},
		{data: "{", tokenKind: token.KindEndArray, expected: false},
		{data: "-", tokenKind: token.KindComma, expected: false},
		{data: "=", tokenKind: token.KindSeparator, expected: false},
	}

	for testIndex, testCase := range testCaseList {
		reader := strings.NewReader(testCase.data)
		jsonScanner := scanner.NewJSONScanner(reader)
		characterScanner := tokenscanner.NewSingleCharacterScanner(testCase.tokenKind, jsonScanner)

		actual := characterScanner.CanProcess()
		if actual != testCase.expected {
			t.Errorf("testCase[%d] expected=%#v, actual=%#v", testIndex, testCase.expected, actual)
		}
	}
}
func TestSingleCharacterScannerMultiScanner(t *testing.T) {
	testData := ` { [ ], : } `
	expectedString := `{[],:}`

	reader := strings.NewReader(testData)
	jsonScanner := scanner.NewJSONScanner(reader)
	scanners := []tokenscanner.TokenScanner{
		tokenscanner.NewSingleCharacterScanner(token.KindBeginObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindBeginArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindSeparator, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindComma, jsonScanner),
	}

	var buffer bytes.Buffer
	for {
		_, err := jsonScanner.PeekNonWhiteSpaceRune()
		if err == io.EOF {
			break
		}

		scanned := false
		for scannerIndex, currentScanner := range scanners {
			if currentScanner.CanProcess() {
				scanned = true
				currentToken, err := currentScanner.Scan()
				if err != nil {
					t.Log(scannerIndex)
					t.Fatal(err)
				}
				buffer.WriteString(currentToken.Value)
			}
		}
		if !scanned {
			peek, err := jsonScanner.PeekNonWhiteSpaceRune()
			if err != nil {
				t.Log("err:", err)
			} else {
				t.Log("current rune:", string(peek))
			}
			t.Fatal("can't scan")
		}
	}

	if buffer.String() != expectedString {
		t.Errorf("Scan result is wrong: expected=%#v, actual=%#v", expectedString, buffer.String())
	}
}
// queueTokensTo fills the token buffer until it holds at least index+1
// tokens, dispatching each upcoming rune to the first token scanner that
// can process it.
func (tokenizer *Tokenizer) queueTokensTo(index int) error {
	for tokenizer.buffer.length() <= index {
		// Stop (typically with io.EOF) when there is no more input.
		if _, err := tokenizer.scanner.PeekNonWhiteSpaceRune(); err != nil {
			return err
		}

		processed := false
		for _, scanner := range tokenizer.tokenScanners {
			if scanner.CanProcess() {
				nextToken, err := scanner.Scan()
				if err != nil {
					return err
				}
				tokenizer.buffer.enqueue(nextToken)
				processed = true
				break
			}
		}
		// Guard against looping forever on input no scanner recognizes
		// (assumes the standard "errors" package is imported).
		if !processed {
			return errors.New("no token scanner can process the next rune")
		}
	}
	return nil
}
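For readers unfamiliar with the dispatch pattern above, the following standalone sketch shows the same CanProcess/Scan idea outside the package; miniScanner, digitScanner, and letterScanner are hypothetical names used only for this illustration and are not part of the library.

package main

import (
	"fmt"
	"unicode"
)

// miniScanner mirrors the CanProcess/Scan shape of the package's
// TokenScanner interface, but is defined locally for illustration only.
type miniScanner interface {
	CanProcess(r rune) bool
	Scan(r rune) string
}

type digitScanner struct{}

func (digitScanner) CanProcess(r rune) bool { return unicode.IsDigit(r) }
func (digitScanner) Scan(r rune) string     { return "number:" + string(r) }

type letterScanner struct{}

func (letterScanner) CanProcess(r rune) bool { return unicode.IsLetter(r) }
func (letterScanner) Scan(r rune) string     { return "word:" + string(r) }

func main() {
	scanners := []miniScanner{digitScanner{}, letterScanner{}}
	for _, r := range "a1b2" {
		// The first scanner that accepts the rune produces the token,
		// mirroring the break inside queueTokensTo.
		for _, s := range scanners {
			if s.CanProcess(r) {
				fmt.Println(s.Scan(r))
				break
			}
		}
	}
}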
func TestNumber1(t *testing.T) {
	testData := ` -2.8e-10 `
	reader := strings.NewReader(testData)
	jsonScanner := scanner.NewJSONScanner(reader)
	numberScanner := tokenscanner.NewNumberScanner(jsonScanner)

	if !numberScanner.CanProcess() {
		t.Error("token can't be processed")
	}

	scannedToken, err := numberScanner.Scan()
	if err != nil {
		t.Error(err)
	}
	if scannedToken.Kind != token.KindNumber {
		t.Error("token must be a number")
	}
	if scannedToken.Value != "-2.8e-10" {
		t.Errorf("token value is wrong: %#v", scannedToken.Value)
	}
}
func TestMultiCharacterScannerCanProcess(t *testing.T) {
	testCaseList := []testCaseForTestMultiCharacterScannerCanProcess{
		{data: " null ", tokenKind: token.KindNull, expected: true},
		{data: " true ", tokenKind: token.KindTrue, expected: true},
		{data: "false ", tokenKind: token.KindFalse, expected: true},
		{data: "xtrue ", tokenKind: token.KindFalse, expected: false},
		{data: "alse ", tokenKind: token.KindTrue, expected: false},
		{data: "false ", tokenKind: token.KindNull, expected: false},
	}

	for testIndex, testCase := range testCaseList {
		stringReader := strings.NewReader(testCase.data)
		jsonScanner := scanner.NewJSONScanner(stringReader)
		multiCharacterScanner := tokenscanner.NewMultiCharacterScanner(testCase.tokenKind, jsonScanner).(*tokenscanner.MultiCharacterScanner)

		actual := multiCharacterScanner.CanProcess()
		if actual != testCase.expected {
			t.Errorf("testCase[%d] expected=%#v, actual=%#v", testIndex, testCase.expected, actual)
		}
	}
}
func TestCompositeCase(t *testing.T) {
	testData := ` { false: [true, null, { null: true }] } `
	expectedString := "{false:[true,null,{null:true}]}"
	expectedTokens := []token.Token{
		token.NewToken(token.KindBeginObject, string(token.KindBeginObject)),
		token.NewToken(token.KindFalse, string(token.KindFalse)),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindBeginArray, string(token.KindBeginArray)),
		token.NewToken(token.KindTrue, string(token.KindTrue)),
		token.NewToken(token.KindComma, string(token.KindComma)),
		token.NewToken(token.KindNull, string(token.KindNull)),
		token.NewToken(token.KindComma, string(token.KindComma)),
		token.NewToken(token.KindBeginObject, string(token.KindBeginObject)),
		token.NewToken(token.KindNull, string(token.KindNull)),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindTrue, string(token.KindTrue)),
		token.NewToken(token.KindEndObject, string(token.KindEndObject)),
		token.NewToken(token.KindEndArray, string(token.KindEndArray)),
		token.NewToken(token.KindEndObject, string(token.KindEndObject)),
	}

	reader := strings.NewReader(testData)
	jsonScanner := scanner.NewJSONScanner(reader)
	scanners := []tokenscanner.TokenScanner{
		tokenscanner.NewSingleCharacterScanner(token.KindBeginObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindBeginArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindSeparator, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindComma, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindNull, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindTrue, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindFalse, jsonScanner),
	}

	var buffer bytes.Buffer
	expectedTokenIndex := 0
	for {
		_, err := jsonScanner.PeekNonWhiteSpaceRune()
		if err == io.EOF {
			break
		}

		for scannerIndex, currentScanner := range scanners {
			if currentScanner.CanProcess() {
				scannedToken, err := currentScanner.Scan()
				if err != nil {
					t.Log(scannerIndex)
					t.Fatal(err)
				}
				if expectedTokenIndex >= len(expectedTokens) {
					t.Fatalf("scanned more tokens than expected: %#v", scannedToken)
				}
				if !expectedTokens[expectedTokenIndex].IsEqualTo(scannedToken) {
					t.Errorf("expected=%#v, actual=%#v", expectedTokens[expectedTokenIndex], scannedToken)
				}
				expectedTokenIndex++
				buffer.WriteString(scannedToken.Value)
			}
		}
	}

	if buffer.String() != expectedString {
		t.Errorf("expected=%#v, actual=%#v", expectedString, buffer.String())
	}
}
func TestTokenizer1(t *testing.T) {
	testData := ` { "key": [ { "inner1": "stringValue", "inner2": true, "inner3": false, "inner4": null, "inner5": { "nested": -2.5e-3 } } ] } `
	reader := strings.NewReader(testData)
	jsonScanner := scanner.NewJSONScanner(reader)
	scanners := []tokenscanner.TokenScanner{
		tokenscanner.NewSingleCharacterScanner(token.KindBeginObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindBeginArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindSeparator, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindComma, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindTrue, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindFalse, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindNull, jsonScanner),
		tokenscanner.NewStringScanner(jsonScanner),
		tokenscanner.NewNumberScanner(jsonScanner),
	}
	expectedTokens := []token.Token{
		token.NewToken(token.KindBeginObject, string(token.KindBeginObject)),
		token.NewToken(token.KindString, "key"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindBeginArray, string(token.KindBeginArray)),
		token.NewToken(token.KindBeginObject, string(token.KindBeginObject)),
		token.NewToken(token.KindString, "inner1"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindString, "stringValue"),
		token.NewToken(token.KindComma, string(token.KindComma)),
		token.NewToken(token.KindString, "inner2"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindTrue, string(token.KindTrue)),
		token.NewToken(token.KindComma, string(token.KindComma)),
		token.NewToken(token.KindString, "inner3"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindFalse, string(token.KindFalse)),
		token.NewToken(token.KindComma, string(token.KindComma)),
		token.NewToken(token.KindString, "inner4"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindNull, string(token.KindNull)),
		token.NewToken(token.KindComma, string(token.KindComma)),
		token.NewToken(token.KindString, "inner5"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindBeginObject, string(token.KindBeginObject)),
		token.NewToken(token.KindString, "nested"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindNumber, "-2.5e-3"),
		token.NewToken(token.KindEndObject, string(token.KindEndObject)),
		token.NewToken(token.KindEndObject, string(token.KindEndObject)),
		token.NewToken(token.KindEndArray, string(token.KindEndArray)),
		token.NewToken(token.KindEndObject, string(token.KindEndObject)),
	}

	expectedTokenIndex := 0
	for {
		if _, err := jsonScanner.PeekNonWhiteSpaceRune(); err == io.EOF {
			break
		}
		if expectedTokenIndex >= len(expectedTokens) {
			t.Fatal("scanned more tokens than expected")
		}
		expected := expectedTokens[expectedTokenIndex]

		for scannerIndex, currentScanner := range scanners {
			if currentScanner.CanProcess() {
				actual, err := currentScanner.Scan()
				if err != nil {
					t.Fatalf("scanners[%d]: %v", scannerIndex, err)
				}
				if !expected.IsEqualTo(actual) {
					t.Errorf("expected[%d]=%#v, actual=%#v", expectedTokenIndex, expected, actual)
				}
				break
			}
		}
		expectedTokenIndex++
	}

	if expectedTokenIndex != len(expectedTokens) {
		t.Errorf("scanned %d tokens, expected %d", expectedTokenIndex, len(expectedTokens))
	}
}