Example #1
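// TestSingleCharacterScannerCanProcess checks that CanProcess reports true
// only when the next rune in the input matches the scanner's token kind.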
func TestSingleCharacterScannerCanProcess(t *testing.T) {
	testCaseList := []testCaseForTestSingleCharacterScannerCanProcess{
		{data: "{", tokenKind: token.KindBeginObject, expected: true},
		{data: "}", tokenKind: token.KindEndObject, expected: true},
		{data: "[", tokenKind: token.KindBeginArray, expected: true},
		{data: "]", tokenKind: token.KindEndArray, expected: true},
		{data: ",", tokenKind: token.KindComma, expected: true},
		{data: ":", tokenKind: token.KindSeparator, expected: true},

		{data: "", tokenKind: token.KindBeginObject, expected: false},
		{data: "a", tokenKind: token.KindEndObject, expected: false},
		{data: "}", tokenKind: token.KindBeginArray, expected: false},
		{data: "{", tokenKind: token.KindEndArray, expected: false},
		{data: "-", tokenKind: token.KindComma, expected: false},
		{data: "=", tokenKind: token.KindSeparator, expected: false},
	}

	for testIndex, testCase := range testCaseList {
		reader := strings.NewReader(testCase.data)
		jsonScanner := scanner.NewJSONScanner(reader)
		scanner := tokenscanner.NewSingleCharacterScanner(testCase.tokenKind, jsonScanner)
		actual := scanner.CanProcess()
		if actual != testCase.expected {
			t.Errorf("testCase[%d] expected=%#v, actual=%#v\n", testIndex, testCase.expected, actual)
		}
	}
}
Example #2
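// TestSingleCharacterScannerRuneOfToken checks that RuneOfToken returns the
// literal rune associated with each single-character token kind.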
func TestSingleCharacterScannerRuneOfToken(t *testing.T) {
	testCaseList := []testCaseForTestSingleCharacterScannerRuneOfToken{
		{data: "{", tokenKind: token.KindBeginObject, expected: '{'},
		{data: "}", tokenKind: token.KindEndObject, expected: '}'},
		{data: "[", tokenKind: token.KindBeginArray, expected: '['},
		{data: "]", tokenKind: token.KindEndArray, expected: ']'},
		{data: ",", tokenKind: token.KindComma, expected: ','},
		{data: ":", tokenKind: token.KindSeparator, expected: ':'},
	}
	for testIndex, testCase := range testCaseList {
		reader := strings.NewReader(testCase.data)
		jsonScanner := scanner.NewJSONScanner(reader)
		scanner := tokenscanner.NewSingleCharacterScanner(testCase.tokenKind, jsonScanner)
		actual := scanner.(*tokenscanner.SingleCharacterScanner).RuneOfToken()
		if actual != testCase.expected {
			t.Errorf("testCase[%d] expected=%#v, actual=%#v\n", testIndex, testCase.expected, actual)
		}
	}
}
Example #3
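// TestSingleCharacterScannerMultiScanner drives the six single-character
// scanners over whitespace-padded input and checks that the scanned token
// values concatenate to the expected string.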
func TestSingleCharacterScannerMultiScanner(t *testing.T) {
	testData := `
	{
		[	],
		:
	}
	`
	expectedString := `{[],:}`
	reader := strings.NewReader(testData)
	jsonScanner := scanner.NewJSONScanner(reader)
	scanners := []tokenscanner.TokenScanner{
		tokenscanner.NewSingleCharacterScanner(token.KindBeginObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindBeginArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindSeparator, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindComma, jsonScanner),
	}
	var buffer bytes.Buffer
	for {
		_, err := jsonScanner.PeekNonWhiteSpaceRune()
		if err == io.EOF {
			break
		}
		scanned := false
		for scannerIndex, scanner := range scanners {
			if scanner.CanProcess() {
				scanned = true
				currentToken, err := scanner.Scan()
				if err != nil {
					t.Log(scannerIndex)
					t.Fatal(err)
				}
				buffer.WriteString(currentToken.Value)
			}
		}
		if !scanned {
			peek, err := jsonScanner.PeekNonWhiteSpaceRune()
			if err != nil {
				t.Log("err:", err)
			} else {
				t.Log("current rune:", string(peek))
			}
			t.Fatal("can't scan")
		}
	}
	if buffer.String() != expectedString {
		t.Errorf("Scan result is wrong expected=%#v, actual=%#v\n", testData, buffer.String())
	}
}
Example #4
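// createJSONScanners builds the full set of token scanners (structural
// characters, the true/false/null literals, strings and numbers) on top of
// the given scanner.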
func createJSONScanners(scanner scanner.Scanner) []tokenscanner.TokenScanner {
	return []tokenscanner.TokenScanner{
		tokenscanner.NewSingleCharacterScanner(token.KindBeginObject, scanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndObject, scanner),
		tokenscanner.NewSingleCharacterScanner(token.KindBeginArray, scanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndArray, scanner),
		tokenscanner.NewSingleCharacterScanner(token.KindSeparator, scanner),
		tokenscanner.NewSingleCharacterScanner(token.KindComma, scanner),

		tokenscanner.NewMultiCharacterScanner(token.KindTrue, scanner),
		tokenscanner.NewMultiCharacterScanner(token.KindFalse, scanner),
		tokenscanner.NewMultiCharacterScanner(token.KindNull, scanner),

		tokenscanner.NewStringScanner(scanner),
		tokenscanner.NewNumberScanner(scanner),
	}
}
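The helper in Example #4 pairs with the peek-and-dispatch loop the tests write by hand. The sketch below is a hypothetical driver, not part of the library: it assumes PeekNonWhiteSpaceRune is reachable through the scanner.Scanner interface (the examples only show it on the value returned by scanner.NewJSONScanner), that Scan returns a token.Token, and that io and fmt are imported alongside the packages used above.

// tokenizeAll is a hypothetical driver (an assumption, not library code): it
// repeatedly peeks past whitespace, asks each scanner whether it can handle
// the next rune, and collects the scanned tokens until EOF.
func tokenizeAll(jsonScanner scanner.Scanner, scanners []tokenscanner.TokenScanner) ([]token.Token, error) {
	var tokens []token.Token
	for {
		// Assumes PeekNonWhiteSpaceRune is part of scanner.Scanner.
		if _, err := jsonScanner.PeekNonWhiteSpaceRune(); err == io.EOF {
			return tokens, nil
		} else if err != nil {
			return tokens, err
		}
		scanned := false
		for _, tokenScanner := range scanners {
			if tokenScanner.CanProcess() {
				currentToken, err := tokenScanner.Scan()
				if err != nil {
					return tokens, err
				}
				tokens = append(tokens, currentToken)
				scanned = true
				break
			}
		}
		if !scanned {
			return tokens, fmt.Errorf("no scanner can process the next rune")
		}
	}
}

Under those assumptions, the loops in the tests reduce to something like tokenizeAll(jsonScanner, createJSONScanners(jsonScanner)).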
Example #5
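// TestSingleCharacterScannerScan checks that Scan produces the expected token
// for each single-character token kind.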
func TestSingleCharacterScannerScan(t *testing.T) {
	testCaseList := []testCaseForTestSingleCharacterScannerScan{
		{data: "{", tokenKind: token.KindBeginObject, expected: token.NewToken(token.KindBeginObject, string(token.KindBeginObject))},
		{data: "}", tokenKind: token.KindEndObject, expected: token.NewToken(token.KindEndObject, string(token.KindEndObject))},
		{data: "[", tokenKind: token.KindBeginArray, expected: token.NewToken(token.KindBeginArray, string(token.KindBeginArray))},
		{data: "]", tokenKind: token.KindEndArray, expected: token.NewToken(token.KindEndArray, string(token.KindEndArray))},
		{data: ",", tokenKind: token.KindComma, expected: token.NewToken(token.KindComma, string(token.KindComma))},
		{data: ":", tokenKind: token.KindSeparator, expected: token.NewToken(token.KindSeparator, string(token.KindSeparator))},
	}

	for testIndex, testCase := range testCaseList {
		reader := strings.NewReader(testCase.data)
		jsonScanner := scanner.NewJSONScanner(reader)
		scanner := tokenscanner.NewSingleCharacterScanner(testCase.tokenKind, jsonScanner)
		actual, err := scanner.Scan()
		if err != nil {
			t.Logf("testCase[%d]", testIndex)
			t.Error(err)
		}
		if actual != testCase.expected {
			t.Errorf("testCase[%d] expected=%#v, actual=%#v\n", testIndex, testCase.expected, actual)
		}
	}
}
Example #6
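// TestCompositeCase tokenizes a nested document of structural characters and
// literals, comparing both the token sequence and the concatenated output.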
func TestCompositeCase(t *testing.T) {
	testData := `
	{
		false: [true, null,
		{
			null: true
		}]
	}
	`
	expectedString := "{false:[true,null,{null:true}]}"
	expectedTokens := []token.Token{
		token.NewToken(token.KindBeginObject, string(token.KindBeginObject)),
		token.NewToken(token.KindFalse, string(token.KindFalse)),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindBeginArray, string(token.KindBeginArray)),
		token.NewToken(token.KindTrue, string(token.KindTrue)),
		token.NewToken(token.KindComma, string(token.KindComma)),
		token.NewToken(token.KindNull, string(token.KindNull)),
		token.NewToken(token.KindComma, string(token.KindComma)),
		token.NewToken(token.KindBeginObject, string(token.KindBeginObject)),
		token.NewToken(token.KindNull, string(token.KindNull)),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindTrue, string(token.KindTrue)),
		token.NewToken(token.KindEndObject, string(token.KindEndObject)),
		token.NewToken(token.KindEndArray, string(token.KindEndArray)),
		token.NewToken(token.KindEndObject, string(token.KindEndObject)),
	}

	reader := strings.NewReader(testData)
	jsonScanner := scanner.NewJSONScanner(reader)
	scanners := []tokenscanner.TokenScanner{
		tokenscanner.NewSingleCharacterScanner(token.KindBeginObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindBeginArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindSeparator, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindComma, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindNull, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindTrue, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindFalse, jsonScanner),
	}
	var buffer bytes.Buffer
	expectedTokenIndex := 0
	for {
		_, err := jsonScanner.PeekNonWhiteSpaceRune()
		if err == io.EOF {
			break
		}
		for scannerIndex, scanner := range scanners {
			if scanner.CanProcess() {
				scannedToken, err := scanner.Scan()
				if err != nil {
					t.Log(scannerIndex)
					t.Error(err)
				}
				if !expectedTokens[expectedTokenIndex].IsEqualTo(scannedToken) {
					t.Errorf("expected=%#v, actual=%#v", expectedTokens[expectedTokenIndex], scannedToken)
				}
				expectedTokenIndex++
				buffer.WriteString(scannedToken.Value)
			}
		}
	}
	if buffer.String() != expectedString {
		t.Errorf("expected=%#v, actual=%#v", expectedString, buffer.String())
	}
}
Example #7
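// TestTokenizer1 tokenizes a complete JSON document (objects, arrays, strings,
// literals and a number) and compares every scanned token against the
// expected sequence.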
func TestTokenizer1(t *testing.T) {
	testData := `
	{
		"key": [
			{
				"inner1": "stringValue",
				"inner2": true,
				"inner3": false,
				"inner4": null,
				"inner5": {
					"nested": -2.5e-3
				}
			}
		]
	}
	`
	reader := strings.NewReader(testData)
	jsonScanner := scanner.NewJSONScanner(reader)

	scanners := []tokenscanner.TokenScanner{
		tokenscanner.NewSingleCharacterScanner(token.KindBeginObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndObject, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindBeginArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindEndArray, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindSeparator, jsonScanner),
		tokenscanner.NewSingleCharacterScanner(token.KindComma, jsonScanner),

		tokenscanner.NewMultiCharacterScanner(token.KindTrue, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindFalse, jsonScanner),
		tokenscanner.NewMultiCharacterScanner(token.KindNull, jsonScanner),

		tokenscanner.NewStringScanner(jsonScanner),
		tokenscanner.NewNumberScanner(jsonScanner),
	}

	expectedTokens := []token.Token{
		token.NewToken(token.KindBeginObject, string(token.KindBeginObject)),
		token.NewToken(token.KindString, "key"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindBeginArray, string(token.KindBeginArray)),
		token.NewToken(token.KindBeginObject, string(token.KindBeginObject)),

		token.NewToken(token.KindString, "inner1"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindString, "stringValue"),
		token.NewToken(token.KindComma, string(token.KindComma)),

		token.NewToken(token.KindString, "inner2"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindTrue, string(token.KindTrue)),
		token.NewToken(token.KindComma, string(token.KindComma)),

		token.NewToken(token.KindString, "inner3"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindFalse, string(token.KindFalse)),
		token.NewToken(token.KindComma, string(token.KindComma)),

		token.NewToken(token.KindString, "inner4"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindNull, string(token.KindNull)),
		token.NewToken(token.KindComma, string(token.KindComma)),

		token.NewToken(token.KindString, "inner5"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),

		token.NewToken(token.KindBeginObject, string(token.KindBeginObject)),

		token.NewToken(token.KindString, "nested"),
		token.NewToken(token.KindSeparator, string(token.KindSeparator)),
		token.NewToken(token.KindNumber, "-2.5e-3"),

		token.NewToken(token.KindEndObject, string(token.KindEndObject)),

		token.NewToken(token.KindEndObject, string(token.KindEndObject)),
		token.NewToken(token.KindEndArray, string(token.KindEndArray)),
		token.NewToken(token.KindEndObject, string(token.KindEndObject)),
	}

	for {
		if _, err := jsonScanner.PeekNonWhiteSpaceRune(); err == io.EOF {
			break
		}
		for expectedIndex, expected := range expectedTokens {
			for scannerIndex, scanner := range scanners {
				if scanner.CanProcess() {
					actual, err := scanner.Scan()
					if err != nil {
						t.Logf("scanners[%d]: ", scannerIndex)
						t.Error(err)
					}
					if !expected.IsEqualTo(actual) {
						t.Errorf("expected[%d] = %#v, actual = %#v\n", expectedIndex, expected, actual)
					}
					break
				}
			}
		}
	}
}