Commit 0343e4c

Add more test cases for better test coverage.
1 parent 277fc94 commit 0343e4c

File tree

2 files changed: +86 -99 lines changed

lexer.go
lexer_test.go


lexer.go

Lines changed: 8 additions & 28 deletions
@@ -82,11 +82,6 @@ func (lexer *Lexer) skipJSONSegment(n int) {
     lexer.JSONSegment = lexer.JSONSegment[n:]
 }
 
-// push escape character `\` into JSON content
-func (lexer *Lexer) pushEscapeCharacterIntoJSONContent() {
-    lexer.JSONContent.WriteByte(TOKEN_ESCAPE_CHARACTER_SYMBOL)
-}
-
 // push negative symbol `-` into JSON content
 func (lexer *Lexer) pushNegativeIntoJSONContent() {
     lexer.JSONContent.WriteByte(TOKEN_NEGATIVE_SYMBOL)
@@ -280,15 +275,6 @@ func (lexer *Lexer) appendString(str string) error {
             lexer.cleanPaddingContent()
         }
 
-        // double escape character `\`, `\`
-        if lexer.streamStoppedWithLeadingEscapeCharacter() {
-            lexer.pushEscapeCharacterIntoJSONContent()
-            lexer.JSONContent.WriteByte(tokenSymbol)
-            // pop `\` from stack
-            lexer.popTokenStack()
-            continue
-        }
-
         // write current token symbol to JSON content
         lexer.JSONContent.WriteByte(tokenSymbol)
 
@@ -536,12 +522,6 @@ func (lexer *Lexer) appendString(str string) error {
             continue
         }
 
-        // check if json stream stopped with padding content
-        if lexer.havePaddingContent() {
-            lexer.appendPaddingContentToJSONContent()
-            lexer.cleanPaddingContent()
-        }
-
         // write current token symbol to JSON content
         lexer.JSONContent.WriteByte(tokenSymbol)
 
@@ -641,7 +621,7 @@ func (lexer *Lexer) appendString(str string) error {
             continue
         }
 
-        // check if json stream stopped with padding content
+        // check if json stream stopped with padding content, like case `[true , f`
         if lexer.havePaddingContent() {
             lexer.appendPaddingContentToJSONContent()
             lexer.cleanPaddingContent()
@@ -730,8 +710,8 @@ func (lexer *Lexer) appendString(str string) error {
         }
         lexer.pushTokenStack(token)
         lexer.popMirrorTokenStack()
-    case TOKEN_ALPHABET_LOWERCASE_N:
 
+    case TOKEN_ALPHABET_LOWERCASE_N:
         // \n escape `\`, `n`
         if lexer.streamStoppedWithLeadingEscapeCharacter() {
             // push padding escape character `\` into JSON content
@@ -746,7 +726,7 @@ func (lexer *Lexer) appendString(str string) error {
             continue
         }
 
-        // check if json stream stopped with padding content
+        // check if json stream stopped with padding content, like case `[true , n`
         if lexer.havePaddingContent() {
             lexer.appendPaddingContentToJSONContent()
             lexer.cleanPaddingContent()
@@ -771,8 +751,8 @@ func (lexer *Lexer) appendString(str string) error {
             // in object, pop `n`
             lexer.popMirrorTokenStack()
         }
-    case TOKEN_ALPHABET_LOWERCASE_R:
 
+    case TOKEN_ALPHABET_LOWERCASE_R:
         // \r escape `\`, `r`
         if lexer.streamStoppedWithLeadingEscapeCharacter() {
             // push padding escape character `\` into JSON content
@@ -812,8 +792,8 @@ func (lexer *Lexer) appendString(str string) error {
         }
         lexer.pushTokenStack(token)
         lexer.popMirrorTokenStack()
-    case TOKEN_ALPHABET_LOWERCASE_S:
 
+    case TOKEN_ALPHABET_LOWERCASE_S:
         // write current token symbol to JSON content
         lexer.JSONContent.WriteByte(tokenSymbol)
 
@@ -856,7 +836,7 @@ func (lexer *Lexer) appendString(str string) error {
             continue
         }
 
-        // check if json stream stopped with padding content
+        // check if json stream stopped with padding content, like case `[true , t`
         if lexer.havePaddingContent() {
             lexer.appendPaddingContentToJSONContent()
             lexer.cleanPaddingContent()
@@ -1034,7 +1014,7 @@ func (lexer *Lexer) appendString(str string) error {
             continue
         }
 
-        // check if json stream stopped with padding content
+        // check if json stream stopped with padding content, like `[1 , 1`
         if lexer.havePaddingContent() {
             lexer.appendPaddingContentToJSONContent()
             lexer.cleanPaddingContent()
@@ -1143,7 +1123,7 @@ func (lexer *Lexer) appendString(str string) error {
            continue
         }
 
-        // check if json stream stopped with padding content
+        // check if json stream stopped with padding content, like `[1 , -`
         if lexer.havePaddingContent() {
             lexer.appendPaddingContentToJSONContent()
             lexer.cleanPaddingContent()
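
The comments added in the hunks above all point at the same situation: "padding content" is the run of whitespace and separators sitting between a finished value and the next token, as in `[true , f`, where the spaces around the comma have to be buffered and only written out once the next token (`f` for `false`) actually arrives via havePaddingContent, appendPaddingContentToJSONContent and cleanPaddingContent. The sketch below is only a minimal, self-contained illustration of that buffering pattern, not the lexer's real implementation; the completer type and its method names are invented for the example.

package main

import (
    "fmt"
    "strings"
)

// completer is a toy illustration of the "padding content" idea: separator
// bytes that follow a finished value are buffered, and only flushed into the
// output once the next real token arrives. All names here are hypothetical.
type completer struct {
    out     strings.Builder
    padding strings.Builder
}

func (c *completer) havePadding() bool { return c.padding.Len() > 0 }

// flushPadding writes the buffered separators and clears the buffer, the same
// move the real lexer makes with appendPaddingContentToJSONContent followed
// by cleanPaddingContent.
func (c *completer) flushPadding() {
    c.out.WriteString(c.padding.String())
    c.padding.Reset()
}

func (c *completer) feed(b byte) {
    switch b {
    case ' ', '\t', '\n', ',':
        // separator after a value: hold on to it until a token follows
        c.padding.WriteByte(b)
    default:
        if c.havePadding() {
            c.flushPadding()
        }
        c.out.WriteByte(b)
    }
}

func main() {
    c := &completer{}
    for _, b := range []byte(`[true , f`) {
        c.feed(b)
    }
    // `f` arrived, so the padding before it was flushed; a trailing `,` with
    // nothing after it would stay buffered and never reach the output.
    fmt.Printf("%q\n", c.out.String()) // "[true , f"
}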

lexer_test.go

Lines changed: 78 additions & 71 deletions
@@ -11,77 +11,84 @@ import (
 func TestCompleteJSON_base(t *testing.T) {
     streamingJSONCase := map[string]string{
         // test case: basic object properity
-        `{`: `{}`, // mirror stack: [], should remove from stack: [], should push into mirror stack: [`}`]
-        `{}`: `{}`, // mirror stack: [], should remove from stack: [], should push into mirror stack: []
-        `{"`: `{"":null}`, // mirror stack: [`}`], should remove from stack: [], should push into mirror stack: [`"`, `:`, `n`, `u`, `l`, `l`]
-        `{""`: `{"":null}`, // mirror stack: [`"`, `:`, `n`, `u`, `l`, `l`,`}`], should remove from stack: [`"`], should push into mirror stack: []
-        `{"a`: `{"a":null}`,
-        `{"a"`: `{"a":null}`,
-        `{"a":`: `{"a":null}`,
-        `{"a":n`: `{"a":null}`,
-        `{"a":nu`: `{"a":null}`,
-        `{"a":nul`: `{"a":null}`,
-        `{"a":null`: `{"a":null}`,
-        `{"a":null,`: `{"a":null}`,
-        `{"a":t`: `{"a":true}`,
-        `{"a":tr`: `{"a":true}`,
-        `{"a":tru`: `{"a":true}`,
-        `{"a":true`: `{"a":true}`,
-        `{"a":true,`: `{"a":true}`,
-        `{"a":f`: `{"a":false}`,
-        `{"a":fa`: `{"a":false}`,
-        `{"a":fal`: `{"a":false}`,
-        `{"a":fals`: `{"a":false}`,
-        `{"a":false`: `{"a":false}`,
-        `{"a":false,`: `{"a":false}`,
-        `{"a":-`: `{"a":0}`,
-        `{"a":12`: `{"a":12}`,
-        `{"a":-0`: `{"a":-0}`, // @TODO: should be 0, not -0
-        `{"a":-12`: `{"a":-12}`,
-        `{"a":12,`: `{"a":12}`,
-        `{"a":12.`: `{"a":12.0}`,
-        `{"a":12.15`: `{"a":12.15}`,
-        `{"a":12.15,`: `{"a":12.15}`,
-        `{"a":-12.15,`: `{"a":-12.15}`,
-        `{"a":-1.215e,`: `{"a":-1.215}`,
-        `{"a":-1.215E,`: `{"a":-1.215}`,
-        `{"a":-1.215e1,`: `{"a":-1.215e1}`,
-        `{"a":-1.215e-1,`: `{"a":-1.215e-1}`,
-        `{"a":-1.215e+1,`: `{"a":-1.215e+1}`,
-        `{"a":-1.215E1,`: `{"a":-1.215E1}`,
-        `{"a":-1.215E-1,`: `{"a":-1.215E-1}`,
-        `{"a":-1.215E+1,`: `{"a":-1.215E+1}`,
-        `{"a":-1.215e12`: `{"a":-1.215e12}`,
-        `{"a":-1.215E12`: `{"a":-1.215E12}`,
-        `{"a":-1.215e12,`: `{"a":-1.215e12}`,
-        `{"a":-1.215E12,`: `{"a":-1.215E12}`,
-        `{"a":"`: `{"a":""}`,
-        `{"a":""`: `{"a":""}`,
-        `{"a":"",`: `{"a":""}`,
-        `{"a":"string`: `{"a":"string"}`,
-        `{"a":"string"`: `{"a":"string"}`,
-        `{"a":"string",`: `{"a":"string"}`,
-        `{"a":"\u0`: `{"a":""}`,
-        `{"a":"\u00`: `{"a":""}`,
-        `{"a":"\u004`: `{"a":""}`,
-        `{"a":"\u0049`: `{"a":"\u0049"}`,
-        `{"a":"\u0049"`: `{"a":"\u0049"}`,
-        `{"a":"\u0049",`: `{"a":"\u0049"}`,
-        `{"a":"\u0049","b":"`: `{"a":"\u0049","b":""}`,
-        `{"a":"\u0049","b":"\`: `{"a":"\u0049","b":""}`,
-        `{"a":"\u0049","b":"\u`: `{"a":"\u0049","b":""}`,
-        `{"a":"\u0049","b":"\u0`: `{"a":"\u0049","b":""}`,
-        `{"a":"\u0049","b":"\u00`: `{"a":"\u0049","b":""}`,
-        `{"a":"\u0049","b":"\u005`: `{"a":"\u0049","b":""}`,
-        `{"a":"\u0049","b":"\u0050`: `{"a":"\u0049","b":"\u0050"}`,
-        `{"a":"\u0049","b":"\u0050"`: `{"a":"\u0049","b":"\u0050"}`,
-        `{"a":"\u0049","b":"\u0050"}`: `{"a":"\u0049","b":"\u0050"}`,
-        `{"a":"\u0123",`: `{"a":"\u0123"}`,
-        `{"a":"\u4567",`: `{"a":"\u4567"}`,
-        `{"a":"\u89ab",`: `{"a":"\u89ab"}`,
-        `{"a":"\u89AB",`: `{"a":"\u89AB"}`,
-        `{"a":"\ucdef",`: `{"a":"\ucdef"}`,
-        `{"a":"\uCDEF",`: `{"a":"\uCDEF"}`,
+        `{`: `{}`, // mirror stack: [], should remove from stack: [], should push into mirror stack: [`}`]
+        `{}`: `{}`, // mirror stack: [], should remove from stack: [], should push into mirror stack: []
+        `{"`: `{"":null}`, // mirror stack: [`}`], should remove from stack: [], should push into mirror stack: [`"`, `:`, `n`, `u`, `l`, `l`]
+        `{""`: `{"":null}`, // mirror stack: [`"`, `:`, `n`, `u`, `l`, `l`,`}`], should remove from stack: [`"`], should push into mirror stack: []
+        `{"a`: `{"a":null}`,
+        `{"a"`: `{"a":null}`,
+        `{"a":`: `{"a":null}`,
+        `{"a":n`: `{"a":null}`,
+        `{"a":nu`: `{"a":null}`,
+        `{"a":nul`: `{"a":null}`,
+        `{"a":null`: `{"a":null}`,
+        `{"a":null , "b`: `{"a":null , "b":null}`,
+        `{"a":t`: `{"a":true}`,
+        `{"a":tr`: `{"a":true}`,
+        `{"a":tru`: `{"a":true}`,
+        `{"a":true`: `{"a":true}`,
+        `{"a":true,`: `{"a":true}`,
+        `{"a":true , "b`: `{"a":true , "b":null}`,
+        `{"a":f`: `{"a":false}`,
+        `{"a":fa`: `{"a":false}`,
+        `{"a":fal`: `{"a":false}`,
+        `{"a":fals`: `{"a":false}`,
+        `{"a":false`: `{"a":false}`,
+        `{"a":false,`: `{"a":false}`,
+        `{"a":false , "b`: `{"a":false , "b":null}`,
+        `{"a":-`: `{"a":0}`,
+        `{"a":12`: `{"a":12}`,
+        `{"a":-0`: `{"a":-0}`, // @TODO: should be 0, not -0
+        `{"a":-12`: `{"a":-12}`,
+        `{"a":12,`: `{"a":12}`,
+        `{"a":12.`: `{"a":12.0}`,
+        `{"a":12.15`: `{"a":12.15}`,
+        `{"a":12.15,`: `{"a":12.15}`,
+        `{"a":-12.15,`: `{"a":-12.15}`,
+        `{"a":-1.215e,`: `{"a":-1.215}`,
+        `{"a":-1.215E,`: `{"a":-1.215}`,
+        `{"a":-1.215e1,`: `{"a":-1.215e1}`,
+        `{"a":-1.215e-1,`: `{"a":-1.215e-1}`,
+        `{"a":-1.215e+1,`: `{"a":-1.215e+1}`,
+        `{"a":-1.215E1,`: `{"a":-1.215E1}`,
+        `{"a":-1.215E-1,`: `{"a":-1.215E-1}`,
+        `{"a":-1.215E+1,`: `{"a":-1.215E+1}`,
+        `{"a":-1.215e12`: `{"a":-1.215e12}`,
+        `{"a":-1.215E12`: `{"a":-1.215E12}`,
+        `{"a":-1.215e12,`: `{"a":-1.215e12}`,
+        `{"a":-1.215E12,`: `{"a":-1.215E12}`,
+        `{"a":"`: `{"a":""}`,
+        `{"a":""`: `{"a":""}`,
+        `{"a":"",`: `{"a":""}`,
+        `{"a":"string`: `{"a":"string"}`,
+        `{"a":"string"`: `{"a":"string"}`,
+        `{"a":"string",`: `{"a":"string"}`,
+        `{"a":"abcdefghijklmnopqrstuvwxyz",`: `{"a":"abcdefghijklmnopqrstuvwxyz"}`,
+        `{"a":"ABCDEFGHIJKLMNOPQRSTUVWXYZ",`: `{"a":"ABCDEFGHIJKLMNOPQRSTUVWXYZ"}`,
+        `{"a":"0123456789",`: `{"a":"0123456789"}`,
+        `{"a":"\u0`: `{"a":""}`,
+        `{"a":"\u00`: `{"a":""}`,
+        `{"a":"\u004`: `{"a":""}`,
+        `{"a":"\u0049`: `{"a":"\u0049"}`,
+        `{"a":"\u0049"`: `{"a":"\u0049"}`,
+        `{"a":"\u0049",`: `{"a":"\u0049"}`,
+        `{"a":"\u0049","b":"`: `{"a":"\u0049","b":""}`,
+        `{"a":"\u0049","b":"\`: `{"a":"\u0049","b":""}`,
+        `{"a":"\u0049","b":"\u`: `{"a":"\u0049","b":""}`,
+        `{"a":"\u0049","b":"\u0`: `{"a":"\u0049","b":""}`,
+        `{"a":"\u0049","b":"\u00`: `{"a":"\u0049","b":""}`,
+        `{"a":"\u0049","b":"\u005`: `{"a":"\u0049","b":""}`,
+        `{"a":"\u0049","b":"\u0050`: `{"a":"\u0049","b":"\u0050"}`,
+        `{"a":"\u0049","b":"\u0050"`: `{"a":"\u0049","b":"\u0050"}`,
+        `{"a":"\u0049","b":"\u0050"}`: `{"a":"\u0049","b":"\u0050"}`,
+        `{"a":"\u0123",`: `{"a":"\u0123"}`,
+        `{"a":"\u4567",`: `{"a":"\u4567"}`,
+        `{"a":"\u89ab",`: `{"a":"\u89ab"}`,
+        `{"a":"\u89AB",`: `{"a":"\u89AB"}`,
+        `{"a":"\ucdef",`: `{"a":"\ucdef"}`,
+        `{"a":"\ucdee",`: `{"a":"\ucdee"}`,
+        `{"a":"\uaaaa",`: `{"a":"\uaaaa"}`,
+        `{"a":"\uCDEF",`: `{"a":"\uCDEF"}`,
 
         // test case: escape character
         `{"\`: `{"":null}`,

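The mirror-stack comments on the first table entries describe how completion works: every opening token pushes onto a mirror stack the closing tokens still needed to make the document valid (`{` pushes `}`, an opening `"` pushes a closing `"`, and so on), and completing the JSON means appending whatever is left on that stack. The toy below reproduces that idea for braces, brackets and plain strings only, just to make the expected values in the table easier to read; it is not the package's lexer, it ignores escapes, truncated literals and the `:null` padding for bare keys, and the completeJSON name is invented for the example.

package main

import "fmt"

// completeJSON is a toy version of the mirror-stack idea from the test
// comments: each opening token pushes the byte needed to close it, and the
// completed document is the consumed input plus the leftover mirror stack.
// It only understands {, [ and " (no escapes, no null/true/false padding),
// so it cannot reproduce the richer cases in the table above.
func completeJSON(chunk string) string {
    var mirror []byte // closing bytes still owed, used as a stack
    inString := false
    for i := 0; i < len(chunk); i++ {
        switch b := chunk[i]; {
        case inString:
            if b == '"' {
                inString = false
                mirror = mirror[:len(mirror)-1] // the closing quote arrived
            }
        case b == '{':
            mirror = append(mirror, '}')
        case b == '[':
            mirror = append(mirror, ']')
        case b == '"':
            inString = true
            mirror = append(mirror, '"')
        case b == '}' || b == ']':
            if len(mirror) > 0 {
                mirror = mirror[:len(mirror)-1]
            }
        }
    }
    out := []byte(chunk)
    for i := len(mirror) - 1; i >= 0; i-- { // pop in LIFO order
        out = append(out, mirror[i])
    }
    return string(out)
}

func main() {
    fmt.Println(completeJSON(`{`))        // {}
    fmt.Println(completeJSON(`[["ab`))    // [["ab"]]
    fmt.Println(completeJSON(`{"a":["x`)) // {"a":["x"]}
}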