@@ -67,11 +67,13 @@ func TestTokenize(t *testing.T) {
 		{2e4, Token{key: TokenFloat, value: []byte("2e4")}},
 	}
 	for _, v := range floats {
-		stream := tokenizer.ParseBytes(v.token.value)
-		require.Equal(t, v.token.Value(), stream.CurrentToken().Value())
-		require.Equal(t, v.token.Key(), stream.CurrentToken().Key())
-		require.Equal(t, v.token.StringSettings(), stream.CurrentToken().StringSettings())
-		require.Equal(t, v.value, stream.CurrentToken().ValueFloat64())
+		t.Run(string(v.token.value), func(t *testing.T) {
+			stream := tokenizer.ParseBytes(v.token.value)
+			require.Equalf(t, v.token.Value(), stream.CurrentToken().Value(), "check %s", v.token.value)
+			require.Equal(t, v.token.Key(), stream.CurrentToken().Key())
+			require.Equal(t, v.token.StringSettings(), stream.CurrentToken().StringSettings())
+			require.Equal(t, v.value, stream.CurrentToken().ValueFloat64())
+		})
 	}
 })
7779
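Note on the hunk above: the plain loop becomes a set of named subtests via t.Run, so each float literal reports separately, and the first assertion switches to Equalf with a message naming the failing input. For context, a hypothetical sketch of the floats table the loop ranges over (the real definition lives in the package-internal test file and may differ):

	floats := []struct {
		value float64 // expected ValueFloat64() result
		token Token   // expected token key and raw value
	}{
		{2e4, Token{key: TokenFloat, value: []byte("2e4")}},
	}
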
@@ -144,11 +146,16 @@ func TestTokenizeEdgeCases(t *testing.T) {
 		{key: TokenInteger, value: s2b("0"), offset: 0, line: 1, id: 0},
 		{key: TokenKeyword, value: s2b("E"), offset: 1, line: 1, id: 1},
 	}},
+	{"0E+", []Token{ // https://github.com/bzick/tokenizer/issues/28
+		{key: TokenInteger, value: s2b("0"), offset: 0, line: 1, id: 0},
+		{key: TokenKeyword, value: s2b("E"), offset: 1, line: 1, id: 1},
+		{key: TokenUnknown, value: s2b("+"), offset: 2, line: 1, id: 2},
+	}},
 }
 for _, v := range data1 {
 	t.Run(v.str, func(t *testing.T) {
 		stream := tokenizer.ParseString(v.str)
-		require.Samef(t, v.tokens, stream.GetSnippet(10, 10), "parse data1 %s: %s", v.str, stream)
+		require.Equalf(t, v.tokens, stream.GetSnippet(10, 10), "parse data1 %s: %s", v.str, stream)
 	})
 }
})
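Note on the hunk above: besides adding the "0E+" regression case from issue #28, it swaps require.Samef for require.Equalf. In testify, Same asserts that both arguments are pointers to the very same object, which a freshly allocated []Token returned by GetSnippet can never satisfy; Equal performs a deep value comparison, which is what this table-driven check needs. A minimal, self-contained illustration of the difference (not part of the patch):

	package tokenizer_test

	import (
		"testing"

		"github.com/stretchr/testify/require"
	)

	// Same demands pointer identity; Equal compares values deeply.
	func TestSameVsEqual(t *testing.T) {
		a := []int{1, 2}
		b := []int{1, 2}
		require.Equal(t, a, b) // passes: equal values
		// require.Same(t, a, b) // would fail: slices are not identical pointers
	}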