
Commit 33bb86e

Add more edge cases
1 parent 2b29f75 commit 33bb86e

File tree

parser.go
tokenizer_test.go

2 files changed: +14 -7 lines changed

parser.go

Lines changed: 1 addition & 1 deletion

@@ -301,7 +301,7 @@ func (p *parsing) parseNumber() bool {
 			end = p.pos
 			hasPoint = true
 		} else if p.curr == 'e' || p.curr == 'E' {
-			if !hasNumber || !(isNumberByte(nextByte) || nextByte == '-') || hasExp {
+			if !hasNumber || !(isNumberByte(nextByte) || nextByte == '-' || nextByte == '+') || hasExp {
 				break
 			}
 			floatTraitPos = p.pos
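
The change is to the lookahead after an 'e'/'E': previously only a digit or '-' could start an exponent, so literals with an explicit plus sign (e.g. 2e+4) fell out of the number path; now '+' is accepted as well. A minimal standalone sketch of the accepted-lookahead rule (isNumberByte is re-declared here and validExponentStart is a hypothetical name, not the library's API):

package main

import "fmt"

// isNumberByte reports whether b is an ASCII digit, mirroring the
// helper the parser uses.
func isNumberByte(b byte) bool { return b >= '0' && b <= '9' }

// validExponentStart mirrors the fixed condition: after 'e'/'E' the
// next byte may be a digit, '-', or (with this commit) '+'.
func validExponentStart(next byte) bool {
	return isNumberByte(next) || next == '-' || next == '+'
}

func main() {
	for _, b := range []byte{'4', '-', '+', 'x'} {
		fmt.Printf("%q -> %v\n", b, validExponentStart(b))
	}
}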

tokenizer_test.go

Lines changed: 13 additions & 6 deletions

@@ -67,11 +67,13 @@ func TestTokenize(t *testing.T) {
 			{2e4, Token{key: TokenFloat, value: []byte("2e4")}},
 		}
 		for _, v := range floats {
-			stream := tokenizer.ParseBytes(v.token.value)
-			require.Equal(t, v.token.Value(), stream.CurrentToken().Value())
-			require.Equal(t, v.token.Key(), stream.CurrentToken().Key())
-			require.Equal(t, v.token.StringSettings(), stream.CurrentToken().StringSettings())
-			require.Equal(t, v.value, stream.CurrentToken().ValueFloat64())
+			t.Run(string(v.token.value), func(t *testing.T) {
+				stream := tokenizer.ParseBytes(v.token.value)
+				require.Equalf(t, v.token.Value(), stream.CurrentToken().Value(), "check %s", v.token.value)
+				require.Equal(t, v.token.Key(), stream.CurrentToken().Key())
+				require.Equal(t, v.token.StringSettings(), stream.CurrentToken().StringSettings())
+				require.Equal(t, v.value, stream.CurrentToken().ValueFloat64())
+			})
 		}
 	})
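
The float assertions now run inside t.Run, so each literal becomes a named subtest and a failure reports which input broke rather than only the loop's line number. The pattern in isolation (a standalone sketch using strconv, not the repository's test):

package demo

import (
	"strconv"
	"testing"
)

// TestFloats demonstrates per-input subtests: `go test -run TestFloats/2e+4`
// can target a single literal, and failures are labeled by input.
func TestFloats(t *testing.T) {
	for _, in := range []string{"2e4", "2e-4", "2e+4"} {
		t.Run(in, func(t *testing.T) {
			if _, err := strconv.ParseFloat(in, 64); err != nil {
				t.Fatalf("parse %s: %v", in, err)
			}
		})
	}
}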

@@ -144,11 +146,16 @@ func TestTokenizeEdgeCases(t *testing.T) {
 				{key: TokenInteger, value: s2b("0"), offset: 0, line: 1, id: 0},
 				{key: TokenKeyword, value: s2b("E"), offset: 1, line: 1, id: 1},
 			}},
+			{"0E+", []Token{ // https://github.com/bzick/tokenizer/issues/28
+				{key: TokenInteger, value: s2b("0"), offset: 0, line: 1, id: 0},
+				{key: TokenKeyword, value: s2b("E"), offset: 1, line: 1, id: 1},
+				{key: TokenUnknown, value: s2b("+"), offset: 2, line: 1, id: 2},
+			}},
 		}
 		for _, v := range data1 {
 			t.Run(v.str, func(t *testing.T) {
 				stream := tokenizer.ParseString(v.str)
-				require.Samef(t, v.tokens, stream.GetSnippet(10, 10), "parse data1 %s: %s", v.str, stream)
+				require.Equalf(t, v.tokens, stream.GetSnippet(10, 10), "parse data1 %s: %s", v.str, stream)
 			})
 		}
 	})
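
The new "0E+" case (issue #28) pins down the behaviour when the exponent sign is not followed by digits: the input falls back to three separate tokens instead of a float. The hunk also swaps require.Samef for require.Equalf. In testify, require.Same asserts that two pointers reference the same object, while require.Equal performs a deep value comparison; since GetSnippet presumably returns a freshly built token slice, value equality is the assertion that can actually pass. A tiny standalone illustration of the difference (not part of the repo):

package demo

import (
	"testing"

	"github.com/stretchr/testify/require"
)

func TestSameVsEqual(t *testing.T) {
	a := []int{1, 2}
	b := []int{1, 2}
	require.Equal(t, a, b) // passes: deep value equality
	// require.Same(t, a, b) would fail: Same asserts pointer identity,
	// and a and b are two distinct slices.
}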
