
Commit 25ea868

#9: panic with last token in stream (#18)
1 parent: cef4da6

File tree: 3 files changed, +22 −2 lines

parser.go

Lines changed: 1 addition & 1 deletion
@@ -199,11 +199,11 @@ func (p *parsing) parse() {
 			p.token.key = TokenUnknown
 			p.token.value = p.str[p.pos : p.pos+1]
 			p.token.offset = p.offset + p.pos
-			p.next()
 			p.emmitToken()
 			if p.curr == 0 {
 				break
 			}
+			p.next()
 		}
 		if len(p.token.indent) > 0 {
 			p.tail = p.token.indent
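Judging by the diff, the one-line move above is the whole fix: the parser now emits the trailing TokenUnknown before calling p.next(), and only advances when input remains (p.curr != 0), so a stream that ends in an unknown byte no longer panics. A minimal end-user reproduction, adapted from the TestIssue9 added below — the import path github.com/bzick/tokenizer is an assumption, not part of this diff:

package main

import (
	"bytes"

	"github.com/bzick/tokenizer"
)

func main() {
	// The input ends with "]" — an unknown token at the very end of
	// the stream, which triggered the panic before this commit.
	parser := tokenizer.New()
	buf := bytes.NewBuffer([]byte("a]"))

	stream := parser.ParseStream(buf, 4096)
	defer stream.Close()

	// Walk the stream to the end; with the fix this terminates cleanly.
	for stream.IsValid() {
		stream.GoNext()
	}
}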

stream_test.go

Lines changed: 16 additions & 0 deletions
@@ -256,6 +256,22 @@ func TestIssue11(t *testing.T) {
 	require.Equal(t, "2", string(stream.CurrentToken().Value()))
 }
 
+func TestIssue9(t *testing.T) {
+	parser := New()
+	buf := bytes.NewBuffer([]byte("a]"))
+
+	stream := parser.ParseStream(buf, 4096)
+	defer stream.Close()
+	stream.CurrentToken()
+	for stream.IsValid() {
+		switch stream.CurrentToken().Key() {
+		default:
+			// println(stream.CurrentToken().ValueString())
+			stream.GoNext()
+		}
+	}
+}
+
 var pattern = []byte(`<item count=10 valid id="n9762"> Носки <![CDATA[ socks ]]></item>`)
 
 type dataGenerator struct {

tokenizer_test.go

Lines changed: 5 additions & 1 deletion
@@ -116,7 +116,7 @@ func TestTokenizeEdgeCases(t *testing.T) {
 		{key: TokenUnknown, value: s2b("_"), offset: 3, line: 1, id: 1},
 		{key: TokenInteger, value: s2b("1"), offset: 4, line: 1, id: 2},
 	}},
-	{"1..2", []Token{
+	{"1..2", []Token{ // https://github.com/bzick/tokenizer/issues/11
 		{key: TokenInteger, value: s2b("1"), offset: 0, line: 1, id: 0},
 		{key: TokenUnknown, value: s2b("."), offset: 1, line: 1, id: 1},
 		{key: TokenFloat, value: s2b(".2"), offset: 2, line: 1, id: 2},
@@ -136,6 +136,10 @@ func TestTokenizeEdgeCases(t *testing.T) {
 		{key: TokenFloat, value: s2b(".1"), offset: 0, line: 1, id: 0},
 		{key: TokenFloat, value: s2b(".2"), offset: 2, line: 1, id: 1},
 	}},
+	{"a]", []Token{ // https://github.com/bzick/tokenizer/issues/9
+		{key: TokenKeyword, value: s2b("a"), offset: 0, line: 1, id: 0},
+		{key: TokenUnknown, value: s2b("]"), offset: 1, line: 1, id: 1},
+	}},
 }
 for _, v := range data1 {
 	stream := tokenizer.ParseString(v.str)
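For reference, the same regression input can be checked without streaming, via ParseString as the surrounding test does. A quick sketch, again assuming the import path github.com/bzick/tokenizer and that ParseString returns a stream with the same CurrentToken/GoNext/Close interface used in stream_test.go:

package main

import (
	"fmt"

	"github.com/bzick/tokenizer"
)

func main() {
	// Per the new table entry, "a]" tokenizes as TokenKeyword ("a")
	// followed by TokenUnknown ("]").
	stream := tokenizer.New().ParseString("a]")
	defer stream.Close()
	for stream.IsValid() {
		tok := stream.CurrentToken()
		fmt.Println(tok.Key(), tok.ValueString())
		stream.GoNext()
	}
}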
