Mirror of https://github.com/kanaka/mal.git

Merge pull request #127 from dubek/fix-nim-tokenizer-oom

nim: fix tokenizer endless loop (and out-of-memory) on bad input
Joel Martin, 2015-12-04 16:44:10 -06:00
commit a136ea497e

2 changed files with 3 additions and 1 deletion


@@ -28,7 +28,7 @@ proc tokenize(str: string): seq[string] =
   while pos < str.len:
     var matches: array[2, string]
     var len = str.findBounds(tokenRE, matches, pos)
-    if len.first != -1 and len.last != -1:
+    if len.first != -1 and len.last != -1 and len.last >= len.first:
       pos = len.last + 1
       if matches[0][0] != ';':
         result.add matches[0]
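
For context, a minimal standalone sketch of the fixed loop follows. It is not the verbatim mal source: the token regex, the trailing "else: break", the empty-capture guard, and the renamed bounds variable are assumptions reconstructed around the one-line change above. The bug being fixed: on malformed input such as an unterminated string, findBounds can report a zero-width match where last < first; the old condition still passed, pos never advanced, and the loop spun forever while result grew until memory ran out.

import re

# Assumed mal-style token regex; not shown in the diff above.
let tokenRE = re"""[\s,]*(~@|[\[\]{}()'`~^@]|"(?:\\.|[^\\"])*"|;.*|[^\s\[\]{}('"`,;)]*)"""

proc tokenize(str: string): seq[string] =
  result = @[]
  var pos = 0
  while pos < str.len:
    var matches: array[2, string]
    let bounds = str.findBounds(tokenRE, matches, pos)
    # A zero-width match is reported with bounds.last < bounds.first; without
    # the extra check, pos = bounds.last + 1 re-reads the same position forever.
    if bounds.first != -1 and bounds.last != -1 and bounds.last >= bounds.first:
      pos = bounds.last + 1
      if matches[0].len > 0 and matches[0][0] != ';':  # skip comments and empty captures
        result.add matches[0]
    else:
      break  # bad input: stop tokenizing instead of looping and exhausting memory

when isMainModule:
  # (1 "abc contains an unterminated string; before the fix this call never returned.
  echo tokenize("(1 \"abc")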


@@ -87,6 +87,8 @@ abc-def
 ; expected ']', got EOF
 "abc
 ; expected '"', got EOF
+(1 "abc
+; expected ')', got EOF
 ;;
 ;; -------- Optional Functionality --------
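
The two lines added to the test file exercise exactly this failure mode: (1 "abc is an unterminated string inside an unclosed list, so the reader is expected to report the missing ')' as an EOF error rather than hang; before this fix the Nim tokenizer never returned control to the reader on such input.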