Improved Windows support in the tokenizer
This commit is contained in:
parent
487c7fb3a6
commit
42bbe5d4ed
1 changed file with 1 addition and 1 deletion
|
@ -9,7 +9,7 @@ func Tokenize(buffer []byte) List {
|
||||||
|
|
||||||
for i < Position(len(buffer)) {
|
for i < Position(len(buffer)) {
|
||||||
switch buffer[i] {
|
switch buffer[i] {
|
||||||
case ' ', '\t':
|
case ' ', '\t', '\r':
|
||||||
case ',':
|
case ',':
|
||||||
tokens = append(tokens, Token{Kind: Separator, Position: i, Length: 1})
|
tokens = append(tokens, Token{Kind: Separator, Position: i, Length: 1})
|
||||||
case '(':
|
case '(':
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue