Improved Windows support in the tokenizer
@@ -9,7 +9,7 @@ func Tokenize(buffer []byte) List {
 	for i < Position(len(buffer)) {
 		switch buffer[i] {
-		case ' ', '\t':
+		case ' ', '\t', '\r':
 		case ',':
 			tokens = append(tokens, Token{Kind: Separator, Position: i, Length: 1})
 		case '(':
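For context, here is a minimal, self-contained sketch of how the loop behaves after this change. The Position, Kind, Token, and List definitions below are simplified stand-ins for the package's real types, and GroupStart is a hypothetical kind for '(' tokens; only the loop and switch structure is taken from the diff. With '\r' grouped with ' ' and '\t', a CRLF-terminated line tokenizes the same as its LF-terminated equivalent.

package main

import "fmt"

// Simplified stand-ins for the package's real types; the actual definitions
// of Position, Kind, Token, and List live elsewhere in the tokenizer and are
// assumed here for illustration only.
type Position int

type Kind int

const (
	Separator Kind = iota
	GroupStart // hypothetical kind for '(' tokens
)

type Token struct {
	Kind     Kind
	Position Position
	Length   Position
}

type List []Token

// Tokenize mirrors the loop structure shown in the diff: '\r' is now skipped
// together with ' ' and '\t', so the CR half of a Windows CRLF line ending no
// longer sits between tokens as an unhandled byte.
func Tokenize(buffer []byte) List {
	var tokens List
	var i Position
	for i < Position(len(buffer)) {
		switch buffer[i] {
		case ' ', '\t', '\r':
			// Whitespace, including carriage returns, produces no token.
		case ',':
			tokens = append(tokens, Token{Kind: Separator, Position: i, Length: 1})
		case '(':
			tokens = append(tokens, Token{Kind: GroupStart, Position: i, Length: 1})
		}
		i++
	}
	return tokens
}

func main() {
	unix := Tokenize([]byte("a, b\n"))
	windows := Tokenize([]byte("a, b\r\n"))
	fmt.Println(len(unix) == len(windows)) // true: the CR contributes nothing
}

The real tokenizer presumably emits tokens for identifiers, newlines, and more; the point here is only that adding '\r' to the whitespace case keeps Windows line endings from leaking into the token stream.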