Improved Windows support in the tokenizer
@@ -9,7 +9,7 @@ func Tokenize(buffer []byte) List {
 	for i < Position(len(buffer)) {
 		switch buffer[i] {
-		case ' ', '\t':
+		case ' ', '\t', '\r':
 		case ',':
			tokens = append(tokens, Token{Kind: Separator, Position: i, Length: 1})
 		case '(':
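Windows text files end lines with "\r\n", so a tokenizer that only skips ' ' and '\t' leaves a stray carriage return attached to the last token on each line. The standalone sketch below is not the repository's Tokenize; the words helper and its skipCR flag are made up purely to illustrate what the added '\r' case changes.

package main

import "fmt"

// words is a throwaway helper, not the repository's tokenizer: it splits a
// buffer on whitespace so the effect of treating '\r' as whitespace is easy
// to see in isolation.
func words(buffer []byte, skipCR bool) []string {
	isSpace := func(b byte) bool {
		if b == ' ' || b == '\t' || b == '\n' {
			return true
		}
		return skipCR && b == '\r'
	}
	var out []string
	start := -1
	for i := 0; i < len(buffer); i++ {
		if isSpace(buffer[i]) {
			if start >= 0 {
				out = append(out, string(buffer[start:i]))
				start = -1
			}
		} else if start < 0 {
			start = i
		}
	}
	if start >= 0 {
		out = append(out, string(buffer[start:]))
	}
	return out
}

func main() {
	input := []byte("foo bar\r\nbaz")           // a file saved with Windows line endings
	fmt.Printf("%q\n", words(input, false))     // ["foo" "bar\r" "baz"]: stray carriage return
	fmt.Printf("%q\n", words(input, true))      // ["foo" "bar" "baz"]: '\r' skipped like ' ' and '\t'
}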