diff --git a/src/build/token/Tokenize.go b/src/build/token/Tokenize.go
index 05fa306..23bf106 100644
--- a/src/build/token/Tokenize.go
+++ b/src/build/token/Tokenize.go
@@ -1,7 +1,5 @@
 package token
 
-import "bytes"
-
 // Tokenize turns the file contents into a list of tokens.
 func Tokenize(buffer []byte) List {
 	var (
@@ -87,24 +85,15 @@ func Tokenize(buffer []byte) List {
 			identifier := buffer[position:i]
 			kind := Identifier
 
-			switch identifier[0] {
-			case 'i':
-				switch {
-				case bytes.Equal(identifier, []byte("if")):
-					kind = If
-				case bytes.Equal(identifier, []byte("import")):
-					kind = Import
-				}
-			case 'l':
-				switch {
-				case bytes.Equal(identifier, []byte("loop")):
-					kind = Loop
-				}
-			case 'r':
-				switch {
-				case bytes.Equal(identifier, []byte("return")):
-					kind = Return
-				}
+			switch string(identifier) {
+			case "if":
+				kind = If
+			case "import":
+				kind = Import
+			case "loop":
+				kind = Loop
+			case "return":
+				kind = Return
 			}
 
 			tokens = append(tokens, Token{Kind: kind, Position: position, Length: Length(len(identifier))})