Improved tokenizer performance

This commit is contained in:
Eduard Urbach 2024-07-24 11:55:35 +02:00
parent 26dfe5b5eb
commit ee563230a8
Signed by: eduard
GPG key ID: 49226B848C78F6C8

View file

@ -1,7 +1,5 @@
 package token
-
-import "bytes"
 
 // Tokenize turns the file contents into a list of tokens.
 func Tokenize(buffer []byte) List {
 	var (
@ -87,25 +85,16 @@ func Tokenize(buffer []byte) List {
 			identifier := buffer[position:i]
 			kind := Identifier
 
-			switch identifier[0] {
-			case 'i':
-				switch {
-				case bytes.Equal(identifier, []byte("if")):
-					kind = If
-				case bytes.Equal(identifier, []byte("import")):
-					kind = Import
-				}
-			case 'l':
-				switch {
-				case bytes.Equal(identifier, []byte("loop")):
-					kind = Loop
-				}
-			case 'r':
-				switch {
-				case bytes.Equal(identifier, []byte("return")):
-					kind = Return
-				}
+			switch string(identifier) {
+			case "if":
+				kind = If
+			case "import":
+				kind = Import
+			case "loop":
+				kind = Loop
+			case "return":
+				kind = Return
 			}
 
 			tokens = append(tokens, Token{Kind: kind, Position: position, Length: Length(len(identifier))})
 			continue