Improved tokenizer performance
parent 26dfe5b5eb
commit ee563230a8
1 changed file with 9 additions and 20 deletions
@@ -1,7 +1,5 @@
 package token
 
-import "bytes"
-
 // Tokenize turns the file contents into a list of tokens.
 func Tokenize(buffer []byte) List {
 	var (
@@ -87,24 +85,15 @@ func Tokenize(buffer []byte) List {
 			identifier := buffer[position:i]
 			kind := Identifier
 
-			switch identifier[0] {
-			case 'i':
-				switch {
-				case bytes.Equal(identifier, []byte("if")):
-					kind = If
-				case bytes.Equal(identifier, []byte("import")):
-					kind = Import
-				}
-			case 'l':
-				switch {
-				case bytes.Equal(identifier, []byte("loop")):
-					kind = Loop
-				}
-			case 'r':
-				switch {
-				case bytes.Equal(identifier, []byte("return")):
-					kind = Return
-				}
+			switch string(identifier) {
+			case "if":
+				kind = If
+			case "import":
+				kind = Import
+			case "loop":
+				kind = Loop
+			case "return":
+				kind = Return
 			}
 
 			tokens = append(tokens, Token{Kind: kind, Position: position, Length: Length(len(identifier))})
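Why the simpler form is also the faster one: with the gc compiler, a []byte-to-string conversion used directly as a switch operand or in a comparison does not allocate, so switch string(identifier) compares the identifier bytes in place. That is what lets the change drop both the bytes import and the hand-rolled first-byte dispatch. A minimal sketch of the pattern (the names below are illustrative, not taken from this repository):

package main

import "fmt"

// keywordOrIdentifier classifies a raw identifier. With the gc
// compiler, string(b) used directly as a switch operand does not
// allocate: the cases compile down to in-place byte comparisons.
func keywordOrIdentifier(b []byte) string {
	switch string(b) {
	case "if", "import", "loop", "return":
		return "keyword"
	}
	return "identifier"
}

func main() {
	fmt.Println(keywordOrIdentifier([]byte("import"))) // keyword
	fmt.Println(keywordOrIdentifier([]byte("count")))  // identifier
}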
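An allocation-aware micro-benchmark is one way to verify the claimed improvement. This is a sketch under the assumption that it sits next to the changed file in package token; the input program is invented:

package token

import "testing"

// BenchmarkTokenize measures throughput and allocations of the
// tokenizer on a small input that exercises every keyword case.
func BenchmarkTokenize(b *testing.B) {
	input := []byte("import io\nloop {\n\tif done {\n\t\treturn\n\t}\n}\n")
	b.SetBytes(int64(len(input)))
	b.ReportAllocs()

	for i := 0; i < b.N; i++ {
		Tokenize(input)
	}
}

Running it with go test -bench=Tokenize -benchmem before and after the change would show the difference in ns/op and allocs/op.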