From 3268f7a7ee480cd124244805725b9f17d99a1d41 Mon Sep 17 00:00:00 2001
From: Eduard Urbach
Date: Wed, 26 Jun 2024 18:49:03 +0200
Subject: [PATCH] Added tokenizer benchmark

---
 src/build/token/Token_test.go | 36 +++++++++++++++++++++++++++++++++++
 src/build/token/Tokenize.go   |  7 ++++++-
 src/build/token/bench_test.go | 15 +++++++++++++++
 3 files changed, 57 insertions(+), 1 deletion(-)
 create mode 100644 src/build/token/bench_test.go

diff --git a/src/build/token/Token_test.go b/src/build/token/Token_test.go
index b083cfd..6bd1065 100644
--- a/src/build/token/Token_test.go
+++ b/src/build/token/Token_test.go
@@ -137,6 +137,42 @@ func TestNumber(t *testing.T) {
 	})
 }
 
+func TestOperator(t *testing.T) {
+	tokens := token.Tokenize([]byte(`+ - * / ==`))
+	assert.DeepEqual(t, tokens, token.List{
+		{
+			Kind:     token.Operator,
+			Bytes:    []byte("+"),
+			Position: 0,
+		},
+		{
+			Kind:     token.Operator,
+			Bytes:    []byte("-"),
+			Position: 2,
+		},
+		{
+			Kind:     token.Operator,
+			Bytes:    []byte("*"),
+			Position: 4,
+		},
+		{
+			Kind:     token.Operator,
+			Bytes:    []byte("/"),
+			Position: 6,
+		},
+		{
+			Kind:     token.Operator,
+			Bytes:    []byte("=="),
+			Position: 8,
+		},
+		{
+			Kind:     token.EOF,
+			Bytes:    nil,
+			Position: 10,
+		},
+	})
+}
+
 func TestSeparator(t *testing.T) {
 	tokens := token.Tokenize([]byte("a,b,c"))
 	assert.DeepEqual(t, tokens, token.List{
diff --git a/src/build/token/Tokenize.go b/src/build/token/Tokenize.go
index 0e51758..a8e4acd 100644
--- a/src/build/token/Tokenize.go
+++ b/src/build/token/Tokenize.go
@@ -144,5 +144,10 @@ func isNumber(c byte) bool {
 }
 
 func isOperator(c byte) bool {
-	return c == '=' || c == ':' || c == '+' || c == '-' || c == '*' || c == '/' || c == '<' || c == '>' || c == '!' || c == '&' || c == '|' || c == '^' || c == '%' || c == '.'
+	switch c {
+	case '=', ':', '.', '+', '-', '*', '/', '<', '>', '&', '|', '^', '%', '!':
+		return true
+	default:
+		return false
+	}
 }
diff --git a/src/build/token/bench_test.go b/src/build/token/bench_test.go
new file mode 100644
index 0000000..b6db849
--- /dev/null
+++ b/src/build/token/bench_test.go
@@ -0,0 +1,15 @@
+package token_test
+
+import (
+	"testing"
+
+	"git.akyoto.dev/cli/q/src/build/token"
+)
+
+func BenchmarkTokenize(b *testing.B) {
+	input := []byte("hello := 123\nworld := 456")
+
+	for i := 0; i < b.N; i++ {
+		token.Tokenize(input)
+	}
+}
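
Editor's note, not part of the patch itself: assuming a standard Go toolchain
is installed, the new benchmark can be run from the repository root with

    go test -bench=BenchmarkTokenize -benchmem ./src/build/token

The -benchmem flag additionally reports allocations per iteration, which is
useful for a tokenizer hot path. The switch-based isOperator is
behavior-identical to the former chain of fourteen == comparisons; grouping
the byte values in a single switch reads more clearly and leaves the compiler
free to lower it to sequential tests, range checks, or a jump table.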