diff --git a/examples/hello/hello.q b/examples/hello/hello.q
index c006280..1da41c1 100644
--- a/examples/hello/hello.q
+++ b/examples/hello/hello.q
@@ -1,3 +1,4 @@
+// Comment
 main() {
 	address := 4194304 + 1
 	length := (0 + 50 - 20) * 10 / 100
@@ -7,10 +8,12 @@ main() {
 	}
 }
 
+// Comment
 print(address, length) {
 	write(length-2, address, length)
 }
 
+// Comment
 write(fd, address, length) {
 	syscall(1, fd, address, length)
 }
\ No newline at end of file
diff --git a/src/build/token/Token_test.go b/src/build/token/Token_test.go
index 6bd1065..5616634 100644
--- a/src/build/token/Token_test.go
+++ b/src/build/token/Token_test.go
@@ -209,6 +209,93 @@ func TestSeparator(t *testing.T) {
 	})
 }
 
+func TestComment(t *testing.T) {
+	tokens := token.Tokenize([]byte("// Hello\n// World"))
+	assert.DeepEqual(t, tokens, token.List{
+		{
+			Kind:     token.Comment,
+			Bytes:    []byte(`// Hello`),
+			Position: 0,
+		},
+		{
+			Kind:     token.NewLine,
+			Bytes:    []byte("\n"),
+			Position: 8,
+		},
+		{
+			Kind:     token.Comment,
+			Bytes:    []byte(`// World`),
+			Position: 9,
+		},
+		{
+			Kind:     token.EOF,
+			Bytes:    nil,
+			Position: 17,
+		},
+	})
+
+	tokens = token.Tokenize([]byte("// Hello\n"))
+	assert.DeepEqual(t, tokens, token.List{
+		{
+			Kind:     token.Comment,
+			Bytes:    []byte(`// Hello`),
+			Position: 0,
+		},
+		{
+			Kind:     token.NewLine,
+			Bytes:    []byte("\n"),
+			Position: 8,
+		},
+		{
+			Kind:     token.EOF,
+			Bytes:    nil,
+			Position: 9,
+		},
+	})
+
+	tokens = token.Tokenize([]byte(`// Hello`))
+	assert.DeepEqual(t, tokens, token.List{
+		{
+			Kind:     token.Comment,
+			Bytes:    []byte(`// Hello`),
+			Position: 0,
+		},
+		{
+			Kind:     token.EOF,
+			Bytes:    nil,
+			Position: 8,
+		},
+	})
+
+	tokens = token.Tokenize([]byte(`//`))
+	assert.DeepEqual(t, tokens, token.List{
+		{
+			Kind:     token.Comment,
+			Bytes:    []byte(`//`),
+			Position: 0,
+		},
+		{
+			Kind:     token.EOF,
+			Bytes:    nil,
+			Position: 2,
+		},
+	})
+
+	tokens = token.Tokenize([]byte(`/`))
+	assert.DeepEqual(t, tokens, token.List{
+		{
+			Kind:     token.Operator,
+			Bytes:    []byte(`/`),
+			Position: 0,
+		},
+		{
+			Kind:     token.EOF,
+			Bytes:    nil,
+			Position: 1,
+		},
+	})
+}
+
 func TestString(t *testing.T) {
 	tokens := token.Tokenize([]byte(`"Hello" "World"`))
 	assert.DeepEqual(t, tokens, token.List{
diff --git a/src/build/token/Tokenize.go b/src/build/token/Tokenize.go
index 4c4223f..bd2d4c6 100644
--- a/src/build/token/Tokenize.go
+++ b/src/build/token/Tokenize.go
@@ -75,6 +75,23 @@ func Tokenize(buffer []byte) List {
 		case '\n':
 			tokens = append(tokens, Token{NewLine, i, newLineBytes})
 
+		// Comment
+		case '/':
+			if i+1 >= len(buffer) || buffer[i+1] != '/' {
+				tokens = append(tokens, Token{Operator, i, buffer[i : i+1]})
+				i++
+				continue
+			}
+
+			position := i
+
+			for i < len(buffer) && buffer[i] != '\n' {
+				i++
+			}
+
+			tokens = append(tokens, Token{Comment, position, buffer[position:i]})
+			continue
+
 		default:
 			// Identifiers
 			if isIdentifierStart(buffer[i]) {