Improved tokenizer

2025-03-19 22:56:17 +01:00
parent 7f86dc6747
commit bb460c918c
2 changed files with 8 additions and 6 deletions


@@ -15,8 +15,8 @@ func (s *Scanner) scanFile(path string, pkg string) error {
 		return err
 	}
-	defer reader.File.Close()
 	tokens := token.Tokenize(&reader)
+	reader.File.Close()
 	file := &fs.File{
 		Path: path,
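
The deleted defer kept the source file open until scanFile returned; the new code closes it as soon as Tokenize has consumed it. A minimal standalone sketch of that close-early pattern, with hypothetical names (countLines, the go.mod path) that are not part of this repo:

package main

import (
	"bufio"
	"fmt"
	"os"
)

// countLines opens a file, drains it, and closes it right after the last
// read instead of deferring Close to function exit. When a caller loops
// over many files inside one call, this keeps at most one descriptor
// open at a time rather than accumulating deferred closes.
func countLines(path string) (int, error) {
	f, err := os.Open(path)
	if err != nil {
		return 0, err
	}
	n := 0
	sc := bufio.NewScanner(f)
	for sc.Scan() {
		n++
	}
	f.Close() // done reading: release the handle now, not at return
	return n, sc.Err()
}

func main() {
	n, err := countLines("go.mod")
	fmt.Println(n, err)
}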


@@ -37,7 +37,8 @@ func (t *Reader) read() error {
 		return nil
 	}
-	n, err := t.File.Read(t.Buffer[len(t.Buffer):cap(t.Buffer)])
+	end := min(len(t.Buffer)+4096, cap(t.Buffer))
+	n, err := t.File.Read(t.Buffer[len(t.Buffer):end])
 	t.Buffer = t.Buffer[:len(t.Buffer)+n]
 	if err != nil {
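
The old slice expression handed a single Read the buffer's entire free capacity; the new end bound caps each call at 4096 bytes (min here is the builtin added in Go 1.21). A self-contained sketch of the resulting loop, with hypothetical names (readAll) and under the assumption that the file does not grow between Stat and the reads:

package main

import (
	"fmt"
	"io"
	"os"
)

// readAll fills buf (a slice with spare capacity) from f in chunks of at
// most 4096 bytes per Read call, extending the slice length by whatever
// each call actually returned.
func readAll(f *os.File, buf []byte) ([]byte, error) {
	for {
		end := min(len(buf)+4096, cap(buf)) // never ask for more than 4 KiB
		n, err := f.Read(buf[len(buf):end])
		buf = buf[:len(buf)+n]
		if err == io.EOF {
			return buf, nil
		}
		if err != nil {
			return buf, err
		}
	}
}

func main() {
	f, err := os.Open("go.mod")
	if err != nil {
		panic(err)
	}
	defer f.Close()
	info, err := f.Stat()
	if err != nil {
		panic(err)
	}
	// One spare byte of capacity lets the final Read return io.EOF
	// instead of being called with a zero-length slice.
	buf, err := readAll(f, make([]byte, 0, info.Size()+1))
	fmt.Println(len(buf), err)
}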
@@ -56,20 +57,21 @@ func (t *Reader) read() error {
 	return nil
 }
 
-func (t *Reader) Open(path string) (err error) {
-	t.File, err = os.Open(path)
+func (t *Reader) Open(path string) error {
+	f, err := os.Open(path)
 	if err != nil {
 		return err
 	}
-	info, err := t.File.Stat()
+	info, err := f.Stat()
 	if err != nil {
 		return err
 	}
+	t.File = f
 	t.Size = Position(info.Size())
-	t.Buffer = make([]byte, 0, 4096)
+	t.Buffer = make([]byte, 0, t.Size+1)
 	return t.read()
 }
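
After this hunk, Open works on a local f and assigns t.File only once Stat has succeeded, so a failed Open leaves the Reader untouched, and the buffer is pre-allocated to the whole file instead of a fixed 4096 bytes. The extra byte of capacity plausibly exists so the chunked read above can observe io.EOF on its final call; that reading is an inference, not stated by the commit. A standalone sketch of the construct-then-commit pattern, with lowercase hypothetical names rather than the repo's Reader:

package main

import (
	"fmt"
	"os"
)

type reader struct {
	file   *os.File
	size   int64
	buffer []byte
}

// open does all fallible work on locals and assigns to the receiver only
// after every step has succeeded, so a failed open leaves the struct in
// its previous state.
func (r *reader) open(path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	info, err := f.Stat()
	if err != nil {
		f.Close() // not in the commit: release the handle if Stat fails
		return err
	}
	r.file = f
	r.size = info.Size()
	// Capacity is the whole file plus one spare byte, so a chunked read
	// loop can hit io.EOF on its last call (inference, see read above).
	r.buffer = make([]byte, 0, r.size+1)
	return nil
}

func main() {
	var r reader
	fmt.Println(r.open("go.mod"), r.size)
}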