Improved tokenizer

2025-03-19 22:56:17 +01:00
parent 7f86dc6747
commit bb460c918c
2 changed files with 8 additions and 6 deletions

View File

@@ -15,8 +15,8 @@ func (s *Scanner) scanFile(path string, pkg string) error {
 		return err
 	}
-	defer reader.File.Close()
 	tokens := token.Tokenize(&reader)
+	reader.File.Close()
 	file := &fs.File{
 		Path: path,
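
The scanner previously held the file open via defer for the whole of scanFile; after this change the descriptor is released as soon as Tokenize has consumed it. A minimal sketch of that pattern using only the standard library (scanOne and io.ReadAll are stand-ins for the repository's reader and token.Tokenize, which are not shown here):

package main

import (
	"fmt"
	"io"
	"os"
)

func scanOne(path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}

	// Stand-in for token.Tokenize(&reader): consume the file contents.
	data, err := io.ReadAll(f)

	// The descriptor is only needed while tokenizing, so close it right away
	// instead of deferring the Close until the rest of the scan (building the
	// file entry, etc.) has finished.
	f.Close()

	if err != nil {
		return err
	}

	fmt.Printf("%s: %d bytes scanned\n", path, len(data))
	return nil
}

func main() {
	if err := scanOne("main.go"); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}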

View File

@@ -37,7 +37,8 @@ func (t *Reader) read() error {
 		return nil
 	}
-	n, err := t.File.Read(t.Buffer[len(t.Buffer):cap(t.Buffer)])
+	end := min(len(t.Buffer)+4096, cap(t.Buffer))
+	n, err := t.File.Read(t.Buffer[len(t.Buffer):end])
 	t.Buffer = t.Buffer[:len(t.Buffer)+n]
 	if err != nil {
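
read previously handed the entire unused capacity of the buffer to File.Read; it now caps each read at 4096 bytes. A minimal sketch of that bounded read, using a bytes.Reader in place of the *os.File so it runs without touching the filesystem (the 4096-byte step and the min builtin, Go 1.21+, are taken from the diff; the loop around it is an assumption):

package main

import (
	"bytes"
	"fmt"
)

func main() {
	src := bytes.NewReader(make([]byte, 10_000)) // stand-in for the open file
	buf := make([]byte, 0, 10_001)               // capacity sized to the whole input

	for {
		// Read into the unused capacity, but never more than 4096 bytes per
		// call, even though the backing array could hold the entire remainder.
		end := min(len(buf)+4096, cap(buf))
		n, err := src.Read(buf[len(buf):end])
		buf = buf[:len(buf)+n]
		fmt.Printf("read %d bytes, buffered %d\n", n, len(buf))
		if err != nil {
			break // io.EOF once the input is exhausted
		}
	}
}

Capping the step keeps individual reads bounded even when the buffer is pre-sized to a large file, as it is after the Open change below.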
@@ -56,20 +57,21 @@ func (t *Reader) read() error {
 	return nil
 }

-func (t *Reader) Open(path string) (err error) {
-	t.File, err = os.Open(path)
+func (t *Reader) Open(path string) error {
+	f, err := os.Open(path)
 	if err != nil {
 		return err
 	}
-	info, err := t.File.Stat()
+	info, err := f.Stat()
 	if err != nil {
 		return err
 	}
+	t.File = f
 	t.Size = Position(info.Size())
-	t.Buffer = make([]byte, 0, 4096)
+	t.Buffer = make([]byte, 0, t.Size+1)
 	return t.read()
 }
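
Open now stores the *os.File on the struct only after Stat has succeeded, and pre-sizes the buffer to the whole file plus one spare byte instead of a fixed 4096 bytes. A sketch of the type after this change (Position and the File/Size/Buffer fields follow the diff; the single-chunk read and main are assumptions added so the sketch compiles):

package main

import (
	"fmt"
	"io"
	"os"
)

type Position int

type Reader struct {
	File   *os.File
	Size   Position
	Buffer []byte
}

// read is a single-chunk stand-in for the fuller read in the diff: it grows
// the buffer by at most 4096 bytes per call.
func (t *Reader) read() error {
	end := min(len(t.Buffer)+4096, cap(t.Buffer))
	n, err := t.File.Read(t.Buffer[len(t.Buffer):end])
	t.Buffer = t.Buffer[:len(t.Buffer)+n]
	return err
}

// Open stores the *os.File on the struct only once Stat has succeeded, and
// pre-allocates the buffer with room for the whole file plus one byte.
func (t *Reader) Open(path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}

	info, err := f.Stat()
	if err != nil {
		return err
	}

	t.File = f
	t.Size = Position(info.Size())
	t.Buffer = make([]byte, 0, t.Size+1)
	return t.read()
}

func main() {
	var r Reader
	if err := r.Open("main.go"); err != nil && err != io.EOF {
		panic(err)
	}
	defer r.File.Close()
	fmt.Println("buffered", len(r.Buffer), "of", r.Size, "bytes")
}

Sizing the buffer to the file means read never has to reallocate; the extra byte of capacity presumably keeps the final read window non-empty so the reader can observe io.EOF rather than a zero-length read.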