Reduced number of tokens processed
parent 57c9fc22d1
commit 1c27f0cad2
2 changed files with 9 additions and 15 deletions
@@ -176,12 +176,12 @@ func scanFile(path string, functions chan<- *Function) error {
 	for i < len(tokens) {
 		if tokens[i].Kind == token.BlockStart {
 			blockLevel++
+			i++

 			if blockLevel == 1 {
 				bodyStart = i
 			}

-			i++
 			continue
 		}

@@ -225,7 +225,7 @@ func scanFile(path string, functions chan<- *Function) error {
 		Name: tokens[nameStart].Text(),
 		File: file,
 		Head: tokens[paramsStart:bodyStart],
-		Body: tokens[bodyStart : i+1],
+		Body: tokens[bodyStart:i],
 		Variables: map[string]*Variable{},
 		CPU: cpu.CPU{
 			General: x64.GeneralRegisters,
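Read together, the two hunks appear to move i++ ahead of the bodyStart assignment and drop the +1 from the Body slice, so a Function's Body no longer carries its opening and closing brace tokens and later passes scan two tokens fewer per function. A minimal, self-contained Go sketch of that effect (the token values below are invented for illustration, not taken from the repository):

// Sketch: how the new slicing drops the brace tokens from a function body.
package main

import "fmt"

func main() {
	// Token stream for: func f() { x = 1 }
	tokens := []string{"func", "f", "(", ")", "{", "x", "=", "1", "}"}
	i := 8 // index of the closing "}" where the block level drops back to zero

	// Before this commit: bodyStart pointed at "{" (index 4) and the slice kept "}" too.
	oldBody := tokens[4 : i+1]
	// After this commit: i++ runs first, so bodyStart is 5 and the slice stops before "}".
	newBody := tokens[5:i]

	fmt.Println(oldBody) // [{ x = 1 }]
	fmt.Println(newBody) // [x = 1]
}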