-
Notifications
You must be signed in to change notification settings - Fork 38
/
haxe.go
109 lines (87 loc) · 2.25 KB
/
haxe.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
package deps
import (
"fmt"
"io"
"os"
"regexp"
"strings"
"github.com/wakatime/wakatime-cli/pkg/log"
"github.com/alecthomas/chroma/v2"
"github.com/alecthomas/chroma/v2/lexers"
)
// haxeExcludeRegex matches the Haxe standard library name itself
// (case-insensitive), which is filtered out of reported dependencies.
var haxeExcludeRegex = regexp.MustCompile(`(?i)^haxe$`)
// StateHaxe is a token parsing state.
type StateHaxe int

// Token parsing states for the Haxe dependency parser.
const (
	// StateHaxeUnknown represents an unknown token parsing state.
	StateHaxeUnknown StateHaxe = iota
	// StateHaxeImport means we are in import section during token parsing.
	StateHaxeImport
)
// ParserHaxe is a dependency parser for the Haxe programming language.
// It is not thread safe.
type ParserHaxe struct {
	// State tracks whether the previously seen token opened an import statement.
	State StateHaxe
	// Output accumulates the dependency names found during parsing.
	Output []string
}
// Parse parses dependencies from Haxe file content using the chroma Haxe lexer.
// It returns the list of imported module names found in the file at filepath.
// Errors are wrapped with %w so callers can inspect them via errors.Is/As;
// the rendered messages are unchanged from the previous %s formatting.
func (p *ParserHaxe) Parse(filepath string) ([]string, error) {
	reader, err := os.Open(filepath) // nolint:gosec
	if err != nil {
		return nil, fmt.Errorf("failed to open file %q: %w", filepath, err)
	}

	defer func() {
		if err := reader.Close(); err != nil {
			log.Debugf("failed to close file: %s", err)
		}
	}()

	// Reset parser state before parsing, and again afterwards so the
	// parser can be reused. The returned slice is copied into the return
	// value before the deferred reset replaces p.Output, so the caller's
	// result is unaffected.
	p.init()
	defer p.init()

	data, err := io.ReadAll(reader)
	if err != nil {
		return nil, fmt.Errorf("failed to read from reader: %w", err)
	}

	iter, err := lexers.Haxe.Tokenise(nil, string(data))
	if err != nil {
		return nil, fmt.Errorf("failed to tokenize file content: %w", err)
	}

	for _, token := range iter.Tokens() {
		p.processToken(token)
	}

	return p.Output, nil
}
// append records dep as a parsed dependency, after trimming surrounding
// whitespace and skipping names matched by haxeExcludeRegex.
func (p *ParserHaxe) append(dep string) {
	trimmed := strings.TrimSpace(dep)

	if !haxeExcludeRegex.MatchString(trimmed) {
		p.Output = append(p.Output, trimmed)
	}
}
// init resets the parser to its initial state with an empty output list.
func (p *ParserHaxe) init() {
	p.State, p.Output = StateHaxeUnknown, []string{}
}
// processToken dispatches a single lexer token to the matching handler.
// Text tokens (whitespace) are ignored so they do not interrupt an import
// statement; any other unhandled token type resets the parsing state.
func (p *ParserHaxe) processToken(token chroma.Token) {
	switch token.Type {
	case chroma.KeywordNamespace:
		p.processKeywordNamespace(token.Value)
	case chroma.NameNamespace:
		p.processNameNamespace(token.Value)
	case chroma.Text:
		// keep current state across whitespace
	default:
		p.State = StateHaxeUnknown
	}
}
// processKeywordNamespace updates the state when a namespace keyword is seen:
// an "import" keyword enters the import state, anything else leaves it.
func (p *ParserHaxe) processKeywordNamespace(value string) {
	p.State = StateHaxeUnknown
	if value == "import" {
		p.State = StateHaxeImport
	}
}
// processNameNamespace records value as a dependency when it directly
// follows an import keyword, then always resets the parsing state.
func (p *ParserHaxe) processNameNamespace(value string) {
	importing := p.State == StateHaxeImport
	p.State = StateHaxeUnknown

	if importing {
		p.append(value)
	}
}