{ editorconfigparser.pas }
unit EditorConfigParser;
{$mode delphi}
interface
uses
SysUtils, Classes, Recognizers, Parsers, TokenTypes;
function CreateRecognizers: TTokenRecognizers;
function CreateParser: TParser<TFmt>;
implementation
uses Types, Tokenizers, ParseResult, TokenParsers, CodeFmtParsers;
(* Recognizers *)
type
  // Lexical token kinds produced by the recognizers. Declaration order is
  // significant: CreateRecognizers indexes its result array by Ord(TTokenType),
  // and ttUnknown (the any-character fallback) is presumably tried last by the
  // tokenizer — confirm against the Tokenizers unit.
  TTokenType = (ttEol, ttWhiteSpace, ttDigits, ttPound, ttLeftBracket, ttRightBracket, ttIdentifier, ttUnknown);
  TTokenTypeSet = set of TTokenType;
const
  // The full range of token types; used for sizing and for set arithmetic
  // (e.g. NoEol's AllTokenTypes - [ttEol]).
  AllTokenTypes: TTokenTypeSet = [ttEol..ttUnknown];
function CreateRecognizer(TokenType: TTokenType): TTokenRecognizer;
begin
  // Build the character-level recognizer for a single token type.
  case TokenType of
    ttEol: Exit(TNewLineRecognizer.Create);
    ttWhiteSpace: Exit(TPredicateRecognizer.Create(IsWhiteSpace));
    ttDigits: Exit(TPredicateRecognizer.Create(IsDigit));
    ttPound: Exit(TSingleCharRecognizer.Create('#'));
    ttLeftBracket: Exit(TSingleCharRecognizer.Create('['));
    ttRightBracket: Exit(TSingleCharRecognizer.Create(']'));
    ttIdentifier: Exit(IdentifierRecognizer);
    ttUnknown: Exit(TAnyRecognizer.Create);
  end;
  // Defensive: unreachable while the case above covers every enum value.
  raise Exception.Create('Unknown token type');
end;
function CreateRecognizers: TTokenRecognizers;
var
  tt: TTokenType;
begin
  // One recognizer per token type, stored at index Ord(tt).
  // Equivalent to iterating AllTokenTypes, which spans the whole enum.
  SetLength(Result, Ord(High(TTokenType)) - Ord(Low(TTokenType)) + 1);
  for tt := Low(TTokenType) to High(TTokenType) do
    Result[Ord(tt)] := CreateRecognizer(tt);
end;
(* Parsers *)
(* TokenTypeFilterParser *)
function FilterToken(TokenType: TTokenType): TParser<TToken>;
begin
  // Parser accepting exactly one token of the given lexical type.
  Exit(FilterTokenType(Ord(TokenType)));
end;
function FilterTokens(TokenTypes: TTokenTypeSet): TParser<TToken>;
var
  x: TByteDynArray;
  TokenType: TTokenType;
  Count: Integer;
begin
  // Parser accepting one token whose type is any member of the given set.
  // Pre-allocate for the worst case (every token type present) instead of
  // growing the array by one element per iteration, which reallocates
  // repeatedly inside the loop.
  SetLength(x, Ord(High(TTokenType)) - Ord(Low(TTokenType)) + 1);
  Count := 0;
  for TokenType in TokenTypes do
  begin
    x[Count] := Ord(TokenType);
    Inc(Count);
  end;
  // Shrink once to the number of members actually found (0 for an empty set).
  SetLength(x, Count);
  Result := FilterTokenTypes(x);
end;
(* Simple Parser maps tokens almost as-is from one enum to another *)
// Parses a single token of the given lexical type and re-tags it with the
// given higher-level formatting token type.
function SimpleParser(TokenType: TTokenType; HigherTokenType: THigherTokenType): TParser<TFmt>; overload;
begin
  Result := TListToFmtMapper.Create(MapTokenToList(FilterToken(TokenType)), HigherTokenType);
end;
function SimpleParser: TParser<TFmt>; overload;
const
  // Parallel tables: each lexical token type maps 1:1 onto a higher-level
  // formatting token type at the same index.
  SourceTokens: array[0..4] of TTokenType = (ttEol, ttWhiteSpace, ttDigits, ttIdentifier, ttUnknown);
  DestTokens: array[0..4] of THigherTokenType = (htCRLF, htSpace, htNumber, htIdentifier, htUnknown);
var
  Index: Integer;
begin
  // Fold the pairs into one OrElse chain, in table order. OrElse<TFmt> is
  // assumed to treat a nil left operand as "just the right parser" — the
  // first iteration relies on that.
  Result := nil;
  for Index := Low(SourceTokens) to High(SourceTokens) do
    Result := OrElse<TFmt>(Result, SimpleParser(SourceTokens[Index], DestTokens[Index]));
end;
// [section]
// Matches a '[' ... ']' section header (e.g. "[*.pas]") and tags the whole
// match as htDirective. The inner tokens may be whitespace, digits,
// identifiers or unknown characters — but not '#', '[', ']' or end-of-line,
// since those types are excluded from the inner filter set.
function SectionParser: TParser<TFmt>;
begin
  Result := TListToFmtMapper.Create(
    Seq(
      Seq(
        FilterToken(ttLeftBracket),
        ManyTokens(FilterTokens([ttWhiteSpace, ttDigits, ttIdentifier, ttUnknown]))
      ),
      FilterToken(ttRightBracket)
    ),
    htDirective
  );
end;
// Consumes a run of tokens of any type except end-of-line (presumably
// zero-or-more — confirm ManyTokens' semantics in TokenParsers).
function NoEol: TParser<TTokenLinkedList>;
begin
  Result := ManyTokens(FilterTokens(AllTokenTypes - [ttEol]));
end;
// A comment is a '#' followed by the rest of the line (everything up to but
// not including the end-of-line token); the whole run is tagged htComment.
function CommentParser: TParser<TFmt>;
begin
  Result := TListToFmtMapper.Create(
    Seq(FilterToken(ttPound), NoEol),
    htComment
  );
end;
function CreateParser: TParser<TFmt>;
var
  Parser: TParser<TFmt>;
begin
  // Top-level parser for .editorconfig content. Alternatives are tried in
  // order: comments first, then [section] headers, then the plain token
  // mapper (whose ttUnknown case acts as the catch-all).
  Parser := CommentParser;
  Parser := Parser.OrElse(SectionParser);
  Result := Parser.OrElse(SimpleParser);
end;
end.