1"""
2match.py - lexer primitives, implemented with re2c or Python regexes.
3"""
4
5from _devbuild.gen.id_kind_asdl import Id, Id_t
6from _devbuild.gen.types_asdl import lex_mode_t
7from frontend import lexer_def
8
9from typing import Tuple, Callable, Dict, List, Any, TYPE_CHECKING
10
11# bin/osh should work without compiling fastlex? But we want all the unit
12# tests to run with a known version of it.
13try:
14 import fastlex
15except ImportError:
16 fastlex = None
17
18if fastlex:
19 re = None # re module isn't in CPython slice
20else:
21 import re # type: ignore
22
23if TYPE_CHECKING:
24 SRE_Pattern = Any # Do we need a .pyi file for re or _sre?
25 SimpleMatchFunc = Callable[[str, int], Tuple[Id_t, int]]
26 LexerPairs = List[Tuple[SRE_Pattern, Id_t]]
27
28
29def _LongestMatch(re_list, line, start_pos):
30 # type: (LexerPairs, str, int) -> Tuple[Id_t, int]
31
32 # Simulate the rule for \x00, which we generate in frontend/match.re2c.h
33 if start_pos >= len(line):
34 return Id.Eol_Tok, start_pos
35 # Simulate C-style string handling: \x00 is empty string.
36 if line[start_pos] == '\0':
37 return Id.Eol_Tok, start_pos
38
39 matches = []
40 for regex, tok_type in re_list:
41 m = regex.match(line, start_pos) # left-anchored
42 if m:
43 matches.append((m.end(0), tok_type, m.group(0)))
44 if not matches:
45 raise AssertionError('no match at position %d: %r' % (start_pos, line))
46 end_pos, tok_type, tok_val = max(matches, key=lambda m: m[0])
47 #util.log('%s %s', tok_type, end_pos)
48 return tok_type, end_pos
49
50
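# A sketch of the longest-match rule above, with hypothetical patterns: given
#
#   re_list = [(re.compile('if'), Id.KW_If),
#              (re.compile('[a-z]+'), Id.Lit_Chars)]
#
# and line = 'ifs', both regexes match at position 0, but '[a-z]+' consumes
# 3 chars versus 2, so _LongestMatch() returns (Id.Lit_Chars, 3).

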
def _CompileAll(pat_list):
    # type: (List[Tuple[bool, str, Id_t]]) -> LexerPairs
    result = []
    for is_regex, pat, token_id in pat_list:
        if not is_regex:
            pat = re.escape(pat)  # type: ignore # turn $ into \$
        result.append((re.compile(pat), token_id))  # type: ignore
    return result


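# Each entry in pat_list is (is_regex, pat, token_id).  A hypothetical
# literal entry like (False, '$', Id.Lit_Dollar) is escaped to the regex \$,
# while a regex entry like (True, '[a-z]+', ...) is compiled as-is.

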
class _MatchOshToken_Slow(object):
    """An abstract matcher that doesn't depend on OSH."""

    def __init__(self, lexer_def):
        # type: (Dict[lex_mode_t, List[Tuple[bool, str, Id_t]]]) -> None
        self.lexer_def = {}  # type: Dict[lex_mode_t, LexerPairs]
        for lex_mode, pat_list in lexer_def.items():
            self.lexer_def[lex_mode] = _CompileAll(pat_list)

    def __call__(self, lex_mode, line, start_pos):
        # type: (lex_mode_t, str, int) -> Tuple[Id_t, int]
        """Returns (id, end_pos)."""
        re_list = self.lexer_def[lex_mode]

        return _LongestMatch(re_list, line, start_pos)


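# Usage sketch for the pure-Python path -- the instance is callable:
#
#   matcher = _MatchOshToken_Slow(lexer_def.LEXER_DEF)
#   tok_id, end_pos = matcher(lex_mode_e.ShCommand, 'echo hi', 0)
#
# (lex_mode_e.ShCommand is illustrative; any mode that's a key in LEXER_DEF
# works.)

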
def _MatchOshToken_Fast(lex_mode, line, start_pos):
    # type: (lex_mode_t, str, int) -> Tuple[Id_t, int]
    """Returns (Id, end_pos)."""
    tok_type, end_pos = fastlex.MatchOshToken(lex_mode, line, start_pos)
    # IMPORTANT: We're reusing Id instances here.  Ids are very common, so
    # this saves memory.
    return tok_type, end_pos


class _MatchTokenSlow(object):

    def __init__(self, pat_list):
        # type: (List[Tuple[bool, str, Id_t]]) -> None
        self.pat_list = _CompileAll(pat_list)

    def __call__(self, line, start_pos):
        # type: (str, int) -> Tuple[Id_t, int]
        return _LongestMatch(self.pat_list, line, start_pos)


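# The wrappers below adapt each fastlex function to the SimpleMatchFunc
# signature (str, int) -> (Id_t, int), so the fast and slow paths are
# interchangeable at the assignment site in the 'if fastlex:' block below.

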
def _MatchEchoToken_Fast(line, start_pos):
    # type: (str, int) -> Tuple[Id_t, int]
    tok_type, end_pos = fastlex.MatchEchoToken(line, start_pos)
    return tok_type, end_pos


def _MatchPrintfBToken_Fast(line, start_pos):
    # type: (str, int) -> Tuple[Id_t, int]
    tok_type, end_pos = fastlex.MatchPrintfBToken(line, start_pos)
    return tok_type, end_pos


def _MatchGlobToken_Fast(line, start_pos):
    # type: (str, int) -> Tuple[Id_t, int]
    tok_type, end_pos = fastlex.MatchGlobToken(line, start_pos)
    return tok_type, end_pos


def _MatchPS1Token_Fast(line, start_pos):
    # type: (str, int) -> Tuple[Id_t, int]
    tok_type, end_pos = fastlex.MatchPS1Token(line, start_pos)
    return tok_type, end_pos


def _MatchHistoryToken_Fast(line, start_pos):
    # type: (str, int) -> Tuple[Id_t, int]
    tok_type, end_pos = fastlex.MatchHistoryToken(line, start_pos)
    return tok_type, end_pos


def _MatchBraceRangeToken_Fast(line, start_pos):
    # type: (str, int) -> Tuple[Id_t, int]
    tok_type, end_pos = fastlex.MatchBraceRangeToken(line, start_pos)
    return tok_type, end_pos


def _MatchJ8Token_Fast(line, start_pos):
    # type: (str, int) -> Tuple[Id_t, int]
    tok_type, end_pos = fastlex.MatchJ8Token(line, start_pos)
    return tok_type, end_pos


def _MatchJ8LinesToken_Fast(line, start_pos):
    # type: (str, int) -> Tuple[Id_t, int]
    tok_type, end_pos = fastlex.MatchJ8LinesToken(line, start_pos)
    return tok_type, end_pos


def _MatchJ8StrToken_Fast(line, start_pos):
    # type: (str, int) -> Tuple[Id_t, int]
    tok_type, end_pos = fastlex.MatchJ8StrToken(line, start_pos)
    return tok_type, end_pos


def _MatchJsonStrToken_Fast(line, start_pos):
    # type: (str, int) -> Tuple[Id_t, int]
    tok_type, end_pos = fastlex.MatchJsonStrToken(line, start_pos)
    return tok_type, end_pos


def _MatchShNumberToken_Fast(line, start_pos):
    # type: (str, int) -> Tuple[Id_t, int]
    tok_type, end_pos = fastlex.MatchShNumberToken(line, start_pos)
    return tok_type, end_pos


if fastlex:
    OneToken = _MatchOshToken_Fast
    ECHO_MATCHER = _MatchEchoToken_Fast
    PRINTF_B_MATCHER = _MatchPrintfBToken_Fast
    GLOB_MATCHER = _MatchGlobToken_Fast
    PS1_MATCHER = _MatchPS1Token_Fast
    HISTORY_MATCHER = _MatchHistoryToken_Fast
    BRACE_RANGE_MATCHER = _MatchBraceRangeToken_Fast

    MatchJ8Token = _MatchJ8Token_Fast
    MatchJ8LinesToken = _MatchJ8LinesToken_Fast
    MatchJ8StrToken = _MatchJ8StrToken_Fast
    MatchJsonStrToken = _MatchJsonStrToken_Fast
    MatchShNumberToken = _MatchShNumberToken_Fast

    IsUtf8Codeset = fastlex.IsUtf8Codeset
    IsValidVarName = fastlex.IsValidVarName
    ShouldHijack = fastlex.ShouldHijack
    LooksLikeInteger = fastlex.LooksLikeInteger
    LooksLikeYshInt = fastlex.LooksLikeYshInt
    LooksLikeYshFloat = fastlex.LooksLikeYshFloat
else:
    OneToken = _MatchOshToken_Slow(lexer_def.LEXER_DEF)
    ECHO_MATCHER = _MatchTokenSlow(lexer_def.ECHO_E_DEF)
    PRINTF_B_MATCHER = _MatchTokenSlow(lexer_def.PRINTF_B_DEF)
    GLOB_MATCHER = _MatchTokenSlow(lexer_def.GLOB_DEF)
    PS1_MATCHER = _MatchTokenSlow(lexer_def.PS1_DEF)
    HISTORY_MATCHER = _MatchTokenSlow(lexer_def.HISTORY_DEF)
    BRACE_RANGE_MATCHER = _MatchTokenSlow(lexer_def.BRACE_RANGE_DEF)

    MatchJ8Token = _MatchTokenSlow(lexer_def.J8_DEF)
    MatchJ8LinesToken = _MatchTokenSlow(lexer_def.J8_LINES_DEF)
    MatchJ8StrToken = _MatchTokenSlow(lexer_def.J8_STR_DEF)
    MatchJsonStrToken = _MatchTokenSlow(lexer_def.JSON_STR_DEF)
    MatchShNumberToken = _MatchTokenSlow(lexer_def.SH_NUMBER_DEF)

    # Used by osh/cmd_parse.py to validate for-loop names.  Note that it must
    # be anchored on the right.
    _VAR_NAME_RE = re.compile(lexer_def.VAR_NAME_RE + '$')  # type: ignore

    # yapf: disable
    _IS_UTF8_CODESET_RE = re.compile(lexer_def.IS_UTF8_CODESET_RE + '$')  # type: ignore

    def IsUtf8Codeset(s):
        # type: (str) -> bool
        return bool(_IS_UTF8_CODESET_RE.match(s))

    def IsValidVarName(s):
        # type: (str) -> bool
        return bool(_VAR_NAME_RE.match(s))

    _SHOULD_HIJACK_RE = re.compile(lexer_def.SHOULD_HIJACK_RE + '$')  # type: ignore

    def ShouldHijack(s):
        # type: (str) -> bool
        return bool(_SHOULD_HIJACK_RE.match(s))

    #
    # Integer/float
    #

    _LOOKS_LIKE_INTEGER_RE = re.compile(lexer_def.LOOKS_LIKE_INTEGER + '$')  # type: ignore

    def LooksLikeInteger(s):
        # type: (str) -> bool
        return bool(_LOOKS_LIKE_INTEGER_RE.match(s))

    _LOOKS_LIKE_YSH_INT_RE = re.compile(lexer_def.LOOKS_LIKE_YSH_INT + '$')  # type: ignore

    def LooksLikeYshInt(s):
        # type: (str) -> bool
        return bool(_LOOKS_LIKE_YSH_INT_RE.match(s))

    _LOOKS_LIKE_YSH_FLOAT_RE = re.compile(lexer_def.LOOKS_LIKE_YSH_FLOAT + '$')  # type: ignore

    def LooksLikeYshFloat(s):
        # type: (str) -> bool
        return bool(_LOOKS_LIKE_YSH_FLOAT_RE.match(s))
    # yapf: enable


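# A sketch of the predicate functions, available on either path:
#
#   IsValidVarName('foo_bar')  # => True
#   IsValidVarName('1bad')     # => False
#   LooksLikeInteger('42')     # => True
#
# The exact strings accepted are defined by the regexes in lexer_def (and by
# the equivalent re2c rules in the fastlex extension), not here.

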
class SimpleLexer(object):

    def __init__(self, match_func, s):
        # type: (SimpleMatchFunc, str) -> None
        self.match_func = match_func
        self.s = s
        self.pos = 0

    def Next(self):
        # type: () -> Tuple[Id_t, str]
        """
        Note: match_func will return Id.Eol_Tok repeatedly at the terminating
        NUL.
        """
        tok_id, end_pos = self.match_func(self.s, self.pos)
        val = self.s[self.pos:end_pos]
        self.pos = end_pos
        return tok_id, val

    def Tokens(self):
        # type: () -> List[Tuple[Id_t, str]]
        tokens = []  # type: List[Tuple[Id_t, str]]
        while True:
            tok_id, val = self.Next()
            if tok_id == Id.Eol_Tok:  # NUL terminator
                break
            tokens.append((tok_id, val))
        return tokens


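# Usage sketch, with the EchoLexer constructor defined below:
#
#   lx = EchoLexer(r'one\ttwo')
#   for tok_id, val in lx.Tokens():
#       ...
#
# Tokens() stops at Id.Eol_Tok, so the end-of-string token itself isn't
# included in the list.

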
# Iterated over in builtin/io_osh.py
def EchoLexer(s):
    # type: (str) -> SimpleLexer
    return SimpleLexer(ECHO_MATCHER, s)


def PrintfBLexer(s):
    # type: (str) -> SimpleLexer
    return SimpleLexer(PRINTF_B_MATCHER, s)


def BraceRangeLexer(s):
    # type: (str) -> SimpleLexer
    return SimpleLexer(BRACE_RANGE_MATCHER, s)


def GlobLexer(s):
    # type: (str) -> SimpleLexer
    return SimpleLexer(GLOB_MATCHER, s)


# These tokens are "slurped": the whole string is tokenized at once, into a
# list.


def HistoryTokens(s):
    # type: (str) -> List[Tuple[Id_t, str]]
    lex = SimpleLexer(HISTORY_MATCHER, s)
    return lex.Tokens()


def Ps1Tokens(s):
    # type: (str) -> List[Tuple[Id_t, str]]
    lex = SimpleLexer(PS1_MATCHER, s)
    return lex.Tokens()


#
# builtin/bracket_osh.py
#


def BracketUnary(s):
    # type: (str) -> Id_t
    from _devbuild.gen.id_kind import TEST_UNARY_LOOKUP  # break circular dep
    return TEST_UNARY_LOOKUP.get(s, Id.Undefined_Tok)


def BracketBinary(s):
    # type: (str) -> Id_t
    from _devbuild.gen.id_kind import TEST_BINARY_LOOKUP
    return TEST_BINARY_LOOKUP.get(s, Id.Undefined_Tok)


def BracketOther(s):
    # type: (str) -> Id_t
    from _devbuild.gen.id_kind import TEST_OTHER_LOOKUP
    return TEST_OTHER_LOOKUP.get(s, Id.Undefined_Tok)


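# Example: BracketUnary('-f') returns the Id for the file-test operator,
# while BracketUnary('--nope') returns Id.Undefined_Tok, since it's not in
# the generated lookup table.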