|
1 # -*- coding: utf-8 -*- |
|
2 """ |
|
3 pygments.lexers.erlang |
|
4 ~~~~~~~~~~~~~~~~~~~~~~ |
|
5 |
|
6 Lexers for Erlang. |
|
7 |
|
8 :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS. |
|
9 :license: BSD, see LICENSE for details. |
|
10 """ |
|
11 |
|
12 import re |
|
13 |
|
14 from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions, \ |
|
15 include, default |
|
16 from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ |
|
17 Number, Punctuation, Generic |
|
18 |
|
__all__ = ['ErlangLexer', 'ErlangShellLexer', 'ElixirConsoleLexer',
           'ElixirLexer']


# Matches a single line including its trailing newline; used by the
# shell-session lexers below to walk a transcript line by line.
line_re = re.compile('.*?\n')
|
24 |
|
25 |
|
class ErlangLexer(RegexLexer):
    """
    For the Erlang functional programming language.

    Blame Jeremy Thurgood (http://jerith.za.net/).

    .. versionadded:: 0.9
    """

    name = 'Erlang'
    aliases = ['erlang']
    filenames = ['*.erl', '*.hrl', '*.es', '*.escript']
    mimetypes = ['text/x-erlang']

    keywords = (
        'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if',
        'let', 'of', 'query', 'receive', 'try', 'when',
    )

    builtins = (  # See erlang(3) man page
        'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list',
        'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions',
        'byte_size', 'cancel_timer', 'check_process_code', 'delete_module',
        'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit',
        'float', 'float_to_list', 'fun_info', 'fun_to_list',
        'function_exported', 'garbage_collect', 'get', 'get_keys',
        'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary',
        'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean',
        'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list',
        'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record',
        'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom',
        'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom',
        'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple',
        'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5',
        'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor',
        'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2',
        'pid_to_list', 'port_close', 'port_command', 'port_connect',
        'port_control', 'port_call', 'port_info', 'port_to_list',
        'process_display', 'process_flag', 'process_info', 'purge_module',
        'put', 'read_timer', 'ref_to_list', 'register', 'resume_process',
        'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie',
        'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor',
        'spawn_opt', 'split_binary', 'start_timer', 'statistics',
        'suspend_process', 'system_flag', 'system_info', 'system_monitor',
        'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered',
        'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list',
        'universaltime_to_localtime', 'unlink', 'unregister', 'whereis'
    )

    operators = r'(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)'
    word_operators = (
        'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor',
        'div', 'not', 'or', 'orelse', 'rem', 'xor'
    )

    # Unquoted atoms start with a lowercase letter; quoted atoms are
    # single-quoted text that does not end in a bare backslash.
    atom_re = r"(?:[a-z]\w*|'[^\n']*[^\\]')"

    variable_re = r'(?:[A-Z_]\w*)'

    # Escape sequences: named chars, octal, hex (fixed or braced), control.
    esc_char_re = r'[bdefnrstv\'"\\]'
    esc_octal_re = r'[0-7][0-7]?[0-7]?'
    esc_hex_re = r'(?:x[0-9a-fA-F]{2}|x\{[0-9a-fA-F]+\})'
    esc_ctrl_re = r'\^[a-zA-Z]'
    escape_re = r'(?:\\(?:'+esc_char_re+r'|'+esc_octal_re+r'|'+esc_hex_re+r'|'+esc_ctrl_re+r'))'

    macro_re = r'(?:'+variable_re+r'|'+atom_re+r')'

    # Radix prefix for base#digits integer notation (bases 2..36).
    base_re = r'(?:[2-9]|[12][0-9]|3[0-6])'

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'%.*\n', Comment),
            (words(keywords, suffix=r'\b'), Keyword),
            (words(builtins, suffix=r'\b'), Name.Builtin),
            (words(word_operators, suffix=r'\b'), Operator.Word),
            (r'^-', Punctuation, 'directive'),
            (operators, Operator),
            (r'"', String, 'string'),
            (r'<<', Name.Label),
            (r'>>', Name.Label),
            ('(' + atom_re + ')(:)', bygroups(Name.Namespace, Punctuation)),
            ('(?:^|(?<=:))(' + atom_re + r')(\s*)(\()',
             bygroups(Name.Function, Text, Punctuation)),
            (r'[+-]?' + base_re + r'#[0-9a-zA-Z]+', Number.Integer),
            # Floats must be tried BEFORE plain integers, otherwise the
            # integer rule consumes the digits preceding the decimal point
            # and the float rule never fires.  The dot is escaped: an
            # unescaped '.' would also accept junk such as '12a34'.
            (r'[+-]?\d+\.\d+', Number.Float),
            (r'[+-]?\d+', Number.Integer),
            (r'[]\[:_@\".{}()|;,]', Punctuation),
            (variable_re, Name.Variable),
            (atom_re, Name),
            (r'\?'+macro_re, Name.Constant),
            (r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char),
            # Record access: #record or #record.field.  The group is the
            # non-capturing '(?:' -- previously mistyped as '(:?', which
            # also accepted a stray colon as in '#rec:.field'.
            (r'#'+atom_re+r'(?:\.'+atom_re+r')?', Name.Label),

            # Erlang script shebang
            (r'\A#!.+\n', Comment.Hashbang),

            # EEP 43: Maps
            # http://www.erlang.org/eeps/eep-0043.html
            (r'#\{', Punctuation, 'map_key'),
        ],
        'string': [
            (escape_re, String.Escape),
            (r'"', String, '#pop'),
            # io:format-style format specifiers, e.g. ~p, ~2.w, ~*s.
            (r'~[0-9.*]*[~#+BPWXb-ginpswx]', String.Interpol),
            (r'[^"\\~]+', String),
            (r'~', String),
        ],
        'directive': [
            # -define(MACRO, ...) and -record(name, ...) get dedicated
            # highlighting for the defined name; anything else is a plain
            # module attribute.
            (r'(define)(\s*)(\()('+macro_re+r')',
             bygroups(Name.Entity, Text, Punctuation, Name.Constant), '#pop'),
            (r'(record)(\s*)(\()('+macro_re+r')',
             bygroups(Name.Entity, Text, Punctuation, Name.Label), '#pop'),
            (atom_re, Name.Entity, '#pop'),
        ],
        'map_key': [
            include('root'),
            (r'=>', Punctuation, 'map_val'),
            (r':=', Punctuation, 'map_val'),
            (r'\}', Punctuation, '#pop'),
        ],
        'map_val': [
            include('root'),
            (r',', Punctuation, '#pop'),
            (r'(?=\})', Punctuation, '#pop'),
        ],
    }
|
153 |
|
154 |
|
class ErlangShellLexer(Lexer):
    """
    Shell sessions in erl (for Erlang code).

    .. versionadded:: 1.1
    """
    name = 'Erlang erl session'
    aliases = ['erl']
    filenames = ['*.erl-sh']
    mimetypes = ['text/x-erl-shellsession']

    # An erl prompt: digits followed by '>' and then whitespace or
    # end-of-input, e.g. "1> ".
    _prompt_re = re.compile(r'\d+>(?=\s|\Z)')

    def get_tokens_unprocessed(self, text):
        code_lexer = ErlangLexer(**self.options)

        buffered = ''         # accumulated code from prompt lines
        prompt_insertions = []  # (offset, prompt-token) pairs for do_insertions
        for line_match in line_re.finditer(text):
            line = line_match.group()
            prompt = self._prompt_re.match(line)
            if prompt is None:
                # Non-prompt line: flush any buffered code first, then
                # emit the line as traceback ('*...') or plain output.
                if buffered:
                    for item in do_insertions(
                            prompt_insertions,
                            code_lexer.get_tokens_unprocessed(buffered)):
                        yield item
                    buffered = ''
                    prompt_insertions = []
                if line.startswith('*'):
                    yield line_match.start(), Generic.Traceback, line
                else:
                    yield line_match.start(), Generic.Output, line
            else:
                # Prompt line: record the prompt for later insertion and
                # buffer the remainder as Erlang code.
                cut = prompt.end()
                prompt_insertions.append(
                    (len(buffered), [(0, Generic.Prompt, line[:cut])]))
                buffered += line[cut:]
        if buffered:
            for item in do_insertions(
                    prompt_insertions,
                    code_lexer.get_tokens_unprocessed(buffered)):
                yield item
|
196 |
|
197 |
|
def gen_elixir_string_rules(name, symbol, token):
    """
    Build the one-state table for an Elixir string flavour.

    ``name`` becomes the state name suffix ('string_' + name), ``symbol``
    is the delimiter character, and ``token`` is the token type emitted
    for the string body.  The generated state handles escapes and
    #{...} interpolation, and pops on the closing delimiter.
    """
    rules = [
        (r'[^#%s\\]+' % (symbol,), token),
        include('escapes'),
        (r'\\.', token),
        (r'(%s)' % (symbol,), bygroups(token), "#pop"),
        include('interpol'),
    ]
    return {'string_' + name: rules}
|
208 |
|
209 |
|
def gen_elixir_sigstr_rules(term, token, interpol=True):
    """
    Build the rule list for the body of an Elixir sigil string.

    ``term`` is the (regex-escaped) closing delimiter and ``token`` the
    token type for the body.  Lowercase sigils interpolate
    (``interpol=True``); uppercase sigils treat the body literally.
    The closing rule also consumes any trailing sigil modifier letters.
    """
    if not interpol:
        # Literal sigil: only backslash-pairs and the terminator matter.
        return [
            (r'[^%s\\]+' % (term,), token),
            (r'\\.', token),
            (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
        ]
    # Interpolating sigil: also honour escapes and #{...} interpolation.
    return [
        (r'[^#%s\\]+' % (term,), token),
        include('escapes'),
        (r'\\.', token),
        (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
        include('interpol'),
    ]
|
225 |
|
226 |
|
class ElixirLexer(RegexLexer):
    """
    For the `Elixir language <http://elixir-lang.org>`_.

    .. versionadded:: 1.5
    """

    name = 'Elixir'
    aliases = ['elixir', 'ex', 'exs']
    filenames = ['*.ex', '*.exs']
    mimetypes = ['text/x-elixir']

    # Word lists used by get_tokens_unprocessed() below to re-classify
    # plain Name tokens produced by the regex rules.
    KEYWORD = ('fn', 'do', 'end', 'after', 'else', 'rescue', 'catch')
    KEYWORD_OPERATOR = ('not', 'and', 'or', 'when', 'in')
    BUILTIN = (
        'case', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'raise',
        'quote', 'unquote', 'unquote_splicing', 'throw', 'super',
    )
    BUILTIN_DECLARATION = (
        'def', 'defp', 'defmodule', 'defprotocol', 'defmacro', 'defmacrop',
        'defdelegate', 'defexception', 'defstruct', 'defimpl', 'defcallback',
    )

    BUILTIN_NAMESPACE = ('import', 'require', 'use', 'alias')
    CONSTANT = ('nil', 'true', 'false')

    PSEUDO_VAR = ('_', '__MODULE__', '__DIR__', '__ENV__', '__CALLER__')

    # Operators grouped by length; the 3-char table is tried before the
    # 2-char table, which is tried before the 1-char table, so longer
    # operators always win.
    OPERATORS3 = (
        '<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!==',
        '~>>', '<~>', '|~>', '<|>',
    )
    OPERATORS2 = (
        '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~',
        '->', '<-', '|', '.', '=', '~>', '<~',
    )
    OPERATORS1 = ('<', '>', '+', '-', '*', '/', '!', '^', '&')

    PUNCTUATION = (
        '\\\\', '<<', '>>', '=>', '(', ')', ':', ';', ',', '[', ']',
    )

    def get_tokens_unprocessed(self, text):
        """Post-process RegexLexer output: Name tokens whose text spells a
        keyword, builtin, constant or pseudo-variable are re-tagged."""
        for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
            if token is Name:
                if value in self.KEYWORD:
                    yield index, Keyword, value
                elif value in self.KEYWORD_OPERATOR:
                    yield index, Operator.Word, value
                elif value in self.BUILTIN:
                    yield index, Keyword, value
                elif value in self.BUILTIN_DECLARATION:
                    yield index, Keyword.Declaration, value
                elif value in self.BUILTIN_NAMESPACE:
                    yield index, Keyword.Namespace, value
                elif value in self.CONSTANT:
                    yield index, Name.Constant, value
                elif value in self.PSEUDO_VAR:
                    yield index, Name.Builtin.Pseudo, value
                else:
                    yield index, token, value
            else:
                yield index, token, value

    # NOTE: not a method -- this helper runs once at class-definition time
    # and its result is merged into `tokens` via tokens.update() below.
    def gen_elixir_sigil_rules():
        # all valid sigil terminators (excluding heredocs)
        terminators = [
            (r'\{', r'\}', 'cb'),
            (r'\[', r'\]', 'sb'),
            (r'\(', r'\)', 'pa'),
            (r'<', r'>', 'ab'),
            (r'/', r'/', 'slas'),
            (r'\|', r'\|', 'pipe'),
            ('"', '"', 'quot'),
            ("'", "'", 'apos'),
        ]

        # heredocs have slightly different rules
        triquotes = [(r'"""', 'triquot'), (r"'''", 'triapos')]

        token = String.Other
        states = {'sigils': []}

        for term, name in triquotes:
            # Lowercase sigils interpolate; uppercase ones are literal.
            # Each heredoc sigil pushes two states: the body state and,
            # beneath it, the modifier-letter state entered after the body.
            states['sigils'] += [
                (r'(~[a-z])(%s)' % (term,), bygroups(token, String.Heredoc),
                 (name + '-end', name + '-intp')),
                (r'(~[A-Z])(%s)' % (term,), bygroups(token, String.Heredoc),
                 (name + '-end', name + '-no-intp')),
            ]

            states[name + '-end'] = [
                (r'[a-zA-Z]+', token, '#pop'),
                default('#pop'),
            ]
            states[name + '-intp'] = [
                (r'^\s*' + term, String.Heredoc, '#pop'),
                include('heredoc_interpol'),
            ]
            states[name + '-no-intp'] = [
                (r'^\s*' + term, String.Heredoc, '#pop'),
                include('heredoc_no_interpol'),
            ]

        for lterm, rterm, name in terminators:
            states['sigils'] += [
                (r'~[a-z]' + lterm, token, name + '-intp'),
                (r'~[A-Z]' + lterm, token, name + '-no-intp'),
            ]
            states[name + '-intp'] = gen_elixir_sigstr_rules(rterm, token)
            states[name + '-no-intp'] = \
                gen_elixir_sigstr_rules(rterm, token, interpol=False)

        return states

    # Precompiled alternations for the operator/punctuation tables above.
    op3_re = "|".join(re.escape(s) for s in OPERATORS3)
    op2_re = "|".join(re.escape(s) for s in OPERATORS2)
    op1_re = "|".join(re.escape(s) for s in OPERATORS1)
    ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re)
    punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION)
    alnum = r'\w'
    # Identifiers: '...' or lowercase/underscore start, optional !/? suffix.
    name_re = r'(?:\.\.\.|[a-z_]%s*[!?]?)' % alnum
    # Module names: dotted chains of capitalized segments, e.g. Foo.Bar.
    modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum}
    complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re)
    special_atom_re = r'(?:\.\.\.|<<>>|%\{\}|%|\{\})'

    long_hex_char_re = r'(\\x\{)([\da-fA-F]+)(\})'
    hex_char_re = r'(\\x[\da-fA-F]{1,2})'
    escape_char_re = r'(\\[abdefnrstv])'

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'#.*$', Comment.Single),

            # Various kinds of characters
            (r'(\?)' + long_hex_char_re,
             bygroups(String.Char,
                      String.Escape, Number.Hex, String.Escape)),
            (r'(\?)' + hex_char_re,
             bygroups(String.Char, String.Escape)),
            (r'(\?)' + escape_char_re,
             bygroups(String.Char, String.Escape)),
            (r'\?\\?.', String.Char),

            # '::' has to go before atoms
            (r':::', String.Symbol),
            (r'::', Operator),

            # atoms
            (r':' + special_atom_re, String.Symbol),
            (r':' + complex_name_re, String.Symbol),
            (r':"', String.Symbol, 'string_double_atom'),
            (r":'", String.Symbol, 'string_single_atom'),

            # [keywords: ...]
            (r'(%s|%s)(:)(?=\s|\n)' % (special_atom_re, complex_name_re),
             bygroups(String.Symbol, Punctuation)),

            # @attributes
            (r'@' + name_re, Name.Attribute),

            # identifiers
            (name_re, Name),
            # Module name, optionally preceded by the struct sigil %/%%.
            (r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)),

            # operators and punctuation
            (op3_re, Operator),
            (op2_re, Operator),
            (punctuation_re, Punctuation),
            (r'&\d', Name.Entity),   # anon func arguments
            (op1_re, Operator),

            # numbers
            (r'0b[01]+', Number.Bin),
            (r'0o[0-7]+', Number.Oct),
            (r'0x[\da-fA-F]+', Number.Hex),
            (r'\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?', Number.Float),
            (r'\d(_?\d)*', Number.Integer),

            # strings and heredocs
            # NOTE(review): the '"""' opener tolerates trailing junk on the
            # line while "'''" is anchored with $ -- asymmetry kept as-is.
            (r'"""\s*', String.Heredoc, 'heredoc_double'),
            (r"'''\s*$", String.Heredoc, 'heredoc_single'),
            (r'"', String.Double, 'string_double'),
            (r"'", String.Single, 'string_single'),

            include('sigils'),

            (r'%\{', Punctuation, 'map_key'),
            (r'\{', Punctuation, 'tuple'),
        ],
        'heredoc_double': [
            (r'^\s*"""', String.Heredoc, '#pop'),
            include('heredoc_interpol'),
        ],
        'heredoc_single': [
            (r"^\s*'''", String.Heredoc, '#pop'),
            include('heredoc_interpol'),
        ],
        'heredoc_interpol': [
            (r'[^#\\\n]+', String.Heredoc),
            include('escapes'),
            (r'\\.', String.Heredoc),
            (r'\n+', String.Heredoc),
            include('interpol'),
        ],
        'heredoc_no_interpol': [
            (r'[^\\\n]+', String.Heredoc),
            (r'\\.', String.Heredoc),
            (r'\n+', String.Heredoc),
        ],
        'escapes': [
            (long_hex_char_re,
             bygroups(String.Escape, Number.Hex, String.Escape)),
            (hex_char_re, String.Escape),
            (escape_char_re, String.Escape),
        ],
        'interpol': [
            (r'#\{', String.Interpol, 'interpol_string'),
        ],
        'interpol_string': [
            (r'\}', String.Interpol, "#pop"),
            include('root')
        ],
        'map_key': [
            include('root'),
            (r':', Punctuation, 'map_val'),
            (r'=>', Punctuation, 'map_val'),
            (r'\}', Punctuation, '#pop'),
        ],
        'map_val': [
            include('root'),
            (r',', Punctuation, '#pop'),
            (r'(?=\})', Punctuation, '#pop'),
        ],
        'tuple': [
            include('root'),
            (r'\}', Punctuation, '#pop'),
        ],
    }
    # Merge in the generated string/atom and sigil state tables.
    tokens.update(gen_elixir_string_rules('double', '"', String.Double))
    tokens.update(gen_elixir_string_rules('single', "'", String.Single))
    tokens.update(gen_elixir_string_rules('double_atom', '"', String.Symbol))
    tokens.update(gen_elixir_string_rules('single_atom', "'", String.Symbol))
    tokens.update(gen_elixir_sigil_rules())
|
472 |
|
473 |
|
class ElixirConsoleLexer(Lexer):
    """
    For Elixir interactive console (iex) output like:

    .. sourcecode:: iex

        iex> [head | tail] = [1,2,3]
        [1,2,3]
        iex> head
        1
        iex> tail
        [2,3]
        iex> [head | tail]
        [1,2,3]
        iex> length [head | tail]
        3

    .. versionadded:: 1.5
    """

    name = 'Elixir iex session'
    aliases = ['iex']
    mimetypes = ['text/x-elixir-shellsession']

    # Matches 'iex> ', 'iex(3)> ' and the continuation prompts '...> '.
    _prompt_re = re.compile(r'(iex|\.{3})(\(\d+\))?> ')

    def get_tokens_unprocessed(self, text):
        ex_lexer = ElixirLexer(**self.options)

        pending = ''       # Elixir code accumulated from prompt lines
        inserts = []       # prompt/error tokens to weave back in
        error_mode = False  # inside a '** (...)' error report
        for line_match in line_re.finditer(text):
            line = line_match.group()
            if line.startswith(u'** '):
                # Error header: emit it as Generic.Error but keep the
                # newline in the code buffer so offsets stay aligned.
                error_mode = True
                inserts.append((len(pending),
                                [(0, Generic.Error, line[:-1])]))
                pending += line[-1:]
                continue
            prompt = self._prompt_re.match(line)
            if prompt is not None:
                error_mode = False
                cut = prompt.end()
                inserts.append((len(pending),
                                [(0, Generic.Prompt, line[:cut])]))
                pending += line[cut:]
            else:
                # Plain output line: flush buffered code first.
                if pending:
                    for item in do_insertions(
                            inserts, ex_lexer.get_tokens_unprocessed(pending)):
                        yield item
                    pending = ''
                    inserts = []
                if error_mode:
                    yield line_match.start(), Generic.Error, line
                else:
                    yield line_match.start(), Generic.Output, line
        if pending:
            for item in do_insertions(
                    inserts, ex_lexer.get_tokens_unprocessed(pending)):
                yield item