@@ -153,11 +153,11 @@
     tokens = {
         'root': [
             (r'\s+', Text),
             (r'--.*\n?', Comment.Single),
             (r'/\*', Comment.Multiline, 'multiline-comments'),
-            (r'(' + '|'.join(s.replace(" ", "\s+")
+            (r'(' + '|'.join(s.replace(" ", r"\s+")
                              for s in DATATYPES + PSEUDO_TYPES)
              + r')\b', Name.Builtin),
             (words(KEYWORDS, suffix=r'\b'), Keyword),
             (r'[+*/<>=~!@#%^&|`?-]+', Operator),
             (r'::', Operator),  # cast
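The only change in this hunk is spelling the regex fragment as a raw string. "\s+" still works because Python leaves the unknown escape \s intact, but unrecognized escapes in ordinary string literals have been deprecated since Python 3.6, so the raw-string form avoids the warning. A minimal sketch of the same replace-then-join idiom, using a made-up two-item type list instead of Pygments' DATATYPES/PSEUDO_TYPES tables:

    import re

    # Hypothetical stand-in for DATATYPES + PSEUDO_TYPES; the raw string keeps
    # the backslash literal without relying on Python's escape handling.
    types = ("double precision", "bigint")
    pattern = re.compile(r'(' + '|'.join(s.replace(" ", r"\s+") for s in types)
                         + r')\b', re.IGNORECASE)
    assert pattern.match("DOUBLE   PRECISION")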
@@ -306,18 +306,11 @@
 
             # consume the lines of the command: start with an optional prompt
             # and continue until the end of command is detected
             curcode = ''
             insertions = []
-            while 1:
-                try:
-                    line = next(lines)
-                except StopIteration:
-                    # allow the emission of partially collected items
-                    # the repl loop will be broken below
-                    break
-
+            for line in lines:
                 # Identify a shell prompt in case of psql commandline example
                 if line.startswith('$') and not curcode:
                     lexer = get_lexer_by_name('console', **self.options)
                     for x in lexer.get_tokens_unprocessed(line):
                         yield x
@@ -344,12 +337,11 @@
                                       sql.get_tokens_unprocessed(curcode)):
                 yield item
 
             # Emit the output lines
             out_token = Generic.Output
-            while 1:
-                line = next(lines)
+            for line in lines:
                 mprompt = re_prompt.match(line)
                 if mprompt is not None:
                     # push the line back to have it processed by the prompt
                     lines.send(line)
                     break
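The lines iterator consumed here supports send(), which is how lines.send(line) hands a prompt line back so the next command phase can re-read it. Sketched independently of Pygments' own lookahead helper, a push-back wrapper of that shape might look like:

    def pushback_iter(iterable):
        """Yield items; an item handed in via send() is delivered again on
        the following next() call (a sketch, not Pygments' helper)."""
        for item in iterable:
            pushed = yield item
            if pushed is not None:
                yield None      # value returned to the send() call itself
                yield pushed    # re-deliver the pushed-back item

    lines = pushback_iter(iter(["a\n", "=> b\n", "c\n"]))
    first = next(lines)               # 'a\n'
    prompt = next(lines)              # '=> b\n'
    lines.send(prompt)                # push it back ...
    assert next(lines) == prompt      # ... and read it again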
@@ -361,10 +353,12 @@
                         out_token = Generic.Error
                     yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
                     yield (mmsg.start(2), out_token, mmsg.group(2))
                 else:
                     yield (0, out_token, line)
+            else:
+                return
 
 
 class SqlLexer(RegexLexer):
     """
     Lexer for Structured Query Language. Currently, this lexer does
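Taken together, the three hunks above replace the bare next(lines) calls with for loops over the same iterator. Inside a generator that matters: since PEP 479 (the default from Python 3.7), a StopIteration escaping the generator body becomes a RuntimeError, so exhausting the input in the old output loop would crash rather than terminate. The new for ... else: return ends the generator cleanly when the input runs out. A compact sketch of the same control flow, with a simplified prompt test standing in for re_prompt and the pushed-back prompt line simply dropped for brevity:

    def repl_chunks(lines):
        """Alternate command/output phases over one shared line iterator."""
        while True:
            command = []
            for line in lines:
                command.append(line)
                if line.rstrip().endswith(';'):     # end of the statement
                    break
            output = []
            for line in lines:
                if line.startswith('=>'):           # next prompt reached
                    break
                output.append(line)
            else:
                # input exhausted: emit what was collected and finish the
                # generator instead of letting next() raise StopIteration
                if command or output:
                    yield command, output
                return
            yield command, output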
@@ -497,11 +491,11 @@
     # Use re.UNICODE to allow non ASCII letters in names.
     flags = re.IGNORECASE | re.UNICODE
     tokens = {
         'root': [
             (r'\s+', Whitespace),
-            (r'--(?m).*?$\n?', Comment.Single),
+            (r'(?m)--.*?$\n?', Comment.Single),
             (r'/\*', Comment.Multiline, 'multiline-comments'),
             (words(_tsql_builtins.OPERATORS), Operator),
             (words(_tsql_builtins.OPERATOR_WORDS, suffix=r'\b'), Operator.Word),
             (words(_tsql_builtins.TYPES, suffix=r'\b'), Name.Class),
             (words(_tsql_builtins.FUNCTIONS, suffix=r'\b'), Name.Function),
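The T-SQL hunk just moves the (?m) flag to the front of the comment pattern: global inline flags placed anywhere other than the start of a regular expression have been deprecated since Python 3.6 and are rejected by newer releases. A quick check of the rewritten pattern (the sample input is made up):

    import re

    single_line_comment = re.compile(r'(?m)--.*?$\n?')
    m = single_line_comment.match('-- a comment\nSELECT 1;\n')
    assert m.group(0) == '-- a comment\n'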