ThirdParty/Pygments/pygments/lexers/agile.py

changeset 2426:da76c71624de
parent    1705:b0fbc9300f2b
child     2525:8b507a9a2d40
comparison of 2425:ace8a08028f3 with 2426:da76c71624de
@@ -3 +3 @@
     pygments.lexers.agile
     ~~~~~~~~~~~~~~~~~~~~~

     Lexers for agile languages.

-    :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
+    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """

 import re

@@ -20 +20 @@


 __all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
            'Python3Lexer', 'Python3TracebackLexer', 'RubyLexer',
            'RubyConsoleLexer', 'PerlLexer', 'LuaLexer', 'MoonScriptLexer',
-           'MiniDLexer', 'IoLexer', 'TclLexer', 'FactorLexer', 'FancyLexer']
+           'CrocLexer', 'MiniDLexer', 'IoLexer', 'TclLexer', 'FactorLexer',
+           'FancyLexer', 'DgLexer']

 # b/w compatibility
 from pygments.lexers.functional import SchemeLexer
 from pygments.lexers.jvm import IokeLexer, ClojureLexer

35 """ 36 """
36 For `Python <http://www.python.org>`_ source code. 37 For `Python <http://www.python.org>`_ source code.
37 """ 38 """
38 39
39 name = 'Python' 40 name = 'Python'
40 aliases = ['python', 'py'] 41 aliases = ['python', 'py', 'sage']
41 filenames = ['*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac'] 42 filenames = ['*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage']
42 mimetypes = ['text/x-python', 'application/x-python'] 43 mimetypes = ['text/x-python', 'application/x-python']
43 44
44 tokens = { 45 tokens = {
45 'root': [ 46 'root': [
46 (r'\n', Text), 47 (r'\n', Text),
@@ -74 +75 @@
             include('numbers'),
         ],
         'keywords': [
             (r'(assert|break|continue|del|elif|else|except|exec|'
              r'finally|for|global|if|lambda|pass|print|raise|'
-             r'return|try|while|yield|as|with)\b', Keyword),
+             r'return|try|while|yield(\s+from)?|as|with)\b', Keyword),
         ],
         'builtins': [
             (r'(?<!\.)(__import__|abs|all|any|apply|basestring|bin|bool|buffer|'
              r'bytearray|bytes|callable|chr|classmethod|cmp|coerce|compile|'
              r'complex|delattr|dict|dir|divmod|enumerate|eval|execfile|exit|'
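The keyword pattern above widens "yield" to "yield(\s+from)?", so a "yield from" delegation comes out as a single Keyword token; the Python 3 lexer gets the same treatment further down. As a quick illustration, not part of the changeset and with an arbitrary sample input, the updated lexer can be exercised through Pygments' regular token API:

from pygments.lexers.agile import PythonLexer

# With the widened pattern, "yield from" arrives as one Keyword token
# instead of "yield" plus a separately lexed "from".
for token_type, value in PythonLexer().get_tokens("yield from producer()"):
    print(token_type, repr(value))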
@@ -133 +134 @@
             (r'', Text, '#pop') # all else: go back
         ],
         'fromimport': [
             (r'(?:[ \t]|\\\n)+', Text),
             (r'import\b', Keyword.Namespace, '#pop'),
+            # if None occurs here, it's "raise x from None", since None can
+            # never be a module name
+            (r'None\b', Name.Builtin.Pseudo, '#pop'),
+            # sadly, in "raise x from y" y will be highlighted as namespace too
             (r'[a-zA-Z_.][a-zA-Z0-9_.]*', Name.Namespace),
+            # anything else here also means "raise x from y" and is therefore
+            # not an error
+            (r'', Text, '#pop'),
         ],
         'stringescape': [
             (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
              r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
         ],
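The new fromimport rules exist because the lexer enters that state for every "from", including the one in "raise x from None": the None rule keeps it from being colored as a module name, and the empty fallback lets "raise x from y" leave the state cleanly. A small, hypothetical check against the updated lexer (the sample input is mine):

from pygments.lexers.agile import PythonLexer
from pygments.token import Name

tokens = list(PythonLexer().get_tokens("raise ValueError('boom') from None"))
# The trailing None should now be reported as Name.Builtin.Pseudo
# rather than as a Name.Namespace module name.
assert (Name.Builtin.Pseudo, 'None') in tokens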
@@ -198 +206 @@

     tokens = PythonLexer.tokens.copy()
     tokens['keywords'] = [
         (r'(assert|break|continue|del|elif|else|except|'
          r'finally|for|global|if|lambda|pass|raise|nonlocal|'
-         r'return|try|while|yield|as|with|True|False|None)\b', Keyword),
+         r'return|try|while|yield(\s+from)?|as|with|True|False|None)\b',
+         Keyword),
     ]
     tokens['builtins'] = [
         (r'(?<!\.)(__import__|abs|all|any|bin|bool|bytearray|bytes|'
          r'chr|classmethod|cmp|compile|complex|delattr|dict|dir|'
          r'divmod|enumerate|eval|filter|float|format|frozenset|getattr|'
@@ -254 +263 @@
     ]
     tokens['fromimport'] = [
         (r'(\s+)(import)\b', bygroups(Text, Keyword), '#pop'),
         (r'\.', Name.Namespace),
         (uni_name, Name.Namespace),
+        (r'', Text, '#pop'),
     ]
     # don't highlight "%s" substitutions
     tokens['strings'] = [
         (r'[^\\\'"%\n]+', String),
         # quotes, percents and backslashes must be parsed one at a time
@@ -381 +391 @@
              bygroups(Text, Name.Builtin, Text, Number, Text)),
             (r'^(    )(.+)(\n)',
              bygroups(Text, using(PythonLexer), Text)),
             (r'^([ \t]*)(\.\.\.)(\n)',
              bygroups(Text, Comment, Text)), # for doctests...
-            (r'^(.+)(: )(.+)(\n)',
+            (r'^([^:]+)(: )(.+)(\n)',
              bygroups(Generic.Error, Text, Name, Text), '#pop'),
             (r'^([a-zA-Z_][a-zA-Z0-9_]*)(:?\n)',
              bygroups(Generic.Error, Text), '#pop')
         ],
     }
@@ -417 +427 @@
              bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
             (r'^(    )(.+)(\n)',
              bygroups(Text, using(Python3Lexer), Text)),
             (r'^([ \t]*)(\.\.\.)(\n)',
              bygroups(Text, Comment, Text)), # for doctests...
-            (r'^(.+)(: )(.+)(\n)',
+            (r'^([^:]+)(: )(.+)(\n)',
              bygroups(Generic.Error, Text, Name, Text), '#pop'),
             (r'^([a-zA-Z_][a-zA-Z0-9_]*)(:?\n)',
              bygroups(Generic.Error, Text), '#pop')
         ],
     }
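Both traceback lexers previously matched the exception line with a greedy "^(.+)(: )", so an error message that itself contained ": " dragged part of the message into the Generic.Error group; "^([^:]+)(: )" stops at the first colon. An illustrative run with a made-up traceback:

from pygments.lexers.agile import PythonTracebackLexer

tb = ("Traceback (most recent call last):\n"
      '  File "example.py", line 1, in <module>\n'
      "ValueError: invalid literal for int() with base 10: 'x'\n")
# Only "ValueError" should land in the Generic.Error group now; the rest
# of the line, inner colon included, is lexed as the message.
for token_type, value in PythonTracebackLexer().get_tokens(tb):
    print(token_type, repr(value))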
@@ -509 +519 @@
             (r'\:@{0,2}([a-zA-Z_]\w*[\!\?]?|\*\*?|[-+]@?|'
              r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)', String.Symbol),
             (r":'(\\\\|\\'|[^'])*'", String.Symbol),
             (r"'(\\\\|\\'|[^'])*'", String.Single),
             (r':"', String.Symbol, 'simple-sym'),
+            (r'([a-zA-Z_][a-zA-Z0-9]*)(:)',
+             bygroups(String.Symbol, Punctuation)),  # Since Ruby 1.9
             (r'"', String.Double, 'simple-string'),
             (r'(?<!\.)`', String.Backtick, 'simple-backtick'),
         ]

         # double-quoted string and symbol
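The added rule pair recognises Ruby 1.9's "key: value" hash shorthand, emitting the key as a symbol and the colon as punctuation. An ad-hoc way to see it, with an arbitrary snippet:

from pygments.lexers.agile import RubyLexer

# "depth" should now show up as String.Symbol, followed by a
# Punctuation token for the colon.
for token_type, value in RubyLexer().get_tokens("options = {depth: 3}"):
    print(token_type, repr(value))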
@@ -593 +605 @@
             (r'(BEGIN|END|alias|begin|break|case|defined\?|'
              r'do|else|elsif|end|ensure|for|if|in|next|redo|'
              r'rescue|raise|retry|return|super|then|undef|unless|until|when|'
              r'while|yield)\b', Keyword),
             # start of function, class and module names
-            (r'(module)(\s+)([a-zA-Z_][a-zA-Z0-9_]*(::[a-zA-Z_][a-zA-Z0-9_]*)*)',
+            (r'(module)(\s+)([a-zA-Z_][a-zA-Z0-9_]*'
+             r'(?:::[a-zA-Z_][a-zA-Z0-9_]*)*)',
              bygroups(Keyword, Text, Name.Namespace)),
             (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
             (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
             (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
             # special methods
@@ -638 +651 @@
              heredoc_callback),
             # empty string heredocs
             (r'(<<-?)("|\')()(\2)(.*?\n)', heredoc_callback),
             (r'__END__', Comment.Preproc, 'end-part'),
             # multiline regex (after keywords or assignments)
-            (r'(?:^|(?<=[=<>~!])|'
+            (r'(?:^|(?<=[=<>~!:])|'
              r'(?<=(?:\s|;)when\s)|'
              r'(?<=(?:\s|;)or\s)|'
              r'(?<=(?:\s|;)and\s)|'
              r'(?<=(?:\s|;|\.)index\s)|'
              r'(?<=(?:\s|;|\.)scan\s)|'
@@ -1187 +1200 @@
             if token == Punctuation and value == ".":
                 token = Operator
             yield index, token, value


-
-class MiniDLexer(RegexLexer):
-    """
-    For `MiniD <http://www.dsource.org/projects/minid>`_ (a D-like scripting
-    language) source.
-    """
-    name = 'MiniD'
-    filenames = ['*.md']
-    aliases = ['minid']
-    mimetypes = ['text/x-minidsrc']
+class CrocLexer(RegexLexer):
+    """
+    For `Croc <http://jfbillingsley.com/croc>`_ source.
+    """
+    name = 'Croc'
+    filenames = ['*.croc']
+    aliases = ['croc']
+    mimetypes = ['text/x-crocsrc']

     tokens = {
         'root': [
             (r'\n', Text),
             (r'\s+', Text),
             # Comments
             (r'//(.*?)\n', Comment.Single),
-            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-            (r'/\+', Comment.Multiline, 'nestedcomment'),
+            (r'/\*', Comment.Multiline, 'nestedcomment'),
             # Keywords
-            (r'(as|assert|break|case|catch|class|continue|coroutine|default'
+            (r'(as|assert|break|case|catch|class|continue|default'
              r'|do|else|finally|for|foreach|function|global|namespace'
-             r'|if|import|in|is|local|module|return|super|switch'
+             r'|if|import|in|is|local|module|return|scope|super|switch'
              r'|this|throw|try|vararg|while|with|yield)\b', Keyword),
             (r'(false|true|null)\b', Keyword.Constant),
             # FloatLiteral
-            (r'([0-9][0-9_]*)?\.[0-9_]+([eE][+\-]?[0-9_]+)?', Number.Float),
+            (r'([0-9][0-9_]*)(?=[.eE])(\.[0-9][0-9_]*)?([eE][+\-]?[0-9_]+)?',
+             Number.Float),
             # IntegerLiteral
             # -- Binary
-            (r'0[Bb][01_]+', Number),
-            # -- Octal
-            (r'0[Cc][0-7_]+', Number.Oct),
+            (r'0[bB][01][01_]*', Number),
             # -- Hexadecimal
-            (r'0[xX][0-9a-fA-F_]+', Number.Hex),
+            (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex),
             # -- Decimal
-            (r'(0|[1-9][0-9_]*)', Number.Integer),
+            (r'([0-9][0-9_]*)(?![.eE])', Number.Integer),
             # CharacterLiteral
-            (r"""'(\\['"?\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-9]{1,3}"""
+            (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-9]{1,3}"""
              r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
              String.Char
             ),
             # StringLiteral
             # -- WysiwygString
             (r'@"(""|[^"])*"', String),
-            # -- AlternateWysiwygString
-            (r'`(``|.)*`', String),
+            (r'@`(``|[^`])*`', String),
+            (r"@'(''|[^'])*'", String),
             # -- DoubleQuotedString
             (r'"(\\\\|\\"|[^"])*"', String),
             # Tokens
             (
              r'(~=|\^=|%=|\*=|==|!=|>>>=|>>>|>>=|>>|>=|<=>|\?=|-\>'
@@ -1245 +1254 @@
             ),
             # Identifier
             (r'[a-zA-Z_]\w*', Name),
         ],
         'nestedcomment': [
-            (r'[^+/]+', Comment.Multiline),
-            (r'/\+', Comment.Multiline, '#push'),
-            (r'\+/', Comment.Multiline, '#pop'),
-            (r'[+/]', Comment.Multiline),
+            (r'[^*/]+', Comment.Multiline),
+            (r'/\*', Comment.Multiline, '#push'),
+            (r'\*/', Comment.Multiline, '#pop'),
+            (r'[*/]', Comment.Multiline),
         ],
     }
+
+
+class MiniDLexer(CrocLexer):
+    """
+    For MiniD source. MiniD is now known as Croc.
+    """
+    name = 'MiniD'
+    filenames = ['*.md']
+    aliases = ['minid']
+    mimetypes = ['text/x-minidsrc']


 class IoLexer(RegexLexer):
     """
     For `Io <http://iolanguage.com/>`_ (a small, prototype-based
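MiniD was renamed to Croc upstream, so the old MiniDLexer body becomes CrocLexer and MiniDLexer shrinks to a compatibility subclass that only overrides the metadata. Lookups by the old alias keep working; a minimal check, assuming the alias table in pygments/lexers/_mapping.py was regenerated together with this file:

from pygments.lexers import get_lexer_by_name
from pygments.lexers.agile import CrocLexer

# The 'minid' alias still resolves, and the lexer it returns shares all
# of its rules with CrocLexer through inheritance.
lexer = get_lexer_by_name('minid')
print(lexer.name)                    # MiniD
print(isinstance(lexer, CrocLexer))  # True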
@@ -1799 +1818 @@
              bygroups(Number.Integer, Text, Operator)),
             (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
             (r'\d+', Number.Integer)
         ]
     }
+
+
+class DgLexer(RegexLexer):
+    """
+    Lexer for `dg <http://pyos.github.com/dg>`_,
+    a functional and object-oriented programming language
+    running on the CPython 3 VM.
+
+    *New in Pygments 1.6.*
+    """
+    name = 'dg'
+    aliases = ['dg']
+    filenames = ['*.dg']
+    mimetypes = ['text/x-dg']
+
+    tokens = {
+        'root': [
+            # Whitespace:
+            (r'\s+', Text),
+            (r'#.*?$', Comment.Single),
+            # Lexemes:
+            # Numbers
+            (r'0[bB][01]+', Number.Bin),
+            (r'0[oO][0-7]+', Number.Oct),
+            (r'0[xX][\da-fA-F]+', Number.Hex),
+            (r'[+-]?\d+\.\d+([eE][+-]?\d+)?[jJ]?', Number.Float),
+            (r'[+-]?\d+[eE][+-]?\d+[jJ]?', Number.Float),
+            (r'[+-]?\d+[jJ]?', Number.Integer),
+            # Character/String Literals
+            (r"[br]*'''", String, combined('stringescape', 'tsqs', 'string')),
+            (r'[br]*"""', String, combined('stringescape', 'tdqs', 'string')),
+            (r"[br]*'", String, combined('stringescape', 'sqs', 'string')),
+            (r'[br]*"', String, combined('stringescape', 'dqs', 'string')),
+            # Operators
+            (r"`\w+'*`", Operator), # Infix links
+            # Reserved infix links
+            (r'\b(or|and|if|else|where|is|in)\b', Operator.Word),
+            (r'[!$%&*+\-./:<-@\\^|~;,]+', Operator),
+            # Identifiers
+            # Python 3 types
+            (r"(?<!\.)(bool|bytearray|bytes|classmethod|complex|dict'?|"
+             r"float|frozenset|int|list'?|memoryview|object|property|range|"
+             r"set'?|slice|staticmethod|str|super|tuple'?|type)"
+             r"(?!['\w])", Name.Builtin),
+            # Python 3 builtins + some more
+            (r'(?<!\.)(__import__|abs|all|any|bin|bind|chr|cmp|compile|complex|'
+             r'delattr|dir|divmod|drop|dropwhile|enumerate|eval|filter|flip|'
+             r'foldl1?|format|fst|getattr|globals|hasattr|hash|head|hex|id|'
+             r'init|input|isinstance|issubclass|iter|iterate|last|len|locals|'
+             r'map|max|min|next|oct|open|ord|pow|print|repr|reversed|round|'
+             r'setattr|scanl1?|snd|sorted|sum|tail|take|takewhile|vars|zip)'
+             r"(?!['\w])", Name.Builtin),
+            (r"(?<!\.)(self|Ellipsis|NotImplemented|None|True|False)(?!['\w])",
+             Name.Builtin.Pseudo),
+            (r"(?<!\.)[A-Z]\w*(Error|Exception|Warning)'*(?!['\w])",
+             Name.Exception),
+            (r"(?<!\.)(KeyboardInterrupt|SystemExit|StopIteration|"
+             r"GeneratorExit)(?!['\w])", Name.Exception),
+            # Compiler-defined identifiers
+            (r"(?<![\.\w])(import|inherit|for|while|switch|not|raise|unsafe|"
+             r"yield|with)(?!['\w])", Keyword.Reserved),
+            # Other links
+            (r"[A-Z_']+\b", Name),
+            (r"[A-Z][\w']*\b", Keyword.Type),
+            (r"\w+'*", Name),
+            # Blocks
+            (r'[()]', Punctuation),
+        ],
+        'stringescape': [
+            (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
+             r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+        ],
+        'string': [
+            (r'%(\([a-zA-Z0-9_]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+             '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
+            (r'[^\\\'"%\n]+', String),
+            # quotes, percents and backslashes must be parsed one at a time
+            (r'[\'"\\]', String),
+            # unhandled string formatting sign
+            (r'%', String),
+            (r'\n', String)
+        ],
+        'dqs': [
+            (r'"', String, '#pop')
+        ],
+        'sqs': [
+            (r"'", String, '#pop')
+        ],
+        'tdqs': [
+            (r'"""', String, '#pop')
+        ],
+        'tsqs': [
+            (r"'''", String, '#pop')
+        ],
+    }
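The newly bundled DgLexer registers the dg alias and the *.dg filename pattern. It can be imported directly from pygments.lexers.agile, or looked up by alias once the lexer mapping is regenerated; the dg snippet below is only a plausible sample, not taken from the changeset:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.agile import DgLexer

# Render a tiny dg snippet as ANSI-colored text on stdout.
code = 'print "hello from dg"'
print(highlight(code, DgLexer(), TerminalFormatter()))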
