ThirdParty/Pygments/pygments/lexers/templates.py

changeset 4172
4f20dba37ab6
parent 3145
a9de05d4a22f
child 4697
c2e9bf425554
equal deleted inserted replaced
4170:8bc578136279 4172:4f20dba37ab6
3 pygments.lexers.templates 3 pygments.lexers.templates
4 ~~~~~~~~~~~~~~~~~~~~~~~~~ 4 ~~~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 Lexers for various template engines' markup. 6 Lexers for various template engines' markup.
7 7
8 :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. 8 :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
9 :license: BSD, see LICENSE for details. 9 :license: BSD, see LICENSE for details.
10 """ 10 """
11 11
12 from __future__ import unicode_literals
13
14 import re 12 import re
15 13
16 from pygments.lexers.web import \ 14 from pygments.lexers.html import HtmlLexer, XmlLexer
17 PhpLexer, HtmlLexer, XmlLexer, JavascriptLexer, CssLexer, LassoLexer 15 from pygments.lexers.javascript import JavascriptLexer, LassoLexer
18 from pygments.lexers.agile import PythonLexer, PerlLexer 16 from pygments.lexers.css import CssLexer
19 from pygments.lexers.compiled import JavaLexer 17 from pygments.lexers.php import PhpLexer
20 from pygments.lexers.jvm import TeaLangLexer 18 from pygments.lexers.python import PythonLexer
19 from pygments.lexers.perl import PerlLexer
20 from pygments.lexers.jvm import JavaLexer, TeaLangLexer
21 from pygments.lexers.data import YamlLexer
21 from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \ 22 from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
22 include, using, this 23 include, using, this, default, combined
23 from pygments.token import Error, Punctuation, \ 24 from pygments.token import Error, Punctuation, Whitespace, \
24 Text, Comment, Operator, Keyword, Name, String, Number, Other, Token 25 Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
25 from pygments.util import html_doctype_matches, looks_like_xml 26 from pygments.util import html_doctype_matches, looks_like_xml
26 27
27 __all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer', 28 __all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
28 'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer', 29 'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
29 'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer', 30 'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
36 'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer', 37 'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
37 'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer', 38 'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
38 'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer', 39 'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
39 'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer', 40 'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
40 'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer', 41 'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
41 'ColdfusionHtmlLexer', 'VelocityLexer', 'VelocityHtmlLexer', 42 'ColdfusionHtmlLexer', 'ColdfusionCFCLexer', 'VelocityLexer',
42 'VelocityXmlLexer', 'SspLexer', 'TeaTemplateLexer', 'LassoHtmlLexer', 43 'VelocityHtmlLexer', 'VelocityXmlLexer', 'SspLexer',
43 'LassoXmlLexer', 'LassoCssLexer', 'LassoJavascriptLexer'] 44 'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer',
45 'LassoCssLexer', 'LassoJavascriptLexer', 'HandlebarsLexer',
46 'HandlebarsHtmlLexer', 'YamlJinjaLexer', 'LiquidLexer',
47 'TwigLexer', 'TwigHtmlLexer']
44 48
45 49
46 class ErbLexer(Lexer): 50 class ErbLexer(Lexer):
47 """ 51 """
48 Generic `ERB <http://ruby-doc.org/core/classes/ERB.html>`_ (Ruby Templating) 52 Generic `ERB <http://ruby-doc.org/core/classes/ERB.html>`_ (Ruby Templating)
59 mimetypes = ['application/x-ruby-templating'] 63 mimetypes = ['application/x-ruby-templating']
60 64
61 _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M) 65 _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)
62 66
63 def __init__(self, **options): 67 def __init__(self, **options):
64 from pygments.lexers.agile import RubyLexer 68 from pygments.lexers.ruby import RubyLexer
65 self.ruby_lexer = RubyLexer(**options) 69 self.ruby_lexer = RubyLexer(**options)
66 Lexer.__init__(self, **options) 70 Lexer.__init__(self, **options)
67 71
68 def get_tokens_unprocessed(self, text): 72 def get_tokens_unprocessed(self, text):
69 """ 73 """
102 yield idx, Comment.Preproc, tag 106 yield idx, Comment.Preproc, tag
103 idx += len(tag) 107 idx += len(tag)
104 data = tokens.pop() 108 data = tokens.pop()
105 r_idx = 0 109 r_idx = 0
106 for r_idx, r_token, r_value in \ 110 for r_idx, r_token, r_value in \
107 self.ruby_lexer.get_tokens_unprocessed(data): 111 self.ruby_lexer.get_tokens_unprocessed(data):
108 yield r_idx + idx, r_token, r_value 112 yield r_idx + idx, r_token, r_value
109 idx += len(data) 113 idx += len(data)
110 state = 2 114 state = 2
111 elif tag in ('%>', '-%>'): 115 elif tag in ('%>', '-%>'):
112 yield idx, Error, tag 116 yield idx, Error, tag
115 # % raw ruby statements 119 # % raw ruby statements
116 else: 120 else:
117 yield idx, Comment.Preproc, tag[0] 121 yield idx, Comment.Preproc, tag[0]
118 r_idx = 0 122 r_idx = 0
119 for r_idx, r_token, r_value in \ 123 for r_idx, r_token, r_value in \
120 self.ruby_lexer.get_tokens_unprocessed(tag[1:]): 124 self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
121 yield idx + 1 + r_idx, r_token, r_value 125 yield idx + 1 + r_idx, r_token, r_value
122 idx += len(tag) 126 idx += len(tag)
123 state = 0 127 state = 0
124 # block ends 128 # block ends
125 elif state == 2: 129 elif state == 2:
159 (r'(\{)(\*.*?\*)(\})', 163 (r'(\{)(\*.*?\*)(\})',
160 bygroups(Comment.Preproc, Comment, Comment.Preproc)), 164 bygroups(Comment.Preproc, Comment, Comment.Preproc)),
161 (r'(\{php\})(.*?)(\{/php\})', 165 (r'(\{php\})(.*?)(\{/php\})',
162 bygroups(Comment.Preproc, using(PhpLexer, startinline=True), 166 bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
163 Comment.Preproc)), 167 Comment.Preproc)),
164 (r'(\{)(/?[a-zA-Z_][a-zA-Z0-9_]*)(\s*)', 168 (r'(\{)(/?[a-zA-Z_]\w*)(\s*)',
165 bygroups(Comment.Preproc, Name.Function, Text), 'smarty'), 169 bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
166 (r'\{', Comment.Preproc, 'smarty') 170 (r'\{', Comment.Preproc, 'smarty')
167 ], 171 ],
168 'smarty': [ 172 'smarty': [
169 (r'\s+', Text), 173 (r'\s+', Text),
174 (r'\{', Comment.Preproc, '#push'),
170 (r'\}', Comment.Preproc, '#pop'), 175 (r'\}', Comment.Preproc, '#pop'),
171 (r'#[a-zA-Z_][a-zA-Z0-9_]*#', Name.Variable), 176 (r'#[a-zA-Z_]\w*#', Name.Variable),
172 (r'\$[a-zA-Z_][a-zA-Z0-9_]*(\.[a-zA-Z0-9_]+)*', Name.Variable), 177 (r'\$[a-zA-Z_]\w*(\.\w+)*', Name.Variable),
173 (r'[~!%^&*()+=|\[\]:;,.<>/?{}@-]', Operator), 178 (r'[~!%^&*()+=|\[\]:;,.<>/?@-]', Operator),
174 (r'(true|false|null)\b', Keyword.Constant), 179 (r'(true|false|null)\b', Keyword.Constant),
175 (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|" 180 (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
176 r"0[xX][0-9a-fA-F]+[Ll]?", Number), 181 r"0[xX][0-9a-fA-F]+[Ll]?", Number),
177 (r'"(\\\\|\\"|[^"])*"', String.Double), 182 (r'"(\\\\|\\"|[^"])*"', String.Double),
178 (r"'(\\\\|\\'|[^'])*'", String.Single), 183 (r"'(\\\\|\\'|[^'])*'", String.Single),
179 (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Attribute) 184 (r'[a-zA-Z_]\w*', Name.Attribute)
180 ] 185 ]
181 } 186 }
182 187
183 def analyse_text(text): 188 def analyse_text(text):
184 rv = 0.0 189 rv = 0.0
201 data is left untouched by the lexer. 206 data is left untouched by the lexer.
202 """ 207 """
203 208
204 name = 'Velocity' 209 name = 'Velocity'
205 aliases = ['velocity'] 210 aliases = ['velocity']
206 filenames = ['*.vm','*.fhtml'] 211 filenames = ['*.vm', '*.fhtml']
207 212
208 flags = re.MULTILINE | re.DOTALL 213 flags = re.MULTILINE | re.DOTALL
209 214
210 identifier = r'[a-zA-Z_][a-zA-Z0-9_]*' 215 identifier = r'[a-zA-Z_]\w*'
211 216
212 tokens = { 217 tokens = {
213 'root': [ 218 'root': [
214 (r'[^{#$]+', Other), 219 (r'[^{#$]+', Other),
215 (r'(#)(\*.*?\*)(#)', 220 (r'(#)(\*.*?\*)(#)',
227 (identifier, Name.Variable), 232 (identifier, Name.Variable),
228 (r'\(', Punctuation, 'funcparams'), 233 (r'\(', Punctuation, 'funcparams'),
229 (r'(\.)(' + identifier + r')', 234 (r'(\.)(' + identifier + r')',
230 bygroups(Punctuation, Name.Variable), '#push'), 235 bygroups(Punctuation, Name.Variable), '#push'),
231 (r'\}', Punctuation, '#pop'), 236 (r'\}', Punctuation, '#pop'),
232 (r'', Other, '#pop') 237 default('#pop')
233 ], 238 ],
234 'directiveparams': [ 239 'directiveparams': [
235 (r'(&&|\|\||==?|!=?|[-<>+*%&\|\^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b', 240 (r'(&&|\|\||==?|!=?|[-<>+*%&|^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
236 Operator), 241 Operator),
237 (r'\[', Operator, 'rangeoperator'), 242 (r'\[', Operator, 'rangeoperator'),
238 (r'\b' + identifier + r'\b', Name.Function), 243 (r'\b' + identifier + r'\b', Name.Function),
239 include('funcparams') 244 include('funcparams')
240 ], 245 ],
251 (r"'(\\\\|\\'|[^'])*'", String.Single), 256 (r"'(\\\\|\\'|[^'])*'", String.Single),
252 (r"0[xX][0-9a-fA-F]+[Ll]?", Number), 257 (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
253 (r"\b[0-9]+\b", Number), 258 (r"\b[0-9]+\b", Number),
254 (r'(true|false|null)\b', Keyword.Constant), 259 (r'(true|false|null)\b', Keyword.Constant),
255 (r'\(', Punctuation, '#push'), 260 (r'\(', Punctuation, '#push'),
256 (r'\)', Punctuation, '#pop') 261 (r'\)', Punctuation, '#pop'),
262 (r'\[', Punctuation, '#push'),
263 (r'\]', Punctuation, '#pop'),
257 ] 264 ]
258 } 265 }
259 266
260 def analyse_text(text): 267 def analyse_text(text):
261 rv = 0.0 268 rv = 0.0
263 rv += 0.25 270 rv += 0.25
264 if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text): 271 if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text):
265 rv += 0.15 272 rv += 0.15
266 if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text): 273 if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
267 rv += 0.15 274 rv += 0.15
268 if re.search(r'\$\{?[a-zA-Z_][a-zA-Z0-9_]*(\([^)]*\))?' 275 if re.search(r'\$\{?[a-zA-Z_]\w*(\([^)]*\))?'
269 r'(\.[a-zA-Z0-9_]+(\([^)]*\))?)*\}?', text): 276 r'(\.\w+(\([^)]*\))?)*\}?', text):
270 rv += 0.01 277 rv += 0.01
271 return rv 278 return rv
272 279
273 280
274 class VelocityHtmlLexer(DelegatingLexer): 281 class VelocityHtmlLexer(DelegatingLexer):
275 """ 282 """
276 Subclass of the `VelocityLexer` that highlights unlexer data 283 Subclass of the `VelocityLexer` that highlights unlexed data
277 with the `HtmlLexer`. 284 with the `HtmlLexer`.
278 285
279 """ 286 """
280 287
281 name = 'HTML+Velocity' 288 name = 'HTML+Velocity'
282 aliases = ['html+velocity'] 289 aliases = ['html+velocity']
283 alias_filenames = ['*.html','*.fhtml'] 290 alias_filenames = ['*.html', '*.fhtml']
284 mimetypes = ['text/html+velocity'] 291 mimetypes = ['text/html+velocity']
285 292
286 def __init__(self, **options): 293 def __init__(self, **options):
287 super(VelocityHtmlLexer, self).__init__(HtmlLexer, VelocityLexer, 294 super(VelocityHtmlLexer, self).__init__(HtmlLexer, VelocityLexer,
288 **options) 295 **options)
289 296
290 297
291 class VelocityXmlLexer(DelegatingLexer): 298 class VelocityXmlLexer(DelegatingLexer):
292 """ 299 """
293 Subclass of the `VelocityLexer` that highlights unlexer data 300 Subclass of the `VelocityLexer` that highlights unlexed data
294 with the `XmlLexer`. 301 with the `XmlLexer`.
295 302
296 """ 303 """
297 304
298 name = 'XML+Velocity' 305 name = 'XML+Velocity'
299 aliases = ['xml+velocity'] 306 aliases = ['xml+velocity']
300 alias_filenames = ['*.xml','*.vm'] 307 alias_filenames = ['*.xml', '*.vm']
301 mimetypes = ['application/xml+velocity'] 308 mimetypes = ['application/xml+velocity']
302 309
303 def __init__(self, **options): 310 def __init__(self, **options):
304 super(VelocityXmlLexer, self).__init__(XmlLexer, VelocityLexer, 311 super(VelocityXmlLexer, self).__init__(XmlLexer, VelocityLexer,
305 **options) 312 **options)
306 313
307 def analyse_text(text): 314 def analyse_text(text):
308 rv = VelocityLexer.analyse_text(text) - 0.01 315 rv = VelocityLexer.analyse_text(text) - 0.01
309 if looks_like_xml(text): 316 if looks_like_xml(text):
310 rv += 0.5 317 rv += 0.4
311 return rv 318 return rv
312 319
313 320
314 class DjangoLexer(RegexLexer): 321 class DjangoLexer(RegexLexer):
315 """ 322 """
343 r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})', 350 r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
344 bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc, 351 bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
345 Text, Comment.Preproc, Text, Keyword, Text, 352 Text, Comment.Preproc, Text, Keyword, Text,
346 Comment.Preproc)), 353 Comment.Preproc)),
347 # filter blocks 354 # filter blocks
348 (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)', 355 (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)',
349 bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function), 356 bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
350 'block'), 357 'block'),
351 (r'(\{%)(-?\s*)([a-zA-Z_][a-zA-Z0-9_]*)', 358 (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
352 bygroups(Comment.Preproc, Text, Keyword), 'block'), 359 bygroups(Comment.Preproc, Text, Keyword), 'block'),
353 (r'\{', Other) 360 (r'\{', Other)
354 ], 361 ],
355 'varnames': [ 362 'varnames': [
356 (r'(\|)(\s*)([a-zA-Z_][a-zA-Z0-9_]*)', 363 (r'(\|)(\s*)([a-zA-Z_]\w*)',
357 bygroups(Operator, Text, Name.Function)), 364 bygroups(Operator, Text, Name.Function)),
358 (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_][a-zA-Z0-9_]*)', 365 (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)',
359 bygroups(Keyword, Text, Keyword, Text, Name.Function)), 366 bygroups(Keyword, Text, Keyword, Text, Name.Function)),
360 (r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo), 367 (r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
361 (r'(in|as|reversed|recursive|not|and|or|is|if|else|import|' 368 (r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
362 r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b', 369 r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
363 Keyword), 370 Keyword),
364 (r'(loop|block|super|forloop)\b', Name.Builtin), 371 (r'(loop|block|super|forloop)\b', Name.Builtin),
365 (r'[a-zA-Z][a-zA-Z0-9_-]*', Name.Variable), 372 (r'[a-zA-Z][\w-]*', Name.Variable),
366 (r'\.[a-zA-Z0-9_]+', Name.Variable), 373 (r'\.\w+', Name.Variable),
367 (r':?"(\\\\|\\"|[^"])*"', String.Double), 374 (r':?"(\\\\|\\"|[^"])*"', String.Double),
368 (r":?'(\\\\|\\'|[^'])*'", String.Single), 375 (r":?'(\\\\|\\'|[^'])*'", String.Single),
369 (r'([{}()\[\]+\-*/,:~]|[><=]=?)', Operator), 376 (r'([{}()\[\]+\-*/,:~]|[><=]=?)', Operator),
370 (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|" 377 (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
371 r"0[xX][0-9a-fA-F]+[Ll]?", Number), 378 r"0[xX][0-9a-fA-F]+[Ll]?", Number),
397 class MyghtyLexer(RegexLexer): 404 class MyghtyLexer(RegexLexer):
398 """ 405 """
399 Generic `myghty templates`_ lexer. Code that isn't Myghty 406 Generic `myghty templates`_ lexer. Code that isn't Myghty
400 markup is yielded as `Token.Other`. 407 markup is yielded as `Token.Other`.
401 408
402 *New in Pygments 0.6.* 409 .. versionadded:: 0.6
403 410
404 .. _myghty templates: http://www.myghty.org/ 411 .. _myghty templates: http://www.myghty.org/
405 """ 412 """
406 413
407 name = 'Myghty' 414 name = 'Myghty'
442 } 449 }
443 450
444 451
445 class MyghtyHtmlLexer(DelegatingLexer): 452 class MyghtyHtmlLexer(DelegatingLexer):
446 """ 453 """
447 Subclass of the `MyghtyLexer` that highlights unlexer data 454 Subclass of the `MyghtyLexer` that highlights unlexed data
448 with the `HtmlLexer`. 455 with the `HtmlLexer`.
449 456
450 *New in Pygments 0.6.* 457 .. versionadded:: 0.6
451 """ 458 """
452 459
453 name = 'HTML+Myghty' 460 name = 'HTML+Myghty'
454 aliases = ['html+myghty'] 461 aliases = ['html+myghty']
455 mimetypes = ['text/html+myghty'] 462 mimetypes = ['text/html+myghty']
459 **options) 466 **options)
460 467
461 468
462 class MyghtyXmlLexer(DelegatingLexer): 469 class MyghtyXmlLexer(DelegatingLexer):
463 """ 470 """
464 Subclass of the `MyghtyLexer` that highlights unlexer data 471 Subclass of the `MyghtyLexer` that highlights unlexed data
465 with the `XmlLexer`. 472 with the `XmlLexer`.
466 473
467 *New in Pygments 0.6.* 474 .. versionadded:: 0.6
468 """ 475 """
469 476
470 name = 'XML+Myghty' 477 name = 'XML+Myghty'
471 aliases = ['xml+myghty'] 478 aliases = ['xml+myghty']
472 mimetypes = ['application/xml+myghty'] 479 mimetypes = ['application/xml+myghty']
476 **options) 483 **options)
477 484
478 485
479 class MyghtyJavascriptLexer(DelegatingLexer): 486 class MyghtyJavascriptLexer(DelegatingLexer):
480 """ 487 """
481 Subclass of the `MyghtyLexer` that highlights unlexer data 488 Subclass of the `MyghtyLexer` that highlights unlexed data
482 with the `JavascriptLexer`. 489 with the `JavascriptLexer`.
483 490
484 *New in Pygments 0.6.* 491 .. versionadded:: 0.6
485 """ 492 """
486 493
487 name = 'JavaScript+Myghty' 494 name = 'JavaScript+Myghty'
488 aliases = ['js+myghty', 'javascript+myghty'] 495 aliases = ['js+myghty', 'javascript+myghty']
489 mimetypes = ['application/x-javascript+myghty', 496 mimetypes = ['application/x-javascript+myghty',
495 MyghtyLexer, **options) 502 MyghtyLexer, **options)
496 503
497 504
498 class MyghtyCssLexer(DelegatingLexer): 505 class MyghtyCssLexer(DelegatingLexer):
499 """ 506 """
500 Subclass of the `MyghtyLexer` that highlights unlexer data 507 Subclass of the `MyghtyLexer` that highlights unlexed data
501 with the `CssLexer`. 508 with the `CssLexer`.
502 509
503 *New in Pygments 0.6.* 510 .. versionadded:: 0.6
504 """ 511 """
505 512
506 name = 'CSS+Myghty' 513 name = 'CSS+Myghty'
507 aliases = ['css+myghty'] 514 aliases = ['css+myghty']
508 mimetypes = ['text/css+myghty'] 515 mimetypes = ['text/css+myghty']
517 Generic `mason templates`_ lexer. Stolen from Myghty lexer. Code that isn't 524 Generic `mason templates`_ lexer. Stolen from Myghty lexer. Code that isn't
518 Mason markup is HTML. 525 Mason markup is HTML.
519 526
520 .. _mason templates: http://www.masonhq.com/ 527 .. _mason templates: http://www.masonhq.com/
521 528
522 *New in Pygments 1.4.* 529 .. versionadded:: 1.4
523 """ 530 """
524 name = 'Mason' 531 name = 'Mason'
525 aliases = ['mason'] 532 aliases = ['mason']
526 filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'] 533 filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
527 mimetypes = ['application/x-mason'] 534 mimetypes = ['application/x-mason']
570 class MakoLexer(RegexLexer): 577 class MakoLexer(RegexLexer):
571 """ 578 """
572 Generic `mako templates`_ lexer. Code that isn't Mako 579 Generic `mako templates`_ lexer. Code that isn't Mako
573 markup is yielded as `Token.Other`. 580 markup is yielded as `Token.Other`.
574 581
575 *New in Pygments 0.7.* 582 .. versionadded:: 0.7
576 583
577 .. _mako templates: http://www.makotemplates.org/ 584 .. _mako templates: http://www.makotemplates.org/
578 """ 585 """
579 586
580 name = 'Mako' 587 name = 'Mako'
589 (r'(\s*)(%)([^\n]*)(\n|\Z)', 596 (r'(\s*)(%)([^\n]*)(\n|\Z)',
590 bygroups(Text, Comment.Preproc, using(PythonLexer), Other)), 597 bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
591 (r'(\s*)(##[^\n]*)(\n|\Z)', 598 (r'(\s*)(##[^\n]*)(\n|\Z)',
592 bygroups(Text, Comment.Preproc, Other)), 599 bygroups(Text, Comment.Preproc, Other)),
593 (r'(?s)<%doc>.*?</%doc>', Comment.Preproc), 600 (r'(?s)<%doc>.*?</%doc>', Comment.Preproc),
594 (r'(<%)([\w\.\:]+)', 601 (r'(<%)([\w.:]+)',
595 bygroups(Comment.Preproc, Name.Builtin), 'tag'), 602 bygroups(Comment.Preproc, Name.Builtin), 'tag'),
596 (r'(</%)([\w\.\:]+)(>)', 603 (r'(</%)([\w.:]+)(>)',
597 bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)), 604 bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
598 (r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'), 605 (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
599 (r'(<%(?:!?))(.*?)(%>)(?s)', 606 (r'(<%(?:!?))(.*?)(%>)(?s)',
600 bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)), 607 bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
601 (r'(\$\{)(.*?)(\})', 608 (r'(\$\{)(.*?)(\})',
602 bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)), 609 bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
603 (r'''(?sx) 610 (r'''(?sx)
638 class MakoHtmlLexer(DelegatingLexer): 645 class MakoHtmlLexer(DelegatingLexer):
639 """ 646 """
640 Subclass of the `MakoLexer` that highlights unlexed data 647 Subclass of the `MakoLexer` that highlights unlexed data
641 with the `HtmlLexer`. 648 with the `HtmlLexer`.
642 649
643 *New in Pygments 0.7.* 650 .. versionadded:: 0.7
644 """ 651 """
645 652
646 name = 'HTML+Mako' 653 name = 'HTML+Mako'
647 aliases = ['html+mako'] 654 aliases = ['html+mako']
648 mimetypes = ['text/html+mako'] 655 mimetypes = ['text/html+mako']
649 656
650 def __init__(self, **options): 657 def __init__(self, **options):
651 super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer, 658 super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
652 **options) 659 **options)
660
653 661
654 class MakoXmlLexer(DelegatingLexer): 662 class MakoXmlLexer(DelegatingLexer):
655 """ 663 """
656 Subclass of the `MakoLexer` that highlights unlexer data 664 Subclass of the `MakoLexer` that highlights unlexed data
657 with the `XmlLexer`. 665 with the `XmlLexer`.
658 666
659 *New in Pygments 0.7.* 667 .. versionadded:: 0.7
660 """ 668 """
661 669
662 name = 'XML+Mako' 670 name = 'XML+Mako'
663 aliases = ['xml+mako'] 671 aliases = ['xml+mako']
664 mimetypes = ['application/xml+mako'] 672 mimetypes = ['application/xml+mako']
665 673
666 def __init__(self, **options): 674 def __init__(self, **options):
667 super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer, 675 super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
668 **options) 676 **options)
677
669 678
670 class MakoJavascriptLexer(DelegatingLexer): 679 class MakoJavascriptLexer(DelegatingLexer):
671 """ 680 """
672 Subclass of the `MakoLexer` that highlights unlexer data 681 Subclass of the `MakoLexer` that highlights unlexed data
673 with the `JavascriptLexer`. 682 with the `JavascriptLexer`.
674 683
675 *New in Pygments 0.7.* 684 .. versionadded:: 0.7
676 """ 685 """
677 686
678 name = 'JavaScript+Mako' 687 name = 'JavaScript+Mako'
679 aliases = ['js+mako', 'javascript+mako'] 688 aliases = ['js+mako', 'javascript+mako']
680 mimetypes = ['application/x-javascript+mako', 689 mimetypes = ['application/x-javascript+mako',
681 'text/x-javascript+mako', 690 'text/x-javascript+mako',
682 'text/javascript+mako'] 691 'text/javascript+mako']
683 692
684 def __init__(self, **options): 693 def __init__(self, **options):
685 super(MakoJavascriptLexer, self).__init__(JavascriptLexer, 694 super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
686 MakoLexer, **options) 695 MakoLexer, **options)
696
687 697
688 class MakoCssLexer(DelegatingLexer): 698 class MakoCssLexer(DelegatingLexer):
689 """ 699 """
690 Subclass of the `MakoLexer` that highlights unlexer data 700 Subclass of the `MakoLexer` that highlights unlexed data
691 with the `CssLexer`. 701 with the `CssLexer`.
692 702
693 *New in Pygments 0.7.* 703 .. versionadded:: 0.7
694 """ 704 """
695 705
696 name = 'CSS+Mako' 706 name = 'CSS+Mako'
697 aliases = ['css+mako'] 707 aliases = ['css+mako']
698 mimetypes = ['text/css+mako'] 708 mimetypes = ['text/css+mako']
699 709
700 def __init__(self, **options): 710 def __init__(self, **options):
701 super(MakoCssLexer, self).__init__(CssLexer, MakoLexer, 711 super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
702 **options) 712 **options)
703 713
704 714
705 # Genshi and Cheetah lexers courtesy of Matt Good. 715 # Genshi and Cheetah lexers courtesy of Matt Good.
706 716
707 class CheetahPythonLexer(Lexer): 717 class CheetahPythonLexer(Lexer):
741 (r'#slurp$', Comment.Preproc), 751 (r'#slurp$', Comment.Preproc),
742 (r'(#[a-zA-Z]+)([^#\n]*)(#|$)', 752 (r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
743 (bygroups(Comment.Preproc, using(CheetahPythonLexer), 753 (bygroups(Comment.Preproc, using(CheetahPythonLexer),
744 Comment.Preproc))), 754 Comment.Preproc))),
745 # TODO support other Python syntax like $foo['bar'] 755 # TODO support other Python syntax like $foo['bar']
746 (r'(\$)([a-zA-Z_][a-zA-Z0-9_\.]*[a-zA-Z0-9_])', 756 (r'(\$)([a-zA-Z_][\w.]*\w)',
747 bygroups(Comment.Preproc, using(CheetahPythonLexer))), 757 bygroups(Comment.Preproc, using(CheetahPythonLexer))),
748 (r'(\$\{!?)(.*?)(\})(?s)', 758 (r'(\$\{!?)(.*?)(\})(?s)',
749 bygroups(Comment.Preproc, using(CheetahPythonLexer), 759 bygroups(Comment.Preproc, using(CheetahPythonLexer),
750 Comment.Preproc)), 760 Comment.Preproc)),
751 (r'''(?sx) 761 (r'''(?sx)
752 (.+?) # anything, followed by: 762 (.+?) # anything, followed by:
753 (?: 763 (?:
754 (?=[#][#a-zA-Z]*) | # an eval comment 764 (?=\#[#a-zA-Z]*) | # an eval comment
755 (?=\$[a-zA-Z_{]) | # a substitution 765 (?=\$[a-zA-Z_{]) | # a substitution
756 \Z # end of string 766 \Z # end of string
757 ) 767 )
758 ''', Other), 768 ''', Other),
759 (r'\s+', Text), 769 (r'\s+', Text),
761 } 771 }
762 772
763 773
764 class CheetahHtmlLexer(DelegatingLexer): 774 class CheetahHtmlLexer(DelegatingLexer):
765 """ 775 """
766 Subclass of the `CheetahLexer` that highlights unlexer data 776 Subclass of the `CheetahLexer` that highlights unlexed data
767 with the `HtmlLexer`. 777 with the `HtmlLexer`.
768 """ 778 """
769 779
770 name = 'HTML+Cheetah' 780 name = 'HTML+Cheetah'
771 aliases = ['html+cheetah', 'html+spitfire'] 781 aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
772 mimetypes = ['text/html+cheetah', 'text/html+spitfire'] 782 mimetypes = ['text/html+cheetah', 'text/html+spitfire']
773 783
774 def __init__(self, **options): 784 def __init__(self, **options):
775 super(CheetahHtmlLexer, self).__init__(HtmlLexer, CheetahLexer, 785 super(CheetahHtmlLexer, self).__init__(HtmlLexer, CheetahLexer,
776 **options) 786 **options)
777 787
778 788
779 class CheetahXmlLexer(DelegatingLexer): 789 class CheetahXmlLexer(DelegatingLexer):
780 """ 790 """
781 Subclass of the `CheetahLexer` that highlights unlexer data 791 Subclass of the `CheetahLexer` that highlights unlexed data
782 with the `XmlLexer`. 792 with the `XmlLexer`.
783 """ 793 """
784 794
785 name = 'XML+Cheetah' 795 name = 'XML+Cheetah'
786 aliases = ['xml+cheetah', 'xml+spitfire'] 796 aliases = ['xml+cheetah', 'xml+spitfire']
791 **options) 801 **options)
792 802
793 803
794 class CheetahJavascriptLexer(DelegatingLexer): 804 class CheetahJavascriptLexer(DelegatingLexer):
795 """ 805 """
796 Subclass of the `CheetahLexer` that highlights unlexer data 806 Subclass of the `CheetahLexer` that highlights unlexed data
797 with the `JavascriptLexer`. 807 with the `JavascriptLexer`.
798 """ 808 """
799 809
800 name = 'JavaScript+Cheetah' 810 name = 'JavaScript+Cheetah'
801 aliases = ['js+cheetah', 'javascript+cheetah', 811 aliases = ['js+cheetah', 'javascript+cheetah',
822 aliases = ['genshitext'] 832 aliases = ['genshitext']
823 mimetypes = ['application/x-genshi-text', 'text/x-genshi'] 833 mimetypes = ['application/x-genshi-text', 'text/x-genshi']
824 834
825 tokens = { 835 tokens = {
826 'root': [ 836 'root': [
827 (r'[^#\$\s]+', Other), 837 (r'[^#$\s]+', Other),
828 (r'^(\s*)(##.*)$', bygroups(Text, Comment)), 838 (r'^(\s*)(##.*)$', bygroups(Text, Comment)),
829 (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'), 839 (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
830 include('variable'), 840 include('variable'),
831 (r'[#\$\s]', Other), 841 (r'[#$\s]', Other),
832 ], 842 ],
833 'directive': [ 843 'directive': [
834 (r'\n', Text, '#pop'), 844 (r'\n', Text, '#pop'),
835 (r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'), 845 (r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
836 (r'(choose|when|with)([^\S\n]+)(.*)', 846 (r'(choose|when|with)([^\S\n]+)(.*)',
839 (r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'), 849 (r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
840 ], 850 ],
841 'variable': [ 851 'variable': [
842 (r'(?<!\$)(\$\{)(.+?)(\})', 852 (r'(?<!\$)(\$\{)(.+?)(\})',
843 bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)), 853 bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
844 (r'(?<!\$)(\$)([a-zA-Z_][a-zA-Z0-9_\.]*)', 854 (r'(?<!\$)(\$)([a-zA-Z_][\w.]*)',
845 Name.Variable), 855 Name.Variable),
846 ] 856 ]
847 } 857 }
848 858
849 859
855 865
856 flags = re.DOTALL 866 flags = re.DOTALL
857 867
858 tokens = { 868 tokens = {
859 'root': [ 869 'root': [
860 (r'[^<\$]+', Other), 870 (r'[^<$]+', Other),
861 (r'(<\?python)(.*?)(\?>)', 871 (r'(<\?python)(.*?)(\?>)',
862 bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)), 872 bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
863 # yield style and script blocks as Other 873 # yield style and script blocks as Other
864 (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other), 874 (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
865 (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'), 875 (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
866 (r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'), 876 (r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'),
867 include('variable'), 877 include('variable'),
868 (r'[<\$]', Other), 878 (r'[<$]', Other),
869 ], 879 ],
870 'pytag': [ 880 'pytag': [
871 (r'\s+', Text), 881 (r'\s+', Text),
872 (r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'pyattr'), 882 (r'[\w:-]+\s*=', Name.Attribute, 'pyattr'),
873 (r'/?\s*>', Name.Tag, '#pop'), 883 (r'/?\s*>', Name.Tag, '#pop'),
874 ], 884 ],
875 'pyattr': [ 885 'pyattr': [
876 ('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'), 886 ('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
877 ("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'), 887 ("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
878 (r'[^\s>]+', String, '#pop'), 888 (r'[^\s>]+', String, '#pop'),
879 ], 889 ],
880 'tag': [ 890 'tag': [
881 (r'\s+', Text), 891 (r'\s+', Text),
882 (r'py:[a-zA-Z0-9_-]+\s*=', Name.Attribute, 'pyattr'), 892 (r'py:[\w-]+\s*=', Name.Attribute, 'pyattr'),
883 (r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'attr'), 893 (r'[\w:-]+\s*=', Name.Attribute, 'attr'),
884 (r'/?\s*>', Name.Tag, '#pop'), 894 (r'/?\s*>', Name.Tag, '#pop'),
885 ], 895 ],
886 'attr': [ 896 'attr': [
887 ('"', String, 'attr-dstring'), 897 ('"', String, 'attr-dstring'),
888 ("'", String, 'attr-sstring'), 898 ("'", String, 'attr-sstring'),
903 include('variable') 913 include('variable')
904 ], 914 ],
905 'variable': [ 915 'variable': [
906 (r'(?<!\$)(\$\{)(.+?)(\})', 916 (r'(?<!\$)(\$\{)(.+?)(\})',
907 bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)), 917 bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
908 (r'(?<!\$)(\$)([a-zA-Z_][a-zA-Z0-9_\.]*)', 918 (r'(?<!\$)(\$)([a-zA-Z_][\w\.]*)',
909 Name.Variable), 919 Name.Variable),
910 ] 920 ]
911 } 921 }
912 922
913 923
1110 return rv 1120 return rv
1111 1121
1112 1122
1113 class XmlPhpLexer(DelegatingLexer): 1123 class XmlPhpLexer(DelegatingLexer):
1114 """ 1124 """
1115 Subclass of `PhpLexer` that higlights unhandled data with the `XmlLexer`. 1125 Subclass of `PhpLexer` that highlights unhandled data with the `XmlLexer`.
1116 """ 1126 """
1117 1127
1118 name = 'XML+PHP' 1128 name = 'XML+PHP'
1119 aliases = ['xml+php'] 1129 aliases = ['xml+php']
1120 alias_filenames = ['*.xml', '*.php', '*.php[345]'] 1130 alias_filenames = ['*.xml', '*.php', '*.php[345]']
1168 return PhpLexer.analyse_text(text) 1178 return PhpLexer.analyse_text(text)
1169 1179
1170 1180
1171 class HtmlSmartyLexer(DelegatingLexer): 1181 class HtmlSmartyLexer(DelegatingLexer):
1172 """ 1182 """
1173 Subclass of the `SmartyLexer` that highighlights unlexed data with the 1183 Subclass of the `SmartyLexer` that highlights unlexed data with the
1174 `HtmlLexer`. 1184 `HtmlLexer`.
1175 1185
1176 Nested Javascript and CSS is highlighted too. 1186 Nested Javascript and CSS is highlighted too.
1177 """ 1187 """
1178 1188
1251 return SmartyLexer.analyse_text(text) - 0.05 1261 return SmartyLexer.analyse_text(text) - 0.05
1252 1262
1253 1263
1254 class HtmlDjangoLexer(DelegatingLexer): 1264 class HtmlDjangoLexer(DelegatingLexer):
1255 """ 1265 """
1256 Subclass of the `DjangoLexer` that highighlights unlexed data with the 1266 Subclass of the `DjangoLexer` that highlights unlexed data with the
1257 `HtmlLexer`. 1267 `HtmlLexer`.
1258 1268
1259 Nested Javascript and CSS is highlighted too. 1269 Nested Javascript and CSS is highlighted too.
1260 """ 1270 """
1261 1271
1262 name = 'HTML+Django/Jinja' 1272 name = 'HTML+Django/Jinja'
1263 aliases = ['html+django', 'html+jinja'] 1273 aliases = ['html+django', 'html+jinja', 'htmldjango']
1264 alias_filenames = ['*.html', '*.htm', '*.xhtml'] 1274 alias_filenames = ['*.html', '*.htm', '*.xhtml']
1265 mimetypes = ['text/html+django', 'text/html+jinja'] 1275 mimetypes = ['text/html+django', 'text/html+jinja']
1266 1276
1267 def __init__(self, **options): 1277 def __init__(self, **options):
1268 super(HtmlDjangoLexer, self).__init__(HtmlLexer, DjangoLexer, **options) 1278 super(HtmlDjangoLexer, self).__init__(HtmlLexer, DjangoLexer, **options)
1341 class JspRootLexer(RegexLexer): 1351 class JspRootLexer(RegexLexer):
1342 """ 1352 """
1343 Base for the `JspLexer`. Yields `Token.Other` for area outside of 1353 Base for the `JspLexer`. Yields `Token.Other` for area outside of
1344 JSP tags. 1354 JSP tags.
1345 1355
1346 *New in Pygments 0.7.* 1356 .. versionadded:: 0.7
1347 """ 1357 """
1348 1358
1349 tokens = { 1359 tokens = {
1350 'root': [ 1360 'root': [
1351 (r'<%\S?', Keyword, 'sec'), 1361 (r'<%\S?', Keyword, 'sec'),
1365 1375
1366 class JspLexer(DelegatingLexer): 1376 class JspLexer(DelegatingLexer):
1367 """ 1377 """
1368 Lexer for Java Server Pages. 1378 Lexer for Java Server Pages.
1369 1379
1370 *New in Pygments 0.7.* 1380 .. versionadded:: 0.7
1371 """ 1381 """
1372 name = 'Java Server Page' 1382 name = 'Java Server Page'
1373 aliases = ['jsp'] 1383 aliases = ['jsp']
1374 filenames = ['*.jsp'] 1384 filenames = ['*.jsp']
1375 mimetypes = ['application/x-jsp'] 1385 mimetypes = ['application/x-jsp']
1388 1398
1389 class EvoqueLexer(RegexLexer): 1399 class EvoqueLexer(RegexLexer):
1390 """ 1400 """
1391 For files using the Evoque templating system. 1401 For files using the Evoque templating system.
1392 1402
1393 *New in Pygments 1.1.* 1403 .. versionadded:: 1.1
1394 """ 1404 """
1395 name = 'Evoque' 1405 name = 'Evoque'
1396 aliases = ['evoque'] 1406 aliases = ['evoque']
1397 filenames = ['*.evoque'] 1407 filenames = ['*.evoque']
1398 mimetypes = ['application/x-evoque'] 1408 mimetypes = ['application/x-evoque']
1410 (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})', 1420 (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
1411 bygroups(Punctuation, Name.Builtin, Punctuation, None, 1421 bygroups(Punctuation, Name.Builtin, Punctuation, None,
1412 String, Punctuation)), 1422 String, Punctuation)),
1413 # directives: evoque, overlay 1423 # directives: evoque, overlay
1414 # see doc for handling first name arg: /directives/evoque/ 1424 # see doc for handling first name arg: /directives/evoque/
1415 #+ minor inconsistency: the "name" in e.g. $overlay{name=site_base} 1425 # + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
1416 # should be using(PythonLexer), not passed out as String 1426 # should be using(PythonLexer), not passed out as String
1417 (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+[^=,%}]+?)?' 1427 (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+[^=,%}]+?)?'
1418 r'(.*?)((?(4)%)\})', 1428 r'(.*?)((?(4)%)\})',
1419 bygroups(Punctuation, Name.Builtin, Punctuation, None, 1429 bygroups(Punctuation, Name.Builtin, Punctuation, None,
1420 String, using(PythonLexer), Punctuation)), 1430 String, using(PythonLexer), Punctuation)),
1436 (r'\]#', Comment.Multiline, '#pop'), 1446 (r'\]#', Comment.Multiline, '#pop'),
1437 (r'[\]#]', Comment.Multiline) 1447 (r'[\]#]', Comment.Multiline)
1438 ], 1448 ],
1439 } 1449 }
1440 1450
1451
class EvoqueHtmlLexer(DelegatingLexer):
    """
    Delegating lexer for Evoque templates embedded in HTML: the
    `EvoqueLexer` runs first and whatever it leaves untouched is
    highlighted by the `HtmlLexer`.

    .. versionadded:: 1.1
    """
    name = 'HTML+Evoque'
    aliases = ['html+evoque']
    filenames = ['*.html']
    mimetypes = ['text/html+evoque']

    def __init__(self, **options):
        # HtmlLexer is the root lexer; EvoqueLexer handles template tags.
        super(EvoqueHtmlLexer, self).__init__(HtmlLexer, EvoqueLexer,
                                              **options)
1456 1467
1468
class EvoqueXmlLexer(DelegatingLexer):
    """
    Delegating lexer for Evoque templates embedded in XML: the
    `EvoqueLexer` runs first and whatever it leaves untouched is
    highlighted by the `XmlLexer`.

    .. versionadded:: 1.1
    """
    name = 'XML+Evoque'
    aliases = ['xml+evoque']
    filenames = ['*.xml']
    mimetypes = ['application/xml+evoque']

    def __init__(self, **options):
        # XmlLexer is the root lexer; EvoqueLexer handles template tags.
        super(EvoqueXmlLexer, self).__init__(XmlLexer, EvoqueLexer,
                                             **options)
1472 1484
1485
1473 class ColdfusionLexer(RegexLexer): 1486 class ColdfusionLexer(RegexLexer):
1474 """ 1487 """
1475 Coldfusion statements 1488 Coldfusion statements
1476 """ 1489 """
1477 name = 'cfstatement' 1490 name = 'cfstatement'
1478 aliases = ['cfs'] 1491 aliases = ['cfs']
1479 filenames = [] 1492 filenames = []
1480 mimetypes = [] 1493 mimetypes = []
1481 flags = re.IGNORECASE | re.MULTILINE 1494 flags = re.IGNORECASE
1482 1495
1483 tokens = { 1496 tokens = {
1484 'root': [ 1497 'root': [
1485 (r'//.*', Comment), 1498 (r'//.*?\n', Comment.Single),
1499 (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
1486 (r'\+\+|--', Operator), 1500 (r'\+\+|--', Operator),
1487 (r'[-+*/^&=!]', Operator), 1501 (r'[-+*/^&=!]', Operator),
1488 (r'<=|>=|<|>', Operator), 1502 (r'<=|>=|<|>|==', Operator),
1489 (r'mod\b', Operator), 1503 (r'mod\b', Operator),
1490 (r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator), 1504 (r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
1491 (r'\|\||&&', Operator), 1505 (r'\|\||&&', Operator),
1506 (r'\?', Operator),
1492 (r'"', String.Double, 'string'), 1507 (r'"', String.Double, 'string'),
1493 # There is a special rule for allowing html in single quoted 1508 # There is a special rule for allowing html in single quoted
1494 # strings, evidently. 1509 # strings, evidently.
1495 (r"'.*?'", String.Single), 1510 (r"'.*?'", String.Single),
1496 (r'\d+', Number), 1511 (r'\d+', Number),
1497 (r'(if|else|len|var|case|default|break|switch)\b', Keyword), 1512 (r'(if|else|len|var|xml|default|break|switch|component|property|function|do|'
1498 (r'([A-Za-z_$][A-Za-z0-9_.]*)(\s*)(\()', 1513 r'try|catch|in|continue|for|return|while|required|any|array|binary|boolean|'
1514 r'component|date|guid|numeric|query|string|struct|uuid|case)\b', Keyword),
1515 (r'(true|false|null)\b', Keyword.Constant),
1516 (r'(application|session|client|cookie|super|this|variables|arguments)\b',
1517 Name.Constant),
1518 (r'([a-z_$][\w.]*)(\s*)(\()',
1499 bygroups(Name.Function, Text, Punctuation)), 1519 bygroups(Name.Function, Text, Punctuation)),
1500 (r'[A-Za-z_$][A-Za-z0-9_.]*', Name.Variable), 1520 (r'[a-z_$][\w.]*', Name.Variable),
1501 (r'[()\[\]{};:,.\\]', Punctuation), 1521 (r'[()\[\]{};:,.\\]', Punctuation),
1502 (r'\s+', Text), 1522 (r'\s+', Text),
1503 ], 1523 ],
1504 'string': [ 1524 'string': [
1505 (r'""', String.Double), 1525 (r'""', String.Double),
1525 (r'[^<]+', Other), 1545 (r'[^<]+', Other),
1526 include('tags'), 1546 include('tags'),
1527 (r'<[^<>]*', Other), 1547 (r'<[^<>]*', Other),
1528 ], 1548 ],
1529 'tags': [ 1549 'tags': [
1530 (r'(?s)<!---.*?--->', Comment.Multiline), 1550 (r'<!---', Comment.Multiline, 'cfcomment'),
1531 (r'(?s)<!--.*?-->', Comment), 1551 (r'(?s)<!--.*?-->', Comment),
1532 (r'<cfoutput.*?>', Name.Builtin, 'cfoutput'), 1552 (r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
1533 (r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)', 1553 (r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
1534 bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)), 1554 bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
1535 # negative lookbehind is for strings with embedded > 1555 # negative lookbehind is for strings with embedded >
1541 ], 1561 ],
1542 'cfoutput': [ 1562 'cfoutput': [
1543 (r'[^#<]+', Other), 1563 (r'[^#<]+', Other),
1544 (r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer), 1564 (r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
1545 Punctuation)), 1565 Punctuation)),
1546 #(r'<cfoutput.*?>', Name.Builtin, '#push'), 1566 # (r'<cfoutput.*?>', Name.Builtin, '#push'),
1547 (r'</cfoutput.*?>', Name.Builtin, '#pop'), 1567 (r'</cfoutput.*?>', Name.Builtin, '#pop'),
1548 include('tags'), 1568 include('tags'),
1549 (r'(?s)<[^<>]*', Other), 1569 (r'(?s)<[^<>]*', Other),
1550 (r'#', Other), 1570 (r'#', Other),
1551 ], 1571 ],
1572 'cfcomment': [
1573 (r'<!---', Comment.Multiline, '#push'),
1574 (r'--->', Comment.Multiline, '#pop'),
1575 (r'([^<-]|<(?!!---)|-(?!-->))+', Comment.Multiline),
1576 ],
1552 } 1577 }
1553 1578
1554 1579
class ColdfusionHtmlLexer(DelegatingLexer):
    """
    Lexer for ColdFusion markup embedded in HTML pages: CFML tags are
    handled by the `ColdfusionMarkupLexer`, the surrounding document by
    the `HtmlLexer`.
    """
    name = 'Coldfusion HTML'
    aliases = ['cfm']
    filenames = ['*.cfm', '*.cfml']
    mimetypes = ['application/x-coldfusion']

    def __init__(self, **options):
        # HtmlLexer is the root lexer; ColdfusionMarkupLexer takes the tags.
        super(ColdfusionHtmlLexer, self).__init__(HtmlLexer,
                                                  ColdfusionMarkupLexer,
                                                  **options)
1567 1592
1568 1593
class ColdfusionCFCLexer(DelegatingLexer):
    """
    Lexer for ColdFusion components (``.cfc``), which mix tag-based
    markup with cfscript: unhandled data from the `ColdfusionLexer`
    falls through to the `ColdfusionHtmlLexer`.

    .. versionadded:: 2.0
    """
    name = 'Coldfusion CFC'
    aliases = ['cfc']
    filenames = ['*.cfc']
    mimetypes = []

    def __init__(self, **options):
        super(ColdfusionCFCLexer, self).__init__(ColdfusionHtmlLexer,
                                                 ColdfusionLexer,
                                                 **options)
1608
1609
1569 class SspLexer(DelegatingLexer): 1610 class SspLexer(DelegatingLexer):
1570 """ 1611 """
1571 Lexer for Scalate Server Pages. 1612 Lexer for Scalate Server Pages.
1572 1613
1573 *New in Pygments 1.4.* 1614 .. versionadded:: 1.4
1574 """ 1615 """
1575 name = 'Scalate Server Page' 1616 name = 'Scalate Server Page'
1576 aliases = ['ssp'] 1617 aliases = ['ssp']
1577 filenames = ['*.ssp'] 1618 filenames = ['*.ssp']
1578 mimetypes = ['application/x-ssp'] 1619 mimetypes = ['application/x-ssp']
class TeaTemplateRootLexer(RegexLexer):
    """
    Base for the `TeaTemplateLexer`: everything outside ``<% ... %>``
    code sections is emitted as `Token.Other` so a delegating lexer can
    take over.

    .. versionadded:: 1.5
    """

    tokens = {
        'root': [
            # '<%' (optionally followed by one non-space char) opens a section
            (r'<%\S?', Keyword, 'sec'),
            (r'[^<]+', Other),
            (r'<', Other),
        ],
        'sec': [
            (r'%>', Keyword, '#pop'),
            # [\w\W] matches any character including newlines, so no
            # DOTALL flag is needed here (unlike '.').
            (r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
        ],
    }
1614 1655
1615 1656
1616 class TeaTemplateLexer(DelegatingLexer): 1657 class TeaTemplateLexer(DelegatingLexer):
1617 """ 1658 """
1618 Lexer for `Tea Templates <http://teatrove.org/>`_. 1659 Lexer for `Tea Templates <http://teatrove.org/>`_.
1619 1660
1620 *New in Pygments 1.5.* 1661 .. versionadded:: 1.5
1621 """ 1662 """
1622 name = 'Tea' 1663 name = 'Tea'
1623 aliases = ['tea'] 1664 aliases = ['tea']
1624 filenames = ['*.tea'] 1665 filenames = ['*.tea']
1625 mimetypes = ['text/x-tea'] 1666 mimetypes = ['text/x-tea']
1642 Subclass of the `LassoLexer` which highlights unhandled data with the 1683 Subclass of the `LassoLexer` which highlights unhandled data with the
1643 `HtmlLexer`. 1684 `HtmlLexer`.
1644 1685
1645 Nested JavaScript and CSS is also highlighted. 1686 Nested JavaScript and CSS is also highlighted.
1646 1687
1647 *New in Pygments 1.6.* 1688 .. versionadded:: 1.6
1648 """ 1689 """
1649 1690
1650 name = 'HTML+Lasso' 1691 name = 'HTML+Lasso'
1651 aliases = ['html+lasso'] 1692 aliases = ['html+lasso']
1652 alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]', 1693 alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
1657 1698
1658 def __init__(self, **options): 1699 def __init__(self, **options):
1659 super(LassoHtmlLexer, self).__init__(HtmlLexer, LassoLexer, **options) 1700 super(LassoHtmlLexer, self).__init__(HtmlLexer, LassoLexer, **options)
1660 1701
1661 def analyse_text(text): 1702 def analyse_text(text):
1662 rv = LassoLexer.analyse_text(text) 1703 rv = LassoLexer.analyse_text(text) - 0.01
1663 if re.search(r'<\w+>', text, re.I): 1704 if html_doctype_matches(text): # same as HTML lexer
1664 rv += 0.2
1665 if html_doctype_matches(text):
1666 rv += 0.5 1705 rv += 0.5
1667 return rv 1706 return rv
1668 1707
1669 1708
1670 class LassoXmlLexer(DelegatingLexer): 1709 class LassoXmlLexer(DelegatingLexer):
1671 """ 1710 """
1672 Subclass of the `LassoLexer` which highlights unhandled data with the 1711 Subclass of the `LassoLexer` which highlights unhandled data with the
1673 `XmlLexer`. 1712 `XmlLexer`.
1674 1713
1675 *New in Pygments 1.6.* 1714 .. versionadded:: 1.6
1676 """ 1715 """
1677 1716
1678 name = 'XML+Lasso' 1717 name = 'XML+Lasso'
1679 aliases = ['xml+lasso'] 1718 aliases = ['xml+lasso']
1680 alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]', 1719 alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
1683 1722
1684 def __init__(self, **options): 1723 def __init__(self, **options):
1685 super(LassoXmlLexer, self).__init__(XmlLexer, LassoLexer, **options) 1724 super(LassoXmlLexer, self).__init__(XmlLexer, LassoLexer, **options)
1686 1725
1687 def analyse_text(text): 1726 def analyse_text(text):
1688 rv = LassoLexer.analyse_text(text) 1727 rv = LassoLexer.analyse_text(text) - 0.01
1689 if looks_like_xml(text): 1728 if looks_like_xml(text):
1690 rv += 0.5 1729 rv += 0.4
1691 return rv 1730 return rv
1692 1731
1693 1732
1694 class LassoCssLexer(DelegatingLexer): 1733 class LassoCssLexer(DelegatingLexer):
1695 """ 1734 """
1696 Subclass of the `LassoLexer` which highlights unhandled data with the 1735 Subclass of the `LassoLexer` which highlights unhandled data with the
1697 `CssLexer`. 1736 `CssLexer`.
1698 1737
1699 *New in Pygments 1.6.* 1738 .. versionadded:: 1.6
1700 """ 1739 """
1701 1740
1702 name = 'CSS+Lasso' 1741 name = 'CSS+Lasso'
1703 aliases = ['css+lasso'] 1742 aliases = ['css+lasso']
1704 alias_filenames = ['*.css'] 1743 alias_filenames = ['*.css']
1707 def __init__(self, **options): 1746 def __init__(self, **options):
1708 options['requiredelimiters'] = True 1747 options['requiredelimiters'] = True
1709 super(LassoCssLexer, self).__init__(CssLexer, LassoLexer, **options) 1748 super(LassoCssLexer, self).__init__(CssLexer, LassoLexer, **options)
1710 1749
1711 def analyse_text(text): 1750 def analyse_text(text):
1712 rv = LassoLexer.analyse_text(text) 1751 rv = LassoLexer.analyse_text(text) - 0.05
1713 if re.search(r'\w+:.+;', text): 1752 if re.search(r'\w+:.+?;', text):
1714 rv += 0.1 1753 rv += 0.1
1715 if 'padding:' in text: 1754 if 'padding:' in text:
1716 rv += 0.1 1755 rv += 0.1
1717 return rv 1756 return rv
1718 1757
1720 class LassoJavascriptLexer(DelegatingLexer): 1759 class LassoJavascriptLexer(DelegatingLexer):
1721 """ 1760 """
1722 Subclass of the `LassoLexer` which highlights unhandled data with the 1761 Subclass of the `LassoLexer` which highlights unhandled data with the
1723 `JavascriptLexer`. 1762 `JavascriptLexer`.
1724 1763
1725 *New in Pygments 1.6.* 1764 .. versionadded:: 1.6
1726 """ 1765 """
1727 1766
1728 name = 'JavaScript+Lasso' 1767 name = 'JavaScript+Lasso'
1729 aliases = ['js+lasso', 'javascript+lasso'] 1768 aliases = ['js+lasso', 'javascript+lasso']
1730 alias_filenames = ['*.js'] 1769 alias_filenames = ['*.js']
1736 options['requiredelimiters'] = True 1775 options['requiredelimiters'] = True
1737 super(LassoJavascriptLexer, self).__init__(JavascriptLexer, LassoLexer, 1776 super(LassoJavascriptLexer, self).__init__(JavascriptLexer, LassoLexer,
1738 **options) 1777 **options)
1739 1778
1740 def analyse_text(text): 1779 def analyse_text(text):
1741 rv = LassoLexer.analyse_text(text) 1780 rv = LassoLexer.analyse_text(text) - 0.05
1742 if 'function' in text: 1781 if 'function' in text:
1743 rv += 0.2 1782 rv += 0.2
1744 return rv 1783 return rv
1784
1785
class HandlebarsLexer(RegexLexer):
    """
    Generic `handlebars <http://handlebarsjs.com/>` template lexer.

    Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
    Everything else is left for a delegating lexer.

    .. versionadded:: 2.0
    """

    name = "Handlebars"
    aliases = ['handlebars']

    tokens = {
        'root': [
            (r'[^{]+', Other),

            # comments: {{! ... }}
            (r'\{\{!.*\}\}', Comment),

            # triple-stash (unescaped) and regular expressions open a tag
            (r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),
            (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),

            # Fix: a lone '{' that does not start a Handlebars tag is plain
            # data; without this fallback it produced an Error token.  The
            # same fallback exists in TwigLexer and LiquidLexer.
            (r'\{', Other),
        ],

        'tag': [
            (r'\s+', Text),
            # closing delimiters pop back to 'root'
            (r'\}\}\}', Comment.Special, '#pop'),
            (r'\}\}', Comment.Preproc, '#pop'),

            # Handlebars builtin block helpers
            (r'([#/]*)(each|if|unless|else|with|log|in)', bygroups(Keyword,
             Keyword)),

            # general {{#block}} / {{/block}}
            (r'([#/])([\w-]+)', bygroups(Name.Function, Name.Function)),

            # hash arguments: {{helper opt=something}}
            (r'([\w-]+)(=)', bygroups(Name.Attribute, Operator)),

            # literals and variables (borrowed from DjangoLexer)
            (r':?"(\\\\|\\"|[^"])*"', String.Double),
            (r":?'(\\\\|\\'|[^'])*'", String.Single),
            (r'[a-zA-Z][\w-]*', Name.Variable),
            (r'\.[\w-]+', Name.Variable),
            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ]
    }
1833
1834
class HandlebarsHtmlLexer(DelegatingLexer):
    """
    Delegating lexer for Handlebars templates in HTML documents: the
    `HandlebarsLexer` takes the template tags, everything it leaves
    unlexed is highlighted by the `HtmlLexer`.

    .. versionadded:: 2.0
    """

    name = "HTML+Handlebars"
    aliases = ["html+handlebars"]
    filenames = ['*.handlebars', '*.hbs']
    mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']

    def __init__(self, **options):
        super(HandlebarsHtmlLexer, self).__init__(HtmlLexer, HandlebarsLexer,
                                                  **options)
1850
1851
class YamlJinjaLexer(DelegatingLexer):
    """
    Delegating lexer for Jinja-templated YAML: the `DjangoLexer` (which
    also covers Jinja syntax) runs first, and unlexed data goes to the
    `YamlLexer`.

    Commonly used in Saltstack salt states.

    .. versionadded:: 2.0
    """

    name = 'YAML+Jinja'
    aliases = ['yaml+jinja', 'salt', 'sls']
    filenames = ['*.sls']
    mimetypes = ['text/x-yaml+jinja', 'text/x-sls']

    def __init__(self, **options):
        super(YamlJinjaLexer, self).__init__(YamlLexer, DjangoLexer,
                                             **options)
1869
1870
class LiquidLexer(RegexLexer):
    """
    Lexer for `Liquid templates
    <http://www.rubydoc.info/github/Shopify/liquid>`_.

    .. versionadded:: 2.0
    """
    name = 'liquid'
    aliases = ['liquid']
    filenames = ['*.liquid']

    tokens = {
        'root': [
            (r'[^{]+', Text),
            # tags and block tags
            (r'(\{%)(\s*)', bygroups(Punctuation, Whitespace), 'tag-or-block'),
            # output tags
            (r'(\{\{)(\s*)([^\s}]+)',
             bygroups(Punctuation, Whitespace, using(this, state='generic')),
             'output'),
            (r'\{', Text)
        ],

        'tag-or-block': [
            # builtin logic blocks
            (r'(if|unless|elsif|case)(?=\s+)', Keyword.Reserved, 'condition'),
            (r'(when)(\s+)', bygroups(Keyword.Reserved, Whitespace),
             combined('end-of-block', 'whitespace', 'generic')),
            (r'(else)(\s*)(%\})',
             bygroups(Keyword.Reserved, Whitespace, Punctuation), '#pop'),

            # other builtin blocks
            (r'(capture)(\s+)([^\s%]+)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, using(this, state='variable'),
                      Whitespace, Punctuation), '#pop'),
            (r'(comment)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, Punctuation), 'comment'),
            (r'(raw)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, Punctuation), 'raw'),

            # end of block
            (r'(end(case|unless|if))(\s*)(%\})',
             bygroups(Keyword.Reserved, None, Whitespace, Punctuation), '#pop'),
            (r'(end([^\s%]+))(\s*)(%\})',
             bygroups(Name.Tag, None, Whitespace, Punctuation), '#pop'),

            # builtin tags (assign and include are handled together with
            # usual tags)
            (r'(cycle)(\s+)(?:([^\s:]*)(:))?(\s*)',
             bygroups(Name.Tag, Whitespace,
                      using(this, state='generic'), Punctuation, Whitespace),
             'variable-tag-markup'),

            # other tags or blocks
            (r'([^\s%]+)(\s*)', bygroups(Name.Tag, Whitespace), 'tag-markup')
        ],

        'output': [
            include('whitespace'),
            # Fix: pattern was a non-raw string ('\}\}'); \} is an invalid
            # escape sequence (SyntaxWarning on modern Python) and every
            # other pattern in this class uses a raw string.
            (r'\}\}', Punctuation, '#pop'),  # end of output

            (r'\|', Punctuation, 'filters')
        ],

        'filters': [
            include('whitespace'),
            (r'\}\}', Punctuation, ('#pop', '#pop')),  # end of filters/output

            (r'([^\s|:]+)(:?)(\s*)',
             bygroups(Name.Function, Punctuation, Whitespace), 'filter-markup')
        ],

        'filter-markup': [
            (r'\|', Punctuation, '#pop'),
            include('end-of-tag'),
            include('default-param-markup')
        ],

        'condition': [
            include('end-of-block'),
            include('whitespace'),

            (r'([^\s=!><]+)(\s*)([=!><]=?)(\s*)(\S+)(\s*)(%\})',
             bygroups(using(this, state='generic'), Whitespace, Operator,
                      Whitespace, using(this, state='generic'), Whitespace,
                      Punctuation)),
            (r'\b!', Operator),
            (r'\bnot\b', Operator.Word),
            (r'([\w.\'"]+)(\s+)(contains)(\s+)([\w.\'"]+)',
             bygroups(using(this, state='generic'), Whitespace, Operator.Word,
                      Whitespace, using(this, state='generic'))),

            include('generic'),
            include('whitespace')
        ],

        'generic-value': [
            include('generic'),
            include('end-at-whitespace')
        ],

        'operator': [
            (r'(\s*)((=|!|>|<)=?)(\s*)',
             bygroups(Whitespace, Operator, None, Whitespace), '#pop'),
            (r'(\s*)(\bcontains\b)(\s*)',
             bygroups(Whitespace, Operator.Word, Whitespace), '#pop'),
        ],

        'end-of-tag': [
            (r'\}\}', Punctuation, '#pop')
        ],

        'end-of-block': [
            (r'%\}', Punctuation, ('#pop', '#pop'))
        ],

        'end-at-whitespace': [
            (r'\s+', Whitespace, '#pop')
        ],

        # states for unknown markup
        'param-markup': [
            include('whitespace'),
            # params with colons or equals
            (r'([^\s=:]+)(\s*)(=|:)',
             bygroups(Name.Attribute, Whitespace, Operator)),
            # explicit variables
            (r'(\{\{)(\s*)([^\s}])(\s*)(\}\})',
             bygroups(Punctuation, Whitespace, using(this, state='variable'),
                      Whitespace, Punctuation)),

            include('string'),
            include('number'),
            include('keyword'),
            (r',', Punctuation)
        ],

        'default-param-markup': [
            include('param-markup'),
            (r'.', Text)  # fallback for switches / variables / un-quoted
                          # strings / ...
        ],

        'variable-param-markup': [
            include('param-markup'),
            include('variable'),
            (r'.', Text)  # fallback
        ],

        'tag-markup': [
            (r'%\}', Punctuation, ('#pop', '#pop')),  # end of tag
            include('default-param-markup')
        ],

        'variable-tag-markup': [
            (r'%\}', Punctuation, ('#pop', '#pop')),  # end of tag
            include('variable-param-markup')
        ],

        # states for different value types
        'keyword': [
            (r'\b(false|true)\b', Keyword.Constant)
        ],

        'variable': [
            (r'[a-zA-Z_]\w*', Name.Variable),
            (r'(?<=\w)\.(?=\w)', Punctuation)
        ],

        'string': [
            (r"'[^']*'", String.Single),
            (r'"[^"]*"', String.Double)
        ],

        'number': [
            (r'\d+\.\d+', Number.Float),
            (r'\d+', Number.Integer)
        ],

        'generic': [  # decides for variable, string, keyword or number
            include('keyword'),
            include('string'),
            include('number'),
            include('variable')
        ],

        'whitespace': [
            (r'[ \t]+', Whitespace)
        ],

        # states for builtin blocks
        'comment': [
            (r'(\{%)(\s*)(endcomment)(\s*)(%\})',
             bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
                      Punctuation), ('#pop', '#pop')),
            (r'.', Comment)
        ],

        'raw': [
            (r'[^{]+', Text),
            (r'(\{%)(\s*)(endraw)(\s*)(%\})',
             bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
                      Punctuation), '#pop'),
            (r'\{', Text)
        ],
    }
2075
2076
class TwigLexer(RegexLexer):
    """
    `Twig <http://twig.sensiolabs.org/>`_ template lexer.

    It just highlights Twig code between the preprocessor directives,
    other data is left untouched by the lexer.

    .. versionadded:: 2.0
    """

    name = 'Twig'
    aliases = ['twig']
    mimetypes = ['application/x-twig']

    flags = re.M | re.S

    # Note that a backslash is included in the following two patterns
    # PHP uses a backslash as a namespace separator
    _ident_char = r'[\\\w-]|[^\x00-\x7f]'
    _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
    _ident_end = r'(?:' + _ident_char + ')*'
    _ident_inner = _ident_begin + _ident_end

    tokens = {
        'root': [
            (r'[^{]+', Other),
            (r'\{\{', Comment.Preproc, 'var'),
            # twig comments
            (r'\{\#.*?\#\}', Comment),
            # raw twig blocks
            (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Other, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            (r'(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Other, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            # filter blocks ('%%' collapses to a literal '%' under the
            # %-formatting applied for _ident_inner)
            (r'(\{%%)(-?\s*)(filter)(\s+)(%s)' % _ident_inner,
             bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
             'tag'),
            (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
             bygroups(Comment.Preproc, Text, Keyword), 'tag'),
            (r'\{', Other),
        ],
        'varnames': [
            (r'(\|)(\s*)(%s)' % _ident_inner,
             bygroups(Operator, Text, Name.Function)),
            (r'(is)(\s+)(not)?(\s*)(%s)' % _ident_inner,
             bygroups(Keyword, Text, Keyword, Text, Name.Function)),
            (r'(?i)(true|false|none|null)\b', Keyword.Pseudo),
            # Fix: the continuation lines of this alternation were missing
            # their leading '|', so the pattern contained bogus branches
            # like 'isif', 'importconstant' and 'sameasmatches' while the
            # real keywords 'is', 'if', 'import', 'constant', 'sameas' and
            # 'matches' never matched.
            (r'(in|not|and|b-and|or|b-or|b-xor|is'
             r'|if|elseif|else|import'
             r'|constant|defined|divisibleby|empty|even|iterable|odd|sameas'
             r'|matches|starts\s+with|ends\s+with)\b',
             Keyword),
            (r'(loop|block|parent)\b', Name.Builtin),
            (_ident_inner, Name.Variable),
            (r'\.' + _ident_inner, Name.Variable),
            (r'\.[0-9]+', Number),
            (r':?"(\\\\|\\"|[^"])*"', String.Double),
            (r":?'(\\\\|\\'|[^'])*'", String.Single),
            (r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)', Operator),
            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ],
        'var': [
            (r'\s+', Text),
            (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames')
        ],
        'tag': [
            (r'\s+', Text),
            (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames'),
            (r'.', Punctuation),
        ],
    }
2158
2159
class TwigHtmlLexer(DelegatingLexer):
    """
    Delegating lexer for Twig templates in HTML documents: the
    `TwigLexer` takes the template directives, everything it leaves
    unlexed is highlighted by the `HtmlLexer`.

    .. versionadded:: 2.0
    """

    name = "HTML+Twig"
    aliases = ["html+twig"]
    filenames = ['*.twig']
    mimetypes = ['text/html+twig']

    def __init__(self, **options):
        super(TwigHtmlLexer, self).__init__(HtmlLexer, TwigLexer,
                                            **options)

eric ide

mercurial