ThirdParty/Pygments/pygments/lexers/templates.py

changeset 1705:b0fbc9300f2b
parent    808:8f85926125ef
child     2426:da76c71624de
comparison: 1704:02ae6c55b35b -> 1705:b0fbc9300f2b

@@ -3,20 +3,21 @@
     pygments.lexers.templates
     ~~~~~~~~~~~~~~~~~~~~~~~~~
 
     Lexers for various template engines' markup.
 
-    :copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
+    :copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
 import re
 
 from pygments.lexers.web import \
      PhpLexer, HtmlLexer, XmlLexer, JavascriptLexer, CssLexer
 from pygments.lexers.agile import PythonLexer, PerlLexer
 from pygments.lexers.compiled import JavaLexer
+from pygments.lexers.jvm import TeaLangLexer
 from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
      include, using, this
 from pygments.token import Error, Punctuation, \
      Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
 from pygments.util import html_doctype_matches, looks_like_xml
@@ -31,15 +32,14 @@
            'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
            'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
            'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
            'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
            'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
-           'CheetahXmlLexer', 'CheetahJavascriptLexer',
-           'EvoqueLexer', 'EvoqueHtmlLexer', 'EvoqueXmlLexer',
-           'ColdfusionLexer', 'ColdfusionHtmlLexer',
-           'VelocityLexer', 'VelocityHtmlLexer', 'VelocityXmlLexer',
-           'SspLexer']
+           'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
+           'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
+           'ColdfusionHtmlLexer', 'VelocityLexer', 'VelocityHtmlLexer',
+           'VelocityXmlLexer', 'SspLexer', 'TeaTemplateLexer']
 
 
 class ErbLexer(Lexer):
     """
     Generic `ERB <http://ruby-doc.org/core/classes/ERB.html>`_ (Ruby Templating)
@@ -166,11 +166,11 @@
             (r'\s+', Text),
             (r'\}', Comment.Preproc, '#pop'),
             (r'#[a-zA-Z_][a-zA-Z0-9_]*#', Name.Variable),
             (r'\$[a-zA-Z_][a-zA-Z0-9_]*(\.[a-zA-Z0-9_]+)*', Name.Variable),
             (r'[~!%^&*()+=|\[\]:;,.<>/?{}@-]', Operator),
-            ('(true|false|null)\b', Keyword.Constant),
+            (r'(true|false|null)\b', Keyword.Constant),
             (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
              r"0[xX][0-9a-fA-F]+[Ll]?", Number),
             (r'"(\\\\|\\"|[^"])*"', String.Double),
             (r"'(\\\\|\\'|[^'])*'", String.Single),
             (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Attribute)
@@ -404,15 +404,15 @@
     mimetypes = ['application/x-myghty']
 
     tokens = {
         'root': [
             (r'\s+', Text),
-            (r'(<%(def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
-             bygroups(Name.Tag, None, Text, Name.Function, Name.Tag,
+            (r'(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
+             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
                       using(this), Name.Tag)),
-            (r'(<%(\w+))(.*?)(>)(.*?)(</%\2\s*>)(?s)',
-             bygroups(Name.Tag, None, Name.Function, Name.Tag,
+            (r'(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
+             bygroups(Name.Tag, Name.Function, Name.Tag,
                       using(PythonLexer), Name.Tag)),
             (r'(<&[^|])(.*?)(,.*?)?(&>)',
              bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
             (r'(<&\|)(.*?)(,.*?)?(&>)(?s)',
              bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
@@ -523,15 +523,15 @@
     tokens = {
         'root': [
             (r'\s+', Text),
             (r'(<%doc>)(.*?)(</%doc>)(?s)',
              bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
-            (r'(<%(def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
-             bygroups(Name.Tag, None, Text, Name.Function, Name.Tag,
+            (r'(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
+             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
                       using(this), Name.Tag)),
-            (r'(<%(\w+))(.*?)(>)(.*?)(</%\2\s*>)(?s)',
-             bygroups(Name.Tag, None, Name.Function, Name.Tag,
+            (r'(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
+             bygroups(Name.Tag, Name.Function, Name.Tag,
                       using(PerlLexer), Name.Tag)),
             (r'(<&[^|])(.*?)(,.*?)?(&>)(?s)',
              bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
             (r'(<&\|)(.*?)(,.*?)?(&>)(?s)',
              bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
@@ -614,12 +614,12 @@
             (r'<%', Comment.Preproc),
             (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
             include('tag'),
         ],
         'tag': [
-            (r'((?:\w+)\s*=)\s*(".*?")',
-             bygroups(Name.Attribute, String)),
+            (r'((?:\w+)\s*=)(\s*)(".*?")',
+             bygroups(Name.Attribute, Text, String)),
             (r'/?\s*>', Comment.Preproc, '#pop'),
             (r'\s+', Text),
         ],
         'attr': [
             ('".*?"', String, '#pop'),
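
Several hunks in this changeset make the same kind of change: whitespace that previously sat outside every capture group is wrapped in its own (\s*) group and mapped to Text, and leftover None placeholders are dropped so that bygroups() gets exactly one entry per remaining group. The sketch below is not part of the changeset; the AttrDemoLexer class and the sample input are invented for illustration. It shows how Pygments' bygroups assigns one token type per capture group, which is why text left uncaptured inside a match would otherwise vanish from the token stream.

    from pygments.lexer import RegexLexer, bygroups
    from pygments.token import Name, String, Text

    class AttrDemoLexer(RegexLexer):
        """Hypothetical lexer for name="value" attribute pairs."""
        tokens = {
            'root': [
                # one token type per capture group; the (\s*) group keeps the
                # whitespace after '=' in the output instead of dropping it
                (r'(\w+\s*=)(\s*)(".*?")', bygroups(Name.Attribute, Text, String)),
                (r'\s+', Text),
            ],
        }

    if __name__ == '__main__':
        for token, value in AttrDemoLexer().get_tokens('file =  "index.html"'):
            print(token, repr(value))
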
@@ -1401,29 +1401,29 @@
             # svn keywords
             (r'\$\w+:[^$\n]*\$', Comment.Multiline),
             # directives: begin, end
             (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
              bygroups(Punctuation, Name.Builtin, Punctuation, None,
-                      String, Punctuation, None)),
+                      String, Punctuation)),
             # directives: evoque, overlay
             # see doc for handling first name arg: /directives/evoque/
             #+ minor inconsistency: the "name" in e.g. $overlay{name=site_base}
             # should be using(PythonLexer), not passed out as String
             (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+[^=,%}]+?)?'
              r'(.*?)((?(4)%)\})',
              bygroups(Punctuation, Name.Builtin, Punctuation, None,
-                      String, using(PythonLexer), Punctuation, None)),
+                      String, using(PythonLexer), Punctuation)),
             # directives: if, for, prefer, test
             (r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
              bygroups(Punctuation, Name.Builtin, Punctuation, None,
-                      using(PythonLexer), Punctuation, None)),
+                      using(PythonLexer), Punctuation)),
             # directive clauses (no {} expression)
             (r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
             # expressions
             (r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
              bygroups(Punctuation, None, using(PythonLexer),
-                      Name.Builtin, None, None, Punctuation, None)),
+                      Name.Builtin, None, None, Punctuation)),
             (r'#', Other),
         ],
         'comment': [
             (r'[^\]#]', Comment.Multiline),
             (r'#\[', Comment.Multiline, '#push'),
@@ -1487,11 +1487,12 @@
             # There is a special rule for allowing html in single quoted
             # strings, evidently.
             (r"'.*?'", String.Single),
             (r'\d+', Number),
             (r'(if|else|len|var|case|default|break|switch)\b', Keyword),
-            (r'([A-Za-z_$][A-Za-z0-9_.]*)\s*(\()', bygroups(Name.Function, Punctuation)),
+            (r'([A-Za-z_$][A-Za-z0-9_.]*)(\s*)(\()',
+             bygroups(Name.Function, Text, Punctuation)),
             (r'[A-Za-z_$][A-Za-z0-9_.]*', Name.Variable),
             (r'[()\[\]{};:,.\\]', Punctuation),
             (r'\s+', Text),
         ],
         'string': [
@@ -1579,5 +1580,51 @@
         if looks_like_xml(text):
             rv += 0.2
         if '<%' in text and '%>' in text:
             rv += 0.1
         return rv
+
+
+class TeaTemplateRootLexer(RegexLexer):
+    """
+    Base for the `TeaTemplateLexer`. Yields `Token.Other` for area outside of
+    code blocks.
+
+    *New in Pygments 1.5.*
+    """
+
+    tokens = {
+        'root': [
+            (r'<%\S?', Keyword, 'sec'),
+            (r'[^<]+', Other),
+            (r'<', Other),
+        ],
+        'sec': [
+            (r'%>', Keyword, '#pop'),
+            # note: '\w\W' != '.' without DOTALL.
+            (r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
+        ],
+    }
+
+
+class TeaTemplateLexer(DelegatingLexer):
+    """
+    Lexer for `Tea Templates <http://teatrove.org/>`_.
+
+    *New in Pygments 1.5.*
+    """
+    name = 'Tea'
+    aliases = ['tea']
+    filenames = ['*.tea']
+    mimetypes = ['text/x-tea']
+
+    def __init__(self, **options):
+        super(TeaTemplateLexer, self).__init__(XmlLexer,
+                                               TeaTemplateRootLexer, **options)
+
+    def analyse_text(text):
+        rv = TeaLangLexer.analyse_text(text) - 0.01
+        if looks_like_xml(text):
+            rv += 0.4
+        if '<%' in text and '%>' in text:
+            rv += 0.1
+        return rv
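
The new TeaTemplateLexer added at the end of this file is a DelegatingLexer: TeaTemplateRootLexer marks everything outside <% ... %> as Other and hands the code blocks to TeaLangLexer, while XmlLexer then lexes the surrounding markup. A minimal usage sketch, assuming this templates.py is importable; the Tea template snippet itself is invented for illustration:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.templates import TeaTemplateLexer

    # invented Tea template snippet, just to exercise the lexer
    source = '<ul><% foreach (user in users) { %><li><% user.name %></li><% } %></ul>'

    # XmlLexer handles the markup, TeaLangLexer the <% ... %> blocks
    print(highlight(source, TeaTemplateLexer(), TerminalFormatter()))

The lexer can also be looked up by its 'tea' alias or '*.tea' filename pattern via get_lexer_by_name / get_lexer_for_filename, provided the generated lexer mapping shipped with this Pygments copy includes the new class.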
