# Public API of this module: one entry per lexer class defined below.
__all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer',
           'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
           'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer',
           'TerraformLexer', 'TermcapLexer', 'TerminfoLexer',
           'PkgConfigLexer', 'PacmanConfLexer', 'AugeasLexer', 'TOMLLexer']


27 class IniLexer(RegexLexer): |
27 class IniLexer(RegexLexer): |
28 """ |
28 """ |
29 Lexer for configuration files in INI style. |
29 Lexer for configuration files in INI style. |
298 flags = re.MULTILINE | re.IGNORECASE |
298 flags = re.MULTILINE | re.IGNORECASE |
299 |
299 |
300 tokens = { |
300 tokens = { |
301 'root': [ |
301 'root': [ |
302 (r'\s+', Text), |
302 (r'\s+', Text), |
303 (r'(#.*?)$', Comment), |
303 (r'#(.*\\\n)+.*$|(#.*?)$', Comment), |
304 (r'(<[^\s>]+)(?:(\s+)(.*?))?(>)', |
304 (r'(<[^\s>]+)(?:(\s+)(.*))?(>)', |
305 bygroups(Name.Tag, Text, String, Name.Tag)), |
305 bygroups(Name.Tag, Text, String, Name.Tag)), |
306 (r'([a-z]\w*)(\s+)', |
306 (r'[a-z]\w*', Name.Builtin, 'value'), |
307 bygroups(Name.Builtin, Text), 'value'), |
|
308 (r'\.+', Text), |
307 (r'\.+', Text), |
309 ], |
308 ], |
310 'value': [ |
309 'value': [ |
311 (r'\\\n', Text), |
310 (r'\\\n', Text), |
312 (r'$', Text, '#pop'), |
311 (r'$', Text, '#pop'), |
313 (r'\\', Text), |
312 (r'\\', Text), |
314 (r'[^\S\n]+', Text), |
313 (r'[^\S\n]+', Text), |
315 (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number), |
314 (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number), |
316 (r'\d+', Number), |
315 (r'\d+', Number), |
317 (r'/([a-z0-9][\w./-]+)', String.Other), |
316 (r'/([*a-z0-9][*\w./-]+)', String.Other), |
318 (r'(on|off|none|any|all|double|email|dns|min|minimal|' |
317 (r'(on|off|none|any|all|double|email|dns|min|minimal|' |
319 r'os|productonly|full|emerg|alert|crit|error|warn|' |
318 r'os|productonly|full|emerg|alert|crit|error|warn|' |
320 r'notice|info|debug|registry|script|inetd|standalone|' |
319 r'notice|info|debug|registry|script|inetd|standalone|' |
321 r'user|group)\b', Keyword), |
320 r'user|group)\b', Keyword), |
322 (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double), |
321 (r'"([^"\\]*(?:\\(.|[\n])[^"\\]*)*)"', String.Double), |
323 (r'[^\s"\\]+', Text) |
322 (r'[^\s"\\]+', Text) |
324 ], |
323 ], |
325 } |
324 } |
326 |
325 |
327 |
326 |
538 name = 'Docker' |
537 name = 'Docker' |
539 aliases = ['docker', 'dockerfile'] |
538 aliases = ['docker', 'dockerfile'] |
540 filenames = ['Dockerfile', '*.docker'] |
539 filenames = ['Dockerfile', '*.docker'] |
541 mimetypes = ['text/x-dockerfile-config'] |
540 mimetypes = ['text/x-dockerfile-config'] |
542 |
541 |
543 _keywords = (r'(?:FROM|MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)') |
542 _keywords = (r'(?:MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)') |
544 _bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)') |
543 _bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)') |
545 _lb = r'(?:\s*\\?\s*)' # dockerfile line break regex |
544 _lb = r'(?:\s*\\?\s*)' # dockerfile line break regex |
546 flags = re.IGNORECASE | re.MULTILINE |
545 flags = re.IGNORECASE | re.MULTILINE |
547 |
546 |
548 tokens = { |
547 tokens = { |
549 'root': [ |
548 'root': [ |
550 (r'#.*', Comment), |
549 (r'#.*', Comment), |
|
550 (r'(FROM)([ \t]*)(\S*)([ \t]*)(?:(AS)([ \t]*)(\S*))?', |
|
551 bygroups(Keyword, Text, String, Text, Keyword, Text, String)), |
551 (r'(ONBUILD)(%s)' % (_lb,), bygroups(Keyword, using(BashLexer))), |
552 (r'(ONBUILD)(%s)' % (_lb,), bygroups(Keyword, using(BashLexer))), |
552 (r'(HEALTHCHECK)((%s--\w+=\w+%s)*)' % (_lb, _lb), |
553 (r'(HEALTHCHECK)((%s--\w+=\w+%s)*)' % (_lb, _lb), |
553 bygroups(Keyword, using(BashLexer))), |
554 bygroups(Keyword, using(BashLexer))), |
554 (r'(VOLUME|ENTRYPOINT|CMD|SHELL)(%s)(\[.*?\])' % (_lb,), |
555 (r'(VOLUME|ENTRYPOINT|CMD|SHELL)(%s)(\[.*?\])' % (_lb,), |
555 bygroups(Keyword, using(BashLexer), using(JsonLexer))), |
556 bygroups(Keyword, using(BashLexer), using(JsonLexer))), |
572 name = 'Terraform' |
573 name = 'Terraform' |
573 aliases = ['terraform', 'tf'] |
574 aliases = ['terraform', 'tf'] |
574 filenames = ['*.tf'] |
575 filenames = ['*.tf'] |
575 mimetypes = ['application/x-tf', 'application/x-terraform'] |
576 mimetypes = ['application/x-tf', 'application/x-terraform'] |
576 |
577 |
577 tokens = { |
578 embedded_keywords = ('ingress', 'egress', 'listener', 'default', |
578 'root': [ |
579 'connection', 'alias', 'terraform', 'tags', 'vars', |
579 include('string'), |
580 'config', 'lifecycle', 'timeouts') |
580 include('punctuation'), |
581 |
581 include('curly'), |
582 tokens = { |
582 include('basic'), |
583 'root': [ |
583 include('whitespace'), |
584 include('string'), |
584 (r'[0-9]+', Number), |
585 include('punctuation'), |
|
586 include('curly'), |
|
587 include('basic'), |
|
588 include('whitespace'), |
|
589 (r'[0-9]+', Number), |
585 ], |
590 ], |
586 'basic': [ |
591 'basic': [ |
587 (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Type), |
592 (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Type), |
588 (r'\s*/\*', Comment.Multiline, 'comment'), |
593 (r'\s*/\*', Comment.Multiline, 'comment'), |
589 (r'\s*#.*\n', Comment.Single), |
594 (r'\s*#.*\n', Comment.Single), |
590 (r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)), |
595 (r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)), |
591 (words(('variable', 'resource', 'provider', 'provisioner', 'module'), |
596 (words(('variable', 'resource', 'provider', 'provisioner', 'module', |
592 prefix=r'\b', suffix=r'\b'), Keyword.Reserved, 'function'), |
597 'backend', 'data', 'output'), prefix=r'\b', suffix=r'\b'), |
593 (words(('ingress', 'egress', 'listener', 'default', 'connection', 'alias'), |
598 Keyword.Reserved, 'function'), |
594 prefix=r'\b', suffix=r'\b'), Keyword.Declaration), |
599 (words(embedded_keywords, prefix=r'\b', suffix=r'\b'), |
595 (r'\$\{', String.Interpol, 'var_builtin'), |
600 Keyword.Declaration), |
|
601 (r'\$\{', String.Interpol, 'var_builtin'), |
596 ], |
602 ], |
597 'function': [ |
603 'function': [ |
598 (r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)), |
604 (r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)), |
599 include('punctuation'), |
605 include('punctuation'), |
600 include('curly'), |
606 include('curly'), |
601 ], |
607 ], |
602 'var_builtin': [ |
608 'var_builtin': [ |
603 (r'\$\{', String.Interpol, '#push'), |
609 (r'\$\{', String.Interpol, '#push'), |
604 (words(('concat', 'file', 'join', 'lookup', 'element'), |
610 (words(('concat', 'file', 'join', 'lookup', 'element'), |
605 prefix=r'\b', suffix=r'\b'), Name.Builtin), |
611 prefix=r'\b', suffix=r'\b'), Name.Builtin), |
835 |
841 |
836 # fallback |
842 # fallback |
837 (r'.', Text), |
843 (r'.', Text), |
838 ], |
844 ], |
839 } |
845 } |
|
846 |
|
847 |
|
class AugeasLexer(RegexLexer):
    """
    Lexer for `Augeas <http://augeas.net>`_.

    .. versionadded:: 2.4
    """
    name = 'Augeas'
    aliases = ['augeas']
    filenames = ['*.aug']

    tokens = {
        'root': [
            # Module header and let-bindings.
            (r'(module)(\s*)([^\s=]+)',
             bygroups(Keyword.Namespace, Text, Name.Namespace)),
            (r'(let)(\s*)([^\s=]+)',
             bygroups(Keyword.Declaration, Text, Name.Variable)),
            # Built-in lens primitives followed by whitespace.
            (r'(del|store|value|counter|seq|key|label|autoload|incl|excl|transform|test|get|put)(\s+)',
             bygroups(Name.Builtin, Text)),
            # Typed parameter, e.g. "(x:string)".
            (r'(\()([^:]+)(\:)(unit|string|regexp|lens|tree|filter)(\))',
             bygroups(Punctuation, Name.Variable, Punctuation, Keyword.Type, Punctuation)),
            # "(*" opens a (nestable) comment.
            (r'\(\*', Comment.Multiline, 'comment'),
            (r'[*+\-.;=?|]', Operator),
            (r'[()\[\]{}]', Operator),
            # Delimited literals switch into dedicated states.
            (r'"', String.Double, 'string'),
            (r'\/', String.Regex, 'regex'),
            # Qualified name: Module.member.
            (r'([A-Z]\w*)(\.)(\w+)',
             bygroups(Name.Namespace, Punctuation, Name.Variable)),
            (r'.', Name.Variable),
            (r'\s', Text),
        ],
        'string': [
            # Escapes first, then everything up to the closing quote.
            (r'\\.', String.Escape),
            (r'[^"]', String.Double),
            (r'"', String.Double, '#pop'),
        ],
        'regex': [
            # Escapes first, then everything up to the closing slash.
            (r'\\.', String.Escape),
            (r'[^/]', String.Regex),
            (r'\/', String.Regex, '#pop'),
        ],
        'comment': [
            (r'[^*)]', Comment.Multiline),
            # Nested "(*" pushes another comment level.
            (r'\(\*', Comment.Multiline, '#push'),
            (r'\*\)', Comment.Multiline, '#pop'),
            (r'[)*]', Comment.Multiline)
        ],
    }
|
890 |
|
891 |
|
class TOMLLexer(RegexLexer):
    """
    Lexer for `TOML <https://github.com/toml-lang/toml>`_, a simple language
    for config files.

    .. versionadded:: 2.4
    """

    name = 'TOML'
    aliases = ['toml']
    filenames = ['*.toml', 'Pipfile', 'poetry.lock']

    tokens = {
        'root': [

            # Basics, comments, strings
            (r'\s+', Text),
            (r'#.*?$', Comment.Single),
            # Basic string
            (r'"(\\\\|\\"|[^"])*"', String),
            # Literal string
            (r'\'\'\'(.*)\'\'\'', String),
            (r'\'[^\']*\'', String),
            # Booleans: use \b rather than $ so a boolean followed by a
            # comment, comma or bracket (e.g. "x = true # on", "[true, false]")
            # is still recognized as a constant.
            (r'(true|false)\b', Keyword.Constant),
            (r'[a-zA-Z_][\w\-]*', Name),

            # Table / array-of-tables header, e.g. [server] or [[products]].
            (r'\[.*?\]$', Keyword),
            # Datetime
            # TODO this needs to be expanded, as TOML is rather flexible:
            # https://github.com/toml-lang/toml#offset-date-time
            (r'\d{4}-\d{2}-\d{2}(?:T| )\d{2}:\d{2}:\d{2}(?:Z|[-+]\d{2}:\d{2})', Number.Integer),

            # Numbers
            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
            (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
            # Handle +-inf, +-infinity, +-nan
            (r'[+-]?(?:(inf(?:inity)?)|nan)', Number.Float),
            (r'[+-]?\d+', Number.Integer),

            # Punctuation
            (r'[]{}:(),;[]', Punctuation),
            (r'\.', Punctuation),

            # Operators
            (r'=', Operator)

        ]
    }