3 pygments.lexers.configs |
3 pygments.lexers.configs |
4 ~~~~~~~~~~~~~~~~~~~~~~~ |
4 ~~~~~~~~~~~~~~~~~~~~~~~ |
5 |
5 |
6 Lexers for configuration file formats. |
6 Lexers for configuration file formats. |
7 |
7 |
8 :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. |
8 :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. |
9 :license: BSD, see LICENSE for details. |
9 :license: BSD, see LICENSE for details. |
10 """ |
10 """ |
11 |
11 |
12 import re |
12 import re |
13 |
13 |
14 from pygments.lexer import RegexLexer, default, words, bygroups, include, using |
14 from pygments.lexer import RegexLexer, default, words, bygroups, include, using |
15 from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ |
15 from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ |
16 Number, Punctuation, Whitespace |
16 Number, Punctuation, Whitespace, Literal |
17 from pygments.lexers.shell import BashLexer |
17 from pygments.lexers.shell import BashLexer |
18 |
18 |
19 __all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer', |
19 __all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer', |
20 'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer', |
20 'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer', |
21 'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer'] |
21 'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer', |
|
22 'TerraformLexer', 'TermcapLexer', 'TerminfoLexer', |
|
23 'PkgConfigLexer', 'PacmanConfLexer'] |
22 |
24 |
23 |
25 |
24 class IniLexer(RegexLexer): |
26 class IniLexer(RegexLexer): |
25 """ |
27 """ |
26 Lexer for configuration files in INI style. |
28 Lexer for configuration files in INI style. |
27 """ |
29 """ |
28 |
30 |
29 name = 'INI' |
31 name = 'INI' |
30 aliases = ['ini', 'cfg', 'dosini'] |
32 aliases = ['ini', 'cfg', 'dosini'] |
31 filenames = ['*.ini', '*.cfg'] |
33 filenames = ['*.ini', '*.cfg', '*.inf'] |
32 mimetypes = ['text/x-ini'] |
34 mimetypes = ['text/x-ini', 'text/inf'] |
33 |
35 |
34 tokens = { |
36 tokens = { |
35 'root': [ |
37 'root': [ |
36 (r'\s+', Text), |
38 (r'\s+', Text), |
37 (r'[;#].*', Comment.Single), |
39 (r'[;#].*', Comment.Single), |
538 'root': [ |
540 'root': [ |
539 (r'^(ONBUILD)(\s+)(%s)\b' % (_keywords,), |
541 (r'^(ONBUILD)(\s+)(%s)\b' % (_keywords,), |
540 bygroups(Name.Keyword, Whitespace, Keyword)), |
542 bygroups(Name.Keyword, Whitespace, Keyword)), |
541 (r'^(%s)\b(.*)' % (_keywords,), bygroups(Keyword, String)), |
543 (r'^(%s)\b(.*)' % (_keywords,), bygroups(Keyword, String)), |
542 (r'#.*', Comment), |
544 (r'#.*', Comment), |
543 (r'RUN', Keyword), # Rest of line falls through |
545 (r'RUN', Keyword), # Rest of line falls through |
544 (r'(.*\\\n)*.+', using(BashLexer)), |
546 (r'(.*\\\n)*.+', using(BashLexer)), |
545 ], |
547 ], |
546 } |
548 } |
|
549 |
|
550 |
|
class TerraformLexer(RegexLexer):
    """
    Lexer for `terraform .tf files <https://www.terraform.io/>`_.

    .. versionadded:: 2.1
    """

    name = 'Terraform'
    aliases = ['terraform', 'tf']
    filenames = ['*.tf']
    mimetypes = ['application/x-tf', 'application/x-terraform']

    tokens = {
        'root': [
            include('string'),
            include('punctuation'),
            include('curly'),
            include('basic'),
            include('whitespace'),
            (r'[0-9]+', Number),
        ],
        'basic': [
            (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Type),
            (r'\s*/\*', Comment.Multiline, 'comment'),
            (r'\s*#.*\n', Comment.Single),
            # Anything up to '=' is treated as an attribute assignment.
            (r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)),
            (words(('variable', 'resource', 'provider', 'provisioner', 'module'),
                   prefix=r'\b', suffix=r'\b'), Keyword.Reserved, 'function'),
            (words(('ingress', 'egress', 'listener', 'default', 'connection'),
                   prefix=r'\b', suffix=r'\b'), Keyword.Declaration),
            # Raw string so that \$ and \{ are regex escapes, not (invalid)
            # Python string escapes.
            (r'\$\{', String.Interpol, 'var_builtin'),
        ],
        'function': [
            (r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)),
            include('punctuation'),
            include('curly'),
        ],
        'var_builtin': [
            # Interpolations may nest, e.g. "${lookup(${var.x})}".
            (r'\$\{', String.Interpol, '#push'),
            (words(('concat', 'file', 'join', 'lookup', 'element'),
                   prefix=r'\b', suffix=r'\b'), Name.Builtin),
            include('string'),
            include('punctuation'),
            (r'\s+', Text),
            (r'\}', String.Interpol, '#pop'),
        ],
        'string': [
            (r'(".*")', bygroups(String.Double)),
        ],
        'punctuation': [
            (r'[\[\]\(\),.]', Punctuation),
        ],
        # Keep this separate from punctuation - we sometimes want to use different
        # Tokens for { }
        'curly': [
            (r'\{', Text.Punctuation),
            (r'\}', Text.Punctuation),
        ],
        'comment': [
            (r'[^*/]', Comment.Multiline),
            # C-style comments nest here: push on '/*', pop on '*/'.
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)
        ],
        'whitespace': [
            (r'\n', Text),
            (r'\s+', Text),
            (r'\\\n', Text),
        ],
    }
|
621 |
|
622 |
|
class TermcapLexer(RegexLexer):
    """
    Lexer for termcap database source.

    This is very simple and minimal.

    .. versionadded:: 2.1
    """
    name = 'Termcap'
    aliases = ['termcap',]

    filenames = ['termcap', 'termcap.src',]
    mimetypes = []

    # NOTE:
    # * multiline with trailing backslash
    # * separator is ':'
    # * to embed colon as data, we must use \072
    # * space after separator is not allowed (maybe)
    tokens = {
        'root': [
            # Whole-line comments start with '#'.
            (r'^#.*$', Comment),
            # An entry begins with its (first) terminal name at column 0.
            (r'^[^\s#:\|]+', Name.Tag, 'names'),
        ],
        'names': [
            (r'\n', Text, '#pop'),
            # First ':' ends the name list and starts capability definitions.
            (r':', Punctuation, 'defs'),
            # '|' separates alternative names for the same terminal.
            (r'\|', Punctuation),
            (r'[^:\|]+', Name.Attribute),
        ],
        'defs': [
            # Continuation line: trailing backslash plus leading indent.
            (r'\\\n[ \t]*', Text),
            # End of entry: pop through 'names' back to 'root'.
            (r'\n[ \t]*', Text, '#pop:2'),
            # Numeric capability, e.g. 'co#80'.
            (r'(#)([0-9]+)', bygroups(Operator, Number)),
            # '=' introduces a string capability value.
            (r'=', Operator, 'data'),
            (r':', Punctuation),
            (r'[^\s:=#]+', Name.Class),
        ],
        'data': [
            # '\072' is the escape for a literal colon inside a value.
            (r'\\072', Literal),
            # Unescaped ':' terminates the value.
            (r':', Punctuation, '#pop'),
            (r'[^:\\]+', Literal),  # for performance
            (r'.', Literal),
        ],
    }
|
668 |
|
669 |
|
class TerminfoLexer(RegexLexer):
    """
    Lexer for terminfo database source.

    This is very simple and minimal.

    .. versionadded:: 2.1
    """
    name = 'Terminfo'
    aliases = ['terminfo',]

    filenames = ['terminfo', 'terminfo.src',]
    mimetypes = []

    # NOTE:
    # * multiline with leading whitespace
    # * separator is ','
    # * to embed comma as data, we can use \,
    # * space after separator is allowed
    tokens = {
        'root': [
            # Whole-line comments start with '#'.
            (r'^#.*$', Comment),
            # An entry begins with its (first) terminal name at column 0.
            (r'^[^\s#,\|]+', Name.Tag, 'names'),
        ],
        'names': [
            (r'\n', Text, '#pop'),
            # First ',' ends the name list and starts capability definitions.
            (r'(,)([ \t]*)', bygroups(Punctuation, Text), 'defs'),
            # '|' separates alternative names for the same terminal.
            (r'\|', Punctuation),
            (r'[^,\|]+', Name.Attribute),
        ],
        'defs': [
            # Continuation line: newline followed by indentation.
            (r'\n[ \t]+', Text),
            # Unindented newline ends the entry: pop back to 'root'.
            (r'\n', Text, '#pop:2'),
            # Numeric capability, e.g. 'cols#80'.
            (r'(#)([0-9]+)', bygroups(Operator, Number)),
            # '=' introduces a string capability value.
            (r'=', Operator, 'data'),
            (r'(,)([ \t]*)', bygroups(Punctuation, Text)),
            (r'[^\s,=#]+', Name.Class),
        ],
        'data': [
            # '\,' (or '\\') escapes a literal comma/backslash in a value.
            (r'\\[,\\]', Literal),
            # Unescaped ',' terminates the value.
            (r'(,)([ \t]*)', bygroups(Punctuation, Text), '#pop'),
            (r'[^\\,]+', Literal),  # for performance
            (r'.', Literal),
        ],
    }
|
715 |
|
716 |
|
class PkgConfigLexer(RegexLexer):
    """
    Lexer for `pkg-config
    <http://www.freedesktop.org/wiki/Software/pkg-config/>`_
    (see also `manual page <http://linux.die.net/man/1/pkg-config>`_).

    .. versionadded:: 2.1
    """

    name = 'PkgConfig'
    aliases = ['pkgconfig',]
    filenames = ['*.pc',]
    mimetypes = []

    tokens = {
        'root': [
            (r'#.*$', Comment.Single),

            # variable definitions, e.g. "prefix=/usr"
            (r'^(\w+)(=)', bygroups(Name.Attribute, Operator)),

            # keyword lines, e.g. "Cflags: ..."
            (r'^([\w.]+)(:)',
             bygroups(Name.Tag, Punctuation), 'spvalue'),

            # variable references
            include('interp'),

            # fallback
            (r'[^${}#=:\n.]+', Text),
            (r'.', Text),
        ],
        'interp': [
            # you can escape literal "$" as "$$"
            (r'\$\$', Text),

            # variable references, e.g. "${prefix}"
            (r'\$\{', String.Interpol, 'curly'),
        ],
        'curly': [
            (r'\}', String.Interpol, '#pop'),
            (r'\w+', Name.Attribute),
        ],
        # value part of a keyword line; interpolations still apply here
        'spvalue': [
            include('interp'),

            (r'#.*$', Comment.Single, '#pop'),
            (r'\n', Text, '#pop'),

            # fallback
            (r'[^${}#\n]+', Text),
            (r'.', Text),
        ],
    }
|
771 |
|
772 |
|
class PacmanConfLexer(RegexLexer):
    """
    Lexer for `pacman.conf
    <https://www.archlinux.org/pacman/pacman.conf.5.html>`_.

    Actually, IniLexer works almost fine for this format,
    but it yield error token. It is because pacman.conf has
    a form without assignment like:

        UseSyslog
        Color
        TotalDownload
        CheckSpace
        VerbosePkgLists

    These are flags to switch on.

    .. versionadded:: 2.1
    """

    name = 'PacmanConf'
    aliases = ['pacmanconf',]
    filenames = ['pacman.conf',]
    mimetypes = []

    tokens = {
        'root': [
            # comment
            (r'#.*$', Comment.Single),

            # section header, e.g. "[options]"
            (r'^\s*\[.*?\]\s*$', Keyword),

            # variable definitions, e.g. "HoldPkg = pacman"
            # (Leading space is allowed...)
            (r'(\w+)(\s*)(=)',
             bygroups(Name.Attribute, Text, Operator)),

            # flags to on: a bare word alone on a line (see docstring)
            (r'^(\s*)(\w+)(\s*)$',
             bygroups(Text, Name.Attribute, Text)),

            # built-in special values
            (words((
                '$repo',  # repository
                '$arch',  # architecture
                '%o',     # outfile
                '%u',     # url
                ), suffix=r'\b'),
             Name.Variable),

            # fallback
            (r'.', Text),
        ],
    }