|
# -*- coding: utf-8 -*-
"""
    pygments.lexers.nix
    ~~~~~~~~~~~~~~~~~~~

    Lexers for the NixOS Nix language.

    :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re

from pygments.lexer import RegexLexer, include
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
    Number, Punctuation, Literal

__all__ = ['NixLexer']

class NixLexer(RegexLexer):
    """
    For the `Nix language <http://nixos.org/nix/>`_.

    .. versionadded:: 2.0
    """

    name = 'Nix'
    aliases = ['nixos', 'nix']
    filenames = ['*.nix']
    mimetypes = ['text/x-nix']

    flags = re.MULTILINE | re.UNICODE

    keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if',
                'else', 'then', '...']
    builtins = ['import', 'abort', 'baseNameOf', 'dirOf', 'isNull', 'builtins',
                'map', 'removeAttrs', 'throw', 'toString', 'derivation']
    operators = ['++', '+', '?', '.', '!', '//', '==',
                 '!=', '&&', '||', '->', '=']

    punctuations = ["(", ")", "[", "]", ";", "{", "}", ":", ",", "@"]

    tokens = {
        'root': [
            # comments starting with #
            (r'#.*$', Comment.Single),

            # multiline comments
            (r'/\*', Comment.Multiline, 'comment'),

            # whitespace
            (r'\s+', Text),

            # keywords
            ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in keywords),
             Keyword),

            # highlight the builtins
            ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
             Name.Builtin),

            (r'\b(true|false|null)\b', Name.Constant),

            # operators
            ('(%s)' % '|'.join(re.escape(entry) for entry in operators),
             Operator),

            # word operators
            (r'\b(or|and)\b', Operator.Word),

            # punctuations
            ('(%s)' % '|'.join(re.escape(entry) for entry in punctuations),
             Punctuation),

            # integers
            (r'[0-9]+', Number.Integer),

            # strings
            (r'"', String.Double, 'doublequote'),
            (r"''", String.Single, 'singlequote'),

            # paths
            (r'[\w.+-]*(\/[\w.+-]+)+', Literal),
            (r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal),

            # urls
            (r'[a-zA-Z][a-zA-Z0-9\+\-\.]*\:[\w%/?:@&=+$,\\.!~*\'-]+', Literal),

            # names of variables
            (r'[\w-]+\s*=', String.Symbol),
            (r'[a-zA-Z_][\w\'-]*', Text),
        ],
        'comment': [
            (r'[^/*]+', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline),
        ],
        'singlequote': [
            (r"'''", String.Escape),          # ''' is an escaped ''
            (r"''\$\{", String.Escape),       # ''${ is a literal ${
            (r"''\\n", String.Escape),        # ''\n, ''\r, ''\t escapes
            (r"''\\r", String.Escape),
            (r"''\\t", String.Escape),
            (r"''", String.Single, '#pop'),
            (r'\$\{', String.Interpol, 'antiquote'),
            (r"[^']", String.Single),
            (r"'", String.Single),            # a lone ' is ordinary string text
        ],
        'doublequote': [
            (r'\\\$\{', String.Escape),       # \${ suppresses interpolation
            (r'\\.', String.Escape),          # \" \\ \n and other escapes
            (r'"', String.Double, '#pop'),
            (r'\$\{', String.Interpol, 'antiquote'),
            (r'[^"]', String.Double),
        ],
        'antiquote': [
            (r"\}", String.Interpol, '#pop'),
            # TODO: we should probably escape also here ''${ \${
            (r"\$\{", String.Interpol, '#push'),
            include('root'),
        ],
    }
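
    # For reference, small Nix fragments and the states above that handle
    # them (illustrative only; nothing here is used by the lexer):
    #
    #   /* multi-line */            -> 'comment'
    #   "hello ${user}"             -> 'doublequote' entering 'antiquote'
    #   ''indented, ''${literal}''  -> 'singlequote' (''${ is an escape)
    #   <nixpkgs/lib>               -> path literal in 'root'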
|
    def analyse_text(text):
        rv = 0.0
        # TODO: let/in
        if re.search(r'import.+?<[^>]+>', text):
            rv += 0.4
        if re.search(r'mkDerivation\s+(\(|\{|rec)', text):
            rv += 0.4
        if re.search(r'=\s+mkIf\s+', text):
            rv += 0.4
        if re.search(r'\{[a-zA-Z,\s]+\}:', text):
            rv += 0.1
        return rv
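

# Informal usage sketch: run a small, made-up Nix snippet through NixLexer
# and print the token stream it produces. Assumes only a standard Pygments
# installation; the sample text below is illustrative and has no special
# meaning.
if __name__ == '__main__':
    sample = '''
    # a comment
    let
      greeting = "hello ${toString 2}";
    in {
      inherit greeting;
      path = ./default.nix;
    }
    '''
    for token, value in NixLexer().get_tokens(sample):
        print('%s %r' % (token, value))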