1 # -*- coding: utf-8 -*- |
|
2 """ |
|
3 pygments.lexers.configs |
|
4 ~~~~~~~~~~~~~~~~~~~~~~~ |
|
5 |
|
6 Lexers for configuration file formats. |
|
7 |
|
8 :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. |
|
9 :license: BSD, see LICENSE for details. |
|
10 """ |
|
11 |
|
12 import re |
|
13 |
|
14 from pygments.lexer import RegexLexer, default, words, bygroups, include, using |
|
15 from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ |
|
16 Number, Punctuation, Whitespace, Literal, Generic |
|
17 from pygments.lexers.shell import BashLexer |
|
18 from pygments.lexers.data import JsonLexer |
|
19 |
|
# Public API of this module: one lexer class per supported config format.
__all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer',
           'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
           'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer',
           'TerraformLexer', 'TermcapLexer', 'TerminfoLexer',
           'PkgConfigLexer', 'PacmanConfLexer', 'AugeasLexer', 'TOMLLexer',
           'SingularityLexer']
|
26 |
|
27 |
|
class IniLexer(RegexLexer):
    """
    Lexer for configuration files in INI style.
    """

    name = 'INI'
    aliases = ['ini', 'cfg', 'dosini']
    filenames = ['*.ini', '*.cfg', '*.inf']
    mimetypes = ['text/x-ini', 'text/inf']

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'[;#].*', Comment.Single),
            # section header, e.g. "[section]" (must end the line)
            (r'\[.*?\]$', Keyword),
            # "key = value" pair; the value may be empty
            (r'(.*?)([ \t]*)(=)([ \t]*)([^\t\n]*)',
             bygroups(Name.Attribute, Text, Operator, Text, String)),
            # standalone option, supported by some INI parsers
            (r'(.+?)$', Name.Attribute),
        ],
    }

    def analyse_text(text):
        # Heuristic: an INI file typically begins with a "[section]" line,
        # so check that the first line is bracketed.
        npos = text.find('\n')
        if npos < 3:
            return False
        return text[0] == '[' and text[npos-1] == ']'
|
55 |
|
56 |
|
class RegeditLexer(RegexLexer):
    """
    Lexer for `Windows Registry
    <http://en.wikipedia.org/wiki/Windows_Registry#.REG_files>`_ files produced
    by regedit.

    .. versionadded:: 1.6
    """

    name = 'reg'
    aliases = ['registry']
    filenames = ['*.reg']
    mimetypes = ['text/x-windows-registry']

    tokens = {
        'root': [
            # header line written at the top of regedit exports
            (r'Windows Registry Editor.*', Text),
            (r'\s+', Text),
            (r'[;#].*', Comment.Single),
            # key path, e.g. [HKEY_LOCAL_MACHINE\...]; a leading '-' marks
            # the key for deletion
            (r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
             bygroups(Keyword, Operator, Name.Builtin, Keyword)),
            # String keys, which obey somewhat normal escaping
            (r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
             bygroups(Name.Attribute, Text, Operator, Text),
             'value'),
            # Bare keys (includes @)
            (r'(.*?)([ \t]*)(=)([ \t]*)',
             bygroups(Name.Attribute, Text, Operator, Text),
             'value'),
        ],
        'value': [
            (r'-', Operator, '#pop'),  # delete value
            # typed values such as "dword:000000ff" or "hex(7):..."
            (r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
             bygroups(Name.Variable, Punctuation, Number), '#pop'),
            # As far as I know, .reg files do not support line continuation.
            (r'.+', String, '#pop'),
            default('#pop'),
        ]
    }

    def analyse_text(text):
        # Exported .reg files always start with this exact header.
        return text.startswith('Windows Registry Editor')
|
99 |
|
100 |
|
class PropertiesLexer(RegexLexer):
    """
    Lexer for configuration files in Java's properties format.

    Note: trailing whitespace counts as part of the value as per spec

    .. versionadded:: 1.4
    """

    name = 'Properties'
    aliases = ['properties', 'jproperties']
    filenames = ['*.properties']
    mimetypes = ['text/x-java-properties']

    tokens = {
        'root': [
            # key and value separated by a single space/tab (no '=' or ':')
            (r'^(\w+)([ \t])(\w+\s*)$', bygroups(Name.Attribute, Text, String)),
            # key alone (whitespace inside the key escaped with backslash)
            (r'^\w+(\\[ \t]\w*)*$', Name.Attribute),
            (r'(^ *)([#!].*)', bygroups(Text, Comment)),
            # More controversial comments
            (r'(^ *)((?:;|//).*)', bygroups(Text, Comment)),
            # "key = value" / "key: value"; a trailing backslash continues
            # the value onto the next line
            (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
             bygroups(Name.Attribute, Text, Operator, Text, String)),
            (r'\s', Text),
        ],
    }
|
127 |
|
128 |
|
129 def _rx_indent(level): |
|
130 # Kconfig *always* interprets a tab as 8 spaces, so this is the default. |
|
131 # Edit this if you are in an environment where KconfigLexer gets expanded |
|
132 # input (tabs expanded to spaces) and the expansion tab width is != 8, |
|
133 # e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width). |
|
134 # Value range here is 2 <= {tab_width} <= 8. |
|
135 tab_width = 8 |
|
136 # Regex matching a given indentation {level}, assuming that indentation is |
|
137 # a multiple of {tab_width}. In other cases there might be problems. |
|
138 if tab_width == 2: |
|
139 space_repeat = '+' |
|
140 else: |
|
141 space_repeat = '{1,%d}' % (tab_width - 1) |
|
142 if level == 1: |
|
143 level_repeat = '' |
|
144 else: |
|
145 level_repeat = '{%s}' % level |
|
146 return r'(?:\t| %s\t| {%s})%s.*\n' % (space_repeat, tab_width, level_repeat) |
|
147 |
|
148 |
|
class KconfigLexer(RegexLexer):
    """
    For Linux-style Kconfig files.

    .. versionadded:: 1.6
    """

    name = 'Kconfig'
    aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
    # Adjust this if new kconfig file names appear in your environment
    filenames = ['Kconfig*', '*Config.in*', 'external.in*',
                 'standard-modules.in']
    mimetypes = ['text/x-kconfig']
    # No re.MULTILINE, indentation-aware help text needs line-by-line handling
    flags = 0

    # NOTE: the two helpers below are executed at class-definition time to
    # build the token table; they are not instance methods.
    def call_indent(level):
        # If indentation >= {level} is detected, enter state 'indent{level}'
        return (_rx_indent(level), String.Doc, 'indent%s' % level)

    def do_indent(level):
        # Print paragraphs of indentation level >= {level} as String.Doc,
        # ignoring blank lines. Then return to 'root' state.
        return [
            (_rx_indent(level), String.Doc),
            (r'\s*\n', Text),
            default('#pop:2')
        ]

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'#.*?\n', Comment.Single),
            (words((
                'mainmenu', 'config', 'menuconfig', 'choice', 'endchoice',
                'comment', 'menu', 'endmenu', 'visible if', 'if', 'endif',
                'source', 'prompt', 'select', 'depends on', 'default',
                'range', 'option'), suffix=r'\b'),
             Keyword),
            (r'(---help---|help)[\t ]*\n', Keyword, 'help'),
            (r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b',
             Name.Builtin),
            (r'[!=&|]', Operator),
            (r'[()]', Punctuation),
            (r'[0-9]+', Number.Integer),
            (r"'(''|[^'])*'", String.Single),
            (r'"(""|[^"])*"', String.Double),
            (r'\S+', Text),
        ],
        # Help text is indented, multi-line and ends when a lower indentation
        # level is detected.
        'help': [
            # Skip blank lines after help token, if any
            (r'\s*\n', Text),
            # Determine the first help line's indentation level heuristically(!).
            # Attention: this is not perfect, but works for 99% of "normal"
            # indentation schemes up to a max. indentation level of 7.
            call_indent(7),
            call_indent(6),
            call_indent(5),
            call_indent(4),
            call_indent(3),
            call_indent(2),
            call_indent(1),
            default('#pop'),  # for incomplete help sections without text
        ],
        # Handle text for indentation levels 7 to 1
        'indent7': do_indent(7),
        'indent6': do_indent(6),
        'indent5': do_indent(5),
        'indent4': do_indent(4),
        'indent3': do_indent(3),
        'indent2': do_indent(2),
        'indent1': do_indent(1),
    }
|
224 |
|
225 |
|
class Cfengine3Lexer(RegexLexer):
    """
    Lexer for `CFEngine3 <http://cfengine.org>`_ policy files.

    .. versionadded:: 1.5
    """

    name = 'CFEngine3'
    aliases = ['cfengine3', 'cf3']
    filenames = ['*.cf']
    mimetypes = []

    tokens = {
        'root': [
            (r'#.*?\n', Comment),
            # "body <type> control" block
            (r'(body)(\s+)(\S+)(\s+)(control)',
             bygroups(Keyword, Text, Keyword, Text, Keyword)),
            # body/bundle definition with an argument list
            (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()',
             bygroups(Keyword, Text, Keyword, Text, Name.Function, Punctuation),
             'arglist'),
            # body/bundle definition without arguments
            (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)',
             bygroups(Keyword, Text, Keyword, Text, Name.Function)),
            # typed variable assignment, e.g. "name" string => ...
            (r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)',
             bygroups(Punctuation, Name.Variable, Punctuation,
                      Text, Keyword.Type, Text, Operator, Text)),
            (r'(\S+)(\s*)(=>)(\s*)',
             bygroups(Keyword.Reserved, Text, Operator, Text)),
            (r'"', String, 'string'),
            (r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
            # class context expression, e.g. "linux::"
            (r'([\w.!&|()]+)(::)', bygroups(Name.Class, Punctuation)),
            # promise type, e.g. "files:"
            (r'(\w+)(:)', bygroups(Keyword.Declaration, Punctuation)),
            # list/scalar expansion, e.g. @{list} or @(list)
            (r'@[{(][^)}]+[})]', Name.Variable),
            (r'[(){},;]', Punctuation),
            (r'=>', Operator),
            (r'->', Operator),
            (r'\d+\.\d+', Number.Float),
            (r'\d+', Number.Integer),
            (r'\w+', Name.Function),
            (r'\s+', Text),
        ],
        'string': [
            (r'\$[{(]', String.Interpol, 'interpol'),
            (r'\\.', String.Escape),
            (r'"', String, '#pop'),
            (r'\n', String),
            (r'.', String),
        ],
        'interpol': [
            # interpolations may nest, hence #push/#pop
            (r'\$[{(]', String.Interpol, '#push'),
            (r'[})]', String.Interpol, '#pop'),
            (r'[^${()}]+', String.Interpol),
        ],
        'arglist': [
            (r'\)', Punctuation, '#pop'),
            (r',', Punctuation),
            (r'\w+', Name.Variable),
            (r'\s+', Text),
        ],
    }
|
285 |
|
286 |
|
class ApacheConfLexer(RegexLexer):
    """
    Lexer for configuration files following the Apache config file
    format.

    .. versionadded:: 0.6
    """

    name = 'ApacheConf'
    aliases = ['apacheconf', 'aconf', 'apache']
    filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
    mimetypes = ['text/x-apacheconf']
    flags = re.MULTILINE | re.IGNORECASE

    tokens = {
        'root': [
            (r'\s+', Text),
            # comment, possibly continued over lines with trailing backslash
            (r'#(.*\\\n)+.*$|(#.*?)$', Comment),
            # opening section tag, e.g. <VirtualHost *:80>
            (r'(<[^\s>/][^\s>]*)(?:(\s+)(.*))?(>)',
             bygroups(Name.Tag, Text, String, Name.Tag)),
            # closing section tag, e.g. </VirtualHost>
            (r'(</[^\s>]+)(>)',
             bygroups(Name.Tag, Name.Tag)),
            # directive name; its arguments are lexed in the 'value' state
            (r'[a-z]\w*', Name.Builtin, 'value'),
            (r'\.+', Text),
        ],
        'value': [
            (r'\\\n', Text),  # line continuation
            (r'$', Text, '#pop'),  # end of the directive's arguments
            (r'\\', Text),
            (r'[^\S\n]+', Text),
            # IPv4 address, optionally with CIDR suffix
            (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
            (r'\d+', Number),
            (r'/([*a-z0-9][*\w./-]+)', String.Other),
            (r'(on|off|none|any|all|double|email|dns|min|minimal|'
             r'os|productonly|full|emerg|alert|crit|error|warn|'
             r'notice|info|debug|registry|script|inetd|standalone|'
             r'user|group)\b', Keyword),
            (r'"([^"\\]*(?:\\(.|\n)[^"\\]*)*)"', String.Double),
            (r'[^\s"\\]+', Text)
        ],
    }
|
328 |
|
329 |
|
class SquidConfLexer(RegexLexer):
    """
    Lexer for `squid <http://www.squid-cache.org/>`_ configuration files.

    .. versionadded:: 0.9
    """

    name = 'SquidConf'
    aliases = ['squidconf', 'squid.conf', 'squid']
    filenames = ['squid.conf']
    mimetypes = ['text/x-squidconf']
    flags = re.IGNORECASE

    # Configuration directives highlighted as keywords.
    keywords = (
        "access_log", "acl", "always_direct", "announce_host",
        "announce_period", "announce_port", "announce_to", "anonymize_headers",
        "append_domain", "as_whois_server", "auth_param_basic",
        "authenticate_children", "authenticate_program", "authenticate_ttl",
        "broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
        "cache_dir", "cache_dns_program", "cache_effective_group",
        "cache_effective_user", "cache_host", "cache_host_acl",
        "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
        "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
        # "cache_replacement_policy" is the actual squid directive; the
        # misspelled "cahce_replacement_policy" entry is kept for backward
        # compatibility with earlier keyword lists.
        "cache_peer_access", "cache_replacement_policy",
        "cahce_replacement_policy", "cache_stoplist",
        "cache_stoplist_pattern", "cache_store_log", "cache_swap",
        "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
        "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
        "dead_peer_timeout", "debug_options", "delay_access", "delay_class",
        "delay_initial_bucket_level", "delay_parameters", "delay_pools",
        "deny_info", "dns_children", "dns_defnames", "dns_nameservers",
        "dns_testnames", "emulate_httpd_log", "err_html_text",
        "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
        "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
        "ftp_passive", "ftp_user", "half_closed_clients", "header_access",
        "header_replace", "hierarchy_stoplist", "high_response_time_warning",
        "high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
        "http_anonymizer", "httpd_accel", "httpd_accel_host",
        "httpd_accel_port", "httpd_accel_uses_host_header",
        "httpd_accel_with_proxy", "http_port", "http_reply_access",
        "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
        "ident_lookup", "ident_lookup_access", "ident_timeout",
        "incoming_http_average", "incoming_icp_average", "inside_firewall",
        "ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
        "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
        "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
        "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
        "mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
        "memory_pools_limit", "memory_replacement_policy", "mime_table",
        "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
        "minimum_object_size", "minimum_retry_timeout", "miss_access",
        "negative_dns_ttl", "negative_ttl", "neighbor_timeout",
        "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
        "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
        "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
        "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
        "quick_abort", "quick_abort_max", "quick_abort_min",
        "quick_abort_pct", "range_offset_limit", "read_timeout",
        "redirect_children", "redirect_program",
        "redirect_rewrites_host_header", "reference_age",
        "refresh_pattern", "reload_into_ims", "request_body_max_size",
        "request_size", "request_timeout", "shutdown_lifetime",
        "single_parent_bypass", "siteselect_timeout", "snmp_access",
        "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
        "store_avg_object_size", "store_objects_per_bucket",
        "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
        "tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
        "test_reachability", "udp_hit_obj", "udp_hit_obj_size",
        "udp_incoming_address", "udp_outgoing_address", "unique_hostname",
        "unlinkd_program", "uri_whitespace", "useragent_log",
        "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
    )

    # Option values highlighted as constants.
    opts = (
        "proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
        "multicast-responder", "on", "off", "all", "deny", "allow", "via",
        "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
        "credentialsttl", "none", "disable", "offline_toggle", "diskd",
    )

    actions = (
        "shutdown", "info", "parameter", "server_list", "client_list",
        r'squid.conf',
    )

    # cachemgr actions reached via the "stats/" prefix.
    actions_stats = (
        "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
        "redirector", "io", "reply_headers", "filedescriptors", "netdb",
    )

    # cachemgr actions reached via the "log/" prefix.
    actions_log = ("status", "enable", "disable", "clear")

    # Built-in acl types.
    acls = (
        "url_regex", "urlpath_regex", "referer_regex", "port", "proto",
        "req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
        "dst", "time", "dstdomain", "ident", "snmp_community",
    )

    # Matches an IPv4 (decimal/hex/octal forms) or IPv6 address.
    ip_re = (
        r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|'
        r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|'
        r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|'
        r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}'
        r'(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|'
        r'(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|'
        r'[1-9]?\d)){3}))'
    )

    tokens = {
        'root': [
            (r'\s+', Whitespace),
            (r'#', Comment, 'comment'),
            (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(opts, prefix=r'\b', suffix=r'\b'), Name.Constant),
            # Actions
            (words(actions, prefix=r'\b', suffix=r'\b'), String),
            (words(actions_stats, prefix=r'stats/', suffix=r'\b'), String),
            (words(actions_log, prefix=r'log/', suffix=r'='), String),
            (words(acls, prefix=r'\b', suffix=r'\b'), Keyword),
            # address, optionally followed by "/netmask" or "/prefixlen"
            (ip_re + r'(?:/(?:' + ip_re + r'|\b\d+\b))?', Number.Float),
            (r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
            (r'\S+', Text),
        ],
        'comment': [
            # "# TAG: ..." lines from the distributed default config
            (r'\s*TAG:.*', String.Escape, '#pop'),
            (r'.+', Comment, '#pop'),
            default('#pop'),
        ],
    }
|
458 |
|
459 |
|
class NginxConfLexer(RegexLexer):
    """
    Lexer for `Nginx <http://nginx.net/>`_ configuration files.

    .. versionadded:: 0.11
    """
    name = 'Nginx configuration file'
    aliases = ['nginx']
    filenames = ['nginx.conf']
    mimetypes = ['text/x-nginx-conf']

    tokens = {
        'root': [
            (r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Text, Name)),
            # any other directive name starts a statement
            (r'[^\s;#]+', Keyword, 'stmt'),
            include('base'),
        ],
        'block': [
            (r'\}', Punctuation, '#pop:2'),  # leaves both 'block' and 'stmt'
            (r'[^\s;#]+', Keyword.Namespace, 'stmt'),
            include('base'),
        ],
        'stmt': [
            (r'\{', Punctuation, 'block'),
            (r';', Punctuation, '#pop'),
            include('base'),
        ],
        'base': [
            (r'#.*\n', Comment.Single),
            (r'on|off', Name.Constant),
            (r'\$[^\s;#()]+', Name.Variable),
            # host:port pair
            (r'([a-z0-9.-]+)(:)([0-9]+)',
             bygroups(Name, Punctuation, Number.Integer)),
            (r'[a-z-]+/[a-z-+]+', String),  # mimetype
            # (r'[a-zA-Z._-]+', Keyword),
            (r'[0-9]+[km]?\b', Number.Integer),
            # regex argument, e.g. after "location ~"
            (r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Text, String.Regex)),
            (r'[:=~]', Punctuation),
            (r'[^\s;#{}$]+', String),  # catch all
            (r'/[^\s;#]*', Name),  # pathname
            (r'\s+', Text),
            (r'[$;]', Text),  # leftover characters
        ],
    }
|
504 |
|
505 |
|
class LighttpdConfLexer(RegexLexer):
    """
    Lexer for `Lighttpd <http://lighttpd.net/>`_ configuration files.

    .. versionadded:: 0.11
    """
    name = 'Lighttpd configuration file'
    aliases = ['lighty', 'lighttpd']
    # Register the conventional config file name so this lexer can be picked
    # by filename-based guessing (previously no filename was claimed).
    filenames = ['lighttpd.conf']
    mimetypes = ['text/x-lighttpd-conf']

    tokens = {
        'root': [
            (r'#.*\n', Comment.Single),
            (r'/\S*', Name),  # pathname
            (r'[a-zA-Z._-]+', Keyword),
            # IPv4 address, optionally with CIDR suffix
            (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
            (r'[0-9]+', Number),
            (r'=>|=~|\+=|==|=|\+', Operator),
            (r'\$[A-Z]+', Name.Builtin),  # condition variables, e.g. $HTTP
            (r'[(){}\[\],]', Punctuation),
            (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
            (r'\s+', Text),
        ],

    }
|
532 |
|
533 |
|
class DockerLexer(RegexLexer):
    """
    Lexer for `Docker <http://docker.io>`_ configuration files.

    .. versionadded:: 2.0
    """
    name = 'Docker'
    aliases = ['docker', 'dockerfile']
    filenames = ['Dockerfile', '*.docker']
    mimetypes = ['text/x-dockerfile-config']

    # Instructions whose argument is highlighted as plain text.
    _keywords = (r'(?:MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
    # Instructions whose argument is highlighted as shell code.
    _bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)')
    _lb = r'(?:\s*\\?\s*)'  # dockerfile line break regex
    flags = re.IGNORECASE | re.MULTILINE

    tokens = {
        'root': [
            (r'#.*', Comment),
            # FROM <image> [AS <stage>]
            (r'(FROM)([ \t]*)(\S*)([ \t]*)(?:(AS)([ \t]*)(\S*))?',
             bygroups(Keyword, Text, String, Text, Keyword, Text, String)),
            (r'(ONBUILD)(%s)' % (_lb,), bygroups(Keyword, using(BashLexer))),
            (r'(HEALTHCHECK)((%s--\w+=\w+%s)*)' % (_lb, _lb),
             bygroups(Keyword, using(BashLexer))),
            # JSON ("exec") form of these instructions
            (r'(VOLUME|ENTRYPOINT|CMD|SHELL)(%s)(\[.*?\])' % (_lb,),
             bygroups(Keyword, using(BashLexer), using(JsonLexer))),
            (r'(LABEL|ENV|ARG)((%s\w+=\w+%s)*)' % (_lb, _lb),
             bygroups(Keyword, using(BashLexer))),
            (r'(%s|VOLUME)\b(.*)' % (_keywords), bygroups(Keyword, String)),
            (r'(%s)' % (_bash_keywords,), Keyword),
            # everything else (including continuation lines) lexed as shell
            (r'(.*\\\n)*.+', using(BashLexer)),
        ]
    }
|
567 |
|
568 |
|
class TerraformLexer(RegexLexer):
    """
    Lexer for `Terraform .tf files <https://www.terraform.io/>`_.

    .. versionadded:: 2.1
    """

    name = 'Terraform'
    aliases = ['terraform', 'tf']
    filenames = ['*.tf']
    mimetypes = ['application/x-tf', 'application/x-terraform']

    # Block names that may appear nested inside resource/provider bodies.
    embedded_keywords = ('ingress', 'egress', 'listener', 'default',
                         'connection', 'alias', 'terraform', 'tags', 'vars',
                         'config', 'lifecycle', 'timeouts')

    tokens = {
        'root': [
            include('string'),
            include('punctuation'),
            include('curly'),
            include('basic'),
            include('whitespace'),
            (r'[0-9]+', Number),
        ],
        'basic': [
            (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Type),
            (r'\s*/\*', Comment.Multiline, 'comment'),
            (r'\s*#.*\n', Comment.Single),
            # attribute assignment, e.g. "name = ..."
            (r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)),
            # top-level block keywords followed by quoted labels
            (words(('variable', 'resource', 'provider', 'provisioner', 'module',
                    'backend', 'data', 'output'), prefix=r'\b', suffix=r'\b'),
             Keyword.Reserved, 'function'),
            (words(embedded_keywords, prefix=r'\b', suffix=r'\b'),
             Keyword.Declaration),
            (r'\$\{', String.Interpol, 'var_builtin'),
        ],
        'function': [
            (r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)),
            include('punctuation'),
            include('curly'),
        ],
        'var_builtin': [
            # interpolations may nest
            (r'\$\{', String.Interpol, '#push'),
            (words(('concat', 'file', 'join', 'lookup', 'element'),
                   prefix=r'\b', suffix=r'\b'), Name.Builtin),
            include('string'),
            include('punctuation'),
            (r'\s+', Text),
            (r'\}', String.Interpol, '#pop'),
        ],
        'string': [
            (r'(".*")', bygroups(String.Double)),
        ],
        'punctuation': [
            (r'[\[\](),.]', Punctuation),
        ],
        # Keep this separate from punctuation - we sometimes want to use different
        # Tokens for { }
        'curly': [
            (r'\{', Text.Punctuation),
            (r'\}', Text.Punctuation),
        ],
        'comment': [
            (r'[^*/]', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),  # nested comments
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)
        ],
        'whitespace': [
            (r'\n', Text),
            (r'\s+', Text),
            (r'\\\n', Text),
        ],
    }
|
644 |
|
645 |
|
class TermcapLexer(RegexLexer):
    """
    Lexer for termcap database source.

    This is very simple and minimal.

    .. versionadded:: 2.1
    """
    name = 'Termcap'
    aliases = ['termcap']
    filenames = ['termcap', 'termcap.src']
    mimetypes = []

    # NOTE:
    #   * multiline with trailing backslash
    #   * separator is ':'
    #   * to embed colon as data, we must use \072
    #   * space after separator is not allowed (maybe)
    tokens = {
        'root': [
            (r'^#.*$', Comment),
            # entry start: first terminal name
            (r'^[^\s#:|]+', Name.Tag, 'names'),
        ],
        'names': [
            (r'\n', Text, '#pop'),
            (r':', Punctuation, 'defs'),
            (r'\|', Punctuation),  # alias separator
            (r'[^:|]+', Name.Attribute),
        ],
        'defs': [
            (r'\\\n[ \t]*', Text),  # continuation line
            (r'\n[ \t]*', Text, '#pop:2'),  # entry ends
            (r'(#)([0-9]+)', bygroups(Operator, Number)),  # numeric capability
            (r'=', Operator, 'data'),  # string capability value follows
            (r':', Punctuation),
            (r'[^\s:=#]+', Name.Class),
        ],
        'data': [
            (r'\\072', Literal),  # escaped colon
            (r':', Punctuation, '#pop'),
            (r'[^:\\]+', Literal),  # for performance
            (r'.', Literal),
        ],
    }
|
690 |
|
691 |
|
class TerminfoLexer(RegexLexer):
    """
    Lexer for terminfo database source.

    This is very simple and minimal.

    .. versionadded:: 2.1
    """
    name = 'Terminfo'
    aliases = ['terminfo']
    filenames = ['terminfo', 'terminfo.src']
    mimetypes = []

    # NOTE:
    #   * multiline with leading whitespace
    #   * separator is ','
    #   * to embed comma as data, we can use \,
    #   * space after separator is allowed
    tokens = {
        'root': [
            (r'^#.*$', Comment),
            # entry start: first terminal name
            (r'^[^\s#,|]+', Name.Tag, 'names'),
        ],
        'names': [
            (r'\n', Text, '#pop'),
            (r'(,)([ \t]*)', bygroups(Punctuation, Text), 'defs'),
            (r'\|', Punctuation),  # alias separator
            (r'[^,|]+', Name.Attribute),
        ],
        'defs': [
            (r'\n[ \t]+', Text),  # continuation line (leading whitespace)
            (r'\n', Text, '#pop:2'),  # entry ends
            (r'(#)([0-9]+)', bygroups(Operator, Number)),  # numeric capability
            (r'=', Operator, 'data'),  # string capability value follows
            (r'(,)([ \t]*)', bygroups(Punctuation, Text)),
            (r'[^\s,=#]+', Name.Class),
        ],
        'data': [
            (r'\\[,\\]', Literal),  # escaped comma/backslash
            (r'(,)([ \t]*)', bygroups(Punctuation, Text), '#pop'),
            (r'[^\\,]+', Literal),  # for performance
            (r'.', Literal),
        ],
    }
|
736 |
|
737 |
|
class PkgConfigLexer(RegexLexer):
    """
    Lexer for `pkg-config
    <http://www.freedesktop.org/wiki/Software/pkg-config/>`_
    (see also `manual page <http://linux.die.net/man/1/pkg-config>`_).

    .. versionadded:: 2.1
    """

    name = 'PkgConfig'
    aliases = ['pkgconfig']
    filenames = ['*.pc']
    mimetypes = []

    tokens = {
        'root': [
            (r'#.*$', Comment.Single),

            # variable definitions, e.g. "prefix=/usr"
            (r'^(\w+)(=)', bygroups(Name.Attribute, Operator)),

            # keyword lines, e.g. "Cflags: ..."
            (r'^([\w.]+)(:)',
             bygroups(Name.Tag, Punctuation), 'spvalue'),

            # variable references
            include('interp'),

            # fallback
            (r'[^${}#=:\n.]+', Text),
            (r'.', Text),
        ],
        'interp': [
            # you can escape literal "$" as "$$"
            (r'\$\$', Text),

            # variable references, e.g. "${prefix}"
            (r'\$\{', String.Interpol, 'curly'),
        ],
        'curly': [
            (r'\}', String.Interpol, '#pop'),
            (r'\w+', Name.Attribute),
        ],
        'spvalue': [
            include('interp'),

            (r'#.*$', Comment.Single, '#pop'),
            (r'\n', Text, '#pop'),

            # fallback
            (r'[^${}#\n]+', Text),
            (r'.', Text),
        ],
    }
|
792 |
|
793 |
|
class PacmanConfLexer(RegexLexer):
    """
    Lexer for `pacman.conf
    <https://www.archlinux.org/pacman/pacman.conf.5.html>`_.

    Actually, IniLexer works almost fine for this format,
    but it yield error token. It is because pacman.conf has
    a form without assignment like:

        UseSyslog
        Color
        TotalDownload
        CheckSpace
        VerbosePkgLists

    These are flags to switch on.

    .. versionadded:: 2.1
    """

    name = 'PacmanConf'
    aliases = ['pacmanconf']
    filenames = ['pacman.conf']
    mimetypes = []

    tokens = {
        'root': [
            # comment
            (r'#.*$', Comment.Single),

            # section header
            (r'^\s*\[.*?\]\s*$', Keyword),

            # variable definitions
            # (Leading space is allowed...)
            (r'(\w+)(\s*)(=)',
             bygroups(Name.Attribute, Text, Operator)),

            # flags to on
            (r'^(\s*)(\w+)(\s*)$',
             bygroups(Text, Name.Attribute, Text)),

            # built-in special values
            (words((
                '$repo',  # repository
                '$arch',  # architecture
                '%o',     # outfile
                '%u',     # url
                ), suffix=r'\b'),
             Name.Variable),

            # fallback
            (r'.', Text),
        ],
    }
|
849 |
|
850 |
|
class AugeasLexer(RegexLexer):
    """
    Lexer for `Augeas <http://augeas.net>`_.

    .. versionadded:: 2.4
    """
    name = 'Augeas'
    aliases = ['augeas']
    filenames = ['*.aug']

    tokens = {
        'root': [
            (r'(module)(\s*)([^\s=]+)', bygroups(Keyword.Namespace, Text, Name.Namespace)),
            (r'(let)(\s*)([^\s=]+)', bygroups(Keyword.Declaration, Text, Name.Variable)),
            (r'(del|store|value|counter|seq|key|label|autoload|incl|excl|transform|test|get|put)(\s+)', bygroups(Name.Builtin, Text)),
            # typed parameter, e.g. "(name:string)"
            (r'(\()([^:]+)(\:)(unit|string|regexp|lens|tree|filter)(\))', bygroups(Punctuation, Name.Variable, Punctuation, Keyword.Type, Punctuation)),
            (r'\(\*', Comment.Multiline, 'comment'),
            (r'[*+\-.;=?|]', Operator),
            (r'[()\[\]{}]', Operator),
            (r'"', String.Double, 'string'),
            (r'\/', String.Regex, 'regex'),
            # qualified name, e.g. "Module.lens"
            (r'([A-Z]\w*)(\.)(\w+)', bygroups(Name.Namespace, Punctuation, Name.Variable)),
            # NOTE(review): '.' also matches spaces, so the '\s' rule below
            # effectively only ever sees newlines.
            (r'.', Name.Variable),
            (r'\s', Text),
        ],
        'string': [
            (r'\\.', String.Escape),
            (r'[^"]', String.Double),
            (r'"', String.Double, '#pop'),
        ],
        'regex': [
            (r'\\.', String.Escape),
            (r'[^/]', String.Regex),
            (r'\/', String.Regex, '#pop'),
        ],
        'comment': [
            (r'[^*)]', Comment.Multiline),
            (r'\(\*', Comment.Multiline, '#push'),  # nested comments
            (r'\*\)', Comment.Multiline, '#pop'),
            (r'[)*]', Comment.Multiline)
        ],
    }
|
893 |
|
894 |
|
class TOMLLexer(RegexLexer):
    """
    Lexer for `TOML <https://github.com/toml-lang/toml>`_, a simple language
    for config files.

    .. versionadded:: 2.4
    """

    name = 'TOML'
    aliases = ['toml']
    filenames = ['*.toml', 'Pipfile', 'poetry.lock']

    tokens = {
        'root': [

            # Basics, comments, strings
            (r'\s+', Text),
            (r'#.*?$', Comment.Single),
            # Basic string
            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
            # Literal string
            (r'\'\'\'(.*)\'\'\'', String),
            (r'\'[^\']*\'', String),
            # NOTE: booleans only match when they end the line
            (r'(true|false)$', Keyword.Constant),
            (r'[a-zA-Z_][\w\-]*', Name),

            # table header, e.g. "[table]" (only when it ends the line)
            (r'\[.*?\]$', Keyword),
            # Datetime
            # TODO this needs to be expanded, as TOML is rather flexible:
            # https://github.com/toml-lang/toml#offset-date-time
            (r'\d{4}-\d{2}-\d{2}(?:T| )\d{2}:\d{2}:\d{2}(?:Z|[-+]\d{2}:\d{2})', Number.Integer),

            # Numbers
            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
            (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
            # Handle +-inf, +-infinity, +-nan
            (r'[+-]?(?:(inf(?:inity)?)|nan)', Number.Float),
            (r'[+-]?\d+', Number.Integer),

            # Punctuation
            (r'[]{}:(),;[]', Punctuation),
            (r'\.', Punctuation),

            # Operators
            (r'=', Operator)

        ]
    }
|
943 |
|
944 |
|
class SingularityLexer(RegexLexer):
    """
    Lexer for `Singularity definition files
    <https://www.sylabs.io/guides/3.0/user-guide/definition_files.html>`_.

    .. versionadded:: 2.6
    """

    name = 'Singularity'
    aliases = ['singularity']
    filenames = ['*.def', 'Singularity']
    flags = re.IGNORECASE | re.MULTILINE | re.DOTALL

    # header keywords such as "Bootstrap:" / "From:"
    _headers = r'^(\s*)(bootstrap|from|osversion|mirrorurl|include|registry|namespace|includecmd)(:)'
    # %-sections whose body is lexed as shell script
    _section = r'^%(?:pre|post|setup|environment|help|labels|test|runscript|files|startscript)\b'
    _appsect = r'^%app(?:install|help|run|labels|env|test|files)\b'

    tokens = {
        'root': [
            (_section, Generic.Heading, 'script'),
            (_appsect, Generic.Heading, 'script'),
            (_headers, bygroups(Text, Keyword, Text)),
            (r'\s*#.*?\n', Comment),
            (r'\b(([0-9]+\.?[0-9]*)|(\.[0-9]+))\b', Number),
            (r'(?!^\s*%).', Text),
        ],
        'script': [
            # everything up to the next %-section (or EOF) is shell
            (r'(.+?(?=^\s*%))|(.*)', using(BashLexer), '#pop'),
        ],
    }

    def analyse_text(text):
        """This is a quite simple script file, but there are a few keywords
        which seem unique to this language."""
        result = 0
        if re.search(r'\b(?:osversion|includecmd|mirrorurl)\b', text, re.IGNORECASE):
            result += 0.5

        # reuse the section regex, minus its '^' anchor
        if re.search(SingularityLexer._section[1:], text):
            result += 0.49

        return result
|