eric6/ThirdParty/Pygments/pygments/lexers/configs.py

changeset 6942
2602857055c5
parent 6651
e8f3b5568b21
child 7547
21b0534faebc
equal deleted inserted replaced
6941:f99d60d6b59b 6942:2602857055c5
1 # -*- coding: utf-8 -*-
2 """
3 pygments.lexers.configs
4 ~~~~~~~~~~~~~~~~~~~~~~~
5
6 Lexers for configuration file formats.
7
8 :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
9 :license: BSD, see LICENSE for details.
10 """
11
12 import re
13
14 from pygments.lexer import RegexLexer, default, words, bygroups, include, using
15 from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
16 Number, Punctuation, Whitespace, Literal
17 from pygments.lexers.shell import BashLexer
18 from pygments.lexers.data import JsonLexer
19
20 __all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer',
21 'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
22 'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer',
23 'TerraformLexer', 'TermcapLexer', 'TerminfoLexer',
24 'PkgConfigLexer', 'PacmanConfLexer']
25
26
class IniLexer(RegexLexer):
    """
    Lexer for configuration files in INI style.
    """

    name = 'INI'
    aliases = ['ini', 'cfg', 'dosini']
    filenames = ['*.ini', '*.cfg', '*.inf']
    mimetypes = ['text/x-ini', 'text/inf']

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'[;#].*', Comment.Single),
            # section header, e.g. "[section]"
            (r'\[.*?\]$', Keyword),
            # "key = value"; the value may be continued on following lines
            # that begin with whitespace (matched by the trailing group)
            (r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
             bygroups(Name.Attribute, Text, Operator, Text, String)),
            # standalone option, supported by some INI parsers
            (r'(.+?)$', Name.Attribute),
        ],
    }

    def analyse_text(text):
        # Heuristic: the very first line is a "[...]" section header.
        # NOTE: Pygments calls analyse_text as a plain function (no self).
        npos = text.find('\n')
        if npos < 3:
            return False
        return text[0] == '[' and text[npos-1] == ']'
54
55
class RegeditLexer(RegexLexer):
    """
    Lexer for `Windows Registry
    <http://en.wikipedia.org/wiki/Windows_Registry#.REG_files>`_ files produced
    by regedit.

    .. versionadded:: 1.6
    """

    name = 'reg'
    aliases = ['registry']
    filenames = ['*.reg']
    mimetypes = ['text/x-windows-registry']

    tokens = {
        'root': [
            # header line emitted by regedit exports
            (r'Windows Registry Editor.*', Text),
            (r'\s+', Text),
            (r'[;#].*', Comment.Single),
            # key path, e.g. "[HKEY_LOCAL_MACHINE\...]"; a leading "-"
            # marks the key for deletion
            (r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
             bygroups(Keyword, Operator, Name.Builtin, Keyword)),
            # String keys, which obey somewhat normal escaping
            (r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
             bygroups(Name.Attribute, Text, Operator, Text),
             'value'),
            # Bare keys (includes @)
            (r'(.*?)([ \t]*)(=)([ \t]*)',
             bygroups(Name.Attribute, Text, Operator, Text),
             'value'),
        ],
        'value': [
            (r'-', Operator, '#pop'),  # delete value
            # typed values such as "dword:0000000a" or "hex(2):..."
            (r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
             bygroups(Name.Variable, Punctuation, Number), '#pop'),
            # As far as I know, .reg files do not support line continuation.
            (r'.+', String, '#pop'),
            default('#pop'),
        ]
    }

    def analyse_text(text):
        # Exported .reg files always start with this banner.
        # NOTE: Pygments calls analyse_text as a plain function (no self).
        return text.startswith('Windows Registry Editor')
98
99
class PropertiesLexer(RegexLexer):
    """
    Lexer for configuration files in Java's properties format.

    Note: trailing whitespace counts as part of the value as per spec

    .. versionadded:: 1.4
    """

    name = 'Properties'
    aliases = ['properties', 'jproperties']
    filenames = ['*.properties']
    mimetypes = ['text/x-java-properties']

    tokens = {
        'root': [
            # "key value" separated by a single space/tab (no = or :)
            (r'^(\w+)([ \t])(\w+\s*)$', bygroups(Name.Attribute, Text, String)),
            # bare key with escaped whitespace, no value
            (r'^\w+(\\[ \t]\w*)*$', Name.Attribute),
            (r'(^ *)([#!].*)', bygroups(Text, Comment)),
            # More controversial comments
            (r'(^ *)((?:;|//).*)', bygroups(Text, Comment)),
            # "key = value" / "key: value"; backslash-newline continues
            # the value on the next line
            (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
             bygroups(Name.Attribute, Text, Operator, Text, String)),
            (r'\s', Text),
        ],
    }
126
127
128 def _rx_indent(level):
129 # Kconfig *always* interprets a tab as 8 spaces, so this is the default.
130 # Edit this if you are in an environment where KconfigLexer gets expanded
131 # input (tabs expanded to spaces) and the expansion tab width is != 8,
132 # e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width).
133 # Value range here is 2 <= {tab_width} <= 8.
134 tab_width = 8
135 # Regex matching a given indentation {level}, assuming that indentation is
136 # a multiple of {tab_width}. In other cases there might be problems.
137 if tab_width == 2:
138 space_repeat = '+'
139 else:
140 space_repeat = '{1,%d}' % (tab_width - 1)
141 if level == 1:
142 level_repeat = ''
143 else:
144 level_repeat = '{%s}' % level
145 return r'(?:\t| %s\t| {%s})%s.*\n' % (space_repeat, tab_width, level_repeat)
146
147
class KconfigLexer(RegexLexer):
    """
    For Linux-style Kconfig files.

    .. versionadded:: 1.6
    """

    name = 'Kconfig'
    aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
    # Adjust this if new kconfig file names appear in your environment
    filenames = ['Kconfig', '*Config.in*', 'external.in*',
                 'standard-modules.in']
    mimetypes = ['text/x-kconfig']
    # No re.MULTILINE, indentation-aware help text needs line-by-line handling
    flags = 0

    # The two helpers below run once at class-body evaluation time to build
    # the token table; they are not instance methods (no ``self``).
    def call_indent(level):
        # If indentation >= {level} is detected, enter state 'indent{level}'
        return (_rx_indent(level), String.Doc, 'indent%s' % level)

    def do_indent(level):
        # Print paragraphs of indentation level >= {level} as String.Doc,
        # ignoring blank lines. Then return to 'root' state.
        return [
            (_rx_indent(level), String.Doc),
            (r'\s*\n', Text),
            default('#pop:2')
        ]

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'#.*?\n', Comment.Single),
            (words((
                'mainmenu', 'config', 'menuconfig', 'choice', 'endchoice',
                'comment', 'menu', 'endmenu', 'visible if', 'if', 'endif',
                'source', 'prompt', 'select', 'depends on', 'default',
                'range', 'option'), suffix=r'\b'),
             Keyword),
            (r'(---help---|help)[\t ]*\n', Keyword, 'help'),
            (r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b',
             Name.Builtin),
            (r'[!=&|]', Operator),
            (r'[()]', Punctuation),
            (r'[0-9]+', Number.Integer),
            (r"'(''|[^'])*'", String.Single),
            (r'"(""|[^"])*"', String.Double),
            (r'\S+', Text),
        ],
        # Help text is indented, multi-line and ends when a lower indentation
        # level is detected.
        'help': [
            # Skip blank lines after help token, if any
            (r'\s*\n', Text),
            # Determine the first help line's indentation level heuristically(!).
            # Attention: this is not perfect, but works for 99% of "normal"
            # indentation schemes up to a max. indentation level of 7.
            # Deeper levels must be tried first so the longest indent wins.
            call_indent(7),
            call_indent(6),
            call_indent(5),
            call_indent(4),
            call_indent(3),
            call_indent(2),
            call_indent(1),
            default('#pop'),  # for incomplete help sections without text
        ],
        # Handle text for indentation levels 7 to 1
        'indent7': do_indent(7),
        'indent6': do_indent(6),
        'indent5': do_indent(5),
        'indent4': do_indent(4),
        'indent3': do_indent(3),
        'indent2': do_indent(2),
        'indent1': do_indent(1),
    }
223
224
class Cfengine3Lexer(RegexLexer):
    """
    Lexer for `CFEngine3 <http://cfengine.org>`_ policy files.

    .. versionadded:: 1.5
    """

    name = 'CFEngine3'
    aliases = ['cfengine3', 'cf3']
    filenames = ['*.cf']
    mimetypes = []

    tokens = {
        'root': [
            (r'#.*?\n', Comment),
            # "body <type> control" header
            (r'(body)(\s+)(\S+)(\s+)(control)',
             bygroups(Keyword, Text, Keyword, Text, Keyword)),
            # "body/bundle <type> <name>(" — with an argument list
            (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()',
             bygroups(Keyword, Text, Keyword, Text, Name.Function, Punctuation),
             'arglist'),
            # "body/bundle <type> <name>" — without arguments
            (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)',
             bygroups(Keyword, Text, Keyword, Text, Name.Function)),
            # typed promise attribute: "name" string => ...
            (r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)',
             bygroups(Punctuation, Name.Variable, Punctuation,
                      Text, Keyword.Type, Text, Operator, Text)),
            (r'(\S+)(\s*)(=>)(\s*)',
             bygroups(Keyword.Reserved, Text, Operator, Text)),
            (r'"', String, 'string'),
            (r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
            # class expression, e.g. "linux|solaris::"
            (r'([\w.!&|()]+)(::)', bygroups(Name.Class, Punctuation)),
            # promise type, e.g. "files:"
            (r'(\w+)(:)', bygroups(Keyword.Declaration, Punctuation)),
            # list/variable expansion: @{name} or @(name)
            (r'@[{(][^)}]+[})]', Name.Variable),
            (r'[(){},;]', Punctuation),
            (r'=>', Operator),
            (r'->', Operator),
            (r'\d+\.\d+', Number.Float),
            (r'\d+', Number.Integer),
            (r'\w+', Name.Function),
            (r'\s+', Text),
        ],
        'string': [
            (r'\$[{(]', String.Interpol, 'interpol'),
            (r'\\.', String.Escape),
            (r'"', String, '#pop'),
            (r'\n', String),
            (r'.', String),
        ],
        # ${...} / $(...) interpolations may nest, hence #push/#pop
        'interpol': [
            (r'\$[{(]', String.Interpol, '#push'),
            (r'[})]', String.Interpol, '#pop'),
            (r'[^${()}]+', String.Interpol),
        ],
        'arglist': [
            (r'\)', Punctuation, '#pop'),
            (r',', Punctuation),
            (r'\w+', Name.Variable),
            (r'\s+', Text),
        ],
    }
284
285
class ApacheConfLexer(RegexLexer):
    """
    Lexer for configuration files following the Apache config file
    format.

    .. versionadded:: 0.6
    """

    name = 'ApacheConf'
    aliases = ['apacheconf', 'aconf', 'apache']
    filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
    mimetypes = ['text/x-apacheconf']
    flags = re.MULTILINE | re.IGNORECASE

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'(#.*?)$', Comment),
            # section tags like <Directory /foo> or </Directory>
            (r'(<[^\s>]+)(?:(\s+)(.*?))?(>)',
             bygroups(Name.Tag, Text, String, Name.Tag)),
            # a directive name followed by whitespace starts its value
            (r'([a-z]\w*)(\s+)',
             bygroups(Name.Builtin, Text), 'value'),
            (r'\.+', Text),
        ],
        'value': [
            # backslash-newline continues the value on the next line
            (r'\\\n', Text),
            # end of line ends the value (re.MULTILINE makes $ per-line)
            (r'$', Text, '#pop'),
            (r'\\', Text),
            (r'[^\S\n]+', Text),
            # IP address with optional CIDR suffix
            (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
            (r'\d+', Number),
            (r'/([a-z0-9][\w./-]+)', String.Other),
            (r'(on|off|none|any|all|double|email|dns|min|minimal|'
             r'os|productonly|full|emerg|alert|crit|error|warn|'
             r'notice|info|debug|registry|script|inetd|standalone|'
             r'user|group)\b', Keyword),
            (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
            (r'[^\s"\\]+', Text)
        ],
    }
326
327
class SquidConfLexer(RegexLexer):
    """
    Lexer for `squid <http://www.squid-cache.org/>`_ configuration files.

    .. versionadded:: 0.9
    """

    name = 'SquidConf'
    aliases = ['squidconf', 'squid.conf', 'squid']
    filenames = ['squid.conf']
    mimetypes = ['text/x-squidconf']
    flags = re.IGNORECASE

    # Configuration directive names.
    # NOTE(review): "cahce_replacement_policy" looks like a typo for
    # "cache_replacement_policy" — preserved as-is; confirm against the
    # upstream Pygments word list before changing.
    keywords = (
        "access_log", "acl", "always_direct", "announce_host",
        "announce_period", "announce_port", "announce_to", "anonymize_headers",
        "append_domain", "as_whois_server", "auth_param_basic",
        "authenticate_children", "authenticate_program", "authenticate_ttl",
        "broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
        "cache_dir", "cache_dns_program", "cache_effective_group",
        "cache_effective_user", "cache_host", "cache_host_acl",
        "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
        "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
        "cache_peer_access", "cahce_replacement_policy", "cache_stoplist",
        "cache_stoplist_pattern", "cache_store_log", "cache_swap",
        "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
        "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
        "dead_peer_timeout", "debug_options", "delay_access", "delay_class",
        "delay_initial_bucket_level", "delay_parameters", "delay_pools",
        "deny_info", "dns_children", "dns_defnames", "dns_nameservers",
        "dns_testnames", "emulate_httpd_log", "err_html_text",
        "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
        "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
        "ftp_passive", "ftp_user", "half_closed_clients", "header_access",
        "header_replace", "hierarchy_stoplist", "high_response_time_warning",
        "high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
        "http_anonymizer", "httpd_accel", "httpd_accel_host",
        "httpd_accel_port", "httpd_accel_uses_host_header",
        "httpd_accel_with_proxy", "http_port", "http_reply_access",
        "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
        "ident_lookup", "ident_lookup_access", "ident_timeout",
        "incoming_http_average", "incoming_icp_average", "inside_firewall",
        "ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
        "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
        "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
        "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
        "mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
        "memory_pools_limit", "memory_replacement_policy", "mime_table",
        "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
        "minimum_object_size", "minimum_retry_timeout", "miss_access",
        "negative_dns_ttl", "negative_ttl", "neighbor_timeout",
        "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
        "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
        "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
        "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
        "quick_abort", "quick_abort_max", "quick_abort_min",
        "quick_abort_pct", "range_offset_limit", "read_timeout",
        "redirect_children", "redirect_program",
        "redirect_rewrites_host_header", "reference_age",
        "refresh_pattern", "reload_into_ims", "request_body_max_size",
        "request_size", "request_timeout", "shutdown_lifetime",
        "single_parent_bypass", "siteselect_timeout", "snmp_access",
        "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
        "store_avg_object_size", "store_objects_per_bucket",
        "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
        "tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
        "test_reachability", "udp_hit_obj", "udp_hit_obj_size",
        "udp_incoming_address", "udp_outgoing_address", "unique_hostname",
        "unlinkd_program", "uri_whitespace", "useragent_log",
        "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
    )

    # Option/value constants that may follow directives.
    opts = (
        "proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
        "multicast-responder", "on", "off", "all", "deny", "allow", "via",
        "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
        "credentialsttl", "none", "disable", "offline_toggle", "diskd",
    )

    # cachemgr action names.
    # NOTE(review): entries are passed through words(), which escapes regex
    # metacharacters, so the "." in 'squid.conf' should match literally —
    # confirm against pygments.lexer.words behavior.
    actions = (
        "shutdown", "info", "parameter", "server_list", "client_list",
        r'squid.conf',
    )

    actions_stats = (
        "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
        "redirector", "io", "reply_headers", "filedescriptors", "netdb",
    )

    actions_log = ("status", "enable", "disable", "clear")

    # ACL type names.
    acls = (
        "url_regex", "urlpath_regex", "referer_regex", "port", "proto",
        "req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
        "dst", "time", "dstdomain", "ident", "snmp_community",
    )

    # Matches an IPv4 address (decimal/hex/octal octets) or an IPv6 address.
    ip_re = (
        r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|'
        r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|'
        r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|'
        r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}'
        r'(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|'
        r'(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|'
        r'[1-9]?\d)){3}))'
    )

    tokens = {
        'root': [
            (r'\s+', Whitespace),
            (r'#', Comment, 'comment'),
            (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(opts, prefix=r'\b', suffix=r'\b'), Name.Constant),
            # Actions
            (words(actions, prefix=r'\b', suffix=r'\b'), String),
            (words(actions_stats, prefix=r'stats/', suffix=r'\b'), String),
            (words(actions_log, prefix=r'log/', suffix=r'='), String),
            (words(acls, prefix=r'\b', suffix=r'\b'), Keyword),
            # address, optionally followed by "/netmask-or-prefixlen"
            (ip_re + r'(?:/(?:' + ip_re + r'|\b\d+\b))?', Number.Float),
            # plain number, range "n-m", or percentage
            (r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
            (r'\S+', Text),
        ],
        'comment': [
            # "# TAG: ..." lines from the annotated default config are
            # highlighted differently from ordinary comments
            (r'\s*TAG:.*', String.Escape, '#pop'),
            (r'.+', Comment, '#pop'),
            default('#pop'),
        ],
    }
456
457
class NginxConfLexer(RegexLexer):
    """
    Lexer for `Nginx <http://nginx.net/>`_ configuration files.

    .. versionadded:: 0.11
    """
    name = 'Nginx configuration file'
    aliases = ['nginx']
    filenames = ['nginx.conf']
    mimetypes = ['text/x-nginx-conf']

    tokens = {
        'root': [
            (r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Text, Name)),
            # directive name starts a statement
            (r'[^\s;#]+', Keyword, 'stmt'),
            include('base'),
        ],
        'block': [
            # closing brace pops both 'block' and the enclosing 'stmt'
            (r'\}', Punctuation, '#pop:2'),
            (r'[^\s;#]+', Keyword.Namespace, 'stmt'),
            include('base'),
        ],
        'stmt': [
            (r'\{', Punctuation, 'block'),
            (r';', Punctuation, '#pop'),
            include('base'),
        ],
        'base': [
            (r'#.*\n', Comment.Single),
            (r'on|off', Name.Constant),
            (r'\$[^\s;#()]+', Name.Variable),
            # host:port
            (r'([a-z0-9.-]+)(:)([0-9]+)',
             bygroups(Name, Punctuation, Number.Integer)),
            (r'[a-z-]+/[a-z-+]+', String),  # mimetype
            # (r'[a-zA-Z._-]+', Keyword),
            (r'[0-9]+[km]?\b', Number.Integer),
            # regex argument after "~"
            (r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Text, String.Regex)),
            (r'[:=~]', Punctuation),
            (r'[^\s;#{}$]+', String),  # catch all
            # NOTE(review): the catch-all above also matches "/path", so this
            # pathname rule appears unreachable — confirm before reordering,
            # as rule order changes token output.
            (r'/[^\s;#]*', Name),  # pathname
            (r'\s+', Text),
            (r'[$;]', Text),  # leftover characters
        ],
    }
502
503
class LighttpdConfLexer(RegexLexer):
    """
    Lexer for `Lighttpd <http://lighttpd.net/>`_ configuration files.

    .. versionadded:: 0.11
    """
    name = 'Lighttpd configuration file'
    aliases = ['lighty', 'lighttpd']
    filenames = []
    mimetypes = ['text/x-lighttpd-conf']

    tokens = {
        'root': [
            (r'#.*\n', Comment.Single),
            (r'/\S*', Name),  # pathname
            # option names, e.g. "server.document-root"
            (r'[a-zA-Z._-]+', Keyword),
            # IP address with optional CIDR suffix
            (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
            (r'[0-9]+', Number),
            (r'=>|=~|\+=|==|=|\+', Operator),
            # environment/builtin variables like $HTTP
            (r'\$[A-Z]+', Name.Builtin),
            (r'[(){}\[\],]', Punctuation),
            (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
            (r'\s+', Text),
        ],

    }
530
531
class DockerLexer(RegexLexer):
    """
    Lexer for `Docker <http://docker.io>`_ configuration files.

    .. versionadded:: 2.0
    """
    name = 'Docker'
    aliases = ['docker', 'dockerfile']
    filenames = ['Dockerfile', '*.docker']
    mimetypes = ['text/x-dockerfile-config']

    # Instructions whose argument is highlighted as a plain string.
    _keywords = (r'(?:FROM|MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
    # Instructions whose argument is delegated to the Bash lexer.
    _bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)')
    _lb = r'(?:\s*\\?\s*)'  # dockerfile line break regex
    flags = re.IGNORECASE | re.MULTILINE

    tokens = {
        'root': [
            (r'#.*', Comment),
            (r'(ONBUILD)(%s)' % (_lb,), bygroups(Keyword, using(BashLexer))),
            # HEALTHCHECK with its --interval=... style options
            (r'(HEALTHCHECK)((%s--\w+=\w+%s)*)' % (_lb, _lb),
             bygroups(Keyword, using(BashLexer))),
            # exec (JSON array) form, e.g. CMD ["sh", "-c", "..."]
            (r'(VOLUME|ENTRYPOINT|CMD|SHELL)(%s)(\[.*?\])' % (_lb,),
             bygroups(Keyword, using(BashLexer), using(JsonLexer))),
            (r'(LABEL|ENV|ARG)((%s\w+=\w+%s)*)' % (_lb, _lb),
             bygroups(Keyword, using(BashLexer))),
            (r'(%s|VOLUME)\b(.*)' % (_keywords), bygroups(Keyword, String)),
            (r'(%s)' % (_bash_keywords,), Keyword),
            # everything else (including backslash continuations) as shell
            (r'(.*\\\n)*.+', using(BashLexer)),
        ]
    }
563
564
class TerraformLexer(RegexLexer):
    """
    Lexer for `Terraform .tf files <https://www.terraform.io/>`_.

    .. versionadded:: 2.1
    """

    name = 'Terraform'
    aliases = ['terraform', 'tf']
    filenames = ['*.tf']
    mimetypes = ['application/x-tf', 'application/x-terraform']

    tokens = {
        'root': [
            include('string'),
            include('punctuation'),
            include('curly'),
            include('basic'),
            include('whitespace'),
            (r'[0-9]+', Number),
        ],
        'basic': [
            (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Type),
            (r'\s*/\*', Comment.Multiline, 'comment'),
            (r'\s*#.*\n', Comment.Single),
            # "attribute = ..." assignment
            (r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)),
            # top-level block keywords, followed by quoted labels
            (words(('variable', 'resource', 'provider', 'provisioner', 'module'),
                   prefix=r'\b', suffix=r'\b'), Keyword.Reserved, 'function'),
            (words(('ingress', 'egress', 'listener', 'default', 'connection', 'alias'),
                   prefix=r'\b', suffix=r'\b'), Keyword.Declaration),
            # "${...}" interpolation
            (r'\$\{', String.Interpol, 'var_builtin'),
        ],
        'function': [
            (r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)),
            include('punctuation'),
            include('curly'),
        ],
        'var_builtin': [
            # interpolations may nest
            (r'\$\{', String.Interpol, '#push'),
            (words(('concat', 'file', 'join', 'lookup', 'element'),
                   prefix=r'\b', suffix=r'\b'), Name.Builtin),
            include('string'),
            include('punctuation'),
            (r'\s+', Text),
            (r'\}', String.Interpol, '#pop'),
        ],
        'string': [
            (r'(".*")', bygroups(String.Double)),
        ],
        'punctuation': [
            (r'[\[\](),.]', Punctuation),
        ],
        # Keep this separate from punctuation - we sometimes want to use different
        # Tokens for { }
        'curly': [
            (r'\{', Text.Punctuation),
            (r'\}', Text.Punctuation),
        ],
        'comment': [
            (r'[^*/]', Comment.Multiline),
            # nested /* ... */ comments
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)
        ],
        'whitespace': [
            (r'\n', Text),
            (r'\s+', Text),
            (r'\\\n', Text),
        ],
    }
635
636
class TermcapLexer(RegexLexer):
    """
    Lexer for termcap database source.

    This is very simple and minimal.

    .. versionadded:: 2.1
    """
    name = 'Termcap'
    aliases = ['termcap']
    filenames = ['termcap', 'termcap.src']
    mimetypes = []

    # NOTE:
    # * multiline with trailing backslash
    # * separator is ':'
    # * to embed colon as data, we must use \072
    # * space after separator is not allowed (maybe)
    tokens = {
        'root': [
            (r'^#.*$', Comment),
            # first name of an entry starts the name list
            (r'^[^\s#:|]+', Name.Tag, 'names'),
        ],
        'names': [
            (r'\n', Text, '#pop'),
            (r':', Punctuation, 'defs'),
            (r'\|', Punctuation),
            (r'[^:|]+', Name.Attribute),
        ],
        'defs': [
            # backslash-newline continuation
            (r'\\\n[ \t]*', Text),
            # plain newline ends the entry: pop 'defs' and 'names'
            (r'\n[ \t]*', Text, '#pop:2'),
            # numeric capability, e.g. "co#80"
            (r'(#)([0-9]+)', bygroups(Operator, Number)),
            (r'=', Operator, 'data'),
            (r':', Punctuation),
            (r'[^\s:=#]+', Name.Class),
        ],
        'data': [
            # \072 is the escaped form of ':'
            (r'\\072', Literal),
            (r':', Punctuation, '#pop'),
            (r'[^:\\]+', Literal),  # for performance
            (r'.', Literal),
        ],
    }
681
682
class TerminfoLexer(RegexLexer):
    """
    Lexer for terminfo database source.

    This is very simple and minimal.

    .. versionadded:: 2.1
    """
    name = 'Terminfo'
    aliases = ['terminfo']
    filenames = ['terminfo', 'terminfo.src']
    mimetypes = []

    # NOTE:
    # * multiline with leading whitespace
    # * separator is ','
    # * to embed comma as data, we can use \,
    # * space after separator is allowed
    tokens = {
        'root': [
            (r'^#.*$', Comment),
            # first name of an entry starts the name list
            (r'^[^\s#,|]+', Name.Tag, 'names'),
        ],
        'names': [
            (r'\n', Text, '#pop'),
            (r'(,)([ \t]*)', bygroups(Punctuation, Text), 'defs'),
            (r'\|', Punctuation),
            (r'[^,|]+', Name.Attribute),
        ],
        'defs': [
            # indented continuation line
            (r'\n[ \t]+', Text),
            # unindented newline ends the entry: pop 'defs' and 'names'
            (r'\n', Text, '#pop:2'),
            # numeric capability, e.g. "cols#80"
            (r'(#)([0-9]+)', bygroups(Operator, Number)),
            (r'=', Operator, 'data'),
            (r'(,)([ \t]*)', bygroups(Punctuation, Text)),
            (r'[^\s,=#]+', Name.Class),
        ],
        'data': [
            # escaped comma or backslash inside a value
            (r'\\[,\\]', Literal),
            (r'(,)([ \t]*)', bygroups(Punctuation, Text), '#pop'),
            (r'[^\\,]+', Literal),  # for performance
            (r'.', Literal),
        ],
    }
727
728
class PkgConfigLexer(RegexLexer):
    """
    Lexer for `pkg-config
    <http://www.freedesktop.org/wiki/Software/pkg-config/>`_
    (see also `manual page <http://linux.die.net/man/1/pkg-config>`_).

    .. versionadded:: 2.1
    """

    name = 'PkgConfig'
    aliases = ['pkgconfig']
    filenames = ['*.pc']
    mimetypes = []

    tokens = {
        'root': [
            (r'#.*$', Comment.Single),

            # variable definitions
            (r'^(\w+)(=)', bygroups(Name.Attribute, Operator)),

            # keyword lines
            (r'^([\w.]+)(:)',
             bygroups(Name.Tag, Punctuation), 'spvalue'),

            # variable references
            include('interp'),

            # fallback
            (r'[^${}#=:\n.]+', Text),
            (r'.', Text),
        ],
        'interp': [
            # you can escape literal "$" as "$$"
            (r'\$\$', Text),

            # variable references
            (r'\$\{', String.Interpol, 'curly'),
        ],
        'curly': [
            (r'\}', String.Interpol, '#pop'),
            (r'\w+', Name.Attribute),
        ],
        # value of a keyword line, e.g. "Cflags: -I${includedir}"
        'spvalue': [
            include('interp'),

            (r'#.*$', Comment.Single, '#pop'),
            (r'\n', Text, '#pop'),

            # fallback
            (r'[^${}#\n]+', Text),
            (r'.', Text),
        ],
    }
783
784
class PacmanConfLexer(RegexLexer):
    """
    Lexer for `pacman.conf
    <https://www.archlinux.org/pacman/pacman.conf.5.html>`_.

    Actually, IniLexer works almost fine for this format,
    but it yield error token. It is because pacman.conf has
    a form without assignment like:

        UseSyslog
        Color
        TotalDownload
        CheckSpace
        VerbosePkgLists

    These are flags to switch on.

    .. versionadded:: 2.1
    """

    name = 'PacmanConf'
    aliases = ['pacmanconf']
    filenames = ['pacman.conf']
    mimetypes = []

    tokens = {
        'root': [
            # comment
            (r'#.*$', Comment.Single),

            # section header
            (r'^\s*\[.*?\]\s*$', Keyword),

            # variable definitions
            # (Leading space is allowed...)
            (r'(\w+)(\s*)(=)',
             bygroups(Name.Attribute, Text, Operator)),

            # flags to on
            (r'^(\s*)(\w+)(\s*)$',
             bygroups(Text, Name.Attribute, Text)),

            # built-in special values
            (words((
                '$repo',  # repository
                '$arch',  # architecture
                '%o',     # outfile
                '%u',     # url
                ), suffix=r'\b'),
             Name.Variable),

            # fallback
            (r'.', Text),
        ],
    }

eric ide

mercurial