ThirdParty/Pygments/pygments/lexers/other.py

changeset 684:2f29a0b6e1c7
parent    12:1d8dd9706f46
child     808:8f85926125ef
comparison of 682:91114a975eda with 684:2f29a0b6e1c7
@@ -3,11 +3,11 @@
 pygments.lexers.other
 ~~~~~~~~~~~~~~~~~~~~~

 Lexers for other languages.

-:copyright: Copyright 2006-2009 by the Pygments team, see AUTHORS.
+:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
 :license: BSD, see LICENSE for details.
 """

 import re

@@ -22,11 +22,11 @@
 __all__ = ['SqlLexer', 'MySqlLexer', 'SqliteConsoleLexer', 'BrainfuckLexer',
 'BashLexer', 'BatchLexer', 'BefungeLexer', 'RedcodeLexer',
 'MOOCodeLexer', 'SmalltalkLexer', 'TcshLexer', 'LogtalkLexer',
 'GnuplotLexer', 'PovrayLexer', 'AppleScriptLexer',
 'BashSessionLexer', 'ModelicaLexer', 'RebolLexer', 'ABAPLexer',
-'NewspeakLexer']
+'NewspeakLexer', 'GherkinLexer', 'AsymptoteLexer']

 line_re = re.compile('.*?\n')


 class SqlLexer(RegexLexer):
@@ -319,18 +319,18 @@



 class BashLexer(RegexLexer):
 """
-Lexer for (ba)sh shell scripts.
+Lexer for (ba|k|)sh shell scripts.

 *New in Pygments 0.6.*
 """

 name = 'Bash'
-aliases = ['bash', 'sh']
-filenames = ['*.sh', '*.ebuild', '*.eclass']
+aliases = ['bash', 'sh', 'ksh']
+filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass']
 mimetypes = ['application/x-sh', 'application/x-shellscript']

 tokens = {
 'root': [
 include('basic'),
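
With the new 'ksh' alias and the '*.ksh' and '*.bash' filename patterns, Korn-shell sources resolve to this lexer through the normal lookup functions, provided the release's lexer mapping (pygments/lexers/_mapping.py) was regenerated to include them. A minimal lookup sketch (the file name is just an illustration):

from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

print(get_lexer_by_name('ksh').name)               # 'Bash'
print(get_lexer_for_filename('install.ksh').name)  # 'Bash'
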
@@ -357,12 +357,12 @@
 (r'[\[\]{}()=]', Operator),
 (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
 (r'&&|\|\|', Operator),
 ],
 'data': [
-(r'\$?"(\\\\|\\[0-7]+|\\.|[^"])*"', String.Double),
-(r"\$?'(\\\\|\\[0-7]+|\\.|[^'])*'", String.Single),
+(r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+(r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
 (r';', Text),
 (r'\s+', Text),
 (r'[^=\s\n\[\]{}()$"\'`\\<]+', Text),
 (r'\d+(?= |\Z)', Number),
 (r'\$#?(\w+|.)', Name.Variable),
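
The (?s) flag added to the string rules corresponds to re.DOTALL, so the \\. escape alternative can also consume a backslash followed by a newline, and excluding the backslash from the catch-all character class forces every backslash through an explicit escape alternative; the same change is applied to the Tcsh and POV-Ray string rules further down. A standalone comparison with the re module (the sample text is invented):

import re

old = re.compile(r'\$?"(\\\\|\\[0-7]+|\\.|[^"])*"')
new = re.compile(r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"')

text = '"not closed yet \\" still inside'
print(old.match(text).group())  # stops at the escaped quote, ending the string too early
print(new.match(text))          # None: an escaped quote can no longer terminate the string
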
@@ -631,15 +631,15 @@
 (r'\)', Text, 'afterobject'),
 (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'),
 ],
 '_parenth_helper' : [
 include('whitespaces'),
+(r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
 (r'[-+*/\\~<>=|&#!?,@%\w+:]+', String.Symbol),
 # literals
 (r'\'[^\']*\'', String),
 (r'\$.', String.Char),
-(r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
 (r'#*\(', String.Symbol, 'inner_parenth'),
 ],
 'parenth' : [
 # This state is a bit tricky since
 # we can't just pop this state
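
Moving the number rule ahead of the symbol rule matters because RegexLexer tries the rules of a state in order and uses the first one that matches; the symbol class contains \w, so it would otherwise swallow numeric literals. A quick check of the two patterns with plain re:

import re

symbol = re.compile(r'[-+*/\\~<>=|&#!?,@%\w+:]+')
number = re.compile(r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?')

# Both patterns match a bare integer, so whichever rule comes first
# in '_parenth_helper' decides whether '123' is a Number or a Symbol.
print(symbol.match('123').group())  # '123'
print(number.match('123').group())  # '123'
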
@@ -743,12 +743,12 @@
 (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
 (r'[\[\]{}()=]+', Operator),
 (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
 ],
 'data': [
-(r'"(\\\\|\\[0-7]+|\\.|[^"])*"', String.Double),
-(r"'(\\\\|\\[0-7]+|\\.|[^'])*'", String.Single),
+(r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+(r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
 (r'\s+', Text),
 (r'[^=\s\n\[\]{}()$"\'`\\]+', Text),
 (r'\d+(?= |\Z)', Number),
 (r'\$#?(\w+|.)', Name.Variable),
 ],
@@ -806,11 +806,12 @@
 # Execution-context methods
 (r'(parameter|this|se(lf|nder))(?=[(])', Keyword),
 # Reflection
 (r'(current_predicate|predicate_property)(?=[(])', Keyword),
 # DCGs and term expansion
-(r'(expand_term|(goal|term)_expansion|phrase)(?=[(])', Keyword),
+(r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])',
+Keyword),
 # Entity
 (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])',
 Keyword),
 (r'(object|protocol|category)_property(?=[(])', Keyword),
 # Entity relations
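
The widened alternation now also catches calls to expand_goal, which the old pattern ignored; a direct comparison of the two regular expressions (plain re, outside the lexer):

import re

old = re.compile(r'(expand_term|(goal|term)_expansion|phrase)(?=[(])')
new = re.compile(r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])')

print(bool(old.match('expand_goal(Goal, Expanded)')))  # False
print(bool(new.match('expand_goal(Goal, Expanded)')))  # True
print(bool(new.match('expand_term(Term, Expanded)')))  # True, as before
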
919 (r"[^\\'\n]+", String), 920 (r"[^\\'\n]+", String),
920 (r'\\', String), 921 (r'\\', String),
921 ], 922 ],
922 923
923 'directive': [ 924 'directive': [
925 # Conditional compilation directives
926 (r'(el)?if(?=[(])', Keyword, 'root'),
927 (r'(e(lse|ndif))[.]', Keyword, 'root'),
924 # Entity directives 928 # Entity directives
925 (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'), 929 (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
926 (r'(end_(category|object|protocol))[.]',Keyword, 'root'), 930 (r'(end_(category|object|protocol))[.]',Keyword, 'root'),
927 # Predicate scope directives 931 # Predicate scope directives
928 (r'(public|protected|private)(?=[(])', Keyword, 'root'), 932 (r'(public|protected|private)(?=[(])', Keyword, 'root'),
929 # Other directives 933 # Other directives
930 (r'e(ncoding|xport)(?=[(])', Keyword, 'root'), 934 (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
931 (r'in(fo|itialization)(?=[(])', Keyword, 'root'), 935 (r'in(fo|itialization)(?=[(])', Keyword, 'root'),
932 (r'(dynamic|synchronized|threaded)[.]', Keyword, 'root'), 936 (r'(dynamic|synchronized|threaded)[.]', Keyword, 'root'),
933 (r'(alias|d(ynamic|iscontiguous)|m(eta_predicate|ode|ultifile)' 937 (r'(alias|d(ynamic|iscontiguous)|m(eta_predicate|ode|ultifile)|'
934 r'|synchronized)(?=[(])', Keyword, 'root'), 938 r's(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
935 (r'op(?=[(])', Keyword, 'root'), 939 (r'op(?=[(])', Keyword, 'root'),
936 (r'(calls|use(s|_module))(?=[(])', Keyword, 'root'), 940 (r'(calls|reexport|use(s|_module))(?=[(])', Keyword, 'root'),
937 (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'), 941 (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
938 (r'[a-z][a-zA-Z0-9_]*[.]', Text, 'root'), 942 (r'[a-z][a-zA-Z0-9_]*[.]', Text, 'root'),
939 ], 943 ],
940 944
941 'entityrelations': [ 945 'entityrelations': [
@@ -966,10 +970,19 @@
 # Whitespace
 (r'\n', Text),
 (r'\s+', Text),
 ]
 }
+
+def analyse_text(text):
+if ':- object(' in text:
+return True
+if ':- protocol(' in text:
+return True
+if ':- category(' in text:
+return True
+return False


 def _shortened(word):
 dpos = word.find('$')
 return '|'.join([word[:dpos] + word[dpos+1:i] + r'\b'
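
The new analyse_text hook feeds pygments.lexers.guess_lexer: returning True counts as full confidence, so content that opens an object, protocol or category should be guessed as Logtalk. A minimal sketch (the snippet is invented, and the guess can still lose to another lexer that also reports full confidence):

from pygments.lexers import guess_lexer

source = ':- object(hello_world).\n:- end_object.\n'
print(guess_lexer(source).name)  # expected: 'Logtalk'
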
@@ -1149,11 +1162,11 @@

 tokens = {
 'root': [
 (r'/\*[\w\W]*?\*/', Comment.Multiline),
 (r'//.*\n', Comment.Single),
-(r'"(?:\\.|[^"])+"', String.Double),
+(r'(?s)"(?:\\.|[^"\\])+"', String.Double),
 (r'#(debug|default|else|end|error|fclose|fopen|if|ifdef|ifndef|'
 r'include|range|read|render|statistics|switch|undef|version|'
 r'warning|while|write|define|macro|local|declare)',
 Comment.Preproc),
 (r'\b(aa_level|aa_threshold|abs|acos|acosh|adaptive|adc_bailout|'
@@ -2078,5 +2091,207 @@
 (r'\s+', Text),
 (r'"[^"]*"', Comment)
 ]
 }

2096 class GherkinLexer(RegexLexer):
2097 """
2098 For `Gherkin <http://cukes.info/>`_ syntax.
2099
2100 *New in Pygments 1.2.*
2101 """
2102 name = 'Gherkin'
2103 aliases = ['Cucumber', 'cucumber', 'Gherkin', 'gherkin']
2104 filenames = ['*.feature']
2105 mimetypes = ['text/x-gherkin']
2106
2107 feature_keywords_regexp = r'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функционалност|Функционал|Особина|Могућност|Özellik|Właściwość|Tính năng|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
2108 scenario_keywords_regexp = r'^(\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарио|Сценарий структураси|Сценарий|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Основа|Концепт|Контекст|Założenia|Tình huống|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
2109 examples_regexp = r'^(\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
2110 step_keywords_regexp = r'^(\s*)(하지만|조건|만일|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Унда |То |Онда |Но |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Агар |А |Și |És |anrhegedig a |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Yna |Ya know how |Ya gotta |Y |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Donat |Donada |Diyelim ki |Dengan |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |But y\'all |But |Biết |Bet |BUT |Atunci |And y\'all |And |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )'
2111
2112 tokens = {
2113 'comments': [
2114 (r'#.*$', Comment)
2115 ],
2116 'multiline_descriptions' : [
2117 (step_keywords_regexp, Keyword, "#pop"),
2118 include('comments'),
2119 (r"(\s|.)", Name.Constant)
2120 ],
2121 'multiline_descriptions_on_stack' : [
2122 (step_keywords_regexp, Keyword, "#pop:2"),
2123 include('comments'),
2124 (r"(\s|.)", Name.Constant)
2125 ],
2126 'scenario_table_description': [
2127 (r"\s+\|", Text, 'scenario_table_header'),
2128 include('comments'),
2129 (r"(\s|.)", Name.Constant)
2130 ],
2131 'scenario_table_header': [
2132 (r"\s+\|\s*$", Text, "#pop:2"),
2133 (r"(\s+\|\s*)(#.*)$", bygroups(Text, Comment), "#pop:2"),
2134 include('comments'),
2135 (r"\s+\|", Text),
2136 (r"[^\|]", Name.Variable)
2137 ],
2138 'scenario_sections_on_stack': [
2139 (scenario_keywords_regexp,
2140 bygroups(Text, Name.Class, Name.Class, Name.Constant),
2141 "multiline_descriptions_on_stack")
2142 ],
2143 'narrative': [
2144 include('scenario_sections_on_stack'),
2145 (r"(\s|.)", Name.Builtin)
2146 ],
2147 'table_vars': [
2148 (r'(<[^>]*>)', bygroups(Name.Variable))
2149 ],
2150 'string': [
2151 include('table_vars'),
2152 (r'(\s|.)', String),
2153 ],
2154 'py_string': [
2155 (r'"""', String, "#pop"),
2156 include('string'),
2157 ],
2158 'double_string': [
2159 (r'"', String, "#pop"),
2160 include('string'),
2161 ],
2162 'root': [
2163 (r'\n', Text),
2164 include('comments'),
2165 (r'"""', String, "py_string"),
2166 (r'"', String, "double_string"),
2167 include('table_vars'),
2168 (r'@[^@\s]+', Name.Namespace),
2169 (step_keywords_regexp, bygroups(Text, Keyword)),
2170 (feature_keywords_regexp,
2171 bygroups(Name.Class, Name.Class, Name.Constant), 'narrative'),
2172 (scenario_keywords_regexp,
2173 bygroups(Text, Name.Class, Name.Class, Name.Constant),
2174 "multiline_descriptions"),
2175 (examples_regexp,
2176 bygroups(Text, Name.Class, Name.Class, Name.Constant),
2177 "scenario_table_description"),
2178 (r'(\s|.)', Text)
2179 ]
2180 }
2181
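
Once the class is exported and the lexer mapping knows the 'gherkin'/'cucumber' aliases, feature files can be highlighted like any other language. A short usage sketch with an invented feature snippet:

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name

feature = (
    "Feature: Division\n"
    "  Scenario: Divide two numbers\n"
    "    Given I have entered 6 into the calculator\n"
    "    When I press divide\n"
    "    Then the result should be 3\n"
)
print(highlight(feature, get_lexer_by_name('gherkin'), HtmlFormatter()))
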
2182
2183 class AsymptoteLexer(RegexLexer):
2184 """
2185 For `Asymptote <http://asymptote.sf.net/>`_ source code.
2186
2187 *New in Pygments 1.2.*
2188 """
2189 name = 'Asymptote'
2190 aliases = ['asy', 'asymptote']
2191 filenames = ['*.asy']
2192 mimetypes = ['text/x-asymptote']
2193
2194 #: optional Comment or Whitespace
2195 _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
2196
2197 tokens = {
2198 'whitespace': [
2199 (r'\n', Text),
2200 (r'\s+', Text),
2201 (r'\\\n', Text), # line continuation
2202 (r'//(\n|(.|\n)*?[^\\]\n)', Comment),
2203 (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment),
2204 ],
2205 'statements': [
2206 # simple string (TeX friendly)
2207 (r'"(\\\\|\\"|[^"])*"', String),
2208 # C style string (with character escapes)
2209 (r"'", String, 'string'),
2210 (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
2211 (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
2212 (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
2213 (r'0[0-7]+[Ll]?', Number.Oct),
2214 (r'\d+[Ll]?', Number.Integer),
2215 (r'[~!%^&*+=|?:<>/-]', Operator),
2216 (r'[()\[\],.]', Punctuation),
2217 (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)),
2218 (r'(and|controls|tension|atleast|curl|if|else|while|for|do|'
2219 r'return|break|continue|struct|typedef|new|access|import|'
2220 r'unravel|from|include|quote|static|public|private|restricted|'
2221 r'this|explicit|true|false|null|cycle|newframe|operator)\b', Keyword),
2222 # Since an asy-type-name can be also an asy-function-name,
2223 # in the following we test if the string " [a-zA-Z]" follows
2224 # the Keyword.Type.
2225 # Of course it is not perfect !
2226 (r'(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|'
2227 r'binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|'
2228 r'conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|'
2229 r'guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|'
2230 r'light|line|linefit|marginT|marker|mass|object|pair|parabola|path|'
2231 r'path3|pen|picture|point|position|projection|real|revolution|'
2232 r'scaleT|scientific|segment|side|slice|splitface|string|surface|'
2233 r'tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|'
2234 r'transformation|tree|triangle|trilinear|triple|vector|'
2235 r'vertex|void)(?=([ ]{1,}[a-zA-Z]))', Keyword.Type),
2236 # Now the asy-type-name which are not asy-function-name
2237 # except yours !
2238 # Perhaps useless
2239 (r'(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|'
2240 r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|'
2241 r'picture|position|real|revolution|slice|splitface|ticksgridT|'
2242 r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type),
2243 ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
2244 ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
2245 ],
2246 'root': [
2247 include('whitespace'),
2248 # functions
2249 (r'((?:[a-zA-Z0-9_*\s])+?(?:\s|[*]))' # return arguments
2250 r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
2251 r'(\s*\([^;]*?\))' # signature
2252 r'(' + _ws + r')({)',
2253 bygroups(using(this), Name.Function, using(this), using(this),
2254 Punctuation),
2255 'function'),
2256 # function declarations
2257 (r'((?:[a-zA-Z0-9_*\s])+?(?:\s|[*]))' # return arguments
2258 r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
2259 r'(\s*\([^;]*?\))' # signature
2260 r'(' + _ws + r')(;)',
2261 bygroups(using(this), Name.Function, using(this), using(this),
2262 Punctuation)),
2263 ('', Text, 'statement'),
2264 ],
2265 'statement' : [
2266 include('whitespace'),
2267 include('statements'),
2268 ('[{}]', Punctuation),
2269 (';', Punctuation, '#pop'),
2270 ],
2271 'function': [
2272 include('whitespace'),
2273 include('statements'),
2274 (';', Punctuation),
2275 ('{', Punctuation, '#push'),
2276 ('}', Punctuation, '#pop'),
2277 ],
2278 'string': [
2279 (r"'", String, '#pop'),
2280 (r'\\([\\abfnrtv"\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
2281 (r'\n', String),
2282 (r"[^\\'\n]+", String), # all other characters
2283 (r'\\\n', String), # line continuation
2284 (r'\\n', String), # literal backslash followed by 'n'
2285 (r'\\', String), # stray backslash
2286 ]
2287 }
2288
2289 def get_tokens_unprocessed(self, text):
2290 from pygments.lexers._asybuiltins import ASYFUNCNAME, ASYVARNAME
2291 for index, token, value in \
2292 RegexLexer.get_tokens_unprocessed(self, text):
2293 if token is Name and value in ASYFUNCNAME:
2294 token = Name.Function
2295 elif token is Name and value in ASYVARNAME:
2296 token = Name.Variable
2297 yield index, token, value
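
The get_tokens_unprocessed override post-processes plain Name tokens: identifiers found in the generated ASYFUNCNAME and ASYVARNAME sets from pygments/lexers/_asybuiltins.py are promoted to Name.Function and Name.Variable. A small sketch of the effect, assuming 'draw' and 'currentpen' appear in those generated lists:

from pygments.lexers.other import AsymptoteLexer
from pygments.token import Name

code = 'draw(unitcircle, currentpen);'
for index, token, value in AsymptoteLexer().get_tokens_unprocessed(code):
    if token in Name:
        print(index, token, value)
# 'draw' should come back as Name.Function and 'currentpen' as
# Name.Variable rather than as plain Name.
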
