    pygments.lexers.special
    ~~~~~~~~~~~~~~~~~~~~~~~

    Special lexers.

    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""
11 |
11 |
12 import re |
12 import re |
|
13 from io import BytesIO |
13 |
14 |
14 from pygments.lexer import Lexer |
15 from pygments.lexer import Lexer |
15 from pygments.token import Token, Error, Text |
16 from pygments.token import Token, Error, Text |
16 from pygments.util import get_choice_opt, text_type, BytesIO |
17 from pygments.util import get_choice_opt |
17 |
18 |
18 |
19 |
19 __all__ = ['TextLexer', 'RawTokenLexer'] |
20 __all__ = ['TextLexer', 'RawTokenLexer'] |
20 |
21 |
21 |
22 |
61 self.compress = get_choice_opt(options, 'compress', |
63 self.compress = get_choice_opt(options, 'compress', |
62 ['', 'none', 'gz', 'bz2'], '') |
64 ['', 'none', 'gz', 'bz2'], '') |
63 Lexer.__init__(self, **options) |
65 Lexer.__init__(self, **options) |
64 |
66 |
65 def get_tokens(self, text): |
67 def get_tokens(self, text): |
66 if isinstance(text, text_type): |
68 if isinstance(text, str): |
67 # raw token stream never has any non-ASCII characters |
69 # raw token stream never has any non-ASCII characters |
68 text = text.encode('ascii') |
70 text = text.encode('ascii') |
69 if self.compress == 'gz': |
71 if self.compress == 'gz': |
70 import gzip |
72 import gzip |
71 gzipfile = gzip.GzipFile('', 'rb', 9, BytesIO(text)) |
73 gzipfile = gzip.GzipFile('', 'rb', 9, BytesIO(text)) |