@@ -4,13 +4,13 b' Defines a variety of Pygments lexers for highlighting IPython code.' | |||
|
4 | 4 | |
|
5 | 5 | This includes: |
|
6 | 6 | |
|
7 | IPython3Lexer | |
|
8 | Lexer for pure IPython (python + magic/shell commands) | |
|
7 | IPythonLexer, IPython3Lexer | |
|
8 | Lexers for pure IPython (python + magic/shell commands) | |
|
9 | 9 | |
|
10 | 10 | IPythonPartialTracebackLexer, IPythonTracebackLexer |
|
11 | The partial traceback lexer reads everything but the Python code | |
|
12 | appearing in a traceback. | |
|
13 | The full lexer combines the partial lexer with the IPython3Lexer. | 
|
11 | Supports 2.x and 3.x via keyword `python3`. The partial traceback | |
|
12 | lexer reads everything but the Python code appearing in a traceback. | |
|
13 | The full lexer combines the partial lexer with an IPython lexer. | |
|
14 | 14 | |
|
15 | 15 | IPythonConsoleLexer |
|
16 | 16 | A lexer for IPython console sessions, with support for tracebacks. |
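Note (not part of the diff): for orientation, a minimal, hedged sketch of how these lexers are typically driven from Pygments; the cell contents are invented for illustration.

    # Usage sketch: highlight an IPython cell that mixes a cell magic and a shell escape.
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from IPython.lib.lexers import IPython3Lexer

    cell = "%%timeit\nxs = [n ** 2 for n in range(10)]\n!echo done\n"
    print(highlight(cell, IPython3Lexer(), TerminalFormatter()))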
@@ -35,22 +35,10 b' import re' | |||
|
35 | 35 | |
|
36 | 36 | # Third party |
|
37 | 37 | from pygments.lexers import ( |
|
38 | BashLexer, | |
|
39 | HtmlLexer, | |
|
40 | JavascriptLexer, | |
|
41 | RubyLexer, | |
|
42 | PerlLexer, | |
|
43 | Python3Lexer, | |
|
44 | TexLexer, | |
|
45 | ) | |
|
38 | BashLexer, HtmlLexer, JavascriptLexer, RubyLexer, PerlLexer, PythonLexer, | |
|
39 | Python3Lexer, TexLexer) | |
|
46 | 40 | from pygments.lexer import ( |
|
47 | Lexer, | |
|
48 | DelegatingLexer, | |
|
49 | RegexLexer, | |
|
50 | do_insertions, | |
|
51 | bygroups, | |
|
52 | using, | |
|
53 | inherit, | |
|
41 | Lexer, DelegatingLexer, RegexLexer, do_insertions, bygroups, using, | |
|
54 | 42 | ) |
|
55 | 43 | from pygments.token import ( |
|
56 | 44 | Generic, Keyword, Literal, Name, Operator, Other, Text, Error, |
@@ -61,106 +49,80 b' from pygments.util import get_bool_opt' | |||
|
61 | 49 | |
|
62 | 50 | line_re = re.compile('.*?\n') |
|
63 | 51 | |
|
64 | __all__ = [ | |
|
65 | "IPython3Lexer", | |
|
66 | "IPythonPartialTracebackLexer", | |
|
67 | "IPythonTracebackLexer", | |
|
68 | "IPythonConsoleLexer", | |
|
69 | "IPyLexer", | |
|
70 | ] | |
|
52 | __all__ = ['build_ipy_lexer', 'IPython3Lexer', 'IPythonLexer', | |
|
53 | 'IPythonPartialTracebackLexer', 'IPythonTracebackLexer', | |
|
54 | 'IPythonConsoleLexer', 'IPyLexer'] | |
|
71 | 55 | |
|
72 | 56 | |
|
73 | class IPython3Lexer(Python3Lexer): | |
|
74 | """IPython3 Lexer""" | |
|
57 | def build_ipy_lexer(python3): | |
|
58 | """Builds IPython lexers depending on the value of `python3`. | |
|
75 | 59 |
|
|
76 | name = "IPython3" | |
|
77 | aliases = ["ipython3"] | |
|
60 | The lexer inherits from an appropriate Python lexer and then adds | |
|
61 | information about IPython specific keywords (i.e. magic commands, | |
|
62 | shell commands, etc.) | |
|
78 | 63 |
|
|
79 | tokens = { | |
|
80 | "root": [ | |
|
81 | ( | |
|
82 | r"(?s)(\s*)(%%capture)([^\n]*\n)(.*)", | |
|
83 | bygroups(Text, Operator, Text, using(Python3Lexer)), | |
|
84 | ), | |
|
85 | ( | |
|
86 | r"(?s)(\s*)(%%debug)([^\n]*\n)(.*)", | |
|
87 | bygroups(Text, Operator, Text, using(Python3Lexer)), | |
|
88 | ), | |
|
89 | ( | |
|
90 | r"(?is)(\s*)(%%html)([^\n]*\n)(.*)", | |
|
91 | bygroups(Text, Operator, Text, using(HtmlLexer)), | |
|
92 | ), | |
|
93 | ( | |
|
94 | r"(?s)(\s*)(%%javascript)([^\n]*\n)(.*)", | |
|
95 | bygroups(Text, Operator, Text, using(JavascriptLexer)), | |
|
96 | ), | |
|
97 | ( | |
|
98 | r"(?s)(\s*)(%%js)([^\n]*\n)(.*)", | |
|
99 | bygroups(Text, Operator, Text, using(JavascriptLexer)), | |
|
100 | ), | |
|
101 | ( | |
|
102 | r"(?s)(\s*)(%%latex)([^\n]*\n)(.*)", | |
|
103 | bygroups(Text, Operator, Text, using(TexLexer)), | 
|
104 | ), | |
|
105 | ( | |
|
106 | r"(?s)(\s*)(%%perl)([^\n]*\n)(.*)", | |
|
107 | bygroups(Text, Operator, Text, using(PerlLexer)), | 
|
108 | ), | |
|
109 | ( | |
|
110 | r"(?s)(\s*)(%%prun)([^\n]*\n)(.*)", | |
|
111 | bygroups(Text, Operator, Text, using(Python3Lexer)), | 
|
112 | ), | |
|
113 | ( | |
|
114 | r"(?s)(\s*)(%%pypy)([^\n]*\n)(.*)", | |
|
115 | bygroups(Text, Operator, Text, using(Python3Lexer)), | 
|
116 | ), | |
|
117 | ( | |
|
118 | r"(?s)(\s*)(%%python)([^\n]*\n)(.*)", | |
|
119 | bygroups(Text, Operator, Text, using(Python3Lexer)), | |
|
120 | ), | |
|
121 | ( | |
|
122 | r"(?s)(\s*)(%%python3)([^\n]*\n)(.*)", | |
|
123 | bygroups(Text, Operator, Text, using(Python3Lexer)), | |
|
124 | ), | |
|
125 | ( | |
|
126 | r"(?s)(\s*)(%%ruby)([^\n]*\n)(.*)", | |
|
127 | bygroups(Text, Operator, Text, using(RubyLexer)), | |
|
128 | ), | |
|
129 | ( | |
|
130 | r"(?s)(\s*)(%%time)([^\n]*\n)(.*)", | |
|
131 | bygroups(Text, Operator, Text, using(Python3Lexer)), | |
|
132 | ), | |
|
133 | ( | |
|
134 | r"(?s)(\s*)(%%timeit)([^\n]*\n)(.*)", | |
|
135 | bygroups(Text, Operator, Text, using(Python3Lexer)), | |
|
136 | ), | |
|
137 | ( | |
|
138 | r"(?s)(\s*)(%%writefile)([^\n]*\n)(.*)", | |
|
139 | bygroups(Text, Operator, Text, using(Python3Lexer)), | |
|
140 | ), | |
|
141 | ( | |
|
142 | r"(?s)(\s*)(%%file)([^\n]*\n)(.*)", | |
|
143 | bygroups(Text, Operator, Text, using(Python3Lexer)), | |
|
144 | ), | |
|
64 | Parameters | |
|
65 | ---------- | |
|
66 | python3 : bool | |
|
67 | If `True`, then build an IPython lexer from a Python 3 lexer. | |
|
68 | ||
|
69 | """ | |
|
70 | # It would be nice to have a single IPython lexer class which takes | |
|
71 | # a boolean `python3`. But since there are two Python lexer classes, | |
|
72 | # we will also have two IPython lexer classes. | |
|
73 | if python3: | |
|
74 | PyLexer = Python3Lexer | |
|
75 | name = 'IPython3' | |
|
76 | aliases = ['ipython3'] | |
|
77 | doc = """IPython3 Lexer""" | |
|
78 | else: | |
|
79 | PyLexer = PythonLexer | |
|
80 | name = 'IPython' | |
|
81 | aliases = ['ipython2', 'ipython'] | |
|
82 | doc = """IPython Lexer""" | |
|
83 | ||
|
84 | ipython_tokens = [ | |
|
85 | (r'(?s)(\s*)(%%capture)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), | |
|
86 | (r'(?s)(\s*)(%%debug)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), | |
|
87 | (r'(?is)(\s*)(%%html)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(HtmlLexer))), | |
|
88 | (r'(?s)(\s*)(%%javascript)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))), | |
|
89 | (r'(?s)(\s*)(%%js)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))), | |
|
90 | (r'(?s)(\s*)(%%latex)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(TexLexer))), | |
|
91 | (r'(?s)(\s*)(%%perl)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PerlLexer))), | |
|
92 | (r'(?s)(\s*)(%%prun)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), | |
|
93 | (r'(?s)(\s*)(%%pypy)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), | |
|
94 | (r'(?s)(\s*)(%%python)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), | |
|
95 | (r'(?s)(\s*)(%%python2)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PythonLexer))), | |
|
96 | (r'(?s)(\s*)(%%python3)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))), | |
|
97 | (r'(?s)(\s*)(%%ruby)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(RubyLexer))), | |
|
98 | (r'(?s)(\s*)(%%time)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), | |
|
99 | (r'(?s)(\s*)(%%timeit)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), | |
|
100 | (r'(?s)(\s*)(%%writefile)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), | |
|
101 | (r'(?s)(\s*)(%%file)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), | |
|
145 | 102 |
|
|
146 | ( | |
|
147 | r"(?s)(^\s*)(%%!)([^\n]*\n)(.*)", | |
|
148 | bygroups(Text, Operator, Text, using(BashLexer)), | |
|
149 | ), | |
|
103 | (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))), | |
|
150 | 104 |
|
|
151 | 105 |
|
|
152 | ( | |
|
153 | r"(%)(sx|sc|system)(.*)(\n)", | |
|
154 |
|
|
|
155 | ), | |
|
156 |
|
|
|
157 | (r"^(!!)(.+)(\n)", bygroups(Operator, using(BashLexer), Text)), | |
|
158 | (r"(!)(?!=)(.+)(\n)", bygroups(Operator, using(BashLexer), Text)), | |
|
159 | (r"^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)", bygroups(Text, Operator, Text)), | |
|
160 | (r"(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$", bygroups(Text, Operator, Text)), | |
|
161 | inherit, | |
|
106 | (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword, | |
|
107 | using(BashLexer), Text)), | |
|
108 | (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)), | |
|
109 | (r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)), | |
|
110 | (r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)), | |
|
111 | (r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)), | |
|
112 | (r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)), | |
|
162 | 113 |
|
|
163 | } | |
|
114 | ||
|
115 | tokens = PyLexer.tokens.copy() | |
|
116 | tokens['root'] = ipython_tokens + tokens['root'] | |
|
117 | ||
|
118 | attrs = {'name': name, 'aliases': aliases, 'filenames': [], | |
|
119 | '__doc__': doc, 'tokens': tokens} | |
|
120 | ||
|
121 | return type(name, (PyLexer,), attrs) | |
|
122 | ||
|
123 | ||
|
124 | IPython3Lexer = build_ipy_lexer(python3=True) | |
|
125 | IPythonLexer = build_ipy_lexer(python3=False) | |
|
164 | 126 | |
|
165 | 127 | |
|
166 | 128 | class IPythonPartialTracebackLexer(RegexLexer): |
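Note (not part of the diff): the restored build_ipy_lexer factory copies a base Python lexer's token table, prepends the IPython-specific rules to its "root" state, and creates the subclass dynamically with type(). The stripped-down sketch below illustrates that pattern only; the lexer name and the single extra rule are hypothetical.

    # Illustration of the dynamic-subclass pattern used by build_ipy_lexer
    # (names and the one extra rule here are made up for the example).
    from pygments.lexer import bygroups
    from pygments.lexers import Python3Lexer
    from pygments.token import Keyword, Operator, Text

    def make_demo_lexer(base=Python3Lexer):
        extra = [(r"(%)(\w+)(.*\n)", bygroups(Operator, Keyword, Text))]
        tokens = base.tokens.copy()
        tokens["root"] = extra + tokens["root"]   # prepend, so magics win over plain Python
        attrs = {"name": "DemoIPython", "aliases": ["demo-ipython"],
                 "filenames": [], "tokens": tokens}
        # type() picks up the base lexer's metaclass, so the token table is
        # processed just as it is for hand-written Pygments lexers.
        return type("DemoIPythonLexer", (base,), attrs)

    DemoIPythonLexer = make_demo_lexer()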
@@ -222,9 +184,9 b' class IPythonTracebackLexer(DelegatingLexer):' | |||
|
222 | 184 | this is the line which lists the File and line number. |
|
223 | 185 | |
|
224 | 186 | """ |
|
225 | ||
|
226 | # The lexer inherits from DelegatingLexer. The "root" lexer is the | |
|
227 | # IPython3 lexer. First, we parse with the partial IPython traceback lexer. | 
|
187 | # The lexer inherits from DelegatingLexer. The "root" lexer is an | |
|
188 | # appropriate IPython lexer, which depends on the value of the boolean | |
|
189 | # `python3`. First, we parse with the partial IPython traceback lexer. | |
|
228 | 190 | # Then, any code marked with the "Other" token is delegated to the root |
|
229 | 191 | # lexer. |
|
230 | 192 | # |
@@ -239,9 +201,19 b' class IPythonTracebackLexer(DelegatingLexer):' | |||
|
239 | 201 | # note we need a __init__ doc, as otherwise it inherits the doc from the super class |
|
240 | 202 | # which will fail the documentation build as it references section of the pygments docs that |
|
241 | 203 | # do not exists when building IPython's docs. |
|
204 | self.python3 = get_bool_opt(options, 'python3', False) | |
|
205 | if self.python3: | |
|
206 | self.aliases = ['ipython3tb'] | |
|
207 | else: | |
|
208 | self.aliases = ['ipython2tb', 'ipythontb'] | |
|
242 | 209 | |
|
243 | super().__init__(IPython3Lexer, IPythonPartialTracebackLexer, **options) | |
|
210 | if self.python3: | |
|
211 | IPyLexer = IPython3Lexer | |
|
212 | else: | |
|
213 | IPyLexer = IPythonLexer | |
|
244 | 214 | |
|
215 | DelegatingLexer.__init__(self, IPyLexer, | |
|
216 | IPythonPartialTracebackLexer, **options) | |
|
245 | 217 | |
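Note (not part of the diff): the delegation works in two passes, as the comments above describe; the partial lexer tags the source lines of a traceback as Other, and DelegatingLexer re-lexes those spans with the root IPython lexer. A small, hedged usage sketch (the traceback text is fabricated):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from IPython.lib.lexers import IPythonTracebackLexer

    tb = (
        "---------------------------------------------------------------------------\n"
        "ZeroDivisionError                         Traceback (most recent call last)\n"
        "<ipython-input-1-abc123> in <module>\n"
        "----> 1 1/0\n"
        "\n"
        "ZeroDivisionError: division by zero\n"
    )
    print(highlight(tb, IPythonTracebackLexer(), TerminalFormatter()))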
|
246 | 218 | class IPythonConsoleLexer(Lexer): |
|
247 | 219 | """ |
@@ -298,6 +270,9 b' class IPythonConsoleLexer(Lexer):' | |||
|
298 | 270 | |
|
299 | 271 | Parameters |
|
300 | 272 | ---------- |
|
273 | python3 : bool | |
|
274 | If `True`, then the console inputs are parsed using a Python 3 | |
|
275 | lexer. Otherwise, they are parsed using a Python 2 lexer. | |
|
301 | 276 | in1_regex : RegexObject |
|
302 | 277 | The compiled regular expression used to detect the start |
|
303 | 278 | of inputs. Although the IPython configuration setting may have a |
@@ -313,7 +288,11 b' class IPythonConsoleLexer(Lexer):' | |||
|
313 | 288 | then the default output prompt is assumed. |
|
314 | 289 | |
|
315 | 290 | """ |
|
316 | self.aliases = ["ipython3console"] | 
|
291 | self.python3 = get_bool_opt(options, 'python3', False) | |
|
292 | if self.python3: | |
|
293 | self.aliases = ['ipython3console'] | |
|
294 | else: | |
|
295 | self.aliases = ['ipython2console', 'ipythonconsole'] | |
|
317 | 296 | |
|
318 | 297 | in1_regex = options.get('in1_regex', self.in1_regex) |
|
319 | 298 | in2_regex = options.get('in2_regex', self.in2_regex) |
@@ -339,8 +318,15 b' class IPythonConsoleLexer(Lexer):' | |||
|
339 | 318 | |
|
340 | 319 | Lexer.__init__(self, **options) |
|
341 | 320 | |
|
342 | self.pylexer = IPython3Lexer(**options) | |
|
343 | self.tblexer = IPythonTracebackLexer(**options) | 
|
321 | if self.python3: | |
|
322 | pylexer = IPython3Lexer | |
|
323 | tblexer = IPythonTracebackLexer | |
|
324 | else: | |
|
325 | pylexer = IPythonLexer | |
|
326 | tblexer = IPythonTracebackLexer | |
|
327 | ||
|
328 | self.pylexer = pylexer(**options) | |
|
329 | self.tblexer = tblexer(**options) | |
|
344 | 330 | |
|
345 | 331 | self.reset() |
|
346 | 332 | |
@@ -526,16 +512,20 b' class IPyLexer(Lexer):' | |||
|
526 | 512 | def __init__(self, **options): |
|
527 | 513 | """ |
|
528 | 514 | Create a new IPyLexer instance which dispatch to either an |
|
529 |
IPythonC |
|
|
515 | IPythonCOnsoleLexer (if In prompts are present) or and IPythonLexer (if | |
|
530 | 516 | In prompts are not present). |
|
531 | 517 | """ |
|
532 | 518 | # init docstring is necessary for docs not to fail to build do to parent |
|
533 | 519 | # docs referenceing a section in pygments docs. |
|
534 | self.aliases = ["ipy3"] | |
|
520 | self.python3 = get_bool_opt(options, 'python3', False) | |
|
521 | if self.python3: | |
|
522 | self.aliases = ['ipy3'] | |
|
523 | else: | |
|
524 | self.aliases = ['ipy2', 'ipy'] | |
|
535 | 525 | |
|
536 | 526 | Lexer.__init__(self, **options) |
|
537 | 527 | |
|
538 | self.IPythonLexer = IPython3Lexer(**options) | 
|
528 | self.IPythonLexer = IPythonLexer(**options) | |
|
539 | 529 | self.IPythonConsoleLexer = IPythonConsoleLexer(**options) |
|
540 | 530 | |
|
541 | 531 | def get_tokens_unprocessed(self, text): |
@@ -547,3 +537,4 b' class IPyLexer(Lexer):' | |||
|
547 | 537 | lex = self.IPythonLexer |
|
548 | 538 | for token in lex.get_tokens_unprocessed(text): |
|
549 | 539 | yield token |
|
540 |
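Note (not part of the diff): a quick sketch of the dispatch described above; IPyLexer routes text whose first line carries an In prompt to the console lexer and everything else to the plain IPython lexer. The sample strings are invented.

    from IPython.lib.lexers import IPyLexer

    lexer = IPyLexer()
    session = "In [1]: %timeit sum(range(10))\n"
    plain = "%timeit sum(range(10))\n"

    print(list(lexer.get_tokens(session))[:3])  # handled by the console lexer
    print(list(lexer.get_tokens(plain))[:3])    # handled by the IPython lexer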
@@ -16,10 +16,10 b' pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)' | |||
|
16 | 16 | class TestLexers(TestCase): |
|
17 | 17 | """Collection of lexers tests""" |
|
18 | 18 | def setUp(self): |
|
19 |
self.lexer = lexers.IPython |
|
|
19 | self.lexer = lexers.IPythonLexer() | |
|
20 | 20 | self.bash_lexer = BashLexer() |
|
21 | 21 | |
|
22 | def testIPython3Lexer(self): | 
|
22 | def testIPythonLexer(self): | |
|
23 | 23 | fragment = '!echo $HOME\n' |
|
24 | 24 | bash_tokens = [ |
|
25 | 25 | (Token.Operator, '!'), |
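Note (not part of the diff): these tests compare the full token stream against an expected list. A self-contained sketch of that assertion style; the expected first token is an assumption based on the shell-escape rule shown in the lexer diff above.

    from pygments.token import Token
    from IPython.lib import lexers

    lexer = lexers.IPython3Lexer()
    fragment = "!echo $HOME\n"
    tokens = list(lexer.get_tokens(fragment))
    assert tokens[0] == (Token.Operator, "!")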
@@ -4,10 +4,12 b' import pytest' | |||
|
4 | 4 | import pygments.lexers |
|
5 | 5 | import pygments.lexer |
|
6 | 6 | |
|
7 | from IPython.lib.lexers import IPythonConsoleLexer, IPython3Lexer | |
|
7 | from IPython.lib.lexers import IPythonConsoleLexer, IPythonLexer, IPython3Lexer | |
|
8 | 8 | |
|
9 | 9 | #: the human-readable names of the IPython lexers with ``entry_points`` |
|
10 | EXPECTED_LEXER_NAMES = [cls.name for cls in [IPythonConsoleLexer, IPython3Lexer]] | |
|
10 | EXPECTED_LEXER_NAMES = [ | |
|
11 | cls.name for cls in [IPythonConsoleLexer, IPythonLexer, IPython3Lexer] | |
|
12 | ] | |
|
11 | 13 | |
|
12 | 14 | |
|
13 | 15 | @pytest.fixture |
@@ -20,5 +20,9 b' def setup(app):' | |||
|
20 | 20 | # Alternatively, we could register the lexer with pygments instead. This would |
|
21 | 21 | # require using setuptools entrypoints: http://pygments.org/docs/plugins |
|
22 | 22 | |
|
23 | highlighting.lexers["ipython"] = IPyLexer() | |
|
24 | highlighting.lexers["ipython3"] = IPyLexer() | |
|
23 | ipy2 = IPyLexer(python3=False) | |
|
24 | ipy3 = IPyLexer(python3=True) | |
|
25 | ||
|
26 | highlighting.lexers['ipython'] = ipy2 | |
|
27 | highlighting.lexers['ipython2'] = ipy2 | |
|
28 | highlighting.lexers['ipython3'] = ipy3 |
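Note (not part of the diff): an alternative to poking highlighting.lexers directly is Sphinx's public add_lexer API, sketched below; recent Sphinx versions accept a lexer class here, and the alias is an assumption.

    from IPython.lib.lexers import IPyLexer

    def setup(app):
        app.add_lexer("ipy", IPyLexer)          # recent Sphinx accepts the lexer class itself
        return {"parallel_read_safe": True}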
@@ -9,20 +9,22 b' The IPython console lexer has been rewritten and now supports tracebacks' | |||
|
9 | 9 | and customized input/output prompts. An entire suite of lexers is now |
|
10 | 10 | available at :mod:`IPython.lib.lexers`. These include: |
|
11 | 11 | |
|
12 | IPython3Lexer | |
|
13 | Lexer for pure IPython (python + magic/shell commands) | 
|
12 | IPythonLexer & IPython3Lexer | |
|
13 | Lexers for pure IPython (python + magic/shell commands) | |
|
14 | 14 | |
|
15 | 15 | IPythonPartialTracebackLexer & IPythonTracebackLexer |
|
16 | The partial traceback lexer reads everything but the Python code | |
|
17 | appearing in a traceback. The full lexer combines the partial lexer | |
|
18 | with the IPython3Lexer. | |
|
16 | Supports 2.x and 3.x via the keyword `python3`. The partial traceback | |
|
17 | lexer reads everything but the Python code appearing in a traceback. | |
|
18 | The full lexer combines the partial lexer with an IPython lexer. | |
|
19 | 19 | |
|
20 | 20 | IPythonConsoleLexer |
|
21 |
A lexer for |
|
|
21 | A lexer for IPython console sessions, with support for tracebacks. | |
|
22 | Supports 2.x and 3.x via the keyword `python3`. | |
|
22 | 23 | |
|
23 | 24 | IPyLexer |
|
24 | 25 | A friendly lexer which examines the first line of text and from it, |
|
25 | 26 | decides whether to use an IPython lexer or an IPython console lexer. |
|
27 | Supports 2.x and 3.x via the keyword `python3`. | |
|
26 | 28 | |
|
27 | 29 | Previously, the :class:`IPythonConsoleLexer` class was available at |
|
28 | 30 | :mod:`IPython.sphinxext.ipython_console_hightlight`. It was inserted |
@@ -145,6 +145,7 b' setup_args["entry_points"] = {' | |||
|
145 | 145 | "console_scripts": find_entry_points(), |
|
146 | 146 | "pygments.lexers": [ |
|
147 | 147 | "ipythonconsole = IPython.lib.lexers:IPythonConsoleLexer", |
|
148 | "ipython = IPython.lib.lexers:IPythonLexer", | |
|
148 | 149 | "ipython3 = IPython.lib.lexers:IPython3Lexer", |
|
149 | 150 | ], |
|
150 | 151 | } |
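Note (not part of the diff): with these entry points installed, Pygments can resolve the lexers by alias without importing IPython explicitly. A usage sketch:

    from pygments.lexers import get_lexer_by_name

    lexer = get_lexer_by_name("ipython3")
    print(type(lexer).__name__)  # expected: the IPython3Lexer registered above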