Cleanup Python 2 compat from Lexers
Samuel Gaist
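The diff below drops the Python 2 code path from IPython's Pygments lexers: the `build_ipy_lexer` factory and the `python3` option go away, `IPythonLexer` is removed, and `IPython3Lexer` becomes a direct `Python3Lexer` subclass. A minimal usage sketch of the surviving API (assuming a checkout of this branch is installed; the sample cell is invented):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from IPython.lib.lexers import IPython3Lexer

# A cell mixing Python 3 code with an IPython cell magic and a shell escape.
source = "%%timeit\n!ls -la\nx = sum(range(10))\n"
print(highlight(source, IPython3Lexer(), TerminalFormatter()))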
@@ -4,13 +4,13 @@ Defines a variety of Pygments lexers for highlighting IPython code.
 
 This includes:
 
-    IPythonLexer, IPython3Lexer
-        Lexers for pure IPython (python + magic/shell commands)
+    IPython3Lexer
+        Lexer for pure IPython (python + magic/shell commands)
 
     IPythonPartialTracebackLexer, IPythonTracebackLexer
-        Supports 2.x and 3.x via keyword `python3`. The partial traceback
-        lexer reads everything but the Python code appearing in a traceback.
-        The full lexer combines the partial lexer with an IPython lexer.
+        The partial traceback lexer reads everything but the Python code
+        appearing in a traceback.
+        The full lexer combines the partial lexer with the IPython3Lexer.
 
     IPythonConsoleLexer
         A lexer for IPython console sessions, with support for tracebacks.
@@ -35,10 +35,22 @@ import re
 
 # Third party
 from pygments.lexers import (
-    BashLexer, HtmlLexer, JavascriptLexer, RubyLexer, PerlLexer, PythonLexer,
-    Python3Lexer, TexLexer)
+    BashLexer,
+    HtmlLexer,
+    JavascriptLexer,
+    RubyLexer,
+    PerlLexer,
+    Python3Lexer,
+    TexLexer,
+)
 from pygments.lexer import (
-    Lexer, DelegatingLexer, RegexLexer, do_insertions, bygroups, using,
+    Lexer,
+    DelegatingLexer,
+    RegexLexer,
+    do_insertions,
+    bygroups,
+    using,
+    inherit,
 )
 from pygments.token import (
     Generic, Keyword, Literal, Name, Operator, Other, Text, Error,
@@ -49,80 +61,106 @@ from pygments.util import get_bool_opt
 
 line_re = re.compile('.*?\n')
 
-__all__ = ['build_ipy_lexer', 'IPython3Lexer', 'IPythonLexer',
-           'IPythonPartialTracebackLexer', 'IPythonTracebackLexer',
-           'IPythonConsoleLexer', 'IPyLexer']
-
-
-def build_ipy_lexer(python3):
-    """Builds IPython lexers depending on the value of `python3`.
-
-    The lexer inherits from an appropriate Python lexer and then adds
-    information about IPython specific keywords (i.e. magic commands,
-    shell commands, etc.)
-
-    Parameters
-    ----------
-    python3 : bool
-        If `True`, then build an IPython lexer from a Python 3 lexer.
-
-    """
-    # It would be nice to have a single IPython lexer class which takes
-    # a boolean `python3`. But since there are two Python lexer classes,
-    # we will also have two IPython lexer classes.
-    if python3:
-        PyLexer = Python3Lexer
-        name = 'IPython3'
-        aliases = ['ipython3']
-        doc = """IPython3 Lexer"""
-    else:
-        PyLexer = PythonLexer
-        name = 'IPython'
-        aliases = ['ipython2', 'ipython']
-        doc = """IPython Lexer"""
-
-    ipython_tokens = [
-        (r'(?s)(\s*)(%%capture)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?s)(\s*)(%%debug)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?is)(\s*)(%%html)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(HtmlLexer))),
-        (r'(?s)(\s*)(%%javascript)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
-        (r'(?s)(\s*)(%%js)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
-        (r'(?s)(\s*)(%%latex)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(TexLexer))),
-        (r'(?s)(\s*)(%%perl)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PerlLexer))),
-        (r'(?s)(\s*)(%%prun)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?s)(\s*)(%%pypy)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?s)(\s*)(%%python)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?s)(\s*)(%%python2)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PythonLexer))),
-        (r'(?s)(\s*)(%%python3)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
-        (r'(?s)(\s*)(%%ruby)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(RubyLexer))),
-        (r'(?s)(\s*)(%%time)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?s)(\s*)(%%timeit)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?s)(\s*)(%%writefile)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?s)(\s*)(%%file)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
-        (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
-        (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
-        (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
-        (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
-                                                using(BashLexer), Text)),
-        (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)),
-        (r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
-        (r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
-        (r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)),
-        (r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)),
-    ]
-
-    tokens = PyLexer.tokens.copy()
-    tokens['root'] = ipython_tokens + tokens['root']
-
-    attrs = {'name': name, 'aliases': aliases, 'filenames': [],
-             '__doc__': doc, 'tokens': tokens}
-
-    return type(name, (PyLexer,), attrs)
-
-
-IPython3Lexer = build_ipy_lexer(python3=True)
-IPythonLexer = build_ipy_lexer(python3=False)
+__all__ = [
+    "IPython3Lexer",
+    "IPythonPartialTracebackLexer",
+    "IPythonTracebackLexer",
+    "IPythonConsoleLexer",
+    "IPyLexer",
+]
+
+
+class IPython3Lexer(Python3Lexer):
+    """IPython3 Lexer"""
+
+    name = "IPython3"
+    aliases = ["ipython3"]
+
+    tokens = {
+        "root": [
+            (
+                r"(?s)(\s*)(%%capture)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%debug)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?is)(\s*)(%%html)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(HtmlLexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%javascript)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(JavascriptLexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%js)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(JavascriptLexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%latex)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(TexLexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%perl)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(PerlLexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%prun)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%pypy)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%python)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%python3)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%ruby)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(RubyLexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%time)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%timeit)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%writefile)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%file)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
+            (
+                r"(?s)(^\s*)(%%!)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(BashLexer)),
+            ),
+            (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
+            (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
+            (
+                r"(%)(sx|sc|system)(.*)(\n)",
+                bygroups(Operator, Keyword, using(BashLexer), Text),
+            ),
+            (r"(%)(\w+)(.*\n)", bygroups(Operator, Keyword, Text)),
+            (r"^(!!)(.+)(\n)", bygroups(Operator, using(BashLexer), Text)),
+            (r"(!)(?!=)(.+)(\n)", bygroups(Operator, using(BashLexer), Text)),
+            (r"^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)", bygroups(Text, Operator, Text)),
+            (r"(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$", bygroups(Text, Operator, Text)),
+            inherit,
+        ]
+    }
 
 
 class IPythonPartialTracebackLexer(RegexLexer):
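The rewritten class leans on Pygments' `inherit` marker: the rules listed before `inherit` in the `"root"` state are tried first, and everything else falls through to the parent `Python3Lexer` rules, which replaces the old `tokens = PyLexer.tokens.copy()` dance. A toy sketch of the same pattern (the lexer below is invented, purely illustrative, not part of this diff):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexer import bygroups, inherit
from pygments.lexers import Python3Lexer
from pygments.token import Keyword, Operator, Text


class MiniMagicLexer(Python3Lexer):
    """Python 3 plus a single %magic rule, for illustration only."""

    name = "MiniMagic"
    aliases = ["minimagic"]

    tokens = {
        "root": [
            # Extra rule tried before the inherited Python 3 rules.
            (r"(%)(\w+)(.*\n)", bygroups(Operator, Keyword, Text)),
            # Fall back to Python3Lexer's "root" state for everything else.
            inherit,
        ]
    }


print(highlight("%timeit x = 1\n", MiniMagicLexer(), TerminalFormatter()))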
@@ -184,9 +222,9 @@ class IPythonTracebackLexer(DelegatingLexer):
     this is the line which lists the File and line number.
 
     """
-    # The lexer inherits from DelegatingLexer. The "root" lexer is an
-    # appropriate IPython lexer, which depends on the value of the boolean
-    # `python3`. First, we parse with the partial IPython traceback lexer.
+
+    # The lexer inherits from DelegatingLexer. The "root" lexer is the
+    # IPython3 lexer. First, we parse with the partial IPython traceback lexer.
     # Then, any code marked with the "Other" token is delegated to the root
     # lexer.
     #
@@ -201,19 +239,9 @@ class IPythonTracebackLexer(DelegatingLexer):
         # note we need a __init__ doc, as otherwise it inherits the doc from the super class
         # which will fail the documentation build as it references section of the pygments docs that
         # do not exists when building IPython's docs.
-        self.python3 = get_bool_opt(options, 'python3', False)
-        if self.python3:
-            self.aliases = ['ipython3tb']
-        else:
-            self.aliases = ['ipython2tb', 'ipythontb']
-
-        if self.python3:
-            IPyLexer = IPython3Lexer
-        else:
-            IPyLexer = IPythonLexer
-
-        DelegatingLexer.__init__(self, IPyLexer,
-                                 IPythonPartialTracebackLexer, **options)
-
+
+        super().__init__(IPython3Lexer, IPythonPartialTracebackLexer, **options)
+
+
 class IPythonConsoleLexer(Lexer):
     """
@@ -255,8 +283,8 @@ class IPythonConsoleLexer(Lexer):
    #   continuation = ' .D.: '
    #   template = 'Out[#]: '
    #
    # Where '#' is the 'prompt number' or 'execution count' and 'D'
    # D is a number of dots matching the width of the execution count
    #
    in1_regex = r'In \[[0-9]+\]: '
    in2_regex = r' \.\.+\.: '
@@ -270,9 +298,6 @@ class IPythonConsoleLexer(Lexer):
 
        Parameters
        ----------
-        python3 : bool
-            If `True`, then the console inputs are parsed using a Python 3
-            lexer. Otherwise, they are parsed using a Python 2 lexer.
        in1_regex : RegexObject
            The compiled regular expression used to detect the start
            of inputs. Although the IPython configuration setting may have a
@@ -288,11 +313,7 @@ class IPythonConsoleLexer(Lexer):
            then the default output prompt is assumed.
 
        """
-        self.python3 = get_bool_opt(options, 'python3', False)
-        if self.python3:
-            self.aliases = ['ipython3console']
-        else:
-            self.aliases = ['ipython2console', 'ipythonconsole']
+        self.aliases = ["ipython3console"]
 
        in1_regex = options.get('in1_regex', self.in1_regex)
        in2_regex = options.get('in2_regex', self.in2_regex)
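As the surrounding docstring describes, the console lexer's prompt detection is configurable per instance: `in1_regex` and `in2_regex` (and the output prompt counterpart) can be passed as options and fall back to the class-level defaults shown above. A small sketch with the default prompts (the session text is invented):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from IPython.lib.lexers import IPythonConsoleLexer

session = (
    "In [1]: for i in range(2):\n"
    "   ...:     print(i)\n"
    "0\n"
    "1\n"
)
# Defaults recognize 'In [N]: ' input prompts and dotted continuation prompts.
print(highlight(session, IPythonConsoleLexer(), TerminalFormatter()))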
@@ -318,15 +339,8 @@ class IPythonConsoleLexer(Lexer):
 
        Lexer.__init__(self, **options)
 
-        if self.python3:
-            pylexer = IPython3Lexer
-            tblexer = IPythonTracebackLexer
-        else:
-            pylexer = IPythonLexer
-            tblexer = IPythonTracebackLexer
-
-        self.pylexer = pylexer(**options)
-        self.tblexer = tblexer(**options)
-
+        self.pylexer = IPython3Lexer(**options)
+        self.tblexer = IPythonTracebackLexer(**options)
+
        self.reset()
 
@@ -512,20 +526,16 @@ class IPyLexer(Lexer):
    def __init__(self, **options):
        """
        Create a new IPyLexer instance which dispatch to either an
-        IPythonCOnsoleLexer (if In prompts are present) or and IPythonLexer (if
+        IPythonConsoleLexer (if In prompts are present) or and IPython3Lexer (if
        In prompts are not present).
        """
        # init docstring is necessary for docs not to fail to build do to parent
        # docs referenceing a section in pygments docs.
-        self.python3 = get_bool_opt(options, 'python3', False)
-        if self.python3:
-            self.aliases = ['ipy3']
-        else:
-            self.aliases = ['ipy2', 'ipy']
+        self.aliases = ["ipy3"]
 
        Lexer.__init__(self, **options)
 
-        self.IPythonLexer = IPythonLexer(**options)
+        self.IPythonLexer = IPython3Lexer(**options)
        self.IPythonConsoleLexer = IPythonConsoleLexer(**options)
 
    def get_tokens_unprocessed(self, text):
@@ -537,4 +547,3 @@ class IPyLexer(Lexer):
            lex = self.IPythonLexer
        for token in lex.get_tokens_unprocessed(text):
            yield token
-
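With the lexers.py changes in place, `IPyLexer` always wraps `IPython3Lexer` plus `IPythonConsoleLexer` and picks one per input, depending on whether the text carries an input prompt. A small sketch of the resulting behaviour (assumed from the code above; token output abbreviated):

from IPython.lib.lexers import IPyLexer

lex = IPyLexer()

# Starts with an "In [N]:" prompt: handled by the console lexer.
for token, value in lex.get_tokens("In [1]: x = 1\n"):
    print(token, repr(value))

# No prompt: handled by IPython3Lexer.
for token, value in lex.get_tokens("!echo $HOME\n"):
    print(token, repr(value))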
@@ -16,10 +16,10 @@ pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)
 class TestLexers(TestCase):
     """Collection of lexers tests"""
     def setUp(self):
-        self.lexer = lexers.IPythonLexer()
+        self.lexer = lexers.IPython3Lexer()
         self.bash_lexer = BashLexer()
 
-    def testIPythonLexer(self):
+    def testIPython3Lexer(self):
        fragment = '!echo $HOME\n'
        bash_tokens = [
            (Token.Operator, '!'),
@@ -4,12 +4,10 @@ import pytest
 import pygments.lexers
 import pygments.lexer
 
-from IPython.lib.lexers import IPythonConsoleLexer, IPythonLexer, IPython3Lexer
+from IPython.lib.lexers import IPythonConsoleLexer, IPython3Lexer
 
 #: the human-readable names of the IPython lexers with ``entry_points``
-EXPECTED_LEXER_NAMES = [
-    cls.name for cls in [IPythonConsoleLexer, IPythonLexer, IPython3Lexer]
-]
+EXPECTED_LEXER_NAMES = [cls.name for cls in [IPythonConsoleLexer, IPython3Lexer]]
 
 
 @pytest.fixture
@@ -20,9 +20,5 @@ def setup(app):
 # Alternatively, we could register the lexer with pygments instead. This would
 # require using setuptools entrypoints: http://pygments.org/docs/plugins
 
-ipy2 = IPyLexer(python3=False)
-ipy3 = IPyLexer(python3=True)
-
-highlighting.lexers['ipython'] = ipy2
-highlighting.lexers['ipython2'] = ipy2
-highlighting.lexers['ipython3'] = ipy3
+highlighting.lexers["ipython"] = IPyLexer()
+highlighting.lexers["ipython3"] = IPyLexer()
@@ -9,22 +9,20 @@ The IPython console lexer has been rewritten and now supports tracebacks
 and customized input/output prompts. An entire suite of lexers is now
 available at :mod:`IPython.lib.lexers`. These include:
 
-IPythonLexer & IPython3Lexer
-    Lexers for pure IPython (python + magic/shell commands)
+IPython3Lexer
+    Lexer for pure IPython (python 3 + magic/shell commands)
 
 IPythonPartialTracebackLexer & IPythonTracebackLexer
-    Supports 2.x and 3.x via the keyword `python3`. The partial traceback
-    lexer reads everything but the Python code appearing in a traceback.
-    The full lexer combines the partial lexer with an IPython lexer.
+    The partial traceback lexer reads everything but the Python code
+    appearing in a traceback. The full lexer combines the partial lexer
+    with the IPython3Lexer.
 
 IPythonConsoleLexer
-    A lexer for IPython console sessions, with support for tracebacks.
-    Supports 2.x and 3.x via the keyword `python3`.
+    A lexer for python 3 IPython console sessions, with support for tracebacks.
 
 IPyLexer
     A friendly lexer which examines the first line of text and from it,
     decides whether to use an IPython lexer or an IPython console lexer.
-    Supports 2.x and 3.x via the keyword `python3`.
 
 Previously, the :class:`IPythonConsoleLexer` class was available at
 :mod:`IPython.sphinxext.ipython_console_hightlight`. It was inserted
@@ -145,7 +145,6 @@ setup_args["entry_points"] = {
     "console_scripts": find_entry_points(),
     "pygments.lexers": [
         "ipythonconsole = IPython.lib.lexers:IPythonConsoleLexer",
-        "ipython = IPython.lib.lexers:IPythonLexer",
         "ipython3 = IPython.lib.lexers:IPython3Lexer",
     ],
 }
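With the `ipython` entry point gone, only the console lexer and the Python 3 lexer are advertised to Pygments. Once a build carrying these entry points is installed, the lexers can be resolved by alias through the normal plugin lookup; a quick check might look like this (aliases taken from the classes above):

from pygments.lexers import get_lexer_by_name

# Resolved via the "pygments.lexers" entry points declared in setup.py.
print(get_lexer_by_name("ipython3"))
print(get_lexer_by_name("ipython3console"))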