@@ -4,13 +4,13 @@ Defines a variety of Pygments lexers for highlighting IPython code.
 
 This includes:
 
-IPython3Lexer
-    Lexer for pure IPython (python + magic/shell commands)
+IPythonLexer, IPython3Lexer
+    Lexers for pure IPython (python + magic/shell commands)
 
 IPythonPartialTracebackLexer, IPythonTracebackLexer
-    The partial traceback lexer reads everything but the Python code
-    appearing in a traceback.
-    The full lexer combines the partial lexer with the IPython3Lexer.
+    Supports 2.x and 3.x via keyword `python3`. The partial traceback
+    lexer reads everything but the Python code appearing in a traceback.
+    The full lexer combines the partial lexer with an IPython lexer.
 
 IPythonConsoleLexer
     A lexer for IPython console sessions, with support for tracebacks.
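
For orientation (not part of the patch): a minimal sketch of how these lexers are typically driven through Pygments, assuming IPython and Pygments are importable; the cell content is made up for illustration.

```python
# Illustrative only; not part of the diff above.
from pygments import highlight
from pygments.formatters import TerminalFormatter
from IPython.lib.lexers import IPython3Lexer

cell = "%timeit sum(range(10))\n!ls -l\n"  # magic + shell escape
print(highlight(cell, IPython3Lexer(), TerminalFormatter()))
```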
@@ -35,22 +35,10 @@ import re
 
 # Third party
 from pygments.lexers import (
-    BashLexer,
-    HtmlLexer,
-    JavascriptLexer,
-    RubyLexer,
-    PerlLexer,
-    Python3Lexer,
-    TexLexer,
-)
+    BashLexer, HtmlLexer, JavascriptLexer, RubyLexer, PerlLexer, PythonLexer,
+    Python3Lexer, TexLexer)
 from pygments.lexer import (
-    Lexer,
-    DelegatingLexer,
-    RegexLexer,
-    do_insertions,
-    bygroups,
-    using,
-    inherit,
+    Lexer, DelegatingLexer, RegexLexer, do_insertions, bygroups, using,
 )
 from pygments.token import (
     Generic, Keyword, Literal, Name, Operator, Other, Text, Error,
@@ -61,106 +49,80 @@ from pygments.util import get_bool_opt
 
 line_re = re.compile('.*?\n')
 
-__all__ = [
-    "IPython3Lexer",
-    "IPythonPartialTracebackLexer",
-    "IPythonTracebackLexer",
-    "IPythonConsoleLexer",
-    "IPyLexer",
-]
+__all__ = ['build_ipy_lexer', 'IPython3Lexer', 'IPythonLexer',
+           'IPythonPartialTracebackLexer', 'IPythonTracebackLexer',
+           'IPythonConsoleLexer', 'IPyLexer']
 
 
-class IPython3Lexer(Python3Lexer):
-    """IPython3 Lexer"""
+def build_ipy_lexer(python3):
+    """Builds IPython lexers depending on the value of `python3`.
 
-    name = "IPython3"
-    aliases = ["ipython3"]
+    The lexer inherits from an appropriate Python lexer and then adds
+    information about IPython specific keywords (i.e. magic commands,
+    shell commands, etc.)
 
-    tokens = {
-        "root": [
-            (
-                r"(?s)(\s*)(%%capture)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(Python3Lexer)),
-            ),
-            (
-                r"(?s)(\s*)(%%debug)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(Python3Lexer)),
-            ),
-            (
-                r"(?is)(\s*)(%%html)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(HtmlLexer)),
-            ),
-            (
-                r"(?s)(\s*)(%%javascript)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(JavascriptLexer)),
-            ),
-            (
-                r"(?s)(\s*)(%%js)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(JavascriptLexer)),
-            ),
-            (
-                r"(?s)(\s*)(%%latex)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(TexLexer)),
-            ),
-            (
-                r"(?s)(\s*)(%%perl)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(PerlLexer)),
-            ),
-            (
-                r"(?s)(\s*)(%%prun)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(Python3Lexer)),
-            ),
-            (
-                r"(?s)(\s*)(%%pypy)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(Python3Lexer)),
-            ),
-            (
-                r"(?s)(\s*)(%%python)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(Python3Lexer)),
-            ),
-            (
-                r"(?s)(\s*)(%%python3)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(Python3Lexer)),
-            ),
-            (
-                r"(?s)(\s*)(%%ruby)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(RubyLexer)),
-            ),
-            (
-                r"(?s)(\s*)(%%time)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(Python3Lexer)),
-            ),
-            (
-                r"(?s)(\s*)(%%timeit)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(Python3Lexer)),
-            ),
-            (
-                r"(?s)(\s*)(%%writefile)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(Python3Lexer)),
-            ),
-            (
-                r"(?s)(\s*)(%%file)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(Python3Lexer)),
-            ),
+    Parameters
+    ----------
+    python3 : bool
+        If `True`, then build an IPython lexer from a Python 3 lexer.
+
+    """
+    # It would be nice to have a single IPython lexer class which takes
+    # a boolean `python3`. But since there are two Python lexer classes,
+    # we will also have two IPython lexer classes.
+    if python3:
+        PyLexer = Python3Lexer
+        name = 'IPython3'
+        aliases = ['ipython3']
+        doc = """IPython3 Lexer"""
+    else:
+        PyLexer = PythonLexer
+        name = 'IPython'
+        aliases = ['ipython2', 'ipython']
+        doc = """IPython Lexer"""
+
+    ipython_tokens = [
+        (r'(?s)(\s*)(%%capture)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
+        (r'(?s)(\s*)(%%debug)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
+        (r'(?is)(\s*)(%%html)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(HtmlLexer))),
+        (r'(?s)(\s*)(%%javascript)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
+        (r'(?s)(\s*)(%%js)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
+        (r'(?s)(\s*)(%%latex)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(TexLexer))),
+        (r'(?s)(\s*)(%%perl)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PerlLexer))),
+        (r'(?s)(\s*)(%%prun)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
+        (r'(?s)(\s*)(%%pypy)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
+        (r'(?s)(\s*)(%%python)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
+        (r'(?s)(\s*)(%%python2)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PythonLexer))),
+        (r'(?s)(\s*)(%%python3)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
+        (r'(?s)(\s*)(%%ruby)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(RubyLexer))),
+        (r'(?s)(\s*)(%%time)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
+        (r'(?s)(\s*)(%%timeit)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
+        (r'(?s)(\s*)(%%writefile)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
+        (r'(?s)(\s*)(%%file)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
         (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
-            (
-                r"(?s)(^\s*)(%%!)([^\n]*\n)(.*)",
-                bygroups(Text, Operator, Text, using(BashLexer)),
-            ),
+        (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
         (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
         (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
-            (
-                r"(%)(sx|sc|system)(.*)(\n)",
-                bygroups(Operator, Keyword, using(BashLexer), Text),
-            ),
-            (r"(%)(\w+)(.*\n)", bygroups(Operator, Keyword, Text)),
-            (r"^(!!)(.+)(\n)", bygroups(Operator, using(BashLexer), Text)),
-            (r"(!)(?!=)(.+)(\n)", bygroups(Operator, using(BashLexer), Text)),
-            (r"^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)", bygroups(Text, Operator, Text)),
-            (r"(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$", bygroups(Text, Operator, Text)),
-            inherit,
+        (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
+                                                using(BashLexer), Text)),
+        (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)),
+        (r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
+        (r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
+        (r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)),
+        (r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)),
     ]
-    }
+
+    tokens = PyLexer.tokens.copy()
+    tokens['root'] = ipython_tokens + tokens['root']
+
+    attrs = {'name': name, 'aliases': aliases, 'filenames': [],
+             '__doc__': doc, 'tokens': tokens}
+
+    return type(name, (PyLexer,), attrs)
+
+
+IPython3Lexer = build_ipy_lexer(python3=True)
+IPythonLexer = build_ipy_lexer(python3=False)
 
 
 class IPythonPartialTracebackLexer(RegexLexer):
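
The removed ('-') side folds the magic/shell rules straight into a `Python3Lexer` subclass and defers everything else to the parent with `inherit`; the added ('+') side instead copies the parent's token table and builds the class with `type()`. A minimal sketch of the subclass-plus-`inherit` pattern, using a hypothetical class name that is not part of either version of the file:

```python
from pygments.lexer import bygroups, inherit
from pygments.lexers import Python3Lexer
from pygments.token import Keyword, Operator, Text

class DemoMagicLexer(Python3Lexer):
    """Hypothetical example lexer; the name is for illustration only."""
    tokens = {
        "root": [
            # Recognize '%magic args' lines before falling back to the
            # inherited Python rules.
            (r"(%)(\w+)(.*\n)", bygroups(Operator, Keyword, Text)),
            inherit,
        ]
    }
```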
@@ -222,9 +184,9 @@ class IPythonTracebackLexer(DelegatingLexer):
     this is the line which lists the File and line number.
 
     """
-
-    # The lexer inherits from DelegatingLexer. The "root" lexer is the
-    # IPython3 lexer. First, we parse with the partial IPython traceback lexer.
+    # The lexer inherits from DelegatingLexer. The "root" lexer is an
+    # appropriate IPython lexer, which depends on the value of the boolean
+    # `python3`. First, we parse with the partial IPython traceback lexer.
     # Then, any code marked with the "Other" token is delegated to the root
     # lexer.
     #
@@ -239,9 +201,19 @@ class IPythonTracebackLexer(DelegatingLexer):
         # note we need a __init__ doc, as otherwise it inherits the doc from the super class
         # which will fail the documentation build as it references section of the pygments docs that
         # do not exists when building IPython's docs.
+        self.python3 = get_bool_opt(options, 'python3', False)
+        if self.python3:
+            self.aliases = ['ipython3tb']
+        else:
+            self.aliases = ['ipython2tb', 'ipythontb']
 
-        super().__init__(IPython3Lexer, IPythonPartialTracebackLexer, **options)
+        if self.python3:
+            IPyLexer = IPython3Lexer
+        else:
+            IPyLexer = IPythonLexer
 
+        DelegatingLexer.__init__(self, IPyLexer,
+                                 IPythonPartialTracebackLexer, **options)
 
 class IPythonConsoleLexer(Lexer):
     """
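
Both sides implement the delegation described in the comments above: the partial traceback lexer runs first, and any span it emits as `Other` is re-lexed by the root lexer. A minimal sketch of that wiring, with a hypothetical class name and a plain `Python3Lexer` standing in for the IPython root lexer:

```python
from pygments.lexer import DelegatingLexer
from pygments.lexers import Python3Lexer
from IPython.lib.lexers import IPythonPartialTracebackLexer

class DemoTracebackLexer(DelegatingLexer):
    """Hypothetical example; the real classes wire in an IPython lexer."""
    def __init__(self, **options):
        # Root lexer first, then the "language" lexer whose Other-marked
        # spans are handed back to the root lexer.
        super().__init__(Python3Lexer, IPythonPartialTracebackLexer, **options)
```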
@@ -298,6 +270,9 @@ class IPythonConsoleLexer(Lexer):
 
         Parameters
         ----------
+        python3 : bool
+            If `True`, then the console inputs are parsed using a Python 3
+            lexer. Otherwise, they are parsed using a Python 2 lexer.
         in1_regex : RegexObject
             The compiled regular expression used to detect the start
             of inputs. Although the IPython configuration setting may have a
@@ -313,7 +288,11 @@ class IPythonConsoleLexer(Lexer):
             then the default output prompt is assumed.
 
         """
-        self.aliases = ["ipython3console"]
+        self.python3 = get_bool_opt(options, 'python3', False)
+        if self.python3:
+            self.aliases = ['ipython3console']
+        else:
+            self.aliases = ['ipython2console', 'ipythonconsole']
 
         in1_regex = options.get('in1_regex', self.in1_regex)
         in2_regex = options.get('in2_regex', self.in2_regex)
@@ -339,8 +318,15 @@ class IPythonConsoleLexer(Lexer):
 
         Lexer.__init__(self, **options)
 
-        self.pylexer = IPython3Lexer(**options)
-        self.tblexer = IPythonTracebackLexer(**options)
+        if self.python3:
+            pylexer = IPython3Lexer
+            tblexer = IPythonTracebackLexer
+        else:
+            pylexer = IPythonLexer
+            tblexer = IPythonTracebackLexer
+
+        self.pylexer = pylexer(**options)
+        self.tblexer = tblexer(**options)
 
         self.reset()
 
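
The `in1_regex`/`in2_regex` options read above can be supplied at construction time. A small sketch (the regex values are illustrative; on the '+' side of this diff a `python3=True` option would also be accepted):

```python
from IPython.lib.lexers import IPythonConsoleLexer

# Override the prompt-detection regexes documented above.
lexer = IPythonConsoleLexer(in1_regex=r'In \[[0-9]+\]: ',
                            in2_regex=r'   \.\.\.: ')
```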
@@ -526,16 +512,20 @@ class IPyLexer(Lexer):
     def __init__(self, **options):
         """
         Create a new IPyLexer instance which dispatch to either an
-        IPythonConsoleLexer (if In prompts are present) or and IPython3Lexer (if
+        IPythonCOnsoleLexer (if In prompts are present) or and IPythonLexer (if
         In prompts are not present).
         """
         # init docstring is necessary for docs not to fail to build do to parent
         # docs referenceing a section in pygments docs.
-        self.aliases = ["ipy3"]
+        self.python3 = get_bool_opt(options, 'python3', False)
+        if self.python3:
+            self.aliases = ['ipy3']
+        else:
+            self.aliases = ['ipy2', 'ipy']
 
         Lexer.__init__(self, **options)
 
-        self.IPythonLexer = IPython3Lexer(**options)
+        self.IPythonLexer = IPythonLexer(**options)
         self.IPythonConsoleLexer = IPythonConsoleLexer(**options)
 
     def get_tokens_unprocessed(self, text):
@@ -547,3 +537,4 @@ class IPyLexer(Lexer):
             lex = self.IPythonLexer
         for token in lex.get_tokens_unprocessed(text):
             yield token
+
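
As the `__init__` docstring above explains, `IPyLexer` dispatches on whether `In [N]:` prompts appear in the text. A usage sketch with made-up input strings:

```python
from pygments import highlight
from pygments.formatters import NullFormatter
from IPython.lib.lexers import IPyLexer

lexer = IPyLexer()
console = "In [1]: x = 1\nOut[1]: 1\n"  # prompts present -> console lexer
script = "%run example.py\n"            # no prompts -> plain IPython lexer
highlight(console, lexer, NullFormatter())
highlight(script, lexer, NullFormatter())
```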