##// END OF EJS Templates
A few style fixes:...
Matthias Bussonnier -
Show More
@@ -1,432 +1,432 b''
1 1 """Tests for the object inspection functionality.
2 2 """
3 3
4 4 # Copyright (c) IPython Development Team.
5 5 # Distributed under the terms of the Modified BSD License.
6 6
7 7 from __future__ import print_function
8 8
9 9 import os
10 10 import re
11 11 import sys
12 12
13 13 import nose.tools as nt
14 14
15 15 from .. import oinspect
16 16 from IPython.core.magic import (Magics, magics_class, line_magic,
17 17 cell_magic, line_cell_magic,
18 18 register_line_magic, register_cell_magic,
19 19 register_line_cell_magic)
20 20 from decorator import decorator
21 21 from IPython.testing.decorators import skipif
22 22 from IPython.testing.tools import AssertPrints
23 23 from IPython.utils.path import compress_user
24 24 from IPython.utils import py3compat
25 25 from IPython.utils.signatures import Signature, Parameter
26 26
27 27
28 28 #-----------------------------------------------------------------------------
29 29 # Globals and constants
30 30 #-----------------------------------------------------------------------------
31 31
# Module-level fixtures shared by every test below.
inspector = oinspect.Inspector()
ip = get_ipython()  # presumably injected into builtins by the IPython test runner — TODO confirm
34 34
35 35 #-----------------------------------------------------------------------------
36 36 # Local utilities
37 37 #-----------------------------------------------------------------------------
38 38
# WARNING: since this test checks the line number where a function is
# defined, if any code is inserted above, the following line will need to be
# updated. Do NOT insert any whitespace between the next line and the function
# definition below.
THIS_LINE_NUMBER = 43 # Put here the actual number of this line; keep in sync when editing above
def test_find_source_lines():
    nt.assert_equal(oinspect.find_source_lines(test_find_source_lines),
                    THIS_LINE_NUMBER+1)  # the def line immediately follows the constant
47 47
48 48
49 49 # A couple of utilities to ensure these tests work the same from a source or a
50 50 # binary install
def pyfile(fname):
    """Map *fname* to its normalized source-file form (``.pyc``/``.pyo`` -> ``.py``)."""
    source_name = re.sub('.py[co]$', '.py', fname)
    return os.path.normcase(source_name)
53 53
54 54
def match_pyfiles(f1, f2):
    # Assert two paths refer to the same source file, ignoring .pyc/.pyo
    # suffixes and filesystem case (see pyfile above).
    nt.assert_equal(pyfile(f1), pyfile(f2))
57 57
58 58
def test_find_file():
    # find_file on a plain function should resolve to this test module.
    match_pyfiles(oinspect.find_file(test_find_file), os.path.abspath(__file__))
61 61
62 62
def test_find_file_decorated1():
    # find_file should see through a decorator-wrapped function.

    @decorator
    def noop1(f):
        # FIX: the original wrapper took no parameters yet referenced
        # undefined names `a`/`kw`, so calling it raised NameError.
        def wrapper(*a, **kw):
            return f(*a, **kw)
        return wrapper

    @noop1
    def f(x):
        "My docstring"

    match_pyfiles(oinspect.find_file(f), os.path.abspath(__file__))
    nt.assert_equal(f.__doc__, "My docstring")
77 77
78 78
def test_find_file_decorated2():
    # find_file should see through several stacked decorator layers.

    @decorator
    def noop2(f, *a, **kw):
        return f(*a, **kw)

    @noop2
    @noop2
    @noop2
    def f(x):
        "My docstring 2"

    match_pyfiles(oinspect.find_file(f), os.path.abspath(__file__))
    nt.assert_equal(f.__doc__, "My docstring 2")
93 93
94 94
def test_find_file_magic():
    # Built-in magics live in real source files, so find_file must not
    # return None for them.
    run = ip.find_line_magic('run')
    nt.assert_not_equal(oinspect.find_file(run), None)
98 98
99 99
100 100 # A few generic objects we can then inspect in the tests below
101 101
class Call(object):
    """This is the class docstring."""

    # NOTE: the signatures and docstrings below are asserted verbatim by the
    # calltip/info tests further down — do not change them.
    def __init__(self, x, y=1):
        """This is the constructor docstring."""

    def __call__(self, *a, **kw):
        """This is the call docstring."""

    def method(self, x, z=2):
        """Some method's docstring"""
113 113
class HasSignature(object):
    """This is the class docstring."""
    # An explicit __signature__ overrides the *args signature of __init__;
    # test_class_signature checks the inspector honors it.
    __signature__ = Signature([Parameter('test', Parameter.POSITIONAL_OR_KEYWORD)])

    def __init__(self, *args):
        """This is the init docstring"""
120 120
121 121
class SimpleClass(object):
    # Minimal fixture: a plain method with no __call__ on the class.
    def method(self, x, z=2):
        """Some method's docstring"""
125 125
126 126
class OldStyle:
    """An old-style class for testing."""
    # Only actually old-style on Python 2 (no explicit `object` base).
    pass
130 130
131 131
# Fixture: signature is asserted verbatim by test_calltip_function.
def f(x, y=2, *a, **kw):
    """A simple function."""
134 134
135 135
# Fixture: deliberately has no docstring (test_calltip_function2 expects
# the '<no docstring>' placeholder).
def g(y, z=3, *a, **kw):
    pass # no docstring
138 138
139 139
# Function-based line magic fixture; docstring asserted by its calltip test.
@register_line_magic
def lmagic(line):
    "A line magic"
143 143
144 144
# Function-based cell magic fixture; docstring asserted by its calltip test.
@register_cell_magic
def cmagic(line, cell):
    "A cell magic"
148 148
149 149
# Function-based line/cell magic fixture; docstring asserted by its calltip test.
@register_line_cell_magic
def lcmagic(line, cell=None):
    "A line/cell magic"
153 153
154 154
# Class-based magics fixture; registered by test_class_magics, which asserts
# each signature and docstring verbatim.
@magics_class
class SimpleMagics(Magics):
    @line_magic
    def Clmagic(self, cline):
        "A class-based line magic"

    @cell_magic
    def Ccmagic(self, cline, ccell):
        "A class-based cell magic"

    @line_cell_magic
    def Clcmagic(self, cline, ccell=None):
        "A class-based line/cell magic"
168 168
169 169
class Awkward(object):
    # Every attribute access raises; the inspector must survive this
    # (see test_info_awkward).
    def __getattr__(self, name):
        raise Exception(name)
173 173
class NoBoolCall:
    """
    callable with `__bool__` raising should still be inspect-able.
    """

    def __call__(self):
        """does nothing"""
        pass

    def __bool__(self):
        """just raise NotImplemented"""
        # The inspector must not evaluate truthiness of the object
        # (see test_bool_raise).
        raise NotImplementedError('Must be implemented')
186 186
187 187
class SerialLiar(object):
    """Attribute accesses always get another copy of the same class.

    unittest.mock.call does something similar, but it's not ideal for testing
    as the failure mode is to eat all your RAM. This gives up after 10k levels.
    """
    def __init__(self, max_fibbing_twig, lies_told=0):
        # Hard cap so a runaway inspector fails fast instead of recursing
        # until memory is exhausted.
        if lies_told > 10000:
            raise RuntimeError('Nose too long, honesty is the best policy')
        self.max_fibbing_twig = max_fibbing_twig
        self.lies_told = lies_told
        # max_fibbing_twig is a shared one-element list recording the deepest
        # nesting level reached (checked by test_info_serialliar).
        max_fibbing_twig[0] = max(max_fibbing_twig[0], lies_told)

    def __getattr__(self, item):
        return SerialLiar(self.max_fibbing_twig, self.lies_told + 1)
203 203
204 204
def check_calltip(obj, name, call, docstring):
    """Generic check pattern all calltip tests will use"""
    # Inspect the object, derive its calltip, and compare both the rendered
    # call line and the docstring against the expected values.
    info = inspector.info(obj, name)
    call_line, ds = oinspect.call_tip(info)
    nt.assert_equal(call_line, call)
    nt.assert_equal(ds, docstring)
211 211
212 212 #-----------------------------------------------------------------------------
213 213 # Tests
214 214 #-----------------------------------------------------------------------------
215 215
def test_calltip_class():
    # A class calltip comes from __init__, rendered without `self`.
    check_calltip(Call, 'Call', 'Call(x, y=1)', Call.__init__.__doc__)
218 218
219 219
def test_calltip_instance():
    # An instance calltip comes from its class's __call__.
    c = Call(1)
    check_calltip(c, 'c', 'c(*a, **kw)', c.__call__.__doc__)
223 223
224 224
def test_calltip_method():
    # A bound method's calltip drops `self` from the signature.
    c = Call(1)
    check_calltip(c.method, 'c.method', 'c.method(x, z=2)', c.method.__doc__)
228 228
229 229
def test_calltip_function():
    # Plain function: full signature including *a/**kw.
    check_calltip(f, 'f', 'f(x, y=2, *a, **kw)', f.__doc__)
232 232
233 233
def test_calltip_function2():
    # Function without a docstring gets the '<no docstring>' placeholder.
    check_calltip(g, 'g', 'g(y, z=3, *a, **kw)', '<no docstring>')
236 236
237 237
# Skipped on >= 3.5, where builtins started exposing introspectable signatures.
@skipif(sys.version_info >= (3, 5))
def test_calltip_builtin():
    # No retrievable signature for C builtins: call line must be None.
    check_calltip(sum, 'sum', None, sum.__doc__)
241 241
242 242
def test_calltip_line_magic():
    # Function-based line magic: calltip from its plain signature.
    check_calltip(lmagic, 'lmagic', 'lmagic(line)', "A line magic")
245 245
246 246
def test_calltip_cell_magic():
    # Function-based cell magic: calltip shows (line, cell).
    check_calltip(cmagic, 'cmagic', 'cmagic(line, cell)', "A cell magic")
249 249
250 250
def test_calltip_line_cell_magic():
    # Dual-mode magic: cell defaults to None in the rendered signature.
    check_calltip(lcmagic, 'lcmagic', 'lcmagic(line, cell=None)',
                  "A line/cell magic")
254 254
255 255
def test_class_magics():
    # Class-based magics: `self` must be stripped from every calltip.
    cm = SimpleMagics(ip)
    ip.register_magics(cm)
    check_calltip(cm.Clmagic, 'Clmagic', 'Clmagic(cline)',
                  "A class-based line magic")
    check_calltip(cm.Ccmagic, 'Ccmagic', 'Ccmagic(cline, ccell)',
                  "A class-based cell magic")
    check_calltip(cm.Clcmagic, 'Clcmagic', 'Clcmagic(cline, ccell=None)',
                  "A class-based line/cell magic")
265 265
266 266
def test_info():
    "Check that Inspector.info fills out various fields as expected."
    i = inspector.info(Call, oname='Call')
    nt.assert_equal(i['type_name'], 'type')
    expted_class = str(type(type)) # <class 'type'> (Python 3) or <type 'type'>
    nt.assert_equal(i['base_class'], expted_class)
    nt.assert_equal(i['string_form'], "<class 'IPython.core.tests.test_oinspect.Call'>")
    fname = __file__
    if fname.endswith(".pyc"):
        # Point at the .py source, not the compiled bytecode file.
        fname = fname[:-1]
    # case-insensitive comparison needed on some filesystems
    # e.g. Windows:
    nt.assert_equal(i['file'].lower(), compress_user(fname).lower())
    nt.assert_equal(i['definition'], None)
    nt.assert_equal(i['docstring'], Call.__doc__)
    # At detail_level=0 no source is retrieved.
    nt.assert_equal(i['source'], None)
    nt.assert_true(i['isclass'])
    _self_py2 = '' if py3compat.PY3 else 'self, '
    nt.assert_equal(i['init_definition'], "Call(%sx, y=1)\n" % _self_py2)
    nt.assert_equal(i['init_docstring'], Call.__init__.__doc__)

    # At detail_level=1 the source is shown instead of the docstring.
    i = inspector.info(Call, detail_level=1)
    nt.assert_not_equal(i['source'], None)
    nt.assert_equal(i['docstring'], None)

    c = Call(1)
    c.__doc__ = "Modified instance docstring"
    i = inspector.info(c)
    nt.assert_equal(i['type_name'], 'Call')
    # The per-instance docstring wins; class/init/call docstrings are still
    # reported in their own fields.
    nt.assert_equal(i['docstring'], "Modified instance docstring")
    nt.assert_equal(i['class_docstring'], Call.__doc__)
    nt.assert_equal(i['init_docstring'], Call.__init__.__doc__)
    nt.assert_equal(i['call_docstring'], Call.__call__.__doc__)

    # Test old-style classes, which for example may not have an __init__ method.
    if not py3compat.PY3:
        i = inspector.info(OldStyle)
        nt.assert_equal(i['type_name'], 'classobj')

        i = inspector.info(OldStyle())
        nt.assert_equal(i['type_name'], 'instance')
        nt.assert_equal(i['docstring'], OldStyle.__doc__)
309 309
def test_class_signature():
    # An explicit __signature__ on the class should drive init_definition.
    info = inspector.info(HasSignature, 'HasSignature')
    nt.assert_equal(info['init_definition'], "HasSignature(test)\n")
    nt.assert_equal(info['init_docstring'], HasSignature.__init__.__doc__)
314 314
def test_info_awkward():
    # Just test that this doesn't throw an error.
    i = inspector.info(Awkward())
318 318
def test_bool_raise():
    # Inspecting an object whose __bool__ raises must not propagate.
    inspector.info(NoBoolCall())
321 321
def test_info_serialliar():
    fib_tracker = [0]
    i = inspector.info(SerialLiar(fib_tracker))

    # Nested attribute access should be cut off at 100 levels deep to avoid
    # infinite loops: https://github.com/ipython/ipython/issues/9122
    nt.assert_less(fib_tracker[0], 9000)
329 329
def test_calldef_none():
    # We should ignore __call__ for all of these.
    for obj in [f, SimpleClass().method, any, str.upper]:
        print(obj)  # identify the failing object in the test output
        i = inspector.info(obj)
        nt.assert_is(i['call_def'], None)
336 336
if py3compat.PY3:
    # Keyword-only syntax is a SyntaxError on Python 2, so define via exec.
    exec("def f_kwarg(pos, *, kwonly): pass")
339 339
@skipif(not py3compat.PY3)
def test_definition_kwonlyargs():
    # The rendered definition must keep the bare `*` separator.
    i = inspector.info(f_kwarg, oname='f_kwarg')  # analysis:ignore
    nt.assert_equal(i['definition'], "f_kwarg(pos, *, kwonly)\n")
344 344
def test_getdoc():
    # oinspect.getdoc prefers an object's getdoc() hook over __doc__, but
    # falls back to __doc__ when the hook returns None.
    class A(object):
        """standard docstring"""
        pass

    class B(object):
        """standard docstring"""
        def getdoc(self):
            return "custom docstring"

    class C(object):
        """standard docstring"""
        def getdoc(self):
            return None

    a = A()
    b = B()
    c = C()

    nt.assert_equal(oinspect.getdoc(a), "standard docstring")
    nt.assert_equal(oinspect.getdoc(b), "custom docstring")
    nt.assert_equal(oinspect.getdoc(c), "standard docstring")
367 367
368 368
def test_empty_property_has_no_source():
    # A property with no fget has no source even at detail_level=1.
    i = inspector.info(property(), detail_level=1)
    nt.assert_is(i['source'], None)
372 372
373 373
def test_property_sources():
    # The inspector should show real source for Python-defined property
    # accessors, and a synthesized description for C-level callables.
    import zlib

    class A(object):
        @property
        def foo(self):
            return 'bar'

        foo = foo.setter(lambda self, v: setattr(self, 'bar', v))

        id = property(id)
        compress = property(zlib.compress)

    i = inspector.info(A.foo, detail_level=1)
    nt.assert_in('def foo(self):', i['source'])
    nt.assert_in('lambda self, v:', i['source'])

    i = inspector.info(A.id, detail_level=1)
    nt.assert_in('fget = <function id>', i['source'])

    i = inspector.info(A.compress, detail_level=1)
    nt.assert_in('fget = <function zlib.compress>', i['source'])
396 396
397 397
def test_property_docstring_is_in_info_for_detail_level_0():
    # A property's docstring must surface at detail_level=0 both when
    # accessed through an instance and through the class itself.
    class A(object):
        @property
        def foobar(self):
            """This is `foobar` property."""
            pass

    ip.user_ns['a_obj'] = A()
    # FIX: nt.assert_equals is a deprecated nose alias of assert_equal.
    nt.assert_equal(
        'This is `foobar` property.',
        ip.object_inspect('a_obj.foobar', detail_level=0)['docstring'])

    ip.user_ns['a_cls'] = A
    nt.assert_equal(
        'This is `foobar` property.',
        ip.object_inspect('a_cls.foobar', detail_level=0)['docstring'])
414 414
415 415
def test_pdef():
    # See gh-1914: pdef on a simple function must not raise.
    def foo(): pass
    inspector.pdef(foo, 'foo')
420 420
def test_pinfo_nonascii():
    # See gh-1177: pinfo on a module with non-ascii source must not raise.
    from . import nonascii2
    ip.user_ns['nonascii2'] = nonascii2
    ip._inspect('pinfo', 'nonascii2', detail_level=1)
426 426
def test_pinfo_magic():
    # detail_level=0 shows the docstring; detail_level=1 shows the source.
    with AssertPrints('Docstring:'):
        ip._inspect('pinfo', 'lsmagic', detail_level=0)

    with AssertPrints('Source:'):
        ip._inspect('pinfo', 'lsmagic', detail_level=1)
@@ -1,116 +1,112 b''
1 1 # IPython: modified copy of numpy.testing.utils, so
2 2 # IPython.external._decorators works without numpy being installed.
3 3 """
4 4 Utility function to facilitate testing.
5 5 """
6 6
7 7 import sys
8 8 import warnings
9 9
10 10 # The following two classes are copied from python 2.6 warnings module (context
11 11 # manager)
class WarningMessage(object):

    """
    Holds the result of a single showwarning() call.

    Notes
    -----
    `WarningMessage` is copied from the Python 2.6 warnings module,
    so it can be used in NumPy with older Python versions.

    """

    _WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
                        "line")

    def __init__(self, message, category, filename, lineno, file=None,
                 line=None):
        # Mirror each constructor argument onto an attribute of the same name.
        captured = locals()
        for detail in self._WARNING_DETAILS:
            setattr(self, detail, captured[detail])
        # Cache the category name; the category may be None.
        self._category_name = category.__name__ if category else None

    def __str__(self):
        return ("{message : %r, category : %r, filename : %r, lineno : %s, "
                "line : %r}" % (self.message, self._category_name,
                                self.filename, self.lineno, self.line))
41 41
class WarningManager:
    """
    A context manager that copies and restores the warnings filter upon
    exiting the context.

    The 'record' argument specifies whether warnings should be captured by a
    custom implementation of ``warnings.showwarning()`` and be appended to a
    list returned by the context manager. Otherwise None is returned by the
    context manager. The objects appended to the list are arguments whose
    attributes mirror the arguments to ``showwarning()``.

    The 'module' argument is to specify an alternative module to the module
    named 'warnings' and imported under that name. This argument is only useful
    when testing the warnings module itself.

    Notes
    -----
    `WarningManager` is a copy of the ``catch_warnings`` context manager
    from the Python 2.6 warnings module, with slight modifications.
    It is copied so it can be used in NumPy with older Python versions.

    """
    def __init__(self, record=False, module=None):
        self._record = record
        self._module = sys.modules['warnings'] if module is None else module
        self._entered = False

    def __enter__(self):
        if self._entered:
            raise RuntimeError("Cannot enter %r twice" % self)
        self._entered = True
        # Snapshot the filter list and the showwarning hook so __exit__
        # can restore both; work on a copy of the filters.
        self._filters = self._module.filters
        self._module.filters = self._filters[:]
        self._showwarning = self._module.showwarning
        if not self._record:
            return None
        log = []
        def capture(*args, **kwargs):
            log.append(WarningMessage(*args, **kwargs))
        self._module.showwarning = capture
        return log

    def __exit__(self, type_, value, traceback):
        if not self._entered:
            raise RuntimeError("Cannot exit %r without entering first" % self)
        self._module.filters = self._filters
        self._module.showwarning = self._showwarning
93 93
def assert_warns(warning_class, func, *args, **kw):
    """Fail unless a warning of class warning_class is thrown by callable when
    invoked with arguments args and keyword arguments kwargs.

    If a different type of warning is thrown, it will not be caught, and the
    test case will be deemed to have suffered an error.
    """
    # XXX: once we may depend on python >= 2.6, this can be replaced by the
    # warnings module context manager.
    with WarningManager(record=True) as l:
        warnings.simplefilter('always')
        func(*args, **kw)
        # `l` collects every warning emitted while the manager was active.
        if not l:
            raise AssertionError("No warning raised when calling %s"
                                 % func.__name__)
        if l[0].category is not warning_class:
            raise AssertionError("First warning for %s is not a " \
                                 "%s( is %s)" % (func.__name__, warning_class, l[0]))
@@ -1,511 +1,509 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 Defines a variety of Pygments lexers for highlighting IPython code.
4 4
5 5 This includes:
6 6
7 7 IPythonLexer, IPython3Lexer
8 8 Lexers for pure IPython (python + magic/shell commands)
9 9
10 10 IPythonPartialTracebackLexer, IPythonTracebackLexer
11 11 Supports 2.x and 3.x via keyword `python3`. The partial traceback
12 12 lexer reads everything but the Python code appearing in a traceback.
13 13 The full lexer combines the partial lexer with an IPython lexer.
14 14
15 15 IPythonConsoleLexer
16 16 A lexer for IPython console sessions, with support for tracebacks.
17 17
18 18 IPyLexer
19 19 A friendly lexer which examines the first line of text and from it,
20 20 decides whether to use an IPython lexer or an IPython console lexer.
21 21 This is probably the only lexer that needs to be explicitly added
22 22 to Pygments.
23 23
24 24 """
25 25 #-----------------------------------------------------------------------------
26 26 # Copyright (c) 2013, the IPython Development Team.
27 27 #
28 28 # Distributed under the terms of the Modified BSD License.
29 29 #
30 30 # The full license is in the file COPYING.txt, distributed with this software.
31 31 #-----------------------------------------------------------------------------
32 32
33 33 # Standard library
34 34 import re
35 35
36 36 # Third party
37 37 from pygments.lexers import BashLexer, PythonLexer, Python3Lexer
38 38 from pygments.lexer import (
39 39 Lexer, DelegatingLexer, RegexLexer, do_insertions, bygroups, using,
40 40 )
41 41 from pygments.token import (
42 Comment, Generic, Keyword, Literal, Name, Operator, Other, Text, Error,
42 Generic, Keyword, Literal, Name, Operator, Other, Text, Error,
43 43 )
44 44 from pygments.util import get_bool_opt
45 45
46 46 # Local
47 47
48 48 line_re = re.compile('.*?\n')
49 49
50 50 __all__ = ['build_ipy_lexer', 'IPython3Lexer', 'IPythonLexer',
51 51 'IPythonPartialTracebackLexer', 'IPythonTracebackLexer',
52 52 'IPythonConsoleLexer', 'IPyLexer']
53 53
# Extra token rules prepended to the Python lexers' 'root' state: cell and
# line magics, shell escapes (!/!!), %sx/%sc/%system with bash-highlighted
# bodies, and leading/trailing ?/?? help requests.
ipython_tokens = [
    (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
    (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
    (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
    (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
    (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
                                            using(BashLexer), Text)),
    (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)),
    (r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
    (r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
    (r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)),
    (r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)),
]
67 67
def build_ipy_lexer(python3):
    """Builds IPython lexers depending on the value of `python3`.

    The lexer inherits from an appropriate Python lexer and then adds
    information about IPython specific keywords (i.e. magic commands,
    shell commands, etc.)

    Parameters
    ----------
    python3 : bool
        If `True`, then build an IPython lexer from a Python 3 lexer.

    """
    # It would be nice to have a single IPython lexer class which takes
    # a boolean `python3`. But since there are two Python lexer classes,
    # we will also have two IPython lexer classes.
    if python3:
        base = Python3Lexer
        name = 'IPython3'
        aliases = ['ipython3']
        doc = """IPython3 Lexer"""
    else:
        base = PythonLexer
        name = 'IPython'
        aliases = ['ipython2', 'ipython']
        doc = """IPython Lexer"""

    # Prepend the IPython-specific rules to the base lexer's root state.
    tokens = base.tokens.copy()
    tokens['root'] = ipython_tokens + tokens['root']

    return type(name, (base,), {
        'name': name,
        'aliases': aliases,
        'filenames': [],
        '__doc__': doc,
        'tokens': tokens,
    })
104 102
105 103
# Concrete lexer classes generated once from the shared token table above.
IPython3Lexer = build_ipy_lexer(python3=True)
IPythonLexer = build_ipy_lexer(python3=False)
108 106
109 107
class IPythonPartialTracebackLexer(RegexLexer):
    """
    Partial lexer for IPython tracebacks.

    Handles all the non-python output. This works for both Python 2.x and 3.x.

    """
    name = 'IPython Partial Traceback'

    tokens = {
        'root': [
            # Tracebacks for syntax errors have a different style.
            # For both types of tracebacks, we mark the first line with
            # Generic.Traceback. For syntax errors, we mark the filename
            # as we mark the filenames for non-syntax tracebacks.
            #
            # These two regexps define how IPythonConsoleLexer finds a
            # traceback.
            #
            ## Non-syntax traceback
            (r'^(\^C)?(-+\n)', bygroups(Error, Generic.Traceback)),
            ## Syntax traceback
            (r'^(  File)(.*)(, line )(\d+\n)',
             bygroups(Generic.Traceback, Name.Namespace,
                      Generic.Traceback, Literal.Number.Integer)),

            # (Exception Identifier)(Whitespace)(Traceback Message)
            (r'(?u)(^[^\d\W]\w*)(\s*)(Traceback.*?\n)',
             bygroups(Name.Exception, Generic.Whitespace, Text)),
            # (Module/Filename)(Text)(Callee)(Function Signature)
            # Better options for callee and function signature?
            (r'(.*)( in )(.*)(\(.*\)\n)',
             bygroups(Name.Namespace, Text, Name.Entity, Name.Tag)),
            # Regular line: (Whitespace)(Line Number)(Python Code)
            (r'(\s*?)(\d+)(.*?\n)',
             bygroups(Generic.Whitespace, Literal.Number.Integer, Other)),
            # Emphasized line: (Arrow)(Line Number)(Python Code)
            # Using Exception token so arrow color matches the Exception.
            (r'(-*>?\s?)(\d+)(.*?\n)',
             bygroups(Name.Exception, Literal.Number.Integer, Other)),
            # (Exception Identifier)(Message)
            (r'(?u)(^[^\d\W]\w*)(:.*?\n)',
             bygroups(Name.Exception, Text)),
            # Tag everything else as Other, will be handled later.
            (r'.*\n', Other),
        ],
    }
157 155
158 156
class IPythonTracebackLexer(DelegatingLexer):
    """
    IPython traceback lexer.

    For doctests, the tracebacks can be snipped as much as desired with the
    exception to the lines that designate a traceback. For non-syntax error
    tracebacks, this is the line of hyphens. For syntax error tracebacks,
    this is the line which lists the File and line number.

    """
    # The lexer inherits from DelegatingLexer. The "root" lexer is an
    # appropriate IPython lexer, which depends on the value of the boolean
    # `python3`. First, we parse with the partial IPython traceback lexer.
    # Then, any code marked with the "Other" token is delegated to the root
    # lexer.
    #
    name = 'IPython Traceback'
    aliases = ['ipythontb']

    def __init__(self, **options):
        self.python3 = get_bool_opt(options, 'python3', False)
        # Pick version-specific aliases and root lexer in a single branch.
        # (Local renamed from `IPyLexer`, which shadowed the module-level
        # class of that name.)
        if self.python3:
            self.aliases = ['ipython3tb']
            root_lexer = IPython3Lexer
        else:
            self.aliases = ['ipython2tb', 'ipythontb']
            root_lexer = IPythonLexer

        DelegatingLexer.__init__(self, root_lexer,
                                 IPythonPartialTracebackLexer, **options)
192 190
class IPythonConsoleLexer(Lexer):
    """
    An IPython console lexer for IPython code-blocks and doctests, such as:

    .. code-block:: rst

        .. code-block:: ipythonconsole

            In [1]: a = 'foo'

            In [2]: a
            Out[2]: 'foo'

            In [3]: print a
            foo

            In [4]: 1 / 0


    Support is also provided for IPython exceptions:

    .. code-block:: rst

        .. code-block:: ipythonconsole

            In [1]: raise Exception

            ---------------------------------------------------------------------------
            Exception                                 Traceback (most recent call last)
            <ipython-input-1-fca2ab0ca76b> in <module>()
            ----> 1 raise Exception

            Exception:

    """
    name = 'IPython console session'
    aliases = ['ipythonconsole']
    mimetypes = ['text/x-ipython-console']

    # The regexps used to determine what is input and what is output.
    # The default prompts for IPython are:
    #
    # c.PromptManager.in_template  = 'In [\#]: '
    # c.PromptManager.in2_template = '   .\D.: '
    # c.PromptManager.out_template = 'Out[\#]: '
    #
    # These class-level defaults are pattern *strings*; __init__ compiles
    # them (and rstrip'd variants) into instance attributes.
    in1_regex = r'In \[[0-9]+\]: '
    in2_regex = r'   \.\.+\.: '
    out_regex = r'Out\[[0-9]+\]: '

    #: The regex to determine when a traceback starts.
    ipytb_start = re.compile(r'^(\^C)?(-+\n)|^(  File)(.*)(, line )(\d+\n)')
245 243
    def __init__(self, **options):
        """Initialize the IPython console lexer.

        Parameters
        ----------
        python3 : bool
            If `True`, then the console inputs are parsed using a Python 3
            lexer. Otherwise, they are parsed using a Python 2 lexer.
        in1_regex : RegexObject
            The compiled regular expression used to detect the start
            of inputs. Although the IPython configuration setting may have a
            trailing whitespace, do not include it in the regex. If `None`,
            then the default input prompt is assumed.
        in2_regex : RegexObject
            The compiled regular expression used to detect the continuation
            of inputs. Although the IPython configuration setting may have a
            trailing whitespace, do not include it in the regex. If `None`,
            then the default input prompt is assumed.
        out_regex : RegexObject
            The compiled regular expression used to detect outputs. If `None`,
            then the default output prompt is assumed.

        """
        self.python3 = get_bool_opt(options, 'python3', False)
        if self.python3:
            self.aliases = ['ipython3console']
        else:
            self.aliases = ['ipython2console', 'ipythonconsole']

        in1_regex = options.get('in1_regex', self.in1_regex)
        in2_regex = options.get('in2_regex', self.in2_regex)
        out_regex = options.get('out_regex', self.out_regex)

        # So that we can work with input and output prompts which have been
        # rstrip'd (possibly by editors) we also need rstrip'd variants. If
        # we do not do this, then such prompts will be tagged as 'output'.
        # The reason can't just use the rstrip'd variants instead is because
        # we want any whitespace associated with the prompt to be inserted
        # with the token. This allows formatted code to be modified so as hide
        # the appearance of prompts, with the whitespace included. One example
        # use of this is in copybutton.js from the standard lib Python docs.
        in1_regex_rstrip = in1_regex.rstrip() + '\n'
        in2_regex_rstrip = in2_regex.rstrip() + '\n'
        out_regex_rstrip = out_regex.rstrip() + '\n'

        # Compile and save them all.
        # NOTE: reads each pattern string out of locals() by name and stores
        # the compiled regex over the class-level default on this instance.
        attrs = ['in1_regex', 'in2_regex', 'out_regex',
                 'in1_regex_rstrip', 'in2_regex_rstrip', 'out_regex_rstrip']
        for attr in attrs:
            self.__setattr__(attr, re.compile(locals()[attr]))

        Lexer.__init__(self, **options)

        if self.python3:
            pylexer = IPython3Lexer
            tblexer = IPythonTracebackLexer
        else:
            pylexer = IPythonLexer
            tblexer = IPythonTracebackLexer

        # Both branches use the same traceback lexer class; it configures
        # itself for py2/py3 from the `python3` entry still in **options.
        self.pylexer = pylexer(**options)
        self.tblexer = tblexer(**options)

        self.reset()
310 308
    def reset(self):
        # Reset streaming state: start in 'output' mode with an empty buffer,
        # a zero offset into the stream, and no pending prompt insertions.
        self.mode = 'output'
        self.index = 0
        self.buffer = u''
        self.insertions = []
316 314
    def buffered_tokens(self):
        """
        Generator of unprocessed tokens after doing insertions and before
        changing to a new state.

        """
        # Choose how to tokenize the buffer based on the current mode.
        if self.mode == 'output':
            tokens = [(0, Generic.Output, self.buffer)]
        elif self.mode == 'input':
            tokens = self.pylexer.get_tokens_unprocessed(self.buffer)
        else: # traceback
            tokens = self.tblexer.get_tokens_unprocessed(self.buffer)

        for i, t, v in do_insertions(self.insertions, tokens):
            # All token indexes are relative to the buffer.
            yield self.index + i, t, v

        # Clear it all
        self.index += len(self.buffer)
        self.buffer = u''
        self.insertions = []
338 336
    def get_mci(self, line):
        """
        Parses the line and returns a 3-tuple: (mode, code, insertion).

        `mode` is the next mode (or state) of the lexer, and is always equal
        to 'input', 'output', or 'tb'.

        `code` is a portion of the line that should be added to the buffer
        corresponding to the next mode and eventually lexed by another lexer.
        For example, `code` could be Python code if `mode` were 'input'.

        `insertion` is a 3-tuple (index, token, text) representing an
        unprocessed "token" that will be inserted into the stream of tokens
        that are created from the buffer once we change modes. This is usually
        the input or output prompt.

        In general, the next mode depends on current mode and on the contents
        of `line`.

        """
        # To reduce the number of regex match checks, we have multiple
        # 'if' blocks instead of 'if-elif' blocks.

        # Check for possible end of input
        # (an "end of input" is a continuation prompt with nothing after it,
        # or any match of the rstripped continuation regex).
        in2_match = self.in2_regex.match(line)
        in2_match_rstrip = self.in2_regex_rstrip.match(line)
        if (in2_match and in2_match.group().rstrip() == line.rstrip()) or \
           in2_match_rstrip:
            end_input = True
        else:
            end_input = False
        if end_input and self.mode != 'tb':
            # Only look for an end of input when not in tb mode.
            # An ellipsis could appear within the traceback.
            mode = 'output'
            code = u''
            insertion = (0, Generic.Prompt, line)
            return mode, code, insertion

        # Check for output prompt
        out_match = self.out_regex.match(line)
        out_match_rstrip = self.out_regex_rstrip.match(line)
        if out_match or out_match_rstrip:
            mode = 'output'
            if out_match:
                idx = out_match.end()
            else:
                idx = out_match_rstrip.end()
            code = line[idx:]
            # Use the 'heading' token for output. We cannot use Generic.Error
            # since it would conflict with exceptions.
            insertion = (0, Generic.Heading, line[:idx])
            return mode, code, insertion


        # Check for input or continuation prompt (non stripped version)
        in1_match = self.in1_regex.match(line)
        if in1_match or (in2_match and self.mode != 'tb'):
            # New input or when not in tb, continued input.
            # We do not check for continued input when in tb since it is
            # allowable to replace a long stack with an ellipsis.
            mode = 'input'
            if in1_match:
                idx = in1_match.end()
            else: # in2_match
                idx = in2_match.end()
            code = line[idx:]
            insertion = (0, Generic.Prompt, line[:idx])
            return mode, code, insertion

        # Check for input or continuation prompt (stripped version)
        in1_match_rstrip = self.in1_regex_rstrip.match(line)
        if in1_match_rstrip or (in2_match_rstrip and self.mode != 'tb'):
            # New input or when not in tb, continued input.
            # We do not check for continued input when in tb since it is
            # allowable to replace a long stack with an ellipsis.
            mode = 'input'
            if in1_match_rstrip:
                idx = in1_match_rstrip.end()
            else: # in2_match
                idx = in2_match_rstrip.end()
            code = line[idx:]
            insertion = (0, Generic.Prompt, line[:idx])
            return mode, code, insertion

        # Check for traceback
        if self.ipytb_start.match(line):
            mode = 'tb'
            code = line
            insertion = None
            return mode, code, insertion

        # All other stuff...
        if self.mode in ('input', 'output'):
            # We assume all other text is output. Multiline input that
            # does not use the continuation marker cannot be detected.
            # For example, the 3 in the following is clearly output:
            #
            #    In [1]: print 3
            #    3
            #
            # But the following second line is part of the input:
            #
            #    In [2]: while True:
            #        print True
            #
            # In both cases, the 2nd line will be 'output'.
            #
            mode = 'output'
        else:
            # Once in a traceback, stay in it until a prompt matches above.
            mode = 'tb'

        code = line
        insertion = None

        return mode, code, insertion

    def get_tokens_unprocessed(self, text):
        """Lex `text`, yielding (index, token, text) tuples.

        Feeds `text` line by line through the mode state machine
        (`get_mci`), accumulating consecutive lines of the same mode in
        the buffer and flushing it through `buffered_tokens` whenever
        the mode changes.
        """
        self.reset()
        for match in line_re.finditer(text):
            line = match.group()
            mode, code, insertion = self.get_mci(line)

            if mode != self.mode:
                # Yield buffered tokens before transitioning to new mode.
                for token in self.buffered_tokens():
                    yield token
                self.mode = mode

            if insertion:
                self.insertions.append((len(self.buffer), [insertion]))
            self.buffer += code

        # Flush whatever remains in the buffer at end of input.
        for token in self.buffered_tokens():
            yield token

class IPyLexer(Lexer):
    # NOTE: the docstring must be a raw string -- it contains "\[" and "\]",
    # which are invalid escape sequences in a normal string literal
    # (DeprecationWarning on Python 3.6+, eventually a SyntaxError).
    r"""
    Primary lexer for all IPython-like code.

    This is a simple helper lexer. If the first line of the text begins with
    "In \[[0-9]+\]:", then the entire text is parsed with an IPython console
    lexer. If not, then the entire text is parsed with an IPython lexer.

    The goal is to reduce the number of lexers that are registered
    with Pygments.

    """
    name = 'IPy session'
    aliases = ['ipy']

    def __init__(self, **options):
        """Create the helper lexer.

        Parameters
        ----------
        **options
            Passed through to the delegate lexers. The boolean option
            ``python3`` (default False) selects Python 3 highlighting and
            adjusts the registered aliases accordingly.
        """
        self.python3 = get_bool_opt(options, 'python3', False)
        if self.python3:
            self.aliases = ['ipy3']
        else:
            self.aliases = ['ipy2', 'ipy']

        Lexer.__init__(self, **options)

        # Delegate lexers; one of them is chosen per call to
        # get_tokens_unprocessed depending on the text's content.
        self.IPythonLexer = IPythonLexer(**options)
        self.IPythonConsoleLexer = IPythonConsoleLexer(**options)

    def get_tokens_unprocessed(self, text):
        """Dispatch `text` to the console lexer if it contains an input
        prompt anywhere, otherwise to the plain IPython lexer."""
        # Search for the input prompt anywhere...this allows code blocks to
        # begin with comments as well.
        if re.match(r'.*(In \[[0-9]+\]:)', text.strip(), re.DOTALL):
            lex = self.IPythonConsoleLexer
        else:
            lex = self.IPythonLexer
        for token in lex.get_tokens_unprocessed(text):
            yield token

511 509
General Comments 0
You need to be logged in to leave comments. Login now