Merge pull request #12110 from terrdavis/remove-py2-workarounds-1of2...
Matthias Bussonnier
r25463:6b91afe8 merge
@@ -1,1032 +1,1031 @@
1 1 # -*- coding: utf-8 -*-
2 2 """Tools for inspecting Python objects.
3 3
4 4 Uses syntax highlighting for presenting the various information elements.
5 5
6 6 Similar in spirit to the inspect module, but all calls take a name argument to
7 7 reference the name under which an object is being read.
8 8 """
9 9
10 10 # Copyright (c) IPython Development Team.
11 11 # Distributed under the terms of the Modified BSD License.
12 12
13 13 __all__ = ['Inspector','InspectColors']
14 14
15 15 # stdlib modules
16 16 import ast
17 17 import inspect
18 18 from inspect import signature
19 19 import linecache
20 20 import warnings
21 21 import os
22 22 from textwrap import dedent
23 23 import types
24 24 import io as stdlib_io
25 25
26 26 from typing import Union
27 27
28 28 # IPython's own
29 29 from IPython.core import page
30 30 from IPython.lib.pretty import pretty
31 31 from IPython.testing.skipdoctest import skip_doctest
32 32 from IPython.utils import PyColorize
33 33 from IPython.utils import openpy
34 34 from IPython.utils import py3compat
35 35 from IPython.utils.dir2 import safe_hasattr
36 36 from IPython.utils.path import compress_user
37 37 from IPython.utils.text import indent
38 38 from IPython.utils.wildcard import list_namespace
39 39 from IPython.utils.wildcard import typestr2type
40 40 from IPython.utils.coloransi import TermColors, ColorScheme, ColorSchemeTable
41 41 from IPython.utils.py3compat import cast_unicode
42 42 from IPython.utils.colorable import Colorable
43 43 from IPython.utils.decorators import undoc
44 44
45 45 from pygments import highlight
46 46 from pygments.lexers import PythonLexer
47 47 from pygments.formatters import HtmlFormatter
48 48
49 49 def pylight(code):
50 50 return highlight(code, PythonLexer(), HtmlFormatter(noclasses=True))
51 51
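For orientation, a hedged usage sketch of `pylight` (assuming pygments is installed): because the formatter is built with `noclasses=True`, the returned HTML fragment carries inline styles and needs no external stylesheet. The sample source string is illustrative.

html = pylight("def f(x):\n    return x + 1\n")
# Self-contained fragment: <span> elements carry style="..." attributes
# instead of CSS class names, so it can be embedded directly in rich output.
assert "<span style=" in html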
52 52 # builtin docstrings to ignore
53 53 _func_call_docstring = types.FunctionType.__call__.__doc__
54 54 _object_init_docstring = object.__init__.__doc__
55 55 _builtin_type_docstrings = {
56 56 inspect.getdoc(t) for t in (types.ModuleType, types.MethodType,
57 57 types.FunctionType, property)
58 58 }
59 59
60 60 _builtin_func_type = type(all)
61 61 _builtin_meth_type = type(str.upper) # Bound methods have the same type as builtin functions
62 62 #****************************************************************************
63 63 # Builtin color schemes
64 64
65 65 Colors = TermColors # just a shorthand
66 66
67 67 InspectColors = PyColorize.ANSICodeColors
68 68
69 69 #****************************************************************************
70 70 # Auxiliary functions and objects
71 71
72 72 # See the messaging spec for the definition of all these fields. This list
73 73 # effectively defines the order of display
74 74 info_fields = ['type_name', 'base_class', 'string_form', 'namespace',
75 75 'length', 'file', 'definition', 'docstring', 'source',
76 76 'init_definition', 'class_docstring', 'init_docstring',
77 77 'call_def', 'call_docstring',
78 78 # These won't be printed but will be used to determine how to
79 79 # format the object
80 80 'ismagic', 'isalias', 'isclass', 'found', 'name'
81 81 ]
82 82
83 83
84 84 def object_info(**kw):
85 85 """Make an object info dict with all fields present."""
86 86 infodict = {k:None for k in info_fields}
87 87 infodict.update(kw)
88 88 return infodict
89 89
90 90
91 91 def get_encoding(obj):
92 92 """Get encoding for python source file defining obj
93 93
94 94 Returns None if obj is not defined in a sourcefile.
95 95 """
96 96 ofile = find_file(obj)
97 97 # Only consider real text source files on the filesystem: extension
98 98 # binaries (.so/.dll/.pyd) and missing files carry no encoding cookie
99 99 # to detect.
100 100 if ofile is None:
101 101 return None
102 102 elif ofile.endswith(('.so', '.dll', '.pyd')):
103 103 return None
104 104 elif not os.path.isfile(ofile):
105 105 return None
106 106 else:
107 107 # Print only text files, not extension binaries. Note that
108 108 # getsourcelines returns lineno with 1-offset and page() uses
109 109 # 0-offset, so we must adjust.
110 110 with stdlib_io.open(ofile, 'rb') as buffer: # Tweaked to use io.open for Python 2
111 111 encoding, lines = openpy.detect_encoding(buffer.readline)
112 112 return encoding
113 113
114 114 def getdoc(obj) -> Union[str,None]:
115 115 """Stable wrapper around inspect.getdoc.
116 116
117 117 This can't crash because of attribute problems.
118 118
119 119 It also attempts to call a getdoc() method on the given object. This
120 120 allows objects which provide their docstrings via non-standard mechanisms
121 121 (like Pyro proxies) to still be inspected by ipython's ? system.
122 122 """
123 123 # Allow objects to offer customized documentation via a getdoc method:
124 124 try:
125 125 ds = obj.getdoc()
126 126 except Exception:
127 127 pass
128 128 else:
129 129 if isinstance(ds, str):
130 130 return inspect.cleandoc(ds)
131 131 docstr = inspect.getdoc(obj)
132 132 return docstr
133 133
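A hedged sketch of the custom-docstring hook described above; `RemoteProxy` is a made-up class standing in for something like a Pyro proxy.

class RemoteProxy:
    """Local placeholder docstring."""
    def getdoc(self):
        # IPython's getdoc() tries this hook first; a returned str wins over
        # inspect.getdoc() and is cleaned with inspect.cleandoc().
        return "Documentation fetched from the remote object."

print(getdoc(RemoteProxy()))   # -> Documentation fetched from the remote object.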
134 134
135 135 def getsource(obj, oname='') -> Union[str,None]:
136 136 """Wrapper around inspect.getsource.
137 137
138 138 This can be modified by other projects to provide customized source
139 139 extraction.
140 140
141 141 Parameters
142 142 ----------
143 143 obj : object
144 144 an object whose source code we will attempt to extract
145 145 oname : str
146 146 (optional) a name under which the object is known
147 147
148 148 Returns
149 149 -------
150 150 src : unicode or None
151 151
152 152 """
153 153
154 154 if isinstance(obj, property):
155 155 sources = []
156 156 for attrname in ['fget', 'fset', 'fdel']:
157 157 fn = getattr(obj, attrname)
158 158 if fn is not None:
159 159 encoding = get_encoding(fn)
160 160 oname_prefix = ('%s.' % oname) if oname else ''
161 161 sources.append(''.join(('# ', oname_prefix, attrname)))
162 162 if inspect.isfunction(fn):
163 163 sources.append(dedent(getsource(fn)))
164 164 else:
165 165 # Default str/repr only prints function name,
166 166 # pretty.pretty prints module name too.
167 167 sources.append(
168 168 '%s%s = %s\n' % (oname_prefix, attrname, pretty(fn))
169 169 )
170 170 if sources:
171 171 return '\n'.join(sources)
172 172 else:
173 173 return None
174 174
175 175 else:
176 176 # Get source for non-property objects.
177 177
178 178 obj = _get_wrapped(obj)
179 179
180 180 try:
181 181 src = inspect.getsource(obj)
182 182 except TypeError:
183 183 # The object itself provided no meaningful source, try looking for
184 184 # its class definition instead.
185 185 if hasattr(obj, '__class__'):
186 186 try:
187 187 src = inspect.getsource(obj.__class__)
188 188 except TypeError:
189 189 return None
190 190
191 191 return src
192 192
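A sketch of the property branch above, assuming the class lives in a real source file (inspect needs one to read from); `Point` is hypothetical. Each defined accessor gets a `# <oname>.<accessor>` header followed by its dedented source.

class Point:
    def __init__(self, x):
        self._x = x

    @property
    def x(self):
        """The x coordinate."""
        return self._x

print(getsource(Point.x, oname='Point.x'))
# # Point.x.fget
# @property
# def x(self):
#     """The x coordinate."""
#     return self._x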
193 193
194 194 def is_simple_callable(obj):
195 195 """True if obj is a function ()"""
196 196 return (inspect.isfunction(obj) or inspect.ismethod(obj) or \
197 197 isinstance(obj, _builtin_func_type) or isinstance(obj, _builtin_meth_type))
198 198
199 199 @undoc
200 200 def getargspec(obj):
201 """Wrapper around :func:`inspect.getfullargspec` on Python 3, and
202 :func:inspect.getargspec` on Python 2.
201 """Wrapper around :func:`inspect.getfullargspec`
203 202
204 203 In addition to functions and methods, this can also handle objects with a
205 204 ``__call__`` attribute.
206 205
207 206 DEPRECATED since IPython 7.10. Do not use; it will be removed in a future version.
208 207 """
209 208
210 209 warnings.warn('`getargspec` function is deprecated as of IPython 7.10 '
211 210 'and will be removed in future versions.', DeprecationWarning, stacklevel=2)
212 211
213 212 if safe_hasattr(obj, '__call__') and not is_simple_callable(obj):
214 213 obj = obj.__call__
215 214
216 215 return inspect.getfullargspec(obj)
217 216
218 217 @undoc
219 218 def format_argspec(argspec):
220 219 """Format argspect, convenience wrapper around inspect's.
221 220
222 221 This takes a dict instead of ordered arguments and calls
223 222 inspect.format_argspec with the arguments in the necessary order.
224 223
225 224 DEPRECATED: Do not use; will be removed in future versions.
226 225 """
227 226
228 227 warnings.warn('`format_argspec` function is deprecated as of IPython 7.10 '
229 228 'and will be removed in future versions.', DeprecationWarning, stacklevel=2)
230 229
231 230
232 231 return inspect.formatargspec(argspec['args'], argspec['varargs'],
233 232 argspec['varkw'], argspec['defaults'])
234 233
235 234 @undoc
236 235 def call_tip(oinfo, format_call=True):
237 236 """DEPRECATED. Extract call tip data from an oinfo dict.
238 237 """
239 238 warnings.warn('`call_tip` function is deprecated as of IPython 6.0 '
240 239 'and will be removed in future versions.', DeprecationWarning, stacklevel=2)
241 240 # Get call definition
242 241 argspec = oinfo.get('argspec')
243 242 if argspec is None:
244 243 call_line = None
245 244 else:
246 245 # Callable objects will have 'self' as their first argument, prune
247 246 # it out if it's there for clarity (since users do *not* pass an
248 247 # extra first argument explicitly).
249 248 try:
250 249 has_self = argspec['args'][0] == 'self'
251 250 except (KeyError, IndexError):
252 251 pass
253 252 else:
254 253 if has_self:
255 254 argspec['args'] = argspec['args'][1:]
256 255
257 256 call_line = oinfo['name']+format_argspec(argspec)
258 257
259 258 # Now get docstring.
260 259 # The priority is: call docstring, constructor docstring, main one.
261 260 doc = oinfo.get('call_docstring')
262 261 if doc is None:
263 262 doc = oinfo.get('init_docstring')
264 263 if doc is None:
265 264 doc = oinfo.get('docstring','')
266 265
267 266 return call_line, doc
268 267
269 268
270 269 def _get_wrapped(obj):
271 270 """Get the original object if wrapped in one or more @decorators
272 271
273 272 Some objects automatically construct similar objects on any unrecognised
274 273 attribute access (e.g. unittest.mock.call). To protect against infinite loops,
275 274 this will arbitrarily cut off after 100 levels of obj.__wrapped__
276 275 attribute access. --TK, Jan 2016
277 276 """
278 277 orig_obj = obj
279 278 i = 0
280 279 while safe_hasattr(obj, '__wrapped__'):
281 280 obj = obj.__wrapped__
282 281 i += 1
283 282 if i > 100:
284 283 # __wrapped__ is probably a lie, so return the thing we started with
285 284 return orig_obj
286 285 return obj
287 286
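A minimal sketch of the unwrapping above: `functools.wraps` sets `__wrapped__` on the wrapper, so `_get_wrapped` walks back to the undecorated function, giving up after 100 hops.

import functools

def logged(fn):
    @functools.wraps(fn)          # sets wrapper.__wrapped__ = fn
    def wrapper(*args, **kwargs):
        return fn(*args, **kwargs)
    return wrapper

@logged
def add(a, b):
    return a + b

assert _get_wrapped(add) is add.__wrapped__   # the original, undecorated add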
288 287 def find_file(obj) -> str:
289 288 """Find the absolute path to the file where an object was defined.
290 289
291 290 This is essentially a robust wrapper around `inspect.getabsfile`.
292 291
293 292 Returns None if no file can be found.
294 293
295 294 Parameters
296 295 ----------
297 296 obj : any Python object
298 297
299 298 Returns
300 299 -------
301 300 fname : str
302 301 The absolute path to the file where the object was defined.
303 302 """
304 303 obj = _get_wrapped(obj)
305 304
306 305 fname = None
307 306 try:
308 307 fname = inspect.getabsfile(obj)
309 308 except TypeError:
310 309 # For an instance, the file that matters is where its class was
311 310 # declared.
312 311 if hasattr(obj, '__class__'):
313 312 try:
314 313 fname = inspect.getabsfile(obj.__class__)
315 314 except TypeError:
316 315 # Can happen for builtins
317 316 pass
318 317 except:
319 318 pass
320 319 return cast_unicode(fname)
321 320
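A brief sketch of `find_file` behaviour (the printed path is platform and version dependent): source-backed callables resolve to their defining file, while builtins fall through to None.

import os.path

print(find_file(os.path.join))   # e.g. /usr/lib/python3.8/posixpath.py
print(find_file(len))            # None -- builtins have no Python source file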
322 321
323 322 def find_source_lines(obj):
324 323 """Find the line number in a file where an object was defined.
325 324
326 325 This is essentially a robust wrapper around `inspect.getsourcelines`.
327 326
328 327 Returns None if no file can be found.
329 328
330 329 Parameters
331 330 ----------
332 331 obj : any Python object
333 332
334 333 Returns
335 334 -------
336 335 lineno : int
337 336 The line number where the object definition starts.
338 337 """
339 338 obj = _get_wrapped(obj)
340 339
341 340 try:
342 341 try:
343 342 lineno = inspect.getsourcelines(obj)[1]
344 343 except TypeError:
345 344 # For instances, try the class object like getsource() does
346 345 if hasattr(obj, '__class__'):
347 346 lineno = inspect.getsourcelines(obj.__class__)[1]
348 347 else:
349 348 lineno = None
350 349 except:
351 350 return None
352 351
353 352 return lineno
354 353
355 354 class Inspector(Colorable):
356 355
357 356 def __init__(self, color_table=InspectColors,
358 357 code_color_table=PyColorize.ANSICodeColors,
359 358 scheme=None,
360 359 str_detail_level=0,
361 360 parent=None, config=None):
362 361 super(Inspector, self).__init__(parent=parent, config=config)
363 362 self.color_table = color_table
364 363 self.parser = PyColorize.Parser(out='str', parent=self, style=scheme)
365 364 self.format = self.parser.format
366 365 self.str_detail_level = str_detail_level
367 366 self.set_active_scheme(scheme)
368 367
369 368 def _getdef(self,obj,oname='') -> Union[str,None]:
370 369 """Return the call signature for any callable object.
371 370
372 371 If any exception is generated, None is returned instead and the
373 372 exception is suppressed."""
374 373 try:
375 374 return _render_signature(signature(obj), oname)
376 375 except:
377 376 return None
378 377
379 378 def __head(self,h) -> str:
380 379 """Return a header string with proper colors."""
381 380 return '%s%s%s' % (self.color_table.active_colors.header,h,
382 381 self.color_table.active_colors.normal)
383 382
384 383 def set_active_scheme(self, scheme):
385 384 if scheme is not None:
386 385 self.color_table.set_active_scheme(scheme)
387 386 self.parser.color_table.set_active_scheme(scheme)
388 387
389 388 def noinfo(self, msg, oname):
390 389 """Generic message when no information is found."""
391 390 print('No %s found' % msg, end=' ')
392 391 if oname:
393 392 print('for %s' % oname)
394 393 else:
395 394 print()
396 395
397 396 def pdef(self, obj, oname=''):
398 397 """Print the call signature for any callable object.
399 398
400 399 If the object is a class, print the constructor information."""
401 400
402 401 if not callable(obj):
403 402 print('Object is not callable.')
404 403 return
405 404
406 405 header = ''
407 406
408 407 if inspect.isclass(obj):
409 408 header = self.__head('Class constructor information:\n')
410 409
411 410
412 411 output = self._getdef(obj,oname)
413 412 if output is None:
414 413 self.noinfo('definition header',oname)
415 414 else:
416 415 print(header,self.format(output), end=' ')
417 416
418 417 # In Python 3, all classes are new-style, so they all have __init__.
419 418 @skip_doctest
420 419 def pdoc(self, obj, oname='', formatter=None):
421 420 """Print the docstring for any object.
422 421
423 422 Optional:
424 423 -formatter: a function to run the docstring through for specially
425 424 formatted docstrings.
426 425
427 426 Examples
428 427 --------
429 428
430 429 In [1]: class NoInit:
431 430 ...: pass
432 431
433 432 In [2]: class NoDoc:
434 433 ...: def __init__(self):
435 434 ...: pass
436 435
437 436 In [3]: %pdoc NoDoc
438 437 No documentation found for NoDoc
439 438
440 439 In [4]: %pdoc NoInit
441 440 No documentation found for NoInit
442 441
443 442 In [5]: obj = NoInit()
444 443
445 444 In [6]: %pdoc obj
446 445 No documentation found for obj
447 446
448 447 In [5]: obj2 = NoDoc()
449 448
450 449 In [6]: %pdoc obj2
451 450 No documentation found for obj2
452 451 """
453 452
454 453 head = self.__head # For convenience
455 454 lines = []
456 455 ds = getdoc(obj)
457 456 if formatter:
458 457 ds = formatter(ds).get('text/plain', ds)
459 458 if ds:
460 459 lines.append(head("Class docstring:"))
461 460 lines.append(indent(ds))
462 461 if inspect.isclass(obj) and hasattr(obj, '__init__'):
463 462 init_ds = getdoc(obj.__init__)
464 463 if init_ds is not None:
465 464 lines.append(head("Init docstring:"))
466 465 lines.append(indent(init_ds))
467 466 elif hasattr(obj,'__call__'):
468 467 call_ds = getdoc(obj.__call__)
469 468 if call_ds:
470 469 lines.append(head("Call docstring:"))
471 470 lines.append(indent(call_ds))
472 471
473 472 if not lines:
474 473 self.noinfo('documentation',oname)
475 474 else:
476 475 page.page('\n'.join(lines))
477 476
478 477 def psource(self, obj, oname=''):
479 478 """Print the source code for an object."""
480 479
481 480 # Flush the source cache because inspect can return out-of-date source
482 481 linecache.checkcache()
483 482 try:
484 483 src = getsource(obj, oname=oname)
485 484 except Exception:
486 485 src = None
487 486
488 487 if src is None:
489 488 self.noinfo('source', oname)
490 489 else:
491 490 page.page(self.format(src))
492 491
493 492 def pfile(self, obj, oname=''):
494 493 """Show the whole file where an object was defined."""
495 494
496 495 lineno = find_source_lines(obj)
497 496 if lineno is None:
498 497 self.noinfo('file', oname)
499 498 return
500 499
501 500 ofile = find_file(obj)
502 501 # run contents of file through pager starting at line where the object
503 502 # is defined, as long as the file isn't binary and is actually on the
504 503 # filesystem.
505 504 if ofile.endswith(('.so', '.dll', '.pyd')):
506 505 print('File %r is binary, not printing.' % ofile)
507 506 elif not os.path.isfile(ofile):
508 507 print('File %r does not exist, not printing.' % ofile)
509 508 else:
510 509 # Print only text files, not extension binaries. Note that
511 510 # getsourcelines returns lineno with 1-offset and page() uses
512 511 # 0-offset, so we must adjust.
513 512 page.page(self.format(openpy.read_py_file(ofile, skip_encoding_cookie=False)), lineno - 1)
514 513
515 514
516 515 def _mime_format(self, text:str, formatter=None) -> dict:
517 516 """Return a mime bundle representation of the input text.
518 517
519 518 - if `formatter` is None, the returned mime bundle has
520 519 a `text/plain` field, with the input text.
521 520 a `text/html` field with a `<pre>` tag containing the input text.
522 521
523 522 - if `formatter` is not None, it must be a callable transforming the
524 523 input text into a mime bundle. Default values for `text/plain` and
525 524 `text/html` representations are the ones described above.
526 525
527 526 Note:
528 527
529 528 Formatters returning strings are supported but this behavior is deprecated.
530 529
531 530 """
532 531 defaults = {
533 532 'text/plain': text,
534 533 'text/html': '<pre>' + text + '</pre>'
535 534 }
536 535
537 536 if formatter is None:
538 537 return defaults
539 538 else:
540 539 formatted = formatter(text)
541 540
542 541 if not isinstance(formatted, dict):
543 542 # Handle the deprecated behavior of a formatter returning
544 543 # a string instead of a mime bundle.
545 544 return {
546 545 'text/plain': formatted,
547 546 'text/html': '<pre>' + formatted + '</pre>'
548 547 }
549 548
550 549 else:
551 550 return dict(defaults, **formatted)
552 551
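A hedged sketch of the two formatter shapes `_mime_format` accepts; `as_bundle` and `shout` are illustrative names and `Inspector()` uses its defaults.

insp = Inspector()

def as_bundle(text):
    # Partial mime bundle: missing keys fall back to the defaults above.
    return {'text/html': '<b>' + text + '</b>'}

print(insp._mime_format('hi', as_bundle))
# {'text/plain': 'hi', 'text/html': '<b>hi</b>'}

def shout(text):
    # Deprecated style: returning a bare string still works.
    return text.upper()

print(insp._mime_format('hi', shout))
# {'text/plain': 'HI', 'text/html': '<pre>HI</pre>'}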
553 552
554 553 def format_mime(self, bundle):
555 554
556 555 text_plain = bundle['text/plain']
557 556
558 557 text = ''
559 558 heads, bodies = list(zip(*text_plain))
560 559 _len = max(len(h) for h in heads)
561 560
562 561 for head, body in zip(heads, bodies):
563 562 body = body.strip('\n')
564 563 delim = '\n' if '\n' in body else ' '
565 564 text += self.__head(head+':') + (_len - len(head))*' ' +delim + body +'\n'
566 565
567 566 bundle['text/plain'] = text
568 567 return bundle
569 568
570 569 def _get_info(self, obj, oname='', formatter=None, info=None, detail_level=0):
571 570 """Retrieve an info dict and format it.
572 571
573 572 Parameters
574 573 ==========
575 574
576 575 obj: any
577 576 Object to inspect and return info from
578 577 oname: str (default: ''):
579 578 Name of the variable pointing to `obj`.
580 579 formatter: callable
581 580 info:
582 581 already computed information
583 582 detail_level: integer
584 583 Granularity of detail; if set to 1, more information is given.
585 584 """
586 585
587 586 info = self._info(obj, oname=oname, info=info, detail_level=detail_level)
588 587
589 588 _mime = {
590 589 'text/plain': [],
591 590 'text/html': '',
592 591 }
593 592
594 593 def append_field(bundle, title:str, key:str, formatter=None):
595 594 field = info[key]
596 595 if field is not None:
597 596 formatted_field = self._mime_format(field, formatter)
598 597 bundle['text/plain'].append((title, formatted_field['text/plain']))
599 598 bundle['text/html'] += '<h1>' + title + '</h1>\n' + formatted_field['text/html'] + '\n'
600 599
601 600 def code_formatter(text):
602 601 return {
603 602 'text/plain': self.format(text),
604 603 'text/html': pylight(text)
605 604 }
606 605
607 606 if info['isalias']:
608 607 append_field(_mime, 'Repr', 'string_form')
609 608
610 609 elif info['ismagic']:
611 610 if detail_level > 0:
612 611 append_field(_mime, 'Source', 'source', code_formatter)
613 612 else:
614 613 append_field(_mime, 'Docstring', 'docstring', formatter)
615 614 append_field(_mime, 'File', 'file')
616 615
617 616 elif info['isclass'] or is_simple_callable(obj):
618 617 # Functions, methods, classes
619 618 append_field(_mime, 'Signature', 'definition', code_formatter)
620 619 append_field(_mime, 'Init signature', 'init_definition', code_formatter)
621 620 append_field(_mime, 'Docstring', 'docstring', formatter)
622 621 if detail_level > 0 and info['source']:
623 622 append_field(_mime, 'Source', 'source', code_formatter)
624 623 else:
625 624 append_field(_mime, 'Init docstring', 'init_docstring', formatter)
626 625
627 626 append_field(_mime, 'File', 'file')
628 627 append_field(_mime, 'Type', 'type_name')
629 628 append_field(_mime, 'Subclasses', 'subclasses')
630 629
631 630 else:
632 631 # General Python objects
633 632 append_field(_mime, 'Signature', 'definition', code_formatter)
634 633 append_field(_mime, 'Call signature', 'call_def', code_formatter)
635 634 append_field(_mime, 'Type', 'type_name')
636 635 append_field(_mime, 'String form', 'string_form')
637 636
638 637 # Namespace
639 638 if info['namespace'] != 'Interactive':
640 639 append_field(_mime, 'Namespace', 'namespace')
641 640
642 641 append_field(_mime, 'Length', 'length')
643 642 append_field(_mime, 'File', 'file')
644 643
645 644 # Source or docstring, depending on detail level and whether
646 645 # source found.
647 646 if detail_level > 0 and info['source']:
648 647 append_field(_mime, 'Source', 'source', code_formatter)
649 648 else:
650 649 append_field(_mime, 'Docstring', 'docstring', formatter)
651 650
652 651 append_field(_mime, 'Class docstring', 'class_docstring', formatter)
653 652 append_field(_mime, 'Init docstring', 'init_docstring', formatter)
654 653 append_field(_mime, 'Call docstring', 'call_docstring', formatter)
655 654
656 655
657 656 return self.format_mime(_mime)
658 657
659 658 def pinfo(self, obj, oname='', formatter=None, info=None, detail_level=0, enable_html_pager=True):
660 659 """Show detailed information about an object.
661 660
662 661 Optional arguments:
663 662
664 663 - oname: name of the variable pointing to the object.
665 664
666 665 - formatter: callable (optional)
667 666 A special formatter for docstrings.
668 667
669 668 The formatter is a callable that takes a string as an input
670 669 and returns either a formatted string or a mime type bundle
671 670 in the form of a dictionary.
672 671
673 672 Note that support for a custom formatter returning a string
674 673 instead of a mime type bundle is deprecated.
675 674
676 675 - info: a structure with some information fields which may have been
677 676 precomputed already.
678 677
679 678 - detail_level: if set to 1, more information is given.
680 679 """
681 680 info = self._get_info(obj, oname, formatter, info, detail_level)
682 681 if not enable_html_pager:
683 682 del info['text/html']
684 683 page.page(info)
685 684
686 685 def info(self, obj, oname='', formatter=None, info=None, detail_level=0):
687 686 """DEPRECATED. Compute a dict with detailed information about an object.
688 687 """
689 688 if formatter is not None:
690 689 warnings.warn('The `formatter` keyword argument to `Inspector.info` '
691 690 'is deprecated as of IPython 5.0 and will have no effect.',
692 691 DeprecationWarning, stacklevel=2)
693 692 return self._info(obj, oname=oname, info=info, detail_level=detail_level)
694 693
695 694 def _info(self, obj, oname='', info=None, detail_level=0) -> dict:
696 695 """Compute a dict with detailed information about an object.
697 696
698 697 Parameters
699 698 ==========
700 699
701 700 obj: any
702 701 An object to find information about
703 702 oname: str (default: ''):
704 703 Name of the variable pointing to `obj`.
705 704 info: (default: None)
706 705 A struct (dict like with attr access) with some information fields
707 706 which may have been precomputed already.
708 707 detail_level: int (default:0)
709 708 If set to 1, more information is given.
710 709
711 710 Returns
712 711 =======
713 712
714 713 An object info dict with known fields from `info_fields`. Keys are
715 714 strings, values are strings or None.
716 715 """
717 716
718 717 if info is None:
719 718 ismagic = False
720 719 isalias = False
721 720 ospace = ''
722 721 else:
723 722 ismagic = info.ismagic
724 723 isalias = info.isalias
725 724 ospace = info.namespace
726 725
727 726 # Get docstring, special-casing aliases:
728 727 if isalias:
729 728 if not callable(obj):
730 729 try:
731 730 ds = "Alias to the system command:\n %s" % obj[1]
732 731 except:
733 732 ds = "Alias: " + str(obj)
734 733 else:
735 734 ds = "Alias to " + str(obj)
736 735 if obj.__doc__:
737 736 ds += "\nDocstring:\n" + obj.__doc__
738 737 else:
739 738 ds = getdoc(obj)
740 739 if ds is None:
741 740 ds = '<no docstring>'
742 741
743 742 # store output in a dict, we initialize it here and fill it as we go
744 743 out = dict(name=oname, found=True, isalias=isalias, ismagic=ismagic, subclasses=None)
745 744
746 745 string_max = 200 # max size of strings to show (snipped if longer)
747 746 shalf = int((string_max - 5) / 2)
748 747
749 748 if ismagic:
750 749 out['type_name'] = 'Magic function'
751 750 elif isalias:
752 751 out['type_name'] = 'System alias'
753 752 else:
754 753 out['type_name'] = type(obj).__name__
755 754
756 755 try:
757 756 bclass = obj.__class__
758 757 out['base_class'] = str(bclass)
759 758 except:
760 759 pass
761 760
762 761 # String form, but snip if too long in ? form (full in ??)
763 762 if detail_level >= self.str_detail_level:
764 763 try:
765 764 ostr = str(obj)
766 765 str_head = 'string_form'
767 766 if not detail_level and len(ostr)>string_max:
768 767 ostr = ostr[:shalf] + ' <...> ' + ostr[-shalf:]
769 768 ostr = ("\n" + " " * len(str_head.expandtabs())).\
770 769 join(q.strip() for q in ostr.split("\n"))
771 770 out[str_head] = ostr
772 771 except:
773 772 pass
774 773
775 774 if ospace:
776 775 out['namespace'] = ospace
777 776
778 777 # Length (for strings and lists)
779 778 try:
780 779 out['length'] = str(len(obj))
781 780 except Exception:
782 781 pass
783 782
784 783 # Filename where object was defined
785 784 binary_file = False
786 785 fname = find_file(obj)
787 786 if fname is None:
788 787 # if anything goes wrong, we don't want to show source, so it's as
789 788 # if the file was binary
790 789 binary_file = True
791 790 else:
792 791 if fname.endswith(('.so', '.dll', '.pyd')):
793 792 binary_file = True
794 793 elif fname.endswith('<string>'):
795 794 fname = 'Dynamically generated function. No source code available.'
796 795 out['file'] = compress_user(fname)
797 796
798 797 # Original source code for a callable, class or property.
799 798 if detail_level:
800 799 # Flush the source cache because inspect can return out-of-date
801 800 # source
802 801 linecache.checkcache()
803 802 try:
804 803 if isinstance(obj, property) or not binary_file:
805 804 src = getsource(obj, oname)
806 805 if src is not None:
807 806 src = src.rstrip()
808 807 out['source'] = src
809 808
810 809 except Exception:
811 810 pass
812 811
813 812 # Add docstring only if no source is to be shown (avoid repetitions).
814 813 if ds and not self._source_contains_docstring(out.get('source'), ds):
815 814 out['docstring'] = ds
816 815
817 816 # Constructor docstring for classes
818 817 if inspect.isclass(obj):
819 818 out['isclass'] = True
820 819
821 820 # get the init signature:
822 821 try:
823 822 init_def = self._getdef(obj, oname)
824 823 except AttributeError:
825 824 init_def = None
826 825
827 826 # get the __init__ docstring
828 827 try:
829 828 obj_init = obj.__init__
830 829 except AttributeError:
831 830 init_ds = None
832 831 else:
833 832 if init_def is None:
834 833 # Get signature from init if top-level sig failed.
835 834 # Can happen for built-in types (list, etc.).
836 835 try:
837 836 init_def = self._getdef(obj_init, oname)
838 837 except AttributeError:
839 838 pass
840 839 init_ds = getdoc(obj_init)
841 840 # Skip Python's auto-generated docstrings
842 841 if init_ds == _object_init_docstring:
843 842 init_ds = None
844 843
845 844 if init_def:
846 845 out['init_definition'] = init_def
847 846
848 847 if init_ds:
849 848 out['init_docstring'] = init_ds
850 849
851 850 names = [sub.__name__ for sub in type.__subclasses__(obj)]
852 851 if len(names) < 10:
853 852 all_names = ', '.join(names)
854 853 else:
855 854 all_names = ', '.join(names[:10]+['...'])
856 855 out['subclasses'] = all_names
857 856 # and class docstring for instances:
858 857 else:
859 858 # reconstruct the function definition and print it:
860 859 defln = self._getdef(obj, oname)
861 860 if defln:
862 861 out['definition'] = defln
863 862
864 863 # First, check whether the instance docstring is identical to the
865 864 # class one, and print it separately if they don't coincide. In
866 865 # most cases they will, but it's nice to print all the info for
867 866 # objects which use instance-customized docstrings.
868 867 if ds:
869 868 try:
870 869 cls = getattr(obj,'__class__')
871 870 except:
872 871 class_ds = None
873 872 else:
874 873 class_ds = getdoc(cls)
875 874 # Skip Python's auto-generated docstrings
876 875 if class_ds in _builtin_type_docstrings:
877 876 class_ds = None
878 877 if class_ds and ds != class_ds:
879 878 out['class_docstring'] = class_ds
880 879
881 880 # Next, try to show constructor docstrings
882 881 try:
883 882 init_ds = getdoc(obj.__init__)
884 883 # Skip Python's auto-generated docstrings
885 884 if init_ds == _object_init_docstring:
886 885 init_ds = None
887 886 except AttributeError:
888 887 init_ds = None
889 888 if init_ds:
890 889 out['init_docstring'] = init_ds
891 890
892 891 # Call form docstring for callable instances
893 892 if safe_hasattr(obj, '__call__') and not is_simple_callable(obj):
894 893 call_def = self._getdef(obj.__call__, oname)
895 894 if call_def and (call_def != out.get('definition')):
896 895 # it may never be the case that call def and definition differ,
897 896 # but don't include the same signature twice
898 897 out['call_def'] = call_def
899 898 call_ds = getdoc(obj.__call__)
900 899 # Skip Python's auto-generated docstrings
901 900 if call_ds == _func_call_docstring:
902 901 call_ds = None
903 902 if call_ds:
904 903 out['call_docstring'] = call_ds
905 904
906 905 return object_info(**out)
907 906
908 907 @staticmethod
909 908 def _source_contains_docstring(src, doc):
910 909 """
911 910 Check whether the source *src* contains the docstring *doc*.
912 911
913 912 This is a helper function to skip displaying the docstring if the
914 913 source already contains it, avoiding repetition of information.
915 914 """
916 915 try:
917 916 def_node, = ast.parse(dedent(src)).body
918 917 return ast.get_docstring(def_node) == doc
919 918 except Exception:
920 919 # The source can become invalid or even non-existent (because it
921 920 # is re-fetched from the source file) so the above code can fail in
922 921 # arbitrary ways.
923 922 return False
924 923
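A short sketch of the check above; the source snippet is made up.

src = '''def greet(name):
    "Say hello."
    return "hello " + name'''

print(Inspector._source_contains_docstring(src, "Say hello."))      # True
print(Inspector._source_contains_docstring(src, "Something else"))  # False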
925 924 def psearch(self,pattern,ns_table,ns_search=[],
926 925 ignore_case=False,show_all=False, *, list_types=False):
927 926 """Search namespaces with wildcards for objects.
928 927
929 928 Arguments:
930 929
931 930 - pattern: string containing shell-like wildcards to use in namespace
932 931 searches and optionally a type specification to narrow the search to
933 932 objects of that type.
934 933
935 934 - ns_table: dict of name->namespaces for search.
936 935
937 936 Optional arguments:
938 937
939 938 - ns_search: list of namespace names to include in search.
940 939
941 940 - ignore_case(False): make the search case-insensitive.
942 941
943 942 - show_all(False): show all names, including those starting with
944 943 underscores.
945 944
946 945 - list_types(False): list all available object types for object matching.
947 946 """
948 947 #print 'ps pattern:<%r>' % pattern # dbg
949 948
950 949 # defaults
951 950 type_pattern = 'all'
952 951 filter = ''
953 952
954 953 # list all object types
955 954 if list_types:
956 955 page.page('\n'.join(sorted(typestr2type)))
957 956 return
958 957
959 958 cmds = pattern.split()
960 959 len_cmds = len(cmds)
961 960 if len_cmds == 1:
962 961 # Only filter pattern given
963 962 filter = cmds[0]
964 963 elif len_cmds == 2:
965 964 # Both filter and type specified
966 965 filter,type_pattern = cmds
967 966 else:
968 967 raise ValueError('invalid argument string for psearch: <%s>' %
969 968 pattern)
970 969
971 970 # filter search namespaces
972 971 for name in ns_search:
973 972 if name not in ns_table:
974 973 raise ValueError('invalid namespace <%s>. Valid names: %s' %
975 974 (name,ns_table.keys()))
976 975
977 976 #print 'type_pattern:',type_pattern # dbg
978 977 search_result, namespaces_seen = set(), set()
979 978 for ns_name in ns_search:
980 979 ns = ns_table[ns_name]
981 980 # Normally, locals and globals are the same, so we just check one.
982 981 if id(ns) in namespaces_seen:
983 982 continue
984 983 namespaces_seen.add(id(ns))
985 984 tmp_res = list_namespace(ns, type_pattern, filter,
986 985 ignore_case=ignore_case, show_all=show_all)
987 986 search_result.update(tmp_res)
988 987
989 988 page.page('\n'.join(sorted(search_result)))
990 989
991 990
992 991 def _render_signature(obj_signature, obj_name) -> str:
993 992 """
994 993 This was mostly taken from inspect.Signature.__str__.
995 994 Look there for the comments.
996 995 The only change is to add linebreaks when this gets too long.
997 996 """
998 997 result = []
999 998 pos_only = False
1000 999 kw_only = True
1001 1000 for param in obj_signature.parameters.values():
1002 1001 if param.kind == inspect._POSITIONAL_ONLY:
1003 1002 pos_only = True
1004 1003 elif pos_only:
1005 1004 result.append('/')
1006 1005 pos_only = False
1007 1006
1008 1007 if param.kind == inspect._VAR_POSITIONAL:
1009 1008 kw_only = False
1010 1009 elif param.kind == inspect._KEYWORD_ONLY and kw_only:
1011 1010 result.append('*')
1012 1011 kw_only = False
1013 1012
1014 1013 result.append(str(param))
1015 1014
1016 1015 if pos_only:
1017 1016 result.append('/')
1018 1017
1019 1018 # add up name, parameters, parentheses (2), and commas
1020 1019 if len(obj_name) + sum(len(r) + 2 for r in result) > 75:
1021 1020 # This doesn’t fit behind “Signature: ” in an inspect window.
1022 1021 rendered = '{}(\n{})'.format(obj_name, ''.join(
1023 1022 ' {},\n'.format(r) for r in result)
1024 1023 )
1025 1024 else:
1026 1025 rendered = '{}({})'.format(obj_name, ', '.join(result))
1027 1026
1028 1027 if obj_signature.return_annotation is not inspect._empty:
1029 1028 anno = inspect.formatannotation(obj_signature.return_annotation)
1030 1029 rendered += ' -> {}'.format(anno)
1031 1030
1032 1031 return rendered
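A hedged sketch of `_render_signature`; `resize` is a made-up function. Short signatures stay on one line, while anything past roughly 75 characters is broken with one parameter per line.

from inspect import signature

def resize(image, width, height, *, keep_aspect=True, resample='bicubic'):
    ...

print(_render_signature(signature(resize), 'resize'))
# resize(image, width, height, *, keep_aspect=True, resample='bicubic')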
@@ -1,1506 +1,1503 @@
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 Verbose and colourful traceback formatting.
4 4
5 5 **ColorTB**
6 6
7 7 I've always found it a bit hard to visually parse tracebacks in Python. The
8 8 ColorTB class is a solution to that problem. It colors the different parts of a
9 9 traceback in a manner similar to what you would expect from a syntax-highlighting
10 10 text editor.
11 11
12 12 Installation instructions for ColorTB::
13 13
14 14 import sys,ultratb
15 15 sys.excepthook = ultratb.ColorTB()
16 16
17 17 **VerboseTB**
18 18
19 19 I've also included a port of Ka-Ping Yee's "cgitb.py" that produces all kinds
20 20 of useful info when a traceback occurs. Ping originally had it spit out HTML
21 21 and intended it for CGI programmers, but why should they have all the fun? I
22 22 altered it to spit out colored text to the terminal. It's a bit overwhelming,
23 23 but kind of neat, and maybe useful for long-running programs that you believe
24 24 are bug-free. If a crash *does* occur in that type of program you want details.
25 25 Give it a shot--you'll love it or you'll hate it.
26 26
27 27 .. note::
28 28
29 29 The Verbose mode prints the variables currently visible where the exception
30 30 happened (shortening their strings if too long). This can potentially be
31 31 very slow, if you happen to have a huge data structure whose string
32 32 representation is complex to compute. Your computer may appear to freeze for
33 33 a while with cpu usage at 100%. If this occurs, you can cancel the traceback
34 34 with Ctrl-C (maybe hitting it more than once).
35 35
36 36 If you encounter this kind of situation often, you may want to use the
37 37 Verbose_novars mode instead of the regular Verbose, which avoids formatting
38 38 variables (but otherwise includes the information and context given by
39 39 Verbose).
40 40
41 41 .. note::
42 42
43 43 The verbose mode prints all variables in the stack, which means it can
44 44 potentially leak sensitive information such as access keys or unencrypted
45 45 passwords.
46 46
47 47 Installation instructions for VerboseTB::
48 48
49 49 import sys,ultratb
50 50 sys.excepthook = ultratb.VerboseTB()
51 51
52 52 Note: Much of the code in this module was lifted verbatim from the standard
53 53 library module 'traceback.py' and Ka-Ping Yee's 'cgitb.py'.
54 54
55 55 Color schemes
56 56 -------------
57 57
58 58 The colors are defined in the class TBTools through the use of the
59 59 ColorSchemeTable class. Currently the following exist:
60 60
61 61 - NoColor: allows all of this module to be used in any terminal (the color
62 62 escapes are just dummy blank strings).
63 63
64 64 - Linux: is meant to look good in a terminal like the Linux console (black
65 65 or very dark background).
66 66
67 67 - LightBG: similar to Linux but swaps dark/light colors to be more readable
68 68 in light background terminals.
69 69
70 70 - Neutral: a neutral color scheme that should be readable on both light and
71 71 dark background
72 72
73 73 You can implement other color schemes easily; the syntax is fairly
74 74 self-explanatory. Please send back new schemes you develop to the author for
75 75 possible inclusion in future releases.
76 76
77 77 Inheritance diagram:
78 78
79 79 .. inheritance-diagram:: IPython.core.ultratb
80 80 :parts: 3
81 81 """
82 82
83 83 #*****************************************************************************
84 84 # Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu>
85 85 # Copyright (C) 2001-2004 Fernando Perez <fperez@colorado.edu>
86 86 #
87 87 # Distributed under the terms of the BSD License. The full license is in
88 88 # the file COPYING, distributed as part of this software.
89 89 #*****************************************************************************
90 90
91 91
92 92 import dis
93 93 import inspect
94 94 import keyword
95 95 import linecache
96 96 import os
97 97 import pydoc
98 98 import re
99 99 import sys
100 100 import time
101 101 import tokenize
102 102 import traceback
103 103
104 try: # Python 2
105 generate_tokens = tokenize.generate_tokens
106 except AttributeError: # Python 3
107 generate_tokens = tokenize.tokenize
104 from tokenize import generate_tokens
108 105
109 106 # For purposes of monkeypatching inspect to fix a bug in it.
110 107 from inspect import getsourcefile, getfile, getmodule, \
111 108 ismodule, isclass, ismethod, isfunction, istraceback, isframe, iscode
112 109
113 110 # IPython's own modules
114 111 from IPython import get_ipython
115 112 from IPython.core import debugger
116 113 from IPython.core.display_trap import DisplayTrap
117 114 from IPython.core.excolors import exception_colors
118 115 from IPython.utils import PyColorize
119 116 from IPython.utils import path as util_path
120 117 from IPython.utils import py3compat
121 118 from IPython.utils.data import uniq_stable
122 119 from IPython.utils.terminal import get_terminal_size
123 120
124 121 from logging import info, error, debug
125 122
126 123 from importlib.util import source_from_cache
127 124
128 125 import IPython.utils.colorable as colorable
129 126
130 127 # Globals
131 128 # amount of space to put line numbers before verbose tracebacks
132 129 INDENT_SIZE = 8
133 130
134 131 # Default color scheme. This is used, for example, by the traceback
135 132 # formatter. When running in an actual IPython instance, the user's rc.colors
136 133 # value is used, but having a module global makes this functionality available
137 134 # to users of ultratb who are NOT running inside ipython.
138 135 DEFAULT_SCHEME = 'NoColor'
139 136
140 137
141 138 # Number of frames above which we are likely to have a recursion and will
142 139 # **attempt** to detect it. Made modifiable mostly to speed up the test suite,
143 140 # as detecting recursion is one of our slowest tests
144 141 _FRAME_RECURSION_LIMIT = 500
145 142
146 143 # ---------------------------------------------------------------------------
147 144 # Code begins
148 145
149 146 # Utility functions
150 147 def inspect_error():
151 148 """Print a message about internal inspect errors.
152 149
153 150 These are unfortunately quite common."""
154 151
155 152 error('Internal Python error in the inspect module.\n'
156 153 'Below is the traceback from this internal error.\n')
157 154
158 155
159 156 # This function is a monkeypatch we apply to the Python inspect module. We have
160 157 # now found when it's needed (see discussion on issue gh-1456), and we have a
161 158 # test case (IPython.core.tests.test_ultratb.ChangedPyFileTest) that fails if
162 159 # the monkeypatch is not applied. TK, Aug 2012.
163 160 def findsource(object):
164 161 """Return the entire source file and starting line number for an object.
165 162
166 163 The argument may be a module, class, method, function, traceback, frame,
167 164 or code object. The source code is returned as a list of all the lines
168 165 in the file and the line number indexes a line in that list. An IOError
169 166 is raised if the source code cannot be retrieved.
170 167
171 168 FIXED version with which we monkeypatch the stdlib to work around a bug."""
172 169
173 170 file = getsourcefile(object) or getfile(object)
174 171 # If the object is a frame, then trying to get the globals dict from its
175 172 # module won't work. Instead, the frame object itself has the globals
176 173 # dictionary.
177 174 globals_dict = None
178 175 if inspect.isframe(object):
179 176 # XXX: can this ever be false?
180 177 globals_dict = object.f_globals
181 178 else:
182 179 module = getmodule(object, file)
183 180 if module:
184 181 globals_dict = module.__dict__
185 182 lines = linecache.getlines(file, globals_dict)
186 183 if not lines:
187 184 raise IOError('could not get source code')
188 185
189 186 if ismodule(object):
190 187 return lines, 0
191 188
192 189 if isclass(object):
193 190 name = object.__name__
194 191 pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
195 192 # make some effort to find the best matching class definition:
196 193 # use the one with the least indentation, which is the one
197 194 # that's most probably not inside a function definition.
198 195 candidates = []
199 196 for i, line in enumerate(lines):
200 197 match = pat.match(line)
201 198 if match:
202 199 # if it's at toplevel, it's already the best one
203 200 if line[0] == 'c':
204 201 return lines, i
205 202 # else add whitespace to candidate list
206 203 candidates.append((match.group(1), i))
207 204 if candidates:
208 205 # this will sort by whitespace, and by line number,
209 206 # less whitespace first
210 207 candidates.sort()
211 208 return lines, candidates[0][1]
212 209 else:
213 210 raise IOError('could not find class definition')
214 211
215 212 if ismethod(object):
216 213 object = object.__func__
217 214 if isfunction(object):
218 215 object = object.__code__
219 216 if istraceback(object):
220 217 object = object.tb_frame
221 218 if isframe(object):
222 219 object = object.f_code
223 220 if iscode(object):
224 221 if not hasattr(object, 'co_firstlineno'):
225 222 raise IOError('could not find function definition')
226 223 pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
227 224 pmatch = pat.match
228 225 # fperez - fix: sometimes, co_firstlineno can give a number larger than
229 226 # the length of lines, which causes an error. Safeguard against that.
230 227 lnum = min(object.co_firstlineno, len(lines)) - 1
231 228 while lnum > 0:
232 229 if pmatch(lines[lnum]):
233 230 break
234 231 lnum -= 1
235 232
236 233 return lines, lnum
237 234 raise IOError('could not find code object')
238 235
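A quick sketch of the patched `findsource` on an ordinary stdlib function; the exact line number depends on the Python version.

import inspect

lines, lnum = findsource(inspect.getmodule)
print(lnum, lines[lnum].strip())   # e.g. 'def getmodule(object, _filename=None):'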
239 236
240 237 # This is a patched version of inspect.getargs that applies the (unmerged)
241 238 # patch for http://bugs.python.org/issue14611 by Stefano Taschini. This fixes
242 239 # https://github.com/ipython/ipython/issues/8205 and
243 240 # https://github.com/ipython/ipython/issues/8293
244 241 def getargs(co):
245 242 """Get information about the arguments accepted by a code object.
246 243
247 244 Three things are returned: (args, varargs, varkw), where 'args' is
248 245 a list of argument names (possibly containing nested lists), and
249 246 'varargs' and 'varkw' are the names of the * and ** arguments or None."""
250 247 if not iscode(co):
251 248 raise TypeError('{!r} is not a code object'.format(co))
252 249
253 250 nargs = co.co_argcount
254 251 names = co.co_varnames
255 252 args = list(names[:nargs])
256 253 step = 0
257 254
258 255 # The following acrobatics are for anonymous (tuple) arguments.
259 256 for i in range(nargs):
260 257 if args[i][:1] in ('', '.'):
261 258 stack, remain, count = [], [], []
262 259 while step < len(co.co_code):
263 260 op = ord(co.co_code[step])
264 261 step = step + 1
265 262 if op >= dis.HAVE_ARGUMENT:
266 263 opname = dis.opname[op]
267 264 value = ord(co.co_code[step]) + ord(co.co_code[step+1])*256
268 265 step = step + 2
269 266 if opname in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'):
270 267 remain.append(value)
271 268 count.append(value)
272 269 elif opname in ('STORE_FAST', 'STORE_DEREF'):
273 270 if op in dis.haslocal:
274 271 stack.append(co.co_varnames[value])
275 272 elif op in dis.hasfree:
276 273 stack.append((co.co_cellvars + co.co_freevars)[value])
277 274 # Special case for sublists of length 1: def foo((bar))
278 275 # doesn't generate the UNPACK_TUPLE bytecode, so if
279 276 # `remain` is empty here, we have such a sublist.
280 277 if not remain:
281 278 stack[0] = [stack[0]]
282 279 break
283 280 else:
284 281 remain[-1] = remain[-1] - 1
285 282 while remain[-1] == 0:
286 283 remain.pop()
287 284 size = count.pop()
288 285 stack[-size:] = [stack[-size:]]
289 286 if not remain:
290 287 break
291 288 remain[-1] = remain[-1] - 1
292 289 if not remain:
293 290 break
294 291 args[i] = stack[0]
295 292
296 293 varargs = None
297 294 if co.co_flags & inspect.CO_VARARGS:
298 295 varargs = co.co_varnames[nargs]
299 296 nargs = nargs + 1
300 297 varkw = None
301 298 if co.co_flags & inspect.CO_VARKEYWORDS:
302 299 varkw = co.co_varnames[nargs]
303 300 return inspect.Arguments(args, varargs, varkw)
304 301
305 302
306 303 # Monkeypatch inspect to apply our bugfix.
307 304 def with_patch_inspect(f):
308 305 """
309 306 Deprecated since IPython 6.0
310 307 decorator for monkeypatching inspect.findsource
311 308 """
312 309
313 310 def wrapped(*args, **kwargs):
314 311 save_findsource = inspect.findsource
315 312 save_getargs = inspect.getargs
316 313 inspect.findsource = findsource
317 314 inspect.getargs = getargs
318 315 try:
319 316 return f(*args, **kwargs)
320 317 finally:
321 318 inspect.findsource = save_findsource
322 319 inspect.getargs = save_getargs
323 320
324 321 return wrapped
325 322
326 323
327 324 def fix_frame_records_filenames(records):
328 325 """Try to fix the filenames in each record from inspect.getinnerframes().
329 326
330 327 Particularly, modules loaded from within zip files have useless filenames
331 328 attached to their code object, and inspect.getinnerframes() just uses it.
332 329 """
333 330 fixed_records = []
334 331 for frame, filename, line_no, func_name, lines, index in records:
335 332 # Look inside the frame's globals dictionary for __file__,
336 333 # which should be better. However, keep Cython filenames since
337 334 # we prefer the source filenames over the compiled .so file.
338 335 if not filename.endswith(('.pyx', '.pxd', '.pxi')):
339 336 better_fn = frame.f_globals.get('__file__', None)
340 337 if isinstance(better_fn, str):
341 338 # Check the type just in case someone did something weird with
342 339 # __file__. It might also be None if the error occurred during
343 340 # import.
344 341 filename = better_fn
345 342 fixed_records.append((frame, filename, line_no, func_name, lines, index))
346 343 return fixed_records
347 344
348 345
349 346 @with_patch_inspect
350 347 def _fixed_getinnerframes(etb, context=1, tb_offset=0):
351 348 LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5
352 349
353 350 records = fix_frame_records_filenames(inspect.getinnerframes(etb, context))
354 351 # If the error is at the console, don't build any context, since it would
355 352 # otherwise produce 5 blank lines printed out (there is no file at the
356 353 # console)
357 354 rec_check = records[tb_offset:]
358 355 try:
359 356 rname = rec_check[0][1]
360 357 if rname == '<ipython console>' or rname.endswith('<string>'):
361 358 return rec_check
362 359 except IndexError:
363 360 pass
364 361
365 362 aux = traceback.extract_tb(etb)
366 363 assert len(records) == len(aux)
367 364 for i, (file, lnum, _, _) in enumerate(aux):
368 365 maybeStart = lnum - 1 - context // 2
369 366 start = max(maybeStart, 0)
370 367 end = start + context
371 368 lines = linecache.getlines(file)[start:end]
372 369 buf = list(records[i])
373 370 buf[LNUM_POS] = lnum
374 371 buf[INDEX_POS] = lnum - 1 - start
375 372 buf[LINES_POS] = lines
376 373 records[i] = tuple(buf)
377 374 return records[tb_offset:]
378 375
379 376 # Helper function -- largely belongs to VerboseTB, but we need the same
380 377 # functionality to produce a pseudo verbose TB for SyntaxErrors, so that they
381 378 # can be recognized properly by ipython.el's py-traceback-line-re
382 379 # (SyntaxErrors have to be treated specially because they have no traceback)
383 380
384 381
385 382 def _format_traceback_lines(lnum, index, lines, Colors, lvals, _line_format):
386 383 """
387 384 Format traceback lines with a pointing arrow and leading line numbers.
388 385
389 386 Parameters
390 387 ==========
391 388
392 389 lnum: int
393 390 index: int
394 391 lines: list[string]
395 392 Colors:
396 393 ColorScheme used.
397 394 lvals: bytes
398 395 Values of local variables, already colored, to inject just after the error line.
399 396 _line_format: f (str) -> (str, bool)
400 397 returns (colorized version of str, whether colorization failed)
401 398 """
402 399 numbers_width = INDENT_SIZE - 1
403 400 res = []
404 401
405 402 for i,line in enumerate(lines, lnum-index):
406 403 line = py3compat.cast_unicode(line)
407 404
408 405 new_line, err = _line_format(line, 'str')
409 406 if not err:
410 407 line = new_line
411 408
412 409 if i == lnum:
413 410 # This is the line with the error
414 411 pad = numbers_width - len(str(i))
415 412 num = '%s%s' % (debugger.make_arrow(pad), str(lnum))
416 413 line = '%s%s%s %s%s' % (Colors.linenoEm, num,
417 414 Colors.line, line, Colors.Normal)
418 415 else:
419 416 num = '%*s' % (numbers_width, i)
420 417 line = '%s%s%s %s' % (Colors.lineno, num,
421 418 Colors.Normal, line)
422 419
423 420 res.append(line)
424 421 if lvals and i == lnum:
425 422 res.append(lvals + '\n')
426 423 return res
427 424
428 425 def is_recursion_error(etype, value, records):
429 426 try:
430 427 # RecursionError is new in Python 3.5
431 428 recursion_error_type = RecursionError
432 429 except NameError:
433 430 recursion_error_type = RuntimeError
434 431
435 432 # The default recursion limit is 1000, but some of that will be taken up
436 433 # by stack frames in IPython itself. >500 frames probably indicates
437 434 # a recursion error.
438 435 return (etype is recursion_error_type) \
439 436 and "recursion" in str(value).lower() \
440 437 and len(records) > _FRAME_RECURSION_LIMIT
441 438
442 439 def find_recursion(etype, value, records):
443 440 """Identify the repeating stack frames from a RecursionError traceback
444 441
445 442 'records' is a list as returned by VerboseTB.get_records()
446 443
447 444 Returns (last_unique, repeat_length)
448 445 """
449 446 # This involves a bit of guesswork - we want to show enough of the traceback
450 447 # to indicate where the recursion is occurring. We guess that the innermost
451 448 quarter of the traceback (250 frames by default) consists of repeats, and find the
452 449 # first frame (from in to out) that looks different.
453 450 if not is_recursion_error(etype, value, records):
454 451 return len(records), 0
455 452
456 453 # Select filename, lineno, func_name to track frames with
457 454 records = [r[1:4] for r in records]
458 455 inner_frames = records[-(len(records)//4):]
459 456 frames_repeated = set(inner_frames)
460 457
461 458 last_seen_at = {}
462 459 longest_repeat = 0
463 460 i = len(records)
464 461 for frame in reversed(records):
465 462 i -= 1
466 463 if frame not in frames_repeated:
467 464 last_unique = i
468 465 break
469 466
470 467 if frame in last_seen_at:
471 468 distance = last_seen_at[frame] - i
472 469 longest_repeat = max(longest_repeat, distance)
473 470
474 471 last_seen_at[frame] = i
475 472 else:
476 473 last_unique = 0 # The whole traceback was recursion
477 474
478 475 return last_unique, longest_repeat
479 476
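A synthetic sketch of `find_recursion`; the records are fabricated in the `(frame, filename, lineno, func_name, ...)` shape produced by `inspect.getinnerframes`, and only elements 1:4 matter here.

outer = [(None, 'app.py', 10, 'main', None, None)]
cycle = [(None, 'app.py', 42, 'recurse', None, None)] * 600
err = RecursionError('maximum recursion depth exceeded')

print(find_recursion(RecursionError, err, outer + cycle))
# (0, 1)  -- frame 0 is the last unique frame; the repeating cycle has length 1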
480 477 #---------------------------------------------------------------------------
481 478 # Module classes
482 479 class TBTools(colorable.Colorable):
483 480 """Basic tools used by all traceback printer classes."""
484 481
485 482 # Number of frames to skip when reporting tracebacks
486 483 tb_offset = 0
487 484
488 485 def __init__(self, color_scheme='NoColor', call_pdb=False, ostream=None, parent=None, config=None):
489 486 # Whether to call the interactive pdb debugger after printing
490 487 # tracebacks or not
491 488 super(TBTools, self).__init__(parent=parent, config=config)
492 489 self.call_pdb = call_pdb
493 490
494 491 # Output stream to write to. Note that we store the original value in
495 492 # a private attribute and then make the public ostream a property, so
496 493 # that we can delay accessing sys.stdout until runtime. The way
497 494 # things are written now, the sys.stdout object is dynamically managed
498 495 # so a reference to it should NEVER be stored statically. This
499 496 # property approach confines this detail to a single location, and all
500 497 # subclasses can simply access self.ostream for writing.
501 498 self._ostream = ostream
502 499
503 500 # Create color table
504 501 self.color_scheme_table = exception_colors()
505 502
506 503 self.set_colors(color_scheme)
507 504 self.old_scheme = color_scheme # save initial value for toggles
508 505
509 506 if call_pdb:
510 507 self.pdb = debugger.Pdb()
511 508 else:
512 509 self.pdb = None
513 510
514 511 def _get_ostream(self):
515 512 """Output stream that exceptions are written to.
516 513
517 514 Valid values are:
518 515
519 516 - None: the default, which means that IPython will dynamically resolve
520 517 to sys.stdout. This ensures compatibility with most tools, including
521 518 Windows (where plain stdout doesn't recognize ANSI escapes).
522 519
523 520 - Any object with 'write' and 'flush' attributes.
524 521 """
525 522 return sys.stdout if self._ostream is None else self._ostream
526 523
527 524 def _set_ostream(self, val):
528 525 assert val is None or (hasattr(val, 'write') and hasattr(val, 'flush'))
529 526 self._ostream = val
530 527
531 528 ostream = property(_get_ostream, _set_ostream)
532 529
533 530 def get_parts_of_chained_exception(self, evalue):
534 531 def get_chained_exception(exception_value):
535 532 cause = getattr(exception_value, '__cause__', None)
536 533 if cause:
537 534 return cause
538 535 if getattr(exception_value, '__suppress_context__', False):
539 536 return None
540 537 return getattr(exception_value, '__context__', None)
541 538
542 539 chained_evalue = get_chained_exception(evalue)
543 540
544 541 if chained_evalue:
545 542 return chained_evalue.__class__, chained_evalue, chained_evalue.__traceback__
546 543
547 544 def prepare_chained_exception_message(self, cause):
548 545 direct_cause = "\nThe above exception was the direct cause of the following exception:\n"
549 546 exception_during_handling = "\nDuring handling of the above exception, another exception occurred:\n"
550 547
551 548 if cause:
552 549 message = [[direct_cause]]
553 550 else:
554 551 message = [[exception_during_handling]]
555 552 return message
556 553
557 554 def set_colors(self, *args, **kw):
558 555 """Shorthand access to the color table scheme selector method."""
559 556
560 557 # Set own color table
561 558 self.color_scheme_table.set_active_scheme(*args, **kw)
562 559 # for convenience, set Colors to the active scheme
563 560 self.Colors = self.color_scheme_table.active_colors
564 561 # Also set colors of debugger
565 562 if hasattr(self, 'pdb') and self.pdb is not None:
566 563 self.pdb.set_colors(*args, **kw)
567 564
568 565 def color_toggle(self):
569 566 """Toggle between the currently active color scheme and NoColor."""
570 567
571 568 if self.color_scheme_table.active_scheme_name == 'NoColor':
572 569 self.color_scheme_table.set_active_scheme(self.old_scheme)
573 570 self.Colors = self.color_scheme_table.active_colors
574 571 else:
575 572 self.old_scheme = self.color_scheme_table.active_scheme_name
576 573 self.color_scheme_table.set_active_scheme('NoColor')
577 574 self.Colors = self.color_scheme_table.active_colors
578 575
579 576 def stb2text(self, stb):
580 577 """Convert a structured traceback (a list) to a string."""
581 578 return '\n'.join(stb)
582 579
583 580 def text(self, etype, value, tb, tb_offset=None, context=5):
584 581 """Return formatted traceback.
585 582
586 583 Subclasses may override this if they add extra arguments.
587 584 """
588 585 tb_list = self.structured_traceback(etype, value, tb,
589 586 tb_offset, context)
590 587 return self.stb2text(tb_list)
591 588
592 589 def structured_traceback(self, etype, evalue, tb, tb_offset=None,
593 590 context=5, mode=None):
594 591 """Return a list of traceback frames.
595 592
596 593 Must be implemented by each class.
597 594 """
598 595 raise NotImplementedError()
599 596
600 597
601 598 #---------------------------------------------------------------------------
602 599 class ListTB(TBTools):
603 600 """Print traceback information from a traceback list, with optional color.
604 601
605 602 Calling requires 3 arguments: (etype, evalue, elist)
606 603 as would be obtained by::
607 604
608 605 etype, evalue, tb = sys.exc_info()
609 606 if tb:
610 607 elist = traceback.extract_tb(tb)
611 608 else:
612 609 elist = None
613 610
614 611 It can thus be used by programs which need to process the traceback before
615 612 printing (such as console replacements based on the code module from the
616 613 standard library).
617 614
618 615 Because they are meant to be called without a full traceback (only a
619 616 list), instances of this class can't call the interactive pdb debugger."""
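# Minimal usage sketch (editorial addition), following the recipe in the
# class docstring above:
#
#     import sys, traceback
#     try:
#         1 / 0
#     except ZeroDivisionError:
#         etype, evalue, tb = sys.exc_info()
#         elist = traceback.extract_tb(tb) if tb else None
#         ListTB(color_scheme='NoColor')(etype, evalue, elist)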
620 617
621 618 def __init__(self, color_scheme='NoColor', call_pdb=False, ostream=None, parent=None, config=None):
622 619 TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
623 620 ostream=ostream, parent=parent,config=config)
624 621
625 622 def __call__(self, etype, value, elist):
626 623 self.ostream.flush()
627 624 self.ostream.write(self.text(etype, value, elist))
628 625 self.ostream.write('\n')
629 626
630 627 def _extract_tb(self, tb):
631 628 if tb:
632 629 return traceback.extract_tb(tb)
633 630 else:
634 631 return None
635 632
636 633 def structured_traceback(self, etype, evalue, etb=None, tb_offset=None,
637 634 context=5):
638 635 """Return a color formatted string with the traceback info.
639 636
640 637 Parameters
641 638 ----------
642 639 etype : exception type
643 640 Type of the exception raised.
644 641
645 642 evalue : object
646 643 Data stored in the exception
647 644
648 645 etb : object
649 646 If list: List of frames, see class docstring for details.
650 647 If Traceback: Traceback of the exception.
651 648
652 649 tb_offset : int, optional
653 650 Number of frames in the traceback to skip. If not given, the
654 651 instance value is used (set in constructor).
655 652
656 653 context : int, optional
657 654 Number of lines of context information to print.
658 655
659 656 Returns
660 657 -------
661 658 String with formatted exception.
662 659 """
663 660 # This is a workaround to get chained_exc_ids in recursive calls
664 661 # etb should not be a tuple if structured_traceback is not recursive
665 662 if isinstance(etb, tuple):
666 663 etb, chained_exc_ids = etb
667 664 else:
668 665 chained_exc_ids = set()
669 666
670 667 if isinstance(etb, list):
671 668 elist = etb
672 669 elif etb is not None:
673 670 elist = self._extract_tb(etb)
674 671 else:
675 672 elist = []
676 673 tb_offset = self.tb_offset if tb_offset is None else tb_offset
677 674 Colors = self.Colors
678 675 out_list = []
679 676 if elist:
680 677
681 678 if tb_offset and len(elist) > tb_offset:
682 679 elist = elist[tb_offset:]
683 680
684 681 out_list.append('Traceback %s(most recent call last)%s:' %
685 682 (Colors.normalEm, Colors.Normal) + '\n')
686 683 out_list.extend(self._format_list(elist))
687 684 # The exception info should be a single entry in the list.
688 685 lines = ''.join(self._format_exception_only(etype, evalue))
689 686 out_list.append(lines)
690 687
691 688 exception = self.get_parts_of_chained_exception(evalue)
692 689
693 690 if exception and not id(exception[1]) in chained_exc_ids:
694 691 chained_exception_message = self.prepare_chained_exception_message(
695 692 evalue.__cause__)[0]
696 693 etype, evalue, etb = exception
697 694 # Trace exception to avoid infinite 'cause' loop
698 695 chained_exc_ids.add(id(exception[1]))
699 696 chained_exceptions_tb_offset = 0
700 697 out_list = (
701 698 self.structured_traceback(
702 699 etype, evalue, (etb, chained_exc_ids),
703 700 chained_exceptions_tb_offset, context)
704 701 + chained_exception_message
705 702 + out_list)
706 703
707 704 return out_list
708 705
709 706 def _format_list(self, extracted_list):
710 707 """Format a list of traceback entry tuples for printing.
711 708
712 709 Given a list of tuples as returned by extract_tb() or
713 710 extract_stack(), return a list of strings ready for printing.
714 711 Each string in the resulting list corresponds to the item with the
715 712 same index in the argument list. Each string ends in a newline;
716 713 the strings may contain internal newlines as well, for those items
717 714 whose source text line is not None.
718 715
719 716 Lifted almost verbatim from traceback.py
720 717 """
721 718
722 719 Colors = self.Colors
723 720 list = []
724 721 for filename, lineno, name, line in extracted_list[:-1]:
725 722 item = ' File %s"%s"%s, line %s%d%s, in %s%s%s\n' % \
726 723 (Colors.filename, filename, Colors.Normal,
727 724 Colors.lineno, lineno, Colors.Normal,
728 725 Colors.name, name, Colors.Normal)
729 726 if line:
730 727 item += ' %s\n' % line.strip()
731 728 list.append(item)
732 729 # Emphasize the last entry
733 730 filename, lineno, name, line = extracted_list[-1]
734 731 item = '%s File %s"%s"%s, line %s%d%s, in %s%s%s%s\n' % \
735 732 (Colors.normalEm,
736 733 Colors.filenameEm, filename, Colors.normalEm,
737 734 Colors.linenoEm, lineno, Colors.normalEm,
738 735 Colors.nameEm, name, Colors.normalEm,
739 736 Colors.Normal)
740 737 if line:
741 738 item += '%s %s%s\n' % (Colors.line, line.strip(),
742 739 Colors.Normal)
743 740 list.append(item)
744 741 return list
745 742
746 743 def _format_exception_only(self, etype, value):
747 744 """Format the exception part of a traceback.
748 745
749 746 The arguments are the exception type and value such as given by
750 747 sys.exc_info()[:2]. The return value is a list of strings, each ending
751 748 in a newline. Normally, the list contains a single string; however,
752 749 for SyntaxError exceptions, it contains several lines that (when
753 750 printed) display detailed information about where the syntax error
755 752 occurred. The message indicating which exception occurred is always
756 753 the last string in the list.
756 753
757 754 Also lifted nearly verbatim from traceback.py
758 755 """
759 756 have_filedata = False
760 757 Colors = self.Colors
761 758 list = []
762 759 stype = py3compat.cast_unicode(Colors.excName + etype.__name__ + Colors.Normal)
763 760 if value is None:
764 761 # Not sure if this can still happen in current Python versions
765 762 list.append(stype + '\n')
766 763 else:
767 764 if issubclass(etype, SyntaxError):
768 765 have_filedata = True
769 766 if not value.filename: value.filename = "<string>"
770 767 if value.lineno:
771 768 lineno = value.lineno
772 769 textline = linecache.getline(value.filename, value.lineno)
773 770 else:
774 771 lineno = 'unknown'
775 772 textline = ''
776 773 list.append('%s File %s"%s"%s, line %s%s%s\n' % \
777 774 (Colors.normalEm,
778 775 Colors.filenameEm, py3compat.cast_unicode(value.filename), Colors.normalEm,
779 776 Colors.linenoEm, lineno, Colors.Normal ))
780 777 if textline == '':
781 778 textline = py3compat.cast_unicode(value.text, "utf-8")
782 779
783 780 if textline is not None:
784 781 i = 0
785 782 while i < len(textline) and textline[i].isspace():
786 783 i += 1
787 784 list.append('%s %s%s\n' % (Colors.line,
788 785 textline.strip(),
789 786 Colors.Normal))
790 787 if value.offset is not None:
791 788 s = ' '
792 789 for c in textline[i:value.offset - 1]:
793 790 if c.isspace():
794 791 s += c
795 792 else:
796 793 s += ' '
797 794 list.append('%s%s^%s\n' % (Colors.caret, s,
798 795 Colors.Normal))
799 796
800 797 try:
801 798 s = value.msg
802 799 except Exception:
803 800 s = self._some_str(value)
804 801 if s:
805 802 list.append('%s%s:%s %s\n' % (stype, Colors.excName,
806 803 Colors.Normal, s))
807 804 else:
808 805 list.append('%s\n' % stype)
809 806
810 807 # sync with user hooks
811 808 if have_filedata:
812 809 ipinst = get_ipython()
813 810 if ipinst is not None:
814 811 ipinst.hooks.synchronize_with_editor(value.filename, value.lineno, 0)
815 812
816 813 return list
817 814
818 815 def get_exception_only(self, etype, value):
819 816 """Only print the exception type and message, without a traceback.
820 817
821 818 Parameters
822 819 ----------
823 820 etype : exception type
824 821 value : exception value
825 822 """
826 823 return ListTB.structured_traceback(self, etype, value)
827 824
828 825 def show_exception_only(self, etype, evalue):
829 826 """Only print the exception type and message, without a traceback.
830 827
831 828 Parameters
832 829 ----------
833 830 etype : exception type
834 831 value : exception value
835 832 """
836 833 # This method needs to use __call__ from *this* class, not the one from
837 834 # a subclass whose signature or behavior may be different
838 835 ostream = self.ostream
839 836 ostream.flush()
840 837 ostream.write('\n'.join(self.get_exception_only(etype, evalue)))
841 838 ostream.flush()
842 839
843 840 def _some_str(self, value):
844 841 # Lifted from traceback.py
845 842 try:
846 843 return py3compat.cast_unicode(str(value))
847 844 except:
848 845 return u'<unprintable %s object>' % type(value).__name__
849 846
850 847
851 848 #----------------------------------------------------------------------------
852 849 class VerboseTB(TBTools):
853 850 """A port of Ka-Ping Yee's cgitb.py module that outputs color text instead
854 851 of HTML. Requires inspect and pydoc. Crazy, man.
855 852
856 853 Modified version which optionally strips the topmost entries from the
857 854 traceback, to be used with alternate interpreters (because their own code
858 855 would appear in the traceback)."""
859 856
860 857 def __init__(self, color_scheme='Linux', call_pdb=False, ostream=None,
861 858 tb_offset=0, long_header=False, include_vars=True,
862 859 check_cache=None, debugger_cls = None,
863 860 parent=None, config=None):
864 861 """Specify traceback offset, headers and color scheme.
865 862
866 863 Define how many frames to drop from the tracebacks. Calling it with
867 864 tb_offset=1 allows use of this handler in interpreters which will have
868 865 their own code at the top of the traceback (VerboseTB will first
869 866 remove that frame before printing the traceback info)."""
870 867 TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
871 868 ostream=ostream, parent=parent, config=config)
872 869 self.tb_offset = tb_offset
873 870 self.long_header = long_header
874 871 self.include_vars = include_vars
875 872 # By default we use linecache.checkcache, but the user can provide a
876 873 # different check_cache implementation. This is used by the IPython
877 874 # kernel to provide tracebacks for interactive code that is cached
878 875 # by a compiler instance that flushes the linecache but preserves its
879 876 # own code cache.
880 877 if check_cache is None:
881 878 check_cache = linecache.checkcache
882 879 self.check_cache = check_cache
883 880
884 881 self.debugger_cls = debugger_cls or debugger.Pdb
885 882
886 883 def format_records(self, records, last_unique, recursion_repeat):
887 884 """Format the stack frames of the traceback"""
888 885 frames = []
889 886 for r in records[:last_unique+recursion_repeat+1]:
890 887 #print '*** record:',file,lnum,func,lines,index # dbg
891 888 frames.append(self.format_record(*r))
892 889
893 890 if recursion_repeat:
894 891 frames.append('... last %d frames repeated, from the frame below ...\n' % recursion_repeat)
895 892 frames.append(self.format_record(*records[last_unique+recursion_repeat+1]))
896 893
897 894 return frames
898 895
899 896 def format_record(self, frame, file, lnum, func, lines, index):
900 897 """Format a single stack frame"""
901 898 Colors = self.Colors # just a shorthand + quicker name lookup
902 899 ColorsNormal = Colors.Normal # used a lot
903 900 col_scheme = self.color_scheme_table.active_scheme_name
904 901 indent = ' ' * INDENT_SIZE
905 902 em_normal = '%s\n%s%s' % (Colors.valEm, indent, ColorsNormal)
906 903 undefined = '%sundefined%s' % (Colors.em, ColorsNormal)
907 904 tpl_link = '%s%%s%s' % (Colors.filenameEm, ColorsNormal)
908 905 tpl_call = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm,
909 906 ColorsNormal)
910 907 tpl_call_fail = 'in %s%%s%s(***failed resolving arguments***)%s' % \
911 908 (Colors.vName, Colors.valEm, ColorsNormal)
912 909 tpl_local_var = '%s%%s%s' % (Colors.vName, ColorsNormal)
913 910 tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal,
914 911 Colors.vName, ColorsNormal)
915 912 tpl_name_val = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal)
916 913
917 914 if not file:
918 915 file = '?'
919 916 elif file.startswith(str("<")) and file.endswith(str(">")):
920 917 # Not a real filename, no problem...
921 918 pass
922 919 elif not os.path.isabs(file):
923 920 # Try to make the filename absolute by trying all
924 921 # sys.path entries (which is also what linecache does)
925 922 for dirname in sys.path:
926 923 try:
927 924 fullname = os.path.join(dirname, file)
928 925 if os.path.isfile(fullname):
929 926 file = os.path.abspath(fullname)
930 927 break
931 928 except Exception:
932 929 # Just in case that sys.path contains very
933 930 # strange entries...
934 931 pass
935 932
936 933 file = py3compat.cast_unicode(file, util_path.fs_encoding)
937 934 link = tpl_link % util_path.compress_user(file)
938 935 args, varargs, varkw, locals_ = inspect.getargvalues(frame)
939 936
940 937 if func == '?':
941 938 call = ''
942 939 elif func == '<module>':
943 940 call = tpl_call % (func, '')
944 941 else:
945 942 # Decide whether to include variable details or not
946 943 var_repr = eqrepr if self.include_vars else nullrepr
947 944 try:
948 945 call = tpl_call % (func, inspect.formatargvalues(args,
949 946 varargs, varkw,
950 947 locals_, formatvalue=var_repr))
951 948 except KeyError:
952 949 # This happens in situations like errors inside generator
953 950 # expressions, where local variables are listed in the
954 951 # line, but can't be extracted from the frame. I'm not
955 952 # 100% sure this isn't actually a bug in inspect itself,
956 953 # but since there's no info for us to compute with, the
957 954 # best we can do is report the failure and move on. Here
958 955 # we must *not* call any traceback construction again,
959 956 # because that would mess up use of %debug later on. So we
960 957 # simply report the failure and move on. The only
961 958 # limitation will be that this frame won't have locals
962 959 # listed in the call signature. Quite subtle problem...
963 960 # I can't think of a good way to validate this in a unit
964 961 # test, but running a script consisting of:
965 962 # dict( (k,v.strip()) for (k,v) in range(10) )
966 963 # will illustrate the error, if this exception catch is
967 964 # disabled.
968 965 call = tpl_call_fail % func
969 966
970 967 # Don't attempt to tokenize binary files.
971 968 if file.endswith(('.so', '.pyd', '.dll')):
972 969 return '%s %s\n' % (link, call)
973 970
974 971 elif file.endswith(('.pyc', '.pyo')):
975 972 # Look up the corresponding source file.
976 973 try:
977 974 file = source_from_cache(file)
978 975 except ValueError:
979 976 # Failed to get the source file for some reason
980 977 # E.g. https://github.com/ipython/ipython/issues/9486
981 978 return '%s %s\n' % (link, call)
982 979
983 980 def linereader(file=file, lnum=[lnum], getline=linecache.getline):
984 981 line = getline(file, lnum[0])
985 982 lnum[0] += 1
986 983 return line
987 984
988 985 # Build the list of names on this line of code where the exception
989 986 # occurred.
990 987 try:
991 988 names = []
992 989 name_cont = False
993 990
994 991 for token_type, token, start, end, line in generate_tokens(linereader):
995 992 # build composite names
996 993 if token_type == tokenize.NAME and token not in keyword.kwlist:
997 994 if name_cont:
998 995 # Continuation of a dotted name
999 996 try:
1000 997 names[-1].append(token)
1001 998 except IndexError:
1002 999 names.append([token])
1003 1000 name_cont = False
1004 1001 else:
1005 1002 # Regular new names. We append everything, the caller
1006 1003 # will be responsible for pruning the list later. It's
1007 1004 # very tricky to try to prune as we go, b/c composite
1008 1005 # names can fool us. The pruning at the end is easy
1009 1006 # to do (or the caller can print a list with repeated
1010 1007 # names if so desired).
1011 1008 names.append([token])
1012 1009 elif token == '.':
1013 1010 name_cont = True
1014 1011 elif token_type == tokenize.NEWLINE:
1015 1012 break
1016 1013
1017 1014 except (IndexError, UnicodeDecodeError, SyntaxError):
1018 1015 # signals exit of tokenizer
1019 1016 # SyntaxError can occur if the file is not actually Python
1020 1017 # - see gh-6300
1021 1018 pass
1022 1019 except tokenize.TokenError as msg:
1023 1020 # Tokenizing may fail for various reasons, many of which are
1024 1021 # harmless. (A good example is when the line in question is the
1025 1022 # close of a triple-quoted string, cf gh-6864). We don't want to
1026 1023 # show this to users, but we want to make it available for debugging
1027 1024 # purposes.
1028 1025 _m = ("An unexpected error occurred while tokenizing input\n"
1029 1026 "The following traceback may be corrupted or invalid\n"
1030 1027 "The error message is: %s\n" % msg)
1031 1028 debug(_m)
1032 1029
1033 1030 # Join composite names (e.g. "dict.fromkeys")
1034 1031 names = ['.'.join(n) for n in names]
1035 1032 # prune names list of duplicates, but keep the right order
1036 1033 unique_names = uniq_stable(names)
1037 1034
1038 1035 # Start loop over vars
1039 1036 lvals = ''
1040 1037 lvals_list = []
1041 1038 if self.include_vars:
1042 1039 for name_full in unique_names:
1043 1040 name_base = name_full.split('.', 1)[0]
1044 1041 if name_base in frame.f_code.co_varnames:
1045 1042 if name_base in locals_:
1046 1043 try:
1047 1044 value = repr(eval(name_full, locals_))
1048 1045 except:
1049 1046 value = undefined
1050 1047 else:
1051 1048 value = undefined
1052 1049 name = tpl_local_var % name_full
1053 1050 else:
1054 1051 if name_base in frame.f_globals:
1055 1052 try:
1056 1053 value = repr(eval(name_full, frame.f_globals))
1057 1054 except:
1058 1055 value = undefined
1059 1056 else:
1060 1057 value = undefined
1061 1058 name = tpl_global_var % name_full
1062 1059 lvals_list.append(tpl_name_val % (name, value))
1063 1060 if lvals_list:
1064 1061 lvals = '%s%s' % (indent, em_normal.join(lvals_list))
1065 1062
1066 1063 level = '%s %s\n' % (link, call)
1067 1064
1068 1065 if index is None:
1069 1066 return level
1070 1067 else:
1071 1068 _line_format = PyColorize.Parser(style=col_scheme, parent=self).format2
1072 1069 return '%s%s' % (level, ''.join(
1073 1070 _format_traceback_lines(lnum, index, lines, Colors, lvals,
1074 1071 _line_format)))
1075 1072
1076 1073 def prepare_header(self, etype, long_version=False):
1077 1074 colors = self.Colors # just a shorthand + quicker name lookup
1078 1075 colorsnormal = colors.Normal # used a lot
1079 1076 exc = '%s%s%s' % (colors.excName, etype, colorsnormal)
1080 1077 width = min(75, get_terminal_size()[0])
1081 1078 if long_version:
1082 1079 # Header with the exception type, python version, and date
1083 1080 pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
1084 1081 date = time.ctime(time.time())
1085 1082
1086 1083 head = '%s%s%s\n%s%s%s\n%s' % (colors.topline, '-' * width, colorsnormal,
1087 1084 exc, ' ' * (width - len(str(etype)) - len(pyver)),
1088 1085 pyver, date.rjust(width) )
1089 1086 head += "\nA problem occurred executing Python code. Here is the sequence of function" \
1090 1087 "\ncalls leading up to the error, with the most recent (innermost) call last."
1091 1088 else:
1092 1089 # Simplified header
1093 1090 head = '%s%s' % (exc, 'Traceback (most recent call last)'. \
1094 1091 rjust(width - len(str(etype))) )
1095 1092
1096 1093 return head
1097 1094
1098 1095 def format_exception(self, etype, evalue):
1099 1096 colors = self.Colors # just a shorthand + quicker name lookup
1100 1097 colorsnormal = colors.Normal # used a lot
1101 1098 # Get (safely) a string form of the exception info
1102 1099 try:
1103 1100 etype_str, evalue_str = map(str, (etype, evalue))
1104 1101 except:
1105 1102 # User exception is improperly defined.
1106 1103 etype, evalue = str, sys.exc_info()[:2]
1107 1104 etype_str, evalue_str = map(str, (etype, evalue))
1108 1105 # ... and format it
1109 1106 return ['%s%s%s: %s' % (colors.excName, etype_str,
1110 1107 colorsnormal, py3compat.cast_unicode(evalue_str))]
1111 1108
1112 1109 def format_exception_as_a_whole(self, etype, evalue, etb, number_of_lines_of_context, tb_offset):
1113 1110 """Formats the header, traceback and exception message for a single exception.
1114 1111
1115 1112 This may be called multiple times by Python 3 exception chaining
1116 1113 (PEP 3134).
1117 1114 """
1118 1115 # some locals
1119 1116 orig_etype = etype
1120 1117 try:
1121 1118 etype = etype.__name__
1122 1119 except AttributeError:
1123 1120 pass
1124 1121
1125 1122 tb_offset = self.tb_offset if tb_offset is None else tb_offset
1126 1123 head = self.prepare_header(etype, self.long_header)
1127 1124 records = self.get_records(etb, number_of_lines_of_context, tb_offset)
1128 1125
1129 1126 if records is None:
1130 1127 return ""
1131 1128
1132 1129 last_unique, recursion_repeat = find_recursion(orig_etype, evalue, records)
1133 1130
1134 1131 frames = self.format_records(records, last_unique, recursion_repeat)
1135 1132
1136 1133 formatted_exception = self.format_exception(etype, evalue)
1137 1134 if records:
1138 1135 filepath, lnum = records[-1][1:3]
1139 1136 filepath = os.path.abspath(filepath)
1140 1137 ipinst = get_ipython()
1141 1138 if ipinst is not None:
1142 1139 ipinst.hooks.synchronize_with_editor(filepath, lnum, 0)
1143 1140
1144 1141 return [[head] + frames + [''.join(formatted_exception[0])]]
1145 1142
1146 1143 def get_records(self, etb, number_of_lines_of_context, tb_offset):
1147 1144 try:
1148 1145 # Try the default getinnerframes and Alex's: Alex's fixes some
1149 1146 # problems, but it generates empty tracebacks for console errors
1150 1147 # (5 blank lines) where none should be returned.
1151 1148 return _fixed_getinnerframes(etb, number_of_lines_of_context, tb_offset)
1152 1149 except UnicodeDecodeError:
1153 1150 # This can occur if a file's encoding magic comment is wrong.
1154 1151 # I can't see a way to recover without duplicating a bunch of code
1155 1152 # from the stdlib traceback module. --TK
1156 1153 error('\nUnicodeDecodeError while processing traceback.\n')
1157 1154 return None
1158 1155 except:
1159 1156 # FIXME: I've been getting many crash reports from python 2.3
1160 1157 # users, traceable to inspect.py. If I can find a small test-case
1161 1158 # to reproduce this, I should either write a better workaround or
1162 1159 # file a bug report against inspect (if that's the real problem).
1163 1160 # So far, I haven't been able to find an isolated example to
1164 1161 # reproduce the problem.
1165 1162 inspect_error()
1166 1163 traceback.print_exc(file=self.ostream)
1167 1164 info('\nUnfortunately, your original traceback can not be constructed.\n')
1168 1165 return None
1169 1166
1170 1167 def structured_traceback(self, etype, evalue, etb, tb_offset=None,
1171 1168 number_of_lines_of_context=5):
1172 1169 """Return a nice text document describing the traceback."""
1173 1170
1174 1171 formatted_exception = self.format_exception_as_a_whole(etype, evalue, etb, number_of_lines_of_context,
1175 1172 tb_offset)
1176 1173
1177 1174 colors = self.Colors # just a shorthand + quicker name lookup
1178 1175 colorsnormal = colors.Normal # used a lot
1179 1176 head = '%s%s%s' % (colors.topline, '-' * min(75, get_terminal_size()[0]), colorsnormal)
1180 1177 structured_traceback_parts = [head]
1181 1178 chained_exceptions_tb_offset = 0
1182 1179 lines_of_context = 3
1183 1180 formatted_exceptions = formatted_exception
1184 1181 exception = self.get_parts_of_chained_exception(evalue)
1185 1182 if exception:
1186 1183 formatted_exceptions += self.prepare_chained_exception_message(evalue.__cause__)
1187 1184 etype, evalue, etb = exception
1188 1185 else:
1189 1186 evalue = None
1190 1187 chained_exc_ids = set()
1191 1188 while evalue:
1192 1189 formatted_exceptions += self.format_exception_as_a_whole(etype, evalue, etb, lines_of_context,
1193 1190 chained_exceptions_tb_offset)
1194 1191 exception = self.get_parts_of_chained_exception(evalue)
1195 1192
1196 1193 if exception and not id(exception[1]) in chained_exc_ids:
1197 1194 chained_exc_ids.add(id(exception[1])) # trace exception to avoid infinite 'cause' loop
1198 1195 formatted_exceptions += self.prepare_chained_exception_message(evalue.__cause__)
1199 1196 etype, evalue, etb = exception
1200 1197 else:
1201 1198 evalue = None
1202 1199
1203 1200 # we want to see exceptions in a reversed order:
1204 1201 # the first exception should be on top
1205 1202 for formatted_exception in reversed(formatted_exceptions):
1206 1203 structured_traceback_parts += formatted_exception
1207 1204
1208 1205 return structured_traceback_parts
1209 1206
1210 1207 def debugger(self, force=False):
1211 1208 """Call up the pdb debugger if desired, always clean up the tb
1212 1209 reference.
1213 1210
1214 1211 Keywords:
1215 1212
1216 1213 - force(False): by default, this routine checks the instance call_pdb
1217 1214 flag and does not actually invoke the debugger if the flag is false.
1218 1215 The 'force' option forces the debugger to activate even if the flag
1219 1216 is false.
1220 1217
1221 1218 If the call_pdb flag is set, the pdb interactive debugger is
1222 1219 invoked. In all cases, the self.tb reference to the current traceback
1223 1220 is deleted to prevent lingering references which hamper memory
1224 1221 management.
1225 1222
1226 1223 Note that each call to pdb() does an 'import readline', so if your app
1227 1224 requires a special setup for the readline completers, you'll have to
1228 1225 fix that by hand after invoking the exception handler."""
1229 1226
1230 1227 if force or self.call_pdb:
1231 1228 if self.pdb is None:
1232 1229 self.pdb = self.debugger_cls()
1233 1230 # the system displayhook may have changed, restore the original
1234 1231 # for pdb
1235 1232 display_trap = DisplayTrap(hook=sys.__displayhook__)
1236 1233 with display_trap:
1237 1234 self.pdb.reset()
1238 1235 # Find the right frame so we don't pop up inside ipython itself
1239 1236 if hasattr(self, 'tb') and self.tb is not None:
1240 1237 etb = self.tb
1241 1238 else:
1242 1239 etb = self.tb = sys.last_traceback
1243 1240 while self.tb is not None and self.tb.tb_next is not None:
1244 1241 self.tb = self.tb.tb_next
1245 1242 if etb and etb.tb_next:
1246 1243 etb = etb.tb_next
1247 1244 self.pdb.botframe = etb.tb_frame
1248 1245 self.pdb.interaction(None, etb)
1249 1246
1250 1247 if hasattr(self, 'tb'):
1251 1248 del self.tb
1252 1249
1253 1250 def handler(self, info=None):
1254 1251 (etype, evalue, etb) = info or sys.exc_info()
1255 1252 self.tb = etb
1256 1253 ostream = self.ostream
1257 1254 ostream.flush()
1258 1255 ostream.write(self.text(etype, evalue, etb))
1259 1256 ostream.write('\n')
1260 1257 ostream.flush()
1261 1258
1262 1259 # Changed so an instance can just be called as VerboseTB_inst() and print
1263 1260 # out the right info on its own.
1264 1261 def __call__(self, etype=None, evalue=None, etb=None):
1265 1262 """This hook can replace sys.excepthook (for Python 2.1 or higher)."""
1266 1263 if etb is None:
1267 1264 self.handler()
1268 1265 else:
1269 1266 self.handler((etype, evalue, etb))
1270 1267 try:
1271 1268 self.debugger()
1272 1269 except KeyboardInterrupt:
1273 1270 print("\nKeyboardInterrupt")
1274 1271
1275 1272
1276 1273 #----------------------------------------------------------------------------
1277 1274 class FormattedTB(VerboseTB, ListTB):
1278 1275 """Subclass ListTB but allow calling with a traceback.
1279 1276
1280 1277 It can thus be used as a sys.excepthook.
1281 1278
1282 1279 Also adds 'Context' and 'Verbose' modes, not available in ListTB.
1283 1280
1284 1281 Allows a tb_offset to be specified. This is useful for situations where
1285 1282 one needs to remove a number of topmost frames from the traceback (such as
1286 1283 occurs with python programs that themselves execute other python code,
1287 1284 like Python shells). """
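# Illustrative sketch (editorial addition): a formatter that hides one
# wrapper frame and starts in 'Context' mode might be built like this
# (the offset of 1 is just an example value):
#
#     ftb = FormattedTB(mode='Context', color_scheme='NoColor', tb_offset=1)
#     text = ftb.text(*sys.exc_info())   # called from inside an except block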
1288 1285
1289 1286 def __init__(self, mode='Plain', color_scheme='Linux', call_pdb=False,
1290 1287 ostream=None,
1291 1288 tb_offset=0, long_header=False, include_vars=False,
1292 1289 check_cache=None, debugger_cls=None,
1293 1290 parent=None, config=None):
1294 1291
1295 1292 # NEVER change the order of this list. Put new modes at the end:
1296 1293 self.valid_modes = ['Plain', 'Context', 'Verbose', 'Minimal']
1297 1294 self.verbose_modes = self.valid_modes[1:3]
1298 1295
1299 1296 VerboseTB.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
1300 1297 ostream=ostream, tb_offset=tb_offset,
1301 1298 long_header=long_header, include_vars=include_vars,
1302 1299 check_cache=check_cache, debugger_cls=debugger_cls,
1303 1300 parent=parent, config=config)
1304 1301
1305 1302 # Different types of tracebacks are joined with different separators to
1306 1303 # form a single string. They are taken from this dict
1307 1304 self._join_chars = dict(Plain='', Context='\n', Verbose='\n',
1308 1305 Minimal='')
1309 1306 # set_mode also sets the tb_join_char attribute
1310 1307 self.set_mode(mode)
1311 1308
1312 1309 def structured_traceback(self, etype, value, tb, tb_offset=None, number_of_lines_of_context=5):
1313 1310 tb_offset = self.tb_offset if tb_offset is None else tb_offset
1314 1311 mode = self.mode
1315 1312 if mode in self.verbose_modes:
1316 1313 # Verbose modes need a full traceback
1317 1314 return VerboseTB.structured_traceback(
1318 1315 self, etype, value, tb, tb_offset, number_of_lines_of_context
1319 1316 )
1320 1317 elif mode == 'Minimal':
1321 1318 return ListTB.get_exception_only(self, etype, value)
1322 1319 else:
1323 1320 # We must check the source cache because otherwise we can print
1324 1321 # out-of-date source code.
1325 1322 self.check_cache()
1326 1323 # Now we can extract and format the exception
1327 1324 return ListTB.structured_traceback(
1328 1325 self, etype, value, tb, tb_offset, number_of_lines_of_context
1329 1326 )
1330 1327
1331 1328 def stb2text(self, stb):
1332 1329 """Convert a structured traceback (a list) to a string."""
1333 1330 return self.tb_join_char.join(stb)
1334 1331
1335 1332
1336 1333 def set_mode(self, mode=None):
1337 1334 """Switch to the desired mode.
1338 1335
1339 1336 If mode is not specified, cycles through the available modes."""
1340 1337
1341 1338 if not mode:
1342 1339 new_idx = (self.valid_modes.index(self.mode) + 1 ) % \
1343 1340 len(self.valid_modes)
1344 1341 self.mode = self.valid_modes[new_idx]
1345 1342 elif mode not in self.valid_modes:
1346 1343 raise ValueError('Unrecognized mode in FormattedTB: <' + mode + '>\n'
1347 1344 'Valid modes: ' + str(self.valid_modes))
1348 1345 else:
1349 1346 self.mode = mode
1350 1347 # include variable details only in 'Verbose' mode
1351 1348 self.include_vars = (self.mode == self.valid_modes[2])
1352 1349 # Set the join character for generating text tracebacks
1353 1350 self.tb_join_char = self._join_chars[self.mode]
1354 1351
1355 1352 # some convenient shortcuts
1356 1353 def plain(self):
1357 1354 self.set_mode(self.valid_modes[0])
1358 1355
1359 1356 def context(self):
1360 1357 self.set_mode(self.valid_modes[1])
1361 1358
1362 1359 def verbose(self):
1363 1360 self.set_mode(self.valid_modes[2])
1364 1361
1365 1362 def minimal(self):
1366 1363 self.set_mode(self.valid_modes[3])
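# Mode-switching sketch (editorial addition): the shortcuts above are thin
# wrappers around set_mode(), so verbosity can be toggled at runtime, e.g.
#
#     ftb = FormattedTB(mode='Plain', color_scheme='NoColor')
#     ftb.verbose()      # full frames, including local variables
#     ftb.set_mode()     # no argument: cycle to the next valid mode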
1367 1364
1368 1365
1369 1366 #----------------------------------------------------------------------------
1370 1367 class AutoFormattedTB(FormattedTB):
1371 1368 """A traceback printer which can be called on the fly.
1372 1369
1373 1370 It will find out about exceptions by itself.
1374 1371
1375 1372 A brief example::
1376 1373
1377 1374 AutoTB = AutoFormattedTB(mode = 'Verbose',color_scheme='Linux')
1378 1375 try:
1379 1376 ...
1380 1377 except:
1381 1378 AutoTB() # or AutoTB(out=logfile) where logfile is an open file object
1382 1379 """
1383 1380
1384 1381 def __call__(self, etype=None, evalue=None, etb=None,
1385 1382 out=None, tb_offset=None):
1386 1383 """Print out a formatted exception traceback.
1387 1384
1388 1385 Optional arguments:
1389 1386 - out: an open file-like object to direct output to.
1390 1387
1391 1388 - tb_offset: the number of frames to skip over in the stack, on a
1392 1389 per-call basis (this temporarily overrides the instance's tb_offset
1393 1390 given at initialization time)."""
1394 1391
1395 1392 if out is None:
1396 1393 out = self.ostream
1397 1394 out.flush()
1398 1395 out.write(self.text(etype, evalue, etb, tb_offset))
1399 1396 out.write('\n')
1400 1397 out.flush()
1401 1398 # FIXME: we should remove the auto pdb behavior from here and leave
1402 1399 # that to the clients.
1403 1400 try:
1404 1401 self.debugger()
1405 1402 except KeyboardInterrupt:
1406 1403 print("\nKeyboardInterrupt")
1407 1404
1408 1405 def structured_traceback(self, etype=None, value=None, tb=None,
1409 1406 tb_offset=None, number_of_lines_of_context=5):
1410 1407 if etype is None:
1411 1408 etype, value, tb = sys.exc_info()
1412 1409 if isinstance(tb, tuple):
1413 1410 # tb is a tuple if this is a chained exception.
1414 1411 self.tb = tb[0]
1415 1412 else:
1416 1413 self.tb = tb
1417 1414 return FormattedTB.structured_traceback(
1418 1415 self, etype, value, tb, tb_offset, number_of_lines_of_context)
1419 1416
1420 1417
1421 1418 #---------------------------------------------------------------------------
1422 1419
1423 1420 # A simple class to preserve Nathan's original functionality.
1424 1421 class ColorTB(FormattedTB):
1425 1422 """Shorthand to initialize a FormattedTB in Linux colors mode."""
1426 1423
1427 1424 def __init__(self, color_scheme='Linux', call_pdb=0, **kwargs):
1428 1425 FormattedTB.__init__(self, color_scheme=color_scheme,
1429 1426 call_pdb=call_pdb, **kwargs)
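# Usage sketch (editorial addition): ColorTB is a convenient drop-in
# replacement for the default traceback printer in plain scripts, assuming
# this module is importable as IPython.core.ultratb:
#
#     import sys
#     from IPython.core import ultratb
#     sys.excepthook = ultratb.ColorTB(color_scheme='Linux', call_pdb=False)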
1430 1427
1431 1428
1432 1429 class SyntaxTB(ListTB):
1433 1430 """Extension which holds some state: the last exception value"""
1434 1431
1435 1432 def __init__(self, color_scheme='NoColor', parent=None, config=None):
1436 1433 ListTB.__init__(self, color_scheme, parent=parent, config=config)
1437 1434 self.last_syntax_error = None
1438 1435
1439 1436 def __call__(self, etype, value, elist):
1440 1437 self.last_syntax_error = value
1441 1438
1442 1439 ListTB.__call__(self, etype, value, elist)
1443 1440
1444 1441 def structured_traceback(self, etype, value, elist, tb_offset=None,
1445 1442 context=5):
1446 1443 # If the source file has been edited, the line in the syntax error can
1447 1444 # be wrong (retrieved from an outdated cache). This replaces it with
1448 1445 # the current value.
1449 1446 if isinstance(value, SyntaxError) \
1450 1447 and isinstance(value.filename, str) \
1451 1448 and isinstance(value.lineno, int):
1452 1449 linecache.checkcache(value.filename)
1453 1450 newtext = linecache.getline(value.filename, value.lineno)
1454 1451 if newtext:
1455 1452 value.text = newtext
1456 1453 self.last_syntax_error = value
1457 1454 return super(SyntaxTB, self).structured_traceback(etype, value, elist,
1458 1455 tb_offset=tb_offset, context=context)
1459 1456
1460 1457 def clear_err_state(self):
1461 1458 """Return the current error state and clear it"""
1462 1459 e = self.last_syntax_error
1463 1460 self.last_syntax_error = None
1464 1461 return e
1465 1462
1466 1463 def stb2text(self, stb):
1467 1464 """Convert a structured traceback (a list) to a string."""
1468 1465 return ''.join(stb)
1469 1466
1470 1467
1471 1468 # some internal-use functions
1472 1469 def text_repr(value):
1473 1470 """Hopefully pretty robust repr equivalent."""
1474 1471 # this is pretty horrible but should always return *something*
1475 1472 try:
1476 1473 return pydoc.text.repr(value)
1477 1474 except KeyboardInterrupt:
1478 1475 raise
1479 1476 except:
1480 1477 try:
1481 1478 return repr(value)
1482 1479 except KeyboardInterrupt:
1483 1480 raise
1484 1481 except:
1485 1482 try:
1486 1483 # all still in an except block so we catch
1487 1484 # getattr raising
1488 1485 name = getattr(value, '__name__', None)
1489 1486 if name:
1490 1487 # ick, recursion
1491 1488 return text_repr(name)
1492 1489 klass = getattr(value, '__class__', None)
1493 1490 if klass:
1494 1491 return '%s instance' % text_repr(klass)
1495 1492 except KeyboardInterrupt:
1496 1493 raise
1497 1494 except:
1498 1495 return 'UNRECOVERABLE REPR FAILURE'
1499 1496
1500 1497
1501 1498 def eqrepr(value, repr=text_repr):
1502 1499 return '=%s' % repr(value)
1503 1500
1504 1501
1505 1502 def nullrepr(value, repr=text_repr):
1506 1503 return ''
@@ -1,347 +1,341
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 Provides a reload() function that acts recursively.
4 4
5 5 Python's normal :func:`python:reload` function only reloads the module that it's
6 6 passed. The :func:`reload` function in this module also reloads everything
7 7 imported from that module, which is useful when you're changing files deep
8 8 inside a package.
9 9
10 To use this as your default reload function, type this for Python 2::
11
12 import __builtin__
13 from IPython.lib import deepreload
14 __builtin__.reload = deepreload.reload
15
16 Or this for Python 3::
10 To use this as your default reload function, type this::
17 11
18 12 import builtins
19 13 from IPython.lib import deepreload
20 14 builtins.reload = deepreload.reload
21 15
22 16 A reference to the original :func:`python:reload` is stored in this module as
23 17 :data:`original_reload`, so you can restore it later.
24 18
25 19 This code is almost entirely based on knee.py, which is a Python
26 20 re-implementation of hierarchical module import.
27 21 """
28 22 #*****************************************************************************
29 23 # Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu>
30 24 #
31 25 # Distributed under the terms of the BSD License. The full license is in
32 26 # the file COPYING, distributed as part of this software.
33 27 #*****************************************************************************
34 28
35 29 import builtins as builtin_mod
36 30 from contextlib import contextmanager
37 31 import imp
38 32 import sys
39 33
40 34 from types import ModuleType
41 35 from warnings import warn
42 36 import types
43 37
44 38 original_import = builtin_mod.__import__
45 39
46 40 @contextmanager
47 41 def replace_import_hook(new_import):
48 42 saved_import = builtin_mod.__import__
49 43 builtin_mod.__import__ = new_import
50 44 try:
51 45 yield
52 46 finally:
53 47 builtin_mod.__import__ = saved_import
54 48
55 49 def get_parent(globals, level):
56 50 """
57 51 parent, name = get_parent(globals, level)
58 52
59 53 Return the package that an import is being performed in. If globals comes
60 54 from the module foo.bar.bat (not itself a package), this returns the
61 55 sys.modules entry for foo.bar. If globals is from a package's __init__.py,
62 56 the package's entry in sys.modules is returned.
63 57
64 58 If globals doesn't come from a package or a module in a package, or a
65 59 corresponding entry is not found in sys.modules, None is returned.
66 60 """
67 61 orig_level = level
68 62
69 63 if not level or not isinstance(globals, dict):
70 64 return None, ''
71 65
72 66 pkgname = globals.get('__package__', None)
73 67
74 68 if pkgname is not None:
75 69 # __package__ is set, so use it
76 70 if not hasattr(pkgname, 'rindex'):
77 71 raise ValueError('__package__ set to non-string')
78 72 if len(pkgname) == 0:
79 73 if level > 0:
80 74 raise ValueError('Attempted relative import in non-package')
81 75 return None, ''
82 76 name = pkgname
83 77 else:
84 78 # __package__ not set, so figure it out and set it
85 79 if '__name__' not in globals:
86 80 return None, ''
87 81 modname = globals['__name__']
88 82
89 83 if '__path__' in globals:
90 84 # __path__ is set, so modname is already the package name
91 85 globals['__package__'] = name = modname
92 86 else:
93 87 # Normal module, so work out the package name if any
94 88 lastdot = modname.rfind('.')
95 89 if lastdot < 0 < level:
96 90 raise ValueError("Attempted relative import in non-package")
97 91 if lastdot < 0:
98 92 globals['__package__'] = None
99 93 return None, ''
100 94 globals['__package__'] = name = modname[:lastdot]
101 95
102 96 dot = len(name)
103 97 for x in range(level, 1, -1):
104 98 try:
105 99 dot = name.rindex('.', 0, dot)
106 100 except ValueError:
107 101 raise ValueError("attempted relative import beyond top-level "
108 102 "package")
109 103 name = name[:dot]
110 104
111 105 try:
112 106 parent = sys.modules[name]
113 107 except:
114 108 if orig_level < 1:
115 109 warn("Parent module '%.200s' not found while handling absolute "
116 110 "import" % name)
117 111 parent = None
118 112 else:
119 113 raise SystemError("Parent module '%.200s' not loaded, cannot "
120 114 "perform relative import" % name)
121 115
122 116 # We expect, but can't guarantee, if parent != None, that:
123 117 # - parent.__name__ == name
124 118 # - parent.__dict__ is globals
125 119 # If this is violated... Who cares?
126 120 return parent, name
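# Worked example (editorial note): if ``globals`` comes from a module named
# 'foo.bar.bat' and level == 1, ``__package__`` resolves to 'foo.bar' and the
# call returns (sys.modules['foo.bar'], 'foo.bar'); with level == 2 one more
# trailing component is stripped, giving (sys.modules['foo'], 'foo').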
127 121
128 122 def load_next(mod, altmod, name, buf):
129 123 """
130 124 mod, name, buf = load_next(mod, altmod, name, buf)
131 125
132 126 altmod is either None or same as mod
133 127 """
134 128
135 129 if len(name) == 0:
136 130 # completely empty module name should only happen in
137 131 # 'from . import' (or '__import__("")')
138 132 return mod, None, buf
139 133
140 134 dot = name.find('.')
141 135 if dot == 0:
142 136 raise ValueError('Empty module name')
143 137
144 138 if dot < 0:
145 139 subname = name
146 140 next = None
147 141 else:
148 142 subname = name[:dot]
149 143 next = name[dot+1:]
150 144
151 145 if buf != '':
152 146 buf += '.'
153 147 buf += subname
154 148
155 149 result = import_submodule(mod, subname, buf)
156 150 if result is None and mod != altmod:
157 151 result = import_submodule(altmod, subname, subname)
158 152 if result is not None:
159 153 buf = subname
160 154
161 155 if result is None:
162 156 raise ImportError("No module named %.200s" % name)
163 157
164 158 return result, next, buf
165 159
166 160
167 161 # Need to keep track of what we've already reloaded to prevent cyclic evil
168 162 found_now = {}
169 163
170 164 def import_submodule(mod, subname, fullname):
171 165 """m = import_submodule(mod, subname, fullname)"""
172 166 # Require:
173 167 # if mod == None: subname == fullname
174 168 # else: mod.__name__ + "." + subname == fullname
175 169
176 170 global found_now
177 171 if fullname in found_now and fullname in sys.modules:
178 172 m = sys.modules[fullname]
179 173 else:
180 174 print('Reloading', fullname)
181 175 found_now[fullname] = 1
182 176 oldm = sys.modules.get(fullname, None)
183 177
184 178 if mod is None:
185 179 path = None
186 180 elif hasattr(mod, '__path__'):
187 181 path = mod.__path__
188 182 else:
189 183 return None
190 184
191 185 try:
192 186 # This appears to be necessary on Python 3, because imp.find_module()
193 187 # tries to import standard libraries (like io) itself, and we don't
194 188 # want them to be processed by our deep_import_hook.
195 189 with replace_import_hook(original_import):
196 190 fp, filename, stuff = imp.find_module(subname, path)
197 191 except ImportError:
198 192 return None
199 193
200 194 try:
201 195 m = imp.load_module(fullname, fp, filename, stuff)
202 196 except:
203 197 # load_module probably removed name from modules because of
204 198 # the error. Put back the original module object.
205 199 if oldm:
206 200 sys.modules[fullname] = oldm
207 201 raise
208 202 finally:
209 203 if fp: fp.close()
210 204
211 205 add_submodule(mod, m, fullname, subname)
212 206
213 207 return m
214 208
215 209 def add_submodule(mod, submod, fullname, subname):
216 210 """mod.{subname} = submod"""
217 211 if mod is None:
218 212 return #Nothing to do here.
219 213
220 214 if submod is None:
221 215 submod = sys.modules[fullname]
222 216
223 217 setattr(mod, subname, submod)
224 218
225 219 return
226 220
227 221 def ensure_fromlist(mod, fromlist, buf, recursive):
228 222 """Handle 'from module import a, b, c' imports."""
229 223 if not hasattr(mod, '__path__'):
230 224 return
231 225 for item in fromlist:
232 226 if not hasattr(item, 'rindex'):
233 227 raise TypeError("Item in ``from list'' not a string")
234 228 if item == '*':
235 229 if recursive:
236 230 continue # avoid endless recursion
237 231 try:
238 232 all = mod.__all__
239 233 except AttributeError:
240 234 pass
241 235 else:
242 236 ret = ensure_fromlist(mod, all, buf, 1)
243 237 if not ret:
244 238 return 0
245 239 elif not hasattr(mod, item):
246 240 import_submodule(mod, item, buf + '.' + item)
247 241
248 242 def deep_import_hook(name, globals=None, locals=None, fromlist=None, level=-1):
249 243 """Replacement for __import__()"""
250 244 parent, buf = get_parent(globals, level)
251 245
252 246 head, name, buf = load_next(parent, None if level < 0 else parent, name, buf)
253 247
254 248 tail = head
255 249 while name:
256 250 tail, name, buf = load_next(tail, tail, name, buf)
257 251
258 252 # If tail is None, both get_parent and load_next found
259 253 # an empty module name: someone called __import__("") or
260 254 # doctored faulty bytecode
261 255 if tail is None:
262 256 raise ValueError('Empty module name')
263 257
264 258 if not fromlist:
265 259 return head
266 260
267 261 ensure_fromlist(tail, fromlist, buf, 0)
268 262 return tail
269 263
270 264 modules_reloading = {}
271 265
272 266 def deep_reload_hook(m):
273 267 """Replacement for reload()."""
274 268 # Hardcode this one as it would raise a NotImplementedError from the
275 269 # bowels of Python and screw up the import machinery afterwards.
276 270 # Unlike other imports, the `exclude` list already in place is not enough.
277 271
278 272 if m is types:
279 273 return m
280 274 if not isinstance(m, ModuleType):
281 275 raise TypeError("reload() argument must be module")
282 276
283 277 name = m.__name__
284 278
285 279 if name not in sys.modules:
286 280 raise ImportError("reload(): module %.200s not in sys.modules" % name)
287 281
288 282 global modules_reloading
289 283 try:
290 284 return modules_reloading[name]
291 285 except:
292 286 modules_reloading[name] = m
293 287
294 288 dot = name.rfind('.')
295 289 if dot < 0:
296 290 subname = name
297 291 path = None
298 292 else:
299 293 try:
300 294 parent = sys.modules[name[:dot]]
301 295 except KeyError:
302 296 modules_reloading.clear()
303 297 raise ImportError("reload(): parent %.200s not in sys.modules" % name[:dot])
304 298 subname = name[dot+1:]
305 299 path = getattr(parent, "__path__", None)
306 300
307 301 try:
308 302 # This appears to be necessary on Python 3, because imp.find_module()
309 303 # tries to import standard libraries (like io) itself, and we don't
310 304 # want them to be processed by our deep_import_hook.
311 305 with replace_import_hook(original_import):
312 306 fp, filename, stuff = imp.find_module(subname, path)
313 307 finally:
314 308 modules_reloading.clear()
315 309
316 310 try:
317 311 newm = imp.load_module(name, fp, filename, stuff)
318 312 except:
319 313 # load_module probably removed name from modules because of
320 314 # the error. Put back the original module object.
321 315 sys.modules[name] = m
322 316 raise
323 317 finally:
324 318 if fp: fp.close()
325 319
326 320 modules_reloading.clear()
327 321 return newm
328 322
329 323 # Save the original hooks
330 324 original_reload = imp.reload
331 325
332 326 # Replacement for reload()
333 327 def reload(module, exclude=('sys', 'os.path', 'builtins', '__main__',
334 328 'numpy', 'numpy._globals')):
335 329 """Recursively reload all modules used in the given module. Optionally
336 330 takes a list of modules to exclude from reloading. The default exclude
337 331 list contains sys, os.path, builtins, and __main__, to prevent, e.g., resetting
338 332 display, exception, and io hooks.
339 333 """
340 334 global found_now
341 335 for i in exclude:
342 336 found_now[i] = 1
343 337 try:
344 338 with replace_import_hook(deep_import_hook):
345 339 return deep_reload_hook(module)
346 340 finally:
347 341 found_now = {}
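# Usage sketch (editorial addition): besides installing it as builtins.reload
# (see the module docstring), the function can be called directly; the module
# names below are hypothetical:
#
#     from IPython.lib import deepreload
#     import mypackage.mymodule as mymodule
#     deepreload.reload(mymodule)    # reloads mymodule and everything it imports
#     deepreload.reload(mymodule, exclude=('sys', 'os.path', 'builtins',
#                                          '__main__', 'mypackage.config'))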
@@ -1,532 +1,532
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 Defines a variety of Pygments lexers for highlighting IPython code.
4 4
5 5 This includes:
6 6
7 7 IPythonLexer, IPython3Lexer
8 8 Lexers for pure IPython (python + magic/shell commands)
9 9
10 10 IPythonPartialTracebackLexer, IPythonTracebackLexer
11 11 Supports 2.x and 3.x via keyword `python3`. The partial traceback
12 12 lexer reads everything but the Python code appearing in a traceback.
13 13 The full lexer combines the partial lexer with an IPython lexer.
14 14
15 15 IPythonConsoleLexer
16 16 A lexer for IPython console sessions, with support for tracebacks.
17 17
18 18 IPyLexer
19 19 A friendly lexer which examines the first line of text and from it,
20 20 decides whether to use an IPython lexer or an IPython console lexer.
21 21 This is probably the only lexer that needs to be explicitly added
22 22 to Pygments.
23 23
24 24 """
25 25 #-----------------------------------------------------------------------------
26 26 # Copyright (c) 2013, the IPython Development Team.
27 27 #
28 28 # Distributed under the terms of the Modified BSD License.
29 29 #
30 30 # The full license is in the file COPYING.txt, distributed with this software.
31 31 #-----------------------------------------------------------------------------
32 32
33 33 # Standard library
34 34 import re
35 35
36 36 # Third party
37 37 from pygments.lexers import (
38 38 BashLexer, HtmlLexer, JavascriptLexer, RubyLexer, PerlLexer, PythonLexer,
39 39 Python3Lexer, TexLexer)
40 40 from pygments.lexer import (
41 41 Lexer, DelegatingLexer, RegexLexer, do_insertions, bygroups, using,
42 42 )
43 43 from pygments.token import (
44 44 Generic, Keyword, Literal, Name, Operator, Other, Text, Error,
45 45 )
46 46 from pygments.util import get_bool_opt
47 47
48 48 # Local
49 49
50 50 line_re = re.compile('.*?\n')
51 51
52 52 __all__ = ['build_ipy_lexer', 'IPython3Lexer', 'IPythonLexer',
53 53 'IPythonPartialTracebackLexer', 'IPythonTracebackLexer',
54 54 'IPythonConsoleLexer', 'IPyLexer']
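# Usage sketch (editorial addition): the lexers below plug into the standard
# Pygments API; IPyLexer (defined at the end of this module) picks between the
# code and console lexers based on the first line of input. Default options
# are assumed here:
#
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#
#     session = "In [1]: %timeit sum(range(10))\n"
#     print(highlight(session, IPyLexer(), TerminalFormatter()))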
55 55
56 56
57 57 def build_ipy_lexer(python3):
58 58 """Builds IPython lexers depending on the value of `python3`.
59 59
60 60 The lexer inherits from an appropriate Python lexer and then adds
61 61 information about IPython specific keywords (i.e. magic commands,
62 62 shell commands, etc.)
63 63
64 64 Parameters
65 65 ----------
66 66 python3 : bool
67 67 If `True`, then build an IPython lexer from a Python 3 lexer.
68 68
69 69 """
70 70 # It would be nice to have a single IPython lexer class which takes
71 71 # a boolean `python3`. But since there are two Python lexer classes,
72 72 # we will also have two IPython lexer classes.
73 73 if python3:
74 74 PyLexer = Python3Lexer
75 75 name = 'IPython3'
76 76 aliases = ['ipython3']
77 77 doc = """IPython3 Lexer"""
78 78 else:
79 79 PyLexer = PythonLexer
80 80 name = 'IPython'
81 81 aliases = ['ipython2', 'ipython']
82 82 doc = """IPython Lexer"""
83 83
84 84 ipython_tokens = [
85 85 (r'(?s)(\s*)(%%capture)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
86 86 (r'(?s)(\s*)(%%debug)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
87 87 (r'(?is)(\s*)(%%html)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(HtmlLexer))),
88 88 (r'(?s)(\s*)(%%javascript)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
89 89 (r'(?s)(\s*)(%%js)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
90 90 (r'(?s)(\s*)(%%latex)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(TexLexer))),
91 91 (r'(?s)(\s*)(%%perl)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PerlLexer))),
92 92 (r'(?s)(\s*)(%%prun)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
93 93 (r'(?s)(\s*)(%%pypy)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
94 94 (r'(?s)(\s*)(%%python)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
95 95 (r'(?s)(\s*)(%%python2)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PythonLexer))),
96 96 (r'(?s)(\s*)(%%python3)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
97 97 (r'(?s)(\s*)(%%ruby)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(RubyLexer))),
98 98 (r'(?s)(\s*)(%%time)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
99 99 (r'(?s)(\s*)(%%timeit)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
100 100 (r'(?s)(\s*)(%%writefile)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
101 101 (r'(?s)(\s*)(%%file)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
102 102 (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
103 103 (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
104 104 (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
105 105 (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
106 106 (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
107 107 using(BashLexer), Text)),
108 108 (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)),
109 109 (r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
110 110 (r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
111 111 (r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)),
112 112 (r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)),
113 113 ]
114 114
115 115 tokens = PyLexer.tokens.copy()
116 116 tokens['root'] = ipython_tokens + tokens['root']
117 117
118 118 attrs = {'name': name, 'aliases': aliases, 'filenames': [],
119 119 '__doc__': doc, 'tokens': tokens}
120 120
121 121 return type(name, (PyLexer,), attrs)
122 122
123 123
124 124 IPython3Lexer = build_ipy_lexer(python3=True)
125 125 IPythonLexer = build_ipy_lexer(python3=False)
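# Quick sketch (editorial addition): the generated classes behave like any
# other Pygments lexer, so IPython-flavoured source can be tokenized or
# highlighted directly, e.g.
#
#     from pygments import highlight
#     from pygments.formatters import HtmlFormatter
#
#     snippet = "!ls\n%matplotlib inline\nx = 1\n"
#     html = highlight(snippet, IPython3Lexer(), HtmlFormatter())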
126 126
127 127
128 128 class IPythonPartialTracebackLexer(RegexLexer):
129 129 """
130 130 Partial lexer for IPython tracebacks.
131 131
132 Handles all the non-python output. This works for both Python 2.x and 3.x.
132 Handles all the non-python output.
133 133
134 134 """
135 135 name = 'IPython Partial Traceback'
136 136
137 137 tokens = {
138 138 'root': [
139 139 # Tracebacks for syntax errors have a different style.
140 140 # For both types of tracebacks, we mark the first line with
141 141 # Generic.Traceback. For syntax errors, we mark the filename in
142 142 # the same way that we mark the filenames for non-syntax tracebacks.
143 143 #
144 144 # These two regexps define how IPythonConsoleLexer finds a
145 145 # traceback.
146 146 #
147 147 ## Non-syntax traceback
148 148 (r'^(\^C)?(-+\n)', bygroups(Error, Generic.Traceback)),
149 149 ## Syntax traceback
150 150 (r'^( File)(.*)(, line )(\d+\n)',
151 151 bygroups(Generic.Traceback, Name.Namespace,
152 152 Generic.Traceback, Literal.Number.Integer)),
153 153
154 154 # (Exception Identifier)(Whitespace)(Traceback Message)
155 155 (r'(?u)(^[^\d\W]\w*)(\s*)(Traceback.*?\n)',
156 156 bygroups(Name.Exception, Generic.Whitespace, Text)),
157 157 # (Module/Filename)(Text)(Callee)(Function Signature)
158 158 # Better options for callee and function signature?
159 159 (r'(.*)( in )(.*)(\(.*\)\n)',
160 160 bygroups(Name.Namespace, Text, Name.Entity, Name.Tag)),
161 161 # Regular line: (Whitespace)(Line Number)(Python Code)
162 162 (r'(\s*?)(\d+)(.*?\n)',
163 163 bygroups(Generic.Whitespace, Literal.Number.Integer, Other)),
164 164 # Emphasized line: (Arrow)(Line Number)(Python Code)
165 165 # Using Exception token so arrow color matches the Exception.
166 166 (r'(-*>?\s?)(\d+)(.*?\n)',
167 167 bygroups(Name.Exception, Literal.Number.Integer, Other)),
168 168 # (Exception Identifier)(Message)
169 169 (r'(?u)(^[^\d\W]\w*)(:.*?\n)',
170 170 bygroups(Name.Exception, Text)),
171 171 # Tag everything else as Other, will be handled later.
172 172 (r'.*\n', Other),
173 173 ],
174 174 }
175 175
176 176
177 177 class IPythonTracebackLexer(DelegatingLexer):
178 178 """
179 179 IPython traceback lexer.
180 180
181 181 For doctests, the tracebacks can be snipped as much as desired, with the
182 182 exception of the lines that designate a traceback. For non-syntax error
183 183 tracebacks, this is the line of hyphens. For syntax error tracebacks,
184 184 this is the line which lists the File and line number.
185 185
186 186 """
187 187 # The lexer inherits from DelegatingLexer. The "root" lexer is an
188 188 # appropriate IPython lexer, which depends on the value of the boolean
189 189 # `python3`. First, we parse with the partial IPython traceback lexer.
190 190 # Then, any code marked with the "Other" token is delegated to the root
191 191 # lexer.
192 192 #
193 193 name = 'IPython Traceback'
194 194 aliases = ['ipythontb']
195 195
196 196 def __init__(self, **options):
197 197 self.python3 = get_bool_opt(options, 'python3', False)
198 198 if self.python3:
199 199 self.aliases = ['ipython3tb']
200 200 else:
201 201 self.aliases = ['ipython2tb', 'ipythontb']
202 202
203 203 if self.python3:
204 204 IPyLexer = IPython3Lexer
205 205 else:
206 206 IPyLexer = IPythonLexer
207 207
208 208 DelegatingLexer.__init__(self, IPyLexer,
209 209 IPythonPartialTracebackLexer, **options)
210 210
211 211 class IPythonConsoleLexer(Lexer):
212 212 """
213 213 An IPython console lexer for IPython code-blocks and doctests, such as:
214 214
215 215 .. code-block:: rst
216 216
217 217 .. code-block:: ipythonconsole
218 218
219 219 In [1]: a = 'foo'
220 220
221 221 In [2]: a
222 222 Out[2]: 'foo'
223 223
224 224 In [3]: print a
225 225 foo
226 226
227 227 In [4]: 1 / 0
228 228
229 229
230 230 Support is also provided for IPython exceptions:
231 231
232 232 .. code-block:: rst
233 233
234 234 .. code-block:: ipythonconsole
235 235
236 236 In [1]: raise Exception
237 237
238 238 ---------------------------------------------------------------------------
239 239 Exception Traceback (most recent call last)
240 240 <ipython-input-1-fca2ab0ca76b> in <module>
241 241 ----> 1 raise Exception
242 242
243 243 Exception:
244 244
245 245 """
246 246 name = 'IPython console session'
247 247 aliases = ['ipythonconsole']
248 248 mimetypes = ['text/x-ipython-console']
249 249
250 250 # The regexps used to determine what is input and what is output.
251 251 # The default prompts for IPython are:
252 252 #
253 253 # in = 'In [#]: '
254 254 # continuation = ' .D.: '
255 255 # template = 'Out[#]: '
256 256 #
257 257 # Where '#' is the 'prompt number' or 'execution count' and 'D'
258 258 # is a number of dots matching the width of the execution count.
259 259 #
260 260 in1_regex = r'In \[[0-9]+\]: '
261 261 in2_regex = r' \.\.+\.: '
262 262 out_regex = r'Out\[[0-9]+\]: '
263 263
264 264 #: The regex to determine when a traceback starts.
265 265 ipytb_start = re.compile(r'^(\^C)?(-+\n)|^( File)(.*)(, line )(\d+\n)')
266 266
267 267 def __init__(self, **options):
268 268 """Initialize the IPython console lexer.
269 269
270 270 Parameters
271 271 ----------
272 272 python3 : bool
273 273 If `True`, then the console inputs are parsed using a Python 3
274 274 lexer. Otherwise, they are parsed using a Python 2 lexer.
275 275 in1_regex : RegexObject
276 276 The compiled regular expression used to detect the start
277 277 of inputs. Although the IPython configuration setting may have a
278 278 trailing whitespace, do not include it in the regex. If `None`,
279 279 then the default input prompt is assumed.
280 280 in2_regex : RegexObject
281 281 The compiled regular expression used to detect the continuation
282 282 of inputs. Although the IPython configuration setting may have a
283 283 trailing whitespace, do not include it in the regex. If `None`,
284 284 then the default continuation prompt is assumed.
285 285 out_regex : RegexObject
286 286 The compiled regular expression used to detect outputs. If `None`,
287 287 then the default output prompt is assumed.
288 288
289 289 """
290 290 self.python3 = get_bool_opt(options, 'python3', False)
291 291 if self.python3:
292 292 self.aliases = ['ipython3console']
293 293 else:
294 294 self.aliases = ['ipython2console', 'ipythonconsole']
295 295
296 296 in1_regex = options.get('in1_regex', self.in1_regex)
297 297 in2_regex = options.get('in2_regex', self.in2_regex)
298 298 out_regex = options.get('out_regex', self.out_regex)
299 299
300 300 # So that we can work with input and output prompts which have been
301 301 # rstrip'd (possibly by editors) we also need rstrip'd variants. If
302 302 # we do not do this, then such prompts will be tagged as 'output'.
303 303 # The reason we can't just use the rstrip'd variants instead is that
304 304 # we want any whitespace associated with the prompt to be inserted
305 305 # with the token. This allows formatted code to be modified so as to
306 306 # hide the appearance of prompts, with the whitespace included. One example
307 307 # use of this is in copybutton.js from the standard lib Python docs.
308 308 in1_regex_rstrip = in1_regex.rstrip() + '\n'
309 309 in2_regex_rstrip = in2_regex.rstrip() + '\n'
310 310 out_regex_rstrip = out_regex.rstrip() + '\n'
311 311
312 312 # Compile and save them all.
313 313 attrs = ['in1_regex', 'in2_regex', 'out_regex',
314 314 'in1_regex_rstrip', 'in2_regex_rstrip', 'out_regex_rstrip']
315 315 for attr in attrs:
316 316 self.__setattr__(attr, re.compile(locals()[attr]))
317 317
318 318 Lexer.__init__(self, **options)
319 319
320 320 if self.python3:
321 321 pylexer = IPython3Lexer
322 322 tblexer = IPythonTracebackLexer
323 323 else:
324 324 pylexer = IPythonLexer
325 325 tblexer = IPythonTracebackLexer
326 326
327 327 self.pylexer = pylexer(**options)
328 328 self.tblexer = tblexer(**options)
329 329
330 330 self.reset()
331 331
332 332 def reset(self):
333 333 self.mode = 'output'
334 334 self.index = 0
335 335 self.buffer = u''
336 336 self.insertions = []
337 337
338 338 def buffered_tokens(self):
339 339 """
340 340 Generator of unprocessed tokens after doing insertions and before
341 341 changing to a new state.
342 342
343 343 """
344 344 if self.mode == 'output':
345 345 tokens = [(0, Generic.Output, self.buffer)]
346 346 elif self.mode == 'input':
347 347 tokens = self.pylexer.get_tokens_unprocessed(self.buffer)
348 348 else: # traceback
349 349 tokens = self.tblexer.get_tokens_unprocessed(self.buffer)
350 350
351 351 for i, t, v in do_insertions(self.insertions, tokens):
352 352 # All token indexes are relative to the buffer.
353 353 yield self.index + i, t, v
354 354
355 355 # Clear it all
356 356 self.index += len(self.buffer)
357 357 self.buffer = u''
358 358 self.insertions = []
359 359
360 360 def get_mci(self, line):
361 361 """
362 362 Parses the line and returns a 3-tuple: (mode, code, insertion).
363 363
364 364 `mode` is the next mode (or state) of the lexer, and is always equal
365 365 to 'input', 'output', or 'tb'.
366 366
367 367 `code` is a portion of the line that should be added to the buffer
368 368 corresponding to the next mode and eventually lexed by another lexer.
369 369 For example, `code` could be Python code if `mode` were 'input'.
370 370
371 371 `insertion` is a 3-tuple (index, token, text) representing an
372 372 unprocessed "token" that will be inserted into the stream of tokens
373 373 that are created from the buffer once we change modes. This is usually
374 374 the input or output prompt.
375 375
376 376 In general, the next mode depends on current mode and on the contents
377 377 of `line`.
378 378
379 379 """
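        # For illustration: given the line "In [2]: x = 1\n", this method
        # returns ('input', 'x = 1\n', (0, Generic.Prompt, 'In [2]: '));
        # that is, the prompt becomes an insertion and the rest of the line
        # is buffered as input code.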
380 380 # To reduce the number of regex match checks, we have multiple
381 381 # 'if' blocks instead of 'if-elif' blocks.
382 382
383 383 # Check for possible end of input
384 384 in2_match = self.in2_regex.match(line)
385 385 in2_match_rstrip = self.in2_regex_rstrip.match(line)
386 386 if (in2_match and in2_match.group().rstrip() == line.rstrip()) or \
387 387 in2_match_rstrip:
388 388 end_input = True
389 389 else:
390 390 end_input = False
391 391 if end_input and self.mode != 'tb':
392 392 # Only look for an end of input when not in tb mode.
393 393 # An ellipsis could appear within the traceback.
394 394 mode = 'output'
395 395 code = u''
396 396 insertion = (0, Generic.Prompt, line)
397 397 return mode, code, insertion
398 398
399 399 # Check for output prompt
400 400 out_match = self.out_regex.match(line)
401 401 out_match_rstrip = self.out_regex_rstrip.match(line)
402 402 if out_match or out_match_rstrip:
403 403 mode = 'output'
404 404 if out_match:
405 405 idx = out_match.end()
406 406 else:
407 407 idx = out_match_rstrip.end()
408 408 code = line[idx:]
409 409 # Use the 'heading' token for output. We cannot use Generic.Error
410 410 # since it would conflict with exceptions.
411 411 insertion = (0, Generic.Heading, line[:idx])
412 412 return mode, code, insertion
413 413
414 414
415 415 # Check for input or continuation prompt (non-stripped version)
416 416 in1_match = self.in1_regex.match(line)
417 417 if in1_match or (in2_match and self.mode != 'tb'):
418 418 # New input or when not in tb, continued input.
419 419 # We do not check for continued input when in tb since it is
420 420 # allowable to replace a long stack with an ellipsis.
421 421 mode = 'input'
422 422 if in1_match:
423 423 idx = in1_match.end()
424 424 else: # in2_match
425 425 idx = in2_match.end()
426 426 code = line[idx:]
427 427 insertion = (0, Generic.Prompt, line[:idx])
428 428 return mode, code, insertion
429 429
430 430 # Check for input or continuation prompt (stripped version)
431 431 in1_match_rstrip = self.in1_regex_rstrip.match(line)
432 432 if in1_match_rstrip or (in2_match_rstrip and self.mode != 'tb'):
433 433 # New input or when not in tb, continued input.
434 434 # We do not check for continued input when in tb since it is
435 435 # allowable to replace a long stack with an ellipsis.
436 436 mode = 'input'
437 437 if in1_match_rstrip:
438 438 idx = in1_match_rstrip.end()
439 439 else: # in2_match
440 440 idx = in2_match_rstrip.end()
441 441 code = line[idx:]
442 442 insertion = (0, Generic.Prompt, line[:idx])
443 443 return mode, code, insertion
444 444
445 445 # Check for traceback
446 446 if self.ipytb_start.match(line):
447 447 mode = 'tb'
448 448 code = line
449 449 insertion = None
450 450 return mode, code, insertion
451 451
452 452 # All other stuff...
453 453 if self.mode in ('input', 'output'):
454 454 # We assume all other text is output. Multiline input that
455 455 # does not use the continuation marker cannot be detected.
456 456 # For example, the 3 in the following is clearly output:
457 457 #
458 458 # In [1]: print 3
459 459 # 3
460 460 #
461 461 # But the following second line is part of the input:
462 462 #
463 463 # In [2]: while True:
464 464 # print True
465 465 #
466 466 # In both cases, the 2nd line will be 'output'.
467 467 #
468 468 mode = 'output'
469 469 else:
470 470 mode = 'tb'
471 471
472 472 code = line
473 473 insertion = None
474 474
475 475 return mode, code, insertion
476 476
477 477 def get_tokens_unprocessed(self, text):
478 478 self.reset()
479 479 for match in line_re.finditer(text):
480 480 line = match.group()
481 481 mode, code, insertion = self.get_mci(line)
482 482
483 483 if mode != self.mode:
484 484 # Yield buffered tokens before transitioning to new mode.
485 485 for token in self.buffered_tokens():
486 486 yield token
487 487 self.mode = mode
488 488
489 489 if insertion:
490 490 self.insertions.append((len(self.buffer), [insertion]))
491 491 self.buffer += code
492 492
493 493 for token in self.buffered_tokens():
494 494 yield token
495 495
496 496 class IPyLexer(Lexer):
497 497 r"""
498 498 Primary lexer for all IPython-like code.
499 499
500 500 This is a simple helper lexer. If the text contains an input prompt of
501 501 the form "In \[[0-9]+\]:", then the entire text is parsed with an IPython
502 502 console lexer. If not, then the entire text is parsed with an IPython lexer.
503 503
504 504 The goal is to reduce the number of lexers that are registered
505 505 with Pygments.
506 506
507 507 """
508 508 name = 'IPy session'
509 509 aliases = ['ipy']
510 510
511 511 def __init__(self, **options):
512 512 self.python3 = get_bool_opt(options, 'python3', False)
513 513 if self.python3:
514 514 self.aliases = ['ipy3']
515 515 else:
516 516 self.aliases = ['ipy2', 'ipy']
517 517
518 518 Lexer.__init__(self, **options)
519 519
520 520 self.IPythonLexer = IPythonLexer(**options)
521 521 self.IPythonConsoleLexer = IPythonConsoleLexer(**options)
522 522
523 523 def get_tokens_unprocessed(self, text):
524 524 # Search for the input prompt anywhere...this allows code blocks to
525 525 # begin with comments as well.
526 526 if re.match(r'.*(In \[[0-9]+\]:)', text.strip(), re.DOTALL):
527 527 lex = self.IPythonConsoleLexer
528 528 else:
529 529 lex = self.IPythonLexer
530 530 for token in lex.get_tokens_unprocessed(text):
531 531 yield token
532 532
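# A small usage sketch (illustrative only; the transcript text and formatter
# choice are arbitrary): highlighting a console session with the lexers
# defined above.
if __name__ == "__main__":
    from pygments import highlight
    from pygments.formatters import HtmlFormatter

    transcript = (
        "In [1]: a = 'foo'\n"
        "\n"
        "In [2]: a\n"
        "Out[2]: 'foo'\n"
    )
    # IPyLexer dispatches to IPythonConsoleLexer because the text contains an
    # "In [N]:" prompt; plain code would fall back to IPythonLexer instead.
    print(highlight(transcript, IPyLexer(python3=True), HtmlFormatter()))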
@@ -1,871 +1,856
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 Python advanced pretty printer. This pretty printer is intended to
4 4 replace the old `pprint` python module which does not allow developers
5 5 to provide their own pretty print callbacks.
6 6
7 7 This module is based on ruby's `prettyprint.rb` library by `Tanaka Akira`.
8 8
9 9
10 10 Example Usage
11 11 -------------
12 12
13 13 To directly print the representation of an object use `pprint`::
14 14
15 15 from pretty import pprint
16 16 pprint(complex_object)
17 17
18 18 To get a string of the output use `pretty`::
19 19
20 20 from pretty import pretty
21 21 string = pretty(complex_object)
22 22
23 23
24 24 Extending
25 25 ---------
26 26
27 27 The pretty library allows developers to add pretty printing rules for their
28 28 own objects. This process is straightforward. All you have to do is to
29 29 add a `_repr_pretty_` method to your object and call the methods on the
30 30 pretty printer passed::
31 31
32 32 class MyObject(object):
33 33
34 34 def _repr_pretty_(self, p, cycle):
35 35 ...
36 36
37 37 Here is an example implementation of a `_repr_pretty_` method for a list
38 38 subclass::
39 39
40 40 class MyList(list):
41 41
42 42 def _repr_pretty_(self, p, cycle):
43 43 if cycle:
44 44 p.text('MyList(...)')
45 45 else:
46 46 with p.group(8, 'MyList([', '])'):
47 47 for idx, item in enumerate(self):
48 48 if idx:
49 49 p.text(',')
50 50 p.breakable()
51 51 p.pretty(item)
52 52
53 53 The `cycle` parameter is `True` if pretty detected a cycle. You *have* to
54 54 react to that or the result is an infinite loop. `p.text()` just adds
55 55 non-breaking text to the output, `p.breakable()` either adds a whitespace
56 56 or breaks here. If you pass it an argument it's used instead of the
57 57 default space. `p.pretty` prettyprints another object using the pretty print
58 58 method.
59 59
60 60 The first parameter to the `group` function specifies the extra indentation
61 61 of the next line. In this example the next item will either be on the same
62 62 line (if the items are short enough) or aligned with the right edge of the
63 63 opening bracket of `MyList`.
64 64
65 65 If you just want to indent something you can use the group function
66 66 without open / close parameters. You can also use this code::
67 67
68 68 with p.indent(2):
69 69 ...
70 70
71 71 Inheritance diagram:
72 72
73 73 .. inheritance-diagram:: IPython.lib.pretty
74 74 :parts: 3
75 75
76 76 :copyright: 2007 by Armin Ronacher.
77 77 Portions (c) 2009 by Robert Kern.
78 78 :license: BSD License.
79 79 """
80 80
81 81 from contextlib import contextmanager
82 82 import datetime
83 83 import os
84 84 import re
85 85 import sys
86 86 import types
87 87 from collections import deque
88 88 from inspect import signature
89 89 from io import StringIO
90 90 from warnings import warn
91 91
92 92 from IPython.utils.decorators import undoc
93 93 from IPython.utils.py3compat import PYPY
94 94
95 95 __all__ = ['pretty', 'pprint', 'PrettyPrinter', 'RepresentationPrinter',
96 96 'for_type', 'for_type_by_name']
97 97
98 98
99 99 MAX_SEQ_LENGTH = 1000
100 100 _re_pattern_type = type(re.compile(''))
101 101
102 102 def _safe_getattr(obj, attr, default=None):
103 103 """Safe version of getattr.
104 104
105 105 Same as getattr, but will return ``default`` on any Exception,
106 106 rather than raising.
107 107 """
108 108 try:
109 109 return getattr(obj, attr, default)
110 110 except Exception:
111 111 return default
112 112
113 113 @undoc
114 114 class CUnicodeIO(StringIO):
115 115 def __init__(self, *args, **kwargs):
116 116 super().__init__(*args, **kwargs)
117 117 warn(("CUnicodeIO is deprecated since IPython 6.0. "
118 118 "Please use io.StringIO instead."),
119 119 DeprecationWarning, stacklevel=2)
120 120
121 121 def _sorted_for_pprint(items):
122 122 """
123 123 Sort the given items for pretty printing. Since some predictable
124 124 sorting is better than no sorting at all, we sort on the string
125 125 representation if normal sorting fails.
126 126 """
127 127 items = list(items)
128 128 try:
129 129 return sorted(items)
130 130 except Exception:
131 131 try:
132 132 return sorted(items, key=str)
133 133 except Exception:
134 134 return items
135 135
136 136 def pretty(obj, verbose=False, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH):
137 137 """
138 138 Pretty print the object's representation.
139 139 """
140 140 stream = StringIO()
141 141 printer = RepresentationPrinter(stream, verbose, max_width, newline, max_seq_length=max_seq_length)
142 142 printer.pretty(obj)
143 143 printer.flush()
144 144 return stream.getvalue()
145 145
146 146
147 147 def pprint(obj, verbose=False, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH):
148 148 """
149 149 Like `pretty` but print to stdout.
150 150 """
151 151 printer = RepresentationPrinter(sys.stdout, verbose, max_width, newline, max_seq_length=max_seq_length)
152 152 printer.pretty(obj)
153 153 printer.flush()
154 154 sys.stdout.write(newline)
155 155 sys.stdout.flush()
156 156
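# A minimal usage sketch (illustrative only; the helper name and sample data
# are arbitrary): `pretty` returns the formatted string, `pprint` writes the
# same output to stdout, and both wrap RepresentationPrinter.
def _example_pretty_usage():
    data = {"name": "demo", "values": list(range(5)), "nested": {"a": (1, 2)}}
    text = pretty(data, max_width=40)  # returns a str, wrapped at ~40 columns
    pprint(data, max_width=40)         # same formatting, printed to stdout
    return text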
157 157 class _PrettyPrinterBase(object):
158 158
159 159 @contextmanager
160 160 def indent(self, indent):
161 161 """with statement support for indenting/dedenting."""
162 162 self.indentation += indent
163 163 try:
164 164 yield
165 165 finally:
166 166 self.indentation -= indent
167 167
168 168 @contextmanager
169 169 def group(self, indent=0, open='', close=''):
170 170 """like begin_group / end_group but for the with statement."""
171 171 self.begin_group(indent, open)
172 172 try:
173 173 yield
174 174 finally:
175 175 self.end_group(indent, close)
176 176
177 177 class PrettyPrinter(_PrettyPrinterBase):
178 178 """
179 179 Base class for the `RepresentationPrinter` prettyprinter that is used to
180 180 generate pretty reprs of objects. Unlike the `RepresentationPrinter`,
181 181 this printer knows nothing about the default pprinters or the `_repr_pretty_`
182 182 callback method.
183 183 """
184 184
185 185 def __init__(self, output, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH):
186 186 self.output = output
187 187 self.max_width = max_width
188 188 self.newline = newline
189 189 self.max_seq_length = max_seq_length
190 190 self.output_width = 0
191 191 self.buffer_width = 0
192 192 self.buffer = deque()
193 193
194 194 root_group = Group(0)
195 195 self.group_stack = [root_group]
196 196 self.group_queue = GroupQueue(root_group)
197 197 self.indentation = 0
198 198
199 199 def _break_one_group(self, group):
200 200 while group.breakables:
201 201 x = self.buffer.popleft()
202 202 self.output_width = x.output(self.output, self.output_width)
203 203 self.buffer_width -= x.width
204 204 while self.buffer and isinstance(self.buffer[0], Text):
205 205 x = self.buffer.popleft()
206 206 self.output_width = x.output(self.output, self.output_width)
207 207 self.buffer_width -= x.width
208 208
209 209 def _break_outer_groups(self):
210 210 while self.max_width < self.output_width + self.buffer_width:
211 211 group = self.group_queue.deq()
212 212 if not group:
213 213 return
214 214 self._break_one_group(group)
215 215
216 216 def text(self, obj):
217 217 """Add literal text to the output."""
218 218 width = len(obj)
219 219 if self.buffer:
220 220 text = self.buffer[-1]
221 221 if not isinstance(text, Text):
222 222 text = Text()
223 223 self.buffer.append(text)
224 224 text.add(obj, width)
225 225 self.buffer_width += width
226 226 self._break_outer_groups()
227 227 else:
228 228 self.output.write(obj)
229 229 self.output_width += width
230 230
231 231 def breakable(self, sep=' '):
232 232 """
233 233 Add a breakable separator to the output. This does not mean that it
234 234 will automatically break here. If no break takes place at this
235 235 position, the `sep` is inserted instead; it defaults to a single space.
236 236 """
237 237 width = len(sep)
238 238 group = self.group_stack[-1]
239 239 if group.want_break:
240 240 self.flush()
241 241 self.output.write(self.newline)
242 242 self.output.write(' ' * self.indentation)
243 243 self.output_width = self.indentation
244 244 self.buffer_width = 0
245 245 else:
246 246 self.buffer.append(Breakable(sep, width, self))
247 247 self.buffer_width += width
248 248 self._break_outer_groups()
249 249
250 250 def break_(self):
251 251 """
252 252 Explicitly insert a newline into the output, maintaining correct indentation.
253 253 """
254 254 group = self.group_queue.deq()
255 255 if group:
256 256 self._break_one_group(group)
257 257 self.flush()
258 258 self.output.write(self.newline)
259 259 self.output.write(' ' * self.indentation)
260 260 self.output_width = self.indentation
261 261 self.buffer_width = 0
262 262
263 263
264 264 def begin_group(self, indent=0, open=''):
265 265 """
266 Begin a group. If you want support for python < 2.5 which doesn't has
267 the with statement this is the preferred way:
268
269 p.begin_group(1, '{')
270 ...
271 p.end_group(1, '}')
272
273 The python 2.5 expression would be this:
274
275 with p.group(1, '{', '}'):
276 ...
277
266 Begin a group.
278 267 The first parameter specifies the indentation for the next line (usually
279 268 the width of the opening text), the second the opening text. All
280 269 parameters are optional.
281 270 """
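        # For illustration, these two forms are equivalent; `group` is the
        # context-manager wrapper around begin_group/end_group:
        #
        #     p.begin_group(1, '{')
        #     ...
        #     p.end_group(1, '}')
        #
        #     with p.group(1, '{', '}'):
        #         ...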
282 271 if open:
283 272 self.text(open)
284 273 group = Group(self.group_stack[-1].depth + 1)
285 274 self.group_stack.append(group)
286 275 self.group_queue.enq(group)
287 276 self.indentation += indent
288 277
289 278 def _enumerate(self, seq):
290 279 """like enumerate, but with an upper limit on the number of items"""
291 280 for idx, x in enumerate(seq):
292 281 if self.max_seq_length and idx >= self.max_seq_length:
293 282 self.text(',')
294 283 self.breakable()
295 284 self.text('...')
296 285 return
297 286 yield idx, x
298 287
299 288 def end_group(self, dedent=0, close=''):
300 289 """End a group. See `begin_group` for more details."""
301 290 self.indentation -= dedent
302 291 group = self.group_stack.pop()
303 292 if not group.breakables:
304 293 self.group_queue.remove(group)
305 294 if close:
306 295 self.text(close)
307 296
308 297 def flush(self):
309 298 """Flush data that is left in the buffer."""
310 299 for data in self.buffer:
311 300 self.output_width += data.output(self.output, self.output_width)
312 301 self.buffer.clear()
313 302 self.buffer_width = 0
314 303
315 304
316 305 def _get_mro(obj_class):
317 306 """ Get a reasonable method resolution order of a class and its superclasses
318 307 for both old-style and new-style classes.
319 308 """
320 309 if not hasattr(obj_class, '__mro__'):
321 310 # Old-style class. Mix in object to make a fake new-style class.
322 311 try:
323 312 obj_class = type(obj_class.__name__, (obj_class, object), {})
324 313 except TypeError:
325 314 # Old-style extension type that does not descend from object.
326 315 # FIXME: try to construct a more thorough MRO.
327 316 mro = [obj_class]
328 317 else:
329 318 mro = obj_class.__mro__[1:-1]
330 319 else:
331 320 mro = obj_class.__mro__
332 321 return mro
333 322
334 323
335 324 class RepresentationPrinter(PrettyPrinter):
336 325 """
337 326 Special pretty printer that has a `pretty` method that calls the pretty
338 327 printer for a python object.
339 328
340 329 This class stores processing data on `self` so you must *never* use
341 330 this class in a threaded environment. Always lock it or reinstantiate
342 331 it.
343 332
344 333 Instances also have a verbose flag that callbacks can access to control their
345 334 output. For example the default instance repr prints all attributes and
346 335 methods that are not prefixed by an underscore if the printer is in
347 336 verbose mode.
348 337 """
349 338
350 339 def __init__(self, output, verbose=False, max_width=79, newline='\n',
351 340 singleton_pprinters=None, type_pprinters=None, deferred_pprinters=None,
352 341 max_seq_length=MAX_SEQ_LENGTH):
353 342
354 343 PrettyPrinter.__init__(self, output, max_width, newline, max_seq_length=max_seq_length)
355 344 self.verbose = verbose
356 345 self.stack = []
357 346 if singleton_pprinters is None:
358 347 singleton_pprinters = _singleton_pprinters.copy()
359 348 self.singleton_pprinters = singleton_pprinters
360 349 if type_pprinters is None:
361 350 type_pprinters = _type_pprinters.copy()
362 351 self.type_pprinters = type_pprinters
363 352 if deferred_pprinters is None:
364 353 deferred_pprinters = _deferred_type_pprinters.copy()
365 354 self.deferred_pprinters = deferred_pprinters
366 355
367 356 def pretty(self, obj):
368 357 """Pretty print the given object."""
369 358 obj_id = id(obj)
370 359 cycle = obj_id in self.stack
371 360 self.stack.append(obj_id)
372 361 self.begin_group()
373 362 try:
374 363 obj_class = _safe_getattr(obj, '__class__', None) or type(obj)
375 364 # First try to find registered singleton printers for the type.
376 365 try:
377 366 printer = self.singleton_pprinters[obj_id]
378 367 except (TypeError, KeyError):
379 368 pass
380 369 else:
381 370 return printer(obj, self, cycle)
382 371 # Next walk the mro and check for either:
383 372 # 1) a registered printer
384 373 # 2) a _repr_pretty_ method
385 374 for cls in _get_mro(obj_class):
386 375 if cls in self.type_pprinters:
387 376 # printer registered in self.type_pprinters
388 377 return self.type_pprinters[cls](obj, self, cycle)
389 378 else:
390 379 # deferred printer
391 380 printer = self._in_deferred_types(cls)
392 381 if printer is not None:
393 382 return printer(obj, self, cycle)
394 383 else:
395 384 # Finally look for special method names.
396 385 # Some objects automatically create any requested
397 386 # attribute. Try to ignore most of them by checking for
398 387 # callability.
399 388 if '_repr_pretty_' in cls.__dict__:
400 389 meth = cls._repr_pretty_
401 390 if callable(meth):
402 391 return meth(obj, self, cycle)
403 392 if cls is not object \
404 393 and callable(cls.__dict__.get('__repr__')):
405 394 return _repr_pprint(obj, self, cycle)
406 395
407 396 return _default_pprint(obj, self, cycle)
408 397 finally:
409 398 self.end_group()
410 399 self.stack.pop()
411 400
412 401 def _in_deferred_types(self, cls):
413 402 """
414 403 Check if the given class is specified in the deferred type registry.
415 404
416 405 Returns the printer from the registry if it exists, and None if the
417 406 class is not in the registry. Successful matches will be moved to the
418 407 regular type registry for future use.
419 408 """
420 409 mod = _safe_getattr(cls, '__module__', None)
421 410 name = _safe_getattr(cls, '__name__', None)
422 411 key = (mod, name)
423 412 printer = None
424 413 if key in self.deferred_pprinters:
425 414 # Move the printer over to the regular registry.
426 415 printer = self.deferred_pprinters.pop(key)
427 416 self.type_pprinters[cls] = printer
428 417 return printer
429 418
430 419
431 420 class Printable(object):
432 421
433 422 def output(self, stream, output_width):
434 423 return output_width
435 424
436 425
437 426 class Text(Printable):
438 427
439 428 def __init__(self):
440 429 self.objs = []
441 430 self.width = 0
442 431
443 432 def output(self, stream, output_width):
444 433 for obj in self.objs:
445 434 stream.write(obj)
446 435 return output_width + self.width
447 436
448 437 def add(self, obj, width):
449 438 self.objs.append(obj)
450 439 self.width += width
451 440
452 441
453 442 class Breakable(Printable):
454 443
455 444 def __init__(self, seq, width, pretty):
456 445 self.obj = seq
457 446 self.width = width
458 447 self.pretty = pretty
459 448 self.indentation = pretty.indentation
460 449 self.group = pretty.group_stack[-1]
461 450 self.group.breakables.append(self)
462 451
463 452 def output(self, stream, output_width):
464 453 self.group.breakables.popleft()
465 454 if self.group.want_break:
466 455 stream.write(self.pretty.newline)
467 456 stream.write(' ' * self.indentation)
468 457 return self.indentation
469 458 if not self.group.breakables:
470 459 self.pretty.group_queue.remove(self.group)
471 460 stream.write(self.obj)
472 461 return output_width + self.width
473 462
474 463
475 464 class Group(Printable):
476 465
477 466 def __init__(self, depth):
478 467 self.depth = depth
479 468 self.breakables = deque()
480 469 self.want_break = False
481 470
482 471
483 472 class GroupQueue(object):
484 473
485 474 def __init__(self, *groups):
486 475 self.queue = []
487 476 for group in groups:
488 477 self.enq(group)
489 478
490 479 def enq(self, group):
491 480 depth = group.depth
492 481 while depth > len(self.queue) - 1:
493 482 self.queue.append([])
494 483 self.queue[depth].append(group)
495 484
496 485 def deq(self):
497 486 for stack in self.queue:
498 487 for idx, group in enumerate(reversed(stack)):
499 488 if group.breakables:
500 489 del stack[idx]
501 490 group.want_break = True
502 491 return group
503 492 for group in stack:
504 493 group.want_break = True
505 494 del stack[:]
506 495
507 496 def remove(self, group):
508 497 try:
509 498 self.queue[group.depth].remove(group)
510 499 except ValueError:
511 500 pass
512 501
513 502
514 503 def _default_pprint(obj, p, cycle):
515 504 """
516 505 The default print function. Used if an object does not provide one and
517 506 it is not one of the builtin objects.
518 507 """
519 508 klass = _safe_getattr(obj, '__class__', None) or type(obj)
520 509 if _safe_getattr(klass, '__repr__', None) is not object.__repr__:
521 510 # A user-provided repr. Find newlines and replace them with p.break_()
522 511 _repr_pprint(obj, p, cycle)
523 512 return
524 513 p.begin_group(1, '<')
525 514 p.pretty(klass)
526 515 p.text(' at 0x%x' % id(obj))
527 516 if cycle:
528 517 p.text(' ...')
529 518 elif p.verbose:
530 519 first = True
531 520 for key in dir(obj):
532 521 if not key.startswith('_'):
533 522 try:
534 523 value = getattr(obj, key)
535 524 except AttributeError:
536 525 continue
537 526 if isinstance(value, types.MethodType):
538 527 continue
539 528 if not first:
540 529 p.text(',')
541 530 p.breakable()
542 531 p.text(key)
543 532 p.text('=')
544 533 step = len(key) + 1
545 534 p.indentation += step
546 535 p.pretty(value)
547 536 p.indentation -= step
548 537 first = False
549 538 p.end_group(1, '>')
550 539
551 540
552 541 def _seq_pprinter_factory(start, end):
553 542 """
554 543 Factory that returns a pprint function useful for sequences. Used by
555 544 the default pprint for tuples and lists.
556 545 """
557 546 def inner(obj, p, cycle):
558 547 if cycle:
559 548 return p.text(start + '...' + end)
560 549 step = len(start)
561 550 p.begin_group(step, start)
562 551 for idx, x in p._enumerate(obj):
563 552 if idx:
564 553 p.text(',')
565 554 p.breakable()
566 555 p.pretty(x)
567 556 if len(obj) == 1 and type(obj) is tuple:
568 557 # Special case for 1-item tuples.
569 558 p.text(',')
570 559 p.end_group(step, end)
571 560 return inner
572 561
573 562
574 563 def _set_pprinter_factory(start, end):
575 564 """
576 565 Factory that returns a pprint function useful for sets and frozensets.
577 566 """
578 567 def inner(obj, p, cycle):
579 568 if cycle:
580 569 return p.text(start + '...' + end)
581 570 if len(obj) == 0:
582 571 # Special case.
583 572 p.text(type(obj).__name__ + '()')
584 573 else:
585 574 step = len(start)
586 575 p.begin_group(step, start)
587 576 # Like dictionary keys, we will try to sort the items if there aren't too many
588 577 if not (p.max_seq_length and len(obj) >= p.max_seq_length):
589 578 items = _sorted_for_pprint(obj)
590 579 else:
591 580 items = obj
592 581 for idx, x in p._enumerate(items):
593 582 if idx:
594 583 p.text(',')
595 584 p.breakable()
596 585 p.pretty(x)
597 586 p.end_group(step, end)
598 587 return inner
599 588
600 589
601 590 def _dict_pprinter_factory(start, end):
602 591 """
603 592 Factory that returns a pprint function used by the default pprint of
604 593 dicts and dict proxies.
605 594 """
606 595 def inner(obj, p, cycle):
607 596 if cycle:
608 597 return p.text('{...}')
609 598 step = len(start)
610 599 p.begin_group(step, start)
611 600 keys = obj.keys()
612 601 for idx, key in p._enumerate(keys):
613 602 if idx:
614 603 p.text(',')
615 604 p.breakable()
616 605 p.pretty(key)
617 606 p.text(': ')
618 607 p.pretty(obj[key])
619 608 p.end_group(step, end)
620 609 return inner
621 610
622 611
623 612 def _super_pprint(obj, p, cycle):
624 613 """The pprint for the super type."""
625 614 p.begin_group(8, '<super: ')
626 615 p.pretty(obj.__thisclass__)
627 616 p.text(',')
628 617 p.breakable()
629 618 if PYPY: # In PyPy, super() objects don't have __self__ attributes
630 619 dself = obj.__repr__.__self__
631 620 p.pretty(None if dself is obj else dself)
632 621 else:
633 622 p.pretty(obj.__self__)
634 623 p.end_group(8, '>')
635 624
636 625
637 626 def _re_pattern_pprint(obj, p, cycle):
638 627 """The pprint function for regular expression patterns."""
639 628 p.text('re.compile(')
640 629 pattern = repr(obj.pattern)
641 630 if pattern[:1] in 'uU':
642 631 pattern = pattern[1:]
643 632 prefix = 'ur'
644 633 else:
645 634 prefix = 'r'
646 635 pattern = prefix + pattern.replace('\\\\', '\\')
647 636 p.text(pattern)
648 637 if obj.flags:
649 638 p.text(',')
650 639 p.breakable()
651 640 done_one = False
652 641 for flag in ('TEMPLATE', 'IGNORECASE', 'LOCALE', 'MULTILINE', 'DOTALL',
653 642 'UNICODE', 'VERBOSE', 'DEBUG'):
654 643 if obj.flags & getattr(re, flag):
655 644 if done_one:
656 645 p.text('|')
657 646 p.text('re.' + flag)
658 647 done_one = True
659 648 p.text(')')
660 649
661 650
662 651 def _type_pprint(obj, p, cycle):
663 652 """The pprint for classes and types."""
664 653 # Heap allocated types might not have the module attribute,
665 654 # and others may set it to None.
666 655
667 656 # Checks for a __repr__ override in the metaclass. Can't compare the
668 657 # type(obj).__repr__ directly because in PyPy the representation function
669 658 # inherited from type isn't the same as type.__repr__.
670 659 if [m for m in _get_mro(type(obj)) if "__repr__" in vars(m)][:1] != [type]:
671 660 _repr_pprint(obj, p, cycle)
672 661 return
673 662
674 663 mod = _safe_getattr(obj, '__module__', None)
675 664 try:
676 665 name = obj.__qualname__
677 666 if not isinstance(name, str):
678 667 # This can happen if the type implements __qualname__ as a property
679 668 # or other descriptor in Python 2.
680 669 raise Exception("Try __name__")
681 670 except Exception:
682 671 name = obj.__name__
683 672 if not isinstance(name, str):
684 673 name = '<unknown type>'
685 674
686 675 if mod in (None, '__builtin__', 'builtins', 'exceptions'):
687 676 p.text(name)
688 677 else:
689 678 p.text(mod + '.' + name)
690 679
691 680
692 681 def _repr_pprint(obj, p, cycle):
693 682 """A pprint that just redirects to the normal repr function."""
694 683 # Find newlines and replace them with p.break_()
695 684 output = repr(obj)
696 685 lines = output.splitlines()
697 686 with p.group():
698 687 for idx, output_line in enumerate(lines):
699 688 if idx:
700 689 p.break_()
701 690 p.text(output_line)
702 691
703 692
704 693 def _function_pprint(obj, p, cycle):
705 694 """Base pprint for all functions and builtin functions."""
706 695 name = _safe_getattr(obj, '__qualname__', obj.__name__)
707 696 mod = obj.__module__
708 697 if mod and mod not in ('__builtin__', 'builtins', 'exceptions'):
709 698 name = mod + '.' + name
710 699 try:
711 700 func_def = name + str(signature(obj))
712 701 except ValueError:
713 702 func_def = name
714 703 p.text('<function %s>' % func_def)
715 704
716 705
717 706 def _exception_pprint(obj, p, cycle):
718 707 """Base pprint for all exceptions."""
719 708 name = getattr(obj.__class__, '__qualname__', obj.__class__.__name__)
720 709 if obj.__class__.__module__ not in ('exceptions', 'builtins'):
721 710 name = '%s.%s' % (obj.__class__.__module__, name)
722 711 step = len(name) + 1
723 712 p.begin_group(step, name + '(')
724 713 for idx, arg in enumerate(getattr(obj, 'args', ())):
725 714 if idx:
726 715 p.text(',')
727 716 p.breakable()
728 717 p.pretty(arg)
729 718 p.end_group(step, ')')
730 719
731 720
732 721 #: the exception base
733 722 try:
734 723 _exception_base = BaseException
735 724 except NameError:
736 725 _exception_base = Exception
737 726
738 727
739 728 #: printers for builtin types
740 729 _type_pprinters = {
741 730 int: _repr_pprint,
742 731 float: _repr_pprint,
743 732 str: _repr_pprint,
744 733 tuple: _seq_pprinter_factory('(', ')'),
745 734 list: _seq_pprinter_factory('[', ']'),
746 735 dict: _dict_pprinter_factory('{', '}'),
747 736 set: _set_pprinter_factory('{', '}'),
748 737 frozenset: _set_pprinter_factory('frozenset({', '})'),
749 738 super: _super_pprint,
750 739 _re_pattern_type: _re_pattern_pprint,
751 740 type: _type_pprint,
752 741 types.FunctionType: _function_pprint,
753 742 types.BuiltinFunctionType: _function_pprint,
754 743 types.MethodType: _repr_pprint,
755 744 datetime.datetime: _repr_pprint,
756 745 datetime.timedelta: _repr_pprint,
757 746 _exception_base: _exception_pprint
758 747 }
759 748
760 749 # render os.environ like a dict
761 750 _env_type = type(os.environ)
762 751 # future-proof in case os.environ becomes a plain dict?
763 752 if _env_type is not dict:
764 753 _type_pprinters[_env_type] = _dict_pprinter_factory('environ{', '}')
765 754
766 755 try:
767 756 # In PyPy, types.DictProxyType is dict, setting the dictproxy printer
768 757 # using dict.setdefault avoids overwriting the dict printer
769 758 _type_pprinters.setdefault(types.DictProxyType,
770 759 _dict_pprinter_factory('dict_proxy({', '})'))
771 760 _type_pprinters[types.ClassType] = _type_pprint
772 761 _type_pprinters[types.SliceType] = _repr_pprint
773 762 except AttributeError: # Python 3
774 763 _type_pprinters[types.MappingProxyType] = \
775 764 _dict_pprinter_factory('mappingproxy({', '})')
776 765 _type_pprinters[slice] = _repr_pprint
777 766
778 try:
779 _type_pprinters[long] = _repr_pprint
780 _type_pprinters[unicode] = _repr_pprint
781 except NameError:
782 767 _type_pprinters[range] = _repr_pprint
783 768 _type_pprinters[bytes] = _repr_pprint
784 769
785 770 #: printers for types specified by name
786 771 _deferred_type_pprinters = {
787 772 }
788 773
789 774 def for_type(typ, func):
790 775 """
791 776 Add a pretty printer for a given type.
792 777 """
793 778 oldfunc = _type_pprinters.get(typ, None)
794 779 if func is not None:
795 780 # To support easy restoration of old pprinters, we need to ignore Nones.
796 781 _type_pprinters[typ] = func
797 782 return oldfunc
798 783
799 784 def for_type_by_name(type_module, type_name, func):
800 785 """
801 786 Add a pretty printer for a type specified by the module and name of a type
802 787 rather than the type object itself.
803 788 """
804 789 key = (type_module, type_name)
805 790 oldfunc = _deferred_type_pprinters.get(key, None)
806 791 if func is not None:
807 792 # To support easy restoration of old pprinters, we need to ignore Nones.
808 793 _deferred_type_pprinters[key] = func
809 794 return oldfunc
810 795
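# A minimal usage sketch (illustrative only; the helper, the Money class and
# its printer are invented for the example): `for_type` takes the type object
# itself, while `for_type_by_name` defers the import by taking the module and
# type names instead.
def _example_register_printer():
    class Money:
        def __init__(self, amount, currency):
            self.amount, self.currency = amount, currency

    def money_pprint(obj, p, cycle):
        # A flat object like this cannot cycle, so `cycle` is ignored.
        p.text("%.2f %s" % (obj.amount, obj.currency))

    old = for_type(Money, money_pprint)  # returns any previously registered printer
    print(pretty(Money(3.5, "EUR")))     # -> 3.50 EUR
    for_type(Money, old)                 # restore; None values are ignored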
811 796
812 797 #: printers for the default singletons
813 798 _singleton_pprinters = dict.fromkeys(map(id, [None, True, False, Ellipsis,
814 799 NotImplemented]), _repr_pprint)
815 800
816 801
817 802 def _defaultdict_pprint(obj, p, cycle):
818 803 name = obj.__class__.__name__
819 804 with p.group(len(name) + 1, name + '(', ')'):
820 805 if cycle:
821 806 p.text('...')
822 807 else:
823 808 p.pretty(obj.default_factory)
824 809 p.text(',')
825 810 p.breakable()
826 811 p.pretty(dict(obj))
827 812
828 813 def _ordereddict_pprint(obj, p, cycle):
829 814 name = obj.__class__.__name__
830 815 with p.group(len(name) + 1, name + '(', ')'):
831 816 if cycle:
832 817 p.text('...')
833 818 elif len(obj):
834 819 p.pretty(list(obj.items()))
835 820
836 821 def _deque_pprint(obj, p, cycle):
837 822 name = obj.__class__.__name__
838 823 with p.group(len(name) + 1, name + '(', ')'):
839 824 if cycle:
840 825 p.text('...')
841 826 else:
842 827 p.pretty(list(obj))
843 828
844 829
845 830 def _counter_pprint(obj, p, cycle):
846 831 name = obj.__class__.__name__
847 832 with p.group(len(name) + 1, name + '(', ')'):
848 833 if cycle:
849 834 p.text('...')
850 835 elif len(obj):
851 836 p.pretty(dict(obj))
852 837
853 838 for_type_by_name('collections', 'defaultdict', _defaultdict_pprint)
854 839 for_type_by_name('collections', 'OrderedDict', _ordereddict_pprint)
855 840 for_type_by_name('collections', 'deque', _deque_pprint)
856 841 for_type_by_name('collections', 'Counter', _counter_pprint)
857 842
858 843 if __name__ == '__main__':
859 844 from random import randrange
860 845 class Foo(object):
861 846 def __init__(self):
862 847 self.foo = 1
863 848 self.bar = re.compile(r'\s+')
864 849 self.blub = dict.fromkeys(range(30), randrange(1, 40))
865 850 self.hehe = 23424.234234
866 851 self.list = ["blub", "blah", self]
867 852
868 853 def get_foo(self):
869 854 print("foo")
870 855
871 856 pprint(Foo(), verbose=True)
@@ -1,270 +1,266
1 1 """Tests for IPython.lib.display.
2 2
3 3 """
4 4 #-----------------------------------------------------------------------------
5 5 # Copyright (c) 2012, the IPython Development Team.
6 6 #
7 7 # Distributed under the terms of the Modified BSD License.
8 8 #
9 9 # The full license is in the file COPYING.txt, distributed with this software.
10 10 #-----------------------------------------------------------------------------
11 11
12 12 #-----------------------------------------------------------------------------
13 13 # Imports
14 14 #-----------------------------------------------------------------------------
15 15 from tempfile import NamedTemporaryFile, mkdtemp
16 16 from os.path import split, join as pjoin, dirname
17 import sys
18 try:
19 17 import pathlib
20 except ImportError:
21 pass
22 18 from unittest import TestCase, mock
23 19 import struct
24 20 import wave
25 21 from io import BytesIO
26 22
27 23 # Third-party imports
28 24 import nose.tools as nt
29 25
30 26 try:
31 27 import numpy
32 28 except ImportError:
33 29 pass
34 30
35 31 # Our own imports
36 32 from IPython.lib import display
37 33
38 34 from IPython.testing.decorators import skipif_not_numpy
39 35
40 36 #-----------------------------------------------------------------------------
41 37 # Classes and functions
42 38 #-----------------------------------------------------------------------------
43 39
44 40 #--------------------------
45 41 # FileLink tests
46 42 #--------------------------
47 43
48 44 def test_instantiation_FileLink():
49 45 """FileLink: Test class can be instantiated"""
50 46 fl = display.FileLink('example.txt')
51 47 # TODO: remove if when only Python >= 3.6 is supported
52 48 fl = display.FileLink(pathlib.PurePath('example.txt'))
53 49
54 50 def test_warning_on_non_existent_path_FileLink():
55 51 """FileLink: Calling _repr_html_ on non-existent files returns a warning
56 52 """
57 53 fl = display.FileLink('example.txt')
58 54 nt.assert_true(fl._repr_html_().startswith('Path (<tt>example.txt</tt>)'))
59 55
60 56 def test_existing_path_FileLink():
61 57 """FileLink: Calling _repr_html_ functions as expected on existing filepath
62 58 """
63 59 tf = NamedTemporaryFile()
64 60 fl = display.FileLink(tf.name)
65 61 actual = fl._repr_html_()
66 62 expected = "<a href='%s' target='_blank'>%s</a><br>" % (tf.name,tf.name)
67 63 nt.assert_equal(actual,expected)
68 64
69 65 def test_existing_path_FileLink_repr():
70 66 """FileLink: Calling repr() functions as expected on existing filepath
71 67 """
72 68 tf = NamedTemporaryFile()
73 69 fl = display.FileLink(tf.name)
74 70 actual = repr(fl)
75 71 expected = tf.name
76 72 nt.assert_equal(actual,expected)
77 73
78 74 def test_error_on_directory_to_FileLink():
79 75 """FileLink: Raises error when passed directory
80 76 """
81 77 td = mkdtemp()
82 78 nt.assert_raises(ValueError,display.FileLink,td)
83 79
84 80 #--------------------------
85 81 # FileLinks tests
86 82 #--------------------------
87 83
88 84 def test_instantiation_FileLinks():
89 85 """FileLinks: Test class can be instantiated
90 86 """
91 87 fls = display.FileLinks('example')
92 88
93 89 def test_warning_on_non_existent_path_FileLinks():
94 90 """FileLinks: Calling _repr_html_ on non-existent files returns a warning
95 91 """
96 92 fls = display.FileLinks('example')
97 93 nt.assert_true(fls._repr_html_().startswith('Path (<tt>example</tt>)'))
98 94
99 95 def test_existing_path_FileLinks():
100 96 """FileLinks: Calling _repr_html_ functions as expected on existing dir
101 97 """
102 98 td = mkdtemp()
103 99 tf1 = NamedTemporaryFile(dir=td)
104 100 tf2 = NamedTemporaryFile(dir=td)
105 101 fl = display.FileLinks(td)
106 102 actual = fl._repr_html_()
107 103 actual = actual.split('\n')
108 104 actual.sort()
109 105 # the links should always have forward slashes, even on windows, so replace
110 106 # backslashes with forward slashes here
111 107 expected = ["%s/<br>" % td,
112 108 "&nbsp;&nbsp;<a href='%s' target='_blank'>%s</a><br>" %\
113 109 (tf2.name.replace("\\","/"),split(tf2.name)[1]),
114 110 "&nbsp;&nbsp;<a href='%s' target='_blank'>%s</a><br>" %\
115 111 (tf1.name.replace("\\","/"),split(tf1.name)[1])]
116 112 expected.sort()
117 113 # We compare the sorted list of links here as that's more reliable
118 114 nt.assert_equal(actual,expected)
119 115
120 116 def test_existing_path_FileLinks_alt_formatter():
121 117 """FileLinks: Calling _repr_html_ functions as expected w/ an alt formatter
122 118 """
123 119 td = mkdtemp()
124 120 tf1 = NamedTemporaryFile(dir=td)
125 121 tf2 = NamedTemporaryFile(dir=td)
126 122 def fake_formatter(dirname,fnames,included_suffixes):
127 123 return ["hello","world"]
128 124 fl = display.FileLinks(td,notebook_display_formatter=fake_formatter)
129 125 actual = fl._repr_html_()
130 126 actual = actual.split('\n')
131 127 actual.sort()
132 128 expected = ["hello","world"]
133 129 expected.sort()
134 130 # We compare the sorted list of links here as that's more reliable
135 131 nt.assert_equal(actual,expected)
136 132
137 133 def test_existing_path_FileLinks_repr():
138 134 """FileLinks: Calling repr() functions as expected on existing directory """
139 135 td = mkdtemp()
140 136 tf1 = NamedTemporaryFile(dir=td)
141 137 tf2 = NamedTemporaryFile(dir=td)
142 138 fl = display.FileLinks(td)
143 139 actual = repr(fl)
144 140 actual = actual.split('\n')
145 141 actual.sort()
146 142 expected = ['%s/' % td, ' %s' % split(tf1.name)[1],' %s' % split(tf2.name)[1]]
147 143 expected.sort()
148 144 # We compare the sorted list of links here as that's more reliable
149 145 nt.assert_equal(actual,expected)
150 146
151 147 def test_existing_path_FileLinks_repr_alt_formatter():
152 148 """FileLinks: Calling repr() functions as expected w/ alt formatter
153 149 """
154 150 td = mkdtemp()
155 151 tf1 = NamedTemporaryFile(dir=td)
156 152 tf2 = NamedTemporaryFile(dir=td)
157 153 def fake_formatter(dirname,fnames,included_suffixes):
158 154 return ["hello","world"]
159 155 fl = display.FileLinks(td,terminal_display_formatter=fake_formatter)
160 156 actual = repr(fl)
161 157 actual = actual.split('\n')
162 158 actual.sort()
163 159 expected = ["hello","world"]
164 160 expected.sort()
165 161 # We compare the sorted list of links here as that's more reliable
166 162 nt.assert_equal(actual,expected)
167 163
168 164 def test_error_on_file_to_FileLinks():
169 165 """FileLinks: Raises error when passed file
170 166 """
171 167 td = mkdtemp()
172 168 tf1 = NamedTemporaryFile(dir=td)
173 169 nt.assert_raises(ValueError,display.FileLinks,tf1.name)
174 170
175 171 def test_recursive_FileLinks():
176 172 """FileLinks: Does not recurse when recursive=False
177 173 """
178 174 td = mkdtemp()
179 175 tf = NamedTemporaryFile(dir=td)
180 176 subtd = mkdtemp(dir=td)
181 177 subtf = NamedTemporaryFile(dir=subtd)
182 178 fl = display.FileLinks(td)
183 179 actual = str(fl)
184 180 actual = actual.split('\n')
185 181 nt.assert_equal(len(actual), 4, actual)
186 182 fl = display.FileLinks(td, recursive=False)
187 183 actual = str(fl)
188 184 actual = actual.split('\n')
189 185 nt.assert_equal(len(actual), 2, actual)
190 186
191 187 def test_audio_from_file():
192 188 path = pjoin(dirname(__file__), 'test.wav')
193 189 display.Audio(filename=path)
194 190
195 191 class TestAudioDataWithNumpy(TestCase):
196 192
197 193 @skipif_not_numpy
198 194 def test_audio_from_numpy_array(self):
199 195 test_tone = get_test_tone()
200 196 audio = display.Audio(test_tone, rate=44100)
201 197 nt.assert_equal(len(read_wav(audio.data)), len(test_tone))
202 198
203 199 @skipif_not_numpy
204 200 def test_audio_from_list(self):
205 201 test_tone = get_test_tone()
206 202 audio = display.Audio(list(test_tone), rate=44100)
207 203 nt.assert_equal(len(read_wav(audio.data)), len(test_tone))
208 204
209 205 @skipif_not_numpy
210 206 def test_audio_from_numpy_array_without_rate_raises(self):
211 207 nt.assert_raises(ValueError, display.Audio, get_test_tone())
212 208
213 209 @skipif_not_numpy
214 210 def test_audio_data_normalization(self):
215 211 expected_max_value = numpy.iinfo(numpy.int16).max
216 212 for scale in [1, 0.5, 2]:
217 213 audio = display.Audio(get_test_tone(scale), rate=44100)
218 214 actual_max_value = numpy.max(numpy.abs(read_wav(audio.data)))
219 215 nt.assert_equal(actual_max_value, expected_max_value)
220 216
221 217 @skipif_not_numpy
222 218 def test_audio_data_without_normalization(self):
223 219 max_int16 = numpy.iinfo(numpy.int16).max
224 220 for scale in [1, 0.5, 0.2]:
225 221 test_tone = get_test_tone(scale)
226 222 test_tone_max_abs = numpy.max(numpy.abs(test_tone))
227 223 expected_max_value = int(max_int16 * test_tone_max_abs)
228 224 audio = display.Audio(test_tone, rate=44100, normalize=False)
229 225 actual_max_value = numpy.max(numpy.abs(read_wav(audio.data)))
230 226 nt.assert_equal(actual_max_value, expected_max_value)
231 227
232 228 def test_audio_data_without_normalization_raises_for_invalid_data(self):
233 229 nt.assert_raises(
234 230 ValueError,
235 231 lambda: display.Audio([1.001], rate=44100, normalize=False))
236 232 nt.assert_raises(
237 233 ValueError,
238 234 lambda: display.Audio([-1.001], rate=44100, normalize=False))
239 235
240 236 def simulate_numpy_not_installed():
241 237 try:
242 238 import numpy
243 239 return mock.patch('numpy.array', mock.MagicMock(side_effect=ImportError))
244 240 except ModuleNotFoundError:
245 241 return lambda x:x
246 242
247 243 @simulate_numpy_not_installed()
248 244 class TestAudioDataWithoutNumpy(TestAudioDataWithNumpy):
249 245 # All tests from `TestAudioDataWithNumpy` are inherited.
250 246
251 247 @skipif_not_numpy
252 248 def test_audio_raises_for_nested_list(self):
253 249 stereo_signal = [list(get_test_tone())] * 2
254 250 nt.assert_raises(
255 251 TypeError,
256 252 lambda: display.Audio(stereo_signal, rate=44100))
257 253
258 254 @skipif_not_numpy
259 255 def get_test_tone(scale=1):
260 256 return numpy.sin(2 * numpy.pi * 440 * numpy.linspace(0, 1, 44100)) * scale
261 257
262 258 def read_wav(data):
263 259 with wave.open(BytesIO(data)) as wave_file:
264 260 wave_data = wave_file.readframes(wave_file.getnframes())
265 261 num_samples = wave_file.getnframes() * wave_file.getnchannels()
266 262 return struct.unpack('<%sh' % num_samples, wave_data)
267 263
268 264 def test_code_from_file():
269 265 c = display.Code(filename=__file__)
270 266 assert c._repr_html_().startswith('<style>')
@@ -1,71 +1,71
1 1 # coding: utf-8
2 2 """
3 3 Utilities for dealing with text encodings
4 4 """
5 5
6 6 #-----------------------------------------------------------------------------
7 7 # Copyright (C) 2008-2012 The IPython Development Team
8 8 #
9 9 # Distributed under the terms of the BSD License. The full license is in
10 10 # the file COPYING, distributed as part of this software.
11 11 #-----------------------------------------------------------------------------
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Imports
15 15 #-----------------------------------------------------------------------------
16 16 import sys
17 17 import locale
18 18 import warnings
19 19
20 20 # to deal with the possibility of sys.std* not being a stream at all
21 21 def get_stream_enc(stream, default=None):
22 22 """Return the given stream's encoding or a default.
23 23
24 24 There are cases where ``sys.std*`` might not actually be a stream, so
25 25 check for the encoding attribute prior to returning it, and return
26 26 a default if it doesn't exist or evaluates to False. ``default``
27 27 is None if not provided.
28 28 """
29 29 if not hasattr(stream, 'encoding') or not stream.encoding:
30 30 return default
31 31 else:
32 32 return stream.encoding
33 33
34 34 # Less conservative replacement for sys.getdefaultencoding, that will try
35 35 # to match the environment.
36 36 # Defined here as central function, so if we find better choices, we
37 37 # won't need to make changes all over IPython.
38 38 def getdefaultencoding(prefer_stream=True):
39 39 """Return IPython's guess for the default encoding for bytes as text.
40 40
41 41 If prefer_stream is True (default), asks for stdin.encoding first,
42 42 to match the calling Terminal, but that is often None for subprocesses.
43 43
44 44 Then fall back on locale.getpreferredencoding(),
45 45 which should be a sensible platform default (one that respects the LANG environment variable),
46 46 and finally to sys.getdefaultencoding() which is the most conservative option,
47 and usually ASCII on Python 2 or UTF8 on Python 3.
47 and usually UTF8 as of Python 3.
48 48 """
49 49 enc = None
50 50 if prefer_stream:
51 51 enc = get_stream_enc(sys.stdin)
52 52 if not enc or enc=='ascii':
53 53 try:
54 54 # There are reports of getpreferredencoding raising errors
55 55 # in some cases, which may well be fixed, but let's be conservative here.
56 56 enc = locale.getpreferredencoding()
57 57 except Exception:
58 58 pass
59 59 enc = enc or sys.getdefaultencoding()
60 60 # On windows `cp0` can be returned to indicate that there is no code page.
61 61 # Since cp0 is an invalid encoding return instead cp1252 which is the
62 62 # Western European default.
63 63 if enc == 'cp0':
64 64 warnings.warn(
65 65 "Invalid code page cp0 detected - using cp1252 instead."
66 66 "If cp1252 is incorrect please ensure a valid code page "
67 67 "is defined for the process.", RuntimeWarning)
68 68 return 'cp1252'
69 69 return enc
70 70
71 71 DEFAULT_ENCODING = getdefaultencoding()
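
To see the fallback chain that getdefaultencoding walks through, the sketch below prints each candidate in order. Actual values depend on the platform and locale, and the import path is again assumed to be IPython.utils.encoding.

import locale
import sys

from IPython.utils.encoding import DEFAULT_ENCODING, get_stream_enc, getdefaultencoding

# The three candidates, in the order the function tries them:
print(get_stream_enc(sys.stdin))        # often None or 'ascii' for subprocesses
print(locale.getpreferredencoding())    # locale-aware platform default, e.g. 'UTF-8'
print(sys.getdefaultencoding())         # 'utf-8' on Python 3

# getdefaultencoding() returns the first usable value; the module computes it once.
print(getdefaultencoding(), DEFAULT_ENCODING)
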
@@ -1,105 +1,103
1 1 """
2 2 Tools to open .py files as Unicode, using the encoding specified within the file,
3 3 as per PEP 263.
4 4
5 5 Much of the code is taken from the tokenize module in Python 3.2.
6 6 """
7 7
8 8 import io
9 9 from io import TextIOWrapper, BytesIO
10 10 import re
11 11 from tokenize import open, detect_encoding
12 12
13 13 cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)", re.UNICODE)
14 14 cookie_comment_re = re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)", re.UNICODE)
15 15
16 16 def source_to_unicode(txt, errors='replace', skip_encoding_cookie=True):
17 17 """Converts a bytes string with python source code to unicode.
18 18
19 19 Unicode strings are passed through unchanged. Byte strings are checked
20 20 for the python source file encoding cookie to determine encoding.
21 21 txt can be either a bytes buffer or a string containing the source
22 22 code.
23 23 """
24 24 if isinstance(txt, str):
25 25 return txt
26 26 if isinstance(txt, bytes):
27 27 buffer = BytesIO(txt)
28 28 else:
29 29 buffer = txt
30 30 try:
31 31 encoding, _ = detect_encoding(buffer.readline)
32 32 except SyntaxError:
33 33 encoding = "ascii"
34 34 buffer.seek(0)
35 35 with TextIOWrapper(buffer, encoding, errors=errors, line_buffering=True) as text:
36 36 text.mode = 'r'
37 37 if skip_encoding_cookie:
38 38 return u"".join(strip_encoding_cookie(text))
39 39 else:
40 40 return text.read()
41 41
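
A minimal sketch of source_to_unicode in action, assuming the module is importable as IPython.utils.openpy: a latin-1 byte string carrying a PEP 263 cookie is decoded with the declared encoding, and the cookie line is dropped by default.

from IPython.utils.openpy import source_to_unicode

# Latin-1 bytes with an encoding cookie on the first line.
src = b"# -*- coding: latin-1 -*-\nname = '\xe9'\n"

# The cookie is honoured for decoding and stripped from the result.
print(source_to_unicode(src))   # -> "name = 'é'\n"
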
42 42 def strip_encoding_cookie(filelike):
43 43 """Generator to pull lines from a text-mode file, skipping the encoding
44 44 cookie if it is found in the first two lines.
45 45 """
46 46 it = iter(filelike)
47 47 try:
48 48 first = next(it)
49 49 if not cookie_comment_re.match(first):
50 50 yield first
51 51 second = next(it)
52 52 if not cookie_comment_re.match(second):
53 53 yield second
54 54 except StopIteration:
55 55 return
56 56
57 57 for line in it:
58 58 yield line
59 59
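
Because strip_encoding_cookie only inspects the first two lines, a shebang line is preserved while the cookie itself is skipped. A small illustrative sketch:

from io import StringIO

from IPython.utils.openpy import strip_encoding_cookie

text = StringIO("#!/usr/bin/env python\n# coding: utf-8\nprint('hi')\n")
print(list(strip_encoding_cookie(text)))
# -> ['#!/usr/bin/env python\n', "print('hi')\n"]
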
60 60 def read_py_file(filename, skip_encoding_cookie=True):
61 61 """Read a Python file, using the encoding declared inside the file.
62 62
63 63 Parameters
64 64 ----------
65 65 filename : str
66 66 The path to the file to read.
67 67 skip_encoding_cookie : bool
68 68 If True (the default), and the encoding declaration is found in the first
69 two lines, that line will be excluded from the output - compiling a
70 unicode string with an encoding declaration is a SyntaxError in Python 2.
69 two lines, that line will be excluded from the output.
71 70
72 71 Returns
73 72 -------
74 73 A unicode string containing the contents of the file.
75 74 """
76 75 with open(filename) as f: # the open function defined in this module.
77 76 if skip_encoding_cookie:
78 77 return "".join(strip_encoding_cookie(f))
79 78 else:
80 79 return f.read()
81 80
82 81 def read_py_url(url, errors='replace', skip_encoding_cookie=True):
83 82 """Read a Python file from a URL, using the encoding declared inside the file.
84 83
85 84 Parameters
86 85 ----------
87 86 url : str
88 87 The URL from which to fetch the file.
89 88 errors : str
90 89 How to handle decoding errors in the file. Options are the same as for
91 90 bytes.decode(), but here 'replace' is the default.
92 91 skip_encoding_cookie : bool
93 92 If True (the default), and the encoding declaration is found in the first
94 two lines, that line will be excluded from the output - compiling a
95 unicode string with an encoding declaration is a SyntaxError in Python 2.
93 two lines, that line will be excluded from the output.
96 94
97 95 Returns
98 96 -------
99 97 A unicode string containing the contents of the file.
100 98 """
101 99 # Deferred import for faster start
102 100 from urllib.request import urlopen
103 101 response = urlopen(url)
104 102 buffer = io.BytesIO(response.read())
105 103 return source_to_unicode(buffer, errors, skip_encoding_cookie)
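
Both read_py_file and read_py_url rely on the same cookie handling shown above; a hedged usage sketch follows, where the file path is purely hypothetical.

from IPython.utils.openpy import read_py_file

# Read a .py file using whatever encoding its cookie (if any) declares;
# the cookie line itself is dropped from the returned text by default.
source = read_py_file('some_module.py')   # hypothetical path
print(source.splitlines()[:3])
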
@@ -1,39 +1,38
1 1 #!/usr/bin/env python
2 2 """Extract a session from the IPython input history.
3 3
4 4 Usage:
5 5 ipython-get-history.py sessionnumber [outputfile]
6 6
7 7 If outputfile is not given, the relevant history is written to stdout. If
8 8 outputfile has a .py extension, the translated history (without IPython's
9 9 special syntax) will be extracted.
10 10
11 11 Example:
12 12 ./ipython-get-history.py 57 record.ipy
13 13
14 14
15 15 This script is a simple demonstration of HistoryAccessor. It should be possible
16 16 to build much more flexible and powerful tools to browse and pull from the
17 17 history database.
18 18 """
19 19 import sys
20 20
21 21 from IPython.core.history import HistoryAccessor
22 22
23 23 session_number = int(sys.argv[1])
24 24 if len(sys.argv) > 2:
25 25 dest = open(sys.argv[2], "w")
26 26 raw = not sys.argv[2].endswith('.py')
27 27 else:
28 28 dest = sys.stdout
29 29 raw = True
30 30
31 31 with dest:
32 32 dest.write("# coding: utf-8\n")
33 33
34 34 # Profiles other than 'default' can be specified here with a profile= argument:
35 35 hist = HistoryAccessor()
36 36
37 37 for session, lineno, cell in hist.get_range(session=session_number, raw=raw):
38 cell = cell.encode('utf-8') # This line is only needed on Python 2.
39 38 dest.write(cell + '\n')
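
Beyond this one-shot script, HistoryAccessor can also be queried directly; a brief sketch, assuming a populated 'default' profile history database exists on the machine.

from IPython.core.history import HistoryAccessor

hist = HistoryAccessor(profile='default')

# Print the last three raw input cells from the most recent sessions.
for session, lineno, cell in hist.get_tail(3, raw=True, include_latest=True):
    print("[%s/%s] %s" % (session, lineno, cell))
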