Show More
The requested changes are too big and content was truncated. Show full diff
@@ -1,257 +1,256 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """ |
|
3 | 3 | System command aliases. |
|
4 | 4 | |
|
5 | 5 | Authors: |
|
6 | 6 | |
|
7 | 7 | * Fernando Perez |
|
8 | 8 | * Brian Granger |
|
9 | 9 | """ |
|
10 | 10 | |
|
11 | 11 | #----------------------------------------------------------------------------- |
|
12 | 12 | # Copyright (C) 2008-2011 The IPython Development Team |
|
13 | 13 | # |
|
14 | 14 | # Distributed under the terms of the BSD License. |
|
15 | 15 | # |
|
16 | 16 | # The full license is in the file COPYING.txt, distributed with this software. |
|
17 | 17 | #----------------------------------------------------------------------------- |
|
18 | 18 | |
|
19 | 19 | #----------------------------------------------------------------------------- |
|
20 | 20 | # Imports |
|
21 | 21 | #----------------------------------------------------------------------------- |
|
22 | 22 | |
|
23 | 23 | import os |
|
24 | 24 | import re |
|
25 | 25 | import sys |
|
26 | 26 | |
|
27 | 27 | from traitlets.config.configurable import Configurable |
|
28 | 28 | from IPython.core.error import UsageError |
|
29 | 29 | |
|
30 | from IPython.utils.py3compat import string_types | |
|
31 | 30 | from traitlets import List, Instance |
|
32 | 31 | from logging import error |
|
33 | 32 | |
|
34 | 33 | #----------------------------------------------------------------------------- |
|
35 | 34 | # Utilities |
|
36 | 35 | #----------------------------------------------------------------------------- |
|
37 | 36 | |
|
# This is used as the pattern for calls to split_user_input.
# Groups: (leading whitespace)(empty placeholder)(first word)(rest of line).
shell_line_split = re.compile(r'^(\s*)()(\S+)(.*$)')
|
40 | 39 | |
|
def default_aliases():
    """Return the list of ``(name, system command)`` aliases to auto-define.

    The concrete list depends on ``os.name`` and, for the ``ls`` family, on
    the platform's ``ls`` feature set (GNU vs. BSD option spelling).

    Returns
    -------
    list of (str, str)
        Alias name / shell command pairs; empty on unrecognized platforms.
    """
    # Note: the aliases defined here should be safe to use on a kernel
    # regardless of what frontend it is attached to.  Frontends that use a
    # kernel in-process can define additional aliases that will only work in
    # their case.  For example, things like 'less' or 'clear' that manipulate
    # the terminal should NOT be declared here, as they will only work if the
    # kernel is running inside a true terminal, and not over the network.

    if os.name == 'posix':
        # Local renamed from `default_aliases` so it no longer shadows this
        # function's own name.
        aliases = [('mkdir', 'mkdir'), ('rmdir', 'rmdir'),
                   ('mv', 'mv'), ('rm', 'rm'), ('cp', 'cp'),
                   ('cat', 'cat'),
                   ]
        # Useful set of ls aliases.  The GNU and BSD options are a little
        # different, so we make aliases that provide as similar as possible
        # behavior in ipython, by passing the right flags for each platform.
        if sys.platform.startswith('linux'):
            ls_aliases = [('ls', 'ls -F --color'),
                          # long ls
                          ('ll', 'ls -F -o --color'),
                          # ls normal files only
                          ('lf', 'ls -F -o --color %l | grep ^-'),
                          # ls symbolic links
                          ('lk', 'ls -F -o --color %l | grep ^l'),
                          # directories or links to directories,
                          ('ldir', 'ls -F -o --color %l | grep /$'),
                          # things which are executable
                          ('lx', 'ls -F -o --color %l | grep ^-..x'),
                          ]
        elif sys.platform.startswith('openbsd') or sys.platform.startswith('netbsd'):
            # OpenBSD, NetBSD.  The ls implementation on these platforms does
            # not support the -G switch and lacks colorized output.
            ls_aliases = [('ls', 'ls -F'),
                          # long ls
                          ('ll', 'ls -F -l'),
                          # ls normal files only
                          ('lf', 'ls -F -l %l | grep ^-'),
                          # ls symbolic links
                          ('lk', 'ls -F -l %l | grep ^l'),
                          # directories or links to directories,
                          ('ldir', 'ls -F -l %l | grep /$'),
                          # things which are executable
                          ('lx', 'ls -F -l %l | grep ^-..x'),
                          ]
        else:
            # BSD, OSX, etc.
            ls_aliases = [('ls', 'ls -F -G'),
                          # long ls
                          ('ll', 'ls -F -l -G'),
                          # ls normal files only
                          ('lf', 'ls -F -l -G %l | grep ^-'),
                          # ls symbolic links
                          ('lk', 'ls -F -l -G %l | grep ^l'),
                          # directories or links to directories,
                          ('ldir', 'ls -F -G -l %l | grep /$'),
                          # things which are executable
                          ('lx', 'ls -F -l -G %l | grep ^-..x'),
                          ]
        aliases = aliases + ls_aliases
    elif os.name in ['nt', 'dos']:
        aliases = [('ls', 'dir /on'),
                   ('ddir', 'dir /ad /on'), ('ldir', 'dir /ad /on'),
                   ('mkdir', 'mkdir'), ('rmdir', 'rmdir'),
                   ('echo', 'echo'), ('ren', 'ren'), ('copy', 'copy'),
                   ]
    else:
        aliases = []

    return aliases
|
112 | 111 | |
|
113 | 112 | |
|
class AliasError(Exception):
    """Base class for errors raised while defining or using aliases."""
    pass
|
116 | 115 | |
|
117 | 116 | |
|
class InvalidAliasError(AliasError):
    """Raised when an alias fails validation (blacklisted name, non-string
    command, or conflicting %s/%l specifiers)."""
    pass
|
120 | 119 | |
|
class Alias(object):
    """Callable object storing the details of one alias.

    Instances are registered as magic functions to allow use of aliases.
    Calling an instance expands its stored command with the user-supplied
    arguments and runs it through ``shell.system``.
    """

    # Names that may never be aliased because they are IPython keywords or
    # builtin magics.
    blacklist = {'cd','popd','pushd','dhist','alias','unalias'}

    def __init__(self, shell, name, cmd):
        """Store the alias and validate it immediately.

        Parameters
        ----------
        shell : InteractiveShell
            The shell whose ``system`` method will execute the command.
        name : str
            The alias name the user will type.
        cmd : str
            The system command to run; may contain %s / %l placeholders.

        Raises
        ------
        InvalidAliasError
            If validation fails (see :meth:`validate`).
        """
        self.shell = shell
        self.name = name
        self.cmd = cmd
        self.__doc__ = "Alias for `!{}`".format(cmd)
        # nargs is the number of positional %s slots the command expects.
        self.nargs = self.validate()

    def validate(self):
        """Validate the alias, and return the number of arguments."""
        if self.name in self.blacklist:
            raise InvalidAliasError("The name %s can't be aliased "
                                    "because it is a keyword or builtin." % self.name)
        try:
            caller = self.shell.magics_manager.magics['line'][self.name]
        except KeyError:
            pass
        else:
            # Only refuse to shadow magics that are not themselves aliases;
            # redefining an existing alias is allowed.
            if not isinstance(caller, Alias):
                raise InvalidAliasError("The name %s can't be aliased "
                                        "because it is another magic command." % self.name)

        if not isinstance(self.cmd, str):
            raise InvalidAliasError("An alias command must be a string, "
                                    "got: %r" % self.cmd)

        # '%%s' is an escaped literal %s, so it must not count as a slot.
        nargs = self.cmd.count('%s') - self.cmd.count('%%s')

        if (nargs > 0) and (self.cmd.find('%l') >= 0):
            raise InvalidAliasError('The %s and %l specifiers are mutually '
                                    'exclusive in alias definitions.')

        return nargs

    def __repr__(self):
        return "<alias {} for {!r}>".format(self.name, self.cmd)

    def __call__(self, rest=''):
        """Execute the alias, substituting *rest* into the stored command."""
        cmd = self.cmd
        nargs = self.nargs
        # Expand the %l special to be the user's input line
        if cmd.find('%l') >= 0:
            cmd = cmd.replace('%l', rest)
            rest = ''

        if nargs==0:
            # Un-escape '%%s' back to a literal '%s'.  BUG FIX: str.find
            # returns 0 when the pattern is at the start of the string, so
            # the old test `>= 1` skipped un-escaping a leading '%%s'.
            if cmd.find('%%s') != -1:
                cmd = cmd.replace('%%s', '%s')
            # Simple, argument-less aliases
            cmd = '%s %s' % (cmd, rest)
        else:
            # Handle aliases with positional arguments
            args = rest.split(None, nargs)
            if len(args) < nargs:
                raise UsageError('Alias <%s> requires %s arguments, %s given.' %
                                 (self.name, nargs, len(args)))
            cmd = '%s %s' % (cmd % tuple(args[:nargs]), ' '.join(args[nargs:]))

        self.shell.system(cmd)
|
188 | 187 | |
|
189 | 188 | #----------------------------------------------------------------------------- |
|
190 | 189 | # Main AliasManager class |
|
191 | 190 | #----------------------------------------------------------------------------- |
|
192 | 191 | |
|
class AliasManager(Configurable):
    """Manage the set of shell aliases registered as line magics."""

    default_aliases = List(default_aliases()).tag(config=True)
    user_aliases = List(default_value=[]).tag(config=True)
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)

    def __init__(self, shell=None, **kwargs):
        super(AliasManager, self).__init__(shell=shell, **kwargs)
        # Keep a direct handle on the line-magics table for quick lookups.
        self.linemagics = self.shell.magics_manager.magics['line']
        self.init_aliases()

    def init_aliases(self):
        """Register every default and user-configured alias."""
        for name, cmd in self.default_aliases + self.user_aliases:
            self.soft_define_alias(name, cmd)

    @property
    def aliases(self):
        """All currently registered aliases as (name, command) pairs."""
        pairs = []
        for name, magic in self.linemagics.items():
            if isinstance(magic, Alias):
                pairs.append((name, magic.cmd))
        return pairs

    def soft_define_alias(self, name, cmd):
        """Define an alias, but don't raise on an AliasError."""
        try:
            self.define_alias(name, cmd)
        except AliasError as e:
            error("Invalid alias: %s" % e)

    def define_alias(self, name, cmd):
        """Define a new alias after validating it.

        This will raise an :exc:`AliasError` if there are validation
        problems.
        """
        alias_obj = Alias(shell=self.shell, name=name, cmd=cmd)
        self.shell.magics_manager.register_function(alias_obj, magic_kind='line',
                                                    magic_name=name)

    def get_alias(self, name):
        """Return an alias, or None if no alias by that name exists."""
        candidate = self.linemagics.get(name, None)
        if isinstance(candidate, Alias):
            return candidate
        return None

    def is_alias(self, name):
        """Return whether or not a given name has been defined as an alias"""
        return self.get_alias(name) is not None

    def undefine_alias(self, name):
        """Remove an alias; raise ValueError if *name* is not an alias."""
        if not self.is_alias(name):
            raise ValueError('%s is not an alias' % name)
        del self.linemagics[name]

    def clear_aliases(self):
        """Unregister every currently defined alias."""
        for name, _cmd in self.aliases:
            self.undefine_alias(name)

    def retrieve_alias(self, name):
        """Retrieve the command to which an alias expands."""
        found = self.get_alias(name)
        if found is None:
            raise ValueError('%s is not an alias' % name)
        return found.cmd
@@ -1,1229 +1,1229 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """Word completion for IPython. |
|
3 | 3 | |
|
4 | 4 | This module started as fork of the rlcompleter module in the Python standard |
|
5 | 5 | library. The original enhancements made to rlcompleter have been sent |
|
6 | 6 | upstream and were accepted as of Python 2.3, |
|
7 | 7 | |
|
8 | 8 | """ |
|
9 | 9 | |
|
10 | 10 | # Copyright (c) IPython Development Team. |
|
11 | 11 | # Distributed under the terms of the Modified BSD License. |
|
12 | 12 | # |
|
13 | 13 | # Some of this code originated from rlcompleter in the Python standard library |
|
14 | 14 | # Copyright (C) 2001 Python Software Foundation, www.python.org |
|
15 | 15 | |
|
16 | 16 | |
|
17 | 17 | import __main__ |
|
18 | 18 | import glob |
|
19 | 19 | import inspect |
|
20 | 20 | import itertools |
|
21 | 21 | import keyword |
|
22 | 22 | import os |
|
23 | 23 | import re |
|
24 | 24 | import sys |
|
25 | 25 | import unicodedata |
|
26 | 26 | import string |
|
27 | 27 | import warnings |
|
28 | 28 | from importlib import import_module |
|
29 | 29 | |
|
30 | 30 | from traitlets.config.configurable import Configurable |
|
31 | 31 | from IPython.core.error import TryNext |
|
32 | 32 | from IPython.core.inputsplitter import ESC_MAGIC |
|
33 | 33 | from IPython.core.latex_symbols import latex_symbols, reverse_latex_symbol |
|
34 | 34 | from IPython.utils import generics |
|
35 | 35 | from IPython.utils.decorators import undoc |
|
36 | 36 | from IPython.utils.dir2 import dir2, get_real_method |
|
37 | 37 | from IPython.utils.process import arg_split |
|
38 |
from IPython.utils.py3compat import builtin_mod, |
|
|
38 | from IPython.utils.py3compat import builtin_mod, PY3, cast_unicode_py2 | |
|
39 | 39 | from traitlets import Bool, Enum, observe |
|
40 | 40 | |
|
41 | 41 | from functools import wraps |
|
42 | 42 | |
|
43 | 43 | #----------------------------------------------------------------------------- |
|
44 | 44 | # Globals |
|
45 | 45 | #----------------------------------------------------------------------------- |
|
46 | 46 | |
|
# Public API
__all__ = ['Completer','IPCompleter']

# Characters that must be escaped (or the whole name quoted, on Windows)
# when they appear in a completed filename — see protect_filename().
# NOTE(review): the Windows set is just a space, presumably because names
# are wrapped in double quotes there instead of per-char escaping — confirm.
if sys.platform == 'win32':
    PROTECTABLES = ' '
else:
    PROTECTABLES = ' ()[]{}?=\\|;:\'#*"^&'
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | #----------------------------------------------------------------------------- |
|
57 | 57 | # Work around BUG decorators. |
|
58 | 58 | #----------------------------------------------------------------------------- |
|
59 | 59 | |
|
60 | 60 | def _strip_single_trailing_space(complete): |
|
61 | 61 | """ |
|
62 | 62 | This is a workaround for a weird IPython/Prompt_toolkit behavior, |
|
63 | 63 | that can be removed once we rely on a slightly more recent prompt_toolkit |
|
64 | 64 | version (likely > 1.0.3). So this can likely be removed in IPython 6.0 |
|
65 | 65 | |
|
66 | 66 | cf https://github.com/ipython/ipython/issues/9658 |
|
67 | 67 | and https://github.com/jonathanslenders/python-prompt-toolkit/pull/328 |
|
68 | 68 | |
|
69 | 69 | The bug is due to the fact that in PTK the completer will reinvoke itself |
|
70 | 70 | after trying to completer to the longuest common prefix of all the |
|
71 | 71 | completions, unless only one completion is available. |
|
72 | 72 | |
|
73 | 73 | This logic is faulty if the completion ends with space, which can happen in |
|
74 | 74 | case like:: |
|
75 | 75 | |
|
76 | 76 | from foo import im<ta> |
|
77 | 77 | |
|
78 | 78 | which only matching completion is `import `. Note the leading space at the |
|
79 | 79 | end. So leaving a space at the end is a reasonable request, but for now |
|
80 | 80 | we'll strip it. |
|
81 | 81 | """ |
|
82 | 82 | |
|
83 | 83 | @wraps(complete) |
|
84 | 84 | def comp(*args, **kwargs): |
|
85 | 85 | text, matches = complete(*args, **kwargs) |
|
86 | 86 | if len(matches) == 1: |
|
87 | 87 | return text, [matches[0].rstrip()] |
|
88 | 88 | return text, matches |
|
89 | 89 | |
|
90 | 90 | return comp |
|
91 | 91 | |
|
92 | 92 | |
|
93 | 93 | |
|
94 | 94 | #----------------------------------------------------------------------------- |
|
95 | 95 | # Main functions and classes |
|
96 | 96 | #----------------------------------------------------------------------------- |
|
97 | 97 | |
|
def has_open_quotes(s):
    """Return the quote character left open in *s*, or False.

    A quote type is considered open when the string contains an odd number
    of that character.

    Returns
    -------
    The open quote character (``"`` or ``'``), or False when neither is open.
    """
    # Check " before ' so nested-quote cases resolve to " first.
    for quote_char in ('"', "'"):
        if s.count(quote_char) % 2:
            return quote_char
    return False
|
117 | 117 | |
|
118 | 118 | |
|
def protect_filename(s):
    """Escape a string to protect certain characters."""
    # Nothing to protect: return the name untouched.
    if not (set(s) & set(PROTECTABLES)):
        return s
    # Windows: wrap the whole name in double quotes.
    if sys.platform == "win32":
        return '"' + s + '"'
    # POSIX: backslash-escape each protectable character individually.
    return "".join(("\\" + c if c in PROTECTABLES else c) for c in s)
|
128 | 128 | |
|
129 | 129 | |
|
def expand_user(path):
    """Expand '~'-style usernames in strings.

    This is similar to :func:`os.path.expanduser`, but it computes and returns
    extra information that will be useful if the input was being used in
    computing completions, and you wish to return the completions with the
    original '~' instead of its expanded value.

    Parameters
    ----------
    path : str
        String to be expanded.  If no ~ is present, the output is the same as
        the input.

    Returns
    -------
    newpath : str
        Result of ~ expansion in the input path.
    tilde_expand : bool
        Whether any expansion was performed or not.
    tilde_val : str
        The value that ~ was replaced with.
    """
    # No tilde prefix: nothing to expand.
    if not path.startswith('~'):
        return path, False, ''

    expanded = os.path.expanduser(path)
    # Length of everything after the leading '~'; used to peel the expansion
    # value off the front of the expanded path.
    suffix_len = len(path) - 1
    if suffix_len:
        tilde_val = expanded[:-suffix_len]
    else:
        tilde_val = expanded
    return expanded, True, tilde_val
|
168 | 168 | |
|
169 | 169 | |
|
def compress_user(path, tilde_expand, tilde_val):
    """Inverse of expand_user: re-abbreviate *tilde_val* back to '~'.

    Takes the three outputs of :func:`expand_user` and undoes the expansion
    when one was performed.
    """
    if not tilde_expand:
        return path
    return path.replace(tilde_val, '~')
|
177 | 177 | |
|
178 | 178 | |
|
def completions_sorting_key(word):
    """Sort key for completion candidates.

    Several effects at once:

    - lowercases everything, so upper/lower case words sort together;
    - pushes underscore-prefixed names (and dunders even further) to the end;
    - promotes names ending in '=' (keyword arguments) to the front;
    - slots %magic / %%cellmagic names into alphabetical order by bare name.
    """
    # Case insensitive sort
    word = word.lower()

    underscore_prio = 0
    magic_prio = 0

    if word.startswith('__'):
        underscore_prio = 2
    elif word.startswith('_'):
        underscore_prio = 1

    # Keyword-argument completions (trailing '=') sort first.
    if word.endswith('='):
        underscore_prio = -1

    if word.startswith('%%'):
        # A further % inside means this is something else entirely —
        # leave it untouched.
        if '%' not in word[2:]:
            word = word[2:]
            magic_prio = 2
    elif word.startswith('%'):
        if '%' not in word[1:]:
            word = word[1:]
            magic_prio = 1

    return underscore_prio, word, magic_prio
|
214 | 214 | |
|
215 | 215 | |
|
# Minimal attribute container: callers attach arbitrary attributes to
# instances (no behavior of its own).
@undoc
class Bunch(object): pass
|
218 | 218 | |
|
219 | 219 | |
|
# Word-splitting delimiters used by CompletionSplitter.
# NOTE(review): the Windows set omits backslash and ':', presumably because
# they occur inside Windows paths — confirm.
if sys.platform == 'win32':
    DELIMS = ' \t\n`!@#$^&*()=+[{]}|;\'",<>?'
else:
    DELIMS = ' \t\n`!@#$^&*()=+[{]}\\|;:\'",<>?'

# Delimiters used in greedy mode: only whitespace-ish and '='.
GREEDY_DELIMS = ' =\r\n'
|
226 | 226 | |
|
227 | 227 | |
|
class CompletionSplitter(object):
    """Split an input line the way readline would, for completion purposes.

    Having our own implementation lets every frontend get readline-like
    completion uniformly.  Given a line of text and a cursor position, this
    object returns the 'word' under the cursor after splitting the line.

    The set of characters treated as delimiters is controlled through the
    `delims` property, which transparently rebuilds the internal regular
    expression whenever it is assigned."""

    # Private interface

    # A string of delimiter characters.  The default value makes sense for
    # IPython's most typical usage patterns.
    _delims = DELIMS

    # Source expression (a plain string) of the splitting regex.  Stored
    # mostly to ease debugging of this notoriously tricky kind of code.
    _delim_expr = None

    # The compiled regular expression that does the actual splitting.
    _delim_re = None

    def __init__(self, delims=None):
        if delims is None:
            delims = CompletionSplitter._delims
        self.delims = delims

    @property
    def delims(self):
        """The string of delimiter characters currently in use."""
        return self._delims

    @delims.setter
    def delims(self, delims):
        """Install a new delimiter string and rebuild the splitting regex."""
        escaped = ''.join('\\' + ch for ch in delims)
        expr = '[' + escaped + ']'
        self._delim_re = re.compile(expr)
        self._delims = delims
        self._delim_expr = expr

    def split_line(self, line, cursor_pos=None):
        """Return the word being completed at *cursor_pos* in *line*."""
        if cursor_pos is not None:
            line = line[:cursor_pos]
        # The last fragment after splitting on delimiters is the word
        # immediately to the left of the cursor.
        return self._delim_re.split(line)[-1]
|
277 | 277 | |
|
278 | 278 | |
|
class Completer(Configurable):
    """Basic name/attribute completer over a local and a global namespace."""

    greedy = Bool(False,
        help="""Activate greedy completion
        PENDING DEPRECTION. this is now mostly taken care of with Jedi.

        This will enable completion on elements of lists, results of function calls, etc.,
        but can be unsafe because the code is actually evaluated on TAB.
        """
    ).tag(config=True)


    def __init__(self, namespace=None, global_namespace=None, **kwargs):
        """Create a new completer for the command line.

        Completer(namespace=ns, global_namespace=ns2) -> completer instance.

        If unspecified, the default namespace where completions are performed
        is __main__ (technically, __main__.__dict__). Namespaces should be
        given as dictionaries.

        An optional second namespace can be given.  This allows the completer
        to handle cases where both the local and global scopes need to be
        distinguished.

        Completer instances should be used as the completion mechanism of
        readline via the set_completer() call:

        readline.set_completer(Completer(my_namespace).complete)
        """

        # Don't bind to namespace quite yet, but flag whether the user wants a
        # specific namespace or to use __main__.__dict__. This will allow us
        # to bind to __main__.__dict__ at completion time, not now.
        if namespace is None:
            self.use_main_ns = 1
        else:
            self.use_main_ns = 0
            self.namespace = namespace

        # The global namespace, if given, can be bound directly
        if global_namespace is None:
            self.global_namespace = {}
        else:
            self.global_namespace = global_namespace

        super(Completer, self).__init__(**kwargs)

    def complete(self, text, state):
        """Return the next possible completion for 'text'.

        This is called successively with state == 0, 1, 2, ... until it
        returns None.  The completion should begin with 'text'.
        """
        if self.use_main_ns:
            # Late binding: pick up whatever __main__ holds *now*.
            self.namespace = __main__.__dict__

        # state == 0 is the first call for this text: compute all matches
        # once and cache them; later states just index into the cache.
        if state == 0:
            if "." in text:
                self.matches = self.attr_matches(text)
            else:
                self.matches = self.global_matches(text)
        try:
            return self.matches[state]
        except IndexError:
            return None

    def global_matches(self, text):
        """Compute matches when text is a simple name.

        Return a list of all keywords, built-in functions and names currently
        defined in self.namespace or self.global_namespace that match.
        """
        matches = []
        match_append = matches.append
        n = len(text)
        for lst in [keyword.kwlist,
                    builtin_mod.__dict__.keys(),
                    self.namespace.keys(),
                    self.global_namespace.keys()]:
            for word in lst:
                if word[:n] == text and word != "__builtins__":
                    match_append(word)
        # All candidates are already str on Python 3, so the former
        # cast_unicode_py2() pass-through was an identity and is dropped.
        return matches

    def attr_matches(self, text):
        """Compute matches when text contains a dot.

        Assuming the text is of the form NAME.NAME....[NAME], and is
        evaluatable in self.namespace or self.global_namespace, it will be
        evaluated and its attributes (as revealed by dir()) are used as
        possible completions.  (For class instances, class members are also
        considered.)

        WARNING: this can still invoke arbitrary C code, if an object
        with a __getattr__ hook is evaluated.
        """

        # Another option, seems to work great. Catches things like ''.<tab>
        m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text)

        if m:
            expr, attr = m.group(1, 3)
        elif self.greedy:
            # NOTE(review): self.line_buffer is set by callers/subclasses,
            # not by this class — confirm it is always present here.
            m2 = re.match(r"(.+)\.(\w*)$", self.line_buffer)
            if not m2:
                return []
            expr, attr = m2.group(1,2)
        else:
            return []

        # Evaluating arbitrary user expressions can raise anything; we only
        # swallow Exception so KeyboardInterrupt/SystemExit still propagate
        # (the old bare `except:` clauses hid those too).
        try:
            obj = eval(expr, self.namespace)
        except Exception:
            try:
                obj = eval(expr, self.global_namespace)
            except Exception:
                return []

        # NOTE(review): limit_to__all__ is not defined on this class; it
        # appears to be supplied by subclasses (e.g. IPCompleter) — confirm.
        if self.limit_to__all__ and hasattr(obj, '__all__'):
            words = get__all__entries(obj)
        else:
            words = dir2(obj)

        try:
            words = generics.complete_object(obj, words)
        except TryNext:
            pass
        except Exception:
            # Silence errors from third-party completion hooks.
            pass
        # Build match list to return
        n = len(attr)
        return [u"%s.%s" % (expr, w) for w in words if w[:n] == attr ]
416 | 416 | return [u"%s.%s" % (expr, w) for w in words if w[:n] == attr ] |
|
417 | 417 | |
|
418 | 418 | |
|
def get__all__entries(obj):
    """Return the string entries of ``obj.__all__``, or [] on any failure.

    Non-string entries are silently dropped; a missing or misbehaving
    ``__all__`` (e.g. a property that raises) yields an empty list.
    """
    try:
        words = getattr(obj, '__all__')
    except Exception:
        # Narrowed from a bare `except:` — a broken __all__ descriptor can
        # raise anything, but KeyboardInterrupt/SystemExit should propagate.
        return []

    # On Python 3 the former cast_unicode_py2() call was an identity for
    # str, so plain isinstance filtering is sufficient.
    return [w for w in words if isinstance(w, str)]
|
427 | 427 | |
|
428 | 428 | |
|
def match_dict_keys(keys, prefix, delims):
    """Used by dict_key_matches, matching the prefix to a list of keys.

    Returns a 3-tuple ``(quote, token_start, matched)``: the quote character
    the user opened with (or None), the offset of the current token within
    *prefix*, and the list of reformatted key completions.
    """
    # Empty prefix: offer every str/bytes key, repr-quoted.
    if not prefix:
        return None, 0, [repr(k) for k in keys
                      if isinstance(k, (str, bytes))]
    # NOTE(review): if *prefix* contains no quote character at all,
    # quote_match is None and the .group() below raises AttributeError —
    # callers presumably always pass a quoted prefix; confirm.
    quote_match = re.search('["\']', prefix)
    quote = quote_match.group()
    try:
        # Close the user's open quote and eval to get the literal prefix
        # string (empty dict as globals keeps this side-effect free).
        prefix_str = eval(prefix + quote, {})
    except Exception:
        return None, 0, []

    # Regex matching the trailing token of *prefix*: the longest run of
    # non-delimiter characters ending at the cursor.
    pattern = '[^' + ''.join('\\' + c for c in delims) + ']*$'
    token_match = re.search(pattern, prefix, re.UNICODE)
    token_start = token_match.start()
    token_prefix = token_match.group()

    # TODO: support bytes in Py3k
    matched = []
    for key in keys:
        try:
            if not key.startswith(prefix_str):
                continue
        except (AttributeError, TypeError, UnicodeError):
            # Python 3+ TypeError on b'a'.startswith('a') or vice-versa
            continue

        # reformat remainder of key to begin with prefix
        rem = key[len(prefix_str):]
        # force repr wrapped in ' — appending '"' makes repr choose
        # single quotes for the enclosing quoting.
        rem_repr = repr(rem + '"')
        # NOTE(review): on Python 3 repr() never starts with 'u', so this
        # branch looks like dead Python-2 compatibility code — confirm.
        if rem_repr.startswith('u') and prefix[0] not in 'uU':
            # Found key is unicode, but prefix is Py2 string.
            # Therefore attempt to interpret key as string.
            try:
                rem_repr = rem_repr = repr(rem.encode('ascii') + '"')
            except UnicodeEncodeError:
                continue

        # Strip repr's enclosing quote and the sentinel '"' we appended.
        rem_repr = rem_repr[1 + rem_repr.index("'"):-2]
        if quote == '"':
            # The entered prefix is quoted with ",
            # but the match is quoted with '.
            # A contained " hence needs escaping for comparison:
            rem_repr = rem_repr.replace('"', '\\"')

        # then reinsert prefix from start of token
        matched.append('%s%s' % (token_prefix, rem_repr))
    return quote, token_start, matched
|
478 | 478 | |
|
479 | 479 | |
|
480 | 480 | def _safe_isinstance(obj, module, class_name): |
|
481 | 481 | """Checks if obj is an instance of module.class_name if loaded |
|
482 | 482 | """ |
|
483 | 483 | return (module in sys.modules and |
|
484 | 484 | isinstance(obj, getattr(import_module(module), class_name))) |
|
485 | 485 | |
|
486 | 486 | |
|
def back_unicode_name_matches(text):
    u"""Match unicode characters back to unicode name

    This does ☃ -> \\snowman

    Note that snowman is not a valid python3 combining character but will be expanded.
    Though it will not recombine back to the snowman character by the completion machinery.

    This will not either back-complete standard sequences like \\n, \\b ...

    Used on Python 3 only.

    Returns a ``(matched_fragment, matches)`` pair; ``(u'', ())`` when
    nothing can be matched.
    """
    if len(text)<2:
        return u'', ()
    maybe_slash = text[-2]
    if maybe_slash != '\\':
        return u'', ()

    char = text[-1]
    # no expand on quote for completion in strings.
    # nor backcomplete standard ascii keys
    if char in string.ascii_letters or char in ['"',"'"]:
        return u'', ()
    try :
        unic = unicodedata.name(char)
        return '\\'+char,['\\'+unic]
    except (KeyError, ValueError):
        # unicodedata.name() raises ValueError (not KeyError) for
        # characters that have no name (e.g. control characters); catch
        # both so an unnamed character does not crash the completer.
        pass
    return u'', ()
|
516 | 516 | |
|
def back_latex_name_matches(text):
    u"""Match latex characters back to unicode name

    This does ->\\sqrt

    Used on Python 3 only.

    Returns a ``(matched_fragment, matches)`` pair; ``(u'', ())`` when
    nothing can be matched.
    """
    # Need at least a backslash followed by one character.
    if len(text) < 2 or text[-2] != '\\':
        return u'', ()

    char = text[-1]
    # no expand on quote for completion in strings.
    # nor backcomplete standard ascii keys
    if char in string.ascii_letters or char in ('"', "'"):
        return u'', ()
    try:
        # '\\' + char replaces the backslash as well
        return u'\\' + char, [reverse_latex_symbol[char]]
    except KeyError:
        return u'', ()
|
543 | 543 | |
|
544 | 544 | |
|
class IPCompleter(Completer):
    """Extension of the completer class with IPython-specific features"""

    @observe('greedy')
    def _greedy_changed(self, change):
        """update the splitter and readline delims when greedy is changed"""
        # Greedy mode completes over larger chunks of the input line, so the
        # characters treated as token delimiters must be swapped in lockstep
        # with the trait value.
        if change['new']:
            self.splitter.delims = GREEDY_DELIMS
        else:
            self.splitter.delims = DELIMS

    # Configurable: merge results of all matchers vs. first non-empty one.
    merge_completions = Bool(True,
        help="""Whether to merge completion results into a single list

        If False, only the completion results from the first non-empty
        completer will be returned.
        """
    ).tag(config=True)
    # Configurable: 0 = show everything, 1 = hide __dunder__ names,
    # 2 (default) = hide all names starting with '_' after 'obj.'.
    omit__names = Enum((0,1,2), default_value=2,
        help="""Instruct the completer to omit private method names

        Specifically, when completing on ``object.<tab>``.

        When 2 [default]: all names that start with '_' will be excluded.

        When 1: all 'magic' names (``__foo__``) will be excluded.

        When 0: nothing will be excluded.
        """
    ).tag(config=True)
    # Deprecated configurable kept for backward compatibility.
    limit_to__all__ = Bool(False,
        help="""
        DEPRECATED as of version 5.0.

        Instruct the completer to use __all__ for the completion

        Specifically, when completing on ``object.<tab>``.

        When True: only those names in obj.__all__ will be included.

        When False [default]: the __all__ attribute is ignored
        """,
    ).tag(config=True)
|
588 | 588 | |
|
    def __init__(self, shell=None, namespace=None, global_namespace=None,
                 use_readline=False, config=None, **kwargs):
        """IPCompleter() -> completer

        Return a completer object suitable for use by the readline library
        via readline.set_completer().

        Inputs:

        - shell: a pointer to the ipython shell itself.  This is needed
          because this completer knows about magic functions, and those can
          only be accessed via the ipython instance.

        - namespace: an optional dict where completions are performed.

        - global_namespace: secondary optional dict for completions, to
          handle cases (such as IPython embedded inside functions) where
          both Python scopes are visible.

        use_readline : bool, optional
          DEPRECATED, ignored.
        """

        # Escape character that introduces magic commands (e.g. '%').
        self.magic_escape = ESC_MAGIC
        self.splitter = CompletionSplitter()

        if use_readline:
            warnings.warn('The use_readline parameter is deprecated and ignored since IPython 6.0.',
                          DeprecationWarning, stacklevel=2)

        # _greedy_changed() depends on splitter and readline being defined:
        # the base-class __init__ may trigger that trait observer, so it must
        # run after self.splitter is assigned above.
        Completer.__init__(self, namespace=namespace, global_namespace=global_namespace,
                           config=config, **kwargs)

        # List where completion matches will be stored
        self.matches = []
        self.shell = shell
        # Regexp to split filenames with spaces in them
        self.space_name_re = re.compile(r'([^\\] )')
        # Hold a local ref. to glob.glob for speed
        self.glob = glob.glob

        # Determine if we are running on 'dumb' terminals, like (X)Emacs
        # buffers, to avoid completion problems.
        term = os.environ.get('TERM','xterm')
        self.dumb_terminal = term in ['dumb','emacs']

        # Special handling of backslashes needed in win32 platforms
        if sys.platform == "win32":
            self.clean_glob = self._clean_glob_win32
        else:
            self.clean_glob = self._clean_glob

        #regexp to parse docstring for function signature
        self.docstring_sig_re = re.compile(r'^[\w|\s.]+\(([^)]*)\).*')
        self.docstring_kwd_re = re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)')
        #use this if positional argument name is also needed
        #= re.compile(r'[\s|\[]*(\w+)(?:\s*=?\s*.*)')

        # All active matcher routines for completion; consulted by
        # complete() in this order.
        self.matchers = [
            self.python_matches,
            self.file_matches,
            self.magic_matches,
            self.python_func_kw_matches,
            self.dict_key_matches,
        ]

        # This is set externally by InteractiveShell
        self.custom_completers = None
|
659 | 659 | |
|
660 | 660 | def all_completions(self, text): |
|
661 | 661 | """ |
|
662 | 662 | Wrapper around the complete method for the benefit of emacs. |
|
663 | 663 | """ |
|
664 | 664 | return self.complete(text)[1] |
|
665 | 665 | |
|
666 | 666 | def _clean_glob(self, text): |
|
667 | 667 | return self.glob("%s*" % text) |
|
668 | 668 | |
|
669 | 669 | def _clean_glob_win32(self,text): |
|
670 | 670 | return [f.replace("\\","/") |
|
671 | 671 | for f in self.glob("%s*" % text)] |
|
672 | 672 | |
|
    def file_matches(self, text):
        """Match filenames, expanding ~USER type strings.

        Most of the seemingly convoluted logic in this completer is an
        attempt to handle filenames with spaces in them.  And yet it's not
        quite perfect, because Python's readline doesn't expose all of the
        GNU readline details needed for this to be done correctly.

        For a filename with a space in it, the printed completions will be
        only the parts after what's already been typed (instead of the
        full completions, as is normally done).  I don't think with the
        current (as of Python 2.3) Python readline it's possible to do
        better."""

        # chars that require escaping with backslash - i.e. chars
        # that readline treats incorrectly as delimiters, but we
        # don't want to treat as delimiters in filename matching
        # when escaped with backslash

        # A leading '!' marks a shell escape; strip it for matching and
        # re-prepend it to every result at the end.
        if text.startswith('!'):
            text = text[1:]
            text_prefix = u'!'
        else:
            text_prefix = u''

        text_until_cursor = self.text_until_cursor
        # track strings with open quotes
        open_quotes = has_open_quotes(text_until_cursor)

        if '(' in text_until_cursor or '[' in text_until_cursor:
            # Inside a call/indexing expression: complete only on the last
            # token rather than shell-splitting the whole line.
            lsplit = text
        else:
            try:
                # arg_split ~ shlex.split, but with unicode bugs fixed by us
                lsplit = arg_split(text_until_cursor)[-1]
            except ValueError:
                # typically an unmatched ", or backslash without escaped char.
                if open_quotes:
                    lsplit = text_until_cursor.split(open_quotes)[-1]
                else:
                    return []
            except IndexError:
                # tab pressed on empty line
                lsplit = ""

        if not open_quotes and lsplit != protect_filename(lsplit):
            # if protectables are found, do matching on the whole escaped name
            has_protectables = True
            text0,text = text,lsplit
        else:
            has_protectables = False
            text = os.path.expanduser(text)

        if text == "":
            # Empty prefix: offer everything in the current directory.
            return [text_prefix + cast_unicode_py2(protect_filename(f)) for f in self.glob("*")]

        # Compute the matches from the filesystem
        if sys.platform == 'win32':
            m0 = self.clean_glob(text)
        else:
            # On POSIX, drop the escaping backslashes before globbing.
            m0 = self.clean_glob(text.replace('\\', ''))

        if has_protectables:
            # If we had protectables, we need to revert our changes to the
            # beginning of filename so that we don't double-write the part
            # of the filename we have so far
            len_lsplit = len(lsplit)
            matches = [text_prefix + text0 +
                       protect_filename(f[len_lsplit:]) for f in m0]
        else:
            if open_quotes:
                # if we have a string with an open quote, we don't need to
                # protect the names at all (and we _shouldn't_, as it
                # would cause bugs when the filesystem call is made).
                matches = m0
            else:
                matches = [text_prefix +
                           protect_filename(f) for f in m0]

        # Mark directories in input list by appending '/' to their names.
        return [cast_unicode_py2(x+'/') if os.path.isdir(x) else x for x in matches]
|
753 | 753 | |
|
754 | 754 | def magic_matches(self, text): |
|
755 | 755 | """Match magics""" |
|
756 | 756 | # Get all shell magics now rather than statically, so magics loaded at |
|
757 | 757 | # runtime show up too. |
|
758 | 758 | lsm = self.shell.magics_manager.lsmagic() |
|
759 | 759 | line_magics = lsm['line'] |
|
760 | 760 | cell_magics = lsm['cell'] |
|
761 | 761 | pre = self.magic_escape |
|
762 | 762 | pre2 = pre+pre |
|
763 | 763 | |
|
764 | 764 | # Completion logic: |
|
765 | 765 | # - user gives %%: only do cell magics |
|
766 | 766 | # - user gives %: do both line and cell magics |
|
767 | 767 | # - no prefix: do both |
|
768 | 768 | # In other words, line magics are skipped if the user gives %% explicitly |
|
769 | 769 | bare_text = text.lstrip(pre) |
|
770 | 770 | comp = [ pre2+m for m in cell_magics if m.startswith(bare_text)] |
|
771 | 771 | if not text.startswith(pre2): |
|
772 | 772 | comp += [ pre+m for m in line_magics if m.startswith(bare_text)] |
|
773 | 773 | return [cast_unicode_py2(c) for c in comp] |
|
774 | 774 | |
|
775 | 775 | |
|
776 | 776 | def python_matches(self, text): |
|
777 | 777 | """Match attributes or global python names""" |
|
778 | 778 | if "." in text: |
|
779 | 779 | try: |
|
780 | 780 | matches = self.attr_matches(text) |
|
781 | 781 | if text.endswith('.') and self.omit__names: |
|
782 | 782 | if self.omit__names == 1: |
|
783 | 783 | # true if txt is _not_ a __ name, false otherwise: |
|
784 | 784 | no__name = (lambda txt: |
|
785 | 785 | re.match(r'.*\.__.*?__',txt) is None) |
|
786 | 786 | else: |
|
787 | 787 | # true if txt is _not_ a _ name, false otherwise: |
|
788 | 788 | no__name = (lambda txt: |
|
789 | 789 | re.match(r'\._.*?',txt[txt.rindex('.'):]) is None) |
|
790 | 790 | matches = filter(no__name, matches) |
|
791 | 791 | except NameError: |
|
792 | 792 | # catches <undefined attributes>.<tab> |
|
793 | 793 | matches = [] |
|
794 | 794 | else: |
|
795 | 795 | matches = self.global_matches(text) |
|
796 | 796 | return matches |
|
797 | 797 | |
|
798 | 798 | def _default_arguments_from_docstring(self, doc): |
|
799 | 799 | """Parse the first line of docstring for call signature. |
|
800 | 800 | |
|
801 | 801 | Docstring should be of the form 'min(iterable[, key=func])\n'. |
|
802 | 802 | It can also parse cython docstring of the form |
|
803 | 803 | 'Minuit.migrad(self, int ncall=10000, resume=True, int nsplit=1)'. |
|
804 | 804 | """ |
|
805 | 805 | if doc is None: |
|
806 | 806 | return [] |
|
807 | 807 | |
|
808 | 808 | #care only the firstline |
|
809 | 809 | line = doc.lstrip().splitlines()[0] |
|
810 | 810 | |
|
811 | 811 | #p = re.compile(r'^[\w|\s.]+\(([^)]*)\).*') |
|
812 | 812 | #'min(iterable[, key=func])\n' -> 'iterable[, key=func]' |
|
813 | 813 | sig = self.docstring_sig_re.search(line) |
|
814 | 814 | if sig is None: |
|
815 | 815 | return [] |
|
816 | 816 | # iterable[, key=func]' -> ['iterable[' ,' key=func]'] |
|
817 | 817 | sig = sig.groups()[0].split(',') |
|
818 | 818 | ret = [] |
|
819 | 819 | for s in sig: |
|
820 | 820 | #re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)') |
|
821 | 821 | ret += self.docstring_kwd_re.findall(s) |
|
822 | 822 | return ret |
|
823 | 823 | |
|
824 | 824 | def _default_arguments(self, obj): |
|
825 | 825 | """Return the list of default arguments of obj if it is callable, |
|
826 | 826 | or empty list otherwise.""" |
|
827 | 827 | call_obj = obj |
|
828 | 828 | ret = [] |
|
829 | 829 | if inspect.isbuiltin(obj): |
|
830 | 830 | pass |
|
831 | 831 | elif not (inspect.isfunction(obj) or inspect.ismethod(obj)): |
|
832 | 832 | if inspect.isclass(obj): |
|
833 | 833 | #for cython embededsignature=True the constructor docstring |
|
834 | 834 | #belongs to the object itself not __init__ |
|
835 | 835 | ret += self._default_arguments_from_docstring( |
|
836 | 836 | getattr(obj, '__doc__', '')) |
|
837 | 837 | # for classes, check for __init__,__new__ |
|
838 | 838 | call_obj = (getattr(obj, '__init__', None) or |
|
839 | 839 | getattr(obj, '__new__', None)) |
|
840 | 840 | # for all others, check if they are __call__able |
|
841 | 841 | elif hasattr(obj, '__call__'): |
|
842 | 842 | call_obj = obj.__call__ |
|
843 | 843 | ret += self._default_arguments_from_docstring( |
|
844 | 844 | getattr(call_obj, '__doc__', '')) |
|
845 | 845 | |
|
846 | 846 | _keeps = (inspect.Parameter.KEYWORD_ONLY, |
|
847 | 847 | inspect.Parameter.POSITIONAL_OR_KEYWORD) |
|
848 | 848 | |
|
849 | 849 | try: |
|
850 | 850 | sig = inspect.signature(call_obj) |
|
851 | 851 | ret.extend(k for k, v in sig.parameters.items() if |
|
852 | 852 | v.kind in _keeps) |
|
853 | 853 | except ValueError: |
|
854 | 854 | pass |
|
855 | 855 | |
|
856 | 856 | return list(set(ret)) |
|
857 | 857 | |
|
    def python_func_kw_matches(self,text):
        """Match named parameters (kwargs) of the last open function.

        Returns completions of the form ``name=`` for keyword arguments of
        the innermost function call left open before the cursor, excluding
        keywords that have already been supplied.
        """

        if "." in text: # a parameter cannot be dotted
            return []
        # Lazily build and cache the tokenizing regexp on first use.
        try: regexp = self.__funcParamsRegex
        except AttributeError:
            regexp = self.__funcParamsRegex = re.compile(r'''
                '.*?(?<!\\)' |    # single quoted strings or
                ".*?(?<!\\)" |    # double quoted strings or
                \w+          |    # identifier
                \S                # other characters
                ''', re.VERBOSE | re.DOTALL)
        # 1. find the nearest identifier that comes before an unclosed
        # parenthesis before the cursor
        # e.g. for "foo (1+bar(x), pa<cursor>,a=1)", the candidate is "foo"
        tokens = regexp.findall(self.text_until_cursor)
        iterTokens = reversed(tokens); openPar = 0

        # Walk backwards counting parens; stop at the first '(' that is
        # not balanced by a later ')'.
        for token in iterTokens:
            if token == ')':
                openPar -= 1
            elif token == '(':
                openPar += 1
                if openPar > 0:
                    # found the last unclosed parenthesis
                    break
        else:
            return []
        # 2. Concatenate dotted names ("foo.bar" for "foo.bar(x, pa" )
        # NOTE: continues consuming the same reversed iterator, so this
        # picks up the tokens immediately before the '(' found above.
        ids = []
        isId = re.compile(r'\w+$').match

        while True:
            try:
                ids.append(next(iterTokens))
                if not isId(ids[-1]):
                    ids.pop(); break
                if not next(iterTokens) == '.':
                    break
            except StopIteration:
                break

        # Find all named arguments already assigned to, as to avoid suggesting
        # them again
        usedNamedArgs = set()
        par_level = -1
        for token, next_token in zip(tokens, tokens[1:]):
            if token == '(':
                par_level += 1
            elif token == ')':
                par_level -= 1

            # only names at the top level of the current call count
            if par_level != 0:
                continue

            if next_token != '=':
                continue

            usedNamedArgs.add(token)

        # lookup the candidate callable matches either using global_matches
        # or attr_matches for dotted names
        if len(ids) == 1:
            callableMatches = self.global_matches(ids[0])
        else:
            callableMatches = self.attr_matches('.'.join(ids[::-1]))
        argMatches = []
        for callableMatch in callableMatches:
            try:
                namedArgs = self._default_arguments(eval(callableMatch,
                                                         self.namespace))
            except:
                # evaluating a candidate may fail for any reason; skip it
                continue

            # Remove used named arguments from the list, no need to show twice
            for namedArg in set(namedArgs) - usedNamedArgs:
                if namedArg.startswith(text):
                    argMatches.append(u"%s=" %namedArg)
        return argMatches
|
938 | 938 | |
|
    def dict_key_matches(self, text):
        """Match string keys in a dictionary, after e.g. 'foo[' """
        def get_keys(obj):
            # Objects can define their own completions by defining an
            # _ipy_key_completions_() method.
            method = get_real_method(obj, '_ipython_key_completions_')
            if method is not None:
                return method()

            # Special case some common in-memory dict-like types
            if isinstance(obj, dict) or\
               _safe_isinstance(obj, 'pandas', 'DataFrame'):
                try:
                    return list(obj.keys())
                except Exception:
                    return []
            elif _safe_isinstance(obj, 'numpy', 'ndarray') or\
                 _safe_isinstance(obj, 'numpy', 'void'):
                # structured-array field names (None when unstructured)
                return obj.dtype.names or []
            return []

        # Lazily build and cache both greedy and non-greedy regexps.
        try:
            regexps = self.__dict_key_regexps
        except AttributeError:
            dict_key_re_fmt = r'''(?x)
            (  # match dict-referring expression wrt greedy setting
                %s
            )
            \[   # open bracket
            \s*  # and optional whitespace
            ([uUbB]?  # string prefix (r not handled)
                (?:   # unclosed string
                    '(?:[^']|(?<!\\)\\')*
                |
                    "(?:[^"]|(?<!\\)\\")*
                )
            )?
            $
            '''
            regexps = self.__dict_key_regexps = {
                False: re.compile(dict_key_re_fmt % '''
                                  # identifiers separated by .
                                  (?!\d)\w+
                                  (?:\.(?!\d)\w+)*
                                  '''),
                True: re.compile(dict_key_re_fmt % '''
                                 .+
                                 ''')
            }

        match = regexps[self.greedy].search(self.text_until_cursor)
        if match is None:
            return []

        expr, prefix = match.groups()
        # Evaluate the expression before the bracket to get the object;
        # fall back to the global namespace if the local one fails.
        try:
            obj = eval(expr, self.namespace)
        except Exception:
            try:
                obj = eval(expr, self.global_namespace)
            except Exception:
                return []

        keys = get_keys(obj)
        if not keys:
            return keys
        closing_quote, token_offset, matches = match_dict_keys(keys, prefix, self.splitter.delims)
        if not matches:
            return matches

        # get the cursor position of
        # - the text being completed
        # - the start of the key text
        # - the start of the completion
        text_start = len(self.text_until_cursor) - len(text)
        if prefix:
            key_start = match.start(2)
            completion_start = key_start + token_offset
        else:
            key_start = completion_start = match.end()

        # grab the leading prefix, to make sure all completions start with `text`
        if text_start > key_start:
            leading = ''
        else:
            leading = text[text_start:completion_start]

        # the index of the `[` character
        bracket_idx = match.end(1)

        # append closing quote and bracket as appropriate
        # this is *not* appropriate if the opening quote or bracket is outside
        # the text given to this method
        suf = ''
        continuation = self.line_buffer[len(self.text_until_cursor):]
        if key_start > text_start and closing_quote:
            # quotes were opened inside text, maybe close them
            if continuation.startswith(closing_quote):
                continuation = continuation[len(closing_quote):]
            else:
                suf += closing_quote
        if bracket_idx > text_start:
            # brackets were opened inside text, maybe close them
            if not continuation.startswith(']'):
                suf += ']'

        return [leading + k + suf for k in matches]
|
1046 | 1046 | |
|
1047 | 1047 | def unicode_name_matches(self, text): |
|
1048 | 1048 | u"""Match Latex-like syntax for unicode characters base |
|
1049 | 1049 | on the name of the character. |
|
1050 | 1050 | |
|
1051 | 1051 | This does \\GREEK SMALL LETTER ETA -> η |
|
1052 | 1052 | |
|
1053 | 1053 | Works only on valid python 3 identifier, or on combining characters that |
|
1054 | 1054 | will combine to form a valid identifier. |
|
1055 | 1055 | |
|
1056 | 1056 | Used on Python 3 only. |
|
1057 | 1057 | """ |
|
1058 | 1058 | slashpos = text.rfind('\\') |
|
1059 | 1059 | if slashpos > -1: |
|
1060 | 1060 | s = text[slashpos+1:] |
|
1061 | 1061 | try : |
|
1062 | 1062 | unic = unicodedata.lookup(s) |
|
1063 | 1063 | # allow combining chars |
|
1064 | 1064 | if ('a'+unic).isidentifier(): |
|
1065 | 1065 | return '\\'+s,[unic] |
|
1066 | 1066 | except KeyError: |
|
1067 | 1067 | pass |
|
1068 | 1068 | return u'', [] |
|
1069 | 1069 | |
|
1070 | 1070 | |
|
1071 | 1071 | |
|
1072 | 1072 | |
|
1073 | 1073 | def latex_matches(self, text): |
|
1074 | 1074 | u"""Match Latex syntax for unicode characters. |
|
1075 | 1075 | |
|
1076 | 1076 | This does both \\alp -> \\alpha and \\alpha -> α |
|
1077 | 1077 | |
|
1078 | 1078 | Used on Python 3 only. |
|
1079 | 1079 | """ |
|
1080 | 1080 | slashpos = text.rfind('\\') |
|
1081 | 1081 | if slashpos > -1: |
|
1082 | 1082 | s = text[slashpos:] |
|
1083 | 1083 | if s in latex_symbols: |
|
1084 | 1084 | # Try to complete a full latex symbol to unicode |
|
1085 | 1085 | # \\alpha -> α |
|
1086 | 1086 | return s, [latex_symbols[s]] |
|
1087 | 1087 | else: |
|
1088 | 1088 | # If a user has partially typed a latex symbol, give them |
|
1089 | 1089 | # a full list of options \al -> [\aleph, \alpha] |
|
1090 | 1090 | matches = [k for k in latex_symbols if k.startswith(s)] |
|
1091 | 1091 | return s, matches |
|
1092 | 1092 | return u'', [] |
|
1093 | 1093 | |
|
    def dispatch_custom_completer(self, text):
        """Dispatch to user-registered custom completers.

        Returns a list of matches from the first custom completer that
        produces results, or None if no custom completer applies.
        """
        if not self.custom_completers:
            return

        line = self.line_buffer
        if not line.strip():
            return None

        # Create a little structure to pass all the relevant information about
        # the current completion to any custom completer.
        event = Bunch()
        event.line = line
        event.symbol = text
        cmd = line.split(None,1)[0]
        event.command = cmd
        event.text_until_cursor = self.text_until_cursor

        # for foo etc, try also to find completer for %foo
        if not cmd.startswith(self.magic_escape):
            try_magic = self.custom_completers.s_matches(
                self.magic_escape + cmd)
        else:
            try_magic = []

        # Try command-specific completers first, then magic variants, then
        # completers registered on flat patterns.
        for c in itertools.chain(self.custom_completers.s_matches(cmd),
                 try_magic,
                 self.custom_completers.flat_matches(self.text_until_cursor)):
            try:
                res = c(event)
                if res:
                    # first, try case sensitive match
                    withcase = [cast_unicode_py2(r) for r in res if r.startswith(text)]
                    if withcase:
                        return withcase
                    # if none, then case insensitive ones are ok too
                    text_low = text.lower()
                    return [cast_unicode_py2(r) for r in res if r.lower().startswith(text_low)]
            except TryNext:
                # a completer raising TryNext passes control to the next one
                pass

        return None
|
1135 | 1135 | |
|
    @_strip_single_trailing_space
    def complete(self, text=None, line_buffer=None, cursor_pos=None):
        """Find completions for the given text and line context.

        Note that both the text and the line_buffer are optional, but at least
        one of them must be given.

        Parameters
        ----------
        text : string, optional
            Text to perform the completion on.  If not given, the line buffer
            is split using the instance's CompletionSplitter object.

        line_buffer : string, optional
            If not given, the completer attempts to obtain the current line
            buffer via readline.  This keyword allows clients which are
            requesting for text completions in non-readline contexts to inform
            the completer of the entire text.

        cursor_pos : int, optional
            Index of the cursor in the full line buffer.  Should be provided by
            remote frontends where kernel has no access to frontend state.

        Returns
        -------
        text : str
            Text that was actually used in the completion.

        matches : list
            A list of completion matches.
        """
        # if the cursor position isn't given, the only sane assumption we can
        # make is that it's at the end of the line (the common case)
        if cursor_pos is None:
            cursor_pos = len(line_buffer) if text is None else len(text)

        if self.use_main_ns:
            self.namespace = __main__.__dict__

        if PY3:
            # Latex-style and unicode-name completions short-circuit the
            # regular matcher machinery: if any of them produce matches,
            # return immediately.
            base_text = text if not line_buffer else line_buffer[:cursor_pos]
            latex_text, latex_matches = self.latex_matches(base_text)
            if latex_matches:
                return latex_text, latex_matches
            name_text = ''
            name_matches = []
            for meth in (self.unicode_name_matches, back_latex_name_matches, back_unicode_name_matches):
                name_text, name_matches = meth(base_text)
                if name_text:
                    return name_text, name_matches

        # if text is either None or an empty string, rely on the line buffer
        if not text:
            text = self.splitter.split_line(line_buffer, cursor_pos)

        # If no line buffer is given, assume the input text is all there was
        if line_buffer is None:
            line_buffer = text

        # State consulted by the individual matchers below.
        self.line_buffer = line_buffer
        self.text_until_cursor = self.line_buffer[:cursor_pos]

        # Start with a clean slate of completions
        self.matches[:] = []
        custom_res = self.dispatch_custom_completer(text)
        if custom_res is not None:
            # did custom completers produce something?
            self.matches = custom_res
        else:
            # Extend the list of completions with the results of each
            # matcher, so we return results to the user from all
            # namespaces.
            if self.merge_completions:
                self.matches = []
                for matcher in self.matchers:
                    try:
                        self.matches.extend(matcher(text))
                    except:
                        # Show the ugly traceback if the matcher causes an
                        # exception, but do NOT crash the kernel!
                        sys.excepthook(*sys.exc_info())
            else:
                # Non-merging mode: first matcher with results wins.
                for matcher in self.matchers:
                    self.matches = matcher(text)
                    if self.matches:
                        break
        # FIXME: we should extend our api to return a dict with completions for
        # different types of objects.  The rlcomplete() method could then
        # simply collapse the dict into a list for readline, but we'd have
        # richer completion semantics in other environments.
        self.matches = sorted(set(self.matches), key=completions_sorting_key)

        return text, self.matches
@@ -1,348 +1,347 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """Implementations for various useful completers. |
|
3 | 3 | |
|
4 | 4 | These are all loaded by default by IPython. |
|
5 | 5 | """ |
|
6 | 6 | #----------------------------------------------------------------------------- |
|
7 | 7 | # Copyright (C) 2010-2011 The IPython Development Team. |
|
8 | 8 | # |
|
9 | 9 | # Distributed under the terms of the BSD License. |
|
10 | 10 | # |
|
11 | 11 | # The full license is in the file COPYING.txt, distributed with this software. |
|
12 | 12 | #----------------------------------------------------------------------------- |
|
13 | 13 | |
|
14 | 14 | #----------------------------------------------------------------------------- |
|
15 | 15 | # Imports |
|
16 | 16 | #----------------------------------------------------------------------------- |
|
17 | 17 | |
|
18 | 18 | # Stdlib imports |
|
19 | 19 | import glob |
|
20 | 20 | import inspect |
|
21 | 21 | import os |
|
22 | 22 | import re |
|
23 | 23 | import sys |
|
24 | 24 | from importlib import import_module |
|
25 | 25 | |
|
26 | 26 | try: |
|
27 | 27 | # Python >= 3.3 |
|
28 | 28 | from importlib.machinery import all_suffixes |
|
29 | 29 | _suffixes = all_suffixes() |
|
30 | 30 | except ImportError: |
|
31 | 31 | from imp import get_suffixes |
|
32 | 32 | _suffixes = [ s[0] for s in get_suffixes() ] |
|
33 | 33 | |
|
34 | 34 | # Third-party imports |
|
35 | 35 | from time import time |
|
36 | 36 | from zipimport import zipimporter |
|
37 | 37 | |
|
38 | 38 | # Our own imports |
|
39 | 39 | from IPython.core.completer import expand_user, compress_user |
|
40 | 40 | from IPython.core.error import TryNext |
|
41 | 41 | from IPython.utils._process_common import arg_split |
|
42 | from IPython.utils.py3compat import string_types | |
|
43 | 42 | |
|
44 | 43 | # FIXME: this should be pulled in with the right call via the component system |
|
45 | 44 | from IPython import get_ipython |
|
46 | 45 | |
|
47 | 46 | #----------------------------------------------------------------------------- |
|
48 | 47 | # Globals and constants |
|
49 | 48 | #----------------------------------------------------------------------------- |
|
50 | 49 | |
|
51 | 50 | # Time in seconds after which the rootmodules will be stored permanently in the |
|
52 | 51 | # ipython ip.db database (kept in the user's .ipython dir). |
|
53 | 52 | TIMEOUT_STORAGE = 2 |
|
54 | 53 | |
|
55 | 54 | # Time in seconds after which we give up |
|
56 | 55 | TIMEOUT_GIVEUP = 20 |
|
57 | 56 | |
|
58 | 57 | # Regular expression for the python import statement |
|
59 | 58 | import_re = re.compile(r'(?P<name>[a-zA-Z_][a-zA-Z0-9_]*?)' |
|
60 | 59 | r'(?P<package>[/\\]__init__)?' |
|
61 | 60 | r'(?P<suffix>%s)$' % |
|
62 | 61 | r'|'.join(re.escape(s) for s in _suffixes)) |
|
63 | 62 | |
|
64 | 63 | # RE for the ipython %run command (python + ipython scripts) |
|
65 | 64 | magic_run_re = re.compile(r'.*(\.ipy|\.ipynb|\.py[w]?)$') |
|
66 | 65 | |
|
67 | 66 | #----------------------------------------------------------------------------- |
|
68 | 67 | # Local utilities |
|
69 | 68 | #----------------------------------------------------------------------------- |
|
70 | 69 | |
|
71 | 70 | def module_list(path): |
|
72 | 71 | """ |
|
73 | 72 | Return the list containing the names of the modules available in the given |
|
74 | 73 | folder. |
|
75 | 74 | """ |
|
76 | 75 | # sys.path has the cwd as an empty string, but isdir/listdir need it as '.' |
|
77 | 76 | if path == '': |
|
78 | 77 | path = '.' |
|
79 | 78 | |
|
80 | 79 | # A few local constants to be used in loops below |
|
81 | 80 | pjoin = os.path.join |
|
82 | 81 | |
|
83 | 82 | if os.path.isdir(path): |
|
84 | 83 | # Build a list of all files in the directory and all files |
|
85 | 84 | # in its subdirectories. For performance reasons, do not |
|
86 | 85 | # recurse more than one level into subdirectories. |
|
87 | 86 | files = [] |
|
88 | 87 | for root, dirs, nondirs in os.walk(path, followlinks=True): |
|
89 | 88 | subdir = root[len(path)+1:] |
|
90 | 89 | if subdir: |
|
91 | 90 | files.extend(pjoin(subdir, f) for f in nondirs) |
|
92 | 91 | dirs[:] = [] # Do not recurse into additional subdirectories. |
|
93 | 92 | else: |
|
94 | 93 | files.extend(nondirs) |
|
95 | 94 | |
|
96 | 95 | else: |
|
97 | 96 | try: |
|
98 | 97 | files = list(zipimporter(path)._files.keys()) |
|
99 | 98 | except: |
|
100 | 99 | files = [] |
|
101 | 100 | |
|
102 | 101 | # Build a list of modules which match the import_re regex. |
|
103 | 102 | modules = [] |
|
104 | 103 | for f in files: |
|
105 | 104 | m = import_re.match(f) |
|
106 | 105 | if m: |
|
107 | 106 | modules.append(m.group('name')) |
|
108 | 107 | return list(set(modules)) |
|
109 | 108 | |
|
110 | 109 | |
|
111 | 110 | def get_root_modules(): |
|
112 | 111 | """ |
|
113 | 112 | Returns a list containing the names of all the modules available in the |
|
114 | 113 | folders of the pythonpath. |
|
115 | 114 | |
|
116 | 115 | ip.db['rootmodules_cache'] maps sys.path entries to list of modules. |
|
117 | 116 | """ |
|
118 | 117 | ip = get_ipython() |
|
119 | 118 | rootmodules_cache = ip.db.get('rootmodules_cache', {}) |
|
120 | 119 | rootmodules = list(sys.builtin_module_names) |
|
121 | 120 | start_time = time() |
|
122 | 121 | store = False |
|
123 | 122 | for path in sys.path: |
|
124 | 123 | try: |
|
125 | 124 | modules = rootmodules_cache[path] |
|
126 | 125 | except KeyError: |
|
127 | 126 | modules = module_list(path) |
|
128 | 127 | try: |
|
129 | 128 | modules.remove('__init__') |
|
130 | 129 | except ValueError: |
|
131 | 130 | pass |
|
132 | 131 | if path not in ('', '.'): # cwd modules should not be cached |
|
133 | 132 | rootmodules_cache[path] = modules |
|
134 | 133 | if time() - start_time > TIMEOUT_STORAGE and not store: |
|
135 | 134 | store = True |
|
136 | 135 | print("\nCaching the list of root modules, please wait!") |
|
137 | 136 | print("(This will only be done once - type '%rehashx' to " |
|
138 | 137 | "reset cache!)\n") |
|
139 | 138 | sys.stdout.flush() |
|
140 | 139 | if time() - start_time > TIMEOUT_GIVEUP: |
|
141 | 140 | print("This is taking too long, we give up.\n") |
|
142 | 141 | return [] |
|
143 | 142 | rootmodules.extend(modules) |
|
144 | 143 | if store: |
|
145 | 144 | ip.db['rootmodules_cache'] = rootmodules_cache |
|
146 | 145 | rootmodules = list(set(rootmodules)) |
|
147 | 146 | return rootmodules |
|
148 | 147 | |
|
149 | 148 | |
|
150 | 149 | def is_importable(module, attr, only_modules): |
|
151 | 150 | if only_modules: |
|
152 | 151 | return inspect.ismodule(getattr(module, attr)) |
|
153 | 152 | else: |
|
154 | 153 | return not(attr[:2] == '__' and attr[-2:] == '__') |
|
155 | 154 | |
|
156 | 155 | def try_import(mod, only_modules=False): |
|
157 | 156 | try: |
|
158 | 157 | m = import_module(mod) |
|
159 | 158 | except: |
|
160 | 159 | return [] |
|
161 | 160 | |
|
162 | 161 | m_is_init = hasattr(m, '__file__') and '__init__' in m.__file__ |
|
163 | 162 | |
|
164 | 163 | completions = [] |
|
165 | 164 | if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init: |
|
166 | 165 | completions.extend( [attr for attr in dir(m) if |
|
167 | 166 | is_importable(m, attr, only_modules)]) |
|
168 | 167 | |
|
169 | 168 | completions.extend(getattr(m, '__all__', [])) |
|
170 | 169 | if m_is_init: |
|
171 | 170 | completions.extend(module_list(os.path.dirname(m.__file__))) |
|
172 |
completions = {c for c in completions if isinstance(c, str |
|
|
171 | completions = {c for c in completions if isinstance(c, str)} | |
|
173 | 172 | completions.discard('__init__') |
|
174 | 173 | return list(completions) |
|
175 | 174 | |
|
176 | 175 | |
|
177 | 176 | #----------------------------------------------------------------------------- |
|
178 | 177 | # Completion-related functions. |
|
179 | 178 | #----------------------------------------------------------------------------- |
|
180 | 179 | |
|
181 | 180 | def quick_completer(cmd, completions): |
|
182 | 181 | """ Easily create a trivial completer for a command. |
|
183 | 182 | |
|
184 | 183 | Takes either a list of completions, or all completions in string (that will |
|
185 | 184 | be split on whitespace). |
|
186 | 185 | |
|
187 | 186 | Example:: |
|
188 | 187 | |
|
189 | 188 | [d:\ipython]|1> import ipy_completers |
|
190 | 189 | [d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz']) |
|
191 | 190 | [d:\ipython]|3> foo b<TAB> |
|
192 | 191 | bar baz |
|
193 | 192 | [d:\ipython]|3> foo ba |
|
194 | 193 | """ |
|
195 | 194 | |
|
196 |
if isinstance(completions, str |
|
|
195 | if isinstance(completions, str): | |
|
197 | 196 | completions = completions.split() |
|
198 | 197 | |
|
199 | 198 | def do_complete(self, event): |
|
200 | 199 | return completions |
|
201 | 200 | |
|
202 | 201 | get_ipython().set_hook('complete_command',do_complete, str_key = cmd) |
|
203 | 202 | |
|
204 | 203 | def module_completion(line): |
|
205 | 204 | """ |
|
206 | 205 | Returns a list containing the completion possibilities for an import line. |
|
207 | 206 | |
|
208 | 207 | The line looks like this : |
|
209 | 208 | 'import xml.d' |
|
210 | 209 | 'from xml.dom import' |
|
211 | 210 | """ |
|
212 | 211 | |
|
213 | 212 | words = line.split(' ') |
|
214 | 213 | nwords = len(words) |
|
215 | 214 | |
|
216 | 215 | # from whatever <tab> -> 'import ' |
|
217 | 216 | if nwords == 3 and words[0] == 'from': |
|
218 | 217 | return ['import '] |
|
219 | 218 | |
|
220 | 219 | # 'from xy<tab>' or 'import xy<tab>' |
|
221 | 220 | if nwords < 3 and (words[0] in {'%aimport', 'import', 'from'}) : |
|
222 | 221 | if nwords == 1: |
|
223 | 222 | return get_root_modules() |
|
224 | 223 | mod = words[1].split('.') |
|
225 | 224 | if len(mod) < 2: |
|
226 | 225 | return get_root_modules() |
|
227 | 226 | completion_list = try_import('.'.join(mod[:-1]), True) |
|
228 | 227 | return ['.'.join(mod[:-1] + [el]) for el in completion_list] |
|
229 | 228 | |
|
230 | 229 | # 'from xyz import abc<tab>' |
|
231 | 230 | if nwords >= 3 and words[0] == 'from': |
|
232 | 231 | mod = words[1] |
|
233 | 232 | return try_import(mod) |
|
234 | 233 | |
|
235 | 234 | #----------------------------------------------------------------------------- |
|
236 | 235 | # Completers |
|
237 | 236 | #----------------------------------------------------------------------------- |
|
238 | 237 | # These all have the func(self, event) signature to be used as custom |
|
239 | 238 | # completers |
|
240 | 239 | |
|
241 | 240 | def module_completer(self,event): |
|
242 | 241 | """Give completions after user has typed 'import ...' or 'from ...'""" |
|
243 | 242 | |
|
244 | 243 | # This works in all versions of python. While 2.5 has |
|
245 | 244 | # pkgutil.walk_packages(), that particular routine is fairly dangerous, |
|
246 | 245 | # since it imports *EVERYTHING* on sys.path. That is: a) very slow b) full |
|
247 | 246 | # of possibly problematic side effects. |
|
248 | 247 | # This search the folders in the sys.path for available modules. |
|
249 | 248 | |
|
250 | 249 | return module_completion(event.line) |
|
251 | 250 | |
|
252 | 251 | # FIXME: there's a lot of logic common to the run, cd and builtin file |
|
253 | 252 | # completers, that is currently reimplemented in each. |
|
254 | 253 | |
|
255 | 254 | def magic_run_completer(self, event): |
|
256 | 255 | """Complete files that end in .py or .ipy or .ipynb for the %run command. |
|
257 | 256 | """ |
|
258 | 257 | comps = arg_split(event.line, strict=False) |
|
259 | 258 | # relpath should be the current token that we need to complete. |
|
260 | 259 | if (len(comps) > 1) and (not event.line.endswith(' ')): |
|
261 | 260 | relpath = comps[-1].strip("'\"") |
|
262 | 261 | else: |
|
263 | 262 | relpath = '' |
|
264 | 263 | |
|
265 | 264 | #print("\nev=", event) # dbg |
|
266 | 265 | #print("rp=", relpath) # dbg |
|
267 | 266 | #print('comps=', comps) # dbg |
|
268 | 267 | |
|
269 | 268 | lglob = glob.glob |
|
270 | 269 | isdir = os.path.isdir |
|
271 | 270 | relpath, tilde_expand, tilde_val = expand_user(relpath) |
|
272 | 271 | |
|
273 | 272 | # Find if the user has already typed the first filename, after which we |
|
274 | 273 | # should complete on all files, since after the first one other files may |
|
275 | 274 | # be arguments to the input script. |
|
276 | 275 | |
|
277 | 276 | if any(magic_run_re.match(c) for c in comps): |
|
278 | 277 | matches = [f.replace('\\','/') + ('/' if isdir(f) else '') |
|
279 | 278 | for f in lglob(relpath+'*')] |
|
280 | 279 | else: |
|
281 | 280 | dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*') if isdir(f)] |
|
282 | 281 | pys = [f.replace('\\','/') |
|
283 | 282 | for f in lglob(relpath+'*.py') + lglob(relpath+'*.ipy') + |
|
284 | 283 | lglob(relpath+'*.ipynb') + lglob(relpath + '*.pyw')] |
|
285 | 284 | |
|
286 | 285 | matches = dirs + pys |
|
287 | 286 | |
|
288 | 287 | #print('run comp:', dirs+pys) # dbg |
|
289 | 288 | return [compress_user(p, tilde_expand, tilde_val) for p in matches] |
|
290 | 289 | |
|
291 | 290 | |
|
292 | 291 | def cd_completer(self, event): |
|
293 | 292 | """Completer function for cd, which only returns directories.""" |
|
294 | 293 | ip = get_ipython() |
|
295 | 294 | relpath = event.symbol |
|
296 | 295 | |
|
297 | 296 | #print(event) # dbg |
|
298 | 297 | if event.line.endswith('-b') or ' -b ' in event.line: |
|
299 | 298 | # return only bookmark completions |
|
300 | 299 | bkms = self.db.get('bookmarks', None) |
|
301 | 300 | if bkms: |
|
302 | 301 | return bkms.keys() |
|
303 | 302 | else: |
|
304 | 303 | return [] |
|
305 | 304 | |
|
306 | 305 | if event.symbol == '-': |
|
307 | 306 | width_dh = str(len(str(len(ip.user_ns['_dh']) + 1))) |
|
308 | 307 | # jump in directory history by number |
|
309 | 308 | fmt = '-%0' + width_dh +'d [%s]' |
|
310 | 309 | ents = [ fmt % (i,s) for i,s in enumerate(ip.user_ns['_dh'])] |
|
311 | 310 | if len(ents) > 1: |
|
312 | 311 | return ents |
|
313 | 312 | return [] |
|
314 | 313 | |
|
315 | 314 | if event.symbol.startswith('--'): |
|
316 | 315 | return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']] |
|
317 | 316 | |
|
318 | 317 | # Expand ~ in path and normalize directory separators. |
|
319 | 318 | relpath, tilde_expand, tilde_val = expand_user(relpath) |
|
320 | 319 | relpath = relpath.replace('\\','/') |
|
321 | 320 | |
|
322 | 321 | found = [] |
|
323 | 322 | for d in [f.replace('\\','/') + '/' for f in glob.glob(relpath+'*') |
|
324 | 323 | if os.path.isdir(f)]: |
|
325 | 324 | if ' ' in d: |
|
326 | 325 | # we don't want to deal with any of that, complex code |
|
327 | 326 | # for this is elsewhere |
|
328 | 327 | raise TryNext |
|
329 | 328 | |
|
330 | 329 | found.append(d) |
|
331 | 330 | |
|
332 | 331 | if not found: |
|
333 | 332 | if os.path.isdir(relpath): |
|
334 | 333 | return [compress_user(relpath, tilde_expand, tilde_val)] |
|
335 | 334 | |
|
336 | 335 | # if no completions so far, try bookmarks |
|
337 | 336 | bks = self.db.get('bookmarks',{}) |
|
338 | 337 | bkmatches = [s for s in bks if s.startswith(event.symbol)] |
|
339 | 338 | if bkmatches: |
|
340 | 339 | return bkmatches |
|
341 | 340 | |
|
342 | 341 | raise TryNext |
|
343 | 342 | |
|
344 | 343 | return [compress_user(p, tilde_expand, tilde_val) for p in found] |
|
345 | 344 | |
|
346 | 345 | def reset_completer(self, event): |
|
347 | 346 | "A completer for %reset magic" |
|
348 | 347 | return '-f -s in out array dhist'.split() |
@@ -1,1136 +1,1135 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Top-level display functions for displaying object in different formats.""" |
|
3 | 3 | |
|
4 | 4 | # Copyright (c) IPython Development Team. |
|
5 | 5 | # Distributed under the terms of the Modified BSD License. |
|
6 | 6 | |
|
7 | 7 | |
|
8 | 8 | try: |
|
9 | 9 | from base64 import encodebytes as base64_encode |
|
10 | 10 | except ImportError: |
|
11 | 11 | from base64 import encodestring as base64_encode |
|
12 | 12 | |
|
13 | 13 | from binascii import b2a_hex |
|
14 | 14 | import json |
|
15 | 15 | import mimetypes |
|
16 | 16 | import os |
|
17 | 17 | import struct |
|
18 | 18 | import sys |
|
19 | 19 | import warnings |
|
20 | 20 | |
|
21 |
from IPython.utils.py3compat import |
|
|
22 | unicode_type) | |
|
21 | from IPython.utils.py3compat import cast_bytes_py2, cast_unicode, unicode_type | |
|
23 | 22 | from IPython.testing.skipdoctest import skip_doctest |
|
24 | 23 | |
|
25 | 24 | __all__ = ['display', 'display_pretty', 'display_html', 'display_markdown', |
|
26 | 25 | 'display_svg', 'display_png', 'display_jpeg', 'display_latex', 'display_json', |
|
27 | 26 | 'display_javascript', 'display_pdf', 'DisplayObject', 'TextDisplayObject', |
|
28 | 27 | 'Pretty', 'HTML', 'Markdown', 'Math', 'Latex', 'SVG', 'JSON', 'Javascript', |
|
29 | 28 | 'Image', 'clear_output', 'set_matplotlib_formats', 'set_matplotlib_close', |
|
30 | 29 | 'publish_display_data', 'update_display', 'DisplayHandle'] |
|
31 | 30 | |
|
32 | 31 | #----------------------------------------------------------------------------- |
|
33 | 32 | # utility functions |
|
34 | 33 | #----------------------------------------------------------------------------- |
|
35 | 34 | |
|
36 | 35 | def _safe_exists(path): |
|
37 | 36 | """Check path, but don't let exceptions raise""" |
|
38 | 37 | try: |
|
39 | 38 | return os.path.exists(path) |
|
40 | 39 | except Exception: |
|
41 | 40 | return False |
|
42 | 41 | |
|
43 | 42 | def _merge(d1, d2): |
|
44 | 43 | """Like update, but merges sub-dicts instead of clobbering at the top level. |
|
45 | 44 | |
|
46 | 45 | Updates d1 in-place |
|
47 | 46 | """ |
|
48 | 47 | |
|
49 | 48 | if not isinstance(d2, dict) or not isinstance(d1, dict): |
|
50 | 49 | return d2 |
|
51 | 50 | for key, value in d2.items(): |
|
52 | 51 | d1[key] = _merge(d1.get(key), value) |
|
53 | 52 | return d1 |
|
54 | 53 | |
|
55 | 54 | def _display_mimetype(mimetype, objs, raw=False, metadata=None): |
|
56 | 55 | """internal implementation of all display_foo methods |
|
57 | 56 | |
|
58 | 57 | Parameters |
|
59 | 58 | ---------- |
|
60 | 59 | mimetype : str |
|
61 | 60 | The mimetype to be published (e.g. 'image/png') |
|
62 | 61 | objs : tuple of objects |
|
63 | 62 | The Python objects to display, or if raw=True raw text data to |
|
64 | 63 | display. |
|
65 | 64 | raw : bool |
|
66 | 65 | Are the data objects raw data or Python objects that need to be |
|
67 | 66 | formatted before display? [default: False] |
|
68 | 67 | metadata : dict (optional) |
|
69 | 68 | Metadata to be associated with the specific mimetype output. |
|
70 | 69 | """ |
|
71 | 70 | if metadata: |
|
72 | 71 | metadata = {mimetype: metadata} |
|
73 | 72 | if raw: |
|
74 | 73 | # turn list of pngdata into list of { 'image/png': pngdata } |
|
75 | 74 | objs = [ {mimetype: obj} for obj in objs ] |
|
76 | 75 | display(*objs, raw=raw, metadata=metadata, include=[mimetype]) |
|
77 | 76 | |
|
78 | 77 | #----------------------------------------------------------------------------- |
|
79 | 78 | # Main functions |
|
80 | 79 | #----------------------------------------------------------------------------- |
|
81 | 80 | |
|
82 | 81 | # use * to indicate transient is keyword-only |
|
83 | 82 | def publish_display_data(data, metadata=None, source=None, *, transient=None, **kwargs): |
|
84 | 83 | """Publish data and metadata to all frontends. |
|
85 | 84 | |
|
86 | 85 | See the ``display_data`` message in the messaging documentation for |
|
87 | 86 | more details about this message type. |
|
88 | 87 | |
|
89 | 88 | The following MIME types are currently implemented: |
|
90 | 89 | |
|
91 | 90 | * text/plain |
|
92 | 91 | * text/html |
|
93 | 92 | * text/markdown |
|
94 | 93 | * text/latex |
|
95 | 94 | * application/json |
|
96 | 95 | * application/javascript |
|
97 | 96 | * image/png |
|
98 | 97 | * image/jpeg |
|
99 | 98 | * image/svg+xml |
|
100 | 99 | |
|
101 | 100 | Parameters |
|
102 | 101 | ---------- |
|
103 | 102 | data : dict |
|
104 | 103 | A dictionary having keys that are valid MIME types (like |
|
105 | 104 | 'text/plain' or 'image/svg+xml') and values that are the data for |
|
106 | 105 | that MIME type. The data itself must be a JSON'able data |
|
107 | 106 | structure. Minimally all data should have the 'text/plain' data, |
|
108 | 107 | which can be displayed by all frontends. If more than the plain |
|
109 | 108 | text is given, it is up to the frontend to decide which |
|
110 | 109 | representation to use. |
|
111 | 110 | metadata : dict |
|
112 | 111 | A dictionary for metadata related to the data. This can contain |
|
113 | 112 | arbitrary key, value pairs that frontends can use to interpret |
|
114 | 113 | the data. mime-type keys matching those in data can be used |
|
115 | 114 | to specify metadata about particular representations. |
|
116 | 115 | source : str, deprecated |
|
117 | 116 | Unused. |
|
118 | 117 | transient : dict, keyword-only |
|
119 | 118 | A dictionary of transient data, such as display_id. |
|
120 | 119 | """ |
|
121 | 120 | from IPython.core.interactiveshell import InteractiveShell |
|
122 | 121 | |
|
123 | 122 | display_pub = InteractiveShell.instance().display_pub |
|
124 | 123 | |
|
125 | 124 | # only pass transient if supplied, |
|
126 | 125 | # to avoid errors with older ipykernel. |
|
127 | 126 | # TODO: We could check for ipykernel version and provide a detailed upgrade message. |
|
128 | 127 | if transient: |
|
129 | 128 | kwargs['transient'] = transient |
|
130 | 129 | |
|
131 | 130 | display_pub.publish( |
|
132 | 131 | data=data, |
|
133 | 132 | metadata=metadata, |
|
134 | 133 | **kwargs |
|
135 | 134 | ) |
|
136 | 135 | |
|
137 | 136 | |
|
138 | 137 | def _new_id(): |
|
139 | 138 | """Generate a new random text id with urandom""" |
|
140 | 139 | return b2a_hex(os.urandom(16)).decode('ascii') |
|
141 | 140 | |
|
142 | 141 | |
|
143 | 142 | def display(*objs, include=None, exclude=None, metadata=None, transient=None, display_id=None, **kwargs): |
|
144 | 143 | """Display a Python object in all frontends. |
|
145 | 144 | |
|
146 | 145 | By default all representations will be computed and sent to the frontends. |
|
147 | 146 | Frontends can decide which representation is used and how. |
|
148 | 147 | |
|
149 | 148 | Parameters |
|
150 | 149 | ---------- |
|
151 | 150 | objs : tuple of objects |
|
152 | 151 | The Python objects to display. |
|
153 | 152 | raw : bool, optional |
|
154 | 153 | Are the objects to be displayed already mimetype-keyed dicts of raw display data, |
|
155 | 154 | or Python objects that need to be formatted before display? [default: False] |
|
156 | 155 | include : list or tuple, optional |
|
157 | 156 | A list of format type strings (MIME types) to include in the |
|
158 | 157 | format data dict. If this is set *only* the format types included |
|
159 | 158 | in this list will be computed. |
|
160 | 159 | exclude : list or tuple, optional |
|
161 | 160 | A list of format type strings (MIME types) to exclude in the format |
|
162 | 161 | data dict. If this is set all format types will be computed, |
|
163 | 162 | except for those included in this argument. |
|
164 | 163 | metadata : dict, optional |
|
165 | 164 | A dictionary of metadata to associate with the output. |
|
166 | 165 | mime-type keys in this dictionary will be associated with the individual |
|
167 | 166 | representation formats, if they exist. |
|
168 | 167 | transient : dict, optional |
|
169 | 168 | A dictionary of transient data to associate with the output. |
|
170 | 169 | Data in this dict should not be persisted to files (e.g. notebooks). |
|
171 | 170 | display_id : str, optional |
|
172 | 171 | Set an id for the display. |
|
173 | 172 | This id can be used for updating this display area later via update_display. |
|
174 | 173 | If given as True, generate a new display_id |
|
175 | 174 | kwargs: additional keyword-args, optional |
|
176 | 175 | Additional keyword-arguments are passed through to the display publisher. |
|
177 | 176 | |
|
178 | 177 | Returns |
|
179 | 178 | ------- |
|
180 | 179 | |
|
181 | 180 | handle: DisplayHandle |
|
182 | 181 | Returns a handle on updatable displays, if display_id is given. |
|
183 | 182 | Returns None if no display_id is given (default). |
|
184 | 183 | """ |
|
185 | 184 | raw = kwargs.pop('raw', False) |
|
186 | 185 | if transient is None: |
|
187 | 186 | transient = {} |
|
188 | 187 | if display_id: |
|
189 | 188 | if display_id == True: |
|
190 | 189 | display_id = _new_id() |
|
191 | 190 | transient['display_id'] = display_id |
|
192 | 191 | if kwargs.get('update') and 'display_id' not in transient: |
|
193 | 192 | raise TypeError('display_id required for update_display') |
|
194 | 193 | if transient: |
|
195 | 194 | kwargs['transient'] = transient |
|
196 | 195 | |
|
197 | 196 | from IPython.core.interactiveshell import InteractiveShell |
|
198 | 197 | |
|
199 | 198 | if not raw: |
|
200 | 199 | format = InteractiveShell.instance().display_formatter.format |
|
201 | 200 | |
|
202 | 201 | for obj in objs: |
|
203 | 202 | if raw: |
|
204 | 203 | publish_display_data(data=obj, metadata=metadata, **kwargs) |
|
205 | 204 | else: |
|
206 | 205 | format_dict, md_dict = format(obj, include=include, exclude=exclude) |
|
207 | 206 | if not format_dict: |
|
208 | 207 | # nothing to display (e.g. _ipython_display_ took over) |
|
209 | 208 | continue |
|
210 | 209 | if metadata: |
|
211 | 210 | # kwarg-specified metadata gets precedence |
|
212 | 211 | _merge(md_dict, metadata) |
|
213 | 212 | publish_display_data(data=format_dict, metadata=md_dict, **kwargs) |
|
214 | 213 | if display_id: |
|
215 | 214 | return DisplayHandle(display_id) |
|
216 | 215 | |
|
217 | 216 | |
|
218 | 217 | # use * for keyword-only display_id arg |
|
219 | 218 | def update_display(obj, *, display_id, **kwargs): |
|
220 | 219 | """Update an existing display by id |
|
221 | 220 | |
|
222 | 221 | Parameters |
|
223 | 222 | ---------- |
|
224 | 223 | |
|
225 | 224 | obj: |
|
226 | 225 | The object with which to update the display |
|
227 | 226 | display_id: keyword-only |
|
228 | 227 | The id of the display to update |
|
229 | 228 | """ |
|
230 | 229 | kwargs['update'] = True |
|
231 | 230 | display(obj, display_id=display_id, **kwargs) |
|
232 | 231 | |
|
233 | 232 | |
|
234 | 233 | class DisplayHandle(object): |
|
235 | 234 | """A handle on an updatable display |
|
236 | 235 | |
|
237 | 236 | Call .update(obj) to display a new object. |
|
238 | 237 | |
|
239 | 238 | Call .display(obj) to add a new instance of this display, |
|
240 | 239 | and update existing instances. |
|
241 | 240 | """ |
|
242 | 241 | |
|
243 | 242 | def __init__(self, display_id=None): |
|
244 | 243 | if display_id is None: |
|
245 | 244 | display_id = _new_id() |
|
246 | 245 | self.display_id = display_id |
|
247 | 246 | |
|
248 | 247 | def __repr__(self): |
|
249 | 248 | return "<%s display_id=%s>" % (self.__class__.__name__, self.display_id) |
|
250 | 249 | |
|
251 | 250 | def display(self, obj, **kwargs): |
|
252 | 251 | """Make a new display with my id, updating existing instances. |
|
253 | 252 | |
|
254 | 253 | Parameters |
|
255 | 254 | ---------- |
|
256 | 255 | |
|
257 | 256 | obj: |
|
258 | 257 | object to display |
|
259 | 258 | **kwargs: |
|
260 | 259 | additional keyword arguments passed to display |
|
261 | 260 | """ |
|
262 | 261 | display(obj, display_id=self.display_id, **kwargs) |
|
263 | 262 | |
|
264 | 263 | def update(self, obj, **kwargs): |
|
265 | 264 | """Update existing displays with my id |
|
266 | 265 | |
|
267 | 266 | Parameters |
|
268 | 267 | ---------- |
|
269 | 268 | |
|
270 | 269 | obj: |
|
271 | 270 | object to display |
|
272 | 271 | **kwargs: |
|
273 | 272 | additional keyword arguments passed to update_display |
|
274 | 273 | """ |
|
275 | 274 | update_display(obj, display_id=self.display_id, **kwargs) |
|
276 | 275 | |
|
277 | 276 | |
|
278 | 277 | def display_pretty(*objs, **kwargs): |
|
279 | 278 | """Display the pretty (default) representation of an object. |
|
280 | 279 | |
|
281 | 280 | Parameters |
|
282 | 281 | ---------- |
|
283 | 282 | objs : tuple of objects |
|
284 | 283 | The Python objects to display, or if raw=True raw text data to |
|
285 | 284 | display. |
|
286 | 285 | raw : bool |
|
287 | 286 | Are the data objects raw data or Python objects that need to be |
|
288 | 287 | formatted before display? [default: False] |
|
289 | 288 | metadata : dict (optional) |
|
290 | 289 | Metadata to be associated with the specific mimetype output. |
|
291 | 290 | """ |
|
292 | 291 | _display_mimetype('text/plain', objs, **kwargs) |
|
293 | 292 | |
|
294 | 293 | |
|
295 | 294 | def display_html(*objs, **kwargs): |
|
296 | 295 | """Display the HTML representation of an object. |
|
297 | 296 | |
|
298 | 297 | Note: If raw=False and the object does not have a HTML |
|
299 | 298 | representation, no HTML will be shown. |
|
300 | 299 | |
|
301 | 300 | Parameters |
|
302 | 301 | ---------- |
|
303 | 302 | objs : tuple of objects |
|
304 | 303 | The Python objects to display, or if raw=True raw HTML data to |
|
305 | 304 | display. |
|
306 | 305 | raw : bool |
|
307 | 306 | Are the data objects raw data or Python objects that need to be |
|
308 | 307 | formatted before display? [default: False] |
|
309 | 308 | metadata : dict (optional) |
|
310 | 309 | Metadata to be associated with the specific mimetype output. |
|
311 | 310 | """ |
|
312 | 311 | _display_mimetype('text/html', objs, **kwargs) |
|
313 | 312 | |
|
314 | 313 | |
|
315 | 314 | def display_markdown(*objs, **kwargs): |
|
316 | 315 | """Displays the Markdown representation of an object. |
|
317 | 316 | |
|
318 | 317 | Parameters |
|
319 | 318 | ---------- |
|
320 | 319 | objs : tuple of objects |
|
321 | 320 | The Python objects to display, or if raw=True raw markdown data to |
|
322 | 321 | display. |
|
323 | 322 | raw : bool |
|
324 | 323 | Are the data objects raw data or Python objects that need to be |
|
325 | 324 | formatted before display? [default: False] |
|
326 | 325 | metadata : dict (optional) |
|
327 | 326 | Metadata to be associated with the specific mimetype output. |
|
328 | 327 | """ |
|
329 | 328 | |
|
330 | 329 | _display_mimetype('text/markdown', objs, **kwargs) |
|
331 | 330 | |
|
332 | 331 | |
|
333 | 332 | def display_svg(*objs, **kwargs): |
|
334 | 333 | """Display the SVG representation of an object. |
|
335 | 334 | |
|
336 | 335 | Parameters |
|
337 | 336 | ---------- |
|
338 | 337 | objs : tuple of objects |
|
339 | 338 | The Python objects to display, or if raw=True raw svg data to |
|
340 | 339 | display. |
|
341 | 340 | raw : bool |
|
342 | 341 | Are the data objects raw data or Python objects that need to be |
|
343 | 342 | formatted before display? [default: False] |
|
344 | 343 | metadata : dict (optional) |
|
345 | 344 | Metadata to be associated with the specific mimetype output. |
|
346 | 345 | """ |
|
347 | 346 | _display_mimetype('image/svg+xml', objs, **kwargs) |
|
348 | 347 | |
|
349 | 348 | |
|
350 | 349 | def display_png(*objs, **kwargs): |
|
351 | 350 | """Display the PNG representation of an object. |
|
352 | 351 | |
|
353 | 352 | Parameters |
|
354 | 353 | ---------- |
|
355 | 354 | objs : tuple of objects |
|
356 | 355 | The Python objects to display, or if raw=True raw png data to |
|
357 | 356 | display. |
|
358 | 357 | raw : bool |
|
359 | 358 | Are the data objects raw data or Python objects that need to be |
|
360 | 359 | formatted before display? [default: False] |
|
361 | 360 | metadata : dict (optional) |
|
362 | 361 | Metadata to be associated with the specific mimetype output. |
|
363 | 362 | """ |
|
364 | 363 | _display_mimetype('image/png', objs, **kwargs) |
|
365 | 364 | |
|
366 | 365 | |
|
367 | 366 | def display_jpeg(*objs, **kwargs): |
|
368 | 367 | """Display the JPEG representation of an object. |
|
369 | 368 | |
|
370 | 369 | Parameters |
|
371 | 370 | ---------- |
|
372 | 371 | objs : tuple of objects |
|
373 | 372 | The Python objects to display, or if raw=True raw JPEG data to |
|
374 | 373 | display. |
|
375 | 374 | raw : bool |
|
376 | 375 | Are the data objects raw data or Python objects that need to be |
|
377 | 376 | formatted before display? [default: False] |
|
378 | 377 | metadata : dict (optional) |
|
379 | 378 | Metadata to be associated with the specific mimetype output. |
|
380 | 379 | """ |
|
381 | 380 | _display_mimetype('image/jpeg', objs, **kwargs) |
|
382 | 381 | |
|
383 | 382 | |
|
384 | 383 | def display_latex(*objs, **kwargs): |
|
385 | 384 | """Display the LaTeX representation of an object. |
|
386 | 385 | |
|
387 | 386 | Parameters |
|
388 | 387 | ---------- |
|
389 | 388 | objs : tuple of objects |
|
390 | 389 | The Python objects to display, or if raw=True raw latex data to |
|
391 | 390 | display. |
|
392 | 391 | raw : bool |
|
393 | 392 | Are the data objects raw data or Python objects that need to be |
|
394 | 393 | formatted before display? [default: False] |
|
395 | 394 | metadata : dict (optional) |
|
396 | 395 | Metadata to be associated with the specific mimetype output. |
|
397 | 396 | """ |
|
398 | 397 | _display_mimetype('text/latex', objs, **kwargs) |
|
399 | 398 | |
|
400 | 399 | |
|
401 | 400 | def display_json(*objs, **kwargs): |
|
402 | 401 | """Display the JSON representation of an object. |
|
403 | 402 | |
|
404 | 403 | Note that not many frontends support displaying JSON. |
|
405 | 404 | |
|
406 | 405 | Parameters |
|
407 | 406 | ---------- |
|
408 | 407 | objs : tuple of objects |
|
409 | 408 | The Python objects to display, or if raw=True raw json data to |
|
410 | 409 | display. |
|
411 | 410 | raw : bool |
|
412 | 411 | Are the data objects raw data or Python objects that need to be |
|
413 | 412 | formatted before display? [default: False] |
|
414 | 413 | metadata : dict (optional) |
|
415 | 414 | Metadata to be associated with the specific mimetype output. |
|
416 | 415 | """ |
|
417 | 416 | _display_mimetype('application/json', objs, **kwargs) |
|
418 | 417 | |
|
419 | 418 | |
|
420 | 419 | def display_javascript(*objs, **kwargs): |
|
421 | 420 | """Display the Javascript representation of an object. |
|
422 | 421 | |
|
423 | 422 | Parameters |
|
424 | 423 | ---------- |
|
425 | 424 | objs : tuple of objects |
|
426 | 425 | The Python objects to display, or if raw=True raw javascript data to |
|
427 | 426 | display. |
|
428 | 427 | raw : bool |
|
429 | 428 | Are the data objects raw data or Python objects that need to be |
|
430 | 429 | formatted before display? [default: False] |
|
431 | 430 | metadata : dict (optional) |
|
432 | 431 | Metadata to be associated with the specific mimetype output. |
|
433 | 432 | """ |
|
434 | 433 | _display_mimetype('application/javascript', objs, **kwargs) |
|
435 | 434 | |
|
436 | 435 | |
|
437 | 436 | def display_pdf(*objs, **kwargs): |
|
438 | 437 | """Display the PDF representation of an object. |
|
439 | 438 | |
|
440 | 439 | Parameters |
|
441 | 440 | ---------- |
|
442 | 441 | objs : tuple of objects |
|
443 | 442 | The Python objects to display, or if raw=True raw javascript data to |
|
444 | 443 | display. |
|
445 | 444 | raw : bool |
|
446 | 445 | Are the data objects raw data or Python objects that need to be |
|
447 | 446 | formatted before display? [default: False] |
|
448 | 447 | metadata : dict (optional) |
|
449 | 448 | Metadata to be associated with the specific mimetype output. |
|
450 | 449 | """ |
|
451 | 450 | _display_mimetype('application/pdf', objs, **kwargs) |
|
452 | 451 | |
|
453 | 452 | |
|
454 | 453 | #----------------------------------------------------------------------------- |
|
455 | 454 | # Smart classes |
|
456 | 455 | #----------------------------------------------------------------------------- |
|
457 | 456 | |
|
458 | 457 | |
|
459 | 458 | class DisplayObject(object): |
|
460 | 459 | """An object that wraps data to be displayed.""" |
|
461 | 460 | |
|
462 | 461 | _read_flags = 'r' |
|
463 | 462 | _show_mem_addr = False |
|
464 | 463 | |
|
465 | 464 | def __init__(self, data=None, url=None, filename=None): |
|
466 | 465 | """Create a display object given raw data. |
|
467 | 466 | |
|
468 | 467 | When this object is returned by an expression or passed to the |
|
469 | 468 | display function, it will result in the data being displayed |
|
470 | 469 | in the frontend. The MIME type of the data should match the |
|
471 | 470 | subclasses used, so the Png subclass should be used for 'image/png' |
|
472 | 471 | data. If the data is a URL, the data will first be downloaded |
|
473 | 472 | and then displayed. If |
|
474 | 473 | |
|
475 | 474 | Parameters |
|
476 | 475 | ---------- |
|
477 | 476 | data : unicode, str or bytes |
|
478 | 477 | The raw data or a URL or file to load the data from |
|
479 | 478 | url : unicode |
|
480 | 479 | A URL to download the data from. |
|
481 | 480 | filename : unicode |
|
482 | 481 | Path to a local file to load the data from. |
|
483 | 482 | """ |
|
484 |
if data is not None and isinstance(data, str |
|
|
483 | if data is not None and isinstance(data, str): | |
|
485 | 484 | if data.startswith('http') and url is None: |
|
486 | 485 | url = data |
|
487 | 486 | filename = None |
|
488 | 487 | data = None |
|
489 | 488 | elif _safe_exists(data) and filename is None: |
|
490 | 489 | url = None |
|
491 | 490 | filename = data |
|
492 | 491 | data = None |
|
493 | 492 | |
|
494 | 493 | self.data = data |
|
495 | 494 | self.url = url |
|
496 | 495 | self.filename = None if filename is None else unicode_type(filename) |
|
497 | 496 | |
|
498 | 497 | self.reload() |
|
499 | 498 | self._check_data() |
|
500 | 499 | |
|
501 | 500 | def __repr__(self): |
|
502 | 501 | if not self._show_mem_addr: |
|
503 | 502 | cls = self.__class__ |
|
504 | 503 | r = "<%s.%s object>" % (cls.__module__, cls.__name__) |
|
505 | 504 | else: |
|
506 | 505 | r = super(DisplayObject, self).__repr__() |
|
507 | 506 | return r |
|
508 | 507 | |
|
509 | 508 | def _check_data(self): |
|
510 | 509 | """Override in subclasses if there's something to check.""" |
|
511 | 510 | pass |
|
512 | 511 | |
|
513 | 512 | def reload(self): |
|
514 | 513 | """Reload the raw data from file or URL.""" |
|
515 | 514 | if self.filename is not None: |
|
516 | 515 | with open(self.filename, self._read_flags) as f: |
|
517 | 516 | self.data = f.read() |
|
518 | 517 | elif self.url is not None: |
|
519 | 518 | try: |
|
520 | 519 | try: |
|
521 | 520 | from urllib.request import urlopen # Py3 |
|
522 | 521 | except ImportError: |
|
523 | 522 | from urllib2 import urlopen |
|
524 | 523 | response = urlopen(self.url) |
|
525 | 524 | self.data = response.read() |
|
526 | 525 | # extract encoding from header, if there is one: |
|
527 | 526 | encoding = None |
|
528 | 527 | for sub in response.headers['content-type'].split(';'): |
|
529 | 528 | sub = sub.strip() |
|
530 | 529 | if sub.startswith('charset'): |
|
531 | 530 | encoding = sub.split('=')[-1].strip() |
|
532 | 531 | break |
|
533 | 532 | # decode data, if an encoding was specified |
|
534 | 533 | if encoding: |
|
535 | 534 | self.data = self.data.decode(encoding, 'replace') |
|
536 | 535 | except: |
|
537 | 536 | self.data = None |
|
538 | 537 | |
|
539 | 538 | class TextDisplayObject(DisplayObject): |
|
540 | 539 | """Validate that display data is text""" |
|
541 | 540 | def _check_data(self): |
|
542 |
if self.data is not None and not isinstance(self.data, str |
|
|
541 | if self.data is not None and not isinstance(self.data, str): | |
|
543 | 542 | raise TypeError("%s expects text, not %r" % (self.__class__.__name__, self.data)) |
|
544 | 543 | |
|
545 | 544 | class Pretty(TextDisplayObject): |
|
546 | 545 | |
|
547 | 546 | def _repr_pretty_(self): |
|
548 | 547 | return self.data |
|
549 | 548 | |
|
550 | 549 | |
|
551 | 550 | class HTML(TextDisplayObject): |
|
552 | 551 | |
|
553 | 552 | def _repr_html_(self): |
|
554 | 553 | return self.data |
|
555 | 554 | |
|
556 | 555 | def __html__(self): |
|
557 | 556 | """ |
|
558 | 557 | This method exists to inform other HTML-using modules (e.g. Markupsafe, |
|
559 | 558 | htmltag, etc) that this object is HTML and does not need things like |
|
560 | 559 | special characters (<>&) escaped. |
|
561 | 560 | """ |
|
562 | 561 | return self._repr_html_() |
|
563 | 562 | |
|
564 | 563 | |
|
565 | 564 | class Markdown(TextDisplayObject): |
|
566 | 565 | |
|
567 | 566 | def _repr_markdown_(self): |
|
568 | 567 | return self.data |
|
569 | 568 | |
|
570 | 569 | |
|
571 | 570 | class Math(TextDisplayObject): |
|
572 | 571 | |
|
573 | 572 | def _repr_latex_(self): |
|
574 | 573 | s = self.data.strip('$') |
|
575 | 574 | return "$$%s$$" % s |
|
576 | 575 | |
|
577 | 576 | |
|
578 | 577 | class Latex(TextDisplayObject): |
|
579 | 578 | |
|
580 | 579 | def _repr_latex_(self): |
|
581 | 580 | return self.data |
|
582 | 581 | |
|
583 | 582 | |
|
584 | 583 | class SVG(DisplayObject): |
|
585 | 584 | |
|
586 | 585 | _read_flags = 'rb' |
|
587 | 586 | # wrap data in a property, which extracts the <svg> tag, discarding |
|
588 | 587 | # document headers |
|
589 | 588 | _data = None |
|
590 | 589 | |
|
591 | 590 | @property |
|
592 | 591 | def data(self): |
|
593 | 592 | return self._data |
|
594 | 593 | |
|
595 | 594 | @data.setter |
|
596 | 595 | def data(self, svg): |
|
597 | 596 | if svg is None: |
|
598 | 597 | self._data = None |
|
599 | 598 | return |
|
600 | 599 | # parse into dom object |
|
601 | 600 | from xml.dom import minidom |
|
602 | 601 | svg = cast_bytes_py2(svg) |
|
603 | 602 | x = minidom.parseString(svg) |
|
604 | 603 | # get svg tag (should be 1) |
|
605 | 604 | found_svg = x.getElementsByTagName('svg') |
|
606 | 605 | if found_svg: |
|
607 | 606 | svg = found_svg[0].toxml() |
|
608 | 607 | else: |
|
609 | 608 | # fallback on the input, trust the user |
|
610 | 609 | # but this is probably an error. |
|
611 | 610 | pass |
|
612 | 611 | svg = cast_unicode(svg) |
|
613 | 612 | self._data = svg |
|
614 | 613 | |
|
615 | 614 | def _repr_svg_(self): |
|
616 | 615 | return self.data |
|
617 | 616 | |
|
618 | 617 | |
|
619 | 618 | class JSON(DisplayObject): |
|
620 | 619 | """JSON expects a JSON-able dict or list |
|
621 | 620 | |
|
622 | 621 | not an already-serialized JSON string. |
|
623 | 622 | |
|
624 | 623 | Scalar types (None, number, string) are not allowed, only dict or list containers. |
|
625 | 624 | """ |
|
626 | 625 | # wrap data in a property, which warns about passing already-serialized JSON |
|
627 | 626 | _data = None |
|
628 | 627 | def __init__(self, data=None, url=None, filename=None, expanded=False, metadata=None): |
|
629 | 628 | """Create a JSON display object given raw data. |
|
630 | 629 | |
|
631 | 630 | Parameters |
|
632 | 631 | ---------- |
|
633 | 632 | data : dict or list |
|
634 | 633 | JSON data to display. Not an already-serialized JSON string. |
|
635 | 634 | Scalar types (None, number, string) are not allowed, only dict |
|
636 | 635 | or list containers. |
|
637 | 636 | url : unicode |
|
638 | 637 | A URL to download the data from. |
|
639 | 638 | filename : unicode |
|
640 | 639 | Path to a local file to load the data from. |
|
641 | 640 | expanded : boolean |
|
642 | 641 | Metadata to control whether a JSON display component is expanded. |
|
643 | 642 | metadata: dict |
|
644 | 643 | Specify extra metadata to attach to the json display object. |
|
645 | 644 | """ |
|
646 | 645 | self.expanded = expanded |
|
647 | 646 | self.metadata = metadata |
|
648 | 647 | super(JSON, self).__init__(data=data, url=url, filename=filename) |
|
649 | 648 | |
|
650 | 649 | def _check_data(self): |
|
651 | 650 | if self.data is not None and not isinstance(self.data, (dict, list)): |
|
652 | 651 | raise TypeError("%s expects JSONable dict or list, not %r" % (self.__class__.__name__, self.data)) |
|
653 | 652 | |
|
654 | 653 | @property |
|
655 | 654 | def data(self): |
|
656 | 655 | return self._data |
|
657 | 656 | |
|
658 | 657 | @data.setter |
|
659 | 658 | def data(self, data): |
|
660 |
if isinstance(data, str |
|
|
659 | if isinstance(data, str): | |
|
661 | 660 | warnings.warn("JSON expects JSONable dict or list, not JSON strings") |
|
662 | 661 | data = json.loads(data) |
|
663 | 662 | self._data = data |
|
664 | 663 | |
|
665 | 664 | def _data_and_metadata(self): |
|
666 | 665 | md = {'expanded': self.expanded} |
|
667 | 666 | if self.metadata: |
|
668 | 667 | md.update(self.metadata) |
|
669 | 668 | return self.data, md |
|
670 | 669 | |
|
671 | 670 | def _repr_json_(self): |
|
672 | 671 | return self._data_and_metadata() |
|
673 | 672 | |
|
674 | 673 | css_t = """$("head").append($("<link/>").attr({ |
|
675 | 674 | rel: "stylesheet", |
|
676 | 675 | type: "text/css", |
|
677 | 676 | href: "%s" |
|
678 | 677 | })); |
|
679 | 678 | """ |
|
680 | 679 | |
|
681 | 680 | lib_t1 = """$.getScript("%s", function () { |
|
682 | 681 | """ |
|
683 | 682 | lib_t2 = """}); |
|
684 | 683 | """ |
|
685 | 684 | |
|
686 | 685 | class Javascript(TextDisplayObject): |
|
687 | 686 | |
|
688 | 687 | def __init__(self, data=None, url=None, filename=None, lib=None, css=None): |
|
689 | 688 | """Create a Javascript display object given raw data. |
|
690 | 689 | |
|
691 | 690 | When this object is returned by an expression or passed to the |
|
692 | 691 | display function, it will result in the data being displayed |
|
693 | 692 | in the frontend. If the data is a URL, the data will first be |
|
694 | 693 | downloaded and then displayed. |
|
695 | 694 | |
|
696 | 695 | In the Notebook, the containing element will be available as `element`, |
|
697 | 696 | and jQuery will be available. Content appended to `element` will be |
|
698 | 697 | visible in the output area. |
|
699 | 698 | |
|
700 | 699 | Parameters |
|
701 | 700 | ---------- |
|
702 | 701 | data : unicode, str or bytes |
|
703 | 702 | The Javascript source code or a URL to download it from. |
|
704 | 703 | url : unicode |
|
705 | 704 | A URL to download the data from. |
|
706 | 705 | filename : unicode |
|
707 | 706 | Path to a local file to load the data from. |
|
708 | 707 | lib : list or str |
|
709 | 708 | A sequence of Javascript library URLs to load asynchronously before |
|
710 | 709 | running the source code. The full URLs of the libraries should |
|
711 | 710 | be given. A single Javascript library URL can also be given as a |
|
712 | 711 | string. |
|
713 | 712 | css: : list or str |
|
714 | 713 | A sequence of css files to load before running the source code. |
|
715 | 714 | The full URLs of the css files should be given. A single css URL |
|
716 | 715 | can also be given as a string. |
|
717 | 716 | """ |
|
718 |
if isinstance(lib, str |
|
|
717 | if isinstance(lib, str): | |
|
719 | 718 | lib = [lib] |
|
720 | 719 | elif lib is None: |
|
721 | 720 | lib = [] |
|
722 |
if isinstance(css, str |
|
|
721 | if isinstance(css, str): | |
|
723 | 722 | css = [css] |
|
724 | 723 | elif css is None: |
|
725 | 724 | css = [] |
|
726 | 725 | if not isinstance(lib, (list,tuple)): |
|
727 | 726 | raise TypeError('expected sequence, got: %r' % lib) |
|
728 | 727 | if not isinstance(css, (list,tuple)): |
|
729 | 728 | raise TypeError('expected sequence, got: %r' % css) |
|
730 | 729 | self.lib = lib |
|
731 | 730 | self.css = css |
|
732 | 731 | super(Javascript, self).__init__(data=data, url=url, filename=filename) |
|
733 | 732 | |
|
734 | 733 | def _repr_javascript_(self): |
|
735 | 734 | r = '' |
|
736 | 735 | for c in self.css: |
|
737 | 736 | r += css_t % c |
|
738 | 737 | for l in self.lib: |
|
739 | 738 | r += lib_t1 % l |
|
740 | 739 | r += self.data |
|
741 | 740 | r += lib_t2*len(self.lib) |
|
742 | 741 | return r |
|
743 | 742 | |
|
744 | 743 | # constants for identifying png/jpeg data |
|
745 | 744 | _PNG = b'\x89PNG\r\n\x1a\n' |
|
746 | 745 | _JPEG = b'\xff\xd8' |
|
747 | 746 | |
|
748 | 747 | def _pngxy(data): |
|
749 | 748 | """read the (width, height) from a PNG header""" |
|
750 | 749 | ihdr = data.index(b'IHDR') |
|
751 | 750 | # next 8 bytes are width/height |
|
752 | 751 | w4h4 = data[ihdr+4:ihdr+12] |
|
753 | 752 | return struct.unpack('>ii', w4h4) |
|
754 | 753 | |
|
755 | 754 | def _jpegxy(data): |
|
756 | 755 | """read the (width, height) from a JPEG header""" |
|
757 | 756 | # adapted from http://www.64lines.com/jpeg-width-height |
|
758 | 757 | |
|
759 | 758 | idx = 4 |
|
760 | 759 | while True: |
|
761 | 760 | block_size = struct.unpack('>H', data[idx:idx+2])[0] |
|
762 | 761 | idx = idx + block_size |
|
763 | 762 | if data[idx:idx+2] == b'\xFF\xC0': |
|
764 | 763 | # found Start of Frame |
|
765 | 764 | iSOF = idx |
|
766 | 765 | break |
|
767 | 766 | else: |
|
768 | 767 | # read another block |
|
769 | 768 | idx += 2 |
|
770 | 769 | |
|
771 | 770 | h, w = struct.unpack('>HH', data[iSOF+5:iSOF+9]) |
|
772 | 771 | return w, h |
|
773 | 772 | |
|
774 | 773 | class Image(DisplayObject): |
|
775 | 774 | |
|
776 | 775 | _read_flags = 'rb' |
|
777 | 776 | _FMT_JPEG = u'jpeg' |
|
778 | 777 | _FMT_PNG = u'png' |
|
779 | 778 | _ACCEPTABLE_EMBEDDINGS = [_FMT_JPEG, _FMT_PNG] |
|
780 | 779 | |
|
781 | 780 | def __init__(self, data=None, url=None, filename=None, format=None, |
|
782 | 781 | embed=None, width=None, height=None, retina=False, |
|
783 | 782 | unconfined=False, metadata=None): |
|
784 | 783 | """Create a PNG/JPEG image object given raw data. |
|
785 | 784 | |
|
786 | 785 | When this object is returned by an input cell or passed to the |
|
787 | 786 | display function, it will result in the image being displayed |
|
788 | 787 | in the frontend. |
|
789 | 788 | |
|
790 | 789 | Parameters |
|
791 | 790 | ---------- |
|
792 | 791 | data : unicode, str or bytes |
|
793 | 792 | The raw image data or a URL or filename to load the data from. |
|
794 | 793 | This always results in embedded image data. |
|
795 | 794 | url : unicode |
|
796 | 795 | A URL to download the data from. If you specify `url=`, |
|
797 | 796 | the image data will not be embedded unless you also specify `embed=True`. |
|
798 | 797 | filename : unicode |
|
799 | 798 | Path to a local file to load the data from. |
|
800 | 799 | Images from a file are always embedded. |
|
801 | 800 | format : unicode |
|
802 | 801 | The format of the image data (png/jpeg/jpg). If a filename or URL is given |
|
803 | 802 | for format will be inferred from the filename extension. |
|
804 | 803 | embed : bool |
|
805 | 804 | Should the image data be embedded using a data URI (True) or be |
|
806 | 805 | loaded using an <img> tag. Set this to True if you want the image |
|
807 | 806 | to be viewable later with no internet connection in the notebook. |
|
808 | 807 | |
|
809 | 808 | Default is `True`, unless the keyword argument `url` is set, then |
|
810 | 809 | default value is `False`. |
|
811 | 810 | |
|
812 | 811 | Note that QtConsole is not able to display images if `embed` is set to `False` |
|
813 | 812 | width : int |
|
814 | 813 | Width in pixels to which to constrain the image in html |
|
815 | 814 | height : int |
|
816 | 815 | Height in pixels to which to constrain the image in html |
|
817 | 816 | retina : bool |
|
818 | 817 | Automatically set the width and height to half of the measured |
|
819 | 818 | width and height. |
|
820 | 819 | This only works for embedded images because it reads the width/height |
|
821 | 820 | from image data. |
|
822 | 821 | For non-embedded images, you can just set the desired display width |
|
823 | 822 | and height directly. |
|
824 | 823 | unconfined: bool |
|
825 | 824 | Set unconfined=True to disable max-width confinement of the image. |
|
826 | 825 | metadata: dict |
|
827 | 826 | Specify extra metadata to attach to the image. |
|
828 | 827 | |
|
829 | 828 | Examples |
|
830 | 829 | -------- |
|
831 | 830 | # embedded image data, works in qtconsole and notebook |
|
832 | 831 | # when passed positionally, the first arg can be any of raw image data, |
|
833 | 832 | # a URL, or a filename from which to load image data. |
|
834 | 833 | # The result is always embedding image data for inline images. |
|
835 | 834 | Image('http://www.google.fr/images/srpr/logo3w.png') |
|
836 | 835 | Image('/path/to/image.jpg') |
|
837 | 836 | Image(b'RAW_PNG_DATA...') |
|
838 | 837 | |
|
839 | 838 | # Specifying Image(url=...) does not embed the image data, |
|
840 | 839 | # it only generates `<img>` tag with a link to the source. |
|
841 | 840 | # This will not work in the qtconsole or offline. |
|
842 | 841 | Image(url='http://www.google.fr/images/srpr/logo3w.png') |
|
843 | 842 | |
|
844 | 843 | """ |
|
845 | 844 | if filename is not None: |
|
846 | 845 | ext = self._find_ext(filename) |
|
847 | 846 | elif url is not None: |
|
848 | 847 | ext = self._find_ext(url) |
|
849 | 848 | elif data is None: |
|
850 | 849 | raise ValueError("No image data found. Expecting filename, url, or data.") |
|
851 |
elif isinstance(data, str |
|
|
850 | elif isinstance(data, str) and ( | |
|
852 | 851 | data.startswith('http') or _safe_exists(data) |
|
853 | 852 | ): |
|
854 | 853 | ext = self._find_ext(data) |
|
855 | 854 | else: |
|
856 | 855 | ext = None |
|
857 | 856 | |
|
858 | 857 | if format is None: |
|
859 | 858 | if ext is not None: |
|
860 | 859 | if ext == u'jpg' or ext == u'jpeg': |
|
861 | 860 | format = self._FMT_JPEG |
|
862 | 861 | if ext == u'png': |
|
863 | 862 | format = self._FMT_PNG |
|
864 | 863 | else: |
|
865 | 864 | format = ext.lower() |
|
866 | 865 | elif isinstance(data, bytes): |
|
867 | 866 | # infer image type from image data header, |
|
868 | 867 | # only if format has not been specified. |
|
869 | 868 | if data[:2] == _JPEG: |
|
870 | 869 | format = self._FMT_JPEG |
|
871 | 870 | |
|
872 | 871 | # failed to detect format, default png |
|
873 | 872 | if format is None: |
|
874 | 873 | format = 'png' |
|
875 | 874 | |
|
876 | 875 | if format.lower() == 'jpg': |
|
877 | 876 | # jpg->jpeg |
|
878 | 877 | format = self._FMT_JPEG |
|
879 | 878 | |
|
880 | 879 | self.format = unicode_type(format).lower() |
|
881 | 880 | self.embed = embed if embed is not None else (url is None) |
|
882 | 881 | |
|
883 | 882 | if self.embed and self.format not in self._ACCEPTABLE_EMBEDDINGS: |
|
884 | 883 | raise ValueError("Cannot embed the '%s' image format" % (self.format)) |
|
885 | 884 | self.width = width |
|
886 | 885 | self.height = height |
|
887 | 886 | self.retina = retina |
|
888 | 887 | self.unconfined = unconfined |
|
889 | 888 | self.metadata = metadata |
|
890 | 889 | super(Image, self).__init__(data=data, url=url, filename=filename) |
|
891 | 890 | |
|
892 | 891 | if retina: |
|
893 | 892 | self._retina_shape() |
|
894 | 893 | |
|
895 | 894 | def _retina_shape(self): |
|
896 | 895 | """load pixel-doubled width and height from image data""" |
|
897 | 896 | if not self.embed: |
|
898 | 897 | return |
|
899 | 898 | if self.format == 'png': |
|
900 | 899 | w, h = _pngxy(self.data) |
|
901 | 900 | elif self.format == 'jpeg': |
|
902 | 901 | w, h = _jpegxy(self.data) |
|
903 | 902 | else: |
|
904 | 903 | # retina only supports png |
|
905 | 904 | return |
|
906 | 905 | self.width = w // 2 |
|
907 | 906 | self.height = h // 2 |
|
908 | 907 | |
|
909 | 908 | def reload(self): |
|
910 | 909 | """Reload the raw data from file or URL.""" |
|
911 | 910 | if self.embed: |
|
912 | 911 | super(Image,self).reload() |
|
913 | 912 | if self.retina: |
|
914 | 913 | self._retina_shape() |
|
915 | 914 | |
|
916 | 915 | def _repr_html_(self): |
|
917 | 916 | if not self.embed: |
|
918 | 917 | width = height = klass = '' |
|
919 | 918 | if self.width: |
|
920 | 919 | width = ' width="%d"' % self.width |
|
921 | 920 | if self.height: |
|
922 | 921 | height = ' height="%d"' % self.height |
|
923 | 922 | if self.unconfined: |
|
924 | 923 | klass = ' class="unconfined"' |
|
925 | 924 | return u'<img src="{url}"{width}{height}{klass}/>'.format( |
|
926 | 925 | url=self.url, |
|
927 | 926 | width=width, |
|
928 | 927 | height=height, |
|
929 | 928 | klass=klass, |
|
930 | 929 | ) |
|
931 | 930 | |
|
932 | 931 | def _data_and_metadata(self): |
|
933 | 932 | """shortcut for returning metadata with shape information, if defined""" |
|
934 | 933 | md = {} |
|
935 | 934 | if self.width: |
|
936 | 935 | md['width'] = self.width |
|
937 | 936 | if self.height: |
|
938 | 937 | md['height'] = self.height |
|
939 | 938 | if self.unconfined: |
|
940 | 939 | md['unconfined'] = self.unconfined |
|
941 | 940 | if self.metadata: |
|
942 | 941 | md.update(self.metadata) |
|
943 | 942 | if md: |
|
944 | 943 | return self.data, md |
|
945 | 944 | else: |
|
946 | 945 | return self.data |
|
947 | 946 | |
|
948 | 947 | def _repr_png_(self): |
|
949 | 948 | if self.embed and self.format == u'png': |
|
950 | 949 | return self._data_and_metadata() |
|
951 | 950 | |
|
952 | 951 | def _repr_jpeg_(self): |
|
953 | 952 | if self.embed and (self.format == u'jpeg' or self.format == u'jpg'): |
|
954 | 953 | return self._data_and_metadata() |
|
955 | 954 | |
|
956 | 955 | def _find_ext(self, s): |
|
957 | 956 | return unicode_type(s.split('.')[-1].lower()) |
|
958 | 957 | |
|
959 | 958 | class Video(DisplayObject): |
|
960 | 959 | |
|
961 | 960 | def __init__(self, data=None, url=None, filename=None, embed=False, mimetype=None): |
|
962 | 961 | """Create a video object given raw data or an URL. |
|
963 | 962 | |
|
964 | 963 | When this object is returned by an input cell or passed to the |
|
965 | 964 | display function, it will result in the video being displayed |
|
966 | 965 | in the frontend. |
|
967 | 966 | |
|
968 | 967 | Parameters |
|
969 | 968 | ---------- |
|
970 | 969 | data : unicode, str or bytes |
|
971 | 970 | The raw video data or a URL or filename to load the data from. |
|
972 | 971 | Raw data will require passing `embed=True`. |
|
973 | 972 | url : unicode |
|
974 | 973 | A URL for the video. If you specify `url=`, |
|
975 | 974 | the image data will not be embedded. |
|
976 | 975 | filename : unicode |
|
977 | 976 | Path to a local file containing the video. |
|
978 | 977 | Will be interpreted as a local URL unless `embed=True`. |
|
979 | 978 | embed : bool |
|
980 | 979 | Should the video be embedded using a data URI (True) or be |
|
981 | 980 | loaded using a <video> tag (False). |
|
982 | 981 | |
|
983 | 982 | Since videos are large, embedding them should be avoided, if possible. |
|
984 | 983 | You must confirm embedding as your intention by passing `embed=True`. |
|
985 | 984 | |
|
986 | 985 | Local files can be displayed with URLs without embedding the content, via:: |
|
987 | 986 | |
|
988 | 987 | Video('./video.mp4') |
|
989 | 988 | |
|
990 | 989 | mimetype: unicode |
|
991 | 990 | Specify the mimetype for embedded videos. |
|
992 | 991 | Default will be guessed from file extension, if available. |
|
993 | 992 | |
|
994 | 993 | Examples |
|
995 | 994 | -------- |
|
996 | 995 | |
|
997 | 996 | Video('https://archive.org/download/Sita_Sings_the_Blues/Sita_Sings_the_Blues_small.mp4') |
|
998 | 997 | Video('path/to/video.mp4') |
|
999 | 998 | Video('path/to/video.mp4', embed=True) |
|
1000 | 999 | Video(b'raw-videodata', embed=True) |
|
1001 | 1000 | """ |
|
1002 |
if url is None and isinstance(data, str |
|
|
1001 | if url is None and isinstance(data, str) and data.startswith(('http:', 'https:')): | |
|
1003 | 1002 | url = data |
|
1004 | 1003 | data = None |
|
1005 | 1004 | elif os.path.exists(data): |
|
1006 | 1005 | filename = data |
|
1007 | 1006 | data = None |
|
1008 | 1007 | |
|
1009 | 1008 | if data and not embed: |
|
1010 | 1009 | msg = ''.join([ |
|
1011 | 1010 | "To embed videos, you must pass embed=True ", |
|
1012 | 1011 | "(this may make your notebook files huge)\n", |
|
1013 | 1012 | "Consider passing Video(url='...')", |
|
1014 | 1013 | ]) |
|
1015 | 1014 | raise ValueError(msg) |
|
1016 | 1015 | |
|
1017 | 1016 | self.mimetype = mimetype |
|
1018 | 1017 | self.embed = embed |
|
1019 | 1018 | super(Video, self).__init__(data=data, url=url, filename=filename) |
|
1020 | 1019 | |
|
1021 | 1020 | def _repr_html_(self): |
|
1022 | 1021 | # External URLs and potentially local files are not embedded into the |
|
1023 | 1022 | # notebook output. |
|
1024 | 1023 | if not self.embed: |
|
1025 | 1024 | url = self.url if self.url is not None else self.filename |
|
1026 | 1025 | output = """<video src="{0}" controls> |
|
1027 | 1026 | Your browser does not support the <code>video</code> element. |
|
1028 | 1027 | </video>""".format(url) |
|
1029 | 1028 | return output |
|
1030 | 1029 | |
|
1031 | 1030 | # Embedded videos are base64-encoded. |
|
1032 | 1031 | mimetype = self.mimetype |
|
1033 | 1032 | if self.filename is not None: |
|
1034 | 1033 | if not mimetype: |
|
1035 | 1034 | mimetype, _ = mimetypes.guess_type(self.filename) |
|
1036 | 1035 | |
|
1037 | 1036 | with open(self.filename, 'rb') as f: |
|
1038 | 1037 | video = f.read() |
|
1039 | 1038 | else: |
|
1040 | 1039 | video = self.data |
|
1041 | 1040 | if isinstance(video, unicode_type): |
|
1042 | 1041 | # unicode input is already b64-encoded |
|
1043 | 1042 | b64_video = video |
|
1044 | 1043 | else: |
|
1045 | 1044 | b64_video = base64_encode(video).decode('ascii').rstrip() |
|
1046 | 1045 | |
|
1047 | 1046 | output = """<video controls> |
|
1048 | 1047 | <source src="data:{0};base64,{1}" type="{0}"> |
|
1049 | 1048 | Your browser does not support the video tag. |
|
1050 | 1049 | </video>""".format(mimetype, b64_video) |
|
1051 | 1050 | return output |
|
1052 | 1051 | |
|
1053 | 1052 | def reload(self): |
|
1054 | 1053 | # TODO |
|
1055 | 1054 | pass |
|
1056 | 1055 | |
|
1057 | 1056 | def _repr_png_(self): |
|
1058 | 1057 | # TODO |
|
1059 | 1058 | pass |
|
1060 | 1059 | def _repr_jpeg_(self): |
|
1061 | 1060 | # TODO |
|
1062 | 1061 | pass |
|
1063 | 1062 | |
|
1064 | 1063 | def clear_output(wait=False): |
|
1065 | 1064 | """Clear the output of the current cell receiving output. |
|
1066 | 1065 | |
|
1067 | 1066 | Parameters |
|
1068 | 1067 | ---------- |
|
1069 | 1068 | wait : bool [default: false] |
|
1070 | 1069 | Wait to clear the output until new output is available to replace it.""" |
|
1071 | 1070 | from IPython.core.interactiveshell import InteractiveShell |
|
1072 | 1071 | if InteractiveShell.initialized(): |
|
1073 | 1072 | InteractiveShell.instance().display_pub.clear_output(wait) |
|
1074 | 1073 | else: |
|
1075 | 1074 | print('\033[2K\r', end='') |
|
1076 | 1075 | sys.stdout.flush() |
|
1077 | 1076 | print('\033[2K\r', end='') |
|
1078 | 1077 | sys.stderr.flush() |
|
1079 | 1078 | |
|
1080 | 1079 | |
|
1081 | 1080 | @skip_doctest |
|
1082 | 1081 | def set_matplotlib_formats(*formats, **kwargs): |
|
1083 | 1082 | """Select figure formats for the inline backend. Optionally pass quality for JPEG. |
|
1084 | 1083 | |
|
1085 | 1084 | For example, this enables PNG and JPEG output with a JPEG quality of 90%:: |
|
1086 | 1085 | |
|
1087 | 1086 | In [1]: set_matplotlib_formats('png', 'jpeg', quality=90) |
|
1088 | 1087 | |
|
1089 | 1088 | To set this in your config files use the following:: |
|
1090 | 1089 | |
|
1091 | 1090 | c.InlineBackend.figure_formats = {'png', 'jpeg'} |
|
1092 | 1091 | c.InlineBackend.print_figure_kwargs.update({'quality' : 90}) |
|
1093 | 1092 | |
|
1094 | 1093 | Parameters |
|
1095 | 1094 | ---------- |
|
1096 | 1095 | *formats : strs |
|
1097 | 1096 | One or more figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'. |
|
1098 | 1097 | **kwargs : |
|
1099 | 1098 | Keyword args will be relayed to ``figure.canvas.print_figure``. |
|
1100 | 1099 | """ |
|
1101 | 1100 | from IPython.core.interactiveshell import InteractiveShell |
|
1102 | 1101 | from IPython.core.pylabtools import select_figure_formats |
|
1103 | 1102 | # build kwargs, starting with InlineBackend config |
|
1104 | 1103 | kw = {} |
|
1105 | 1104 | from ipykernel.pylab.config import InlineBackend |
|
1106 | 1105 | cfg = InlineBackend.instance() |
|
1107 | 1106 | kw.update(cfg.print_figure_kwargs) |
|
1108 | 1107 | kw.update(**kwargs) |
|
1109 | 1108 | shell = InteractiveShell.instance() |
|
1110 | 1109 | select_figure_formats(shell, formats, **kw) |
|
1111 | 1110 | |
|
1112 | 1111 | @skip_doctest |
|
1113 | 1112 | def set_matplotlib_close(close=True): |
|
1114 | 1113 | """Set whether the inline backend closes all figures automatically or not. |
|
1115 | 1114 | |
|
1116 | 1115 | By default, the inline backend used in the IPython Notebook will close all |
|
1117 | 1116 | matplotlib figures automatically after each cell is run. This means that |
|
1118 | 1117 | plots in different cells won't interfere. Sometimes, you may want to make |
|
1119 | 1118 | a plot in one cell and then refine it in later cells. This can be accomplished |
|
1120 | 1119 | by:: |
|
1121 | 1120 | |
|
1122 | 1121 | In [1]: set_matplotlib_close(False) |
|
1123 | 1122 | |
|
1124 | 1123 | To set this in your config files use the following:: |
|
1125 | 1124 | |
|
1126 | 1125 | c.InlineBackend.close_figures = False |
|
1127 | 1126 | |
|
1128 | 1127 | Parameters |
|
1129 | 1128 | ---------- |
|
1130 | 1129 | close : bool |
|
1131 | 1130 | Should all matplotlib figures be automatically closed after each cell is |
|
1132 | 1131 | run? |
|
1133 | 1132 | """ |
|
1134 | 1133 | from ipykernel.pylab.config import InlineBackend |
|
1135 | 1134 | cfg = InlineBackend.instance() |
|
1136 | 1135 | cfg.close_figures = close |
@@ -1,947 +1,947 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Display formatters. |
|
3 | 3 | |
|
4 | 4 | Inheritance diagram: |
|
5 | 5 | |
|
6 | 6 | .. inheritance-diagram:: IPython.core.formatters |
|
7 | 7 | :parts: 3 |
|
8 | 8 | """ |
|
9 | 9 | |
|
10 | 10 | # Copyright (c) IPython Development Team. |
|
11 | 11 | # Distributed under the terms of the Modified BSD License. |
|
12 | 12 | |
|
13 | 13 | import abc |
|
14 | 14 | import json |
|
15 | 15 | import sys |
|
16 | 16 | import traceback |
|
17 | 17 | import warnings |
|
18 | 18 | |
|
19 | 19 | from decorator import decorator |
|
20 | 20 | |
|
21 | 21 | from traitlets.config.configurable import Configurable |
|
22 | 22 | from IPython.core.getipython import get_ipython |
|
23 | 23 | from IPython.utils.sentinel import Sentinel |
|
24 | 24 | from IPython.utils.dir2 import get_real_method |
|
25 | 25 | from IPython.lib import pretty |
|
26 | 26 | from traitlets import ( |
|
27 | 27 | Bool, Dict, Integer, Unicode, CUnicode, ObjectName, List, |
|
28 | 28 | ForwardDeclaredInstance, |
|
29 | 29 | default, observe, |
|
30 | 30 | ) |
|
31 | 31 | from IPython.utils.py3compat import ( |
|
32 |
with_metaclass, |
|
|
32 | with_metaclass, unicode_type, | |
|
33 | 33 | ) |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | class DisplayFormatter(Configurable): |
|
37 | 37 | |
|
38 | 38 | active_types = List(Unicode(), |
|
39 | 39 | help="""List of currently active mime-types to display. |
|
40 | 40 | You can use this to set a white-list for formats to display. |
|
41 | 41 | |
|
42 | 42 | Most users will not need to change this value. |
|
43 | 43 | """).tag(config=True) |
|
44 | 44 | |
|
45 | 45 | @default('active_types') |
|
46 | 46 | def _active_types_default(self): |
|
47 | 47 | return self.format_types |
|
48 | 48 | |
|
49 | 49 | @observe('active_types') |
|
50 | 50 | def _active_types_changed(self, change): |
|
51 | 51 | for key, formatter in self.formatters.items(): |
|
52 | 52 | if key in change['new']: |
|
53 | 53 | formatter.enabled = True |
|
54 | 54 | else: |
|
55 | 55 | formatter.enabled = False |
|
56 | 56 | |
|
57 | 57 | ipython_display_formatter = ForwardDeclaredInstance('FormatterABC') |
|
58 | 58 | @default('ipython_display_formatter') |
|
59 | 59 | def _default_formatter(self): |
|
60 | 60 | return IPythonDisplayFormatter(parent=self) |
|
61 | 61 | |
|
62 | 62 | # A dict of formatter whose keys are format types (MIME types) and whose |
|
63 | 63 | # values are subclasses of BaseFormatter. |
|
64 | 64 | formatters = Dict() |
|
65 | 65 | @default('formatters') |
|
66 | 66 | def _formatters_default(self): |
|
67 | 67 | """Activate the default formatters.""" |
|
68 | 68 | formatter_classes = [ |
|
69 | 69 | PlainTextFormatter, |
|
70 | 70 | HTMLFormatter, |
|
71 | 71 | MarkdownFormatter, |
|
72 | 72 | SVGFormatter, |
|
73 | 73 | PNGFormatter, |
|
74 | 74 | PDFFormatter, |
|
75 | 75 | JPEGFormatter, |
|
76 | 76 | LatexFormatter, |
|
77 | 77 | JSONFormatter, |
|
78 | 78 | JavascriptFormatter |
|
79 | 79 | ] |
|
80 | 80 | d = {} |
|
81 | 81 | for cls in formatter_classes: |
|
82 | 82 | f = cls(parent=self) |
|
83 | 83 | d[f.format_type] = f |
|
84 | 84 | return d |
|
85 | 85 | |
|
86 | 86 | def format(self, obj, include=None, exclude=None): |
|
87 | 87 | """Return a format data dict for an object. |
|
88 | 88 | |
|
89 | 89 | By default all format types will be computed. |
|
90 | 90 | |
|
91 | 91 | The following MIME types are currently implemented: |
|
92 | 92 | |
|
93 | 93 | * text/plain |
|
94 | 94 | * text/html |
|
95 | 95 | * text/markdown |
|
96 | 96 | * text/latex |
|
97 | 97 | * application/json |
|
98 | 98 | * application/javascript |
|
99 | 99 | * application/pdf |
|
100 | 100 | * image/png |
|
101 | 101 | * image/jpeg |
|
102 | 102 | * image/svg+xml |
|
103 | 103 | |
|
104 | 104 | Parameters |
|
105 | 105 | ---------- |
|
106 | 106 | obj : object |
|
107 | 107 | The Python object whose format data will be computed. |
|
108 | 108 | include : list or tuple, optional |
|
109 | 109 | A list of format type strings (MIME types) to include in the |
|
110 | 110 | format data dict. If this is set *only* the format types included |
|
111 | 111 | in this list will be computed. |
|
112 | 112 | exclude : list or tuple, optional |
|
113 | 113 | A list of format type string (MIME types) to exclude in the format |
|
114 | 114 | data dict. If this is set all format types will be computed, |
|
115 | 115 | except for those included in this argument. |
|
116 | 116 | |
|
117 | 117 | Returns |
|
118 | 118 | ------- |
|
119 | 119 | (format_dict, metadata_dict) : tuple of two dicts |
|
120 | 120 | |
|
121 | 121 | format_dict is a dictionary of key/value pairs, one of each format that was |
|
122 | 122 | generated for the object. The keys are the format types, which |
|
123 | 123 | will usually be MIME type strings and the values and JSON'able |
|
124 | 124 | data structure containing the raw data for the representation in |
|
125 | 125 | that format. |
|
126 | 126 | |
|
127 | 127 | metadata_dict is a dictionary of metadata about each mime-type output. |
|
128 | 128 | Its keys will be a strict subset of the keys in format_dict. |
|
129 | 129 | """ |
|
130 | 130 | format_dict = {} |
|
131 | 131 | md_dict = {} |
|
132 | 132 | |
|
133 | 133 | if self.ipython_display_formatter(obj): |
|
134 | 134 | # object handled itself, don't proceed |
|
135 | 135 | return {}, {} |
|
136 | 136 | |
|
137 | 137 | for format_type, formatter in self.formatters.items(): |
|
138 | 138 | if include and format_type not in include: |
|
139 | 139 | continue |
|
140 | 140 | if exclude and format_type in exclude: |
|
141 | 141 | continue |
|
142 | 142 | |
|
143 | 143 | md = None |
|
144 | 144 | try: |
|
145 | 145 | data = formatter(obj) |
|
146 | 146 | except: |
|
147 | 147 | # FIXME: log the exception |
|
148 | 148 | raise |
|
149 | 149 | |
|
150 | 150 | # formatters can return raw data or (data, metadata) |
|
151 | 151 | if isinstance(data, tuple) and len(data) == 2: |
|
152 | 152 | data, md = data |
|
153 | 153 | |
|
154 | 154 | if data is not None: |
|
155 | 155 | format_dict[format_type] = data |
|
156 | 156 | if md is not None: |
|
157 | 157 | md_dict[format_type] = md |
|
158 | 158 | |
|
159 | 159 | return format_dict, md_dict |
|
160 | 160 | |
|
161 | 161 | @property |
|
162 | 162 | def format_types(self): |
|
163 | 163 | """Return the format types (MIME types) of the active formatters.""" |
|
164 | 164 | return list(self.formatters.keys()) |
|
165 | 165 | |
|
166 | 166 | |
|
167 | 167 | #----------------------------------------------------------------------------- |
|
168 | 168 | # Formatters for specific format types (text, html, svg, etc.) |
|
169 | 169 | #----------------------------------------------------------------------------- |
|
170 | 170 | |
|
171 | 171 | |
|
172 | 172 | def _safe_repr(obj): |
|
173 | 173 | """Try to return a repr of an object |
|
174 | 174 | |
|
175 | 175 | always returns a string, at least. |
|
176 | 176 | """ |
|
177 | 177 | try: |
|
178 | 178 | return repr(obj) |
|
179 | 179 | except Exception as e: |
|
180 | 180 | return "un-repr-able object (%r)" % e |
|
181 | 181 | |
|
182 | 182 | |
|
183 | 183 | class FormatterWarning(UserWarning): |
|
184 | 184 | """Warning class for errors in formatters""" |
|
185 | 185 | |
|
186 | 186 | @decorator |
|
187 | 187 | def catch_format_error(method, self, *args, **kwargs): |
|
188 | 188 | """show traceback on failed format call""" |
|
189 | 189 | try: |
|
190 | 190 | r = method(self, *args, **kwargs) |
|
191 | 191 | except NotImplementedError: |
|
192 | 192 | # don't warn on NotImplementedErrors |
|
193 | 193 | return None |
|
194 | 194 | except Exception: |
|
195 | 195 | exc_info = sys.exc_info() |
|
196 | 196 | ip = get_ipython() |
|
197 | 197 | if ip is not None: |
|
198 | 198 | ip.showtraceback(exc_info) |
|
199 | 199 | else: |
|
200 | 200 | traceback.print_exception(*exc_info) |
|
201 | 201 | return None |
|
202 | 202 | return self._check_return(r, args[0]) |
|
203 | 203 | |
|
204 | 204 | |
|
205 | 205 | class FormatterABC(with_metaclass(abc.ABCMeta, object)): |
|
206 | 206 | """ Abstract base class for Formatters. |
|
207 | 207 | |
|
208 | 208 | A formatter is a callable class that is responsible for computing the |
|
209 | 209 | raw format data for a particular format type (MIME type). For example, |
|
210 | 210 | an HTML formatter would have a format type of `text/html` and would return |
|
211 | 211 | the HTML representation of the object when called. |
|
212 | 212 | """ |
|
213 | 213 | |
|
214 | 214 | # The format type of the data returned, usually a MIME type. |
|
215 | 215 | format_type = 'text/plain' |
|
216 | 216 | |
|
217 | 217 | # Is the formatter enabled... |
|
218 | 218 | enabled = True |
|
219 | 219 | |
|
220 | 220 | @abc.abstractmethod |
|
221 | 221 | def __call__(self, obj): |
|
222 | 222 | """Return a JSON'able representation of the object. |
|
223 | 223 | |
|
224 | 224 | If the object cannot be formatted by this formatter, |
|
225 | 225 | warn and return None. |
|
226 | 226 | """ |
|
227 | 227 | return repr(obj) |
|
228 | 228 | |
|
229 | 229 | |
|
230 | 230 | def _mod_name_key(typ): |
|
231 | 231 | """Return a (__module__, __name__) tuple for a type. |
|
232 | 232 | |
|
233 | 233 | Used as key in Formatter.deferred_printers. |
|
234 | 234 | """ |
|
235 | 235 | module = getattr(typ, '__module__', None) |
|
236 | 236 | name = getattr(typ, '__name__', None) |
|
237 | 237 | return (module, name) |
|
238 | 238 | |
|
239 | 239 | |
|
240 | 240 | def _get_type(obj): |
|
241 | 241 | """Return the type of an instance (old and new-style)""" |
|
242 | 242 | return getattr(obj, '__class__', None) or type(obj) |
|
243 | 243 | |
|
244 | 244 | |
|
245 | 245 | _raise_key_error = Sentinel('_raise_key_error', __name__, |
|
246 | 246 | """ |
|
247 | 247 | Special value to raise a KeyError |
|
248 | 248 | |
|
249 | 249 | Raise KeyError in `BaseFormatter.pop` if passed as the default value to `pop` |
|
250 | 250 | """) |
|
251 | 251 | |
|
252 | 252 | |
|
253 | 253 | class BaseFormatter(Configurable): |
|
254 | 254 | """A base formatter class that is configurable. |
|
255 | 255 | |
|
256 | 256 | This formatter should usually be used as the base class of all formatters. |
|
257 | 257 | It is a traited :class:`Configurable` class and includes an extensible |
|
258 | 258 | API for users to determine how their objects are formatted. The following |
|
259 | 259 | logic is used to find a function to format an given object. |
|
260 | 260 | |
|
261 | 261 | 1. The object is introspected to see if it has a method with the name |
|
262 | 262 | :attr:`print_method`. If is does, that object is passed to that method |
|
263 | 263 | for formatting. |
|
264 | 264 | 2. If no print method is found, three internal dictionaries are consulted |
|
265 | 265 | to find print method: :attr:`singleton_printers`, :attr:`type_printers` |
|
266 | 266 | and :attr:`deferred_printers`. |
|
267 | 267 | |
|
268 | 268 | Users should use these dictionaries to register functions that will be |
|
269 | 269 | used to compute the format data for their objects (if those objects don't |
|
270 | 270 | have the special print methods). The easiest way of using these |
|
271 | 271 | dictionaries is through the :meth:`for_type` and :meth:`for_type_by_name` |
|
272 | 272 | methods. |
|
273 | 273 | |
|
274 | 274 | If no function/callable is found to compute the format data, ``None`` is |
|
275 | 275 | returned and this format type is not used. |
|
276 | 276 | """ |
|
277 | 277 | |
|
278 | 278 | format_type = Unicode('text/plain') |
|
279 |
_return_type = str |
|
|
279 | _return_type = str | |
|
280 | 280 | |
|
281 | 281 | enabled = Bool(True).tag(config=True) |
|
282 | 282 | |
|
283 | 283 | print_method = ObjectName('__repr__') |
|
284 | 284 | |
|
285 | 285 | # The singleton printers. |
|
286 | 286 | # Maps the IDs of the builtin singleton objects to the format functions. |
|
287 | 287 | singleton_printers = Dict().tag(config=True) |
|
288 | 288 | |
|
289 | 289 | # The type-specific printers. |
|
290 | 290 | # Map type objects to the format functions. |
|
291 | 291 | type_printers = Dict().tag(config=True) |
|
292 | 292 | |
|
293 | 293 | # The deferred-import type-specific printers. |
|
294 | 294 | # Map (modulename, classname) pairs to the format functions. |
|
295 | 295 | deferred_printers = Dict().tag(config=True) |
|
296 | 296 | |
|
297 | 297 | @catch_format_error |
|
298 | 298 | def __call__(self, obj): |
|
299 | 299 | """Compute the format for an object.""" |
|
300 | 300 | if self.enabled: |
|
301 | 301 | # lookup registered printer |
|
302 | 302 | try: |
|
303 | 303 | printer = self.lookup(obj) |
|
304 | 304 | except KeyError: |
|
305 | 305 | pass |
|
306 | 306 | else: |
|
307 | 307 | return printer(obj) |
|
308 | 308 | # Finally look for special method names |
|
309 | 309 | method = get_real_method(obj, self.print_method) |
|
310 | 310 | if method is not None: |
|
311 | 311 | return method() |
|
312 | 312 | return None |
|
313 | 313 | else: |
|
314 | 314 | return None |
|
315 | 315 | |
|
316 | 316 | def __contains__(self, typ): |
|
317 | 317 | """map in to lookup_by_type""" |
|
318 | 318 | try: |
|
319 | 319 | self.lookup_by_type(typ) |
|
320 | 320 | except KeyError: |
|
321 | 321 | return False |
|
322 | 322 | else: |
|
323 | 323 | return True |
|
324 | 324 | |
|
325 | 325 | def _check_return(self, r, obj): |
|
326 | 326 | """Check that a return value is appropriate |
|
327 | 327 | |
|
328 | 328 | Return the value if so, None otherwise, warning if invalid. |
|
329 | 329 | """ |
|
330 | 330 | if r is None or isinstance(r, self._return_type) or \ |
|
331 | 331 | (isinstance(r, tuple) and r and isinstance(r[0], self._return_type)): |
|
332 | 332 | return r |
|
333 | 333 | else: |
|
334 | 334 | warnings.warn( |
|
335 | 335 | "%s formatter returned invalid type %s (expected %s) for object: %s" % \ |
|
336 | 336 | (self.format_type, type(r), self._return_type, _safe_repr(obj)), |
|
337 | 337 | FormatterWarning |
|
338 | 338 | ) |
|
339 | 339 | |
|
340 | 340 | def lookup(self, obj): |
|
341 | 341 | """Look up the formatter for a given instance. |
|
342 | 342 | |
|
343 | 343 | Parameters |
|
344 | 344 | ---------- |
|
345 | 345 | obj : object instance |
|
346 | 346 | |
|
347 | 347 | Returns |
|
348 | 348 | ------- |
|
349 | 349 | f : callable |
|
350 | 350 | The registered formatting callable for the type. |
|
351 | 351 | |
|
352 | 352 | Raises |
|
353 | 353 | ------ |
|
354 | 354 | KeyError if the type has not been registered. |
|
355 | 355 | """ |
|
356 | 356 | # look for singleton first |
|
357 | 357 | obj_id = id(obj) |
|
358 | 358 | if obj_id in self.singleton_printers: |
|
359 | 359 | return self.singleton_printers[obj_id] |
|
360 | 360 | # then lookup by type |
|
361 | 361 | return self.lookup_by_type(_get_type(obj)) |
|
362 | 362 | |
|
363 | 363 | def lookup_by_type(self, typ): |
|
364 | 364 | """Look up the registered formatter for a type. |
|
365 | 365 | |
|
366 | 366 | Parameters |
|
367 | 367 | ---------- |
|
368 | 368 | typ : type or '__module__.__name__' string for a type |
|
369 | 369 | |
|
370 | 370 | Returns |
|
371 | 371 | ------- |
|
372 | 372 | f : callable |
|
373 | 373 | The registered formatting callable for the type. |
|
374 | 374 | |
|
375 | 375 | Raises |
|
376 | 376 | ------ |
|
377 | 377 | KeyError if the type has not been registered. |
|
378 | 378 | """ |
|
379 |
if isinstance(typ, str |
|
|
379 | if isinstance(typ, str): | |
|
380 | 380 | typ_key = tuple(typ.rsplit('.',1)) |
|
381 | 381 | if typ_key not in self.deferred_printers: |
|
382 | 382 | # We may have it cached in the type map. We will have to |
|
383 | 383 | # iterate over all of the types to check. |
|
384 | 384 | for cls in self.type_printers: |
|
385 | 385 | if _mod_name_key(cls) == typ_key: |
|
386 | 386 | return self.type_printers[cls] |
|
387 | 387 | else: |
|
388 | 388 | return self.deferred_printers[typ_key] |
|
389 | 389 | else: |
|
390 | 390 | for cls in pretty._get_mro(typ): |
|
391 | 391 | if cls in self.type_printers or self._in_deferred_types(cls): |
|
392 | 392 | return self.type_printers[cls] |
|
393 | 393 | |
|
394 | 394 | # If we have reached here, the lookup failed. |
|
395 | 395 | raise KeyError("No registered printer for {0!r}".format(typ)) |
|
396 | 396 | |
|
397 | 397 | def for_type(self, typ, func=None): |
|
398 | 398 | """Add a format function for a given type. |
|
399 | 399 | |
|
400 | 400 | Parameters |
|
401 | 401 | ----------- |
|
402 | 402 | typ : type or '__module__.__name__' string for a type |
|
403 | 403 | The class of the object that will be formatted using `func`. |
|
404 | 404 | func : callable |
|
405 | 405 | A callable for computing the format data. |
|
406 | 406 | `func` will be called with the object to be formatted, |
|
407 | 407 | and will return the raw data in this formatter's format. |
|
408 | 408 | Subclasses may use a different call signature for the |
|
409 | 409 | `func` argument. |
|
410 | 410 | |
|
411 | 411 | If `func` is None or not specified, there will be no change, |
|
412 | 412 | only returning the current value. |
|
413 | 413 | |
|
414 | 414 | Returns |
|
415 | 415 | ------- |
|
416 | 416 | oldfunc : callable |
|
417 | 417 | The currently registered callable. |
|
418 | 418 | If you are registering a new formatter, |
|
419 | 419 | this will be the previous value (to enable restoring later). |
|
420 | 420 | """ |
|
421 | 421 | # if string given, interpret as 'pkg.module.class_name' |
|
422 |
if isinstance(typ, str |
|
|
422 | if isinstance(typ, str): | |
|
423 | 423 | type_module, type_name = typ.rsplit('.', 1) |
|
424 | 424 | return self.for_type_by_name(type_module, type_name, func) |
|
425 | 425 | |
|
426 | 426 | try: |
|
427 | 427 | oldfunc = self.lookup_by_type(typ) |
|
428 | 428 | except KeyError: |
|
429 | 429 | oldfunc = None |
|
430 | 430 | |
|
431 | 431 | if func is not None: |
|
432 | 432 | self.type_printers[typ] = func |
|
433 | 433 | |
|
434 | 434 | return oldfunc |
|
435 | 435 | |
|
436 | 436 | def for_type_by_name(self, type_module, type_name, func=None): |
|
437 | 437 | """Add a format function for a type specified by the full dotted |
|
438 | 438 | module and name of the type, rather than the type of the object. |
|
439 | 439 | |
|
440 | 440 | Parameters |
|
441 | 441 | ---------- |
|
442 | 442 | type_module : str |
|
443 | 443 | The full dotted name of the module the type is defined in, like |
|
444 | 444 | ``numpy``. |
|
445 | 445 | type_name : str |
|
446 | 446 | The name of the type (the class name), like ``dtype`` |
|
447 | 447 | func : callable |
|
448 | 448 | A callable for computing the format data. |
|
449 | 449 | `func` will be called with the object to be formatted, |
|
450 | 450 | and will return the raw data in this formatter's format. |
|
451 | 451 | Subclasses may use a different call signature for the |
|
452 | 452 | `func` argument. |
|
453 | 453 | |
|
454 | 454 | If `func` is None or unspecified, there will be no change, |
|
455 | 455 | only returning the current value. |
|
456 | 456 | |
|
457 | 457 | Returns |
|
458 | 458 | ------- |
|
459 | 459 | oldfunc : callable |
|
460 | 460 | The currently registered callable. |
|
461 | 461 | If you are registering a new formatter, |
|
462 | 462 | this will be the previous value (to enable restoring later). |
|
463 | 463 | """ |
|
464 | 464 | key = (type_module, type_name) |
|
465 | 465 | |
|
466 | 466 | try: |
|
467 | 467 | oldfunc = self.lookup_by_type("%s.%s" % key) |
|
468 | 468 | except KeyError: |
|
469 | 469 | oldfunc = None |
|
470 | 470 | |
|
471 | 471 | if func is not None: |
|
472 | 472 | self.deferred_printers[key] = func |
|
473 | 473 | return oldfunc |
|
474 | 474 | |
|
475 | 475 | def pop(self, typ, default=_raise_key_error): |
|
476 | 476 | """Pop a formatter for the given type. |
|
477 | 477 | |
|
478 | 478 | Parameters |
|
479 | 479 | ---------- |
|
480 | 480 | typ : type or '__module__.__name__' string for a type |
|
481 | 481 | default : object |
|
482 | 482 | value to be returned if no formatter is registered for typ. |
|
483 | 483 | |
|
484 | 484 | Returns |
|
485 | 485 | ------- |
|
486 | 486 | obj : object |
|
487 | 487 | The last registered object for the type. |
|
488 | 488 | |
|
489 | 489 | Raises |
|
490 | 490 | ------ |
|
491 | 491 | KeyError if the type is not registered and default is not specified. |
|
492 | 492 | """ |
|
493 | 493 | |
|
494 |
if isinstance(typ, str |
|
|
494 | if isinstance(typ, str): | |
|
495 | 495 | typ_key = tuple(typ.rsplit('.',1)) |
|
496 | 496 | if typ_key not in self.deferred_printers: |
|
497 | 497 | # We may have it cached in the type map. We will have to |
|
498 | 498 | # iterate over all of the types to check. |
|
499 | 499 | for cls in self.type_printers: |
|
500 | 500 | if _mod_name_key(cls) == typ_key: |
|
501 | 501 | old = self.type_printers.pop(cls) |
|
502 | 502 | break |
|
503 | 503 | else: |
|
504 | 504 | old = default |
|
505 | 505 | else: |
|
506 | 506 | old = self.deferred_printers.pop(typ_key) |
|
507 | 507 | else: |
|
508 | 508 | if typ in self.type_printers: |
|
509 | 509 | old = self.type_printers.pop(typ) |
|
510 | 510 | else: |
|
511 | 511 | old = self.deferred_printers.pop(_mod_name_key(typ), default) |
|
512 | 512 | if old is _raise_key_error: |
|
513 | 513 | raise KeyError("No registered value for {0!r}".format(typ)) |
|
514 | 514 | return old |
|
515 | 515 | |
|
516 | 516 | def _in_deferred_types(self, cls): |
|
517 | 517 | """ |
|
518 | 518 | Check if the given class is specified in the deferred type registry. |
|
519 | 519 | |
|
520 | 520 | Successful matches will be moved to the regular type registry for future use. |
|
521 | 521 | """ |
|
522 | 522 | mod = getattr(cls, '__module__', None) |
|
523 | 523 | name = getattr(cls, '__name__', None) |
|
524 | 524 | key = (mod, name) |
|
525 | 525 | if key in self.deferred_printers: |
|
526 | 526 | # Move the printer over to the regular registry. |
|
527 | 527 | printer = self.deferred_printers.pop(key) |
|
528 | 528 | self.type_printers[cls] = printer |
|
529 | 529 | return True |
|
530 | 530 | return False |
|
531 | 531 | |
|
532 | 532 | |
|
533 | 533 | class PlainTextFormatter(BaseFormatter): |
|
534 | 534 | """The default pretty-printer. |
|
535 | 535 | |
|
536 | 536 | This uses :mod:`IPython.lib.pretty` to compute the format data of |
|
537 | 537 | the object. If the object cannot be pretty printed, :func:`repr` is used. |
|
538 | 538 | See the documentation of :mod:`IPython.lib.pretty` for details on |
|
539 | 539 | how to write pretty printers. Here is a simple example:: |
|
540 | 540 | |
|
541 | 541 | def dtype_pprinter(obj, p, cycle): |
|
542 | 542 | if cycle: |
|
543 | 543 | return p.text('dtype(...)') |
|
544 | 544 | if hasattr(obj, 'fields'): |
|
545 | 545 | if obj.fields is None: |
|
546 | 546 | p.text(repr(obj)) |
|
547 | 547 | else: |
|
548 | 548 | p.begin_group(7, 'dtype([') |
|
549 | 549 | for i, field in enumerate(obj.descr): |
|
550 | 550 | if i > 0: |
|
551 | 551 | p.text(',') |
|
552 | 552 | p.breakable() |
|
553 | 553 | p.pretty(field) |
|
554 | 554 | p.end_group(7, '])') |
|
555 | 555 | """ |
|
556 | 556 | |
|
557 | 557 | # The format type of data returned. |
|
558 | 558 | format_type = Unicode('text/plain') |
|
559 | 559 | |
|
560 | 560 | # This subclass ignores this attribute as it always need to return |
|
561 | 561 | # something. |
|
562 | 562 | enabled = Bool(True).tag(config=False) |
|
563 | 563 | |
|
564 | 564 | max_seq_length = Integer(pretty.MAX_SEQ_LENGTH, |
|
565 | 565 | help="""Truncate large collections (lists, dicts, tuples, sets) to this size. |
|
566 | 566 | |
|
567 | 567 | Set to 0 to disable truncation. |
|
568 | 568 | """ |
|
569 | 569 | ).tag(config=True) |
|
570 | 570 | |
|
571 | 571 | # Look for a _repr_pretty_ methods to use for pretty printing. |
|
572 | 572 | print_method = ObjectName('_repr_pretty_') |
|
573 | 573 | |
|
574 | 574 | # Whether to pretty-print or not. |
|
575 | 575 | pprint = Bool(True).tag(config=True) |
|
576 | 576 | |
|
577 | 577 | # Whether to be verbose or not. |
|
578 | 578 | verbose = Bool(False).tag(config=True) |
|
579 | 579 | |
|
580 | 580 | # The maximum width. |
|
581 | 581 | max_width = Integer(79).tag(config=True) |
|
582 | 582 | |
|
583 | 583 | # The newline character. |
|
584 | 584 | newline = Unicode('\n').tag(config=True) |
|
585 | 585 | |
|
586 | 586 | # format-string for pprinting floats |
|
587 | 587 | float_format = Unicode('%r') |
|
588 | 588 | # setter for float precision, either int or direct format-string |
|
589 | 589 | float_precision = CUnicode('').tag(config=True) |
|
590 | 590 | |
|
591 | 591 | @observe('float_precision') |
|
592 | 592 | def _float_precision_changed(self, change): |
|
593 | 593 | """float_precision changed, set float_format accordingly. |
|
594 | 594 | |
|
595 | 595 | float_precision can be set by int or str. |
|
596 | 596 | This will set float_format, after interpreting input. |
|
597 | 597 | If numpy has been imported, numpy print precision will also be set. |
|
598 | 598 | |
|
599 | 599 | integer `n` sets format to '%.nf', otherwise, format set directly. |
|
600 | 600 | |
|
601 | 601 | An empty string returns to defaults (repr for float, 8 for numpy). |
|
602 | 602 | |
|
603 | 603 | This parameter can be set via the '%precision' magic. |
|
604 | 604 | """ |
|
605 | 605 | |
|
606 | 606 | new = change['new'] |
|
607 | 607 | if '%' in new: |
|
608 | 608 | # got explicit format string |
|
609 | 609 | fmt = new |
|
610 | 610 | try: |
|
611 | 611 | fmt%3.14159 |
|
612 | 612 | except Exception: |
|
613 | 613 | raise ValueError("Precision must be int or format string, not %r"%new) |
|
614 | 614 | elif new: |
|
615 | 615 | # otherwise, should be an int |
|
616 | 616 | try: |
|
617 | 617 | i = int(new) |
|
618 | 618 | assert i >= 0 |
|
619 | 619 | except ValueError: |
|
620 | 620 | raise ValueError("Precision must be int or format string, not %r"%new) |
|
621 | 621 | except AssertionError: |
|
622 | 622 | raise ValueError("int precision must be non-negative, not %r"%i) |
|
623 | 623 | |
|
624 | 624 | fmt = '%%.%if'%i |
|
625 | 625 | if 'numpy' in sys.modules: |
|
626 | 626 | # set numpy precision if it has been imported |
|
627 | 627 | import numpy |
|
628 | 628 | numpy.set_printoptions(precision=i) |
|
629 | 629 | else: |
|
630 | 630 | # default back to repr |
|
631 | 631 | fmt = '%r' |
|
632 | 632 | if 'numpy' in sys.modules: |
|
633 | 633 | import numpy |
|
634 | 634 | # numpy default is 8 |
|
635 | 635 | numpy.set_printoptions(precision=8) |
|
636 | 636 | self.float_format = fmt |
|
637 | 637 | |
|
638 | 638 | # Use the default pretty printers from IPython.lib.pretty. |
|
639 | 639 | @default('singleton_printers') |
|
640 | 640 | def _singleton_printers_default(self): |
|
641 | 641 | return pretty._singleton_pprinters.copy() |
|
642 | 642 | |
|
643 | 643 | @default('type_printers') |
|
644 | 644 | def _type_printers_default(self): |
|
645 | 645 | d = pretty._type_pprinters.copy() |
|
646 | 646 | d[float] = lambda obj,p,cycle: p.text(self.float_format%obj) |
|
647 | 647 | return d |
|
648 | 648 | |
|
649 | 649 | @default('deferred_printers') |
|
650 | 650 | def _deferred_printers_default(self): |
|
651 | 651 | return pretty._deferred_type_pprinters.copy() |
|
652 | 652 | |
|
653 | 653 | #### FormatterABC interface #### |
|
654 | 654 | |
|
655 | 655 | @catch_format_error |
|
656 | 656 | def __call__(self, obj): |
|
657 | 657 | """Compute the pretty representation of the object.""" |
|
658 | 658 | if not self.pprint: |
|
659 | 659 | return repr(obj) |
|
660 | 660 | else: |
|
661 | 661 | # handle str and unicode on Python 2 |
|
662 | 662 | # io.StringIO only accepts unicode, |
|
663 | 663 | # cStringIO doesn't handle unicode on py2, |
|
664 | 664 | # StringIO allows str, unicode but only ascii str |
|
665 | 665 | stream = pretty.CUnicodeIO() |
|
666 | 666 | printer = pretty.RepresentationPrinter(stream, self.verbose, |
|
667 | 667 | self.max_width, self.newline, |
|
668 | 668 | max_seq_length=self.max_seq_length, |
|
669 | 669 | singleton_pprinters=self.singleton_printers, |
|
670 | 670 | type_pprinters=self.type_printers, |
|
671 | 671 | deferred_pprinters=self.deferred_printers) |
|
672 | 672 | printer.pretty(obj) |
|
673 | 673 | printer.flush() |
|
674 | 674 | return stream.getvalue() |
|
675 | 675 | |
|
676 | 676 | |
|
677 | 677 | class HTMLFormatter(BaseFormatter): |
|
678 | 678 | """An HTML formatter. |
|
679 | 679 | |
|
680 | 680 | To define the callables that compute the HTML representation of your |
|
681 | 681 | objects, define a :meth:`_repr_html_` method or use the :meth:`for_type` |
|
682 | 682 | or :meth:`for_type_by_name` methods to register functions that handle |
|
683 | 683 | this. |
|
684 | 684 | |
|
685 | 685 | The return value of this formatter should be a valid HTML snippet that |
|
686 | 686 | could be injected into an existing DOM. It should *not* include the |
|
687 | 687 | ```<html>`` or ```<body>`` tags. |
|
688 | 688 | """ |
|
689 | 689 | format_type = Unicode('text/html') |
|
690 | 690 | |
|
691 | 691 | print_method = ObjectName('_repr_html_') |
|
692 | 692 | |
|
693 | 693 | |
|
694 | 694 | class MarkdownFormatter(BaseFormatter): |
|
695 | 695 | """A Markdown formatter. |
|
696 | 696 | |
|
697 | 697 | To define the callables that compute the Markdown representation of your |
|
698 | 698 | objects, define a :meth:`_repr_markdown_` method or use the :meth:`for_type` |
|
699 | 699 | or :meth:`for_type_by_name` methods to register functions that handle |
|
700 | 700 | this. |
|
701 | 701 | |
|
702 | 702 | The return value of this formatter should be a valid Markdown. |
|
703 | 703 | """ |
|
704 | 704 | format_type = Unicode('text/markdown') |
|
705 | 705 | |
|
706 | 706 | print_method = ObjectName('_repr_markdown_') |
|
707 | 707 | |
|
708 | 708 | class SVGFormatter(BaseFormatter): |
|
709 | 709 | """An SVG formatter. |
|
710 | 710 | |
|
711 | 711 | To define the callables that compute the SVG representation of your |
|
712 | 712 | objects, define a :meth:`_repr_svg_` method or use the :meth:`for_type` |
|
713 | 713 | or :meth:`for_type_by_name` methods to register functions that handle |
|
714 | 714 | this. |
|
715 | 715 | |
|
716 | 716 | The return value of this formatter should be valid SVG enclosed in |
|
717 | 717 | ```<svg>``` tags, that could be injected into an existing DOM. It should |
|
718 | 718 | *not* include the ```<html>`` or ```<body>`` tags. |
|
719 | 719 | """ |
|
720 | 720 | format_type = Unicode('image/svg+xml') |
|
721 | 721 | |
|
722 | 722 | print_method = ObjectName('_repr_svg_') |
|
723 | 723 | |
|
724 | 724 | |
|
725 | 725 | class PNGFormatter(BaseFormatter): |
|
726 | 726 | """A PNG formatter. |
|
727 | 727 | |
|
728 | 728 | To define the callables that compute the PNG representation of your |
|
729 | 729 | objects, define a :meth:`_repr_png_` method or use the :meth:`for_type` |
|
730 | 730 | or :meth:`for_type_by_name` methods to register functions that handle |
|
731 | 731 | this. |
|
732 | 732 | |
|
733 | 733 | The return value of this formatter should be raw PNG data, *not* |
|
734 | 734 | base64 encoded. |
|
735 | 735 | """ |
|
736 | 736 | format_type = Unicode('image/png') |
|
737 | 737 | |
|
738 | 738 | print_method = ObjectName('_repr_png_') |
|
739 | 739 | |
|
740 | 740 | _return_type = (bytes, unicode_type) |
|
741 | 741 | |
|
742 | 742 | |
|
743 | 743 | class JPEGFormatter(BaseFormatter): |
|
744 | 744 | """A JPEG formatter. |
|
745 | 745 | |
|
746 | 746 | To define the callables that compute the JPEG representation of your |
|
747 | 747 | objects, define a :meth:`_repr_jpeg_` method or use the :meth:`for_type` |
|
748 | 748 | or :meth:`for_type_by_name` methods to register functions that handle |
|
749 | 749 | this. |
|
750 | 750 | |
|
751 | 751 | The return value of this formatter should be raw JPEG data, *not* |
|
752 | 752 | base64 encoded. |
|
753 | 753 | """ |
|
754 | 754 | format_type = Unicode('image/jpeg') |
|
755 | 755 | |
|
756 | 756 | print_method = ObjectName('_repr_jpeg_') |
|
757 | 757 | |
|
758 | 758 | _return_type = (bytes, unicode_type) |
|
759 | 759 | |
|
760 | 760 | |
|
761 | 761 | class LatexFormatter(BaseFormatter): |
|
762 | 762 | """A LaTeX formatter. |
|
763 | 763 | |
|
764 | 764 | To define the callables that compute the LaTeX representation of your |
|
765 | 765 | objects, define a :meth:`_repr_latex_` method or use the :meth:`for_type` |
|
766 | 766 | or :meth:`for_type_by_name` methods to register functions that handle |
|
767 | 767 | this. |
|
768 | 768 | |
|
769 | 769 | The return value of this formatter should be a valid LaTeX equation, |
|
770 | 770 | enclosed in either ```$```, ```$$``` or another LaTeX equation |
|
771 | 771 | environment. |
|
772 | 772 | """ |
|
773 | 773 | format_type = Unicode('text/latex') |
|
774 | 774 | |
|
775 | 775 | print_method = ObjectName('_repr_latex_') |
|
776 | 776 | |
|
777 | 777 | |
|
778 | 778 | class JSONFormatter(BaseFormatter): |
|
779 | 779 | """A JSON string formatter. |
|
780 | 780 | |
|
781 | 781 | To define the callables that compute the JSONable representation of |
|
782 | 782 | your objects, define a :meth:`_repr_json_` method or use the :meth:`for_type` |
|
783 | 783 | or :meth:`for_type_by_name` methods to register functions that handle |
|
784 | 784 | this. |
|
785 | 785 | |
|
786 | 786 | The return value of this formatter should be a JSONable list or dict. |
|
787 | 787 | JSON scalars (None, number, string) are not allowed, only dict or list containers. |
|
788 | 788 | """ |
|
789 | 789 | format_type = Unicode('application/json') |
|
790 | 790 | _return_type = (list, dict) |
|
791 | 791 | |
|
792 | 792 | print_method = ObjectName('_repr_json_') |
|
793 | 793 | |
|
794 | 794 | def _check_return(self, r, obj): |
|
795 | 795 | """Check that a return value is appropriate |
|
796 | 796 | |
|
797 | 797 | Return the value if so, None otherwise, warning if invalid. |
|
798 | 798 | """ |
|
799 | 799 | if r is None: |
|
800 | 800 | return |
|
801 | 801 | md = None |
|
802 | 802 | if isinstance(r, tuple): |
|
803 | 803 | # unpack data, metadata tuple for type checking on first element |
|
804 | 804 | r, md = r |
|
805 | 805 | |
|
806 | 806 | # handle deprecated JSON-as-string form from IPython < 3 |
|
807 |
if isinstance(r, str |
|
|
807 | if isinstance(r, str): | |
|
808 | 808 | warnings.warn("JSON expects JSONable list/dict containers, not JSON strings", |
|
809 | 809 | FormatterWarning) |
|
810 | 810 | r = json.loads(r) |
|
811 | 811 | |
|
812 | 812 | if md is not None: |
|
813 | 813 | # put the tuple back together |
|
814 | 814 | r = (r, md) |
|
815 | 815 | return super(JSONFormatter, self)._check_return(r, obj) |
|
816 | 816 | |
|
817 | 817 | |
|
818 | 818 | class JavascriptFormatter(BaseFormatter): |
|
819 | 819 | """A Javascript formatter. |
|
820 | 820 | |
|
821 | 821 | To define the callables that compute the Javascript representation of |
|
822 | 822 | your objects, define a :meth:`_repr_javascript_` method or use the |
|
823 | 823 | :meth:`for_type` or :meth:`for_type_by_name` methods to register functions |
|
824 | 824 | that handle this. |
|
825 | 825 | |
|
826 | 826 | The return value of this formatter should be valid Javascript code and |
|
827 | 827 | should *not* be enclosed in ```<script>``` tags. |
|
828 | 828 | """ |
|
829 | 829 | format_type = Unicode('application/javascript') |
|
830 | 830 | |
|
831 | 831 | print_method = ObjectName('_repr_javascript_') |
|
832 | 832 | |
|
833 | 833 | |
|
834 | 834 | class PDFFormatter(BaseFormatter): |
|
835 | 835 | """A PDF formatter. |
|
836 | 836 | |
|
837 | 837 | To define the callables that compute the PDF representation of your |
|
838 | 838 | objects, define a :meth:`_repr_pdf_` method or use the :meth:`for_type` |
|
839 | 839 | or :meth:`for_type_by_name` methods to register functions that handle |
|
840 | 840 | this. |
|
841 | 841 | |
|
842 | 842 | The return value of this formatter should be raw PDF data, *not* |
|
843 | 843 | base64 encoded. |
|
844 | 844 | """ |
|
845 | 845 | format_type = Unicode('application/pdf') |
|
846 | 846 | |
|
847 | 847 | print_method = ObjectName('_repr_pdf_') |
|
848 | 848 | |
|
849 | 849 | _return_type = (bytes, unicode_type) |
|
850 | 850 | |
|
851 | 851 | class IPythonDisplayFormatter(BaseFormatter): |
|
852 | 852 | """A Formatter for objects that know how to display themselves. |
|
853 | 853 | |
|
854 | 854 | To define the callables that compute the representation of your |
|
855 | 855 | objects, define a :meth:`_ipython_display_` method or use the :meth:`for_type` |
|
856 | 856 | or :meth:`for_type_by_name` methods to register functions that handle |
|
857 | 857 | this. Unlike mime-type displays, this method should not return anything, |
|
858 | 858 | instead calling any appropriate display methods itself. |
|
859 | 859 | |
|
860 | 860 | This display formatter has highest priority. |
|
861 | 861 | If it fires, no other display formatter will be called. |
|
862 | 862 | """ |
|
863 | 863 | print_method = ObjectName('_ipython_display_') |
|
864 | 864 | _return_type = (type(None), bool) |
|
865 | 865 | |
|
866 | 866 | |
|
867 | 867 | @catch_format_error |
|
868 | 868 | def __call__(self, obj): |
|
869 | 869 | """Compute the format for an object.""" |
|
870 | 870 | if self.enabled: |
|
871 | 871 | # lookup registered printer |
|
872 | 872 | try: |
|
873 | 873 | printer = self.lookup(obj) |
|
874 | 874 | except KeyError: |
|
875 | 875 | pass |
|
876 | 876 | else: |
|
877 | 877 | printer(obj) |
|
878 | 878 | return True |
|
879 | 879 | # Finally look for special method names |
|
880 | 880 | method = get_real_method(obj, self.print_method) |
|
881 | 881 | if method is not None: |
|
882 | 882 | method() |
|
883 | 883 | return True |
|
884 | 884 | |
|
885 | 885 | |
|
886 | 886 | FormatterABC.register(BaseFormatter) |
|
887 | 887 | FormatterABC.register(PlainTextFormatter) |
|
888 | 888 | FormatterABC.register(HTMLFormatter) |
|
889 | 889 | FormatterABC.register(MarkdownFormatter) |
|
890 | 890 | FormatterABC.register(SVGFormatter) |
|
891 | 891 | FormatterABC.register(PNGFormatter) |
|
892 | 892 | FormatterABC.register(PDFFormatter) |
|
893 | 893 | FormatterABC.register(JPEGFormatter) |
|
894 | 894 | FormatterABC.register(LatexFormatter) |
|
895 | 895 | FormatterABC.register(JSONFormatter) |
|
896 | 896 | FormatterABC.register(JavascriptFormatter) |
|
897 | 897 | FormatterABC.register(IPythonDisplayFormatter) |
|
898 | 898 | |
|
899 | 899 | |
|
900 | 900 | def format_display_data(obj, include=None, exclude=None): |
|
901 | 901 | """Return a format data dict for an object. |
|
902 | 902 | |
|
903 | 903 | By default all format types will be computed. |
|
904 | 904 | |
|
905 | 905 | The following MIME types are currently implemented: |
|
906 | 906 | |
|
907 | 907 | * text/plain |
|
908 | 908 | * text/html |
|
909 | 909 | * text/markdown |
|
910 | 910 | * text/latex |
|
911 | 911 | * application/json |
|
912 | 912 | * application/javascript |
|
913 | 913 | * application/pdf |
|
914 | 914 | * image/png |
|
915 | 915 | * image/jpeg |
|
916 | 916 | * image/svg+xml |
|
917 | 917 | |
|
918 | 918 | Parameters |
|
919 | 919 | ---------- |
|
920 | 920 | obj : object |
|
921 | 921 | The Python object whose format data will be computed. |
|
922 | 922 | |
|
923 | 923 | Returns |
|
924 | 924 | ------- |
|
925 | 925 | format_dict : dict |
|
926 | 926 | A dictionary of key/value pairs, one or each format that was |
|
927 | 927 | generated for the object. The keys are the format types, which |
|
928 | 928 | will usually be MIME type strings and the values and JSON'able |
|
929 | 929 | data structure containing the raw data for the representation in |
|
930 | 930 | that format. |
|
931 | 931 | include : list or tuple, optional |
|
932 | 932 | A list of format type strings (MIME types) to include in the |
|
933 | 933 | format data dict. If this is set *only* the format types included |
|
934 | 934 | in this list will be computed. |
|
935 | 935 | exclude : list or tuple, optional |
|
936 | 936 | A list of format type string (MIME types) to exclue in the format |
|
937 | 937 | data dict. If this is set all format types will be computed, |
|
938 | 938 | except for those included in this argument. |
|
939 | 939 | """ |
|
940 | 940 | from IPython.core.interactiveshell import InteractiveShell |
|
941 | 941 | |
|
942 | 942 | return InteractiveShell.instance().display_formatter.format( |
|
943 | 943 | obj, |
|
944 | 944 | include, |
|
945 | 945 | exclude |
|
946 | 946 | ) |
|
947 | 947 |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
@@ -1,57 +1,57 b'' | |||
|
1 | 1 | """Support for interactive macros in IPython""" |
|
2 | 2 | |
|
3 | 3 | #***************************************************************************** |
|
4 | 4 | # Copyright (C) 2001-2005 Fernando Perez <fperez@colorado.edu> |
|
5 | 5 | # |
|
6 | 6 | # Distributed under the terms of the BSD License. The full license is in |
|
7 | 7 | # the file COPYING, distributed as part of this software. |
|
8 | 8 | #***************************************************************************** |
|
9 | 9 | |
|
10 | 10 | import re |
|
11 | 11 | |
|
12 | 12 | from IPython.utils import py3compat |
|
13 | 13 | from IPython.utils.encoding import DEFAULT_ENCODING |
|
14 | 14 | |
|
15 | 15 | coding_declaration = re.compile(r"#\s*coding[:=]\s*([-\w.]+)") |
|
16 | 16 | |
|
17 | 17 | class Macro(object): |
|
18 | 18 | """Simple class to store the value of macros as strings. |
|
19 | 19 | |
|
20 | 20 | Macro is just a callable that executes a string of IPython |
|
21 | 21 | input when called. |
|
22 | 22 | """ |
|
23 | 23 | |
|
24 | 24 | def __init__(self,code): |
|
25 | 25 | """store the macro value, as a single string which can be executed""" |
|
26 | 26 | lines = [] |
|
27 | 27 | enc = None |
|
28 | 28 | for line in code.splitlines(): |
|
29 | 29 | coding_match = coding_declaration.match(line) |
|
30 | 30 | if coding_match: |
|
31 | 31 | enc = coding_match.group(1) |
|
32 | 32 | else: |
|
33 | 33 | lines.append(line) |
|
34 | 34 | code = "\n".join(lines) |
|
35 | 35 | if isinstance(code, bytes): |
|
36 | 36 | code = code.decode(enc or DEFAULT_ENCODING) |
|
37 | 37 | self.value = code + '\n' |
|
38 | 38 | |
|
39 | 39 | def __str__(self): |
|
40 | 40 | return py3compat.unicode_to_str(self.value) |
|
41 | 41 | |
|
42 | 42 | def __unicode__(self): |
|
43 | 43 | return self.value |
|
44 | 44 | |
|
45 | 45 | def __repr__(self): |
|
46 | 46 | return 'IPython.macro.Macro(%s)' % repr(self.value) |
|
47 | 47 | |
|
48 | 48 | def __getstate__(self): |
|
49 | 49 | """ needed for safe pickling via %store """ |
|
50 | 50 | return {'value': self.value} |
|
51 | 51 | |
|
52 | 52 | def __add__(self, other): |
|
53 | 53 | if isinstance(other, Macro): |
|
54 | 54 | return Macro(self.value + other.value) |
|
55 |
elif isinstance(other, |
|
|
55 | elif isinstance(other, str): | |
|
56 | 56 | return Macro(self.value + other) |
|
57 | 57 | raise TypeError |
@@ -1,679 +1,678 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """Magic functions for InteractiveShell. |
|
3 | 3 | """ |
|
4 | 4 | |
|
5 | 5 | #----------------------------------------------------------------------------- |
|
6 | 6 | # Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and |
|
7 | 7 | # Copyright (C) 2001 Fernando Perez <fperez@colorado.edu> |
|
8 | 8 | # Copyright (C) 2008 The IPython Development Team |
|
9 | 9 | |
|
10 | 10 | # Distributed under the terms of the BSD License. The full license is in |
|
11 | 11 | # the file COPYING, distributed as part of this software. |
|
12 | 12 | #----------------------------------------------------------------------------- |
|
13 | 13 | |
|
14 | 14 | import os |
|
15 | 15 | import re |
|
16 | 16 | import sys |
|
17 | 17 | import types |
|
18 | 18 | from getopt import getopt, GetoptError |
|
19 | 19 | |
|
20 | 20 | from traitlets.config.configurable import Configurable |
|
21 | 21 | from IPython.core import oinspect |
|
22 | 22 | from IPython.core.error import UsageError |
|
23 | 23 | from IPython.core.inputsplitter import ESC_MAGIC, ESC_MAGIC2 |
|
24 | 24 | from decorator import decorator |
|
25 | 25 | from IPython.utils.ipstruct import Struct |
|
26 | 26 | from IPython.utils.process import arg_split |
|
27 | from IPython.utils.py3compat import string_types | |
|
28 | 27 | from IPython.utils.text import dedent |
|
29 | 28 | from traitlets import Bool, Dict, Instance, observe |
|
30 | 29 | from logging import error |
|
31 | 30 | |
|
32 | 31 | #----------------------------------------------------------------------------- |
|
33 | 32 | # Globals |
|
34 | 33 | #----------------------------------------------------------------------------- |
|
35 | 34 | |
|
36 | 35 | # A dict we'll use for each class that has magics, used as temporary storage to |
|
37 | 36 | # pass information between the @line/cell_magic method decorators and the |
|
38 | 37 | # @magics_class class decorator, because the method decorators have no |
|
39 | 38 | # access to the class when they run. See for more details: |
|
40 | 39 | # http://stackoverflow.com/questions/2366713/can-a-python-decorator-of-an-instance-method-access-the-class |
|
41 | 40 | |
|
42 | 41 | magics = dict(line={}, cell={}) |
|
43 | 42 | |
|
44 | 43 | magic_kinds = ('line', 'cell') |
|
45 | 44 | magic_spec = ('line', 'cell', 'line_cell') |
|
46 | 45 | magic_escapes = dict(line=ESC_MAGIC, cell=ESC_MAGIC2) |
|
47 | 46 | |
|
48 | 47 | #----------------------------------------------------------------------------- |
|
49 | 48 | # Utility classes and functions |
|
50 | 49 | #----------------------------------------------------------------------------- |
|
51 | 50 | |
|
52 | 51 | class Bunch: pass |
|
53 | 52 | |
|
54 | 53 | |
|
55 | 54 | def on_off(tag): |
|
56 | 55 | """Return an ON/OFF string for a 1/0 input. Simple utility function.""" |
|
57 | 56 | return ['OFF','ON'][tag] |
|
58 | 57 | |
|
59 | 58 | |
|
60 | 59 | def compress_dhist(dh): |
|
61 | 60 | """Compress a directory history into a new one with at most 20 entries. |
|
62 | 61 | |
|
63 | 62 | Return a new list made from the first and last 10 elements of dhist after |
|
64 | 63 | removal of duplicates. |
|
65 | 64 | """ |
|
66 | 65 | head, tail = dh[:-10], dh[-10:] |
|
67 | 66 | |
|
68 | 67 | newhead = [] |
|
69 | 68 | done = set() |
|
70 | 69 | for h in head: |
|
71 | 70 | if h in done: |
|
72 | 71 | continue |
|
73 | 72 | newhead.append(h) |
|
74 | 73 | done.add(h) |
|
75 | 74 | |
|
76 | 75 | return newhead + tail |
|
77 | 76 | |
|
78 | 77 | |
|
79 | 78 | def needs_local_scope(func): |
|
80 | 79 | """Decorator to mark magic functions which need to local scope to run.""" |
|
81 | 80 | func.needs_local_scope = True |
|
82 | 81 | return func |
|
83 | 82 | |
|
84 | 83 | #----------------------------------------------------------------------------- |
|
85 | 84 | # Class and method decorators for registering magics |
|
86 | 85 | #----------------------------------------------------------------------------- |
|
87 | 86 | |
|
88 | 87 | def magics_class(cls): |
|
89 | 88 | """Class decorator for all subclasses of the main Magics class. |
|
90 | 89 | |
|
91 | 90 | Any class that subclasses Magics *must* also apply this decorator, to |
|
92 | 91 | ensure that all the methods that have been decorated as line/cell magics |
|
93 | 92 | get correctly registered in the class instance. This is necessary because |
|
94 | 93 | when method decorators run, the class does not exist yet, so they |
|
95 | 94 | temporarily store their information into a module global. Application of |
|
96 | 95 | this class decorator copies that global data to the class instance and |
|
97 | 96 | clears the global. |
|
98 | 97 | |
|
99 | 98 | Obviously, this mechanism is not thread-safe, which means that the |
|
100 | 99 | *creation* of subclasses of Magic should only be done in a single-thread |
|
101 | 100 | context. Instantiation of the classes has no restrictions. Given that |
|
102 | 101 | these classes are typically created at IPython startup time and before user |
|
103 | 102 | application code becomes active, in practice this should not pose any |
|
104 | 103 | problems. |
|
105 | 104 | """ |
|
106 | 105 | cls.registered = True |
|
107 | 106 | cls.magics = dict(line = magics['line'], |
|
108 | 107 | cell = magics['cell']) |
|
109 | 108 | magics['line'] = {} |
|
110 | 109 | magics['cell'] = {} |
|
111 | 110 | return cls |
|
112 | 111 | |
|
113 | 112 | |
|
114 | 113 | def record_magic(dct, magic_kind, magic_name, func): |
|
115 | 114 | """Utility function to store a function as a magic of a specific kind. |
|
116 | 115 | |
|
117 | 116 | Parameters |
|
118 | 117 | ---------- |
|
119 | 118 | dct : dict |
|
120 | 119 | A dictionary with 'line' and 'cell' subdicts. |
|
121 | 120 | |
|
122 | 121 | magic_kind : str |
|
123 | 122 | Kind of magic to be stored. |
|
124 | 123 | |
|
125 | 124 | magic_name : str |
|
126 | 125 | Key to store the magic as. |
|
127 | 126 | |
|
128 | 127 | func : function |
|
129 | 128 | Callable object to store. |
|
130 | 129 | """ |
|
131 | 130 | if magic_kind == 'line_cell': |
|
132 | 131 | dct['line'][magic_name] = dct['cell'][magic_name] = func |
|
133 | 132 | else: |
|
134 | 133 | dct[magic_kind][magic_name] = func |
|
135 | 134 | |
|
136 | 135 | |
|
137 | 136 | def validate_type(magic_kind): |
|
138 | 137 | """Ensure that the given magic_kind is valid. |
|
139 | 138 | |
|
140 | 139 | Check that the given magic_kind is one of the accepted spec types (stored |
|
141 | 140 | in the global `magic_spec`), raise ValueError otherwise. |
|
142 | 141 | """ |
|
143 | 142 | if magic_kind not in magic_spec: |
|
144 | 143 | raise ValueError('magic_kind must be one of %s, %s given' % |
|
145 | 144 | magic_kinds, magic_kind) |
|
146 | 145 | |
|
147 | 146 | |
|
148 | 147 | # The docstrings for the decorator below will be fairly similar for the two |
|
149 | 148 | # types (method and function), so we generate them here once and reuse the |
|
150 | 149 | # templates below. |
|
151 | 150 | _docstring_template = \ |
|
152 | 151 | """Decorate the given {0} as {1} magic. |
|
153 | 152 | |
|
154 | 153 | The decorator can be used with or without arguments, as follows. |
|
155 | 154 | |
|
156 | 155 | i) without arguments: it will create a {1} magic named as the {0} being |
|
157 | 156 | decorated:: |
|
158 | 157 | |
|
159 | 158 | @deco |
|
160 | 159 | def foo(...) |
|
161 | 160 | |
|
162 | 161 | will create a {1} magic named `foo`. |
|
163 | 162 | |
|
164 | 163 | ii) with one string argument: which will be used as the actual name of the |
|
165 | 164 | resulting magic:: |
|
166 | 165 | |
|
167 | 166 | @deco('bar') |
|
168 | 167 | def foo(...) |
|
169 | 168 | |
|
170 | 169 | will create a {1} magic named `bar`. |
|
171 | 170 | """ |
|
172 | 171 | |
|
173 | 172 | # These two are decorator factories. While they are conceptually very similar, |
|
174 | 173 | # there are enough differences in the details that it's simpler to have them |
|
175 | 174 | # written as completely standalone functions rather than trying to share code |
|
176 | 175 | # and make a single one with convoluted logic. |
|
177 | 176 | |
|
178 | 177 | def _method_magic_marker(magic_kind): |
|
179 | 178 | """Decorator factory for methods in Magics subclasses. |
|
180 | 179 | """ |
|
181 | 180 | |
|
182 | 181 | validate_type(magic_kind) |
|
183 | 182 | |
|
184 | 183 | # This is a closure to capture the magic_kind. We could also use a class, |
|
185 | 184 | # but it's overkill for just that one bit of state. |
|
186 | 185 | def magic_deco(arg): |
|
187 | 186 | call = lambda f, *a, **k: f(*a, **k) |
|
188 | 187 | |
|
189 | 188 | if callable(arg): |
|
190 | 189 | # "Naked" decorator call (just @foo, no args) |
|
191 | 190 | func = arg |
|
192 | 191 | name = func.__name__ |
|
193 | 192 | retval = decorator(call, func) |
|
194 | 193 | record_magic(magics, magic_kind, name, name) |
|
195 |
elif isinstance(arg, str |
|
|
194 | elif isinstance(arg, str): | |
|
196 | 195 | # Decorator called with arguments (@foo('bar')) |
|
197 | 196 | name = arg |
|
198 | 197 | def mark(func, *a, **kw): |
|
199 | 198 | record_magic(magics, magic_kind, name, func.__name__) |
|
200 | 199 | return decorator(call, func) |
|
201 | 200 | retval = mark |
|
202 | 201 | else: |
|
203 | 202 | raise TypeError("Decorator can only be called with " |
|
204 | 203 | "string or function") |
|
205 | 204 | return retval |
|
206 | 205 | |
|
207 | 206 | # Ensure the resulting decorator has a usable docstring |
|
208 | 207 | magic_deco.__doc__ = _docstring_template.format('method', magic_kind) |
|
209 | 208 | return magic_deco |
|
210 | 209 | |
|
211 | 210 | |
|
212 | 211 | def _function_magic_marker(magic_kind): |
|
213 | 212 | """Decorator factory for standalone functions. |
|
214 | 213 | """ |
|
215 | 214 | validate_type(magic_kind) |
|
216 | 215 | |
|
217 | 216 | # This is a closure to capture the magic_kind. We could also use a class, |
|
218 | 217 | # but it's overkill for just that one bit of state. |
|
219 | 218 | def magic_deco(arg): |
|
220 | 219 | call = lambda f, *a, **k: f(*a, **k) |
|
221 | 220 | |
|
222 | 221 | # Find get_ipython() in the caller's namespace |
|
223 | 222 | caller = sys._getframe(1) |
|
224 | 223 | for ns in ['f_locals', 'f_globals', 'f_builtins']: |
|
225 | 224 | get_ipython = getattr(caller, ns).get('get_ipython') |
|
226 | 225 | if get_ipython is not None: |
|
227 | 226 | break |
|
228 | 227 | else: |
|
229 | 228 | raise NameError('Decorator can only run in context where ' |
|
230 | 229 | '`get_ipython` exists') |
|
231 | 230 | |
|
232 | 231 | ip = get_ipython() |
|
233 | 232 | |
|
234 | 233 | if callable(arg): |
|
235 | 234 | # "Naked" decorator call (just @foo, no args) |
|
236 | 235 | func = arg |
|
237 | 236 | name = func.__name__ |
|
238 | 237 | ip.register_magic_function(func, magic_kind, name) |
|
239 | 238 | retval = decorator(call, func) |
|
240 |
elif isinstance(arg, str |
|
|
239 | elif isinstance(arg, str): | |
|
241 | 240 | # Decorator called with arguments (@foo('bar')) |
|
242 | 241 | name = arg |
|
243 | 242 | def mark(func, *a, **kw): |
|
244 | 243 | ip.register_magic_function(func, magic_kind, name) |
|
245 | 244 | return decorator(call, func) |
|
246 | 245 | retval = mark |
|
247 | 246 | else: |
|
248 | 247 | raise TypeError("Decorator can only be called with " |
|
249 | 248 | "string or function") |
|
250 | 249 | return retval |
|
251 | 250 | |
|
252 | 251 | # Ensure the resulting decorator has a usable docstring |
|
253 | 252 | ds = _docstring_template.format('function', magic_kind) |
|
254 | 253 | |
|
255 | 254 | ds += dedent(""" |
|
256 | 255 | Note: this decorator can only be used in a context where IPython is already |
|
257 | 256 | active, so that the `get_ipython()` call succeeds. You can therefore use |
|
258 | 257 | it in your startup files loaded after IPython initializes, but *not* in the |
|
259 | 258 | IPython configuration file itself, which is executed before IPython is |
|
260 | 259 | fully up and running. Any file located in the `startup` subdirectory of |
|
261 | 260 | your configuration profile will be OK in this sense. |
|
262 | 261 | """) |
|
263 | 262 | |
|
264 | 263 | magic_deco.__doc__ = ds |
|
265 | 264 | return magic_deco |
|
266 | 265 | |
|
267 | 266 | |
|
268 | 267 | # Create the actual decorators for public use |
|
269 | 268 | |
|
270 | 269 | # These three are used to decorate methods in class definitions |
|
271 | 270 | line_magic = _method_magic_marker('line') |
|
272 | 271 | cell_magic = _method_magic_marker('cell') |
|
273 | 272 | line_cell_magic = _method_magic_marker('line_cell') |
|
274 | 273 | |
|
275 | 274 | # These three decorate standalone functions and perform the decoration |
|
276 | 275 | # immediately. They can only run where get_ipython() works |
|
277 | 276 | register_line_magic = _function_magic_marker('line') |
|
278 | 277 | register_cell_magic = _function_magic_marker('cell') |
|
279 | 278 | register_line_cell_magic = _function_magic_marker('line_cell') |
|
280 | 279 | |
|
281 | 280 | #----------------------------------------------------------------------------- |
|
282 | 281 | # Core Magic classes |
|
283 | 282 | #----------------------------------------------------------------------------- |
|
284 | 283 | |
|
285 | 284 | class MagicsManager(Configurable): |
|
286 | 285 | """Object that handles all magic-related functionality for IPython. |
|
287 | 286 | """ |
|
288 | 287 | # Non-configurable class attributes |
|
289 | 288 | |
|
290 | 289 | # A two-level dict, first keyed by magic type, then by magic function, and |
|
291 | 290 | # holding the actual callable object as value. This is the dict used for |
|
292 | 291 | # magic function dispatch |
|
293 | 292 | magics = Dict() |
|
294 | 293 | |
|
295 | 294 | # A registry of the original objects that we've been given holding magics. |
|
296 | 295 | registry = Dict() |
|
297 | 296 | |
|
298 | 297 | shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) |
|
299 | 298 | |
|
300 | 299 | auto_magic = Bool(True, help= |
|
301 | 300 | "Automatically call line magics without requiring explicit % prefix" |
|
302 | 301 | ).tag(config=True) |
|
303 | 302 | @observe('auto_magic') |
|
304 | 303 | def _auto_magic_changed(self, change): |
|
305 | 304 | self.shell.automagic = change['new'] |
|
306 | 305 | |
|
307 | 306 | _auto_status = [ |
|
308 | 307 | 'Automagic is OFF, % prefix IS needed for line magics.', |
|
309 | 308 | 'Automagic is ON, % prefix IS NOT needed for line magics.'] |
|
310 | 309 | |
|
311 | 310 | user_magics = Instance('IPython.core.magics.UserMagics', allow_none=True) |
|
312 | 311 | |
|
313 | 312 | def __init__(self, shell=None, config=None, user_magics=None, **traits): |
|
314 | 313 | |
|
315 | 314 | super(MagicsManager, self).__init__(shell=shell, config=config, |
|
316 | 315 | user_magics=user_magics, **traits) |
|
317 | 316 | self.magics = dict(line={}, cell={}) |
|
318 | 317 | # Let's add the user_magics to the registry for uniformity, so *all* |
|
319 | 318 | # registered magic containers can be found there. |
|
320 | 319 | self.registry[user_magics.__class__.__name__] = user_magics |
|
321 | 320 | |
|
322 | 321 | def auto_status(self): |
|
323 | 322 | """Return descriptive string with automagic status.""" |
|
324 | 323 | return self._auto_status[self.auto_magic] |
|
325 | 324 | |
|
326 | 325 | def lsmagic(self): |
|
327 | 326 | """Return a dict of currently available magic functions. |
|
328 | 327 | |
|
329 | 328 | The return dict has the keys 'line' and 'cell', corresponding to the |
|
330 | 329 | two types of magics we support. Each value is a list of names. |
|
331 | 330 | """ |
|
332 | 331 | return self.magics |
|
333 | 332 | |
|
334 | 333 | def lsmagic_docs(self, brief=False, missing=''): |
|
335 | 334 | """Return dict of documentation of magic functions. |
|
336 | 335 | |
|
337 | 336 | The return dict has the keys 'line' and 'cell', corresponding to the |
|
338 | 337 | two types of magics we support. Each value is a dict keyed by magic |
|
339 | 338 | name whose value is the function docstring. If a docstring is |
|
340 | 339 | unavailable, the value of `missing` is used instead. |
|
341 | 340 | |
|
342 | 341 | If brief is True, only the first line of each docstring will be returned. |
|
343 | 342 | """ |
|
344 | 343 | docs = {} |
|
345 | 344 | for m_type in self.magics: |
|
346 | 345 | m_docs = {} |
|
347 | 346 | for m_name, m_func in self.magics[m_type].items(): |
|
348 | 347 | if m_func.__doc__: |
|
349 | 348 | if brief: |
|
350 | 349 | m_docs[m_name] = m_func.__doc__.split('\n', 1)[0] |
|
351 | 350 | else: |
|
352 | 351 | m_docs[m_name] = m_func.__doc__.rstrip() |
|
353 | 352 | else: |
|
354 | 353 | m_docs[m_name] = missing |
|
355 | 354 | docs[m_type] = m_docs |
|
356 | 355 | return docs |
|
357 | 356 | |
|
358 | 357 | def register(self, *magic_objects): |
|
359 | 358 | """Register one or more instances of Magics. |
|
360 | 359 | |
|
361 | 360 | Take one or more classes or instances of classes that subclass the main |
|
362 | 361 | `core.Magic` class, and register them with IPython to use the magic |
|
363 | 362 | functions they provide. The registration process will then ensure that |
|
364 | 363 | any methods that have decorated to provide line and/or cell magics will |
|
365 | 364 | be recognized with the `%x`/`%%x` syntax as a line/cell magic |
|
366 | 365 | respectively. |
|
367 | 366 | |
|
368 | 367 | If classes are given, they will be instantiated with the default |
|
369 | 368 | constructor. If your classes need a custom constructor, you should |
|
370 | 369 | instanitate them first and pass the instance. |
|
371 | 370 | |
|
372 | 371 | The provided arguments can be an arbitrary mix of classes and instances. |
|
373 | 372 | |
|
374 | 373 | Parameters |
|
375 | 374 | ---------- |
|
376 | 375 | magic_objects : one or more classes or instances |
|
377 | 376 | """ |
|
378 | 377 | # Start by validating them to ensure they have all had their magic |
|
379 | 378 | # methods registered at the instance level |
|
380 | 379 | for m in magic_objects: |
|
381 | 380 | if not m.registered: |
|
382 | 381 | raise ValueError("Class of magics %r was constructed without " |
|
383 | 382 | "the @register_magics class decorator") |
|
384 | 383 | if isinstance(m, type): |
|
385 | 384 | # If we're given an uninstantiated class |
|
386 | 385 | m = m(shell=self.shell) |
|
387 | 386 | |
|
388 | 387 | # Now that we have an instance, we can register it and update the |
|
389 | 388 | # table of callables |
|
390 | 389 | self.registry[m.__class__.__name__] = m |
|
391 | 390 | for mtype in magic_kinds: |
|
392 | 391 | self.magics[mtype].update(m.magics[mtype]) |
|
393 | 392 | |
|
394 | 393 | def register_function(self, func, magic_kind='line', magic_name=None): |
|
395 | 394 | """Expose a standalone function as magic function for IPython. |
|
396 | 395 | |
|
397 | 396 | This will create an IPython magic (line, cell or both) from a |
|
398 | 397 | standalone function. The functions should have the following |
|
399 | 398 | signatures: |
|
400 | 399 | |
|
401 | 400 | * For line magics: `def f(line)` |
|
402 | 401 | * For cell magics: `def f(line, cell)` |
|
403 | 402 | * For a function that does both: `def f(line, cell=None)` |
|
404 | 403 | |
|
405 | 404 | In the latter case, the function will be called with `cell==None` when |
|
406 | 405 | invoked as `%f`, and with cell as a string when invoked as `%%f`. |
|
407 | 406 | |
|
408 | 407 | Parameters |
|
409 | 408 | ---------- |
|
410 | 409 | func : callable |
|
411 | 410 | Function to be registered as a magic. |
|
412 | 411 | |
|
413 | 412 | magic_kind : str |
|
414 | 413 | Kind of magic, one of 'line', 'cell' or 'line_cell' |
|
415 | 414 | |
|
416 | 415 | magic_name : optional str |
|
417 | 416 | If given, the name the magic will have in the IPython namespace. By |
|
418 | 417 | default, the name of the function itself is used. |
|
419 | 418 | """ |
|
420 | 419 | |
|
421 | 420 | # Create the new method in the user_magics and register it in the |
|
422 | 421 | # global table |
|
423 | 422 | validate_type(magic_kind) |
|
424 | 423 | magic_name = func.__name__ if magic_name is None else magic_name |
|
425 | 424 | setattr(self.user_magics, magic_name, func) |
|
426 | 425 | record_magic(self.magics, magic_kind, magic_name, func) |
|
427 | 426 | |
|
428 | 427 | def register_alias(self, alias_name, magic_name, magic_kind='line'): |
|
429 | 428 | """Register an alias to a magic function. |
|
430 | 429 | |
|
431 | 430 | The alias is an instance of :class:`MagicAlias`, which holds the |
|
432 | 431 | name and kind of the magic it should call. Binding is done at |
|
433 | 432 | call time, so if the underlying magic function is changed the alias |
|
434 | 433 | will call the new function. |
|
435 | 434 | |
|
436 | 435 | Parameters |
|
437 | 436 | ---------- |
|
438 | 437 | alias_name : str |
|
439 | 438 | The name of the magic to be registered. |
|
440 | 439 | |
|
441 | 440 | magic_name : str |
|
442 | 441 | The name of an existing magic. |
|
443 | 442 | |
|
444 | 443 | magic_kind : str |
|
445 | 444 | Kind of magic, one of 'line' or 'cell' |
|
446 | 445 | """ |
|
447 | 446 | |
|
448 | 447 | # `validate_type` is too permissive, as it allows 'line_cell' |
|
449 | 448 | # which we do not handle. |
|
450 | 449 | if magic_kind not in magic_kinds: |
|
451 | 450 | raise ValueError('magic_kind must be one of %s, %s given' % |
|
452 | 451 | magic_kinds, magic_kind) |
|
453 | 452 | |
|
454 | 453 | alias = MagicAlias(self.shell, magic_name, magic_kind) |
|
455 | 454 | setattr(self.user_magics, alias_name, alias) |
|
456 | 455 | record_magic(self.magics, magic_kind, alias_name, alias) |
|
457 | 456 | |
|
458 | 457 | # Key base class that provides the central functionality for magics. |
|
459 | 458 | |
|
460 | 459 | |
|
461 | 460 | class Magics(Configurable): |
|
462 | 461 | """Base class for implementing magic functions. |
|
463 | 462 | |
|
464 | 463 | Shell functions which can be reached as %function_name. All magic |
|
465 | 464 | functions should accept a string, which they can parse for their own |
|
466 | 465 | needs. This can make some functions easier to type, eg `%cd ../` |
|
467 | 466 | vs. `%cd("../")` |
|
468 | 467 | |
|
469 | 468 | Classes providing magic functions need to subclass this class, and they |
|
470 | 469 | MUST: |
|
471 | 470 | |
|
472 | 471 | - Use the method decorators `@line_magic` and `@cell_magic` to decorate |
|
473 | 472 | individual methods as magic functions, AND |
|
474 | 473 | |
|
475 | 474 | - Use the class decorator `@magics_class` to ensure that the magic |
|
476 | 475 | methods are properly registered at the instance level upon instance |
|
477 | 476 | initialization. |
|
478 | 477 | |
|
479 | 478 | See :mod:`magic_functions` for examples of actual implementation classes. |
|
480 | 479 | """ |
|
481 | 480 | # Dict holding all command-line options for each magic. |
|
482 | 481 | options_table = None |
|
483 | 482 | # Dict for the mapping of magic names to methods, set by class decorator |
|
484 | 483 | magics = None |
|
485 | 484 | # Flag to check that the class decorator was properly applied |
|
486 | 485 | registered = False |
|
487 | 486 | # Instance of IPython shell |
|
488 | 487 | shell = None |
|
489 | 488 | |
|
490 | 489 | def __init__(self, shell=None, **kwargs): |
|
491 | 490 | if not(self.__class__.registered): |
|
492 | 491 | raise ValueError('Magics subclass without registration - ' |
|
493 | 492 | 'did you forget to apply @magics_class?') |
|
494 | 493 | if shell is not None: |
|
495 | 494 | if hasattr(shell, 'configurables'): |
|
496 | 495 | shell.configurables.append(self) |
|
497 | 496 | if hasattr(shell, 'config'): |
|
498 | 497 | kwargs.setdefault('parent', shell) |
|
499 | 498 | |
|
500 | 499 | self.shell = shell |
|
501 | 500 | self.options_table = {} |
|
502 | 501 | # The method decorators are run when the instance doesn't exist yet, so |
|
503 | 502 | # they can only record the names of the methods they are supposed to |
|
504 | 503 | # grab. Only now, that the instance exists, can we create the proper |
|
505 | 504 | # mapping to bound methods. So we read the info off the original names |
|
506 | 505 | # table and replace each method name by the actual bound method. |
|
507 | 506 | # But we mustn't clobber the *class* mapping, in case of multiple instances. |
|
508 | 507 | class_magics = self.magics |
|
509 | 508 | self.magics = {} |
|
510 | 509 | for mtype in magic_kinds: |
|
511 | 510 | tab = self.magics[mtype] = {} |
|
512 | 511 | cls_tab = class_magics[mtype] |
|
513 | 512 | for magic_name, meth_name in cls_tab.items(): |
|
514 |
if isinstance(meth_name, str |
|
|
513 | if isinstance(meth_name, str): | |
|
515 | 514 | # it's a method name, grab it |
|
516 | 515 | tab[magic_name] = getattr(self, meth_name) |
|
517 | 516 | else: |
|
518 | 517 | # it's the real thing |
|
519 | 518 | tab[magic_name] = meth_name |
|
520 | 519 | # Configurable **needs** to be initiated at the end or the config |
|
521 | 520 | # magics get screwed up. |
|
522 | 521 | super(Magics, self).__init__(**kwargs) |
|
523 | 522 | |
|
524 | 523 | def arg_err(self,func): |
|
525 | 524 | """Print docstring if incorrect arguments were passed""" |
|
526 | 525 | print('Error in arguments:') |
|
527 | 526 | print(oinspect.getdoc(func)) |
|
528 | 527 | |
|
529 | 528 | def format_latex(self, strng): |
|
530 | 529 | """Format a string for latex inclusion.""" |
|
531 | 530 | |
|
532 | 531 | # Characters that need to be escaped for latex: |
|
533 | 532 | escape_re = re.compile(r'(%|_|\$|#|&)',re.MULTILINE) |
|
534 | 533 | # Magic command names as headers: |
|
535 | 534 | cmd_name_re = re.compile(r'^(%s.*?):' % ESC_MAGIC, |
|
536 | 535 | re.MULTILINE) |
|
537 | 536 | # Magic commands |
|
538 | 537 | cmd_re = re.compile(r'(?P<cmd>%s.+?\b)(?!\}\}:)' % ESC_MAGIC, |
|
539 | 538 | re.MULTILINE) |
|
540 | 539 | # Paragraph continue |
|
541 | 540 | par_re = re.compile(r'\\$',re.MULTILINE) |
|
542 | 541 | |
|
543 | 542 | # The "\n" symbol |
|
544 | 543 | newline_re = re.compile(r'\\n') |
|
545 | 544 | |
|
546 | 545 | # Now build the string for output: |
|
547 | 546 | #strng = cmd_name_re.sub(r'\n\\texttt{\\textsl{\\large \1}}:',strng) |
|
548 | 547 | strng = cmd_name_re.sub(r'\n\\bigskip\n\\texttt{\\textbf{ \1}}:', |
|
549 | 548 | strng) |
|
550 | 549 | strng = cmd_re.sub(r'\\texttt{\g<cmd>}',strng) |
|
551 | 550 | strng = par_re.sub(r'\\\\',strng) |
|
552 | 551 | strng = escape_re.sub(r'\\\1',strng) |
|
553 | 552 | strng = newline_re.sub(r'\\textbackslash{}n',strng) |
|
554 | 553 | return strng |
|
555 | 554 | |
|
556 | 555 | def parse_options(self, arg_str, opt_str, *long_opts, **kw): |
|
557 | 556 | """Parse options passed to an argument string. |
|
558 | 557 | |
|
559 | 558 | The interface is similar to that of :func:`getopt.getopt`, but it |
|
560 | 559 | returns a :class:`~IPython.utils.struct.Struct` with the options as keys |
|
561 | 560 | and the stripped argument string still as a string. |
|
562 | 561 | |
|
563 | 562 | arg_str is quoted as a true sys.argv vector by using shlex.split. |
|
564 | 563 | This allows us to easily expand variables, glob files, quote |
|
565 | 564 | arguments, etc. |
|
566 | 565 | |
|
567 | 566 | Parameters |
|
568 | 567 | ---------- |
|
569 | 568 | |
|
570 | 569 | arg_str : str |
|
571 | 570 | The arguments to parse. |
|
572 | 571 | |
|
573 | 572 | opt_str : str |
|
574 | 573 | The options specification. |
|
575 | 574 | |
|
576 | 575 | mode : str, default 'string' |
|
577 | 576 | If given as 'list', the argument string is returned as a list (split |
|
578 | 577 | on whitespace) instead of a string. |
|
579 | 578 | |
|
580 | 579 | list_all : bool, default False |
|
581 | 580 | Put all option values in lists. Normally only options |
|
582 | 581 | appearing more than once are put in a list. |
|
583 | 582 | |
|
584 | 583 | posix : bool, default True |
|
585 | 584 | Whether to split the input line in POSIX mode or not, as per the |
|
586 | 585 | conventions outlined in the :mod:`shlex` module from the standard |
|
587 | 586 | library. |
|
588 | 587 | """ |
|
589 | 588 | |
|
590 | 589 | # inject default options at the beginning of the input line |
|
591 | 590 | caller = sys._getframe(1).f_code.co_name |
|
592 | 591 | arg_str = '%s %s' % (self.options_table.get(caller,''),arg_str) |
|
593 | 592 | |
|
594 | 593 | mode = kw.get('mode','string') |
|
595 | 594 | if mode not in ['string','list']: |
|
596 | 595 | raise ValueError('incorrect mode given: %s' % mode) |
|
597 | 596 | # Get options |
|
598 | 597 | list_all = kw.get('list_all',0) |
|
599 | 598 | posix = kw.get('posix', os.name == 'posix') |
|
600 | 599 | strict = kw.get('strict', True) |
|
601 | 600 | |
|
602 | 601 | # Check if we have more than one argument to warrant extra processing: |
|
603 | 602 | odict = {} # Dictionary with options |
|
604 | 603 | args = arg_str.split() |
|
605 | 604 | if len(args) >= 1: |
|
606 | 605 | # If the list of inputs only has 0 or 1 thing in it, there's no |
|
607 | 606 | # need to look for options |
|
608 | 607 | argv = arg_split(arg_str, posix, strict) |
|
609 | 608 | # Do regular option processing |
|
610 | 609 | try: |
|
611 | 610 | opts,args = getopt(argv, opt_str, long_opts) |
|
612 | 611 | except GetoptError as e: |
|
613 | 612 | raise UsageError('%s ( allowed: "%s" %s)' % (e.msg,opt_str, |
|
614 | 613 | " ".join(long_opts))) |
|
615 | 614 | for o,a in opts: |
|
616 | 615 | if o.startswith('--'): |
|
617 | 616 | o = o[2:] |
|
618 | 617 | else: |
|
619 | 618 | o = o[1:] |
|
620 | 619 | try: |
|
621 | 620 | odict[o].append(a) |
|
622 | 621 | except AttributeError: |
|
623 | 622 | odict[o] = [odict[o],a] |
|
624 | 623 | except KeyError: |
|
625 | 624 | if list_all: |
|
626 | 625 | odict[o] = [a] |
|
627 | 626 | else: |
|
628 | 627 | odict[o] = a |
|
629 | 628 | |
|
630 | 629 | # Prepare opts,args for return |
|
631 | 630 | opts = Struct(odict) |
|
632 | 631 | if mode == 'string': |
|
633 | 632 | args = ' '.join(args) |
|
634 | 633 | |
|
635 | 634 | return opts,args |
|
636 | 635 | |
|
637 | 636 | def default_option(self, fn, optstr): |
|
638 | 637 | """Make an entry in the options_table for fn, with value optstr""" |
|
639 | 638 | |
|
640 | 639 | if fn not in self.lsmagic(): |
|
641 | 640 | error("%s is not a magic function" % fn) |
|
642 | 641 | self.options_table[fn] = optstr |
|
643 | 642 | |
|
644 | 643 | |
|
645 | 644 | class MagicAlias(object): |
|
646 | 645 | """An alias to another magic function. |
|
647 | 646 | |
|
648 | 647 | An alias is determined by its magic name and magic kind. Lookup |
|
649 | 648 | is done at call time, so if the underlying magic changes the alias |
|
650 | 649 | will call the new function. |
|
651 | 650 | |
|
652 | 651 | Use the :meth:`MagicsManager.register_alias` method or the |
|
653 | 652 | `%alias_magic` magic function to create and register a new alias. |
|
654 | 653 | """ |
|
655 | 654 | def __init__(self, shell, magic_name, magic_kind): |
|
656 | 655 | self.shell = shell |
|
657 | 656 | self.magic_name = magic_name |
|
658 | 657 | self.magic_kind = magic_kind |
|
659 | 658 | |
|
660 | 659 | self.pretty_target = '%s%s' % (magic_escapes[self.magic_kind], self.magic_name) |
|
661 | 660 | self.__doc__ = "Alias for `%s`." % self.pretty_target |
|
662 | 661 | |
|
663 | 662 | self._in_call = False |
|
664 | 663 | |
|
665 | 664 | def __call__(self, *args, **kwargs): |
|
666 | 665 | """Call the magic alias.""" |
|
667 | 666 | fn = self.shell.find_magic(self.magic_name, self.magic_kind) |
|
668 | 667 | if fn is None: |
|
669 | 668 | raise UsageError("Magic `%s` not found." % self.pretty_target) |
|
670 | 669 | |
|
671 | 670 | # Protect against infinite recursion. |
|
672 | 671 | if self._in_call: |
|
673 | 672 | raise UsageError("Infinite recursion detected; " |
|
674 | 673 | "magic aliases cannot call themselves.") |
|
675 | 674 | self._in_call = True |
|
676 | 675 | try: |
|
677 | 676 | return fn(*args, **kwargs) |
|
678 | 677 | finally: |
|
679 | 678 | self._in_call = False |
@@ -1,744 +1,743 b'' | |||
|
1 | 1 | """Implementation of code management magic functions. |
|
2 | 2 | """ |
|
3 | 3 | #----------------------------------------------------------------------------- |
|
4 | 4 | # Copyright (c) 2012 The IPython Development Team. |
|
5 | 5 | # |
|
6 | 6 | # Distributed under the terms of the Modified BSD License. |
|
7 | 7 | # |
|
8 | 8 | # The full license is in the file COPYING.txt, distributed with this software. |
|
9 | 9 | #----------------------------------------------------------------------------- |
|
10 | 10 | |
|
11 | 11 | #----------------------------------------------------------------------------- |
|
12 | 12 | # Imports |
|
13 | 13 | #----------------------------------------------------------------------------- |
|
14 | 14 | |
|
15 | 15 | # Stdlib |
|
16 | 16 | import inspect |
|
17 | 17 | import io |
|
18 | 18 | import os |
|
19 | 19 | import re |
|
20 | 20 | import sys |
|
21 | 21 | import ast |
|
22 | 22 | from itertools import chain |
|
23 | 23 | |
|
24 | 24 | # Our own packages |
|
25 | 25 | from IPython.core.error import TryNext, StdinNotImplementedError, UsageError |
|
26 | 26 | from IPython.core.macro import Macro |
|
27 | 27 | from IPython.core.magic import Magics, magics_class, line_magic |
|
28 | 28 | from IPython.core.oinspect import find_file, find_source_lines |
|
29 | 29 | from IPython.testing.skipdoctest import skip_doctest |
|
30 | 30 | from IPython.utils import py3compat |
|
31 | from IPython.utils.py3compat import string_types | |
|
32 | 31 | from IPython.utils.contexts import preserve_keys |
|
33 | 32 | from IPython.utils.path import get_py_filename |
|
34 | 33 | from warnings import warn |
|
35 | 34 | from logging import error |
|
36 | 35 | from IPython.utils.text import get_text_list |
|
37 | 36 | |
|
38 | 37 | #----------------------------------------------------------------------------- |
|
39 | 38 | # Magic implementation classes |
|
40 | 39 | #----------------------------------------------------------------------------- |
|
41 | 40 | |
|
42 | 41 | # Used for exception handling in magic_edit |
|
43 | 42 | class MacroToEdit(ValueError): pass |
|
44 | 43 | |
|
45 | 44 | ipython_input_pat = re.compile(r"<ipython\-input\-(\d+)-[a-z\d]+>$") |
|
46 | 45 | |
|
47 | 46 | # To match, e.g. 8-10 1:5 :10 3- |
|
48 | 47 | range_re = re.compile(r""" |
|
49 | 48 | (?P<start>\d+)? |
|
50 | 49 | ((?P<sep>[\-:]) |
|
51 | 50 | (?P<end>\d+)?)? |
|
52 | 51 | $""", re.VERBOSE) |
|
53 | 52 | |
|
54 | 53 | |
|
55 | 54 | def extract_code_ranges(ranges_str): |
|
56 | 55 | """Turn a string of range for %%load into 2-tuples of (start, stop) |
|
57 | 56 | ready to use as a slice of the content splitted by lines. |
|
58 | 57 | |
|
59 | 58 | Examples |
|
60 | 59 | -------- |
|
61 | 60 | list(extract_input_ranges("5-10 2")) |
|
62 | 61 | [(4, 10), (1, 2)] |
|
63 | 62 | """ |
|
64 | 63 | for range_str in ranges_str.split(): |
|
65 | 64 | rmatch = range_re.match(range_str) |
|
66 | 65 | if not rmatch: |
|
67 | 66 | continue |
|
68 | 67 | sep = rmatch.group("sep") |
|
69 | 68 | start = rmatch.group("start") |
|
70 | 69 | end = rmatch.group("end") |
|
71 | 70 | |
|
72 | 71 | if sep == '-': |
|
73 | 72 | start = int(start) - 1 if start else None |
|
74 | 73 | end = int(end) if end else None |
|
75 | 74 | elif sep == ':': |
|
76 | 75 | start = int(start) - 1 if start else None |
|
77 | 76 | end = int(end) - 1 if end else None |
|
78 | 77 | else: |
|
79 | 78 | end = int(start) |
|
80 | 79 | start = int(start) - 1 |
|
81 | 80 | yield (start, end) |
|
82 | 81 | |
|
83 | 82 | |
|
84 | 83 | @skip_doctest |
|
85 | 84 | def extract_symbols(code, symbols): |
|
86 | 85 | """ |
|
87 | 86 | Return a tuple (blocks, not_found) |
|
88 | 87 | where ``blocks`` is a list of code fragments |
|
89 | 88 | for each symbol parsed from code, and ``not_found`` are |
|
90 | 89 | symbols not found in the code. |
|
91 | 90 | |
|
92 | 91 | For example:: |
|
93 | 92 | |
|
94 | 93 | >>> code = '''a = 10 |
|
95 | 94 | |
|
96 | 95 | def b(): return 42 |
|
97 | 96 | |
|
98 | 97 | class A: pass''' |
|
99 | 98 | |
|
100 | 99 | >>> extract_symbols(code, 'A,b,z') |
|
101 | 100 | (["class A: pass", "def b(): return 42"], ['z']) |
|
102 | 101 | """ |
|
103 | 102 | symbols = symbols.split(',') |
|
104 | 103 | |
|
105 | 104 | # this will raise SyntaxError if code isn't valid Python |
|
106 | 105 | py_code = ast.parse(code) |
|
107 | 106 | |
|
108 | 107 | marks = [(getattr(s, 'name', None), s.lineno) for s in py_code.body] |
|
109 | 108 | code = code.split('\n') |
|
110 | 109 | |
|
111 | 110 | symbols_lines = {} |
|
112 | 111 | |
|
113 | 112 | # we already know the start_lineno of each symbol (marks). |
|
114 | 113 | # To find each end_lineno, we traverse in reverse order until each |
|
115 | 114 | # non-blank line |
|
116 | 115 | end = len(code) |
|
117 | 116 | for name, start in reversed(marks): |
|
118 | 117 | while not code[end - 1].strip(): |
|
119 | 118 | end -= 1 |
|
120 | 119 | if name: |
|
121 | 120 | symbols_lines[name] = (start - 1, end) |
|
122 | 121 | end = start - 1 |
|
123 | 122 | |
|
124 | 123 | # Now symbols_lines is a map |
|
125 | 124 | # {'symbol_name': (start_lineno, end_lineno), ...} |
|
126 | 125 | |
|
127 | 126 | # fill a list with chunks of codes for each requested symbol |
|
128 | 127 | blocks = [] |
|
129 | 128 | not_found = [] |
|
130 | 129 | for symbol in symbols: |
|
131 | 130 | if symbol in symbols_lines: |
|
132 | 131 | start, end = symbols_lines[symbol] |
|
133 | 132 | blocks.append('\n'.join(code[start:end]) + '\n') |
|
134 | 133 | else: |
|
135 | 134 | not_found.append(symbol) |
|
136 | 135 | |
|
137 | 136 | return blocks, not_found |
|
138 | 137 | |
|
139 | 138 | def strip_initial_indent(lines): |
|
140 | 139 | """For %load, strip indent from lines until finding an unindented line. |
|
141 | 140 | |
|
142 | 141 | https://github.com/ipython/ipython/issues/9775 |
|
143 | 142 | """ |
|
144 | 143 | indent_re = re.compile(r'\s+') |
|
145 | 144 | |
|
146 | 145 | it = iter(lines) |
|
147 | 146 | first_line = next(it) |
|
148 | 147 | indent_match = indent_re.match(first_line) |
|
149 | 148 | |
|
150 | 149 | if indent_match: |
|
151 | 150 | # First line was indented |
|
152 | 151 | indent = indent_match.group() |
|
153 | 152 | yield first_line[len(indent):] |
|
154 | 153 | |
|
155 | 154 | for line in it: |
|
156 | 155 | if line.startswith(indent): |
|
157 | 156 | yield line[len(indent):] |
|
158 | 157 | else: |
|
159 | 158 | # Less indented than the first line - stop dedenting |
|
160 | 159 | yield line |
|
161 | 160 | break |
|
162 | 161 | else: |
|
163 | 162 | yield first_line |
|
164 | 163 | |
|
165 | 164 | # Pass the remaining lines through without dedenting |
|
166 | 165 | for line in it: |
|
167 | 166 | yield line |
|
168 | 167 | |
|
169 | 168 | |
|
170 | 169 | class InteractivelyDefined(Exception): |
|
171 | 170 | """Exception for interactively defined variable in magic_edit""" |
|
172 | 171 | def __init__(self, index): |
|
173 | 172 | self.index = index |
|
174 | 173 | |
|
175 | 174 | |
|
176 | 175 | @magics_class |
|
177 | 176 | class CodeMagics(Magics): |
|
178 | 177 | """Magics related to code management (loading, saving, editing, ...).""" |
|
179 | 178 | |
|
180 | 179 | def __init__(self, *args, **kwargs): |
|
181 | 180 | self._knowntemps = set() |
|
182 | 181 | super(CodeMagics, self).__init__(*args, **kwargs) |
|
183 | 182 | |
|
184 | 183 | @line_magic |
|
185 | 184 | def save(self, parameter_s=''): |
|
186 | 185 | """Save a set of lines or a macro to a given filename. |
|
187 | 186 | |
|
188 | 187 | Usage:\\ |
|
189 | 188 | %save [options] filename n1-n2 n3-n4 ... n5 .. n6 ... |
|
190 | 189 | |
|
191 | 190 | Options: |
|
192 | 191 | |
|
193 | 192 | -r: use 'raw' input. By default, the 'processed' history is used, |
|
194 | 193 | so that magics are loaded in their transformed version to valid |
|
195 | 194 | Python. If this option is given, the raw input as typed as the |
|
196 | 195 | command line is used instead. |
|
197 | 196 | |
|
198 | 197 | -f: force overwrite. If file exists, %save will prompt for overwrite |
|
199 | 198 | unless -f is given. |
|
200 | 199 | |
|
201 | 200 | -a: append to the file instead of overwriting it. |
|
202 | 201 | |
|
203 | 202 | This function uses the same syntax as %history for input ranges, |
|
204 | 203 | then saves the lines to the filename you specify. |
|
205 | 204 | |
|
206 | 205 | It adds a '.py' extension to the file if you don't do so yourself, and |
|
207 | 206 | it asks for confirmation before overwriting existing files. |
|
208 | 207 | |
|
209 | 208 | If `-r` option is used, the default extension is `.ipy`. |
|
210 | 209 | """ |
|
211 | 210 | |
|
212 | 211 | opts,args = self.parse_options(parameter_s,'fra',mode='list') |
|
213 | 212 | if not args: |
|
214 | 213 | raise UsageError('Missing filename.') |
|
215 | 214 | raw = 'r' in opts |
|
216 | 215 | force = 'f' in opts |
|
217 | 216 | append = 'a' in opts |
|
218 | 217 | mode = 'a' if append else 'w' |
|
219 | 218 | ext = u'.ipy' if raw else u'.py' |
|
220 | 219 | fname, codefrom = args[0], " ".join(args[1:]) |
|
221 | 220 | if not fname.endswith((u'.py',u'.ipy')): |
|
222 | 221 | fname += ext |
|
223 | 222 | file_exists = os.path.isfile(fname) |
|
224 | 223 | if file_exists and not force and not append: |
|
225 | 224 | try: |
|
226 | 225 | overwrite = self.shell.ask_yes_no('File `%s` exists. Overwrite (y/[N])? ' % fname, default='n') |
|
227 | 226 | except StdinNotImplementedError: |
|
228 | 227 | print("File `%s` exists. Use `%%save -f %s` to force overwrite" % (fname, parameter_s)) |
|
229 | 228 | return |
|
230 | 229 | if not overwrite : |
|
231 | 230 | print('Operation cancelled.') |
|
232 | 231 | return |
|
233 | 232 | try: |
|
234 | 233 | cmds = self.shell.find_user_code(codefrom,raw) |
|
235 | 234 | except (TypeError, ValueError) as e: |
|
236 | 235 | print(e.args[0]) |
|
237 | 236 | return |
|
238 | 237 | out = py3compat.cast_unicode(cmds) |
|
239 | 238 | with io.open(fname, mode, encoding="utf-8") as f: |
|
240 | 239 | if not file_exists or not append: |
|
241 | 240 | f.write(u"# coding: utf-8\n") |
|
242 | 241 | f.write(out) |
|
243 | 242 | # make sure we end on a newline |
|
244 | 243 | if not out.endswith(u'\n'): |
|
245 | 244 | f.write(u'\n') |
|
246 | 245 | print('The following commands were written to file `%s`:' % fname) |
|
247 | 246 | print(cmds) |
|
248 | 247 | |
|
249 | 248 | @line_magic |
|
250 | 249 | def pastebin(self, parameter_s=''): |
|
251 | 250 | """Upload code to Github's Gist paste bin, returning the URL. |
|
252 | 251 | |
|
253 | 252 | Usage:\\ |
|
254 | 253 | %pastebin [-d "Custom description"] 1-7 |
|
255 | 254 | |
|
256 | 255 | The argument can be an input history range, a filename, or the name of a |
|
257 | 256 | string or macro. |
|
258 | 257 | |
|
259 | 258 | Options: |
|
260 | 259 | |
|
261 | 260 | -d: Pass a custom description for the gist. The default will say |
|
262 | 261 | "Pasted from IPython". |
|
263 | 262 | """ |
|
264 | 263 | opts, args = self.parse_options(parameter_s, 'd:') |
|
265 | 264 | |
|
266 | 265 | try: |
|
267 | 266 | code = self.shell.find_user_code(args) |
|
268 | 267 | except (ValueError, TypeError) as e: |
|
269 | 268 | print(e.args[0]) |
|
270 | 269 | return |
|
271 | 270 | |
|
272 | 271 | # Deferred import |
|
273 | 272 | try: |
|
274 | 273 | from urllib.request import urlopen # Py 3 |
|
275 | 274 | except ImportError: |
|
276 | 275 | from urllib2 import urlopen |
|
277 | 276 | import json |
|
278 | 277 | post_data = json.dumps({ |
|
279 | 278 | "description": opts.get('d', "Pasted from IPython"), |
|
280 | 279 | "public": True, |
|
281 | 280 | "files": { |
|
282 | 281 | "file1.py": { |
|
283 | 282 | "content": code |
|
284 | 283 | } |
|
285 | 284 | } |
|
286 | 285 | }).encode('utf-8') |
|
287 | 286 | |
|
288 | 287 | response = urlopen("https://api.github.com/gists", post_data) |
|
289 | 288 | response_data = json.loads(response.read().decode('utf-8')) |
|
290 | 289 | return response_data['html_url'] |
|
291 | 290 | |
|
292 | 291 | @line_magic |
|
293 | 292 | def loadpy(self, arg_s): |
|
294 | 293 | """Alias of `%load` |
|
295 | 294 | |
|
296 | 295 | `%loadpy` has gained some flexibility and dropped the requirement of a `.py` |
|
297 | 296 | extension. So it has been renamed simply into %load. You can look at |
|
298 | 297 | `%load`'s docstring for more info. |
|
299 | 298 | """ |
|
300 | 299 | self.load(arg_s) |
|
301 | 300 | |
|
302 | 301 | @line_magic |
|
303 | 302 | def load(self, arg_s): |
|
304 | 303 | """Load code into the current frontend. |
|
305 | 304 | |
|
306 | 305 | Usage:\\ |
|
307 | 306 | %load [options] source |
|
308 | 307 | |
|
309 | 308 | where source can be a filename, URL, input history range, macro, or |
|
310 | 309 | element in the user namespace |
|
311 | 310 | |
|
312 | 311 | Options: |
|
313 | 312 | |
|
314 | 313 | -r <lines>: Specify lines or ranges of lines to load from the source. |
|
315 | 314 | Ranges could be specified as x-y (x..y) or in python-style x:y |
|
316 | 315 | (x..(y-1)). Both limits x and y can be left blank (meaning the |
|
317 | 316 | beginning and end of the file, respectively). |
|
318 | 317 | |
|
319 | 318 | -s <symbols>: Specify function or classes to load from python source. |
|
320 | 319 | |
|
321 | 320 | -y : Don't ask confirmation for loading source above 200 000 characters. |
|
322 | 321 | |
|
323 | 322 | -n : Include the user's namespace when searching for source code. |
|
324 | 323 | |
|
325 | 324 | This magic command can either take a local filename, a URL, an history |
|
326 | 325 | range (see %history) or a macro as argument, it will prompt for |
|
327 | 326 | confirmation before loading source with more than 200 000 characters, unless |
|
328 | 327 | -y flag is passed or if the frontend does not support raw_input:: |
|
329 | 328 | |
|
330 | 329 | %load myscript.py |
|
331 | 330 | %load 7-27 |
|
332 | 331 | %load myMacro |
|
333 | 332 | %load http://www.example.com/myscript.py |
|
334 | 333 | %load -r 5-10 myscript.py |
|
335 | 334 | %load -r 10-20,30,40: foo.py |
|
336 | 335 | %load -s MyClass,wonder_function myscript.py |
|
337 | 336 | %load -n MyClass |
|
338 | 337 | %load -n my_module.wonder_function |
|
339 | 338 | """ |
|
340 | 339 | opts,args = self.parse_options(arg_s,'yns:r:') |
|
341 | 340 | |
|
342 | 341 | if not args: |
|
343 | 342 | raise UsageError('Missing filename, URL, input history range, ' |
|
344 | 343 | 'macro, or element in the user namespace.') |
|
345 | 344 | |
|
346 | 345 | search_ns = 'n' in opts |
|
347 | 346 | |
|
348 | 347 | contents = self.shell.find_user_code(args, search_ns=search_ns) |
|
349 | 348 | |
|
350 | 349 | if 's' in opts: |
|
351 | 350 | try: |
|
352 | 351 | blocks, not_found = extract_symbols(contents, opts['s']) |
|
353 | 352 | except SyntaxError: |
|
354 | 353 | # non python code |
|
355 | 354 | error("Unable to parse the input as valid Python code") |
|
356 | 355 | return |
|
357 | 356 | |
|
358 | 357 | if len(not_found) == 1: |
|
359 | 358 | warn('The symbol `%s` was not found' % not_found[0]) |
|
360 | 359 | elif len(not_found) > 1: |
|
361 | 360 | warn('The symbols %s were not found' % get_text_list(not_found, |
|
362 | 361 | wrap_item_with='`') |
|
363 | 362 | ) |
|
364 | 363 | |
|
365 | 364 | contents = '\n'.join(blocks) |
|
366 | 365 | |
|
367 | 366 | if 'r' in opts: |
|
368 | 367 | ranges = opts['r'].replace(',', ' ') |
|
369 | 368 | lines = contents.split('\n') |
|
370 | 369 | slices = extract_code_ranges(ranges) |
|
371 | 370 | contents = [lines[slice(*slc)] for slc in slices] |
|
372 | 371 | contents = '\n'.join(strip_initial_indent(chain.from_iterable(contents))) |
|
373 | 372 | |
|
374 | 373 | l = len(contents) |
|
375 | 374 | |
|
376 | 375 | # 200 000 is ~ 2500 full 80 caracter lines |
|
377 | 376 | # so in average, more than 5000 lines |
|
378 | 377 | if l > 200000 and 'y' not in opts: |
|
379 | 378 | try: |
|
380 | 379 | ans = self.shell.ask_yes_no(("The text you're trying to load seems pretty big"\ |
|
381 | 380 | " (%d characters). Continue (y/[N]) ?" % l), default='n' ) |
|
382 | 381 | except StdinNotImplementedError: |
|
383 | 382 | #asume yes if raw input not implemented |
|
384 | 383 | ans = True |
|
385 | 384 | |
|
386 | 385 | if ans is False : |
|
387 | 386 | print('Operation cancelled.') |
|
388 | 387 | return |
|
389 | 388 | |
|
390 | 389 | contents = "# %load {}\n".format(arg_s) + contents |
|
391 | 390 | |
|
392 | 391 | self.shell.set_next_input(contents, replace=True) |
|
393 | 392 | |
|
394 | 393 | @staticmethod |
|
395 | 394 | def _find_edit_target(shell, args, opts, last_call): |
|
396 | 395 | """Utility method used by magic_edit to find what to edit.""" |
|
397 | 396 | |
|
398 | 397 | def make_filename(arg): |
|
399 | 398 | "Make a filename from the given args" |
|
400 | 399 | try: |
|
401 | 400 | filename = get_py_filename(arg) |
|
402 | 401 | except IOError: |
|
403 | 402 | # If it ends with .py but doesn't already exist, assume we want |
|
404 | 403 | # a new file. |
|
405 | 404 | if arg.endswith('.py'): |
|
406 | 405 | filename = arg |
|
407 | 406 | else: |
|
408 | 407 | filename = None |
|
409 | 408 | return filename |
|
410 | 409 | |
|
411 | 410 | # Set a few locals from the options for convenience: |
|
412 | 411 | opts_prev = 'p' in opts |
|
413 | 412 | opts_raw = 'r' in opts |
|
414 | 413 | |
|
415 | 414 | # custom exceptions |
|
416 | 415 | class DataIsObject(Exception): pass |
|
417 | 416 | |
|
418 | 417 | # Default line number value |
|
419 | 418 | lineno = opts.get('n',None) |
|
420 | 419 | |
|
421 | 420 | if opts_prev: |
|
422 | 421 | args = '_%s' % last_call[0] |
|
423 | 422 | if args not in shell.user_ns: |
|
424 | 423 | args = last_call[1] |
|
425 | 424 | |
|
426 | 425 | # by default this is done with temp files, except when the given |
|
427 | 426 | # arg is a filename |
|
428 | 427 | use_temp = True |
|
429 | 428 | |
|
430 | 429 | data = '' |
|
431 | 430 | |
|
432 | 431 | # First, see if the arguments should be a filename. |
|
433 | 432 | filename = make_filename(args) |
|
434 | 433 | if filename: |
|
435 | 434 | use_temp = False |
|
436 | 435 | elif args: |
|
437 | 436 | # Mode where user specifies ranges of lines, like in %macro. |
|
438 | 437 | data = shell.extract_input_lines(args, opts_raw) |
|
439 | 438 | if not data: |
|
440 | 439 | try: |
|
441 | 440 | # Load the parameter given as a variable. If not a string, |
|
442 | 441 | # process it as an object instead (below) |
|
443 | 442 | |
|
444 | 443 | #print '*** args',args,'type',type(args) # dbg |
|
445 | 444 | data = eval(args, shell.user_ns) |
|
446 |
if not isinstance(data, str |
|
|
445 | if not isinstance(data, str): | |
|
447 | 446 | raise DataIsObject |
|
448 | 447 | |
|
449 | 448 | except (NameError,SyntaxError): |
|
450 | 449 | # given argument is not a variable, try as a filename |
|
451 | 450 | filename = make_filename(args) |
|
452 | 451 | if filename is None: |
|
453 | 452 | warn("Argument given (%s) can't be found as a variable " |
|
454 | 453 | "or as a filename." % args) |
|
455 | 454 | return (None, None, None) |
|
456 | 455 | use_temp = False |
|
457 | 456 | |
|
458 | 457 | except DataIsObject: |
|
459 | 458 | # macros have a special edit function |
|
460 | 459 | if isinstance(data, Macro): |
|
461 | 460 | raise MacroToEdit(data) |
|
462 | 461 | |
|
463 | 462 | # For objects, try to edit the file where they are defined |
|
464 | 463 | filename = find_file(data) |
|
465 | 464 | if filename: |
|
466 | 465 | if 'fakemodule' in filename.lower() and \ |
|
467 | 466 | inspect.isclass(data): |
|
468 | 467 | # class created by %edit? Try to find source |
|
469 | 468 | # by looking for method definitions instead, the |
|
470 | 469 | # __module__ in those classes is FakeModule. |
|
471 | 470 | attrs = [getattr(data, aname) for aname in dir(data)] |
|
472 | 471 | for attr in attrs: |
|
473 | 472 | if not inspect.ismethod(attr): |
|
474 | 473 | continue |
|
475 | 474 | filename = find_file(attr) |
|
476 | 475 | if filename and \ |
|
477 | 476 | 'fakemodule' not in filename.lower(): |
|
478 | 477 | # change the attribute to be the edit |
|
479 | 478 | # target instead |
|
480 | 479 | data = attr |
|
481 | 480 | break |
|
482 | 481 | |
|
483 | 482 | m = ipython_input_pat.match(os.path.basename(filename)) |
|
484 | 483 | if m: |
|
485 | 484 | raise InteractivelyDefined(int(m.groups()[0])) |
|
486 | 485 | |
|
487 | 486 | datafile = 1 |
|
488 | 487 | if filename is None: |
|
489 | 488 | filename = make_filename(args) |
|
490 | 489 | datafile = 1 |
|
491 | 490 | if filename is not None: |
|
492 | 491 | # only warn about this if we get a real name |
|
493 | 492 | warn('Could not find file where `%s` is defined.\n' |
|
494 | 493 | 'Opening a file named `%s`' % (args, filename)) |
|
495 | 494 | # Now, make sure we can actually read the source (if it was |
|
496 | 495 | # in a temp file it's gone by now). |
|
497 | 496 | if datafile: |
|
498 | 497 | if lineno is None: |
|
499 | 498 | lineno = find_source_lines(data) |
|
500 | 499 | if lineno is None: |
|
501 | 500 | filename = make_filename(args) |
|
502 | 501 | if filename is None: |
|
503 | 502 | warn('The file where `%s` was defined ' |
|
504 | 503 | 'cannot be read or found.' % data) |
|
505 | 504 | return (None, None, None) |
|
506 | 505 | use_temp = False |
|
507 | 506 | |
|
508 | 507 | if use_temp: |
|
509 | 508 | filename = shell.mktempfile(data) |
|
510 | 509 | print('IPython will make a temporary file named:',filename) |
|
511 | 510 | |
|
512 | 511 | # use last_call to remember the state of the previous call, but don't |
|
513 | 512 | # let it be clobbered by successive '-p' calls. |
|
514 | 513 | try: |
|
515 | 514 | last_call[0] = shell.displayhook.prompt_count |
|
516 | 515 | if not opts_prev: |
|
517 | 516 | last_call[1] = args |
|
518 | 517 | except: |
|
519 | 518 | pass |
|
520 | 519 | |
|
521 | 520 | |
|
522 | 521 | return filename, lineno, use_temp |
|
523 | 522 | |
|
524 | 523 | def _edit_macro(self,mname,macro): |
|
525 | 524 | """open an editor with the macro data in a file""" |
|
526 | 525 | filename = self.shell.mktempfile(macro.value) |
|
527 | 526 | self.shell.hooks.editor(filename) |
|
528 | 527 | |
|
529 | 528 | # and make a new macro object, to replace the old one |
|
530 | 529 | with open(filename) as mfile: |
|
531 | 530 | mvalue = mfile.read() |
|
532 | 531 | self.shell.user_ns[mname] = Macro(mvalue) |
|
533 | 532 | |
|
    @skip_doctest
    @line_magic
    def edit(self, parameter_s='',last_call=['','']):
        """Bring up an editor and execute the resulting code.

        Usage:
          %edit [options] [args]

        %edit runs IPython's editor hook. The default version of this hook is
        set to call the editor specified by your $EDITOR environment variable.
        If this isn't found, it will default to vi under Linux/Unix and to
        notepad under Windows. See the end of this docstring for how to change
        the editor hook.

        You can also set the value of this editor via the
        ``TerminalInteractiveShell.editor`` option in your configuration file.
        This is useful if you wish to use a different editor from your typical
        default with IPython (and for Windows users who typically don't set
        environment variables).

        This command allows you to conveniently edit multi-line code right in
        your IPython session.

        If called without arguments, %edit opens up an empty editor with a
        temporary file and will execute the contents of this file when you
        close it (don't forget to save it!).


        Options:

        -n <number>: open the editor at a specified line number. By default,
        the IPython editor hook uses the unix syntax 'editor +N filename', but
        you can configure this by providing your own modified hook if your
        favorite editor supports line-number specifications with a different
        syntax.

        -p: this will call the editor with the same data as the previous time
        it was used, regardless of how long ago (in your current session) it
        was.

        -r: use 'raw' input. This option only applies to input taken from the
        user's history. By default, the 'processed' history is used, so that
        magics are loaded in their transformed version to valid Python. If
        this option is given, the raw input as typed as the command line is
        used instead. When you exit the editor, it will be executed by
        IPython's own processor.

        -x: do not execute the edited code immediately upon exit. This is
        mainly useful if you are editing programs which need to be called with
        command line arguments, which you can then do using %run.


        Arguments:

        If arguments are given, the following possibilities exist:

        - If the argument is a filename, IPython will load that into the
          editor. It will execute its contents with execfile() when you exit,
          loading any code in the file into your interactive namespace.

        - The arguments are ranges of input history, e.g. "7 ~1/4-6".
          The syntax is the same as in the %history magic.

        - If the argument is a string variable, its contents are loaded
          into the editor. You can thus edit any string which contains
          python code (including the result of previous edits).

        - If the argument is the name of an object (other than a string),
          IPython will try to locate the file where it was defined and open
          the editor at the point where it is defined. You can use `%edit
          function` to load an editor exactly at the point where 'function'
          is defined, edit it and have the file be executed automatically.

        - If the object is a macro (see %macro for details), this opens up
          your specified editor with a temporary file containing the macro's
          data. Upon exit, the macro is reloaded with the contents of the
          file.

        Note: opening at an exact line is only supported under Unix, and some
        editors (like kedit and gedit up to Gnome 2.8) do not understand the
        '+NUMBER' parameter necessary for this feature. Good editors like
        (X)Emacs, vi, jed, pico and joe all do.

        After executing your code, %edit will return as output the code you
        typed in the editor (except when it was an existing file). This way
        you can reload the code in further invocations of %edit as a variable,
        via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of
        the output.

        Note that %edit is also available through the alias %ed.

        This is an example of creating a simple function inside the editor and
        then modifying it. First, start up the editor::

          In [1]: edit
          Editing... done. Executing edited code...
          Out[1]: 'def foo():\\n    print "foo() was defined in an editing
          session"\\n'

        We can then call the function foo()::

          In [2]: foo()
          foo() was defined in an editing session

        Now we edit foo.  IPython automatically loads the editor with the
        (temporary) file where foo() was previously defined::

          In [3]: edit foo
          Editing... done. Executing edited code...

        And if we call foo() again we get the modified version::

          In [4]: foo()
          foo() has now been changed!

        Here is an example of how to edit a code snippet successive
        times. First we call the editor::

          In [5]: edit
          Editing... done. Executing edited code...
          hello
          Out[5]: "print 'hello'\\n"

        Now we call it again with the previous output (stored in _)::

          In [6]: edit _
          Editing... done. Executing edited code...
          hello world
          Out[6]: "print 'hello world'\\n"

        Now we call it with the output #8 (stored in _8, also as Out[8])::

          In [7]: edit _8
          Editing... done. Executing edited code...
          hello again
          Out[7]: "print 'hello again'\\n"


        Changing the default editor hook:

        If you wish to write your own editor hook, you can put it in a
        configuration file which you load at startup time.  The default hook
        is defined in the IPython.core.hooks module, and you can use that as a
        starting example for further modifications.  That file also has
        general instructions on how to set a new hook for use once you've
        defined it."""
        opts,args = self.parse_options(parameter_s,'prxn:')

        # Resolve what to edit: a file name, a line number, and whether the
        # target is a temporary file whose contents should be returned.
        try:
            filename, lineno, is_temp = self._find_edit_target(self.shell,
                                                       args, opts, last_call)
        except MacroToEdit as e:
            # Macros have their own editing path (see _edit_macro).
            self._edit_macro(args, e.args[0])
            return
        except InteractivelyDefined as e:
            # The target was defined in the interactive session; re-target
            # the lookup at the corresponding input history entry.
            print("Editing In[%i]" % e.index)
            args = str(e.index)
            filename, lineno, is_temp = self._find_edit_target(self.shell,
                                                       args, opts, last_call)
        if filename is None:
            # nothing was found, warnings have already been issued,
            # just give up.
            return

        # Remember temp files across calls so re-editing one is still
        # treated as temporary.
        if is_temp:
            self._knowntemps.add(filename)
        elif (filename in self._knowntemps):
            is_temp = True


        # do actual editing here
        print('Editing...', end=' ')
        sys.stdout.flush()
        try:
            # Quote filenames that may have spaces in them
            if ' ' in filename:
                filename = "'%s'" % filename
            self.shell.hooks.editor(filename,lineno)
        except TryNext:
            warn('Could not open editor')
            return

        # XXX TODO: should this be generalized for all string vars?
        # For now, this is special-cased to blocks created by cpaste
        if args.strip() == 'pasted_block':
            with open(filename, 'r') as f:
                self.shell.user_ns['pasted_block'] = f.read()

        if 'x' in opts:  # -x prevents actual execution
            print()
        else:
            print('done. Executing edited code...')
            # Temporarily set __file__ for the executed code, restoring the
            # user namespace's previous value afterwards.
            with preserve_keys(self.shell.user_ns, '__file__'):
                if not is_temp:
                    self.shell.user_ns['__file__'] = filename
                if 'r' in opts:  # Untranslated IPython code
                    with open(filename, 'r') as f:
                        source = f.read()
                    self.shell.run_cell(source, store_history=False)
                else:
                    self.shell.safe_execfile(filename, self.shell.user_ns,
                                             self.shell.user_ns)

        if is_temp:
            # Return the edited text so the user can re-edit it via Out[N].
            try:
                return open(filename).read()
            except IOError as msg:
                if msg.filename == filename:
                    warn('File not found. Did you forget to save?')
                    return
                else:
                    self.shell.showtraceback()
@@ -1,1009 +1,1009 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Tools for inspecting Python objects. |
|
3 | 3 | |
|
4 | 4 | Uses syntax highlighting for presenting the various information elements. |
|
5 | 5 | |
|
6 | 6 | Similar in spirit to the inspect module, but all calls take a name argument to |
|
7 | 7 | reference the name under which an object is being read. |
|
8 | 8 | """ |
|
9 | 9 | |
|
10 | 10 | # Copyright (c) IPython Development Team. |
|
11 | 11 | # Distributed under the terms of the Modified BSD License. |
|
12 | 12 | |
|
13 | 13 | __all__ = ['Inspector','InspectColors'] |
|
14 | 14 | |
|
15 | 15 | # stdlib modules |
|
16 | 16 | import inspect |
|
17 | 17 | from inspect import signature |
|
18 | 18 | import linecache |
|
19 | 19 | import warnings |
|
20 | 20 | import os |
|
21 | 21 | from textwrap import dedent |
|
22 | 22 | import types |
|
23 | 23 | import io as stdlib_io |
|
24 | 24 | |
|
25 | 25 | try: |
|
26 | 26 | from itertools import izip_longest |
|
27 | 27 | except ImportError: |
|
28 | 28 | from itertools import zip_longest as izip_longest |
|
29 | 29 | |
|
30 | 30 | # IPython's own |
|
31 | 31 | from IPython.core import page |
|
32 | 32 | from IPython.lib.pretty import pretty |
|
33 | 33 | from IPython.testing.skipdoctest import skip_doctest |
|
34 | 34 | from IPython.utils import PyColorize |
|
35 | 35 | from IPython.utils import openpy |
|
36 | 36 | from IPython.utils import py3compat |
|
37 | 37 | from IPython.utils.dir2 import safe_hasattr |
|
38 | 38 | from IPython.utils.path import compress_user |
|
39 | 39 | from IPython.utils.text import indent |
|
40 | 40 | from IPython.utils.wildcard import list_namespace |
|
41 | 41 | from IPython.utils.coloransi import TermColors, ColorScheme, ColorSchemeTable |
|
42 |
from IPython.utils.py3compat import cast_unicode, |
|
|
42 | from IPython.utils.py3compat import cast_unicode, PY3 | |
|
43 | 43 | from IPython.utils.colorable import Colorable |
|
44 | 44 | |
|
45 | 45 | from pygments import highlight |
|
46 | 46 | from pygments.lexers import PythonLexer |
|
47 | 47 | from pygments.formatters import HtmlFormatter |
|
48 | 48 | |
|
def pylight(code):
    """Return *code* rendered as HTML with inline styles via pygments."""
    lexer = PythonLexer()
    formatter = HtmlFormatter(noclasses=True)
    return highlight(code, lexer, formatter)
|
51 | 51 | |
|
# builtin docstrings to ignore
# (docstrings identical to these are treated as "no documentation" since
# they come from the builtin types themselves, not the inspected object)
_func_call_docstring = types.FunctionType.__call__.__doc__
_object_init_docstring = object.__init__.__doc__
_builtin_type_docstrings = {
    inspect.getdoc(t) for t in (types.ModuleType, types.MethodType,
                                types.FunctionType, property)
}

# Types of builtin callables, used to recognize them without inspect.
_builtin_func_type = type(all)
_builtin_meth_type = type(str.upper)  # Bound methods have the same type as builtin functions
#****************************************************************************
# Builtin color schemes

Colors = TermColors  # just a shorthand

InspectColors = PyColorize.ANSICodeColors

#****************************************************************************
# Auxiliary functions and objects

# See the messaging spec for the definition of all these fields.  This list
# effectively defines the order of display
info_fields = ['type_name', 'base_class', 'string_form', 'namespace',
               'length', 'file', 'definition', 'docstring', 'source',
               'init_definition', 'class_docstring', 'init_docstring',
               'call_def', 'call_docstring',
               # These won't be printed but will be used to determine how to
               # format the object
               'ismagic', 'isalias', 'isclass', 'argspec', 'found', 'name'
               ]
|
82 | 82 | |
|
83 | 83 | |
|
def object_info(**kw):
    """Make an object info dict with all fields present.

    Every key from ``info_fields`` starts out as None; keyword arguments
    then override the fields that are actually known.
    """
    info = {field: None for field in info_fields}
    info.update(kw)
    return info
|
89 | 89 | |
|
90 | 90 | |
|
def get_encoding(obj):
    """Return the source-file encoding of the file defining *obj*.

    Returns None when *obj* has no source file, the file is a compiled
    extension (.so/.dll/.pyd), or the file is not on the filesystem.
    """
    source_path = find_file(obj)
    # Only a real text file on disk can carry an encoding declaration;
    # binary extension modules and missing files are skipped outright.
    readable = (source_path is not None
                and not source_path.endswith(('.so', '.dll', '.pyd'))
                and os.path.isfile(source_path))
    if not readable:
        return None
    # detect_encoding reads the PEP 263 coding cookie / BOM from the
    # first lines of the file, which must be opened in binary mode.
    with stdlib_io.open(source_path, 'rb') as stream:
        encoding, _lines = openpy.detect_encoding(stream.readline)
    return encoding
|
113 | 113 | |
|
114 | 114 | def getdoc(obj): |
|
115 | 115 | """Stable wrapper around inspect.getdoc. |
|
116 | 116 | |
|
117 | 117 | This can't crash because of attribute problems. |
|
118 | 118 | |
|
119 | 119 | It also attempts to call a getdoc() method on the given object. This |
|
120 | 120 | allows objects which provide their docstrings via non-standard mechanisms |
|
121 | 121 | (like Pyro proxies) to still be inspected by ipython's ? system. |
|
122 | 122 | """ |
|
123 | 123 | # Allow objects to offer customized documentation via a getdoc method: |
|
124 | 124 | try: |
|
125 | 125 | ds = obj.getdoc() |
|
126 | 126 | except Exception: |
|
127 | 127 | pass |
|
128 | 128 | else: |
|
129 | 129 | # if we get extra info, we add it to the normal docstring. |
|
130 |
if isinstance(ds, str |
|
|
130 | if isinstance(ds, str): | |
|
131 | 131 | return inspect.cleandoc(ds) |
|
132 | 132 | try: |
|
133 | 133 | docstr = inspect.getdoc(obj) |
|
134 | 134 | encoding = get_encoding(obj) |
|
135 | 135 | return py3compat.cast_unicode(docstr, encoding=encoding) |
|
136 | 136 | except Exception: |
|
137 | 137 | # Harden against an inspect failure, which can occur with |
|
138 | 138 | # extensions modules. |
|
139 | 139 | raise |
|
140 | 140 | return None |
|
141 | 141 | |
|
142 | 142 | |
|
def getsource(obj, oname=''):
    """Wrapper around inspect.getsource.

    This can be modified by other projects to provide customized source
    extraction.

    Parameters
    ----------
    obj : object
        an object whose source code we will attempt to extract
    oname : str
        (optional) a name under which the object is known

    Returns
    -------
    src : unicode or None

    """

    if isinstance(obj, property):
        # Properties have no single source; show the source (or repr) of
        # each defined accessor, prefixed with a "# oname.fget"-style header.
        sources = []
        for attrname in ['fget', 'fset', 'fdel']:
            fn = getattr(obj, attrname)
            if fn is not None:
                encoding = get_encoding(fn)
                oname_prefix = ('%s.' % oname) if oname else ''
                sources.append(cast_unicode(
                    ''.join(('# ', oname_prefix, attrname)),
                    encoding=encoding))
                if inspect.isfunction(fn):
                    # Recurse to pick up the accessor's own source.
                    sources.append(dedent(getsource(fn)))
                else:
                    # Default str/repr only prints function name,
                    # pretty.pretty prints module name too.
                    sources.append(cast_unicode(
                        '%s%s = %s\n' % (
                            oname_prefix, attrname, pretty(fn)),
                        encoding=encoding))
        if sources:
            return '\n'.join(sources)
        else:
            # Property with no accessors defined at all.
            return None

    else:
        # Get source for non-property objects.

        # Unwrap @functools.wraps-style decorator chains first.
        obj = _get_wrapped(obj)

        try:
            src = inspect.getsource(obj)
        except TypeError:
            # The object itself provided no meaningful source, try looking for
            # its class definition instead.
            # NOTE(review): if the hasattr check below were ever False, `src`
            # would be unbound at the return -- in practice every object has
            # __class__, so this path appears unreachable; confirm.
            if hasattr(obj, '__class__'):
                try:
                    src = inspect.getsource(obj.__class__)
                except TypeError:
                    return None

        encoding = get_encoding(obj)
        return cast_unicode(src, encoding=encoding)
|
204 | 204 | |
|
205 | 205 | |
|
def is_simple_callable(obj):
    """Return True for plain functions/methods and builtin callables."""
    if inspect.isfunction(obj) or inspect.ismethod(obj):
        return True
    return isinstance(obj, (_builtin_func_type, _builtin_meth_type))
|
210 | 210 | |
|
211 | 211 | |
|
def getargspec(obj):
    """Wrapper around :func:`inspect.getfullargspec` on Python 3, and
    :func:`inspect.getargspec` on Python 2.

    In addition to functions and methods, this can also handle objects with a
    ``__call__`` attribute.
    """
    # Callable instances are inspected through their __call__ method.
    if safe_hasattr(obj, '__call__') and not is_simple_callable(obj):
        obj = obj.__call__

    if PY3:
        return inspect.getfullargspec(obj)
    return inspect.getargspec(obj)
|
223 | 223 | |
|
224 | 224 | |
|
def format_argspec(argspec):
    """Format argspect, convenience wrapper around inspect's.

    This takes a dict instead of ordered arguments and calls
    inspect.format_argspec with the arguments in the necessary order.
    """
    # NOTE(review): inspect.formatargspec was deprecated in Python 3.5 and
    # removed in 3.11; this will need a replacement (e.g. inspect.signature)
    # on newer interpreters -- confirm the targeted Python versions.
    return inspect.formatargspec(argspec['args'], argspec['varargs'],
                                 argspec['varkw'], argspec['defaults'])
|
233 | 233 | |
|
234 | 234 | |
|
def call_tip(oinfo, format_call=True):
    """Extract call tip data from an oinfo dict.

    Parameters
    ----------
    oinfo : dict

    format_call : bool, optional
        If True, the call line is formatted and returned as a string. If not,
        a tuple of (name, argspec) is returned.

    Returns
    -------
    call_info : None, str or (str, dict) tuple.
        When format_call is True, the whole call information is formatted as
        a single string. Otherwise, the object's name and its argspec dict
        are returned. If no call information is available, None is returned.

    docstring : str or None
        The most relevant docstring for calling purposes is returned, if
        available. The priority is: call docstring for callable instances,
        then constructor docstring for classes, then main object's docstring
        otherwise (regular functions).
    """
    # Build the call line from the argspec, if one was collected.
    argspec = oinfo.get('argspec')
    if argspec is None:
        call_line = None
    else:
        # Callable objects will have 'self' as their first argument; prune
        # it out for clarity, since users do *not* pass it explicitly.
        try:
            leading_self = argspec['args'][0] == 'self'
        except (KeyError, IndexError):
            leading_self = False
        if leading_self:
            argspec['args'] = argspec['args'][1:]

        call_line = oinfo['name'] + format_argspec(argspec)

    # Docstring priority: call docstring, then constructor docstring,
    # then the main object's docstring (defaulting to '').
    for key in ('call_docstring', 'init_docstring'):
        doc = oinfo.get(key)
        if doc is not None:
            break
    else:
        doc = oinfo.get('docstring', '')

    return call_line, doc
|
286 | 286 | |
|
287 | 287 | |
|
def _get_wrapped(obj):
    """Get the original object if wrapped in one or more @decorators.

    Some objects automatically construct similar objects on any unrecognised
    attribute access (e.g. unittest.mock.call). To protect against infinite
    loops, this arbitrarily cuts off after 100 levels of ``__wrapped__``
    attribute access. --TK, Jan 2016
    """
    unwrapped = obj
    depth = 0
    while safe_hasattr(unwrapped, '__wrapped__'):
        unwrapped = unwrapped.__wrapped__
        depth += 1
        if depth > 100:
            # __wrapped__ is probably a lie; return the thing we started with
            return obj
    return unwrapped
|
305 | 305 | |
|
def find_file(obj):
    """Find the absolute path to the file where an object was defined.

    This is essentially a robust wrapper around `inspect.getabsfile`.

    Returns None if no file can be found.

    Parameters
    ----------
    obj : any Python object

    Returns
    -------
    fname : str
        The absolute path to the file where the object was defined.
    """
    obj = _get_wrapped(obj)

    fname = None
    try:
        fname = inspect.getabsfile(obj)
    except TypeError:
        # For an instance, the file that matters is where its class was
        # declared.
        if hasattr(obj, '__class__'):
            try:
                fname = inspect.getabsfile(obj.__class__)
            except TypeError:
                # Can happen for builtins
                pass
    except Exception:
        # Bug fix: this was a bare ``except:``, which also swallowed
        # SystemExit and KeyboardInterrupt.  Any other inspect failure is
        # still treated as "no file found", as before.
        pass
    return cast_unicode(fname)
|
339 | 339 | |
|
340 | 340 | |
|
def find_source_lines(obj):
    """Find the line number in a file where an object was defined.

    This is essentially a robust wrapper around `inspect.getsourcelines`.

    Returns None if no file can be found.

    Parameters
    ----------
    obj : any Python object

    Returns
    -------
    lineno : int
        The line number where the object definition starts.
    """
    obj = _get_wrapped(obj)

    try:
        try:
            lineno = inspect.getsourcelines(obj)[1]
        except TypeError:
            # For instances, try the class object like getsource() does
            if hasattr(obj, '__class__'):
                lineno = inspect.getsourcelines(obj.__class__)[1]
            else:
                lineno = None
    except Exception:
        # Bug fix: this was a bare ``except:``, which also swallowed
        # SystemExit and KeyboardInterrupt.  Other failures (OSError from
        # missing source, etc.) still yield None, as before.
        return None

    return lineno
|
372 | 372 | |
|
373 | 373 | class Inspector(Colorable): |
|
374 | 374 | |
|
    def __init__(self, color_table=InspectColors,
                 code_color_table=PyColorize.ANSICodeColors,
                 scheme='NoColor',
                 str_detail_level=0,
                 parent=None, config=None):
        """Create an object inspector.

        Parameters
        ----------
        color_table : color scheme table used for info headers
            (defaults to the module-level ``InspectColors``).
        code_color_table : color scheme table for source highlighting.
            NOTE(review): this parameter is not referenced anywhere in this
            initializer's body -- confirm whether it is still needed.
        scheme : str
            Name of the active color scheme, e.g. 'NoColor'.
        str_detail_level : int
            How much detail to show in string forms of objects.
        parent, config : traitlets Configurable plumbing, passed to the base
            class.
        """
        super(Inspector, self).__init__(parent=parent, config=config)
        self.color_table = color_table
        # The PyColorize parser performs the actual source colorization.
        self.parser = PyColorize.Parser(out='str', parent=self, style=scheme)
        # Expose the parser's formatter directly as self.format.
        self.format = self.parser.format
        self.str_detail_level = str_detail_level
        self.set_active_scheme(scheme)
|
386 | 386 | |
|
387 | 387 | def _getdef(self,obj,oname=''): |
|
388 | 388 | """Return the call signature for any callable object. |
|
389 | 389 | |
|
390 | 390 | If any exception is generated, None is returned instead and the |
|
391 | 391 | exception is suppressed.""" |
|
392 | 392 | try: |
|
393 | 393 | hdef = oname + str(signature(obj)) |
|
394 | 394 | return cast_unicode(hdef) |
|
395 | 395 | except: |
|
396 | 396 | return None |
|
397 | 397 | |
|
398 | 398 | def __head(self,h): |
|
399 | 399 | """Return a header string with proper colors.""" |
|
400 | 400 | return '%s%s%s' % (self.color_table.active_colors.header,h, |
|
401 | 401 | self.color_table.active_colors.normal) |
|
402 | 402 | |
|
403 | 403 | def set_active_scheme(self, scheme): |
|
404 | 404 | self.color_table.set_active_scheme(scheme) |
|
405 | 405 | self.parser.color_table.set_active_scheme(scheme) |
|
406 | 406 | |
|
407 | 407 | def noinfo(self, msg, oname): |
|
408 | 408 | """Generic message when no information is found.""" |
|
409 | 409 | print('No %s found' % msg, end=' ') |
|
410 | 410 | if oname: |
|
411 | 411 | print('for %s' % oname) |
|
412 | 412 | else: |
|
413 | 413 | print() |
|
414 | 414 | |
|
    def pdef(self, obj, oname=''):
        """Print the call signature for any callable object.

        If the object is a class, print the constructor information."""

        if not callable(obj):
            print('Object is not callable.')
            return

        header = ''

        if inspect.isclass(obj):
            # For classes, the interesting signature is the constructor's.
            header = self.__head('Class constructor information:\n')
        elif (not py3compat.PY3) and type(obj) is types.InstanceType:
            # Python 2 old-style instance: inspect its __call__ instead.
            # (Safe on Python 3: short-circuit prevents the InstanceType
            # attribute access there.)
            obj = obj.__call__

        output = self._getdef(obj,oname)
        if output is None:
            self.noinfo('definition header',oname)
        else:
            print(header,self.format(output), end=' ')
|
436 | 436 | |
|
437 | 437 | # In Python 3, all classes are new-style, so they all have __init__. |
|
    @skip_doctest
    def pdoc(self, obj, oname='', formatter=None):
        """Print the docstring for any object.

        Optional:
        -formatter: a function to run the docstring through for specially
        formatted docstrings.

        Examples
        --------

        In [1]: class NoInit:
           ...:     pass

        In [2]: class NoDoc:
           ...:     def __init__(self):
           ...:         pass

        In [3]: %pdoc NoDoc
        No documentation found for NoDoc

        In [4]: %pdoc NoInit
        No documentation found for NoInit

        In [5]: obj = NoInit()

        In [6]: %pdoc obj
        No documentation found for obj

        In [5]: obj2 = NoDoc()

        In [6]: %pdoc obj2
        No documentation found for obj2
        """

        head = self.__head # For convenience
        lines = []
        ds = getdoc(obj)
        if formatter:
            # The formatter may return a mimebundle-style dict; fall back to
            # the unformatted docstring if no plain-text entry is present.
            ds = formatter(ds).get('plain/text', ds)
        if ds:
            lines.append(head("Class docstring:"))
            lines.append(indent(ds))
        # For classes, also show the constructor docstring; for other
        # callables, show the __call__ docstring instead.
        if inspect.isclass(obj) and hasattr(obj, '__init__'):
            init_ds = getdoc(obj.__init__)
            if init_ds is not None:
                lines.append(head("Init docstring:"))
                lines.append(indent(init_ds))
        elif hasattr(obj,'__call__'):
            call_ds = getdoc(obj.__call__)
            if call_ds:
                lines.append(head("Call docstring:"))
                lines.append(indent(call_ds))

        if not lines:
            self.noinfo('documentation',oname)
        else:
            # Send everything through the pager in one shot.
            page.page('\n'.join(lines))
|
496 | 496 | |
|
497 | 497 | def psource(self, obj, oname=''): |
|
498 | 498 | """Print the source code for an object.""" |
|
499 | 499 | |
|
500 | 500 | # Flush the source cache because inspect can return out-of-date source |
|
501 | 501 | linecache.checkcache() |
|
502 | 502 | try: |
|
503 | 503 | src = getsource(obj, oname=oname) |
|
504 | 504 | except Exception: |
|
505 | 505 | src = None |
|
506 | 506 | |
|
507 | 507 | if src is None: |
|
508 | 508 | self.noinfo('source', oname) |
|
509 | 509 | else: |
|
510 | 510 | page.page(self.format(src)) |
|
511 | 511 | |
|
512 | 512 | def pfile(self, obj, oname=''): |
|
513 | 513 | """Show the whole file where an object was defined.""" |
|
514 | 514 | |
|
515 | 515 | lineno = find_source_lines(obj) |
|
516 | 516 | if lineno is None: |
|
517 | 517 | self.noinfo('file', oname) |
|
518 | 518 | return |
|
519 | 519 | |
|
520 | 520 | ofile = find_file(obj) |
|
521 | 521 | # run contents of file through pager starting at line where the object |
|
522 | 522 | # is defined, as long as the file isn't binary and is actually on the |
|
523 | 523 | # filesystem. |
|
524 | 524 | if ofile.endswith(('.so', '.dll', '.pyd')): |
|
525 | 525 | print('File %r is binary, not printing.' % ofile) |
|
526 | 526 | elif not os.path.isfile(ofile): |
|
527 | 527 | print('File %r does not exist, not printing.' % ofile) |
|
528 | 528 | else: |
|
529 | 529 | # Print only text files, not extension binaries. Note that |
|
530 | 530 | # getsourcelines returns lineno with 1-offset and page() uses |
|
531 | 531 | # 0-offset, so we must adjust. |
|
532 | 532 | page.page(self.format(openpy.read_py_file(ofile, skip_encoding_cookie=False)), lineno - 1) |
|
533 | 533 | |
|
534 | 534 | def _format_fields(self, fields, title_width=0): |
|
535 | 535 | """Formats a list of fields for display. |
|
536 | 536 | |
|
537 | 537 | Parameters |
|
538 | 538 | ---------- |
|
539 | 539 | fields : list |
|
540 | 540 | A list of 2-tuples: (field_title, field_content) |
|
541 | 541 | title_width : int |
|
542 | 542 | How many characters to pad titles to. Default to longest title. |
|
543 | 543 | """ |
|
544 | 544 | out = [] |
|
545 | 545 | header = self.__head |
|
546 | 546 | if title_width == 0: |
|
547 | 547 | title_width = max(len(title) + 2 for title, _ in fields) |
|
548 | 548 | for title, content in fields: |
|
549 | 549 | if len(content.splitlines()) > 1: |
|
550 | 550 | title = header(title + ':') + '\n' |
|
551 | 551 | else: |
|
552 | 552 | title = header((title + ':').ljust(title_width)) |
|
553 | 553 | out.append(cast_unicode(title) + cast_unicode(content)) |
|
554 | 554 | return "\n".join(out) |
|
555 | 555 | |
|
556 | 556 | def _mime_format(self, text, formatter=None): |
|
557 | 557 | """Return a mime bundle representation of the input text. |
|
558 | 558 | |
|
559 | 559 | - if `formatter` is None, the returned mime bundle has |
|
560 | 560 | a `text/plain` field, with the input text. |
|
561 | 561 | a `text/html` field with a `<pre>` tag containing the input text. |
|
562 | 562 | |
|
563 | 563 | - if `formatter` is not None, it must be a callable transforming the |
|
564 | 564 | input text into a mime bundle. Default values for `text/plain` and |
|
565 | 565 | `text/html` representations are the ones described above. |
|
566 | 566 | |
|
567 | 567 | Note: |
|
568 | 568 | |
|
569 | 569 | Formatters returning strings are supported but this behavior is deprecated. |
|
570 | 570 | |
|
571 | 571 | """ |
|
572 | 572 | text = cast_unicode(text) |
|
573 | 573 | defaults = { |
|
574 | 574 | 'text/plain': text, |
|
575 | 575 | 'text/html': '<pre>' + text + '</pre>' |
|
576 | 576 | } |
|
577 | 577 | |
|
578 | 578 | if formatter is None: |
|
579 | 579 | return defaults |
|
580 | 580 | else: |
|
581 | 581 | formatted = formatter(text) |
|
582 | 582 | |
|
583 | 583 | if not isinstance(formatted, dict): |
|
584 | 584 | # Handle the deprecated behavior of a formatter returning |
|
585 | 585 | # a string instead of a mime bundle. |
|
586 | 586 | return { |
|
587 | 587 | 'text/plain': formatted, |
|
588 | 588 | 'text/html': '<pre>' + formatted + '</pre>' |
|
589 | 589 | } |
|
590 | 590 | |
|
591 | 591 | else: |
|
592 | 592 | return dict(defaults, **formatted) |
|
593 | 593 | |
|
594 | 594 | |
|
595 | 595 | def format_mime(self, bundle): |
|
596 | 596 | |
|
597 | 597 | text_plain = bundle['text/plain'] |
|
598 | 598 | |
|
599 | 599 | text = '' |
|
600 | 600 | heads, bodies = list(zip(*text_plain)) |
|
601 | 601 | _len = max(len(h) for h in heads) |
|
602 | 602 | |
|
603 | 603 | for head, body in zip(heads, bodies): |
|
604 | 604 | body = body.strip('\n') |
|
605 | 605 | delim = '\n' if '\n' in body else ' ' |
|
606 | 606 | text += self.__head(head+':') + (_len - len(head))*' ' +delim + body +'\n' |
|
607 | 607 | |
|
608 | 608 | bundle['text/plain'] = text |
|
609 | 609 | return bundle |
|
610 | 610 | |
|
611 | 611 | def _get_info(self, obj, oname='', formatter=None, info=None, detail_level=0): |
|
612 | 612 | """Retrieve an info dict and format it.""" |
|
613 | 613 | |
|
614 | 614 | info = self._info(obj, oname=oname, info=info, detail_level=detail_level) |
|
615 | 615 | |
|
616 | 616 | _mime = { |
|
617 | 617 | 'text/plain': [], |
|
618 | 618 | 'text/html': '', |
|
619 | 619 | } |
|
620 | 620 | |
|
621 | 621 | def append_field(bundle, title, key, formatter=None): |
|
622 | 622 | field = info[key] |
|
623 | 623 | if field is not None: |
|
624 | 624 | formatted_field = self._mime_format(field, formatter) |
|
625 | 625 | bundle['text/plain'].append((title, formatted_field['text/plain'])) |
|
626 | 626 | bundle['text/html'] += '<h1>' + title + '</h1>\n' + formatted_field['text/html'] + '\n' |
|
627 | 627 | |
|
628 | 628 | def code_formatter(text): |
|
629 | 629 | return { |
|
630 | 630 | 'text/plain': self.format(text), |
|
631 | 631 | 'text/html': pylight(text) |
|
632 | 632 | } |
|
633 | 633 | |
|
634 | 634 | if info['isalias']: |
|
635 | 635 | append_field(_mime, 'Repr', 'string_form') |
|
636 | 636 | |
|
637 | 637 | elif info['ismagic']: |
|
638 | 638 | if detail_level > 0: |
|
639 | 639 | append_field(_mime, 'Source', 'source', code_formatter) |
|
640 | 640 | else: |
|
641 | 641 | append_field(_mime, 'Docstring', 'docstring', formatter) |
|
642 | 642 | append_field(_mime, 'File', 'file') |
|
643 | 643 | |
|
644 | 644 | elif info['isclass'] or is_simple_callable(obj): |
|
645 | 645 | # Functions, methods, classes |
|
646 | 646 | append_field(_mime, 'Signature', 'definition', code_formatter) |
|
647 | 647 | append_field(_mime, 'Init signature', 'init_definition', code_formatter) |
|
648 | 648 | if detail_level > 0 and info['source']: |
|
649 | 649 | append_field(_mime, 'Source', 'source', code_formatter) |
|
650 | 650 | else: |
|
651 | 651 | append_field(_mime, 'Docstring', 'docstring', formatter) |
|
652 | 652 | append_field(_mime, 'Init docstring', 'init_docstring', formatter) |
|
653 | 653 | |
|
654 | 654 | append_field(_mime, 'File', 'file') |
|
655 | 655 | append_field(_mime, 'Type', 'type_name') |
|
656 | 656 | |
|
657 | 657 | else: |
|
658 | 658 | # General Python objects |
|
659 | 659 | append_field(_mime, 'Signature', 'definition', code_formatter) |
|
660 | 660 | append_field(_mime, 'Call signature', 'call_def', code_formatter) |
|
661 | 661 | |
|
662 | 662 | append_field(_mime, 'Type', 'type_name') |
|
663 | 663 | |
|
664 | 664 | # Base class for old-style instances |
|
665 | 665 | if (not py3compat.PY3) and isinstance(obj, types.InstanceType) and info['base_class']: |
|
666 | 666 | append_field(_mime, 'Base Class', 'base_class') |
|
667 | 667 | |
|
668 | 668 | append_field(_mime, 'String form', 'string_form') |
|
669 | 669 | |
|
670 | 670 | # Namespace |
|
671 | 671 | if info['namespace'] != 'Interactive': |
|
672 | 672 | append_field(_mime, 'Namespace', 'namespace') |
|
673 | 673 | |
|
674 | 674 | append_field(_mime, 'Length', 'length') |
|
675 | 675 | append_field(_mime, 'File', 'file') |
|
676 | 676 | |
|
677 | 677 | # Source or docstring, depending on detail level and whether |
|
678 | 678 | # source found. |
|
679 | 679 | if detail_level > 0: |
|
680 | 680 | append_field(_mime, 'Source', 'source', code_formatter) |
|
681 | 681 | else: |
|
682 | 682 | append_field(_mime, 'Docstring', 'docstring', formatter) |
|
683 | 683 | |
|
684 | 684 | append_field(_mime, 'Class docstring', 'class_docstring', formatter) |
|
685 | 685 | append_field(_mime, 'Init docstring', 'init_docstring', formatter) |
|
686 | 686 | append_field(_mime, 'Call docstring', 'call_docstring', formatter) |
|
687 | 687 | |
|
688 | 688 | |
|
689 | 689 | return self.format_mime(_mime) |
|
690 | 690 | |
|
691 | 691 | def pinfo(self, obj, oname='', formatter=None, info=None, detail_level=0, enable_html_pager=True): |
|
692 | 692 | """Show detailed information about an object. |
|
693 | 693 | |
|
694 | 694 | Optional arguments: |
|
695 | 695 | |
|
696 | 696 | - oname: name of the variable pointing to the object. |
|
697 | 697 | |
|
698 | 698 | - formatter: callable (optional) |
|
699 | 699 | A special formatter for docstrings. |
|
700 | 700 | |
|
701 | 701 | The formatter is a callable that takes a string as an input |
|
702 | 702 | and returns either a formatted string or a mime type bundle |
|
703 | 703 | in the form of a dictionnary. |
|
704 | 704 | |
|
705 | 705 | Although the support of custom formatter returning a string |
|
706 | 706 | instead of a mime type bundle is deprecated. |
|
707 | 707 | |
|
708 | 708 | - info: a structure with some information fields which may have been |
|
709 | 709 | precomputed already. |
|
710 | 710 | |
|
711 | 711 | - detail_level: if set to 1, more information is given. |
|
712 | 712 | """ |
|
713 | 713 | info = self._get_info(obj, oname, formatter, info, detail_level) |
|
714 | 714 | if not enable_html_pager: |
|
715 | 715 | del info['text/html'] |
|
716 | 716 | page.page(info) |
|
717 | 717 | |
|
718 | 718 | def info(self, obj, oname='', formatter=None, info=None, detail_level=0): |
|
719 | 719 | """DEPRECATED. Compute a dict with detailed information about an object. |
|
720 | 720 | """ |
|
721 | 721 | if formatter is not None: |
|
722 | 722 | warnings.warn('The `formatter` keyword argument to `Inspector.info`' |
|
723 | 723 | 'is deprecated as of IPython 5.0 and will have no effects.', |
|
724 | 724 | DeprecationWarning, stacklevel=2) |
|
725 | 725 | return self._info(obj, oname=oname, info=info, detail_level=detail_level) |
|
726 | 726 | |
|
727 | 727 | def _info(self, obj, oname='', info=None, detail_level=0): |
|
728 | 728 | """Compute a dict with detailed information about an object. |
|
729 | 729 | |
|
730 | 730 | Optional arguments: |
|
731 | 731 | |
|
732 | 732 | - oname: name of the variable pointing to the object. |
|
733 | 733 | |
|
734 | 734 | - info: a structure with some information fields which may have been |
|
735 | 735 | precomputed already. |
|
736 | 736 | |
|
737 | 737 | - detail_level: if set to 1, more information is given. |
|
738 | 738 | """ |
|
739 | 739 | |
|
740 | 740 | obj_type = type(obj) |
|
741 | 741 | |
|
742 | 742 | if info is None: |
|
743 | 743 | ismagic = 0 |
|
744 | 744 | isalias = 0 |
|
745 | 745 | ospace = '' |
|
746 | 746 | else: |
|
747 | 747 | ismagic = info.ismagic |
|
748 | 748 | isalias = info.isalias |
|
749 | 749 | ospace = info.namespace |
|
750 | 750 | |
|
751 | 751 | # Get docstring, special-casing aliases: |
|
752 | 752 | if isalias: |
|
753 | 753 | if not callable(obj): |
|
754 | 754 | try: |
|
755 | 755 | ds = "Alias to the system command:\n %s" % obj[1] |
|
756 | 756 | except: |
|
757 | 757 | ds = "Alias: " + str(obj) |
|
758 | 758 | else: |
|
759 | 759 | ds = "Alias to " + str(obj) |
|
760 | 760 | if obj.__doc__: |
|
761 | 761 | ds += "\nDocstring:\n" + obj.__doc__ |
|
762 | 762 | else: |
|
763 | 763 | ds = getdoc(obj) |
|
764 | 764 | if ds is None: |
|
765 | 765 | ds = '<no docstring>' |
|
766 | 766 | |
|
767 | 767 | # store output in a dict, we initialize it here and fill it as we go |
|
768 | 768 | out = dict(name=oname, found=True, isalias=isalias, ismagic=ismagic) |
|
769 | 769 | |
|
770 | 770 | string_max = 200 # max size of strings to show (snipped if longer) |
|
771 | 771 | shalf = int((string_max - 5) / 2) |
|
772 | 772 | |
|
773 | 773 | if ismagic: |
|
774 | 774 | obj_type_name = 'Magic function' |
|
775 | 775 | elif isalias: |
|
776 | 776 | obj_type_name = 'System alias' |
|
777 | 777 | else: |
|
778 | 778 | obj_type_name = obj_type.__name__ |
|
779 | 779 | out['type_name'] = obj_type_name |
|
780 | 780 | |
|
781 | 781 | try: |
|
782 | 782 | bclass = obj.__class__ |
|
783 | 783 | out['base_class'] = str(bclass) |
|
784 | 784 | except: pass |
|
785 | 785 | |
|
786 | 786 | # String form, but snip if too long in ? form (full in ??) |
|
787 | 787 | if detail_level >= self.str_detail_level: |
|
788 | 788 | try: |
|
789 | 789 | ostr = str(obj) |
|
790 | 790 | str_head = 'string_form' |
|
791 | 791 | if not detail_level and len(ostr)>string_max: |
|
792 | 792 | ostr = ostr[:shalf] + ' <...> ' + ostr[-shalf:] |
|
793 | 793 | ostr = ("\n" + " " * len(str_head.expandtabs())).\ |
|
794 | 794 | join(q.strip() for q in ostr.split("\n")) |
|
795 | 795 | out[str_head] = ostr |
|
796 | 796 | except: |
|
797 | 797 | pass |
|
798 | 798 | |
|
799 | 799 | if ospace: |
|
800 | 800 | out['namespace'] = ospace |
|
801 | 801 | |
|
802 | 802 | # Length (for strings and lists) |
|
803 | 803 | try: |
|
804 | 804 | out['length'] = str(len(obj)) |
|
805 | 805 | except: pass |
|
806 | 806 | |
|
807 | 807 | # Filename where object was defined |
|
808 | 808 | binary_file = False |
|
809 | 809 | fname = find_file(obj) |
|
810 | 810 | if fname is None: |
|
811 | 811 | # if anything goes wrong, we don't want to show source, so it's as |
|
812 | 812 | # if the file was binary |
|
813 | 813 | binary_file = True |
|
814 | 814 | else: |
|
815 | 815 | if fname.endswith(('.so', '.dll', '.pyd')): |
|
816 | 816 | binary_file = True |
|
817 | 817 | elif fname.endswith('<string>'): |
|
818 | 818 | fname = 'Dynamically generated function. No source code available.' |
|
819 | 819 | out['file'] = compress_user(fname) |
|
820 | 820 | |
|
821 | 821 | # Original source code for a callable, class or property. |
|
822 | 822 | if detail_level: |
|
823 | 823 | # Flush the source cache because inspect can return out-of-date |
|
824 | 824 | # source |
|
825 | 825 | linecache.checkcache() |
|
826 | 826 | try: |
|
827 | 827 | if isinstance(obj, property) or not binary_file: |
|
828 | 828 | src = getsource(obj, oname) |
|
829 | 829 | if src is not None: |
|
830 | 830 | src = src.rstrip() |
|
831 | 831 | out['source'] = src |
|
832 | 832 | |
|
833 | 833 | except Exception: |
|
834 | 834 | pass |
|
835 | 835 | |
|
836 | 836 | # Add docstring only if no source is to be shown (avoid repetitions). |
|
837 | 837 | if ds and out.get('source', None) is None: |
|
838 | 838 | out['docstring'] = ds |
|
839 | 839 | |
|
840 | 840 | # Constructor docstring for classes |
|
841 | 841 | if inspect.isclass(obj): |
|
842 | 842 | out['isclass'] = True |
|
843 | 843 | |
|
844 | 844 | # get the init signature: |
|
845 | 845 | try: |
|
846 | 846 | init_def = self._getdef(obj, oname) |
|
847 | 847 | except AttributeError: |
|
848 | 848 | init_def = None |
|
849 | 849 | |
|
850 | 850 | # get the __init__ docstring |
|
851 | 851 | try: |
|
852 | 852 | obj_init = obj.__init__ |
|
853 | 853 | except AttributeError: |
|
854 | 854 | init_ds = None |
|
855 | 855 | else: |
|
856 | 856 | if init_def is None: |
|
857 | 857 | # Get signature from init if top-level sig failed. |
|
858 | 858 | # Can happen for built-in types (list, etc.). |
|
859 | 859 | try: |
|
860 | 860 | init_def = self._getdef(obj_init, oname) |
|
861 | 861 | except AttributeError: |
|
862 | 862 | pass |
|
863 | 863 | init_ds = getdoc(obj_init) |
|
864 | 864 | # Skip Python's auto-generated docstrings |
|
865 | 865 | if init_ds == _object_init_docstring: |
|
866 | 866 | init_ds = None |
|
867 | 867 | |
|
868 | 868 | if init_def: |
|
869 | 869 | out['init_definition'] = init_def |
|
870 | 870 | |
|
871 | 871 | if init_ds: |
|
872 | 872 | out['init_docstring'] = init_ds |
|
873 | 873 | |
|
874 | 874 | # and class docstring for instances: |
|
875 | 875 | else: |
|
876 | 876 | # reconstruct the function definition and print it: |
|
877 | 877 | defln = self._getdef(obj, oname) |
|
878 | 878 | if defln: |
|
879 | 879 | out['definition'] = defln |
|
880 | 880 | |
|
881 | 881 | # First, check whether the instance docstring is identical to the |
|
882 | 882 | # class one, and print it separately if they don't coincide. In |
|
883 | 883 | # most cases they will, but it's nice to print all the info for |
|
884 | 884 | # objects which use instance-customized docstrings. |
|
885 | 885 | if ds: |
|
886 | 886 | try: |
|
887 | 887 | cls = getattr(obj,'__class__') |
|
888 | 888 | except: |
|
889 | 889 | class_ds = None |
|
890 | 890 | else: |
|
891 | 891 | class_ds = getdoc(cls) |
|
892 | 892 | # Skip Python's auto-generated docstrings |
|
893 | 893 | if class_ds in _builtin_type_docstrings: |
|
894 | 894 | class_ds = None |
|
895 | 895 | if class_ds and ds != class_ds: |
|
896 | 896 | out['class_docstring'] = class_ds |
|
897 | 897 | |
|
898 | 898 | # Next, try to show constructor docstrings |
|
899 | 899 | try: |
|
900 | 900 | init_ds = getdoc(obj.__init__) |
|
901 | 901 | # Skip Python's auto-generated docstrings |
|
902 | 902 | if init_ds == _object_init_docstring: |
|
903 | 903 | init_ds = None |
|
904 | 904 | except AttributeError: |
|
905 | 905 | init_ds = None |
|
906 | 906 | if init_ds: |
|
907 | 907 | out['init_docstring'] = init_ds |
|
908 | 908 | |
|
909 | 909 | # Call form docstring for callable instances |
|
910 | 910 | if safe_hasattr(obj, '__call__') and not is_simple_callable(obj): |
|
911 | 911 | call_def = self._getdef(obj.__call__, oname) |
|
912 | 912 | if call_def and (call_def != out.get('definition')): |
|
913 | 913 | # it may never be the case that call def and definition differ, |
|
914 | 914 | # but don't include the same signature twice |
|
915 | 915 | out['call_def'] = call_def |
|
916 | 916 | call_ds = getdoc(obj.__call__) |
|
917 | 917 | # Skip Python's auto-generated docstrings |
|
918 | 918 | if call_ds == _func_call_docstring: |
|
919 | 919 | call_ds = None |
|
920 | 920 | if call_ds: |
|
921 | 921 | out['call_docstring'] = call_ds |
|
922 | 922 | |
|
923 | 923 | # Compute the object's argspec as a callable. The key is to decide |
|
924 | 924 | # whether to pull it from the object itself, from its __init__ or |
|
925 | 925 | # from its __call__ method. |
|
926 | 926 | |
|
927 | 927 | if inspect.isclass(obj): |
|
928 | 928 | # Old-style classes need not have an __init__ |
|
929 | 929 | callable_obj = getattr(obj, "__init__", None) |
|
930 | 930 | elif callable(obj): |
|
931 | 931 | callable_obj = obj |
|
932 | 932 | else: |
|
933 | 933 | callable_obj = None |
|
934 | 934 | |
|
935 | 935 | if callable_obj is not None: |
|
936 | 936 | try: |
|
937 | 937 | argspec = getargspec(callable_obj) |
|
938 | 938 | except (TypeError, AttributeError): |
|
939 | 939 | # For extensions/builtins we can't retrieve the argspec |
|
940 | 940 | pass |
|
941 | 941 | else: |
|
942 | 942 | # named tuples' _asdict() method returns an OrderedDict, but we |
|
943 | 943 | # we want a normal |
|
944 | 944 | out['argspec'] = argspec_dict = dict(argspec._asdict()) |
|
945 | 945 | # We called this varkw before argspec became a named tuple. |
|
946 | 946 | # With getfullargspec it's also called varkw. |
|
947 | 947 | if 'varkw' not in argspec_dict: |
|
948 | 948 | argspec_dict['varkw'] = argspec_dict.pop('keywords') |
|
949 | 949 | |
|
950 | 950 | return object_info(**out) |
|
951 | 951 | |
|
952 | 952 | def psearch(self,pattern,ns_table,ns_search=[], |
|
953 | 953 | ignore_case=False,show_all=False): |
|
954 | 954 | """Search namespaces with wildcards for objects. |
|
955 | 955 | |
|
956 | 956 | Arguments: |
|
957 | 957 | |
|
958 | 958 | - pattern: string containing shell-like wildcards to use in namespace |
|
959 | 959 | searches and optionally a type specification to narrow the search to |
|
960 | 960 | objects of that type. |
|
961 | 961 | |
|
962 | 962 | - ns_table: dict of name->namespaces for search. |
|
963 | 963 | |
|
964 | 964 | Optional arguments: |
|
965 | 965 | |
|
966 | 966 | - ns_search: list of namespace names to include in search. |
|
967 | 967 | |
|
968 | 968 | - ignore_case(False): make the search case-insensitive. |
|
969 | 969 | |
|
970 | 970 | - show_all(False): show all names, including those starting with |
|
971 | 971 | underscores. |
|
972 | 972 | """ |
|
973 | 973 | #print 'ps pattern:<%r>' % pattern # dbg |
|
974 | 974 | |
|
975 | 975 | # defaults |
|
976 | 976 | type_pattern = 'all' |
|
977 | 977 | filter = '' |
|
978 | 978 | |
|
979 | 979 | cmds = pattern.split() |
|
980 | 980 | len_cmds = len(cmds) |
|
981 | 981 | if len_cmds == 1: |
|
982 | 982 | # Only filter pattern given |
|
983 | 983 | filter = cmds[0] |
|
984 | 984 | elif len_cmds == 2: |
|
985 | 985 | # Both filter and type specified |
|
986 | 986 | filter,type_pattern = cmds |
|
987 | 987 | else: |
|
988 | 988 | raise ValueError('invalid argument string for psearch: <%s>' % |
|
989 | 989 | pattern) |
|
990 | 990 | |
|
991 | 991 | # filter search namespaces |
|
992 | 992 | for name in ns_search: |
|
993 | 993 | if name not in ns_table: |
|
994 | 994 | raise ValueError('invalid namespace <%s>. Valid names: %s' % |
|
995 | 995 | (name,ns_table.keys())) |
|
996 | 996 | |
|
997 | 997 | #print 'type_pattern:',type_pattern # dbg |
|
998 | 998 | search_result, namespaces_seen = set(), set() |
|
999 | 999 | for ns_name in ns_search: |
|
1000 | 1000 | ns = ns_table[ns_name] |
|
1001 | 1001 | # Normally, locals and globals are the same, so we just check one. |
|
1002 | 1002 | if id(ns) in namespaces_seen: |
|
1003 | 1003 | continue |
|
1004 | 1004 | namespaces_seen.add(id(ns)) |
|
1005 | 1005 | tmp_res = list_namespace(ns, type_pattern, filter, |
|
1006 | 1006 | ignore_case=ignore_case, show_all=show_all) |
|
1007 | 1007 | search_result.update(tmp_res) |
|
1008 | 1008 | |
|
1009 | 1009 | page.page('\n'.join(sorted(search_result))) |
@@ -1,410 +1,410 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Pylab (matplotlib) support utilities.""" |
|
3 | 3 | |
|
4 | 4 | # Copyright (c) IPython Development Team. |
|
5 | 5 | # Distributed under the terms of the Modified BSD License. |
|
6 | 6 | |
|
7 | 7 | from io import BytesIO |
|
8 | 8 | |
|
9 | 9 | from IPython.core.display import _pngxy |
|
10 | 10 | from IPython.utils.decorators import flag_calls |
|
11 | 11 | from IPython.utils import py3compat |
|
12 | 12 | |
|
13 | 13 | # If user specifies a GUI, that dictates the backend, otherwise we read the |
|
14 | 14 | # user's mpl default from the mpl rc structure |
|
15 | 15 | backends = {'tk': 'TkAgg', |
|
16 | 16 | 'gtk': 'GTKAgg', |
|
17 | 17 | 'gtk3': 'GTK3Agg', |
|
18 | 18 | 'wx': 'WXAgg', |
|
19 | 19 | 'qt': 'Qt4Agg', # qt3 not supported |
|
20 | 20 | 'qt4': 'Qt4Agg', |
|
21 | 21 | 'qt5': 'Qt5Agg', |
|
22 | 22 | 'osx': 'MacOSX', |
|
23 | 23 | 'nbagg': 'nbAgg', |
|
24 | 24 | 'notebook': 'nbAgg', |
|
25 | 25 | 'agg': 'agg', |
|
26 | 26 | 'inline': 'module://ipykernel.pylab.backend_inline', |
|
27 | 27 | 'ipympl': 'module://ipympl.backend_nbagg', |
|
28 | 28 | } |
|
29 | 29 | |
|
30 | 30 | # We also need a reverse backends2guis mapping that will properly choose which |
|
31 | 31 | # GUI support to activate based on the desired matplotlib backend. For the |
|
32 | 32 | # most part it's just a reverse of the above dict, but we also need to add a |
|
33 | 33 | # few others that map to the same GUI manually: |
|
34 | 34 | backend2gui = dict(zip(backends.values(), backends.keys())) |
|
35 | 35 | # Our tests expect backend2gui to just return 'qt' |
|
36 | 36 | backend2gui['Qt4Agg'] = 'qt' |
|
37 | 37 | # In the reverse mapping, there are a few extra valid matplotlib backends that |
|
38 | 38 | # map to the same GUI support |
|
39 | 39 | backend2gui['GTK'] = backend2gui['GTKCairo'] = 'gtk' |
|
40 | 40 | backend2gui['GTK3Cairo'] = 'gtk3' |
|
41 | 41 | backend2gui['WX'] = 'wx' |
|
42 | 42 | backend2gui['CocoaAgg'] = 'osx' |
|
43 | 43 | # And some backends that don't need GUI integration |
|
44 | 44 | del backend2gui['nbAgg'] |
|
45 | 45 | del backend2gui['agg'] |
|
46 | 46 | del backend2gui['module://ipykernel.pylab.backend_inline'] |
|
47 | 47 | |
|
48 | 48 | #----------------------------------------------------------------------------- |
|
49 | 49 | # Matplotlib utilities |
|
50 | 50 | #----------------------------------------------------------------------------- |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | def getfigs(*fig_nums): |
|
54 | 54 | """Get a list of matplotlib figures by figure numbers. |
|
55 | 55 | |
|
56 | 56 | If no arguments are given, all available figures are returned. If the |
|
57 | 57 | argument list contains references to invalid figures, a warning is printed |
|
58 | 58 | but the function continues pasting further figures. |
|
59 | 59 | |
|
60 | 60 | Parameters |
|
61 | 61 | ---------- |
|
62 | 62 | figs : tuple |
|
63 | 63 | A tuple of ints giving the figure numbers of the figures to return. |
|
64 | 64 | """ |
|
65 | 65 | from matplotlib._pylab_helpers import Gcf |
|
66 | 66 | if not fig_nums: |
|
67 | 67 | fig_managers = Gcf.get_all_fig_managers() |
|
68 | 68 | return [fm.canvas.figure for fm in fig_managers] |
|
69 | 69 | else: |
|
70 | 70 | figs = [] |
|
71 | 71 | for num in fig_nums: |
|
72 | 72 | f = Gcf.figs.get(num) |
|
73 | 73 | if f is None: |
|
74 | 74 | print('Warning: figure %s not available.' % num) |
|
75 | 75 | else: |
|
76 | 76 | figs.append(f.canvas.figure) |
|
77 | 77 | return figs |
|
78 | 78 | |
|
79 | 79 | |
|
80 | 80 | def figsize(sizex, sizey): |
|
81 | 81 | """Set the default figure size to be [sizex, sizey]. |
|
82 | 82 | |
|
83 | 83 | This is just an easy to remember, convenience wrapper that sets:: |
|
84 | 84 | |
|
85 | 85 | matplotlib.rcParams['figure.figsize'] = [sizex, sizey] |
|
86 | 86 | """ |
|
87 | 87 | import matplotlib |
|
88 | 88 | matplotlib.rcParams['figure.figsize'] = [sizex, sizey] |
|
89 | 89 | |
|
90 | 90 | |
|
91 | 91 | def print_figure(fig, fmt='png', bbox_inches='tight', **kwargs): |
|
92 | 92 | """Print a figure to an image, and return the resulting file data |
|
93 | 93 | |
|
94 | 94 | Returned data will be bytes unless ``fmt='svg'``, |
|
95 | 95 | in which case it will be unicode. |
|
96 | 96 | |
|
97 | 97 | Any keyword args are passed to fig.canvas.print_figure, |
|
98 | 98 | such as ``quality`` or ``bbox_inches``. |
|
99 | 99 | """ |
|
100 | 100 | from matplotlib import rcParams |
|
101 | 101 | # When there's an empty figure, we shouldn't return anything, otherwise we |
|
102 | 102 | # get big blank areas in the qt console. |
|
103 | 103 | if not fig.axes and not fig.lines: |
|
104 | 104 | return |
|
105 | 105 | |
|
106 | 106 | dpi = fig.dpi |
|
107 | 107 | if fmt == 'retina': |
|
108 | 108 | dpi = dpi * 2 |
|
109 | 109 | fmt = 'png' |
|
110 | 110 | |
|
111 | 111 | # build keyword args |
|
112 | 112 | kw = dict( |
|
113 | 113 | format=fmt, |
|
114 | 114 | facecolor=fig.get_facecolor(), |
|
115 | 115 | edgecolor=fig.get_edgecolor(), |
|
116 | 116 | dpi=dpi, |
|
117 | 117 | bbox_inches=bbox_inches, |
|
118 | 118 | ) |
|
119 | 119 | # **kwargs get higher priority |
|
120 | 120 | kw.update(kwargs) |
|
121 | 121 | |
|
122 | 122 | bytes_io = BytesIO() |
|
123 | 123 | fig.canvas.print_figure(bytes_io, **kw) |
|
124 | 124 | data = bytes_io.getvalue() |
|
125 | 125 | if fmt == 'svg': |
|
126 | 126 | data = data.decode('utf-8') |
|
127 | 127 | return data |
|
128 | 128 | |
|
129 | 129 | def retina_figure(fig, **kwargs): |
|
130 | 130 | """format a figure as a pixel-doubled (retina) PNG""" |
|
131 | 131 | pngdata = print_figure(fig, fmt='retina', **kwargs) |
|
132 | 132 | # Make sure that retina_figure acts just like print_figure and returns |
|
133 | 133 | # None when the figure is empty. |
|
134 | 134 | if pngdata is None: |
|
135 | 135 | return |
|
136 | 136 | w, h = _pngxy(pngdata) |
|
137 | 137 | metadata = dict(width=w//2, height=h//2) |
|
138 | 138 | return pngdata, metadata |
|
139 | 139 | |
|
140 | 140 | # We need a little factory function here to create the closure where |
|
141 | 141 | # safe_execfile can live. |
|
142 | 142 | def mpl_runner(safe_execfile): |
|
143 | 143 | """Factory to return a matplotlib-enabled runner for %run. |
|
144 | 144 | |
|
145 | 145 | Parameters |
|
146 | 146 | ---------- |
|
147 | 147 | safe_execfile : function |
|
148 | 148 | This must be a function with the same interface as the |
|
149 | 149 | :meth:`safe_execfile` method of IPython. |
|
150 | 150 | |
|
151 | 151 | Returns |
|
152 | 152 | ------- |
|
153 | 153 | A function suitable for use as the ``runner`` argument of the %run magic |
|
154 | 154 | function. |
|
155 | 155 | """ |
|
156 | 156 | |
|
157 | 157 | def mpl_execfile(fname,*where,**kw): |
|
158 | 158 | """matplotlib-aware wrapper around safe_execfile. |
|
159 | 159 | |
|
160 | 160 | Its interface is identical to that of the :func:`execfile` builtin. |
|
161 | 161 | |
|
162 | 162 | This is ultimately a call to execfile(), but wrapped in safeties to |
|
163 | 163 | properly handle interactive rendering.""" |
|
164 | 164 | |
|
165 | 165 | import matplotlib |
|
166 | 166 | import matplotlib.pyplot as plt |
|
167 | 167 | |
|
168 | 168 | #print '*** Matplotlib runner ***' # dbg |
|
169 | 169 | # turn off rendering until end of script |
|
170 | 170 | is_interactive = matplotlib.rcParams['interactive'] |
|
171 | 171 | matplotlib.interactive(False) |
|
172 | 172 | safe_execfile(fname,*where,**kw) |
|
173 | 173 | matplotlib.interactive(is_interactive) |
|
174 | 174 | # make rendering call now, if the user tried to do it |
|
175 | 175 | if plt.draw_if_interactive.called: |
|
176 | 176 | plt.draw() |
|
177 | 177 | plt.draw_if_interactive.called = False |
|
178 | 178 | |
|
179 | 179 | # re-draw everything that is stale |
|
180 | 180 | try: |
|
181 | 181 | da = plt.draw_all |
|
182 | 182 | except AttributeError: |
|
183 | 183 | pass |
|
184 | 184 | else: |
|
185 | 185 | da() |
|
186 | 186 | |
|
187 | 187 | return mpl_execfile |
|
188 | 188 | |
|
189 | 189 | |
|
190 | 190 | def _reshow_nbagg_figure(fig): |
|
191 | 191 | """reshow an nbagg figure""" |
|
192 | 192 | try: |
|
193 | 193 | reshow = fig.canvas.manager.reshow |
|
194 | 194 | except AttributeError: |
|
195 | 195 | raise NotImplementedError() |
|
196 | 196 | else: |
|
197 | 197 | reshow() |
|
198 | 198 | |
|
199 | 199 | |
|
200 | 200 | def select_figure_formats(shell, formats, **kwargs): |
|
201 | 201 | """Select figure formats for the inline backend. |
|
202 | 202 | |
|
203 | 203 | Parameters |
|
204 | 204 | ========== |
|
205 | 205 | shell : InteractiveShell |
|
206 | 206 | The main IPython instance. |
|
207 | 207 | formats : str or set |
|
208 | 208 | One or a set of figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'. |
|
209 | 209 | **kwargs : any |
|
210 | 210 | Extra keyword arguments to be passed to fig.canvas.print_figure. |
|
211 | 211 | """ |
|
212 | 212 | import matplotlib |
|
213 | 213 | from matplotlib.figure import Figure |
|
214 | 214 | |
|
215 | 215 | svg_formatter = shell.display_formatter.formatters['image/svg+xml'] |
|
216 | 216 | png_formatter = shell.display_formatter.formatters['image/png'] |
|
217 | 217 | jpg_formatter = shell.display_formatter.formatters['image/jpeg'] |
|
218 | 218 | pdf_formatter = shell.display_formatter.formatters['application/pdf'] |
|
219 | 219 | |
|
220 |
if isinstance(formats, |
|
|
220 | if isinstance(formats, str): | |
|
221 | 221 | formats = {formats} |
|
222 | 222 | # cast in case of list / tuple |
|
223 | 223 | formats = set(formats) |
|
224 | 224 | |
|
225 | 225 | [ f.pop(Figure, None) for f in shell.display_formatter.formatters.values() ] |
|
226 | 226 | mplbackend = matplotlib.get_backend().lower() |
|
227 | 227 | if mplbackend == 'nbagg' or mplbackend == 'module://ipympl.backend_nbagg': |
|
228 | 228 | formatter = shell.display_formatter.ipython_display_formatter |
|
229 | 229 | formatter.for_type(Figure, _reshow_nbagg_figure) |
|
230 | 230 | |
|
231 | 231 | supported = {'png', 'png2x', 'retina', 'jpg', 'jpeg', 'svg', 'pdf'} |
|
232 | 232 | bad = formats.difference(supported) |
|
233 | 233 | if bad: |
|
234 | 234 | bs = "%s" % ','.join([repr(f) for f in bad]) |
|
235 | 235 | gs = "%s" % ','.join([repr(f) for f in supported]) |
|
236 | 236 | raise ValueError("supported formats are: %s not %s" % (gs, bs)) |
|
237 | 237 | |
|
238 | 238 | if 'png' in formats: |
|
239 | 239 | png_formatter.for_type(Figure, lambda fig: print_figure(fig, 'png', **kwargs)) |
|
240 | 240 | if 'retina' in formats or 'png2x' in formats: |
|
241 | 241 | png_formatter.for_type(Figure, lambda fig: retina_figure(fig, **kwargs)) |
|
242 | 242 | if 'jpg' in formats or 'jpeg' in formats: |
|
243 | 243 | jpg_formatter.for_type(Figure, lambda fig: print_figure(fig, 'jpg', **kwargs)) |
|
244 | 244 | if 'svg' in formats: |
|
245 | 245 | svg_formatter.for_type(Figure, lambda fig: print_figure(fig, 'svg', **kwargs)) |
|
246 | 246 | if 'pdf' in formats: |
|
247 | 247 | pdf_formatter.for_type(Figure, lambda fig: print_figure(fig, 'pdf', **kwargs)) |
|
248 | 248 | |
|
249 | 249 | #----------------------------------------------------------------------------- |
|
250 | 250 | # Code for initializing matplotlib and importing pylab |
|
251 | 251 | #----------------------------------------------------------------------------- |
|
252 | 252 | |
|
253 | 253 | |
|
254 | 254 | def find_gui_and_backend(gui=None, gui_select=None): |
|
255 | 255 | """Given a gui string return the gui and mpl backend. |
|
256 | 256 | |
|
257 | 257 | Parameters |
|
258 | 258 | ---------- |
|
259 | 259 | gui : str |
|
260 | 260 | Can be one of ('tk','gtk','wx','qt','qt4','inline','agg'). |
|
261 | 261 | gui_select : str |
|
262 | 262 | Can be one of ('tk','gtk','wx','qt','qt4','inline'). |
|
263 | 263 | This is any gui already selected by the shell. |
|
264 | 264 | |
|
265 | 265 | Returns |
|
266 | 266 | ------- |
|
267 | 267 | A tuple of (gui, backend) where backend is one of ('TkAgg','GTKAgg', |
|
268 | 268 | 'WXAgg','Qt4Agg','module://ipykernel.pylab.backend_inline','agg'). |
|
269 | 269 | """ |
|
270 | 270 | |
|
271 | 271 | import matplotlib |
|
272 | 272 | |
|
273 | 273 | if gui and gui != 'auto': |
|
274 | 274 | # select backend based on requested gui |
|
275 | 275 | backend = backends[gui] |
|
276 | 276 | if gui == 'agg': |
|
277 | 277 | gui = None |
|
278 | 278 | else: |
|
279 | 279 | # We need to read the backend from the original data structure, *not* |
|
280 | 280 | # from mpl.rcParams, since a prior invocation of %matplotlib may have |
|
281 | 281 | # overwritten that. |
|
282 | 282 | # WARNING: this assumes matplotlib 1.1 or newer!! |
|
283 | 283 | backend = matplotlib.rcParamsOrig['backend'] |
|
284 | 284 | # In this case, we need to find what the appropriate gui selection call |
|
285 | 285 | # should be for IPython, so we can activate inputhook accordingly |
|
286 | 286 | gui = backend2gui.get(backend, None) |
|
287 | 287 | |
|
288 | 288 | # If we have already had a gui active, we need it and inline are the |
|
289 | 289 | # ones allowed. |
|
290 | 290 | if gui_select and gui != gui_select: |
|
291 | 291 | gui = gui_select |
|
292 | 292 | backend = backends[gui] |
|
293 | 293 | |
|
294 | 294 | return gui, backend |
|
295 | 295 | |
|
296 | 296 | |
|
297 | 297 | def activate_matplotlib(backend): |
|
298 | 298 | """Activate the given backend and set interactive to True.""" |
|
299 | 299 | |
|
300 | 300 | import matplotlib |
|
301 | 301 | matplotlib.interactive(True) |
|
302 | 302 | |
|
303 | 303 | # Matplotlib had a bug where even switch_backend could not force |
|
304 | 304 | # the rcParam to update. This needs to be set *before* the module |
|
305 | 305 | # magic of switch_backend(). |
|
306 | 306 | matplotlib.rcParams['backend'] = backend |
|
307 | 307 | |
|
308 | 308 | import matplotlib.pyplot |
|
309 | 309 | matplotlib.pyplot.switch_backend(backend) |
|
310 | 310 | |
|
311 | 311 | # This must be imported last in the matplotlib series, after |
|
312 | 312 | # backend/interactivity choices have been made |
|
313 | 313 | import matplotlib.pyplot as plt |
|
314 | 314 | |
|
315 | 315 | plt.show._needmain = False |
|
316 | 316 | # We need to detect at runtime whether show() is called by the user. |
|
317 | 317 | # For this, we wrap it into a decorator which adds a 'called' flag. |
|
318 | 318 | plt.draw_if_interactive = flag_calls(plt.draw_if_interactive) |
|
319 | 319 | |
|
320 | 320 | |
|
321 | 321 | def import_pylab(user_ns, import_all=True): |
|
322 | 322 | """Populate the namespace with pylab-related values. |
|
323 | 323 | |
|
324 | 324 | Imports matplotlib, pylab, numpy, and everything from pylab and numpy. |
|
325 | 325 | |
|
326 | 326 | Also imports a few names from IPython (figsize, display, getfigs) |
|
327 | 327 | |
|
328 | 328 | """ |
|
329 | 329 | |
|
330 | 330 | # Import numpy as np/pyplot as plt are conventions we're trying to |
|
331 | 331 | # somewhat standardize on. Making them available to users by default |
|
332 | 332 | # will greatly help this. |
|
333 | 333 | s = ("import numpy\n" |
|
334 | 334 | "import matplotlib\n" |
|
335 | 335 | "from matplotlib import pylab, mlab, pyplot\n" |
|
336 | 336 | "np = numpy\n" |
|
337 | 337 | "plt = pyplot\n" |
|
338 | 338 | ) |
|
339 | 339 | exec(s, user_ns) |
|
340 | 340 | |
|
341 | 341 | if import_all: |
|
342 | 342 | s = ("from matplotlib.pylab import *\n" |
|
343 | 343 | "from numpy import *\n") |
|
344 | 344 | exec(s, user_ns) |
|
345 | 345 | |
|
346 | 346 | # IPython symbols to add |
|
347 | 347 | user_ns['figsize'] = figsize |
|
348 | 348 | from IPython.core.display import display |
|
349 | 349 | # Add display and getfigs to the user's namespace |
|
350 | 350 | user_ns['display'] = display |
|
351 | 351 | user_ns['getfigs'] = getfigs |
|
352 | 352 | |
|
353 | 353 | |
|
354 | 354 | def configure_inline_support(shell, backend): |
|
355 | 355 | """Configure an IPython shell object for matplotlib use. |
|
356 | 356 | |
|
357 | 357 | Parameters |
|
358 | 358 | ---------- |
|
359 | 359 | shell : InteractiveShell instance |
|
360 | 360 | |
|
361 | 361 | backend : matplotlib backend |
|
362 | 362 | """ |
|
363 | 363 | # If using our svg payload backend, register the post-execution |
|
364 | 364 | # function that will pick up the results for display. This can only be |
|
365 | 365 | # done with access to the real shell object. |
|
366 | 366 | |
|
367 | 367 | # Note: if we can't load the inline backend, then there's no point |
|
368 | 368 | # continuing (such as in terminal-only shells in environments without |
|
369 | 369 | # zeromq available). |
|
370 | 370 | try: |
|
371 | 371 | from ipykernel.pylab.backend_inline import InlineBackend |
|
372 | 372 | except ImportError: |
|
373 | 373 | return |
|
374 | 374 | import matplotlib |
|
375 | 375 | |
|
376 | 376 | cfg = InlineBackend.instance(parent=shell) |
|
377 | 377 | cfg.shell = shell |
|
378 | 378 | if cfg not in shell.configurables: |
|
379 | 379 | shell.configurables.append(cfg) |
|
380 | 380 | |
|
381 | 381 | if backend == backends['inline']: |
|
382 | 382 | from ipykernel.pylab.backend_inline import flush_figures |
|
383 | 383 | shell.events.register('post_execute', flush_figures) |
|
384 | 384 | |
|
385 | 385 | # Save rcParams that will be overwritten
|
386 | 386 | shell._saved_rcParams = dict() |
|
387 | 387 | for k in cfg.rc: |
|
388 | 388 | shell._saved_rcParams[k] = matplotlib.rcParams[k] |
|
389 | 389 | # load inline_rc |
|
390 | 390 | matplotlib.rcParams.update(cfg.rc) |
|
391 | 391 | new_backend_name = "inline" |
|
392 | 392 | else: |
|
393 | 393 | from ipykernel.pylab.backend_inline import flush_figures |
|
394 | 394 | try: |
|
395 | 395 | shell.events.unregister('post_execute', flush_figures) |
|
396 | 396 | except ValueError: |
|
397 | 397 | pass |
|
398 | 398 | if hasattr(shell, '_saved_rcParams'): |
|
399 | 399 | matplotlib.rcParams.update(shell._saved_rcParams) |
|
400 | 400 | del shell._saved_rcParams |
|
401 | 401 | new_backend_name = "other" |
|
402 | 402 | |
|
403 | 403 | # only enable the formats once -> don't change the enabled formats (which the user may |
|
404 | 404 | # have changed) when getting another "%matplotlib inline" call.
|
405 | 405 | # See https://github.com/ipython/ipykernel/issues/29 |
|
406 | 406 | cur_backend = getattr(configure_inline_support, "current_backend", "unset") |
|
407 | 407 | if new_backend_name != cur_backend: |
|
408 | 408 | # Setup the default figure format |
|
409 | 409 | select_figure_formats(shell, cfg.figure_formats, **cfg.print_figure_kwargs) |
|
410 | 410 | configure_inline_support.current_backend = new_backend_name |
@@ -1,739 +1,739 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """Tests for the IPython tab-completion machinery.""" |
|
3 | 3 | |
|
4 | 4 | # Copyright (c) IPython Development Team. |
|
5 | 5 | # Distributed under the terms of the Modified BSD License. |
|
6 | 6 | |
|
7 | 7 | import os |
|
8 | 8 | import sys |
|
9 | 9 | import unittest |
|
10 | 10 | |
|
11 | 11 | from contextlib import contextmanager |
|
12 | 12 | |
|
13 | 13 | import nose.tools as nt |
|
14 | 14 | |
|
15 | 15 | from traitlets.config.loader import Config |
|
16 | 16 | from IPython import get_ipython |
|
17 | 17 | from IPython.core import completer |
|
18 | 18 | from IPython.external.decorators import knownfailureif |
|
19 | 19 | from IPython.utils.tempdir import TemporaryDirectory, TemporaryWorkingDirectory |
|
20 | 20 | from IPython.utils.generics import complete_object |
|
21 |
from IPython.utils.py3compat import |
|
|
21 | from IPython.utils.py3compat import unicode_type | |
|
22 | 22 | from IPython.testing import decorators as dec |
|
23 | 23 | |
|
24 | 24 | #----------------------------------------------------------------------------- |
|
25 | 25 | # Test functions |
|
26 | 26 | #----------------------------------------------------------------------------- |
|
27 | 27 | |
|
28 | 28 | @contextmanager |
|
29 | 29 | def greedy_completion(): |
|
30 | 30 | ip = get_ipython() |
|
31 | 31 | greedy_original = ip.Completer.greedy |
|
32 | 32 | try: |
|
33 | 33 | ip.Completer.greedy = True |
|
34 | 34 | yield |
|
35 | 35 | finally: |
|
36 | 36 | ip.Completer.greedy = greedy_original |
|
37 | 37 | |
|
38 | 38 | def test_protect_filename(): |
|
39 | 39 | if sys.platform == 'win32': |
|
40 | 40 | pairs = [('abc','abc'), |
|
41 | 41 | (' abc','" abc"'), |
|
42 | 42 | ('a bc','"a bc"'), |
|
43 | 43 | ('a bc','"a bc"'), |
|
44 | 44 | (' bc','" bc"'), |
|
45 | 45 | ] |
|
46 | 46 | else: |
|
47 | 47 | pairs = [('abc','abc'), |
|
48 | 48 | (' abc',r'\ abc'), |
|
49 | 49 | ('a bc',r'a\ bc'), |
|
50 | 50 | ('a bc',r'a\ \ bc'), |
|
51 | 51 | (' bc',r'\ \ bc'), |
|
52 | 52 | # On posix, we also protect parens and other special characters. |
|
53 | 53 | ('a(bc',r'a\(bc'), |
|
54 | 54 | ('a)bc',r'a\)bc'), |
|
55 | 55 | ('a( )bc',r'a\(\ \)bc'), |
|
56 | 56 | ('a[1]bc', r'a\[1\]bc'), |
|
57 | 57 | ('a{1}bc', r'a\{1\}bc'), |
|
58 | 58 | ('a#bc', r'a\#bc'), |
|
59 | 59 | ('a?bc', r'a\?bc'), |
|
60 | 60 | ('a=bc', r'a\=bc'), |
|
61 | 61 | ('a\\bc', r'a\\bc'), |
|
62 | 62 | ('a|bc', r'a\|bc'), |
|
63 | 63 | ('a;bc', r'a\;bc'), |
|
64 | 64 | ('a:bc', r'a\:bc'), |
|
65 | 65 | ("a'bc", r"a\'bc"), |
|
66 | 66 | ('a*bc', r'a\*bc'), |
|
67 | 67 | ('a"bc', r'a\"bc'), |
|
68 | 68 | ('a^bc', r'a\^bc'), |
|
69 | 69 | ('a&bc', r'a\&bc'), |
|
70 | 70 | ] |
|
71 | 71 | # run the actual tests |
|
72 | 72 | for s1, s2 in pairs: |
|
73 | 73 | s1p = completer.protect_filename(s1) |
|
74 | 74 | nt.assert_equal(s1p, s2) |
|
75 | 75 | |
|
76 | 76 | |
|
77 | 77 | def check_line_split(splitter, test_specs): |
|
78 | 78 | for part1, part2, split in test_specs: |
|
79 | 79 | cursor_pos = len(part1) |
|
80 | 80 | line = part1+part2 |
|
81 | 81 | out = splitter.split_line(line, cursor_pos) |
|
82 | 82 | nt.assert_equal(out, split) |
|
83 | 83 | |
|
84 | 84 | |
|
85 | 85 | def test_line_split(): |
|
86 | 86 | """Basic line splitter test with default specs.""" |
|
87 | 87 | sp = completer.CompletionSplitter() |
|
88 | 88 | # The format of the test specs is: part1, part2, expected answer. Parts 1 |
|
89 | 89 | # and 2 are joined into the 'line' sent to the splitter, as if the cursor |
|
90 | 90 | # was at the end of part1. So an empty part2 represents someone hitting |
|
91 | 91 | # tab at the end of the line, the most common case. |
|
92 | 92 | t = [('run some/scrip', '', 'some/scrip'), |
|
93 | 93 | ('run scripts/er', 'ror.py foo', 'scripts/er'), |
|
94 | 94 | ('echo $HOM', '', 'HOM'), |
|
95 | 95 | ('print sys.pa', '', 'sys.pa'), |
|
96 | 96 | ('print(sys.pa', '', 'sys.pa'), |
|
97 | 97 | ("execfile('scripts/er", '', 'scripts/er'), |
|
98 | 98 | ('a[x.', '', 'x.'), |
|
99 | 99 | ('a[x.', 'y', 'x.'), |
|
100 | 100 | ('cd "some_file/', '', 'some_file/'), |
|
101 | 101 | ] |
|
102 | 102 | check_line_split(sp, t) |
|
103 | 103 | # Ensure splitting works OK with unicode by re-running the tests with |
|
104 | 104 | # all inputs turned into unicode |
|
105 | 105 | check_line_split(sp, [ map(unicode_type, p) for p in t] ) |
|
106 | 106 | |
|
107 | 107 | |
|
108 | 108 | def test_custom_completion_error(): |
|
109 | 109 | """Test that errors from custom attribute completers are silenced.""" |
|
110 | 110 | ip = get_ipython() |
|
111 | 111 | class A(object): pass |
|
112 | 112 | ip.user_ns['a'] = A() |
|
113 | 113 | |
|
114 | 114 | @complete_object.when_type(A) |
|
115 | 115 | def complete_A(a, existing_completions): |
|
116 | 116 | raise TypeError("this should be silenced") |
|
117 | 117 | |
|
118 | 118 | ip.complete("a.") |
|
119 | 119 | |
|
120 | 120 | |
|
121 | 121 | def test_unicode_completions(): |
|
122 | 122 | ip = get_ipython() |
|
123 | 123 | # Some strings that trigger different types of completion. Check them both |
|
124 | 124 | # in str and unicode forms |
|
125 | 125 | s = ['ru', '%ru', 'cd /', 'floa', 'float(x)/'] |
|
126 | 126 | for t in s + list(map(unicode_type, s)): |
|
127 | 127 | # We don't need to check exact completion values (they may change |
|
128 | 128 | # depending on the state of the namespace, but at least no exceptions |
|
129 | 129 | # should be thrown and the return value should be a pair of text, list |
|
130 | 130 | # values. |
|
131 | 131 | text, matches = ip.complete(t) |
|
132 |
nt.assert_true(isinstance(text, str |
|
|
132 | nt.assert_true(isinstance(text, str)) | |
|
133 | 133 | nt.assert_true(isinstance(matches, list)) |
|
134 | 134 | |
|
135 | 135 | def test_latex_completions(): |
|
136 | 136 | from IPython.core.latex_symbols import latex_symbols |
|
137 | 137 | import random |
|
138 | 138 | ip = get_ipython() |
|
139 | 139 | # Test some random unicode symbols |
|
140 | 140 | keys = random.sample(latex_symbols.keys(), 10) |
|
141 | 141 | for k in keys: |
|
142 | 142 | text, matches = ip.complete(k) |
|
143 | 143 | nt.assert_equal(len(matches),1) |
|
144 | 144 | nt.assert_equal(text, k) |
|
145 | 145 | nt.assert_equal(matches[0], latex_symbols[k]) |
|
146 | 146 | # Test a more complex line |
|
147 | 147 | text, matches = ip.complete(u'print(\\alpha') |
|
148 | 148 | nt.assert_equals(text, u'\\alpha') |
|
149 | 149 | nt.assert_equals(matches[0], latex_symbols['\\alpha']) |
|
150 | 150 | # Test multiple matching latex symbols |
|
151 | 151 | text, matches = ip.complete(u'\\al') |
|
152 | 152 | nt.assert_in('\\alpha', matches) |
|
153 | 153 | nt.assert_in('\\aleph', matches) |
|
154 | 154 | |
|
155 | 155 | |
|
156 | 156 | |
|
157 | 157 | |
|
158 | 158 | def test_back_latex_completion(): |
|
159 | 159 | ip = get_ipython() |
|
160 | 160 | |
|
161 | 161 | # do not return more than 1 matches fro \beta, only the latex one. |
|
162 | 162 | name, matches = ip.complete('\\β') |
|
163 | 163 | nt.assert_equal(len(matches), 1) |
|
164 | 164 | nt.assert_equal(matches[0], '\\beta') |
|
165 | 165 | |
|
166 | 166 | def test_back_unicode_completion(): |
|
167 | 167 | ip = get_ipython() |
|
168 | 168 | |
|
169 | 169 | name, matches = ip.complete('\\Ⅴ') |
|
170 | 170 | nt.assert_equal(len(matches), 1) |
|
171 | 171 | nt.assert_equal(matches[0], '\\ROMAN NUMERAL FIVE') |
|
172 | 172 | |
|
173 | 173 | |
|
174 | 174 | def test_forward_unicode_completion(): |
|
175 | 175 | ip = get_ipython() |
|
176 | 176 | |
|
177 | 177 | name, matches = ip.complete('\\ROMAN NUMERAL FIVE') |
|
178 | 178 | nt.assert_equal(len(matches), 1) |
|
179 | 179 | nt.assert_equal(matches[0], 'Ⅴ') |
|
180 | 180 | |
|
181 | 181 | @dec.knownfailureif(sys.platform == 'win32', 'Fails if there is a C:\\j... path') |
|
182 | 182 | def test_no_ascii_back_completion(): |
|
183 | 183 | ip = get_ipython() |
|
184 | 184 | with TemporaryWorkingDirectory(): # Avoid any filename completions |
|
185 | 185 | # single ascii letter that don't have yet completions |
|
186 | 186 | for letter in 'jJ' : |
|
187 | 187 | name, matches = ip.complete('\\'+letter) |
|
188 | 188 | nt.assert_equal(matches, []) |
|
189 | 189 | |
|
190 | 190 | |
|
191 | 191 | |
|
192 | 192 | |
|
193 | 193 | class CompletionSplitterTestCase(unittest.TestCase): |
|
194 | 194 | def setUp(self): |
|
195 | 195 | self.sp = completer.CompletionSplitter() |
|
196 | 196 | |
|
197 | 197 | def test_delim_setting(self): |
|
198 | 198 | self.sp.delims = ' ' |
|
199 | 199 | nt.assert_equal(self.sp.delims, ' ') |
|
200 | 200 | nt.assert_equal(self.sp._delim_expr, '[\ ]') |
|
201 | 201 | |
|
202 | 202 | def test_spaces(self): |
|
203 | 203 | """Test with only spaces as split chars.""" |
|
204 | 204 | self.sp.delims = ' ' |
|
205 | 205 | t = [('foo', '', 'foo'), |
|
206 | 206 | ('run foo', '', 'foo'), |
|
207 | 207 | ('run foo', 'bar', 'foo'), |
|
208 | 208 | ] |
|
209 | 209 | check_line_split(self.sp, t) |
|
210 | 210 | |
|
211 | 211 | |
|
212 | 212 | def test_has_open_quotes1(): |
|
213 | 213 | for s in ["'", "'''", "'hi' '"]: |
|
214 | 214 | nt.assert_equal(completer.has_open_quotes(s), "'") |
|
215 | 215 | |
|
216 | 216 | |
|
217 | 217 | def test_has_open_quotes2(): |
|
218 | 218 | for s in ['"', '"""', '"hi" "']: |
|
219 | 219 | nt.assert_equal(completer.has_open_quotes(s), '"') |
|
220 | 220 | |
|
221 | 221 | |
|
222 | 222 | def test_has_open_quotes3(): |
|
223 | 223 | for s in ["''", "''' '''", "'hi' 'ipython'"]: |
|
224 | 224 | nt.assert_false(completer.has_open_quotes(s)) |
|
225 | 225 | |
|
226 | 226 | |
|
227 | 227 | def test_has_open_quotes4(): |
|
228 | 228 | for s in ['""', '""" """', '"hi" "ipython"']: |
|
229 | 229 | nt.assert_false(completer.has_open_quotes(s)) |
|
230 | 230 | |
|
231 | 231 | |
|
232 | 232 | @knownfailureif(sys.platform == 'win32', "abspath completions fail on Windows") |
|
233 | 233 | def test_abspath_file_completions(): |
|
234 | 234 | ip = get_ipython() |
|
235 | 235 | with TemporaryDirectory() as tmpdir: |
|
236 | 236 | prefix = os.path.join(tmpdir, 'foo') |
|
237 | 237 | suffixes = ['1', '2'] |
|
238 | 238 | names = [prefix+s for s in suffixes] |
|
239 | 239 | for n in names: |
|
240 | 240 | open(n, 'w').close() |
|
241 | 241 | |
|
242 | 242 | # Check simple completion |
|
243 | 243 | c = ip.complete(prefix)[1] |
|
244 | 244 | nt.assert_equal(c, names) |
|
245 | 245 | |
|
246 | 246 | # Now check with a function call |
|
247 | 247 | cmd = 'a = f("%s' % prefix |
|
248 | 248 | c = ip.complete(prefix, cmd)[1] |
|
249 | 249 | comp = [prefix+s for s in suffixes] |
|
250 | 250 | nt.assert_equal(c, comp) |
|
251 | 251 | |
|
252 | 252 | |
|
253 | 253 | def test_local_file_completions(): |
|
254 | 254 | ip = get_ipython() |
|
255 | 255 | with TemporaryWorkingDirectory(): |
|
256 | 256 | prefix = './foo' |
|
257 | 257 | suffixes = ['1', '2'] |
|
258 | 258 | names = [prefix+s for s in suffixes] |
|
259 | 259 | for n in names: |
|
260 | 260 | open(n, 'w').close() |
|
261 | 261 | |
|
262 | 262 | # Check simple completion |
|
263 | 263 | c = ip.complete(prefix)[1] |
|
264 | 264 | nt.assert_equal(c, names) |
|
265 | 265 | |
|
266 | 266 | # Now check with a function call |
|
267 | 267 | cmd = 'a = f("%s' % prefix |
|
268 | 268 | c = ip.complete(prefix, cmd)[1] |
|
269 | 269 | comp = set(prefix+s for s in suffixes) |
|
270 | 270 | nt.assert_true(comp.issubset(set(c))) |
|
271 | 271 | |
|
272 | 272 | |
|
273 | 273 | def test_greedy_completions(): |
|
274 | 274 | ip = get_ipython() |
|
275 | 275 | ip.ex('a=list(range(5))') |
|
276 | 276 | _,c = ip.complete('.',line='a[0].') |
|
277 | 277 | nt.assert_false('.real' in c, |
|
278 | 278 | "Shouldn't have completed on a[0]: %s"%c) |
|
279 | 279 | with greedy_completion(): |
|
280 | 280 | def _(line, cursor_pos, expect, message): |
|
281 | 281 | _,c = ip.complete('.', line=line, cursor_pos=cursor_pos) |
|
282 | 282 | nt.assert_in(expect, c, message%c) |
|
283 | 283 | |
|
284 | 284 | yield _, 'a[0].', 5, 'a[0].real', "Should have completed on a[0].: %s" |
|
285 | 285 | yield _, 'a[0].r', 6, 'a[0].real', "Should have completed on a[0].r: %s" |
|
286 | 286 | |
|
287 | 287 | if sys.version_info > (3,4): |
|
288 | 288 | yield _, 'a[0].from_', 10, 'a[0].from_bytes', "Should have completed on a[0].from_: %s" |
|
289 | 289 | |
|
290 | 290 | |
|
291 | 291 | |
|
292 | 292 | def test_omit__names(): |
|
293 | 293 | # also happens to test IPCompleter as a configurable |
|
294 | 294 | ip = get_ipython() |
|
295 | 295 | ip._hidden_attr = 1 |
|
296 | 296 | ip._x = {} |
|
297 | 297 | c = ip.Completer |
|
298 | 298 | ip.ex('ip=get_ipython()') |
|
299 | 299 | cfg = Config() |
|
300 | 300 | cfg.IPCompleter.omit__names = 0 |
|
301 | 301 | c.update_config(cfg) |
|
302 | 302 | s,matches = c.complete('ip.') |
|
303 | 303 | nt.assert_in('ip.__str__', matches) |
|
304 | 304 | nt.assert_in('ip._hidden_attr', matches) |
|
305 | 305 | cfg = Config() |
|
306 | 306 | cfg.IPCompleter.omit__names = 1 |
|
307 | 307 | c.update_config(cfg) |
|
308 | 308 | s,matches = c.complete('ip.') |
|
309 | 309 | nt.assert_not_in('ip.__str__', matches) |
|
310 | 310 | nt.assert_in('ip._hidden_attr', matches) |
|
311 | 311 | cfg = Config() |
|
312 | 312 | cfg.IPCompleter.omit__names = 2 |
|
313 | 313 | c.update_config(cfg) |
|
314 | 314 | s,matches = c.complete('ip.') |
|
315 | 315 | nt.assert_not_in('ip.__str__', matches) |
|
316 | 316 | nt.assert_not_in('ip._hidden_attr', matches) |
|
317 | 317 | s,matches = c.complete('ip._x.') |
|
318 | 318 | nt.assert_in('ip._x.keys', matches) |
|
319 | 319 | del ip._hidden_attr |
|
320 | 320 | |
|
321 | 321 | |
|
322 | 322 | def test_limit_to__all__False_ok(): |
|
323 | 323 | ip = get_ipython() |
|
324 | 324 | c = ip.Completer |
|
325 | 325 | ip.ex('class D: x=24') |
|
326 | 326 | ip.ex('d=D()') |
|
327 | 327 | cfg = Config() |
|
328 | 328 | cfg.IPCompleter.limit_to__all__ = False |
|
329 | 329 | c.update_config(cfg) |
|
330 | 330 | s, matches = c.complete('d.') |
|
331 | 331 | nt.assert_in('d.x', matches) |
|
332 | 332 | |
|
333 | 333 | |
|
334 | 334 | def test_get__all__entries_ok(): |
|
335 | 335 | class A(object): |
|
336 | 336 | __all__ = ['x', 1] |
|
337 | 337 | words = completer.get__all__entries(A()) |
|
338 | 338 | nt.assert_equal(words, ['x']) |
|
339 | 339 | |
|
340 | 340 | |
|
341 | 341 | def test_get__all__entries_no__all__ok(): |
|
342 | 342 | class A(object): |
|
343 | 343 | pass |
|
344 | 344 | words = completer.get__all__entries(A()) |
|
345 | 345 | nt.assert_equal(words, []) |
|
346 | 346 | |
|
347 | 347 | |
|
348 | 348 | def test_func_kw_completions(): |
|
349 | 349 | ip = get_ipython() |
|
350 | 350 | c = ip.Completer |
|
351 | 351 | ip.ex('def myfunc(a=1,b=2): return a+b') |
|
352 | 352 | s, matches = c.complete(None, 'myfunc(1,b') |
|
353 | 353 | nt.assert_in('b=', matches) |
|
354 | 354 | # Simulate completing with cursor right after b (pos==10): |
|
355 | 355 | s, matches = c.complete(None, 'myfunc(1,b)', 10) |
|
356 | 356 | nt.assert_in('b=', matches) |
|
357 | 357 | s, matches = c.complete(None, 'myfunc(a="escaped\\")string",b') |
|
358 | 358 | nt.assert_in('b=', matches) |
|
359 | 359 | #builtin function |
|
360 | 360 | s, matches = c.complete(None, 'min(k, k') |
|
361 | 361 | nt.assert_in('key=', matches) |
|
362 | 362 | |
|
363 | 363 | |
|
364 | 364 | def test_default_arguments_from_docstring(): |
|
365 | 365 | ip = get_ipython() |
|
366 | 366 | c = ip.Completer |
|
367 | 367 | kwd = c._default_arguments_from_docstring( |
|
368 | 368 | 'min(iterable[, key=func]) -> value') |
|
369 | 369 | nt.assert_equal(kwd, ['key']) |
|
370 | 370 | #with cython type etc |
|
371 | 371 | kwd = c._default_arguments_from_docstring( |
|
372 | 372 | 'Minuit.migrad(self, int ncall=10000, resume=True, int nsplit=1)\n') |
|
373 | 373 | nt.assert_equal(kwd, ['ncall', 'resume', 'nsplit']) |
|
374 | 374 | #white spaces |
|
375 | 375 | kwd = c._default_arguments_from_docstring( |
|
376 | 376 | '\n Minuit.migrad(self, int ncall=10000, resume=True, int nsplit=1)\n') |
|
377 | 377 | nt.assert_equal(kwd, ['ncall', 'resume', 'nsplit']) |
|
378 | 378 | |
|
379 | 379 | def test_line_magics(): |
|
380 | 380 | ip = get_ipython() |
|
381 | 381 | c = ip.Completer |
|
382 | 382 | s, matches = c.complete(None, 'lsmag') |
|
383 | 383 | nt.assert_in('%lsmagic', matches) |
|
384 | 384 | s, matches = c.complete(None, '%lsmag') |
|
385 | 385 | nt.assert_in('%lsmagic', matches) |
|
386 | 386 | |
|
387 | 387 | |
|
388 | 388 | def test_cell_magics(): |
|
389 | 389 | from IPython.core.magic import register_cell_magic |
|
390 | 390 | |
|
391 | 391 | @register_cell_magic |
|
392 | 392 | def _foo_cellm(line, cell): |
|
393 | 393 | pass |
|
394 | 394 | |
|
395 | 395 | ip = get_ipython() |
|
396 | 396 | c = ip.Completer |
|
397 | 397 | |
|
398 | 398 | s, matches = c.complete(None, '_foo_ce') |
|
399 | 399 | nt.assert_in('%%_foo_cellm', matches) |
|
400 | 400 | s, matches = c.complete(None, '%%_foo_ce') |
|
401 | 401 | nt.assert_in('%%_foo_cellm', matches) |
|
402 | 402 | |
|
403 | 403 | |
|
404 | 404 | def test_line_cell_magics(): |
|
405 | 405 | from IPython.core.magic import register_line_cell_magic |
|
406 | 406 | |
|
407 | 407 | @register_line_cell_magic |
|
408 | 408 | def _bar_cellm(line, cell): |
|
409 | 409 | pass |
|
410 | 410 | |
|
411 | 411 | ip = get_ipython() |
|
412 | 412 | c = ip.Completer |
|
413 | 413 | |
|
414 | 414 | # The policy here is trickier, see comments in completion code. The |
|
415 | 415 | # returned values depend on whether the user passes %% or not explicitly, |
|
416 | 416 | # and this will show a difference if the same name is both a line and cell |
|
417 | 417 | # magic. |
|
418 | 418 | s, matches = c.complete(None, '_bar_ce') |
|
419 | 419 | nt.assert_in('%_bar_cellm', matches) |
|
420 | 420 | nt.assert_in('%%_bar_cellm', matches) |
|
421 | 421 | s, matches = c.complete(None, '%_bar_ce') |
|
422 | 422 | nt.assert_in('%_bar_cellm', matches) |
|
423 | 423 | nt.assert_in('%%_bar_cellm', matches) |
|
424 | 424 | s, matches = c.complete(None, '%%_bar_ce') |
|
425 | 425 | nt.assert_not_in('%_bar_cellm', matches) |
|
426 | 426 | nt.assert_in('%%_bar_cellm', matches) |
|
427 | 427 | |
|
428 | 428 | |
|
429 | 429 | def test_magic_completion_order(): |
|
430 | 430 | |
|
431 | 431 | ip = get_ipython() |
|
432 | 432 | c = ip.Completer |
|
433 | 433 | |
|
434 | 434 | # Test ordering of magics and non-magics with the same name |
|
435 | 435 | # We want the non-magic first |
|
436 | 436 | |
|
437 | 437 | # Before importing matplotlib, there should only be one option: |
|
438 | 438 | |
|
439 | 439 | text, matches = c.complete('mat') |
|
440 | 440 | nt.assert_equal(matches, ["%matplotlib"]) |
|
441 | 441 | |
|
442 | 442 | |
|
443 | 443 | ip.run_cell("matplotlib = 1") # introduce name into namespace |
|
444 | 444 | |
|
445 | 445 | # After the import, there should be two options, ordered like this: |
|
446 | 446 | text, matches = c.complete('mat') |
|
447 | 447 | nt.assert_equal(matches, ["matplotlib", "%matplotlib"]) |
|
448 | 448 | |
|
449 | 449 | |
|
450 | 450 | ip.run_cell("timeit = 1") # define a user variable called 'timeit' |
|
451 | 451 | |
|
452 | 452 | # Order of user variable and line and cell magics with same name: |
|
453 | 453 | text, matches = c.complete('timeit') |
|
454 | 454 | nt.assert_equal(matches, ["timeit", "%timeit","%%timeit"]) |
|
455 | 455 | |
|
456 | 456 | |
|
457 | 457 | def test_dict_key_completion_string(): |
|
458 | 458 | """Test dictionary key completion for string keys""" |
|
459 | 459 | ip = get_ipython() |
|
460 | 460 | complete = ip.Completer.complete |
|
461 | 461 | |
|
462 | 462 | ip.user_ns['d'] = {'abc': None} |
|
463 | 463 | |
|
464 | 464 | # check completion at different stages |
|
465 | 465 | _, matches = complete(line_buffer="d[") |
|
466 | 466 | nt.assert_in("'abc'", matches) |
|
467 | 467 | nt.assert_not_in("'abc']", matches) |
|
468 | 468 | |
|
469 | 469 | _, matches = complete(line_buffer="d['") |
|
470 | 470 | nt.assert_in("abc", matches) |
|
471 | 471 | nt.assert_not_in("abc']", matches) |
|
472 | 472 | |
|
473 | 473 | _, matches = complete(line_buffer="d['a") |
|
474 | 474 | nt.assert_in("abc", matches) |
|
475 | 475 | nt.assert_not_in("abc']", matches) |
|
476 | 476 | |
|
477 | 477 | # check use of different quoting |
|
478 | 478 | _, matches = complete(line_buffer="d[\"") |
|
479 | 479 | nt.assert_in("abc", matches) |
|
480 | 480 | nt.assert_not_in('abc\"]', matches) |
|
481 | 481 | |
|
482 | 482 | _, matches = complete(line_buffer="d[\"a") |
|
483 | 483 | nt.assert_in("abc", matches) |
|
484 | 484 | nt.assert_not_in('abc\"]', matches) |
|
485 | 485 | |
|
486 | 486 | # check sensitivity to following context |
|
487 | 487 | _, matches = complete(line_buffer="d[]", cursor_pos=2) |
|
488 | 488 | nt.assert_in("'abc'", matches) |
|
489 | 489 | |
|
490 | 490 | _, matches = complete(line_buffer="d['']", cursor_pos=3) |
|
491 | 491 | nt.assert_in("abc", matches) |
|
492 | 492 | nt.assert_not_in("abc'", matches) |
|
493 | 493 | nt.assert_not_in("abc']", matches) |
|
494 | 494 | |
|
495 | 495 | # check multiple solutions are correctly returned and that noise is not |
|
496 | 496 | ip.user_ns['d'] = {'abc': None, 'abd': None, 'bad': None, object(): None, |
|
497 | 497 | 5: None} |
|
498 | 498 | |
|
499 | 499 | _, matches = complete(line_buffer="d['a") |
|
500 | 500 | nt.assert_in("abc", matches) |
|
501 | 501 | nt.assert_in("abd", matches) |
|
502 | 502 | nt.assert_not_in("bad", matches) |
|
503 | 503 | assert not any(m.endswith((']', '"', "'")) for m in matches), matches |
|
504 | 504 | |
|
505 | 505 | # check escaping and whitespace |
|
506 | 506 | ip.user_ns['d'] = {'a\nb': None, 'a\'b': None, 'a"b': None, 'a word': None} |
|
507 | 507 | _, matches = complete(line_buffer="d['a") |
|
508 | 508 | nt.assert_in("a\\nb", matches) |
|
509 | 509 | nt.assert_in("a\\'b", matches) |
|
510 | 510 | nt.assert_in("a\"b", matches) |
|
511 | 511 | nt.assert_in("a word", matches) |
|
512 | 512 | assert not any(m.endswith((']', '"', "'")) for m in matches), matches |
|
513 | 513 | |
|
514 | 514 | # - can complete on non-initial word of the string |
|
515 | 515 | _, matches = complete(line_buffer="d['a w") |
|
516 | 516 | nt.assert_in("word", matches) |
|
517 | 517 | |
|
518 | 518 | # - understands quote escaping |
|
519 | 519 | _, matches = complete(line_buffer="d['a\\'") |
|
520 | 520 | nt.assert_in("b", matches) |
|
521 | 521 | |
|
522 | 522 | # - default quoting should work like repr |
|
523 | 523 | _, matches = complete(line_buffer="d[") |
|
524 | 524 | nt.assert_in("\"a'b\"", matches) |
|
525 | 525 | |
|
526 | 526 | # - when opening quote with ", possible to match with unescaped apostrophe |
|
527 | 527 | _, matches = complete(line_buffer="d[\"a'") |
|
528 | 528 | nt.assert_in("b", matches) |
|
529 | 529 | |
|
530 | 530 | # need to not split at delims that readline won't split at |
|
531 | 531 | if '-' not in ip.Completer.splitter.delims: |
|
532 | 532 | ip.user_ns['d'] = {'before-after': None} |
|
533 | 533 | _, matches = complete(line_buffer="d['before-af") |
|
534 | 534 | nt.assert_in('before-after', matches) |
|
535 | 535 | |
|
536 | 536 | def test_dict_key_completion_contexts(): |
|
537 | 537 | """Test expression contexts in which dict key completion occurs""" |
|
538 | 538 | ip = get_ipython() |
|
539 | 539 | complete = ip.Completer.complete |
|
540 | 540 | d = {'abc': None} |
|
541 | 541 | ip.user_ns['d'] = d |
|
542 | 542 | |
|
543 | 543 | class C: |
|
544 | 544 | data = d |
|
545 | 545 | ip.user_ns['C'] = C |
|
546 | 546 | ip.user_ns['get'] = lambda: d |
|
547 | 547 | |
|
548 | 548 | def assert_no_completion(**kwargs): |
|
549 | 549 | _, matches = complete(**kwargs) |
|
550 | 550 | nt.assert_not_in('abc', matches) |
|
551 | 551 | nt.assert_not_in('abc\'', matches) |
|
552 | 552 | nt.assert_not_in('abc\']', matches) |
|
553 | 553 | nt.assert_not_in('\'abc\'', matches) |
|
554 | 554 | nt.assert_not_in('\'abc\']', matches) |
|
555 | 555 | |
|
556 | 556 | def assert_completion(**kwargs): |
|
557 | 557 | _, matches = complete(**kwargs) |
|
558 | 558 | nt.assert_in("'abc'", matches) |
|
559 | 559 | nt.assert_not_in("'abc']", matches) |
|
560 | 560 | |
|
561 | 561 | # no completion after string closed, even if reopened |
|
562 | 562 | assert_no_completion(line_buffer="d['a'") |
|
563 | 563 | assert_no_completion(line_buffer="d[\"a\"") |
|
564 | 564 | assert_no_completion(line_buffer="d['a' + ") |
|
565 | 565 | assert_no_completion(line_buffer="d['a' + '") |
|
566 | 566 | |
|
567 | 567 | # completion in non-trivial expressions |
|
568 | 568 | assert_completion(line_buffer="+ d[") |
|
569 | 569 | assert_completion(line_buffer="(d[") |
|
570 | 570 | assert_completion(line_buffer="C.data[") |
|
571 | 571 | |
|
572 | 572 | # greedy flag |
|
573 | 573 | def assert_completion(**kwargs): |
|
574 | 574 | _, matches = complete(**kwargs) |
|
575 | 575 | nt.assert_in("get()['abc']", matches) |
|
576 | 576 | |
|
577 | 577 | assert_no_completion(line_buffer="get()[") |
|
578 | 578 | with greedy_completion(): |
|
579 | 579 | assert_completion(line_buffer="get()[") |
|
580 | 580 | assert_completion(line_buffer="get()['") |
|
581 | 581 | assert_completion(line_buffer="get()['a") |
|
582 | 582 | assert_completion(line_buffer="get()['ab") |
|
583 | 583 | assert_completion(line_buffer="get()['abc") |
|
584 | 584 | |
|
585 | 585 | |
|
586 | 586 | |
|
587 | 587 | def test_dict_key_completion_bytes(): |
|
588 | 588 | """Test handling of bytes in dict key completion""" |
|
589 | 589 | ip = get_ipython() |
|
590 | 590 | complete = ip.Completer.complete |
|
591 | 591 | |
|
592 | 592 | ip.user_ns['d'] = {'abc': None, b'abd': None} |
|
593 | 593 | |
|
594 | 594 | _, matches = complete(line_buffer="d[") |
|
595 | 595 | nt.assert_in("'abc'", matches) |
|
596 | 596 | nt.assert_in("b'abd'", matches) |
|
597 | 597 | |
|
598 | 598 | if False: # not currently implemented |
|
599 | 599 | _, matches = complete(line_buffer="d[b") |
|
600 | 600 | nt.assert_in("b'abd'", matches) |
|
601 | 601 | nt.assert_not_in("b'abc'", matches) |
|
602 | 602 | |
|
603 | 603 | _, matches = complete(line_buffer="d[b'") |
|
604 | 604 | nt.assert_in("abd", matches) |
|
605 | 605 | nt.assert_not_in("abc", matches) |
|
606 | 606 | |
|
607 | 607 | _, matches = complete(line_buffer="d[B'") |
|
608 | 608 | nt.assert_in("abd", matches) |
|
609 | 609 | nt.assert_not_in("abc", matches) |
|
610 | 610 | |
|
611 | 611 | _, matches = complete(line_buffer="d['") |
|
612 | 612 | nt.assert_in("abc", matches) |
|
613 | 613 | nt.assert_not_in("abd", matches) |
|
614 | 614 | |
|
615 | 615 | |
|
616 | 616 | def test_dict_key_completion_unicode_py3(): |
|
617 | 617 | """Test handling of unicode in dict key completion""" |
|
618 | 618 | ip = get_ipython() |
|
619 | 619 | complete = ip.Completer.complete |
|
620 | 620 | |
|
621 | 621 | ip.user_ns['d'] = {u'a\u05d0': None} |
|
622 | 622 | |
|
623 | 623 | # query using escape |
|
624 | 624 | if sys.platform != 'win32': |
|
625 | 625 | # Known failure on Windows |
|
626 | 626 | _, matches = complete(line_buffer="d['a\\u05d0") |
|
627 | 627 | nt.assert_in("u05d0", matches) # tokenized after \\ |
|
628 | 628 | |
|
629 | 629 | # query using character |
|
630 | 630 | _, matches = complete(line_buffer="d['a\u05d0") |
|
631 | 631 | nt.assert_in(u"a\u05d0", matches) |
|
632 | 632 | |
|
633 | 633 | with greedy_completion(): |
|
634 | 634 | # query using escape |
|
635 | 635 | _, matches = complete(line_buffer="d['a\\u05d0") |
|
636 | 636 | nt.assert_in("d['a\\u05d0']", matches) # tokenized after \\ |
|
637 | 637 | |
|
638 | 638 | # query using character |
|
639 | 639 | _, matches = complete(line_buffer="d['a\u05d0") |
|
640 | 640 | nt.assert_in(u"d['a\u05d0']", matches) |
|
641 | 641 | |
|
642 | 642 | |
|
643 | 643 | |
|
644 | 644 | @dec.skip_without('numpy') |
|
645 | 645 | def test_struct_array_key_completion(): |
|
646 | 646 | """Test dict key completion applies to numpy struct arrays""" |
|
647 | 647 | import numpy |
|
648 | 648 | ip = get_ipython() |
|
649 | 649 | complete = ip.Completer.complete |
|
650 | 650 | ip.user_ns['d'] = numpy.array([], dtype=[('hello', 'f'), ('world', 'f')]) |
|
651 | 651 | _, matches = complete(line_buffer="d['") |
|
652 | 652 | nt.assert_in("hello", matches) |
|
653 | 653 | nt.assert_in("world", matches) |
|
654 | 654 | # complete on the numpy struct itself |
|
655 | 655 | dt = numpy.dtype([('my_head', [('my_dt', '>u4'), ('my_df', '>u4')]), |
|
656 | 656 | ('my_data', '>f4', 5)]) |
|
657 | 657 | x = numpy.zeros(2, dtype=dt) |
|
658 | 658 | ip.user_ns['d'] = x[1] |
|
659 | 659 | _, matches = complete(line_buffer="d['") |
|
660 | 660 | nt.assert_in("my_head", matches) |
|
661 | 661 | nt.assert_in("my_data", matches) |
|
662 | 662 | # complete on a nested level |
|
663 | 663 | with greedy_completion(): |
|
664 | 664 | ip.user_ns['d'] = numpy.zeros(2, dtype=dt) |
|
665 | 665 | _, matches = complete(line_buffer="d[1]['my_head']['") |
|
666 | 666 | nt.assert_true(any(["my_dt" in m for m in matches])) |
|
667 | 667 | nt.assert_true(any(["my_df" in m for m in matches])) |
|
668 | 668 | |
|
669 | 669 | |
|
670 | 670 | @dec.skip_without('pandas') |
|
671 | 671 | def test_dataframe_key_completion(): |
|
672 | 672 | """Test dict key completion applies to pandas DataFrames""" |
|
673 | 673 | import pandas |
|
674 | 674 | ip = get_ipython() |
|
675 | 675 | complete = ip.Completer.complete |
|
676 | 676 | ip.user_ns['d'] = pandas.DataFrame({'hello': [1], 'world': [2]}) |
|
677 | 677 | _, matches = complete(line_buffer="d['") |
|
678 | 678 | nt.assert_in("hello", matches) |
|
679 | 679 | nt.assert_in("world", matches) |
|
680 | 680 | |
|
681 | 681 | |
|
682 | 682 | def test_dict_key_completion_invalids(): |
|
683 | 683 | """Smoke test cases dict key completion can't handle""" |
|
684 | 684 | ip = get_ipython() |
|
685 | 685 | complete = ip.Completer.complete |
|
686 | 686 | |
|
687 | 687 | ip.user_ns['no_getitem'] = None |
|
688 | 688 | ip.user_ns['no_keys'] = [] |
|
689 | 689 | ip.user_ns['cant_call_keys'] = dict |
|
690 | 690 | ip.user_ns['empty'] = {} |
|
691 | 691 | ip.user_ns['d'] = {'abc': 5} |
|
692 | 692 | |
|
693 | 693 | _, matches = complete(line_buffer="no_getitem['") |
|
694 | 694 | _, matches = complete(line_buffer="no_keys['") |
|
695 | 695 | _, matches = complete(line_buffer="cant_call_keys['") |
|
696 | 696 | _, matches = complete(line_buffer="empty['") |
|
697 | 697 | _, matches = complete(line_buffer="name_error['") |
|
698 | 698 | _, matches = complete(line_buffer="d['\\") # incomplete escape |
|
699 | 699 | |
|
700 | 700 | class KeyCompletable(object): |
|
701 | 701 | def __init__(self, things=()): |
|
702 | 702 | self.things = things |
|
703 | 703 | |
|
704 | 704 | def _ipython_key_completions_(self): |
|
705 | 705 | return list(self.things) |
|
706 | 706 | |
|
707 | 707 | def test_object_key_completion(): |
|
708 | 708 | ip = get_ipython() |
|
709 | 709 | ip.user_ns['key_completable'] = KeyCompletable(['qwerty', 'qwick']) |
|
710 | 710 | |
|
711 | 711 | _, matches = ip.Completer.complete(line_buffer="key_completable['qw") |
|
712 | 712 | nt.assert_in('qwerty', matches) |
|
713 | 713 | nt.assert_in('qwick', matches) |
|
714 | 714 | |
|
715 | 715 | |
|
716 | 716 | def test_aimport_module_completer(): |
|
717 | 717 | ip = get_ipython() |
|
718 | 718 | _, matches = ip.complete('i', '%aimport i') |
|
719 | 719 | nt.assert_in('io', matches) |
|
720 | 720 | nt.assert_not_in('int', matches) |
|
721 | 721 | |
|
722 | 722 | def test_nested_import_module_completer(): |
|
723 | 723 | ip = get_ipython() |
|
724 | 724 | _, matches = ip.complete(None, 'import IPython.co', 17) |
|
725 | 725 | nt.assert_in('IPython.core', matches) |
|
726 | 726 | nt.assert_not_in('import IPython.core', matches) |
|
727 | 727 | nt.assert_not_in('IPython.display', matches) |
|
728 | 728 | |
|
729 | 729 | def test_import_module_completer(): |
|
730 | 730 | ip = get_ipython() |
|
731 | 731 | _, matches = ip.complete('i', 'import i') |
|
732 | 732 | nt.assert_in('io', matches) |
|
733 | 733 | nt.assert_not_in('int', matches) |
|
734 | 734 | |
|
735 | 735 | def test_from_module_completer(): |
|
736 | 736 | ip = get_ipython() |
|
737 | 737 | _, matches = ip.complete('B', 'from io import B', 16) |
|
738 | 738 | nt.assert_in('BytesIO', matches) |
|
739 | 739 | nt.assert_not_in('BaseException', matches) |
@@ -1,162 +1,162 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Tests for completerlib. |
|
3 | 3 | |
|
4 | 4 | """ |
|
5 | 5 | |
|
6 | 6 | #----------------------------------------------------------------------------- |
|
7 | 7 | # Imports |
|
8 | 8 | #----------------------------------------------------------------------------- |
|
9 | 9 | |
|
10 | 10 | import os |
|
11 | 11 | import shutil |
|
12 | 12 | import sys |
|
13 | 13 | import tempfile |
|
14 | 14 | import unittest |
|
15 | 15 | from os.path import join |
|
16 | 16 | |
|
17 | 17 | import nose.tools as nt |
|
18 | 18 | |
|
19 | 19 | from IPython.core.completerlib import magic_run_completer, module_completion |
|
20 | 20 | from IPython.utils import py3compat |
|
21 | 21 | from IPython.utils.tempdir import TemporaryDirectory |
|
22 | 22 | from IPython.testing.decorators import onlyif_unicode_paths |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | class MockEvent(object): |
|
26 | 26 | def __init__(self, line): |
|
27 | 27 | self.line = line |
|
28 | 28 | |
|
29 | 29 | #----------------------------------------------------------------------------- |
|
30 | 30 | # Test functions begin |
|
31 | 31 | #----------------------------------------------------------------------------- |
|
32 | 32 | class Test_magic_run_completer(unittest.TestCase): |
|
33 | 33 | files = [u"aao.py", u"a.py", u"b.py", u"aao.txt"] |
|
34 | 34 | dirs = [u"adir/", "bdir/"] |
|
35 | 35 | |
|
36 | 36 | def setUp(self): |
|
37 | 37 | self.BASETESTDIR = tempfile.mkdtemp() |
|
38 | 38 | for fil in self.files: |
|
39 | 39 | with open(join(self.BASETESTDIR, fil), "w") as sfile: |
|
40 | 40 | sfile.write("pass\n") |
|
41 | 41 | for d in self.dirs: |
|
42 | 42 | os.mkdir(join(self.BASETESTDIR, d)) |
|
43 | 43 | |
|
44 | 44 | self.oldpath = py3compat.getcwd() |
|
45 | 45 | os.chdir(self.BASETESTDIR) |
|
46 | 46 | |
|
47 | 47 | def tearDown(self): |
|
48 | 48 | os.chdir(self.oldpath) |
|
49 | 49 | shutil.rmtree(self.BASETESTDIR) |
|
50 | 50 | |
|
51 | 51 | def test_1(self): |
|
52 | 52 | """Test magic_run_completer, should match two alternatives 
|
53 | 53 | """ |
|
54 | 54 | event = MockEvent(u"%run a") |
|
55 | 55 | mockself = None |
|
56 | 56 | match = set(magic_run_completer(mockself, event)) |
|
57 | 57 | self.assertEqual(match, {u"a.py", u"aao.py", u"adir/"}) |
|
58 | 58 | |
|
59 | 59 | def test_2(self): |
|
60 | 60 | """Test magic_run_completer, should match one alternative 
|
61 | 61 | """ |
|
62 | 62 | event = MockEvent(u"%run aa") |
|
63 | 63 | mockself = None |
|
64 | 64 | match = set(magic_run_completer(mockself, event)) |
|
65 | 65 | self.assertEqual(match, {u"aao.py"}) |
|
66 | 66 | |
|
67 | 67 | def test_3(self): |
|
68 | 68 | """Test magic_run_completer with unterminated " """ |
|
69 | 69 | event = MockEvent(u'%run "a') |
|
70 | 70 | mockself = None |
|
71 | 71 | match = set(magic_run_completer(mockself, event)) |
|
72 | 72 | self.assertEqual(match, {u"a.py", u"aao.py", u"adir/"}) |
|
73 | 73 | |
|
74 | 74 | def test_completion_more_args(self): |
|
75 | 75 | event = MockEvent(u'%run a.py ') |
|
76 | 76 | match = set(magic_run_completer(None, event)) |
|
77 | 77 | self.assertEqual(match, set(self.files + self.dirs)) |
|
78 | 78 | |
|
79 | 79 | def test_completion_in_dir(self): |
|
80 | 80 | # Github issue #3459 |
|
81 | 81 | event = MockEvent(u'%run a.py {}'.format(join(self.BASETESTDIR, 'a'))) |
|
82 | 82 | print(repr(event.line)) |
|
83 | 83 | match = set(magic_run_completer(None, event)) |
|
84 | 84 | # We specifically use replace here rather than normpath, because |
|
85 | 85 | # at one point there were duplicates 'adir' and 'adir/', and normpath |
|
86 | 86 | # would hide the failure for that. |
|
87 | 87 | self.assertEqual(match, {join(self.BASETESTDIR, f).replace('\\','/') |
|
88 | 88 | for f in (u'a.py', u'aao.py', u'aao.txt', u'adir/')}) |
|
89 | 89 | |
|
90 | 90 | class Test_magic_run_completer_nonascii(unittest.TestCase): |
|
91 | 91 | @onlyif_unicode_paths |
|
92 | 92 | def setUp(self): |
|
93 | 93 | self.BASETESTDIR = tempfile.mkdtemp() |
|
94 | 94 | for fil in [u"aaø.py", u"a.py", u"b.py"]: |
|
95 | 95 | with open(join(self.BASETESTDIR, fil), "w") as sfile: |
|
96 | 96 | sfile.write("pass\n") |
|
97 | 97 | self.oldpath = py3compat.getcwd() |
|
98 | 98 | os.chdir(self.BASETESTDIR) |
|
99 | 99 | |
|
100 | 100 | def tearDown(self): |
|
101 | 101 | os.chdir(self.oldpath) |
|
102 | 102 | shutil.rmtree(self.BASETESTDIR) |
|
103 | 103 | |
|
104 | 104 | @onlyif_unicode_paths |
|
105 | 105 | def test_1(self): |
|
106 | 106 | """Test magic_run_completer, should match two alternatives 
|
107 | 107 | """ |
|
108 | 108 | event = MockEvent(u"%run a") |
|
109 | 109 | mockself = None |
|
110 | 110 | match = set(magic_run_completer(mockself, event)) |
|
111 | 111 | self.assertEqual(match, {u"a.py", u"aaø.py"}) |
|
112 | 112 | |
|
113 | 113 | @onlyif_unicode_paths |
|
114 | 114 | def test_2(self): |
|
115 | 115 | """Test magic_run_completer, should match one alternative 
|
116 | 116 | """ |
|
117 | 117 | event = MockEvent(u"%run aa") |
|
118 | 118 | mockself = None |
|
119 | 119 | match = set(magic_run_completer(mockself, event)) |
|
120 | 120 | self.assertEqual(match, {u"aaø.py"}) |
|
121 | 121 | |
|
122 | 122 | @onlyif_unicode_paths |
|
123 | 123 | def test_3(self): |
|
124 | 124 | """Test magic_run_completer with unterminated " """ |
|
125 | 125 | event = MockEvent(u'%run "a') |
|
126 | 126 | mockself = None |
|
127 | 127 | match = set(magic_run_completer(mockself, event)) |
|
128 | 128 | self.assertEqual(match, {u"a.py", u"aaø.py"}) |
|
129 | 129 | |
|
130 | 130 | # module_completer: |
|
131 | 131 | |
|
132 | 132 | def test_import_invalid_module(): |
|
133 | 133 | """Testing of issue https://github.com/ipython/ipython/issues/1107""" |
|
134 | 134 | invalid_module_names = {'foo-bar', 'foo:bar', '10foo'} |
|
135 | 135 | valid_module_names = {'foobar'} |
|
136 | 136 | with TemporaryDirectory() as tmpdir: |
|
137 | 137 | sys.path.insert( 0, tmpdir ) |
|
138 | 138 | for name in invalid_module_names | valid_module_names: |
|
139 | 139 | filename = os.path.join(tmpdir, name + '.py') |
|
140 | 140 | open(filename, 'w').close() |
|
141 | 141 | |
|
142 | 142 | s = set( module_completion('import foo') ) |
|
143 | 143 | intersection = s.intersection(invalid_module_names) |
|
144 | 144 | nt.assert_equal(intersection, set()) |
|
145 | 145 | |
|
146 | 146 | assert valid_module_names.issubset(s), valid_module_names.intersection(s) |
|
147 | 147 | |
|
148 | 148 | |
|
149 | 149 | def test_bad_module_all(): |
|
150 | 150 | """Test module with invalid __all__ |
|
151 | 151 | |
|
152 | 152 | https://github.com/ipython/ipython/issues/9678 |
|
153 | 153 | """ |
|
154 | 154 | testsdir = os.path.dirname(__file__) |
|
155 | 155 | sys.path.insert(0, testsdir) |
|
156 | 156 | try: |
|
157 | 157 | results = module_completion('from bad_all import ') |
|
158 | 158 | nt.assert_in('puppies', results) |
|
159 | 159 | for r in results: |
|
160 |
nt.assert_is_instance(r, |
|
|
160 | nt.assert_is_instance(r, str) | |
|
161 | 161 | finally: |
|
162 | 162 | sys.path.remove(testsdir) |
@@ -1,614 +1,614 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Tests for the inputsplitter module.""" |
|
3 | 3 | |
|
4 | 4 | |
|
5 | 5 | # Copyright (c) IPython Development Team. |
|
6 | 6 | # Distributed under the terms of the Modified BSD License. |
|
7 | 7 | |
|
8 | 8 | import unittest |
|
9 | 9 | import sys |
|
10 | 10 | |
|
11 | 11 | import nose.tools as nt |
|
12 | 12 | |
|
13 | 13 | from IPython.core import inputsplitter as isp |
|
14 | 14 | from IPython.core.inputtransformer import InputTransformer |
|
15 | 15 | from IPython.core.tests.test_inputtransformer import syntax, syntax_ml |
|
16 | 16 | from IPython.testing import tools as tt |
|
17 | 17 | from IPython.utils import py3compat |
|
18 |
from IPython.utils.py3compat import |
|
|
18 | from IPython.utils.py3compat import input | |
|
19 | 19 | |
|
20 | 20 | #----------------------------------------------------------------------------- |
|
21 | 21 | # Semi-complete examples (also used as tests) |
|
22 | 22 | #----------------------------------------------------------------------------- |
|
23 | 23 | |
|
24 | 24 | # Note: at the bottom, there's a slightly more complete version of this that |
|
25 | 25 | # can be useful during development of code here. |
|
26 | 26 | |
|
27 | 27 | def mini_interactive_loop(input_func): |
|
28 | 28 | """Minimal example of the logic of an interactive interpreter loop. |
|
29 | 29 | |
|
30 | 30 | This serves as an example, and it is used by the test system with a fake |
|
31 | 31 | raw_input that simulates interactive input.""" |
|
32 | 32 | |
|
33 | 33 | from IPython.core.inputsplitter import InputSplitter |
|
34 | 34 | |
|
35 | 35 | isp = InputSplitter() |
|
36 | 36 | # In practice, this input loop would be wrapped in an outside loop to read |
|
37 | 37 | # input indefinitely, until some exit/quit command was issued. Here we |
|
38 | 38 | # only illustrate the basic inner loop. |
|
39 | 39 | while isp.push_accepts_more(): |
|
40 | 40 | indent = ' '*isp.indent_spaces |
|
41 | 41 | prompt = '>>> ' + indent |
|
42 | 42 | line = indent + input_func(prompt) |
|
43 | 43 | isp.push(line) |
|
44 | 44 | |
|
45 | 45 | # Here we just return input so we can use it in a test suite, but a real |
|
46 | 46 | # interpreter would instead send it for execution somewhere. |
|
47 | 47 | src = isp.source_reset() |
|
48 | 48 | #print 'Input source was:\n', src # dbg |
|
49 | 49 | return src |
|
50 | 50 | |
|
51 | 51 | #----------------------------------------------------------------------------- |
|
52 | 52 | # Test utilities, just for local use |
|
53 | 53 | #----------------------------------------------------------------------------- |
|
54 | 54 | |
|
55 | 55 | def assemble(block): |
|
56 | 56 | """Assemble a block into multi-line sub-blocks.""" |
|
57 | 57 | return ['\n'.join(sub_block)+'\n' for sub_block in block] |
|
58 | 58 | |
|
59 | 59 | |
|
60 | 60 | def pseudo_input(lines): |
|
61 | 61 | """Return a function that acts like raw_input but feeds the input list.""" |
|
62 | 62 | ilines = iter(lines) |
|
63 | 63 | def raw_in(prompt): |
|
64 | 64 | try: |
|
65 | 65 | return next(ilines) |
|
66 | 66 | except StopIteration: |
|
67 | 67 | return '' |
|
68 | 68 | return raw_in |
|
69 | 69 | |
|
70 | 70 | #----------------------------------------------------------------------------- |
|
71 | 71 | # Tests |
|
72 | 72 | #----------------------------------------------------------------------------- |
|
73 | 73 | def test_spaces(): |
|
74 | 74 | tests = [('', 0), |
|
75 | 75 | (' ', 1), |
|
76 | 76 | ('\n', 0), |
|
77 | 77 | (' \n', 1), |
|
78 | 78 | ('x', 0), |
|
79 | 79 | (' x', 1), |
|
80 | 80 | (' x',2), |
|
81 | 81 | (' x',4), |
|
82 | 82 | # Note: tabs are counted as a single whitespace! |
|
83 | 83 | ('\tx', 1), |
|
84 | 84 | ('\t x', 2), |
|
85 | 85 | ] |
|
86 | 86 | tt.check_pairs(isp.num_ini_spaces, tests) |
|
87 | 87 | |
|
88 | 88 | |
|
89 | 89 | def test_remove_comments(): |
|
90 | 90 | tests = [('text', 'text'), |
|
91 | 91 | ('text # comment', 'text '), |
|
92 | 92 | ('text # comment\n', 'text \n'), |
|
93 | 93 | ('text # comment \n', 'text \n'), |
|
94 | 94 | ('line # c \nline\n','line \nline\n'), |
|
95 | 95 | ('line # c \nline#c2 \nline\nline #c\n\n', |
|
96 | 96 | 'line \nline\nline\nline \n\n'), |
|
97 | 97 | ] |
|
98 | 98 | tt.check_pairs(isp.remove_comments, tests) |
|
99 | 99 | |
|
100 | 100 | |
|
101 | 101 | def test_get_input_encoding(): |
|
102 | 102 | encoding = isp.get_input_encoding() |
|
103 |
nt.assert_true(isinstance(encoding, str |
|
|
103 | nt.assert_true(isinstance(encoding, str)) | |
|
104 | 104 | # simple-minded check that at least encoding a simple string works with the |
|
105 | 105 | # encoding we got. |
|
106 | 106 | nt.assert_equal(u'test'.encode(encoding), b'test') |
|
107 | 107 | |
|
108 | 108 | |
|
109 | 109 | class NoInputEncodingTestCase(unittest.TestCase): |
|
110 | 110 | def setUp(self): |
|
111 | 111 | self.old_stdin = sys.stdin |
|
112 | 112 | class X: pass |
|
113 | 113 | fake_stdin = X() |
|
114 | 114 | sys.stdin = fake_stdin |
|
115 | 115 | |
|
116 | 116 | def test(self): |
|
117 | 117 | # Verify that if sys.stdin has no 'encoding' attribute we do the right |
|
118 | 118 | # thing |
|
119 | 119 | enc = isp.get_input_encoding() |
|
120 | 120 | self.assertEqual(enc, 'ascii') |
|
121 | 121 | |
|
122 | 122 | def tearDown(self): |
|
123 | 123 | sys.stdin = self.old_stdin |
|
124 | 124 | |
|
125 | 125 | |
|
126 | 126 | class InputSplitterTestCase(unittest.TestCase): |
|
127 | 127 | def setUp(self): |
|
128 | 128 | self.isp = isp.InputSplitter() |
|
129 | 129 | |
|
130 | 130 | def test_reset(self): |
|
131 | 131 | isp = self.isp |
|
132 | 132 | isp.push('x=1') |
|
133 | 133 | isp.reset() |
|
134 | 134 | self.assertEqual(isp._buffer, []) |
|
135 | 135 | self.assertEqual(isp.indent_spaces, 0) |
|
136 | 136 | self.assertEqual(isp.source, '') |
|
137 | 137 | self.assertEqual(isp.code, None) |
|
138 | 138 | self.assertEqual(isp._is_complete, False) |
|
139 | 139 | |
|
140 | 140 | def test_source(self): |
|
141 | 141 | self.isp._store('1') |
|
142 | 142 | self.isp._store('2') |
|
143 | 143 | self.assertEqual(self.isp.source, '1\n2\n') |
|
144 | 144 | self.assertEqual(len(self.isp._buffer)>0, True) |
|
145 | 145 | self.assertEqual(self.isp.source_reset(), '1\n2\n') |
|
146 | 146 | self.assertEqual(self.isp._buffer, []) |
|
147 | 147 | self.assertEqual(self.isp.source, '') |
|
148 | 148 | |
|
149 | 149 | def test_indent(self): |
|
150 | 150 | isp = self.isp # shorthand |
|
151 | 151 | isp.push('x=1') |
|
152 | 152 | self.assertEqual(isp.indent_spaces, 0) |
|
153 | 153 | isp.push('if 1:\n x=1') |
|
154 | 154 | self.assertEqual(isp.indent_spaces, 4) |
|
155 | 155 | isp.push('y=2\n') |
|
156 | 156 | self.assertEqual(isp.indent_spaces, 0) |
|
157 | 157 | |
|
158 | 158 | def test_indent2(self): |
|
159 | 159 | isp = self.isp |
|
160 | 160 | isp.push('if 1:') |
|
161 | 161 | self.assertEqual(isp.indent_spaces, 4) |
|
162 | 162 | isp.push(' x=1') |
|
163 | 163 | self.assertEqual(isp.indent_spaces, 4) |
|
164 | 164 | # Blank lines shouldn't change the indent level |
|
165 | 165 | isp.push(' '*2) |
|
166 | 166 | self.assertEqual(isp.indent_spaces, 4) |
|
167 | 167 | |
|
168 | 168 | def test_indent3(self): |
|
169 | 169 | isp = self.isp |
|
170 | 170 | # When a multiline statement contains parens or multiline strings, we |
|
171 | 171 | # shouldn't get confused. |
|
172 | 172 | isp.push("if 1:") |
|
173 | 173 | isp.push(" x = (1+\n 2)") |
|
174 | 174 | self.assertEqual(isp.indent_spaces, 4) |
|
175 | 175 | |
|
176 | 176 | def test_indent4(self): |
|
177 | 177 | isp = self.isp |
|
178 | 178 | # whitespace after ':' should not screw up indent level |
|
179 | 179 | isp.push('if 1: \n x=1') |
|
180 | 180 | self.assertEqual(isp.indent_spaces, 4) |
|
181 | 181 | isp.push('y=2\n') |
|
182 | 182 | self.assertEqual(isp.indent_spaces, 0) |
|
183 | 183 | isp.push('if 1:\t\n x=1') |
|
184 | 184 | self.assertEqual(isp.indent_spaces, 4) |
|
185 | 185 | isp.push('y=2\n') |
|
186 | 186 | self.assertEqual(isp.indent_spaces, 0) |
|
187 | 187 | |
|
188 | 188 | def test_dedent_pass(self): |
|
189 | 189 | isp = self.isp # shorthand |
|
190 | 190 | # should NOT cause dedent |
|
191 | 191 | isp.push('if 1:\n passes = 5') |
|
192 | 192 | self.assertEqual(isp.indent_spaces, 4) |
|
193 | 193 | isp.push('if 1:\n pass') |
|
194 | 194 | self.assertEqual(isp.indent_spaces, 0) |
|
195 | 195 | isp.push('if 1:\n pass ') |
|
196 | 196 | self.assertEqual(isp.indent_spaces, 0) |
|
197 | 197 | |
|
198 | 198 | def test_dedent_break(self): |
|
199 | 199 | isp = self.isp # shorthand |
|
200 | 200 | # should NOT cause dedent |
|
201 | 201 | isp.push('while 1:\n breaks = 5') |
|
202 | 202 | self.assertEqual(isp.indent_spaces, 4) |
|
203 | 203 | isp.push('while 1:\n break') |
|
204 | 204 | self.assertEqual(isp.indent_spaces, 0) |
|
205 | 205 | isp.push('while 1:\n break ') |
|
206 | 206 | self.assertEqual(isp.indent_spaces, 0) |
|
207 | 207 | |
|
208 | 208 | def test_dedent_continue(self): |
|
209 | 209 | isp = self.isp # shorthand |
|
210 | 210 | # should NOT cause dedent |
|
211 | 211 | isp.push('while 1:\n continues = 5') |
|
212 | 212 | self.assertEqual(isp.indent_spaces, 4) |
|
213 | 213 | isp.push('while 1:\n continue') |
|
214 | 214 | self.assertEqual(isp.indent_spaces, 0) |
|
215 | 215 | isp.push('while 1:\n continue ') |
|
216 | 216 | self.assertEqual(isp.indent_spaces, 0) |
|
217 | 217 | |
|
218 | 218 | def test_dedent_raise(self): |
|
219 | 219 | isp = self.isp # shorthand |
|
220 | 220 | # should NOT cause dedent |
|
221 | 221 | isp.push('if 1:\n raised = 4') |
|
222 | 222 | self.assertEqual(isp.indent_spaces, 4) |
|
223 | 223 | isp.push('if 1:\n raise TypeError()') |
|
224 | 224 | self.assertEqual(isp.indent_spaces, 0) |
|
225 | 225 | isp.push('if 1:\n raise') |
|
226 | 226 | self.assertEqual(isp.indent_spaces, 0) |
|
227 | 227 | isp.push('if 1:\n raise ') |
|
228 | 228 | self.assertEqual(isp.indent_spaces, 0) |
|
229 | 229 | |
|
230 | 230 | def test_dedent_return(self): |
|
231 | 231 | isp = self.isp # shorthand |
|
232 | 232 | # should NOT cause dedent |
|
233 | 233 | isp.push('if 1:\n returning = 4') |
|
234 | 234 | self.assertEqual(isp.indent_spaces, 4) |
|
235 | 235 | isp.push('if 1:\n return 5 + 493') |
|
236 | 236 | self.assertEqual(isp.indent_spaces, 0) |
|
237 | 237 | isp.push('if 1:\n return') |
|
238 | 238 | self.assertEqual(isp.indent_spaces, 0) |
|
239 | 239 | isp.push('if 1:\n return ') |
|
240 | 240 | self.assertEqual(isp.indent_spaces, 0) |
|
241 | 241 | isp.push('if 1:\n return(0)') |
|
242 | 242 | self.assertEqual(isp.indent_spaces, 0) |
|
243 | 243 | |
|
244 | 244 | def test_push(self): |
|
245 | 245 | isp = self.isp |
|
246 | 246 | self.assertEqual(isp.push('x=1'), True) |
|
247 | 247 | |
|
248 | 248 | def test_push2(self): |
|
249 | 249 | isp = self.isp |
|
250 | 250 | self.assertEqual(isp.push('if 1:'), False) |
|
251 | 251 | for line in [' x=1', '# a comment', ' y=2']: |
|
252 | 252 | print(line) |
|
253 | 253 | self.assertEqual(isp.push(line), True) |
|
254 | 254 | |
|
255 | 255 | def test_push3(self): |
|
256 | 256 | isp = self.isp |
|
257 | 257 | isp.push('if True:') |
|
258 | 258 | isp.push(' a = 1') |
|
259 | 259 | self.assertEqual(isp.push('b = [1,'), False) |
|
260 | 260 | |
|
261 | 261 | def test_push_accepts_more(self): |
|
262 | 262 | isp = self.isp |
|
263 | 263 | isp.push('x=1') |
|
264 | 264 | self.assertEqual(isp.push_accepts_more(), False) |
|
265 | 265 | |
|
266 | 266 | def test_push_accepts_more2(self): |
|
267 | 267 | isp = self.isp |
|
268 | 268 | isp.push('if 1:') |
|
269 | 269 | self.assertEqual(isp.push_accepts_more(), True) |
|
270 | 270 | isp.push(' x=1') |
|
271 | 271 | self.assertEqual(isp.push_accepts_more(), True) |
|
272 | 272 | isp.push('') |
|
273 | 273 | self.assertEqual(isp.push_accepts_more(), False) |
|
274 | 274 | |
|
275 | 275 | def test_push_accepts_more3(self): |
|
276 | 276 | isp = self.isp |
|
277 | 277 | isp.push("x = (2+\n3)") |
|
278 | 278 | self.assertEqual(isp.push_accepts_more(), False) |
|
279 | 279 | |
|
280 | 280 | def test_push_accepts_more4(self): |
|
281 | 281 | isp = self.isp |
|
282 | 282 | # When a multiline statement contains parens or multiline strings, we |
|
283 | 283 | # shouldn't get confused. |
|
284 | 284 | # FIXME: we should be able to better handle de-dents in statements like |
|
285 | 285 | # multiline strings and multiline expressions (continued with \ or |
|
286 | 286 | # parens). Right now we aren't handling the indentation tracking quite |
|
287 | 287 | # correctly with this, though in practice it may not be too much of a |
|
288 | 288 | # problem. We'll need to see. |
|
289 | 289 | isp.push("if 1:") |
|
290 | 290 | isp.push(" x = (2+") |
|
291 | 291 | isp.push(" 3)") |
|
292 | 292 | self.assertEqual(isp.push_accepts_more(), True) |
|
293 | 293 | isp.push(" y = 3") |
|
294 | 294 | self.assertEqual(isp.push_accepts_more(), True) |
|
295 | 295 | isp.push('') |
|
296 | 296 | self.assertEqual(isp.push_accepts_more(), False) |
|
297 | 297 | |
|
298 | 298 | def test_push_accepts_more5(self): |
|
299 | 299 | isp = self.isp |
|
300 | 300 | isp.push('try:') |
|
301 | 301 | isp.push(' a = 5') |
|
302 | 302 | isp.push('except:') |
|
303 | 303 | isp.push(' raise') |
|
304 | 304 | # We want to be able to add an else: block at this point, so it should |
|
305 | 305 | # wait for a blank line. |
|
306 | 306 | self.assertEqual(isp.push_accepts_more(), True) |
|
307 | 307 | |
|
308 | 308 | def test_continuation(self): |
|
309 | 309 | isp = self.isp |
|
310 | 310 | isp.push("import os, \\") |
|
311 | 311 | self.assertEqual(isp.push_accepts_more(), True) |
|
312 | 312 | isp.push("sys") |
|
313 | 313 | self.assertEqual(isp.push_accepts_more(), False) |
|
314 | 314 | |
|
315 | 315 | def test_syntax_error(self): |
|
316 | 316 | isp = self.isp |
|
317 | 317 | # Syntax errors immediately produce a 'ready' block, so the invalid |
|
318 | 318 | # Python can be sent to the kernel for evaluation with possible ipython |
|
319 | 319 | # special-syntax conversion. |
|
320 | 320 | isp.push('run foo') |
|
321 | 321 | self.assertEqual(isp.push_accepts_more(), False) |
|
322 | 322 | |
|
323 | 323 | def test_unicode(self): |
|
324 | 324 | self.isp.push(u"Pérez") |
|
325 | 325 | self.isp.push(u'\xc3\xa9') |
|
326 | 326 | self.isp.push(u"u'\xc3\xa9'") |
|
327 | 327 | |
|
328 | 328 | def test_line_continuation(self): |
|
329 | 329 | """ Test issue #2108.""" |
|
330 | 330 | isp = self.isp |
|
331 | 331 | # A blank line after a line continuation should not accept more |
|
332 | 332 | isp.push("1 \\\n\n") |
|
333 | 333 | self.assertEqual(isp.push_accepts_more(), False) |
|
334 | 334 | # Whitespace after a \ is a SyntaxError. The only way to test that |
|
335 | 335 | # here is to test that push doesn't accept more (as with |
|
336 | 336 | # test_syntax_error() above). |
|
337 | 337 | isp.push(r"1 \ ") |
|
338 | 338 | self.assertEqual(isp.push_accepts_more(), False) |
|
339 | 339 | # Even if the line is continuable (c.f. the regular Python |
|
340 | 340 | # interpreter) |
|
341 | 341 | isp.push(r"(1 \ ") |
|
342 | 342 | self.assertEqual(isp.push_accepts_more(), False) |
|
343 | 343 | |
|
344 | 344 | def test_check_complete(self): |
|
345 | 345 | isp = self.isp |
|
346 | 346 | self.assertEqual(isp.check_complete("a = 1"), ('complete', None)) |
|
347 | 347 | self.assertEqual(isp.check_complete("for a in range(5):"), ('incomplete', 4)) |
|
348 | 348 | self.assertEqual(isp.check_complete("raise = 2"), ('invalid', None)) |
|
349 | 349 | self.assertEqual(isp.check_complete("a = [1,\n2,"), ('incomplete', 0)) |
|
350 | 350 | self.assertEqual(isp.check_complete("def a():\n x=1\n global x"), ('invalid', None)) |
|
351 | 351 | |
|
352 | 352 | class InteractiveLoopTestCase(unittest.TestCase): |
|
353 | 353 | """Tests for an interactive loop like a python shell. |
|
354 | 354 | """ |
|
355 | 355 | def check_ns(self, lines, ns): |
|
356 | 356 | """Validate that the given input lines produce the resulting namespace. |
|
357 | 357 | |
|
358 | 358 | Note: the input lines are given exactly as they would be typed in an |
|
359 | 359 | auto-indenting environment, as mini_interactive_loop above already does |
|
360 | 360 | auto-indenting and prepends spaces to the input. |
|
361 | 361 | """ |
|
362 | 362 | src = mini_interactive_loop(pseudo_input(lines)) |
|
363 | 363 | test_ns = {} |
|
364 | 364 | exec(src, test_ns) |
|
365 | 365 | # We can't check that the provided ns is identical to the test_ns, |
|
366 | 366 | # because Python fills test_ns with extra keys (copyright, etc). But |
|
367 | 367 | # we can check that the given dict is *contained* in test_ns |
|
368 | 368 | for k,v in ns.items(): |
|
369 | 369 | self.assertEqual(test_ns[k], v) |
|
370 | 370 | |
|
371 | 371 | def test_simple(self): |
|
372 | 372 | self.check_ns(['x=1'], dict(x=1)) |
|
373 | 373 | |
|
374 | 374 | def test_simple2(self): |
|
375 | 375 | self.check_ns(['if 1:', 'x=2'], dict(x=2)) |
|
376 | 376 | |
|
377 | 377 | def test_xy(self): |
|
378 | 378 | self.check_ns(['x=1; y=2'], dict(x=1, y=2)) |
|
379 | 379 | |
|
380 | 380 | def test_abc(self): |
|
381 | 381 | self.check_ns(['if 1:','a=1','b=2','c=3'], dict(a=1, b=2, c=3)) |
|
382 | 382 | |
|
383 | 383 | def test_multi(self): |
|
384 | 384 | self.check_ns(['x =(1+','1+','2)'], dict(x=4)) |
|
385 | 385 | |
|
386 | 386 | |
|
387 | 387 | class IPythonInputTestCase(InputSplitterTestCase): |
|
388 | 388 | """By just creating a new class whose .isp is a different instance, we |
|
389 | 389 | re-run the same test battery on the new input splitter. |
|
390 | 390 | |
|
391 | 391 | In addition, this runs the tests over the syntax and syntax_ml dicts that |
|
392 | 392 | were tested by individual functions, as part of the OO interface. |
|
393 | 393 | |
|
394 | 394 | It also makes some checks on the raw buffer storage. |
|
395 | 395 | """ |
|
396 | 396 | |
|
397 | 397 | def setUp(self): |
|
398 | 398 | self.isp = isp.IPythonInputSplitter() |
|
399 | 399 | |
|
400 | 400 | def test_syntax(self): |
|
401 | 401 | """Call all single-line syntax tests from the main object""" |
|
402 | 402 | isp = self.isp |
|
403 | 403 | for example in syntax.values(): |
|
404 | 404 | for raw, out_t in example: |
|
405 | 405 | if raw.startswith(' '): |
|
406 | 406 | continue |
|
407 | 407 | |
|
408 | 408 | isp.push(raw+'\n') |
|
409 | 409 | out_raw = isp.source_raw |
|
410 | 410 | out = isp.source_reset() |
|
411 | 411 | self.assertEqual(out.rstrip(), out_t, |
|
412 | 412 | tt.pair_fail_msg.format("inputsplitter",raw, out_t, out)) |
|
413 | 413 | self.assertEqual(out_raw.rstrip(), raw.rstrip()) |
|
414 | 414 | |
|
415 | 415 | def test_syntax_multiline(self): |
|
416 | 416 | isp = self.isp |
|
417 | 417 | for example in syntax_ml.values(): |
|
418 | 418 | for line_pairs in example: |
|
419 | 419 | out_t_parts = [] |
|
420 | 420 | raw_parts = [] |
|
421 | 421 | for lraw, out_t_part in line_pairs: |
|
422 | 422 | if out_t_part is not None: |
|
423 | 423 | out_t_parts.append(out_t_part) |
|
424 | 424 | |
|
425 | 425 | if lraw is not None: |
|
426 | 426 | isp.push(lraw) |
|
427 | 427 | raw_parts.append(lraw) |
|
428 | 428 | |
|
429 | 429 | out_raw = isp.source_raw |
|
430 | 430 | out = isp.source_reset() |
|
431 | 431 | out_t = '\n'.join(out_t_parts).rstrip() |
|
432 | 432 | raw = '\n'.join(raw_parts).rstrip() |
|
433 | 433 | self.assertEqual(out.rstrip(), out_t) |
|
434 | 434 | self.assertEqual(out_raw.rstrip(), raw) |
|
435 | 435 | |
|
436 | 436 | def test_syntax_multiline_cell(self): |
|
437 | 437 | isp = self.isp |
|
438 | 438 | for example in syntax_ml.values(): |
|
439 | 439 | |
|
440 | 440 | out_t_parts = [] |
|
441 | 441 | for line_pairs in example: |
|
442 | 442 | raw = '\n'.join(r for r, _ in line_pairs if r is not None) |
|
443 | 443 | out_t = '\n'.join(t for _,t in line_pairs if t is not None) |
|
444 | 444 | out = isp.transform_cell(raw) |
|
445 | 445 | # Match ignoring trailing whitespace |
|
446 | 446 | self.assertEqual(out.rstrip(), out_t.rstrip()) |
|
447 | 447 | |
|
448 | 448 | def test_cellmagic_preempt(self): |
|
449 | 449 | isp = self.isp |
|
450 | 450 | for raw, name, line, cell in [ |
|
451 | 451 | ("%%cellm a\nIn[1]:", u'cellm', u'a', u'In[1]:'), |
|
452 | 452 | ("%%cellm \nline\n>>> hi", u'cellm', u'', u'line\n>>> hi'), |
|
453 | 453 | (">>> %%cellm \nline\n>>> hi", u'cellm', u'', u'line\nhi'), |
|
454 | 454 | ("%%cellm \n>>> hi", u'cellm', u'', u'>>> hi'), |
|
455 | 455 | ("%%cellm \nline1\nline2", u'cellm', u'', u'line1\nline2'), |
|
456 | 456 | ("%%cellm \nline1\\\\\nline2", u'cellm', u'', u'line1\\\\\nline2'), |
|
457 | 457 | ]: |
|
458 | 458 | expected = "get_ipython().run_cell_magic(%r, %r, %r)" % ( |
|
459 | 459 | name, line, cell |
|
460 | 460 | ) |
|
461 | 461 | out = isp.transform_cell(raw) |
|
462 | 462 | self.assertEqual(out.rstrip(), expected.rstrip()) |
|
463 | 463 | |
|
464 | 464 | def test_multiline_passthrough(self): |
|
465 | 465 | isp = self.isp |
|
466 | 466 | class CommentTransformer(InputTransformer): |
|
467 | 467 | def __init__(self): |
|
468 | 468 | self._lines = [] |
|
469 | 469 | |
|
470 | 470 | def push(self, line): |
|
471 | 471 | self._lines.append(line + '#') |
|
472 | 472 | |
|
473 | 473 | def reset(self): |
|
474 | 474 | text = '\n'.join(self._lines) |
|
475 | 475 | self._lines = [] |
|
476 | 476 | return text |
|
477 | 477 | |
|
478 | 478 | isp.physical_line_transforms.insert(0, CommentTransformer()) |
|
479 | 479 | |
|
480 | 480 | for raw, expected in [ |
|
481 | 481 | ("a=5", "a=5#"), |
|
482 | 482 | ("%ls foo", "get_ipython().magic(%r)" % u'ls foo#'), |
|
483 | 483 | ("!ls foo\n%ls bar", "get_ipython().system(%r)\nget_ipython().magic(%r)" % ( |
|
484 | 484 | u'ls foo#', u'ls bar#' |
|
485 | 485 | )), |
|
486 | 486 | ("1\n2\n3\n%ls foo\n4\n5", "1#\n2#\n3#\nget_ipython().magic(%r)\n4#\n5#" % u'ls foo#'), |
|
487 | 487 | ]: |
|
488 | 488 | out = isp.transform_cell(raw) |
|
489 | 489 | self.assertEqual(out.rstrip(), expected.rstrip()) |
|
490 | 490 | |
|
491 | 491 | #----------------------------------------------------------------------------- |
|
492 | 492 | # Main - use as a script, mostly for developer experiments |
|
493 | 493 | #----------------------------------------------------------------------------- |
|
494 | 494 | |
|
495 | 495 | if __name__ == '__main__': |
|
496 | 496 | # A simple demo for interactive experimentation. This code will not get |
|
497 | 497 | # picked up by any test suite. |
|
498 | 498 | from IPython.core.inputsplitter import IPythonInputSplitter |
|
499 | 499 | |
|
500 | 500 | # configure here the syntax to use, prompt and whether to autoindent |
|
501 | 501 | #isp, start_prompt = InputSplitter(), '>>> ' |
|
502 | 502 | isp, start_prompt = IPythonInputSplitter(), 'In> ' |
|
503 | 503 | |
|
504 | 504 | autoindent = True |
|
505 | 505 | #autoindent = False |
|
506 | 506 | |
|
507 | 507 | try: |
|
508 | 508 | while True: |
|
509 | 509 | prompt = start_prompt |
|
510 | 510 | while isp.push_accepts_more(): |
|
511 | 511 | indent = ' '*isp.indent_spaces |
|
512 | 512 | if autoindent: |
|
513 | 513 | line = indent + input(prompt+indent) |
|
514 | 514 | else: |
|
515 | 515 | line = input(prompt) |
|
516 | 516 | isp.push(line) |
|
517 | 517 | prompt = '... ' |
|
518 | 518 | |
|
519 | 519 | # Here we just return input so we can use it in a test suite, but a |
|
520 | 520 | # real interpreter would instead send it for execution somewhere. |
|
521 | 521 | #src = isp.source; raise EOFError # dbg |
|
522 | 522 | raw = isp.source_raw |
|
523 | 523 | src = isp.source_reset() |
|
524 | 524 | print('Input source was:\n', src) |
|
525 | 525 | print('Raw source was:\n', raw) |
|
526 | 526 | except EOFError: |
|
527 | 527 | print('Bye') |
|
528 | 528 | |
|
529 | 529 | # Tests for cell magics support |
|
530 | 530 | |
|
531 | 531 | def test_last_blank(): |
|
532 | 532 | nt.assert_false(isp.last_blank('')) |
|
533 | 533 | nt.assert_false(isp.last_blank('abc')) |
|
534 | 534 | nt.assert_false(isp.last_blank('abc\n')) |
|
535 | 535 | nt.assert_false(isp.last_blank('abc\na')) |
|
536 | 536 | |
|
537 | 537 | nt.assert_true(isp.last_blank('\n')) |
|
538 | 538 | nt.assert_true(isp.last_blank('\n ')) |
|
539 | 539 | nt.assert_true(isp.last_blank('abc\n ')) |
|
540 | 540 | nt.assert_true(isp.last_blank('abc\n\n')) |
|
541 | 541 | nt.assert_true(isp.last_blank('abc\nd\n\n')) |
|
542 | 542 | nt.assert_true(isp.last_blank('abc\nd\ne\n\n')) |
|
543 | 543 | nt.assert_true(isp.last_blank('abc \n \n \n\n')) |
|
544 | 544 | |
|
545 | 545 | |
|
546 | 546 | def test_last_two_blanks(): |
|
547 | 547 | nt.assert_false(isp.last_two_blanks('')) |
|
548 | 548 | nt.assert_false(isp.last_two_blanks('abc')) |
|
549 | 549 | nt.assert_false(isp.last_two_blanks('abc\n')) |
|
550 | 550 | nt.assert_false(isp.last_two_blanks('abc\n\na')) |
|
551 | 551 | nt.assert_false(isp.last_two_blanks('abc\n \n')) |
|
552 | 552 | nt.assert_false(isp.last_two_blanks('abc\n\n')) |
|
553 | 553 | |
|
554 | 554 | nt.assert_true(isp.last_two_blanks('\n\n')) |
|
555 | 555 | nt.assert_true(isp.last_two_blanks('\n\n ')) |
|
556 | 556 | nt.assert_true(isp.last_two_blanks('\n \n')) |
|
557 | 557 | nt.assert_true(isp.last_two_blanks('abc\n\n ')) |
|
558 | 558 | nt.assert_true(isp.last_two_blanks('abc\n\n\n')) |
|
559 | 559 | nt.assert_true(isp.last_two_blanks('abc\n\n \n')) |
|
560 | 560 | nt.assert_true(isp.last_two_blanks('abc\n\n \n ')) |
|
561 | 561 | nt.assert_true(isp.last_two_blanks('abc\n\n \n \n')) |
|
562 | 562 | nt.assert_true(isp.last_two_blanks('abc\nd\n\n\n')) |
|
563 | 563 | nt.assert_true(isp.last_two_blanks('abc\nd\ne\nf\n\n\n')) |
|
564 | 564 | |
|
565 | 565 | |
|
566 | 566 | class CellMagicsCommon(object): |
|
567 | 567 | |
|
568 | 568 | def test_whole_cell(self): |
|
569 | 569 | src = "%%cellm line\nbody\n" |
|
570 | 570 | out = self.sp.transform_cell(src) |
|
571 | 571 | ref = u"get_ipython().run_cell_magic({u}'cellm', {u}'line', {u}'body')\n" |
|
572 | 572 | nt.assert_equal(out, py3compat.u_format(ref)) |
|
573 | 573 | |
|
574 | 574 | def test_cellmagic_help(self): |
|
575 | 575 | self.sp.push('%%cellm?') |
|
576 | 576 | nt.assert_false(self.sp.push_accepts_more()) |
|
577 | 577 | |
|
578 | 578 | def tearDown(self): |
|
579 | 579 | self.sp.reset() |
|
580 | 580 | |
|
581 | 581 | |
|
582 | 582 | class CellModeCellMagics(CellMagicsCommon, unittest.TestCase): |
|
583 | 583 | sp = isp.IPythonInputSplitter(line_input_checker=False) |
|
584 | 584 | |
|
585 | 585 | def test_incremental(self): |
|
586 | 586 | sp = self.sp |
|
587 | 587 | sp.push('%%cellm firstline\n') |
|
588 | 588 | nt.assert_true(sp.push_accepts_more()) #1 |
|
589 | 589 | sp.push('line2\n') |
|
590 | 590 | nt.assert_true(sp.push_accepts_more()) #2 |
|
591 | 591 | sp.push('\n') |
|
592 | 592 | # This should accept a blank line and carry on until the cell is reset |
|
593 | 593 | nt.assert_true(sp.push_accepts_more()) #3 |
|
594 | 594 | |
|
595 | 595 | def test_no_strip_coding(self): |
|
596 | 596 | src = '\n'.join([ |
|
597 | 597 | '%%writefile foo.py', |
|
598 | 598 | '# coding: utf-8', |
|
599 | 599 | 'print(u"üñîçø∂é")', |
|
600 | 600 | ]) |
|
601 | 601 | out = self.sp.transform_cell(src) |
|
602 | 602 | nt.assert_in('# coding: utf-8', out) |
|
603 | 603 | |
|
604 | 604 | |
|
605 | 605 | class LineModeCellMagics(CellMagicsCommon, unittest.TestCase): |
|
606 | 606 | sp = isp.IPythonInputSplitter(line_input_checker=True) |
|
607 | 607 | |
|
608 | 608 | def test_incremental(self): |
|
609 | 609 | sp = self.sp |
|
610 | 610 | sp.push('%%cellm line2\n') |
|
611 | 611 | nt.assert_true(sp.push_accepts_more()) #1 |
|
612 | 612 | sp.push('\n') |
|
613 | 613 | # In this case, a blank line should end the cell magic |
|
614 | 614 | nt.assert_false(sp.push_accepts_more()) #2 |
@@ -1,1488 +1,1488 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | Verbose and colourful traceback formatting. |
|
4 | 4 | |
|
5 | 5 | **ColorTB** |
|
6 | 6 | |
|
7 | 7 | I've always found it a bit hard to visually parse tracebacks in Python. The |
|
8 | 8 | ColorTB class is a solution to that problem. It colors the different parts of a |
|
9 | 9 | traceback in a manner similar to what you would expect from a syntax-highlighting |
|
10 | 10 | text editor. |
|
11 | 11 | |
|
12 | 12 | Installation instructions for ColorTB:: |
|
13 | 13 | |
|
14 | 14 | import sys,ultratb |
|
15 | 15 | sys.excepthook = ultratb.ColorTB() |
|
16 | 16 | |
|
17 | 17 | **VerboseTB** |
|
18 | 18 | |
|
19 | 19 | I've also included a port of Ka-Ping Yee's "cgitb.py" that produces all kinds |
|
20 | 20 | of useful info when a traceback occurs. Ping originally had it spit out HTML |
|
21 | 21 | and intended it for CGI programmers, but why should they have all the fun? I |
|
22 | 22 | altered it to spit out colored text to the terminal. It's a bit overwhelming, |
|
23 | 23 | but kind of neat, and maybe useful for long-running programs that you believe |
|
24 | 24 | are bug-free. If a crash *does* occur in that type of program you want details. |
|
25 | 25 | Give it a shot--you'll love it or you'll hate it. |
|
26 | 26 | |
|
27 | 27 | .. note:: |
|
28 | 28 | |
|
29 | 29 | The Verbose mode prints the variables currently visible where the exception |
|
30 | 30 | happened (shortening their strings if too long). This can potentially be |
|
31 | 31 | very slow, if you happen to have a huge data structure whose string |
|
32 | 32 | representation is complex to compute. Your computer may appear to freeze for |
|
33 | 33 | a while with cpu usage at 100%. If this occurs, you can cancel the traceback |
|
34 | 34 | with Ctrl-C (maybe hitting it more than once). |
|
35 | 35 | |
|
36 | 36 | If you encounter this kind of situation often, you may want to use the |
|
37 | 37 | Verbose_novars mode instead of the regular Verbose, which avoids formatting |
|
38 | 38 | variables (but otherwise includes the information and context given by |
|
39 | 39 | Verbose). |
|
40 | 40 | |
|
41 | 41 | .. note:: |
|
42 | 42 | |
|
43 | 43 | The verbose mode print all variables in the stack, which means it can |
|
44 | 44 | potentially leak sensitive information like access keys, or unencryted |
|
45 | 45 | password. |
|
46 | 46 | |
|
47 | 47 | Installation instructions for VerboseTB:: |
|
48 | 48 | |
|
49 | 49 | import sys,ultratb |
|
50 | 50 | sys.excepthook = ultratb.VerboseTB() |
|
51 | 51 | |
|
52 | 52 | Note: Much of the code in this module was lifted verbatim from the standard |
|
53 | 53 | library module 'traceback.py' and Ka-Ping Yee's 'cgitb.py'. |
|
54 | 54 | |
|
55 | 55 | Color schemes |
|
56 | 56 | ------------- |
|
57 | 57 | |
|
58 | 58 | The colors are defined in the class TBTools through the use of the |
|
59 | 59 | ColorSchemeTable class. Currently the following exist: |
|
60 | 60 | |
|
61 | 61 | - NoColor: allows all of this module to be used in any terminal (the color |
|
62 | 62 | escapes are just dummy blank strings). |
|
63 | 63 | |
|
64 | 64 | - Linux: is meant to look good in a terminal like the Linux console (black |
|
65 | 65 | or very dark background). |
|
66 | 66 | |
|
67 | 67 | - LightBG: similar to Linux but swaps dark/light colors to be more readable |
|
68 | 68 | in light background terminals. |
|
69 | 69 | |
|
70 | 70 | - Neutral: a neutral color scheme that should be readable on both light and |
|
71 | 71 | dark background |
|
72 | 72 | |
|
73 | 73 | You can implement other color schemes easily, the syntax is fairly |
|
74 | 74 | self-explanatory. Please send back new schemes you develop to the author for |
|
75 | 75 | possible inclusion in future releases. |
|
76 | 76 | |
|
77 | 77 | Inheritance diagram: |
|
78 | 78 | |
|
79 | 79 | .. inheritance-diagram:: IPython.core.ultratb |
|
80 | 80 | :parts: 3 |
|
81 | 81 | """ |
|
82 | 82 | |
|
83 | 83 | #***************************************************************************** |
|
84 | 84 | # Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu> |
|
85 | 85 | # Copyright (C) 2001-2004 Fernando Perez <fperez@colorado.edu> |
|
86 | 86 | # |
|
87 | 87 | # Distributed under the terms of the BSD License. The full license is in |
|
88 | 88 | # the file COPYING, distributed as part of this software. |
|
89 | 89 | #***************************************************************************** |
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | import dis |
|
93 | 93 | import inspect |
|
94 | 94 | import keyword |
|
95 | 95 | import linecache |
|
96 | 96 | import os |
|
97 | 97 | import pydoc |
|
98 | 98 | import re |
|
99 | 99 | import sys |
|
100 | 100 | import time |
|
101 | 101 | import tokenize |
|
102 | 102 | import traceback |
|
103 | 103 | import types |
|
104 | 104 | |
|
105 | 105 | try: # Python 2 |
|
106 | 106 | generate_tokens = tokenize.generate_tokens |
|
107 | 107 | except AttributeError: # Python 3 |
|
108 | 108 | generate_tokens = tokenize.tokenize |
|
109 | 109 | |
|
110 | 110 | # For purposes of monkeypatching inspect to fix a bug in it. |
|
111 | 111 | from inspect import getsourcefile, getfile, getmodule, \ |
|
112 | 112 | ismodule, isclass, ismethod, isfunction, istraceback, isframe, iscode |
|
113 | 113 | |
|
114 | 114 | # IPython's own modules |
|
115 | 115 | from IPython import get_ipython |
|
116 | 116 | from IPython.core import debugger |
|
117 | 117 | from IPython.core.display_trap import DisplayTrap |
|
118 | 118 | from IPython.core.excolors import exception_colors |
|
119 | 119 | from IPython.utils import PyColorize |
|
120 | 120 | from IPython.utils import openpy |
|
121 | 121 | from IPython.utils import path as util_path |
|
122 | 122 | from IPython.utils import py3compat |
|
123 | 123 | from IPython.utils import ulinecache |
|
124 | 124 | from IPython.utils.data import uniq_stable |
|
125 | 125 | from IPython.utils.terminal import get_terminal_size |
|
126 | 126 | from logging import info, error |
|
127 | 127 | |
|
128 | 128 | import IPython.utils.colorable as colorable |
|
129 | 129 | |
|
130 | 130 | # Globals |
|
131 | 131 | # amount of space to put line numbers before verbose tracebacks |
|
132 | 132 | INDENT_SIZE = 8 |
|
133 | 133 | |
|
134 | 134 | # Default color scheme. This is used, for example, by the traceback |
|
135 | 135 | # formatter. When running in an actual IPython instance, the user's rc.colors |
|
136 | 136 | # value is used, but having a module global makes this functionality available |
|
137 | 137 | # to users of ultratb who are NOT running inside ipython. |
|
138 | 138 | DEFAULT_SCHEME = 'NoColor' |
|
139 | 139 | |
|
140 | 140 | # --------------------------------------------------------------------------- |
|
141 | 141 | # Code begins |
|
142 | 142 | |
|
143 | 143 | # Utility functions |
|
144 | 144 | def inspect_error(): |
|
145 | 145 | """Print a message about internal inspect errors. |
|
146 | 146 | |
|
147 | 147 | These are unfortunately quite common.""" |
|
148 | 148 | |
|
149 | 149 | error('Internal Python error in the inspect module.\n' |
|
150 | 150 | 'Below is the traceback from this internal error.\n') |
|
151 | 151 | |
|
152 | 152 | |
|
153 | 153 | # This function is a monkeypatch we apply to the Python inspect module. We have |
|
154 | 154 | # now found when it's needed (see discussion on issue gh-1456), and we have a |
|
155 | 155 | # test case (IPython.core.tests.test_ultratb.ChangedPyFileTest) that fails if |
|
156 | 156 | # the monkeypatch is not applied. TK, Aug 2012. |
|
157 | 157 | def findsource(object): |
|
158 | 158 | """Return the entire source file and starting line number for an object. |
|
159 | 159 | |
|
160 | 160 | The argument may be a module, class, method, function, traceback, frame, |
|
161 | 161 | or code object. The source code is returned as a list of all the lines |
|
162 | 162 | in the file and the line number indexes a line in that list. An IOError |
|
163 | 163 | is raised if the source code cannot be retrieved. |
|
164 | 164 | |
|
165 | 165 | FIXED version with which we monkeypatch the stdlib to work around a bug.""" |
|
166 | 166 | |
|
167 | 167 | file = getsourcefile(object) or getfile(object) |
|
168 | 168 | # If the object is a frame, then trying to get the globals dict from its |
|
169 | 169 | # module won't work. Instead, the frame object itself has the globals |
|
170 | 170 | # dictionary. |
|
171 | 171 | globals_dict = None |
|
172 | 172 | if inspect.isframe(object): |
|
173 | 173 | # XXX: can this ever be false? |
|
174 | 174 | globals_dict = object.f_globals |
|
175 | 175 | else: |
|
176 | 176 | module = getmodule(object, file) |
|
177 | 177 | if module: |
|
178 | 178 | globals_dict = module.__dict__ |
|
179 | 179 | lines = linecache.getlines(file, globals_dict) |
|
180 | 180 | if not lines: |
|
181 | 181 | raise IOError('could not get source code') |
|
182 | 182 | |
|
183 | 183 | if ismodule(object): |
|
184 | 184 | return lines, 0 |
|
185 | 185 | |
|
186 | 186 | if isclass(object): |
|
187 | 187 | name = object.__name__ |
|
188 | 188 | pat = re.compile(r'^(\s*)class\s*' + name + r'\b') |
|
189 | 189 | # make some effort to find the best matching class definition: |
|
190 | 190 | # use the one with the least indentation, which is the one |
|
191 | 191 | # that's most probably not inside a function definition. |
|
192 | 192 | candidates = [] |
|
193 | 193 | for i in range(len(lines)): |
|
194 | 194 | match = pat.match(lines[i]) |
|
195 | 195 | if match: |
|
196 | 196 | # if it's at toplevel, it's already the best one |
|
197 | 197 | if lines[i][0] == 'c': |
|
198 | 198 | return lines, i |
|
199 | 199 | # else add whitespace to candidate list |
|
200 | 200 | candidates.append((match.group(1), i)) |
|
201 | 201 | if candidates: |
|
202 | 202 | # this will sort by whitespace, and by line number, |
|
203 | 203 | # less whitespace first |
|
204 | 204 | candidates.sort() |
|
205 | 205 | return lines, candidates[0][1] |
|
206 | 206 | else: |
|
207 | 207 | raise IOError('could not find class definition') |
|
208 | 208 | |
|
209 | 209 | if ismethod(object): |
|
210 | 210 | object = object.__func__ |
|
211 | 211 | if isfunction(object): |
|
212 | 212 | object = object.__code__ |
|
213 | 213 | if istraceback(object): |
|
214 | 214 | object = object.tb_frame |
|
215 | 215 | if isframe(object): |
|
216 | 216 | object = object.f_code |
|
217 | 217 | if iscode(object): |
|
218 | 218 | if not hasattr(object, 'co_firstlineno'): |
|
219 | 219 | raise IOError('could not find function definition') |
|
220 | 220 | pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)') |
|
221 | 221 | pmatch = pat.match |
|
222 | 222 | # fperez - fix: sometimes, co_firstlineno can give a number larger than |
|
223 | 223 | # the length of lines, which causes an error. Safeguard against that. |
|
224 | 224 | lnum = min(object.co_firstlineno, len(lines)) - 1 |
|
225 | 225 | while lnum > 0: |
|
226 | 226 | if pmatch(lines[lnum]): |
|
227 | 227 | break |
|
228 | 228 | lnum -= 1 |
|
229 | 229 | |
|
230 | 230 | return lines, lnum |
|
231 | 231 | raise IOError('could not find code object') |
|
232 | 232 | |
|
233 | 233 | |
|
234 | 234 | # This is a patched version of inspect.getargs that applies the (unmerged) |
|
235 | 235 | # patch for http://bugs.python.org/issue14611 by Stefano Taschini. This fixes |
|
236 | 236 | # https://github.com/ipython/ipython/issues/8205 and |
|
237 | 237 | # https://github.com/ipython/ipython/issues/8293 |
|
238 | 238 | def getargs(co): |
|
239 | 239 | """Get information about the arguments accepted by a code object. |
|
240 | 240 | |
|
241 | 241 | Three things are returned: (args, varargs, varkw), where 'args' is |
|
242 | 242 | a list of argument names (possibly containing nested lists), and |
|
243 | 243 | 'varargs' and 'varkw' are the names of the * and ** arguments or None.""" |
|
244 | 244 | if not iscode(co): |
|
245 | 245 | raise TypeError('{!r} is not a code object'.format(co)) |
|
246 | 246 | |
|
247 | 247 | nargs = co.co_argcount |
|
248 | 248 | names = co.co_varnames |
|
249 | 249 | args = list(names[:nargs]) |
|
250 | 250 | step = 0 |
|
251 | 251 | |
|
252 | 252 | # The following acrobatics are for anonymous (tuple) arguments. |
|
253 | 253 | for i in range(nargs): |
|
254 | 254 | if args[i][:1] in ('', '.'): |
|
255 | 255 | stack, remain, count = [], [], [] |
|
256 | 256 | while step < len(co.co_code): |
|
257 | 257 | op = ord(co.co_code[step]) |
|
258 | 258 | step = step + 1 |
|
259 | 259 | if op >= dis.HAVE_ARGUMENT: |
|
260 | 260 | opname = dis.opname[op] |
|
261 | 261 | value = ord(co.co_code[step]) + ord(co.co_code[step+1])*256 |
|
262 | 262 | step = step + 2 |
|
263 | 263 | if opname in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'): |
|
264 | 264 | remain.append(value) |
|
265 | 265 | count.append(value) |
|
266 | 266 | elif opname in ('STORE_FAST', 'STORE_DEREF'): |
|
267 | 267 | if op in dis.haslocal: |
|
268 | 268 | stack.append(co.co_varnames[value]) |
|
269 | 269 | elif op in dis.hasfree: |
|
270 | 270 | stack.append((co.co_cellvars + co.co_freevars)[value]) |
|
271 | 271 | # Special case for sublists of length 1: def foo((bar)) |
|
272 | 272 | # doesn't generate the UNPACK_TUPLE bytecode, so if |
|
273 | 273 | # `remain` is empty here, we have such a sublist. |
|
274 | 274 | if not remain: |
|
275 | 275 | stack[0] = [stack[0]] |
|
276 | 276 | break |
|
277 | 277 | else: |
|
278 | 278 | remain[-1] = remain[-1] - 1 |
|
279 | 279 | while remain[-1] == 0: |
|
280 | 280 | remain.pop() |
|
281 | 281 | size = count.pop() |
|
282 | 282 | stack[-size:] = [stack[-size:]] |
|
283 | 283 | if not remain: |
|
284 | 284 | break |
|
285 | 285 | remain[-1] = remain[-1] - 1 |
|
286 | 286 | if not remain: |
|
287 | 287 | break |
|
288 | 288 | args[i] = stack[0] |
|
289 | 289 | |
|
290 | 290 | varargs = None |
|
291 | 291 | if co.co_flags & inspect.CO_VARARGS: |
|
292 | 292 | varargs = co.co_varnames[nargs] |
|
293 | 293 | nargs = nargs + 1 |
|
294 | 294 | varkw = None |
|
295 | 295 | if co.co_flags & inspect.CO_VARKEYWORDS: |
|
296 | 296 | varkw = co.co_varnames[nargs] |
|
297 | 297 | return inspect.Arguments(args, varargs, varkw) |
|
298 | 298 | |
|
299 | 299 | |
|
300 | 300 | # Monkeypatch inspect to apply our bugfix. |
|
301 | 301 | def with_patch_inspect(f): |
|
302 | 302 | """decorator for monkeypatching inspect.findsource""" |
|
303 | 303 | |
|
304 | 304 | def wrapped(*args, **kwargs): |
|
305 | 305 | save_findsource = inspect.findsource |
|
306 | 306 | save_getargs = inspect.getargs |
|
307 | 307 | inspect.findsource = findsource |
|
308 | 308 | inspect.getargs = getargs |
|
309 | 309 | try: |
|
310 | 310 | return f(*args, **kwargs) |
|
311 | 311 | finally: |
|
312 | 312 | inspect.findsource = save_findsource |
|
313 | 313 | inspect.getargs = save_getargs |
|
314 | 314 | |
|
315 | 315 | return wrapped |
|
316 | 316 | |
|
317 | 317 | |
|
318 | 318 | if py3compat.PY3: |
|
319 | 319 | fixed_getargvalues = inspect.getargvalues |
|
320 | 320 | else: |
|
321 | 321 | # Fixes for https://github.com/ipython/ipython/issues/8293 |
|
322 | 322 | # and https://github.com/ipython/ipython/issues/8205. |
|
323 | 323 | # The relevant bug is caused by failure to correctly handle anonymous tuple |
|
324 | 324 | # unpacking, which only exists in Python 2. |
|
325 | 325 | fixed_getargvalues = with_patch_inspect(inspect.getargvalues) |
|
326 | 326 | |
|
327 | 327 | |
|
328 | 328 | def fix_frame_records_filenames(records): |
|
329 | 329 | """Try to fix the filenames in each record from inspect.getinnerframes(). |
|
330 | 330 | |
|
331 | 331 | Particularly, modules loaded from within zip files have useless filenames |
|
332 | 332 | attached to their code object, and inspect.getinnerframes() just uses it. |
|
333 | 333 | """ |
|
334 | 334 | fixed_records = [] |
|
335 | 335 | for frame, filename, line_no, func_name, lines, index in records: |
|
336 | 336 | # Look inside the frame's globals dictionary for __file__, |
|
337 | 337 | # which should be better. However, keep Cython filenames since |
|
338 | 338 | # we prefer the source filenames over the compiled .so file. |
|
339 | 339 | filename = py3compat.cast_unicode_py2(filename, "utf-8") |
|
340 | 340 | if not filename.endswith(('.pyx', '.pxd', '.pxi')): |
|
341 | 341 | better_fn = frame.f_globals.get('__file__', None) |
|
342 | 342 | if isinstance(better_fn, str): |
|
343 | 343 | # Check the type just in case someone did something weird with |
|
344 | 344 | # __file__. It might also be None if the error occurred during |
|
345 | 345 | # import. |
|
346 | 346 | filename = better_fn |
|
347 | 347 | fixed_records.append((frame, filename, line_no, func_name, lines, index)) |
|
348 | 348 | return fixed_records |
|
349 | 349 | |
|
350 | 350 | |
|
351 | 351 | @with_patch_inspect |
|
352 | 352 | def _fixed_getinnerframes(etb, context=1, tb_offset=0): |
|
353 | 353 | LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5 |
|
354 | 354 | |
|
355 | 355 | records = fix_frame_records_filenames(inspect.getinnerframes(etb, context)) |
|
356 | 356 | # If the error is at the console, don't build any context, since it would |
|
357 | 357 | # otherwise produce 5 blank lines printed out (there is no file at the |
|
358 | 358 | # console) |
|
359 | 359 | rec_check = records[tb_offset:] |
|
360 | 360 | try: |
|
361 | 361 | rname = rec_check[0][1] |
|
362 | 362 | if rname == '<ipython console>' or rname.endswith('<string>'): |
|
363 | 363 | return rec_check |
|
364 | 364 | except IndexError: |
|
365 | 365 | pass |
|
366 | 366 | |
|
367 | 367 | aux = traceback.extract_tb(etb) |
|
368 | 368 | assert len(records) == len(aux) |
|
369 | 369 | for i, (file, lnum, _, _) in zip(range(len(records)), aux): |
|
370 | 370 | maybeStart = lnum - 1 - context // 2 |
|
371 | 371 | start = max(maybeStart, 0) |
|
372 | 372 | end = start + context |
|
373 | 373 | lines = ulinecache.getlines(file)[start:end] |
|
374 | 374 | buf = list(records[i]) |
|
375 | 375 | buf[LNUM_POS] = lnum |
|
376 | 376 | buf[INDEX_POS] = lnum - 1 - start |
|
377 | 377 | buf[LINES_POS] = lines |
|
378 | 378 | records[i] = tuple(buf) |
|
379 | 379 | return records[tb_offset:] |
|
380 | 380 | |
|
381 | 381 | # Helper function -- largely belongs to VerboseTB, but we need the same |
|
382 | 382 | # functionality to produce a pseudo verbose TB for SyntaxErrors, so that they |
|
383 | 383 | # can be recognized properly by ipython.el's py-traceback-line-re |
|
384 | 384 | # (SyntaxErrors have to be treated specially because they have no traceback) |
|
385 | 385 | |
|
386 | 386 | |
|
def _format_traceback_lines(lnum, index, lines, Colors, lvals=None,
                            _line_format=lambda line, _: (line, None)):
    """Format a window of source lines for a verbose traceback frame.

    Parameters
    ----------
    lnum : int
        1-based number of the erroring source line.
    index : int
        Index of the erroring line within `lines`.
    lines : list of str
        Source lines surrounding the error.
    Colors : object
        Active color scheme providing ANSI escape attributes.
    lvals : str, optional
        Pre-formatted dump of local variables, appended right after the
        erroring line when given.
    _line_format : callable, optional
        ``f(line, mode) -> (formatted_line, error)``; when ``error`` is
        falsy, ``formatted_line`` replaces the raw line.  NOTE: the
        previous default was the tuple ``(lambda x,_:x, None)``, which is
        not callable and raised TypeError whenever the default was
        actually used; it is now a real no-op callable.

    Returns
    -------
    list of str
        Color-formatted lines ready to be joined into the traceback.
    """
    numbers_width = INDENT_SIZE - 1
    res = []
    i = lnum - index

    for line in lines:
        line = py3compat.cast_unicode(line)

        # Apply syntax highlighting when the formatter succeeds.
        new_line, err = _line_format(line, 'str')
        if not err:
            line = new_line

        if i == lnum:
            # This is the line with the error: mark it with an arrow and
            # emphasized line-number colors.
            pad = numbers_width - len(str(i))
            num = '%s%s' % (debugger.make_arrow(pad), str(lnum))
            line = '%s%s%s %s%s' % (Colors.linenoEm, num,
                                    Colors.line, line, Colors.Normal)
        else:
            num = '%*s' % (numbers_width, i)
            line = '%s%s%s %s' % (Colors.lineno, num,
                                  Colors.Normal, line)

        res.append(line)
        if lvals and i == lnum:
            res.append(lvals + '\n')
        i = i + 1
    return res
|
414 | 414 | |
|
def is_recursion_error(etype, value, records):
    """Heuristically decide whether an exception is a recursion error.

    ``RecursionError`` only exists on Python >= 3.5; older versions raise
    a plain ``RuntimeError``, so we fall back to that.  Because IPython's
    own frames consume part of the (default 1000) recursion limit, a
    traceback deeper than 500 frames whose message mentions "recursion"
    is treated as a recursion error.
    """
    try:
        # RecursionError is new in Python 3.5
        recursion_error_type = RecursionError
    except NameError:
        recursion_error_type = RuntimeError

    if etype is not recursion_error_type:
        return False
    if "recursion" not in str(value).lower():
        return False
    # >500 frames probably indicates runaway recursion rather than a
    # legitimately deep call chain.
    return len(records) > 500
|
428 | 428 | |
|
def find_recursion(etype, value, records):
    """Identify the repeating stack frames from a RecursionError traceback

    'records' is a list as returned by VerboseTB.get_records()

    Returns (last_unique, repeat_length)
    """
    # This involves guesswork: we want to show just enough of the traceback
    # to indicate where the recursion happens.  Assume the innermost quarter
    # of the frames (250 by default) belongs to the repeating cycle, then
    # scan outwards for the first frame that is not part of that set.
    if not is_recursion_error(etype, value, records):
        return len(records), 0

    # Track frames by (filename, lineno, func_name) only.
    frame_ids = [rec[1:4] for rec in records]
    repeated = set(frame_ids[-(len(frame_ids) // 4):])

    previous_position = {}
    longest_repeat = 0
    last_unique = 0  # default: the whole traceback was recursion
    for idx in range(len(frame_ids) - 1, -1, -1):
        frame = frame_ids[idx]
        if frame not in repeated:
            last_unique = idx
            break

        if frame in previous_position:
            # Distance between two sightings of the same frame gives the
            # cycle length; keep the longest one seen.
            longest_repeat = max(longest_repeat,
                                 previous_position[frame] - idx)

        previous_position[frame] = idx

    return last_unique, longest_repeat
|
466 | 466 | |
|
467 | 467 | #--------------------------------------------------------------------------- |
|
468 | 468 | # Module classes |
|
class TBTools(colorable.Colorable):
    """Basic tools used by all traceback printer classes."""

    # Number of frames to skip when reporting tracebacks
    tb_offset = 0

    def __init__(self, color_scheme='NoColor', call_pdb=False, ostream=None, parent=None, config=None):
        # Whether to invoke the interactive pdb debugger after printing a
        # traceback.
        super(TBTools, self).__init__(parent=parent, config=config)
        self.call_pdb = call_pdb

        # The raw stream value is kept private and the public ``ostream``
        # is a property, so that sys.stdout is resolved lazily at write
        # time.  sys.stdout is swapped dynamically by IPython, so a static
        # reference to it must NEVER be stored.
        self._ostream = ostream

        # Build the ANSI color table and activate the requested scheme.
        self.color_scheme_table = exception_colors()
        self.set_colors(color_scheme)
        # Remember the starting scheme so color_toggle() can restore it.
        self.old_scheme = color_scheme

        self.pdb = debugger.Pdb() if call_pdb else None

    def _get_ostream(self):
        """Output stream that exceptions are written to.

        Valid values are:

        - None: the default, which means that IPython will dynamically resolve
          to sys.stdout.  This ensures compatibility with most tools, including
          Windows (where plain stdout doesn't recognize ANSI escapes).

        - Any object with 'write' and 'flush' attributes.
        """
        if self._ostream is None:
            return sys.stdout
        return self._ostream

    def _set_ostream(self, val):
        assert val is None or (hasattr(val, 'write') and hasattr(val, 'flush'))
        self._ostream = val

    ostream = property(_get_ostream, _set_ostream)

    def set_colors(self, *args, **kw):
        """Shorthand access to the color table scheme selector method."""
        # Activate the scheme on our own table ...
        self.color_scheme_table.set_active_scheme(*args, **kw)
        # ... expose the active colors directly for convenience ...
        self.Colors = self.color_scheme_table.active_colors
        # ... and keep any attached debugger in sync (self.pdb may not
        # exist yet when called from __init__).
        if getattr(self, 'pdb', None) is not None:
            self.pdb.set_colors(*args, **kw)

    def color_toggle(self):
        """Toggle between the currently active color scheme and NoColor."""
        table = self.color_scheme_table
        if table.active_scheme_name == 'NoColor':
            table.set_active_scheme(self.old_scheme)
        else:
            self.old_scheme = table.active_scheme_name
            table.set_active_scheme('NoColor')
        self.Colors = table.active_colors

    def stb2text(self, stb):
        """Convert a structured traceback (a list) to a string."""
        return '\n'.join(stb)

    def text(self, etype, value, tb, tb_offset=None, context=5):
        """Return formatted traceback.

        Subclasses may override this if they add extra arguments.
        """
        tb_list = self.structured_traceback(etype, value, tb,
                                            tb_offset, context)
        return self.stb2text(tb_list)

    def structured_traceback(self, etype, evalue, tb, tb_offset=None,
                             context=5, mode=None):
        """Return a list of traceback frames.

        Must be implemented by each class.
        """
        raise NotImplementedError()
|
562 | 562 | |
|
563 | 563 | |
|
564 | 564 | #--------------------------------------------------------------------------- |
|
class ListTB(TBTools):
    """Print traceback information from a traceback list, with optional color.

    Calling requires 3 arguments: (etype, evalue, elist)
    as would be obtained by::

      etype, evalue, tb = sys.exc_info()
      if tb:
        elist = traceback.extract_tb(tb)
      else:
        elist = None

    It can thus be used by programs which need to process the traceback before
    printing (such as console replacements based on the code module from the
    standard library).

    Because they are meant to be called without a full traceback (only a
    list), instances of this class can't call the interactive pdb debugger."""

    def __init__(self, color_scheme='NoColor', call_pdb=False, ostream=None, parent=None, config=None):
        TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
                         ostream=ostream, parent=parent, config=config)

    def __call__(self, etype, value, elist):
        out = self.ostream
        out.flush()
        out.write(self.text(etype, value, elist))
        out.write('\n')

    def structured_traceback(self, etype, value, elist, tb_offset=None,
                             context=5):
        """Return a color formatted string with the traceback info.

        Parameters
        ----------
        etype : exception type
          Type of the exception raised.

        value : object
          Data stored in the exception

        elist : list
          List of frames, see class docstring for details.

        tb_offset : int, optional
          Number of frames in the traceback to skip.  If not given, the
          instance value is used (set in constructor).

        context : int, optional
          Number of lines of context information to print.

        Returns
        -------
        String with formatted exception.
        """
        if tb_offset is None:
            tb_offset = self.tb_offset
        Colors = self.Colors
        out_list = []
        if elist:
            # Drop the frames the caller asked us to skip, but never all
            # of them.
            if tb_offset and len(elist) > tb_offset:
                elist = elist[tb_offset:]

            out_list.append('Traceback %s(most recent call last)%s:' %
                            (Colors.normalEm, Colors.Normal) + '\n')
            out_list.extend(self._format_list(elist))
        # The exception info is always appended as a single entry, even
        # when there are no traceback frames at all.
        out_list.append(''.join(self._format_exception_only(etype, value)))
        return out_list

    def _format_list(self, extracted_list):
        """Format a list of traceback entry tuples for printing.

        Given a list of tuples as returned by extract_tb() or
        extract_stack(), return a list of strings ready for printing.
        Each string in the resulting list corresponds to the item with the
        same index in the argument list.  Each string ends in a newline;
        the strings may contain internal newlines as well, for those items
        whose source text line is not None.

        Lifted almost verbatim from traceback.py
        """
        Colors = self.Colors
        output = []
        # All entries except the last use the plain color set.
        for filename, lineno, name, line in extracted_list[:-1]:
            entry = ' File %s"%s"%s, line %s%d%s, in %s%s%s\n' % (
                Colors.filename, py3compat.cast_unicode_py2(filename, "utf-8"), Colors.Normal,
                Colors.lineno, lineno, Colors.Normal,
                Colors.name, py3compat.cast_unicode_py2(name, "utf-8"), Colors.Normal)
            if line:
                entry += ' %s\n' % line.strip()
            output.append(entry)
        # The last entry is emphasized: it is where the error occurred.
        filename, lineno, name, line = extracted_list[-1]
        entry = '%s File %s"%s"%s, line %s%d%s, in %s%s%s%s\n' % (
            Colors.normalEm,
            Colors.filenameEm, py3compat.cast_unicode_py2(filename, "utf-8"), Colors.normalEm,
            Colors.linenoEm, lineno, Colors.normalEm,
            Colors.nameEm, py3compat.cast_unicode_py2(name, "utf-8"), Colors.normalEm,
            Colors.Normal)
        if line:
            entry += '%s %s%s\n' % (Colors.line, line.strip(),
                                    Colors.Normal)
        output.append(entry)
        return output

    def _format_exception_only(self, etype, value):
        """Format the exception part of a traceback.

        The arguments are the exception type and value such as given by
        sys.exc_info()[:2]. The return value is a list of strings, each ending
        in a newline.  Normally, the list contains a single string; however,
        for SyntaxError exceptions, it contains several lines that (when
        printed) display detailed information about where the syntax error
        occurred.  The message indicating which exception occurred is the
        always last string in the list.

        Also lifted nearly verbatim from traceback.py
        """
        have_filedata = False
        Colors = self.Colors
        output = []
        stype = py3compat.cast_unicode(Colors.excName + etype.__name__ + Colors.Normal)
        if value is None:
            # A bare exception type with no value.
            output.append(stype + '\n')
        else:
            if issubclass(etype, SyntaxError):
                have_filedata = True
                if not value.filename:
                    value.filename = "<string>"
                if value.lineno:
                    lineno = value.lineno
                    textline = ulinecache.getline(value.filename, value.lineno)
                else:
                    lineno = 'unknown'
                    textline = ''
                output.append('%s File %s"%s"%s, line %s%s%s\n' %
                              (Colors.normalEm,
                               Colors.filenameEm, py3compat.cast_unicode(value.filename), Colors.normalEm,
                               Colors.linenoEm, lineno, Colors.Normal))
                if textline == '':
                    # linecache had nothing; fall back on the text carried
                    # by the SyntaxError itself.
                    textline = py3compat.cast_unicode(value.text, "utf-8")

                if textline is not None:
                    i = 0
                    while i < len(textline) and textline[i].isspace():
                        i += 1
                    output.append('%s %s%s\n' % (Colors.line,
                                                 textline.strip(),
                                                 Colors.Normal))
                    if value.offset is not None:
                        # Build a caret line pointing at the offending
                        # column, preserving original whitespace widths.
                        s = ' '
                        for c in textline[i:value.offset - 1]:
                            s += c if c.isspace() else ' '
                        output.append('%s%s^%s\n' % (Colors.caret, s,
                                                     Colors.Normal))

            try:
                s = value.msg
            except Exception:
                s = self._some_str(value)
            if s:
                output.append('%s%s:%s %s\n' % (stype, Colors.excName,
                                                Colors.Normal, s))
            else:
                output.append('%s\n' % stype)

        # sync with user hooks
        if have_filedata:
            ipinst = get_ipython()
            if ipinst is not None:
                ipinst.hooks.synchronize_with_editor(value.filename, value.lineno, 0)

        return output

    def get_exception_only(self, etype, value):
        """Only print the exception type and message, without a traceback.

        Parameters
        ----------
        etype : exception type
        value : exception value
        """
        return ListTB.structured_traceback(self, etype, value, [])

    def show_exception_only(self, etype, evalue):
        """Only print the exception type and message, without a traceback.

        Parameters
        ----------
        etype : exception type
        value : exception value
        """
        # This method needs to use __call__ from *this* class, not the one from
        # a subclass whose signature or behavior may be different
        ostream = self.ostream
        ostream.flush()
        ostream.write('\n'.join(self.get_exception_only(etype, evalue)))
        ostream.flush()

    def _some_str(self, value):
        # Defensive str() conversion, lifted from traceback.py
        try:
            return py3compat.cast_unicode(str(value))
        except:
            return u'<unprintable %s object>' % type(value).__name__
|
786 | 786 | |
|
787 | 787 | |
|
788 | 788 | #---------------------------------------------------------------------------- |
|
789 | 789 | class VerboseTB(TBTools): |
|
790 | 790 | """A port of Ka-Ping Yee's cgitb.py module that outputs color text instead |
|
791 | 791 | of HTML. Requires inspect and pydoc. Crazy, man. |
|
792 | 792 | |
|
793 | 793 | Modified version which optionally strips the topmost entries from the |
|
794 | 794 | traceback, to be used with alternate interpreters (because their own code |
|
795 | 795 | would appear in the traceback).""" |
|
796 | 796 | |
|
def __init__(self, color_scheme='Linux', call_pdb=False, ostream=None,
             tb_offset=0, long_header=False, include_vars=True,
             check_cache=None, debugger_cls=None,
             parent=None, config=None):
    """Specify traceback offset, headers and color scheme.

    Define how many frames to drop from the tracebacks. Calling it with
    tb_offset=1 allows use of this handler in interpreters which will have
    their own code at the top of the traceback (VerboseTB will first
    remove that frame before printing the traceback info)."""
    TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
                     ostream=ostream, parent=parent, config=config)
    self.tb_offset = tb_offset
    self.long_header = long_header
    self.include_vars = include_vars
    # linecache.checkcache is the default cache validator, but callers may
    # supply their own: the IPython kernel does this so that tracebacks
    # for interactive code survive a linecache flush (its compiler keeps
    # its own code cache).
    if check_cache is None:
        check_cache = linecache.checkcache
    self.check_cache = check_cache

    self.debugger_cls = debugger_cls or debugger.Pdb
|
822 | 822 | |
|
def format_records(self, records, last_unique, recursion_repeat):
    """Format the stack frames of the traceback"""
    # Render every frame up to the last unique one plus a single pass
    # through the recursion cycle.
    output = [self.format_record(*rec)
              for rec in records[:last_unique + recursion_repeat + 1]]

    if recursion_repeat:
        # Summarize the repeated cycle, then show one frame past it so the
        # reader can see where the repetition starts.
        output.append('... last %d frames repeated, from the frame below ...\n' % recursion_repeat)
        output.append(self.format_record(*records[last_unique + recursion_repeat + 1]))

    return output
|
835 | 835 | |
|
def format_record(self, frame, file, lnum, func, lines, index):
    """Format a single stack frame"""
    Colors = self.Colors  # just a shorthand + quicker name lookup
    ColorsNormal = Colors.Normal  # used a lot
    col_scheme = self.color_scheme_table.active_scheme_name
    indent = ' ' * INDENT_SIZE

    # Pre-built color templates for the various pieces of the frame.
    em_normal = '%s\n%s%s' % (Colors.valEm, indent, ColorsNormal)
    undefined = '%sundefined%s' % (Colors.em, ColorsNormal)
    tpl_link = '%s%%s%s' % (Colors.filenameEm, ColorsNormal)
    tpl_call = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm,
                                    ColorsNormal)
    tpl_call_fail = 'in %s%%s%s(***failed resolving arguments***)%s' % \
                    (Colors.vName, Colors.valEm, ColorsNormal)
    tpl_local_var = '%s%%s%s' % (Colors.vName, ColorsNormal)
    tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal,
                                             Colors.vName, ColorsNormal)
    tpl_name_val = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal)

    tpl_line = '%s%%s%s %%s' % (Colors.lineno, ColorsNormal)
    tpl_line_em = '%s%%s%s %%s%s' % (Colors.linenoEm, Colors.line,
                                     ColorsNormal)

    # Normalize the filename.
    if not file:
        file = '?'
    elif file.startswith(str("<")) and file.endswith(str(">")):
        # A pseudo-filename such as <stdin>: leave untouched.
        pass
    elif not os.path.isabs(file):
        # Try to resolve a relative filename against sys.path entries
        # (which is also what linecache does).
        for dirname in sys.path:
            try:
                fullname = os.path.join(dirname, file)
                if os.path.isfile(fullname):
                    file = os.path.abspath(fullname)
                    break
            except Exception:
                # sys.path can contain very strange entries; skip them.
                pass

    file = py3compat.cast_unicode(file, util_path.fs_encoding)
    link = tpl_link % file
    args, varargs, varkw, locals = fixed_getargvalues(frame)

    if func == '?':
        call = ''
    else:
        # Decide whether to include variable details or not
        var_repr = eqrepr if self.include_vars else nullrepr
        try:
            call = tpl_call % (func, inspect.formatargvalues(args,
                                                             varargs, varkw,
                                                             locals, formatvalue=var_repr))
        except KeyError:
            # This happens e.g. for errors inside generator expressions,
            # where local variables appear on the line but can't be
            # extracted from the frame.  We must *not* construct another
            # traceback here (that would break %debug later on), so we
            # just report the failure; the only cost is that this frame
            # won't show locals in its call signature.
            call = tpl_call_fail % func

    # Don't attempt to tokenize binary files.
    if file.endswith(('.so', '.pyd', '.dll')):
        return '%s %s\n' % (link, call)

    elif file.endswith(('.pyc', '.pyo')):
        # Look up the corresponding source file.
        try:
            file = openpy.source_from_cache(file)
        except ValueError:
            # Failed to get the source file for some reason
            # E.g. https://github.com/ipython/ipython/issues/9486
            return '%s %s\n' % (link, call)

    def linereader(file=file, lnum=[lnum], getline=ulinecache.getline):
        # Stateful reader: each call yields the next source line.
        line = getline(file, lnum[0])
        lnum[0] += 1
        return line

    # Build the list of names referenced on the line(s) where the
    # exception occurred, so their values can be shown below the frame.
    try:
        names = []
        name_cont = False

        for token_type, token, start, end, line in generate_tokens(linereader):
            # build composite (dotted) names
            if token_type == tokenize.NAME and token not in keyword.kwlist:
                if name_cont:
                    # Continuation of a dotted name
                    try:
                        names[-1].append(token)
                    except IndexError:
                        names.append([token])
                    name_cont = False
                else:
                    # A fresh name.  Append everything; the consumer prunes
                    # duplicates at the end, since composite names make
                    # on-the-fly pruning unreliable.
                    names.append([token])
            elif token == '.':
                name_cont = True
            elif token_type == tokenize.NEWLINE:
                break

    except (IndexError, UnicodeDecodeError, SyntaxError):
        # signals exit of tokenizer
        # SyntaxError can occur if the file is not actually Python
        # - see gh-6300
        pass
    except tokenize.TokenError as msg:
        _m = ("An unexpected error occurred while tokenizing input\n"
              "The following traceback may be corrupted or invalid\n"
              "The error message is: %s\n" % msg)
        error(_m)

    # Join composite names (e.g. "dict.fromkeys")
    names = ['.'.join(n) for n in names]
    # prune names list of duplicates, but keep the right order
    unique_names = uniq_stable(names)

    # Build the local/global variable dump, if requested.
    lvals = []
    if self.include_vars:
        for name_full in unique_names:
            name_base = name_full.split('.', 1)[0]
            if name_base in frame.f_code.co_varnames:
                # Local to this frame.
                if name_base in locals:
                    try:
                        value = repr(eval(name_full, locals))
                    except:
                        value = undefined
                else:
                    value = undefined
                name = tpl_local_var % name_full
            else:
                # Otherwise, look it up in the frame's globals.
                if name_base in frame.f_globals:
                    try:
                        value = repr(eval(name_full, frame.f_globals))
                    except:
                        value = undefined
                else:
                    value = undefined
                name = tpl_global_var % name_full
            lvals.append(tpl_name_val % (name, value))
    lvals = '%s%s' % (indent, em_normal.join(lvals)) if lvals else ''

    level = '%s %s\n' % (link, call)

    if index is None:
        return level
    _line_format = PyColorize.Parser(style=col_scheme, parent=self).format2
    return '%s%s' % (level, ''.join(
        _format_traceback_lines(lnum, index, lines, Colors, lvals,
                                _line_format)))
|
1013 | 1013 | |
|
def prepare_chained_exception_message(self, cause):
    """Return the banner inserted between chained exceptions (PEP 3134).

    A truthy `cause` means the previous exception was the explicit
    ``raise ... from ...`` cause; otherwise it was merely being handled
    when the new exception occurred.
    """
    direct_cause = "\nThe above exception was the direct cause of the following exception:\n"
    exception_during_handling = "\nDuring handling of the above exception, another exception occurred:\n"

    return [[direct_cause if cause else exception_during_handling]]
|
1023 | 1023 | |
|
def prepare_header(self, etype, long_version=False):
    """Build the header line(s) displayed above the traceback frames."""
    colors = self.Colors  # just a shorthand + quicker name lookup
    colorsnormal = colors.Normal  # used a lot
    exc = '%s%s%s' % (colors.excName, etype, colorsnormal)
    # Cap the banner width at 75 columns even on wide terminals.
    width = min(75, get_terminal_size()[0])
    if long_version:
        # Verbose header: exception type, interpreter version/path and date.
        pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
        date = time.ctime(time.time())

        head = '%s%s%s\n%s%s%s\n%s' % (colors.topline, '-' * width, colorsnormal,
                                       exc, ' ' * (width - len(str(etype)) - len(pyver)),
                                       pyver, date.rjust(width))
        head += "\nA problem occurred executing Python code. Here is the sequence of function" \
                "\ncalls leading up to the error, with the most recent (innermost) call last."
    else:
        # Simplified header: exception type plus a right-justified banner.
        head = '%s%s' % (exc, 'Traceback (most recent call last)'.
                         rjust(width - len(str(etype))))

    return head
|
1045 | 1045 | |
|
def format_exception(self, etype, evalue):
    """Format the exception type/value line(s) of the report."""
    colors = self.Colors  # just a shorthand + quicker name lookup
    colorsnormal = colors.Normal  # used a lot
    indent = ' ' * INDENT_SIZE
    # Get (safely) a string form of the exception info
    try:
        etype_str, evalue_str = map(str, (etype, evalue))
    except:
        # User exception is improperly defined.
        etype, evalue = str, sys.exc_info()[:2]
        etype_str, evalue_str = map(str, (etype, evalue))
    # ... and format it
    exception = ['%s%s%s: %s' % (colors.excName, etype_str,
                                 colorsnormal, py3compat.cast_unicode(evalue_str))]

    # Legacy Python 2 path: old-style instances get their attributes
    # dumped below the exception line.
    if (not py3compat.PY3) and type(evalue) is types.InstanceType:
        try:
            names = [w for w in dir(evalue) if isinstance(w, str)]
        except:
            # Every now and then, an object with funny internals blows up
            # when dir() is called on it.  We do the best we can to report
            # the problem and continue
            _m = '%sException reporting error (object with broken dir())%s:'
            exception.append(_m % (colors.excName, colorsnormal))
            etype_str, evalue_str = map(str, sys.exc_info()[:2])
            exception.append('%s%s%s: %s' % (colors.excName, etype_str,
                                             colorsnormal, py3compat.cast_unicode(evalue_str)))
            names = []
        for name in names:
            value = text_repr(getattr(evalue, name))
            exception.append('\n%s%s = %s' % (indent, name, value))

    return exception
|
1079 | 1079 | |
|
    def format_exception_as_a_whole(self, etype, evalue, etb, number_of_lines_of_context, tb_offset):
        """Formats the header, traceback and exception message for a single exception.

        This may be called multiple times by Python 3 exception chaining
        (PEP 3134).
        """
        # some locals
        orig_etype = etype
        try:
            # Prefer the bare class name in the header.
            etype = etype.__name__
        except AttributeError:
            pass

        tb_offset = self.tb_offset if tb_offset is None else tb_offset
        head = self.prepare_header(etype, self.long_header)
        records = self.get_records(etb, number_of_lines_of_context, tb_offset)

        # get_records returns None when traceback extraction itself failed.
        if records is None:
            return ""

        last_unique, recursion_repeat = find_recursion(orig_etype, evalue, records)

        frames = self.format_records(records, last_unique, recursion_repeat)

        formatted_exception = self.format_exception(etype, evalue)
        if records:
            # Let any registered editor hook jump to the innermost frame.
            filepath, lnum = records[-1][1:3]
            filepath = os.path.abspath(filepath)
            ipinst = get_ipython()
            if ipinst is not None:
                ipinst.hooks.synchronize_with_editor(filepath, lnum, 0)

        return [[head] + frames + [''.join(formatted_exception[0])]]
|
1113 | 1113 | |
|
    def get_records(self, etb, number_of_lines_of_context, tb_offset):
        """Extract frame records from traceback *etb*.

        Returns None (after reporting) when extraction fails, so callers
        must handle the no-records case.
        """
        try:
            # Try the default getinnerframes and Alex's: Alex's fixes some
            # problems, but it generates empty tracebacks for console errors
            # (5 blanks lines) where none should be returned.
            return _fixed_getinnerframes(etb, number_of_lines_of_context, tb_offset)
        except UnicodeDecodeError:
            # This can occur if a file's encoding magic comment is wrong.
            # I can't see a way to recover without duplicating a bunch of code
            # from the stdlib traceback module. --TK
            error('\nUnicodeDecodeError while processing traceback.\n')
            return None
        except:
            # FIXME: I've been getting many crash reports from python 2.3
            # users, traceable to inspect.py.  If I can find a small test-case
            # to reproduce this, I should either write a better workaround or
            # file a bug report against inspect (if that's the real problem).
            # So far, I haven't been able to find an isolated example to
            # reproduce the problem.
            inspect_error()
            traceback.print_exc(file=self.ostream)
            info('\nUnfortunately, your original traceback can not be constructed.\n')
            return None
|
1137 | 1137 | |
|
1138 | 1138 | def get_parts_of_chained_exception(self, evalue): |
|
1139 | 1139 | def get_chained_exception(exception_value): |
|
1140 | 1140 | cause = getattr(exception_value, '__cause__', None) |
|
1141 | 1141 | if cause: |
|
1142 | 1142 | return cause |
|
1143 | 1143 | if getattr(exception_value, '__suppress_context__', False): |
|
1144 | 1144 | return None |
|
1145 | 1145 | return getattr(exception_value, '__context__', None) |
|
1146 | 1146 | |
|
1147 | 1147 | chained_evalue = get_chained_exception(evalue) |
|
1148 | 1148 | |
|
1149 | 1149 | if chained_evalue: |
|
1150 | 1150 | return chained_evalue.__class__, chained_evalue, chained_evalue.__traceback__ |
|
1151 | 1151 | |
|
    def structured_traceback(self, etype, evalue, etb, tb_offset=None,
                             number_of_lines_of_context=5):
        """Return a nice text document describing the traceback."""

        formatted_exception = self.format_exception_as_a_whole(etype, evalue, etb, number_of_lines_of_context,
                                                               tb_offset)

        colors = self.Colors  # just a shorthand + quicker name lookup
        colorsnormal = colors.Normal  # used a lot
        head = '%s%s%s' % (colors.topline, '-' * min(75, get_terminal_size()[0]), colorsnormal)
        structured_traceback_parts = [head]
        if py3compat.PY3:
            # Walk the PEP 3134 exception chain (__cause__/__context__),
            # formatting each linked exception with reduced context.
            chained_exceptions_tb_offset = 0
            lines_of_context = 3
            formatted_exceptions = formatted_exception
            exception = self.get_parts_of_chained_exception(evalue)
            if exception:
                # prepare_chained_exception_message picks the "direct cause"
                # vs "during handling" wording based on __cause__ being set.
                formatted_exceptions += self.prepare_chained_exception_message(evalue.__cause__)
                etype, evalue, etb = exception
            else:
                evalue = None
            chained_exc_ids = set()
            while evalue:
                formatted_exceptions += self.format_exception_as_a_whole(etype, evalue, etb, lines_of_context,
                                                                         chained_exceptions_tb_offset)
                exception = self.get_parts_of_chained_exception(evalue)

                if exception and not id(exception[1]) in chained_exc_ids:
                    chained_exc_ids.add(id(exception[1])) # trace exception to avoid infinite 'cause' loop
                    formatted_exceptions += self.prepare_chained_exception_message(evalue.__cause__)
                    etype, evalue, etb = exception
                else:
                    evalue = None

            # we want to see exceptions in a reversed order:
            # the first exception should be on top
            for formatted_exception in reversed(formatted_exceptions):
                structured_traceback_parts += formatted_exception
        else:
            # Python 2: no exception chaining, just the single exception.
            structured_traceback_parts += formatted_exception[0]

        return structured_traceback_parts
|
1194 | 1194 | |
|
    def debugger(self, force=False):
        """Call up the pdb debugger if desired, always clean up the tb
        reference.

        Keywords:

          - force(False): by default, this routine checks the instance call_pdb
            flag and does not actually invoke the debugger if the flag is false.
            The 'force' option forces the debugger to activate even if the flag
            is false.

        If the call_pdb flag is set, the pdb interactive debugger is
        invoked. In all cases, the self.tb reference to the current traceback
        is deleted to prevent lingering references which hamper memory
        management.

        Note that each call to pdb() does an 'import readline', so if your app
        requires a special setup for the readline completers, you'll have to
        fix that by hand after invoking the exception handler."""

        if force or self.call_pdb:
            if self.pdb is None:
                # Lazily construct the debugger on first use.
                self.pdb = self.debugger_cls()
            # the system displayhook may have changed, restore the original
            # for pdb
            display_trap = DisplayTrap(hook=sys.__displayhook__)
            with display_trap:
                self.pdb.reset()
                # Find the right frame so we don't pop up inside ipython itself
                if hasattr(self, 'tb') and self.tb is not None:
                    etb = self.tb
                else:
                    etb = self.tb = sys.last_traceback
                # Walk to the innermost traceback entry so the debugger
                # starts where the exception actually occurred.
                while self.tb is not None and self.tb.tb_next is not None:
                    self.tb = self.tb.tb_next
                if etb and etb.tb_next:
                    etb = etb.tb_next
                self.pdb.botframe = etb.tb_frame
                self.pdb.interaction(self.tb.tb_frame, self.tb)

        # Always drop the traceback reference, whether or not pdb ran.
        if hasattr(self, 'tb'):
            del self.tb
|
1237 | 1237 | |
|
1238 | 1238 | def handler(self, info=None): |
|
1239 | 1239 | (etype, evalue, etb) = info or sys.exc_info() |
|
1240 | 1240 | self.tb = etb |
|
1241 | 1241 | ostream = self.ostream |
|
1242 | 1242 | ostream.flush() |
|
1243 | 1243 | ostream.write(self.text(etype, evalue, etb)) |
|
1244 | 1244 | ostream.write('\n') |
|
1245 | 1245 | ostream.flush() |
|
1246 | 1246 | |
|
1247 | 1247 | # Changed so an instance can just be called as VerboseTB_inst() and print |
|
1248 | 1248 | # out the right info on its own. |
|
1249 | 1249 | def __call__(self, etype=None, evalue=None, etb=None): |
|
1250 | 1250 | """This hook can replace sys.excepthook (for Python 2.1 or higher).""" |
|
1251 | 1251 | if etb is None: |
|
1252 | 1252 | self.handler() |
|
1253 | 1253 | else: |
|
1254 | 1254 | self.handler((etype, evalue, etb)) |
|
1255 | 1255 | try: |
|
1256 | 1256 | self.debugger() |
|
1257 | 1257 | except KeyboardInterrupt: |
|
1258 | 1258 | print("\nKeyboardInterrupt") |
|
1259 | 1259 | |
|
1260 | 1260 | |
|
1261 | 1261 | #---------------------------------------------------------------------------- |
|
class FormattedTB(VerboseTB, ListTB):
    """Subclass ListTB but allow calling with a traceback.

    It can thus be used as a sys.excepthook for Python > 2.1.

    Also adds 'Context' and 'Verbose' modes, not available in ListTB.

    Allows a tb_offset to be specified. This is useful for situations where
    one needs to remove a number of topmost frames from the traceback (such as
    occurs with python programs that themselves execute other python code,
    like Python shells). """

    def __init__(self, mode='Plain', color_scheme='Linux', call_pdb=False,
                 ostream=None,
                 tb_offset=0, long_header=False, include_vars=False,
                 check_cache=None, debugger_cls=None,
                 parent=None, config=None):

        # NEVER change the order of this list. Put new modes at the end:
        self.valid_modes = ['Plain', 'Context', 'Verbose']
        self.verbose_modes = self.valid_modes[1:3]

        VerboseTB.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
                           ostream=ostream, tb_offset=tb_offset,
                           long_header=long_header, include_vars=include_vars,
                           check_cache=check_cache, debugger_cls=debugger_cls,
                           parent=parent, config=config)

        # Different types of tracebacks are joined with different separators to
        # form a single string.  They are taken from this dict
        self._join_chars = dict(Plain='', Context='\n', Verbose='\n')
        # set_mode also sets the tb_join_char attribute
        self.set_mode(mode)

    def _extract_tb(self, tb):
        # Convert a live traceback object into the pre-extracted frame list
        # that ListTB expects; None passes through unchanged.
        if tb:
            return traceback.extract_tb(tb)
        else:
            return None

    def structured_traceback(self, etype, value, tb, tb_offset=None, number_of_lines_of_context=5):
        # Dispatch to the verbose formatter or the plain ListTB formatter
        # depending on the currently selected mode.
        tb_offset = self.tb_offset if tb_offset is None else tb_offset
        mode = self.mode
        if mode in self.verbose_modes:
            # Verbose modes need a full traceback
            return VerboseTB.structured_traceback(
                self, etype, value, tb, tb_offset, number_of_lines_of_context
            )
        else:
            # We must check the source cache because otherwise we can print
            # out-of-date source code.
            self.check_cache()
            # Now we can extract and format the exception
            elist = self._extract_tb(tb)
            return ListTB.structured_traceback(
                self, etype, value, elist, tb_offset, number_of_lines_of_context
            )

    def stb2text(self, stb):
        """Convert a structured traceback (a list) to a string."""
        return self.tb_join_char.join(stb)

    def set_mode(self, mode=None):
        """Switch to the desired mode.

        If mode is not specified, cycles through the available modes."""

        if not mode:
            # Cycle: Plain -> Context -> Verbose -> Plain ...
            new_idx = (self.valid_modes.index(self.mode) + 1 ) % \
                      len(self.valid_modes)
            self.mode = self.valid_modes[new_idx]
        elif mode not in self.valid_modes:
            raise ValueError('Unrecognized mode in FormattedTB: <' + mode + '>\n'
                             'Valid modes: ' + str(self.valid_modes))
        else:
            self.mode = mode
        # include variable details only in 'Verbose' mode
        self.include_vars = (self.mode == self.valid_modes[2])
        # Set the join character for generating text tracebacks
        self.tb_join_char = self._join_chars[self.mode]

    # some convenient shortcuts
    def plain(self):
        self.set_mode(self.valid_modes[0])

    def context(self):
        self.set_mode(self.valid_modes[1])

    def verbose(self):
        self.set_mode(self.valid_modes[2])
|
1353 | 1353 | |
|
1354 | 1354 | |
|
1355 | 1355 | #---------------------------------------------------------------------------- |
|
class AutoFormattedTB(FormattedTB):
    """A traceback printer which can be called on the fly.

    It will find out about exceptions by itself.

    A brief example::

        AutoTB = AutoFormattedTB(mode = 'Verbose',color_scheme='Linux')
        try:
          ...
        except:
          AutoTB()  # or AutoTB(out=logfile) where logfile is an open file object
    """

    def __call__(self, etype=None, evalue=None, etb=None,
                 out=None, tb_offset=None):
        """Print out a formatted exception traceback.

        Optional arguments:
          - out: an open file-like object to direct output to.

          - tb_offset: the number of frames to skip over in the stack, on a
            per-call basis (this overrides temporarily the instance's tb_offset
            given at initialization time. """

        if out is None:
            out = self.ostream
        out.flush()
        out.write(self.text(etype, evalue, etb, tb_offset))
        out.write('\n')
        out.flush()
        # FIXME: we should remove the auto pdb behavior from here and leave
        # that to the clients.
        try:
            self.debugger()
        except KeyboardInterrupt:
            print("\nKeyboardInterrupt")

    def structured_traceback(self, etype=None, value=None, tb=None,
                             tb_offset=None, number_of_lines_of_context=5):
        # When called with no exception info, use the currently-handled one.
        if etype is None:
            etype, value, tb = sys.exc_info()
        self.tb = tb
        return FormattedTB.structured_traceback(
            self, etype, value, tb, tb_offset, number_of_lines_of_context)
|
1401 | 1401 | |
|
1402 | 1402 | |
|
1403 | 1403 | #--------------------------------------------------------------------------- |
|
1404 | 1404 | |
|
1405 | 1405 | # A simple class to preserve Nathan's original functionality. |
|
# A simple class to preserve Nathan's original functionality.
class ColorTB(FormattedTB):
    """Shorthand to initialize a FormattedTB in Linux colors mode."""

    def __init__(self, color_scheme='Linux', call_pdb=0, **kwargs):
        super(ColorTB, self).__init__(color_scheme=color_scheme,
                                      call_pdb=call_pdb, **kwargs)
|
1412 | 1412 | |
|
1413 | 1413 | |
|
class SyntaxTB(ListTB):
    """Extension which holds some state: the last exception value"""

    def __init__(self, color_scheme='NoColor', parent=None, config=None):
        ListTB.__init__(self, color_scheme, parent=parent, config=config)
        # Most recent SyntaxError seen, retrievable via clear_err_state().
        self.last_syntax_error = None

    def __call__(self, etype, value, elist):
        self.last_syntax_error = value

        ListTB.__call__(self, etype, value, elist)

    def structured_traceback(self, etype, value, elist, tb_offset=None,
                             context=5):
        # If the source file has been edited, the line in the syntax error can
        # be wrong (retrieved from an outdated cache). This replaces it with
        # the current value.
        if isinstance(value, SyntaxError) \
                and isinstance(value.filename, str) \
                and isinstance(value.lineno, int):
            linecache.checkcache(value.filename)
            newtext = ulinecache.getline(value.filename, value.lineno)
            if newtext:
                value.text = newtext
        self.last_syntax_error = value
        return super(SyntaxTB, self).structured_traceback(etype, value, elist,
                                                          tb_offset=tb_offset, context=context)

    def clear_err_state(self):
        """Return the current error state and clear it"""
        e = self.last_syntax_error
        self.last_syntax_error = None
        return e

    def stb2text(self, stb):
        """Convert a structured traceback (a list) to a string."""
        return ''.join(stb)
|
1451 | 1451 | |
|
1452 | 1452 | |
|
1453 | 1453 | # some internal-use functions |
|
def text_repr(value):
    """Hopefully pretty robust repr equivalent.

    Tries pydoc's repr, then the builtin repr(), then a description built
    from the object's ``__name__``/``__class__`` — each layer guarded, so
    something is (almost) always returned.
    """
    try:
        return pydoc.text.repr(value)
    except KeyboardInterrupt:
        raise
    except:
        try:
            return repr(value)
        except KeyboardInterrupt:
            raise
        except:
            try:
                # all still in an except block so we catch getattr raising
                name = getattr(value, '__name__', None)
                if name:
                    # ick, recursion
                    return text_repr(name)
                cls = getattr(value, '__class__', None)
                if cls:
                    return '%s instance' % text_repr(cls)
            except KeyboardInterrupt:
                raise
            except:
                return 'UNRECOVERABLE REPR FAILURE'
|
1481 | 1481 | |
|
1482 | 1482 | |
|
def eqrepr(value, repr=text_repr):
    """Render *value* as ``=<repr>``, for display after a variable name."""
    rendered = repr(value)
    return '=%s' % rendered
|
1485 | 1485 | |
|
1486 | 1486 | |
|
def nullrepr(value, repr=text_repr):
    """Value formatter that suppresses output: always returns ''."""
    return ''
@@ -1,227 +1,226 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | %store magic for lightweight persistence. |
|
4 | 4 | |
|
5 | 5 | Stores variables, aliases and macros in IPython's database. |
|
6 | 6 | |
|
7 | 7 | To automatically restore stored variables at startup, add this to your |
|
8 | 8 | :file:`ipython_config.py` file:: |
|
9 | 9 | |
|
10 | 10 | c.StoreMagics.autorestore = True |
|
11 | 11 | """ |
|
12 | 12 | |
|
13 | 13 | # Copyright (c) IPython Development Team. |
|
14 | 14 | # Distributed under the terms of the Modified BSD License. |
|
15 | 15 | |
|
16 | 16 | import inspect, os, sys, textwrap |
|
17 | 17 | |
|
18 | 18 | from IPython.core.error import UsageError |
|
19 | 19 | from IPython.core.magic import Magics, magics_class, line_magic |
|
20 | 20 | from traitlets import Bool |
|
21 | from IPython.utils.py3compat import string_types | |
|
22 | 21 | |
|
23 | 22 | |
|
24 | 23 | def restore_aliases(ip): |
|
25 | 24 | staliases = ip.db.get('stored_aliases', {}) |
|
26 | 25 | for k,v in staliases.items(): |
|
27 | 26 | #print "restore alias",k,v # dbg |
|
28 | 27 | #self.alias_table[k] = v |
|
29 | 28 | ip.alias_manager.define_alias(k,v) |
|
30 | 29 | |
|
31 | 30 | |
|
32 | 31 | def refresh_variables(ip): |
|
33 | 32 | db = ip.db |
|
34 | 33 | for key in db.keys('autorestore/*'): |
|
35 | 34 | # strip autorestore |
|
36 | 35 | justkey = os.path.basename(key) |
|
37 | 36 | try: |
|
38 | 37 | obj = db[key] |
|
39 | 38 | except KeyError: |
|
40 | 39 | print("Unable to restore variable '%s', ignoring (use %%store -d to forget!)" % justkey) |
|
41 | 40 | print("The error was:", sys.exc_info()[0]) |
|
42 | 41 | else: |
|
43 | 42 | #print "restored",justkey,"=",obj #dbg |
|
44 | 43 | ip.user_ns[justkey] = obj |
|
45 | 44 | |
|
46 | 45 | |
|
47 | 46 | def restore_dhist(ip): |
|
48 | 47 | ip.user_ns['_dh'] = ip.db.get('dhist',[]) |
|
49 | 48 | |
|
50 | 49 | |
|
51 | 50 | def restore_data(ip): |
|
52 | 51 | refresh_variables(ip) |
|
53 | 52 | restore_aliases(ip) |
|
54 | 53 | restore_dhist(ip) |
|
55 | 54 | |
|
56 | 55 | |
|
57 | 56 | @magics_class |
|
58 | 57 | class StoreMagics(Magics): |
|
59 | 58 | """Lightweight persistence for python variables. |
|
60 | 59 | |
|
61 | 60 | Provides the %store magic.""" |
|
62 | 61 | |
|
63 | 62 | autorestore = Bool(False, help= |
|
64 | 63 | """If True, any %store-d variables will be automatically restored |
|
65 | 64 | when IPython starts. |
|
66 | 65 | """ |
|
67 | 66 | ).tag(config=True) |
|
68 | 67 | |
|
69 | 68 | def __init__(self, shell): |
|
70 | 69 | super(StoreMagics, self).__init__(shell=shell) |
|
71 | 70 | self.shell.configurables.append(self) |
|
72 | 71 | if self.autorestore: |
|
73 | 72 | restore_data(self.shell) |
|
74 | 73 | |
|
75 | 74 | @line_magic |
|
76 | 75 | def store(self, parameter_s=''): |
|
77 | 76 | """Lightweight persistence for python variables. |
|
78 | 77 | |
|
79 | 78 | Example:: |
|
80 | 79 | |
|
81 | 80 | In [1]: l = ['hello',10,'world'] |
|
82 | 81 | In [2]: %store l |
|
83 | 82 | In [3]: exit |
|
84 | 83 | |
|
85 | 84 | (IPython session is closed and started again...) |
|
86 | 85 | |
|
87 | 86 | ville@badger:~$ ipython |
|
88 | 87 | In [1]: l |
|
89 | 88 | NameError: name 'l' is not defined |
|
90 | 89 | In [2]: %store -r |
|
91 | 90 | In [3]: l |
|
92 | 91 | Out[3]: ['hello', 10, 'world'] |
|
93 | 92 | |
|
94 | 93 | Usage: |
|
95 | 94 | |
|
96 | 95 | * ``%store`` - Show list of all variables and their current |
|
97 | 96 | values |
|
98 | 97 | * ``%store spam`` - Store the *current* value of the variable spam |
|
99 | 98 | to disk |
|
100 | 99 | * ``%store -d spam`` - Remove the variable and its value from storage |
|
101 | 100 | * ``%store -z`` - Remove all variables from storage |
|
102 | 101 | * ``%store -r`` - Refresh all variables from store (overwrite |
|
103 | 102 | current vals) |
|
104 | 103 | * ``%store -r spam bar`` - Refresh specified variables from store |
|
105 | 104 | (delete current val) |
|
106 | 105 | * ``%store foo >a.txt`` - Store value of foo to new file a.txt |
|
107 | 106 | * ``%store foo >>a.txt`` - Append value of foo to file a.txt |
|
108 | 107 | |
|
109 | 108 | It should be noted that if you change the value of a variable, you |
|
110 | 109 | need to %store it again if you want to persist the new value. |
|
111 | 110 | |
|
112 | 111 | Note also that the variables will need to be pickleable; most basic |
|
113 | 112 | python types can be safely %store'd. |
|
114 | 113 | |
|
115 | 114 | Also aliases can be %store'd across sessions. |
|
116 | 115 | """ |
|
117 | 116 | |
|
118 | 117 | opts,argsl = self.parse_options(parameter_s,'drz',mode='string') |
|
119 | 118 | args = argsl.split(None,1) |
|
120 | 119 | ip = self.shell |
|
121 | 120 | db = ip.db |
|
122 | 121 | # delete |
|
123 | 122 | if 'd' in opts: |
|
124 | 123 | try: |
|
125 | 124 | todel = args[0] |
|
126 | 125 | except IndexError: |
|
127 | 126 | raise UsageError('You must provide the variable to forget') |
|
128 | 127 | else: |
|
129 | 128 | try: |
|
130 | 129 | del db['autorestore/' + todel] |
|
131 | 130 | except: |
|
132 | 131 | raise UsageError("Can't delete variable '%s'" % todel) |
|
133 | 132 | # reset |
|
134 | 133 | elif 'z' in opts: |
|
135 | 134 | for k in db.keys('autorestore/*'): |
|
136 | 135 | del db[k] |
|
137 | 136 | |
|
138 | 137 | elif 'r' in opts: |
|
139 | 138 | if args: |
|
140 | 139 | for arg in args: |
|
141 | 140 | try: |
|
142 | 141 | obj = db['autorestore/' + arg] |
|
143 | 142 | except KeyError: |
|
144 | 143 | print("no stored variable %s" % arg) |
|
145 | 144 | else: |
|
146 | 145 | ip.user_ns[arg] = obj |
|
147 | 146 | else: |
|
148 | 147 | restore_data(ip) |
|
149 | 148 | |
|
150 | 149 | # run without arguments -> list variables & values |
|
151 | 150 | elif not args: |
|
152 | 151 | vars = db.keys('autorestore/*') |
|
153 | 152 | vars.sort() |
|
154 | 153 | if vars: |
|
155 | 154 | size = max(map(len, vars)) |
|
156 | 155 | else: |
|
157 | 156 | size = 0 |
|
158 | 157 | |
|
159 | 158 | print('Stored variables and their in-db values:') |
|
160 | 159 | fmt = '%-'+str(size)+'s -> %s' |
|
161 | 160 | get = db.get |
|
162 | 161 | for var in vars: |
|
163 | 162 | justkey = os.path.basename(var) |
|
164 | 163 | # print 30 first characters from every var |
|
165 | 164 | print(fmt % (justkey, repr(get(var, '<unavailable>'))[:50])) |
|
166 | 165 | |
|
167 | 166 | # default action - store the variable |
|
168 | 167 | else: |
|
169 | 168 | # %store foo >file.txt or >>file.txt |
|
170 | 169 | if len(args) > 1 and args[1].startswith('>'): |
|
171 | 170 | fnam = os.path.expanduser(args[1].lstrip('>').lstrip()) |
|
172 | 171 | if args[1].startswith('>>'): |
|
173 | 172 | fil = open(fnam, 'a') |
|
174 | 173 | else: |
|
175 | 174 | fil = open(fnam, 'w') |
|
176 | 175 | obj = ip.ev(args[0]) |
|
177 | 176 | print("Writing '%s' (%s) to file '%s'." % (args[0], |
|
178 | 177 | obj.__class__.__name__, fnam)) |
|
179 | 178 | |
|
180 | 179 | |
|
181 |
if not isinstance (obj, str |
|
|
180 | if not isinstance (obj, str): | |
|
182 | 181 | from pprint import pprint |
|
183 | 182 | pprint(obj, fil) |
|
184 | 183 | else: |
|
185 | 184 | fil.write(obj) |
|
186 | 185 | if not obj.endswith('\n'): |
|
187 | 186 | fil.write('\n') |
|
188 | 187 | |
|
189 | 188 | fil.close() |
|
190 | 189 | return |
|
191 | 190 | |
|
192 | 191 | # %store foo |
|
193 | 192 | try: |
|
194 | 193 | obj = ip.user_ns[args[0]] |
|
195 | 194 | except KeyError: |
|
196 | 195 | # it might be an alias |
|
197 | 196 | name = args[0] |
|
198 | 197 | try: |
|
199 | 198 | cmd = ip.alias_manager.retrieve_alias(name) |
|
200 | 199 | except ValueError: |
|
201 | 200 | raise UsageError("Unknown variable '%s'" % name) |
|
202 | 201 | |
|
203 | 202 | staliases = db.get('stored_aliases',{}) |
|
204 | 203 | staliases[name] = cmd |
|
205 | 204 | db['stored_aliases'] = staliases |
|
206 | 205 | print("Alias stored: %s (%s)" % (name, cmd)) |
|
207 | 206 | return |
|
208 | 207 | |
|
209 | 208 | else: |
|
210 | 209 | modname = getattr(inspect.getmodule(obj), '__name__', '') |
|
211 | 210 | if modname == '__main__': |
|
212 | 211 | print(textwrap.dedent("""\ |
|
213 | 212 | Warning:%s is %s |
|
214 | 213 | Proper storage of interactively declared classes (or instances |
|
215 | 214 | of those classes) is not possible! Only instances |
|
216 | 215 | of classes in real modules on file system can be %%store'd. |
|
217 | 216 | """ % (args[0], obj) )) |
|
218 | 217 | return |
|
219 | 218 | #pickled = pickle.dumps(obj) |
|
220 | 219 | db[ 'autorestore/' + args[0] ] = obj |
|
221 | 220 | print("Stored '%s' (%s)" % (args[0], obj.__class__.__name__)) |
|
222 | 221 | |
|
223 | 222 | |
|
224 | 223 | def load_ipython_extension(ip): |
|
225 | 224 | """Load the extension in IPython.""" |
|
226 | 225 | ip.register_magics(StoreMagics) |
|
227 | 226 |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
General Comments 0
You need to be logged in to leave comments.
Login now