Update function attribute names...
Thomas Kluyver
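Context for the change below: `func_name` is a Python 2-only function attribute, while `__name__` exists on both Python 2 and 3, so the patch switches to the portable spelling wherever a magic's name is derived from its function. A minimal sketch (the function here is hypothetical, not part of the patch):

def example_magic(line):
    """A stand-in function used only for this demonstration."""
    return line

print(example_magic.__name__)     # -> 'example_magic' on Python 2 and 3
# print(example_magic.func_name)  # -> AttributeError on Python 3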
@@ -1,690 +1,690 @@
1 # encoding: utf-8
1 # encoding: utf-8
2 """Magic functions for InteractiveShell.
2 """Magic functions for InteractiveShell.
3 """
3 """
4 from __future__ import print_function
4 from __future__ import print_function
5
5
6 #-----------------------------------------------------------------------------
6 #-----------------------------------------------------------------------------
7 # Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
7 # Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
8 # Copyright (C) 2001 Fernando Perez <fperez@colorado.edu>
8 # Copyright (C) 2001 Fernando Perez <fperez@colorado.edu>
9 # Copyright (C) 2008 The IPython Development Team
9 # Copyright (C) 2008 The IPython Development Team
10
10
11 # Distributed under the terms of the BSD License. The full license is in
11 # Distributed under the terms of the BSD License. The full license is in
12 # the file COPYING, distributed as part of this software.
12 # the file COPYING, distributed as part of this software.
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16 # Imports
16 # Imports
17 #-----------------------------------------------------------------------------
17 #-----------------------------------------------------------------------------
18 # Stdlib
18 # Stdlib
19 import os
19 import os
20 import re
20 import re
21 import sys
21 import sys
22 import types
22 import types
23 from getopt import getopt, GetoptError
23 from getopt import getopt, GetoptError
24
24
25 # Our own
25 # Our own
26 from IPython.config.configurable import Configurable
26 from IPython.config.configurable import Configurable
27 from IPython.core import oinspect
27 from IPython.core import oinspect
28 from IPython.core.error import UsageError
28 from IPython.core.error import UsageError
29 from IPython.core.inputsplitter import ESC_MAGIC, ESC_MAGIC2
29 from IPython.core.inputsplitter import ESC_MAGIC, ESC_MAGIC2
30 from IPython.external.decorator import decorator
30 from IPython.external.decorator import decorator
31 from IPython.utils.ipstruct import Struct
31 from IPython.utils.ipstruct import Struct
32 from IPython.utils.process import arg_split
32 from IPython.utils.process import arg_split
33 from IPython.utils.py3compat import string_types, iteritems
33 from IPython.utils.py3compat import string_types, iteritems
34 from IPython.utils.text import dedent
34 from IPython.utils.text import dedent
35 from IPython.utils.traitlets import Bool, Dict, Instance, MetaHasTraits
35 from IPython.utils.traitlets import Bool, Dict, Instance, MetaHasTraits
36 from IPython.utils.warn import error
36 from IPython.utils.warn import error
37
37
38 #-----------------------------------------------------------------------------
38 #-----------------------------------------------------------------------------
39 # Globals
39 # Globals
40 #-----------------------------------------------------------------------------
40 #-----------------------------------------------------------------------------
41
41
42 # A dict we'll use for each class that has magics, used as temporary storage to
42 # A dict we'll use for each class that has magics, used as temporary storage to
43 # pass information between the @line/cell_magic method decorators and the
43 # pass information between the @line/cell_magic method decorators and the
44 # @magics_class class decorator, because the method decorators have no
44 # @magics_class class decorator, because the method decorators have no
45 # access to the class when they run. See for more details:
45 # access to the class when they run. See for more details:
46 # http://stackoverflow.com/questions/2366713/can-a-python-decorator-of-an-instance-method-access-the-class
46 # http://stackoverflow.com/questions/2366713/can-a-python-decorator-of-an-instance-method-access-the-class
47
47
48 magics = dict(line={}, cell={})
48 magics = dict(line={}, cell={})
49
49
50 magic_kinds = ('line', 'cell')
50 magic_kinds = ('line', 'cell')
51 magic_spec = ('line', 'cell', 'line_cell')
51 magic_spec = ('line', 'cell', 'line_cell')
52 magic_escapes = dict(line=ESC_MAGIC, cell=ESC_MAGIC2)
52 magic_escapes = dict(line=ESC_MAGIC, cell=ESC_MAGIC2)
53
53
54 #-----------------------------------------------------------------------------
54 #-----------------------------------------------------------------------------
55 # Utility classes and functions
55 # Utility classes and functions
56 #-----------------------------------------------------------------------------
56 #-----------------------------------------------------------------------------
57
57
58 class Bunch: pass
58 class Bunch: pass
59
59
60
60
61 def on_off(tag):
61 def on_off(tag):
62 """Return an ON/OFF string for a 1/0 input. Simple utility function."""
62 """Return an ON/OFF string for a 1/0 input. Simple utility function."""
63 return ['OFF','ON'][tag]
63 return ['OFF','ON'][tag]
64
64
65
65
66 def compress_dhist(dh):
66 def compress_dhist(dh):
67 """Compress a directory history into a new one with at most 20 entries.
67 """Compress a directory history into a new one with at most 20 entries.
68
68
69 Return a new list made from the first and last 10 elements of dhist after
69 Return a new list made from the first and last 10 elements of dhist after
70 removal of duplicates.
70 removal of duplicates.
71 """
71 """
72 head, tail = dh[:-10], dh[-10:]
72 head, tail = dh[:-10], dh[-10:]
73
73
74 newhead = []
74 newhead = []
75 done = set()
75 done = set()
76 for h in head:
76 for h in head:
77 if h in done:
77 if h in done:
78 continue
78 continue
79 newhead.append(h)
79 newhead.append(h)
80 done.add(h)
80 done.add(h)
81
81
82 return newhead + tail
82 return newhead + tail
83
83
84
84
85 def needs_local_scope(func):
85 def needs_local_scope(func):
86 """Decorator to mark magic functions which need to local scope to run."""
86 """Decorator to mark magic functions which need to local scope to run."""
87 func.needs_local_scope = True
87 func.needs_local_scope = True
88 return func
88 return func
89
89
90 #-----------------------------------------------------------------------------
90 #-----------------------------------------------------------------------------
91 # Class and method decorators for registering magics
91 # Class and method decorators for registering magics
92 #-----------------------------------------------------------------------------
92 #-----------------------------------------------------------------------------
93
93
94 def magics_class(cls):
94 def magics_class(cls):
95 """Class decorator for all subclasses of the main Magics class.
95 """Class decorator for all subclasses of the main Magics class.
96
96
97 Any class that subclasses Magics *must* also apply this decorator, to
97 Any class that subclasses Magics *must* also apply this decorator, to
98 ensure that all the methods that have been decorated as line/cell magics
98 ensure that all the methods that have been decorated as line/cell magics
99 get correctly registered in the class instance. This is necessary because
99 get correctly registered in the class instance. This is necessary because
100 when method decorators run, the class does not exist yet, so they
100 when method decorators run, the class does not exist yet, so they
101 temporarily store their information into a module global. Application of
101 temporarily store their information into a module global. Application of
102 this class decorator copies that global data to the class instance and
102 this class decorator copies that global data to the class instance and
103 clears the global.
103 clears the global.
104
104
105 Obviously, this mechanism is not thread-safe, which means that the
105 Obviously, this mechanism is not thread-safe, which means that the
106 *creation* of subclasses of Magics should only be done in a single-thread
106 *creation* of subclasses of Magics should only be done in a single-thread
107 context. Instantiation of the classes has no restrictions. Given that
107 context. Instantiation of the classes has no restrictions. Given that
108 these classes are typically created at IPython startup time and before user
108 these classes are typically created at IPython startup time and before user
109 application code becomes active, in practice this should not pose any
109 application code becomes active, in practice this should not pose any
110 problems.
110 problems.
111 """
111 """
112 cls.registered = True
112 cls.registered = True
113 cls.magics = dict(line = magics['line'],
113 cls.magics = dict(line = magics['line'],
114 cell = magics['cell'])
114 cell = magics['cell'])
115 magics['line'] = {}
115 magics['line'] = {}
116 magics['cell'] = {}
116 magics['cell'] = {}
117 return cls
117 return cls
118
118
119
119
120 def record_magic(dct, magic_kind, magic_name, func):
120 def record_magic(dct, magic_kind, magic_name, func):
121 """Utility function to store a function as a magic of a specific kind.
121 """Utility function to store a function as a magic of a specific kind.
122
122
123 Parameters
123 Parameters
124 ----------
124 ----------
125 dct : dict
125 dct : dict
126 A dictionary with 'line' and 'cell' subdicts.
126 A dictionary with 'line' and 'cell' subdicts.
127
127
128 magic_kind : str
128 magic_kind : str
129 Kind of magic to be stored.
129 Kind of magic to be stored.
130
130
131 magic_name : str
131 magic_name : str
132 Key to store the magic as.
132 Key to store the magic as.
133
133
134 func : function
134 func : function
135 Callable object to store.
135 Callable object to store.
136 """
136 """
137 if magic_kind == 'line_cell':
137 if magic_kind == 'line_cell':
138 dct['line'][magic_name] = dct['cell'][magic_name] = func
138 dct['line'][magic_name] = dct['cell'][magic_name] = func
139 else:
139 else:
140 dct[magic_kind][magic_name] = func
140 dct[magic_kind][magic_name] = func
141
141
142
142
143 def validate_type(magic_kind):
143 def validate_type(magic_kind):
144 """Ensure that the given magic_kind is valid.
144 """Ensure that the given magic_kind is valid.
145
145
146 Check that the given magic_kind is one of the accepted spec types (stored
146 Check that the given magic_kind is one of the accepted spec types (stored
147 in the global `magic_spec`), raise ValueError otherwise.
147 in the global `magic_spec`), raise ValueError otherwise.
148 """
148 """
149 if magic_kind not in magic_spec:
149 if magic_kind not in magic_spec:
150 raise ValueError('magic_kind must be one of %s, %s given' %
150 raise ValueError('magic_kind must be one of %s, %s given' %
151 (magic_kinds, magic_kind))
151 (magic_kinds, magic_kind))
152
152
153
153
154 # The docstrings for the decorator below will be fairly similar for the two
154 # The docstrings for the decorator below will be fairly similar for the two
155 # types (method and function), so we generate them here once and reuse the
155 # types (method and function), so we generate them here once and reuse the
156 # templates below.
156 # templates below.
157 _docstring_template = \
157 _docstring_template = \
158 """Decorate the given {0} as {1} magic.
158 """Decorate the given {0} as {1} magic.
159
159
160 The decorator can be used with or without arguments, as follows.
160 The decorator can be used with or without arguments, as follows.
161
161
162 i) without arguments: it will create a {1} magic named as the {0} being
162 i) without arguments: it will create a {1} magic named as the {0} being
163 decorated::
163 decorated::
164
164
165 @deco
165 @deco
166 def foo(...)
166 def foo(...)
167
167
168 will create a {1} magic named `foo`.
168 will create a {1} magic named `foo`.
169
169
170 ii) with one string argument: which will be used as the actual name of the
170 ii) with one string argument: which will be used as the actual name of the
171 resulting magic::
171 resulting magic::
172
172
173 @deco('bar')
173 @deco('bar')
174 def foo(...)
174 def foo(...)
175
175
176 will create a {1} magic named `bar`.
176 will create a {1} magic named `bar`.
177 """
177 """
178
178
179 # These two are decorator factories. While they are conceptually very similar,
179 # These two are decorator factories. While they are conceptually very similar,
180 # there are enough differences in the details that it's simpler to have them
180 # there are enough differences in the details that it's simpler to have them
181 # written as completely standalone functions rather than trying to share code
181 # written as completely standalone functions rather than trying to share code
182 # and make a single one with convoluted logic.
182 # and make a single one with convoluted logic.
183
183
184 def _method_magic_marker(magic_kind):
184 def _method_magic_marker(magic_kind):
185 """Decorator factory for methods in Magics subclasses.
185 """Decorator factory for methods in Magics subclasses.
186 """
186 """
187
187
188 validate_type(magic_kind)
188 validate_type(magic_kind)
189
189
190 # This is a closure to capture the magic_kind. We could also use a class,
190 # This is a closure to capture the magic_kind. We could also use a class,
191 # but it's overkill for just that one bit of state.
191 # but it's overkill for just that one bit of state.
192 def magic_deco(arg):
192 def magic_deco(arg):
193 call = lambda f, *a, **k: f(*a, **k)
193 call = lambda f, *a, **k: f(*a, **k)
194
194
195 if callable(arg):
195 if callable(arg):
196 # "Naked" decorator call (just @foo, no args)
196 # "Naked" decorator call (just @foo, no args)
197 func = arg
197 func = arg
198 name = func.func_name
198 name = func.__name__
199 retval = decorator(call, func)
199 retval = decorator(call, func)
200 record_magic(magics, magic_kind, name, name)
200 record_magic(magics, magic_kind, name, name)
201 elif isinstance(arg, string_types):
201 elif isinstance(arg, string_types):
202 # Decorator called with arguments (@foo('bar'))
202 # Decorator called with arguments (@foo('bar'))
203 name = arg
203 name = arg
204 def mark(func, *a, **kw):
204 def mark(func, *a, **kw):
205 record_magic(magics, magic_kind, name, func.func_name)
205 record_magic(magics, magic_kind, name, func.__name__)
206 return decorator(call, func)
206 return decorator(call, func)
207 retval = mark
207 retval = mark
208 else:
208 else:
209 raise TypeError("Decorator can only be called with "
209 raise TypeError("Decorator can only be called with "
210 "string or function")
210 "string or function")
211 return retval
211 return retval
212
212
213 # Ensure the resulting decorator has a usable docstring
213 # Ensure the resulting decorator has a usable docstring
214 magic_deco.__doc__ = _docstring_template.format('method', magic_kind)
214 magic_deco.__doc__ = _docstring_template.format('method', magic_kind)
215 return magic_deco
215 return magic_deco
216
216
217
217
218 def _function_magic_marker(magic_kind):
218 def _function_magic_marker(magic_kind):
219 """Decorator factory for standalone functions.
219 """Decorator factory for standalone functions.
220 """
220 """
221 validate_type(magic_kind)
221 validate_type(magic_kind)
222
222
223 # This is a closure to capture the magic_kind. We could also use a class,
223 # This is a closure to capture the magic_kind. We could also use a class,
224 # but it's overkill for just that one bit of state.
224 # but it's overkill for just that one bit of state.
225 def magic_deco(arg):
225 def magic_deco(arg):
226 call = lambda f, *a, **k: f(*a, **k)
226 call = lambda f, *a, **k: f(*a, **k)
227
227
228 # Find get_ipython() in the caller's namespace
228 # Find get_ipython() in the caller's namespace
229 caller = sys._getframe(1)
229 caller = sys._getframe(1)
230 for ns in ['f_locals', 'f_globals', 'f_builtins']:
230 for ns in ['f_locals', 'f_globals', 'f_builtins']:
231 get_ipython = getattr(caller, ns).get('get_ipython')
231 get_ipython = getattr(caller, ns).get('get_ipython')
232 if get_ipython is not None:
232 if get_ipython is not None:
233 break
233 break
234 else:
234 else:
235 raise NameError('Decorator can only run in context where '
235 raise NameError('Decorator can only run in context where '
236 '`get_ipython` exists')
236 '`get_ipython` exists')
237
237
238 ip = get_ipython()
238 ip = get_ipython()
239
239
240 if callable(arg):
240 if callable(arg):
241 # "Naked" decorator call (just @foo, no args)
241 # "Naked" decorator call (just @foo, no args)
242 func = arg
242 func = arg
243 name = func.func_name
243 name = func.__name__
244 ip.register_magic_function(func, magic_kind, name)
244 ip.register_magic_function(func, magic_kind, name)
245 retval = decorator(call, func)
245 retval = decorator(call, func)
246 elif isinstance(arg, string_types):
246 elif isinstance(arg, string_types):
247 # Decorator called with arguments (@foo('bar'))
247 # Decorator called with arguments (@foo('bar'))
248 name = arg
248 name = arg
249 def mark(func, *a, **kw):
249 def mark(func, *a, **kw):
250 ip.register_magic_function(func, magic_kind, name)
250 ip.register_magic_function(func, magic_kind, name)
251 return decorator(call, func)
251 return decorator(call, func)
252 retval = mark
252 retval = mark
253 else:
253 else:
254 raise TypeError("Decorator can only be called with "
254 raise TypeError("Decorator can only be called with "
255 "string or function")
255 "string or function")
256 return retval
256 return retval
257
257
258 # Ensure the resulting decorator has a usable docstring
258 # Ensure the resulting decorator has a usable docstring
259 ds = _docstring_template.format('function', magic_kind)
259 ds = _docstring_template.format('function', magic_kind)
260
260
261 ds += dedent("""
261 ds += dedent("""
262 Note: this decorator can only be used in a context where IPython is already
262 Note: this decorator can only be used in a context where IPython is already
263 active, so that the `get_ipython()` call succeeds. You can therefore use
263 active, so that the `get_ipython()` call succeeds. You can therefore use
264 it in your startup files loaded after IPython initializes, but *not* in the
264 it in your startup files loaded after IPython initializes, but *not* in the
265 IPython configuration file itself, which is executed before IPython is
265 IPython configuration file itself, which is executed before IPython is
266 fully up and running. Any file located in the `startup` subdirectory of
266 fully up and running. Any file located in the `startup` subdirectory of
267 your configuration profile will be OK in this sense.
267 your configuration profile will be OK in this sense.
268 """)
268 """)
269
269
270 magic_deco.__doc__ = ds
270 magic_deco.__doc__ = ds
271 return magic_deco
271 return magic_deco
272
272
273
273
274 # Create the actual decorators for public use
274 # Create the actual decorators for public use
275
275
276 # These three are used to decorate methods in class definitions
276 # These three are used to decorate methods in class definitions
277 line_magic = _method_magic_marker('line')
277 line_magic = _method_magic_marker('line')
278 cell_magic = _method_magic_marker('cell')
278 cell_magic = _method_magic_marker('cell')
279 line_cell_magic = _method_magic_marker('line_cell')
279 line_cell_magic = _method_magic_marker('line_cell')
280
280
281 # These three decorate standalone functions and perform the decoration
281 # These three decorate standalone functions and perform the decoration
282 # immediately. They can only run where get_ipython() works
282 # immediately. They can only run where get_ipython() works
283 register_line_magic = _function_magic_marker('line')
283 register_line_magic = _function_magic_marker('line')
284 register_cell_magic = _function_magic_marker('cell')
284 register_cell_magic = _function_magic_marker('cell')
285 register_line_cell_magic = _function_magic_marker('line_cell')
285 register_line_cell_magic = _function_magic_marker('line_cell')
286
286
287 #-----------------------------------------------------------------------------
287 #-----------------------------------------------------------------------------
288 # Core Magic classes
288 # Core Magic classes
289 #-----------------------------------------------------------------------------
289 #-----------------------------------------------------------------------------
290
290
291 class MagicsManager(Configurable):
291 class MagicsManager(Configurable):
292 """Object that handles all magic-related functionality for IPython.
292 """Object that handles all magic-related functionality for IPython.
293 """
293 """
294 # Non-configurable class attributes
294 # Non-configurable class attributes
295
295
296 # A two-level dict, first keyed by magic type, then by magic function, and
296 # A two-level dict, first keyed by magic type, then by magic function, and
297 # holding the actual callable object as value. This is the dict used for
297 # holding the actual callable object as value. This is the dict used for
298 # magic function dispatch
298 # magic function dispatch
299 magics = Dict
299 magics = Dict
300
300
301 # A registry of the original objects that we've been given holding magics.
301 # A registry of the original objects that we've been given holding magics.
302 registry = Dict
302 registry = Dict
303
303
304 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
304 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
305
305
306 auto_magic = Bool(True, config=True, help=
306 auto_magic = Bool(True, config=True, help=
307 "Automatically call line magics without requiring explicit % prefix")
307 "Automatically call line magics without requiring explicit % prefix")
308
308
309 def _auto_magic_changed(self, name, value):
309 def _auto_magic_changed(self, name, value):
310 self.shell.automagic = value
310 self.shell.automagic = value
311
311
312 _auto_status = [
312 _auto_status = [
313 'Automagic is OFF, % prefix IS needed for line magics.',
313 'Automagic is OFF, % prefix IS needed for line magics.',
314 'Automagic is ON, % prefix IS NOT needed for line magics.']
314 'Automagic is ON, % prefix IS NOT needed for line magics.']
315
315
316 user_magics = Instance('IPython.core.magics.UserMagics')
316 user_magics = Instance('IPython.core.magics.UserMagics')
317
317
318 def __init__(self, shell=None, config=None, user_magics=None, **traits):
318 def __init__(self, shell=None, config=None, user_magics=None, **traits):
319
319
320 super(MagicsManager, self).__init__(shell=shell, config=config,
320 super(MagicsManager, self).__init__(shell=shell, config=config,
321 user_magics=user_magics, **traits)
321 user_magics=user_magics, **traits)
322 self.magics = dict(line={}, cell={})
322 self.magics = dict(line={}, cell={})
323 # Let's add the user_magics to the registry for uniformity, so *all*
323 # Let's add the user_magics to the registry for uniformity, so *all*
324 # registered magic containers can be found there.
324 # registered magic containers can be found there.
325 self.registry[user_magics.__class__.__name__] = user_magics
325 self.registry[user_magics.__class__.__name__] = user_magics
326
326
327 def auto_status(self):
327 def auto_status(self):
328 """Return descriptive string with automagic status."""
328 """Return descriptive string with automagic status."""
329 return self._auto_status[self.auto_magic]
329 return self._auto_status[self.auto_magic]
330
330
331 def lsmagic(self):
331 def lsmagic(self):
332 """Return a dict of currently available magic functions.
332 """Return a dict of currently available magic functions.
333
333
334 The return dict has the keys 'line' and 'cell', corresponding to the
334 The return dict has the keys 'line' and 'cell', corresponding to the
335 two types of magics we support. Each value is a list of names.
335 two types of magics we support. Each value is a list of names.
336 """
336 """
337 return self.magics
337 return self.magics
338
338
339 def lsmagic_docs(self, brief=False, missing=''):
339 def lsmagic_docs(self, brief=False, missing=''):
340 """Return dict of documentation of magic functions.
340 """Return dict of documentation of magic functions.
341
341
342 The return dict has the keys 'line' and 'cell', corresponding to the
342 The return dict has the keys 'line' and 'cell', corresponding to the
343 two types of magics we support. Each value is a dict keyed by magic
343 two types of magics we support. Each value is a dict keyed by magic
344 name whose value is the function docstring. If a docstring is
344 name whose value is the function docstring. If a docstring is
345 unavailable, the value of `missing` is used instead.
345 unavailable, the value of `missing` is used instead.
346
346
347 If brief is True, only the first line of each docstring will be returned.
347 If brief is True, only the first line of each docstring will be returned.
348 """
348 """
349 docs = {}
349 docs = {}
350 for m_type in self.magics:
350 for m_type in self.magics:
351 m_docs = {}
351 m_docs = {}
352 for m_name, m_func in iteritems(self.magics[m_type]):
352 for m_name, m_func in iteritems(self.magics[m_type]):
353 if m_func.__doc__:
353 if m_func.__doc__:
354 if brief:
354 if brief:
355 m_docs[m_name] = m_func.__doc__.split('\n', 1)[0]
355 m_docs[m_name] = m_func.__doc__.split('\n', 1)[0]
356 else:
356 else:
357 m_docs[m_name] = m_func.__doc__.rstrip()
357 m_docs[m_name] = m_func.__doc__.rstrip()
358 else:
358 else:
359 m_docs[m_name] = missing
359 m_docs[m_name] = missing
360 docs[m_type] = m_docs
360 docs[m_type] = m_docs
361 return docs
361 return docs
362
362
363 def register(self, *magic_objects):
363 def register(self, *magic_objects):
364 """Register one or more instances of Magics.
364 """Register one or more instances of Magics.
365
365
366 Take one or more classes or instances of classes that subclass the main
366 Take one or more classes or instances of classes that subclass the main
367 `core.Magic` class, and register them with IPython to use the magic
367 `core.Magic` class, and register them with IPython to use the magic
368 functions they provide. The registration process will then ensure that
368 functions they provide. The registration process will then ensure that
369 any methods that have been decorated to provide line and/or cell magics will
369 any methods that have been decorated to provide line and/or cell magics will
370 be recognized with the `%x`/`%%x` syntax as a line/cell magic
370 be recognized with the `%x`/`%%x` syntax as a line/cell magic
371 respectively.
371 respectively.
372
372
373 If classes are given, they will be instantiated with the default
373 If classes are given, they will be instantiated with the default
374 constructor. If your classes need a custom constructor, you should
374 constructor. If your classes need a custom constructor, you should
375 instantiate them first and pass the instance.
375 instantiate them first and pass the instance.
376
376
377 The provided arguments can be an arbitrary mix of classes and instances.
377 The provided arguments can be an arbitrary mix of classes and instances.
378
378
379 Parameters
379 Parameters
380 ----------
380 ----------
381 magic_objects : one or more classes or instances
381 magic_objects : one or more classes or instances
382 """
382 """
383 # Start by validating them to ensure they have all had their magic
383 # Start by validating them to ensure they have all had their magic
384 # methods registered at the instance level
384 # methods registered at the instance level
385 for m in magic_objects:
385 for m in magic_objects:
386 if not m.registered:
386 if not m.registered:
387 raise ValueError("Class of magics %r was constructed without "
387 raise ValueError("Class of magics %r was constructed without "
388 "the @register_magics class decorator")
388 "the @register_magics class decorator")
389 if type(m) in (type, MetaHasTraits):
389 if type(m) in (type, MetaHasTraits):
390 # If we're given an uninstantiated class
390 # If we're given an uninstantiated class
391 m = m(shell=self.shell)
391 m = m(shell=self.shell)
392
392
393 # Now that we have an instance, we can register it and update the
393 # Now that we have an instance, we can register it and update the
394 # table of callables
394 # table of callables
395 self.registry[m.__class__.__name__] = m
395 self.registry[m.__class__.__name__] = m
396 for mtype in magic_kinds:
396 for mtype in magic_kinds:
397 self.magics[mtype].update(m.magics[mtype])
397 self.magics[mtype].update(m.magics[mtype])
398
398
399 def register_function(self, func, magic_kind='line', magic_name=None):
399 def register_function(self, func, magic_kind='line', magic_name=None):
400 """Expose a standalone function as magic function for IPython.
400 """Expose a standalone function as magic function for IPython.
401
401
402 This will create an IPython magic (line, cell or both) from a
402 This will create an IPython magic (line, cell or both) from a
403 standalone function. The functions should have the following
403 standalone function. The functions should have the following
404 signatures:
404 signatures:
405
405
406 * For line magics: `def f(line)`
406 * For line magics: `def f(line)`
407 * For cell magics: `def f(line, cell)`
407 * For cell magics: `def f(line, cell)`
408 * For a function that does both: `def f(line, cell=None)`
408 * For a function that does both: `def f(line, cell=None)`
409
409
410 In the latter case, the function will be called with `cell==None` when
410 In the latter case, the function will be called with `cell==None` when
411 invoked as `%f`, and with cell as a string when invoked as `%%f`.
411 invoked as `%f`, and with cell as a string when invoked as `%%f`.
412
412
413 Parameters
413 Parameters
414 ----------
414 ----------
415 func : callable
415 func : callable
416 Function to be registered as a magic.
416 Function to be registered as a magic.
417
417
418 magic_kind : str
418 magic_kind : str
419 Kind of magic, one of 'line', 'cell' or 'line_cell'
419 Kind of magic, one of 'line', 'cell' or 'line_cell'
420
420
421 magic_name : optional str
421 magic_name : optional str
422 If given, the name the magic will have in the IPython namespace. By
422 If given, the name the magic will have in the IPython namespace. By
423 default, the name of the function itself is used.
423 default, the name of the function itself is used.
424 """
424 """
425
425
426 # Create the new method in the user_magics and register it in the
426 # Create the new method in the user_magics and register it in the
427 # global table
427 # global table
428 validate_type(magic_kind)
428 validate_type(magic_kind)
429 magic_name = func.func_name if magic_name is None else magic_name
429 magic_name = func.__name__ if magic_name is None else magic_name
430 setattr(self.user_magics, magic_name, func)
430 setattr(self.user_magics, magic_name, func)
431 record_magic(self.magics, magic_kind, magic_name, func)
431 record_magic(self.magics, magic_kind, magic_name, func)
432
432
433 def define_magic(self, name, func):
433 def define_magic(self, name, func):
434 """[Deprecated] Expose own function as magic function for IPython.
434 """[Deprecated] Expose own function as magic function for IPython.
435
435
436 Example::
436 Example::
437
437
438 def foo_impl(self, parameter_s=''):
438 def foo_impl(self, parameter_s=''):
439 'My very own magic!. (Use docstrings, IPython reads them).'
439 'My very own magic!. (Use docstrings, IPython reads them).'
440 print('Magic function. Passed parameter is between < >:')
440 print('Magic function. Passed parameter is between < >:')
441 print('<%s>' % parameter_s)
441 print('<%s>' % parameter_s)
442 print('The self object is:', self)
442 print('The self object is:', self)
443
443
444 ip.define_magic('foo',foo_impl)
444 ip.define_magic('foo',foo_impl)
445 """
445 """
446 meth = types.MethodType(func, self.user_magics)
446 meth = types.MethodType(func, self.user_magics)
447 setattr(self.user_magics, name, meth)
447 setattr(self.user_magics, name, meth)
448 record_magic(self.magics, 'line', name, meth)
448 record_magic(self.magics, 'line', name, meth)
449
449
450 def register_alias(self, alias_name, magic_name, magic_kind='line'):
450 def register_alias(self, alias_name, magic_name, magic_kind='line'):
451 """Register an alias to a magic function.
451 """Register an alias to a magic function.
452
452
453 The alias is an instance of :class:`MagicAlias`, which holds the
453 The alias is an instance of :class:`MagicAlias`, which holds the
454 name and kind of the magic it should call. Binding is done at
454 name and kind of the magic it should call. Binding is done at
455 call time, so if the underlying magic function is changed the alias
455 call time, so if the underlying magic function is changed the alias
456 will call the new function.
456 will call the new function.
457
457
458 Parameters
458 Parameters
459 ----------
459 ----------
460 alias_name : str
460 alias_name : str
461 The name of the magic to be registered.
461 The name of the magic to be registered.
462
462
463 magic_name : str
463 magic_name : str
464 The name of an existing magic.
464 The name of an existing magic.
465
465
466 magic_kind : str
466 magic_kind : str
467 Kind of magic, one of 'line' or 'cell'
467 Kind of magic, one of 'line' or 'cell'
468 """
468 """
469
469
470 # `validate_type` is too permissive, as it allows 'line_cell'
470 # `validate_type` is too permissive, as it allows 'line_cell'
471 # which we do not handle.
471 # which we do not handle.
472 if magic_kind not in magic_kinds:
472 if magic_kind not in magic_kinds:
473 raise ValueError('magic_kind must be one of %s, %s given' %
473 raise ValueError('magic_kind must be one of %s, %s given' %
474 (magic_kinds, magic_kind))
474 (magic_kinds, magic_kind))
475
475
476 alias = MagicAlias(self.shell, magic_name, magic_kind)
476 alias = MagicAlias(self.shell, magic_name, magic_kind)
477 setattr(self.user_magics, alias_name, alias)
477 setattr(self.user_magics, alias_name, alias)
478 record_magic(self.magics, magic_kind, alias_name, alias)
478 record_magic(self.magics, magic_kind, alias_name, alias)
479
479
480 # Key base class that provides the central functionality for magics.
480 # Key base class that provides the central functionality for magics.
481
481
482
482
483 class Magics(Configurable):
483 class Magics(Configurable):
484 """Base class for implementing magic functions.
484 """Base class for implementing magic functions.
485
485
486 Shell functions which can be reached as %function_name. All magic
486 Shell functions which can be reached as %function_name. All magic
487 functions should accept a string, which they can parse for their own
487 functions should accept a string, which they can parse for their own
488 needs. This can make some functions easier to type, eg `%cd ../`
488 needs. This can make some functions easier to type, eg `%cd ../`
489 vs. `%cd("../")`
489 vs. `%cd("../")`
490
490
491 Classes providing magic functions need to subclass this class, and they
491 Classes providing magic functions need to subclass this class, and they
492 MUST:
492 MUST:
493
493
494 - Use the method decorators `@line_magic` and `@cell_magic` to decorate
494 - Use the method decorators `@line_magic` and `@cell_magic` to decorate
495 individual methods as magic functions, AND
495 individual methods as magic functions, AND
496
496
497 - Use the class decorator `@magics_class` to ensure that the magic
497 - Use the class decorator `@magics_class` to ensure that the magic
498 methods are properly registered at the instance level upon instance
498 methods are properly registered at the instance level upon instance
499 initialization.
499 initialization.
500
500
501 See :mod:`IPython.core.magics` for examples of actual implementation classes.
501 See :mod:`IPython.core.magics` for examples of actual implementation classes.
502 """
502 """
503 # Dict holding all command-line options for each magic.
503 # Dict holding all command-line options for each magic.
504 options_table = None
504 options_table = None
505 # Dict for the mapping of magic names to methods, set by class decorator
505 # Dict for the mapping of magic names to methods, set by class decorator
506 magics = None
506 magics = None
507 # Flag to check that the class decorator was properly applied
507 # Flag to check that the class decorator was properly applied
508 registered = False
508 registered = False
509 # Instance of IPython shell
509 # Instance of IPython shell
510 shell = None
510 shell = None
511
511
512 def __init__(self, shell=None, **kwargs):
512 def __init__(self, shell=None, **kwargs):
513 if not(self.__class__.registered):
513 if not(self.__class__.registered):
514 raise ValueError('Magics subclass without registration - '
514 raise ValueError('Magics subclass without registration - '
515 'did you forget to apply @magics_class?')
515 'did you forget to apply @magics_class?')
516 if shell is not None:
516 if shell is not None:
517 if hasattr(shell, 'configurables'):
517 if hasattr(shell, 'configurables'):
518 shell.configurables.append(self)
518 shell.configurables.append(self)
519 if hasattr(shell, 'config'):
519 if hasattr(shell, 'config'):
520 kwargs.setdefault('parent', shell)
520 kwargs.setdefault('parent', shell)
521 kwargs['shell'] = shell
521 kwargs['shell'] = shell
522
522
523 self.shell = shell
523 self.shell = shell
524 self.options_table = {}
524 self.options_table = {}
525 # The method decorators are run when the instance doesn't exist yet, so
525 # The method decorators are run when the instance doesn't exist yet, so
526 # they can only record the names of the methods they are supposed to
526 # they can only record the names of the methods they are supposed to
527 grab. Only now that the instance exists can we create the proper
527 grab. Only now that the instance exists can we create the proper
528 # mapping to bound methods. So we read the info off the original names
528 # mapping to bound methods. So we read the info off the original names
529 # table and replace each method name by the actual bound method.
529 # table and replace each method name by the actual bound method.
530 # But we mustn't clobber the *class* mapping, in case of multiple instances.
530 # But we mustn't clobber the *class* mapping, in case of multiple instances.
531 class_magics = self.magics
531 class_magics = self.magics
532 self.magics = {}
532 self.magics = {}
533 for mtype in magic_kinds:
533 for mtype in magic_kinds:
534 tab = self.magics[mtype] = {}
534 tab = self.magics[mtype] = {}
535 cls_tab = class_magics[mtype]
535 cls_tab = class_magics[mtype]
536 for magic_name, meth_name in iteritems(cls_tab):
536 for magic_name, meth_name in iteritems(cls_tab):
537 if isinstance(meth_name, string_types):
537 if isinstance(meth_name, string_types):
538 # it's a method name, grab it
538 # it's a method name, grab it
539 tab[magic_name] = getattr(self, meth_name)
539 tab[magic_name] = getattr(self, meth_name)
540 else:
540 else:
541 # it's the real thing
541 # it's the real thing
542 tab[magic_name] = meth_name
542 tab[magic_name] = meth_name
543 # Configurable **needs** to be initiated at the end or the config
543 # Configurable **needs** to be initiated at the end or the config
544 # magics get screwed up.
544 # magics get screwed up.
545 super(Magics, self).__init__(**kwargs)
545 super(Magics, self).__init__(**kwargs)
546
546
547 def arg_err(self,func):
547 def arg_err(self,func):
548 """Print docstring if incorrect arguments were passed"""
548 """Print docstring if incorrect arguments were passed"""
549 print('Error in arguments:')
549 print('Error in arguments:')
550 print(oinspect.getdoc(func))
550 print(oinspect.getdoc(func))
551
551
552 def format_latex(self, strng):
552 def format_latex(self, strng):
553 """Format a string for latex inclusion."""
553 """Format a string for latex inclusion."""
554
554
555 # Characters that need to be escaped for latex:
555 # Characters that need to be escaped for latex:
556 escape_re = re.compile(r'(%|_|\$|#|&)',re.MULTILINE)
556 escape_re = re.compile(r'(%|_|\$|#|&)',re.MULTILINE)
557 # Magic command names as headers:
557 # Magic command names as headers:
558 cmd_name_re = re.compile(r'^(%s.*?):' % ESC_MAGIC,
558 cmd_name_re = re.compile(r'^(%s.*?):' % ESC_MAGIC,
559 re.MULTILINE)
559 re.MULTILINE)
560 # Magic commands
560 # Magic commands
561 cmd_re = re.compile(r'(?P<cmd>%s.+?\b)(?!\}\}:)' % ESC_MAGIC,
561 cmd_re = re.compile(r'(?P<cmd>%s.+?\b)(?!\}\}:)' % ESC_MAGIC,
562 re.MULTILINE)
562 re.MULTILINE)
563 # Paragraph continue
563 # Paragraph continue
564 par_re = re.compile(r'\\$',re.MULTILINE)
564 par_re = re.compile(r'\\$',re.MULTILINE)
565
565
566 # The "\n" symbol
566 # The "\n" symbol
567 newline_re = re.compile(r'\\n')
567 newline_re = re.compile(r'\\n')
568
568
569 # Now build the string for output:
569 # Now build the string for output:
570 #strng = cmd_name_re.sub(r'\n\\texttt{\\textsl{\\large \1}}:',strng)
570 #strng = cmd_name_re.sub(r'\n\\texttt{\\textsl{\\large \1}}:',strng)
571 strng = cmd_name_re.sub(r'\n\\bigskip\n\\texttt{\\textbf{ \1}}:',
571 strng = cmd_name_re.sub(r'\n\\bigskip\n\\texttt{\\textbf{ \1}}:',
572 strng)
572 strng)
573 strng = cmd_re.sub(r'\\texttt{\g<cmd>}',strng)
573 strng = cmd_re.sub(r'\\texttt{\g<cmd>}',strng)
574 strng = par_re.sub(r'\\\\',strng)
574 strng = par_re.sub(r'\\\\',strng)
575 strng = escape_re.sub(r'\\\1',strng)
575 strng = escape_re.sub(r'\\\1',strng)
576 strng = newline_re.sub(r'\\textbackslash{}n',strng)
576 strng = newline_re.sub(r'\\textbackslash{}n',strng)
577 return strng
577 return strng
578
578
579 def parse_options(self, arg_str, opt_str, *long_opts, **kw):
579 def parse_options(self, arg_str, opt_str, *long_opts, **kw):
580 """Parse options passed to an argument string.
580 """Parse options passed to an argument string.
581
581
582 The interface is similar to that of getopt(), but it returns back a
582 The interface is similar to that of getopt(), but it returns back a
583 Struct with the options as keys and the stripped argument string still
583 Struct with the options as keys and the stripped argument string still
584 as a string.
584 as a string.
585
585
586 arg_str is split into a true sys.argv vector by using shlex.split.
586 arg_str is split into a true sys.argv vector by using shlex.split.
587 This allows us to easily expand variables, glob files, quote
587 This allows us to easily expand variables, glob files, quote
588 arguments, etc.
588 arguments, etc.
589
589
590 Options:
590 Options:
591 -mode: default 'string'. If given as 'list', the argument string is
591 -mode: default 'string'. If given as 'list', the argument string is
592 returned as a list (split on whitespace) instead of a string.
592 returned as a list (split on whitespace) instead of a string.
593
593
594 -list_all: put all option values in lists. Normally only options
594 -list_all: put all option values in lists. Normally only options
595 appearing more than once are put in a list.
595 appearing more than once are put in a list.
596
596
597 -posix (True): whether to split the input line in POSIX mode or not,
597 -posix (True): whether to split the input line in POSIX mode or not,
598 as per the conventions outlined in the shlex module from the
598 as per the conventions outlined in the shlex module from the
599 standard library."""
599 standard library."""
600
600
601 # inject default options at the beginning of the input line
601 # inject default options at the beginning of the input line
602 caller = sys._getframe(1).f_code.co_name
602 caller = sys._getframe(1).f_code.co_name
603 arg_str = '%s %s' % (self.options_table.get(caller,''),arg_str)
603 arg_str = '%s %s' % (self.options_table.get(caller,''),arg_str)
604
604
605 mode = kw.get('mode','string')
605 mode = kw.get('mode','string')
606 if mode not in ['string','list']:
606 if mode not in ['string','list']:
607 raise ValueError('incorrect mode given: %s' % mode)
607 raise ValueError('incorrect mode given: %s' % mode)
608 # Get options
608 # Get options
609 list_all = kw.get('list_all',0)
609 list_all = kw.get('list_all',0)
610 posix = kw.get('posix', os.name == 'posix')
610 posix = kw.get('posix', os.name == 'posix')
611 strict = kw.get('strict', True)
611 strict = kw.get('strict', True)
612
612
613 # Check if we have more than one argument to warrant extra processing:
613 # Check if we have more than one argument to warrant extra processing:
614 odict = {} # Dictionary with options
614 odict = {} # Dictionary with options
615 args = arg_str.split()
615 args = arg_str.split()
616 if len(args) >= 1:
616 if len(args) >= 1:
617 # If the list of inputs only has 0 or 1 thing in it, there's no
617 # If the list of inputs only has 0 or 1 thing in it, there's no
618 # need to look for options
618 # need to look for options
619 argv = arg_split(arg_str, posix, strict)
619 argv = arg_split(arg_str, posix, strict)
620 # Do regular option processing
620 # Do regular option processing
621 try:
621 try:
622 opts,args = getopt(argv, opt_str, long_opts)
622 opts,args = getopt(argv, opt_str, long_opts)
623 except GetoptError as e:
623 except GetoptError as e:
624 raise UsageError('%s ( allowed: "%s" %s)' % (e.msg,opt_str,
624 raise UsageError('%s ( allowed: "%s" %s)' % (e.msg,opt_str,
625 " ".join(long_opts)))
625 " ".join(long_opts)))
626 for o,a in opts:
626 for o,a in opts:
627 if o.startswith('--'):
627 if o.startswith('--'):
628 o = o[2:]
628 o = o[2:]
629 else:
629 else:
630 o = o[1:]
630 o = o[1:]
631 try:
631 try:
632 odict[o].append(a)
632 odict[o].append(a)
633 except AttributeError:
633 except AttributeError:
634 odict[o] = [odict[o],a]
634 odict[o] = [odict[o],a]
635 except KeyError:
635 except KeyError:
636 if list_all:
636 if list_all:
637 odict[o] = [a]
637 odict[o] = [a]
638 else:
638 else:
639 odict[o] = a
639 odict[o] = a
640
640
641 # Prepare opts,args for return
641 # Prepare opts,args for return
642 opts = Struct(odict)
642 opts = Struct(odict)
643 if mode == 'string':
643 if mode == 'string':
644 args = ' '.join(args)
644 args = ' '.join(args)
645
645
646 return opts,args
646 return opts,args
647
647
648 def default_option(self, fn, optstr):
648 def default_option(self, fn, optstr):
649 """Make an entry in the options_table for fn, with value optstr"""
649 """Make an entry in the options_table for fn, with value optstr"""
650
650
651 if fn not in self.lsmagic():
651 if fn not in self.lsmagic():
652 error("%s is not a magic function" % fn)
652 error("%s is not a magic function" % fn)
653 self.options_table[fn] = optstr
653 self.options_table[fn] = optstr
654
654
655
655
656 class MagicAlias(object):
656 class MagicAlias(object):
657 """An alias to another magic function.
657 """An alias to another magic function.
658
658
659 An alias is determined by its magic name and magic kind. Lookup
659 An alias is determined by its magic name and magic kind. Lookup
660 is done at call time, so if the underlying magic changes the alias
660 is done at call time, so if the underlying magic changes the alias
661 will call the new function.
661 will call the new function.
662
662
663 Use the :meth:`MagicsManager.register_alias` method or the
663 Use the :meth:`MagicsManager.register_alias` method or the
664 `%alias_magic` magic function to create and register a new alias.
664 `%alias_magic` magic function to create and register a new alias.
665 """
665 """
666 def __init__(self, shell, magic_name, magic_kind):
666 def __init__(self, shell, magic_name, magic_kind):
667 self.shell = shell
667 self.shell = shell
668 self.magic_name = magic_name
668 self.magic_name = magic_name
669 self.magic_kind = magic_kind
669 self.magic_kind = magic_kind
670
670
671 self.pretty_target = '%s%s' % (magic_escapes[self.magic_kind], self.magic_name)
671 self.pretty_target = '%s%s' % (magic_escapes[self.magic_kind], self.magic_name)
672 self.__doc__ = "Alias for `%s`." % self.pretty_target
672 self.__doc__ = "Alias for `%s`." % self.pretty_target
673
673
674 self._in_call = False
674 self._in_call = False
675
675
676 def __call__(self, *args, **kwargs):
676 def __call__(self, *args, **kwargs):
677 """Call the magic alias."""
677 """Call the magic alias."""
678 fn = self.shell.find_magic(self.magic_name, self.magic_kind)
678 fn = self.shell.find_magic(self.magic_name, self.magic_kind)
679 if fn is None:
679 if fn is None:
680 raise UsageError("Magic `%s` not found." % self.pretty_target)
680 raise UsageError("Magic `%s` not found." % self.pretty_target)
681
681
682 # Protect against infinite recursion.
682 # Protect against infinite recursion.
683 if self._in_call:
683 if self._in_call:
684 raise UsageError("Infinite recursion detected; "
684 raise UsageError("Infinite recursion detected; "
685 "magic aliases cannot call themselves.")
685 "magic aliases cannot call themselves.")
686 self._in_call = True
686 self._in_call = True
687 try:
687 try:
688 return fn(*args, **kwargs)
688 return fn(*args, **kwargs)
689 finally:
689 finally:
690 self._in_call = False
690 self._in_call = False
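End of the first file (the magic machinery itself). A minimal usage sketch of the API described above, assuming an active IPython session where `get_ipython()` is defined; the class and function names (`ShoutMagics`, `shout`, `count`, `reverse`) are invented for illustration and are not part of the patch:

from IPython.core.magic import (Magics, magics_class, line_magic,
                                cell_magic, register_line_magic)

@magics_class                      # mandatory, per the docstring above
class ShoutMagics(Magics):
    """Example Magics subclass."""

    @line_magic
    def shout(self, line):
        """Echo the line in upper case (%shout)."""
        return line.upper()

    @cell_magic
    def count(self, line, cell):
        """Count the lines in the cell body (%%count)."""
        return len(cell.splitlines())

# Classes are instantiated with the default constructor by the manager;
# pass an instance instead if a custom constructor is needed.
ip = get_ipython()
ip.register_magics(ShoutMagics)

# Standalone functions can be registered directly; this only works where
# get_ipython() already exists (e.g. a profile's startup files).
@register_line_magic
def reverse(line):
    """Return the line reversed (%reverse)."""
    return line[::-1]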
@@ -1,873 +1,873 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """Tools for inspecting Python objects.
2 """Tools for inspecting Python objects.
3
3
4 Uses syntax highlighting for presenting the various information elements.
4 Uses syntax highlighting for presenting the various information elements.
5
5
6 Similar in spirit to the inspect module, but all calls take a name argument to
6 Similar in spirit to the inspect module, but all calls take a name argument to
7 reference the name under which an object is being read.
7 reference the name under which an object is being read.
8 """
8 """
9
9
10 #*****************************************************************************
10 #*****************************************************************************
11 # Copyright (C) 2001-2004 Fernando Perez <fperez@colorado.edu>
11 # Copyright (C) 2001-2004 Fernando Perez <fperez@colorado.edu>
12 #
12 #
13 # Distributed under the terms of the BSD License. The full license is in
13 # Distributed under the terms of the BSD License. The full license is in
14 # the file COPYING, distributed as part of this software.
14 # the file COPYING, distributed as part of this software.
15 #*****************************************************************************
15 #*****************************************************************************
16 from __future__ import print_function
16 from __future__ import print_function
17
17
18 __all__ = ['Inspector','InspectColors']
18 __all__ = ['Inspector','InspectColors']
19
19
20 # stdlib modules
20 # stdlib modules
21 import inspect
21 import inspect
22 import linecache
22 import linecache
23 import os
23 import os
24 import types
24 import types
25 import io as stdlib_io
25 import io as stdlib_io
26
26
27 try:
27 try:
28 from itertools import izip_longest
28 from itertools import izip_longest
29 except ImportError:
29 except ImportError:
30 from itertools import zip_longest as izip_longest
30 from itertools import zip_longest as izip_longest
31
31
32 # IPython's own
32 # IPython's own
33 from IPython.core import page
33 from IPython.core import page
34 from IPython.testing.skipdoctest import skip_doctest_py3
34 from IPython.testing.skipdoctest import skip_doctest_py3
35 from IPython.utils import PyColorize
35 from IPython.utils import PyColorize
36 from IPython.utils import io
36 from IPython.utils import io
37 from IPython.utils import openpy
37 from IPython.utils import openpy
38 from IPython.utils import py3compat
38 from IPython.utils import py3compat
39 from IPython.utils.dir2 import safe_hasattr
39 from IPython.utils.dir2 import safe_hasattr
40 from IPython.utils.text import indent
40 from IPython.utils.text import indent
41 from IPython.utils.wildcard import list_namespace
41 from IPython.utils.wildcard import list_namespace
42 from IPython.utils.coloransi import *
42 from IPython.utils.coloransi import *
43 from IPython.utils.py3compat import cast_unicode, string_types
43 from IPython.utils.py3compat import cast_unicode, string_types
44
44
45 #****************************************************************************
45 #****************************************************************************
46 # Builtin color schemes
46 # Builtin color schemes
47
47
48 Colors = TermColors # just a shorthand
48 Colors = TermColors # just a shorthand
49
49
50 # Build a few color schemes
50 # Build a few color schemes
51 NoColor = ColorScheme(
51 NoColor = ColorScheme(
52 'NoColor',{
52 'NoColor',{
53 'header' : Colors.NoColor,
53 'header' : Colors.NoColor,
54 'normal' : Colors.NoColor # color off (usu. Colors.Normal)
54 'normal' : Colors.NoColor # color off (usu. Colors.Normal)
55 } )
55 } )
56
56
57 LinuxColors = ColorScheme(
57 LinuxColors = ColorScheme(
58 'Linux',{
58 'Linux',{
59 'header' : Colors.LightRed,
59 'header' : Colors.LightRed,
60 'normal' : Colors.Normal # color off (usu. Colors.Normal)
60 'normal' : Colors.Normal # color off (usu. Colors.Normal)
61 } )
61 } )
62
62
63 LightBGColors = ColorScheme(
63 LightBGColors = ColorScheme(
64 'LightBG',{
64 'LightBG',{
65 'header' : Colors.Red,
65 'header' : Colors.Red,
66 'normal' : Colors.Normal # color off (usu. Colors.Normal)
66 'normal' : Colors.Normal # color off (usu. Colors.Normal)
67 } )
67 } )
68
68
69 # Build table of color schemes (needed by the parser)
69 # Build table of color schemes (needed by the parser)
70 InspectColors = ColorSchemeTable([NoColor,LinuxColors,LightBGColors],
70 InspectColors = ColorSchemeTable([NoColor,LinuxColors,LightBGColors],
71 'Linux')
71 'Linux')
72
72
73 #****************************************************************************
73 #****************************************************************************
74 # Auxiliary functions and objects
74 # Auxiliary functions and objects
75
75
76 # See the messaging spec for the definition of all these fields. This list
76 # See the messaging spec for the definition of all these fields. This list
77 # effectively defines the order of display
77 # effectively defines the order of display
78 info_fields = ['type_name', 'base_class', 'string_form', 'namespace',
78 info_fields = ['type_name', 'base_class', 'string_form', 'namespace',
79 'length', 'file', 'definition', 'docstring', 'source',
79 'length', 'file', 'definition', 'docstring', 'source',
80 'init_definition', 'class_docstring', 'init_docstring',
80 'init_definition', 'class_docstring', 'init_docstring',
81 'call_def', 'call_docstring',
81 'call_def', 'call_docstring',
82 # These won't be printed but will be used to determine how to
82 # These won't be printed but will be used to determine how to
83 # format the object
83 # format the object
84 'ismagic', 'isalias', 'isclass', 'argspec', 'found', 'name'
84 'ismagic', 'isalias', 'isclass', 'argspec', 'found', 'name'
85 ]
85 ]
86
86
87
87
88 def object_info(**kw):
88 def object_info(**kw):
89 """Make an object info dict with all fields present."""
89 """Make an object info dict with all fields present."""
90 infodict = dict(izip_longest(info_fields, [None]))
90 infodict = dict(izip_longest(info_fields, [None]))
91 infodict.update(kw)
91 infodict.update(kw)
92 return infodict
92 return infodict
93
93
94
94
95 def get_encoding(obj):
95 def get_encoding(obj):
96 """Get encoding for python source file defining obj
96 """Get encoding for python source file defining obj
97
97
98 Returns None if obj is not defined in a sourcefile.
98 Returns None if obj is not defined in a sourcefile.
99 """
99 """
100 ofile = find_file(obj)
100 ofile = find_file(obj)
101 # Only attempt this for objects whose defining file exists on the
101 # Only attempt this for objects whose defining file exists on the
102 # filesystem and is not a binary extension; otherwise there is no
102 # filesystem and is not a binary extension; otherwise there is no
103 # source encoding to detect.
103 # source encoding to detect.
104 if ofile is None:
104 if ofile is None:
105 return None
105 return None
106 elif ofile.endswith(('.so', '.dll', '.pyd')):
106 elif ofile.endswith(('.so', '.dll', '.pyd')):
107 return None
107 return None
108 elif not os.path.isfile(ofile):
108 elif not os.path.isfile(ofile):
109 return None
109 return None
110 else:
110 else:
111 # Read the raw bytes and let openpy detect the declared source
111 # Read the raw bytes and let openpy detect the declared source
112 # encoding (PEP 263 coding cookie or BOM) from the first lines of
112 # encoding (PEP 263 coding cookie or BOM) from the first lines of
113 # the file.
113 # the file.
114 buffer = stdlib_io.open(ofile, 'rb') # Tweaked to use io.open for Python 2
114 buffer = stdlib_io.open(ofile, 'rb') # Tweaked to use io.open for Python 2
115 encoding, lines = openpy.detect_encoding(buffer.readline)
115 encoding, lines = openpy.detect_encoding(buffer.readline)
116 return encoding
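
For instance (a sketch, assuming this module is importable as
IPython.core.oinspect), a function defined in a .py file reports its source
encoding, while a builtin with no source file reports None::

    from IPython.core import oinspect

    print(oinspect.get_encoding(oinspect.get_encoding))  # e.g. 'utf-8'
    print(oinspect.get_encoding(len))                    # None: no source file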
116 return encoding
117
117
118 def getdoc(obj):
118 def getdoc(obj):
119 """Stable wrapper around inspect.getdoc.
119 """Stable wrapper around inspect.getdoc.
120
120
121 This can't crash because of attribute problems.
121 This can't crash because of attribute problems.
122
122
123 It also attempts to call a getdoc() method on the given object. This
123 It also attempts to call a getdoc() method on the given object. This
124 allows objects which provide their docstrings via non-standard mechanisms
124 allows objects which provide their docstrings via non-standard mechanisms
125 (like Pyro proxies) to still be inspected by ipython's ? system."""
125 (like Pyro proxies) to still be inspected by ipython's ? system."""
126 # Allow objects to offer customized documentation via a getdoc method:
126 # Allow objects to offer customized documentation via a getdoc method:
127 try:
127 try:
128 ds = obj.getdoc()
128 ds = obj.getdoc()
129 except Exception:
129 except Exception:
130 pass
130 pass
131 else:
131 else:
132 # if we get extra info, we add it to the normal docstring.
132 # if we get extra info, we add it to the normal docstring.
133 if isinstance(ds, string_types):
133 if isinstance(ds, string_types):
134 return inspect.cleandoc(ds)
134 return inspect.cleandoc(ds)
135
135
136 try:
136 try:
137 docstr = inspect.getdoc(obj)
137 docstr = inspect.getdoc(obj)
138 encoding = get_encoding(obj)
138 encoding = get_encoding(obj)
139 return py3compat.cast_unicode(docstr, encoding=encoding)
139 return py3compat.cast_unicode(docstr, encoding=encoding)
140 except Exception:
140 except Exception:
141 # Harden against an inspect failure, which can occur with
141 # Harden against an inspect failure, which can occur with
142 # SWIG-wrapped extensions.
142 # SWIG-wrapped extensions.
144 return None
144 return None
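
The non-standard hook described in the docstring can be exercised with any
object that grows a getdoc() method; the hypothetical class below is enough
to see the custom text win over the class docstring::

    class RemoteProxy(object):
        """Fallback docstring."""
        def getdoc(self):
            return "Documentation fetched from the remote side."

    getdoc(RemoteProxy())   # -> 'Documentation fetched from the remote side.'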
145
145
146
146
147 def getsource(obj,is_binary=False):
147 def getsource(obj,is_binary=False):
148 """Wrapper around inspect.getsource.
148 """Wrapper around inspect.getsource.
149
149
150 This can be modified by other projects to provide customized source
150 This can be modified by other projects to provide customized source
151 extraction.
151 extraction.
152
152
153 Inputs:
153 Inputs:
154
154
155 - obj: an object whose source code we will attempt to extract.
155 - obj: an object whose source code we will attempt to extract.
156
156
157 Optional inputs:
157 Optional inputs:
158
158
159 - is_binary: whether the object is known to come from a binary source.
159 - is_binary: whether the object is known to come from a binary source.
160 This implementation will skip returning any output for binary objects, but
160 This implementation will skip returning any output for binary objects, but
161 custom extractors may know how to meaningfully process them."""
161 custom extractors may know how to meaningfully process them."""
162
162
163 if is_binary:
163 if is_binary:
164 return None
164 return None
165 else:
165 else:
166 # get source if obj was decorated with @decorator
166 # get source if obj was decorated with @decorator
167 if hasattr(obj,"__wrapped__"):
167 if hasattr(obj,"__wrapped__"):
168 obj = obj.__wrapped__
168 obj = obj.__wrapped__
169 try:
169 try:
170 src = inspect.getsource(obj)
170 src = inspect.getsource(obj)
171 except TypeError:
171 except TypeError:
172 if hasattr(obj,'__class__'):
172 if hasattr(obj,'__class__'):
173 src = inspect.getsource(obj.__class__)
173 src = inspect.getsource(obj.__class__)
174 encoding = get_encoding(obj)
174 encoding = get_encoding(obj)
175 return cast_unicode(src, encoding=encoding)
175 return cast_unicode(src, encoding=encoding)
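
Because the __wrapped__ attribute is followed, asking for the source of a
decorated callable returns the body of the original function rather than the
wrapper. A sketch (the manual __wrapped__ assignment stands in for what
decorator-style tools set, and both functions must live in a real .py file)::

    def original(x):
        return x + 1

    def wrapper(x):
        return original(x)
    wrapper.__wrapped__ = original

    print(getsource(wrapper))   # prints the source of original(), not wrapper()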
176
176
177 def getargspec(obj):
177 def getargspec(obj):
178 """Get the names and default values of a function's arguments.
178 """Get the names and default values of a function's arguments.
179
179
180 A tuple of four things is returned: (args, varargs, varkw, defaults).
180 A tuple of four things is returned: (args, varargs, varkw, defaults).
181 'args' is a list of the argument names (it may contain nested lists).
181 'args' is a list of the argument names (it may contain nested lists).
182 'varargs' and 'varkw' are the names of the * and ** arguments or None.
182 'varargs' and 'varkw' are the names of the * and ** arguments or None.
183 'defaults' is an n-tuple of the default values of the last n arguments.
183 'defaults' is an n-tuple of the default values of the last n arguments.
184
184
185 Modified version of inspect.getargspec from the Python Standard
185 Modified version of inspect.getargspec from the Python Standard
186 Library."""
186 Library."""
187
187
188 if inspect.isfunction(obj):
188 if inspect.isfunction(obj):
189 func_obj = obj
189 func_obj = obj
190 elif inspect.ismethod(obj):
190 elif inspect.ismethod(obj):
191 func_obj = obj.__func__   # im_func is Python 2-only; __func__ works on 2.6+ and 3
191 func_obj = obj.__func__   # im_func is Python 2-only; __func__ works on 2.6+ and 3
192 elif hasattr(obj, '__call__'):
192 elif hasattr(obj, '__call__'):
193 func_obj = obj.__call__
193 func_obj = obj.__call__
194 else:
194 else:
195 raise TypeError('arg is not a Python function')
195 raise TypeError('arg is not a Python function')
196 args, varargs, varkw = inspect.getargs(func_obj.func_code)
196 args, varargs, varkw = inspect.getargs(func_obj.__code__)
197 return args, varargs, varkw, func_obj.func_defaults
197 return args, varargs, varkw, func_obj.__defaults__
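
Concretely, for an ordinary function the four pieces line up like this::

    def f(a, b=1, *rest, **kw):
        pass

    getargspec(f)   # -> (['a', 'b'], 'rest', 'kw', (1,))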
198
198
199
199
200 def format_argspec(argspec):
200 def format_argspec(argspec):
201 """Format argspect, convenience wrapper around inspect's.
201 """Format argspect, convenience wrapper around inspect's.
202
202
203 This takes a dict instead of ordered arguments and calls
203 This takes a dict instead of ordered arguments and calls
204 inspect.formatargspec with the arguments in the necessary order.
204 inspect.formatargspec with the arguments in the necessary order.
205 """
205 """
206 return inspect.formatargspec(argspec['args'], argspec['varargs'],
206 return inspect.formatargspec(argspec['args'], argspec['varargs'],
207 argspec['varkw'], argspec['defaults'])
207 argspec['varkw'], argspec['defaults'])
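
So a dict shaped like the tuple returned by getargspec above renders back
into a familiar signature string::

    spec = dict(args=['a', 'b'], varargs='rest', varkw='kw', defaults=(1,))
    format_argspec(spec)   # -> '(a, b=1, *rest, **kw)'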
208
208
209
209
210 def call_tip(oinfo, format_call=True):
210 def call_tip(oinfo, format_call=True):
211 """Extract call tip data from an oinfo dict.
211 """Extract call tip data from an oinfo dict.
212
212
213 Parameters
213 Parameters
214 ----------
214 ----------
215 oinfo : dict
215 oinfo : dict
216
216
217 format_call : bool, optional
217 format_call : bool, optional
218 If True, the call line is formatted and returned as a string. If not, a
218 If True, the call line is formatted and returned as a string. If not, a
219 tuple of (name, argspec) is returned.
219 tuple of (name, argspec) is returned.
220
220
221 Returns
221 Returns
222 -------
222 -------
223 call_info : None, str or (str, dict) tuple.
223 call_info : None, str or (str, dict) tuple.
224 When format_call is True, the whole call information is formatted as a
224 When format_call is True, the whole call information is formatted as a
225 single string. Otherwise, the object's name and its argspec dict are
225 single string. Otherwise, the object's name and its argspec dict are
226 returned. If no call information is available, None is returned.
226 returned. If no call information is available, None is returned.
227
227
228 docstring : str or None
228 docstring : str or None
229 The most relevant docstring for calling purposes is returned, if
229 The most relevant docstring for calling purposes is returned, if
230 available. The priority is: call docstring for callable instances, then
230 available. The priority is: call docstring for callable instances, then
231 constructor docstring for classes, then main object's docstring otherwise
231 constructor docstring for classes, then main object's docstring otherwise
232 (regular functions).
232 (regular functions).
233 """
233 """
234 # Get call definition
234 # Get call definition
235 argspec = oinfo.get('argspec')
235 argspec = oinfo.get('argspec')
236 if argspec is None:
236 if argspec is None:
237 call_line = None
237 call_line = None
238 else:
238 else:
239 # Callable objects will have 'self' as their first argument, prune
239 # Callable objects will have 'self' as their first argument, prune
240 # it out if it's there for clarity (since users do *not* pass an
240 # it out if it's there for clarity (since users do *not* pass an
241 # extra first argument explicitly).
241 # extra first argument explicitly).
242 try:
242 try:
243 has_self = argspec['args'][0] == 'self'
243 has_self = argspec['args'][0] == 'self'
244 except (KeyError, IndexError):
244 except (KeyError, IndexError):
245 pass
245 pass
246 else:
246 else:
247 if has_self:
247 if has_self:
248 argspec['args'] = argspec['args'][1:]
248 argspec['args'] = argspec['args'][1:]
249
249
250 call_line = oinfo['name']+format_argspec(argspec)
250 call_line = oinfo['name']+format_argspec(argspec)
251
251
252 # Now get docstring.
252 # Now get docstring.
253 # The priority is: call docstring, constructor docstring, main one.
253 # The priority is: call docstring, constructor docstring, main one.
254 doc = oinfo.get('call_docstring')
254 doc = oinfo.get('call_docstring')
255 if doc is None:
255 if doc is None:
256 doc = oinfo.get('init_docstring')
256 doc = oinfo.get('init_docstring')
257 if doc is None:
257 if doc is None:
258 doc = oinfo.get('docstring','')
258 doc = oinfo.get('docstring','')
259
259
260 return call_line, doc
260 return call_line, doc
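
Putting the two return values together, a minimal oinfo dict of the kind
Inspector.info builds yields a formatted call line plus the most relevant
docstring::

    oinfo = {'name': 'add', 'docstring': 'Add two numbers.',
             'argspec': dict(args=['a', 'b'], varargs=None,
                             varkw=None, defaults=None)}
    call_tip(oinfo)   # -> ('add(a, b)', 'Add two numbers.')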
261
261
262
262
263 def find_file(obj):
263 def find_file(obj):
264 """Find the absolute path to the file where an object was defined.
264 """Find the absolute path to the file where an object was defined.
265
265
266 This is essentially a robust wrapper around `inspect.getabsfile`.
266 This is essentially a robust wrapper around `inspect.getabsfile`.
267
267
268 Returns None if no file can be found.
268 Returns None if no file can be found.
269
269
270 Parameters
270 Parameters
271 ----------
271 ----------
272 obj : any Python object
272 obj : any Python object
273
273
274 Returns
274 Returns
275 -------
275 -------
276 fname : str
276 fname : str
277 The absolute path to the file where the object was defined.
277 The absolute path to the file where the object was defined.
278 """
278 """
279 # get source if obj was decorated with @decorator
279 # get source if obj was decorated with @decorator
280 if safe_hasattr(obj, '__wrapped__'):
280 if safe_hasattr(obj, '__wrapped__'):
281 obj = obj.__wrapped__
281 obj = obj.__wrapped__
282
282
283 fname = None
283 fname = None
284 try:
284 try:
285 fname = inspect.getabsfile(obj)
285 fname = inspect.getabsfile(obj)
286 except TypeError:
286 except TypeError:
287 # For an instance, the file that matters is where its class was
287 # For an instance, the file that matters is where its class was
288 # declared.
288 # declared.
289 if hasattr(obj, '__class__'):
289 if hasattr(obj, '__class__'):
290 try:
290 try:
291 fname = inspect.getabsfile(obj.__class__)
291 fname = inspect.getabsfile(obj.__class__)
292 except TypeError:
292 except TypeError:
293 # Can happen for builtins
293 # Can happen for builtins
294 pass
294 pass
295 except:
295 except:
296 pass
296 pass
297 return cast_unicode(fname)
297 return cast_unicode(fname)
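
For example, pure-Python callables resolve to the file that defines them,
while builtins, which have no Python source, come back as None::

    import os.path

    find_file(os.path.join)   # absolute path of posixpath.py (or ntpath.py)
    find_file(len)            # None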
298
298
299
299
300 def find_source_lines(obj):
300 def find_source_lines(obj):
301 """Find the line number in a file where an object was defined.
301 """Find the line number in a file where an object was defined.
302
302
303 This is essentially a robust wrapper around `inspect.getsourcelines`.
303 This is essentially a robust wrapper around `inspect.getsourcelines`.
304
304
305 Returns None if no file can be found.
305 Returns None if no file can be found.
306
306
307 Parameters
307 Parameters
308 ----------
308 ----------
309 obj : any Python object
309 obj : any Python object
310
310
311 Returns
311 Returns
312 -------
312 -------
313 lineno : int
313 lineno : int
314 The line number where the object definition starts.
314 The line number where the object definition starts.
315 """
315 """
316 # get source if obj was decorated with @decorator
316 # get source if obj was decorated with @decorator
317 if safe_hasattr(obj, '__wrapped__'):
317 if safe_hasattr(obj, '__wrapped__'):
318 obj = obj.__wrapped__
318 obj = obj.__wrapped__
319
319
320 try:
320 try:
321 try:
321 try:
322 lineno = inspect.getsourcelines(obj)[1]
322 lineno = inspect.getsourcelines(obj)[1]
323 except TypeError:
323 except TypeError:
324 # For instances, try the class object like getsource() does
324 # For instances, try the class object like getsource() does
325 if hasattr(obj, '__class__'):
325 if hasattr(obj, '__class__'):
326 lineno = inspect.getsourcelines(obj.__class__)[1]
326 lineno = inspect.getsourcelines(obj.__class__)[1]
327 else:
327 else:
328 lineno = None
328 lineno = None
329 except:
329 except:
330 return None
330 return None
331
331
332 return lineno
332 return lineno
333
333
334
334
335 class Inspector:
335 class Inspector:
336 def __init__(self, color_table=InspectColors,
336 def __init__(self, color_table=InspectColors,
337 code_color_table=PyColorize.ANSICodeColors,
337 code_color_table=PyColorize.ANSICodeColors,
338 scheme='NoColor',
338 scheme='NoColor',
339 str_detail_level=0):
339 str_detail_level=0):
340 self.color_table = color_table
340 self.color_table = color_table
341 self.parser = PyColorize.Parser(code_color_table,out='str')
341 self.parser = PyColorize.Parser(code_color_table,out='str')
342 self.format = self.parser.format
342 self.format = self.parser.format
343 self.str_detail_level = str_detail_level
343 self.str_detail_level = str_detail_level
344 self.set_active_scheme(scheme)
344 self.set_active_scheme(scheme)
345
345
346 def _getdef(self,obj,oname=''):
346 def _getdef(self,obj,oname=''):
347 """Return the call signature for any callable object.
347 """Return the call signature for any callable object.
348
348
349 If any exception is generated, None is returned instead and the
349 If any exception is generated, None is returned instead and the
350 exception is suppressed."""
350 exception is suppressed."""
351
351
352 try:
352 try:
353 hdef = oname + inspect.formatargspec(*getargspec(obj))
353 hdef = oname + inspect.formatargspec(*getargspec(obj))
354 return cast_unicode(hdef)
354 return cast_unicode(hdef)
355 except:
355 except:
356 return None
356 return None
357
357
358 def __head(self,h):
358 def __head(self,h):
359 """Return a header string with proper colors."""
359 """Return a header string with proper colors."""
360 return '%s%s%s' % (self.color_table.active_colors.header,h,
360 return '%s%s%s' % (self.color_table.active_colors.header,h,
361 self.color_table.active_colors.normal)
361 self.color_table.active_colors.normal)
362
362
363 def set_active_scheme(self, scheme):
363 def set_active_scheme(self, scheme):
364 self.color_table.set_active_scheme(scheme)
364 self.color_table.set_active_scheme(scheme)
365 self.parser.color_table.set_active_scheme(scheme)
365 self.parser.color_table.set_active_scheme(scheme)
366
366
367 def noinfo(self, msg, oname):
367 def noinfo(self, msg, oname):
368 """Generic message when no information is found."""
368 """Generic message when no information is found."""
369 print('No %s found' % msg, end=' ')
369 print('No %s found' % msg, end=' ')
370 if oname:
370 if oname:
371 print('for %s' % oname)
371 print('for %s' % oname)
372 else:
372 else:
373 print()
373 print()
374
374
375 def pdef(self, obj, oname=''):
375 def pdef(self, obj, oname=''):
376 """Print the call signature for any callable object.
376 """Print the call signature for any callable object.
377
377
378 If the object is a class, print the constructor information."""
378 If the object is a class, print the constructor information."""
379
379
380 if not callable(obj):
380 if not callable(obj):
381 print('Object is not callable.')
381 print('Object is not callable.')
382 return
382 return
383
383
384 header = ''
384 header = ''
385
385
386 if inspect.isclass(obj):
386 if inspect.isclass(obj):
387 header = self.__head('Class constructor information:\n')
387 header = self.__head('Class constructor information:\n')
388 obj = obj.__init__
388 obj = obj.__init__
389 elif (not py3compat.PY3) and type(obj) is types.InstanceType:
389 elif (not py3compat.PY3) and type(obj) is types.InstanceType:
390 obj = obj.__call__
390 obj = obj.__call__
391
391
392 output = self._getdef(obj,oname)
392 output = self._getdef(obj,oname)
393 if output is None:
393 if output is None:
394 self.noinfo('definition header',oname)
394 self.noinfo('definition header',oname)
395 else:
395 else:
396 print(header,self.format(output), end=' ', file=io.stdout)
396 print(header,self.format(output), end=' ', file=io.stdout)
397
397
398 # In Python 3, all classes are new-style, so they all have __init__.
398 # In Python 3, all classes are new-style, so they all have __init__.
399 @skip_doctest_py3
399 @skip_doctest_py3
400 def pdoc(self,obj,oname='',formatter = None):
400 def pdoc(self,obj,oname='',formatter = None):
401 """Print the docstring for any object.
401 """Print the docstring for any object.
402
402
403 Optional:
403 Optional:
404 -formatter: a function to run the docstring through for specially
404 -formatter: a function to run the docstring through for specially
405 formatted docstrings.
405 formatted docstrings.
406
406
407 Examples
407 Examples
408 --------
408 --------
409
409
410 In [1]: class NoInit:
410 In [1]: class NoInit:
411 ...: pass
411 ...: pass
412
412
413 In [2]: class NoDoc:
413 In [2]: class NoDoc:
414 ...: def __init__(self):
414 ...: def __init__(self):
415 ...: pass
415 ...: pass
416
416
417 In [3]: %pdoc NoDoc
417 In [3]: %pdoc NoDoc
418 No documentation found for NoDoc
418 No documentation found for NoDoc
419
419
420 In [4]: %pdoc NoInit
420 In [4]: %pdoc NoInit
421 No documentation found for NoInit
421 No documentation found for NoInit
422
422
423 In [5]: obj = NoInit()
423 In [5]: obj = NoInit()
424
424
425 In [6]: %pdoc obj
425 In [6]: %pdoc obj
426 No documentation found for obj
426 No documentation found for obj
427
427
428 In [7]: obj2 = NoDoc()
428 In [7]: obj2 = NoDoc()
429
429
430 In [8]: %pdoc obj2
430 In [8]: %pdoc obj2
431 No documentation found for obj2
431 No documentation found for obj2
432 """
432 """
433
433
434 head = self.__head # For convenience
434 head = self.__head # For convenience
435 lines = []
435 lines = []
436 ds = getdoc(obj)
436 ds = getdoc(obj)
437 if formatter:
437 if formatter:
438 ds = formatter(ds)
438 ds = formatter(ds)
439 if ds:
439 if ds:
440 lines.append(head("Class Docstring:"))
440 lines.append(head("Class Docstring:"))
441 lines.append(indent(ds))
441 lines.append(indent(ds))
442 if inspect.isclass(obj) and hasattr(obj, '__init__'):
442 if inspect.isclass(obj) and hasattr(obj, '__init__'):
443 init_ds = getdoc(obj.__init__)
443 init_ds = getdoc(obj.__init__)
444 if init_ds is not None:
444 if init_ds is not None:
445 lines.append(head("Constructor Docstring:"))
445 lines.append(head("Constructor Docstring:"))
446 lines.append(indent(init_ds))
446 lines.append(indent(init_ds))
447 elif hasattr(obj,'__call__'):
447 elif hasattr(obj,'__call__'):
448 call_ds = getdoc(obj.__call__)
448 call_ds = getdoc(obj.__call__)
449 if call_ds:
449 if call_ds:
450 lines.append(head("Calling Docstring:"))
450 lines.append(head("Calling Docstring:"))
451 lines.append(indent(call_ds))
451 lines.append(indent(call_ds))
452
452
453 if not lines:
453 if not lines:
454 self.noinfo('documentation',oname)
454 self.noinfo('documentation',oname)
455 else:
455 else:
456 page.page('\n'.join(lines))
456 page.page('\n'.join(lines))
457
457
458 def psource(self,obj,oname=''):
458 def psource(self,obj,oname=''):
459 """Print the source code for an object."""
459 """Print the source code for an object."""
460
460
461 # Flush the source cache because inspect can return out-of-date source
461 # Flush the source cache because inspect can return out-of-date source
462 linecache.checkcache()
462 linecache.checkcache()
463 try:
463 try:
464 src = getsource(obj)
464 src = getsource(obj)
465 except:
465 except:
466 self.noinfo('source',oname)
466 self.noinfo('source',oname)
467 else:
467 else:
468 page.page(self.format(src))
468 page.page(self.format(src))
469
469
470 def pfile(self, obj, oname=''):
470 def pfile(self, obj, oname=''):
471 """Show the whole file where an object was defined."""
471 """Show the whole file where an object was defined."""
472
472
473 lineno = find_source_lines(obj)
473 lineno = find_source_lines(obj)
474 if lineno is None:
474 if lineno is None:
475 self.noinfo('file', oname)
475 self.noinfo('file', oname)
476 return
476 return
477
477
478 ofile = find_file(obj)
478 ofile = find_file(obj)
479 # run contents of file through pager starting at line where the object
479 # run contents of file through pager starting at line where the object
480 # is defined, as long as the file isn't binary and is actually on the
480 # is defined, as long as the file isn't binary and is actually on the
481 # filesystem.
481 # filesystem.
482 if ofile.endswith(('.so', '.dll', '.pyd')):
482 if ofile.endswith(('.so', '.dll', '.pyd')):
483 print('File %r is binary, not printing.' % ofile)
483 print('File %r is binary, not printing.' % ofile)
484 elif not os.path.isfile(ofile):
484 elif not os.path.isfile(ofile):
485 print('File %r does not exist, not printing.' % ofile)
485 print('File %r does not exist, not printing.' % ofile)
486 else:
486 else:
487 # Print only text files, not extension binaries. Note that
487 # Print only text files, not extension binaries. Note that
488 # getsourcelines returns lineno with 1-offset and page() uses
488 # getsourcelines returns lineno with 1-offset and page() uses
489 # 0-offset, so we must adjust.
489 # 0-offset, so we must adjust.
490 page.page(self.format(openpy.read_py_file(ofile, skip_encoding_cookie=False)), lineno - 1)
490 page.page(self.format(openpy.read_py_file(ofile, skip_encoding_cookie=False)), lineno - 1)
491
491
492 def _format_fields(self, fields, title_width=12):
492 def _format_fields(self, fields, title_width=12):
493 """Formats a list of fields for display.
493 """Formats a list of fields for display.
494
494
495 Parameters
495 Parameters
496 ----------
496 ----------
497 fields : list
497 fields : list
498 A list of 2-tuples: (field_title, field_content)
498 A list of 2-tuples: (field_title, field_content)
499 title_width : int
499 title_width : int
500 How many characters to pad titles to. Default 12.
500 How many characters to pad titles to. Default 12.
501 """
501 """
502 out = []
502 out = []
503 header = self.__head
503 header = self.__head
504 for title, content in fields:
504 for title, content in fields:
505 if len(content.splitlines()) > 1:
505 if len(content.splitlines()) > 1:
506 title = header(title + ":") + "\n"
506 title = header(title + ":") + "\n"
507 else:
507 else:
508 title = header((title+":").ljust(title_width))
508 title = header((title+":").ljust(title_width))
509 out.append(cast_unicode(title) + cast_unicode(content))
509 out.append(cast_unicode(title) + cast_unicode(content))
510 return "\n".join(out)
510 return "\n".join(out)
511
511
512 # The fields to be displayed by pinfo: (fancy_name, key_in_info_dict)
512 # The fields to be displayed by pinfo: (fancy_name, key_in_info_dict)
513 pinfo_fields1 = [("Type", "type_name"),
513 pinfo_fields1 = [("Type", "type_name"),
514 ]
514 ]
515
515
516 pinfo_fields2 = [("String Form", "string_form"),
516 pinfo_fields2 = [("String Form", "string_form"),
517 ]
517 ]
518
518
519 pinfo_fields3 = [("Length", "length"),
519 pinfo_fields3 = [("Length", "length"),
520 ("File", "file"),
520 ("File", "file"),
521 ("Definition", "definition"),
521 ("Definition", "definition"),
522 ]
522 ]
523
523
524 pinfo_fields_obj = [("Class Docstring", "class_docstring"),
524 pinfo_fields_obj = [("Class Docstring", "class_docstring"),
525 ("Constructor Docstring","init_docstring"),
525 ("Constructor Docstring","init_docstring"),
526 ("Call def", "call_def"),
526 ("Call def", "call_def"),
527 ("Call docstring", "call_docstring")]
527 ("Call docstring", "call_docstring")]
528
528
529 def pinfo(self,obj,oname='',formatter=None,info=None,detail_level=0):
529 def pinfo(self,obj,oname='',formatter=None,info=None,detail_level=0):
530 """Show detailed information about an object.
530 """Show detailed information about an object.
531
531
532 Optional arguments:
532 Optional arguments:
533
533
534 - oname: name of the variable pointing to the object.
534 - oname: name of the variable pointing to the object.
535
535
536 - formatter: special formatter for docstrings (see pdoc)
536 - formatter: special formatter for docstrings (see pdoc)
537
537
538 - info: a structure with some information fields which may have been
538 - info: a structure with some information fields which may have been
539 precomputed already.
539 precomputed already.
540
540
541 - detail_level: if set to 1, more information is given.
541 - detail_level: if set to 1, more information is given.
542 """
542 """
543 info = self.info(obj, oname=oname, formatter=formatter,
543 info = self.info(obj, oname=oname, formatter=formatter,
544 info=info, detail_level=detail_level)
544 info=info, detail_level=detail_level)
545 displayfields = []
545 displayfields = []
546 def add_fields(fields):
546 def add_fields(fields):
547 for title, key in fields:
547 for title, key in fields:
548 field = info[key]
548 field = info[key]
549 if field is not None:
549 if field is not None:
550 displayfields.append((title, field.rstrip()))
550 displayfields.append((title, field.rstrip()))
551
551
552 add_fields(self.pinfo_fields1)
552 add_fields(self.pinfo_fields1)
553
553
554 # Base class for old-style instances
554 # Base class for old-style instances
555 if (not py3compat.PY3) and isinstance(obj, types.InstanceType) and info['base_class']:
555 if (not py3compat.PY3) and isinstance(obj, types.InstanceType) and info['base_class']:
556 displayfields.append(("Base Class", info['base_class'].rstrip()))
556 displayfields.append(("Base Class", info['base_class'].rstrip()))
557
557
558 add_fields(self.pinfo_fields2)
558 add_fields(self.pinfo_fields2)
559
559
560 # Namespace
560 # Namespace
561 if info['namespace'] != 'Interactive':
561 if info['namespace'] != 'Interactive':
562 displayfields.append(("Namespace", info['namespace'].rstrip()))
562 displayfields.append(("Namespace", info['namespace'].rstrip()))
563
563
564 add_fields(self.pinfo_fields3)
564 add_fields(self.pinfo_fields3)
565
565
566 # Source or docstring, depending on detail level and whether
566 # Source or docstring, depending on detail level and whether
567 # source found.
567 # source found.
568 if detail_level > 0 and info['source'] is not None:
568 if detail_level > 0 and info['source'] is not None:
569 displayfields.append(("Source",
569 displayfields.append(("Source",
570 self.format(cast_unicode(info['source']))))
570 self.format(cast_unicode(info['source']))))
571 elif info['docstring'] is not None:
571 elif info['docstring'] is not None:
572 displayfields.append(("Docstring", info["docstring"]))
572 displayfields.append(("Docstring", info["docstring"]))
573
573
574 # Constructor info for classes
574 # Constructor info for classes
575 if info['isclass']:
575 if info['isclass']:
576 if info['init_definition'] or info['init_docstring']:
576 if info['init_definition'] or info['init_docstring']:
577 displayfields.append(("Constructor information", ""))
577 displayfields.append(("Constructor information", ""))
578 if info['init_definition'] is not None:
578 if info['init_definition'] is not None:
579 displayfields.append((" Definition",
579 displayfields.append((" Definition",
580 info['init_definition'].rstrip()))
580 info['init_definition'].rstrip()))
581 if info['init_docstring'] is not None:
581 if info['init_docstring'] is not None:
582 displayfields.append((" Docstring",
582 displayfields.append((" Docstring",
583 indent(info['init_docstring'])))
583 indent(info['init_docstring'])))
584
584
585 # Info for objects:
585 # Info for objects:
586 else:
586 else:
587 add_fields(self.pinfo_fields_obj)
587 add_fields(self.pinfo_fields_obj)
588
588
589 # Finally send to printer/pager:
589 # Finally send to printer/pager:
590 if displayfields:
590 if displayfields:
591 page.page(self._format_fields(displayfields))
591 page.page(self._format_fields(displayfields))
592
592
593 def info(self, obj, oname='', formatter=None, info=None, detail_level=0):
593 def info(self, obj, oname='', formatter=None, info=None, detail_level=0):
594 """Compute a dict with detailed information about an object.
594 """Compute a dict with detailed information about an object.
595
595
596 Optional arguments:
596 Optional arguments:
597
597
598 - oname: name of the variable pointing to the object.
598 - oname: name of the variable pointing to the object.
599
599
600 - formatter: special formatter for docstrings (see pdoc)
600 - formatter: special formatter for docstrings (see pdoc)
601
601
602 - info: a structure with some information fields which may have been
602 - info: a structure with some information fields which may have been
603 precomputed already.
603 precomputed already.
604
604
605 - detail_level: if set to 1, more information is given.
605 - detail_level: if set to 1, more information is given.
606 """
606 """
607
607
608 obj_type = type(obj)
608 obj_type = type(obj)
609
609
610 header = self.__head
610 header = self.__head
611 if info is None:
611 if info is None:
612 ismagic = 0
612 ismagic = 0
613 isalias = 0
613 isalias = 0
614 ospace = ''
614 ospace = ''
615 else:
615 else:
616 ismagic = info.ismagic
616 ismagic = info.ismagic
617 isalias = info.isalias
617 isalias = info.isalias
618 ospace = info.namespace
618 ospace = info.namespace
619
619
620 # Get docstring, special-casing aliases:
620 # Get docstring, special-casing aliases:
621 if isalias:
621 if isalias:
622 if not callable(obj):
622 if not callable(obj):
623 try:
623 try:
624 ds = "Alias to the system command:\n %s" % obj[1]
624 ds = "Alias to the system command:\n %s" % obj[1]
625 except:
625 except:
626 ds = "Alias: " + str(obj)
626 ds = "Alias: " + str(obj)
627 else:
627 else:
628 ds = "Alias to " + str(obj)
628 ds = "Alias to " + str(obj)
629 if obj.__doc__:
629 if obj.__doc__:
630 ds += "\nDocstring:\n" + obj.__doc__
630 ds += "\nDocstring:\n" + obj.__doc__
631 else:
631 else:
632 ds = getdoc(obj)
632 ds = getdoc(obj)
633 if ds is None:
633 if ds is None:
634 ds = '<no docstring>'
634 ds = '<no docstring>'
635 if formatter is not None:
635 if formatter is not None:
636 ds = formatter(ds)
636 ds = formatter(ds)
637
637
638 # store output in a dict, we initialize it here and fill it as we go
638 # store output in a dict, we initialize it here and fill it as we go
639 out = dict(name=oname, found=True, isalias=isalias, ismagic=ismagic)
639 out = dict(name=oname, found=True, isalias=isalias, ismagic=ismagic)
640
640
641 string_max = 200 # max size of strings to show (snipped if longer)
641 string_max = 200 # max size of strings to show (snipped if longer)
642 shalf = int((string_max -5)/2)
642 shalf = int((string_max -5)/2)
643
643
644 if ismagic:
644 if ismagic:
645 obj_type_name = 'Magic function'
645 obj_type_name = 'Magic function'
646 elif isalias:
646 elif isalias:
647 obj_type_name = 'System alias'
647 obj_type_name = 'System alias'
648 else:
648 else:
649 obj_type_name = obj_type.__name__
649 obj_type_name = obj_type.__name__
650 out['type_name'] = obj_type_name
650 out['type_name'] = obj_type_name
651
651
652 try:
652 try:
653 bclass = obj.__class__
653 bclass = obj.__class__
654 out['base_class'] = str(bclass)
654 out['base_class'] = str(bclass)
655 except: pass
655 except: pass
656
656
657 # String form, but snip if too long in ? form (full in ??)
657 # String form, but snip if too long in ? form (full in ??)
658 if detail_level >= self.str_detail_level:
658 if detail_level >= self.str_detail_level:
659 try:
659 try:
660 ostr = str(obj)
660 ostr = str(obj)
661 str_head = 'string_form'
661 str_head = 'string_form'
662 if not detail_level and len(ostr)>string_max:
662 if not detail_level and len(ostr)>string_max:
663 ostr = ostr[:shalf] + ' <...> ' + ostr[-shalf:]
663 ostr = ostr[:shalf] + ' <...> ' + ostr[-shalf:]
664 ostr = ("\n" + " " * len(str_head.expandtabs())).\
664 ostr = ("\n" + " " * len(str_head.expandtabs())).\
665 join(q.strip() for q in ostr.split("\n"))
665 join(q.strip() for q in ostr.split("\n"))
666 out[str_head] = ostr
666 out[str_head] = ostr
667 except:
667 except:
668 pass
668 pass
669
669
670 if ospace:
670 if ospace:
671 out['namespace'] = ospace
671 out['namespace'] = ospace
672
672
673 # Length (for strings and lists)
673 # Length (for strings and lists)
674 try:
674 try:
675 out['length'] = str(len(obj))
675 out['length'] = str(len(obj))
676 except: pass
676 except: pass
677
677
678 # Filename where object was defined
678 # Filename where object was defined
679 binary_file = False
679 binary_file = False
680 fname = find_file(obj)
680 fname = find_file(obj)
681 if fname is None:
681 if fname is None:
682 # if anything goes wrong, we don't want to show source, so it's as
682 # if anything goes wrong, we don't want to show source, so it's as
683 # if the file was binary
683 # if the file was binary
684 binary_file = True
684 binary_file = True
685 else:
685 else:
686 if fname.endswith(('.so', '.dll', '.pyd')):
686 if fname.endswith(('.so', '.dll', '.pyd')):
687 binary_file = True
687 binary_file = True
688 elif fname.endswith('<string>'):
688 elif fname.endswith('<string>'):
689 fname = 'Dynamically generated function. No source code available.'
689 fname = 'Dynamically generated function. No source code available.'
690 out['file'] = fname
690 out['file'] = fname
691
691
692 # reconstruct the function definition and print it:
692 # reconstruct the function definition and print it:
693 defln = self._getdef(obj, oname)
693 defln = self._getdef(obj, oname)
694 if defln:
694 if defln:
695 out['definition'] = self.format(defln)
695 out['definition'] = self.format(defln)
696
696
697 # Docstrings only in detail 0 mode, since source contains them (we
697 # Docstrings only in detail 0 mode, since source contains them (we
698 # avoid repetitions). If source fails, we add them back, see below.
698 # avoid repetitions). If source fails, we add them back, see below.
699 if ds and detail_level == 0:
699 if ds and detail_level == 0:
700 out['docstring'] = ds
700 out['docstring'] = ds
701
701
702 # Original source code for any callable
702 # Original source code for any callable
703 if detail_level:
703 if detail_level:
704 # Flush the source cache because inspect can return out-of-date
704 # Flush the source cache because inspect can return out-of-date
705 # source
705 # source
706 linecache.checkcache()
706 linecache.checkcache()
707 source = None
707 source = None
708 try:
708 try:
709 try:
709 try:
710 source = getsource(obj, binary_file)
710 source = getsource(obj, binary_file)
711 except TypeError:
711 except TypeError:
712 if hasattr(obj, '__class__'):
712 if hasattr(obj, '__class__'):
713 source = getsource(obj.__class__, binary_file)
713 source = getsource(obj.__class__, binary_file)
714 if source is not None:
714 if source is not None:
715 out['source'] = source.rstrip()
715 out['source'] = source.rstrip()
716 except Exception:
716 except Exception:
717 pass
717 pass
718
718
719 if ds and source is None:
719 if ds and source is None:
720 out['docstring'] = ds
720 out['docstring'] = ds
721
721
722
722
723 # Constructor docstring for classes
723 # Constructor docstring for classes
724 if inspect.isclass(obj):
724 if inspect.isclass(obj):
725 out['isclass'] = True
725 out['isclass'] = True
726 # reconstruct the function definition and print it:
726 # reconstruct the function definition and print it:
727 try:
727 try:
728 obj_init = obj.__init__
728 obj_init = obj.__init__
729 except AttributeError:
729 except AttributeError:
730 init_def = init_ds = None
730 init_def = init_ds = None
731 else:
731 else:
732 init_def = self._getdef(obj_init,oname)
732 init_def = self._getdef(obj_init,oname)
733 init_ds = getdoc(obj_init)
733 init_ds = getdoc(obj_init)
734 # Skip Python's auto-generated docstrings
734 # Skip Python's auto-generated docstrings
735 if init_ds and \
735 if init_ds and \
736 init_ds.startswith('x.__init__(...) initializes'):
736 init_ds.startswith('x.__init__(...) initializes'):
737 init_ds = None
737 init_ds = None
738
738
739 if init_def or init_ds:
739 if init_def or init_ds:
740 if init_def:
740 if init_def:
741 out['init_definition'] = self.format(init_def)
741 out['init_definition'] = self.format(init_def)
742 if init_ds:
742 if init_ds:
743 out['init_docstring'] = init_ds
743 out['init_docstring'] = init_ds
744
744
745 # and class docstring for instances:
745 # and class docstring for instances:
746 else:
746 else:
747 # First, check whether the instance docstring is identical to the
747 # First, check whether the instance docstring is identical to the
748 # class one, and print it separately if they don't coincide. In
748 # class one, and print it separately if they don't coincide. In
749 # most cases they will, but it's nice to print all the info for
749 # most cases they will, but it's nice to print all the info for
750 # objects which use instance-customized docstrings.
750 # objects which use instance-customized docstrings.
751 if ds:
751 if ds:
752 try:
752 try:
753 cls = getattr(obj,'__class__')
753 cls = getattr(obj,'__class__')
754 except:
754 except:
755 class_ds = None
755 class_ds = None
756 else:
756 else:
757 class_ds = getdoc(cls)
757 class_ds = getdoc(cls)
758 # Skip Python's auto-generated docstrings
758 # Skip Python's auto-generated docstrings
759 if class_ds and \
759 if class_ds and \
760 (class_ds.startswith('function(code, globals[,') or \
760 (class_ds.startswith('function(code, globals[,') or \
761 class_ds.startswith('instancemethod(function, instance,') or \
761 class_ds.startswith('instancemethod(function, instance,') or \
762 class_ds.startswith('module(name[,') ):
762 class_ds.startswith('module(name[,') ):
763 class_ds = None
763 class_ds = None
764 if class_ds and ds != class_ds:
764 if class_ds and ds != class_ds:
765 out['class_docstring'] = class_ds
765 out['class_docstring'] = class_ds
766
766
767 # Next, try to show constructor docstrings
767 # Next, try to show constructor docstrings
768 try:
768 try:
769 init_ds = getdoc(obj.__init__)
769 init_ds = getdoc(obj.__init__)
770 # Skip Python's auto-generated docstrings
770 # Skip Python's auto-generated docstrings
771 if init_ds and \
771 if init_ds and \
772 init_ds.startswith('x.__init__(...) initializes'):
772 init_ds.startswith('x.__init__(...) initializes'):
773 init_ds = None
773 init_ds = None
774 except AttributeError:
774 except AttributeError:
775 init_ds = None
775 init_ds = None
776 if init_ds:
776 if init_ds:
777 out['init_docstring'] = init_ds
777 out['init_docstring'] = init_ds
778
778
779 # Call form docstring for callable instances
779 # Call form docstring for callable instances
780 if safe_hasattr(obj, '__call__'):
780 if safe_hasattr(obj, '__call__'):
781 call_def = self._getdef(obj.__call__, oname)
781 call_def = self._getdef(obj.__call__, oname)
782 if call_def is not None:
782 if call_def is not None:
783 out['call_def'] = self.format(call_def)
783 out['call_def'] = self.format(call_def)
784 call_ds = getdoc(obj.__call__)
784 call_ds = getdoc(obj.__call__)
785 # Skip Python's auto-generated docstrings
785 # Skip Python's auto-generated docstrings
786 if call_ds and call_ds.startswith('x.__call__(...) <==> x(...)'):
786 if call_ds and call_ds.startswith('x.__call__(...) <==> x(...)'):
787 call_ds = None
787 call_ds = None
788 if call_ds:
788 if call_ds:
789 out['call_docstring'] = call_ds
789 out['call_docstring'] = call_ds
790
790
791 # Compute the object's argspec as a callable. The key is to decide
791 # Compute the object's argspec as a callable. The key is to decide
792 # whether to pull it from the object itself, from its __init__ or
792 # whether to pull it from the object itself, from its __init__ or
793 # from its __call__ method.
793 # from its __call__ method.
794
794
795 if inspect.isclass(obj):
795 if inspect.isclass(obj):
796 # Old-style classes need not have an __init__
796 # Old-style classes need not have an __init__
797 callable_obj = getattr(obj, "__init__", None)
797 callable_obj = getattr(obj, "__init__", None)
798 elif callable(obj):
798 elif callable(obj):
799 callable_obj = obj
799 callable_obj = obj
800 else:
800 else:
801 callable_obj = None
801 callable_obj = None
802
802
803 if callable_obj:
803 if callable_obj:
804 try:
804 try:
805 args, varargs, varkw, defaults = getargspec(callable_obj)
805 args, varargs, varkw, defaults = getargspec(callable_obj)
806 except (TypeError, AttributeError):
806 except (TypeError, AttributeError):
807 # For extensions/builtins we can't retrieve the argspec
807 # For extensions/builtins we can't retrieve the argspec
808 pass
808 pass
809 else:
809 else:
810 out['argspec'] = dict(args=args, varargs=varargs,
810 out['argspec'] = dict(args=args, varargs=varargs,
811 varkw=varkw, defaults=defaults)
811 varkw=varkw, defaults=defaults)
812
812
813 return object_info(**out)
813 return object_info(**out)
814
814
815
815
816 def psearch(self,pattern,ns_table,ns_search=[],
816 def psearch(self,pattern,ns_table,ns_search=[],
817 ignore_case=False,show_all=False):
817 ignore_case=False,show_all=False):
818 """Search namespaces with wildcards for objects.
818 """Search namespaces with wildcards for objects.
819
819
820 Arguments:
820 Arguments:
821
821
822 - pattern: string containing shell-like wildcards to use in namespace
822 - pattern: string containing shell-like wildcards to use in namespace
823 searches and optionally a type specification to narrow the search to
823 searches and optionally a type specification to narrow the search to
824 objects of that type.
824 objects of that type.
825
825
826 - ns_table: dict of name->namespaces for search.
826 - ns_table: dict of name->namespaces for search.
827
827
828 Optional arguments:
828 Optional arguments:
829
829
830 - ns_search: list of namespace names to include in search.
830 - ns_search: list of namespace names to include in search.
831
831
832 - ignore_case(False): make the search case-insensitive.
832 - ignore_case(False): make the search case-insensitive.
833
833
834 - show_all(False): show all names, including those starting with
834 - show_all(False): show all names, including those starting with
835 underscores.
835 underscores.
836 """
836 """
837 #print 'ps pattern:<%r>' % pattern # dbg
837 #print 'ps pattern:<%r>' % pattern # dbg
838
838
839 # defaults
839 # defaults
840 type_pattern = 'all'
840 type_pattern = 'all'
841 filter = ''
841 filter = ''
842
842
843 cmds = pattern.split()
843 cmds = pattern.split()
844 len_cmds = len(cmds)
844 len_cmds = len(cmds)
845 if len_cmds == 1:
845 if len_cmds == 1:
846 # Only filter pattern given
846 # Only filter pattern given
847 filter = cmds[0]
847 filter = cmds[0]
848 elif len_cmds == 2:
848 elif len_cmds == 2:
849 # Both filter and type specified
849 # Both filter and type specified
850 filter,type_pattern = cmds
850 filter,type_pattern = cmds
851 else:
851 else:
852 raise ValueError('invalid argument string for psearch: <%s>' %
852 raise ValueError('invalid argument string for psearch: <%s>' %
853 pattern)
853 pattern)
854
854
855 # filter search namespaces
855 # filter search namespaces
856 for name in ns_search:
856 for name in ns_search:
857 if name not in ns_table:
857 if name not in ns_table:
858 raise ValueError('invalid namespace <%s>. Valid names: %s' %
858 raise ValueError('invalid namespace <%s>. Valid names: %s' %
859 (name,ns_table.keys()))
859 (name,ns_table.keys()))
860
860
861 #print 'type_pattern:',type_pattern # dbg
861 #print 'type_pattern:',type_pattern # dbg
862 search_result, namespaces_seen = set(), set()
862 search_result, namespaces_seen = set(), set()
863 for ns_name in ns_search:
863 for ns_name in ns_search:
864 ns = ns_table[ns_name]
864 ns = ns_table[ns_name]
865 # Normally, locals and globals are the same, so we just check one.
865 # Normally, locals and globals are the same, so we just check one.
866 if id(ns) in namespaces_seen:
866 if id(ns) in namespaces_seen:
867 continue
867 continue
868 namespaces_seen.add(id(ns))
868 namespaces_seen.add(id(ns))
869 tmp_res = list_namespace(ns, type_pattern, filter,
869 tmp_res = list_namespace(ns, type_pattern, filter,
870 ignore_case=ignore_case, show_all=show_all)
870 ignore_case=ignore_case, show_all=show_all)
871 search_result.update(tmp_res)
871 search_result.update(tmp_res)
872
872
873 page.page('\n'.join(sorted(search_result)))
873 page.page('\n'.join(sorted(search_result)))
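
A rough sketch of how the %psearch magic drives this method: a single
wildcard pattern searched over one user namespace (hypothetical names)::

    ns = {'alpha': 1, 'answer': 42, 'beta': 'x'}
    Inspector().psearch('a*', ns_table={'user': ns}, ns_search=['user'])
    # pages the sorted matches: 'alpha' and 'answer'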
@@ -1,1267 +1,1267 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 ultratb.py -- Spice up your tracebacks!
3 ultratb.py -- Spice up your tracebacks!
4
4
5 * ColorTB
5 * ColorTB
6 I've always found it a bit hard to visually parse tracebacks in Python. The
6 I've always found it a bit hard to visually parse tracebacks in Python. The
7 ColorTB class is a solution to that problem. It colors the different parts of a
7 ColorTB class is a solution to that problem. It colors the different parts of a
8 traceback in a manner similar to what you would expect from a syntax-highlighting
8 traceback in a manner similar to what you would expect from a syntax-highlighting
9 text editor.
9 text editor.
10
10
11 Installation instructions for ColorTB::
11 Installation instructions for ColorTB::
12
12
13 import sys,ultratb
13 import sys,ultratb
14 sys.excepthook = ultratb.ColorTB()
14 sys.excepthook = ultratb.ColorTB()
15
15
16 * VerboseTB
16 * VerboseTB
17 I've also included a port of Ka-Ping Yee's "cgitb.py" that produces all kinds
17 I've also included a port of Ka-Ping Yee's "cgitb.py" that produces all kinds
18 of useful info when a traceback occurs. Ping originally had it spit out HTML
18 of useful info when a traceback occurs. Ping originally had it spit out HTML
19 and intended it for CGI programmers, but why should they have all the fun? I
19 and intended it for CGI programmers, but why should they have all the fun? I
20 altered it to spit out colored text to the terminal. It's a bit overwhelming,
20 altered it to spit out colored text to the terminal. It's a bit overwhelming,
21 but kind of neat, and maybe useful for long-running programs that you believe
21 but kind of neat, and maybe useful for long-running programs that you believe
22 are bug-free. If a crash *does* occur in that type of program you want details.
22 are bug-free. If a crash *does* occur in that type of program you want details.
23 Give it a shot--you'll love it or you'll hate it.
23 Give it a shot--you'll love it or you'll hate it.
24
24
25 .. note::
25 .. note::
26
26
27 The Verbose mode prints the variables currently visible where the exception
27 The Verbose mode prints the variables currently visible where the exception
28 happened (shortening their strings if too long). This can potentially be
28 happened (shortening their strings if too long). This can potentially be
29 very slow, if you happen to have a huge data structure whose string
29 very slow, if you happen to have a huge data structure whose string
30 representation is complex to compute. Your computer may appear to freeze for
30 representation is complex to compute. Your computer may appear to freeze for
31 a while with cpu usage at 100%. If this occurs, you can cancel the traceback
31 a while with cpu usage at 100%. If this occurs, you can cancel the traceback
32 with Ctrl-C (maybe hitting it more than once).
32 with Ctrl-C (maybe hitting it more than once).
33
33
34 If you encounter this kind of situation often, you may want to use the
34 If you encounter this kind of situation often, you may want to use the
35 Verbose_novars mode instead of the regular Verbose, which avoids formatting
35 Verbose_novars mode instead of the regular Verbose, which avoids formatting
36 variables (but otherwise includes the information and context given by
36 variables (but otherwise includes the information and context given by
37 Verbose).
37 Verbose).
38
38
39
39
40 Installation instructions for VerboseTB::
40 Installation instructions for VerboseTB::
41
41
42 import sys,ultratb
42 import sys,ultratb
43 sys.excepthook = ultratb.VerboseTB()
43 sys.excepthook = ultratb.VerboseTB()
44
44
45 Note: Much of the code in this module was lifted verbatim from the standard
45 Note: Much of the code in this module was lifted verbatim from the standard
46 library module 'traceback.py' and Ka-Ping Yee's 'cgitb.py'.
46 library module 'traceback.py' and Ka-Ping Yee's 'cgitb.py'.
47
47
48 Color schemes
48 Color schemes
49 -------------
49 -------------
50
50
51 The colors are defined in the class TBTools through the use of the
51 The colors are defined in the class TBTools through the use of the
52 ColorSchemeTable class. Currently the following exist:
52 ColorSchemeTable class. Currently the following exist:
53
53
54 - NoColor: allows all of this module to be used in any terminal (the color
54 - NoColor: allows all of this module to be used in any terminal (the color
55 escapes are just dummy blank strings).
55 escapes are just dummy blank strings).
56
56
57 - Linux: is meant to look good in a terminal like the Linux console (black
57 - Linux: is meant to look good in a terminal like the Linux console (black
58 or very dark background).
58 or very dark background).
59
59
60 - LightBG: similar to Linux but swaps dark/light colors to be more readable
60 - LightBG: similar to Linux but swaps dark/light colors to be more readable
61 in light background terminals.
61 in light background terminals.
62
62
63 You can implement other color schemes easily; the syntax is fairly
63 You can implement other color schemes easily; the syntax is fairly
64 self-explanatory. Please send back new schemes you develop to the author for
64 self-explanatory. Please send back new schemes you develop to the author for
65 possible inclusion in future releases.
65 possible inclusion in future releases.
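
For example, to pick a scheme when installing one of the hooks shown above,
pass it as the color_scheme argument accepted by ColorTB and VerboseTB (a
sketch, following the import style used earlier in this docstring)::

    import sys, ultratb
    sys.excepthook = ultratb.ColorTB(color_scheme='Linux')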
66
66
67 Inheritance diagram:
67 Inheritance diagram:
68
68
69 .. inheritance-diagram:: IPython.core.ultratb
69 .. inheritance-diagram:: IPython.core.ultratb
70 :parts: 3
70 :parts: 3
71 """
71 """
72
72
73 #*****************************************************************************
73 #*****************************************************************************
74 # Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu>
74 # Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu>
75 # Copyright (C) 2001-2004 Fernando Perez <fperez@colorado.edu>
75 # Copyright (C) 2001-2004 Fernando Perez <fperez@colorado.edu>
76 #
76 #
77 # Distributed under the terms of the BSD License. The full license is in
77 # Distributed under the terms of the BSD License. The full license is in
78 # the file COPYING, distributed as part of this software.
78 # the file COPYING, distributed as part of this software.
79 #*****************************************************************************
79 #*****************************************************************************
80
80
81 from __future__ import unicode_literals
81 from __future__ import unicode_literals
82 from __future__ import print_function
82 from __future__ import print_function
83
83
84 import inspect
84 import inspect
85 import keyword
85 import keyword
86 import linecache
86 import linecache
87 import os
87 import os
88 import pydoc
88 import pydoc
89 import re
89 import re
90 import sys
90 import sys
91 import time
91 import time
92 import tokenize
92 import tokenize
93 import traceback
93 import traceback
94 import types
94 import types
95
95
96 try: # Python 2
96 try: # Python 2
97 generate_tokens = tokenize.generate_tokens
97 generate_tokens = tokenize.generate_tokens
98 except AttributeError: # Python 3
98 except AttributeError: # Python 3
99 generate_tokens = tokenize.tokenize
99 generate_tokens = tokenize.tokenize
100
100
101 # For purposes of monkeypatching inspect to fix a bug in it.
101 # For purposes of monkeypatching inspect to fix a bug in it.
102 from inspect import getsourcefile, getfile, getmodule,\
102 from inspect import getsourcefile, getfile, getmodule,\
103 ismodule, isclass, ismethod, isfunction, istraceback, isframe, iscode
103 ismodule, isclass, ismethod, isfunction, istraceback, isframe, iscode
104
104
105 # IPython's own modules
105 # IPython's own modules
106 # Modified pdb which doesn't damage IPython's readline handling
106 # Modified pdb which doesn't damage IPython's readline handling
107 from IPython import get_ipython
107 from IPython import get_ipython
108 from IPython.core import debugger
108 from IPython.core import debugger
109 from IPython.core.display_trap import DisplayTrap
109 from IPython.core.display_trap import DisplayTrap
110 from IPython.core.excolors import exception_colors
110 from IPython.core.excolors import exception_colors
111 from IPython.utils import PyColorize
111 from IPython.utils import PyColorize
112 from IPython.utils import io
112 from IPython.utils import io
113 from IPython.utils import openpy
113 from IPython.utils import openpy
114 from IPython.utils import path as util_path
114 from IPython.utils import path as util_path
115 from IPython.utils import py3compat
115 from IPython.utils import py3compat
116 from IPython.utils import ulinecache
116 from IPython.utils import ulinecache
117 from IPython.utils.data import uniq_stable
117 from IPython.utils.data import uniq_stable
118 from IPython.utils.warn import info, error
118 from IPython.utils.warn import info, error
119
119
120 # Globals
120 # Globals
121 # amount of space to put line numbers before verbose tracebacks
121 # amount of space to put line numbers before verbose tracebacks
122 INDENT_SIZE = 8
122 INDENT_SIZE = 8
123
123
124 # Default color scheme. This is used, for example, by the traceback
124 # Default color scheme. This is used, for example, by the traceback
125 # formatter. When running in an actual IPython instance, the user's rc.colors
125 # formatter. When running in an actual IPython instance, the user's rc.colors
126 # value is used, but having a module global makes this functionality available
126 # value is used, but having a module global makes this functionality available
127 # to users of ultratb who are NOT running inside ipython.
127 # to users of ultratb who are NOT running inside ipython.
128 DEFAULT_SCHEME = 'NoColor'
128 DEFAULT_SCHEME = 'NoColor'
129
129
130 #---------------------------------------------------------------------------
130 #---------------------------------------------------------------------------
131 # Code begins
131 # Code begins
132
132
133 # Utility functions
133 # Utility functions
134 def inspect_error():
134 def inspect_error():
135 """Print a message about internal inspect errors.
135 """Print a message about internal inspect errors.
136
136
137 These are unfortunately quite common."""
137 These are unfortunately quite common."""
138
138
139 error('Internal Python error in the inspect module.\n'
139 error('Internal Python error in the inspect module.\n'
140 'Below is the traceback from this internal error.\n')
140 'Below is the traceback from this internal error.\n')
141
141
142 # This function is a monkeypatch we apply to the Python inspect module. We have
142 # This function is a monkeypatch we apply to the Python inspect module. We have
143 # now found when it's needed (see discussion on issue gh-1456), and we have a
143 # now found when it's needed (see discussion on issue gh-1456), and we have a
144 # test case (IPython.core.tests.test_ultratb.ChangedPyFileTest) that fails if
144 # test case (IPython.core.tests.test_ultratb.ChangedPyFileTest) that fails if
145 # the monkeypatch is not applied. TK, Aug 2012.
145 # the monkeypatch is not applied. TK, Aug 2012.
146 def findsource(object):
146 def findsource(object):
147 """Return the entire source file and starting line number for an object.
147 """Return the entire source file and starting line number for an object.
148
148
149 The argument may be a module, class, method, function, traceback, frame,
149 The argument may be a module, class, method, function, traceback, frame,
150 or code object. The source code is returned as a list of all the lines
150 or code object. The source code is returned as a list of all the lines
151 in the file and the line number indexes a line in that list. An IOError
151 in the file and the line number indexes a line in that list. An IOError
152 is raised if the source code cannot be retrieved.
152 is raised if the source code cannot be retrieved.
153
153
154 FIXED version with which we monkeypatch the stdlib to work around a bug."""
154 FIXED version with which we monkeypatch the stdlib to work around a bug."""
155
155
156 file = getsourcefile(object) or getfile(object)
156 file = getsourcefile(object) or getfile(object)
157 # If the object is a frame, then trying to get the globals dict from its
157 # If the object is a frame, then trying to get the globals dict from its
158 # module won't work. Instead, the frame object itself has the globals
158 # module won't work. Instead, the frame object itself has the globals
159 # dictionary.
159 # dictionary.
160 globals_dict = None
160 globals_dict = None
161 if inspect.isframe(object):
161 if inspect.isframe(object):
162 # XXX: can this ever be false?
162 # XXX: can this ever be false?
163 globals_dict = object.f_globals
163 globals_dict = object.f_globals
164 else:
164 else:
165 module = getmodule(object, file)
165 module = getmodule(object, file)
166 if module:
166 if module:
167 globals_dict = module.__dict__
167 globals_dict = module.__dict__
168 lines = linecache.getlines(file, globals_dict)
168 lines = linecache.getlines(file, globals_dict)
169 if not lines:
169 if not lines:
170 raise IOError('could not get source code')
170 raise IOError('could not get source code')
171
171
172 if ismodule(object):
172 if ismodule(object):
173 return lines, 0
173 return lines, 0
174
174
175 if isclass(object):
175 if isclass(object):
176 name = object.__name__
176 name = object.__name__
177 pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
177 pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
178 # make some effort to find the best matching class definition:
178 # make some effort to find the best matching class definition:
179 # use the one with the least indentation, which is the one
179 # use the one with the least indentation, which is the one
180 # that's most probably not inside a function definition.
180 # that's most probably not inside a function definition.
181 candidates = []
181 candidates = []
182 for i in range(len(lines)):
182 for i in range(len(lines)):
183 match = pat.match(lines[i])
183 match = pat.match(lines[i])
184 if match:
184 if match:
185 # if it's at toplevel, it's already the best one
185 # if it's at toplevel, it's already the best one
186 if lines[i][0] == 'c':
186 if lines[i][0] == 'c':
187 return lines, i
187 return lines, i
188 # else add whitespace to candidate list
188 # else add whitespace to candidate list
189 candidates.append((match.group(1), i))
189 candidates.append((match.group(1), i))
190 if candidates:
190 if candidates:
191 # this will sort by whitespace, and by line number,
191 # this will sort by whitespace, and by line number,
192 # less whitespace first
192 # less whitespace first
193 candidates.sort()
193 candidates.sort()
194 return lines, candidates[0][1]
194 return lines, candidates[0][1]
195 else:
195 else:
196 raise IOError('could not find class definition')
196 raise IOError('could not find class definition')
197
197
198 if ismethod(object):
198 if ismethod(object):
199 object = object.im_func
199 object = object.im_func
200 if isfunction(object):
200 if isfunction(object):
201 object = object.func_code
201 object = object.__code__
202 if istraceback(object):
202 if istraceback(object):
203 object = object.tb_frame
203 object = object.tb_frame
204 if isframe(object):
204 if isframe(object):
205 object = object.f_code
205 object = object.f_code
206 if iscode(object):
206 if iscode(object):
207 if not hasattr(object, 'co_firstlineno'):
207 if not hasattr(object, 'co_firstlineno'):
208 raise IOError('could not find function definition')
208 raise IOError('could not find function definition')
209 pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
209 pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
210 pmatch = pat.match
210 pmatch = pat.match
211 # fperez - fix: sometimes, co_firstlineno can give a number larger than
211 # fperez - fix: sometimes, co_firstlineno can give a number larger than
212 # the length of lines, which causes an error. Safeguard against that.
212 # the length of lines, which causes an error. Safeguard against that.
213 lnum = min(object.co_firstlineno,len(lines))-1
213 lnum = min(object.co_firstlineno,len(lines))-1
214 while lnum > 0:
214 while lnum > 0:
215 if pmatch(lines[lnum]): break
215 if pmatch(lines[lnum]): break
216 lnum -= 1
216 lnum -= 1
217
217
218 return lines, lnum
218 return lines, lnum
219 raise IOError('could not find code object')
219 raise IOError('could not find code object')
220
220
221 # Monkeypatch inspect to apply our bugfix. This code only works with Python >= 2.5
221 # Monkeypatch inspect to apply our bugfix. This code only works with Python >= 2.5
222 inspect.findsource = findsource
222 inspect.findsource = findsource
223
223
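# Illustrative sketch (not part of the module code above; demo names are made
# up): because the monkeypatch replaces inspect.findsource, callers keep using
# the standard inspect API and transparently pick up the fixed behaviour.
import inspect

def _findsource_demo():
    pass

demo_lines, demo_lnum = inspect.findsource(_findsource_demo)
# demo_lines holds every line of the defining file; demo_lnum is the 0-based
# index of the line where 'def _findsource_demo' appears.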
224 def fix_frame_records_filenames(records):
224 def fix_frame_records_filenames(records):
225 """Try to fix the filenames in each record from inspect.getinnerframes().
225 """Try to fix the filenames in each record from inspect.getinnerframes().
226
226
227 Particularly, modules loaded from within zip files have useless filenames
227 Particularly, modules loaded from within zip files have useless filenames
228 attached to their code objects, and inspect.getinnerframes() just uses them.
228 attached to their code objects, and inspect.getinnerframes() just uses them.
229 """
229 """
230 fixed_records = []
230 fixed_records = []
231 for frame, filename, line_no, func_name, lines, index in records:
231 for frame, filename, line_no, func_name, lines, index in records:
232 # Look inside the frame's globals dictionary for __file__, which should
232 # Look inside the frame's globals dictionary for __file__, which should
233 # be better.
233 # be better.
234 better_fn = frame.f_globals.get('__file__', None)
234 better_fn = frame.f_globals.get('__file__', None)
235 if isinstance(better_fn, str):
235 if isinstance(better_fn, str):
236 # Check the type just in case someone did something weird with
236 # Check the type just in case someone did something weird with
237 # __file__. It might also be None if the error occurred during
237 # __file__. It might also be None if the error occurred during
238 # import.
238 # import.
239 filename = better_fn
239 filename = better_fn
240 fixed_records.append((frame, filename, line_no, func_name, lines, index))
240 fixed_records.append((frame, filename, line_no, func_name, lines, index))
241 return fixed_records
241 return fixed_records
242
242
243
243
244 def _fixed_getinnerframes(etb, context=1,tb_offset=0):
244 def _fixed_getinnerframes(etb, context=1,tb_offset=0):
245 LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5
245 LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5
246
246
247 records = fix_frame_records_filenames(inspect.getinnerframes(etb, context))
247 records = fix_frame_records_filenames(inspect.getinnerframes(etb, context))
248
248
249 # If the error is at the console, don't build any context, since it would
249 # If the error is at the console, don't build any context, since it would
250 # otherwise produce 5 blank lines printed out (there is no file at the
250 # otherwise produce 5 blank lines printed out (there is no file at the
251 # console)
251 # console)
252 rec_check = records[tb_offset:]
252 rec_check = records[tb_offset:]
253 try:
253 try:
254 rname = rec_check[0][1]
254 rname = rec_check[0][1]
255 if rname == '<ipython console>' or rname.endswith('<string>'):
255 if rname == '<ipython console>' or rname.endswith('<string>'):
256 return rec_check
256 return rec_check
257 except IndexError:
257 except IndexError:
258 pass
258 pass
259
259
260 aux = traceback.extract_tb(etb)
260 aux = traceback.extract_tb(etb)
261 assert len(records) == len(aux)
261 assert len(records) == len(aux)
262 for i, (file, lnum, _, _) in zip(range(len(records)), aux):
262 for i, (file, lnum, _, _) in zip(range(len(records)), aux):
263 maybeStart = lnum-1 - context//2
263 maybeStart = lnum-1 - context//2
264 start = max(maybeStart, 0)
264 start = max(maybeStart, 0)
265 end = start + context
265 end = start + context
266 lines = ulinecache.getlines(file)[start:end]
266 lines = ulinecache.getlines(file)[start:end]
267 buf = list(records[i])
267 buf = list(records[i])
268 buf[LNUM_POS] = lnum
268 buf[LNUM_POS] = lnum
269 buf[INDEX_POS] = lnum - 1 - start
269 buf[INDEX_POS] = lnum - 1 - start
270 buf[LINES_POS] = lines
270 buf[LINES_POS] = lines
271 records[i] = tuple(buf)
271 records[i] = tuple(buf)
272 return records[tb_offset:]
272 return records[tb_offset:]
273
273
274 # Helper function -- largely belongs to VerboseTB, but we need the same
274 # Helper function -- largely belongs to VerboseTB, but we need the same
275 # functionality to produce a pseudo verbose TB for SyntaxErrors, so that they
275 # functionality to produce a pseudo verbose TB for SyntaxErrors, so that they
276 # can be recognized properly by ipython.el's py-traceback-line-re
276 # can be recognized properly by ipython.el's py-traceback-line-re
277 # (SyntaxErrors have to be treated specially because they have no traceback)
277 # (SyntaxErrors have to be treated specially because they have no traceback)
278
278
279 _parser = PyColorize.Parser()
279 _parser = PyColorize.Parser()
280
280
281 def _format_traceback_lines(lnum, index, lines, Colors, lvals=None,scheme=None):
281 def _format_traceback_lines(lnum, index, lines, Colors, lvals=None,scheme=None):
282 numbers_width = INDENT_SIZE - 1
282 numbers_width = INDENT_SIZE - 1
283 res = []
283 res = []
284 i = lnum - index
284 i = lnum - index
285
285
286 # This lets us get fully syntax-highlighted tracebacks.
286 # This lets us get fully syntax-highlighted tracebacks.
287 if scheme is None:
287 if scheme is None:
288 ipinst = get_ipython()
288 ipinst = get_ipython()
289 if ipinst is not None:
289 if ipinst is not None:
290 scheme = ipinst.colors
290 scheme = ipinst.colors
291 else:
291 else:
292 scheme = DEFAULT_SCHEME
292 scheme = DEFAULT_SCHEME
293
293
294 _line_format = _parser.format2
294 _line_format = _parser.format2
295
295
296 for line in lines:
296 for line in lines:
297 line = py3compat.cast_unicode(line)
297 line = py3compat.cast_unicode(line)
298
298
299 new_line, err = _line_format(line, 'str', scheme)
299 new_line, err = _line_format(line, 'str', scheme)
300 if not err: line = new_line
300 if not err: line = new_line
301
301
302 if i == lnum:
302 if i == lnum:
303 # This is the line with the error
303 # This is the line with the error
304 pad = numbers_width - len(str(i))
304 pad = numbers_width - len(str(i))
305 if pad >= 3:
305 if pad >= 3:
306 marker = '-'*(pad-3) + '-> '
306 marker = '-'*(pad-3) + '-> '
307 elif pad == 2:
307 elif pad == 2:
308 marker = '> '
308 marker = '> '
309 elif pad == 1:
309 elif pad == 1:
310 marker = '>'
310 marker = '>'
311 else:
311 else:
312 marker = ''
312 marker = ''
313 num = marker + str(i)
313 num = marker + str(i)
314 line = '%s%s%s %s%s' %(Colors.linenoEm, num,
314 line = '%s%s%s %s%s' %(Colors.linenoEm, num,
315 Colors.line, line, Colors.Normal)
315 Colors.line, line, Colors.Normal)
316 else:
316 else:
317 num = '%*s' % (numbers_width,i)
317 num = '%*s' % (numbers_width,i)
318 line = '%s%s%s %s' %(Colors.lineno, num,
318 line = '%s%s%s %s' %(Colors.lineno, num,
319 Colors.Normal, line)
319 Colors.Normal, line)
320
320
321 res.append(line)
321 res.append(line)
322 if lvals and i == lnum:
322 if lvals and i == lnum:
323 res.append(lvals + '\n')
323 res.append(lvals + '\n')
324 i = i + 1
324 i = i + 1
325 return res
325 return res
326
326
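# Worked example (illustrative) of the marker arithmetic above, assuming the
# module default INDENT_SIZE = 8, i.e. numbers_width == 7:
_pad_demo = 7 - len(str(305))                  # 4, so the 'pad >= 3' branch runs
assert '-' * (_pad_demo - 3) + '-> ' + str(305) == '--> 305'  # error-line prefix
assert '%*s' % (7, 305) == '    305'           # ordinary context lines are right-justified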
327
327
328 #---------------------------------------------------------------------------
328 #---------------------------------------------------------------------------
329 # Module classes
329 # Module classes
330 class TBTools(object):
330 class TBTools(object):
331 """Basic tools used by all traceback printer classes."""
331 """Basic tools used by all traceback printer classes."""
332
332
333 # Number of frames to skip when reporting tracebacks
333 # Number of frames to skip when reporting tracebacks
334 tb_offset = 0
334 tb_offset = 0
335
335
336 def __init__(self, color_scheme='NoColor', call_pdb=False, ostream=None):
336 def __init__(self, color_scheme='NoColor', call_pdb=False, ostream=None):
337 # Whether to call the interactive pdb debugger after printing
337 # Whether to call the interactive pdb debugger after printing
338 # tracebacks or not
338 # tracebacks or not
339 self.call_pdb = call_pdb
339 self.call_pdb = call_pdb
340
340
341 # Output stream to write to. Note that we store the original value in
341 # Output stream to write to. Note that we store the original value in
342 # a private attribute and then make the public ostream a property, so
342 # a private attribute and then make the public ostream a property, so
343 # that we can delay accessing io.stdout until runtime. The way
343 # that we can delay accessing io.stdout until runtime. The way
344 # things are written now, the io.stdout object is dynamically managed
344 # things are written now, the io.stdout object is dynamically managed
345 # so a reference to it should NEVER be stored statically. This
345 # so a reference to it should NEVER be stored statically. This
346 # property approach confines this detail to a single location, and all
346 # property approach confines this detail to a single location, and all
347 # subclasses can simply access self.ostream for writing.
347 # subclasses can simply access self.ostream for writing.
348 self._ostream = ostream
348 self._ostream = ostream
349
349
350 # Create color table
350 # Create color table
351 self.color_scheme_table = exception_colors()
351 self.color_scheme_table = exception_colors()
352
352
353 self.set_colors(color_scheme)
353 self.set_colors(color_scheme)
354 self.old_scheme = color_scheme # save initial value for toggles
354 self.old_scheme = color_scheme # save initial value for toggles
355
355
356 if call_pdb:
356 if call_pdb:
357 self.pdb = debugger.Pdb(self.color_scheme_table.active_scheme_name)
357 self.pdb = debugger.Pdb(self.color_scheme_table.active_scheme_name)
358 else:
358 else:
359 self.pdb = None
359 self.pdb = None
360
360
361 def _get_ostream(self):
361 def _get_ostream(self):
362 """Output stream that exceptions are written to.
362 """Output stream that exceptions are written to.
363
363
364 Valid values are:
364 Valid values are:
365
365
366 - None: the default, which means that IPython will dynamically resolve
366 - None: the default, which means that IPython will dynamically resolve
367 to io.stdout. This ensures compatibility with most tools, including
367 to io.stdout. This ensures compatibility with most tools, including
368 Windows (where plain stdout doesn't recognize ANSI escapes).
368 Windows (where plain stdout doesn't recognize ANSI escapes).
369
369
370 - Any object with 'write' and 'flush' attributes.
370 - Any object with 'write' and 'flush' attributes.
371 """
371 """
372 return io.stdout if self._ostream is None else self._ostream
372 return io.stdout if self._ostream is None else self._ostream
373
373
374 def _set_ostream(self, val):
374 def _set_ostream(self, val):
375 assert val is None or (hasattr(val, 'write') and hasattr(val, 'flush'))
375 assert val is None or (hasattr(val, 'write') and hasattr(val, 'flush'))
376 self._ostream = val
376 self._ostream = val
377
377
378 ostream = property(_get_ostream, _set_ostream)
378 ostream = property(_get_ostream, _set_ostream)
379
379
380 def set_colors(self,*args,**kw):
380 def set_colors(self,*args,**kw):
381 """Shorthand access to the color table scheme selector method."""
381 """Shorthand access to the color table scheme selector method."""
382
382
383 # Set own color table
383 # Set own color table
384 self.color_scheme_table.set_active_scheme(*args,**kw)
384 self.color_scheme_table.set_active_scheme(*args,**kw)
385 # for convenience, set Colors to the active scheme
385 # for convenience, set Colors to the active scheme
386 self.Colors = self.color_scheme_table.active_colors
386 self.Colors = self.color_scheme_table.active_colors
387 # Also set colors of debugger
387 # Also set colors of debugger
388 if hasattr(self,'pdb') and self.pdb is not None:
388 if hasattr(self,'pdb') and self.pdb is not None:
389 self.pdb.set_colors(*args,**kw)
389 self.pdb.set_colors(*args,**kw)
390
390
391 def color_toggle(self):
391 def color_toggle(self):
392 """Toggle between the currently active color scheme and NoColor."""
392 """Toggle between the currently active color scheme and NoColor."""
393
393
394 if self.color_scheme_table.active_scheme_name == 'NoColor':
394 if self.color_scheme_table.active_scheme_name == 'NoColor':
395 self.color_scheme_table.set_active_scheme(self.old_scheme)
395 self.color_scheme_table.set_active_scheme(self.old_scheme)
396 self.Colors = self.color_scheme_table.active_colors
396 self.Colors = self.color_scheme_table.active_colors
397 else:
397 else:
398 self.old_scheme = self.color_scheme_table.active_scheme_name
398 self.old_scheme = self.color_scheme_table.active_scheme_name
399 self.color_scheme_table.set_active_scheme('NoColor')
399 self.color_scheme_table.set_active_scheme('NoColor')
400 self.Colors = self.color_scheme_table.active_colors
400 self.Colors = self.color_scheme_table.active_colors
401
401
402 def stb2text(self, stb):
402 def stb2text(self, stb):
403 """Convert a structured traceback (a list) to a string."""
403 """Convert a structured traceback (a list) to a string."""
404 return '\n'.join(stb)
404 return '\n'.join(stb)
405
405
406 def text(self, etype, value, tb, tb_offset=None, context=5):
406 def text(self, etype, value, tb, tb_offset=None, context=5):
407 """Return formatted traceback.
407 """Return formatted traceback.
408
408
409 Subclasses may override this if they add extra arguments.
409 Subclasses may override this if they add extra arguments.
410 """
410 """
411 tb_list = self.structured_traceback(etype, value, tb,
411 tb_list = self.structured_traceback(etype, value, tb,
412 tb_offset, context)
412 tb_offset, context)
413 return self.stb2text(tb_list)
413 return self.stb2text(tb_list)
414
414
415 def structured_traceback(self, etype, evalue, tb, tb_offset=None,
415 def structured_traceback(self, etype, evalue, tb, tb_offset=None,
416 context=5, mode=None):
416 context=5, mode=None):
417 """Return a list of traceback frames.
417 """Return a list of traceback frames.
418
418
419 Must be implemented by each class.
419 Must be implemented by each class.
420 """
420 """
421 raise NotImplementedError()
421 raise NotImplementedError()
422
422
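# Illustrative sketch for the ostream property defined in TBTools above (demo
# names are made up): None, the default, resolves dynamically to io.stdout,
# while any object exposing write() and flush() is accepted as-is, which makes
# it easy to capture formatted output.
from io import StringIO

_tb_tool = TBTools(color_scheme='NoColor')   # _ostream left as None -> io.stdout
_tb_tool.ostream = StringIO()                # redirect to an in-memory buffer
_tb_tool.ostream.write(u'captured\n')        # later: _tb_tool.ostream.getvalue()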
423
423
424 #---------------------------------------------------------------------------
424 #---------------------------------------------------------------------------
425 class ListTB(TBTools):
425 class ListTB(TBTools):
426 """Print traceback information from a traceback list, with optional color.
426 """Print traceback information from a traceback list, with optional color.
427
427
428 Calling requires 3 arguments: (etype, evalue, elist)
428 Calling requires 3 arguments: (etype, evalue, elist)
429 as would be obtained by::
429 as would be obtained by::
430
430
431 etype, evalue, tb = sys.exc_info()
431 etype, evalue, tb = sys.exc_info()
432 if tb:
432 if tb:
433 elist = traceback.extract_tb(tb)
433 elist = traceback.extract_tb(tb)
434 else:
434 else:
435 elist = None
435 elist = None
436
436
437 It can thus be used by programs which need to process the traceback before
437 It can thus be used by programs which need to process the traceback before
438 printing (such as console replacements based on the code module from the
438 printing (such as console replacements based on the code module from the
439 standard library).
439 standard library).
440
440
441 Because they are meant to be called without a full traceback (only a
441 Because they are meant to be called without a full traceback (only a
442 list), instances of this class can't call the interactive pdb debugger."""
442 list), instances of this class can't call the interactive pdb debugger."""
443
443
444 def __init__(self,color_scheme = 'NoColor', call_pdb=False, ostream=None):
444 def __init__(self,color_scheme = 'NoColor', call_pdb=False, ostream=None):
445 TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
445 TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
446 ostream=ostream)
446 ostream=ostream)
447
447
448 def __call__(self, etype, value, elist):
448 def __call__(self, etype, value, elist):
449 self.ostream.flush()
449 self.ostream.flush()
450 self.ostream.write(self.text(etype, value, elist))
450 self.ostream.write(self.text(etype, value, elist))
451 self.ostream.write('\n')
451 self.ostream.write('\n')
452
452
453 def structured_traceback(self, etype, value, elist, tb_offset=None,
453 def structured_traceback(self, etype, value, elist, tb_offset=None,
454 context=5):
454 context=5):
455 """Return a color formatted string with the traceback info.
455 """Return a color formatted string with the traceback info.
456
456
457 Parameters
457 Parameters
458 ----------
458 ----------
459 etype : exception type
459 etype : exception type
460 Type of the exception raised.
460 Type of the exception raised.
461
461
462 value : object
462 value : object
463 Data stored in the exception
463 Data stored in the exception
464
464
465 elist : list
465 elist : list
466 List of frames, see class docstring for details.
466 List of frames, see class docstring for details.
467
467
468 tb_offset : int, optional
468 tb_offset : int, optional
469 Number of frames in the traceback to skip. If not given, the
469 Number of frames in the traceback to skip. If not given, the
470 instance value is used (set in constructor).
470 instance value is used (set in constructor).
471
471
472 context : int, optional
472 context : int, optional
473 Number of lines of context information to print.
473 Number of lines of context information to print.
474
474
475 Returns
475 Returns
476 -------
476 -------
477 String with formatted exception.
477 String with formatted exception.
478 """
478 """
479 tb_offset = self.tb_offset if tb_offset is None else tb_offset
479 tb_offset = self.tb_offset if tb_offset is None else tb_offset
480 Colors = self.Colors
480 Colors = self.Colors
481 out_list = []
481 out_list = []
482 if elist:
482 if elist:
483
483
484 if tb_offset and len(elist) > tb_offset:
484 if tb_offset and len(elist) > tb_offset:
485 elist = elist[tb_offset:]
485 elist = elist[tb_offset:]
486
486
487 out_list.append('Traceback %s(most recent call last)%s:' %
487 out_list.append('Traceback %s(most recent call last)%s:' %
488 (Colors.normalEm, Colors.Normal) + '\n')
488 (Colors.normalEm, Colors.Normal) + '\n')
489 out_list.extend(self._format_list(elist))
489 out_list.extend(self._format_list(elist))
490 # The exception info should be a single entry in the list.
490 # The exception info should be a single entry in the list.
491 lines = ''.join(self._format_exception_only(etype, value))
491 lines = ''.join(self._format_exception_only(etype, value))
492 out_list.append(lines)
492 out_list.append(lines)
493
493
494 # Note: this code originally read:
494 # Note: this code originally read:
495
495
496 ## for line in lines[:-1]:
496 ## for line in lines[:-1]:
497 ## out_list.append(" "+line)
497 ## out_list.append(" "+line)
498 ## out_list.append(lines[-1])
498 ## out_list.append(lines[-1])
499
499
500 # This means it was indenting everything but the last line by a little
500 # This means it was indenting everything but the last line by a little
501 # bit. I've disabled this for now, but if we see ugliness somewhere we
501 # bit. I've disabled this for now, but if we see ugliness somewhere we
502 # can restore it.
502 # can restore it.
503
503
504 return out_list
504 return out_list
505
505
506 def _format_list(self, extracted_list):
506 def _format_list(self, extracted_list):
507 """Format a list of traceback entry tuples for printing.
507 """Format a list of traceback entry tuples for printing.
508
508
509 Given a list of tuples as returned by extract_tb() or
509 Given a list of tuples as returned by extract_tb() or
510 extract_stack(), return a list of strings ready for printing.
510 extract_stack(), return a list of strings ready for printing.
511 Each string in the resulting list corresponds to the item with the
511 Each string in the resulting list corresponds to the item with the
512 same index in the argument list. Each string ends in a newline;
512 same index in the argument list. Each string ends in a newline;
513 the strings may contain internal newlines as well, for those items
513 the strings may contain internal newlines as well, for those items
514 whose source text line is not None.
514 whose source text line is not None.
515
515
516 Lifted almost verbatim from traceback.py
516 Lifted almost verbatim from traceback.py
517 """
517 """
518
518
519 Colors = self.Colors
519 Colors = self.Colors
520 list = []
520 list = []
521 for filename, lineno, name, line in extracted_list[:-1]:
521 for filename, lineno, name, line in extracted_list[:-1]:
522 item = ' File %s"%s"%s, line %s%d%s, in %s%s%s\n' % \
522 item = ' File %s"%s"%s, line %s%d%s, in %s%s%s\n' % \
523 (Colors.filename, filename, Colors.Normal,
523 (Colors.filename, filename, Colors.Normal,
524 Colors.lineno, lineno, Colors.Normal,
524 Colors.lineno, lineno, Colors.Normal,
525 Colors.name, name, Colors.Normal)
525 Colors.name, name, Colors.Normal)
526 if line:
526 if line:
527 item += ' %s\n' % line.strip()
527 item += ' %s\n' % line.strip()
528 list.append(item)
528 list.append(item)
529 # Emphasize the last entry
529 # Emphasize the last entry
530 filename, lineno, name, line = extracted_list[-1]
530 filename, lineno, name, line = extracted_list[-1]
531 item = '%s File %s"%s"%s, line %s%d%s, in %s%s%s%s\n' % \
531 item = '%s File %s"%s"%s, line %s%d%s, in %s%s%s%s\n' % \
532 (Colors.normalEm,
532 (Colors.normalEm,
533 Colors.filenameEm, filename, Colors.normalEm,
533 Colors.filenameEm, filename, Colors.normalEm,
534 Colors.linenoEm, lineno, Colors.normalEm,
534 Colors.linenoEm, lineno, Colors.normalEm,
535 Colors.nameEm, name, Colors.normalEm,
535 Colors.nameEm, name, Colors.normalEm,
536 Colors.Normal)
536 Colors.Normal)
537 if line:
537 if line:
538 item += '%s %s%s\n' % (Colors.line, line.strip(),
538 item += '%s %s%s\n' % (Colors.line, line.strip(),
539 Colors.Normal)
539 Colors.Normal)
540 list.append(item)
540 list.append(item)
541 #from pprint import pformat; print 'LISTTB', pformat(list) # dbg
541 #from pprint import pformat; print 'LISTTB', pformat(list) # dbg
542 return list
542 return list
543
543
544 def _format_exception_only(self, etype, value):
544 def _format_exception_only(self, etype, value):
545 """Format the exception part of a traceback.
545 """Format the exception part of a traceback.
546
546
547 The arguments are the exception type and value such as given by
547 The arguments are the exception type and value such as given by
548 sys.exc_info()[:2]. The return value is a list of strings, each ending
548 sys.exc_info()[:2]. The return value is a list of strings, each ending
549 in a newline. Normally, the list contains a single string; however,
549 in a newline. Normally, the list contains a single string; however,
550 for SyntaxError exceptions, it contains several lines that (when
550 for SyntaxError exceptions, it contains several lines that (when
551 printed) display detailed information about where the syntax error
551 printed) display detailed information about where the syntax error
552 occurred. The message indicating which exception occurred is always
552 occurred. The message indicating which exception occurred is always
553 the last string in the list.
553 the last string in the list.
554
554
555 Also lifted nearly verbatim from traceback.py
555 Also lifted nearly verbatim from traceback.py
556 """
556 """
557 have_filedata = False
557 have_filedata = False
558 Colors = self.Colors
558 Colors = self.Colors
559 list = []
559 list = []
560 stype = Colors.excName + etype.__name__ + Colors.Normal
560 stype = Colors.excName + etype.__name__ + Colors.Normal
561 if value is None:
561 if value is None:
562 # Not sure if this can still happen in Python 2.6 and above
562 # Not sure if this can still happen in Python 2.6 and above
563 list.append( py3compat.cast_unicode(stype) + '\n')
563 list.append( py3compat.cast_unicode(stype) + '\n')
564 else:
564 else:
565 if issubclass(etype, SyntaxError):
565 if issubclass(etype, SyntaxError):
566 have_filedata = True
566 have_filedata = True
567 #print 'filename is',filename # dbg
567 #print 'filename is',filename # dbg
568 if not value.filename: value.filename = "<string>"
568 if not value.filename: value.filename = "<string>"
569 if value.lineno:
569 if value.lineno:
570 lineno = value.lineno
570 lineno = value.lineno
571 textline = ulinecache.getline(value.filename, value.lineno)
571 textline = ulinecache.getline(value.filename, value.lineno)
572 else:
572 else:
573 lineno = 'unknown'
573 lineno = 'unknown'
574 textline = ''
574 textline = ''
575 list.append('%s File %s"%s"%s, line %s%s%s\n' % \
575 list.append('%s File %s"%s"%s, line %s%s%s\n' % \
576 (Colors.normalEm,
576 (Colors.normalEm,
577 Colors.filenameEm, py3compat.cast_unicode(value.filename), Colors.normalEm,
577 Colors.filenameEm, py3compat.cast_unicode(value.filename), Colors.normalEm,
578 Colors.linenoEm, lineno, Colors.Normal ))
578 Colors.linenoEm, lineno, Colors.Normal ))
579 if textline == '':
579 if textline == '':
580 textline = py3compat.cast_unicode(value.text, "utf-8")
580 textline = py3compat.cast_unicode(value.text, "utf-8")
581
581
582 if textline is not None:
582 if textline is not None:
583 i = 0
583 i = 0
584 while i < len(textline) and textline[i].isspace():
584 while i < len(textline) and textline[i].isspace():
585 i += 1
585 i += 1
586 list.append('%s %s%s\n' % (Colors.line,
586 list.append('%s %s%s\n' % (Colors.line,
587 textline.strip(),
587 textline.strip(),
588 Colors.Normal))
588 Colors.Normal))
589 if value.offset is not None:
589 if value.offset is not None:
590 s = ' '
590 s = ' '
591 for c in textline[i:value.offset-1]:
591 for c in textline[i:value.offset-1]:
592 if c.isspace():
592 if c.isspace():
593 s += c
593 s += c
594 else:
594 else:
595 s += ' '
595 s += ' '
596 list.append('%s%s^%s\n' % (Colors.caret, s,
596 list.append('%s%s^%s\n' % (Colors.caret, s,
597 Colors.Normal) )
597 Colors.Normal) )
598
598
599 try:
599 try:
600 s = value.msg
600 s = value.msg
601 except Exception:
601 except Exception:
602 s = self._some_str(value)
602 s = self._some_str(value)
603 if s:
603 if s:
604 list.append('%s%s:%s %s\n' % (str(stype), Colors.excName,
604 list.append('%s%s:%s %s\n' % (str(stype), Colors.excName,
605 Colors.Normal, s))
605 Colors.Normal, s))
606 else:
606 else:
607 list.append('%s\n' % str(stype))
607 list.append('%s\n' % str(stype))
608
608
609 # sync with user hooks
609 # sync with user hooks
610 if have_filedata:
610 if have_filedata:
611 ipinst = get_ipython()
611 ipinst = get_ipython()
612 if ipinst is not None:
612 if ipinst is not None:
613 ipinst.hooks.synchronize_with_editor(value.filename, value.lineno, 0)
613 ipinst.hooks.synchronize_with_editor(value.filename, value.lineno, 0)
614
614
615 return list
615 return list
616
616
617 def get_exception_only(self, etype, value):
617 def get_exception_only(self, etype, value):
618 """Only print the exception type and message, without a traceback.
618 """Only print the exception type and message, without a traceback.
619
619
620 Parameters
620 Parameters
621 ----------
621 ----------
622 etype : exception type
622 etype : exception type
623 value : exception value
623 value : exception value
624 """
624 """
625 return ListTB.structured_traceback(self, etype, value, [])
625 return ListTB.structured_traceback(self, etype, value, [])
626
626
627
627
628 def show_exception_only(self, etype, evalue):
628 def show_exception_only(self, etype, evalue):
629 """Only print the exception type and message, without a traceback.
629 """Only print the exception type and message, without a traceback.
630
630
631 Parameters
631 Parameters
632 ----------
632 ----------
633 etype : exception type
633 etype : exception type
634 evalue : exception value
634 evalue : exception value
635 """
635 """
636 # This method needs to use __call__ from *this* class, not the one from
636 # This method needs to use __call__ from *this* class, not the one from
637 # a subclass whose signature or behavior may be different
637 # a subclass whose signature or behavior may be different
638 ostream = self.ostream
638 ostream = self.ostream
639 ostream.flush()
639 ostream.flush()
640 ostream.write('\n'.join(self.get_exception_only(etype, evalue)))
640 ostream.write('\n'.join(self.get_exception_only(etype, evalue)))
641 ostream.flush()
641 ostream.flush()
642
642
643 def _some_str(self, value):
643 def _some_str(self, value):
644 # Lifted from traceback.py
644 # Lifted from traceback.py
645 try:
645 try:
646 return str(value)
646 return str(value)
647 except:
647 except:
648 return '<unprintable %s object>' % type(value).__name__
648 return '<unprintable %s object>' % type(value).__name__
649
649
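# Illustrative sketch (demo names are made up): formatting an exception with
# ListTB from a plain traceback list, mirroring the calling convention shown
# in the class docstring above.
import sys
import traceback

_list_tb = ListTB(color_scheme='NoColor')
try:
    1 / 0
except ZeroDivisionError:
    _etype, _evalue, _tb = sys.exc_info()
    _list_tb(_etype, _evalue, traceback.extract_tb(_tb))   # writes to _list_tb.ostream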
650 #----------------------------------------------------------------------------
650 #----------------------------------------------------------------------------
651 class VerboseTB(TBTools):
651 class VerboseTB(TBTools):
652 """A port of Ka-Ping Yee's cgitb.py module that outputs color text instead
652 """A port of Ka-Ping Yee's cgitb.py module that outputs color text instead
653 of HTML. Requires inspect and pydoc. Crazy, man.
653 of HTML. Requires inspect and pydoc. Crazy, man.
654
654
655 Modified version which optionally strips the topmost entries from the
655 Modified version which optionally strips the topmost entries from the
656 traceback, to be used with alternate interpreters (because their own code
656 traceback, to be used with alternate interpreters (because their own code
657 would appear in the traceback)."""
657 would appear in the traceback)."""
658
658
659 def __init__(self,color_scheme = 'Linux', call_pdb=False, ostream=None,
659 def __init__(self,color_scheme = 'Linux', call_pdb=False, ostream=None,
660 tb_offset=0, long_header=False, include_vars=True,
660 tb_offset=0, long_header=False, include_vars=True,
661 check_cache=None):
661 check_cache=None):
662 """Specify traceback offset, headers and color scheme.
662 """Specify traceback offset, headers and color scheme.
663
663
664 Define how many frames to drop from the tracebacks. Calling it with
664 Define how many frames to drop from the tracebacks. Calling it with
665 tb_offset=1 allows use of this handler in interpreters which will have
665 tb_offset=1 allows use of this handler in interpreters which will have
666 their own code at the top of the traceback (VerboseTB will first
666 their own code at the top of the traceback (VerboseTB will first
667 remove that frame before printing the traceback info)."""
667 remove that frame before printing the traceback info)."""
668 TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
668 TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
669 ostream=ostream)
669 ostream=ostream)
670 self.tb_offset = tb_offset
670 self.tb_offset = tb_offset
671 self.long_header = long_header
671 self.long_header = long_header
672 self.include_vars = include_vars
672 self.include_vars = include_vars
673 # By default we use linecache.checkcache, but the user can provide a
673 # By default we use linecache.checkcache, but the user can provide a
674 # different check_cache implementation. This is used by the IPython
674 # different check_cache implementation. This is used by the IPython
675 # kernel to provide tracebacks for interactive code that is cached,
675 # kernel to provide tracebacks for interactive code that is cached,
676 # by a compiler instance that flushes the linecache but preserves its
676 # by a compiler instance that flushes the linecache but preserves its
677 # own code cache.
677 # own code cache.
678 if check_cache is None:
678 if check_cache is None:
679 check_cache = linecache.checkcache
679 check_cache = linecache.checkcache
680 self.check_cache = check_cache
680 self.check_cache = check_cache
681
681
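# Illustrative usage sketch, kept in comment form: a VerboseTB instance is
# typically driven through text(), inherited from TBTools, which joins the
# structured traceback into a single string, e.g.:
#
#     import sys
#     vtb = VerboseTB(color_scheme='NoColor', include_vars=False)
#     try:
#         {}['missing']
#     except KeyError:
#         print(vtb.text(*sys.exc_info()))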
682 def structured_traceback(self, etype, evalue, etb, tb_offset=None,
682 def structured_traceback(self, etype, evalue, etb, tb_offset=None,
683 context=5):
683 context=5):
684 """Return a nice text document describing the traceback."""
684 """Return a nice text document describing the traceback."""
685
685
686 tb_offset = self.tb_offset if tb_offset is None else tb_offset
686 tb_offset = self.tb_offset if tb_offset is None else tb_offset
687
687
688 # some locals
688 # some locals
689 try:
689 try:
690 etype = etype.__name__
690 etype = etype.__name__
691 except AttributeError:
691 except AttributeError:
692 pass
692 pass
693 Colors = self.Colors # just a shorthand + quicker name lookup
693 Colors = self.Colors # just a shorthand + quicker name lookup
694 ColorsNormal = Colors.Normal # used a lot
694 ColorsNormal = Colors.Normal # used a lot
695 col_scheme = self.color_scheme_table.active_scheme_name
695 col_scheme = self.color_scheme_table.active_scheme_name
696 indent = ' '*INDENT_SIZE
696 indent = ' '*INDENT_SIZE
697 em_normal = '%s\n%s%s' % (Colors.valEm, indent,ColorsNormal)
697 em_normal = '%s\n%s%s' % (Colors.valEm, indent,ColorsNormal)
698 undefined = '%sundefined%s' % (Colors.em, ColorsNormal)
698 undefined = '%sundefined%s' % (Colors.em, ColorsNormal)
699 exc = '%s%s%s' % (Colors.excName,etype,ColorsNormal)
699 exc = '%s%s%s' % (Colors.excName,etype,ColorsNormal)
700
700
701 # some internal-use functions
701 # some internal-use functions
702 def text_repr(value):
702 def text_repr(value):
703 """Hopefully pretty robust repr equivalent."""
703 """Hopefully pretty robust repr equivalent."""
704 # this is pretty horrible but should always return *something*
704 # this is pretty horrible but should always return *something*
705 try:
705 try:
706 return pydoc.text.repr(value)
706 return pydoc.text.repr(value)
707 except KeyboardInterrupt:
707 except KeyboardInterrupt:
708 raise
708 raise
709 except:
709 except:
710 try:
710 try:
711 return repr(value)
711 return repr(value)
712 except KeyboardInterrupt:
712 except KeyboardInterrupt:
713 raise
713 raise
714 except:
714 except:
715 try:
715 try:
716 # all still in an except block so we catch
716 # all still in an except block so we catch
717 # getattr raising
717 # getattr raising
718 name = getattr(value, '__name__', None)
718 name = getattr(value, '__name__', None)
719 if name:
719 if name:
720 # ick, recursion
720 # ick, recursion
721 return text_repr(name)
721 return text_repr(name)
722 klass = getattr(value, '__class__', None)
722 klass = getattr(value, '__class__', None)
723 if klass:
723 if klass:
724 return '%s instance' % text_repr(klass)
724 return '%s instance' % text_repr(klass)
725 except KeyboardInterrupt:
725 except KeyboardInterrupt:
726 raise
726 raise
727 except:
727 except:
728 return 'UNRECOVERABLE REPR FAILURE'
728 return 'UNRECOVERABLE REPR FAILURE'
729 def eqrepr(value, repr=text_repr): return '=%s' % repr(value)
729 def eqrepr(value, repr=text_repr): return '=%s' % repr(value)
730 def nullrepr(value, repr=text_repr): return ''
730 def nullrepr(value, repr=text_repr): return ''
731
731
732 # meat of the code begins
732 # meat of the code begins
733 try:
733 try:
734 etype = etype.__name__
734 etype = etype.__name__
735 except AttributeError:
735 except AttributeError:
736 pass
736 pass
737
737
738 if self.long_header:
738 if self.long_header:
739 # Header with the exception type, python version, and date
739 # Header with the exception type, python version, and date
740 pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
740 pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
741 date = time.ctime(time.time())
741 date = time.ctime(time.time())
742
742
743 head = '%s%s%s\n%s%s%s\n%s' % (Colors.topline, '-'*75, ColorsNormal,
743 head = '%s%s%s\n%s%s%s\n%s' % (Colors.topline, '-'*75, ColorsNormal,
744 exc, ' '*(75-len(str(etype))-len(pyver)),
744 exc, ' '*(75-len(str(etype))-len(pyver)),
745 pyver, date.rjust(75) )
745 pyver, date.rjust(75) )
746 head += "\nA problem occurred executing Python code. Here is the sequence of function"\
746 head += "\nA problem occurred executing Python code. Here is the sequence of function"\
747 "\ncalls leading up to the error, with the most recent (innermost) call last."
747 "\ncalls leading up to the error, with the most recent (innermost) call last."
748 else:
748 else:
749 # Simplified header
749 # Simplified header
750 head = '%s%s%s\n%s%s' % (Colors.topline, '-'*75, ColorsNormal,exc,
750 head = '%s%s%s\n%s%s' % (Colors.topline, '-'*75, ColorsNormal,exc,
751 'Traceback (most recent call last)'.\
751 'Traceback (most recent call last)'.\
752 rjust(75 - len(str(etype)) ) )
752 rjust(75 - len(str(etype)) ) )
753 frames = []
753 frames = []
754 # Flush cache before calling inspect. This helps alleviate some of the
754 # Flush cache before calling inspect. This helps alleviate some of the
755 # problems with python 2.3's inspect.py.
755 # problems with python 2.3's inspect.py.
756 ##self.check_cache()
756 ##self.check_cache()
757 # Drop topmost frames if requested
757 # Drop topmost frames if requested
758 try:
758 try:
759 # Try the default getinnerframes and Alex's: Alex's fixes some
759 # Try the default getinnerframes and Alex's: Alex's fixes some
760 # problems, but it generates empty tracebacks for console errors
760 # problems, but it generates empty tracebacks for console errors
761 # (5 blank lines) where none should be returned.
761 # (5 blank lines) where none should be returned.
762 #records = inspect.getinnerframes(etb, context)[tb_offset:]
762 #records = inspect.getinnerframes(etb, context)[tb_offset:]
763 #print 'python records:', records # dbg
763 #print 'python records:', records # dbg
764 records = _fixed_getinnerframes(etb, context, tb_offset)
764 records = _fixed_getinnerframes(etb, context, tb_offset)
765 #print 'alex records:', records # dbg
765 #print 'alex records:', records # dbg
766 except:
766 except:
767
767
768 # FIXME: I've been getting many crash reports from python 2.3
768 # FIXME: I've been getting many crash reports from python 2.3
769 # users, traceable to inspect.py. If I can find a small test-case
769 # users, traceable to inspect.py. If I can find a small test-case
770 # to reproduce this, I should either write a better workaround or
770 # to reproduce this, I should either write a better workaround or
771 # file a bug report against inspect (if that's the real problem).
771 # file a bug report against inspect (if that's the real problem).
772 # So far, I haven't been able to find an isolated example to
772 # So far, I haven't been able to find an isolated example to
773 # reproduce the problem.
773 # reproduce the problem.
774 inspect_error()
774 inspect_error()
775 traceback.print_exc(file=self.ostream)
775 traceback.print_exc(file=self.ostream)
776 info('\nUnfortunately, your original traceback can not be constructed.\n')
776 info('\nUnfortunately, your original traceback can not be constructed.\n')
777 return ''
777 return ''
778
778
779 # build some color string templates outside these nested loops
779 # build some color string templates outside these nested loops
780 tpl_link = '%s%%s%s' % (Colors.filenameEm,ColorsNormal)
780 tpl_link = '%s%%s%s' % (Colors.filenameEm,ColorsNormal)
781 tpl_call = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm,
781 tpl_call = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm,
782 ColorsNormal)
782 ColorsNormal)
783 tpl_call_fail = 'in %s%%s%s(***failed resolving arguments***)%s' % \
783 tpl_call_fail = 'in %s%%s%s(***failed resolving arguments***)%s' % \
784 (Colors.vName, Colors.valEm, ColorsNormal)
784 (Colors.vName, Colors.valEm, ColorsNormal)
785 tpl_local_var = '%s%%s%s' % (Colors.vName, ColorsNormal)
785 tpl_local_var = '%s%%s%s' % (Colors.vName, ColorsNormal)
786 tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal,
786 tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal,
787 Colors.vName, ColorsNormal)
787 Colors.vName, ColorsNormal)
788 tpl_name_val = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal)
788 tpl_name_val = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal)
789 tpl_line = '%s%%s%s %%s' % (Colors.lineno, ColorsNormal)
789 tpl_line = '%s%%s%s %%s' % (Colors.lineno, ColorsNormal)
790 tpl_line_em = '%s%%s%s %%s%s' % (Colors.linenoEm,Colors.line,
790 tpl_line_em = '%s%%s%s %%s%s' % (Colors.linenoEm,Colors.line,
791 ColorsNormal)
791 ColorsNormal)
792
792
793 # now, loop over all records printing context and info
793 # now, loop over all records printing context and info
794 abspath = os.path.abspath
794 abspath = os.path.abspath
795 for frame, file, lnum, func, lines, index in records:
795 for frame, file, lnum, func, lines, index in records:
796 #print '*** record:',file,lnum,func,lines,index # dbg
796 #print '*** record:',file,lnum,func,lines,index # dbg
797 if not file:
797 if not file:
798 file = '?'
798 file = '?'
799 elif not(file.startswith(str("<")) and file.endswith(str(">"))):
799 elif not(file.startswith(str("<")) and file.endswith(str(">"))):
800 # Guess that filenames like <string> aren't real filenames, so
800 # Guess that filenames like <string> aren't real filenames, so
801 # don't call abspath on them.
801 # don't call abspath on them.
802 try:
802 try:
803 file = abspath(file)
803 file = abspath(file)
804 except OSError:
804 except OSError:
805 # Not sure if this can still happen: abspath now works with
805 # Not sure if this can still happen: abspath now works with
806 # file names like <string>
806 # file names like <string>
807 pass
807 pass
808 file = py3compat.cast_unicode(file, util_path.fs_encoding)
808 file = py3compat.cast_unicode(file, util_path.fs_encoding)
809 link = tpl_link % file
809 link = tpl_link % file
810 args, varargs, varkw, locals = inspect.getargvalues(frame)
810 args, varargs, varkw, locals = inspect.getargvalues(frame)
811
811
812 if func == '?':
812 if func == '?':
813 call = ''
813 call = ''
814 else:
814 else:
815 # Decide whether to include variable details or not
815 # Decide whether to include variable details or not
816 var_repr = self.include_vars and eqrepr or nullrepr
816 var_repr = self.include_vars and eqrepr or nullrepr
817 try:
817 try:
818 call = tpl_call % (func,inspect.formatargvalues(args,
818 call = tpl_call % (func,inspect.formatargvalues(args,
819 varargs, varkw,
819 varargs, varkw,
820 locals,formatvalue=var_repr))
820 locals,formatvalue=var_repr))
821 except KeyError:
821 except KeyError:
822 # This happens in situations like errors inside generator
822 # This happens in situations like errors inside generator
823 # expressions, where local variables are listed in the
823 # expressions, where local variables are listed in the
824 # line, but can't be extracted from the frame. I'm not
824 # line, but can't be extracted from the frame. I'm not
825 # 100% sure this isn't actually a bug in inspect itself,
825 # 100% sure this isn't actually a bug in inspect itself,
826 # but since there's no info for us to compute with, the
826 # but since there's no info for us to compute with, the
827 # best we can do is report the failure and move on. Here
827 # best we can do is report the failure and move on. Here
828 # we must *not* call any traceback construction again,
828 # we must *not* call any traceback construction again,
829 # because that would mess up use of %debug later on. So we
829 # because that would mess up use of %debug later on. So we
830 # simply report the failure and move on. The only
830 # simply report the failure and move on. The only
831 # limitation will be that this frame won't have locals
831 # limitation will be that this frame won't have locals
832 # listed in the call signature. Quite subtle problem...
832 # listed in the call signature. Quite subtle problem...
833 # I can't think of a good way to validate this in a unit
833 # I can't think of a good way to validate this in a unit
834 # test, but running a script consisting of:
834 # test, but running a script consisting of:
835 # dict( (k,v.strip()) for (k,v) in range(10) )
835 # dict( (k,v.strip()) for (k,v) in range(10) )
836 # will illustrate the error, if this exception catch is
836 # will illustrate the error, if this exception catch is
837 # disabled.
837 # disabled.
838 call = tpl_call_fail % func
838 call = tpl_call_fail % func
839
839
840 # Don't attempt to tokenize binary files.
840 # Don't attempt to tokenize binary files.
841 if file.endswith(('.so', '.pyd', '.dll')):
841 if file.endswith(('.so', '.pyd', '.dll')):
842 frames.append('%s %s\n' % (link,call))
842 frames.append('%s %s\n' % (link,call))
843 continue
843 continue
844 elif file.endswith(('.pyc','.pyo')):
844 elif file.endswith(('.pyc','.pyo')):
845 # Look up the corresponding source file.
845 # Look up the corresponding source file.
846 file = openpy.source_from_cache(file)
846 file = openpy.source_from_cache(file)
847
847
848 def linereader(file=file, lnum=[lnum], getline=ulinecache.getline):
848 def linereader(file=file, lnum=[lnum], getline=ulinecache.getline):
849 line = getline(file, lnum[0])
849 line = getline(file, lnum[0])
850 lnum[0] += 1
850 lnum[0] += 1
851 return line
851 return line
852
852
853 # Build the list of names on this line of code where the exception
853 # Build the list of names on this line of code where the exception
854 # occurred.
854 # occurred.
855 try:
855 try:
856 names = []
856 names = []
857 name_cont = False
857 name_cont = False
858
858
859 for token_type, token, start, end, line in generate_tokens(linereader):
859 for token_type, token, start, end, line in generate_tokens(linereader):
860 # build composite names
860 # build composite names
861 if token_type == tokenize.NAME and token not in keyword.kwlist:
861 if token_type == tokenize.NAME and token not in keyword.kwlist:
862 if name_cont:
862 if name_cont:
863 # Continuation of a dotted name
863 # Continuation of a dotted name
864 try:
864 try:
865 names[-1].append(token)
865 names[-1].append(token)
866 except IndexError:
866 except IndexError:
867 names.append([token])
867 names.append([token])
868 name_cont = False
868 name_cont = False
869 else:
869 else:
870 # Regular new names. We append everything, the caller
870 # Regular new names. We append everything, the caller
871 # will be responsible for pruning the list later. It's
871 # will be responsible for pruning the list later. It's
872 # very tricky to try to prune as we go, b/c composite
872 # very tricky to try to prune as we go, b/c composite
873 # names can fool us. The pruning at the end is easy
873 # names can fool us. The pruning at the end is easy
874 # to do (or the caller can print a list with repeated
874 # to do (or the caller can print a list with repeated
875 # names if so desired).
875 # names if so desired).
876 names.append([token])
876 names.append([token])
877 elif token == '.':
877 elif token == '.':
878 name_cont = True
878 name_cont = True
879 elif token_type == tokenize.NEWLINE:
879 elif token_type == tokenize.NEWLINE:
880 break
880 break
881
881
882 except (IndexError, UnicodeDecodeError):
882 except (IndexError, UnicodeDecodeError):
883 # signals exit of tokenizer
883 # signals exit of tokenizer
884 pass
884 pass
885 except tokenize.TokenError as msg:
885 except tokenize.TokenError as msg:
886 _m = ("An unexpected error occurred while tokenizing input\n"
886 _m = ("An unexpected error occurred while tokenizing input\n"
887 "The following traceback may be corrupted or invalid\n"
887 "The following traceback may be corrupted or invalid\n"
888 "The error message is: %s\n" % msg)
888 "The error message is: %s\n" % msg)
889 error(_m)
889 error(_m)
890
890
891 # Join composite names (e.g. "dict.fromkeys")
891 # Join composite names (e.g. "dict.fromkeys")
892 names = ['.'.join(n) for n in names]
892 names = ['.'.join(n) for n in names]
893 # prune names list of duplicates, but keep the right order
893 # prune names list of duplicates, but keep the right order
894 unique_names = uniq_stable(names)
894 unique_names = uniq_stable(names)
895
895
896 # Start loop over vars
896 # Start loop over vars
897 lvals = []
897 lvals = []
898 if self.include_vars:
898 if self.include_vars:
899 for name_full in unique_names:
899 for name_full in unique_names:
900 name_base = name_full.split('.',1)[0]
900 name_base = name_full.split('.',1)[0]
901 if name_base in frame.f_code.co_varnames:
901 if name_base in frame.f_code.co_varnames:
902 if name_base in locals:
902 if name_base in locals:
903 try:
903 try:
904 value = repr(eval(name_full,locals))
904 value = repr(eval(name_full,locals))
905 except:
905 except:
906 value = undefined
906 value = undefined
907 else:
907 else:
908 value = undefined
908 value = undefined
909 name = tpl_local_var % name_full
909 name = tpl_local_var % name_full
910 else:
910 else:
911 if name_base in frame.f_globals:
911 if name_base in frame.f_globals:
912 try:
912 try:
913 value = repr(eval(name_full,frame.f_globals))
913 value = repr(eval(name_full,frame.f_globals))
914 except:
914 except:
915 value = undefined
915 value = undefined
916 else:
916 else:
917 value = undefined
917 value = undefined
918 name = tpl_global_var % name_full
918 name = tpl_global_var % name_full
919 lvals.append(tpl_name_val % (name,value))
919 lvals.append(tpl_name_val % (name,value))
920 if lvals:
920 if lvals:
921 lvals = '%s%s' % (indent,em_normal.join(lvals))
921 lvals = '%s%s' % (indent,em_normal.join(lvals))
922 else:
922 else:
923 lvals = ''
923 lvals = ''
924
924
925 level = '%s %s\n' % (link,call)
925 level = '%s %s\n' % (link,call)
926
926
927 if index is None:
927 if index is None:
928 frames.append(level)
928 frames.append(level)
929 else:
929 else:
930 frames.append('%s%s' % (level,''.join(
930 frames.append('%s%s' % (level,''.join(
931 _format_traceback_lines(lnum,index,lines,Colors,lvals,
931 _format_traceback_lines(lnum,index,lines,Colors,lvals,
932 col_scheme))))
932 col_scheme))))
933
933
934 # Get (safely) a string form of the exception info
934 # Get (safely) a string form of the exception info
935 try:
935 try:
936 etype_str,evalue_str = map(str,(etype,evalue))
936 etype_str,evalue_str = map(str,(etype,evalue))
937 except:
937 except:
938 # User exception is improperly defined.
938 # User exception is improperly defined.
939 etype,evalue = str,sys.exc_info()[:2]
939 etype,evalue = str,sys.exc_info()[:2]
940 etype_str,evalue_str = map(str,(etype,evalue))
940 etype_str,evalue_str = map(str,(etype,evalue))
941 # ... and format it
941 # ... and format it
942 exception = ['%s%s%s: %s' % (Colors.excName, etype_str,
942 exception = ['%s%s%s: %s' % (Colors.excName, etype_str,
943 ColorsNormal, py3compat.cast_unicode(evalue_str))]
943 ColorsNormal, py3compat.cast_unicode(evalue_str))]
944 if (not py3compat.PY3) and type(evalue) is types.InstanceType:
944 if (not py3compat.PY3) and type(evalue) is types.InstanceType:
945 try:
945 try:
946 names = [w for w in dir(evalue) if isinstance(w, py3compat.string_types)]
946 names = [w for w in dir(evalue) if isinstance(w, py3compat.string_types)]
947 except:
947 except:
948 # Every now and then, an object with funny internals blows up
948 # Every now and then, an object with funny internals blows up
949 # when dir() is called on it. We do the best we can to report
949 # when dir() is called on it. We do the best we can to report
950 # the problem and continue
950 # the problem and continue
951 _m = '%sException reporting error (object with broken dir())%s:'
951 _m = '%sException reporting error (object with broken dir())%s:'
952 exception.append(_m % (Colors.excName,ColorsNormal))
952 exception.append(_m % (Colors.excName,ColorsNormal))
953 etype_str,evalue_str = map(str,sys.exc_info()[:2])
953 etype_str,evalue_str = map(str,sys.exc_info()[:2])
954 exception.append('%s%s%s: %s' % (Colors.excName,etype_str,
954 exception.append('%s%s%s: %s' % (Colors.excName,etype_str,
955 ColorsNormal, py3compat.cast_unicode(evalue_str)))
955 ColorsNormal, py3compat.cast_unicode(evalue_str)))
956 names = []
956 names = []
957 for name in names:
957 for name in names:
958 value = text_repr(getattr(evalue, name))
958 value = text_repr(getattr(evalue, name))
959 exception.append('\n%s%s = %s' % (indent, name, value))
959 exception.append('\n%s%s = %s' % (indent, name, value))
960
960
961 # vds: >>
961 # vds: >>
962 if records:
962 if records:
963 filepath, lnum = records[-1][1:3]
963 filepath, lnum = records[-1][1:3]
964 #print "file:", str(file), "linenb", str(lnum) # dbg
964 #print "file:", str(file), "linenb", str(lnum) # dbg
965 filepath = os.path.abspath(filepath)
965 filepath = os.path.abspath(filepath)
966 ipinst = get_ipython()
966 ipinst = get_ipython()
967 if ipinst is not None:
967 if ipinst is not None:
968 ipinst.hooks.synchronize_with_editor(filepath, lnum, 0)
968 ipinst.hooks.synchronize_with_editor(filepath, lnum, 0)
969 # vds: <<
969 # vds: <<
970
970
971 # return all our info assembled as a single string
971 # return all our info assembled as a single string
972 # return '%s\n\n%s\n%s' % (head,'\n'.join(frames),''.join(exception[0]) )
972 # return '%s\n\n%s\n%s' % (head,'\n'.join(frames),''.join(exception[0]) )
973 return [head] + frames + [''.join(exception[0])]
973 return [head] + frames + [''.join(exception[0])]
974
974
975 def debugger(self,force=False):
975 def debugger(self,force=False):
976 """Call up the pdb debugger if desired, always clean up the tb
976 """Call up the pdb debugger if desired, always clean up the tb
977 reference.
977 reference.
978
978
979 Keywords:
979 Keywords:
980
980
981 - force(False): by default, this routine checks the instance call_pdb
981 - force(False): by default, this routine checks the instance call_pdb
982 flag and does not actually invoke the debugger if the flag is false.
982 flag and does not actually invoke the debugger if the flag is false.
983 The 'force' option forces the debugger to activate even if the flag
983 The 'force' option forces the debugger to activate even if the flag
984 is false.
984 is false.
985
985
986 If the call_pdb flag is set, the pdb interactive debugger is
986 If the call_pdb flag is set, the pdb interactive debugger is
987 invoked. In all cases, the self.tb reference to the current traceback
987 invoked. In all cases, the self.tb reference to the current traceback
988 is deleted to prevent lingering references which hamper memory
988 is deleted to prevent lingering references which hamper memory
989 management.
989 management.
990
990
991 Note that each call to pdb() does an 'import readline', so if your app
991 Note that each call to pdb() does an 'import readline', so if your app
992 requires a special setup for the readline completers, you'll have to
992 requires a special setup for the readline completers, you'll have to
993 fix that by hand after invoking the exception handler."""
993 fix that by hand after invoking the exception handler."""
994
994
995 if force or self.call_pdb:
995 if force or self.call_pdb:
996 if self.pdb is None:
996 if self.pdb is None:
997 self.pdb = debugger.Pdb(
997 self.pdb = debugger.Pdb(
998 self.color_scheme_table.active_scheme_name)
998 self.color_scheme_table.active_scheme_name)
999 # the system displayhook may have changed, restore the original
999 # the system displayhook may have changed, restore the original
1000 # for pdb
1000 # for pdb
1001 display_trap = DisplayTrap(hook=sys.__displayhook__)
1001 display_trap = DisplayTrap(hook=sys.__displayhook__)
1002 with display_trap:
1002 with display_trap:
1003 self.pdb.reset()
1003 self.pdb.reset()
1004 # Find the right frame so we don't pop up inside ipython itself
1004 # Find the right frame so we don't pop up inside ipython itself
1005 if hasattr(self,'tb') and self.tb is not None:
1005 if hasattr(self,'tb') and self.tb is not None:
1006 etb = self.tb
1006 etb = self.tb
1007 else:
1007 else:
1008 etb = self.tb = sys.last_traceback
1008 etb = self.tb = sys.last_traceback
1009 while self.tb is not None and self.tb.tb_next is not None:
1009 while self.tb is not None and self.tb.tb_next is not None:
1010 self.tb = self.tb.tb_next
1010 self.tb = self.tb.tb_next
1011 if etb and etb.tb_next:
1011 if etb and etb.tb_next:
1012 etb = etb.tb_next
1012 etb = etb.tb_next
1013 self.pdb.botframe = etb.tb_frame
1013 self.pdb.botframe = etb.tb_frame
1014 self.pdb.interaction(self.tb.tb_frame, self.tb)
1014 self.pdb.interaction(self.tb.tb_frame, self.tb)
1015
1015
1016 if hasattr(self,'tb'):
1016 if hasattr(self,'tb'):
1017 del self.tb
1017 del self.tb
1018
1018
1019 def handler(self, info=None):
1019 def handler(self, info=None):
1020 (etype, evalue, etb) = info or sys.exc_info()
1020 (etype, evalue, etb) = info or sys.exc_info()
1021 self.tb = etb
1021 self.tb = etb
1022 ostream = self.ostream
1022 ostream = self.ostream
1023 ostream.flush()
1023 ostream.flush()
1024 ostream.write(self.text(etype, evalue, etb))
1024 ostream.write(self.text(etype, evalue, etb))
1025 ostream.write('\n')
1025 ostream.write('\n')
1026 ostream.flush()
1026 ostream.flush()
1027
1027
1028 # Changed so an instance can just be called as VerboseTB_inst() and print
1028 # Changed so an instance can just be called as VerboseTB_inst() and print
1029 # out the right info on its own.
1029 # out the right info on its own.
1030 def __call__(self, etype=None, evalue=None, etb=None):
1030 def __call__(self, etype=None, evalue=None, etb=None):
1031 """This hook can replace sys.excepthook (for Python 2.1 or higher)."""
1031 """This hook can replace sys.excepthook (for Python 2.1 or higher)."""
1032 if etb is None:
1032 if etb is None:
1033 self.handler()
1033 self.handler()
1034 else:
1034 else:
1035 self.handler((etype, evalue, etb))
1035 self.handler((etype, evalue, etb))
1036 try:
1036 try:
1037 self.debugger()
1037 self.debugger()
1038 except KeyboardInterrupt:
1038 except KeyboardInterrupt:
1039 print("\nKeyboardInterrupt")
1039 print("\nKeyboardInterrupt")
1040
1040
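The __call__ hook above is what makes a VerboseTB instance usable as a global exception handler. A minimal sketch, not part of the diff; the IPython.core.ultratb import path is assumed from this file's contents:

    import sys
    from IPython.core import ultratb  # assumed module path for the classes above

    # Any uncaught exception is now rendered by VerboseTB.__call__, which
    # formats the traceback and (optionally) drops into pdb afterwards.
    sys.excepthook = ultratb.VerboseTB(color_scheme='NoColor', call_pdb=False)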
1041 #----------------------------------------------------------------------------
1041 #----------------------------------------------------------------------------
1042 class FormattedTB(VerboseTB, ListTB):
1042 class FormattedTB(VerboseTB, ListTB):
1043 """Subclass ListTB but allow calling with a traceback.
1043 """Subclass ListTB but allow calling with a traceback.
1044
1044
1045 It can thus be used as a sys.excepthook for Python > 2.1.
1045 It can thus be used as a sys.excepthook for Python > 2.1.
1046
1046
1047 Also adds 'Context' and 'Verbose' modes, not available in ListTB.
1047 Also adds 'Context' and 'Verbose' modes, not available in ListTB.
1048
1048
1049 Allows a tb_offset to be specified. This is useful for situations where
1049 Allows a tb_offset to be specified. This is useful for situations where
1050 one needs to remove a number of topmost frames from the traceback (such as
1050 one needs to remove a number of topmost frames from the traceback (such as
1051 occurs with python programs that themselves execute other python code,
1051 occurs with python programs that themselves execute other python code,
1052 like Python shells). """
1052 like Python shells). """
1053
1053
1054 def __init__(self, mode='Plain', color_scheme='Linux', call_pdb=False,
1054 def __init__(self, mode='Plain', color_scheme='Linux', call_pdb=False,
1055 ostream=None,
1055 ostream=None,
1056 tb_offset=0, long_header=False, include_vars=False,
1056 tb_offset=0, long_header=False, include_vars=False,
1057 check_cache=None):
1057 check_cache=None):
1058
1058
1059 # NEVER change the order of this list. Put new modes at the end:
1059 # NEVER change the order of this list. Put new modes at the end:
1060 self.valid_modes = ['Plain','Context','Verbose']
1060 self.valid_modes = ['Plain','Context','Verbose']
1061 self.verbose_modes = self.valid_modes[1:3]
1061 self.verbose_modes = self.valid_modes[1:3]
1062
1062
1063 VerboseTB.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
1063 VerboseTB.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
1064 ostream=ostream, tb_offset=tb_offset,
1064 ostream=ostream, tb_offset=tb_offset,
1065 long_header=long_header, include_vars=include_vars,
1065 long_header=long_header, include_vars=include_vars,
1066 check_cache=check_cache)
1066 check_cache=check_cache)
1067
1067
1068 # Different types of tracebacks are joined with different separators to
1068 # Different types of tracebacks are joined with different separators to
1069 # form a single string. They are taken from this dict
1069 # form a single string. They are taken from this dict
1070 self._join_chars = dict(Plain='', Context='\n', Verbose='\n')
1070 self._join_chars = dict(Plain='', Context='\n', Verbose='\n')
1071 # set_mode also sets the tb_join_char attribute
1071 # set_mode also sets the tb_join_char attribute
1072 self.set_mode(mode)
1072 self.set_mode(mode)
1073
1073
1074 def _extract_tb(self,tb):
1074 def _extract_tb(self,tb):
1075 if tb:
1075 if tb:
1076 return traceback.extract_tb(tb)
1076 return traceback.extract_tb(tb)
1077 else:
1077 else:
1078 return None
1078 return None
1079
1079
1080 def structured_traceback(self, etype, value, tb, tb_offset=None, context=5):
1080 def structured_traceback(self, etype, value, tb, tb_offset=None, context=5):
1081 tb_offset = self.tb_offset if tb_offset is None else tb_offset
1081 tb_offset = self.tb_offset if tb_offset is None else tb_offset
1082 mode = self.mode
1082 mode = self.mode
1083 if mode in self.verbose_modes:
1083 if mode in self.verbose_modes:
1084 # Verbose modes need a full traceback
1084 # Verbose modes need a full traceback
1085 return VerboseTB.structured_traceback(
1085 return VerboseTB.structured_traceback(
1086 self, etype, value, tb, tb_offset, context
1086 self, etype, value, tb, tb_offset, context
1087 )
1087 )
1088 else:
1088 else:
1089 # We must check the source cache because otherwise we can print
1089 # We must check the source cache because otherwise we can print
1090 # out-of-date source code.
1090 # out-of-date source code.
1091 self.check_cache()
1091 self.check_cache()
1092 # Now we can extract and format the exception
1092 # Now we can extract and format the exception
1093 elist = self._extract_tb(tb)
1093 elist = self._extract_tb(tb)
1094 return ListTB.structured_traceback(
1094 return ListTB.structured_traceback(
1095 self, etype, value, elist, tb_offset, context
1095 self, etype, value, elist, tb_offset, context
1096 )
1096 )
1097
1097
1098 def stb2text(self, stb):
1098 def stb2text(self, stb):
1099 """Convert a structured traceback (a list) to a string."""
1099 """Convert a structured traceback (a list) to a string."""
1100 return self.tb_join_char.join(stb)
1100 return self.tb_join_char.join(stb)
1101
1101
1102
1102
1103 def set_mode(self,mode=None):
1103 def set_mode(self,mode=None):
1104 """Switch to the desired mode.
1104 """Switch to the desired mode.
1105
1105
1106 If mode is not specified, cycles through the available modes."""
1106 If mode is not specified, cycles through the available modes."""
1107
1107
1108 if not mode:
1108 if not mode:
1109 new_idx = ( self.valid_modes.index(self.mode) + 1 ) % \
1109 new_idx = ( self.valid_modes.index(self.mode) + 1 ) % \
1110 len(self.valid_modes)
1110 len(self.valid_modes)
1111 self.mode = self.valid_modes[new_idx]
1111 self.mode = self.valid_modes[new_idx]
1112 elif mode not in self.valid_modes:
1112 elif mode not in self.valid_modes:
1113 raise ValueError('Unrecognized mode in FormattedTB: <'+mode+'>\n'
1113 raise ValueError('Unrecognized mode in FormattedTB: <'+mode+'>\n'
1114 'Valid modes: '+str(self.valid_modes))
1114 'Valid modes: '+str(self.valid_modes))
1115 else:
1115 else:
1116 self.mode = mode
1116 self.mode = mode
1117 # include variable details only in 'Verbose' mode
1117 # include variable details only in 'Verbose' mode
1118 self.include_vars = (self.mode == self.valid_modes[2])
1118 self.include_vars = (self.mode == self.valid_modes[2])
1119 # Set the join character for generating text tracebacks
1119 # Set the join character for generating text tracebacks
1120 self.tb_join_char = self._join_chars[self.mode]
1120 self.tb_join_char = self._join_chars[self.mode]
1121
1121
1122 # some convenient shortcuts
1122 # some convenient shortcuts
1123 def plain(self):
1123 def plain(self):
1124 self.set_mode(self.valid_modes[0])
1124 self.set_mode(self.valid_modes[0])
1125
1125
1126 def context(self):
1126 def context(self):
1127 self.set_mode(self.valid_modes[1])
1127 self.set_mode(self.valid_modes[1])
1128
1128
1129 def verbose(self):
1129 def verbose(self):
1130 self.set_mode(self.valid_modes[2])
1130 self.set_mode(self.valid_modes[2])
1131
1131
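A small sketch of how the mode machinery above behaves, not part of the diff; it assumes this module is importable as IPython.core.ultratb:

    import sys
    from IPython.core import ultratb  # assumed import path

    ftb = ultratb.FormattedTB(mode='Context', color_scheme='NoColor')
    try:
        1 / 0
    except ZeroDivisionError:
        # 'Context' mode delegates to VerboseTB and shows source context
        print(ftb.text(*sys.exc_info()))
    ftb.verbose()    # shortcut for set_mode('Verbose'); also turns on include_vars
    ftb.set_mode()   # no argument: cycle Plain -> Context -> Verbose -> Plain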
1132 #----------------------------------------------------------------------------
1132 #----------------------------------------------------------------------------
1133 class AutoFormattedTB(FormattedTB):
1133 class AutoFormattedTB(FormattedTB):
1134 """A traceback printer which can be called on the fly.
1134 """A traceback printer which can be called on the fly.
1135
1135
1136 It will find out about exceptions by itself.
1136 It will find out about exceptions by itself.
1137
1137
1138 A brief example::
1138 A brief example::
1139
1139
1140 AutoTB = AutoFormattedTB(mode = 'Verbose',color_scheme='Linux')
1140 AutoTB = AutoFormattedTB(mode = 'Verbose',color_scheme='Linux')
1141 try:
1141 try:
1142 ...
1142 ...
1143 except:
1143 except:
1144 AutoTB() # or AutoTB(out=logfile) where logfile is an open file object
1144 AutoTB() # or AutoTB(out=logfile) where logfile is an open file object
1145 """
1145 """
1146
1146
1147 def __call__(self,etype=None,evalue=None,etb=None,
1147 def __call__(self,etype=None,evalue=None,etb=None,
1148 out=None,tb_offset=None):
1148 out=None,tb_offset=None):
1149 """Print out a formatted exception traceback.
1149 """Print out a formatted exception traceback.
1150
1150
1151 Optional arguments:
1151 Optional arguments:
1152 - out: an open file-like object to direct output to.
1152 - out: an open file-like object to direct output to.
1153
1153
1154 - tb_offset: the number of frames to skip over in the stack, on a
1154 - tb_offset: the number of frames to skip over in the stack, on a
1155 per-call basis (this temporarily overrides the instance's tb_offset
1155 per-call basis (this temporarily overrides the instance's tb_offset
1156 given at initialization time). """
1156 given at initialization time). """
1157
1157
1158
1158
1159 if out is None:
1159 if out is None:
1160 out = self.ostream
1160 out = self.ostream
1161 out.flush()
1161 out.flush()
1162 out.write(self.text(etype, evalue, etb, tb_offset))
1162 out.write(self.text(etype, evalue, etb, tb_offset))
1163 out.write('\n')
1163 out.write('\n')
1164 out.flush()
1164 out.flush()
1165 # FIXME: we should remove the auto pdb behavior from here and leave
1165 # FIXME: we should remove the auto pdb behavior from here and leave
1166 # that to the clients.
1166 # that to the clients.
1167 try:
1167 try:
1168 self.debugger()
1168 self.debugger()
1169 except KeyboardInterrupt:
1169 except KeyboardInterrupt:
1170 print("\nKeyboardInterrupt")
1170 print("\nKeyboardInterrupt")
1171
1171
1172 def structured_traceback(self, etype=None, value=None, tb=None,
1172 def structured_traceback(self, etype=None, value=None, tb=None,
1173 tb_offset=None, context=5):
1173 tb_offset=None, context=5):
1174 if etype is None:
1174 if etype is None:
1175 etype,value,tb = sys.exc_info()
1175 etype,value,tb = sys.exc_info()
1176 self.tb = tb
1176 self.tb = tb
1177 return FormattedTB.structured_traceback(
1177 return FormattedTB.structured_traceback(
1178 self, etype, value, tb, tb_offset, context)
1178 self, etype, value, tb, tb_offset, context)
1179
1179
1180 #---------------------------------------------------------------------------
1180 #---------------------------------------------------------------------------
1181
1181
1182 # A simple class to preserve Nathan's original functionality.
1182 # A simple class to preserve Nathan's original functionality.
1183 class ColorTB(FormattedTB):
1183 class ColorTB(FormattedTB):
1184 """Shorthand to initialize a FormattedTB in Linux colors mode."""
1184 """Shorthand to initialize a FormattedTB in Linux colors mode."""
1185 def __init__(self,color_scheme='Linux',call_pdb=0):
1185 def __init__(self,color_scheme='Linux',call_pdb=0):
1186 FormattedTB.__init__(self,color_scheme=color_scheme,
1186 FormattedTB.__init__(self,color_scheme=color_scheme,
1187 call_pdb=call_pdb)
1187 call_pdb=call_pdb)
1188
1188
1189
1189
1190 class SyntaxTB(ListTB):
1190 class SyntaxTB(ListTB):
1191 """Extension which holds some state: the last exception value"""
1191 """Extension which holds some state: the last exception value"""
1192
1192
1193 def __init__(self,color_scheme = 'NoColor'):
1193 def __init__(self,color_scheme = 'NoColor'):
1194 ListTB.__init__(self,color_scheme)
1194 ListTB.__init__(self,color_scheme)
1195 self.last_syntax_error = None
1195 self.last_syntax_error = None
1196
1196
1197 def __call__(self, etype, value, elist):
1197 def __call__(self, etype, value, elist):
1198 self.last_syntax_error = value
1198 self.last_syntax_error = value
1199 ListTB.__call__(self,etype,value,elist)
1199 ListTB.__call__(self,etype,value,elist)
1200
1200
1201 def structured_traceback(self, etype, value, elist, tb_offset=None,
1201 def structured_traceback(self, etype, value, elist, tb_offset=None,
1202 context=5):
1202 context=5):
1203 # If the source file has been edited, the line in the syntax error can
1203 # If the source file has been edited, the line in the syntax error can
1204 # be wrong (retrieved from an outdated cache). This replaces it with
1204 # be wrong (retrieved from an outdated cache). This replaces it with
1205 # the current value.
1205 # the current value.
1206 if isinstance(value, SyntaxError) \
1206 if isinstance(value, SyntaxError) \
1207 and isinstance(value.filename, py3compat.string_types) \
1207 and isinstance(value.filename, py3compat.string_types) \
1208 and isinstance(value.lineno, int):
1208 and isinstance(value.lineno, int):
1209 linecache.checkcache(value.filename)
1209 linecache.checkcache(value.filename)
1210 newtext = ulinecache.getline(value.filename, value.lineno)
1210 newtext = ulinecache.getline(value.filename, value.lineno)
1211 if newtext:
1211 if newtext:
1212 value.text = newtext
1212 value.text = newtext
1213 return super(SyntaxTB, self).structured_traceback(etype, value, elist,
1213 return super(SyntaxTB, self).structured_traceback(etype, value, elist,
1214 tb_offset=tb_offset, context=context)
1214 tb_offset=tb_offset, context=context)
1215
1215
1216 def clear_err_state(self):
1216 def clear_err_state(self):
1217 """Return the current error state and clear it"""
1217 """Return the current error state and clear it"""
1218 e = self.last_syntax_error
1218 e = self.last_syntax_error
1219 self.last_syntax_error = None
1219 self.last_syntax_error = None
1220 return e
1220 return e
1221
1221
1222 def stb2text(self, stb):
1222 def stb2text(self, stb):
1223 """Convert a structured traceback (a list) to a string."""
1223 """Convert a structured traceback (a list) to a string."""
1224 return ''.join(stb)
1224 return ''.join(stb)
1225
1225
1226
1226
1227 #----------------------------------------------------------------------------
1227 #----------------------------------------------------------------------------
1228 # module testing (minimal)
1228 # module testing (minimal)
1229 if __name__ == "__main__":
1229 if __name__ == "__main__":
1230 def spam(c, d_e):
1230 def spam(c, d_e):
1231 (d, e) = d_e
1231 (d, e) = d_e
1232 x = c + d
1232 x = c + d
1233 y = c * d
1233 y = c * d
1234 foo(x, y)
1234 foo(x, y)
1235
1235
1236 def foo(a, b, bar=1):
1236 def foo(a, b, bar=1):
1237 eggs(a, b + bar)
1237 eggs(a, b + bar)
1238
1238
1239 def eggs(f, g, z=globals()):
1239 def eggs(f, g, z=globals()):
1240 h = f + g
1240 h = f + g
1241 i = f - g
1241 i = f - g
1242 return h / i
1242 return h / i
1243
1243
1244 print('')
1244 print('')
1245 print('*** Before ***')
1245 print('*** Before ***')
1246 try:
1246 try:
1247 print(spam(1, (2, 3)))
1247 print(spam(1, (2, 3)))
1248 except:
1248 except:
1249 traceback.print_exc()
1249 traceback.print_exc()
1250 print('')
1250 print('')
1251
1251
1252 handler = ColorTB()
1252 handler = ColorTB()
1253 print('*** ColorTB ***')
1253 print('*** ColorTB ***')
1254 try:
1254 try:
1255 print(spam(1, (2, 3)))
1255 print(spam(1, (2, 3)))
1256 except:
1256 except:
1257 handler(*sys.exc_info())
1257 handler(*sys.exc_info())
1258 print('')
1258 print('')
1259
1259
1260 handler = VerboseTB()
1260 handler = VerboseTB()
1261 print('*** VerboseTB ***')
1261 print('*** VerboseTB ***')
1262 try:
1262 try:
1263 print(spam(1, (2, 3)))
1263 print(spam(1, (2, 3)))
1264 except:
1264 except:
1265 handler(*sys.exc_info())
1265 handler(*sys.exc_info())
1266 print('')
1266 print('')
1267
1267
@@ -1,221 +1,221 b''
1 ########################## LICENCE ###############################
1 ########################## LICENCE ###############################
2
2
3 # Copyright (c) 2005-2012, Michele Simionato
3 # Copyright (c) 2005-2012, Michele Simionato
4 # All rights reserved.
4 # All rights reserved.
5
5
6 # Redistribution and use in source and binary forms, with or without
6 # Redistribution and use in source and binary forms, with or without
7 # modification, are permitted provided that the following conditions are
7 # modification, are permitted provided that the following conditions are
8 # met:
8 # met:
9
9
10 # Redistributions of source code must retain the above copyright
10 # Redistributions of source code must retain the above copyright
11 # notice, this list of conditions and the following disclaimer.
11 # notice, this list of conditions and the following disclaimer.
12 # Redistributions in bytecode form must reproduce the above copyright
12 # Redistributions in bytecode form must reproduce the above copyright
13 # notice, this list of conditions and the following disclaimer in
13 # notice, this list of conditions and the following disclaimer in
14 # the documentation and/or other materials provided with the
14 # the documentation and/or other materials provided with the
15 # distribution.
15 # distribution.
16
16
17 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 # HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
21 # HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
22 # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
22 # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
23 # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
23 # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
24 # OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24 # OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
25 # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
25 # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
26 # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
26 # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
27 # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
27 # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
28 # DAMAGE.
28 # DAMAGE.
29
29
30 """
30 """
31 Decorator module, see http://pypi.python.org/pypi/decorator
31 Decorator module, see http://pypi.python.org/pypi/decorator
32 for the documentation.
32 for the documentation.
33 """
33 """
34 from __future__ import print_function
34 from __future__ import print_function
35
35
36 __version__ = '3.3.3'
36 __version__ = '3.3.3'
37
37
38 __all__ = ["decorator", "FunctionMaker", "partial"]
38 __all__ = ["decorator", "FunctionMaker", "partial"]
39
39
40 import sys, re, inspect
40 import sys, re, inspect
41
41
42 try:
42 try:
43 from functools import partial
43 from functools import partial
44 except ImportError: # for Python version < 2.5
44 except ImportError: # for Python version < 2.5
45 class partial(object):
45 class partial(object):
46 "A simple replacement of functools.partial"
46 "A simple replacement of functools.partial"
47 def __init__(self, func, *args, **kw):
47 def __init__(self, func, *args, **kw):
48 self.func = func
48 self.func = func
49 self.args = args
49 self.args = args
50 self.keywords = kw
50 self.keywords = kw
51 def __call__(self, *otherargs, **otherkw):
51 def __call__(self, *otherargs, **otherkw):
52 kw = self.keywords.copy()
52 kw = self.keywords.copy()
53 kw.update(otherkw)
53 kw.update(otherkw)
54 return self.func(*(self.args + otherargs), **kw)
54 return self.func(*(self.args + otherargs), **kw)
55
55
56 if sys.version >= '3':
56 if sys.version >= '3':
57 from inspect import getfullargspec
57 from inspect import getfullargspec
58 else:
58 else:
59 class getfullargspec(object):
59 class getfullargspec(object):
60 "A quick and dirty replacement for getfullargspec for Python 2.X"
60 "A quick and dirty replacement for getfullargspec for Python 2.X"
61 def __init__(self, f):
61 def __init__(self, f):
62 self.args, self.varargs, self.varkw, self.defaults = \
62 self.args, self.varargs, self.varkw, self.defaults = \
63 inspect.getargspec(f)
63 inspect.getargspec(f)
64 self.kwonlyargs = []
64 self.kwonlyargs = []
65 self.kwonlydefaults = None
65 self.kwonlydefaults = None
66 def __iter__(self):
66 def __iter__(self):
67 yield self.args
67 yield self.args
68 yield self.varargs
68 yield self.varargs
69 yield self.varkw
69 yield self.varkw
70 yield self.defaults
70 yield self.defaults
71
71
72 DEF = re.compile('\s*def\s*([_\w][_\w\d]*)\s*\(')
72 DEF = re.compile('\s*def\s*([_\w][_\w\d]*)\s*\(')
73
73
74 # basic functionality
74 # basic functionality
75 class FunctionMaker(object):
75 class FunctionMaker(object):
76 """
76 """
77 An object with the ability to create functions with a given signature.
77 An object with the ability to create functions with a given signature.
78 It has attributes name, doc, module, signature, defaults, dict and
78 It has attributes name, doc, module, signature, defaults, dict and
79 methods update and make.
79 methods update and make.
80 """
80 """
81 def __init__(self, func=None, name=None, signature=None,
81 def __init__(self, func=None, name=None, signature=None,
82 defaults=None, doc=None, module=None, funcdict=None):
82 defaults=None, doc=None, module=None, funcdict=None):
83 self.shortsignature = signature
83 self.shortsignature = signature
84 if func:
84 if func:
85 # func can be a class or a callable, but not an instance method
85 # func can be a class or a callable, but not an instance method
86 self.name = func.__name__
86 self.name = func.__name__
87 if self.name == '<lambda>': # small hack for lambda functions
87 if self.name == '<lambda>': # small hack for lambda functions
88 self.name = '_lambda_'
88 self.name = '_lambda_'
89 self.doc = func.__doc__
89 self.doc = func.__doc__
90 self.module = func.__module__
90 self.module = func.__module__
91 if inspect.isfunction(func):
91 if inspect.isfunction(func):
92 argspec = getfullargspec(func)
92 argspec = getfullargspec(func)
93 self.annotations = getattr(func, '__annotations__', {})
93 self.annotations = getattr(func, '__annotations__', {})
94 for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs',
94 for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs',
95 'kwonlydefaults'):
95 'kwonlydefaults'):
96 setattr(self, a, getattr(argspec, a))
96 setattr(self, a, getattr(argspec, a))
97 for i, arg in enumerate(self.args):
97 for i, arg in enumerate(self.args):
98 setattr(self, 'arg%d' % i, arg)
98 setattr(self, 'arg%d' % i, arg)
99 if sys.version < '3': # easy way
99 if sys.version < '3': # easy way
100 self.shortsignature = self.signature = \
100 self.shortsignature = self.signature = \
101 inspect.formatargspec(
101 inspect.formatargspec(
102 formatvalue=lambda val: "", *argspec)[1:-1]
102 formatvalue=lambda val: "", *argspec)[1:-1]
103 else: # Python 3 way
103 else: # Python 3 way
104 self.signature = self.shortsignature = ', '.join(self.args)
104 self.signature = self.shortsignature = ', '.join(self.args)
105 if self.varargs:
105 if self.varargs:
106 self.signature += ', *' + self.varargs
106 self.signature += ', *' + self.varargs
107 self.shortsignature += ', *' + self.varargs
107 self.shortsignature += ', *' + self.varargs
108 if self.kwonlyargs:
108 if self.kwonlyargs:
109 for a in self.kwonlyargs:
109 for a in self.kwonlyargs:
110 self.signature += ', %s=None' % a
110 self.signature += ', %s=None' % a
111 self.shortsignature += ', %s=%s' % (a, a)
111 self.shortsignature += ', %s=%s' % (a, a)
112 if self.varkw:
112 if self.varkw:
113 self.signature += ', **' + self.varkw
113 self.signature += ', **' + self.varkw
114 self.shortsignature += ', **' + self.varkw
114 self.shortsignature += ', **' + self.varkw
115 self.dict = func.__dict__.copy()
115 self.dict = func.__dict__.copy()
116 # func=None happens when decorating a caller
116 # func=None happens when decorating a caller
117 if name:
117 if name:
118 self.name = name
118 self.name = name
119 if signature is not None:
119 if signature is not None:
120 self.signature = signature
120 self.signature = signature
121 if defaults:
121 if defaults:
122 self.defaults = defaults
122 self.defaults = defaults
123 if doc:
123 if doc:
124 self.doc = doc
124 self.doc = doc
125 if module:
125 if module:
126 self.module = module
126 self.module = module
127 if funcdict:
127 if funcdict:
128 self.dict = funcdict
128 self.dict = funcdict
129 # check existence required attributes
129 # check existence required attributes
130 assert hasattr(self, 'name')
130 assert hasattr(self, 'name')
131 if not hasattr(self, 'signature'):
131 if not hasattr(self, 'signature'):
132 raise TypeError('You are decorating a non function: %s' % func)
132 raise TypeError('You are decorating a non function: %s' % func)
133
133
134 def update(self, func, **kw):
134 def update(self, func, **kw):
135 "Update the signature of func with the data in self"
135 "Update the signature of func with the data in self"
136 func.__name__ = self.name
136 func.__name__ = self.name
137 func.__doc__ = getattr(self, 'doc', None)
137 func.__doc__ = getattr(self, 'doc', None)
138 func.__dict__ = getattr(self, 'dict', {})
138 func.__dict__ = getattr(self, 'dict', {})
139 func.func_defaults = getattr(self, 'defaults', ())
139 func.__defaults__ = getattr(self, 'defaults', ())
140 func.__kwdefaults__ = getattr(self, 'kwonlydefaults', None)
140 func.__kwdefaults__ = getattr(self, 'kwonlydefaults', None)
141 func.__annotations__ = getattr(self, 'annotations', None)
141 func.__annotations__ = getattr(self, 'annotations', None)
142 callermodule = sys._getframe(3).f_globals.get('__name__', '?')
142 callermodule = sys._getframe(3).f_globals.get('__name__', '?')
143 func.__module__ = getattr(self, 'module', callermodule)
143 func.__module__ = getattr(self, 'module', callermodule)
144 func.__dict__.update(kw)
144 func.__dict__.update(kw)
145
145
146 def make(self, src_templ, evaldict=None, addsource=False, **attrs):
146 def make(self, src_templ, evaldict=None, addsource=False, **attrs):
147 "Make a new function from a given template and update the signature"
147 "Make a new function from a given template and update the signature"
148 src = src_templ % vars(self) # expand name and signature
148 src = src_templ % vars(self) # expand name and signature
149 evaldict = evaldict or {}
149 evaldict = evaldict or {}
150 mo = DEF.match(src)
150 mo = DEF.match(src)
151 if mo is None:
151 if mo is None:
152 raise SyntaxError('not a valid function template\n%s' % src)
152 raise SyntaxError('not a valid function template\n%s' % src)
153 name = mo.group(1) # extract the function name
153 name = mo.group(1) # extract the function name
154 names = set([name] + [arg.strip(' *') for arg in
154 names = set([name] + [arg.strip(' *') for arg in
155 self.shortsignature.split(',')])
155 self.shortsignature.split(',')])
156 for n in names:
156 for n in names:
157 if n in ('_func_', '_call_'):
157 if n in ('_func_', '_call_'):
158 raise NameError('%s is overridden in\n%s' % (n, src))
158 raise NameError('%s is overridden in\n%s' % (n, src))
159 if not src.endswith('\n'): # add a newline just for safety
159 if not src.endswith('\n'): # add a newline just for safety
160 src += '\n' # this is needed in old versions of Python
160 src += '\n' # this is needed in old versions of Python
161 try:
161 try:
162 code = compile(src, '<string>', 'single')
162 code = compile(src, '<string>', 'single')
163 # print >> sys.stderr, 'Compiling %s' % src
163 # print >> sys.stderr, 'Compiling %s' % src
164 exec(code, evaldict)
164 exec(code, evaldict)
165 except:
165 except:
166 print('Error in generated code:', file=sys.stderr)
166 print('Error in generated code:', file=sys.stderr)
167 print(src, file=sys.stderr)
167 print(src, file=sys.stderr)
168 raise
168 raise
169 func = evaldict[name]
169 func = evaldict[name]
170 if addsource:
170 if addsource:
171 attrs['__source__'] = src
171 attrs['__source__'] = src
172 self.update(func, **attrs)
172 self.update(func, **attrs)
173 return func
173 return func
174
174
175 @classmethod
175 @classmethod
176 def create(cls, obj, body, evaldict, defaults=None,
176 def create(cls, obj, body, evaldict, defaults=None,
177 doc=None, module=None, addsource=True, **attrs):
177 doc=None, module=None, addsource=True, **attrs):
178 """
178 """
179 Create a function from the strings name, signature and body.
179 Create a function from the strings name, signature and body.
180 evaldict is the evaluation dictionary. If addsource is true an attribute
180 evaldict is the evaluation dictionary. If addsource is true an attribute
181 __source__ is added to the result. The attributes attrs are added,
181 __source__ is added to the result. The attributes attrs are added,
182 if any.
182 if any.
183 """
183 """
184 if isinstance(obj, str): # "name(signature)"
184 if isinstance(obj, str): # "name(signature)"
185 name, rest = obj.strip().split('(', 1)
185 name, rest = obj.strip().split('(', 1)
186 signature = rest[:-1] # strip the trailing right paren
186 signature = rest[:-1] # strip the trailing right paren
187 func = None
187 func = None
188 else: # a function
188 else: # a function
189 name = None
189 name = None
190 signature = None
190 signature = None
191 func = obj
191 func = obj
192 self = cls(func, name, signature, defaults, doc, module)
192 self = cls(func, name, signature, defaults, doc, module)
193 ibody = '\n'.join(' ' + line for line in body.splitlines())
193 ibody = '\n'.join(' ' + line for line in body.splitlines())
194 return self.make('def %(name)s(%(signature)s):\n' + ibody,
194 return self.make('def %(name)s(%(signature)s):\n' + ibody,
195 evaldict, addsource, **attrs)
195 evaldict, addsource, **attrs)
196
196
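A hedged sketch of FunctionMaker.create in use, not part of the diff; the import path is assumed, and the names double_it, _double and _double_ are purely illustrative:

    from IPython.external.decorator import FunctionMaker  # assumed import path

    def _double(x):
        return 2 * x

    # Build a new function named double_it(x) whose body resolves _double_ via evaldict.
    double_it = FunctionMaker.create(
        'double_it(x)',                  # "name(signature)" string form
        'return _double_(x)',            # body template; names come from evaldict
        dict(_double_=_double),          # the evaluation dictionary
        doc='Return twice the argument.')

    print(double_it(21))   # -> 42; __name__, __doc__ and the signature are set by update()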
197 def decorator(caller, func=None):
197 def decorator(caller, func=None):
198 """
198 """
199 decorator(caller) converts a caller function into a decorator;
199 decorator(caller) converts a caller function into a decorator;
200 decorator(caller, func) decorates a function using a caller.
200 decorator(caller, func) decorates a function using a caller.
201 """
201 """
202 if func is not None: # returns a decorated function
202 if func is not None: # returns a decorated function
203 evaldict = func.func_globals.copy()
203 evaldict = func.__globals__.copy()
204 evaldict['_call_'] = caller
204 evaldict['_call_'] = caller
205 evaldict['_func_'] = func
205 evaldict['_func_'] = func
206 return FunctionMaker.create(
206 return FunctionMaker.create(
207 func, "return _call_(_func_, %(shortsignature)s)",
207 func, "return _call_(_func_, %(shortsignature)s)",
208 evaldict, undecorated=func, __wrapped__=func)
208 evaldict, undecorated=func, __wrapped__=func)
209 else: # returns a decorator
209 else: # returns a decorator
210 if isinstance(caller, partial):
210 if isinstance(caller, partial):
211 return partial(decorator, caller)
211 return partial(decorator, caller)
212 # otherwise assume caller is a function
212 # otherwise assume caller is a function
213 first = inspect.getargspec(caller)[0][0] # first arg
213 first = inspect.getargspec(caller)[0][0] # first arg
214 evaldict = caller.func_globals.copy()
214 evaldict = caller.__globals__.copy()
215 evaldict['_call_'] = caller
215 evaldict['_call_'] = caller
216 evaldict['decorator'] = decorator
216 evaldict['decorator'] = decorator
217 return FunctionMaker.create(
217 return FunctionMaker.create(
218 '%s(%s)' % (caller.__name__, first),
218 '%s(%s)' % (caller.__name__, first),
219 'return decorator(_call_, %s)' % first,
219 'return decorator(_call_, %s)' % first,
220 evaldict, undecorated=caller, __wrapped__=caller,
220 evaldict, undecorated=caller, __wrapped__=caller,
221 doc=caller.__doc__, module=caller.__module__)
221 doc=caller.__doc__, module=caller.__module__)
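For reference, a sketch of the two call forms described in the docstring above (not part of the diff; trace and add are made-up names):

    from IPython.external.decorator import decorator

    @decorator
    def trace(f, *args, **kw):
        # caller convention: the first argument is the decorated function
        print("calling %s" % f.__name__)
        return f(*args, **kw)

    @trace
    def add(a, b):
        return a + b

    print(add(1, 2))   # prints "calling add" then 3; add(a, b) keeps its signature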
@@ -1,226 +1,226 b''
1 """Dependency utilities
1 """Dependency utilities
2
2
3 Authors:
3 Authors:
4
4
5 * Min RK
5 * Min RK
6 """
6 """
7 #-----------------------------------------------------------------------------
7 #-----------------------------------------------------------------------------
8 # Copyright (C) 2013 The IPython Development Team
8 # Copyright (C) 2013 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13
13
14 from types import ModuleType
14 from types import ModuleType
15
15
16 from IPython.parallel.client.asyncresult import AsyncResult
16 from IPython.parallel.client.asyncresult import AsyncResult
17 from IPython.parallel.error import UnmetDependency
17 from IPython.parallel.error import UnmetDependency
18 from IPython.parallel.util import interactive
18 from IPython.parallel.util import interactive
19 from IPython.utils import py3compat
19 from IPython.utils import py3compat
20 from IPython.utils.py3compat import string_types
20 from IPython.utils.py3compat import string_types
21 from IPython.utils.pickleutil import can, uncan
21 from IPython.utils.pickleutil import can, uncan
22
22
23 class depend(object):
23 class depend(object):
24 """Dependency decorator, for use with tasks.
24 """Dependency decorator, for use with tasks.
25
25
26 `@depend` lets you define a function for engine dependencies
26 `@depend` lets you define a function for engine dependencies
27 just like you use `apply` for tasks.
27 just like you use `apply` for tasks.
28
28
29
29
30 Examples
30 Examples
31 --------
31 --------
32 ::
32 ::
33
33
34 @depend(df, a,b, c=5)
34 @depend(df, a,b, c=5)
35 def f(m,n,p):
35 def f(m,n,p):
36
36
37 view.apply(f, 1,2,3)
37 view.apply(f, 1,2,3)
38
38
39 will call df(a,b,c=5) on the engine, and if it returns False or
39 will call df(a,b,c=5) on the engine, and if it returns False or
40 raises an UnmetDependency error, then the task will not be run
40 raises an UnmetDependency error, then the task will not be run
41 and another engine will be tried.
41 and another engine will be tried.
42 """
42 """
43 def __init__(self, f, *args, **kwargs):
43 def __init__(self, f, *args, **kwargs):
44 self.f = f
44 self.f = f
45 self.args = args
45 self.args = args
46 self.kwargs = kwargs
46 self.kwargs = kwargs
47
47
48 def __call__(self, f):
48 def __call__(self, f):
49 return dependent(f, self.f, *self.args, **self.kwargs)
49 return dependent(f, self.f, *self.args, **self.kwargs)
50
50
51 class dependent(object):
51 class dependent(object):
52 """A function that depends on another function.
52 """A function that depends on another function.
53 It is implemented as an object to avoid the closures used
53 It is implemented as an object to avoid the closures used
54 by traditional decorators, which are not picklable.
54 by traditional decorators, which are not picklable.
55 """
55 """
56
56
57 def __init__(self, f, df, *dargs, **dkwargs):
57 def __init__(self, f, df, *dargs, **dkwargs):
58 self.f = f
58 self.f = f
59 self.func_name = getattr(f, '__name__', 'f')
59 self.__name__ = getattr(f, '__name__', 'f')
60 self.df = df
60 self.df = df
61 self.dargs = dargs
61 self.dargs = dargs
62 self.dkwargs = dkwargs
62 self.dkwargs = dkwargs
63
63
64 def check_dependency(self):
64 def check_dependency(self):
65 if self.df(*self.dargs, **self.dkwargs) is False:
65 if self.df(*self.dargs, **self.dkwargs) is False:
66 raise UnmetDependency()
66 raise UnmetDependency()
67
67
68 def __call__(self, *args, **kwargs):
68 def __call__(self, *args, **kwargs):
69 return self.f(*args, **kwargs)
69 return self.f(*args, **kwargs)
70
70
71 if not py3compat.PY3:
71 if not py3compat.PY3:
72 @property
72 @property
73 def __name__(self):
73 def __name__(self):
74 return self.func_name
74 return self.__name__
75
75
76 @interactive
76 @interactive
77 def _require(*modules, **mapping):
77 def _require(*modules, **mapping):
78 """Helper for @require decorator."""
78 """Helper for @require decorator."""
79 from IPython.parallel.error import UnmetDependency
79 from IPython.parallel.error import UnmetDependency
80 from IPython.utils.pickleutil import uncan
80 from IPython.utils.pickleutil import uncan
81 user_ns = globals()
81 user_ns = globals()
82 for name in modules:
82 for name in modules:
83 try:
83 try:
84 exec('import %s' % name, user_ns)
84 exec('import %s' % name, user_ns)
85 except ImportError:
85 except ImportError:
86 raise UnmetDependency(name)
86 raise UnmetDependency(name)
87
87
88 for name, cobj in mapping.items():
88 for name, cobj in mapping.items():
89 user_ns[name] = uncan(cobj, user_ns)
89 user_ns[name] = uncan(cobj, user_ns)
90 return True
90 return True
91
91
92 def require(*objects, **mapping):
92 def require(*objects, **mapping):
93 """Simple decorator for requiring local objects and modules to be available
93 """Simple decorator for requiring local objects and modules to be available
94 when the decorated function is called on the engine.
94 when the decorated function is called on the engine.
95
95
96 Modules specified by name or passed directly will be imported
96 Modules specified by name or passed directly will be imported
97 prior to calling the decorated function.
97 prior to calling the decorated function.
98
98
99 Objects other than modules will be pushed as a part of the task.
99 Objects other than modules will be pushed as a part of the task.
100 Functions can be passed positionally,
100 Functions can be passed positionally,
101 and will be pushed to the engine with their __name__.
101 and will be pushed to the engine with their __name__.
102 Other objects can be passed by keyword arg.
102 Other objects can be passed by keyword arg.
103
103
104 Examples
104 Examples
105 --------
105 --------
106
106
107 In [1]: @require('numpy')
107 In [1]: @require('numpy')
108 ...: def norm(a):
108 ...: def norm(a):
109 ...: return numpy.linalg.norm(a,2)
109 ...: return numpy.linalg.norm(a,2)
110
110
111 In [2]: foo = lambda x: x*x
111 In [2]: foo = lambda x: x*x
112 In [3]: @require(foo)
112 In [3]: @require(foo)
113 ...: def bar(a):
113 ...: def bar(a):
114 ...: return foo(1-a)
114 ...: return foo(1-a)
115 """
115 """
116 names = []
116 names = []
117 for obj in objects:
117 for obj in objects:
118 if isinstance(obj, ModuleType):
118 if isinstance(obj, ModuleType):
119 obj = obj.__name__
119 obj = obj.__name__
120
120
121 if isinstance(obj, string_types):
121 if isinstance(obj, string_types):
122 names.append(obj)
122 names.append(obj)
123 elif hasattr(obj, '__name__'):
123 elif hasattr(obj, '__name__'):
124 mapping[obj.__name__] = obj
124 mapping[obj.__name__] = obj
125 else:
125 else:
126 raise TypeError("Objects other than modules and functions "
126 raise TypeError("Objects other than modules and functions "
127 "must be passed by kwarg, but got: %s" % type(obj)
127 "must be passed by kwarg, but got: %s" % type(obj)
128 )
128 )
129
129
130 for name, obj in mapping.items():
130 for name, obj in mapping.items():
131 mapping[name] = can(obj)
131 mapping[name] = can(obj)
132 return depend(_require, *names, **mapping)
132 return depend(_require, *names, **mapping)
133
133
134 class Dependency(set):
134 class Dependency(set):
135 """An object for representing a set of msg_id dependencies.
135 """An object for representing a set of msg_id dependencies.
136
136
137 Subclassed from set().
137 Subclassed from set().
138
138
139 Parameters
139 Parameters
140 ----------
140 ----------
141 dependencies: list/set of msg_ids or AsyncResult objects or output of Dependency.as_dict()
141 dependencies: list/set of msg_ids or AsyncResult objects or output of Dependency.as_dict()
142 The msg_ids to depend on
142 The msg_ids to depend on
143 all : bool [default True]
143 all : bool [default True]
144 Whether the dependency should be considered met when *all* depending tasks have completed
144 Whether the dependency should be considered met when *all* depending tasks have completed
145 or only when *any* have been completed.
145 or only when *any* have been completed.
146 success : bool [default True]
146 success : bool [default True]
147 Whether to consider successes as fulfilling dependencies.
147 Whether to consider successes as fulfilling dependencies.
148 failure : bool [default False]
148 failure : bool [default False]
149 Whether to consider failures as fulfilling dependencies.
149 Whether to consider failures as fulfilling dependencies.
150
150
151 If `all=success=True` and `failure=False`, then the task will fail with an ImpossibleDependency
151 If `all=success=True` and `failure=False`, then the task will fail with an ImpossibleDependency
152 as soon as the first depended-upon task fails.
152 as soon as the first depended-upon task fails.
153 """
153 """
154
154
155 all=True
155 all=True
156 success=True
156 success=True
157 failure=True
157 failure=True
158
158
159 def __init__(self, dependencies=[], all=True, success=True, failure=False):
159 def __init__(self, dependencies=[], all=True, success=True, failure=False):
160 if isinstance(dependencies, dict):
160 if isinstance(dependencies, dict):
161 # load from dict
161 # load from dict
162 all = dependencies.get('all', True)
162 all = dependencies.get('all', True)
163 success = dependencies.get('success', success)
163 success = dependencies.get('success', success)
164 failure = dependencies.get('failure', failure)
164 failure = dependencies.get('failure', failure)
165 dependencies = dependencies.get('dependencies', [])
165 dependencies = dependencies.get('dependencies', [])
166 ids = []
166 ids = []
167
167
168 # extract ids from various sources:
168 # extract ids from various sources:
169 if isinstance(dependencies, string_types + (AsyncResult,)):
169 if isinstance(dependencies, string_types + (AsyncResult,)):
170 dependencies = [dependencies]
170 dependencies = [dependencies]
171 for d in dependencies:
171 for d in dependencies:
172 if isinstance(d, string_types):
172 if isinstance(d, string_types):
173 ids.append(d)
173 ids.append(d)
174 elif isinstance(d, AsyncResult):
174 elif isinstance(d, AsyncResult):
175 ids.extend(d.msg_ids)
175 ids.extend(d.msg_ids)
176 else:
176 else:
177 raise TypeError("invalid dependency type: %r"%type(d))
177 raise TypeError("invalid dependency type: %r"%type(d))
178
178
179 set.__init__(self, ids)
179 set.__init__(self, ids)
180 self.all = all
180 self.all = all
181 if not (success or failure):
181 if not (success or failure):
182 raise ValueError("Must depend on at least one of successes or failures!")
182 raise ValueError("Must depend on at least one of successes or failures!")
183 self.success=success
183 self.success=success
184 self.failure = failure
184 self.failure = failure
185
185
186 def check(self, completed, failed=None):
186 def check(self, completed, failed=None):
187 """check whether our dependencies have been met."""
187 """check whether our dependencies have been met."""
188 if len(self) == 0:
188 if len(self) == 0:
189 return True
189 return True
190 against = set()
190 against = set()
191 if self.success:
191 if self.success:
192 against = completed
192 against = completed
193 if failed is not None and self.failure:
193 if failed is not None and self.failure:
194 against = against.union(failed)
194 against = against.union(failed)
195 if self.all:
195 if self.all:
196 return self.issubset(against)
196 return self.issubset(against)
197 else:
197 else:
198 return not self.isdisjoint(against)
198 return not self.isdisjoint(against)
199
199
200 def unreachable(self, completed, failed=None):
200 def unreachable(self, completed, failed=None):
201 """return whether this dependency has become impossible."""
201 """return whether this dependency has become impossible."""
202 if len(self) == 0:
202 if len(self) == 0:
203 return False
203 return False
204 against = set()
204 against = set()
205 if not self.success:
205 if not self.success:
206 against = completed
206 against = completed
207 if failed is not None and not self.failure:
207 if failed is not None and not self.failure:
208 against = against.union(failed)
208 against = against.union(failed)
209 if self.all:
209 if self.all:
210 return not self.isdisjoint(against)
210 return not self.isdisjoint(against)
211 else:
211 else:
212 return self.issubset(against)
212 return self.issubset(against)
213
213
214
214
215 def as_dict(self):
215 def as_dict(self):
216 """Represent this dependency as a dict. For json compatibility."""
216 """Represent this dependency as a dict. For json compatibility."""
217 return dict(
217 return dict(
218 dependencies=list(self),
218 dependencies=list(self),
219 all=self.all,
219 all=self.all,
220 success=self.success,
220 success=self.success,
221 failure=self.failure
221 failure=self.failure
222 )
222 )
223
223
224
224
225 __all__ = ['depend', 'require', 'dependent', 'Dependency']
225 __all__ = ['depend', 'require', 'dependent', 'Dependency']
226
226
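A brief sketch of the Dependency class above in isolation, not part of the diff; the import path is assumed and the msg_id strings are made up:

    from IPython.parallel.controller.dependency import Dependency  # assumed path

    dep = Dependency(['msg-a', 'msg-b'], all=True, success=True, failure=False)

    print(dep.check({'msg-a'}))              # False: only one of two msg_ids is done
    print(dep.check({'msg-a', 'msg-b'}))     # True: all dependencies met
    print(dep.unreachable(set(), {'msg-a'})) # True: a required task failed, so it can never be met
    print(dep.as_dict())                     # JSON-friendly form, accepted back by Dependency(...)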
@@ -1,860 +1,860 b''
1 """The Python scheduler for rich scheduling.
1 """The Python scheduler for rich scheduling.
2
2
3 The Pure ZMQ scheduler does not allow routing schemes other than LRU,
3 The Pure ZMQ scheduler does not allow routing schemes other than LRU,
4 nor does it check msg_id DAG dependencies. For those, a slightly slower
4 nor does it check msg_id DAG dependencies. For those, a slightly slower
5 Python Scheduler exists.
5 Python Scheduler exists.
6
6
7 Authors:
7 Authors:
8
8
9 * Min RK
9 * Min RK
10 """
10 """
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12 # Copyright (C) 2010-2011 The IPython Development Team
12 # Copyright (C) 2010-2011 The IPython Development Team
13 #
13 #
14 # Distributed under the terms of the BSD License. The full license is in
14 # Distributed under the terms of the BSD License. The full license is in
15 # the file COPYING, distributed as part of this software.
15 # the file COPYING, distributed as part of this software.
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17
17
18 #----------------------------------------------------------------------
18 #----------------------------------------------------------------------
19 # Imports
19 # Imports
20 #----------------------------------------------------------------------
20 #----------------------------------------------------------------------
21
21
22 import logging
22 import logging
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from collections import deque
26 from collections import deque
27 from datetime import datetime
27 from datetime import datetime
28 from random import randint, random
28 from random import randint, random
29 from types import FunctionType
29 from types import FunctionType
30
30
31 try:
31 try:
32 import numpy
32 import numpy
33 except ImportError:
33 except ImportError:
34 numpy = None
34 numpy = None
35
35
36 import zmq
36 import zmq
37 from zmq.eventloop import ioloop, zmqstream
37 from zmq.eventloop import ioloop, zmqstream
38
38
39 # local imports
39 # local imports
40 from IPython.external.decorator import decorator
40 from IPython.external.decorator import decorator
41 from IPython.config.application import Application
41 from IPython.config.application import Application
42 from IPython.config.loader import Config
42 from IPython.config.loader import Config
43 from IPython.utils.traitlets import Instance, Dict, List, Set, Integer, Enum, CBytes
43 from IPython.utils.traitlets import Instance, Dict, List, Set, Integer, Enum, CBytes
44 from IPython.utils.py3compat import cast_bytes
44 from IPython.utils.py3compat import cast_bytes
45
45
46 from IPython.parallel import error, util
46 from IPython.parallel import error, util
47 from IPython.parallel.factory import SessionFactory
47 from IPython.parallel.factory import SessionFactory
48 from IPython.parallel.util import connect_logger, local_logger
48 from IPython.parallel.util import connect_logger, local_logger
49
49
50 from .dependency import Dependency
50 from .dependency import Dependency
51
51
52 @decorator
52 @decorator
53 def logged(f,self,*args,**kwargs):
53 def logged(f,self,*args,**kwargs):
54 # print ("#--------------------")
54 # print ("#--------------------")
55 self.log.debug("scheduler::%s(*%s,**%s)", f.func_name, args, kwargs)
55 self.log.debug("scheduler::%s(*%s,**%s)", f.__name__, args, kwargs)
56 # print ("#--")
56 # print ("#--")
57 return f(self,*args, **kwargs)
57 return f(self,*args, **kwargs)
58
58
59 #----------------------------------------------------------------------
59 #----------------------------------------------------------------------
60 # Chooser functions
60 # Chooser functions
61 #----------------------------------------------------------------------
61 #----------------------------------------------------------------------
62
62
63 def plainrandom(loads):
63 def plainrandom(loads):
64 """Plain random pick."""
64 """Plain random pick."""
65 n = len(loads)
65 n = len(loads)
66 return randint(0,n-1)
66 return randint(0,n-1)
67
67
68 def lru(loads):
68 def lru(loads):
69 """Always pick the front of the line.
69 """Always pick the front of the line.
70
70
71 The content of `loads` is ignored.
71 The content of `loads` is ignored.
72
72
73 Assumes LRU ordering of loads, with oldest first.
73 Assumes LRU ordering of loads, with oldest first.
74 """
74 """
75 return 0
75 return 0
76
76
77 def twobin(loads):
77 def twobin(loads):
78 """Pick two at random, use the LRU of the two.
78 """Pick two at random, use the LRU of the two.
79
79
80 The content of loads is ignored.
80 The content of loads is ignored.
81
81
82 Assumes LRU ordering of loads, with oldest first.
82 Assumes LRU ordering of loads, with oldest first.
83 """
83 """
84 n = len(loads)
84 n = len(loads)
85 a = randint(0,n-1)
85 a = randint(0,n-1)
86 b = randint(0,n-1)
86 b = randint(0,n-1)
87 return min(a,b)
87 return min(a,b)
88
88
89 def weighted(loads):
89 def weighted(loads):
90 """Pick two at random using inverse load as weight.
90 """Pick two at random using inverse load as weight.
91
91
92 Return the less loaded of the two.
92 Return the less loaded of the two.
93 """
93 """
94 # weight 0 a million times more than 1:
94 # weight 0 a million times more than 1:
95 weights = 1./(1e-6+numpy.array(loads))
95 weights = 1./(1e-6+numpy.array(loads))
96 sums = weights.cumsum()
96 sums = weights.cumsum()
97 t = sums[-1]
97 t = sums[-1]
98 x = random()*t
98 x = random()*t
99 y = random()*t
99 y = random()*t
100 idx = 0
100 idx = 0
101 idy = 0
101 idy = 0
102 while sums[idx] < x:
102 while sums[idx] < x:
103 idx += 1
103 idx += 1
104 while sums[idy] < y:
104 while sums[idy] < y:
105 idy += 1
105 idy += 1
106 if weights[idy] > weights[idx]:
106 if weights[idy] > weights[idx]:
107 return idy
107 return idy
108 else:
108 else:
109 return idx
109 return idx
110
110
111 def leastload(loads):
111 def leastload(loads):
112 """Always choose the lowest load.
112 """Always choose the lowest load.
113
113
114 If the lowest load occurs more than once, the first
114 If the lowest load occurs more than once, the first
115 occurrence will be used. If loads has LRU ordering, this means
115 occurrence will be used. If loads has LRU ordering, this means
116 the LRU of those with the lowest load is chosen.
116 the LRU of those with the lowest load is chosen.
117 """
117 """
118 return loads.index(min(loads))
118 return loads.index(min(loads))
119
119
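A minimal usage sketch of the chooser functions above: each one maps an LRU-ordered list of engine loads to the index of the engine that should receive the next task. The import path and the example loads list are assumptions for illustration only.

    from IPython.parallel.controller.scheduler import (
        plainrandom, lru, twobin, leastload)

    loads = [2, 0, 1]        # hypothetical outstanding-task counts, oldest engine first
    lru(loads)               # -> 0: always the front of the line
    leastload(loads)         # -> 1: index of the minimum load
    twobin(loads)            # LRU of two random picks, some index in range(3)
    plainrandom(loads)       # uniform random index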
120 #---------------------------------------------------------------------
120 #---------------------------------------------------------------------
121 # Classes
121 # Classes
122 #---------------------------------------------------------------------
122 #---------------------------------------------------------------------
123
123
124
124
125 # store empty default dependency:
125 # store empty default dependency:
126 MET = Dependency([])
126 MET = Dependency([])
127
127
128
128
129 class Job(object):
129 class Job(object):
130 """Simple container for a job"""
130 """Simple container for a job"""
131 def __init__(self, msg_id, raw_msg, idents, msg, header, metadata,
131 def __init__(self, msg_id, raw_msg, idents, msg, header, metadata,
132 targets, after, follow, timeout):
132 targets, after, follow, timeout):
133 self.msg_id = msg_id
133 self.msg_id = msg_id
134 self.raw_msg = raw_msg
134 self.raw_msg = raw_msg
135 self.idents = idents
135 self.idents = idents
136 self.msg = msg
136 self.msg = msg
137 self.header = header
137 self.header = header
138 self.metadata = metadata
138 self.metadata = metadata
139 self.targets = targets
139 self.targets = targets
140 self.after = after
140 self.after = after
141 self.follow = follow
141 self.follow = follow
142 self.timeout = timeout
142 self.timeout = timeout
143
143
144 self.removed = False # used for lazy-delete from sorted queue
144 self.removed = False # used for lazy-delete from sorted queue
145 self.timestamp = time.time()
145 self.timestamp = time.time()
146 self.timeout_id = 0
146 self.timeout_id = 0
147 self.blacklist = set()
147 self.blacklist = set()
148
148
149 def __lt__(self, other):
149 def __lt__(self, other):
150 return self.timestamp < other.timestamp
150 return self.timestamp < other.timestamp
151
151
152 def __cmp__(self, other):
152 def __cmp__(self, other):
153 return cmp(self.timestamp, other.timestamp)
153 return cmp(self.timestamp, other.timestamp)
154
154
155 @property
155 @property
156 def dependents(self):
156 def dependents(self):
157 return self.follow.union(self.after)
157 return self.follow.union(self.after)
158
158
159
159
160 class TaskScheduler(SessionFactory):
160 class TaskScheduler(SessionFactory):
161 """Python TaskScheduler object.
161 """Python TaskScheduler object.
162
162
163 This is the simplest object that supports msg_id based
163 This is the simplest object that supports msg_id based
164 DAG dependencies. *Only* task msg_ids are checked, not
164 DAG dependencies. *Only* task msg_ids are checked, not
165 msg_ids of jobs submitted via the MUX queue.
165 msg_ids of jobs submitted via the MUX queue.
166
166
167 """
167 """
168
168
169 hwm = Integer(1, config=True,
169 hwm = Integer(1, config=True,
170 help="""specify the High Water Mark (HWM) for the downstream
170 help="""specify the High Water Mark (HWM) for the downstream
171 socket in the Task scheduler. This is the maximum number
171 socket in the Task scheduler. This is the maximum number
172 of allowed outstanding tasks on each engine.
172 of allowed outstanding tasks on each engine.
173
173
174 The default (1) means that only one task can be outstanding on each
174 The default (1) means that only one task can be outstanding on each
175 engine. Setting TaskScheduler.hwm=0 means there is no limit, and the
175 engine. Setting TaskScheduler.hwm=0 means there is no limit, and the
176 engines continue to be assigned tasks while they are working,
176 engines continue to be assigned tasks while they are working,
177 effectively hiding network latency behind computation, but can result
177 effectively hiding network latency behind computation, but can result
178 in an imbalance of work when submitting many heterogeneous tasks all at
178 in an imbalance of work when submitting many heterogeneous tasks all at
179 once. Any positive value greater than one is a compromise between the
179 once. Any positive value greater than one is a compromise between the
180 two.
180 two.
181
181
182 """
182 """
183 )
183 )
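In practice the HWM described above is set through the configuration system rather than in code. A hedged sketch of what that looks like in an ipcontroller_config.py (standard traitlets config idiom; the value shown is only an example):

    # ipcontroller_config.py (sketch)
    c = get_config()
    c.TaskScheduler.hwm = 0    # no per-engine limit on outstanding tasks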
184 scheme_name = Enum(('leastload', 'pure', 'lru', 'plainrandom', 'weighted', 'twobin'),
184 scheme_name = Enum(('leastload', 'pure', 'lru', 'plainrandom', 'weighted', 'twobin'),
185 'leastload', config=True, allow_none=False,
185 'leastload', config=True, allow_none=False,
186 help="""select the task scheduler scheme [default: Python LRU]
186 help="""select the task scheduler scheme [default: Python LRU]
187 Options are: 'pure', 'lru', 'plainrandom', 'weighted', 'twobin','leastload'"""
187 Options are: 'pure', 'lru', 'plainrandom', 'weighted', 'twobin','leastload'"""
188 )
188 )
189 def _scheme_name_changed(self, old, new):
189 def _scheme_name_changed(self, old, new):
190 self.log.debug("Using scheme %r"%new)
190 self.log.debug("Using scheme %r"%new)
191 self.scheme = globals()[new]
191 self.scheme = globals()[new]
192
192
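The scheme is normally selected by name the same way; assigning scheme_name fires the _scheme_name_changed handler above, which swaps in the matching chooser function. A sketch, continuing the config file above:

    # ipcontroller_config.py (sketch, continued)
    c.TaskScheduler.scheme_name = 'weighted'   # inverse-load weighted choice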
193 # input arguments:
193 # input arguments:
194 scheme = Instance(FunctionType) # function for determining the destination
194 scheme = Instance(FunctionType) # function for determining the destination
195 def _scheme_default(self):
195 def _scheme_default(self):
196 return leastload
196 return leastload
197 client_stream = Instance(zmqstream.ZMQStream) # client-facing stream
197 client_stream = Instance(zmqstream.ZMQStream) # client-facing stream
198 engine_stream = Instance(zmqstream.ZMQStream) # engine-facing stream
198 engine_stream = Instance(zmqstream.ZMQStream) # engine-facing stream
199 notifier_stream = Instance(zmqstream.ZMQStream) # hub-facing sub stream
199 notifier_stream = Instance(zmqstream.ZMQStream) # hub-facing sub stream
200 mon_stream = Instance(zmqstream.ZMQStream) # hub-facing pub stream
200 mon_stream = Instance(zmqstream.ZMQStream) # hub-facing pub stream
201 query_stream = Instance(zmqstream.ZMQStream) # hub-facing DEALER stream
201 query_stream = Instance(zmqstream.ZMQStream) # hub-facing DEALER stream
202
202
203 # internals:
203 # internals:
204 queue = Instance(deque) # sorted list of Jobs
204 queue = Instance(deque) # sorted list of Jobs
205 def _queue_default(self):
205 def _queue_default(self):
206 return deque()
206 return deque()
207 queue_map = Dict() # dict by msg_id of Jobs (for O(1) access to the Queue)
207 queue_map = Dict() # dict by msg_id of Jobs (for O(1) access to the Queue)
208 graph = Dict() # dict by msg_id of [ msg_ids that depend on key ]
208 graph = Dict() # dict by msg_id of [ msg_ids that depend on key ]
209 retries = Dict() # dict by msg_id of retries remaining (non-neg ints)
209 retries = Dict() # dict by msg_id of retries remaining (non-neg ints)
210 # waiting = List() # list of msg_ids ready to run, but haven't due to HWM
210 # waiting = List() # list of msg_ids ready to run, but haven't due to HWM
211 pending = Dict() # dict by engine_uuid of submitted tasks
211 pending = Dict() # dict by engine_uuid of submitted tasks
212 completed = Dict() # dict by engine_uuid of completed tasks
212 completed = Dict() # dict by engine_uuid of completed tasks
213 failed = Dict() # dict by engine_uuid of failed tasks
213 failed = Dict() # dict by engine_uuid of failed tasks
214 destinations = Dict() # dict by msg_id of engine_uuids where jobs ran (reverse of completed+failed)
214 destinations = Dict() # dict by msg_id of engine_uuids where jobs ran (reverse of completed+failed)
215 clients = Dict() # dict by msg_id for who submitted the task
215 clients = Dict() # dict by msg_id for who submitted the task
216 targets = List() # list of target IDENTs
216 targets = List() # list of target IDENTs
217 loads = List() # list of engine loads
217 loads = List() # list of engine loads
218 # full = Set() # set of IDENTs that have HWM outstanding tasks
218 # full = Set() # set of IDENTs that have HWM outstanding tasks
219 all_completed = Set() # set of all completed tasks
219 all_completed = Set() # set of all completed tasks
220 all_failed = Set() # set of all failed tasks
220 all_failed = Set() # set of all failed tasks
221 all_done = Set() # set of all finished tasks=union(completed,failed)
221 all_done = Set() # set of all finished tasks=union(completed,failed)
222 all_ids = Set() # set of all submitted task IDs
222 all_ids = Set() # set of all submitted task IDs
223
223
224 ident = CBytes() # ZMQ identity. This should just be self.session.session
224 ident = CBytes() # ZMQ identity. This should just be self.session.session
225 # but ensure Bytes
225 # but ensure Bytes
226 def _ident_default(self):
226 def _ident_default(self):
227 return self.session.bsession
227 return self.session.bsession
228
228
229 def start(self):
229 def start(self):
230 self.query_stream.on_recv(self.dispatch_query_reply)
230 self.query_stream.on_recv(self.dispatch_query_reply)
231 self.session.send(self.query_stream, "connection_request", {})
231 self.session.send(self.query_stream, "connection_request", {})
232
232
233 self.engine_stream.on_recv(self.dispatch_result, copy=False)
233 self.engine_stream.on_recv(self.dispatch_result, copy=False)
234 self.client_stream.on_recv(self.dispatch_submission, copy=False)
234 self.client_stream.on_recv(self.dispatch_submission, copy=False)
235
235
236 self._notification_handlers = dict(
236 self._notification_handlers = dict(
237 registration_notification = self._register_engine,
237 registration_notification = self._register_engine,
238 unregistration_notification = self._unregister_engine
238 unregistration_notification = self._unregister_engine
239 )
239 )
240 self.notifier_stream.on_recv(self.dispatch_notification)
240 self.notifier_stream.on_recv(self.dispatch_notification)
241 self.log.info("Scheduler started [%s]" % self.scheme_name)
241 self.log.info("Scheduler started [%s]" % self.scheme_name)
242
242
243 def resume_receiving(self):
243 def resume_receiving(self):
244 """Resume accepting jobs."""
244 """Resume accepting jobs."""
245 self.client_stream.on_recv(self.dispatch_submission, copy=False)
245 self.client_stream.on_recv(self.dispatch_submission, copy=False)
246
246
247 def stop_receiving(self):
247 def stop_receiving(self):
248 """Stop accepting jobs while there are no engines.
248 """Stop accepting jobs while there are no engines.
249 Leave them in the ZMQ queue."""
249 Leave them in the ZMQ queue."""
250 self.client_stream.on_recv(None)
250 self.client_stream.on_recv(None)
251
251
252 #-----------------------------------------------------------------------
252 #-----------------------------------------------------------------------
253 # [Un]Registration Handling
253 # [Un]Registration Handling
254 #-----------------------------------------------------------------------
254 #-----------------------------------------------------------------------
255
255
256
256
257 def dispatch_query_reply(self, msg):
257 def dispatch_query_reply(self, msg):
258 """handle reply to our initial connection request"""
258 """handle reply to our initial connection request"""
259 try:
259 try:
260 idents,msg = self.session.feed_identities(msg)
260 idents,msg = self.session.feed_identities(msg)
261 except ValueError:
261 except ValueError:
262 self.log.warn("task::Invalid Message: %r",msg)
262 self.log.warn("task::Invalid Message: %r",msg)
263 return
263 return
264 try:
264 try:
265 msg = self.session.unserialize(msg)
265 msg = self.session.unserialize(msg)
266 except ValueError:
266 except ValueError:
267 self.log.warn("task::Unauthorized message from: %r"%idents)
267 self.log.warn("task::Unauthorized message from: %r"%idents)
268 return
268 return
269
269
270 content = msg['content']
270 content = msg['content']
271 for uuid in content.get('engines', {}).values():
271 for uuid in content.get('engines', {}).values():
272 self._register_engine(cast_bytes(uuid))
272 self._register_engine(cast_bytes(uuid))
273
273
274
274
275 @util.log_errors
275 @util.log_errors
276 def dispatch_notification(self, msg):
276 def dispatch_notification(self, msg):
277 """dispatch register/unregister events."""
277 """dispatch register/unregister events."""
278 try:
278 try:
279 idents,msg = self.session.feed_identities(msg)
279 idents,msg = self.session.feed_identities(msg)
280 except ValueError:
280 except ValueError:
281 self.log.warn("task::Invalid Message: %r",msg)
281 self.log.warn("task::Invalid Message: %r",msg)
282 return
282 return
283 try:
283 try:
284 msg = self.session.unserialize(msg)
284 msg = self.session.unserialize(msg)
285 except ValueError:
285 except ValueError:
286 self.log.warn("task::Unauthorized message from: %r"%idents)
286 self.log.warn("task::Unauthorized message from: %r"%idents)
287 return
287 return
288
288
289 msg_type = msg['header']['msg_type']
289 msg_type = msg['header']['msg_type']
290
290
291 handler = self._notification_handlers.get(msg_type, None)
291 handler = self._notification_handlers.get(msg_type, None)
292 if handler is None:
292 if handler is None:
293 self.log.error("Unhandled message type: %r"%msg_type)
293 self.log.error("Unhandled message type: %r"%msg_type)
294 else:
294 else:
295 try:
295 try:
296 handler(cast_bytes(msg['content']['uuid']))
296 handler(cast_bytes(msg['content']['uuid']))
297 except Exception:
297 except Exception:
298 self.log.error("task::Invalid notification msg: %r", msg, exc_info=True)
298 self.log.error("task::Invalid notification msg: %r", msg, exc_info=True)
299
299
300 def _register_engine(self, uid):
300 def _register_engine(self, uid):
301 """New engine with ident `uid` became available."""
301 """New engine with ident `uid` became available."""
302 # head of the line:
302 # head of the line:
303 self.targets.insert(0,uid)
303 self.targets.insert(0,uid)
304 self.loads.insert(0,0)
304 self.loads.insert(0,0)
305
305
306 # initialize sets
306 # initialize sets
307 self.completed[uid] = set()
307 self.completed[uid] = set()
308 self.failed[uid] = set()
308 self.failed[uid] = set()
309 self.pending[uid] = {}
309 self.pending[uid] = {}
310
310
311 # rescan the graph:
311 # rescan the graph:
312 self.update_graph(None)
312 self.update_graph(None)
313
313
314 def _unregister_engine(self, uid):
314 def _unregister_engine(self, uid):
315 """Existing engine with ident `uid` became unavailable."""
315 """Existing engine with ident `uid` became unavailable."""
316 if len(self.targets) == 1:
316 if len(self.targets) == 1:
317 # this was our only engine
317 # this was our only engine
318 pass
318 pass
319
319
320 # handle any potentially finished tasks:
320 # handle any potentially finished tasks:
321 self.engine_stream.flush()
321 self.engine_stream.flush()
322
322
323 # don't pop destinations, because they might be used later
323 # don't pop destinations, because they might be used later
324 # map(self.destinations.pop, self.completed.pop(uid))
324 # map(self.destinations.pop, self.completed.pop(uid))
325 # map(self.destinations.pop, self.failed.pop(uid))
325 # map(self.destinations.pop, self.failed.pop(uid))
326
326
327 # prevent this engine from receiving work
327 # prevent this engine from receiving work
328 idx = self.targets.index(uid)
328 idx = self.targets.index(uid)
329 self.targets.pop(idx)
329 self.targets.pop(idx)
330 self.loads.pop(idx)
330 self.loads.pop(idx)
331
331
332 # wait 5 seconds before cleaning up pending jobs, since the results might
332 # wait 5 seconds before cleaning up pending jobs, since the results might
333 # still be incoming
333 # still be incoming
334 if self.pending[uid]:
334 if self.pending[uid]:
335 dc = ioloop.DelayedCallback(lambda : self.handle_stranded_tasks(uid), 5000, self.loop)
335 dc = ioloop.DelayedCallback(lambda : self.handle_stranded_tasks(uid), 5000, self.loop)
336 dc.start()
336 dc.start()
337 else:
337 else:
338 self.completed.pop(uid)
338 self.completed.pop(uid)
339 self.failed.pop(uid)
339 self.failed.pop(uid)
340
340
341
341
342 def handle_stranded_tasks(self, engine):
342 def handle_stranded_tasks(self, engine):
343 """Deal with jobs resident in an engine that died."""
343 """Deal with jobs resident in an engine that died."""
344 lost = self.pending[engine]
344 lost = self.pending[engine]
345 for msg_id in lost.keys():
345 for msg_id in lost.keys():
346 if msg_id not in self.pending[engine]:
346 if msg_id not in self.pending[engine]:
347 # prevent double-handling of messages
347 # prevent double-handling of messages
348 continue
348 continue
349
349
350 raw_msg = lost[msg_id].raw_msg
350 raw_msg = lost[msg_id].raw_msg
351 idents,msg = self.session.feed_identities(raw_msg, copy=False)
351 idents,msg = self.session.feed_identities(raw_msg, copy=False)
352 parent = self.session.unpack(msg[1].bytes)
352 parent = self.session.unpack(msg[1].bytes)
353 idents = [engine, idents[0]]
353 idents = [engine, idents[0]]
354
354
355 # build fake error reply
355 # build fake error reply
356 try:
356 try:
357 raise error.EngineError("Engine %r died while running task %r"%(engine, msg_id))
357 raise error.EngineError("Engine %r died while running task %r"%(engine, msg_id))
358 except:
358 except:
359 content = error.wrap_exception()
359 content = error.wrap_exception()
360 # build fake metadata
360 # build fake metadata
361 md = dict(
361 md = dict(
362 status=u'error',
362 status=u'error',
363 engine=engine.decode('ascii'),
363 engine=engine.decode('ascii'),
364 date=datetime.now(),
364 date=datetime.now(),
365 )
365 )
366 msg = self.session.msg('apply_reply', content, parent=parent, metadata=md)
366 msg = self.session.msg('apply_reply', content, parent=parent, metadata=md)
367 raw_reply = map(zmq.Message, self.session.serialize(msg, ident=idents))
367 raw_reply = map(zmq.Message, self.session.serialize(msg, ident=idents))
368 # and dispatch it
368 # and dispatch it
369 self.dispatch_result(raw_reply)
369 self.dispatch_result(raw_reply)
370
370
371 # finally scrub completed/failed lists
371 # finally scrub completed/failed lists
372 self.completed.pop(engine)
372 self.completed.pop(engine)
373 self.failed.pop(engine)
373 self.failed.pop(engine)
374
374
375
375
376 #-----------------------------------------------------------------------
376 #-----------------------------------------------------------------------
377 # Job Submission
377 # Job Submission
378 #-----------------------------------------------------------------------
378 #-----------------------------------------------------------------------
379
379
380
380
381 @util.log_errors
381 @util.log_errors
382 def dispatch_submission(self, raw_msg):
382 def dispatch_submission(self, raw_msg):
383 """Dispatch job submission to appropriate handlers."""
383 """Dispatch job submission to appropriate handlers."""
384 # ensure targets up to date:
384 # ensure targets up to date:
385 self.notifier_stream.flush()
385 self.notifier_stream.flush()
386 try:
386 try:
387 idents, msg = self.session.feed_identities(raw_msg, copy=False)
387 idents, msg = self.session.feed_identities(raw_msg, copy=False)
388 msg = self.session.unserialize(msg, content=False, copy=False)
388 msg = self.session.unserialize(msg, content=False, copy=False)
389 except Exception:
389 except Exception:
390 self.log.error("task::Invaid task msg: %r"%raw_msg, exc_info=True)
390 self.log.error("task::Invaid task msg: %r"%raw_msg, exc_info=True)
391 return
391 return
392
392
393
393
394 # send to monitor
394 # send to monitor
395 self.mon_stream.send_multipart([b'intask']+raw_msg, copy=False)
395 self.mon_stream.send_multipart([b'intask']+raw_msg, copy=False)
396
396
397 header = msg['header']
397 header = msg['header']
398 md = msg['metadata']
398 md = msg['metadata']
399 msg_id = header['msg_id']
399 msg_id = header['msg_id']
400 self.all_ids.add(msg_id)
400 self.all_ids.add(msg_id)
401
401
402 # get targets as a set of bytes objects
402 # get targets as a set of bytes objects
403 # from a list of unicode objects
403 # from a list of unicode objects
404 targets = md.get('targets', [])
404 targets = md.get('targets', [])
405 targets = map(cast_bytes, targets)
405 targets = map(cast_bytes, targets)
406 targets = set(targets)
406 targets = set(targets)
407
407
408 retries = md.get('retries', 0)
408 retries = md.get('retries', 0)
409 self.retries[msg_id] = retries
409 self.retries[msg_id] = retries
410
410
411 # time dependencies
411 # time dependencies
412 after = md.get('after', None)
412 after = md.get('after', None)
413 if after:
413 if after:
414 after = Dependency(after)
414 after = Dependency(after)
415 if after.all:
415 if after.all:
416 if after.success:
416 if after.success:
417 after = Dependency(after.difference(self.all_completed),
417 after = Dependency(after.difference(self.all_completed),
418 success=after.success,
418 success=after.success,
419 failure=after.failure,
419 failure=after.failure,
420 all=after.all,
420 all=after.all,
421 )
421 )
422 if after.failure:
422 if after.failure:
423 after = Dependency(after.difference(self.all_failed),
423 after = Dependency(after.difference(self.all_failed),
424 success=after.success,
424 success=after.success,
425 failure=after.failure,
425 failure=after.failure,
426 all=after.all,
426 all=after.all,
427 )
427 )
428 if after.check(self.all_completed, self.all_failed):
428 if after.check(self.all_completed, self.all_failed):
429 # recast as empty set, if `after` already met,
429 # recast as empty set, if `after` already met,
430 # to prevent unnecessary set comparisons
430 # to prevent unnecessary set comparisons
431 after = MET
431 after = MET
432 else:
432 else:
433 after = MET
433 after = MET
434
434
435 # location dependencies
435 # location dependencies
436 follow = Dependency(md.get('follow', []))
436 follow = Dependency(md.get('follow', []))
437
437
438 timeout = md.get('timeout', None)
438 timeout = md.get('timeout', None)
439 if timeout:
439 if timeout:
440 timeout = float(timeout)
440 timeout = float(timeout)
441
441
442 job = Job(msg_id=msg_id, raw_msg=raw_msg, idents=idents, msg=msg,
442 job = Job(msg_id=msg_id, raw_msg=raw_msg, idents=idents, msg=msg,
443 header=header, targets=targets, after=after, follow=follow,
443 header=header, targets=targets, after=after, follow=follow,
444 timeout=timeout, metadata=md,
444 timeout=timeout, metadata=md,
445 )
445 )
446 # validate and reduce dependencies:
446 # validate and reduce dependencies:
447 for dep in after,follow:
447 for dep in after,follow:
448 if not dep: # empty dependency
448 if not dep: # empty dependency
449 continue
449 continue
450 # check valid:
450 # check valid:
451 if msg_id in dep or dep.difference(self.all_ids):
451 if msg_id in dep or dep.difference(self.all_ids):
452 self.queue_map[msg_id] = job
452 self.queue_map[msg_id] = job
453 return self.fail_unreachable(msg_id, error.InvalidDependency)
453 return self.fail_unreachable(msg_id, error.InvalidDependency)
454 # check if unreachable:
454 # check if unreachable:
455 if dep.unreachable(self.all_completed, self.all_failed):
455 if dep.unreachable(self.all_completed, self.all_failed):
456 self.queue_map[msg_id] = job
456 self.queue_map[msg_id] = job
457 return self.fail_unreachable(msg_id)
457 return self.fail_unreachable(msg_id)
458
458
459 if after.check(self.all_completed, self.all_failed):
459 if after.check(self.all_completed, self.all_failed):
460 # time deps already met, try to run
460 # time deps already met, try to run
461 if not self.maybe_run(job):
461 if not self.maybe_run(job):
462 # can't run yet
462 # can't run yet
463 if msg_id not in self.all_failed:
463 if msg_id not in self.all_failed:
464 # could have failed as unreachable
464 # could have failed as unreachable
465 self.save_unmet(job)
465 self.save_unmet(job)
466 else:
466 else:
467 self.save_unmet(job)
467 self.save_unmet(job)
468
468
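The after/follow/timeout/retries metadata unpacked in dispatch_submission above is attached on the client side when tasks go through a load-balanced view. A rough sketch, assuming the LoadBalancedView API (Client, load_balanced_view, set_flags, apply_async) from IPython.parallel:

    from IPython.parallel import Client

    def double(x):
        return x * 2

    rc = Client()
    view = rc.load_balanced_view()
    ar1 = view.apply_async(double, 5)
    # run only after ar1 succeeds, on the engine that ran ar1, 10s timeout, one retry
    view.set_flags(after=[ar1], follow=[ar1], timeout=10, retries=1)
    ar2 = view.apply_async(double, 10)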
469 def job_timeout(self, job, timeout_id):
469 def job_timeout(self, job, timeout_id):
470 """callback for a job's timeout.
470 """callback for a job's timeout.
471
471
472 The job may or may not have been run at this point.
472 The job may or may not have been run at this point.
473 """
473 """
474 if job.timeout_id != timeout_id:
474 if job.timeout_id != timeout_id:
475 # not the most recent call
475 # not the most recent call
476 return
476 return
477 now = time.time()
477 now = time.time()
478 if job.timeout >= (now + 1):
478 if job.timeout >= (now + 1):
479 self.log.warn("task %s timeout fired prematurely: %s > %s",
479 self.log.warn("task %s timeout fired prematurely: %s > %s",
480 job.msg_id, job.timeout, now
480 job.msg_id, job.timeout, now
481 )
481 )
482 if job.msg_id in self.queue_map:
482 if job.msg_id in self.queue_map:
483 # still waiting, but ran out of time
483 # still waiting, but ran out of time
484 self.log.info("task %r timed out", job.msg_id)
484 self.log.info("task %r timed out", job.msg_id)
485 self.fail_unreachable(job.msg_id, error.TaskTimeout)
485 self.fail_unreachable(job.msg_id, error.TaskTimeout)
486
486
487 def fail_unreachable(self, msg_id, why=error.ImpossibleDependency):
487 def fail_unreachable(self, msg_id, why=error.ImpossibleDependency):
488 """a task has become unreachable, send a reply with an ImpossibleDependency
488 """a task has become unreachable, send a reply with an ImpossibleDependency
489 error."""
489 error."""
490 if msg_id not in self.queue_map:
490 if msg_id not in self.queue_map:
491 self.log.error("task %r already failed!", msg_id)
491 self.log.error("task %r already failed!", msg_id)
492 return
492 return
493 job = self.queue_map.pop(msg_id)
493 job = self.queue_map.pop(msg_id)
494 # lazy-delete from the queue
494 # lazy-delete from the queue
495 job.removed = True
495 job.removed = True
496 for mid in job.dependents:
496 for mid in job.dependents:
497 if mid in self.graph:
497 if mid in self.graph:
498 self.graph[mid].remove(msg_id)
498 self.graph[mid].remove(msg_id)
499
499
500 try:
500 try:
501 raise why()
501 raise why()
502 except:
502 except:
503 content = error.wrap_exception()
503 content = error.wrap_exception()
504 self.log.debug("task %r failing as unreachable with: %s", msg_id, content['ename'])
504 self.log.debug("task %r failing as unreachable with: %s", msg_id, content['ename'])
505
505
506 self.all_done.add(msg_id)
506 self.all_done.add(msg_id)
507 self.all_failed.add(msg_id)
507 self.all_failed.add(msg_id)
508
508
509 msg = self.session.send(self.client_stream, 'apply_reply', content,
509 msg = self.session.send(self.client_stream, 'apply_reply', content,
510 parent=job.header, ident=job.idents)
510 parent=job.header, ident=job.idents)
511 self.session.send(self.mon_stream, msg, ident=[b'outtask']+job.idents)
511 self.session.send(self.mon_stream, msg, ident=[b'outtask']+job.idents)
512
512
513 self.update_graph(msg_id, success=False)
513 self.update_graph(msg_id, success=False)
514
514
515 def available_engines(self):
515 def available_engines(self):
516 """return a list of available engine indices based on HWM"""
516 """return a list of available engine indices based on HWM"""
517 if not self.hwm:
517 if not self.hwm:
518 return range(len(self.targets))
518 return range(len(self.targets))
519 available = []
519 available = []
520 for idx in range(len(self.targets)):
520 for idx in range(len(self.targets)):
521 if self.loads[idx] < self.hwm:
521 if self.loads[idx] < self.hwm:
522 available.append(idx)
522 available.append(idx)
523 return available
523 return available
524
524
525 def maybe_run(self, job):
525 def maybe_run(self, job):
526 """check location dependencies, and run if they are met."""
526 """check location dependencies, and run if they are met."""
527 msg_id = job.msg_id
527 msg_id = job.msg_id
528 self.log.debug("Attempting to assign task %s", msg_id)
528 self.log.debug("Attempting to assign task %s", msg_id)
529 available = self.available_engines()
529 available = self.available_engines()
530 if not available:
530 if not available:
531 # no engines, definitely can't run
531 # no engines, definitely can't run
532 return False
532 return False
533
533
534 if job.follow or job.targets or job.blacklist or self.hwm:
534 if job.follow or job.targets or job.blacklist or self.hwm:
535 # we need a can_run filter
535 # we need a can_run filter
536 def can_run(idx):
536 def can_run(idx):
537 # check hwm
537 # check hwm
538 if self.hwm and self.loads[idx] == self.hwm:
538 if self.hwm and self.loads[idx] == self.hwm:
539 return False
539 return False
540 target = self.targets[idx]
540 target = self.targets[idx]
541 # check blacklist
541 # check blacklist
542 if target in job.blacklist:
542 if target in job.blacklist:
543 return False
543 return False
544 # check targets
544 # check targets
545 if job.targets and target not in job.targets:
545 if job.targets and target not in job.targets:
546 return False
546 return False
547 # check follow
547 # check follow
548 return job.follow.check(self.completed[target], self.failed[target])
548 return job.follow.check(self.completed[target], self.failed[target])
549
549
550 indices = filter(can_run, available)
550 indices = filter(can_run, available)
551
551
552 if not indices:
552 if not indices:
553 # couldn't run
553 # couldn't run
554 if job.follow.all:
554 if job.follow.all:
555 # check follow for impossibility
555 # check follow for impossibility
556 dests = set()
556 dests = set()
557 relevant = set()
557 relevant = set()
558 if job.follow.success:
558 if job.follow.success:
559 relevant = self.all_completed
559 relevant = self.all_completed
560 if job.follow.failure:
560 if job.follow.failure:
561 relevant = relevant.union(self.all_failed)
561 relevant = relevant.union(self.all_failed)
562 for m in job.follow.intersection(relevant):
562 for m in job.follow.intersection(relevant):
563 dests.add(self.destinations[m])
563 dests.add(self.destinations[m])
564 if len(dests) > 1:
564 if len(dests) > 1:
565 self.queue_map[msg_id] = job
565 self.queue_map[msg_id] = job
566 self.fail_unreachable(msg_id)
566 self.fail_unreachable(msg_id)
567 return False
567 return False
568 if job.targets:
568 if job.targets:
569 # check blacklist+targets for impossibility
569 # check blacklist+targets for impossibility
570 job.targets.difference_update(job.blacklist)
570 job.targets.difference_update(job.blacklist)
571 if not job.targets or not job.targets.intersection(self.targets):
571 if not job.targets or not job.targets.intersection(self.targets):
572 self.queue_map[msg_id] = job
572 self.queue_map[msg_id] = job
573 self.fail_unreachable(msg_id)
573 self.fail_unreachable(msg_id)
574 return False
574 return False
575 return False
575 return False
576 else:
576 else:
577 indices = None
577 indices = None
578
578
579 self.submit_task(job, indices)
579 self.submit_task(job, indices)
580 return True
580 return True
581
581
582 def save_unmet(self, job):
582 def save_unmet(self, job):
583 """Save a message for later submission when its dependencies are met."""
583 """Save a message for later submission when its dependencies are met."""
584 msg_id = job.msg_id
584 msg_id = job.msg_id
585 self.log.debug("Adding task %s to the queue", msg_id)
585 self.log.debug("Adding task %s to the queue", msg_id)
586 self.queue_map[msg_id] = job
586 self.queue_map[msg_id] = job
587 self.queue.append(job)
587 self.queue.append(job)
588 # track the ids in follow or after, but not those already finished
588 # track the ids in follow or after, but not those already finished
589 for dep_id in job.after.union(job.follow).difference(self.all_done):
589 for dep_id in job.after.union(job.follow).difference(self.all_done):
590 if dep_id not in self.graph:
590 if dep_id not in self.graph:
591 self.graph[dep_id] = set()
591 self.graph[dep_id] = set()
592 self.graph[dep_id].add(msg_id)
592 self.graph[dep_id].add(msg_id)
593
593
594 # schedule timeout callback
594 # schedule timeout callback
595 if job.timeout:
595 if job.timeout:
596 timeout_id = job.timeout_id = job.timeout_id + 1
596 timeout_id = job.timeout_id = job.timeout_id + 1
597 self.loop.add_timeout(time.time() + job.timeout,
597 self.loop.add_timeout(time.time() + job.timeout,
598 lambda : self.job_timeout(job, timeout_id)
598 lambda : self.job_timeout(job, timeout_id)
599 )
599 )
600
600
601
601
602 def submit_task(self, job, indices=None):
602 def submit_task(self, job, indices=None):
603 """Submit a task to any of a subset of our targets."""
603 """Submit a task to any of a subset of our targets."""
604 if indices:
604 if indices:
605 loads = [self.loads[i] for i in indices]
605 loads = [self.loads[i] for i in indices]
606 else:
606 else:
607 loads = self.loads
607 loads = self.loads
608 idx = self.scheme(loads)
608 idx = self.scheme(loads)
609 if indices:
609 if indices:
610 idx = indices[idx]
610 idx = indices[idx]
611 target = self.targets[idx]
611 target = self.targets[idx]
612 # print (target, map(str, msg[:3]))
612 # print (target, map(str, msg[:3]))
613 # send job to the engine
613 # send job to the engine
614 self.engine_stream.send(target, flags=zmq.SNDMORE, copy=False)
614 self.engine_stream.send(target, flags=zmq.SNDMORE, copy=False)
615 self.engine_stream.send_multipart(job.raw_msg, copy=False)
615 self.engine_stream.send_multipart(job.raw_msg, copy=False)
616 # update load
616 # update load
617 self.add_job(idx)
617 self.add_job(idx)
618 self.pending[target][job.msg_id] = job
618 self.pending[target][job.msg_id] = job
619 # notify Hub
619 # notify Hub
620 content = dict(msg_id=job.msg_id, engine_id=target.decode('ascii'))
620 content = dict(msg_id=job.msg_id, engine_id=target.decode('ascii'))
621 self.session.send(self.mon_stream, 'task_destination', content=content,
621 self.session.send(self.mon_stream, 'task_destination', content=content,
622 ident=[b'tracktask',self.ident])
622 ident=[b'tracktask',self.ident])
623
623
624
624
625 #-----------------------------------------------------------------------
625 #-----------------------------------------------------------------------
626 # Result Handling
626 # Result Handling
627 #-----------------------------------------------------------------------
627 #-----------------------------------------------------------------------
628
628
629
629
630 @util.log_errors
630 @util.log_errors
631 def dispatch_result(self, raw_msg):
631 def dispatch_result(self, raw_msg):
632 """dispatch method for result replies"""
632 """dispatch method for result replies"""
633 try:
633 try:
634 idents,msg = self.session.feed_identities(raw_msg, copy=False)
634 idents,msg = self.session.feed_identities(raw_msg, copy=False)
635 msg = self.session.unserialize(msg, content=False, copy=False)
635 msg = self.session.unserialize(msg, content=False, copy=False)
636 engine = idents[0]
636 engine = idents[0]
637 try:
637 try:
638 idx = self.targets.index(engine)
638 idx = self.targets.index(engine)
639 except ValueError:
639 except ValueError:
640 pass # skip load-update for dead engines
640 pass # skip load-update for dead engines
641 else:
641 else:
642 self.finish_job(idx)
642 self.finish_job(idx)
643 except Exception:
643 except Exception:
644 self.log.error("task::Invalid result: %r", raw_msg, exc_info=True)
644 self.log.error("task::Invalid result: %r", raw_msg, exc_info=True)
645 return
645 return
646
646
647 md = msg['metadata']
647 md = msg['metadata']
648 parent = msg['parent_header']
648 parent = msg['parent_header']
649 if md.get('dependencies_met', True):
649 if md.get('dependencies_met', True):
650 success = (md['status'] == 'ok')
650 success = (md['status'] == 'ok')
651 msg_id = parent['msg_id']
651 msg_id = parent['msg_id']
652 retries = self.retries[msg_id]
652 retries = self.retries[msg_id]
653 if not success and retries > 0:
653 if not success and retries > 0:
654 # failed
654 # failed
655 self.retries[msg_id] = retries - 1
655 self.retries[msg_id] = retries - 1
656 self.handle_unmet_dependency(idents, parent)
656 self.handle_unmet_dependency(idents, parent)
657 else:
657 else:
658 del self.retries[msg_id]
658 del self.retries[msg_id]
659 # relay to client and update graph
659 # relay to client and update graph
660 self.handle_result(idents, parent, raw_msg, success)
660 self.handle_result(idents, parent, raw_msg, success)
661 # send to Hub monitor
661 # send to Hub monitor
662 self.mon_stream.send_multipart([b'outtask']+raw_msg, copy=False)
662 self.mon_stream.send_multipart([b'outtask']+raw_msg, copy=False)
663 else:
663 else:
664 self.handle_unmet_dependency(idents, parent)
664 self.handle_unmet_dependency(idents, parent)
665
665
666 def handle_result(self, idents, parent, raw_msg, success=True):
666 def handle_result(self, idents, parent, raw_msg, success=True):
667 """handle a real task result, either success or failure"""
667 """handle a real task result, either success or failure"""
668 # first, relay result to client
668 # first, relay result to client
669 engine = idents[0]
669 engine = idents[0]
670 client = idents[1]
670 client = idents[1]
671 # swap_ids for ROUTER-ROUTER mirror
671 # swap_ids for ROUTER-ROUTER mirror
672 raw_msg[:2] = [client,engine]
672 raw_msg[:2] = [client,engine]
673 # print (map(str, raw_msg[:4]))
673 # print (map(str, raw_msg[:4]))
674 self.client_stream.send_multipart(raw_msg, copy=False)
674 self.client_stream.send_multipart(raw_msg, copy=False)
675 # now, update our data structures
675 # now, update our data structures
676 msg_id = parent['msg_id']
676 msg_id = parent['msg_id']
677 self.pending[engine].pop(msg_id)
677 self.pending[engine].pop(msg_id)
678 if success:
678 if success:
679 self.completed[engine].add(msg_id)
679 self.completed[engine].add(msg_id)
680 self.all_completed.add(msg_id)
680 self.all_completed.add(msg_id)
681 else:
681 else:
682 self.failed[engine].add(msg_id)
682 self.failed[engine].add(msg_id)
683 self.all_failed.add(msg_id)
683 self.all_failed.add(msg_id)
684 self.all_done.add(msg_id)
684 self.all_done.add(msg_id)
685 self.destinations[msg_id] = engine
685 self.destinations[msg_id] = engine
686
686
687 self.update_graph(msg_id, success)
687 self.update_graph(msg_id, success)
688
688
689 def handle_unmet_dependency(self, idents, parent):
689 def handle_unmet_dependency(self, idents, parent):
690 """handle an unmet dependency"""
690 """handle an unmet dependency"""
691 engine = idents[0]
691 engine = idents[0]
692 msg_id = parent['msg_id']
692 msg_id = parent['msg_id']
693
693
694 job = self.pending[engine].pop(msg_id)
694 job = self.pending[engine].pop(msg_id)
695 job.blacklist.add(engine)
695 job.blacklist.add(engine)
696
696
697 if job.blacklist == job.targets:
697 if job.blacklist == job.targets:
698 self.queue_map[msg_id] = job
698 self.queue_map[msg_id] = job
699 self.fail_unreachable(msg_id)
699 self.fail_unreachable(msg_id)
700 elif not self.maybe_run(job):
700 elif not self.maybe_run(job):
701 # resubmit failed
701 # resubmit failed
702 if msg_id not in self.all_failed:
702 if msg_id not in self.all_failed:
703 # put it back in our dependency tree
703 # put it back in our dependency tree
704 self.save_unmet(job)
704 self.save_unmet(job)
705
705
706 if self.hwm:
706 if self.hwm:
707 try:
707 try:
708 idx = self.targets.index(engine)
708 idx = self.targets.index(engine)
709 except ValueError:
709 except ValueError:
710 pass # skip load-update for dead engines
710 pass # skip load-update for dead engines
711 else:
711 else:
712 if self.loads[idx] == self.hwm-1:
712 if self.loads[idx] == self.hwm-1:
713 self.update_graph(None)
713 self.update_graph(None)
714
714
715 def update_graph(self, dep_id=None, success=True):
715 def update_graph(self, dep_id=None, success=True):
716 """dep_id just finished. Update our dependency
716 """dep_id just finished. Update our dependency
717 graph and submit any jobs that just became runnable.
717 graph and submit any jobs that just became runnable.
718
718
719 Called with dep_id=None to update entire graph for hwm, but without finishing a task.
719 Called with dep_id=None to update entire graph for hwm, but without finishing a task.
720 """
720 """
721 # print ("\n\n***********")
721 # print ("\n\n***********")
722 # pprint (dep_id)
722 # pprint (dep_id)
723 # pprint (self.graph)
723 # pprint (self.graph)
724 # pprint (self.queue_map)
724 # pprint (self.queue_map)
725 # pprint (self.all_completed)
725 # pprint (self.all_completed)
726 # pprint (self.all_failed)
726 # pprint (self.all_failed)
727 # print ("\n\n***********\n\n")
727 # print ("\n\n***********\n\n")
728 # update any jobs that depended on the dependency
728 # update any jobs that depended on the dependency
729 msg_ids = self.graph.pop(dep_id, [])
729 msg_ids = self.graph.pop(dep_id, [])
730
730
731 # recheck *all* jobs if
731 # recheck *all* jobs if
732 # a) we have HWM and an engine just stopped being full
732 # a) we have HWM and an engine just stopped being full
733 # or b) dep_id was given as None
733 # or b) dep_id was given as None
734
734
735 if dep_id is None or self.hwm and any( [ load==self.hwm-1 for load in self.loads ]):
735 if dep_id is None or self.hwm and any( [ load==self.hwm-1 for load in self.loads ]):
736 jobs = self.queue
736 jobs = self.queue
737 using_queue = True
737 using_queue = True
738 else:
738 else:
739 using_queue = False
739 using_queue = False
740 jobs = deque(sorted( self.queue_map[msg_id] for msg_id in msg_ids ))
740 jobs = deque(sorted( self.queue_map[msg_id] for msg_id in msg_ids ))
741
741
742 to_restore = []
742 to_restore = []
743 while jobs:
743 while jobs:
744 job = jobs.popleft()
744 job = jobs.popleft()
745 if job.removed:
745 if job.removed:
746 continue
746 continue
747 msg_id = job.msg_id
747 msg_id = job.msg_id
748
748
749 put_it_back = True
749 put_it_back = True
750
750
751 if job.after.unreachable(self.all_completed, self.all_failed)\
751 if job.after.unreachable(self.all_completed, self.all_failed)\
752 or job.follow.unreachable(self.all_completed, self.all_failed):
752 or job.follow.unreachable(self.all_completed, self.all_failed):
753 self.fail_unreachable(msg_id)
753 self.fail_unreachable(msg_id)
754 put_it_back = False
754 put_it_back = False
755
755
756 elif job.after.check(self.all_completed, self.all_failed): # time deps met, maybe run
756 elif job.after.check(self.all_completed, self.all_failed): # time deps met, maybe run
757 if self.maybe_run(job):
757 if self.maybe_run(job):
758 put_it_back = False
758 put_it_back = False
759 self.queue_map.pop(msg_id)
759 self.queue_map.pop(msg_id)
760 for mid in job.dependents:
760 for mid in job.dependents:
761 if mid in self.graph:
761 if mid in self.graph:
762 self.graph[mid].remove(msg_id)
762 self.graph[mid].remove(msg_id)
763
763
764 # abort the loop if we just filled up all of our engines.
764 # abort the loop if we just filled up all of our engines.
765 # avoids an O(N) operation when the queue is full,
765 # avoids an O(N) operation when the queue is full,
766 # where graph update is triggered as soon as an engine becomes
766 # where graph update is triggered as soon as an engine becomes
767 # non-full, and all tasks after the first are checked,
767 # non-full, and all tasks after the first are checked,
768 # even though they can't run.
768 # even though they can't run.
769 if not self.available_engines():
769 if not self.available_engines():
770 break
770 break
771
771
772 if using_queue and put_it_back:
772 if using_queue and put_it_back:
773 # popped a job from the queue but it neither ran nor failed,
773 # popped a job from the queue but it neither ran nor failed,
774 # so we need to put it back when we are done
774 # so we need to put it back when we are done
775 # make sure to_restore preserves the same ordering
775 # make sure to_restore preserves the same ordering
776 to_restore.append(job)
776 to_restore.append(job)
777
777
778 # put back any tasks we popped but didn't run
778 # put back any tasks we popped but didn't run
779 if using_queue:
779 if using_queue:
780 self.queue.extendleft(to_restore)
780 self.queue.extendleft(to_restore)
781
781
782 #----------------------------------------------------------------------
782 #----------------------------------------------------------------------
783 # methods to be overridden by subclasses
783 # methods to be overridden by subclasses
784 #----------------------------------------------------------------------
784 #----------------------------------------------------------------------
785
785
786 def add_job(self, idx):
786 def add_job(self, idx):
787 """Called after self.targets[idx] just got the job with header.
787 """Called after self.targets[idx] just got the job with header.
788 Override in subclasses. The default ordering is simple LRU.
788 Override in subclasses. The default ordering is simple LRU.
789 The default loads are the number of outstanding jobs."""
789 The default loads are the number of outstanding jobs."""
790 self.loads[idx] += 1
790 self.loads[idx] += 1
791 for lis in (self.targets, self.loads):
791 for lis in (self.targets, self.loads):
792 lis.append(lis.pop(idx))
792 lis.append(lis.pop(idx))
793
793
794
794
795 def finish_job(self, idx):
795 def finish_job(self, idx):
796 """Called after self.targets[idx] just finished a job.
796 """Called after self.targets[idx] just finished a job.
797 Override in subclasses."""
797 Override in subclasses."""
798 self.loads[idx] -= 1
798 self.loads[idx] -= 1
799
799
800
800
801
801
802 def launch_scheduler(in_addr, out_addr, mon_addr, not_addr, reg_addr, config=None,
802 def launch_scheduler(in_addr, out_addr, mon_addr, not_addr, reg_addr, config=None,
803 logname='root', log_url=None, loglevel=logging.DEBUG,
803 logname='root', log_url=None, loglevel=logging.DEBUG,
804 identity=b'task', in_thread=False):
804 identity=b'task', in_thread=False):
805
805
806 ZMQStream = zmqstream.ZMQStream
806 ZMQStream = zmqstream.ZMQStream
807
807
808 if config:
808 if config:
809 # unwrap dict back into Config
809 # unwrap dict back into Config
810 config = Config(config)
810 config = Config(config)
811
811
812 if in_thread:
812 if in_thread:
813 # use instance() to get the same Context/Loop as our parent
813 # use instance() to get the same Context/Loop as our parent
814 ctx = zmq.Context.instance()
814 ctx = zmq.Context.instance()
815 loop = ioloop.IOLoop.instance()
815 loop = ioloop.IOLoop.instance()
816 else:
816 else:
817 # in a process, don't use instance()
817 # in a process, don't use instance()
818 # for safety with multiprocessing
818 # for safety with multiprocessing
819 ctx = zmq.Context()
819 ctx = zmq.Context()
820 loop = ioloop.IOLoop()
820 loop = ioloop.IOLoop()
821 ins = ZMQStream(ctx.socket(zmq.ROUTER),loop)
821 ins = ZMQStream(ctx.socket(zmq.ROUTER),loop)
822 util.set_hwm(ins, 0)
822 util.set_hwm(ins, 0)
823 ins.setsockopt(zmq.IDENTITY, identity + b'_in')
823 ins.setsockopt(zmq.IDENTITY, identity + b'_in')
824 ins.bind(in_addr)
824 ins.bind(in_addr)
825
825
826 outs = ZMQStream(ctx.socket(zmq.ROUTER),loop)
826 outs = ZMQStream(ctx.socket(zmq.ROUTER),loop)
827 util.set_hwm(outs, 0)
827 util.set_hwm(outs, 0)
828 outs.setsockopt(zmq.IDENTITY, identity + b'_out')
828 outs.setsockopt(zmq.IDENTITY, identity + b'_out')
829 outs.bind(out_addr)
829 outs.bind(out_addr)
830 mons = zmqstream.ZMQStream(ctx.socket(zmq.PUB),loop)
830 mons = zmqstream.ZMQStream(ctx.socket(zmq.PUB),loop)
831 util.set_hwm(mons, 0)
831 util.set_hwm(mons, 0)
832 mons.connect(mon_addr)
832 mons.connect(mon_addr)
833 nots = zmqstream.ZMQStream(ctx.socket(zmq.SUB),loop)
833 nots = zmqstream.ZMQStream(ctx.socket(zmq.SUB),loop)
834 nots.setsockopt(zmq.SUBSCRIBE, b'')
834 nots.setsockopt(zmq.SUBSCRIBE, b'')
835 nots.connect(not_addr)
835 nots.connect(not_addr)
836
836
837 querys = ZMQStream(ctx.socket(zmq.DEALER),loop)
837 querys = ZMQStream(ctx.socket(zmq.DEALER),loop)
838 querys.connect(reg_addr)
838 querys.connect(reg_addr)
839
839
840 # setup logging.
840 # setup logging.
841 if in_thread:
841 if in_thread:
842 log = Application.instance().log
842 log = Application.instance().log
843 else:
843 else:
844 if log_url:
844 if log_url:
845 log = connect_logger(logname, ctx, log_url, root="scheduler", loglevel=loglevel)
845 log = connect_logger(logname, ctx, log_url, root="scheduler", loglevel=loglevel)
846 else:
846 else:
847 log = local_logger(logname, loglevel)
847 log = local_logger(logname, loglevel)
848
848
849 scheduler = TaskScheduler(client_stream=ins, engine_stream=outs,
849 scheduler = TaskScheduler(client_stream=ins, engine_stream=outs,
850 mon_stream=mons, notifier_stream=nots,
850 mon_stream=mons, notifier_stream=nots,
851 query_stream=querys,
851 query_stream=querys,
852 loop=loop, log=log,
852 loop=loop, log=log,
853 config=config)
853 config=config)
854 scheduler.start()
854 scheduler.start()
855 if not in_thread:
855 if not in_thread:
856 try:
856 try:
857 loop.start()
857 loop.start()
858 except KeyboardInterrupt:
858 except KeyboardInterrupt:
859 scheduler.log.critical("Interrupted, exiting...")
859 scheduler.log.critical("Interrupted, exiting...")
860
860
@@ -1,760 +1,760 b''
1 """Nose Plugin that supports IPython doctests.
1 """Nose Plugin that supports IPython doctests.
2
2
3 Limitations:
3 Limitations:
4
4
5 - When generating examples for use as doctests, make sure that you have
5 - When generating examples for use as doctests, make sure that you have
6 pretty-printing OFF. This can be done either by setting the
6 pretty-printing OFF. This can be done either by setting the
7 ``PlainTextFormatter.pprint`` option in your configuration file to False, or
7 ``PlainTextFormatter.pprint`` option in your configuration file to False, or
8 by interactively disabling it with %Pprint. This is required so that IPython
8 by interactively disabling it with %Pprint. This is required so that IPython
9 output matches that of normal Python, which is used by doctest for internal
9 output matches that of normal Python, which is used by doctest for internal
10 execution.
10 execution.
11
11
12 - Do not rely on specific prompt numbers for results (such as using
12 - Do not rely on specific prompt numbers for results (such as using
13 '_34==True', for example). For IPython tests run via an external process the
13 '_34==True', for example). For IPython tests run via an external process the
14 prompt numbers may be different, and IPython tests run as normal python code
14 prompt numbers may be different, and IPython tests run as normal python code
15 won't even have these special _NN variables set at all.
15 won't even have these special _NN variables set at all.
16 """
16 """
17
17
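A minimal sketch of the configuration-file route mentioned in the limitations above (standard ipython_config.py idiom; only the option name comes from this module's docstring):

    # ipython_config.py (sketch)
    c = get_config()
    c.PlainTextFormatter.pprint = False   # plain repr output so doctests match vanilla Python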
18 #-----------------------------------------------------------------------------
18 #-----------------------------------------------------------------------------
19 # Module imports
19 # Module imports
20
20
21 # From the standard library
21 # From the standard library
22 import doctest
22 import doctest
23 import inspect
23 import inspect
24 import logging
24 import logging
25 import os
25 import os
26 import re
26 import re
27 import sys
27 import sys
28 import traceback
28 import traceback
29 import unittest
29 import unittest
30
30
31 from inspect import getmodule
31 from inspect import getmodule
32 from io import StringIO
32 from io import StringIO
33
33
34 # We are overriding the default doctest runner, so we need to import a few
34 # We are overriding the default doctest runner, so we need to import a few
35 # things from doctest directly
35 # things from doctest directly
36 from doctest import (REPORTING_FLAGS, REPORT_ONLY_FIRST_FAILURE,
36 from doctest import (REPORTING_FLAGS, REPORT_ONLY_FIRST_FAILURE,
37 _unittest_reportflags, DocTestRunner,
37 _unittest_reportflags, DocTestRunner,
38 _extract_future_flags, pdb, _OutputRedirectingPdb,
38 _extract_future_flags, pdb, _OutputRedirectingPdb,
39 _exception_traceback,
39 _exception_traceback,
40 linecache)
40 linecache)
41
41
42 # Third-party modules
42 # Third-party modules
43 import nose.core
43 import nose.core
44
44
45 from nose.plugins import doctests, Plugin
45 from nose.plugins import doctests, Plugin
46 from nose.util import anyp, getpackage, test_address, resolve_name, tolist
46 from nose.util import anyp, getpackage, test_address, resolve_name, tolist
47
47
48 # Our own imports
48 # Our own imports
49 from IPython.utils.py3compat import builtin_mod
49 from IPython.utils.py3compat import builtin_mod
50
50
51 #-----------------------------------------------------------------------------
51 #-----------------------------------------------------------------------------
52 # Module globals and other constants
52 # Module globals and other constants
53 #-----------------------------------------------------------------------------
53 #-----------------------------------------------------------------------------
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 #-----------------------------------------------------------------------------
58 #-----------------------------------------------------------------------------
59 # Classes and functions
59 # Classes and functions
60 #-----------------------------------------------------------------------------
60 #-----------------------------------------------------------------------------
61
61
62 def is_extension_module(filename):
62 def is_extension_module(filename):
63 """Return whether the given filename is an extension module.
63 """Return whether the given filename is an extension module.
64
64
65 This simply checks that the extension is either .so or .pyd.
65 This simply checks that the extension is either .so or .pyd.
66 """
66 """
67 return os.path.splitext(filename)[1].lower() in ('.so','.pyd')
67 return os.path.splitext(filename)[1].lower() in ('.so','.pyd')
68
68
69
69
70 class DocTestSkip(object):
70 class DocTestSkip(object):
71 """Object wrapper for doctests to be skipped."""
71 """Object wrapper for doctests to be skipped."""
72
72
73 ds_skip = """Doctest to skip.
73 ds_skip = """Doctest to skip.
74 >>> 1 #doctest: +SKIP
74 >>> 1 #doctest: +SKIP
75 """
75 """
76
76
77 def __init__(self,obj):
77 def __init__(self,obj):
78 self.obj = obj
78 self.obj = obj
79
79
80 def __getattribute__(self,key):
80 def __getattribute__(self,key):
81 if key == '__doc__':
81 if key == '__doc__':
82 return DocTestSkip.ds_skip
82 return DocTestSkip.ds_skip
83 else:
83 else:
84 return getattr(object.__getattribute__(self,'obj'),key)
84 return getattr(object.__getattribute__(self,'obj'),key)
85
85
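For context, objects end up wrapped in DocTestSkip via the skip_doctest marker that the finder below looks for. A hedged sketch of how a docstring full of non-reproducible examples is typically excluded:

    # Sketch: flag an object so its examples are never collected as doctests.
    def fragile():
        """
        >>> import os; print(os.getpid())    # nondeterministic output
        """
    fragile.skip_doctest = True    # _find() below wraps such objects in DocTestSkip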
86 # Modified version of the one in the stdlib, that fixes a python bug (doctests
86 # Modified version of the one in the stdlib, that fixes a python bug (doctests
87 # not found in extension modules, http://bugs.python.org/issue3158)
87 # not found in extension modules, http://bugs.python.org/issue3158)
88 class DocTestFinder(doctest.DocTestFinder):
88 class DocTestFinder(doctest.DocTestFinder):
89
89
90 def _from_module(self, module, object):
90 def _from_module(self, module, object):
91 """
91 """
92 Return true if the given object is defined in the given
92 Return true if the given object is defined in the given
93 module.
93 module.
94 """
94 """
95 if module is None:
95 if module is None:
96 return True
96 return True
97 elif inspect.isfunction(object):
97 elif inspect.isfunction(object):
98 return module.__dict__ is object.func_globals
98 return module.__dict__ is object.__globals__
99 elif inspect.isbuiltin(object):
99 elif inspect.isbuiltin(object):
100 return module.__name__ == object.__module__
100 return module.__name__ == object.__module__
101 elif inspect.isclass(object):
101 elif inspect.isclass(object):
102 return module.__name__ == object.__module__
102 return module.__name__ == object.__module__
103 elif inspect.ismethod(object):
103 elif inspect.ismethod(object):
104 # This one may be a bug in cython that fails to correctly set the
104 # This one may be a bug in cython that fails to correctly set the
105 # __module__ attribute of methods, but since the same error is easy
105 # __module__ attribute of methods, but since the same error is easy
106 # to make by extension code writers, having this safety in place
106 # to make by extension code writers, having this safety in place
107 # isn't such a bad idea
107 # isn't such a bad idea
108 return module.__name__ == object.im_class.__module__
108 return module.__name__ == object.im_class.__module__
109 elif inspect.getmodule(object) is not None:
109 elif inspect.getmodule(object) is not None:
110 return module is inspect.getmodule(object)
110 return module is inspect.getmodule(object)
111 elif hasattr(object, '__module__'):
111 elif hasattr(object, '__module__'):
112 return module.__name__ == object.__module__
112 return module.__name__ == object.__module__
113 elif isinstance(object, property):
113 elif isinstance(object, property):
114 return True # [XX] no way to be sure.
114 return True # [XX] no way to be sure.
115 else:
115 else:
116 raise ValueError("object must be a class or function")
116 raise ValueError("object must be a class or function")
117
117
118 def _find(self, tests, obj, name, module, source_lines, globs, seen):
118 def _find(self, tests, obj, name, module, source_lines, globs, seen):
119 """
119 """
120 Find tests for the given object and any contained objects, and
120 Find tests for the given object and any contained objects, and
121 add them to `tests`.
121 add them to `tests`.
122 """
122 """
123 #print '_find for:', obj, name, module # dbg
123 #print '_find for:', obj, name, module # dbg
124 if hasattr(obj,"skip_doctest"):
124 if hasattr(obj,"skip_doctest"):
125 #print 'SKIPPING DOCTEST FOR:',obj # dbg
125 #print 'SKIPPING DOCTEST FOR:',obj # dbg
126 obj = DocTestSkip(obj)
126 obj = DocTestSkip(obj)
127
127
128 doctest.DocTestFinder._find(self,tests, obj, name, module,
128 doctest.DocTestFinder._find(self,tests, obj, name, module,
129 source_lines, globs, seen)
129 source_lines, globs, seen)
130
130
131 # Below we re-run pieces of the above method with manual modifications,
131 # Below we re-run pieces of the above method with manual modifications,
132 # because the original code is buggy and fails to correctly identify
132 # because the original code is buggy and fails to correctly identify
133 # doctests in extension modules.
133 # doctests in extension modules.
134
134
135 # Local shorthands
135 # Local shorthands
136 from inspect import isroutine, isclass, ismodule
136 from inspect import isroutine, isclass, ismodule
137
137
138 # Look for tests in a module's contained objects.
138 # Look for tests in a module's contained objects.
139 if inspect.ismodule(obj) and self._recurse:
139 if inspect.ismodule(obj) and self._recurse:
140 for valname, val in obj.__dict__.items():
140 for valname, val in obj.__dict__.items():
141 valname1 = '%s.%s' % (name, valname)
141 valname1 = '%s.%s' % (name, valname)
142 if ( (isroutine(val) or isclass(val))
142 if ( (isroutine(val) or isclass(val))
143 and self._from_module(module, val) ):
143 and self._from_module(module, val) ):
144
144
145 self._find(tests, val, valname1, module, source_lines,
145 self._find(tests, val, valname1, module, source_lines,
146 globs, seen)
146 globs, seen)
147
147
148 # Look for tests in a class's contained objects.
148 # Look for tests in a class's contained objects.
149 if inspect.isclass(obj) and self._recurse:
149 if inspect.isclass(obj) and self._recurse:
150 #print 'RECURSE into class:',obj # dbg
150 #print 'RECURSE into class:',obj # dbg
151 for valname, val in obj.__dict__.items():
151 for valname, val in obj.__dict__.items():
152 # Special handling for staticmethod/classmethod.
152 # Special handling for staticmethod/classmethod.
153 if isinstance(val, staticmethod):
153 if isinstance(val, staticmethod):
154 val = getattr(obj, valname)
154 val = getattr(obj, valname)
155 if isinstance(val, classmethod):
155 if isinstance(val, classmethod):
156 val = getattr(obj, valname).im_func
156 val = getattr(obj, valname).im_func
157
157
158 # Recurse to methods, properties, and nested classes.
158 # Recurse to methods, properties, and nested classes.
159 if ((inspect.isfunction(val) or inspect.isclass(val) or
159 if ((inspect.isfunction(val) or inspect.isclass(val) or
160 inspect.ismethod(val) or
160 inspect.ismethod(val) or
161 isinstance(val, property)) and
161 isinstance(val, property)) and
162 self._from_module(module, val)):
162 self._from_module(module, val)):
163 valname = '%s.%s' % (name, valname)
163 valname = '%s.%s' % (name, valname)
164 self._find(tests, val, valname, module, source_lines,
164 self._find(tests, val, valname, module, source_lines,
165 globs, seen)
165 globs, seen)
166
166
167
167
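Since this commit is precisely the `func_globals` to `__globals__` rename used in `_from_module` above, here is a small self-contained sketch of the identity test that method performs (the function names are invented for illustration):

```python
import os

def local_func():
    pass

# A function defined in the current module shares that module's __dict__
# as its __globals__; a function defined elsewhere does not.
print(local_func.__globals__ is globals())    # True
print(os.path.join.__globals__ is globals())  # False (defined in posixpath/ntpath)
```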
168 class IPDoctestOutputChecker(doctest.OutputChecker):
168 class IPDoctestOutputChecker(doctest.OutputChecker):
169 """Second-chance checker with support for random tests.
169 """Second-chance checker with support for random tests.
170
170
171 If the default comparison doesn't pass, this checker looks in the expected
171 If the default comparison doesn't pass, this checker looks in the expected
172 output string for flags that tell us to ignore the output.
172 output string for flags that tell us to ignore the output.
173 """
173 """
174
174
175 random_re = re.compile(r'#\s*random\s+')
175 random_re = re.compile(r'#\s*random\s+')
176
176
177 def check_output(self, want, got, optionflags):
177 def check_output(self, want, got, optionflags):
178 """Check output, accepting special markers embedded in the output.
178 """Check output, accepting special markers embedded in the output.
179
179
180 If the output didn't pass the default validation but the special string
180 If the output didn't pass the default validation but the special string
181 '#random' is included, we accept it."""
181 '#random' is included, we accept it."""
182
182
183 # Let the original tester verify first, in case people have valid tests
183 # Let the original tester verify first, in case people have valid tests
184 # that happen to have a comment saying '#random' embedded in them.
184 # that happen to have a comment saying '#random' embedded in them.
185 ret = doctest.OutputChecker.check_output(self, want, got,
185 ret = doctest.OutputChecker.check_output(self, want, got,
186 optionflags)
186 optionflags)
187 if not ret and self.random_re.search(want):
187 if not ret and self.random_re.search(want):
188 #print >> sys.stderr, 'RANDOM OK:',want # dbg
188 #print >> sys.stderr, 'RANDOM OK:',want # dbg
189 return True
189 return True
190
190
191 return ret
191 return ret
192
192
193
193
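A hedged usage sketch of the second-chance comparison (assuming IPDoctestOutputChecker above is importable; the values are illustrative): a mismatch is accepted only when the expected output carries a `# random` marker.

```python
import doctest

checker = IPDoctestOutputChecker()
flags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS

# Mismatch forgiven because the expected output is flagged as random.
print(checker.check_output('0.4967  # random \n', '0.1312\n', flags))  # True
# An ordinary mismatch without the marker still fails.
print(checker.check_output('1\n', '2\n', flags))                       # False
```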
194 class DocTestCase(doctests.DocTestCase):
194 class DocTestCase(doctests.DocTestCase):
195 """Proxy for DocTestCase: provides an address() method that
195 """Proxy for DocTestCase: provides an address() method that
196 returns the correct address for the doctest case. Otherwise
196 returns the correct address for the doctest case. Otherwise
197 acts as a proxy to the test case. To provide hints for address(),
197 acts as a proxy to the test case. To provide hints for address(),
198 an obj may also be passed -- this will be used as the test object
198 an obj may also be passed -- this will be used as the test object
199 for purposes of determining the test address, if it is provided.
199 for purposes of determining the test address, if it is provided.
200 """
200 """
201
201
202 # Note: this method was taken from numpy's nosetester module.
202 # Note: this method was taken from numpy's nosetester module.
203
203
204 # Subclass nose.plugins.doctests.DocTestCase to work around a bug in
204 # Subclass nose.plugins.doctests.DocTestCase to work around a bug in
205 # its constructor that blocks non-default arguments from being passed
205 # its constructor that blocks non-default arguments from being passed
206 # down into doctest.DocTestCase
206 # down into doctest.DocTestCase
207
207
208 def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
208 def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
209 checker=None, obj=None, result_var='_'):
209 checker=None, obj=None, result_var='_'):
210 self._result_var = result_var
210 self._result_var = result_var
211 doctests.DocTestCase.__init__(self, test,
211 doctests.DocTestCase.__init__(self, test,
212 optionflags=optionflags,
212 optionflags=optionflags,
213 setUp=setUp, tearDown=tearDown,
213 setUp=setUp, tearDown=tearDown,
214 checker=checker)
214 checker=checker)
215 # Now we must actually copy the original constructor from the stdlib
215 # Now we must actually copy the original constructor from the stdlib
216 # doctest class, because we can't call it directly and a bug in nose
216 # doctest class, because we can't call it directly and a bug in nose
217 # means it never gets passed the right arguments.
217 # means it never gets passed the right arguments.
218
218
219 self._dt_optionflags = optionflags
219 self._dt_optionflags = optionflags
220 self._dt_checker = checker
220 self._dt_checker = checker
221 self._dt_test = test
221 self._dt_test = test
222 self._dt_test_globs_ori = test.globs
222 self._dt_test_globs_ori = test.globs
223 self._dt_setUp = setUp
223 self._dt_setUp = setUp
224 self._dt_tearDown = tearDown
224 self._dt_tearDown = tearDown
225
225
226 # XXX - store this runner once in the object!
226 # XXX - store this runner once in the object!
227 runner = IPDocTestRunner(optionflags=optionflags,
227 runner = IPDocTestRunner(optionflags=optionflags,
228 checker=checker, verbose=False)
228 checker=checker, verbose=False)
229 self._dt_runner = runner
229 self._dt_runner = runner
230
230
231
231
232 # Each doctest should remember the directory it was loaded from, so
232 # Each doctest should remember the directory it was loaded from, so
233 # things like %run work without too many contortions
233 # things like %run work without too many contortions
234 self._ori_dir = os.path.dirname(test.filename)
234 self._ori_dir = os.path.dirname(test.filename)
235
235
236 # Modified runTest from the default stdlib
236 # Modified runTest from the default stdlib
237 def runTest(self):
237 def runTest(self):
238 test = self._dt_test
238 test = self._dt_test
239 runner = self._dt_runner
239 runner = self._dt_runner
240
240
241 old = sys.stdout
241 old = sys.stdout
242 new = StringIO()
242 new = StringIO()
243 optionflags = self._dt_optionflags
243 optionflags = self._dt_optionflags
244
244
245 if not (optionflags & REPORTING_FLAGS):
245 if not (optionflags & REPORTING_FLAGS):
246 # The option flags don't include any reporting flags,
246 # The option flags don't include any reporting flags,
247 # so add the default reporting flags
247 # so add the default reporting flags
248 optionflags |= _unittest_reportflags
248 optionflags |= _unittest_reportflags
249
249
250 try:
250 try:
251 # Save our current directory and switch out to the one where the
251 # Save our current directory and switch out to the one where the
252 # test was originally created, in case another doctest did a
252 # test was originally created, in case another doctest did a
253 # directory change. We'll restore this in the finally clause.
253 # directory change. We'll restore this in the finally clause.
254 curdir = os.getcwdu()
254 curdir = os.getcwdu()
255 #print 'runTest in dir:', self._ori_dir # dbg
255 #print 'runTest in dir:', self._ori_dir # dbg
256 os.chdir(self._ori_dir)
256 os.chdir(self._ori_dir)
257
257
258 runner.DIVIDER = "-"*70
258 runner.DIVIDER = "-"*70
259 failures, tries = runner.run(test,out=new.write,
259 failures, tries = runner.run(test,out=new.write,
260 clear_globs=False)
260 clear_globs=False)
261 finally:
261 finally:
262 sys.stdout = old
262 sys.stdout = old
263 os.chdir(curdir)
263 os.chdir(curdir)
264
264
265 if failures:
265 if failures:
266 raise self.failureException(self.format_failure(new.getvalue()))
266 raise self.failureException(self.format_failure(new.getvalue()))
267
267
268 def setUp(self):
268 def setUp(self):
269 """Modified test setup that syncs with ipython namespace"""
269 """Modified test setup that syncs with ipython namespace"""
270 #print "setUp test", self._dt_test.examples # dbg
270 #print "setUp test", self._dt_test.examples # dbg
271 if isinstance(self._dt_test.examples[0], IPExample):
271 if isinstance(self._dt_test.examples[0], IPExample):
272 # for IPython examples *only*, we swap the globals with the ipython
272 # for IPython examples *only*, we swap the globals with the ipython
273 # namespace, after updating it with the globals (which doctest
273 # namespace, after updating it with the globals (which doctest
274 # fills with the necessary info from the module being tested).
274 # fills with the necessary info from the module being tested).
275 self.user_ns_orig = {}
275 self.user_ns_orig = {}
276 self.user_ns_orig.update(_ip.user_ns)
276 self.user_ns_orig.update(_ip.user_ns)
277 _ip.user_ns.update(self._dt_test.globs)
277 _ip.user_ns.update(self._dt_test.globs)
278 # We must remove the _ key in the namespace, so that Python's
278 # We must remove the _ key in the namespace, so that Python's
279 # doctest code sets it naturally
279 # doctest code sets it naturally
280 _ip.user_ns.pop('_', None)
280 _ip.user_ns.pop('_', None)
281 _ip.user_ns['__builtins__'] = builtin_mod
281 _ip.user_ns['__builtins__'] = builtin_mod
282 self._dt_test.globs = _ip.user_ns
282 self._dt_test.globs = _ip.user_ns
283
283
284 super(DocTestCase, self).setUp()
284 super(DocTestCase, self).setUp()
285
285
286 def tearDown(self):
286 def tearDown(self):
287
287
288 # Undo the test.globs reassignment we made, so that the parent class
288 # Undo the test.globs reassignment we made, so that the parent class
289 # teardown doesn't destroy the ipython namespace
289 # teardown doesn't destroy the ipython namespace
290 if isinstance(self._dt_test.examples[0], IPExample):
290 if isinstance(self._dt_test.examples[0], IPExample):
291 self._dt_test.globs = self._dt_test_globs_ori
291 self._dt_test.globs = self._dt_test_globs_ori
292 _ip.user_ns.clear()
292 _ip.user_ns.clear()
293 _ip.user_ns.update(self.user_ns_orig)
293 _ip.user_ns.update(self.user_ns_orig)
294
294
295 # XXX - fperez: I am not sure if this is truly a bug in nose 0.11, but
295 # XXX - fperez: I am not sure if this is truly a bug in nose 0.11, but
296 # it does look like one to me: its tearDown method tries to run
296 # it does look like one to me: its tearDown method tries to run
297 #
297 #
298 # delattr(builtin_mod, self._result_var)
298 # delattr(builtin_mod, self._result_var)
299 #
299 #
300 # without checking that the attribute really is there; it implicitly
300 # without checking that the attribute really is there; it implicitly
301 # assumes it should have been set via displayhook. But if the
301 # assumes it should have been set via displayhook. But if the
302 # displayhook was never called, this doesn't necessarily happen. I
302 # displayhook was never called, this doesn't necessarily happen. I
303 # haven't been able to find a little self-contained example outside of
303 # haven't been able to find a little self-contained example outside of
304 # ipython that would show the problem so I can report it to the nose
304 # ipython that would show the problem so I can report it to the nose
305 # team, but it does happen a lot in our code.
305 # team, but it does happen a lot in our code.
306 #
306 #
307 # So here, we just protect as narrowly as possible by trapping an
307 # So here, we just protect as narrowly as possible by trapping an
308 # attribute error whose message would be the name of self._result_var,
308 # attribute error whose message would be the name of self._result_var,
309 # and letting any other error propagate.
309 # and letting any other error propagate.
310 try:
310 try:
311 super(DocTestCase, self).tearDown()
311 super(DocTestCase, self).tearDown()
312 except AttributeError as exc:
312 except AttributeError as exc:
313 if exc.args[0] != self._result_var:
313 if exc.args[0] != self._result_var:
314 raise
314 raise
315
315
316
316
317 # A simple subclassing of the original with a different class name, so we can
317 # A simple subclassing of the original with a different class name, so we can
318 # distinguish IPython examples from pure Python ones and treat them differently.
318 # distinguish IPython examples from pure Python ones and treat them differently.
319 class IPExample(doctest.Example): pass
319 class IPExample(doctest.Example): pass
320
320
321
321
322 class IPExternalExample(doctest.Example):
322 class IPExternalExample(doctest.Example):
323 """Doctest examples to be run in an external process."""
323 """Doctest examples to be run in an external process."""
324
324
325 def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
325 def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
326 options=None):
326 options=None):
327 # Parent constructor
327 # Parent constructor
328 doctest.Example.__init__(self,source,want,exc_msg,lineno,indent,options)
328 doctest.Example.__init__(self,source,want,exc_msg,lineno,indent,options)
329
329
330 # An EXTRA newline is needed to prevent pexpect hangs
330 # An EXTRA newline is needed to prevent pexpect hangs
331 self.source += '\n'
331 self.source += '\n'
332
332
333
333
334 class IPDocTestParser(doctest.DocTestParser):
334 class IPDocTestParser(doctest.DocTestParser):
335 """
335 """
336 A class used to parse strings containing doctest examples.
336 A class used to parse strings containing doctest examples.
337
337
338 Note: This is a version modified to properly recognize IPython input and
338 Note: This is a version modified to properly recognize IPython input and
339 convert any IPython examples into valid Python ones.
339 convert any IPython examples into valid Python ones.
340 """
340 """
341 # This regular expression is used to find doctest examples in a
341 # This regular expression is used to find doctest examples in a
342 # string. It defines three groups: `source` is the source code
342 # string. It defines three groups: `source` is the source code
343 # (including leading indentation and prompts); `indent` is the
343 # (including leading indentation and prompts); `indent` is the
344 # indentation of the first (PS1) line of the source code; and
344 # indentation of the first (PS1) line of the source code; and
345 # `want` is the expected output (including leading indentation).
345 # `want` is the expected output (including leading indentation).
346
346
347 # Classic Python prompts or default IPython ones
347 # Classic Python prompts or default IPython ones
348 _PS1_PY = r'>>>'
348 _PS1_PY = r'>>>'
349 _PS2_PY = r'\.\.\.'
349 _PS2_PY = r'\.\.\.'
350
350
351 _PS1_IP = r'In\ \[\d+\]:'
351 _PS1_IP = r'In\ \[\d+\]:'
352 _PS2_IP = r'\ \ \ \.\.\.+:'
352 _PS2_IP = r'\ \ \ \.\.\.+:'
353
353
354 _RE_TPL = r'''
354 _RE_TPL = r'''
355 # Source consists of a PS1 line followed by zero or more PS2 lines.
355 # Source consists of a PS1 line followed by zero or more PS2 lines.
356 (?P<source>
356 (?P<source>
357 (?:^(?P<indent> [ ]*) (?P<ps1> %s) .*) # PS1 line
357 (?:^(?P<indent> [ ]*) (?P<ps1> %s) .*) # PS1 line
358 (?:\n [ ]* (?P<ps2> %s) .*)*) # PS2 lines
358 (?:\n [ ]* (?P<ps2> %s) .*)*) # PS2 lines
359 \n? # a newline
359 \n? # a newline
360 # Want consists of any non-blank lines that do not start with PS1.
360 # Want consists of any non-blank lines that do not start with PS1.
361 (?P<want> (?:(?![ ]*$) # Not a blank line
361 (?P<want> (?:(?![ ]*$) # Not a blank line
362 (?![ ]*%s) # Not a line starting with PS1
362 (?![ ]*%s) # Not a line starting with PS1
363 (?![ ]*%s) # Not a line starting with PS2
363 (?![ ]*%s) # Not a line starting with PS2
364 .*$\n? # But any other line
364 .*$\n? # But any other line
365 )*)
365 )*)
366 '''
366 '''
367
367
368 _EXAMPLE_RE_PY = re.compile( _RE_TPL % (_PS1_PY,_PS2_PY,_PS1_PY,_PS2_PY),
368 _EXAMPLE_RE_PY = re.compile( _RE_TPL % (_PS1_PY,_PS2_PY,_PS1_PY,_PS2_PY),
369 re.MULTILINE | re.VERBOSE)
369 re.MULTILINE | re.VERBOSE)
370
370
371 _EXAMPLE_RE_IP = re.compile( _RE_TPL % (_PS1_IP,_PS2_IP,_PS1_IP,_PS2_IP),
371 _EXAMPLE_RE_IP = re.compile( _RE_TPL % (_PS1_IP,_PS2_IP,_PS1_IP,_PS2_IP),
372 re.MULTILINE | re.VERBOSE)
372 re.MULTILINE | re.VERBOSE)
373
373
374 # Mark a test as being fully random. In this case, we simply append the
374 # Mark a test as being fully random. In this case, we simply append the
375 # random marker ('#random') to each individual example's output. This way
375 # random marker ('#random') to each individual example's output. This way
376 # we don't need to modify any other code.
376 # we don't need to modify any other code.
377 _RANDOM_TEST = re.compile(r'#\s*all-random\s+')
377 _RANDOM_TEST = re.compile(r'#\s*all-random\s+')
378
378
379 # Mark tests to be executed in an external process - currently unsupported.
379 # Mark tests to be executed in an external process - currently unsupported.
380 _EXTERNAL_IP = re.compile(r'#\s*ipdoctest:\s*EXTERNAL')
380 _EXTERNAL_IP = re.compile(r'#\s*ipdoctest:\s*EXTERNAL')
381
381
382 def ip2py(self,source):
382 def ip2py(self,source):
383 """Convert input IPython source into valid Python."""
383 """Convert input IPython source into valid Python."""
384 block = _ip.input_transformer_manager.transform_cell(source)
384 block = _ip.input_transformer_manager.transform_cell(source)
385 if len(block.splitlines()) == 1:
385 if len(block.splitlines()) == 1:
386 return _ip.prefilter(block)
386 return _ip.prefilter(block)
387 else:
387 else:
388 return block
388 return block
389
389
390 def parse(self, string, name='<string>'):
390 def parse(self, string, name='<string>'):
391 """
391 """
392 Divide the given string into examples and intervening text,
392 Divide the given string into examples and intervening text,
393 and return them as a list of alternating Examples and strings.
393 and return them as a list of alternating Examples and strings.
394 Line numbers for the Examples are 0-based. The optional
394 Line numbers for the Examples are 0-based. The optional
395 argument `name` is a name identifying this string, and is only
395 argument `name` is a name identifying this string, and is only
396 used for error messages.
396 used for error messages.
397 """
397 """
398
398
399 #print 'Parse string:\n',string # dbg
399 #print 'Parse string:\n',string # dbg
400
400
401 string = string.expandtabs()
401 string = string.expandtabs()
402 # If all lines begin with the same indentation, then strip it.
402 # If all lines begin with the same indentation, then strip it.
403 min_indent = self._min_indent(string)
403 min_indent = self._min_indent(string)
404 if min_indent > 0:
404 if min_indent > 0:
405 string = '\n'.join([l[min_indent:] for l in string.split('\n')])
405 string = '\n'.join([l[min_indent:] for l in string.split('\n')])
406
406
407 output = []
407 output = []
408 charno, lineno = 0, 0
408 charno, lineno = 0, 0
409
409
410 # We make 'all random' tests by adding the '# random' mark to every
410 # We make 'all random' tests by adding the '# random' mark to every
411 # block of output in the test.
411 # block of output in the test.
412 if self._RANDOM_TEST.search(string):
412 if self._RANDOM_TEST.search(string):
413 random_marker = '\n# random'
413 random_marker = '\n# random'
414 else:
414 else:
415 random_marker = ''
415 random_marker = ''
416
416
417 # Whether to convert the input from ipython to python syntax
417 # Whether to convert the input from ipython to python syntax
418 ip2py = False
418 ip2py = False
419 # Find all doctest examples in the string. First, try them as Python
419 # Find all doctest examples in the string. First, try them as Python
420 # examples, then as IPython ones
420 # examples, then as IPython ones
421 terms = list(self._EXAMPLE_RE_PY.finditer(string))
421 terms = list(self._EXAMPLE_RE_PY.finditer(string))
422 if terms:
422 if terms:
423 # Normal Python example
423 # Normal Python example
424 #print '-'*70 # dbg
424 #print '-'*70 # dbg
425 #print 'PyExample, Source:\n',string # dbg
425 #print 'PyExample, Source:\n',string # dbg
426 #print '-'*70 # dbg
426 #print '-'*70 # dbg
427 Example = doctest.Example
427 Example = doctest.Example
428 else:
428 else:
429 # It's an ipython example. Note that IPExamples are run
429 # It's an ipython example. Note that IPExamples are run
430 # in-process, so their syntax must be turned into valid python.
430 # in-process, so their syntax must be turned into valid python.
431 # IPExternalExamples are run out-of-process (via pexpect) so they
431 # IPExternalExamples are run out-of-process (via pexpect) so they
432 # don't need any filtering (a real ipython will be executing them).
432 # don't need any filtering (a real ipython will be executing them).
433 terms = list(self._EXAMPLE_RE_IP.finditer(string))
433 terms = list(self._EXAMPLE_RE_IP.finditer(string))
434 if self._EXTERNAL_IP.search(string):
434 if self._EXTERNAL_IP.search(string):
435 #print '-'*70 # dbg
435 #print '-'*70 # dbg
436 #print 'IPExternalExample, Source:\n',string # dbg
436 #print 'IPExternalExample, Source:\n',string # dbg
437 #print '-'*70 # dbg
437 #print '-'*70 # dbg
438 Example = IPExternalExample
438 Example = IPExternalExample
439 else:
439 else:
440 #print '-'*70 # dbg
440 #print '-'*70 # dbg
441 #print 'IPExample, Source:\n',string # dbg
441 #print 'IPExample, Source:\n',string # dbg
442 #print '-'*70 # dbg
442 #print '-'*70 # dbg
443 Example = IPExample
443 Example = IPExample
444 ip2py = True
444 ip2py = True
445
445
446 for m in terms:
446 for m in terms:
447 # Add the pre-example text to `output`.
447 # Add the pre-example text to `output`.
448 output.append(string[charno:m.start()])
448 output.append(string[charno:m.start()])
449 # Update lineno (lines before this example)
449 # Update lineno (lines before this example)
450 lineno += string.count('\n', charno, m.start())
450 lineno += string.count('\n', charno, m.start())
451 # Extract info from the regexp match.
451 # Extract info from the regexp match.
452 (source, options, want, exc_msg) = \
452 (source, options, want, exc_msg) = \
453 self._parse_example(m, name, lineno,ip2py)
453 self._parse_example(m, name, lineno,ip2py)
454
454
455 # Append the random-output marker (it defaults to empty in most
455 # Append the random-output marker (it defaults to empty in most
456 # cases, it's only non-empty for 'all-random' tests):
456 # cases, it's only non-empty for 'all-random' tests):
457 want += random_marker
457 want += random_marker
458
458
459 if Example is IPExternalExample:
459 if Example is IPExternalExample:
460 options[doctest.NORMALIZE_WHITESPACE] = True
460 options[doctest.NORMALIZE_WHITESPACE] = True
461 want += '\n'
461 want += '\n'
462
462
463 # Create an Example, and add it to the list.
463 # Create an Example, and add it to the list.
464 if not self._IS_BLANK_OR_COMMENT(source):
464 if not self._IS_BLANK_OR_COMMENT(source):
465 output.append(Example(source, want, exc_msg,
465 output.append(Example(source, want, exc_msg,
466 lineno=lineno,
466 lineno=lineno,
467 indent=min_indent+len(m.group('indent')),
467 indent=min_indent+len(m.group('indent')),
468 options=options))
468 options=options))
469 # Update lineno (lines inside this example)
469 # Update lineno (lines inside this example)
470 lineno += string.count('\n', m.start(), m.end())
470 lineno += string.count('\n', m.start(), m.end())
471 # Update charno.
471 # Update charno.
472 charno = m.end()
472 charno = m.end()
473 # Add any remaining post-example text to `output`.
473 # Add any remaining post-example text to `output`.
474 output.append(string[charno:])
474 output.append(string[charno:])
475 return output
475 return output
476
476
477 def _parse_example(self, m, name, lineno,ip2py=False):
477 def _parse_example(self, m, name, lineno,ip2py=False):
478 """
478 """
479 Given a regular expression match from `_EXAMPLE_RE` (`m`),
479 Given a regular expression match from `_EXAMPLE_RE` (`m`),
480 return a pair `(source, want)`, where `source` is the matched
480 return a pair `(source, want)`, where `source` is the matched
481 example's source code (with prompts and indentation stripped);
481 example's source code (with prompts and indentation stripped);
482 and `want` is the example's expected output (with indentation
482 and `want` is the example's expected output (with indentation
483 stripped).
483 stripped).
484
484
485 `name` is the string's name, and `lineno` is the line number
485 `name` is the string's name, and `lineno` is the line number
486 where the example starts; both are used for error messages.
486 where the example starts; both are used for error messages.
487
487
488 Optional:
488 Optional:
489 `ip2py`: if true, filter the input via IPython to convert the syntax
489 `ip2py`: if true, filter the input via IPython to convert the syntax
490 into valid python.
490 into valid python.
491 """
491 """
492
492
493 # Get the example's indentation level.
493 # Get the example's indentation level.
494 indent = len(m.group('indent'))
494 indent = len(m.group('indent'))
495
495
496 # Divide source into lines; check that they're properly
496 # Divide source into lines; check that they're properly
497 # indented; and then strip their indentation & prompts.
497 # indented; and then strip their indentation & prompts.
498 source_lines = m.group('source').split('\n')
498 source_lines = m.group('source').split('\n')
499
499
500 # We're using variable-length input prompts
500 # We're using variable-length input prompts
501 ps1 = m.group('ps1')
501 ps1 = m.group('ps1')
502 ps2 = m.group('ps2')
502 ps2 = m.group('ps2')
503 ps1_len = len(ps1)
503 ps1_len = len(ps1)
504
504
505 self._check_prompt_blank(source_lines, indent, name, lineno,ps1_len)
505 self._check_prompt_blank(source_lines, indent, name, lineno,ps1_len)
506 if ps2:
506 if ps2:
507 self._check_prefix(source_lines[1:], ' '*indent + ps2, name, lineno)
507 self._check_prefix(source_lines[1:], ' '*indent + ps2, name, lineno)
508
508
509 source = '\n'.join([sl[indent+ps1_len+1:] for sl in source_lines])
509 source = '\n'.join([sl[indent+ps1_len+1:] for sl in source_lines])
510
510
511 if ip2py:
511 if ip2py:
512 # Convert source input from IPython into valid Python syntax
512 # Convert source input from IPython into valid Python syntax
513 source = self.ip2py(source)
513 source = self.ip2py(source)
514
514
515 # Divide want into lines; check that it's properly indented; and
515 # Divide want into lines; check that it's properly indented; and
516 # then strip the indentation. Spaces before the last newline should
516 # then strip the indentation. Spaces before the last newline should
517 # be preserved, so plain rstrip() isn't good enough.
517 # be preserved, so plain rstrip() isn't good enough.
518 want = m.group('want')
518 want = m.group('want')
519 want_lines = want.split('\n')
519 want_lines = want.split('\n')
520 if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
520 if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
521 del want_lines[-1] # forget final newline & spaces after it
521 del want_lines[-1] # forget final newline & spaces after it
522 self._check_prefix(want_lines, ' '*indent, name,
522 self._check_prefix(want_lines, ' '*indent, name,
523 lineno + len(source_lines))
523 lineno + len(source_lines))
524
524
525 # Remove ipython output prompt that might be present in the first line
525 # Remove ipython output prompt that might be present in the first line
526 want_lines[0] = re.sub(r'Out\[\d+\]: \s*?\n?','',want_lines[0])
526 want_lines[0] = re.sub(r'Out\[\d+\]: \s*?\n?','',want_lines[0])
527
527
528 want = '\n'.join([wl[indent:] for wl in want_lines])
528 want = '\n'.join([wl[indent:] for wl in want_lines])
529
529
530 # If `want` contains a traceback message, then extract it.
530 # If `want` contains a traceback message, then extract it.
531 m = self._EXCEPTION_RE.match(want)
531 m = self._EXCEPTION_RE.match(want)
532 if m:
532 if m:
533 exc_msg = m.group('msg')
533 exc_msg = m.group('msg')
534 else:
534 else:
535 exc_msg = None
535 exc_msg = None
536
536
537 # Extract options from the source.
537 # Extract options from the source.
538 options = self._find_options(source, name, lineno)
538 options = self._find_options(source, name, lineno)
539
539
540 return source, options, want, exc_msg
540 return source, options, want, exc_msg
541
541
542 def _check_prompt_blank(self, lines, indent, name, lineno, ps1_len):
542 def _check_prompt_blank(self, lines, indent, name, lineno, ps1_len):
543 """
543 """
544 Given the lines of a source string (including prompts and
544 Given the lines of a source string (including prompts and
545 leading indentation), check to make sure that every prompt is
545 leading indentation), check to make sure that every prompt is
546 followed by a space character. If any prompt is not followed by
546 followed by a space character. If any prompt is not followed by
547 a space character, then raise ValueError.
547 a space character, then raise ValueError.
548
548
549 Note: IPython-modified version which takes the input prompt length as a
549 Note: IPython-modified version which takes the input prompt length as a
550 parameter, so that prompts of variable length can be dealt with.
550 parameter, so that prompts of variable length can be dealt with.
551 """
551 """
552 space_idx = indent+ps1_len
552 space_idx = indent+ps1_len
553 min_len = space_idx+1
553 min_len = space_idx+1
554 for i, line in enumerate(lines):
554 for i, line in enumerate(lines):
555 if len(line) >= min_len and line[space_idx] != ' ':
555 if len(line) >= min_len and line[space_idx] != ' ':
556 raise ValueError('line %r of the docstring for %s '
556 raise ValueError('line %r of the docstring for %s '
557 'lacks blank after %s: %r' %
557 'lacks blank after %s: %r' %
558 (lineno+i+1, name,
558 (lineno+i+1, name,
559 line[indent:space_idx], line))
559 line[indent:space_idx], line))
560
560
561
561
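To make the prompt handling concrete, here is a small illustration (not part of the source) using only the compiled class attribute above: the IPython-prompt regex recognises `In [N]:` examples that the stock doctest parser would pass over, and it can be exercised without a running IPython instance.

```python
docstring = """
In [1]: x = 2 + 2

In [2]: x
Out[2]: 4
"""

# Only the compiled class attribute is used here; parse() itself needs _ip.
matches = list(IPDocTestParser._EXAMPLE_RE_IP.finditer(docstring))
print(len(matches))                    # 2 examples recognised
print(repr(matches[1].group('want')))  # 'Out[2]: 4\n'
```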
562 SKIP = doctest.register_optionflag('SKIP')
562 SKIP = doctest.register_optionflag('SKIP')
563
563
564
564
565 class IPDocTestRunner(doctest.DocTestRunner,object):
565 class IPDocTestRunner(doctest.DocTestRunner,object):
566 """Test runner that synchronizes the IPython namespace with test globals.
566 """Test runner that synchronizes the IPython namespace with test globals.
567 """
567 """
568
568
569 def run(self, test, compileflags=None, out=None, clear_globs=True):
569 def run(self, test, compileflags=None, out=None, clear_globs=True):
570
570
571 # Hack: ipython needs access to the execution context of the example,
571 # Hack: ipython needs access to the execution context of the example,
572 # so that it can propagate user variables loaded by %run into
572 # so that it can propagate user variables loaded by %run into
573 # test.globs. We put them here into our modified %run as a function
573 # test.globs. We put them here into our modified %run as a function
574 # attribute. Our new %run will then only make the namespace update
574 # attribute. Our new %run will then only make the namespace update
575 when called (rather than unconditionally updating test.globs here
575 when called (rather than unconditionally updating test.globs here
576 # for all examples, most of which won't be calling %run anyway).
576 # for all examples, most of which won't be calling %run anyway).
577 #_ip._ipdoctest_test_globs = test.globs
577 #_ip._ipdoctest_test_globs = test.globs
578 #_ip._ipdoctest_test_filename = test.filename
578 #_ip._ipdoctest_test_filename = test.filename
579
579
580 test.globs.update(_ip.user_ns)
580 test.globs.update(_ip.user_ns)
581
581
582 return super(IPDocTestRunner,self).run(test,
582 return super(IPDocTestRunner,self).run(test,
583 compileflags,out,clear_globs)
583 compileflags,out,clear_globs)
584
584
585
585
586 class DocFileCase(doctest.DocFileCase):
586 class DocFileCase(doctest.DocFileCase):
587 """Overrides to provide filename
587 """Overrides to provide filename
588 """
588 """
589 def address(self):
589 def address(self):
590 return (self._dt_test.filename, None, None)
590 return (self._dt_test.filename, None, None)
591
591
592
592
593 class ExtensionDoctest(doctests.Doctest):
593 class ExtensionDoctest(doctests.Doctest):
594 """Nose Plugin that supports doctests in extension modules.
594 """Nose Plugin that supports doctests in extension modules.
595 """
595 """
596 name = 'extdoctest' # call nosetests with --with-extdoctest
596 name = 'extdoctest' # call nosetests with --with-extdoctest
597 enabled = True
597 enabled = True
598
598
599 def options(self, parser, env=os.environ):
599 def options(self, parser, env=os.environ):
600 Plugin.options(self, parser, env)
600 Plugin.options(self, parser, env)
601 parser.add_option('--doctest-tests', action='store_true',
601 parser.add_option('--doctest-tests', action='store_true',
602 dest='doctest_tests',
602 dest='doctest_tests',
603 default=env.get('NOSE_DOCTEST_TESTS',True),
603 default=env.get('NOSE_DOCTEST_TESTS',True),
604 help="Also look for doctests in test modules. "
604 help="Also look for doctests in test modules. "
605 "Note that classes, methods and functions should "
605 "Note that classes, methods and functions should "
606 "have either doctests or non-doctest tests, "
606 "have either doctests or non-doctest tests, "
607 "not both. [NOSE_DOCTEST_TESTS]")
607 "not both. [NOSE_DOCTEST_TESTS]")
608 parser.add_option('--doctest-extension', action="append",
608 parser.add_option('--doctest-extension', action="append",
609 dest="doctestExtension",
609 dest="doctestExtension",
610 help="Also look for doctests in files with "
610 help="Also look for doctests in files with "
611 "this extension [NOSE_DOCTEST_EXTENSION]")
611 "this extension [NOSE_DOCTEST_EXTENSION]")
612 # Set the default as a list, if given in env; otherwise
612 # Set the default as a list, if given in env; otherwise
613 # an additional value set on the command line will cause
613 # an additional value set on the command line will cause
614 # an error.
614 # an error.
615 env_setting = env.get('NOSE_DOCTEST_EXTENSION')
615 env_setting = env.get('NOSE_DOCTEST_EXTENSION')
616 if env_setting is not None:
616 if env_setting is not None:
617 parser.set_defaults(doctestExtension=tolist(env_setting))
617 parser.set_defaults(doctestExtension=tolist(env_setting))
618
618
619
619
620 def configure(self, options, config):
620 def configure(self, options, config):
621 Plugin.configure(self, options, config)
621 Plugin.configure(self, options, config)
622 # Pull standard doctest plugin out of config; we will do doctesting
622 # Pull standard doctest plugin out of config; we will do doctesting
623 config.plugins.plugins = [p for p in config.plugins.plugins
623 config.plugins.plugins = [p for p in config.plugins.plugins
624 if p.name != 'doctest']
624 if p.name != 'doctest']
625 self.doctest_tests = options.doctest_tests
625 self.doctest_tests = options.doctest_tests
626 self.extension = tolist(options.doctestExtension)
626 self.extension = tolist(options.doctestExtension)
627
627
628 self.parser = doctest.DocTestParser()
628 self.parser = doctest.DocTestParser()
629 self.finder = DocTestFinder()
629 self.finder = DocTestFinder()
630 self.checker = IPDoctestOutputChecker()
630 self.checker = IPDoctestOutputChecker()
631 self.globs = None
631 self.globs = None
632 self.extraglobs = None
632 self.extraglobs = None
633
633
634
634
635 def loadTestsFromExtensionModule(self,filename):
635 def loadTestsFromExtensionModule(self,filename):
636 bpath,mod = os.path.split(filename)
636 bpath,mod = os.path.split(filename)
637 modname = os.path.splitext(mod)[0]
637 modname = os.path.splitext(mod)[0]
638 try:
638 try:
639 sys.path.append(bpath)
639 sys.path.append(bpath)
640 module = __import__(modname)
640 module = __import__(modname)
641 tests = list(self.loadTestsFromModule(module))
641 tests = list(self.loadTestsFromModule(module))
642 finally:
642 finally:
643 sys.path.pop()
643 sys.path.pop()
644 return tests
644 return tests
645
645
646 # NOTE: the method below is almost a copy of the original one in nose, with
646 # NOTE: the method below is almost a copy of the original one in nose, with
647 # a few modifications to control output checking.
647 # a few modifications to control output checking.
648
648
649 def loadTestsFromModule(self, module):
649 def loadTestsFromModule(self, module):
650 #print '*** ipdoctest - lTM',module # dbg
650 #print '*** ipdoctest - lTM',module # dbg
651
651
652 if not self.matches(module.__name__):
652 if not self.matches(module.__name__):
653 log.debug("Doctest doesn't want module %s", module)
653 log.debug("Doctest doesn't want module %s", module)
654 return
654 return
655
655
656 tests = self.finder.find(module,globs=self.globs,
656 tests = self.finder.find(module,globs=self.globs,
657 extraglobs=self.extraglobs)
657 extraglobs=self.extraglobs)
658 if not tests:
658 if not tests:
659 return
659 return
660
660
661 # always use whitespace and ellipsis options
661 # always use whitespace and ellipsis options
662 optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
662 optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
663
663
664 tests.sort()
664 tests.sort()
665 module_file = module.__file__
665 module_file = module.__file__
666 if module_file[-4:] in ('.pyc', '.pyo'):
666 if module_file[-4:] in ('.pyc', '.pyo'):
667 module_file = module_file[:-1]
667 module_file = module_file[:-1]
668 for test in tests:
668 for test in tests:
669 if not test.examples:
669 if not test.examples:
670 continue
670 continue
671 if not test.filename:
671 if not test.filename:
672 test.filename = module_file
672 test.filename = module_file
673
673
674 yield DocTestCase(test,
674 yield DocTestCase(test,
675 optionflags=optionflags,
675 optionflags=optionflags,
676 checker=self.checker)
676 checker=self.checker)
677
677
678
678
679 def loadTestsFromFile(self, filename):
679 def loadTestsFromFile(self, filename):
680 #print "ipdoctest - from file", filename # dbg
680 #print "ipdoctest - from file", filename # dbg
681 if is_extension_module(filename):
681 if is_extension_module(filename):
682 for t in self.loadTestsFromExtensionModule(filename):
682 for t in self.loadTestsFromExtensionModule(filename):
683 yield t
683 yield t
684 else:
684 else:
685 if self.extension and anyp(filename.endswith, self.extension):
685 if self.extension and anyp(filename.endswith, self.extension):
686 name = os.path.basename(filename)
686 name = os.path.basename(filename)
687 dh = open(filename)
687 dh = open(filename)
688 try:
688 try:
689 doc = dh.read()
689 doc = dh.read()
690 finally:
690 finally:
691 dh.close()
691 dh.close()
692 test = self.parser.get_doctest(
692 test = self.parser.get_doctest(
693 doc, globs={'__file__': filename}, name=name,
693 doc, globs={'__file__': filename}, name=name,
694 filename=filename, lineno=0)
694 filename=filename, lineno=0)
695 if test.examples:
695 if test.examples:
696 #print 'FileCase:',test.examples # dbg
696 #print 'FileCase:',test.examples # dbg
697 yield DocFileCase(test)
697 yield DocFileCase(test)
698 else:
698 else:
699 yield False # no tests to load
699 yield False # no tests to load
700
700
701
701
702 class IPythonDoctest(ExtensionDoctest):
702 class IPythonDoctest(ExtensionDoctest):
703 """Nose Plugin that supports IPython-syntax doctests, including in extension modules.
703 """Nose Plugin that supports IPython-syntax doctests, including in extension modules.
704 """
704 """
705 name = 'ipdoctest' # call nosetests with --with-ipdoctest
705 name = 'ipdoctest' # call nosetests with --with-ipdoctest
706 enabled = True
706 enabled = True
707
707
708 def makeTest(self, obj, parent):
708 def makeTest(self, obj, parent):
709 """Look for doctests in the given object, which will be a
709 """Look for doctests in the given object, which will be a
710 function, method or class.
710 function, method or class.
711 """
711 """
712 #print 'Plugin analyzing:', obj, parent # dbg
712 #print 'Plugin analyzing:', obj, parent # dbg
713 # always use whitespace and ellipsis options
713 # always use whitespace and ellipsis options
714 optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
714 optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
715
715
716 doctests = self.finder.find(obj, module=getmodule(parent))
716 doctests = self.finder.find(obj, module=getmodule(parent))
717 if doctests:
717 if doctests:
718 for test in doctests:
718 for test in doctests:
719 if len(test.examples) == 0:
719 if len(test.examples) == 0:
720 continue
720 continue
721
721
722 yield DocTestCase(test, obj=obj,
722 yield DocTestCase(test, obj=obj,
723 optionflags=optionflags,
723 optionflags=optionflags,
724 checker=self.checker)
724 checker=self.checker)
725
725
726 def options(self, parser, env=os.environ):
726 def options(self, parser, env=os.environ):
727 #print "Options for nose plugin:", self.name # dbg
727 #print "Options for nose plugin:", self.name # dbg
728 Plugin.options(self, parser, env)
728 Plugin.options(self, parser, env)
729 parser.add_option('--ipdoctest-tests', action='store_true',
729 parser.add_option('--ipdoctest-tests', action='store_true',
730 dest='ipdoctest_tests',
730 dest='ipdoctest_tests',
731 default=env.get('NOSE_IPDOCTEST_TESTS',True),
731 default=env.get('NOSE_IPDOCTEST_TESTS',True),
732 help="Also look for doctests in test modules. "
732 help="Also look for doctests in test modules. "
733 "Note that classes, methods and functions should "
733 "Note that classes, methods and functions should "
734 "have either doctests or non-doctest tests, "
734 "have either doctests or non-doctest tests, "
735 "not both. [NOSE_IPDOCTEST_TESTS]")
735 "not both. [NOSE_IPDOCTEST_TESTS]")
736 parser.add_option('--ipdoctest-extension', action="append",
736 parser.add_option('--ipdoctest-extension', action="append",
737 dest="ipdoctest_extension",
737 dest="ipdoctest_extension",
738 help="Also look for doctests in files with "
738 help="Also look for doctests in files with "
739 "this extension [NOSE_IPDOCTEST_EXTENSION]")
739 "this extension [NOSE_IPDOCTEST_EXTENSION]")
740 # Set the default as a list, if given in env; otherwise
740 # Set the default as a list, if given in env; otherwise
741 # an additional value set on the command line will cause
741 # an additional value set on the command line will cause
742 # an error.
742 # an error.
743 env_setting = env.get('NOSE_IPDOCTEST_EXTENSION')
743 env_setting = env.get('NOSE_IPDOCTEST_EXTENSION')
744 if env_setting is not None:
744 if env_setting is not None:
745 parser.set_defaults(ipdoctest_extension=tolist(env_setting))
745 parser.set_defaults(ipdoctest_extension=tolist(env_setting))
746
746
747 def configure(self, options, config):
747 def configure(self, options, config):
748 #print "Configuring nose plugin:", self.name # dbg
748 #print "Configuring nose plugin:", self.name # dbg
749 Plugin.configure(self, options, config)
749 Plugin.configure(self, options, config)
750 # Pull standard doctest plugin out of config; we will do doctesting
750 # Pull standard doctest plugin out of config; we will do doctesting
751 config.plugins.plugins = [p for p in config.plugins.plugins
751 config.plugins.plugins = [p for p in config.plugins.plugins
752 if p.name != 'doctest']
752 if p.name != 'doctest']
753 self.doctest_tests = options.ipdoctest_tests
753 self.doctest_tests = options.ipdoctest_tests
754 self.extension = tolist(options.ipdoctest_extension)
754 self.extension = tolist(options.ipdoctest_extension)
755
755
756 self.parser = IPDocTestParser()
756 self.parser = IPDocTestParser()
757 self.finder = DocTestFinder(parser=self.parser)
757 self.finder = DocTestFinder(parser=self.parser)
758 self.checker = IPDoctestOutputChecker()
758 self.checker = IPDoctestOutputChecker()
759 self.globs = None
759 self.globs = None
760 self.extraglobs = None
760 self.extraglobs = None
@@ -1,169 +1,169 b''
1 """Tests for the decorators we've created for IPython.
1 """Tests for the decorators we've created for IPython.
2 """
2 """
3 from __future__ import print_function
3 from __future__ import print_function
4
4
5 # Module imports
5 # Module imports
6 # Std lib
6 # Std lib
7 import inspect
7 import inspect
8 import sys
8 import sys
9
9
10 # Third party
10 # Third party
11 import nose.tools as nt
11 import nose.tools as nt
12
12
13 # Our own
13 # Our own
14 from IPython.testing import decorators as dec
14 from IPython.testing import decorators as dec
15 from IPython.testing.skipdoctest import skip_doctest
15 from IPython.testing.skipdoctest import skip_doctest
16
16
17 #-----------------------------------------------------------------------------
17 #-----------------------------------------------------------------------------
18 # Utilities
18 # Utilities
19
19
20 # Note: copied from OInspect, kept here so the testing stuff doesn't create
20 # Note: copied from OInspect, kept here so the testing stuff doesn't create
21 # circular dependencies and is easier to reuse.
21 # circular dependencies and is easier to reuse.
22 def getargspec(obj):
22 def getargspec(obj):
23 """Get the names and default values of a function's arguments.
23 """Get the names and default values of a function's arguments.
24
24
25 A tuple of four things is returned: (args, varargs, varkw, defaults).
25 A tuple of four things is returned: (args, varargs, varkw, defaults).
26 'args' is a list of the argument names (it may contain nested lists).
26 'args' is a list of the argument names (it may contain nested lists).
27 'varargs' and 'varkw' are the names of the * and ** arguments or None.
27 'varargs' and 'varkw' are the names of the * and ** arguments or None.
28 'defaults' is an n-tuple of the default values of the last n arguments.
28 'defaults' is an n-tuple of the default values of the last n arguments.
29
29
30 Modified version of inspect.getargspec from the Python Standard
30 Modified version of inspect.getargspec from the Python Standard
31 Library."""
31 Library."""
32
32
33 if inspect.isfunction(obj):
33 if inspect.isfunction(obj):
34 func_obj = obj
34 func_obj = obj
35 elif inspect.ismethod(obj):
35 elif inspect.ismethod(obj):
36 func_obj = obj.im_func
36 func_obj = obj.im_func
37 else:
37 else:
38 raise TypeError('arg is not a Python function')
38 raise TypeError('arg is not a Python function')
39 args, varargs, varkw = inspect.getargs(func_obj.func_code)
39 args, varargs, varkw = inspect.getargs(func_obj.__code__)
40 return args, varargs, varkw, func_obj.func_defaults
40 return args, varargs, varkw, func_obj.__defaults__
41
41
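A quick sketch of what the helper above returns (assuming the `getargspec` defined here is in scope; `sample` is invented for illustration). The tuple shape matches the hard-coded expectation in `test_skip_dt_decorator2` below.

```python
def sample(x, y=1, **k):
    pass

print(getargspec(sample))  # (['x', 'y'], None, 'k', (1,))
```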
42 #-----------------------------------------------------------------------------
42 #-----------------------------------------------------------------------------
43 # Testing functions
43 # Testing functions
44
44
45 @dec.as_unittest
45 @dec.as_unittest
46 def trivial():
46 def trivial():
47 """A trivial test"""
47 """A trivial test"""
48 pass
48 pass
49
49
50
50
51 @dec.skip
51 @dec.skip
52 def test_deliberately_broken():
52 def test_deliberately_broken():
53 """A deliberately broken test - we want to skip this one."""
53 """A deliberately broken test - we want to skip this one."""
54 1/0
54 1/0
55
55
56 @dec.skip('Testing the skip decorator')
56 @dec.skip('Testing the skip decorator')
57 def test_deliberately_broken2():
57 def test_deliberately_broken2():
58 """Another deliberately broken test - we want to skip this one."""
58 """Another deliberately broken test - we want to skip this one."""
59 1/0
59 1/0
60
60
61
61
62 # Verify that we can correctly skip the doctest for a function at will, but
62 # Verify that we can correctly skip the doctest for a function at will, but
63 # that the docstring itself is NOT destroyed by the decorator.
63 # that the docstring itself is NOT destroyed by the decorator.
64 @skip_doctest
64 @skip_doctest
65 def doctest_bad(x,y=1,**k):
65 def doctest_bad(x,y=1,**k):
66 """A function whose doctest we need to skip.
66 """A function whose doctest we need to skip.
67
67
68 >>> 1+1
68 >>> 1+1
69 3
69 3
70 """
70 """
71 print('x:',x)
71 print('x:',x)
72 print('y:',y)
72 print('y:',y)
73 print('k:',k)
73 print('k:',k)
74
74
75
75
76 def call_doctest_bad():
76 def call_doctest_bad():
77 """Check that we can still call the decorated functions.
77 """Check that we can still call the decorated functions.
78
78
79 >>> doctest_bad(3,y=4)
79 >>> doctest_bad(3,y=4)
80 x: 3
80 x: 3
81 y: 4
81 y: 4
82 k: {}
82 k: {}
83 """
83 """
84 pass
84 pass
85
85
86
86
87 def test_skip_dt_decorator():
87 def test_skip_dt_decorator():
88 """Doctest-skipping decorator should preserve the docstring.
88 """Doctest-skipping decorator should preserve the docstring.
89 """
89 """
90 # Careful: 'check' must be a *verbatim* copy of the doctest_bad docstring!
90 # Careful: 'check' must be a *verbatim* copy of the doctest_bad docstring!
91 check = """A function whose doctest we need to skip.
91 check = """A function whose doctest we need to skip.
92
92
93 >>> 1+1
93 >>> 1+1
94 3
94 3
95 """
95 """
96 # Fetch the docstring from doctest_bad after decoration.
96 # Fetch the docstring from doctest_bad after decoration.
97 val = doctest_bad.__doc__
97 val = doctest_bad.__doc__
98
98
99 nt.assert_equal(check,val,"doctest_bad docstrings don't match")
99 nt.assert_equal(check,val,"doctest_bad docstrings don't match")
100
100
101
101
102 # Doctest skipping should work for class methods too
102 # Doctest skipping should work for class methods too
103 class FooClass(object):
103 class FooClass(object):
104 """FooClass
104 """FooClass
105
105
106 Example:
106 Example:
107
107
108 >>> 1+1
108 >>> 1+1
109 2
109 2
110 """
110 """
111
111
112 @skip_doctest
112 @skip_doctest
113 def __init__(self,x):
113 def __init__(self,x):
114 """Make a FooClass.
114 """Make a FooClass.
115
115
116 Example:
116 Example:
117
117
118 >>> f = FooClass(3)
118 >>> f = FooClass(3)
119 junk
119 junk
120 """
120 """
121 print('Making a FooClass.')
121 print('Making a FooClass.')
122 self.x = x
122 self.x = x
123
123
124 @skip_doctest
124 @skip_doctest
125 def bar(self,y):
125 def bar(self,y):
126 """Example:
126 """Example:
127
127
128 >>> ff = FooClass(3)
128 >>> ff = FooClass(3)
129 >>> ff.bar(0)
129 >>> ff.bar(0)
130 boom!
130 boom!
131 >>> 1/0
131 >>> 1/0
132 bam!
132 bam!
133 """
133 """
134 return 1/y
134 return 1/y
135
135
136 def baz(self,y):
136 def baz(self,y):
137 """Example:
137 """Example:
138
138
139 >>> ff2 = FooClass(3)
139 >>> ff2 = FooClass(3)
140 Making a FooClass.
140 Making a FooClass.
141 >>> ff2.baz(3)
141 >>> ff2.baz(3)
142 True
142 True
143 """
143 """
144 return self.x==y
144 return self.x==y
145
145
146
146
147 def test_skip_dt_decorator2():
147 def test_skip_dt_decorator2():
148 """Doctest-skipping decorator should preserve function signature.
148 """Doctest-skipping decorator should preserve function signature.
149 """
149 """
150 # Hardcoded correct answer
150 # Hardcoded correct answer
151 dtargs = (['x', 'y'], None, 'k', (1,))
151 dtargs = (['x', 'y'], None, 'k', (1,))
152 # Introspect out the value
152 # Introspect out the value
153 dtargsr = getargspec(doctest_bad)
153 dtargsr = getargspec(doctest_bad)
154 assert dtargsr==dtargs, \
154 assert dtargsr==dtargs, \
155 "Incorrectly reconstructed args for doctest_bad: %s" % (dtargsr,)
155 "Incorrectly reconstructed args for doctest_bad: %s" % (dtargsr,)
156
156
157
157
158 @dec.skip_linux
158 @dec.skip_linux
159 def test_linux():
159 def test_linux():
160 nt.assert_false(sys.platform.startswith('linux'),"This test can't run under linux")
160 nt.assert_false(sys.platform.startswith('linux'),"This test can't run under linux")
161
161
162 @dec.skip_win32
162 @dec.skip_win32
163 def test_win32():
163 def test_win32():
164 nt.assert_not_equal(sys.platform,'win32',"This test can't run under windows")
164 nt.assert_not_equal(sys.platform,'win32',"This test can't run under windows")
165
165
166 @dec.skip_osx
166 @dec.skip_osx
167 def test_osx():
167 def test_osx():
168 nt.assert_not_equal(sys.platform,'darwin',"This test can't run under osx")
168 nt.assert_not_equal(sys.platform,'darwin',"This test can't run under osx")
169
169
@@ -1,352 +1,352 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """Pickle related utilities. Perhaps this should be called 'can'."""
3 """Pickle related utilities. Perhaps this should be called 'can'."""
4
4
5 __docformat__ = "restructuredtext en"
5 __docformat__ = "restructuredtext en"
6
6
7 #-------------------------------------------------------------------------------
7 #-------------------------------------------------------------------------------
8 # Copyright (C) 2008-2011 The IPython Development Team
8 # Copyright (C) 2008-2011 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-------------------------------------------------------------------------------
12 #-------------------------------------------------------------------------------
13
13
14 #-------------------------------------------------------------------------------
14 #-------------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-------------------------------------------------------------------------------
16 #-------------------------------------------------------------------------------
17
17
18 import copy
18 import copy
19 import logging
19 import logging
20 import sys
20 import sys
21 from types import FunctionType
21 from types import FunctionType
22
22
23 try:
23 try:
24 import cPickle as pickle
24 import cPickle as pickle
25 except ImportError:
25 except ImportError:
26 import pickle
26 import pickle
27
27
28 from . import codeutil # This registers a hook when it's imported
28 from . import codeutil # This registers a hook when it's imported
29 from . import py3compat
29 from . import py3compat
30 from .importstring import import_item
30 from .importstring import import_item
31 from .py3compat import string_types, iteritems
31 from .py3compat import string_types, iteritems
32
32
33 from IPython.config import Application
33 from IPython.config import Application
34
34
35 if py3compat.PY3:
35 if py3compat.PY3:
36 buffer = memoryview
36 buffer = memoryview
37 class_type = type
37 class_type = type
38 else:
38 else:
39 from types import ClassType
39 from types import ClassType
40 class_type = (type, ClassType)
40 class_type = (type, ClassType)
41
41
42 #-------------------------------------------------------------------------------
42 #-------------------------------------------------------------------------------
43 # Classes
43 # Classes
44 #-------------------------------------------------------------------------------
44 #-------------------------------------------------------------------------------
45
45
46
46
47 class CannedObject(object):
47 class CannedObject(object):
48 def __init__(self, obj, keys=[], hook=None):
48 def __init__(self, obj, keys=[], hook=None):
49 """can an object for safe pickling
49 """can an object for safe pickling
50
50
51 Parameters
51 Parameters
52 ==========
52 ==========
53
53
54 obj:
54 obj:
55 The object to be canned
55 The object to be canned
56 keys: list (optional)
56 keys: list (optional)
57 list of attribute names that will be explicitly canned / uncanned
57 list of attribute names that will be explicitly canned / uncanned
58 hook: callable (optional)
58 hook: callable (optional)
59 An optional extra callable,
59 An optional extra callable,
60 which can do additional processing of the uncanned object.
60 which can do additional processing of the uncanned object.
61
61
62 large data may be offloaded into the buffers list,
62 large data may be offloaded into the buffers list,
63 used for zero-copy transfers.
63 used for zero-copy transfers.
64 """
64 """
65 self.keys = keys
65 self.keys = keys
66 self.obj = copy.copy(obj)
66 self.obj = copy.copy(obj)
67 self.hook = can(hook)
67 self.hook = can(hook)
68 for key in keys:
68 for key in keys:
69 setattr(self.obj, key, can(getattr(obj, key)))
69 setattr(self.obj, key, can(getattr(obj, key)))
70
70
71 self.buffers = []
71 self.buffers = []
72
72
73 def get_object(self, g=None):
73 def get_object(self, g=None):
74 if g is None:
74 if g is None:
75 g = {}
75 g = {}
76 obj = self.obj
76 obj = self.obj
77 for key in self.keys:
77 for key in self.keys:
78 setattr(obj, key, uncan(getattr(obj, key), g))
78 setattr(obj, key, uncan(getattr(obj, key), g))
79
79
80 if self.hook:
80 if self.hook:
81 self.hook = uncan(self.hook, g)
81 self.hook = uncan(self.hook, g)
82 self.hook(obj, g)
82 self.hook(obj, g)
83 return self.obj
83 return self.obj
84
84
85
85
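To make the docstring above concrete, here is a minimal usage sketch; the Job class and the import path IPython.utils.pickleutil are assumptions for illustration, not part of this diff. Attributes named in keys are canned onto a shallow copy and restored by get_object(), which uncan() reaches through the uncan_map entry defined further down.

    # Sketch only -- Job and the import path are assumed for illustration.
    from IPython.utils.pickleutil import CannedObject, uncan

    class Job(object):
        def __init__(self, transform):
            self.transform = transform    # a plain lambda defeats vanilla pickle

    job = Job(lambda x: x + 1)
    canned = CannedObject(job, keys=['transform'])   # 'transform' canned explicitly
    restored = uncan(canned)                         # dispatches to get_object()
    print(restored.transform(1))                     # -> 2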
86 class Reference(CannedObject):
86 class Reference(CannedObject):
87 """object for wrapping a remote reference by name."""
87 """object for wrapping a remote reference by name."""
88 def __init__(self, name):
88 def __init__(self, name):
89 if not isinstance(name, string_types):
89 if not isinstance(name, string_types):
90 raise TypeError("illegal name: %r"%name)
90 raise TypeError("illegal name: %r"%name)
91 self.name = name
91 self.name = name
92 self.buffers = []
92 self.buffers = []
93
93
94 def __repr__(self):
94 def __repr__(self):
95 return "<Reference: %r>"%self.name
95 return "<Reference: %r>"%self.name
96
96
97 def get_object(self, g=None):
97 def get_object(self, g=None):
98 if g is None:
98 if g is None:
99 g = {}
99 g = {}
100
100
101 return eval(self.name, g)
101 return eval(self.name, g)
102
102
103
103
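A quick sketch of how a Reference behaves (import path assumed): only the name travels; the value is looked up in whatever globals uncan() is handed, typically an engine's user namespace.

    from IPython.utils.pickleutil import Reference, uncan

    ns = {'data': [1, 2, 3]}
    ref = Reference('data')
    print(uncan(ref, g=ns))    # -> [1, 2, 3]; resolved by name, never pickled by value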
104 class CannedFunction(CannedObject):
104 class CannedFunction(CannedObject):
105
105
106 def __init__(self, f):
106 def __init__(self, f):
107 self._check_type(f)
107 self._check_type(f)
108 self.code = f.func_code
108 self.code = f.__code__
109 if f.func_defaults:
109 if f.__defaults__:
110 self.defaults = [ can(fd) for fd in f.func_defaults ]
110 self.defaults = [ can(fd) for fd in f.__defaults__ ]
111 else:
111 else:
112 self.defaults = None
112 self.defaults = None
113 self.module = f.__module__ or '__main__'
113 self.module = f.__module__ or '__main__'
114 self.__name__ = f.__name__
114 self.__name__ = f.__name__
115 self.buffers = []
115 self.buffers = []
116
116
117 def _check_type(self, obj):
117 def _check_type(self, obj):
118 assert isinstance(obj, FunctionType), "Not a function type"
118 assert isinstance(obj, FunctionType), "Not a function type"
119
119
120 def get_object(self, g=None):
120 def get_object(self, g=None):
121 # try to load function back into its module:
121 # try to load function back into its module:
122 if not self.module.startswith('__'):
122 if not self.module.startswith('__'):
123 __import__(self.module)
123 __import__(self.module)
124 g = sys.modules[self.module].__dict__
124 g = sys.modules[self.module].__dict__
125
125
126 if g is None:
126 if g is None:
127 g = {}
127 g = {}
128 if self.defaults:
128 if self.defaults:
129 defaults = tuple(uncan(cfd, g) for cfd in self.defaults)
129 defaults = tuple(uncan(cfd, g) for cfd in self.defaults)
130 else:
130 else:
131 defaults = None
131 defaults = None
132 newFunc = FunctionType(self.code, g, self.__name__, defaults)
132 newFunc = FunctionType(self.code, g, self.__name__, defaults)
133 return newFunc
133 return newFunc
134
134
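The round trip this class enables looks roughly like the sketch below (import path assumed). Interactively defined functions cannot normally be pickled, but their code object becomes picklable through the codeutil hook imported at the top of the module, and the defaults are canned recursively.

    import pickle
    from IPython.utils.pickleutil import can, uncan

    def scale(x, factor=2):        # e.g. defined interactively in __main__
        return x * factor

    wire = pickle.dumps(can(scale))        # can() wraps it in a CannedFunction
    restored = uncan(pickle.loads(wire))   # rebuilt with FunctionType(...)
    print(restored(3))                     # -> 6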
135 class CannedClass(CannedObject):
135 class CannedClass(CannedObject):
136
136
137 def __init__(self, cls):
137 def __init__(self, cls):
138 self._check_type(cls)
138 self._check_type(cls)
139 self.name = cls.__name__
139 self.name = cls.__name__
140 self.old_style = not isinstance(cls, type)
140 self.old_style = not isinstance(cls, type)
141 self._canned_dict = {}
141 self._canned_dict = {}
142 for k,v in cls.__dict__.items():
142 for k,v in cls.__dict__.items():
143 if k not in ('__weakref__', '__dict__'):
143 if k not in ('__weakref__', '__dict__'):
144 self._canned_dict[k] = can(v)
144 self._canned_dict[k] = can(v)
145 if self.old_style:
145 if self.old_style:
146 mro = []
146 mro = []
147 else:
147 else:
148 mro = cls.mro()
148 mro = cls.mro()
149
149
150 self.parents = [ can(c) for c in mro[1:] ]
150 self.parents = [ can(c) for c in mro[1:] ]
151 self.buffers = []
151 self.buffers = []
152
152
153 def _check_type(self, obj):
153 def _check_type(self, obj):
154 assert isinstance(obj, class_type), "Not a class type"
154 assert isinstance(obj, class_type), "Not a class type"
155
155
156 def get_object(self, g=None):
156 def get_object(self, g=None):
157 parents = tuple(uncan(p, g) for p in self.parents)
157 parents = tuple(uncan(p, g) for p in self.parents)
158 return type(self.name, parents, uncan_dict(self._canned_dict, g=g))
158 return type(self.name, parents, uncan_dict(self._canned_dict, g=g))
159
159
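A hedged sketch of the same idea for classes (import path assumed): classes defined in __main__ cannot be pickled by reference, so they are rebuilt from their canned __dict__ and parents.

    from IPython.utils.pickleutil import can_class, uncan

    class Point(object):               # pretend this was typed at the prompt
        def total(self):
            return self.x + self.y

    NewPoint = uncan(can_class(Point))   # only __main__ classes are canned
    p = NewPoint()
    p.x, p.y = 3, 4
    print(p.total())                     # -> 7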
160 class CannedArray(CannedObject):
160 class CannedArray(CannedObject):
161 def __init__(self, obj):
161 def __init__(self, obj):
162 from numpy import ascontiguousarray
162 from numpy import ascontiguousarray
163 self.shape = obj.shape
163 self.shape = obj.shape
164 self.dtype = obj.dtype.descr if obj.dtype.fields else obj.dtype.str
164 self.dtype = obj.dtype.descr if obj.dtype.fields else obj.dtype.str
165 if sum(obj.shape) == 0:
165 if sum(obj.shape) == 0:
166 # just pickle it
166 # just pickle it
167 self.buffers = [pickle.dumps(obj, -1)]
167 self.buffers = [pickle.dumps(obj, -1)]
168 else:
168 else:
169 # ensure contiguous
169 # ensure contiguous
170 obj = ascontiguousarray(obj, dtype=None)
170 obj = ascontiguousarray(obj, dtype=None)
171 self.buffers = [buffer(obj)]
171 self.buffers = [buffer(obj)]
172
172
173 def get_object(self, g=None):
173 def get_object(self, g=None):
174 from numpy import frombuffer
174 from numpy import frombuffer
175 data = self.buffers[0]
175 data = self.buffers[0]
176 if sum(self.shape) == 0:
176 if sum(self.shape) == 0:
177 # no shape, we just pickled it
177 # no shape, we just pickled it
178 return pickle.loads(data)
178 return pickle.loads(data)
179 else:
179 else:
180 return frombuffer(data, dtype=self.dtype).reshape(self.shape)
180 return frombuffer(data, dtype=self.dtype).reshape(self.shape)
181
181
182
182
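A short round-trip sketch (requires numpy; import path assumed): the array's raw memory goes into buffers[0] and is reassembled from the stored shape and dtype without passing through pickle.

    import numpy as np
    from IPython.utils.pickleutil import CannedArray

    a = np.arange(6, dtype='float64').reshape(2, 3)
    b = CannedArray(a).get_object()
    print((a == b).all())              # -> True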
183 class CannedBytes(CannedObject):
183 class CannedBytes(CannedObject):
184 wrap = bytes
184 wrap = bytes
185 def __init__(self, obj):
185 def __init__(self, obj):
186 self.buffers = [obj]
186 self.buffers = [obj]
187
187
188 def get_object(self, g=None):
188 def get_object(self, g=None):
189 data = self.buffers[0]
189 data = self.buffers[0]
190 return self.wrap(data)
190 return self.wrap(data)
191
191
192 class CannedBuffer(CannedBytes):
192 class CannedBuffer(CannedBytes):
193 wrap = buffer
193 wrap = buffer
194
194
195 #-------------------------------------------------------------------------------
195 #-------------------------------------------------------------------------------
196 # Functions
196 # Functions
197 #-------------------------------------------------------------------------------
197 #-------------------------------------------------------------------------------
198
198
199 def _logger():
199 def _logger():
200 """get the logger for the current Application
200 """get the logger for the current Application
201
201
202 the root logger will be used if no Application is running
202 the root logger will be used if no Application is running
203 """
203 """
204 if Application.initialized():
204 if Application.initialized():
205 logger = Application.instance().log
205 logger = Application.instance().log
206 else:
206 else:
207 logger = logging.getLogger()
207 logger = logging.getLogger()
208 if not logger.handlers:
208 if not logger.handlers:
209 logging.basicConfig()
209 logging.basicConfig()
210
210
211 return logger
211 return logger
212
212
213 def _import_mapping(mapping, original=None):
213 def _import_mapping(mapping, original=None):
214 """import any string-keys in a type mapping
214 """import any string-keys in a type mapping
215
215
216 """
216 """
217 log = _logger()
217 log = _logger()
218 log.debug("Importing canning map")
218 log.debug("Importing canning map")
219 for key,value in mapping.items():
219 for key,value in mapping.items():
220 if isinstance(key, string_types):
220 if isinstance(key, string_types):
221 try:
221 try:
222 cls = import_item(key)
222 cls = import_item(key)
223 except Exception:
223 except Exception:
224 if original and key not in original:
224 if original and key not in original:
225 # only message on user-added classes
225 # only message on user-added classes
226 log.error("canning class not importable: %r", key, exc_info=True)
226 log.error("canning class not importable: %r", key, exc_info=True)
227 mapping.pop(key)
227 mapping.pop(key)
228 else:
228 else:
229 mapping[cls] = mapping.pop(key)
229 mapping[cls] = mapping.pop(key)
230
230
231 def istype(obj, check):
231 def istype(obj, check):
232 """like isinstance(obj, check), but strict
232 """like isinstance(obj, check), but strict
233
233
234 This won't catch subclasses.
234 This won't catch subclasses.
235 """
235 """
236 if isinstance(check, tuple):
236 if isinstance(check, tuple):
237 for cls in check:
237 for cls in check:
238 if type(obj) is cls:
238 if type(obj) is cls:
239 return True
239 return True
240 return False
240 return False
241 else:
241 else:
242 return type(obj) is check
242 return type(obj) is check
243
243
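A quick check of the strictness described in the docstring (import path assumed): a dict subclass passes isinstance but not istype, which is why can_dict() below leaves such objects untouched.

    from collections import OrderedDict
    from IPython.utils.pickleutil import istype

    d = OrderedDict(a=1)
    print(isinstance(d, dict), istype(d, dict))   # -> True False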
244 def can(obj):
244 def can(obj):
245 """prepare an object for pickling"""
245 """prepare an object for pickling"""
246
246
247 import_needed = False
247 import_needed = False
248
248
249 for cls,canner in iteritems(can_map):
249 for cls,canner in iteritems(can_map):
250 if isinstance(cls, string_types):
250 if isinstance(cls, string_types):
251 import_needed = True
251 import_needed = True
252 break
252 break
253 elif istype(obj, cls):
253 elif istype(obj, cls):
254 return canner(obj)
254 return canner(obj)
255
255
256 if import_needed:
256 if import_needed:
257 # perform can_map imports, then try again
257 # perform can_map imports, then try again
258 # this will usually only happen once
258 # this will usually only happen once
259 _import_mapping(can_map, _original_can_map)
259 _import_mapping(can_map, _original_can_map)
260 return can(obj)
260 return can(obj)
261
261
262 return obj
262 return obj
263
263
264 def can_class(obj):
264 def can_class(obj):
265 if isinstance(obj, class_type) and obj.__module__ == '__main__':
265 if isinstance(obj, class_type) and obj.__module__ == '__main__':
266 return CannedClass(obj)
266 return CannedClass(obj)
267 else:
267 else:
268 return obj
268 return obj
269
269
270 def can_dict(obj):
270 def can_dict(obj):
271 """can the *values* of a dict"""
271 """can the *values* of a dict"""
272 if istype(obj, dict):
272 if istype(obj, dict):
273 newobj = {}
273 newobj = {}
274 for k, v in iteritems(obj):
274 for k, v in iteritems(obj):
275 newobj[k] = can(v)
275 newobj[k] = can(v)
276 return newobj
276 return newobj
277 else:
277 else:
278 return obj
278 return obj
279
279
280 sequence_types = (list, tuple, set)
280 sequence_types = (list, tuple, set)
281
281
282 def can_sequence(obj):
282 def can_sequence(obj):
283 """can the elements of a sequence"""
283 """can the elements of a sequence"""
284 if istype(obj, sequence_types):
284 if istype(obj, sequence_types):
285 t = type(obj)
285 t = type(obj)
286 return t([can(i) for i in obj])
286 return t([can(i) for i in obj])
287 else:
287 else:
288 return obj
288 return obj
289
289
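As a small illustration (import path assumed), containers are canned element by element, but only for the exact builtin list/tuple/set types checked by istype(); values that need no canning pass through unchanged.

    from IPython.utils.pickleutil import can_sequence, CannedFunction

    def f(x):
        return x

    canned = can_sequence([f, 'text', 42])
    print(isinstance(canned[0], CannedFunction))   # -> True; 'text' and 42 unchanged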
290 def uncan(obj, g=None):
290 def uncan(obj, g=None):
291 """invert canning"""
291 """invert canning"""
292
292
293 import_needed = False
293 import_needed = False
294 for cls,uncanner in iteritems(uncan_map):
294 for cls,uncanner in iteritems(uncan_map):
295 if isinstance(cls, string_types):
295 if isinstance(cls, string_types):
296 import_needed = True
296 import_needed = True
297 break
297 break
298 elif isinstance(obj, cls):
298 elif isinstance(obj, cls):
299 return uncanner(obj, g)
299 return uncanner(obj, g)
300
300
301 if import_needed:
301 if import_needed:
302 # perform uncan_map imports, then try again
302 # perform uncan_map imports, then try again
303 # this will usually only happen once
303 # this will usually only happen once
304 _import_mapping(uncan_map, _original_uncan_map)
304 _import_mapping(uncan_map, _original_uncan_map)
305 return uncan(obj, g)
305 return uncan(obj, g)
306
306
307 return obj
307 return obj
308
308
309 def uncan_dict(obj, g=None):
309 def uncan_dict(obj, g=None):
310 if istype(obj, dict):
310 if istype(obj, dict):
311 newobj = {}
311 newobj = {}
312 for k, v in iteritems(obj):
312 for k, v in iteritems(obj):
313 newobj[k] = uncan(v,g)
313 newobj[k] = uncan(v,g)
314 return newobj
314 return newobj
315 else:
315 else:
316 return obj
316 return obj
317
317
318 def uncan_sequence(obj, g=None):
318 def uncan_sequence(obj, g=None):
319 if istype(obj, sequence_types):
319 if istype(obj, sequence_types):
320 t = type(obj)
320 t = type(obj)
321 return t([uncan(i,g) for i in obj])
321 return t([uncan(i,g) for i in obj])
322 else:
322 else:
323 return obj
323 return obj
324
324
325 def _uncan_dependent_hook(dep, g=None):
325 def _uncan_dependent_hook(dep, g=None):
326 dep.check_dependency()
326 dep.check_dependency()
327
327
328 def can_dependent(obj):
328 def can_dependent(obj):
329 return CannedObject(obj, keys=('f', 'df'), hook=_uncan_dependent_hook)
329 return CannedObject(obj, keys=('f', 'df'), hook=_uncan_dependent_hook)
330
330
331 #-------------------------------------------------------------------------------
331 #-------------------------------------------------------------------------------
332 # API dictionaries
332 # API dictionaries
333 #-------------------------------------------------------------------------------
333 #-------------------------------------------------------------------------------
334
334
335 # These dicts can be extended for custom serialization of new objects
335 # These dicts can be extended for custom serialization of new objects
336
336
337 can_map = {
337 can_map = {
338 'IPython.parallel.dependent' : can_dependent,
338 'IPython.parallel.dependent' : can_dependent,
339 'numpy.ndarray' : CannedArray,
339 'numpy.ndarray' : CannedArray,
340 FunctionType : CannedFunction,
340 FunctionType : CannedFunction,
341 bytes : CannedBytes,
341 bytes : CannedBytes,
342 buffer : CannedBuffer,
342 buffer : CannedBuffer,
343 class_type : can_class,
343 class_type : can_class,
344 }
344 }
345
345
346 uncan_map = {
346 uncan_map = {
347 CannedObject : lambda obj, g: obj.get_object(g),
347 CannedObject : lambda obj, g: obj.get_object(g),
348 }
348 }
349
349
350 # for use in _import_mapping:
350 # for use in _import_mapping:
351 _original_can_map = can_map.copy()
351 _original_can_map = can_map.copy()
352 _original_uncan_map = uncan_map.copy()
352 _original_uncan_map = uncan_map.copy()
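These dictionaries are the extension point mentioned above. A hedged sketch of registering a custom canner follows; Connection and CannedConnection are hypothetical names used only for illustration, and the import path is assumed.

    from IPython.utils.pickleutil import CannedObject, can_map, can, uncan

    class Connection(object):              # stand-in for an unpicklable resource
        def __init__(self, url):
            self.url = url

    class CannedConnection(CannedObject):
        def __init__(self, conn):
            self.url = conn.url            # keep only what is needed to rebuild it
            self.buffers = []
        def get_object(self, g=None):
            return Connection(self.url)    # "reopen" on the receiving side

    can_map[Connection] = CannedConnection
    restored = uncan(can(Connection('tcp://127.0.0.1:5555')))
    print(restored.url)                    # -> tcp://127.0.0.1:5555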