Don't rely on `get_ipython` in builtins in library code
MinRK
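The change applied across these files: instead of relying on IPython having injected `get_ipython` into builtins (or on the deprecated `IPython.core.ipapi.get`), library code now imports it from the top-level `IPython` package and treats a `None` return as "not running under an interactive shell". A minimal sketch of that pattern, with a made-up helper (`shell_colors`) purely for illustration:

```python
# Minimal sketch of the pattern this changeset applies; the helper is hypothetical.
from IPython import get_ipython

def shell_colors(default='NoColor'):
    """Return the active IPython color scheme, or `default` outside IPython."""
    ip = get_ipython()   # returns None when no interactive shell is running
    if ip is None:
        return default
    return ip.colors
```

The same idea appears in the debugger hunk below, where the old `try: get_ipython()` / `except NameError:` guard becomes an explicit `if ip is None:` check.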
@@ -1,337 +1,337 b''
1 1 """Implementations for various useful completers.
2 2
3 3 These are all loaded by default by IPython.
4 4 """
5 5 #-----------------------------------------------------------------------------
6 6 # Copyright (C) 2010-2011 The IPython Development Team.
7 7 #
8 8 # Distributed under the terms of the BSD License.
9 9 #
10 10 # The full license is in the file COPYING.txt, distributed with this software.
11 11 #-----------------------------------------------------------------------------
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Imports
15 15 #-----------------------------------------------------------------------------
16 16 from __future__ import print_function
17 17
18 18 # Stdlib imports
19 19 import glob
20 20 import imp
21 21 import inspect
22 22 import os
23 23 import re
24 24 import sys
25 25
26 26 # Third-party imports
27 27 from time import time
28 28 from zipimport import zipimporter
29 29
30 30 # Our own imports
31 31 from IPython.core.completer import expand_user, compress_user
32 32 from IPython.core.error import TryNext
33 33 from IPython.utils._process_common import arg_split
34 34
35 35 # FIXME: this should be pulled in with the right call via the component system
36 from IPython.core.ipapi import get as get_ipython
36 from IPython import get_ipython
37 37
38 38 #-----------------------------------------------------------------------------
39 39 # Globals and constants
40 40 #-----------------------------------------------------------------------------
41 41
42 42 # Time in seconds after which the rootmodules will be stored permanently in the
43 43 # ipython ip.db database (kept in the user's .ipython dir).
44 44 TIMEOUT_STORAGE = 2
45 45
46 46 # Time in seconds after which we give up
47 47 TIMEOUT_GIVEUP = 20
48 48
49 49 # Regular expression for the python import statement
50 50 import_re = re.compile(r'(?P<name>[a-zA-Z_][a-zA-Z0-9_]*?)'
51 51 r'(?P<package>[/\\]__init__)?'
52 52 r'(?P<suffix>%s)$' %
53 53 r'|'.join(re.escape(s[0]) for s in imp.get_suffixes()))
54 54
55 55 # RE for the ipython %run command (python + ipython scripts)
56 56 magic_run_re = re.compile(r'.*(\.ipy|\.py[w]?)$')
57 57
58 58 #-----------------------------------------------------------------------------
59 59 # Local utilities
60 60 #-----------------------------------------------------------------------------
61 61
62 62 def module_list(path):
63 63 """
64 64 Return the list containing the names of the modules available in the given
65 65 folder.
66 66 """
67 67 # sys.path has the cwd as an empty string, but isdir/listdir need it as '.'
68 68 if path == '':
69 69 path = '.'
70 70
71 71 # A few local constants to be used in loops below
72 72 pjoin = os.path.join
73 73
74 74 if os.path.isdir(path):
75 75 # Build a list of all files in the directory and all files
76 76 # in its subdirectories. For performance reasons, do not
77 77 # recurse more than one level into subdirectories.
78 78 files = []
79 79 for root, dirs, nondirs in os.walk(path):
80 80 subdir = root[len(path)+1:]
81 81 if subdir:
82 82 files.extend(pjoin(subdir, f) for f in nondirs)
83 83 dirs[:] = [] # Do not recurse into additional subdirectories.
84 84 else:
85 85 files.extend(nondirs)
86 86
87 87 else:
88 88 try:
89 89 files = list(zipimporter(path)._files.keys())
90 90 except:
91 91 files = []
92 92
93 93 # Build a list of modules which match the import_re regex.
94 94 modules = []
95 95 for f in files:
96 96 m = import_re.match(f)
97 97 if m:
98 98 modules.append(m.group('name'))
99 99 return list(set(modules))
100 100
101 101
102 102 def get_root_modules():
103 103 """
104 104 Returns a list containing the names of all the modules available in the
105 105 folders of the pythonpath.
106 106
107 107 ip.db['rootmodules_cache'] maps sys.path entries to list of modules.
108 108 """
109 109 ip = get_ipython()
110 110 rootmodules_cache = ip.db.get('rootmodules_cache', {})
111 111 rootmodules = list(sys.builtin_module_names)
112 112 start_time = time()
113 113 store = False
114 114 for path in sys.path:
115 115 try:
116 116 modules = rootmodules_cache[path]
117 117 except KeyError:
118 118 modules = module_list(path)
119 119 try:
120 120 modules.remove('__init__')
121 121 except ValueError:
122 122 pass
123 123 if path not in ('', '.'): # cwd modules should not be cached
124 124 rootmodules_cache[path] = modules
125 125 if time() - start_time > TIMEOUT_STORAGE and not store:
126 126 store = True
127 127 print("\nCaching the list of root modules, please wait!")
128 128 print("(This will only be done once - type '%rehashx' to "
129 129 "reset cache!)\n")
130 130 sys.stdout.flush()
131 131 if time() - start_time > TIMEOUT_GIVEUP:
132 132 print("This is taking too long, we give up.\n")
133 133 return []
134 134 rootmodules.extend(modules)
135 135 if store:
136 136 ip.db['rootmodules_cache'] = rootmodules_cache
137 137 rootmodules = list(set(rootmodules))
138 138 return rootmodules
139 139
140 140
141 141 def is_importable(module, attr, only_modules):
142 142 if only_modules:
143 143 return inspect.ismodule(getattr(module, attr))
144 144 else:
145 145 return not(attr[:2] == '__' and attr[-2:] == '__')
146 146
147 147
148 148 def try_import(mod, only_modules=False):
149 149 try:
150 150 m = __import__(mod)
151 151 except:
152 152 return []
153 153 mods = mod.split('.')
154 154 for module in mods[1:]:
155 155 m = getattr(m, module)
156 156
157 157 m_is_init = hasattr(m, '__file__') and '__init__' in m.__file__
158 158
159 159 completions = []
160 160 if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init:
161 161 completions.extend( [attr for attr in dir(m) if
162 162 is_importable(m, attr, only_modules)])
163 163
164 164 completions.extend(getattr(m, '__all__', []))
165 165 if m_is_init:
166 166 completions.extend(module_list(os.path.dirname(m.__file__)))
167 167 completions = set(completions)
168 168 if '__init__' in completions:
169 169 completions.remove('__init__')
170 170 return list(completions)
171 171
172 172
173 173 #-----------------------------------------------------------------------------
174 174 # Completion-related functions.
175 175 #-----------------------------------------------------------------------------
176 176
177 177 def quick_completer(cmd, completions):
178 178 """ Easily create a trivial completer for a command.
179 179
180 180 Takes either a list of completions, or all completions in a string (that will
181 181 be split on whitespace).
182 182
183 183 Example::
184 184
185 185 [d:\ipython]|1> import ipy_completers
186 186 [d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz'])
187 187 [d:\ipython]|3> foo b<TAB>
188 188 bar baz
189 189 [d:\ipython]|3> foo ba
190 190 """
191 191
192 192 if isinstance(completions, basestring):
193 193 completions = completions.split()
194 194
195 195 def do_complete(self, event):
196 196 return completions
197 197
198 198 get_ipython().set_hook('complete_command',do_complete, str_key = cmd)
199 199
200 200 def module_completion(line):
201 201 """
202 202 Returns a list containing the completion possibilities for an import line.
203 203
204 204 The line looks like this :
205 205 'import xml.d'
206 206 'from xml.dom import'
207 207 """
208 208
209 209 words = line.split(' ')
210 210 nwords = len(words)
211 211
212 212 # from whatever <tab> -> 'import '
213 213 if nwords == 3 and words[0] == 'from':
214 214 return ['import ']
215 215
216 216 # 'from xy<tab>' or 'import xy<tab>'
217 217 if nwords < 3 and (words[0] in ['import','from']) :
218 218 if nwords == 1:
219 219 return get_root_modules()
220 220 mod = words[1].split('.')
221 221 if len(mod) < 2:
222 222 return get_root_modules()
223 223 completion_list = try_import('.'.join(mod[:-1]), True)
224 224 return ['.'.join(mod[:-1] + [el]) for el in completion_list]
225 225
226 226 # 'from xyz import abc<tab>'
227 227 if nwords >= 3 and words[0] == 'from':
228 228 mod = words[1]
229 229 return try_import(mod)
230 230
231 231 #-----------------------------------------------------------------------------
232 232 # Completers
233 233 #-----------------------------------------------------------------------------
234 234 # These all have the func(self, event) signature to be used as custom
235 235 # completers
236 236
237 237 def module_completer(self,event):
238 238 """Give completions after user has typed 'import ...' or 'from ...'"""
239 239
240 240 # This works in all versions of python. While 2.5 has
241 241 # pkgutil.walk_packages(), that particular routine is fairly dangerous,
242 242 # since it imports *EVERYTHING* on sys.path. That is: a) very slow b) full
243 243 # of possibly problematic side effects.
244 244 # This searches the folders in sys.path for available modules.
245 245
246 246 return module_completion(event.line)
247 247
248 248 # FIXME: there's a lot of logic common to the run, cd and builtin file
249 249 # completers, that is currently reimplemented in each.
250 250
251 251 def magic_run_completer(self, event):
252 252 """Complete files that end in .py or .ipy for the %run command.
253 253 """
254 254 comps = arg_split(event.line, strict=False)
255 255 relpath = (len(comps) > 1 and comps[-1] or '').strip("'\"")
256 256
257 257 #print("\nev=", event) # dbg
258 258 #print("rp=", relpath) # dbg
259 259 #print('comps=', comps) # dbg
260 260
261 261 lglob = glob.glob
262 262 isdir = os.path.isdir
263 263 relpath, tilde_expand, tilde_val = expand_user(relpath)
264 264
265 265 dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*') if isdir(f)]
266 266
267 267 # Find if the user has already typed the first filename, after which we
268 268 # should complete on all files, since after the first one other files may
269 269 # be arguments to the input script.
270 270
271 271 if filter(magic_run_re.match, comps):
272 272 pys = [f.replace('\\','/') for f in lglob('*')]
273 273 else:
274 274 pys = [f.replace('\\','/')
275 275 for f in lglob(relpath+'*.py') + lglob(relpath+'*.ipy') +
276 276 lglob(relpath + '*.pyw')]
277 277 #print('run comp:', dirs+pys) # dbg
278 278 return [compress_user(p, tilde_expand, tilde_val) for p in dirs+pys]
279 279
280 280
281 281 def cd_completer(self, event):
282 282 """Completer function for cd, which only returns directories."""
283 283 ip = get_ipython()
284 284 relpath = event.symbol
285 285
286 286 #print(event) # dbg
287 287 if event.line.endswith('-b') or ' -b ' in event.line:
288 288 # return only bookmark completions
289 289 bkms = self.db.get('bookmarks', None)
290 290 if bkms:
291 291 return bkms.keys()
292 292 else:
293 293 return []
294 294
295 295 if event.symbol == '-':
296 296 width_dh = str(len(str(len(ip.user_ns['_dh']) + 1)))
297 297 # jump in directory history by number
298 298 fmt = '-%0' + width_dh +'d [%s]'
299 299 ents = [ fmt % (i,s) for i,s in enumerate(ip.user_ns['_dh'])]
300 300 if len(ents) > 1:
301 301 return ents
302 302 return []
303 303
304 304 if event.symbol.startswith('--'):
305 305 return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']]
306 306
307 307 # Expand ~ in path and normalize directory separators.
308 308 relpath, tilde_expand, tilde_val = expand_user(relpath)
309 309 relpath = relpath.replace('\\','/')
310 310
311 311 found = []
312 312 for d in [f.replace('\\','/') + '/' for f in glob.glob(relpath+'*')
313 313 if os.path.isdir(f)]:
314 314 if ' ' in d:
315 315 # we don't want to deal with any of that, complex code
316 316 # for this is elsewhere
317 317 raise TryNext
318 318
319 319 found.append(d)
320 320
321 321 if not found:
322 322 if os.path.isdir(relpath):
323 323 return [compress_user(relpath, tilde_expand, tilde_val)]
324 324
325 325 # if no completions so far, try bookmarks
326 326 bks = self.db.get('bookmarks',{}).iterkeys()
327 327 bkmatches = [s for s in bks if s.startswith(event.symbol)]
328 328 if bkmatches:
329 329 return bkmatches
330 330
331 331 raise TryNext
332 332
333 333 return [compress_user(p, tilde_expand, tilde_val) for p in found]
334 334
335 335 def reset_completer(self, event):
336 336 "A completer for %reset magic"
337 337 return '-f -s in out array dhist'.split()
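The hunk above (IPython's completer library) only changes where `get_ipython` comes from; the completers keep the `func(self, event)` signature and are still registered through `set_hook('complete_command', ...)`. For reference, a small usage sketch of `quick_completer`, to be run inside an IPython session; the import path is assumed and the `deploy` command with its completions is invented:

```python
from IPython.core.completerlib import quick_completer  # assumed module path

# After this, typing "deploy st<TAB>" at the IPython prompt offers "staging".
quick_completer('deploy', 'staging production rollback')
```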
@@ -1,566 +1,566 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 Pdb debugger class.
4 4
5 5 Modified from the standard pdb.Pdb class to avoid including readline, so that
6 6 the command line completion of other programs which include this isn't
7 7 damaged.
8 8
9 9 In the future, this class will be expanded with improvements over the standard
10 10 pdb.
11 11
12 12 The code in this file is mainly lifted out of cmd.py in Python 2.2, with minor
13 13 changes. Licensing should therefore be under the standard Python terms. For
14 14 details on the PSF (Python Software Foundation) standard license, see:
15 15
16 16 http://www.python.org/2.2.3/license.html"""
17 17
18 18 #*****************************************************************************
19 19 #
20 20 # This file is licensed under the PSF license.
21 21 #
22 22 # Copyright (C) 2001 Python Software Foundation, www.python.org
23 23 # Copyright (C) 2005-2006 Fernando Perez. <fperez@colorado.edu>
24 24 #
25 25 #
26 26 #*****************************************************************************
27 27 from __future__ import print_function
28 28
29 29 import bdb
30 30 import functools
31 31 import linecache
32 32 import sys
33 33
34 from IPython import get_ipython
34 35 from IPython.utils import PyColorize, ulinecache
35 36 from IPython.core import ipapi
36 37 from IPython.utils import coloransi, io, py3compat
37 38 from IPython.core.excolors import exception_colors
38 39
39 40 # See if we can use pydb.
40 41 has_pydb = False
41 42 prompt = 'ipdb> '
42 43 #We have to check this directly from sys.argv, config struct not yet available
43 44 if '--pydb' in sys.argv:
44 45 try:
45 46 import pydb
46 47 if hasattr(pydb.pydb, "runl") and pydb.version>'1.17':
47 48 # Version 1.17 is broken, and that's what ships with Ubuntu Edgy, so we
48 49 # better protect against it.
49 50 has_pydb = True
50 51 except ImportError:
51 52 print("Pydb (http://bashdb.sourceforge.net/pydb/) does not seem to be available")
52 53
53 54 if has_pydb:
54 55 from pydb import Pdb as OldPdb
55 56 #print "Using pydb for %run -d and post-mortem" #dbg
56 57 prompt = 'ipydb> '
57 58 else:
58 59 from pdb import Pdb as OldPdb
59 60
60 61 # Allow the set_trace code to operate outside of an ipython instance, even if
61 62 # it does so with some limitations. The rest of this support is implemented in
62 63 # the Tracer constructor.
63 64 def BdbQuit_excepthook(et, ev, tb, excepthook=None):
64 65 """Exception hook which handles `BdbQuit` exceptions.
65 66
66 67 All other exceptions are processed using the `excepthook`
67 68 parameter.
68 69 """
69 70 if et==bdb.BdbQuit:
70 71 print('Exiting Debugger.')
71 72 elif excepthook is not None:
72 73 excepthook(et, ev, tb)
73 74 else:
74 75 # Backwards compatibility. Raise deprecation warning?
75 76 BdbQuit_excepthook.excepthook_ori(et,ev,tb)
76 77
77 78 def BdbQuit_IPython_excepthook(self,et,ev,tb,tb_offset=None):
78 79 print('Exiting Debugger.')
79 80
80 81
81 82 class Tracer(object):
82 83 """Class for local debugging, similar to pdb.set_trace.
83 84
84 85 Instances of this class, when called, behave like pdb.set_trace, but
85 86 providing IPython's enhanced capabilities.
86 87
87 88 This is implemented as a class which must be initialized in your own code
88 89 and not as a standalone function because we need to detect at runtime
89 90 whether IPython is already active or not. That detection is done in the
90 91 constructor, ensuring that this code plays nicely with a running IPython,
91 92 while functioning acceptably (though with limitations) if outside of it.
92 93 """
93 94
94 95 def __init__(self,colors=None):
95 96 """Create a local debugger instance.
96 97
97 98 :Parameters:
98 99
99 100 - `colors` (None): a string containing the name of the color scheme to
100 101 use, it must be one of IPython's valid color schemes. If not given, the
101 102 function will default to the current IPython scheme when running inside
102 103 IPython, and to 'NoColor' otherwise.
103 104
104 105 Usage example:
105 106
106 107 from IPython.core.debugger import Tracer; debug_here = Tracer()
107 108
108 109 ... later in your code
109 110 debug_here() # -> will open up the debugger at that point.
110 111
111 112 Once the debugger activates, you can use all of its regular commands to
112 113 step through code, set breakpoints, etc. See the pdb documentation
113 114 from the Python standard library for usage details.
114 115 """
115 116
116 try:
117 ip = get_ipython()
118 except NameError:
117 ip = get_ipython()
118 if ip is None:
119 119 # Outside of ipython, we set our own exception hook manually
120 120 sys.excepthook = functools.partial(BdbQuit_excepthook,
121 121 excepthook=sys.excepthook)
122 122 def_colors = 'NoColor'
123 123 try:
124 124 # Limited tab completion support
125 125 import readline
126 126 readline.parse_and_bind('tab: complete')
127 127 except ImportError:
128 128 pass
129 129 else:
130 130 # In ipython, we use its custom exception handler mechanism
131 131 def_colors = ip.colors
132 132 ip.set_custom_exc((bdb.BdbQuit,), BdbQuit_IPython_excepthook)
133 133
134 134 if colors is None:
135 135 colors = def_colors
136 136
137 137 # The stdlib debugger internally uses a modified repr from the `repr`
138 138 # module, that limits the length of printed strings to a hardcoded
139 139 # limit of 30 characters. That much trimming is too aggressive, let's
140 140 # at least raise that limit to 80 chars, which should be enough for
141 141 # most interactive uses.
142 142 try:
143 143 from repr import aRepr
144 144 aRepr.maxstring = 80
145 145 except:
146 146 # This is only a user-facing convenience, so any error we encounter
147 147 # here can be warned about but can be otherwise ignored. These
148 148 # printouts will tell us about problems if this API changes
149 149 import traceback
150 150 traceback.print_exc()
151 151
152 152 self.debugger = Pdb(colors)
153 153
154 154 def __call__(self):
155 155 """Starts an interactive debugger at the point where called.
156 156
157 157 This is similar to the pdb.set_trace() function from the std lib, but
158 158 using IPython's enhanced debugger."""
159 159
160 160 self.debugger.set_trace(sys._getframe().f_back)
161 161
162 162
163 163 def decorate_fn_with_doc(new_fn, old_fn, additional_text=""):
164 164 """Make new_fn have old_fn's doc string. This is particularly useful
165 165 for the ``do_...`` commands that hook into the help system.
166 166 Adapted from a comp.lang.python posting
167 167 by Duncan Booth."""
168 168 def wrapper(*args, **kw):
169 169 return new_fn(*args, **kw)
170 170 if old_fn.__doc__:
171 171 wrapper.__doc__ = old_fn.__doc__ + additional_text
172 172 return wrapper
173 173
174 174
175 175 def _file_lines(fname):
176 176 """Return the contents of a named file as a list of lines.
177 177
178 178 This function never raises an IOError exception: if the file can't be
179 179 read, it simply returns an empty list."""
180 180
181 181 try:
182 182 outfile = open(fname)
183 183 except IOError:
184 184 return []
185 185 else:
186 186 out = outfile.readlines()
187 187 outfile.close()
188 188 return out
189 189
190 190
191 191 class Pdb(OldPdb):
192 192 """Modified Pdb class, does not load readline."""
193 193
194 194 def __init__(self,color_scheme='NoColor',completekey=None,
195 195 stdin=None, stdout=None):
196 196
197 197 # Parent constructor:
198 198 if has_pydb and completekey is None:
199 199 OldPdb.__init__(self,stdin=stdin,stdout=io.stdout)
200 200 else:
201 201 OldPdb.__init__(self,completekey,stdin,stdout)
202 202
203 203 self.prompt = prompt # The default prompt is '(Pdb)'
204 204
205 205 # IPython changes...
206 206 self.is_pydb = has_pydb
207 207
208 208 self.shell = ipapi.get()
209 209
210 210 if self.is_pydb:
211 211
212 212 # interactiveshell.py's ipalias seems to want pdb's checkline
213 213 # which is located in pydb.fns
214 214 import pydb.fns
215 215 self.checkline = lambda filename, lineno: \
216 216 pydb.fns.checkline(self, filename, lineno)
217 217
218 218 self.curframe = None
219 219 self.do_restart = self.new_do_restart
220 220
221 221 self.old_all_completions = self.shell.Completer.all_completions
222 222 self.shell.Completer.all_completions=self.all_completions
223 223
224 224 self.do_list = decorate_fn_with_doc(self.list_command_pydb,
225 225 OldPdb.do_list)
226 226 self.do_l = self.do_list
227 227 self.do_frame = decorate_fn_with_doc(self.new_do_frame,
228 228 OldPdb.do_frame)
229 229
230 230 self.aliases = {}
231 231
232 232 # Create color table: we copy the default one from the traceback
233 233 # module and add a few attributes needed for debugging
234 234 self.color_scheme_table = exception_colors()
235 235
236 236 # shorthands
237 237 C = coloransi.TermColors
238 238 cst = self.color_scheme_table
239 239
240 240 cst['NoColor'].colors.breakpoint_enabled = C.NoColor
241 241 cst['NoColor'].colors.breakpoint_disabled = C.NoColor
242 242
243 243 cst['Linux'].colors.breakpoint_enabled = C.LightRed
244 244 cst['Linux'].colors.breakpoint_disabled = C.Red
245 245
246 246 cst['LightBG'].colors.breakpoint_enabled = C.LightRed
247 247 cst['LightBG'].colors.breakpoint_disabled = C.Red
248 248
249 249 self.set_colors(color_scheme)
250 250
251 251 # Add a python parser so we can syntax highlight source while
252 252 # debugging.
253 253 self.parser = PyColorize.Parser()
254 254
255 255 def set_colors(self, scheme):
256 256 """Shorthand access to the color table scheme selector method."""
257 257 self.color_scheme_table.set_active_scheme(scheme)
258 258
259 259 def interaction(self, frame, traceback):
260 260 self.shell.set_completer_frame(frame)
261 261 OldPdb.interaction(self, frame, traceback)
262 262
263 263 def new_do_up(self, arg):
264 264 OldPdb.do_up(self, arg)
265 265 self.shell.set_completer_frame(self.curframe)
266 266 do_u = do_up = decorate_fn_with_doc(new_do_up, OldPdb.do_up)
267 267
268 268 def new_do_down(self, arg):
269 269 OldPdb.do_down(self, arg)
270 270 self.shell.set_completer_frame(self.curframe)
271 271
272 272 do_d = do_down = decorate_fn_with_doc(new_do_down, OldPdb.do_down)
273 273
274 274 def new_do_frame(self, arg):
275 275 OldPdb.do_frame(self, arg)
276 276 self.shell.set_completer_frame(self.curframe)
277 277
278 278 def new_do_quit(self, arg):
279 279
280 280 if hasattr(self, 'old_all_completions'):
281 281 self.shell.Completer.all_completions=self.old_all_completions
282 282
283 283
284 284 return OldPdb.do_quit(self, arg)
285 285
286 286 do_q = do_quit = decorate_fn_with_doc(new_do_quit, OldPdb.do_quit)
287 287
288 288 def new_do_restart(self, arg):
289 289 """Restart command. In the context of ipython this is exactly the same
290 290 thing as 'quit'."""
291 291 self.msg("Restart doesn't make sense here. Using 'quit' instead.")
292 292 return self.do_quit(arg)
293 293
294 294 def postloop(self):
295 295 self.shell.set_completer_frame(None)
296 296
297 297 def print_stack_trace(self):
298 298 try:
299 299 for frame_lineno in self.stack:
300 300 self.print_stack_entry(frame_lineno, context = 5)
301 301 except KeyboardInterrupt:
302 302 pass
303 303
304 304 def print_stack_entry(self,frame_lineno,prompt_prefix='\n-> ',
305 305 context = 3):
306 306 #frame, lineno = frame_lineno
307 307 print(self.format_stack_entry(frame_lineno, '', context), file=io.stdout)
308 308
309 309 # vds: >>
310 310 frame, lineno = frame_lineno
311 311 filename = frame.f_code.co_filename
312 312 self.shell.hooks.synchronize_with_editor(filename, lineno, 0)
313 313 # vds: <<
314 314
315 315 def format_stack_entry(self, frame_lineno, lprefix=': ', context = 3):
316 316 import repr
317 317
318 318 ret = []
319 319
320 320 Colors = self.color_scheme_table.active_colors
321 321 ColorsNormal = Colors.Normal
322 322 tpl_link = u'%s%%s%s' % (Colors.filenameEm, ColorsNormal)
323 323 tpl_call = u'%s%%s%s%%s%s' % (Colors.vName, Colors.valEm, ColorsNormal)
324 324 tpl_line = u'%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal)
325 325 tpl_line_em = u'%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line,
326 326 ColorsNormal)
327 327
328 328 frame, lineno = frame_lineno
329 329
330 330 return_value = ''
331 331 if '__return__' in frame.f_locals:
332 332 rv = frame.f_locals['__return__']
333 333 #return_value += '->'
334 334 return_value += repr.repr(rv) + '\n'
335 335 ret.append(return_value)
336 336
337 337 #s = filename + '(' + `lineno` + ')'
338 338 filename = self.canonic(frame.f_code.co_filename)
339 339 link = tpl_link % py3compat.cast_unicode(filename)
340 340
341 341 if frame.f_code.co_name:
342 342 func = frame.f_code.co_name
343 343 else:
344 344 func = "<lambda>"
345 345
346 346 call = ''
347 347 if func != '?':
348 348 if '__args__' in frame.f_locals:
349 349 args = repr.repr(frame.f_locals['__args__'])
350 350 else:
351 351 args = '()'
352 352 call = tpl_call % (func, args)
353 353
354 354 # The level info should be generated in the same format pdb uses, to
355 355 # avoid breaking the pdbtrack functionality of python-mode in *emacs.
356 356 if frame is self.curframe:
357 357 ret.append('> ')
358 358 else:
359 359 ret.append(' ')
360 360 ret.append(u'%s(%s)%s\n' % (link,lineno,call))
361 361
362 362 start = lineno - 1 - context//2
363 363 lines = ulinecache.getlines(filename)
364 364 start = min(start, len(lines) - context)
365 365 start = max(start, 0)
366 366 lines = lines[start : start + context]
367 367
368 368 for i,line in enumerate(lines):
369 369 show_arrow = (start + 1 + i == lineno)
370 370 linetpl = (frame is self.curframe or show_arrow) \
371 371 and tpl_line_em \
372 372 or tpl_line
373 373 ret.append(self.__format_line(linetpl, filename,
374 374 start + 1 + i, line,
375 375 arrow = show_arrow) )
376 376 return ''.join(ret)
377 377
378 378 def __format_line(self, tpl_line, filename, lineno, line, arrow = False):
379 379 bp_mark = ""
380 380 bp_mark_color = ""
381 381
382 382 scheme = self.color_scheme_table.active_scheme_name
383 383 new_line, err = self.parser.format2(line, 'str', scheme)
384 384 if not err: line = new_line
385 385
386 386 bp = None
387 387 if lineno in self.get_file_breaks(filename):
388 388 bps = self.get_breaks(filename, lineno)
389 389 bp = bps[-1]
390 390
391 391 if bp:
392 392 Colors = self.color_scheme_table.active_colors
393 393 bp_mark = str(bp.number)
394 394 bp_mark_color = Colors.breakpoint_enabled
395 395 if not bp.enabled:
396 396 bp_mark_color = Colors.breakpoint_disabled
397 397
398 398 numbers_width = 7
399 399 if arrow:
400 400 # This is the line with the error
401 401 pad = numbers_width - len(str(lineno)) - len(bp_mark)
402 402 if pad >= 3:
403 403 marker = '-'*(pad-3) + '-> '
404 404 elif pad == 2:
405 405 marker = '> '
406 406 elif pad == 1:
407 407 marker = '>'
408 408 else:
409 409 marker = ''
410 410 num = '%s%s' % (marker, str(lineno))
411 411 line = tpl_line % (bp_mark_color + bp_mark, num, line)
412 412 else:
413 413 num = '%*s' % (numbers_width - len(bp_mark), str(lineno))
414 414 line = tpl_line % (bp_mark_color + bp_mark, num, line)
415 415
416 416 return line
417 417
418 418 def list_command_pydb(self, arg):
419 419 """List command to use if we have a newer pydb installed"""
420 420 filename, first, last = OldPdb.parse_list_cmd(self, arg)
421 421 if filename is not None:
422 422 self.print_list_lines(filename, first, last)
423 423
424 424 def print_list_lines(self, filename, first, last):
425 425 """The printing (as opposed to the parsing part of a 'list'
426 426 command."""
427 427 try:
428 428 Colors = self.color_scheme_table.active_colors
429 429 ColorsNormal = Colors.Normal
430 430 tpl_line = '%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal)
431 431 tpl_line_em = '%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line, ColorsNormal)
432 432 src = []
433 433 if filename == "<string>" and hasattr(self, "_exec_filename"):
434 434 filename = self._exec_filename
435 435
436 436 for lineno in range(first, last+1):
437 437 line = ulinecache.getline(filename, lineno)
438 438 if not line:
439 439 break
440 440
441 441 if lineno == self.curframe.f_lineno:
442 442 line = self.__format_line(tpl_line_em, filename, lineno, line, arrow = True)
443 443 else:
444 444 line = self.__format_line(tpl_line, filename, lineno, line, arrow = False)
445 445
446 446 src.append(line)
447 447 self.lineno = lineno
448 448
449 449 print(''.join(src), file=io.stdout)
450 450
451 451 except KeyboardInterrupt:
452 452 pass
453 453
454 454 def do_list(self, arg):
455 455 self.lastcmd = 'list'
456 456 last = None
457 457 if arg:
458 458 try:
459 459 x = eval(arg, {}, {})
460 460 if type(x) == type(()):
461 461 first, last = x
462 462 first = int(first)
463 463 last = int(last)
464 464 if last < first:
465 465 # Assume it's a count
466 466 last = first + last
467 467 else:
468 468 first = max(1, int(x) - 5)
469 469 except:
470 470 print('*** Error in argument:', repr(arg))
471 471 return
472 472 elif self.lineno is None:
473 473 first = max(1, self.curframe.f_lineno - 5)
474 474 else:
475 475 first = self.lineno + 1
476 476 if last is None:
477 477 last = first + 10
478 478 self.print_list_lines(self.curframe.f_code.co_filename, first, last)
479 479
480 480 # vds: >>
481 481 lineno = first
482 482 filename = self.curframe.f_code.co_filename
483 483 self.shell.hooks.synchronize_with_editor(filename, lineno, 0)
484 484 # vds: <<
485 485
486 486 do_l = do_list
487 487
488 488 def do_pdef(self, arg):
489 489 """Print the call signature for any callable object.
490 490
491 491 The debugger interface to %pdef"""
492 492 namespaces = [('Locals', self.curframe.f_locals),
493 493 ('Globals', self.curframe.f_globals)]
494 494 self.shell.find_line_magic('pdef')(arg, namespaces=namespaces)
495 495
496 496 def do_pdoc(self, arg):
497 497 """Print the docstring for an object.
498 498
499 499 The debugger interface to %pdoc."""
500 500 namespaces = [('Locals', self.curframe.f_locals),
501 501 ('Globals', self.curframe.f_globals)]
502 502 self.shell.find_line_magic('pdoc')(arg, namespaces=namespaces)
503 503
504 504 def do_pfile(self, arg):
505 505 """Print (or run through pager) the file where an object is defined.
506 506
507 507 The debugger interface to %pfile.
508 508 """
509 509 namespaces = [('Locals', self.curframe.f_locals),
510 510 ('Globals', self.curframe.f_globals)]
511 511 self.shell.find_line_magic('pfile')(arg, namespaces=namespaces)
512 512
513 513 def do_pinfo(self, arg):
514 514 """Provide detailed information about an object.
515 515
516 516 The debugger interface to %pinfo, i.e., obj?."""
517 517 namespaces = [('Locals', self.curframe.f_locals),
518 518 ('Globals', self.curframe.f_globals)]
519 519 self.shell.find_line_magic('pinfo')(arg, namespaces=namespaces)
520 520
521 521 def do_pinfo2(self, arg):
522 522 """Provide extra detailed information about an object.
523 523
524 524 The debugger interface to %pinfo2, i.e., obj??."""
525 525 namespaces = [('Locals', self.curframe.f_locals),
526 526 ('Globals', self.curframe.f_globals)]
527 527 self.shell.find_line_magic('pinfo2')(arg, namespaces=namespaces)
528 528
529 529 def do_psource(self, arg):
530 530 """Print (or run through pager) the source code for an object."""
531 531 namespaces = [('Locals', self.curframe.f_locals),
532 532 ('Globals', self.curframe.f_globals)]
533 533 self.shell.find_line_magic('psource')(arg, namespaces=namespaces)
534 534
535 535 def checkline(self, filename, lineno):
536 536 """Check whether specified line seems to be executable.
537 537
538 538 Return `lineno` if it is, 0 if not (e.g. a docstring, comment, blank
539 539 line or EOF). Warning: testing is not comprehensive.
540 540 """
541 541 #######################################################################
542 542 # XXX Hack! Use python-2.5 compatible code for this call, because with
543 543 # all of our changes, we've drifted from the pdb api in 2.6. For now,
544 544 # changing:
545 545 #
546 546 #line = linecache.getline(filename, lineno, self.curframe.f_globals)
547 547 # to:
548 548 #
549 549 line = linecache.getline(filename, lineno)
550 550 #
551 551 # does the trick. But in reality, we need to fix this by reconciling
552 552 # our updates with the new Pdb APIs in Python 2.6.
553 553 #
554 554 # End hack. The rest of this method is copied verbatim from 2.6 pdb.py
555 555 #######################################################################
556 556
557 557 if not line:
558 558 print('End of file', file=self.stdout)
559 559 return 0
560 560 line = line.strip()
561 561 # Don't allow setting breakpoint at a blank line
562 562 if (not line or (line[0] == '#') or
563 563 (line[:3] == '"""') or line[:3] == "'''"):
564 564 print('*** Blank or comment', file=self.stdout)
565 565 return 0
566 566 return lineno
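In the debugger hunk above, the `try`/`except NameError` around `get_ipython()` becomes an explicit `is None` check, since the name is now imported rather than looked up in builtins. The `Tracer` docstring already describes the intended usage; spelled out as a small runnable sketch (the `solve` function is illustrative):

```python
from IPython.core.debugger import Tracer

debug_here = Tracer()   # detects at construction time whether IPython is active

def solve(x):
    y = x ** 2 - 1
    debug_here()        # opens the enhanced debugger at this point
    return y

solve(3)
```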
@@ -1,483 +1,484 b''
1 1 # -*- coding: utf-8 -*-
2 2 """Manage background (threaded) jobs conveniently from an interactive shell.
3 3
4 4 This module provides a BackgroundJobManager class. This is the main class
5 5 meant for public usage, it implements an object which can create and manage
6 6 new background jobs.
7 7
8 8 It also provides the actual job classes managed by these BackgroundJobManager
9 9 objects, see their docstrings below.
10 10
11 11
12 12 This system was inspired by discussions with B. Granger and the
13 13 BackgroundCommand class described in the book Python Scripting for
14 14 Computational Science, by H. P. Langtangen:
15 15
16 16 http://folk.uio.no/hpl/scripting
17 17
18 18 (although ultimately no code from this text was used, as IPython's system is a
19 19 separate implementation).
20 20
21 21 An example notebook is provided in our documentation illustrating interactive
22 22 use of the system.
23 23 """
24 24
25 25 #*****************************************************************************
26 26 # Copyright (C) 2005-2006 Fernando Perez <fperez@colorado.edu>
27 27 #
28 28 # Distributed under the terms of the BSD License. The full license is in
29 29 # the file COPYING, distributed as part of this software.
30 30 #*****************************************************************************
31 31
32 32 # Code begins
33 33 import sys
34 34 import threading
35 35
36 from IPython import get_ipython
36 37 from IPython.core.ultratb import AutoFormattedTB
37 38 from IPython.utils.warn import error
38 39
39 40
40 41 class BackgroundJobManager(object):
41 42 """Class to manage a pool of backgrounded threaded jobs.
42 43
43 44 Below, we assume that 'jobs' is a BackgroundJobManager instance.
44 45
45 46 Usage summary (see the method docstrings for details):
46 47
47 48 jobs.new(...) -> start a new job
48 49
49 50 jobs() or jobs.status() -> print status summary of all jobs
50 51
51 52 jobs[N] -> returns job number N.
52 53
53 54 foo = jobs[N].result -> assign to variable foo the result of job N
54 55
55 56 jobs[N].traceback() -> print the traceback of dead job N
56 57
57 58 jobs.remove(N) -> remove (finished) job N
58 59
59 60 jobs.flush() -> remove all finished jobs
60 61
61 62 As a convenience feature, BackgroundJobManager instances provide the
62 63 utility result and traceback methods which retrieve the corresponding
63 64 information from the jobs list:
64 65
65 66 jobs.result(N) <--> jobs[N].result
66 67 jobs.traceback(N) <--> jobs[N].traceback()
67 68
68 69 While this appears minor, it allows you to use tab completion
69 70 interactively on the job manager instance.
70 71 """
71 72
72 73 def __init__(self):
73 74 # Lists for job management, accessed via a property to ensure they're
74 75 # up to date.
75 76 self._running = []
76 77 self._completed = []
77 78 self._dead = []
78 79 # A dict of all jobs, so users can easily access any of them
79 80 self.all = {}
80 81 # For reporting
81 82 self._comp_report = []
82 83 self._dead_report = []
83 84 # Store status codes locally for fast lookups
84 85 self._s_created = BackgroundJobBase.stat_created_c
85 86 self._s_running = BackgroundJobBase.stat_running_c
86 87 self._s_completed = BackgroundJobBase.stat_completed_c
87 88 self._s_dead = BackgroundJobBase.stat_dead_c
88 89
89 90 @property
90 91 def running(self):
91 92 self._update_status()
92 93 return self._running
93 94
94 95 @property
95 96 def dead(self):
96 97 self._update_status()
97 98 return self._dead
98 99
99 100 @property
100 101 def completed(self):
101 102 self._update_status()
102 103 return self._completed
103 104
104 105 def new(self, func_or_exp, *args, **kwargs):
105 106 """Add a new background job and start it in a separate thread.
106 107
107 108 There are two types of jobs which can be created:
108 109
109 110 1. Jobs based on expressions which can be passed to an eval() call.
110 111 The expression must be given as a string. For example:
111 112
112 113 job_manager.new('myfunc(x,y,z=1)'[,glob[,loc]])
113 114
114 115 The given expression is passed to eval(), along with the optional
115 116 global/local dicts provided. If no dicts are given, they are
116 117 extracted automatically from the caller's frame.
117 118
118 119 A Python statement is NOT a valid eval() expression. Basically, you
119 120 can only use as an eval() argument something which can go on the right
120 121 of an '=' sign and be assigned to a variable.
121 122
122 123 For example, "print 'hello'" is not valid, but '2+3' is.
123 124
124 125 2. Jobs given a function object, optionally passing additional
125 126 positional arguments:
126 127
127 128 job_manager.new(myfunc, x, y)
128 129
129 130 The function is called with the given arguments.
130 131
131 132 If you need to pass keyword arguments to your function, you must
132 133 supply them as a dict named kw:
133 134
134 135 job_manager.new(myfunc, x, y, kw=dict(z=1))
135 136
136 137 The reason for this asymmetry is that the new() method needs to
137 138 maintain access to its own keywords, and this prevents name collisions
138 139 between arguments to new() and arguments to your own functions.
139 140
140 141 In both cases, the result is stored in the job.result field of the
141 142 background job object.
142 143
143 144 You can set the `daemon` attribute of the thread by giving the keyword
144 145 argument `daemon`.
145 146
146 147 Notes and caveats:
147 148
148 149 1. All threads running share the same standard output. Thus, if your
149 150 background jobs generate output, it will come out on top of whatever
150 151 you are currently writing. For this reason, background jobs are best
151 152 used with silent functions which simply return their output.
152 153
153 154 2. Threads also all work within the same global namespace, and this
154 155 system does not lock interactive variables. So if you send a job to the
155 156 background which operates on a mutable object for a long time, and
156 157 start modifying that same mutable object interactively (or in another
157 158 backgrounded job), all sorts of bizarre behaviour will occur.
158 159
159 160 3. If a background job is spending a lot of time inside a C extension
160 161 module which does not release the Python Global Interpreter Lock
161 162 (GIL), this will block the IPython prompt. This is simply because the
162 163 Python interpreter can only switch between threads at Python
163 164 bytecodes. While the execution is inside C code, the interpreter must
164 165 simply wait unless the extension module releases the GIL.
165 166
166 167 4. There is no way, due to limitations in the Python threads library,
167 168 to kill a thread once it has started."""
168 169
169 170 if callable(func_or_exp):
170 171 kw = kwargs.get('kw',{})
171 172 job = BackgroundJobFunc(func_or_exp,*args,**kw)
172 173 elif isinstance(func_or_exp, basestring):
173 174 if not args:
174 175 frame = sys._getframe(1)
175 176 glob, loc = frame.f_globals, frame.f_locals
176 177 elif len(args)==1:
177 178 glob = loc = args[0]
178 179 elif len(args)==2:
179 180 glob,loc = args
180 181 else:
181 182 raise ValueError(
182 183 'Expression jobs take at most 2 args (globals,locals)')
183 184 job = BackgroundJobExpr(func_or_exp, glob, loc)
184 185 else:
185 186 raise TypeError('invalid args for new job')
186 187
187 188 if kwargs.get('daemon', False):
188 189 job.daemon = True
189 190 job.num = len(self.all)+1 if self.all else 0
190 191 self.running.append(job)
191 192 self.all[job.num] = job
192 193 print 'Starting job # %s in a separate thread.' % job.num
193 194 job.start()
194 195 return job
195 196
196 197 def __getitem__(self, job_key):
197 198 num = job_key if isinstance(job_key, int) else job_key.num
198 199 return self.all[num]
199 200
200 201 def __call__(self):
201 202 """An alias to self.status(),
202 203
203 204 This allows you to simply call a job manager instance much like the
204 205 Unix `jobs` shell command."""
205 206
206 207 return self.status()
207 208
208 209 def _update_status(self):
209 210 """Update the status of the job lists.
210 211
211 212 This method moves finished jobs to one of two lists:
212 213 - self.completed: jobs which completed successfully
213 214 - self.dead: jobs which finished but died.
214 215
215 216 It also copies those jobs to corresponding _report lists. These lists
216 217 are used to report jobs completed/dead since the last update, and are
217 218 then cleared by the reporting function after each call."""
218 219
219 220 # Status codes
220 221 srun, scomp, sdead = self._s_running, self._s_completed, self._s_dead
221 222 # State lists, use the actual lists b/c the public names are properties
222 223 # that call this very function on access
223 224 running, completed, dead = self._running, self._completed, self._dead
224 225
225 226 # Now, update all state lists
226 227 for num, job in enumerate(running):
227 228 stat = job.stat_code
228 229 if stat == srun:
229 230 continue
230 231 elif stat == scomp:
231 232 completed.append(job)
232 233 self._comp_report.append(job)
233 234 running[num] = False
234 235 elif stat == sdead:
235 236 dead.append(job)
236 237 self._dead_report.append(job)
237 238 running[num] = False
238 239 # Remove dead/completed jobs from running list
239 240 running[:] = filter(None, running)
240 241
241 242 def _group_report(self,group,name):
242 243 """Report summary for a given job group.
243 244
244 245 Return True if the group had any elements."""
245 246
246 247 if group:
247 248 print '%s jobs:' % name
248 249 for job in group:
249 250 print '%s : %s' % (job.num,job)
250 251 print
251 252 return True
252 253
253 254 def _group_flush(self,group,name):
254 255 """Flush a given job group
255 256
256 257 Return True if the group had any elements."""
257 258
258 259 njobs = len(group)
259 260 if njobs:
260 261 plural = {1:''}.setdefault(njobs,'s')
261 262 print 'Flushing %s %s job%s.' % (njobs,name,plural)
262 263 group[:] = []
263 264 return True
264 265
265 266 def _status_new(self):
266 267 """Print the status of newly finished jobs.
267 268
268 269 Return True if any new jobs are reported.
269 270
270 271 This call resets its own state every time, so it only reports jobs
271 272 which have finished since the last time it was called."""
272 273
273 274 self._update_status()
274 275 new_comp = self._group_report(self._comp_report, 'Completed')
275 276 new_dead = self._group_report(self._dead_report,
276 277 'Dead, call jobs.traceback() for details')
277 278 self._comp_report[:] = []
278 279 self._dead_report[:] = []
279 280 return new_comp or new_dead
280 281
281 282 def status(self,verbose=0):
282 283 """Print a status of all jobs currently being managed."""
283 284
284 285 self._update_status()
285 286 self._group_report(self.running,'Running')
286 287 self._group_report(self.completed,'Completed')
287 288 self._group_report(self.dead,'Dead')
288 289 # Also flush the report queues
289 290 self._comp_report[:] = []
290 291 self._dead_report[:] = []
291 292
292 293 def remove(self,num):
293 294 """Remove a finished (completed or dead) job."""
294 295
295 296 try:
296 297 job = self.all[num]
297 298 except KeyError:
298 299 error('Job #%s not found' % num)
299 300 else:
300 301 stat_code = job.stat_code
301 302 if stat_code == self._s_running:
302 303 error('Job #%s is still running, it can not be removed.' % num)
303 304 return
304 305 elif stat_code == self._s_completed:
305 306 self.completed.remove(job)
306 307 elif stat_code == self._s_dead:
307 308 self.dead.remove(job)
308 309
309 310 def flush(self):
310 311 """Flush all finished jobs (completed and dead) from lists.
311 312
312 313 Running jobs are never flushed.
313 314
314 315 It first calls _status_new(), to update info. If any jobs have
315 316 completed since the last _status_new() call, the flush operation
316 317 aborts."""
317 318
318 319 # Remove the finished jobs from the master dict
319 320 alljobs = self.all
320 321 for job in self.completed+self.dead:
321 322 del(alljobs[job.num])
322 323
323 324 # Now flush these lists completely
324 325 fl_comp = self._group_flush(self.completed, 'Completed')
325 326 fl_dead = self._group_flush(self.dead, 'Dead')
326 327 if not (fl_comp or fl_dead):
327 328 print 'No jobs to flush.'
328 329
329 330 def result(self,num):
330 331 """result(N) -> return the result of job N."""
331 332 try:
332 333 return self.all[num].result
333 334 except KeyError:
334 335 error('Job #%s not found' % num)
335 336
336 337 def _traceback(self, job):
337 338 num = job if isinstance(job, int) else job.num
338 339 try:
339 340 self.all[num].traceback()
340 341 except KeyError:
341 342 error('Job #%s not found' % num)
342 343
343 344 def traceback(self, job=None):
344 345 if job is None:
345 346 self._update_status()
346 347 for deadjob in self.dead:
347 348 print "Traceback for: %r" % deadjob
348 349 self._traceback(deadjob)
349 350 print
350 351 else:
351 352 self._traceback(job)
352 353
353 354
354 355 class BackgroundJobBase(threading.Thread):
355 356 """Base class to build BackgroundJob classes.
356 357
357 358 The derived classes must implement:
358 359
359 360 - Their own __init__, since the one here raises NotImplementedError. The
360 361 derived constructor must call self._init() at the end, to provide common
361 362 initialization.
362 363
363 364 - A strform attribute used in calls to __str__.
364 365
365 366 - A call() method, which will make the actual execution call and must
366 367 return a value to be held in the 'result' field of the job object."""
367 368
368 369 # Class constants for status, in string and as numerical codes (when
369 370 # updating jobs lists, we don't want to do string comparisons). This will
370 371 # be done at every user prompt, so it has to be as fast as possible
371 372 stat_created = 'Created'; stat_created_c = 0
372 373 stat_running = 'Running'; stat_running_c = 1
373 374 stat_completed = 'Completed'; stat_completed_c = 2
374 375 stat_dead = 'Dead (Exception), call jobs.traceback() for details'
375 376 stat_dead_c = -1
376 377
377 378 def __init__(self):
378 379 raise NotImplementedError("This class can not be instantiated directly.")
379 380
380 381 def _init(self):
381 382 """Common initialization for all BackgroundJob objects"""
382 383
383 384 for attr in ['call','strform']:
384 385 assert hasattr(self,attr), "Missing attribute <%s>" % attr
385 386
386 387 # The num tag can be set by an external job manager
387 388 self.num = None
388 389
389 390 self.status = BackgroundJobBase.stat_created
390 391 self.stat_code = BackgroundJobBase.stat_created_c
391 392 self.finished = False
392 393 self.result = '<BackgroundJob has not completed>'
393 394
394 395 # reuse the ipython traceback handler if we can get to it, otherwise
395 396 # make a new one
396 397 try:
397 398 make_tb = get_ipython().InteractiveTB.text
398 399 except:
399 400 make_tb = AutoFormattedTB(mode = 'Context',
400 401 color_scheme='NoColor',
401 402 tb_offset = 1).text
402 403 # Note that the actual API for text() requires the three args to be
403 404 # passed in, so we wrap it in a simple lambda.
404 405 self._make_tb = lambda : make_tb(None, None, None)
405 406
406 407 # Hold a formatted traceback if one is generated.
407 408 self._tb = None
408 409
409 410 threading.Thread.__init__(self)
410 411
411 412 def __str__(self):
412 413 return self.strform
413 414
414 415 def __repr__(self):
415 416 return '<BackgroundJob #%d: %s>' % (self.num, self.strform)
416 417
417 418 def traceback(self):
418 419 print self._tb
419 420
420 421 def run(self):
421 422 try:
422 423 self.status = BackgroundJobBase.stat_running
423 424 self.stat_code = BackgroundJobBase.stat_running_c
424 425 self.result = self.call()
425 426 except:
426 427 self.status = BackgroundJobBase.stat_dead
427 428 self.stat_code = BackgroundJobBase.stat_dead_c
428 429 self.finished = None
429 430 self.result = ('<BackgroundJob died, call jobs.traceback() for details>')
430 431 self._tb = self._make_tb()
431 432 else:
432 433 self.status = BackgroundJobBase.stat_completed
433 434 self.stat_code = BackgroundJobBase.stat_completed_c
434 435 self.finished = True
435 436
436 437
437 438 class BackgroundJobExpr(BackgroundJobBase):
438 439 """Evaluate an expression as a background job (uses a separate thread)."""
439 440
440 441 def __init__(self, expression, glob=None, loc=None):
441 442 """Create a new job from a string which can be fed to eval().
442 443
443 444 global/locals dicts can be provided, which will be passed to the eval
444 445 call."""
445 446
446 447 # fail immediately if the given expression can't be compiled
447 448 self.code = compile(expression,'<BackgroundJob compilation>','eval')
448 449
449 450 glob = {} if glob is None else glob
450 451 loc = {} if loc is None else loc
451 452 self.expression = self.strform = expression
452 453 self.glob = glob
453 454 self.loc = loc
454 455 self._init()
455 456
456 457 def call(self):
457 458 return eval(self.code,self.glob,self.loc)
458 459
459 460
460 461 class BackgroundJobFunc(BackgroundJobBase):
461 462 """Run a function call as a background job (uses a separate thread)."""
462 463
463 464 def __init__(self, func, *args, **kwargs):
464 465 """Create a new job from a callable object.
465 466
466 467 Any positional arguments and keyword args given to this constructor
467 468 after the initial callable are passed directly to it."""
468 469
469 470 if not callable(func):
470 471 raise TypeError(
471 472 'first argument to BackgroundJobFunc must be callable')
472 473
473 474 self.func = func
474 475 self.args = args
475 476 self.kwargs = kwargs
476 477 # The string form will only include the function passed, because
477 478 # generating string representations of the arguments is a potentially
478 479 # _very_ expensive operation (e.g. with large arrays).
479 480 self.strform = str(func)
480 481 self._init()
481 482
482 483 def call(self):
483 484 return self.func(*self.args, **self.kwargs)
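Apart from the new `get_ipython` import, the job manager's public surface is unchanged and matches the usage summary in the class docstring above. A short sketch of that API, assuming the module is importable as `IPython.lib.backgroundjobs`; the `slow_sum` helper is made up:

```python
import time

from IPython.lib.backgroundjobs import BackgroundJobManager  # assumed module path

def slow_sum(n, delay=0.01):
    """Deliberately slow, so the background thread has something to do."""
    total = 0
    for i in range(n):
        total += i
        time.sleep(delay)
    return total

jobs = BackgroundJobManager()
job = jobs.new(slow_sum, 200)     # function job: runs slow_sum(200) in a thread
jobs.new('slow_sum(50, 0.001)')   # expression job: eval()'d in the caller's frame
jobs.status()                     # Running / Completed / Dead summary
# Once finished, job.result holds the return value; jobs.traceback()
# prints tracebacks for any job that died.
```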
@@ -1,120 +1,122 b''
1 1 """ 'editor' hooks for common editors that work well with ipython
2 2
3 3 They should honor the line number argument, at least.
4 4
5 5 Contributions are *very* welcome.
6 6 """
7 7
8 8 import os
9 9 import pipes
10 10 import subprocess
11
12 from IPython import get_ipython
11 13 from IPython.core.error import TryNext
12 14
13 15
14 16 def install_editor(template, wait=False):
15 17 """Installs the editor that is called by IPython for the %edit magic.
16 18
17 19 This overrides the default editor, which is generally set by your EDITOR
18 20 environment variable or is notepad (windows) or vi (linux). By supplying a
19 21 template string `run_template`, you can control how the editor is invoked
20 22 by IPython -- (e.g. the format in which it accepts command line options)
21 23
22 24 Parameters
23 25 ----------
24 26 template : basestring
25 27 run_template acts as a template for how your editor is invoked by
26 28 the shell. It should contain '{filename}', which will be replaced on
27 29 invocation with the file name, and '{line}', which will be replaced by
28 30 the line number (or 0) at which to open the file.
29 31 wait : bool
30 32 If `wait` is true, wait until the user presses enter before returning,
31 33 to facilitate non-blocking editors that exit immediately after
32 34 the call.
33 35 """
34 36
35 37 # not all editors support $line, so we'll leave out this check
36 38 # for substitution in ['$file', '$line']:
37 39 # if not substitution in run_template:
38 40 # raise ValueError(('run_template should contain %s'
39 41 # ' for string substitution. You supplied "%s"' % (substitution,
40 42 # run_template)))
41 43
42 44 def call_editor(self, filename, line=0):
43 45 if line is None:
44 46 line = 0
45 47 cmd = template.format(filename=pipes.quote(filename), line=line)
46 48 print ">", cmd
47 49 proc = subprocess.Popen(cmd, shell=True)
48 50 if wait and proc.wait() != 0:
49 51 raise TryNext()
50 52 if wait:
51 53 raw_input("Press Enter when done editing:")
52 54
53 55 get_ipython().set_hook('editor', call_editor)
54 56 get_ipython().editor = template
55 57
56 58
57 59 # in these, exe is always the path/name of the executable. Useful
58 60 # if you don't have the editor directory in your path
59 61 def komodo(exe=u'komodo'):
60 62 """ Activestate Komodo [Edit] """
61 63 install_editor(exe + u' -l {line} {filename}', wait=True)
62 64
63 65
64 66 def scite(exe=u"scite"):
65 67 """ SciTE or Sc1 """
66 68 install_editor(exe + u' {filename} -goto:{line}')
67 69
68 70
69 71 def notepadplusplus(exe=u'notepad++'):
70 72 """ Notepad++ http://notepad-plus.sourceforge.net """
71 73 install_editor(exe + u' -n{line} {filename}')
72 74
73 75
74 76 def jed(exe=u'jed'):
75 77 """ JED, the lightweight emacsish editor """
76 78 install_editor(exe + u' +{line} {filename}')
77 79
78 80
79 81 def idle(exe=u'idle'):
80 82 """ Idle, the editor bundled with python
81 83
82 84 Parameters
83 85 ----------
84 86 exe : str, None
85 87 If none, should be pretty smart about finding the executable.
86 88 """
87 89 if exe is None:
88 90 import idlelib
89 91 p = os.path.dirname(idlelib.__file__)
90 92 # I'm not sure if this actually works. Is this idle.py script
91 93 # guaranteed to be executable?
92 94 exe = os.path.join(p, 'idle.py')
93 95 install_editor(exe + u' {filename}')
94 96
95 97
96 98 def mate(exe=u'mate'):
97 99 """ TextMate, the missing editor"""
98 100 # wait=True is not required since we're using the -w flag to mate
99 101 install_editor(exe + u' -w -l {line} {filename}')
100 102
101 103
102 104 # ##########################################
103 105 # these are untested, report any problems
104 106 # ##########################################
105 107
106 108
107 109 def emacs(exe=u'emacs'):
108 110 install_editor(exe + u' +{line} {filename}')
109 111
110 112
111 113 def gnuclient(exe=u'gnuclient'):
112 114 install_editor(exe + u' -nw +{line} {filename}')
113 115
114 116
115 117 def crimson_editor(exe=u'cedt.exe'):
116 118 install_editor(exe + u' /L:{line} {filename}')
117 119
118 120
119 121 def kate(exe=u'kate'):
120 122 install_editor(exe + u' -u -l {line} {filename}')
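Every helper above reduces to `install_editor` with a command template, so an editor that is not listed can be hooked up the same way from inside an IPython session. A sketch, assuming the module is importable as `IPython.lib.editorhooks`; the `myedit` command and its flags are invented:

```python
from IPython.lib.editorhooks import install_editor  # assumed module path

# '{filename}' and '{line}' are filled in by IPython when %edit runs;
# wait=True pauses for Enter, for editors that return immediately.
install_editor(u'myedit --line {line} {filename}', wait=True)
```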