##// END OF EJS Templates
Clean up converted code....
Thomas Kluyver -
Show More
@@ -1,221 +1,221 b''
1 1 """Logger class for IPython's logging facilities.
2 2 """
3 3 from __future__ import print_function
4 4
5 5 #*****************************************************************************
6 6 # Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
7 7 # Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu>
8 8 #
9 9 # Distributed under the terms of the BSD License. The full license is in
10 10 # the file COPYING, distributed as part of this software.
11 11 #*****************************************************************************
12 12
13 13 #****************************************************************************
14 14 # Modules and globals
15 15
16 16 # Python standard modules
17 17 import glob
18 18 import io
19 19 import os
20 20 import time
21 21
22 22 from IPython.utils.py3compat import str_to_unicode
23 23
24 24 #****************************************************************************
25 25 # FIXME: This class isn't a mixin anymore, but it still needs attributes from
26 26 # ipython and does input cache management. Finish cleanup later...
27 27
class Logger(object):
    """A Logfile class with different policies for file creation"""

    def __init__(self, home_dir, logfname='Logger.log', loghead=u'',
                 logmode='over'):
        """Create a logger.

        Parameters
        ----------
        home_dir : str
            Directory used to resolve the log path when ``logmode`` is
            'global'.
        logfname : str
            Name of the log file.
        loghead : unicode
            Header text written at the top of each newly created log file.
        logmode : str
            File-creation policy; one of 'append', 'backup', 'global',
            'over', 'rotate' (validated by the ``logmode`` property).
        """

        # this is the full ipython instance, we need some attributes from it
        # which won't exist until later. What a mess, clean up later...
        self.home_dir = home_dir

        self.logfname = logfname
        self.loghead = loghead
        self.logmode = logmode
        # File object for the active log; None means logging never started
        # (or was stopped with logstop()).
        self.logfile = None

        # Whether to log raw or processed input
        self.log_raw_input = False

        # whether to also log output
        self.log_output = False

        # whether to put timestamps before each log entry
        self.timestamp = False

        # activity control flags
        self.log_active = False

    # logmode is a validated property
    def _set_mode(self,mode):
        """Setter for ``logmode``; rejects any unknown mode string."""
        if mode not in ['append','backup','global','over','rotate']:
            raise ValueError('invalid log mode %s given' % mode)
        self._logmode = mode

    def _get_mode(self):
        """Getter for ``logmode``."""
        return self._logmode

    logmode = property(_get_mode,_set_mode)

    def logstart(self, logfname=None, loghead=None, logmode=None,
                 log_output=False, timestamp=False, log_raw_input=False):
        """Generate a new log-file with a default header.

        Raises RuntimeError if the log has already been started"""

        if self.logfile is not None:
            raise RuntimeError('Log file is already active: %s' %
                               self.logfname)

        # The parameters can override constructor defaults
        if logfname is not None: self.logfname = logfname
        if loghead is not None: self.loghead = loghead
        if logmode is not None: self.logmode = logmode

        # Parameters not part of the constructor
        self.timestamp = timestamp
        self.log_output = log_output
        self.log_raw_input = log_raw_input

        # init depending on the log mode requested
        isfile = os.path.isfile
        logmode = self.logmode

        if logmode == 'append':
            self.logfile = io.open(self.logfname, 'a', encoding='utf-8')

        elif logmode == 'backup':
            if isfile(self.logfname):
                backup_logname = self.logfname+'~'
                # Manually remove any old backup, since os.rename may fail
                # under Windows.
                if isfile(backup_logname):
                    os.remove(backup_logname)
                os.rename(self.logfname,backup_logname)
            self.logfile = io.open(self.logfname, 'w', encoding='utf-8')

        elif logmode == 'global':
            # 'global' resolves the filename relative to home_dir, then
            # appends, so all sessions share one log file.
            self.logfname = os.path.join(self.home_dir,self.logfname)
            self.logfile = io.open(self.logfname, 'a', encoding='utf-8')

        elif logmode == 'over':
            if isfile(self.logfname):
                os.remove(self.logfname)
            self.logfile = io.open(self.logfname,'w', encoding='utf-8')

        elif logmode == 'rotate':
            if isfile(self.logfname):
                if isfile(self.logfname+'.001~'):
                    # Shift every existing numbered backup (name.NNN~) up by
                    # one, highest-numbered first so no rename clobbers
                    # another backup.
                    old = glob.glob(self.logfname+'.*~')
                    old.sort()
                    old.reverse()
                    for f in old:
                        root, ext = os.path.splitext(f)
                        num = int(ext[1:-1])+1
                        os.rename(f, root+'.'+repr(num).zfill(3)+'~')
                os.rename(self.logfname, self.logfname+'.001~')
            self.logfile = io.open(self.logfname, 'w', encoding='utf-8')

        # 'append' mode reuses an existing file, so the header is only
        # written for freshly (re)created logs.
        if logmode != 'append':
            self.logfile.write(self.loghead)

        self.logfile.flush()
        self.log_active = True

    def switch_log(self,val):
        """Switch logging on/off. val should be ONLY a boolean."""

        if val not in [False,True,0,1]:
            raise ValueError('Call switch_log ONLY with a boolean argument, '
                             'not with: %s' % val)

        label = {0:'OFF',1:'ON',False:'OFF',True:'ON'}

        if self.logfile is None:
            print("""
Logging hasn't been started yet (use logstart for that).

%logon/%logoff are for temporarily starting and stopping logging for a logfile
which already exists. But you must first start the logging process with
%logstart (optionally giving a logfile name).""")

        else:
            if self.log_active == val:
                print('Logging is already',label[val])
            else:
                print('Switching logging',label[val])
                self.log_active = not self.log_active
                self.log_active_out = self.log_active

    def logstate(self):
        """Print a status message about the logger."""
        if self.logfile is None:
            print('Logging has not been activated.')
        else:
            state = self.log_active and 'active' or 'temporarily suspended'
            print('Filename       :', self.logfname)
            print('Mode           :', self.logmode)
            print('Output logging :', self.log_output)
            print('Raw input log  :', self.log_raw_input)
            print('Timestamping   :', self.timestamp)
            print('State          :', state)

    def log(self, line_mod, line_ori):
        """Write the sources to a log.

        Inputs:

        - line_mod: possibly modified input, such as the transformations made
          by input prefilters or input handlers of various kinds. This should
          always be valid Python.

        - line_ori: unmodified input line from the user. This is not
          necessarily valid Python.
        """

        # Write the log line, but decide which one according to the
        # log_raw_input flag, set when the log is started.
        if self.log_raw_input:
            self.log_write(line_ori)
        else:
            self.log_write(line_mod)

    def log_write(self, data, kind='input'):
        """Write data to the log file, if active"""

        #print 'data: %r' % data # dbg
        if self.log_active and data:
            write = self.logfile.write
            if kind=='input':
                if self.timestamp:
                    write(str_to_unicode(time.strftime('# %a, %d %b %Y %H:%M:%S\n',
                                                       time.localtime())))
                write(data)
            elif kind=='output' and self.log_output:
                # Output lines are commented out with '#[Out]# ' so the log
                # remains runnable Python.
                odata = u'\n'.join([u'#[Out]# %s' % s
                                    for s in data.splitlines()])
                write(u'%s\n' % odata)
            self.logfile.flush()

    def logstop(self):
        """Fully stop logging and close log file.

        In order to start logging again, a new logstart() call needs to be
        made, possibly (though not necessarily) with a new filename, mode and
        other options."""

        if self.logfile is not None:
            self.logfile.close()
            self.logfile = None
        else:
            print("Logging hadn't been started.")
        self.log_active = False

    # For backwards compatibility, in case anyone was using this.
    close_log = logstop
@@ -1,740 +1,740 b''
1 1 """Implementation of magic functions for interaction with the OS.
2 2
3 3 Note: this module is named 'osm' instead of 'os' to avoid a collision with the
4 4 builtin.
5 5 """
6 6 from __future__ import print_function
7 7 #-----------------------------------------------------------------------------
8 8 # Copyright (c) 2012 The IPython Development Team.
9 9 #
10 10 # Distributed under the terms of the Modified BSD License.
11 11 #
12 12 # The full license is in the file COPYING.txt, distributed with this software.
13 13 #-----------------------------------------------------------------------------
14 14
15 15 #-----------------------------------------------------------------------------
16 16 # Imports
17 17 #-----------------------------------------------------------------------------
18 18
19 19 # Stdlib
20 20 import io
21 21 import os
22 22 import re
23 23 import sys
24 24 from pprint import pformat
25 25
26 26 # Our own packages
27 27 from IPython.core import magic_arguments
28 28 from IPython.core import oinspect
29 29 from IPython.core import page
30 30 from IPython.core.alias import AliasError, Alias
31 31 from IPython.core.error import UsageError
32 32 from IPython.core.magic import (
33 33 Magics, compress_dhist, magics_class, line_magic, cell_magic, line_cell_magic
34 34 )
35 35 from IPython.testing.skipdoctest import skip_doctest
36 36 from IPython.utils.openpy import source_to_unicode
37 37 from IPython.utils.path import unquote_filename
38 38 from IPython.utils.process import abbrev_cwd
39 39 from IPython.utils.py3compat import unicode_type
40 40 from IPython.utils.terminal import set_term_title
41 41
42 42 #-----------------------------------------------------------------------------
43 43 # Magic implementation classes
44 44 #-----------------------------------------------------------------------------
@magics_class
class OSMagics(Magics):
    """Magics to interact with the underlying OS (shell-type functionality).
    """

    @skip_doctest
    @line_magic
    def alias(self, parameter_s=''):
        """Define an alias for a system command.

        '%alias alias_name cmd' defines 'alias_name' as an alias for 'cmd'

        Then, typing 'alias_name params' will execute the system command 'cmd
        params' (from your underlying operating system).

        Aliases have lower precedence than magic functions and Python normal
        variables, so if 'foo' is both a Python variable and an alias, the
        alias can not be executed until 'del foo' removes the Python variable.

        You can use the %l specifier in an alias definition to represent the
        whole line when the alias is called. For example::

          In [2]: alias bracket echo "Input in brackets: <%l>"
          In [3]: bracket hello world
          Input in brackets: <hello world>

        You can also define aliases with parameters using %s specifiers (one
        per parameter)::

          In [1]: alias parts echo first %s second %s
          In [2]: %parts A B
          first A second B
          In [3]: %parts A
          Incorrect number of arguments: 2 expected.
          parts is an alias to: 'echo first %s second %s'

        Note that %l and %s are mutually exclusive. You can only use one or
        the other in your aliases.

        Aliases expand Python variables just like system calls using ! or !!
        do: all expressions prefixed with '$' get expanded. For details of
        the semantic rules, see PEP-215:
        http://www.python.org/peps/pep-0215.html. This is the library used by
        IPython for variable expansion. If you want to access a true shell
        variable, an extra $ is necessary to prevent its expansion by
        IPython::

          In [6]: alias show echo
          In [7]: PATH='A Python string'
          In [8]: show $PATH
          A Python string
          In [9]: show $$PATH
          /usr/local/lf9560/bin:/usr/local/intel/compiler70/ia32/bin:...

        You can use the alias facility to access all of $PATH. See the %rehash
        and %rehashx functions, which automatically create aliases for the
        contents of your $PATH.

        If called with no parameters, %alias prints the current alias table."""

        par = parameter_s.strip()
        if not par:
            # No argument: just show the alias table.
            aliases = sorted(self.shell.alias_manager.aliases)
            # stored = self.shell.db.get('stored_aliases', {} )
            # for k, v in stored:
            #     atab.append(k, v[0])

            print("Total number of aliases:", len(aliases))
            sys.stdout.flush()
            return aliases

        # Now try to define a new one
        try:
            alias,cmd = par.split(None, 1)
        except ValueError:
            # A single word ("%alias foo") makes split return one element,
            # so the 2-target unpacking raises ValueError (not TypeError,
            # which the old code caught and therefore never handled).
            # Show the docstring as usage help instead of crashing.
            print(oinspect.getdoc(self.alias))
            return

        try:
            self.shell.alias_manager.define_alias(alias, cmd)
        except AliasError as e:
            print(e)
    # end magic_alias

    @line_magic
    def unalias(self, parameter_s=''):
        """Remove an alias"""

        aname = parameter_s.strip()
        try:
            self.shell.alias_manager.undefine_alias(aname)
        except ValueError as e:
            print(e)
            return

        # Also drop the alias from the persistent store, if present.
        stored = self.shell.db.get('stored_aliases', {} )
        if aname in stored:
            # NOTE(review): "%stored" reads like a typo, but it may be a
            # deliberate reference to the %store magic — confirm before
            # changing the message.
            print("Removing %stored alias",aname)
            del stored[aname]
            self.shell.db['stored_aliases'] = stored

    @line_magic
    def rehashx(self, parameter_s=''):
        """Update the alias table with all executable files in $PATH.

        This version explicitly checks that every entry in $PATH is a file
        with execute access (os.X_OK), so it is much slower than %rehash.

        Under Windows, it checks executability as a match against a
        '|'-separated string of extensions, stored in the IPython config
        variable win_exec_ext. This defaults to 'exe|com|bat'.

        This function also resets the root module cache of module completer,
        used on slow filesystems.
        """
        from IPython.core.alias import InvalidAliasError

        # for the benefit of module completer in ipy_completers.py
        del self.shell.db['rootmodules_cache']

        path = [os.path.abspath(os.path.expanduser(p)) for p in
                os.environ.get('PATH','').split(os.pathsep)]
        path = filter(os.path.isdir,path)

        syscmdlist = []
        # Now define isexec in a cross platform manner.
        if os.name == 'posix':
            isexec = lambda fname:os.path.isfile(fname) and \
                     os.access(fname,os.X_OK)
        else:
            try:
                winext = os.environ['pathext'].replace(';','|').replace('.','')
            except KeyError:
                winext = 'exe|com|bat|py'
            # Make sure .py files are always treated as executable, so
            # scripts on PATH get aliases too.
            if 'py' not in winext:
                winext += '|py'
            execre = re.compile(r'(.*)\.(%s)$' % winext,re.IGNORECASE)
            isexec = lambda fname:os.path.isfile(fname) and execre.match(fname)
        savedir = os.getcwdu()

        # Now walk the paths looking for executables to alias.
        try:
            # write the whole loop for posix/Windows so we don't have an if in
            # the innermost part
            if os.name == 'posix':
                for pdir in path:
                    os.chdir(pdir)
                    for ff in os.listdir(pdir):
                        if isexec(ff):
                            try:
                                # Removes dots from the name since ipython
                                # will assume names with dots to be python.
                                if not self.shell.alias_manager.is_alias(ff):
                                    self.shell.alias_manager.define_alias(
                                        ff.replace('.',''), ff)
                            except InvalidAliasError:
                                pass
                            else:
                                syscmdlist.append(ff)
            else:
                no_alias = Alias.blacklist
                for pdir in path:
                    os.chdir(pdir)
                    for ff in os.listdir(pdir):
                        base, ext = os.path.splitext(ff)
                        if isexec(ff) and base.lower() not in no_alias:
                            if ext.lower() == '.exe':
                                ff = base
                            try:
                                # Removes dots from the name since ipython
                                # will assume names with dots to be python.
                                self.shell.alias_manager.define_alias(
                                    base.lower().replace('.',''), ff)
                            except InvalidAliasError:
                                pass
                            syscmdlist.append(ff)
            self.shell.db['syscmdlist'] = syscmdlist
        finally:
            # Always restore the original working directory, even if a
            # listdir/chdir failed part-way through.
            os.chdir(savedir)

    @skip_doctest
    @line_magic
    def pwd(self, parameter_s=''):
        """Return the current working directory path.

        Examples
        --------
        ::

          In [9]: pwd
          Out[9]: '/home/tsuser/sprint/ipython'
        """
        return os.getcwdu()

    @skip_doctest
    @line_magic
    def cd(self, parameter_s=''):
        """Change the current working directory.

        This command automatically maintains an internal list of directories
        you visit during your IPython session, in the variable _dh. The
        command %dhist shows this history nicely formatted. You can also
        do 'cd -<tab>' to see directory history conveniently.

        Usage:

          cd 'dir': changes to directory 'dir'.

          cd -: changes to the last visited directory.

          cd -<n>: changes to the n-th directory in the directory history.

          cd --foo: change to directory that matches 'foo' in history

          cd -b <bookmark_name>: jump to a bookmark set by %bookmark
             (note: cd <bookmark_name> is enough if there is no
              directory <bookmark_name>, but a bookmark with the name exists.)
              'cd -b <tab>' allows you to tab-complete bookmark names.

        Options:

        -q: quiet.  Do not print the working directory after the cd command is
        executed.  By default IPython's cd command does print this directory,
        since the default prompts do not display path information.

        Note that !cd doesn't work for this purpose because the shell where
        !command runs is immediately discarded after executing 'command'.

        Examples
        --------
        ::

          In [10]: cd parent/child
          /home/tsuser/parent/child
        """

        oldcwd = os.getcwdu()
        numcd = re.match(r'(-)(\d+)$',parameter_s)
        # jump in directory history by number
        if numcd:
            nn = int(numcd.group(2))
            try:
                ps = self.shell.user_ns['_dh'][nn]
            except IndexError:
                print('The requested directory does not exist in history.')
                return
            else:
                opts = {}
        elif parameter_s.startswith('--'):
            # 'cd --foo': substring search of the directory history.
            ps = None
            fallback = None
            pat = parameter_s[2:]
            dh = self.shell.user_ns['_dh']
            # first search only by basename (last component)
            for ent in reversed(dh):
                if pat in os.path.basename(ent) and os.path.isdir(ent):
                    ps = ent
                    break

                if fallback is None and pat in ent and os.path.isdir(ent):
                    fallback = ent

            # if we have no last part match, pick the first full path match
            if ps is None:
                ps = fallback

            if ps is None:
                print("No matching entry in directory history")
                return
            else:
                opts = {}


        else:
            #turn all non-space-escaping backslashes to slashes,
            # for c:\windows\directory\names\
            parameter_s = re.sub(r'\\(?! )','/', parameter_s)
            opts,ps = self.parse_options(parameter_s,'qb',mode='string')
            # jump to previous
            if ps == '-':
                try:
                    ps = self.shell.user_ns['_dh'][-2]
                except IndexError:
                    raise UsageError('%cd -: No previous directory to change to.')
            # jump to bookmark if needed
            else:
                if not os.path.isdir(ps) or 'b' in opts:
                    bkms = self.shell.db.get('bookmarks', {})

                    if ps in bkms:
                        target = bkms[ps]
                        print('(bookmark:%s) -> %s' % (ps, target))
                        ps = target
                    else:
                        if 'b' in opts:
                            raise UsageError("Bookmark '%s' not found.  "
                                  "Use '%%bookmark -l' to see your bookmarks." % ps)

        # strip extra quotes on Windows, because os.chdir doesn't like them
        ps = unquote_filename(ps)
        # at this point ps should point to the target dir
        if ps:
            try:
                os.chdir(os.path.expanduser(ps))
                if hasattr(self.shell, 'term_title') and self.shell.term_title:
                    set_term_title('IPython: ' + abbrev_cwd())
            except OSError:
                print(sys.exc_info()[1])
            else:
                cwd = os.getcwdu()
                dhist = self.shell.user_ns['_dh']
                if oldcwd != cwd:
                    dhist.append(cwd)
                    # Cap the persisted history at the 100 most recent dirs.
                    self.shell.db['dhist'] = compress_dhist(dhist)[-100:]

        else:
            # Empty target: go home, like plain 'cd' in a shell.
            os.chdir(self.shell.home_dir)
            if hasattr(self.shell, 'term_title') and self.shell.term_title:
                set_term_title('IPython: ' + '~')
            cwd = os.getcwdu()
            dhist = self.shell.user_ns['_dh']

            if oldcwd != cwd:
                dhist.append(cwd)
                self.shell.db['dhist'] = compress_dhist(dhist)[-100:]
        if not 'q' in opts and self.shell.user_ns['_dh']:
            print(self.shell.user_ns['_dh'][-1])


    @line_magic
    def env(self, parameter_s=''):
        """List environment variables."""

        return dict(os.environ)

    @line_magic
    def pushd(self, parameter_s=''):
        """Place the current dir on stack and change directory.

        Usage:\\
          %pushd ['dirname']
        """

        dir_s = self.shell.dir_stack
        tgt = os.path.expanduser(unquote_filename(parameter_s))
        cwd = os.getcwdu().replace(self.shell.home_dir,'~')
        if tgt:
            self.cd(parameter_s)
        dir_s.insert(0,cwd)
        return self.shell.magic('dirs')

    @line_magic
    def popd(self, parameter_s=''):
        """Change to directory popped off the top of the stack.
        """
        if not self.shell.dir_stack:
            raise UsageError("%popd on empty stack")
        top = self.shell.dir_stack.pop(0)
        self.cd(top)
        print("popd ->",top)

    @line_magic
    def dirs(self, parameter_s=''):
        """Return the current directory stack."""

        return self.shell.dir_stack

    @line_magic
    def dhist(self, parameter_s=''):
        """Print your history of visited directories.

        %dhist       -> print full history\\
        %dhist n     -> print last n entries only\\
        %dhist n1 n2 -> print entries between n1 and n2 (n2 not included)\\

        This history is automatically maintained by the %cd command, and
        always available as the global list variable _dh. You can use %cd -<n>
        to go to directory number <n>.

        Note that most of time, you should view directory history by entering
        cd -<TAB>.

        """

        dh = self.shell.user_ns['_dh']
        if parameter_s:
            try:
                args = map(int,parameter_s.split())
            except ValueError:
                # Non-integer argument: show usage instead of silently
                # swallowing arbitrary exceptions (was a bare except).
                self.arg_err(self.dhist)
                return
            if len(args) == 1:
                ini,fin = max(len(dh)-(args[0]),0),len(dh)
            elif len(args) == 2:
                ini,fin = args
                fin = min(fin, len(dh))
            else:
                self.arg_err(self.dhist)
                return
        else:
            ini,fin = 0,len(dh)
        print('Directory history (kept in _dh)')
        for i in range(ini, fin):
            print("%d: %s" % (i, dh[i]))

    @skip_doctest
    @line_magic
    def sc(self, parameter_s=''):
        """Shell capture - run shell command and capture output (DEPRECATED use !).

        DEPRECATED. Suboptimal, retained for backwards compatibility.

        You should use the form 'var = !command' instead. Example:

         "%sc -l myfiles = ls ~" should now be written as

         "myfiles = !ls ~"

        myfiles.s, myfiles.l and myfiles.n still apply as documented
        below.

        --
        %sc [options] varname=command

        IPython will run the given command using commands.getoutput(), and
        will then update the user's interactive namespace with a variable
        called varname, containing the value of the call.  Your command can
        contain shell wildcards, pipes, etc.

        The '=' sign in the syntax is mandatory, and the variable name you
        supply must follow Python's standard conventions for valid names.

        (A special format without variable name exists for internal use)

        Options:

          -l: list output.  Split the output on newlines into a list before
          assigning it to the given variable.  By default the output is stored
          as a single string.

          -v: verbose.  Print the contents of the variable.

        In most cases you should not need to split as a list, because the
        returned value is a special type of string which can automatically
        provide its contents either as a list (split on newlines) or as a
        space-separated string.  These are convenient, respectively, either
        for sequential processing or to be passed to a shell command.

        For example::

            # Capture into variable a
            In [1]: sc a=ls *py

            # a is a string with embedded newlines
            In [2]: a
            Out[2]: 'setup.py\\nwin32_manual_post_install.py'

            # which can be seen as a list:
            In [3]: a.l
            Out[3]: ['setup.py', 'win32_manual_post_install.py']

            # or as a whitespace-separated string:
            In [4]: a.s
            Out[4]: 'setup.py win32_manual_post_install.py'

            # a.s is useful to pass as a single command line:
            In [5]: !wc -l $a.s
            146 setup.py
            130 win32_manual_post_install.py
            276 total

            # while the list form is useful to loop over:
            In [6]: for f in a.l:
               ...:     !wc -l $f
               ...:
            146 setup.py
            130 win32_manual_post_install.py

        Similarly, the lists returned by the -l option are also special, in
        the sense that you can equally invoke the .s attribute on them to
        automatically get a whitespace-separated string from their contents::

            In [7]: sc -l b=ls *py

            In [8]: b
            Out[8]: ['setup.py', 'win32_manual_post_install.py']

            In [9]: b.s
            Out[9]: 'setup.py win32_manual_post_install.py'

        In summary, both the lists and strings used for output capture have
        the following special attributes::

            .l (or .list) : value as list.
            .n (or .nlstr): value as newline-separated string.
            .s (or .spstr): value as space-separated string.
        """

        opts,args = self.parse_options(parameter_s, 'lv')
        # Try to get a variable name and command to run
        try:
            # the variable name must be obtained from the parse_options
            # output, which uses shlex.split to strip options out.
            var,_ = args.split('=', 1)
            var = var.strip()
            # But the command has to be extracted from the original input
            # parameter_s, not on what parse_options returns, to avoid the
            # quote stripping which shlex.split performs on it.
            _,cmd = parameter_s.split('=', 1)
        except ValueError:
            var,cmd = '',''
        # If all looks ok, proceed
        split = 'l' in opts
        out = self.shell.getoutput(cmd, split=split)
        if 'v' in opts:
            print('%s ==\n%s' % (var, pformat(out)))
        if var:
            self.shell.user_ns.update({var:out})
        else:
            return out

    @line_cell_magic
    def sx(self, line='', cell=None):
        """Shell execute - run shell command and capture output (!! is short-hand).

        %sx command

        IPython will run the given command using commands.getoutput(), and
        return the result formatted as a list (split on '\\n').  Since the
        output is _returned_, it will be stored in ipython's regular output
        cache Out[N] and in the '_N' automatic variables.

        Notes:

        1) If an input line begins with '!!', then %sx is automatically
        invoked.  That is, while::

          !ls

        causes ipython to simply issue system('ls'), typing::

          !!ls

        is a shorthand equivalent to::

          %sx ls

        2) %sx differs from %sc in that %sx automatically splits into a list,
        like '%sc -l'.  The reason for this is to make it as easy as possible
        to process line-oriented shell output via further python commands.
        %sc is meant to provide much finer control, but requires more
        typing.

        3) Just like %sc -l, this is a list with special attributes:
        ::

          .l (or .list) : value as list.
          .n (or .nlstr): value as newline-separated string.
          .s (or .spstr): value as whitespace-separated string.

        This is very useful when trying to use such lists as arguments to
        system commands."""

        if cell is None:
            # line magic
            return self.shell.getoutput(line)
        else:
            # cell magic: optionally store the output in a named variable
            # instead of returning it ('--out=name').
            opts,args = self.parse_options(line, '', 'out=')
            output = self.shell.getoutput(cell)
            out_name = opts.get('out', opts.get('o'))
            if out_name:
                self.shell.user_ns[out_name] = output
            else:
                return output

    system = line_cell_magic('system')(sx)
    bang = cell_magic('!')(sx)

    @line_magic
    def bookmark(self, parameter_s=''):
        """Manage IPython's bookmark system.

        %bookmark <name>       - set bookmark to current dir
        %bookmark <name> <dir> - set bookmark to <dir>
        %bookmark -l           - list all bookmarks
        %bookmark -d <name>    - remove bookmark
        %bookmark -r           - remove all bookmarks

        You can later on access a bookmarked folder with::

          %cd -b <name>

        or simply '%cd <name>' if there is no directory called <name> AND
        there is such a bookmark defined.

        Your bookmarks persist through IPython sessions, but they are
        associated with each profile."""

        opts,args = self.parse_options(parameter_s,'drl',mode='list')
        if len(args) > 2:
            raise UsageError("%bookmark: too many arguments")

        bkms = self.shell.db.get('bookmarks',{})

        if 'd' in opts:
            try:
                todel = args[0]
            except IndexError:
                raise UsageError(
                    "%bookmark -d: must provide a bookmark to delete")
            else:
                try:
                    del bkms[todel]
                except KeyError:
                    raise UsageError(
                        "%%bookmark -d: Can't delete bookmark '%s'" % todel)

        elif 'r' in opts:
            bkms = {}
        elif 'l' in opts:
            bks = bkms.keys()
            bks.sort()
            if bks:
                # Pad names so the '->' arrows line up in the listing.
                size = max(map(len, bks))
            else:
                size = 0
            fmt = '%-'+str(size)+'s -> %s'
            print('Current bookmarks:')
            for bk in bks:
                print(fmt % (bk, bkms[bk]))
        else:
            if not args:
                raise UsageError("%bookmark: You must specify the bookmark name")
            elif len(args)==1:
                bkms[args[0]] = os.getcwdu()
            elif len(args)==2:
                bkms[args[0]] = args[1]
        self.shell.db['bookmarks'] = bkms

    @line_magic
    def pycat(self, parameter_s=''):
        """Show a syntax-highlighted file through a pager.

        This magic is similar to the cat utility, but it will assume the file
        to be Python source and will show it with syntax highlighting.

        This magic command can either take a local filename, an url,
        an history range (see %history) or a macro as argument ::

        %pycat myscript.py
        %pycat 7-27
        %pycat myMacro
        %pycat http://www.example.com/myscript.py
        """
        if not parameter_s:
            raise UsageError('Missing filename, URL, input history range, '
                             'or macro.')

        try :
            cont = self.shell.find_user_code(parameter_s, skip_encoding_cookie=False)
        except (ValueError, IOError):
            print("Error: no such file, variable, URL, history range or macro")
            return

        page.page(self.shell.pycolorize(source_to_unicode(cont)))

    @magic_arguments.magic_arguments()
    @magic_arguments.argument(
        '-a', '--append', action='store_true', default=False,
        help='Append contents of the cell to an existing file. '
             'The file will be created if it does not exist.'
    )
    @magic_arguments.argument(
        'filename', type=unicode_type,
        help='file to write'
    )
    @cell_magic
    def writefile(self, line, cell):
        """Write the contents of the cell to a file.

        The file will be overwritten unless the -a (--append) flag is specified.
        """
        args = magic_arguments.parse_argstring(self.writefile, line)
        filename = os.path.expanduser(unquote_filename(args.filename))

        if os.path.exists(filename):
            if args.append:
                print("Appending to %s" % filename)
            else:
                print("Overwriting %s" % filename)
        else:
            print("Writing %s" % filename)

        mode = 'a' if args.append else 'w'
        with io.open(filename, mode, encoding='utf-8') as f:
            f.write(cell)
@@ -1,144 +1,143 b''
1 1 """Implementation of magic functions for matplotlib/pylab support.
2 2 """
3 3 from __future__ import print_function
4 4 #-----------------------------------------------------------------------------
5 5 # Copyright (c) 2012 The IPython Development Team.
6 6 #
7 7 # Distributed under the terms of the Modified BSD License.
8 8 #
9 9 # The full license is in the file COPYING.txt, distributed with this software.
10 10 #-----------------------------------------------------------------------------
11 11
12 12 #-----------------------------------------------------------------------------
13 13 # Imports
14 14 #-----------------------------------------------------------------------------
15 15
16 16 # Our own packages
17 17 from IPython.config.application import Application
18 18 from IPython.core import magic_arguments
19 19 from IPython.core.magic import Magics, magics_class, line_magic
20 20 from IPython.testing.skipdoctest import skip_doctest
21 21 from IPython.utils.warn import warn
22 22 from IPython.core.pylabtools import backends
23 23
24 24 #-----------------------------------------------------------------------------
25 25 # Magic implementation classes
26 26 #-----------------------------------------------------------------------------
27 27
# Shared optional positional argument used by both %matplotlib and %pylab.
_gui_backend_list = str(tuple(sorted(backends.keys())))
magic_gui_arg = magic_arguments.argument(
    'gui', nargs='?',
    help="""Name of the matplotlib backend to use %s.
    If given, the corresponding matplotlib backend is used,
    otherwise it will be matplotlib's default
    (which you can set in your matplotlib config file).
    """ % _gui_backend_list
)
36 36
37 37
@magics_class
class PylabMagics(Magics):
    """Magics related to matplotlib's pylab support"""

    @skip_doctest
    @line_magic
    @magic_arguments.magic_arguments()
    @magic_gui_arg
    def matplotlib(self, line=''):
        """Set up matplotlib to work interactively.

        This function lets you activate matplotlib interactive support
        at any point during an IPython session.
        It does not import anything into the interactive namespace.

        If you are using the inline matplotlib backend for embedded figures,
        you can adjust its behavior via the %config magic::

            # enable SVG figures, necessary for SVG+XHTML export in the qtconsole
            In [1]: %config InlineBackend.figure_format = 'svg'

            # change the behavior of closing all figures at the end of each
            # execution (cell), or allowing reuse of active figures across
            # cells:
            In [2]: %config InlineBackend.close_figures = False

        Examples
        --------
        In this case, where the MPL default is TkAgg::

            In [2]: %matplotlib
            Using matplotlib backend: TkAgg

        But you can explicitly request a different backend::

            In [3]: %matplotlib qt
        """
        args = magic_arguments.parse_argstring(self.matplotlib, line)
        # enable_matplotlib returns (gui, backend); only the backend is shown.
        backend = self.shell.enable_matplotlib(args.gui)[1]
        self._show_matplotlib_backend(args.gui, backend)

    @skip_doctest
    @line_magic
    @magic_arguments.magic_arguments()
    @magic_arguments.argument(
        '--no-import-all', action='store_true', default=None,
        help="""Prevent IPython from performing ``import *`` into the interactive namespace.

        You can govern the default behavior of this flag with the
        InteractiveShellApp.pylab_import_all configurable.
        """
    )
    @magic_gui_arg
    def pylab(self, line=''):
        """Load numpy and matplotlib to work interactively.

        This function lets you activate pylab (matplotlib, numpy and
        interactive support) at any point during an IPython session.

        %pylab makes the following imports::

            import numpy
            import matplotlib
            from matplotlib import pylab, mlab, pyplot
            np = numpy
            plt = pyplot

            from IPython.display import display
            from IPython.core.pylabtools import figsize, getfigs

            from pylab import *
            from numpy import *

        If you pass `--no-import-all`, the last two `*` imports will be excluded.

        See the %matplotlib magic for more details about activating matplotlib
        without affecting the interactive namespace.
        """
        args = magic_arguments.parse_argstring(self.pylab, line)
        if args.no_import_all is not None:
            # An explicit command-line flag always wins.
            import_all = not args.no_import_all
        elif Application.initialized():
            # Fall back to the app-level configurable; default True if the
            # running application doesn't define it.
            import_all = getattr(Application.instance(), 'pylab_import_all', True)
        else:
            # nothing specified, no app - default True
            import_all = True

        gui, backend, clobbered = self.shell.enable_pylab(args.gui, import_all=import_all)
        self._show_matplotlib_backend(args.gui, backend)
        print("Populating the interactive namespace from numpy and matplotlib")
        if clobbered:
            warn("pylab import has clobbered these variables: %s" % clobbered +
                 "\n`%pylab --no-import-all` prevents importing * from pylab and numpy"
                 )

    def _show_matplotlib_backend(self, gui, backend):
        """Print the matplotlib backend chosen for a default/auto request."""
        if not gui or gui == 'auto':
            print("Using matplotlib backend: %s" % backend)
@@ -1,340 +1,340 b''
1 1 # -*- coding: utf-8 -*-
2 2 """Pylab (matplotlib) support utilities.
3 3
4 4 Authors
5 5 -------
6 6
7 7 * Fernando Perez.
8 8 * Brian Granger
9 9 """
10 10 from __future__ import print_function
11 11
12 12 #-----------------------------------------------------------------------------
13 13 # Copyright (C) 2009 The IPython Development Team
14 14 #
15 15 # Distributed under the terms of the BSD License. The full license is in
16 16 # the file COPYING, distributed as part of this software.
17 17 #-----------------------------------------------------------------------------
18 18
19 19 #-----------------------------------------------------------------------------
20 20 # Imports
21 21 #-----------------------------------------------------------------------------
22 22
23 23 import sys
24 24 from io import BytesIO
25 25
26 26 from IPython.core.display import _pngxy
27 27 from IPython.utils.decorators import flag_calls
28 28
29 29 # If user specifies a GUI, that dictates the backend, otherwise we read the
30 30 # user's mpl default from the mpl rc structure
backends = {
    'tk': 'TkAgg',
    'gtk': 'GTKAgg',
    'wx': 'WXAgg',
    'qt': 'Qt4Agg',  # qt3 not supported
    'qt4': 'Qt4Agg',
    'osx': 'MacOSX',
    'inline': 'module://IPython.kernel.zmq.pylab.backend_inline',
}

# Reverse mapping (matplotlib backend -> IPython gui name), used to pick the
# right GUI integration for a given backend.  Mostly just the inverse of
# ``backends``, plus a few extra backends that share GUI support.
backend2gui = {backend: gui for gui, backend in backends.items()}
# Our tests expect backend2gui to just return 'qt'
backend2gui['Qt4Agg'] = 'qt'
# In the reverse mapping, there are a few extra valid matplotlib backends that
# map to the same GUI support
backend2gui['GTK'] = backend2gui['GTKCairo'] = 'gtk'
backend2gui['WX'] = 'wx'
backend2gui['CocoaAgg'] = 'osx'
51 51
52 52 #-----------------------------------------------------------------------------
53 53 # Matplotlib utilities
54 54 #-----------------------------------------------------------------------------
55 55
56 56
def getfigs(*fig_nums):
    """Get a list of matplotlib figures by figure numbers.

    If no arguments are given, all available figures are returned. If the
    argument list contains references to invalid figures, a warning is
    printed but the function continues processing further figures.

    Parameters
    ----------
    fig_nums : tuple
        A tuple of ints giving the figure numbers of the figures to return.
    """
    from matplotlib._pylab_helpers import Gcf
    if not fig_nums:
        # No explicit request: return every currently live figure.
        fig_managers = Gcf.get_all_fig_managers()
        return [fm.canvas.figure for fm in fig_managers]
    else:
        figs = []
        for num in fig_nums:
            f = Gcf.figs.get(num)
            if f is None:
                # Warn but keep going so valid figures are still collected.
                print('Warning: figure %s not available.' % num)
            else:
                figs.append(f.canvas.figure)
        return figs
82 82
83 83
def figsize(sizex, sizey):
    """Set the default figure size to be [sizex, sizey].

    This is just an easy to remember, convenience wrapper that sets::

        matplotlib.rcParams['figure.figsize'] = [sizex, sizey]
    """
    from matplotlib import rcParams
    rcParams['figure.figsize'] = [sizex, sizey]
93 93
94 94
def print_figure(fig, fmt='png'):
    """Convert a figure to svg or png for inline display."""
    from matplotlib import rcParams
    # An empty figure would render as a big blank area in the qt console,
    # so return nothing at all for it.
    if not (fig.axes or fig.lines):
        return

    face = fig.get_facecolor()
    edge = fig.get_edgecolor()
    dpi = rcParams['savefig.dpi']
    if fmt == 'retina':
        # Retina output is ordinary PNG rendered at double resolution.
        dpi *= 2
        fmt = 'png'
    buf = BytesIO()
    fig.canvas.print_figure(buf, format=fmt, bbox_inches='tight',
                            facecolor=face, edgecolor=edge, dpi=dpi)
    return buf.getvalue()
114 114
def retina_figure(fig):
    """Render *fig* as a pixel-doubled (retina) PNG with halved metadata size."""
    pngdata = print_figure(fig, fmt='retina')
    w, h = _pngxy(pngdata)
    # Report half the pixel dimensions so the image displays at 1x size.
    return pngdata, {'width': w // 2, 'height': h // 2}
121 121
122 122 # We need a little factory function here to create the closure where
123 123 # safe_execfile can live.
# We need a little factory function here to create the closure where
# safe_execfile can live.
def mpl_runner(safe_execfile):
    """Factory to return a matplotlib-enabled runner for %run.

    Parameters
    ----------
    safe_execfile : function
        This must be a function with the same interface as the
        :meth:`safe_execfile` method of IPython.

    Returns
    -------
    A function suitable for use as the ``runner`` argument of the %run magic
    function.
    """

    def mpl_execfile(fname, *where, **kw):
        """matplotlib-aware wrapper around safe_execfile.

        Its interface is identical to that of the :func:`execfile` builtin.

        This is ultimately a call to execfile(), but wrapped in safeties to
        properly handle interactive rendering."""
        import matplotlib
        import matplotlib.pylab as pylab

        # Suspend rendering while the script runs, restoring the previous
        # interactivity setting afterwards.
        was_interactive = matplotlib.rcParams['interactive']
        matplotlib.interactive(False)
        safe_execfile(fname, *where, **kw)
        matplotlib.interactive(was_interactive)
        # If the user's code requested a draw, honour it now, exactly once.
        if pylab.draw_if_interactive.called:
            pylab.draw()
            pylab.draw_if_interactive.called = False

    return mpl_execfile
162 162
163 163
def select_figure_format(shell, fmt):
    """Select figure format for inline backend, can be 'png', 'retina', or 'svg'.

    Using this method ensures only one figure format is active at a time.
    """
    from matplotlib.figure import Figure
    from IPython.kernel.zmq.pylab import backend_inline

    formatters = shell.display_formatter.formatters
    svg_formatter = formatters['image/svg+xml']
    png_formatter = formatters['image/png']

    # Register the printer for the requested format and unregister the
    # competing one, so only a single representation is produced.
    if fmt == 'png':
        svg_formatter.type_printers.pop(Figure, None)
        png_formatter.for_type(Figure, lambda fig: print_figure(fig, 'png'))
    elif fmt in ('png2x', 'retina'):
        svg_formatter.type_printers.pop(Figure, None)
        png_formatter.for_type(Figure, retina_figure)
    elif fmt == 'svg':
        png_formatter.type_printers.pop(Figure, None)
        svg_formatter.for_type(Figure, lambda fig: print_figure(fig, 'svg'))
    else:
        raise ValueError("supported formats are: 'png', 'retina', 'svg', not %r" % fmt)

    # set the format to be used in the backend()
    backend_inline._figure_format = fmt
189 189
190 190 #-----------------------------------------------------------------------------
191 191 # Code for initializing matplotlib and importing pylab
192 192 #-----------------------------------------------------------------------------
193 193
194 194
def find_gui_and_backend(gui=None, gui_select=None):
    """Given a gui string return the gui and mpl backend.

    Parameters
    ----------
    gui : str
        Can be one of ('tk','gtk','wx','qt','qt4','inline').
    gui_select : str
        Can be one of ('tk','gtk','wx','qt','qt4','inline').
        This is any gui already selected by the shell.

    Returns
    -------
    A tuple of (gui, backend) where backend is one of ('TkAgg','GTKAgg',
    'WXAgg','Qt4Agg','module://IPython.kernel.zmq.pylab.backend_inline').
    """
    import matplotlib

    if gui and gui != 'auto':
        # An explicit gui request dictates the backend.
        backend = backends[gui]
    else:
        # Read the default from the *original* rc data, not mpl.rcParams,
        # since a prior %matplotlib invocation may have overwritten that.
        # WARNING: this assumes matplotlib 1.1 or newer!!
        backend = matplotlib.rcParamsOrig['backend']
        # Derive the matching gui so inputhook can be activated accordingly.
        gui = backend2gui.get(backend, None)

    # Once a gui is active, only it (or inline) is allowed; on mismatch,
    # fall back to the already-selected gui and its backend.
    if gui_select and gui != gui_select:
        gui = gui_select
        backend = backends[gui]

    return gui, backend
234 234
235 235
def activate_matplotlib(backend):
    """Activate the given backend and set interactive to True."""
    import matplotlib
    matplotlib.interactive(True)

    # Matplotlib had a bug where even switch_backend could not force the
    # rcParam to update, so set it *before* switch_backend's module magic.
    matplotlib.rcParams['backend'] = backend

    import matplotlib.pyplot
    matplotlib.pyplot.switch_backend(backend)

    # pylab must be imported last in the matplotlib series, after the
    # backend and interactivity choices have been made.
    import matplotlib.pylab as pylab

    pylab.show._needmain = False
    # Wrap draw_if_interactive with a decorator adding a 'called' flag, so
    # we can detect at runtime whether user code invoked it.
    pylab.draw_if_interactive = flag_calls(pylab.draw_if_interactive)
258 258
259 259
def import_pylab(user_ns, import_all=True):
    """Populate the namespace with pylab-related values.

    Imports matplotlib, pylab, numpy, and everything from pylab and numpy.

    Also imports a few names from IPython (figsize, display, getfigs)

    """
    # np/plt are the aliases we're trying to standardize on; providing them
    # by default greatly helps that.
    setup = ("import numpy\n"
             "import matplotlib\n"
             "from matplotlib import pylab, mlab, pyplot\n"
             "np = numpy\n"
             "plt = pyplot\n"
             )
    exec(setup, user_ns)

    if import_all:
        exec("from matplotlib.pylab import *\n"
             "from numpy import *\n", user_ns)

    # IPython-provided conveniences.
    from IPython.core.display import display
    user_ns['display'] = display
    user_ns['figsize'] = figsize
    user_ns['getfigs'] = getfigs
291 291
292 292
def configure_inline_support(shell, backend):
    """Configure an IPython shell object for matplotlib use.

    Parameters
    ----------
    shell : InteractiveShell instance

    backend : matplotlib backend
    """
    # If using our svg payload backend, register the post-execution
    # function that will pick up the results for display. This can only be
    # done with access to the real shell object.

    # Note: if we can't load the inline backend, then there's no point
    # continuing (such as in terminal-only shells in environments without
    # zeromq available).
    try:
        from IPython.kernel.zmq.pylab.backend_inline import InlineBackend
    except ImportError:
        return
    from matplotlib import pyplot

    cfg = InlineBackend.instance(parent=shell)
    cfg.shell = shell
    if cfg not in shell.configurables:
        shell.configurables.append(cfg)

    if backend == backends['inline']:
        from IPython.kernel.zmq.pylab.backend_inline import flush_figures
        shell.register_post_execute(flush_figures)

        # Save rcParams that will be overwritten by the inline backend's rc
        # settings, so they can be restored when the backend is switched away.
        shell._saved_rcParams = dict()
        for k in cfg.rc:
            shell._saved_rcParams[k] = pyplot.rcParams[k]
        # load inline_rc
        pyplot.rcParams.update(cfg.rc)
    else:
        # Switching away from inline: unregister its post-execute hook and
        # restore any rcParams it had overridden.
        from IPython.kernel.zmq.pylab.backend_inline import flush_figures
        if flush_figures in shell._post_execute:
            shell._post_execute.pop(flush_figures)
        if hasattr(shell, '_saved_rcParams'):
            pyplot.rcParams.update(shell._saved_rcParams)
            del shell._saved_rcParams

    # Setup the default figure format
    select_figure_format(shell, cfg.figure_format)
340 340
@@ -1,410 +1,410 b''
1 1 # encoding: utf-8
2 2 """
3 3 A mixin for :class:`~IPython.core.application.Application` classes that
4 4 launch InteractiveShell instances, load extensions, etc.
5 5
6 6 Authors
7 7 -------
8 8
9 9 * Min Ragan-Kelley
10 10 """
11 11
12 12 #-----------------------------------------------------------------------------
13 13 # Copyright (C) 2008-2011 The IPython Development Team
14 14 #
15 15 # Distributed under the terms of the BSD License. The full license is in
16 16 # the file COPYING, distributed as part of this software.
17 17 #-----------------------------------------------------------------------------
18 18
19 19 #-----------------------------------------------------------------------------
20 20 # Imports
21 21 #-----------------------------------------------------------------------------
22 22
23 23 from __future__ import absolute_import
24 24 from __future__ import print_function
25 25
26 26 import glob
27 27 import os
28 28 import sys
29 29
30 30 from IPython.config.application import boolean_flag
31 31 from IPython.config.configurable import Configurable
32 32 from IPython.config.loader import Config
33 33 from IPython.core import pylabtools
34 34 from IPython.utils import py3compat
35 35 from IPython.utils.contexts import preserve_keys
36 36 from IPython.utils.path import filefind
37 37 from IPython.utils.traitlets import (
38 38 Unicode, Instance, List, Bool, CaselessStrEnum, Dict
39 39 )
40 40 from IPython.lib.inputhook import guis
41 41
42 42 #-----------------------------------------------------------------------------
43 43 # Aliases and Flags
44 44 #-----------------------------------------------------------------------------
45 45
46 46 gui_keys = tuple(sorted([ key for key in guis if key is not None ]))
47 47
48 48 backend_keys = sorted(pylabtools.backends.keys())
49 49 backend_keys.insert(0, 'auto')
50 50
51 51 shell_flags = {}
52 52
53 53 addflag = lambda *args: shell_flags.update(boolean_flag(*args))
54 54 addflag('autoindent', 'InteractiveShell.autoindent',
55 55 'Turn on autoindenting.', 'Turn off autoindenting.'
56 56 )
57 57 addflag('automagic', 'InteractiveShell.automagic',
58 58 """Turn on the auto calling of magic commands. Type %%magic at the
59 59 IPython prompt for more information.""",
60 60 'Turn off the auto calling of magic commands.'
61 61 )
62 62 addflag('pdb', 'InteractiveShell.pdb',
63 63 "Enable auto calling the pdb debugger after every exception.",
64 64 "Disable auto calling the pdb debugger after every exception."
65 65 )
66 66 # pydb flag doesn't do any config, as core.debugger switches on import,
67 67 # which is before parsing. This just allows the flag to be passed.
68 68 shell_flags.update(dict(
69 69 pydb = ({},
70 70 """Use the third party 'pydb' package as debugger, instead of pdb.
71 71 Requires that pydb is installed."""
72 72 )
73 73 ))
74 74 addflag('pprint', 'PlainTextFormatter.pprint',
75 75 "Enable auto pretty printing of results.",
76 76 "Disable auto pretty printing of results."
77 77 )
78 78 addflag('color-info', 'InteractiveShell.color_info',
79 79 """IPython can display information about objects via a set of func-
80 80 tions, and optionally can use colors for this, syntax highlighting
81 81 source code and various other elements. However, because this
82 82 information is passed through a pager (like 'less') and many pagers get
83 83 confused with color codes, this option is off by default. You can test
84 84 it and turn it on permanently in your ipython_config.py file if it
85 85 works for you. Test it and turn it on permanently if it works with
86 86 your system. The magic function %%color_info allows you to toggle this
87 87 interactively for testing.""",
88 88 "Disable using colors for info related things."
89 89 )
90 90 addflag('deep-reload', 'InteractiveShell.deep_reload',
91 91 """Enable deep (recursive) reloading by default. IPython can use the
92 92 deep_reload module which reloads changes in modules recursively (it
93 93 replaces the reload() function, so you don't need to change anything to
94 94 use it). deep_reload() forces a full reload of modules whose code may
95 95 have changed, which the default reload() function does not. When
96 96 deep_reload is off, IPython will use the normal reload(), but
97 97 deep_reload will still be available as dreload(). This feature is off
98 98 by default [which means that you have both normal reload() and
99 99 dreload()].""",
100 100 "Disable deep (recursive) reloading by default."
101 101 )
102 102 nosep_config = Config()
103 103 nosep_config.InteractiveShell.separate_in = ''
104 104 nosep_config.InteractiveShell.separate_out = ''
105 105 nosep_config.InteractiveShell.separate_out2 = ''
106 106
107 107 shell_flags['nosep']=(nosep_config, "Eliminate all spacing between prompts.")
108 108 shell_flags['pylab'] = (
109 109 {'InteractiveShellApp' : {'pylab' : 'auto'}},
110 110 """Pre-load matplotlib and numpy for interactive use with
111 111 the default matplotlib backend."""
112 112 )
113 113 shell_flags['matplotlib'] = (
114 114 {'InteractiveShellApp' : {'matplotlib' : 'auto'}},
115 115 """Configure matplotlib for interactive use with
116 116 the default matplotlib backend."""
117 117 )
118 118
119 119 # it's possible we don't want short aliases for *all* of these:
120 120 shell_aliases = dict(
121 121 autocall='InteractiveShell.autocall',
122 122 colors='InteractiveShell.colors',
123 123 logfile='InteractiveShell.logfile',
124 124 logappend='InteractiveShell.logappend',
125 125 c='InteractiveShellApp.code_to_run',
126 126 m='InteractiveShellApp.module_to_run',
127 127 ext='InteractiveShellApp.extra_extension',
128 128 gui='InteractiveShellApp.gui',
129 129 pylab='InteractiveShellApp.pylab',
130 130 matplotlib='InteractiveShellApp.matplotlib',
131 131 )
132 132 shell_aliases['cache-size'] = 'InteractiveShell.cache_size'
133 133
134 134 #-----------------------------------------------------------------------------
135 135 # Main classes and functions
136 136 #-----------------------------------------------------------------------------
137 137
138 138 class InteractiveShellApp(Configurable):
139 139 """A Mixin for applications that start InteractiveShell instances.
140 140
141 141 Provides configurables for loading extensions and executing files
142 142 as part of configuring a Shell environment.
143 143
144 144 The following methods should be called by the :meth:`initialize` method
145 145 of the subclass:
146 146
147 147 - :meth:`init_path`
148 148 - :meth:`init_shell` (to be implemented by the subclass)
149 149 - :meth:`init_gui_pylab`
150 150 - :meth:`init_extensions`
151 151 - :meth:`init_code`
152 152 """
153 153 extensions = List(Unicode, config=True,
154 154 help="A list of dotted module names of IPython extensions to load."
155 155 )
156 156 extra_extension = Unicode('', config=True,
157 157 help="dotted module name of an IPython extension to load."
158 158 )
159 159 def _extra_extension_changed(self, name, old, new):
160 160 if new:
161 161 # add to self.extensions
162 162 self.extensions.append(new)
163 163
164 164 # Extensions that are always loaded (not configurable)
165 165 default_extensions = List(Unicode, [u'storemagic'], config=False)
166 166
167 167 exec_files = List(Unicode, config=True,
168 168 help="""List of files to run at IPython startup."""
169 169 )
170 170 file_to_run = Unicode('', config=True,
171 171 help="""A file to be run""")
172 172
173 173 exec_lines = List(Unicode, config=True,
174 174 help="""lines of code to run at IPython startup."""
175 175 )
176 176 code_to_run = Unicode('', config=True,
177 177 help="Execute the given command string."
178 178 )
179 179 module_to_run = Unicode('', config=True,
180 180 help="Run the module as a script."
181 181 )
182 182 gui = CaselessStrEnum(gui_keys, config=True,
183 183 help="Enable GUI event loop integration with any of {0}.".format(gui_keys)
184 184 )
185 185 matplotlib = CaselessStrEnum(backend_keys,
186 186 config=True,
187 187 help="""Configure matplotlib for interactive use with
188 188 the default matplotlib backend."""
189 189 )
190 190 pylab = CaselessStrEnum(backend_keys,
191 191 config=True,
192 192 help="""Pre-load matplotlib and numpy for interactive use,
193 193 selecting a particular matplotlib backend and loop integration.
194 194 """
195 195 )
196 196 pylab_import_all = Bool(True, config=True,
197 197 help="""If true, IPython will populate the user namespace with numpy, pylab, etc.
198 198 and an 'import *' is done from numpy and pylab, when using pylab mode.
199 199
200 200 When False, pylab mode should not import any names into the user namespace.
201 201 """
202 202 )
203 203 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
204 204
205 205 user_ns = Instance(dict, args=None, allow_none=True)
206 206 def _user_ns_changed(self, name, old, new):
207 207 if self.shell is not None:
208 208 self.shell.user_ns = new
209 209 self.shell.init_user_ns()
210 210
211 211 def init_path(self):
212 212 """Add current working directory, '', to sys.path"""
213 213 if sys.path[0] != '':
214 214 sys.path.insert(0, '')
215 215
216 216 def init_shell(self):
217 217 raise NotImplementedError("Override in subclasses")
218 218
219 219 def init_gui_pylab(self):
220 220 """Enable GUI event loop integration, taking pylab into account."""
221 221 enable = False
222 222 shell = self.shell
223 223 if self.pylab:
224 224 enable = lambda key: shell.enable_pylab(key, import_all=self.pylab_import_all)
225 225 key = self.pylab
226 226 elif self.matplotlib:
227 227 enable = shell.enable_matplotlib
228 228 key = self.matplotlib
229 229 elif self.gui:
230 230 enable = shell.enable_gui
231 231 key = self.gui
232 232
233 233 if not enable:
234 234 return
235 235
236 236 try:
237 237 r = enable(key)
238 238 except ImportError:
239 239 self.log.warn("Eventloop or matplotlib integration failed. Is matplotlib installed?")
240 240 self.shell.showtraceback()
241 241 return
242 242 except Exception:
243 243 self.log.warn("GUI event loop or pylab initialization failed")
244 244 self.shell.showtraceback()
245 245 return
246 246
247 247 if isinstance(r, tuple):
248 248 gui, backend = r[:2]
249 249 self.log.info("Enabling GUI event loop integration, "
250 250 "eventloop=%s, matplotlib=%s", gui, backend)
251 251 if key == "auto":
252 print(("Using matplotlib backend: %s" % backend))
252 print("Using matplotlib backend: %s" % backend)
253 253 else:
254 254 gui = r
255 255 self.log.info("Enabling GUI event loop integration, "
256 256 "eventloop=%s", gui)
257 257
258 258 def init_extensions(self):
259 259 """Load all IPython extensions in IPythonApp.extensions.
260 260
261 261 This uses the :meth:`ExtensionManager.load_extensions` to load all
262 262 the extensions listed in ``self.extensions``.
263 263 """
264 264 try:
265 265 self.log.debug("Loading IPython extensions...")
266 266 extensions = self.default_extensions + self.extensions
267 267 for ext in extensions:
268 268 try:
269 269 self.log.info("Loading IPython extension: %s" % ext)
270 270 self.shell.extension_manager.load_extension(ext)
271 271 except:
272 272 self.log.warn("Error in loading extension: %s" % ext +
273 273 "\nCheck your config files in %s" % self.profile_dir.location
274 274 )
275 275 self.shell.showtraceback()
276 276 except:
277 277 self.log.warn("Unknown error in loading extensions:")
278 278 self.shell.showtraceback()
279 279
280 280 def init_code(self):
281 281 """run the pre-flight code, specified via exec_lines"""
282 282 self._run_startup_files()
283 283 self._run_exec_lines()
284 284 self._run_exec_files()
285 285 self._run_cmd_line_code()
286 286 self._run_module()
287 287
288 288 # flush output, so itwon't be attached to the first cell
289 289 sys.stdout.flush()
290 290 sys.stderr.flush()
291 291
292 292 # Hide variables defined here from %who etc.
293 293 self.shell.user_ns_hidden.update(self.shell.user_ns)
294 294
295 295 def _run_exec_lines(self):
296 296 """Run lines of code in IPythonApp.exec_lines in the user's namespace."""
297 297 if not self.exec_lines:
298 298 return
299 299 try:
300 300 self.log.debug("Running code from IPythonApp.exec_lines...")
301 301 for line in self.exec_lines:
302 302 try:
303 303 self.log.info("Running code in user namespace: %s" %
304 304 line)
305 305 self.shell.run_cell(line, store_history=False)
306 306 except:
307 307 self.log.warn("Error in executing line in user "
308 308 "namespace: %s" % line)
309 309 self.shell.showtraceback()
310 310 except:
311 311 self.log.warn("Unknown error in handling IPythonApp.exec_lines:")
312 312 self.shell.showtraceback()
313 313
314 314 def _exec_file(self, fname):
315 315 try:
316 316 full_filename = filefind(fname, [u'.', self.ipython_dir])
317 317 except IOError as e:
318 318 self.log.warn("File not found: %r"%fname)
319 319 return
320 320 # Make sure that the running script gets a proper sys.argv as if it
321 321 # were run from a system shell.
322 322 save_argv = sys.argv
323 323 sys.argv = [full_filename] + self.extra_args[1:]
324 324 # protect sys.argv from potential unicode strings on Python 2:
325 325 if not py3compat.PY3:
326 326 sys.argv = [ py3compat.cast_bytes(a) for a in sys.argv ]
327 327 try:
328 328 if os.path.isfile(full_filename):
329 329 self.log.info("Running file in user namespace: %s" %
330 330 full_filename)
331 331 # Ensure that __file__ is always defined to match Python
332 332 # behavior.
333 333 with preserve_keys(self.shell.user_ns, '__file__'):
334 334 self.shell.user_ns['__file__'] = fname
335 335 if full_filename.endswith('.ipy'):
336 336 self.shell.safe_execfile_ipy(full_filename)
337 337 else:
338 338 # default to python, even without extension
339 339 self.shell.safe_execfile(full_filename,
340 340 self.shell.user_ns)
341 341 finally:
342 342 sys.argv = save_argv
343 343
344 344 def _run_startup_files(self):
345 345 """Run files from profile startup directory"""
346 346 startup_dir = self.profile_dir.startup_dir
347 347 startup_files = []
348 348 if os.environ.get('PYTHONSTARTUP', False):
349 349 startup_files.append(os.environ['PYTHONSTARTUP'])
350 350 startup_files += glob.glob(os.path.join(startup_dir, '*.py'))
351 351 startup_files += glob.glob(os.path.join(startup_dir, '*.ipy'))
352 352 if not startup_files:
353 353 return
354 354
355 355 self.log.debug("Running startup files from %s...", startup_dir)
356 356 try:
357 357 for fname in sorted(startup_files):
358 358 self._exec_file(fname)
359 359 except:
360 360 self.log.warn("Unknown error in handling startup files:")
361 361 self.shell.showtraceback()
362 362
363 363 def _run_exec_files(self):
364 364 """Run files from IPythonApp.exec_files"""
365 365 if not self.exec_files:
366 366 return
367 367
368 368 self.log.debug("Running files in IPythonApp.exec_files...")
369 369 try:
370 370 for fname in self.exec_files:
371 371 self._exec_file(fname)
372 372 except:
373 373 self.log.warn("Unknown error in handling IPythonApp.exec_files:")
374 374 self.shell.showtraceback()
375 375
376 376 def _run_cmd_line_code(self):
377 377 """Run code or file specified at the command-line"""
378 378 if self.code_to_run:
379 379 line = self.code_to_run
380 380 try:
381 381 self.log.info("Running code given at command line (c=): %s" %
382 382 line)
383 383 self.shell.run_cell(line, store_history=False)
384 384 except:
385 385 self.log.warn("Error in executing line in user namespace: %s" %
386 386 line)
387 387 self.shell.showtraceback()
388 388
389 389 # Like Python itself, ignore the second if the first of these is present
390 390 elif self.file_to_run:
391 391 fname = self.file_to_run
392 392 try:
393 393 self._exec_file(fname)
394 394 except:
395 395 self.log.warn("Error in executing file in user namespace: %s" %
396 396 fname)
397 397 self.shell.showtraceback()
398 398
399 399 def _run_module(self):
400 400 """Run module specified at the command-line."""
401 401 if self.module_to_run:
402 402 # Make sure that the module gets a proper sys.argv as if it were
403 403 # run using `python -m`.
404 404 save_argv = sys.argv
405 405 sys.argv = [sys.executable] + self.extra_args
406 406 try:
407 407 self.shell.safe_run_module(self.module_to_run,
408 408 self.shell.user_ns)
409 409 finally:
410 410 sys.argv = save_argv
@@ -1,508 +1,507 b''
1 1 """IPython extension to reload modules before executing user code.
2 2
3 3 ``autoreload`` reloads modules automatically before entering the execution of
4 4 code typed at the IPython prompt.
5 5
6 6 This makes for example the following workflow possible:
7 7
8 8 .. sourcecode:: ipython
9 9
10 10 In [1]: %load_ext autoreload
11 11
12 12 In [2]: %autoreload 2
13 13
14 14 In [3]: from foo import some_function
15 15
16 16 In [4]: some_function()
17 17 Out[4]: 42
18 18
19 19 In [5]: # open foo.py in an editor and change some_function to return 43
20 20
21 21 In [6]: some_function()
22 22 Out[6]: 43
23 23
24 24 The module was reloaded without reloading it explicitly, and the object
25 25 imported with ``from foo import ...`` was also updated.
26 26
27 27 Usage
28 28 =====
29 29
30 30 The following magic commands are provided:
31 31
32 32 ``%autoreload``
33 33
34 34 Reload all modules (except those excluded by ``%aimport``)
35 35 automatically now.
36 36
37 37 ``%autoreload 0``
38 38
39 39 Disable automatic reloading.
40 40
41 41 ``%autoreload 1``
42 42
43 43 Reload all modules imported with ``%aimport`` every time before
44 44 executing the Python code typed.
45 45
46 46 ``%autoreload 2``
47 47
48 48 Reload all modules (except those excluded by ``%aimport``) every
49 49 time before executing the Python code typed.
50 50
51 51 ``%aimport``
52 52
53 53 List modules which are to be automatically imported or not to be imported.
54 54
55 55 ``%aimport foo``
56 56
57 57 Import module 'foo' and mark it to be autoreloaded for ``%autoreload 1``
58 58
59 59 ``%aimport -foo``
60 60
61 61 Mark module 'foo' to not be autoreloaded.
62 62
63 63 Caveats
64 64 =======
65 65
66 66 Reloading Python modules in a reliable way is in general difficult,
67 67 and unexpected things may occur. ``%autoreload`` tries to work around
68 68 common pitfalls by replacing function code objects and parts of
69 69 classes previously in the module with new versions. This makes the
70 70 following things to work:
71 71
72 72 - Functions and classes imported via 'from xxx import foo' are upgraded
73 73 to new versions when 'xxx' is reloaded.
74 74
75 75 - Methods and properties of classes are upgraded on reload, so that
76 76 calling 'c.foo()' on an object 'c' created before the reload causes
77 77 the new code for 'foo' to be executed.
78 78
79 79 Some of the known remaining caveats are:
80 80
81 81 - Replacing code objects does not always succeed: changing a @property
82 82 in a class to an ordinary method or a method to a member variable
83 83 can cause problems (but in old objects only).
84 84
85 85 - Functions that are removed (eg. via monkey-patching) from a module
86 86 before it is reloaded are not upgraded.
87 87
88 88 - C extension modules cannot be reloaded, and so cannot be autoreloaded.
89 89 """
90 90 from __future__ import print_function
91 91
92 92 skip_doctest = True
93 93
94 94 #-----------------------------------------------------------------------------
95 95 # Copyright (C) 2000 Thomas Heller
96 96 # Copyright (C) 2008 Pauli Virtanen <pav@iki.fi>
97 97 # Copyright (C) 2012 The IPython Development Team
98 98 #
99 99 # Distributed under the terms of the BSD License. The full license is in
100 100 # the file COPYING, distributed as part of this software.
101 101 #-----------------------------------------------------------------------------
102 102 #
103 103 # This IPython module is written by Pauli Virtanen, based on the autoreload
104 104 # code by Thomas Heller.
105 105
106 106 #-----------------------------------------------------------------------------
107 107 # Imports
108 108 #-----------------------------------------------------------------------------
109 109
110 110 import os
111 111 import sys
112 112 import traceback
113 113 import types
114 114 import weakref
115 115
116 116 try:
117 117 # Reload is not defined by default in Python3.
118 118 reload
119 119 except NameError:
120 120 from imp import reload
121 121
122 122 from IPython.utils import openpy
123 123 from IPython.utils.py3compat import PY3
124 124
125 125 #------------------------------------------------------------------------------
126 126 # Autoreload functionality
127 127 #------------------------------------------------------------------------------
128 128
129 129 class ModuleReloader(object):
130 130 enabled = False
131 131 """Whether this reloader is enabled"""
132 132
133 133 failed = {}
134 134 """Modules that failed to reload: {module: mtime-on-failed-reload, ...}"""
135 135
136 136 modules = {}
137 137 """Modules specially marked as autoreloadable."""
138 138
139 139 skip_modules = {}
140 140 """Modules specially marked as not autoreloadable."""
141 141
142 142 check_all = True
143 143 """Autoreload all modules, not just those listed in 'modules'"""
144 144
145 145 old_objects = {}
146 146 """(module-name, name) -> weakref, for replacing old code objects"""
147 147
148 148 def mark_module_skipped(self, module_name):
149 149 """Skip reloading the named module in the future"""
150 150 try:
151 151 del self.modules[module_name]
152 152 except KeyError:
153 153 pass
154 154 self.skip_modules[module_name] = True
155 155
156 156 def mark_module_reloadable(self, module_name):
157 157 """Reload the named module in the future (if it is imported)"""
158 158 try:
159 159 del self.skip_modules[module_name]
160 160 except KeyError:
161 161 pass
162 162 self.modules[module_name] = True
163 163
164 164 def aimport_module(self, module_name):
165 165 """Import a module, and mark it reloadable
166 166
167 167 Returns
168 168 -------
169 169 top_module : module
 170 170 The imported module if it is top-level, or the top-level
 171 171 ancestor module otherwise.
 172 172 top_name : string
 173 173 Name of top_module
173 173
174 174 """
175 175 self.mark_module_reloadable(module_name)
176 176
177 177 __import__(module_name)
178 178 top_name = module_name.split('.')[0]
179 179 top_module = sys.modules[top_name]
180 180 return top_module, top_name
181 181
182 182 def check(self, check_all=False):
183 183 """Check whether some modules need to be reloaded."""
184 184
185 185 if not self.enabled and not check_all:
186 186 return
187 187
188 188 if check_all or self.check_all:
189 189 modules = list(sys.modules.keys())
190 190 else:
191 191 modules = list(self.modules.keys())
192 192
193 193 for modname in modules:
194 194 m = sys.modules.get(modname, None)
195 195
196 196 if modname in self.skip_modules:
197 197 continue
198 198
199 199 if not hasattr(m, '__file__'):
200 200 continue
201 201
202 202 if m.__name__ == '__main__':
203 203 # we cannot reload(__main__)
204 204 continue
205 205
206 206 filename = m.__file__
207 207 path, ext = os.path.splitext(filename)
208 208
209 209 if ext.lower() == '.py':
210 210 pyc_filename = openpy.cache_from_source(filename)
211 211 py_filename = filename
212 212 else:
213 213 pyc_filename = filename
214 214 try:
215 215 py_filename = openpy.source_from_cache(filename)
216 216 except ValueError:
217 217 continue
218 218
219 219 try:
220 220 pymtime = os.stat(py_filename).st_mtime
221 221 if pymtime <= os.stat(pyc_filename).st_mtime:
222 222 continue
223 223 if self.failed.get(py_filename, None) == pymtime:
224 224 continue
225 225 except OSError:
226 226 continue
227 227
228 228 try:
229 229 superreload(m, reload, self.old_objects)
230 230 if py_filename in self.failed:
231 231 del self.failed[py_filename]
232 232 except:
233 233 print("[autoreload of %s failed: %s]" % (
234 234 modname, traceback.format_exc(1)), file=sys.stderr)
235 235 self.failed[py_filename] = pymtime
236 236
237 237 #------------------------------------------------------------------------------
238 238 # superreload
239 239 #------------------------------------------------------------------------------
240 240
241 241 if PY3:
242 242 func_attrs = ['__code__', '__defaults__', '__doc__',
243 243 '__closure__', '__globals__', '__dict__']
244 244 else:
245 245 func_attrs = ['func_code', 'func_defaults', 'func_doc',
246 246 'func_closure', 'func_globals', 'func_dict']
247 247
248 248
249 249 def update_function(old, new):
250 250 """Upgrade the code object of a function"""
251 251 for name in func_attrs:
252 252 try:
253 253 setattr(old, name, getattr(new, name))
254 254 except (AttributeError, TypeError):
255 255 pass
256 256
257 257
258 258 def update_class(old, new):
259 259 """Replace stuff in the __dict__ of a class, and upgrade
260 260 method code objects"""
261 261 for key in list(old.__dict__.keys()):
262 262 old_obj = getattr(old, key)
263 263
264 264 try:
265 265 new_obj = getattr(new, key)
266 266 except AttributeError:
267 267 # obsolete attribute: remove it
268 268 try:
269 269 delattr(old, key)
270 270 except (AttributeError, TypeError):
271 271 pass
272 272 continue
273 273
274 274 if update_generic(old_obj, new_obj): continue
275 275
276 276 try:
277 277 setattr(old, key, getattr(new, key))
278 278 except (AttributeError, TypeError):
279 279 pass # skip non-writable attributes
280 280
281 281
282 282 def update_property(old, new):
283 283 """Replace get/set/del functions of a property"""
284 284 update_generic(old.fdel, new.fdel)
285 285 update_generic(old.fget, new.fget)
286 286 update_generic(old.fset, new.fset)
287 287
288 288
289 289 def isinstance2(a, b, typ):
290 290 return isinstance(a, typ) and isinstance(b, typ)
291 291
292 292
293 293 UPDATE_RULES = [
294 294 (lambda a, b: isinstance2(a, b, type),
295 295 update_class),
296 296 (lambda a, b: isinstance2(a, b, types.FunctionType),
297 297 update_function),
298 298 (lambda a, b: isinstance2(a, b, property),
299 299 update_property),
300 300 ]
301 301
302 302
303 303 if PY3:
304 304 UPDATE_RULES.extend([(lambda a, b: isinstance2(a, b, types.MethodType),
305 305 lambda a, b: update_function(a.__func__, b.__func__)),
306 306 ])
307 307 else:
308 308 UPDATE_RULES.extend([(lambda a, b: isinstance2(a, b, types.ClassType),
309 309 update_class),
310 310 (lambda a, b: isinstance2(a, b, types.MethodType),
311 311 lambda a, b: update_function(a.__func__, b.__func__)),
312 312 ])
313 313
314 314
315 315 def update_generic(a, b):
316 316 for type_check, update in UPDATE_RULES:
317 317 if type_check(a, b):
318 318 update(a, b)
319 319 return True
320 320 return False
321 321
322 322
323 323 class StrongRef(object):
324 324 def __init__(self, obj):
325 325 self.obj = obj
326 326 def __call__(self):
327 327 return self.obj
328 328
329 329
330 330 def superreload(module, reload=reload, old_objects={}):
331 331 """Enhanced version of the builtin reload function.
332 332
333 333 superreload remembers objects previously in the module, and
334 334
335 335 - upgrades the class dictionary of every old class in the module
336 336 - upgrades the code object of every old function and method
337 337 - clears the module's namespace before reloading
338 338
339 339 """
340 340
341 341 # collect old objects in the module
342 342 for name, obj in list(module.__dict__.items()):
343 343 if not hasattr(obj, '__module__') or obj.__module__ != module.__name__:
344 344 continue
345 345 key = (module.__name__, name)
346 346 try:
347 347 old_objects.setdefault(key, []).append(weakref.ref(obj))
348 348 except TypeError:
349 349 # weakref doesn't work for all types;
350 350 # create strong references for 'important' cases
351 351 if not PY3 and isinstance(obj, types.ClassType):
352 352 old_objects.setdefault(key, []).append(StrongRef(obj))
353 353
354 354 # reload module
355 355 try:
356 356 # clear namespace first from old cruft
357 357 old_dict = module.__dict__.copy()
358 358 old_name = module.__name__
359 359 module.__dict__.clear()
360 360 module.__dict__['__name__'] = old_name
361 361 module.__dict__['__loader__'] = old_dict['__loader__']
362 362 except (TypeError, AttributeError, KeyError):
363 363 pass
364 364
365 365 try:
366 366 module = reload(module)
367 367 except:
368 368 # restore module dictionary on failed reload
369 369 module.__dict__.update(old_dict)
370 370 raise
371 371
372 372 # iterate over all objects and update functions & classes
373 373 for name, new_obj in list(module.__dict__.items()):
374 374 key = (module.__name__, name)
375 375 if key not in old_objects: continue
376 376
377 377 new_refs = []
378 378 for old_ref in old_objects[key]:
379 379 old_obj = old_ref()
380 380 if old_obj is None: continue
381 381 new_refs.append(old_ref)
382 382 update_generic(old_obj, new_obj)
383 383
384 384 if new_refs:
385 385 old_objects[key] = new_refs
386 386 else:
387 387 del old_objects[key]
388 388
389 389 return module
390 390
391 391 #------------------------------------------------------------------------------
392 392 # IPython connectivity
393 393 #------------------------------------------------------------------------------
394 394
395 395 from IPython.core.hooks import TryNext
396 396 from IPython.core.magic import Magics, magics_class, line_magic
397 397
398 398 @magics_class
399 399 class AutoreloadMagics(Magics):
400 400 def __init__(self, *a, **kw):
401 401 super(AutoreloadMagics, self).__init__(*a, **kw)
402 402 self._reloader = ModuleReloader()
403 403 self._reloader.check_all = False
404 404
405 405 @line_magic
406 406 def autoreload(self, parameter_s=''):
407 407 r"""%autoreload => Reload modules automatically
408 408
409 409 %autoreload
410 410 Reload all modules (except those excluded by %aimport) automatically
411 411 now.
412 412
413 413 %autoreload 0
414 414 Disable automatic reloading.
415 415
416 416 %autoreload 1
417 417 Reload all modules imported with %aimport every time before executing
418 418 the Python code typed.
419 419
420 420 %autoreload 2
421 421 Reload all modules (except those excluded by %aimport) every time
422 422 before executing the Python code typed.
423 423
424 424 Reloading Python modules in a reliable way is in general
425 425 difficult, and unexpected things may occur. %autoreload tries to
426 426 work around common pitfalls by replacing function code objects and
427 427 parts of classes previously in the module with new versions. This
428 428 makes the following things to work:
429 429
430 430 - Functions and classes imported via 'from xxx import foo' are upgraded
431 431 to new versions when 'xxx' is reloaded.
432 432
433 433 - Methods and properties of classes are upgraded on reload, so that
434 434 calling 'c.foo()' on an object 'c' created before the reload causes
435 435 the new code for 'foo' to be executed.
436 436
437 437 Some of the known remaining caveats are:
438 438
439 439 - Replacing code objects does not always succeed: changing a @property
440 440 in a class to an ordinary method or a method to a member variable
441 441 can cause problems (but in old objects only).
442 442
443 443 - Functions that are removed (eg. via monkey-patching) from a module
444 444 before it is reloaded are not upgraded.
445 445
446 446 - C extension modules cannot be reloaded, and so cannot be
447 447 autoreloaded.
448 448
449 449 """
450 450 if parameter_s == '':
451 451 self._reloader.check(True)
452 452 elif parameter_s == '0':
453 453 self._reloader.enabled = False
454 454 elif parameter_s == '1':
455 455 self._reloader.check_all = False
456 456 self._reloader.enabled = True
457 457 elif parameter_s == '2':
458 458 self._reloader.check_all = True
459 459 self._reloader.enabled = True
460 460
461 461 @line_magic
462 462 def aimport(self, parameter_s='', stream=None):
463 463 """%aimport => Import modules for automatic reloading.
464 464
465 465 %aimport
466 466 List modules to automatically import and not to import.
467 467
468 468 %aimport foo
469 469 Import module 'foo' and mark it to be autoreloaded for %autoreload 1
470 470
471 471 %aimport -foo
472 472 Mark module 'foo' to not be autoreloaded for %autoreload 1
473 473 """
474 474 modname = parameter_s
475 475 if not modname:
476 476 to_reload = sorted(self._reloader.modules.keys())
477 477 to_skip = sorted(self._reloader.skip_modules.keys())
478 to_skip.sort()
479 478 if stream is None:
480 479 stream = sys.stdout
481 480 if self._reloader.check_all:
482 481 stream.write("Modules to reload:\nall-except-skipped\n")
483 482 else:
484 483 stream.write("Modules to reload:\n%s\n" % ' '.join(to_reload))
485 484 stream.write("\nModules to skip:\n%s\n" % ' '.join(to_skip))
486 485 elif modname.startswith('-'):
487 486 modname = modname[1:]
488 487 self._reloader.mark_module_skipped(modname)
489 488 else:
490 489 top_module, top_name = self._reloader.aimport_module(modname)
491 490
492 491 # Inject module to user namespace
493 492 self.shell.push({top_name: top_module})
494 493
495 494 def pre_run_code_hook(self, ip):
496 495 if not self._reloader.enabled:
497 496 raise TryNext
498 497 try:
499 498 self._reloader.check()
500 499 except:
501 500 pass
502 501
503 502
504 503 def load_ipython_extension(ip):
505 504 """Load the extension in IPython."""
506 505 auto_reload = AutoreloadMagics(ip)
507 506 ip.register_magics(auto_reload)
508 507 ip.set_hook('pre_run_code_hook', auto_reload.pre_run_code_hook)
@@ -1,695 +1,695 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 ======
4 4 Rmagic
5 5 ======
6 6
7 7 Magic command interface for interactive work with R via rpy2
8 8
9 9 .. note::
10 10
11 11 The ``rpy2`` package needs to be installed separately. It
12 12 can be obtained using ``easy_install`` or ``pip``.
13 13
14 14 You will also need a working copy of R.
15 15
16 16 Usage
17 17 =====
18 18
19 19 To enable the magics below, execute ``%load_ext rmagic``.
20 20
21 21 ``%R``
22 22
23 23 {R_DOC}
24 24
25 25 ``%Rpush``
26 26
27 27 {RPUSH_DOC}
28 28
29 29 ``%Rpull``
30 30
31 31 {RPULL_DOC}
32 32
33 33 ``%Rget``
34 34
35 35 {RGET_DOC}
36 36
37 37 """
38 38 from __future__ import print_function
39 39
40 40 #-----------------------------------------------------------------------------
41 41 # Copyright (C) 2012 The IPython Development Team
42 42 #
43 43 # Distributed under the terms of the BSD License. The full license is in
44 44 # the file COPYING, distributed as part of this software.
45 45 #-----------------------------------------------------------------------------
46 46
47 47 import sys
48 48 import tempfile
49 49 from glob import glob
50 50 from shutil import rmtree
51 51
52 52 # numpy and rpy2 imports
53 53
54 54 import numpy as np
55 55
56 56 import rpy2.rinterface as ri
57 57 import rpy2.robjects as ro
58 58 try:
59 59 from rpy2.robjects import pandas2ri
60 60 pandas2ri.activate()
61 61 except ImportError:
62 62 pandas2ri = None
63 63 from rpy2.robjects import numpy2ri
64 64 numpy2ri.activate()
65 65
66 66 # IPython imports
67 67
68 68 from IPython.core.displaypub import publish_display_data
69 69 from IPython.core.magic import (Magics, magics_class, line_magic,
70 70 line_cell_magic, needs_local_scope)
71 71 from IPython.testing.skipdoctest import skip_doctest
72 72 from IPython.core.magic_arguments import (
73 73 argument, magic_arguments, parse_argstring
74 74 )
75 75 from IPython.external.simplegeneric import generic
76 76 from IPython.utils.py3compat import (str_to_unicode, unicode_to_str, PY3,
77 77 unicode_type)
78 78
79 79 class RInterpreterError(ri.RRuntimeError):
80 80 """An error when running R code in a %%R magic cell."""
81 81 def __init__(self, line, err, stdout):
82 82 self.line = line
83 83 self.err = err.rstrip()
84 84 self.stdout = stdout.rstrip()
85 85
86 86 def __unicode__(self):
87 87 s = 'Failed to parse and evaluate line %r.\nR error message: %r' % \
88 88 (self.line, self.err)
89 89 if self.stdout and (self.stdout != self.err):
90 90 s += '\nR stdout:\n' + self.stdout
91 91 return s
92 92
93 93 if PY3:
94 94 __str__ = __unicode__
95 95 else:
96 96 def __str__(self):
97 97 return unicode_to_str(unicode(self), 'utf-8')
98 98
99 99 def Rconverter(Robj, dataframe=False):
100 100 """
101 101 Convert an object in R's namespace to one suitable
102 102 for ipython's namespace.
103 103
104 104 For a data.frame, it tries to return a structured array.
105 105 It first checks for colnames, then names.
106 106 If all are NULL, it returns np.asarray(Robj), else
107 107 it tries to construct a recarray
108 108
109 109 Parameters
110 110 ----------
111 111
112 112 Robj: an R object returned from rpy2
113 113 """
114 114 is_data_frame = ro.r('is.data.frame')
115 115 colnames = ro.r('colnames')
116 116 rownames = ro.r('rownames') # with pandas, these could be used for the index
117 117 names = ro.r('names')
118 118
119 119 if dataframe:
120 120 as_data_frame = ro.r('as.data.frame')
121 121 cols = colnames(Robj)
122 122 _names = names(Robj)
123 123 if cols != ri.NULL:
124 124 Robj = as_data_frame(Robj)
125 125 names = tuple(np.array(cols))
126 126 elif _names != ri.NULL:
127 127 names = tuple(np.array(_names))
128 128 else: # failed to find names
129 129 return np.asarray(Robj)
130 130 Robj = np.rec.fromarrays(Robj, names = names)
131 131 return np.asarray(Robj)
132 132
133 133 @generic
134 134 def pyconverter(pyobj):
135 135 """Convert Python objects to R objects. Add types using the decorator:
136 136
137 137 @pyconverter.when_type
138 138 """
139 139 return pyobj
140 140
141 141 # The default conversion for lists seems to make them a nested list. That has
142 142 # some advantages, but is rarely convenient, so for interactive use, we convert
143 143 # lists to a numpy array, which becomes an R vector.
144 144 @pyconverter.when_type(list)
145 145 def pyconverter_list(pyobj):
146 146 return np.asarray(pyobj)
147 147
148 148 if pandas2ri is None:
149 149 # pandas2ri was new in rpy2 2.3.3, so for now we'll fallback to pandas'
150 150 # conversion function.
151 151 try:
152 152 from pandas import DataFrame
153 153 from pandas.rpy.common import convert_to_r_dataframe
154 154 @pyconverter.when_type(DataFrame)
155 155 def pyconverter_dataframe(pyobj):
156 156 return convert_to_r_dataframe(pyobj, strings_as_factors=True)
157 157 except ImportError:
158 158 pass
159 159
160 160 @magics_class
161 161 class RMagics(Magics):
162 162 """A set of magics useful for interactive work with R via rpy2.
163 163 """
164 164
165 165 def __init__(self, shell, Rconverter=Rconverter,
166 166 pyconverter=pyconverter,
167 167 cache_display_data=False):
168 168 """
169 169 Parameters
170 170 ----------
171 171
172 172 shell : IPython shell
173 173
174 174 Rconverter : callable
175 175 To be called on values taken from R before putting them in the
176 176 IPython namespace.
177 177
178 178 pyconverter : callable
179 179 To be called on values in ipython namespace before
180 180 assigning to variables in rpy2.
181 181
182 182 cache_display_data : bool
183 183 If True, the published results of the final call to R are
184 184 cached in the variable 'display_cache'.
185 185
186 186 """
187 187 super(RMagics, self).__init__(shell)
188 188 self.cache_display_data = cache_display_data
189 189
190 190 self.r = ro.R()
191 191
192 192 self.Rstdout_cache = []
193 193 self.pyconverter = pyconverter
194 194 self.Rconverter = Rconverter
195 195
196 196 def eval(self, line):
197 197 '''
198 198 Parse and evaluate a line of R code with rpy2.
199 199 Returns the output to R's stdout() connection,
200 200 the value generated by evaluating the code, and a
201 201 boolean indicating whether the return value would be
202 202 visible if the line of code were evaluated in an R REPL.
203 203
204 204 R Code evaluation and visibility determination are
205 205 done via an R call of the form withVisible({<code>})
206 206
207 207 '''
208 208 old_writeconsole = ri.get_writeconsole()
209 209 ri.set_writeconsole(self.write_console)
210 210 try:
211 211 res = ro.r("withVisible({%s})" % line)
212 212 value = res[0] #value (R object)
213 213 visible = ro.conversion.ri2py(res[1])[0] #visible (boolean)
214 214 except (ri.RRuntimeError, ValueError) as exception:
215 215 warning_or_other_msg = self.flush() # otherwise next return seems to have copy of error
216 216 raise RInterpreterError(line, str_to_unicode(str(exception)), warning_or_other_msg)
217 217 text_output = self.flush()
218 218 ri.set_writeconsole(old_writeconsole)
219 219 return text_output, value, visible
220 220
221 221 def write_console(self, output):
222 222 '''
223 223 A hook to capture R's stdout in a cache.
224 224 '''
225 225 self.Rstdout_cache.append(output)
226 226
227 227 def flush(self):
228 228 '''
229 229 Flush R's stdout cache to a string, returning the string.
230 230 '''
231 231 value = ''.join([str_to_unicode(s, 'utf-8') for s in self.Rstdout_cache])
232 232 self.Rstdout_cache = []
233 233 return value
234 234
235 235 @skip_doctest
236 236 @needs_local_scope
237 237 @line_magic
238 238 def Rpush(self, line, local_ns=None):
239 239 '''
240 240 A line-level magic for R that pushes
241 241 variables from python to rpy2. The line should be made up
242 242 of whitespace separated variable names in the IPython
243 243 namespace::
244 244
245 245 In [7]: import numpy as np
246 246
247 247 In [8]: X = np.array([4.5,6.3,7.9])
248 248
249 249 In [9]: X.mean()
250 250 Out[9]: 6.2333333333333343
251 251
252 252 In [10]: %Rpush X
253 253
254 254 In [11]: %R mean(X)
255 255 Out[11]: array([ 6.23333333])
256 256
257 257 '''
258 258 if local_ns is None:
259 259 local_ns = {}
260 260
261 261 inputs = line.split(' ')
262 262 for input in inputs:
263 263 try:
264 264 val = local_ns[input]
265 265 except KeyError:
266 266 try:
267 267 val = self.shell.user_ns[input]
268 268 except KeyError:
269 269 # reraise the KeyError as a NameError so that it looks like
270 270 # the standard python behavior when you use an unnamed
271 271 # variable
272 272 raise NameError("name '%s' is not defined" % input)
273 273
274 274 self.r.assign(input, self.pyconverter(val))
275 275
276 276 @skip_doctest
277 277 @magic_arguments()
278 278 @argument(
279 279 '-d', '--as_dataframe', action='store_true',
280 280 default=False,
281 281 help='Convert objects to data.frames before returning to ipython.'
282 282 )
283 283 @argument(
284 284 'outputs',
285 285 nargs='*',
286 286 )
287 287 @line_magic
288 288 def Rpull(self, line):
289 289 '''
290 290 A line-level magic for R that pulls
291 291 variables from python to rpy2::
292 292
293 293 In [18]: _ = %R x = c(3,4,6.7); y = c(4,6,7); z = c('a',3,4)
294 294
295 295 In [19]: %Rpull x y z
296 296
297 297 In [20]: x
298 298 Out[20]: array([ 3. , 4. , 6.7])
299 299
300 300 In [21]: y
301 301 Out[21]: array([ 4., 6., 7.])
302 302
303 303 In [22]: z
304 304 Out[22]:
305 305 array(['a', '3', '4'],
306 306 dtype='|S1')
307 307
308 308
309 309 If --as_dataframe, then each object is returned as a structured array
310 310 after first passed through "as.data.frame" in R before
 311 311 being passed to self.Rconverter.
312 312 This is useful when a structured array is desired as output, or
313 313 when the object in R has mixed data types.
314 314 See the %%R docstring for more examples.
315 315
316 316 Notes
317 317 -----
318 318
319 319 Beware that R names can have '.' so this is not fool proof.
320 320 To avoid this, don't name your R objects with '.'s...
321 321
322 322 '''
323 323 args = parse_argstring(self.Rpull, line)
324 324 outputs = args.outputs
325 325 for output in outputs:
326 326 self.shell.push({output:self.Rconverter(self.r(output),dataframe=args.as_dataframe)})
327 327
328 328 @skip_doctest
329 329 @magic_arguments()
330 330 @argument(
331 331 '-d', '--as_dataframe', action='store_true',
332 332 default=False,
333 333 help='Convert objects to data.frames before returning to ipython.'
334 334 )
335 335 @argument(
336 336 'output',
337 337 nargs=1,
338 338 type=str,
339 339 )
340 340 @line_magic
341 341 def Rget(self, line):
342 342 '''
343 343 Return an object from rpy2, possibly as a structured array (if possible).
344 344 Similar to Rpull except only one argument is accepted and the value is
345 345 returned rather than pushed to self.shell.user_ns::
346 346
347 347 In [3]: dtype=[('x', '<i4'), ('y', '<f8'), ('z', '|S1')]
348 348
349 349 In [4]: datapy = np.array([(1, 2.9, 'a'), (2, 3.5, 'b'), (3, 2.1, 'c'), (4, 5, 'e')], dtype=dtype)
350 350
351 351 In [5]: %R -i datapy
352 352
353 353 In [6]: %Rget datapy
354 354 Out[6]:
355 355 array([['1', '2', '3', '4'],
356 356 ['2', '3', '2', '5'],
357 357 ['a', 'b', 'c', 'e']],
358 358 dtype='|S1')
359 359
360 360 In [7]: %Rget -d datapy
361 361 Out[7]:
362 362 array([(1, 2.9, 'a'), (2, 3.5, 'b'), (3, 2.1, 'c'), (4, 5.0, 'e')],
363 363 dtype=[('x', '<i4'), ('y', '<f8'), ('z', '|S1')])
364 364
365 365 '''
366 366 args = parse_argstring(self.Rget, line)
367 367 output = args.output
368 368 return self.Rconverter(self.r(output[0]),dataframe=args.as_dataframe)
369 369
370 370
371 371 @skip_doctest
372 372 @magic_arguments()
373 373 @argument(
374 374 '-i', '--input', action='append',
375 375 help='Names of input variable from shell.user_ns to be assigned to R variables of the same names after calling self.pyconverter. Multiple names can be passed separated only by commas with no whitespace.'
376 376 )
377 377 @argument(
378 378 '-o', '--output', action='append',
379 379 help='Names of variables to be pushed from rpy2 to shell.user_ns after executing cell body and applying self.Rconverter. Multiple names can be passed separated only by commas with no whitespace.'
380 380 )
381 381 @argument(
382 382 '-w', '--width', type=int,
383 383 help='Width of png plotting device sent as an argument to *png* in R.'
384 384 )
385 385 @argument(
386 386 '-h', '--height', type=int,
387 387 help='Height of png plotting device sent as an argument to *png* in R.'
388 388 )
389 389
390 390 @argument(
391 391 '-d', '--dataframe', action='append',
392 392 help='Convert these objects to data.frames and return as structured arrays.'
393 393 )
394 394 @argument(
395 395 '-u', '--units', type=unicode_type, choices=["px", "in", "cm", "mm"],
396 396 help='Units of png plotting device sent as an argument to *png* in R. One of ["px", "in", "cm", "mm"].'
397 397 )
398 398 @argument(
399 399 '-r', '--res', type=int,
400 400 help='Resolution of png plotting device sent as an argument to *png* in R. Defaults to 72 if *units* is one of ["in", "cm", "mm"].'
401 401 )
402 402 @argument(
403 403 '-p', '--pointsize', type=int,
404 404 help='Pointsize of png plotting device sent as an argument to *png* in R.'
405 405 )
406 406 @argument(
407 407 '-b', '--bg',
408 408 help='Background of png plotting device sent as an argument to *png* in R.'
409 409 )
410 410 @argument(
411 411 '-n', '--noreturn',
412 412 help='Force the magic to not return anything.',
413 413 action='store_true',
414 414 default=False
415 415 )
416 416 @argument(
417 417 'code',
418 418 nargs='*',
419 419 )
420 420 @needs_local_scope
421 421 @line_cell_magic
422 422 def R(self, line, cell=None, local_ns=None):
423 423 '''
424 424 Execute code in R, and pull some of the results back into the Python namespace.
425 425
426 426 In line mode, this will evaluate an expression and convert the returned value to a Python object.
427 427 The return value is determined by rpy2's behaviour of returning the result of evaluating the
428 428 final line.
429 429
430 430 Multiple R lines can be executed by joining them with semicolons::
431 431
432 432 In [9]: %R X=c(1,4,5,7); sd(X); mean(X)
433 433 Out[9]: array([ 4.25])
434 434
435 435 In cell mode, this will run a block of R code. The resulting value
 436 436 is printed if it would be printed when evaluating the same code
437 437 within a standard R REPL.
438 438
439 439 Nothing is returned to python by default in cell mode::
440 440
441 441 In [10]: %%R
442 442 ....: Y = c(2,4,3,9)
443 443 ....: summary(lm(Y~X))
444 444
445 445 Call:
446 446 lm(formula = Y ~ X)
447 447
448 448 Residuals:
449 449 1 2 3 4
450 450 0.88 -0.24 -2.28 1.64
451 451
452 452 Coefficients:
453 453 Estimate Std. Error t value Pr(>|t|)
454 454 (Intercept) 0.0800 2.3000 0.035 0.975
455 455 X 1.0400 0.4822 2.157 0.164
456 456
457 457 Residual standard error: 2.088 on 2 degrees of freedom
458 458 Multiple R-squared: 0.6993,Adjusted R-squared: 0.549
459 459 F-statistic: 4.651 on 1 and 2 DF, p-value: 0.1638
460 460
461 461 In the notebook, plots are published as the output of the cell::
462 462
463 463 %R plot(X, Y)
464 464
465 465 will create a scatter plot of X bs Y.
466 466
467 467 If cell is not None and line has some R code, it is prepended to
468 468 the R code in cell.
469 469
470 470 Objects can be passed back and forth between rpy2 and python via the -i -o flags in line::
471 471
472 472 In [14]: Z = np.array([1,4,5,10])
473 473
474 474 In [15]: %R -i Z mean(Z)
475 475 Out[15]: array([ 5.])
476 476
477 477
478 478 In [16]: %R -o W W=Z*mean(Z)
479 479 Out[16]: array([ 5., 20., 25., 50.])
480 480
481 481 In [17]: W
482 482 Out[17]: array([ 5., 20., 25., 50.])
483 483
484 484 The return value is determined by these rules:
485 485
486 486 * If the cell is not None, the magic returns None.
487 487
488 488 * If the cell evaluates as False, the resulting value is returned
489 489 unless the final line prints something to the console, in
490 490 which case None is returned.
491 491
492 492 * If the final line results in a NULL value when evaluated
493 493 by rpy2, then None is returned.
494 494
495 495 * No attempt is made to convert the final value to a structured array.
496 496 Use the --dataframe flag or %Rget to push / return a structured array.
497 497
498 498 * If the -n flag is present, there is no return value.
499 499
500 500 * A trailing ';' will also result in no return value as the last
501 501 value in the line is an empty string.
502 502
503 503 The --dataframe argument will attempt to return structured arrays.
504 504 This is useful for dataframes with
505 505 mixed data types. Note also that for a data.frame,
506 506 if it is returned as an ndarray, it is transposed::
507 507
508 508 In [18]: dtype=[('x', '<i4'), ('y', '<f8'), ('z', '|S1')]
509 509
510 510 In [19]: datapy = np.array([(1, 2.9, 'a'), (2, 3.5, 'b'), (3, 2.1, 'c'), (4, 5, 'e')], dtype=dtype)
511 511
512 512 In [20]: %%R -o datar
513 513 datar = datapy
514 514 ....:
515 515
516 516 In [21]: datar
517 517 Out[21]:
518 518 array([['1', '2', '3', '4'],
519 519 ['2', '3', '2', '5'],
520 520 ['a', 'b', 'c', 'e']],
521 521 dtype='|S1')
522 522
523 523 In [22]: %%R -d datar
524 524 datar = datapy
525 525 ....:
526 526
527 527 In [23]: datar
528 528 Out[23]:
529 529 array([(1, 2.9, 'a'), (2, 3.5, 'b'), (3, 2.1, 'c'), (4, 5.0, 'e')],
530 530 dtype=[('x', '<i4'), ('y', '<f8'), ('z', '|S1')])
531 531
532 532 The --dataframe argument first tries colnames, then names.
533 533 If both are NULL, it returns an ndarray (i.e. unstructured)::
534 534
535 535 In [1]: %R mydata=c(4,6,8.3); NULL
536 536
537 537 In [2]: %R -d mydata
538 538
539 539 In [3]: mydata
540 540 Out[3]: array([ 4. , 6. , 8.3])
541 541
542 542 In [4]: %R names(mydata) = c('a','b','c'); NULL
543 543
544 544 In [5]: %R -d mydata
545 545
546 546 In [6]: mydata
547 547 Out[6]:
548 548 array((4.0, 6.0, 8.3),
549 549 dtype=[('a', '<f8'), ('b', '<f8'), ('c', '<f8')])
550 550
551 551 In [7]: %R -o mydata
552 552
553 553 In [8]: mydata
554 554 Out[8]: array([ 4. , 6. , 8.3])
555 555
556 556 '''
557 557
558 558 args = parse_argstring(self.R, line)
559 559
560 560 # arguments 'code' in line are prepended to
561 561 # the cell lines
562 562
563 563 if cell is None:
564 564 code = ''
565 565 return_output = True
566 566 line_mode = True
567 567 else:
568 568 code = cell
569 569 return_output = False
570 570 line_mode = False
571 571
572 572 code = ' '.join(args.code) + code
573 573
574 574 # if there is no local namespace then default to an empty dict
575 575 if local_ns is None:
576 576 local_ns = {}
577 577
578 578 if args.input:
579 579 for input in ','.join(args.input).split(','):
580 580 try:
581 581 val = local_ns[input]
582 582 except KeyError:
583 583 try:
584 584 val = self.shell.user_ns[input]
585 585 except KeyError:
586 586 raise NameError("name '%s' is not defined" % input)
587 587 self.r.assign(input, self.pyconverter(val))
588 588
589 589 if getattr(args, 'units') is not None:
590 590 if args.units != "px" and getattr(args, 'res') is None:
591 591 args.res = 72
592 592 args.units = '"%s"' % args.units
593 593
594 594 png_argdict = dict([(n, getattr(args, n)) for n in ['units', 'res', 'height', 'width', 'bg', 'pointsize']])
595 595 png_args = ','.join(['%s=%s' % (o,v) for o, v in png_argdict.items() if v is not None])
596 596 # execute the R code in a temporary directory
597 597
598 598 tmpd = tempfile.mkdtemp()
599 599 self.r('png("%s/Rplots%%03d.png",%s)' % (tmpd.replace('\\', '/'), png_args))
600 600
601 601 text_output = ''
602 602 try:
603 603 if line_mode:
604 604 for line in code.split(';'):
605 605 text_result, result, visible = self.eval(line)
606 606 text_output += text_result
607 607 if text_result:
608 608 # the last line printed something to the console so we won't return it
609 609 return_output = False
610 610 else:
611 611 text_result, result, visible = self.eval(code)
612 612 text_output += text_result
613 613 if visible:
614 614 old_writeconsole = ri.get_writeconsole()
615 615 ri.set_writeconsole(self.write_console)
616 616 ro.r.show(result)
617 617 text_output += self.flush()
618 618 ri.set_writeconsole(old_writeconsole)
619 619
620 620 except RInterpreterError as e:
621 print((e.stdout))
621 print(e.stdout)
622 622 if not e.stdout.endswith(e.err):
623 print((e.err))
623 print(e.err)
624 624 rmtree(tmpd)
625 625 return
626 626
627 627 self.r('dev.off()')
628 628
629 629 # read out all the saved .png files
630 630
631 631 images = [open(imgfile, 'rb').read() for imgfile in glob("%s/Rplots*png" % tmpd)]
632 632
633 633 # now publish the images
634 634 # mimicking IPython/zmq/pylab/backend_inline.py
635 635 fmt = 'png'
636 636 mimetypes = { 'png' : 'image/png', 'svg' : 'image/svg+xml' }
637 637 mime = mimetypes[fmt]
638 638
639 639 # publish the printed R objects, if any
640 640
641 641 display_data = []
642 642 if text_output:
643 643 display_data.append(('RMagic.R', {'text/plain':text_output}))
644 644
645 645 # flush text streams before sending figures, helps a little with output
646 646 for image in images:
647 647 # synchronization in the console (though it's a bandaid, not a real sln)
648 648 sys.stdout.flush(); sys.stderr.flush()
649 649 display_data.append(('RMagic.R', {mime: image}))
650 650
651 651 # kill the temporary directory
652 652 rmtree(tmpd)
653 653
654 654 # try to turn every output into a numpy array
655 655 # this means that output are assumed to be castable
656 656 # as numpy arrays
657 657
658 658 if args.output:
659 659 for output in ','.join(args.output).split(','):
660 660 self.shell.push({output:self.Rconverter(self.r(output), dataframe=False)})
661 661
662 662 if args.dataframe:
663 663 for output in ','.join(args.dataframe).split(','):
664 664 self.shell.push({output:self.Rconverter(self.r(output), dataframe=True)})
665 665
666 666 for tag, disp_d in display_data:
667 667 publish_display_data(tag, disp_d)
668 668
669 669 # this will keep a reference to the display_data
670 670 # which might be useful to other objects who happen to use
671 671 # this method
672 672
673 673 if self.cache_display_data:
674 674 self.display_cache = display_data
675 675
676 676 # if in line mode and return_output, return the result as an ndarray
677 677 if return_output and not args.noreturn:
678 678 if result != ri.NULL:
679 679 return self.Rconverter(result, dataframe=False)
680 680
# Interpolate the magics' docstrings into the module docstring placeholders
# ({R_DOC}, {RPUSH_DOC}, ...); the 8-space prefix matches the surrounding
# indentation of the module docstring text.
__doc__ = __doc__.format(
    R_DOC = ' '*8 + RMagics.R.__doc__,
    RPUSH_DOC = ' '*8 + RMagics.Rpush.__doc__,
    RPULL_DOC = ' '*8 + RMagics.Rpull.__doc__,
    RGET_DOC = ' '*8 + RMagics.Rget.__doc__
)
687 687
688 688
def load_ipython_extension(ip):
    """Load the extension in IPython.

    Registers the RMagics magics class on the given InteractiveShell ``ip``.
    """
    ip.register_magics(RMagics)
    # Initialising rpy2 interferes with readline. Since, at this point, we've
    # probably just loaded rpy2, we reset the delimiters. See issue gh-2759.
    if ip.has_readline:
        ip.readline.set_completer_delims(ip.readline_delims)
@@ -1,234 +1,234 b''
1 1 #!/usr/bin/python
2 2 """Utility function for installing MathJax javascript library into
3 3 your IPython nbextensions directory, for offline use.
4 4
5 5 Authors:
6 6
7 7 * Min RK
8 8 * Mark Sienkiewicz
9 9 * Matthias Bussonnier
10 10
11 11 To download and install MathJax:
12 12
13 13 From Python:
14 14
15 15 >>> from IPython.external.mathjax import install_mathjax
16 16 >>> install_mathjax()
17 17
18 18 From the command line:
19 19
20 20 $ python -m IPython.external.mathjax
21 21
22 22 To a specific location:
23 23
24 24 $ python -m IPython.external.mathjax -i /usr/share/
25 25
26 26 will install mathjax to /usr/share/mathjax
27 27
28 28 To install MathJax from a file you have already downloaded:
29 29
30 30 $ python -m IPython.external.mathjax mathjax-xxx.tar.gz
31 31 $ python -m IPython.external.mathjax mathjax-xxx.zip
32 32
33 33 It will not install MathJax if it is already there. Use -r to
34 34 replace the existing copy of MathJax.
35 35
36 36 To find the directory where IPython would like MathJax installed:
37 37
38 38 $ python -m IPython.external.mathjax -d
39 39
40 40 """
41 41 from __future__ import print_function
42 42
43 43
44 44 #-----------------------------------------------------------------------------
45 45 # Copyright (C) 2011 The IPython Development Team
46 46 #
47 47 # Distributed under the terms of the BSD License. The full license is in
48 48 # the file COPYING, distributed as part of this software.
49 49 #-----------------------------------------------------------------------------
50 50
51 51
52 52 #-----------------------------------------------------------------------------
53 53 # Imports
54 54 #-----------------------------------------------------------------------------
55 55
56 56 import argparse
57 57 import os
58 58 import shutil
59 59 import sys
60 60 import tarfile
61 61 import urllib2
62 62 import zipfile
63 63
64 64 from IPython.utils.path import get_ipython_dir
65 65
66 66 #-----------------------------------------------------------------------------
67 67 #
68 68 #-----------------------------------------------------------------------------
69 69
70 70 # Where mathjax will be installed
71 71
72 72 nbextensions = os.path.join(get_ipython_dir(), 'nbextensions')
73 73 default_dest = os.path.join(nbextensions, 'mathjax')
74 74
75 75 # Test for access to install mathjax
76 76
def prepare_dest(dest, replace=False):
    """prepare the destination folder for mathjax install

    Returns False if mathjax appears to already be installed and there is nothing to do,
    True otherwise.
    """
    # Make sure the parent directory exists so the install can proceed.
    parent = os.path.abspath(os.path.join(dest, os.path.pardir))
    if not os.path.exists(parent):
        os.makedirs(parent)

    if not os.path.exists(dest):
        # Nothing installed yet: go ahead.
        return True

    if replace:
        # Caller asked us to blow away any existing install.
        print("removing existing MathJax at %s" % dest)
        shutil.rmtree(dest)
        return True

    # dest exists and we were not asked to replace it: sanity-check that it
    # actually looks like a MathJax install before reporting nothing to do.
    mathjax_js = os.path.join(dest, 'MathJax.js')
    if not os.path.exists(mathjax_js):
        raise IOError("%s exists, but does not contain MathJax.js" % dest)
    print("%s already exists" % mathjax_js)
    return False
101 101
102 102
def extract_tar(fd, dest):
    """extract a tarball from filelike `fd` to destination `dest`

    The archive must contain a single top-level directory (e.g.
    mathjax-MathJax-<sha>), which is renamed to `dest` after extraction.
    """
    # use 'r|gz' stream mode, because socket file-like objects can't seek:
    tar = tarfile.open(fileobj=fd, mode='r|gz')
    try:
        # The first entry in the archive is the top-level dir
        topdir = tar.firstmember.path

        # extract the archive (contains a single directory) to the destination's parent
        parent = os.path.abspath(os.path.join(dest, os.path.pardir))
        tar.extractall(parent)
    finally:
        # close the archive even if extraction fails (the original leaked it)
        tar.close()

    # it will be mathjax-MathJax-<sha>, rename to just mathjax
    os.rename(os.path.join(parent, topdir), dest)
117 117
118 118
def extract_zip(fd, dest):
    """extract a zip file from filelike `fd` to destination `dest`

    The archive must contain a single top-level directory, which is
    renamed to `dest` after extraction.
    """
    z = zipfile.ZipFile(fd, 'r')
    try:
        # The first entry in the archive is the top-level dir
        topdir = z.namelist()[0]

        # extract the archive (contains a single directory) to the destination's parent
        parent = os.path.abspath(os.path.join(dest, os.path.pardir))
        z.extractall(parent)
    finally:
        # close the archive even if extraction fails (the original leaked it)
        z.close()

    # it will be mathjax-MathJax-<sha>, rename to just mathjax
    # (the original also bound an unused local `d` here; removed)
    os.rename(os.path.join(parent, topdir), dest)
133 133
134 134
def install_mathjax(tag='v2.2', dest=default_dest, replace=False, file=None, extractor=extract_tar):
    """Download and/or install MathJax for offline use.

    This will install mathjax to the nbextensions dir in your IPYTHONDIR.

    MathJax is a ~15MB download, and ~150MB installed.

    Parameters
    ----------

    replace : bool [False]
        Whether to remove and replace an existing install.
    dest : str [IPYTHONDIR/nbextensions/mathjax]
        Where to install mathjax
    tag : str ['v2.2']
        Which tag to download. Default is 'v2.2', the current stable release,
        but alternatives include 'v1.1a' and 'master'.
    file : file like object [ default to content of https://github.com/mathjax/MathJax/tarball/#{tag}]
        File handle from which to untar/unzip/... mathjax
    extractor : function
        Method to use to untar/unzip/... `file`

    Returns
    -------
    int : 0 on success or nothing to do, 1 on error.
    """
    try:
        anything_to_do = prepare_dest(dest, replace)
    except OSError as e:
        # typically a permissions problem creating or removing dest
        # (the original printed this message twice: stale diff-artifact line removed)
        print("ERROR %s, require write access to %s" % (e, dest))
        return 1
    else:
        if not anything_to_do:
            return 0

    if file is None:
        # download mathjax
        mathjax_url = "https://github.com/mathjax/MathJax/archive/%s.tar.gz" %tag
        print("Downloading mathjax source from %s" % mathjax_url)
        response = urllib2.urlopen(mathjax_url)
        file = response.fp

    print("Extracting to %s" % dest)
    extractor(file, dest)
    return 0
176 176
177 177
def main():
    """Command-line entry point: install MathJax from the net or a local archive."""
    parser = argparse.ArgumentParser(
        description="""Install mathjax from internet or local archive""",
    )
    parser.add_argument(
        '-i',
        '--install-dir',
        default=nbextensions,
        help='custom installation directory. Mathjax will be installed in here/mathjax')
    parser.add_argument(
        '-d',
        '--print-dest',
        action='store_true',
        help='print where mathjax would be installed and exit')
    parser.add_argument(
        '-r',
        '--replace',
        action='store_true',
        help='Whether to replace current mathjax if it already exists')
    parser.add_argument('filename',
        help="the local tar/zip-ball filename containing mathjax",
        nargs='?',
        metavar='filename')

    pargs = parser.parse_args()

    # final install location is always <install-dir>/mathjax
    dest = os.path.join(pargs.install_dir, 'mathjax')

    if pargs.print_dest:
        # just report where we would install and stop
        print(dest)
        return

    if not pargs.filename:
        # no local archive given: download from GitHub
        return install_mathjax(replace=pargs.replace, dest=dest)

    fname = pargs.filename
    # automatically detect zip/tar - could do something based
    # on file content, but really not cost-effective here.
    extractor = extract_zip if fname.endswith('.zip') else extract_tar
    # do it
    return install_mathjax(file=open(fname, "rb"), replace=pargs.replace,
                           extractor=extractor, dest=dest)
229 229
230 230
# Run as a script: propagate install_mathjax's return code as the exit status.
if __name__ == '__main__' :
    sys.exit(main())

# Public API of this module.
__all__ = ['install_mathjax', 'main', 'default_dest']
@@ -1,583 +1,583 b''
1 1 """Module for interactive demos using IPython.
2 2
3 3 This module implements a few classes for running Python scripts interactively
4 4 in IPython for demonstrations. With very simple markup (a few tags in
5 5 comments), you can control points where the script stops executing and returns
6 6 control to IPython.
7 7
8 8
9 9 Provided classes
10 10 ----------------
11 11
12 12 The classes are (see their docstrings for further details):
13 13
14 14 - Demo: pure python demos
15 15
16 16 - IPythonDemo: demos with input to be processed by IPython as if it had been
17 17 typed interactively (so magics work, as well as any other special syntax you
18 18 may have added via input prefilters).
19 19
20 20 - LineDemo: single-line version of the Demo class. These demos are executed
21 21 one line at a time, and require no markup.
22 22
23 23 - IPythonLineDemo: IPython version of the LineDemo class (the demo is
24 24 executed a line at a time, but processed via IPython).
25 25
26 26 - ClearMixin: mixin to make Demo classes with less visual clutter. It
27 27 declares an empty marquee and a pre_cmd that clears the screen before each
28 28 block (see Subclassing below).
29 29
30 30 - ClearDemo, ClearIPDemo: mixin-enabled versions of the Demo and IPythonDemo
31 31 classes.
32 32
33 33 Inheritance diagram:
34 34
35 35 .. inheritance-diagram:: IPython.lib.demo
36 36 :parts: 3
37 37
38 38 Subclassing
39 39 -----------
40 40
41 41 The classes here all include a few methods meant to make customization by
42 42 subclassing more convenient. Their docstrings below have some more details:
43 43
44 44 - marquee(): generates a marquee to provide visible on-screen markers at each
45 45 block start and end.
46 46
47 47 - pre_cmd(): run right before the execution of each block.
48 48
49 49 - post_cmd(): run right after the execution of each block. If the block
50 50 raises an exception, this is NOT called.
51 51
52 52
53 53 Operation
54 54 ---------
55 55
56 56 The file is run in its own empty namespace (though you can pass it a string of
57 57 arguments as if in a command line environment, and it will see those as
58 58 sys.argv). But at each stop, the global IPython namespace is updated with the
59 59 current internal demo namespace, so you can work interactively with the data
60 60 accumulated so far.
61 61
62 62 By default, each block of code is printed (with syntax highlighting) before
63 63 executing it and you have to confirm execution. This is intended to show the
64 64 code to an audience first so you can discuss it, and only proceed with
65 65 execution once you agree. There are a few tags which allow you to modify this
66 66 behavior.
67 67
68 68 The supported tags are:
69 69
70 70 # <demo> stop
71 71
72 72 Defines block boundaries, the points where IPython stops execution of the
73 73 file and returns to the interactive prompt.
74 74
75 75 You can optionally mark the stop tag with extra dashes before and after the
76 76 word 'stop', to help visually distinguish the blocks in a text editor:
77 77
78 78 # <demo> --- stop ---
79 79
80 80
81 81 # <demo> silent
82 82
83 83 Make a block execute silently (and hence automatically). Typically used in
84 84 cases where you have some boilerplate or initialization code which you need
85 85 executed but do not want to be seen in the demo.
86 86
87 87 # <demo> auto
88 88
89 89 Make a block execute automatically, but still being printed. Useful for
90 90 simple code which does not warrant discussion, since it avoids the extra
91 91 manual confirmation.
92 92
93 93 # <demo> auto_all
94 94
95 95 This tag can _only_ be in the first block, and if given it overrides the
96 96 individual auto tags to make the whole demo fully automatic (no block asks
97 97 for confirmation). It can also be given at creation time (or the attribute
98 98 set later) to override what's in the file.
99 99
100 100 While _any_ python file can be run as a Demo instance, if there are no stop
101 101 tags the whole file will run in a single block (no different that calling
102 102 first %pycat and then %run). The minimal markup to make this useful is to
103 103 place a set of stop tags; the other tags are only there to let you fine-tune
104 104 the execution.
105 105
106 106 This is probably best explained with the simple example file below. You can
107 107 copy this into a file named ex_demo.py, and try running it via::
108 108
109 109 from IPython.demo import Demo
110 110 d = Demo('ex_demo.py')
111 111 d()
112 112
113 113 Each time you call the demo object, it runs the next block. The demo object
114 114 has a few useful methods for navigation, like again(), edit(), jump(), seek()
115 115 and back(). It can be reset for a new run via reset() or reloaded from disk
116 116 (in case you've edited the source) via reload(). See their docstrings below.
117 117
118 118 Note: To make this simpler to explore, a file called "demo-exercizer.py" has
119 119 been added to the "docs/examples/core" directory. Just cd to this directory in
120 120 an IPython session, and type::
121 121
122 122 %run demo-exercizer.py
123 123
124 124 and then follow the directions.
125 125
126 126 Example
127 127 -------
128 128
129 129 The following is a very simple example of a valid demo file.
130 130
131 131 ::
132 132
133 133 #################### EXAMPLE DEMO <ex_demo.py> ###############################
134 134 '''A simple interactive demo to illustrate the use of IPython's Demo class.'''
135 135
136 136 print 'Hello, welcome to an interactive IPython demo.'
137 137
138 138 # The mark below defines a block boundary, which is a point where IPython will
139 139 # stop execution and return to the interactive prompt. The dashes are actually
140 140 # optional and used only as a visual aid to clearly separate blocks while
141 141 # editing the demo code.
142 142 # <demo> stop
143 143
144 144 x = 1
145 145 y = 2
146 146
147 147 # <demo> stop
148 148
149 149 # the mark below makes this block as silent
150 150 # <demo> silent
151 151
152 152 print 'This is a silent block, which gets executed but not printed.'
153 153
154 154 # <demo> stop
155 155 # <demo> auto
156 156 print 'This is an automatic block.'
157 157 print 'It is executed without asking for confirmation, but printed.'
158 158 z = x+y
159 159
160 160 print 'z=',z
161 161
162 162 # <demo> stop
163 163 # This is just another normal block.
164 164 print 'z is now:', z
165 165
166 166 print 'bye!'
167 167 ################### END EXAMPLE DEMO <ex_demo.py> ############################
168 168 """
169 169
170 170 from __future__ import unicode_literals
171 171
172 172 #*****************************************************************************
173 173 # Copyright (C) 2005-2006 Fernando Perez. <Fernando.Perez@colorado.edu>
174 174 #
175 175 # Distributed under the terms of the BSD License. The full license is in
176 176 # the file COPYING, distributed as part of this software.
177 177 #
178 178 #*****************************************************************************
179 179 from __future__ import print_function
180 180
181 181 import os
182 182 import re
183 183 import shlex
184 184 import sys
185 185
186 186 from IPython.utils import io
187 187 from IPython.utils.text import marquee
188 188 from IPython.utils import openpy
189 189 from IPython.utils import py3compat
190 190 __all__ = ['Demo','IPythonDemo','LineDemo','IPythonLineDemo','DemoError']
191 191
# Exception raised for errors in demo navigation/execution.
class DemoError(Exception): pass
193 193
def re_mark(mark):
    """Return a compiled multiline regex matching a '# <demo> <mark>' tag line."""
    pattern = r'^\s*#\s+<demo>\s+%s\s*$' % mark
    return re.compile(pattern, re.MULTILINE)
196 196
197 197 class Demo(object):
198 198
199 199 re_stop = re_mark('-*\s?stop\s?-*')
200 200 re_silent = re_mark('silent')
201 201 re_auto = re_mark('auto')
202 202 re_auto_all = re_mark('auto_all')
203 203
    def __init__(self,src,title='',arg_str='',auto_all=None):
        """Make a new demo object. To run the demo, simply call the object.

        See the module docstring for full details and an example (you can use
        IPython.Demo? in IPython to see it).

        Inputs:

        - src is either a file, or file-like object, or a
        string that can be resolved to a filename.

        Optional inputs:

        - title: a string to use as the demo name. Of most use when the demo
        you are making comes from an object that has no filename, or if you
        want an alternate denotation distinct from the filename.

        - arg_str(''): a string of arguments, internally converted to a list
        just like sys.argv, so the demo script can see a similar
        environment.

        - auto_all(None): global flag to run all blocks automatically without
        confirmation. This attribute overrides the block-level tags and
        applies to the whole demo. It is an attribute of the object, and
        can be changed at runtime simply by reassigning it to a boolean
        value.
        """
        if hasattr(src, "read"):
            # It seems to be a file or a file-like object
            self.fname = "from a file-like object"
            if title == '':
                self.title = "from a file-like object"
            else:
                self.title = title
        else:
            # Assume it's a string or something that can be converted to one
            self.fname = src
            if title == '':
                (filepath, filename) = os.path.split(src)
                self.title = filename
            else:
                self.title = title
        # sys.argv seen by the demo script while its blocks execute
        self.sys_argv = [src] + shlex.split(arg_str)
        self.auto_all = auto_all
        self.src = src

        # get a few things from ipython. While it's a bit ugly design-wise,
        # it ensures that things like color scheme and the like are always in
        # sync with the ipython mode being used. This class is only meant to
        # be used inside ipython anyways, so it's OK.
        ip = get_ipython() # this is in builtins whenever IPython is running
        self.ip_ns = ip.user_ns
        self.ip_colorize = ip.pycolorize
        self.ip_showtb = ip.showtraceback
        self.ip_run_cell = ip.run_cell
        self.shell = ip

        # load user data and initialize data structures
        self.reload()
264 264 def fload(self):
265 265 """Load file object."""
266 266 # read data and parse into blocks
267 267 if hasattr(self, 'fobj') and self.fobj is not None:
268 268 self.fobj.close()
269 269 if hasattr(self.src, "read"):
270 270 # It seems to be a file or a file-like object
271 271 self.fobj = self.src
272 272 else:
273 273 # Assume it's a string or something that can be converted to one
274 274 self.fobj = openpy.open(self.fname)
275 275
276 276 def reload(self):
277 277 """Reload source from disk and initialize state."""
278 278 self.fload()
279 279
280 280 self.src = "".join(openpy.strip_encoding_cookie(self.fobj))
281 281 src_b = [b.strip() for b in self.re_stop.split(self.src) if b]
282 282 self._silent = [bool(self.re_silent.findall(b)) for b in src_b]
283 283 self._auto = [bool(self.re_auto.findall(b)) for b in src_b]
284 284
285 285 # if auto_all is not given (def. None), we read it from the file
286 286 if self.auto_all is None:
287 287 self.auto_all = bool(self.re_auto_all.findall(src_b[0]))
288 288 else:
289 289 self.auto_all = bool(self.auto_all)
290 290
291 291 # Clean the sources from all markup so it doesn't get displayed when
292 292 # running the demo
293 293 src_blocks = []
294 294 auto_strip = lambda s: self.re_auto.sub('',s)
295 295 for i,b in enumerate(src_b):
296 296 if self._auto[i]:
297 297 src_blocks.append(auto_strip(b))
298 298 else:
299 299 src_blocks.append(b)
300 300 # remove the auto_all marker
301 301 src_blocks[0] = self.re_auto_all.sub('',src_blocks[0])
302 302
303 303 self.nblocks = len(src_blocks)
304 304 self.src_blocks = src_blocks
305 305
306 306 # also build syntax-highlighted source
307 307 self.src_blocks_colored = map(self.ip_colorize,self.src_blocks)
308 308
309 309 # ensure clean namespace and seek offset
310 310 self.reset()
311 311
312 312 def reset(self):
313 313 """Reset the namespace and seek pointer to restart the demo"""
314 314 self.user_ns = {}
315 315 self.finished = False
316 316 self.block_index = 0
317 317
318 318 def _validate_index(self,index):
319 319 if index<0 or index>=self.nblocks:
320 320 raise ValueError('invalid block index %s' % index)
321 321
322 322 def _get_index(self,index):
323 323 """Get the current block index, validating and checking status.
324 324
325 325 Returns None if the demo is finished"""
326 326
327 327 if index is None:
328 328 if self.finished:
329 329 print('Demo finished. Use <demo_name>.reset() if you want to rerun it.', file=io.stdout)
330 330 return None
331 331 index = self.block_index
332 332 else:
333 333 self._validate_index(index)
334 334 return index
335 335
336 336 def seek(self,index):
337 337 """Move the current seek pointer to the given block.
338 338
339 339 You can use negative indices to seek from the end, with identical
340 340 semantics to those of Python lists."""
341 341 if index<0:
342 342 index = self.nblocks + index
343 343 self._validate_index(index)
344 344 self.block_index = index
345 345 self.finished = False
346 346
    def back(self,num=1):
        """Move the seek pointer back num blocks (default is 1)."""
        # Delegates to seek(), which validates the resulting index.
        self.seek(self.block_index-num)
350 350
351 351 def jump(self,num=1):
352 352 """Jump a given number of blocks relative to the current one.
353 353
354 354 The offset can be positive or negative, defaults to 1."""
355 355 self.seek(self.block_index+num)
356 356
    def again(self):
        """Move the seek pointer back one block and re-execute."""
        # rewind one block, then run it by calling ourselves
        self.back(1)
        self()
361 361
    def edit(self,index=None):
        """Edit a block.

        If no number is given, use the last block executed.

        This edits the in-memory copy of the demo, it does NOT modify the
        original source file.  If you want to do that, simply open the file in
        an editor and use reload() when you make changes to the file.  This
        method is meant to let you change a block during a demonstration for
        explanatory purposes, without damaging your original script."""

        index = self._get_index(index)
        if index is None:
            return
        # decrease the index by one (unless we're at the very beginning), so
        # that the default demo.edit() call opens up the sblock we've last run
        if index>0:
            index -= 1

        # hand the block to the user's configured editor via a temp file
        filename = self.shell.mktempfile(self.src_blocks[index])
        self.shell.hooks.editor(filename,1)
        with open(filename, 'r') as f:
            new_block = f.read()
        # update the source and colored block
        self.src_blocks[index] = new_block
        self.src_blocks_colored[index] = self.ip_colorize(new_block)
        self.block_index = index
        # call to run with the newly edited index
        self()
391 391
392 392 def show(self,index=None):
393 393 """Show a single block on screen"""
394 394
395 395 index = self._get_index(index)
396 396 if index is None:
397 397 return
398 398
399 399 print(self.marquee('<%s> block # %s (%s remaining)' %
400 400 (self.title,index,self.nblocks-index-1)), file=io.stdout)
401 print((self.src_blocks_colored[index]), file=io.stdout)
401 print(self.src_blocks_colored[index], file=io.stdout)
402 402 sys.stdout.flush()
403 403
404 404 def show_all(self):
405 405 """Show entire demo on screen, block by block"""
406 406
407 407 fname = self.title
408 408 title = self.title
409 409 nblocks = self.nblocks
410 410 silent = self._silent
411 411 marquee = self.marquee
412 412 for index,block in enumerate(self.src_blocks_colored):
413 413 if silent[index]:
414 414 print(marquee('<%s> SILENT block # %s (%s remaining)' %
415 415 (title,index,nblocks-index-1)), file=io.stdout)
416 416 else:
417 417 print(marquee('<%s> block # %s (%s remaining)' %
418 418 (title,index,nblocks-index-1)), file=io.stdout)
419 419 print(block, end=' ', file=io.stdout)
420 420 sys.stdout.flush()
421 421
    def run_cell(self,source):
        """Execute a string with one or more lines of code"""

        # Run in the demo's private namespace, used as the globals dict;
        # results accumulate there across blocks.
        exec(source, self.user_ns)
426 426
    def __call__(self,index=None):
        """run a block of the demo.

        If index is given, it should be an integer >=1 and <= nblocks. This
        means that the calling convention is one off from typical Python
        lists. The reason for the inconsistency is that the demo always
        prints 'Block n/N, and N is the total, so it would be very odd to use
        zero-indexing here."""

        index = self._get_index(index)
        if index is None:
            return
        try:
            marquee = self.marquee
            next_block = self.src_blocks[index]
            # cursor advances before execution — presumably so a failing block
            # doesn't pin the demo in place; TODO confirm intent
            self.block_index += 1
            if self._silent[index]:
                print(marquee('Executing silent block # %s (%s remaining)' %
                              (index,self.nblocks-index-1)), file=io.stdout)
            else:
                self.pre_cmd()
                self.show(index)
                if self.auto_all or self._auto[index]:
                    print(marquee('output:'), file=io.stdout)
                else:
                    # interactive confirmation: any non-empty answer skips the block
                    print(marquee('Press <q> to quit, <Enter> to execute...'), end=' ', file=io.stdout)
                    ans = py3compat.input().strip()
                    if ans:
                        print(marquee('Block NOT executed'), file=io.stdout)
                        return
            try:
                # run with the demo's own argv; always restore the caller's
                save_argv = sys.argv
                sys.argv = self.sys_argv
                self.run_cell(next_block)
                self.post_cmd()
            finally:
                sys.argv = save_argv

        except:
            # NOTE(review): bare except — shows the traceback and keeps the
            # demo alive; looks like deliberate best-effort behavior, confirm
            self.ip_showtb(filename=self.fname)
        else:
            # on success, publish names the block defined into the IPython ns
            self.ip_ns.update(self.user_ns)

        if self.block_index == self.nblocks:
            mq1 = self.marquee('END OF DEMO')
            if mq1:
                # avoid spurious print >>io.stdout,s if empty marquees are used
                print(file=io.stdout)
                print(mq1, file=io.stdout)
                print(self.marquee('Use <demo_name>.reset() if you want to rerun it.'), file=io.stdout)
            self.finished = True
478 478
479 479 # These methods are meant to be overridden by subclasses who may wish to
480 480 # customize the behavior of of their demos.
481 481 def marquee(self,txt='',width=78,mark='*'):
482 482 """Return the input string centered in a 'marquee'."""
483 483 return marquee(txt,width,mark)
484 484
485 485 def pre_cmd(self):
486 486 """Method called before executing each block."""
487 487 pass
488 488
489 489 def post_cmd(self):
490 490 """Method called after executing each block."""
491 491 pass
492 492
493 493
class IPythonDemo(Demo):
    """Interactive demo whose blocks go through IPython's input processing.

    Unlike the base Demo class — which executes each block directly with the
    Python interpreter via ``exec`` — this variant hands each block to
    IPython, so any input filters/transformations in place are applied.

    Use this when your interactive environment exposes special input
    processing and you want demo scripts to behave exactly as if their
    contents had been typed interactively.  The plain Demo class requires
    valid, pure Python input.
    """

    def run_cell(self,source):
        """Run *source* (one or more lines of code) through IPython so its
        input transformations apply."""
        shell = self.shell
        shell.run_cell(source)
511 511
class LineDemo(Demo):
    """Demo where each line is executed as a separate block.

    The input script should be valid Python code.

    This class doesn't require any markup at all, and it's meant for simple
    scripts (with no nesting or any kind of indentation) which consist of
    multiple lines of input to be executed, one at a time, as if they had been
    typed in the interactive prompt.

    Note: the input can not have *any* indentation, which means that only
    single-lines of input are accepted, not even function definitions are
    valid."""

    def reload(self):
        """Reload source from disk and initialize state."""
        # read data and parse into blocks
        self.fload()
        lines = self.fobj.readlines()
        src_b = [l for l in lines if l.strip()]
        nblocks = len(src_b)
        self.src = ''.join(lines)
        self._silent = [False]*nblocks
        self._auto = [True]*nblocks
        self.auto_all = True
        self.nblocks = nblocks
        self.src_blocks = src_b

        # also build syntax-highlighted source.  Materialize as a list:
        # map() is a lazy iterator on Python 3, and show() indexes into
        # src_blocks_colored, which would fail on a bare map object.
        self.src_blocks_colored = list(map(self.ip_colorize, self.src_blocks))

        # ensure clean namespace and seek offset
        self.reset()
545 545
546 546
class IPythonLineDemo(IPythonDemo,LineDemo):
    """Line-by-line demo whose input is run through IPython's input filters."""
    pass
550 550
551 551
class ClearMixin(object):
    """Mixin producing Demo classes with less visual clutter.

    Demos using it clear the screen before every block and show blank
    marquees.  For these overrides to win attribute lookup, the mixin must
    come /first/ in the inheritance tree, e.g.::

        class ClearIPDemo(ClearMixin,IPythonDemo): pass

    gives an IPythonDemo with the mixin's behavior.
    """

    def marquee(self,txt='',width=78,mark='*'):
        """Blank marquee: always return the empty string, whatever the input."""
        return ''

    def pre_cmd(self):
        """Clear the terminal before each block is executed."""
        # local import: terminal utilities are only needed when a block runs
        from IPython.utils.terminal import term_clear
        term_clear()
577 577
class ClearDemo(ClearMixin,Demo):
    """Plain Demo with cleared screens and blank marquees."""
    pass
580 580
581 581
class ClearIPDemo(ClearMixin,IPythonDemo):
    """IPythonDemo with cleared screens and blank marquees."""
    pass
@@ -1,112 +1,112 b''
1 1 """PostProcessor for serving reveal.js HTML slideshows."""
2 2 from __future__ import print_function
3 3 #-----------------------------------------------------------------------------
4 4 #Copyright (c) 2013, the IPython Development Team.
5 5 #
6 6 #Distributed under the terms of the Modified BSD License.
7 7 #
8 8 #The full license is in the file COPYING.txt, distributed with this software.
9 9 #-----------------------------------------------------------------------------
10 10
11 11 #-----------------------------------------------------------------------------
12 12 # Imports
13 13 #-----------------------------------------------------------------------------
14 14
15 15 import os
16 16 import webbrowser
17 17
18 18 from tornado import web, ioloop, httpserver
19 19 from tornado.httpclient import AsyncHTTPClient
20 20
21 21 from IPython.utils.traitlets import Bool, Unicode, Int
22 22
23 23 from .base import PostProcessorBase
24 24
25 25 #-----------------------------------------------------------------------------
26 26 # Classes
27 27 #-----------------------------------------------------------------------------
28 28
class ProxyHandler(web.RequestHandler):
    """Tornado handler that forwards local-prefix requests to a CDN."""

    @web.asynchronous
    def get(self, prefix, url):
        """Asynchronously fetch *url* from the configured CDN and relay it."""
        target = "/".join([self.settings['cdn'], url])
        self.settings['client'].fetch(target, callback=self.finish_get)

    def finish_get(self, response):
        """Complete the proxied request with the CDN's response."""
        # forward the content/caching headers that matter to the client
        for name in ("Content-Type", "Cache-Control", "Date",
                     "Last-Modified", "Expires"):
            if name in response.headers:
                self.set_header(name, response.headers[name])
        self.finish(response.body)
45 45
class ServePostProcessor(PostProcessorBase):
    """Post processor designed to serve files

    Proxies reveal.js requests to a CDN if no local reveal.js is present
    """


    open_in_browser = Bool(True, config=True,
        help="""Should the browser be opened automatically?"""
    )
    reveal_cdn = Unicode("https://cdn.jsdelivr.net/reveal.js/2.4.0", config=True,
        help="""URL for reveal.js CDN."""
    )
    reveal_prefix = Unicode("reveal.js", config=True, help="URL prefix for reveal.js")
    ip = Unicode("127.0.0.1", config=True, help="The IP address to listen on.")
    port = Int(8000, config=True, help="port for the server to listen on.")

    def postprocess(self, input):
        """Serve the build directory with a webserver.

        *input* (note: shadows the builtin) is the path of the exported HTML
        file; its directory is served as static content, with "/" redirecting
        to the file itself.  Blocks in the tornado IOLoop until Ctrl-C.
        """
        dirname, filename = os.path.split(input)
        handlers = [
            (r"/(.+)", web.StaticFileHandler, {'path' : dirname}),
            (r"/", web.RedirectHandler, {"url": "/%s" % filename})
        ]

        if ('://' in self.reveal_prefix or self.reveal_prefix.startswith("//")):
            # reveal specifically from CDN, nothing to do
            pass
        elif os.path.isdir(os.path.join(dirname, self.reveal_prefix)):
            # reveal prefix exists
            self.log.info("Serving local %s", self.reveal_prefix)
        else:
            # no local copy: proxy reveal.js requests through to the CDN
            self.log.info("Redirecting %s requests to %s", self.reveal_prefix, self.reveal_cdn)
            handlers.insert(0, (r"/(%s)/(.*)" % self.reveal_prefix, ProxyHandler))

        app = web.Application(handlers,
                              cdn=self.reveal_cdn,
                              client=AsyncHTTPClient(),
        )
        # hook up tornado logging to our logger
        try:
            from tornado import log
            log.app_log = self.log
        except ImportError:
            # old tornado (<= 3), ignore
            pass

        http_server = httpserver.HTTPServer(app)
        http_server.listen(self.port, address=self.ip)
        url = "http://%s:%i/%s" % (self.ip, self.port, filename)
        print("Serving your slides at %s" % url)
        print("Use Control-C to stop this server")
        if self.open_in_browser:
            webbrowser.open(url, new=2)
        try:
            # serve until the user interrupts with Ctrl-C
            ioloop.IOLoop.instance().start()
        except KeyboardInterrupt:
            print("\nInterrupted")
104 104
def main(path):
    """Command-line entry point: serve the slides file at *path*."""
    ServePostProcessor()(path)


if __name__ == '__main__':
    import sys
    main(sys.argv[1])
@@ -1,43 +1,43 b''
1 1 """
2 2 Contains debug writer.
3 3 """
4 4 from __future__ import print_function
5 5 #-----------------------------------------------------------------------------
6 6 #Copyright (c) 2013, the IPython Development Team.
7 7 #
8 8 #Distributed under the terms of the Modified BSD License.
9 9 #
10 10 #The full license is in the file COPYING.txt, distributed with this software.
11 11 #-----------------------------------------------------------------------------
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Imports
15 15 #-----------------------------------------------------------------------------
16 16
17 17 from .base import WriterBase
18 18 from pprint import pprint
19 19
20 20 #-----------------------------------------------------------------------------
21 21 # Classes
22 22 #-----------------------------------------------------------------------------
23 23
class DebugWriter(WriterBase):
    """Writer that dumps useful debugging information to stdout.

    Consumes output from the nbconvert ``export...()`` methods and, instead
    of persisting anything, reports which resources (e.g. extracted outputs)
    were produced while exporting the notebook(s).
    """


    def write(self, output, resources, notebook_name='notebook', **kw):
        """
        Consume and write Jinja output.

        See base for more...
        """
        outputs = resources['outputs']
        if isinstance(outputs, dict):
            print("outputs extracted from %s" % notebook_name)
            print('-' * 80)
            pprint(outputs, indent=2, width=70)
        else:
            print("no outputs extracted from %s" % notebook_name)
            print('=' * 80)
@@ -1,1855 +1,1855 b''
1 1 """A semi-synchronous Client for the ZMQ cluster
2 2
3 3 Authors:
4 4
5 5 * MinRK
6 6 """
7 7 from __future__ import print_function
8 8 #-----------------------------------------------------------------------------
9 9 # Copyright (C) 2010-2011 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is in
12 12 # the file COPYING, distributed as part of this software.
13 13 #-----------------------------------------------------------------------------
14 14
15 15 #-----------------------------------------------------------------------------
16 16 # Imports
17 17 #-----------------------------------------------------------------------------
18 18
19 19 import os
20 20 import json
21 21 import sys
22 22 from threading import Thread, Event
23 23 import time
24 24 import warnings
25 25 from datetime import datetime
26 26 from getpass import getpass
27 27 from pprint import pprint
28 28
29 29 pjoin = os.path.join
30 30
31 31 import zmq
32 32 # from zmq.eventloop import ioloop, zmqstream
33 33
34 34 from IPython.config.configurable import MultipleInstanceError
35 35 from IPython.core.application import BaseIPythonApplication
36 36 from IPython.core.profiledir import ProfileDir, ProfileDirError
37 37
38 38 from IPython.utils.capture import RichOutput
39 39 from IPython.utils.coloransi import TermColors
40 40 from IPython.utils.jsonutil import rekey
41 41 from IPython.utils.localinterfaces import localhost, is_local_ip
42 42 from IPython.utils.path import get_ipython_dir
43 43 from IPython.utils.py3compat import cast_bytes, string_types, xrange, iteritems
44 44 from IPython.utils.traitlets import (HasTraits, Integer, Instance, Unicode,
45 45 Dict, List, Bool, Set, Any)
46 46 from IPython.external.decorator import decorator
47 47 from IPython.external.ssh import tunnel
48 48
49 49 from IPython.parallel import Reference
50 50 from IPython.parallel import error
51 51 from IPython.parallel import util
52 52
53 53 from IPython.kernel.zmq.session import Session, Message
54 54 from IPython.kernel.zmq import serialize
55 55
56 56 from .asyncresult import AsyncResult, AsyncHubResult
57 57 from .view import DirectView, LoadBalancedView
58 58
59 59 #--------------------------------------------------------------------------
60 60 # Decorators for Client methods
61 61 #--------------------------------------------------------------------------
62 62
@decorator
def spin_first(f, self, *args, **kwargs):
    """Decorator: flush pending state via ``spin()`` before invoking *f*."""
    self.spin()
    result = f(self, *args, **kwargs)
    return result
68 68
69 69
70 70 #--------------------------------------------------------------------------
71 71 # Classes
72 72 #--------------------------------------------------------------------------
73 73
74 74
class ExecuteReply(RichOutput):
    """wrapper for finished Execute results

    Exposes the reply metadata both as items (``reply['status']``) and as
    attributes (``reply.status``) via __getitem__/__getattr__ below, and
    implements rich-display hooks driven by the captured 'pyout' message.
    """
    def __init__(self, msg_id, content, metadata):
        # id of the execute request this reply belongs to
        self.msg_id = msg_id
        # raw reply content; only execution_count is pulled out of it here
        self._content = content
        self.execution_count = content['execution_count']
        # metadata mapping — also backs item/attribute access below
        self.metadata = metadata

    # RichOutput overrides

    @property
    def source(self):
        """Source of the captured pyout message ('' if unset); None when no
        pyout was captured at all."""
        pyout = self.metadata['pyout']
        if pyout:
            return pyout.get('source', '')

    @property
    def data(self):
        """Data dict of the captured pyout message (keyed by mime type, per
        _repr_mime_ below); None when no pyout was captured."""
        pyout = self.metadata['pyout']
        if pyout:
            return pyout.get('data', {})

    @property
    def _metadata(self):
        """Display metadata of the captured pyout message; None when no
        pyout was captured."""
        pyout = self.metadata['pyout']
        if pyout:
            return pyout.get('metadata', {})

    def display(self):
        # deferred import avoids a hard IPython.display dependency at import time
        from IPython.display import publish_display_data
        publish_display_data(self.source, self.data, self.metadata)

    def _repr_mime_(self, mime):
        """Return data for *mime* (paired with its display metadata when
        present), or None when the mime type wasn't captured."""
        if mime not in self.data:
            return
        data = self.data[mime]
        if mime in self._metadata:
            return data, self._metadata[mime]
        else:
            return data

    def __getitem__(self, key):
        # item access reads straight from the metadata mapping
        return self.metadata[key]

    def __getattr__(self, key):
        # attribute fallback: unknown attributes resolve as metadata keys
        if key not in self.metadata:
            raise AttributeError(key)
        return self.metadata[key]

    def __repr__(self):
        pyout = self.metadata['pyout'] or {'data':{}}
        text_out = pyout['data'].get('text/plain', '')
        # truncate long text reprs so the summary stays compact
        if len(text_out) > 32:
            text_out = text_out[:29] + '...'

        return "<ExecuteReply[%i]: %s>" % (self.execution_count, text_out)

    def _repr_pretty_(self, p, cycle):
        """IPython pretty-print hook: render like an ``Out[engine:count]``
        prompt followed by the text/plain output."""
        pyout = self.metadata['pyout'] or {'data':{}}
        text_out = pyout['data'].get('text/plain', '')

        if not text_out:
            return

        # color the prompt to match the active shell's color scheme, if any
        try:
            ip = get_ipython()
        except NameError:
            colors = "NoColor"
        else:
            colors = ip.colors

        if colors == "NoColor":
            out = normal = ""
        else:
            out = TermColors.Red
            normal = TermColors.Normal

        if '\n' in text_out and not text_out.startswith('\n'):
            # add newline for multiline reprs
            text_out = '\n' + text_out

        p.text(
            out + u'Out[%i:%i]: ' % (
                self.metadata['engine_id'], self.execution_count
            ) + normal + text_out
        )
161 161
162 162
class Metadata(dict):
    """Dict subclass holding task metadata with a fixed key set.

    Keys double as attributes for both reads and writes.  The key set is
    closed: assigning a key that is not already present raises (KeyError
    for item access, AttributeError for attribute access).
    """

    def __init__(self, *args, **kwargs):
        dict.__init__(self)
        defaults = dict(
            msg_id=None,
            submitted=None,
            started=None,
            completed=None,
            received=None,
            engine_uuid=None,
            engine_id=None,
            follow=None,
            after=None,
            status=None,
            pyin=None,
            pyout=None,
            pyerr=None,
            stdout='',
            stderr='',
            outputs=[],
            data={},
            outputs_ready=False,
        )
        self.update(defaults)
        # caller-supplied values override the defaults
        self.update(dict(*args, **kwargs))

    def __getattr__(self, key):
        """Attribute reads are key lookups."""
        try:
            return self[key]
        except KeyError:
            raise AttributeError(key)

    def __setattr__(self, key, value):
        """Attribute writes go through the same strict key check."""
        if key not in self:
            raise AttributeError(key)
        self[key] = value

    def __setitem__(self, key, value):
        """Enforce the static key set: only existing keys may be assigned."""
        if key not in self:
            raise KeyError(key)
        dict.__setitem__(self, key, value)
216 216
217 217
218 218 class Client(HasTraits):
219 219 """A semi-synchronous client to the IPython ZMQ cluster
220 220
221 221 Parameters
222 222 ----------
223 223
224 224 url_file : str/unicode; path to ipcontroller-client.json
225 225 This JSON file should contain all the information needed to connect to a cluster,
226 226 and is likely the only argument needed.
227 227 Connection information for the Hub's registration. If a json connector
228 228 file is given, then likely no further configuration is necessary.
229 229 [Default: use profile]
230 230 profile : bytes
231 231 The name of the Cluster profile to be used to find connector information.
232 232 If run from an IPython application, the default profile will be the same
233 233 as the running application, otherwise it will be 'default'.
234 234 cluster_id : str
235 235 String id to added to runtime files, to prevent name collisions when using
236 236 multiple clusters with a single profile simultaneously.
237 237 When set, will look for files named like: 'ipcontroller-<cluster_id>-client.json'
238 238 Since this is text inserted into filenames, typical recommendations apply:
239 239 Simple character strings are ideal, and spaces are not recommended (but
240 240 should generally work)
241 241 context : zmq.Context
242 242 Pass an existing zmq.Context instance, otherwise the client will create its own.
243 243 debug : bool
244 244 flag for lots of message printing for debug purposes
245 245 timeout : int/float
246 246 time (in seconds) to wait for connection replies from the Hub
247 247 [Default: 10]
248 248
249 249 #-------------- session related args ----------------
250 250
251 251 config : Config object
252 252 If specified, this will be relayed to the Session for configuration
253 253 username : str
254 254 set username for the session object
255 255
256 256 #-------------- ssh related args ----------------
257 257 # These are args for configuring the ssh tunnel to be used
258 258 # credentials are used to forward connections over ssh to the Controller
259 259 # Note that the ip given in `addr` needs to be relative to sshserver
260 260 # The most basic case is to leave addr as pointing to localhost (127.0.0.1),
261 261 # and set sshserver as the same machine the Controller is on. However,
262 262 # the only requirement is that sshserver is able to see the Controller
263 263 # (i.e. is within the same trusted network).
264 264
265 265 sshserver : str
266 266 A string of the form passed to ssh, i.e. 'server.tld' or 'user@server.tld:port'
267 267 If keyfile or password is specified, and this is not, it will default to
268 268 the ip given in addr.
269 269 sshkey : str; path to ssh private key file
270 270 This specifies a key to be used in ssh login, default None.
271 271 Regular default ssh keys will be used without specifying this argument.
272 272 password : str
273 273 Your ssh password to sshserver. Note that if this is left None,
274 274 you will be prompted for it if passwordless key based login is unavailable.
275 275 paramiko : bool
276 276 flag for whether to use paramiko instead of shell ssh for tunneling.
277 277 [default: True on win32, False else]
278 278
279 279
280 280 Attributes
281 281 ----------
282 282
283 283 ids : list of int engine IDs
284 284 requesting the ids attribute always synchronizes
285 285 the registration state. To request ids without synchronization,
286 286 use semi-private _ids attributes.
287 287
288 288 history : list of msg_ids
289 289 a list of msg_ids, keeping track of all the execution
290 290 messages you have submitted in order.
291 291
292 292 outstanding : set of msg_ids
293 293 a set of msg_ids that have been submitted, but whose
294 294 results have not yet been received.
295 295
296 296 results : dict
297 297 a dict of all our results, keyed by msg_id
298 298
299 299 block : bool
300 300 determines default behavior when block not specified
301 301 in execution methods
302 302
303 303 Methods
304 304 -------
305 305
306 306 spin
307 307 flushes incoming results and registration state changes
308 308 control methods spin, and requesting `ids` also ensures up to date
309 309
310 310 wait
311 311 wait on one or more msg_ids
312 312
313 313 execution methods
314 314 apply
315 315 legacy: execute, run
316 316
317 317 data movement
318 318 push, pull, scatter, gather
319 319
320 320 query methods
321 321 queue_status, get_result, purge, result_status
322 322
323 323 control methods
324 324 abort, shutdown
325 325
326 326 """
327 327
328 328
    # default blocking behavior when `block` isn't passed to execution methods
    block = Bool(False)
    # msg_ids submitted but whose results haven't been received yet
    outstanding = Set()
    # msg_id -> result, for everything we've received
    results = Instance('collections.defaultdict', (dict,))
    # msg_id -> Metadata record (see Metadata class above)
    metadata = Instance('collections.defaultdict', (Metadata,))
    # all execution msg_ids we have submitted, in order
    history = List()
    debug = Bool(False)
    # background spin thread handle — presumably managed by spin machinery
    # elsewhere in this class; TODO confirm
    _spin_thread = Any()
    # threading.Event used to signal the spin thread to stop (set in __init__)
    _stop_spinning = Any()
337 337
    # profile name used to locate connection files; defaults dynamically below
    profile=Unicode()
    def _profile_default(self):
        """Default to the running IPython app's profile, else u'default'."""
        if BaseIPythonApplication.initialized():
            # an IPython app *might* be running, try to get its profile
            try:
                return BaseIPythonApplication.instance().profile
            except (AttributeError, MultipleInstanceError):
                # could be a *different* subclass of config.Application,
                # which would raise one of these two errors.
                return u'default'
        else:
            return u'default'
350 350
351 351
    # presumably engine uuid -> set of outstanding msg_ids; TODO confirm
    _outstanding_dict = Instance('collections.defaultdict', (set,))
    # registered engine ids (semi-private, unsynchronized — see class docstring)
    _ids = List()
    _connected=Bool(False)
    _ssh=Bool(False)
    _context = Instance('zmq.Context')
    _config = Dict()
    # bidirectional engine id <-> uuid mapping (see _update_engines)
    _engines=Instance(util.ReverseDict, (), {})
    # _hub_socket=Instance('zmq.Socket')
    # DEALER socket to the hub's registration/query channel (made in __init__)
    _query_socket=Instance('zmq.Socket')
    _control_socket=Instance('zmq.Socket')
    _iopub_socket=Instance('zmq.Socket')
    _notification_socket=Instance('zmq.Socket')
    _mux_socket=Instance('zmq.Socket')
    _task_socket=Instance('zmq.Socket')
    # task scheduler scheme read from the connection file ('pure' disables
    # task farming when engine ids become non-contiguous)
    _task_scheme=Unicode()
    _closed = False
    _ignored_control_replies=Integer(0)
    _ignored_hub_replies=Integer(0)
370 370
    def __new__(self, *args, **kw):
        """Swallow positional args so HasTraits.__new__ only sees keywords."""
        # don't raise on positional args
        return HasTraits.__new__(self, **kw)
374 374
375 375 def __init__(self, url_file=None, profile=None, profile_dir=None, ipython_dir=None,
376 376 context=None, debug=False,
377 377 sshserver=None, sshkey=None, password=None, paramiko=None,
378 378 timeout=10, cluster_id=None, **extra_args
379 379 ):
380 380 if profile:
381 381 super(Client, self).__init__(debug=debug, profile=profile)
382 382 else:
383 383 super(Client, self).__init__(debug=debug)
384 384 if context is None:
385 385 context = zmq.Context.instance()
386 386 self._context = context
387 387 self._stop_spinning = Event()
388 388
389 389 if 'url_or_file' in extra_args:
390 390 url_file = extra_args['url_or_file']
391 391 warnings.warn("url_or_file arg no longer supported, use url_file", DeprecationWarning)
392 392
393 393 if url_file and util.is_url(url_file):
394 394 raise ValueError("single urls cannot be specified, url-files must be used.")
395 395
396 396 self._setup_profile_dir(self.profile, profile_dir, ipython_dir)
397 397
398 398 if self._cd is not None:
399 399 if url_file is None:
400 400 if not cluster_id:
401 401 client_json = 'ipcontroller-client.json'
402 402 else:
403 403 client_json = 'ipcontroller-%s-client.json' % cluster_id
404 404 url_file = pjoin(self._cd.security_dir, client_json)
405 405 if url_file is None:
406 406 raise ValueError(
407 407 "I can't find enough information to connect to a hub!"
408 408 " Please specify at least one of url_file or profile."
409 409 )
410 410
411 411 with open(url_file) as f:
412 412 cfg = json.load(f)
413 413
414 414 self._task_scheme = cfg['task_scheme']
415 415
416 416 # sync defaults from args, json:
417 417 if sshserver:
418 418 cfg['ssh'] = sshserver
419 419
420 420 location = cfg.setdefault('location', None)
421 421
422 422 proto,addr = cfg['interface'].split('://')
423 423 addr = util.disambiguate_ip_address(addr, location)
424 424 cfg['interface'] = "%s://%s" % (proto, addr)
425 425
426 426 # turn interface,port into full urls:
427 427 for key in ('control', 'task', 'mux', 'iopub', 'notification', 'registration'):
428 428 cfg[key] = cfg['interface'] + ':%i' % cfg[key]
429 429
430 430 url = cfg['registration']
431 431
432 432 if location is not None and addr == localhost():
433 433 # location specified, and connection is expected to be local
434 434 if not is_local_ip(location) and not sshserver:
435 435 # load ssh from JSON *only* if the controller is not on
436 436 # this machine
437 437 sshserver=cfg['ssh']
438 438 if not is_local_ip(location) and not sshserver:
439 439 # warn if no ssh specified, but SSH is probably needed
440 440 # This is only a warning, because the most likely cause
441 441 # is a local Controller on a laptop whose IP is dynamic
442 442 warnings.warn("""
443 443 Controller appears to be listening on localhost, but not on this machine.
444 444 If this is true, you should specify Client(...,sshserver='you@%s')
445 445 or instruct your controller to listen on an external IP."""%location,
446 446 RuntimeWarning)
447 447 elif not sshserver:
448 448 # otherwise sync with cfg
449 449 sshserver = cfg['ssh']
450 450
451 451 self._config = cfg
452 452
453 453 self._ssh = bool(sshserver or sshkey or password)
454 454 if self._ssh and sshserver is None:
455 455 # default to ssh via localhost
456 456 sshserver = addr
457 457 if self._ssh and password is None:
458 458 if tunnel.try_passwordless_ssh(sshserver, sshkey, paramiko):
459 459 password=False
460 460 else:
461 461 password = getpass("SSH Password for %s: "%sshserver)
462 462 ssh_kwargs = dict(keyfile=sshkey, password=password, paramiko=paramiko)
463 463
464 464 # configure and construct the session
465 465 try:
466 466 extra_args['packer'] = cfg['pack']
467 467 extra_args['unpacker'] = cfg['unpack']
468 468 extra_args['key'] = cast_bytes(cfg['key'])
469 469 extra_args['signature_scheme'] = cfg['signature_scheme']
470 470 except KeyError as exc:
471 471 msg = '\n'.join([
472 472 "Connection file is invalid (missing '{}'), possibly from an old version of IPython.",
473 473 "If you are reusing connection files, remove them and start ipcontroller again."
474 474 ])
475 475 raise ValueError(msg.format(exc.message))
476 476
477 477 self.session = Session(**extra_args)
478 478
479 479 self._query_socket = self._context.socket(zmq.DEALER)
480 480
481 481 if self._ssh:
482 482 tunnel.tunnel_connection(self._query_socket, cfg['registration'], sshserver, **ssh_kwargs)
483 483 else:
484 484 self._query_socket.connect(cfg['registration'])
485 485
486 486 self.session.debug = self.debug
487 487
488 488 self._notification_handlers = {'registration_notification' : self._register_engine,
489 489 'unregistration_notification' : self._unregister_engine,
490 490 'shutdown_notification' : lambda msg: self.close(),
491 491 }
492 492 self._queue_handlers = {'execute_reply' : self._handle_execute_reply,
493 493 'apply_reply' : self._handle_apply_reply}
494 494
495 495 try:
496 496 self._connect(sshserver, ssh_kwargs, timeout)
497 497 except:
498 498 self.close(linger=0)
499 499 raise
500 500
501 501 # last step: setup magics, if we are in IPython:
502 502
503 503 try:
504 504 ip = get_ipython()
505 505 except NameError:
506 506 return
507 507 else:
508 508 if 'px' not in ip.magics_manager.magics:
509 509 # in IPython but we are the first Client.
510 510 # activate a default view for parallel magics.
511 511 self.activate()
512 512
513 513 def __del__(self):
514 514 """cleanup sockets, but _not_ context."""
515 515 self.close()
516 516
517 517 def _setup_profile_dir(self, profile, profile_dir, ipython_dir):
518 518 if ipython_dir is None:
519 519 ipython_dir = get_ipython_dir()
520 520 if profile_dir is not None:
521 521 try:
522 522 self._cd = ProfileDir.find_profile_dir(profile_dir)
523 523 return
524 524 except ProfileDirError:
525 525 pass
526 526 elif profile is not None:
527 527 try:
528 528 self._cd = ProfileDir.find_profile_dir_by_name(
529 529 ipython_dir, profile)
530 530 return
531 531 except ProfileDirError:
532 532 pass
533 533 self._cd = None
534 534
535 535 def _update_engines(self, engines):
536 536 """Update our engines dict and _ids from a dict of the form: {id:uuid}."""
537 537 for k,v in iteritems(engines):
538 538 eid = int(k)
539 539 if eid not in self._engines:
540 540 self._ids.append(eid)
541 541 self._engines[eid] = v
542 542 self._ids = sorted(self._ids)
543 543 if sorted(self._engines.keys()) != list(range(len(self._engines))) and \
544 544 self._task_scheme == 'pure' and self._task_socket:
545 545 self._stop_scheduling_tasks()
546 546
547 547 def _stop_scheduling_tasks(self):
548 548 """Stop scheduling tasks because an engine has been unregistered
549 549 from a pure ZMQ scheduler.
550 550 """
551 551 self._task_socket.close()
552 552 self._task_socket = None
553 553 msg = "An engine has been unregistered, and we are using pure " +\
554 554 "ZMQ task scheduling. Task farming will be disabled."
555 555 if self.outstanding:
556 556 msg += " If you were running tasks when this happened, " +\
557 557 "some `outstanding` msg_ids may never resolve."
558 558 warnings.warn(msg, RuntimeWarning)
559 559
560 560 def _build_targets(self, targets):
561 561 """Turn valid target IDs or 'all' into two lists:
562 562 (int_ids, uuids).
563 563 """
564 564 if not self._ids:
565 565 # flush notification socket if no engines yet, just in case
566 566 if not self.ids:
567 567 raise error.NoEnginesRegistered("Can't build targets without any engines")
568 568
569 569 if targets is None:
570 570 targets = self._ids
571 571 elif isinstance(targets, string_types):
572 572 if targets.lower() == 'all':
573 573 targets = self._ids
574 574 else:
575 575 raise TypeError("%r not valid str target, must be 'all'"%(targets))
576 576 elif isinstance(targets, int):
577 577 if targets < 0:
578 578 targets = self.ids[targets]
579 579 if targets not in self._ids:
580 580 raise IndexError("No such engine: %i"%targets)
581 581 targets = [targets]
582 582
583 583 if isinstance(targets, slice):
584 584 indices = list(range(len(self._ids))[targets])
585 585 ids = self.ids
586 586 targets = [ ids[i] for i in indices ]
587 587
588 588 if not isinstance(targets, (tuple, list, xrange)):
589 589 raise TypeError("targets by int/slice/collection of ints only, not %s"%(type(targets)))
590 590
591 591 return [cast_bytes(self._engines[t]) for t in targets], list(targets)
592 592
    def _connect(self, sshserver, ssh_kwargs, timeout):
        """setup all our socket connections to the cluster. This is called from
        __init__."""

        # Maybe allow reconnecting?
        if self._connected:
            return
        self._connected=True

        def connect_socket(s, url):
            # tunnel through ssh when an ssh server was configured
            if self._ssh:
                return tunnel.tunnel_connection(s, url, sshserver, **ssh_kwargs)
            else:
                return s.connect(url)

        # ask the Hub for the connection info of all its channels
        self.session.send(self._query_socket, 'connection_request')
        # use Poller because zmq.select has wrong units in pyzmq 2.1.7
        poller = zmq.Poller()
        poller.register(self._query_socket, zmq.POLLIN)
        # poll expects milliseconds, timeout is seconds
        evts = poller.poll(timeout*1000)
        if not evts:
            raise error.TimeoutError("Hub connection request timed out")
        idents,msg = self.session.recv(self._query_socket,mode=0)
        if self.debug:
            pprint(msg)
        content = msg['content']
        # self._config['registration'] = dict(content)
        cfg = self._config
        if content['status'] == 'ok':
            # connect one socket per channel advertised by the Hub
            self._mux_socket = self._context.socket(zmq.DEALER)
            connect_socket(self._mux_socket, cfg['mux'])

            self._task_socket = self._context.socket(zmq.DEALER)
            connect_socket(self._task_socket, cfg['task'])

            # SUB sockets subscribe to everything (empty topic prefix)
            self._notification_socket = self._context.socket(zmq.SUB)
            self._notification_socket.setsockopt(zmq.SUBSCRIBE, b'')
            connect_socket(self._notification_socket, cfg['notification'])

            self._control_socket = self._context.socket(zmq.DEALER)
            connect_socket(self._control_socket, cfg['control'])

            self._iopub_socket = self._context.socket(zmq.SUB)
            self._iopub_socket.setsockopt(zmq.SUBSCRIBE, b'')
            connect_socket(self._iopub_socket, cfg['iopub'])

            # record the engines the Hub already knows about
            self._update_engines(dict(content['engines']))
        else:
            self._connected = False
            raise Exception("Failed to connect!")
644 644
645 645 #--------------------------------------------------------------------------
646 646 # handlers and callbacks for incoming messages
647 647 #--------------------------------------------------------------------------
648 648
    def _unwrap_exception(self, content):
        """unwrap exception, and remap engine_id to int."""
        e = error.unwrap_exception(content)
        # print e.traceback
        if e.engine_info:
            # NOTE(review): self._engines maps int id -> uuid (see _update_engines),
            # yet this indexes it by the engine's uuid — confirm the intended
            # mapping direction; as written it only works if a reverse entry exists.
            e_uuid = e.engine_info['engine_uuid']
            eid = self._engines[e_uuid]
            e.engine_info['engine_id'] = eid
        return e
658 658
659 659 def _extract_metadata(self, msg):
660 660 header = msg['header']
661 661 parent = msg['parent_header']
662 662 msg_meta = msg['metadata']
663 663 content = msg['content']
664 664 md = {'msg_id' : parent['msg_id'],
665 665 'received' : datetime.now(),
666 666 'engine_uuid' : msg_meta.get('engine', None),
667 667 'follow' : msg_meta.get('follow', []),
668 668 'after' : msg_meta.get('after', []),
669 669 'status' : content['status'],
670 670 }
671 671
672 672 if md['engine_uuid'] is not None:
673 673 md['engine_id'] = self._engines.get(md['engine_uuid'], None)
674 674
675 675 if 'date' in parent:
676 676 md['submitted'] = parent['date']
677 677 if 'started' in msg_meta:
678 678 md['started'] = msg_meta['started']
679 679 if 'date' in header:
680 680 md['completed'] = header['date']
681 681 return md
682 682
683 683 def _register_engine(self, msg):
684 684 """Register a new engine, and update our connection info."""
685 685 content = msg['content']
686 686 eid = content['id']
687 687 d = {eid : content['uuid']}
688 688 self._update_engines(d)
689 689
690 690 def _unregister_engine(self, msg):
691 691 """Unregister an engine that has died."""
692 692 content = msg['content']
693 693 eid = int(content['id'])
694 694 if eid in self._ids:
695 695 self._ids.remove(eid)
696 696 uuid = self._engines.pop(eid)
697 697
698 698 self._handle_stranded_msgs(eid, uuid)
699 699
700 700 if self._task_socket and self._task_scheme == 'pure':
701 701 self._stop_scheduling_tasks()
702 702
703 703 def _handle_stranded_msgs(self, eid, uuid):
704 704 """Handle messages known to be on an engine when the engine unregisters.
705 705
706 706 It is possible that this will fire prematurely - that is, an engine will
707 707 go down after completing a result, and the client will be notified
708 708 of the unregistration and later receive the successful result.
709 709 """
710 710
711 711 outstanding = self._outstanding_dict[uuid]
712 712
713 713 for msg_id in list(outstanding):
714 714 if msg_id in self.results:
715 715 # we already
716 716 continue
717 717 try:
718 718 raise error.EngineError("Engine %r died while running task %r"%(eid, msg_id))
719 719 except:
720 720 content = error.wrap_exception()
721 721 # build a fake message:
722 722 msg = self.session.msg('apply_reply', content=content)
723 723 msg['parent_header']['msg_id'] = msg_id
724 724 msg['metadata']['engine'] = uuid
725 725 self._handle_apply_reply(msg)
726 726
727 727 def _handle_execute_reply(self, msg):
728 728 """Save the reply to an execute_request into our results.
729 729
730 730 execute messages are never actually used. apply is used instead.
731 731 """
732 732
733 733 parent = msg['parent_header']
734 734 msg_id = parent['msg_id']
735 735 if msg_id not in self.outstanding:
736 736 if msg_id in self.history:
737 print(("got stale result: %s"%msg_id))
737 print("got stale result: %s"%msg_id)
738 738 else:
739 print(("got unknown result: %s"%msg_id))
739 print("got unknown result: %s"%msg_id)
740 740 else:
741 741 self.outstanding.remove(msg_id)
742 742
743 743 content = msg['content']
744 744 header = msg['header']
745 745
746 746 # construct metadata:
747 747 md = self.metadata[msg_id]
748 748 md.update(self._extract_metadata(msg))
749 749 # is this redundant?
750 750 self.metadata[msg_id] = md
751 751
752 752 e_outstanding = self._outstanding_dict[md['engine_uuid']]
753 753 if msg_id in e_outstanding:
754 754 e_outstanding.remove(msg_id)
755 755
756 756 # construct result:
757 757 if content['status'] == 'ok':
758 758 self.results[msg_id] = ExecuteReply(msg_id, content, md)
759 759 elif content['status'] == 'aborted':
760 760 self.results[msg_id] = error.TaskAborted(msg_id)
761 761 elif content['status'] == 'resubmitted':
762 762 # TODO: handle resubmission
763 763 pass
764 764 else:
765 765 self.results[msg_id] = self._unwrap_exception(content)
766 766
767 767 def _handle_apply_reply(self, msg):
768 768 """Save the reply to an apply_request into our results."""
769 769 parent = msg['parent_header']
770 770 msg_id = parent['msg_id']
771 771 if msg_id not in self.outstanding:
772 772 if msg_id in self.history:
773 print(("got stale result: %s"%msg_id))
773 print("got stale result: %s"%msg_id)
774 774 print(self.results[msg_id])
775 775 print(msg)
776 776 else:
777 print(("got unknown result: %s"%msg_id))
777 print("got unknown result: %s"%msg_id)
778 778 else:
779 779 self.outstanding.remove(msg_id)
780 780 content = msg['content']
781 781 header = msg['header']
782 782
783 783 # construct metadata:
784 784 md = self.metadata[msg_id]
785 785 md.update(self._extract_metadata(msg))
786 786 # is this redundant?
787 787 self.metadata[msg_id] = md
788 788
789 789 e_outstanding = self._outstanding_dict[md['engine_uuid']]
790 790 if msg_id in e_outstanding:
791 791 e_outstanding.remove(msg_id)
792 792
793 793 # construct result:
794 794 if content['status'] == 'ok':
795 795 self.results[msg_id] = serialize.unserialize_object(msg['buffers'])[0]
796 796 elif content['status'] == 'aborted':
797 797 self.results[msg_id] = error.TaskAborted(msg_id)
798 798 elif content['status'] == 'resubmitted':
799 799 # TODO: handle resubmission
800 800 pass
801 801 else:
802 802 self.results[msg_id] = self._unwrap_exception(content)
803 803
804 804 def _flush_notifications(self):
805 805 """Flush notifications of engine registrations waiting
806 806 in ZMQ queue."""
807 807 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
808 808 while msg is not None:
809 809 if self.debug:
810 810 pprint(msg)
811 811 msg_type = msg['header']['msg_type']
812 812 handler = self._notification_handlers.get(msg_type, None)
813 813 if handler is None:
814 814 raise Exception("Unhandled message type: %s" % msg_type)
815 815 else:
816 816 handler(msg)
817 817 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
818 818
819 819 def _flush_results(self, sock):
820 820 """Flush task or queue results waiting in ZMQ queue."""
821 821 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
822 822 while msg is not None:
823 823 if self.debug:
824 824 pprint(msg)
825 825 msg_type = msg['header']['msg_type']
826 826 handler = self._queue_handlers.get(msg_type, None)
827 827 if handler is None:
828 828 raise Exception("Unhandled message type: %s" % msg_type)
829 829 else:
830 830 handler(msg)
831 831 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
832 832
833 833 def _flush_control(self, sock):
834 834 """Flush replies from the control channel waiting
835 835 in the ZMQ queue.
836 836
837 837 Currently: ignore them."""
838 838 if self._ignored_control_replies <= 0:
839 839 return
840 840 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
841 841 while msg is not None:
842 842 self._ignored_control_replies -= 1
843 843 if self.debug:
844 844 pprint(msg)
845 845 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
846 846
    def _flush_ignored_control(self):
        """Block until every control reply we chose to ignore has been received.

        Decrements the counter one recv at a time so a failed recv leaves an
        accurate count of replies still owed.
        """
        while self._ignored_control_replies > 0:
            self.session.recv(self._control_socket)
            self._ignored_control_replies -= 1
852 852
853 853 def _flush_ignored_hub_replies(self):
854 854 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
855 855 while msg is not None:
856 856 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
857 857
    def _flush_iopub(self, sock):
        """Flush replies from the iopub channel waiting
        in the ZMQ queue.

        Folds each IOPub message (stream output, pyin/pyout/pyerr,
        display_data, data_message, status) into the metadata dict of the
        request that produced it.
        """
        idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
        while msg is not None:
            if self.debug:
                pprint(msg)
            parent = msg['parent_header']
            # ignore IOPub messages with no parent.
            # Caused by print statements or warnings from before the first execution.
            if not parent:
                idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
                continue
            msg_id = parent['msg_id']
            content = msg['content']
            header = msg['header']
            msg_type = msg['header']['msg_type']

            # init metadata:
            md = self.metadata[msg_id]

            if msg_type == 'stream':
                # append to any stream output already accumulated for this name
                name = content['name']
                s = md[name] or ''
                md[name] = s + content['data']
            elif msg_type == 'pyerr':
                md.update({'pyerr' : self._unwrap_exception(content)})
            elif msg_type == 'pyin':
                md.update({'pyin' : content['code']})
            elif msg_type == 'display_data':
                md['outputs'].append(content)
            elif msg_type == 'pyout':
                md['pyout'] = content
            elif msg_type == 'data_message':
                data, remainder = serialize.unserialize_object(msg['buffers'])
                md['data'].update(data)
            elif msg_type == 'status':
                # idle message comes after all outputs
                if content['execution_state'] == 'idle':
                    md['outputs_ready'] = True
            else:
                # unhandled msg_type (status, etc.)
                pass

            # reduntant? (md is the same mutable object we fetched above)
            self.metadata[msg_id] = md

            idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
907 907
908 908 #--------------------------------------------------------------------------
909 909 # len, getitem
910 910 #--------------------------------------------------------------------------
911 911
912 912 def __len__(self):
913 913 """len(client) returns # of engines."""
914 914 return len(self.ids)
915 915
916 916 def __getitem__(self, key):
917 917 """index access returns DirectView multiplexer objects
918 918
919 919 Must be int, slice, or list/tuple/xrange of ints"""
920 920 if not isinstance(key, (int, slice, tuple, list, xrange)):
921 921 raise TypeError("key by int/slice/iterable of ints only, not %s"%(type(key)))
922 922 else:
923 923 return self.direct_view(key)
924 924
925 925 #--------------------------------------------------------------------------
926 926 # Begin public methods
927 927 #--------------------------------------------------------------------------
928 928
929 929 @property
930 930 def ids(self):
931 931 """Always up-to-date ids property."""
932 932 self._flush_notifications()
933 933 # always copy:
934 934 return list(self._ids)
935 935
936 936 def activate(self, targets='all', suffix=''):
937 937 """Create a DirectView and register it with IPython magics
938 938
939 939 Defines the magics `%px, %autopx, %pxresult, %%px`
940 940
941 941 Parameters
942 942 ----------
943 943
944 944 targets: int, list of ints, or 'all'
945 945 The engines on which the view's magics will run
946 946 suffix: str [default: '']
947 947 The suffix, if any, for the magics. This allows you to have
948 948 multiple views associated with parallel magics at the same time.
949 949
950 950 e.g. ``rc.activate(targets=0, suffix='0')`` will give you
951 951 the magics ``%px0``, ``%pxresult0``, etc. for running magics just
952 952 on engine 0.
953 953 """
954 954 view = self.direct_view(targets)
955 955 view.block = True
956 956 view.activate(suffix)
957 957 return view
958 958
959 959 def close(self, linger=None):
960 960 """Close my zmq Sockets
961 961
962 962 If `linger`, set the zmq LINGER socket option,
963 963 which allows discarding of messages.
964 964 """
965 965 if self._closed:
966 966 return
967 967 self.stop_spin_thread()
968 968 snames = [ trait for trait in self.trait_names() if trait.endswith("socket") ]
969 969 for name in snames:
970 970 socket = getattr(self, name)
971 971 if socket is not None and not socket.closed:
972 972 if linger is not None:
973 973 socket.close(linger=linger)
974 974 else:
975 975 socket.close()
976 976 self._closed = True
977 977
978 978 def _spin_every(self, interval=1):
979 979 """target func for use in spin_thread"""
980 980 while True:
981 981 if self._stop_spinning.is_set():
982 982 return
983 983 time.sleep(interval)
984 984 self.spin()
985 985
986 986 def spin_thread(self, interval=1):
987 987 """call Client.spin() in a background thread on some regular interval
988 988
989 989 This helps ensure that messages don't pile up too much in the zmq queue
990 990 while you are working on other things, or just leaving an idle terminal.
991 991
992 992 It also helps limit potential padding of the `received` timestamp
993 993 on AsyncResult objects, used for timings.
994 994
995 995 Parameters
996 996 ----------
997 997
998 998 interval : float, optional
999 999 The interval on which to spin the client in the background thread
1000 1000 (simply passed to time.sleep).
1001 1001
1002 1002 Notes
1003 1003 -----
1004 1004
1005 1005 For precision timing, you may want to use this method to put a bound
1006 1006 on the jitter (in seconds) in `received` timestamps used
1007 1007 in AsyncResult.wall_time.
1008 1008
1009 1009 """
1010 1010 if self._spin_thread is not None:
1011 1011 self.stop_spin_thread()
1012 1012 self._stop_spinning.clear()
1013 1013 self._spin_thread = Thread(target=self._spin_every, args=(interval,))
1014 1014 self._spin_thread.daemon = True
1015 1015 self._spin_thread.start()
1016 1016
1017 1017 def stop_spin_thread(self):
1018 1018 """stop background spin_thread, if any"""
1019 1019 if self._spin_thread is not None:
1020 1020 self._stop_spinning.set()
1021 1021 self._spin_thread.join()
1022 1022 self._spin_thread = None
1023 1023
1024 1024 def spin(self):
1025 1025 """Flush any registration notifications and execution results
1026 1026 waiting in the ZMQ queue.
1027 1027 """
1028 1028 if self._notification_socket:
1029 1029 self._flush_notifications()
1030 1030 if self._iopub_socket:
1031 1031 self._flush_iopub(self._iopub_socket)
1032 1032 if self._mux_socket:
1033 1033 self._flush_results(self._mux_socket)
1034 1034 if self._task_socket:
1035 1035 self._flush_results(self._task_socket)
1036 1036 if self._control_socket:
1037 1037 self._flush_control(self._control_socket)
1038 1038 if self._query_socket:
1039 1039 self._flush_ignored_hub_replies()
1040 1040
1041 1041 def wait(self, jobs=None, timeout=-1):
1042 1042 """waits on one or more `jobs`, for up to `timeout` seconds.
1043 1043
1044 1044 Parameters
1045 1045 ----------
1046 1046
1047 1047 jobs : int, str, or list of ints and/or strs, or one or more AsyncResult objects
1048 1048 ints are indices to self.history
1049 1049 strs are msg_ids
1050 1050 default: wait on all outstanding messages
1051 1051 timeout : float
1052 1052 a time in seconds, after which to give up.
1053 1053 default is -1, which means no timeout
1054 1054
1055 1055 Returns
1056 1056 -------
1057 1057
1058 1058 True : when all msg_ids are done
1059 1059 False : timeout reached, some msg_ids still outstanding
1060 1060 """
1061 1061 tic = time.time()
1062 1062 if jobs is None:
1063 1063 theids = self.outstanding
1064 1064 else:
1065 1065 if isinstance(jobs, string_types + (int, AsyncResult)):
1066 1066 jobs = [jobs]
1067 1067 theids = set()
1068 1068 for job in jobs:
1069 1069 if isinstance(job, int):
1070 1070 # index access
1071 1071 job = self.history[job]
1072 1072 elif isinstance(job, AsyncResult):
1073 1073 theids.update(job.msg_ids)
1074 1074 continue
1075 1075 theids.add(job)
1076 1076 if not theids.intersection(self.outstanding):
1077 1077 return True
1078 1078 self.spin()
1079 1079 while theids.intersection(self.outstanding):
1080 1080 if timeout >= 0 and ( time.time()-tic ) > timeout:
1081 1081 break
1082 1082 time.sleep(1e-3)
1083 1083 self.spin()
1084 1084 return len(theids.intersection(self.outstanding)) == 0
1085 1085
1086 1086 #--------------------------------------------------------------------------
1087 1087 # Control methods
1088 1088 #--------------------------------------------------------------------------
1089 1089
1090 1090 @spin_first
1091 1091 def clear(self, targets=None, block=None):
1092 1092 """Clear the namespace in target(s)."""
1093 1093 block = self.block if block is None else block
1094 1094 targets = self._build_targets(targets)[0]
1095 1095 for t in targets:
1096 1096 self.session.send(self._control_socket, 'clear_request', content={}, ident=t)
1097 1097 error = False
1098 1098 if block:
1099 1099 self._flush_ignored_control()
1100 1100 for i in range(len(targets)):
1101 1101 idents,msg = self.session.recv(self._control_socket,0)
1102 1102 if self.debug:
1103 1103 pprint(msg)
1104 1104 if msg['content']['status'] != 'ok':
1105 1105 error = self._unwrap_exception(msg['content'])
1106 1106 else:
1107 1107 self._ignored_control_replies += len(targets)
1108 1108 if error:
1109 1109 raise error
1110 1110
1111 1111
1112 1112 @spin_first
1113 1113 def abort(self, jobs=None, targets=None, block=None):
1114 1114 """Abort specific jobs from the execution queues of target(s).
1115 1115
1116 1116 This is a mechanism to prevent jobs that have already been submitted
1117 1117 from executing.
1118 1118
1119 1119 Parameters
1120 1120 ----------
1121 1121
1122 1122 jobs : msg_id, list of msg_ids, or AsyncResult
1123 1123 The jobs to be aborted
1124 1124
1125 1125 If unspecified/None: abort all outstanding jobs.
1126 1126
1127 1127 """
1128 1128 block = self.block if block is None else block
1129 1129 jobs = jobs if jobs is not None else list(self.outstanding)
1130 1130 targets = self._build_targets(targets)[0]
1131 1131
1132 1132 msg_ids = []
1133 1133 if isinstance(jobs, string_types + (AsyncResult,)):
1134 1134 jobs = [jobs]
1135 1135 bad_ids = [obj for obj in jobs if not isinstance(obj, string_types + (AsyncResult,))]
1136 1136 if bad_ids:
1137 1137 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1138 1138 for j in jobs:
1139 1139 if isinstance(j, AsyncResult):
1140 1140 msg_ids.extend(j.msg_ids)
1141 1141 else:
1142 1142 msg_ids.append(j)
1143 1143 content = dict(msg_ids=msg_ids)
1144 1144 for t in targets:
1145 1145 self.session.send(self._control_socket, 'abort_request',
1146 1146 content=content, ident=t)
1147 1147 error = False
1148 1148 if block:
1149 1149 self._flush_ignored_control()
1150 1150 for i in range(len(targets)):
1151 1151 idents,msg = self.session.recv(self._control_socket,0)
1152 1152 if self.debug:
1153 1153 pprint(msg)
1154 1154 if msg['content']['status'] != 'ok':
1155 1155 error = self._unwrap_exception(msg['content'])
1156 1156 else:
1157 1157 self._ignored_control_replies += len(targets)
1158 1158 if error:
1159 1159 raise error
1160 1160
1161 1161 @spin_first
1162 1162 def shutdown(self, targets='all', restart=False, hub=False, block=None):
1163 1163 """Terminates one or more engine processes, optionally including the hub.
1164 1164
1165 1165 Parameters
1166 1166 ----------
1167 1167
1168 1168 targets: list of ints or 'all' [default: all]
1169 1169 Which engines to shutdown.
1170 1170 hub: bool [default: False]
1171 1171 Whether to include the Hub. hub=True implies targets='all'.
1172 1172 block: bool [default: self.block]
1173 1173 Whether to wait for clean shutdown replies or not.
1174 1174 restart: bool [default: False]
1175 1175 NOT IMPLEMENTED
1176 1176 whether to restart engines after shutting them down.
1177 1177 """
1178 1178 from IPython.parallel.error import NoEnginesRegistered
1179 1179 if restart:
1180 1180 raise NotImplementedError("Engine restart is not yet implemented")
1181 1181
1182 1182 block = self.block if block is None else block
1183 1183 if hub:
1184 1184 targets = 'all'
1185 1185 try:
1186 1186 targets = self._build_targets(targets)[0]
1187 1187 except NoEnginesRegistered:
1188 1188 targets = []
1189 1189 for t in targets:
1190 1190 self.session.send(self._control_socket, 'shutdown_request',
1191 1191 content={'restart':restart},ident=t)
1192 1192 error = False
1193 1193 if block or hub:
1194 1194 self._flush_ignored_control()
1195 1195 for i in range(len(targets)):
1196 1196 idents,msg = self.session.recv(self._control_socket, 0)
1197 1197 if self.debug:
1198 1198 pprint(msg)
1199 1199 if msg['content']['status'] != 'ok':
1200 1200 error = self._unwrap_exception(msg['content'])
1201 1201 else:
1202 1202 self._ignored_control_replies += len(targets)
1203 1203
1204 1204 if hub:
1205 1205 time.sleep(0.25)
1206 1206 self.session.send(self._query_socket, 'shutdown_request')
1207 1207 idents,msg = self.session.recv(self._query_socket, 0)
1208 1208 if self.debug:
1209 1209 pprint(msg)
1210 1210 if msg['content']['status'] != 'ok':
1211 1211 error = self._unwrap_exception(msg['content'])
1212 1212
1213 1213 if error:
1214 1214 raise error
1215 1215
1216 1216 #--------------------------------------------------------------------------
1217 1217 # Execution related methods
1218 1218 #--------------------------------------------------------------------------
1219 1219
1220 1220 def _maybe_raise(self, result):
1221 1221 """wrapper for maybe raising an exception if apply failed."""
1222 1222 if isinstance(result, error.RemoteError):
1223 1223 raise result
1224 1224
1225 1225 return result
1226 1226
    def send_apply_request(self, socket, f, args=None, kwargs=None, metadata=None, track=False,
                            ident=None):
        """construct and send an apply message via a socket.

        This is the principal method with which all engine execution is performed by views.
        """

        if self._closed:
            raise RuntimeError("Client cannot be used after its sockets have been closed")

        # defaults:
        args = args if args is not None else []
        kwargs = kwargs if kwargs is not None else {}
        metadata = metadata if metadata is not None else {}

        # validate arguments
        if not callable(f) and not isinstance(f, Reference):
            raise TypeError("f must be callable, not %s"%type(f))
        if not isinstance(args, (tuple, list)):
            raise TypeError("args must be tuple or list, not %s"%type(args))
        if not isinstance(kwargs, dict):
            raise TypeError("kwargs must be dict, not %s"%type(kwargs))
        if not isinstance(metadata, dict):
            raise TypeError("metadata must be dict, not %s"%type(metadata))

        # serialize the call into zmq message buffers
        bufs = serialize.pack_apply_message(f, args, kwargs,
            buffer_threshold=self.session.buffer_threshold,
            item_threshold=self.session.item_threshold,
        )

        msg = self.session.send(socket, "apply_request", buffers=bufs, ident=ident,
                            metadata=metadata, track=track)

        # record the request as outstanding for result tracking
        msg_id = msg['header']['msg_id']
        self.outstanding.add(msg_id)
        if ident:
            # possibly routed to a specific engine
            if isinstance(ident, list):
                ident = ident[-1]
            if ident in self._engines.values():
                # save for later, in case of engine death
                self._outstanding_dict[ident].add(msg_id)
        self.history.append(msg_id)
        self.metadata[msg_id]['submitted'] = datetime.now()

        return msg
1273 1273
    def send_execute_request(self, socket, code, silent=True, metadata=None, ident=None):
        """construct and send an execute request via a socket.

        """

        if self._closed:
            raise RuntimeError("Client cannot be used after its sockets have been closed")

        # defaults:
        metadata = metadata if metadata is not None else {}

        # validate arguments
        if not isinstance(code, string_types):
            raise TypeError("code must be text, not %s" % type(code))
        if not isinstance(metadata, dict):
            raise TypeError("metadata must be dict, not %s" % type(metadata))

        content = dict(code=code, silent=bool(silent), user_variables=[], user_expressions={})


        msg = self.session.send(socket, "execute_request", content=content, ident=ident,
                            metadata=metadata)

        # record the request as outstanding for result tracking
        msg_id = msg['header']['msg_id']
        self.outstanding.add(msg_id)
        if ident:
            # possibly routed to a specific engine
            if isinstance(ident, list):
                ident = ident[-1]
            if ident in self._engines.values():
                # save for later, in case of engine death
                self._outstanding_dict[ident].add(msg_id)
        self.history.append(msg_id)
        self.metadata[msg_id]['submitted'] = datetime.now()

        return msg
1310 1310
1311 1311 #--------------------------------------------------------------------------
1312 1312 # construct a View object
1313 1313 #--------------------------------------------------------------------------
1314 1314
1315 1315 def load_balanced_view(self, targets=None):
1316 1316 """construct a DirectView object.
1317 1317
1318 1318 If no arguments are specified, create a LoadBalancedView
1319 1319 using all engines.
1320 1320
1321 1321 Parameters
1322 1322 ----------
1323 1323
1324 1324 targets: list,slice,int,etc. [default: use all engines]
1325 1325 The subset of engines across which to load-balance
1326 1326 """
1327 1327 if targets == 'all':
1328 1328 targets = None
1329 1329 if targets is not None:
1330 1330 targets = self._build_targets(targets)[1]
1331 1331 return LoadBalancedView(client=self, socket=self._task_socket, targets=targets)
1332 1332
1333 1333 def direct_view(self, targets='all'):
1334 1334 """construct a DirectView object.
1335 1335
1336 1336 If no targets are specified, create a DirectView using all engines.
1337 1337
1338 1338 rc.direct_view('all') is distinguished from rc[:] in that 'all' will
1339 1339 evaluate the target engines at each execution, whereas rc[:] will connect to
1340 1340 all *current* engines, and that list will not change.
1341 1341
1342 1342 That is, 'all' will always use all engines, whereas rc[:] will not use
1343 1343 engines added after the DirectView is constructed.
1344 1344
1345 1345 Parameters
1346 1346 ----------
1347 1347
1348 1348 targets: list,slice,int,etc. [default: use all engines]
1349 1349 The engines to use for the View
1350 1350 """
1351 1351 single = isinstance(targets, int)
1352 1352 # allow 'all' to be lazily evaluated at each execution
1353 1353 if targets != 'all':
1354 1354 targets = self._build_targets(targets)[1]
1355 1355 if single:
1356 1356 targets = targets[0]
1357 1357 return DirectView(client=self, socket=self._mux_socket, targets=targets)
1358 1358
1359 1359 #--------------------------------------------------------------------------
1360 1360 # Query methods
1361 1361 #--------------------------------------------------------------------------
1362 1362
    @spin_first
    def get_result(self, indices_or_msg_ids=None, block=None):
        """Retrieve a result by msg_id or history index, wrapped in an AsyncResult object.

        If the client already has the results, no request to the Hub will be made.

        This is a convenient way to construct AsyncResult objects, which are wrappers
        that include metadata about execution, and allow for awaiting results that
        were not submitted by this Client.

        It can also be a convenient way to retrieve the metadata associated with
        blocking execution, since it always retrieves

        Examples
        --------
        ::

            In [10]: r = client.apply()

        Parameters
        ----------

        indices_or_msg_ids : integer history index, str msg_id, or list of either
            The indices or msg_ids of indices to be retrieved

        block : bool
            Whether to wait for the result to be done

        Returns
        -------

        AsyncResult
            A single AsyncResult object will always be returned.

        AsyncHubResult
            A subclass of AsyncResult that retrieves results from the Hub

        """
        block = self.block if block is None else block
        if indices_or_msg_ids is None:
            # default: the most recent submission
            indices_or_msg_ids = -1

        single_result = False
        if not isinstance(indices_or_msg_ids, (list,tuple)):
            indices_or_msg_ids = [indices_or_msg_ids]
            single_result = True

        theids = []
        for id in indices_or_msg_ids:
            if isinstance(id, int):
                # history index -> msg_id
                id = self.history[id]
            if not isinstance(id, string_types):
                raise TypeError("indices must be str or int, not %r"%id)
            theids.append(id)

        # split ids into those we hold locally and those the Hub must supply
        local_ids = [msg_id for msg_id in theids if (msg_id in self.outstanding or msg_id in self.results)]
        remote_ids = [msg_id for msg_id in theids if msg_id not in local_ids]

        # given single msg_id initially, get_result should get the result itself,
        # not a length-one list
        if single_result:
            theids = theids[0]

        if remote_ids:
            ar = AsyncHubResult(self, msg_ids=theids)
        else:
            ar = AsyncResult(self, msg_ids=theids)

        if block:
            ar.wait()

        return ar
1435 1435
1436 1436 @spin_first
1437 1437 def resubmit(self, indices_or_msg_ids=None, metadata=None, block=None):
1438 1438 """Resubmit one or more tasks.
1439 1439
1440 1440 in-flight tasks may not be resubmitted.
1441 1441
1442 1442 Parameters
1443 1443 ----------
1444 1444
1445 1445 indices_or_msg_ids : integer history index, str msg_id, or list of either
1446 1446 The indices or msg_ids of indices to be retrieved
1447 1447
1448 1448 block : bool
1449 1449 Whether to wait for the result to be done
1450 1450
1451 1451 Returns
1452 1452 -------
1453 1453
1454 1454 AsyncHubResult
1455 1455 A subclass of AsyncResult that retrieves results from the Hub
1456 1456
1457 1457 """
1458 1458 block = self.block if block is None else block
1459 1459 if indices_or_msg_ids is None:
1460 1460 indices_or_msg_ids = -1
1461 1461
1462 1462 if not isinstance(indices_or_msg_ids, (list,tuple)):
1463 1463 indices_or_msg_ids = [indices_or_msg_ids]
1464 1464
1465 1465 theids = []
1466 1466 for id in indices_or_msg_ids:
1467 1467 if isinstance(id, int):
1468 1468 id = self.history[id]
1469 1469 if not isinstance(id, string_types):
1470 1470 raise TypeError("indices must be str or int, not %r"%id)
1471 1471 theids.append(id)
1472 1472
1473 1473 content = dict(msg_ids = theids)
1474 1474
1475 1475 self.session.send(self._query_socket, 'resubmit_request', content)
1476 1476
1477 1477 zmq.select([self._query_socket], [], [])
1478 1478 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1479 1479 if self.debug:
1480 1480 pprint(msg)
1481 1481 content = msg['content']
1482 1482 if content['status'] != 'ok':
1483 1483 raise self._unwrap_exception(content)
1484 1484 mapping = content['resubmitted']
1485 1485 new_ids = [ mapping[msg_id] for msg_id in theids ]
1486 1486
1487 1487 ar = AsyncHubResult(self, msg_ids=new_ids)
1488 1488
1489 1489 if block:
1490 1490 ar.wait()
1491 1491
1492 1492 return ar
1493 1493
1494 1494 @spin_first
1495 1495 def result_status(self, msg_ids, status_only=True):
1496 1496 """Check on the status of the result(s) of the apply request with `msg_ids`.
1497 1497
1498 1498 If status_only is False, then the actual results will be retrieved, else
1499 1499 only the status of the results will be checked.
1500 1500
1501 1501 Parameters
1502 1502 ----------
1503 1503
1504 1504 msg_ids : list of msg_ids
1505 1505 if int:
1506 1506 Passed as index to self.history for convenience.
1507 1507 status_only : bool (default: True)
1508 1508 if False:
1509 1509 Retrieve the actual results of completed tasks.
1510 1510
1511 1511 Returns
1512 1512 -------
1513 1513
1514 1514 results : dict
1515 1515 There will always be the keys 'pending' and 'completed', which will
1516 1516 be lists of msg_ids that are incomplete or complete. If `status_only`
1517 1517 is False, then completed results will be keyed by their `msg_id`.
1518 1518 """
1519 1519 if not isinstance(msg_ids, (list,tuple)):
1520 1520 msg_ids = [msg_ids]
1521 1521
1522 1522 theids = []
1523 1523 for msg_id in msg_ids:
1524 1524 if isinstance(msg_id, int):
1525 1525 msg_id = self.history[msg_id]
1526 1526 if not isinstance(msg_id, string_types):
1527 1527 raise TypeError("msg_ids must be str, not %r"%msg_id)
1528 1528 theids.append(msg_id)
1529 1529
1530 1530 completed = []
1531 1531 local_results = {}
1532 1532
1533 1533 # comment this block out to temporarily disable local shortcut:
1534 1534 for msg_id in theids:
1535 1535 if msg_id in self.results:
1536 1536 completed.append(msg_id)
1537 1537 local_results[msg_id] = self.results[msg_id]
1538 1538 theids.remove(msg_id)
1539 1539
1540 1540 if theids: # some not locally cached
1541 1541 content = dict(msg_ids=theids, status_only=status_only)
1542 1542 msg = self.session.send(self._query_socket, "result_request", content=content)
1543 1543 zmq.select([self._query_socket], [], [])
1544 1544 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1545 1545 if self.debug:
1546 1546 pprint(msg)
1547 1547 content = msg['content']
1548 1548 if content['status'] != 'ok':
1549 1549 raise self._unwrap_exception(content)
1550 1550 buffers = msg['buffers']
1551 1551 else:
1552 1552 content = dict(completed=[],pending=[])
1553 1553
1554 1554 content['completed'].extend(completed)
1555 1555
1556 1556 if status_only:
1557 1557 return content
1558 1558
1559 1559 failures = []
1560 1560 # load cached results into result:
1561 1561 content.update(local_results)
1562 1562
1563 1563 # update cache with results:
1564 1564 for msg_id in sorted(theids):
1565 1565 if msg_id in content['completed']:
1566 1566 rec = content[msg_id]
1567 1567 parent = rec['header']
1568 1568 header = rec['result_header']
1569 1569 rcontent = rec['result_content']
1570 1570 iodict = rec['io']
1571 1571 if isinstance(rcontent, str):
1572 1572 rcontent = self.session.unpack(rcontent)
1573 1573
1574 1574 md = self.metadata[msg_id]
1575 1575 md_msg = dict(
1576 1576 content=rcontent,
1577 1577 parent_header=parent,
1578 1578 header=header,
1579 1579 metadata=rec['result_metadata'],
1580 1580 )
1581 1581 md.update(self._extract_metadata(md_msg))
1582 1582 if rec.get('received'):
1583 1583 md['received'] = rec['received']
1584 1584 md.update(iodict)
1585 1585
1586 1586 if rcontent['status'] == 'ok':
1587 1587 if header['msg_type'] == 'apply_reply':
1588 1588 res,buffers = serialize.unserialize_object(buffers)
1589 1589 elif header['msg_type'] == 'execute_reply':
1590 1590 res = ExecuteReply(msg_id, rcontent, md)
1591 1591 else:
1592 1592 raise KeyError("unhandled msg type: %r" % header['msg_type'])
1593 1593 else:
1594 1594 res = self._unwrap_exception(rcontent)
1595 1595 failures.append(res)
1596 1596
1597 1597 self.results[msg_id] = res
1598 1598 content[msg_id] = res
1599 1599
1600 1600 if len(theids) == 1 and failures:
1601 1601 raise failures[0]
1602 1602
1603 1603 error.collect_exceptions(failures, "result_status")
1604 1604 return content
1605 1605
1606 1606 @spin_first
1607 1607 def queue_status(self, targets='all', verbose=False):
1608 1608 """Fetch the status of engine queues.
1609 1609
1610 1610 Parameters
1611 1611 ----------
1612 1612
1613 1613 targets : int/str/list of ints/strs
1614 1614 the engines whose states are to be queried.
1615 1615 default : all
1616 1616 verbose : bool
1617 1617 Whether to return lengths only, or lists of ids for each element
1618 1618 """
1619 1619 if targets == 'all':
1620 1620 # allow 'all' to be evaluated on the engine
1621 1621 engine_ids = None
1622 1622 else:
1623 1623 engine_ids = self._build_targets(targets)[1]
1624 1624 content = dict(targets=engine_ids, verbose=verbose)
1625 1625 self.session.send(self._query_socket, "queue_request", content=content)
1626 1626 idents,msg = self.session.recv(self._query_socket, 0)
1627 1627 if self.debug:
1628 1628 pprint(msg)
1629 1629 content = msg['content']
1630 1630 status = content.pop('status')
1631 1631 if status != 'ok':
1632 1632 raise self._unwrap_exception(content)
1633 1633 content = rekey(content)
1634 1634 if isinstance(targets, int):
1635 1635 return content[targets]
1636 1636 else:
1637 1637 return content
1638 1638
1639 1639 def _build_msgids_from_target(self, targets=None):
1640 1640 """Build a list of msg_ids from the list of engine targets"""
1641 1641 if not targets: # needed as _build_targets otherwise uses all engines
1642 1642 return []
1643 1643 target_ids = self._build_targets(targets)[0]
1644 1644 return [md_id for md_id in self.metadata if self.metadata[md_id]["engine_uuid"] in target_ids]
1645 1645
1646 1646 def _build_msgids_from_jobs(self, jobs=None):
1647 1647 """Build a list of msg_ids from "jobs" """
1648 1648 if not jobs:
1649 1649 return []
1650 1650 msg_ids = []
1651 1651 if isinstance(jobs, string_types + (AsyncResult,)):
1652 1652 jobs = [jobs]
1653 1653 bad_ids = [obj for obj in jobs if not isinstance(obj, string_types + (AsyncResult,))]
1654 1654 if bad_ids:
1655 1655 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1656 1656 for j in jobs:
1657 1657 if isinstance(j, AsyncResult):
1658 1658 msg_ids.extend(j.msg_ids)
1659 1659 else:
1660 1660 msg_ids.append(j)
1661 1661 return msg_ids
1662 1662
1663 1663 def purge_local_results(self, jobs=[], targets=[]):
1664 1664 """Clears the client caches of results and frees such memory.
1665 1665
1666 1666 Individual results can be purged by msg_id, or the entire
1667 1667 history of specific targets can be purged.
1668 1668
1669 1669 Use `purge_local_results('all')` to scrub everything from the Clients's db.
1670 1670
1671 1671 The client must have no outstanding tasks before purging the caches.
1672 1672 Raises `AssertionError` if there are still outstanding tasks.
1673 1673
1674 1674 After this call all `AsyncResults` are invalid and should be discarded.
1675 1675
1676 1676 If you must "reget" the results, you can still do so by using
1677 1677 `client.get_result(msg_id)` or `client.get_result(asyncresult)`. This will
1678 1678 redownload the results from the hub if they are still available
1679 1679 (i.e `client.purge_hub_results(...)` has not been called.
1680 1680
1681 1681 Parameters
1682 1682 ----------
1683 1683
1684 1684 jobs : str or list of str or AsyncResult objects
1685 1685 the msg_ids whose results should be purged.
1686 1686 targets : int/str/list of ints/strs
1687 1687 The targets, by int_id, whose entire results are to be purged.
1688 1688
1689 1689 default : None
1690 1690 """
1691 1691 assert not self.outstanding, "Can't purge a client with outstanding tasks!"
1692 1692
1693 1693 if not targets and not jobs:
1694 1694 raise ValueError("Must specify at least one of `targets` and `jobs`")
1695 1695
1696 1696 if jobs == 'all':
1697 1697 self.results.clear()
1698 1698 self.metadata.clear()
1699 1699 return
1700 1700 else:
1701 1701 msg_ids = []
1702 1702 msg_ids.extend(self._build_msgids_from_target(targets))
1703 1703 msg_ids.extend(self._build_msgids_from_jobs(jobs))
1704 1704 for mid in msg_ids:
1705 1705 self.results.pop(mid)
1706 1706 self.metadata.pop(mid)
1707 1707
1708 1708
1709 1709 @spin_first
1710 1710 def purge_hub_results(self, jobs=[], targets=[]):
1711 1711 """Tell the Hub to forget results.
1712 1712
1713 1713 Individual results can be purged by msg_id, or the entire
1714 1714 history of specific targets can be purged.
1715 1715
1716 1716 Use `purge_results('all')` to scrub everything from the Hub's db.
1717 1717
1718 1718 Parameters
1719 1719 ----------
1720 1720
1721 1721 jobs : str or list of str or AsyncResult objects
1722 1722 the msg_ids whose results should be forgotten.
1723 1723 targets : int/str/list of ints/strs
1724 1724 The targets, by int_id, whose entire history is to be purged.
1725 1725
1726 1726 default : None
1727 1727 """
1728 1728 if not targets and not jobs:
1729 1729 raise ValueError("Must specify at least one of `targets` and `jobs`")
1730 1730 if targets:
1731 1731 targets = self._build_targets(targets)[1]
1732 1732
1733 1733 # construct msg_ids from jobs
1734 1734 if jobs == 'all':
1735 1735 msg_ids = jobs
1736 1736 else:
1737 1737 msg_ids = self._build_msgids_from_jobs(jobs)
1738 1738
1739 1739 content = dict(engine_ids=targets, msg_ids=msg_ids)
1740 1740 self.session.send(self._query_socket, "purge_request", content=content)
1741 1741 idents, msg = self.session.recv(self._query_socket, 0)
1742 1742 if self.debug:
1743 1743 pprint(msg)
1744 1744 content = msg['content']
1745 1745 if content['status'] != 'ok':
1746 1746 raise self._unwrap_exception(content)
1747 1747
1748 1748 def purge_results(self, jobs=[], targets=[]):
1749 1749 """Clears the cached results from both the hub and the local client
1750 1750
1751 1751 Individual results can be purged by msg_id, or the entire
1752 1752 history of specific targets can be purged.
1753 1753
1754 1754 Use `purge_results('all')` to scrub every cached result from both the Hub's and
1755 1755 the Client's db.
1756 1756
1757 1757 Equivalent to calling both `purge_hub_results()` and `purge_client_results()` with
1758 1758 the same arguments.
1759 1759
1760 1760 Parameters
1761 1761 ----------
1762 1762
1763 1763 jobs : str or list of str or AsyncResult objects
1764 1764 the msg_ids whose results should be forgotten.
1765 1765 targets : int/str/list of ints/strs
1766 1766 The targets, by int_id, whose entire history is to be purged.
1767 1767
1768 1768 default : None
1769 1769 """
1770 1770 self.purge_local_results(jobs=jobs, targets=targets)
1771 1771 self.purge_hub_results(jobs=jobs, targets=targets)
1772 1772
    def purge_everything(self):
        """Clears all content from previous Tasks from both the hub and the local client

        In addition to calling `purge_results("all")` it also deletes the history and
        other bookkeeping lists.
        """
        # scrub both the Hub's db and the local result/metadata caches
        self.purge_results("all")
        # forget the submission order of all previous tasks...
        self.history = []
        # ...and the session's message-digest bookkeeping
        self.session.digest_history.clear()
1782 1782
1783 1783 @spin_first
1784 1784 def hub_history(self):
1785 1785 """Get the Hub's history
1786 1786
1787 1787 Just like the Client, the Hub has a history, which is a list of msg_ids.
1788 1788 This will contain the history of all clients, and, depending on configuration,
1789 1789 may contain history across multiple cluster sessions.
1790 1790
1791 1791 Any msg_id returned here is a valid argument to `get_result`.
1792 1792
1793 1793 Returns
1794 1794 -------
1795 1795
1796 1796 msg_ids : list of strs
1797 1797 list of all msg_ids, ordered by task submission time.
1798 1798 """
1799 1799
1800 1800 self.session.send(self._query_socket, "history_request", content={})
1801 1801 idents, msg = self.session.recv(self._query_socket, 0)
1802 1802
1803 1803 if self.debug:
1804 1804 pprint(msg)
1805 1805 content = msg['content']
1806 1806 if content['status'] != 'ok':
1807 1807 raise self._unwrap_exception(content)
1808 1808 else:
1809 1809 return content['history']
1810 1810
1811 1811 @spin_first
1812 1812 def db_query(self, query, keys=None):
1813 1813 """Query the Hub's TaskRecord database
1814 1814
1815 1815 This will return a list of task record dicts that match `query`
1816 1816
1817 1817 Parameters
1818 1818 ----------
1819 1819
1820 1820 query : mongodb query dict
1821 1821 The search dict. See mongodb query docs for details.
1822 1822 keys : list of strs [optional]
1823 1823 The subset of keys to be returned. The default is to fetch everything but buffers.
1824 1824 'msg_id' will *always* be included.
1825 1825 """
1826 1826 if isinstance(keys, string_types):
1827 1827 keys = [keys]
1828 1828 content = dict(query=query, keys=keys)
1829 1829 self.session.send(self._query_socket, "db_request", content=content)
1830 1830 idents, msg = self.session.recv(self._query_socket, 0)
1831 1831 if self.debug:
1832 1832 pprint(msg)
1833 1833 content = msg['content']
1834 1834 if content['status'] != 'ok':
1835 1835 raise self._unwrap_exception(content)
1836 1836
1837 1837 records = content['records']
1838 1838
1839 1839 buffer_lens = content['buffer_lens']
1840 1840 result_buffer_lens = content['result_buffer_lens']
1841 1841 buffers = msg['buffers']
1842 1842 has_bufs = buffer_lens is not None
1843 1843 has_rbufs = result_buffer_lens is not None
1844 1844 for i,rec in enumerate(records):
1845 1845 # relink buffers
1846 1846 if has_bufs:
1847 1847 blen = buffer_lens[i]
1848 1848 rec['buffers'], buffers = buffers[:blen],buffers[blen:]
1849 1849 if has_rbufs:
1850 1850 blen = result_buffer_lens[i]
1851 1851 rec['result_buffers'], buffers = buffers[:blen],buffers[blen:]
1852 1852
1853 1853 return records
1854 1854
# Explicit public API: only the Client class is exported via `from ... import *`
__all__ = [ 'Client' ]
General Comments 0
You need to be logged in to leave comments. Login now