##// END OF EJS Templates
Merge pull request #1768 from minrk/parallelmagics...
Fernando Perez -
r7060:a1360828 merge
parent child Browse files
Show More
@@ -0,0 +1,342 b''
1 # -*- coding: utf-8 -*-
2 """Test Parallel magics
3
4 Authors:
5
6 * Min RK
7 """
8 #-------------------------------------------------------------------------------
9 # Copyright (C) 2011 The IPython Development Team
10 #
11 # Distributed under the terms of the BSD License. The full license is in
12 # the file COPYING, distributed as part of this software.
13 #-------------------------------------------------------------------------------
14
15 #-------------------------------------------------------------------------------
16 # Imports
17 #-------------------------------------------------------------------------------
18
19 import sys
20 import time
21
22 import zmq
23 from nose import SkipTest
24
25 from IPython.testing import decorators as dec
26 from IPython.testing.ipunittest import ParametricTestCase
27
28 from IPython import parallel as pmod
29 from IPython.parallel import error
30 from IPython.parallel import AsyncResult
31 from IPython.parallel.util import interactive
32
33 from IPython.parallel.tests import add_engines
34
35 from .clienttest import ClusterTestCase, capture_output, generate_output
36
def setup():
    """Module-level test setup: ensure the test cluster has three engines."""
    add_engines(3, total=True)
39
class TestParallelMagics(ClusterTestCase, ParametricTestCase):
    """Tests for the %px, %%px, %autopx and %result parallel magics.

    Each test activates a view on the test cluster and drives the magics
    through the interactive shell, checking the captured stdout/stderr.
    """

    def _check_expected_lines(self, lines, expected, stream_text):
        """Assert each captured line contains its expected substring(s).

        Parameters
        ----------
        lines : list of str
            Captured output lines.
        expected : list
            One entry per line.  An entry is either a single substring, or a
            tuple of substrings that must all occur in the matching line.
        stream_text : str
            The full captured stream, shown in failure messages.
        """
        self.assertEquals(len(lines), len(expected), stream_text)
        for line, expect in zip(lines, expected):
            if isinstance(expect, str):
                expect = [expect]
            for ex in expect:
                self.assertTrue(ex in line, "Expected %r in %r" % (ex, line))

    def _check_stderr(self, io, n_engines):
        """Check the stderr produced by generate_output() on every engine."""
        expected = [
            ('[stderr:', '] stderr'),
            'stderr2',
        ] * n_engines
        lines = io.stderr.strip().splitlines()
        self._check_expected_lines(lines, expected, io.stderr)

    def test_px_blocking(self):
        """%px runs blocking and displays remote stdout and errors."""
        ip = get_ipython()
        v = self.client[-1:]
        v.activate()
        v.block=True

        ip.magic('px a=5')
        self.assertEquals(v['a'], [5])
        ip.magic('px a=10')
        self.assertEquals(v['a'], [10])
        # just 'print a' works ~99% of the time, but this ensures that
        # the stdout message has arrived when the result is finished:
        with capture_output() as io:
            ip.magic(
                'px import sys,time;print(a);sys.stdout.flush();time.sleep(0.2)'
            )
        out = io.stdout
        self.assertTrue('[stdout:' in out, out)
        self.assertTrue(out.rstrip().endswith('10'))
        self.assertRaisesRemote(ZeroDivisionError, ip.magic, 'px 1/0')

    def test_cellpx_block_args(self):
        """%%px --[no]block flags work"""
        ip = get_ipython()
        v = self.client[-1:]
        v.activate()
        v.block=False

        # the default follows the view's blocking state...
        for block in (True, False):
            v.block = block

            with capture_output() as io:
                ip.run_cell_magic("px", "", "1")
            if block:
                self.assertTrue(io.stdout.startswith("Parallel"), io.stdout)
            else:
                self.assertTrue(io.stdout.startswith("Async"), io.stdout)

        # ...but the explicit flags always win.
        with capture_output() as io:
            ip.run_cell_magic("px", "--block", "1")
        self.assertTrue(io.stdout.startswith("Parallel"), io.stdout)

        with capture_output() as io:
            ip.run_cell_magic("px", "--noblock", "1")
        self.assertTrue(io.stdout.startswith("Async"), io.stdout)

    def test_cellpx_groupby_engine(self):
        """%%px --group-outputs=engine"""
        ip = get_ipython()
        v = self.client[:]
        v.block = True
        v.activate()

        v['generate_output'] = generate_output

        with capture_output() as io:
            ip.run_cell_magic('px', '--group-outputs=engine', 'generate_output()')

        # all of one engine's output is grouped together, so the per-line
        # pattern repeats once per engine
        lines = io.stdout.strip().splitlines()[1:]
        expected = [
            ('[stdout:', '] stdout'),
            'stdout2',
            'IPython.core.display.HTML',
            'IPython.core.display.Math',
            ('] Out[', 'IPython.core.display.Math')
        ] * len(v)
        self._check_expected_lines(lines, expected, io.stdout)
        self._check_stderr(io, len(v))

    def test_cellpx_groupby_order(self):
        """%%px --group-outputs=order"""
        ip = get_ipython()
        v = self.client[:]
        v.block = True
        v.activate()

        v['generate_output'] = generate_output

        with capture_output() as io:
            ip.run_cell_magic('px', '--group-outputs=order', 'generate_output()')

        # outputs grouped by type, with display outputs interleaved across
        # engines: first display of each engine, then the second, etc.
        lines = io.stdout.strip().splitlines()[1:]
        expected = []
        expected.extend([
            ('[stdout:', '] stdout'),
            'stdout2',
        ] * len(v))
        expected.extend([
            'IPython.core.display.HTML',
        ] * len(v))
        expected.extend([
            'IPython.core.display.Math',
        ] * len(v))
        expected.extend([
            ('] Out[', 'IPython.core.display.Math')
        ] * len(v))
        self._check_expected_lines(lines, expected, io.stdout)
        self._check_stderr(io, len(v))

    def test_cellpx_groupby_atype(self):
        """%%px --group-outputs=type"""
        ip = get_ipython()
        v = self.client[:]
        v.block = True
        v.activate()

        v['generate_output'] = generate_output

        with capture_output() as io:
            ip.run_cell_magic('px', '--group-outputs=type', 'generate_output()')

        # outputs grouped by type: all stdout first, then each engine's
        # display outputs together, then the Out[] results
        lines = io.stdout.strip().splitlines()[1:]
        expected = []
        expected.extend([
            ('[stdout:', '] stdout'),
            'stdout2',
        ] * len(v))
        expected.extend([
            'IPython.core.display.HTML',
            'IPython.core.display.Math',
        ] * len(v))
        expected.extend([
            ('] Out[', 'IPython.core.display.Math')
        ] * len(v))
        self._check_expected_lines(lines, expected, io.stdout)
        self._check_stderr(io, len(v))

    def test_px_nonblocking(self):
        """%px on a non-blocking view returns an AsyncResult."""
        ip = get_ipython()
        v = self.client[-1:]
        v.activate()
        v.block=False

        ip.magic('px a=5')
        self.assertEquals(v['a'], [5])
        ip.magic('px a=10')
        self.assertEquals(v['a'], [10])
        with capture_output() as io:
            ar = ip.magic('px print (a)')
        self.assertTrue(isinstance(ar, AsyncResult))
        self.assertTrue('Async' in io.stdout)
        self.assertFalse('[stdout:' in io.stdout)
        ar = ip.magic('px 1/0')
        self.assertRaisesRemote(ZeroDivisionError, ar.get)

    def test_autopx_blocking(self):
        """%autopx forwards every cell to the engine and displays results."""
        ip = get_ipython()
        v = self.client[-1]
        v.activate()
        v.block=True

        with capture_output() as io:
            ip.magic('autopx')
            ip.run_cell('\n'.join(('a=5','b=12345','c=0')))
            ip.run_cell('b*=2')
            ip.run_cell('print (b)')
            ip.run_cell('b')
            ip.run_cell("b/c")
            ip.magic('autopx')

        output = io.stdout.strip()

        self.assertTrue(output.startswith('%autopx enabled'), output)
        self.assertTrue(output.endswith('%autopx disabled'), output)
        self.assertTrue('RemoteError: ZeroDivisionError' in output, output)
        self.assertTrue('] Out[' in output, output)
        self.assertTrue(': 24690' in output, output)
        ar = v.get_result(-1)
        self.assertEquals(v['a'], 5)
        self.assertEquals(v['b'], 24690)
        self.assertRaisesRemote(ZeroDivisionError, ar.get)

    def test_autopx_nonblocking(self):
        """Non-blocking %autopx submits cells without waiting for results."""
        ip = get_ipython()
        v = self.client[-1]
        v.activate()
        v.block=False

        with capture_output() as io:
            ip.magic('autopx')
            ip.run_cell('\n'.join(('a=5','b=10','c=0')))
            ip.run_cell('print (b)')
            ip.run_cell('import time; time.sleep(0.1)')
            ip.run_cell("b/c")
            ip.run_cell('b*=2')
            ip.magic('autopx')

        output = io.stdout.strip()

        self.assertTrue(output.startswith('%autopx enabled'))
        self.assertTrue(output.endswith('%autopx disabled'))
        # error is not raised locally in non-blocking mode
        self.assertFalse('ZeroDivisionError' in output)
        ar = v.get_result(-2)
        self.assertRaisesRemote(ZeroDivisionError, ar.get)
        # prevent TaskAborted on pulls, due to ZeroDivisionError
        time.sleep(0.5)
        self.assertEquals(v['a'], 5)
        # b*=2 will not fire, due to abort
        self.assertEquals(v['b'], 10)

    def test_result(self):
        """%result displays the output of a previous request by index."""
        ip = get_ipython()
        v = self.client[-1]
        v.activate()
        data = dict(a=111,b=222)
        v.push(data, block=True)

        ip.magic('px a')
        ip.magic('px b')
        # indices may be relative (negative), absolute, or empty (latest)
        for idx, name in [
            ('', 'b'),
            ('-1', 'b'),
            ('2', 'b'),
            ('1', 'a'),
            ('-2', 'a'),
        ]:
            with capture_output() as io:
                ip.magic('result ' + idx)
            output = io.stdout.strip()
            msg = "expected %s output to include %s, but got: %s" % \
                ('%result '+idx, str(data[name]), output)
            self.assertTrue(str(data[name]) in output, msg)

    @dec.skipif_not_matplotlib
    def test_px_pylab(self):
        """%pylab works on engines"""
        ip = get_ipython()
        v = self.client[-1]
        v.block = True
        v.activate()

        with capture_output() as io:
            ip.magic("px %pylab inline")

        self.assertTrue("Welcome to pylab" in io.stdout, io.stdout)
        self.assertTrue("backend_inline" in io.stdout, io.stdout)

        with capture_output() as io:
            ip.magic("px plot(rand(100))")

        self.assertTrue('] Out[' in io.stdout, io.stdout)
        self.assertTrue('matplotlib.lines' in io.stdout, io.stdout)
342
@@ -0,0 +1,228 b''
1 {
2 "metadata": {
3 "name": "Parallel Magics"
4 },
5 "nbformat": 3,
6 "worksheets": [
7 {
8 "cells": [
9 {
10 "cell_type": "heading",
11 "level": 1,
12 "source": [
13 "Using Parallel Magics"
14 ]
15 },
16 {
17 "cell_type": "markdown",
18 "source": [
19 "IPython has a few magics for working with your engines.",
20 "",
21 "This assumes you have started an IPython cluster, either with the notebook interface,",
22 "or the `ipcluster/controller/engine` commands."
23 ]
24 },
25 {
26 "cell_type": "code",
27 "collapsed": false,
28 "input": [
29 "from IPython import parallel",
30 "rc = parallel.Client()",
31 "dv = rc[:]",
32 "dv.block = True",
33 "dv"
34 ],
35 "language": "python",
36 "outputs": []
37 },
38 {
39 "cell_type": "markdown",
40 "source": [
41 "The parallel magics come from the `parallelmagics` IPython extension.",
42 "The magics are set to work with a particular View object,",
43 "so to activate them, you call the `activate()` method on a particular view:"
44 ]
45 },
46 {
47 "cell_type": "code",
48 "collapsed": true,
49 "input": [
50 "dv.activate()"
51 ],
52 "language": "python",
53 "outputs": []
54 },
55 {
56 "cell_type": "markdown",
57 "source": [
58 "Now we can execute code remotely with `%px`:"
59 ]
60 },
61 {
62 "cell_type": "code",
63 "collapsed": false,
64 "input": [
65 "%px a=5"
66 ],
67 "language": "python",
68 "outputs": []
69 },
70 {
71 "cell_type": "code",
72 "collapsed": false,
73 "input": [
74 "%px print a"
75 ],
76 "language": "python",
77 "outputs": []
78 },
79 {
80 "cell_type": "code",
81 "collapsed": false,
82 "input": [
83 "%px a"
84 ],
85 "language": "python",
86 "outputs": []
87 },
88 {
89 "cell_type": "markdown",
90 "source": [
91 "You don't have to wait for results:"
92 ]
93 },
94 {
95 "cell_type": "code",
96 "collapsed": true,
97 "input": [
98 "dv.block = False"
99 ],
100 "language": "python",
101 "outputs": []
102 },
103 {
104 "cell_type": "code",
105 "collapsed": false,
106 "input": [
107 "%px import time",
108 "%px time.sleep(5)",
109 "%px time.time()"
110 ],
111 "language": "python",
112 "outputs": []
113 },
114 {
115 "cell_type": "markdown",
116 "source": [
117 "But you will notice that this didn't output the result of the last command.",
118 "For this, we have `%result`, which displays the output of the latest request:"
119 ]
120 },
121 {
122 "cell_type": "code",
123 "collapsed": false,
124 "input": [
125 "%result"
126 ],
127 "language": "python",
128 "outputs": []
129 },
130 {
131 "cell_type": "markdown",
132 "source": [
133 "Remember, an IPython engine is IPython, so you can do magics remotely as well!"
134 ]
135 },
136 {
137 "cell_type": "code",
138 "collapsed": false,
139 "input": [
140 "dv.block = True",
141 "%px %pylab inline"
142 ],
143 "language": "python",
144 "outputs": []
145 },
146 {
147 "cell_type": "markdown",
148 "source": [
149 "`%%px` can also be used as a cell magic, for submitting whole blocks.",
150         "This one accepts `--block` and `--noblock` flags to specify",
151 "the blocking behavior, though the default is unchanged.",
152 ""
153 ]
154 },
155 {
156 "cell_type": "code",
157 "collapsed": true,
158 "input": [
159 "dv.scatter('id', dv.targets, flatten=True)",
160 "dv['stride'] = len(dv)"
161 ],
162 "language": "python",
163 "outputs": []
164 },
165 {
166 "cell_type": "code",
167 "collapsed": false,
168 "input": [
169 "%%px --noblock",
170 "x = linspace(0,pi,1000)",
171 "for n in range(id,12, stride):",
172 " print n",
173 " plt.plot(x,sin(n*x))",
174 "plt.title(\"Plot %i\" % id)"
175 ],
176 "language": "python",
177 "outputs": []
178 },
179 {
180 "cell_type": "code",
181 "collapsed": false,
182 "input": [
183 "%result"
184 ],
185 "language": "python",
186 "outputs": []
187 },
188 {
189 "cell_type": "markdown",
190 "source": [
191 "It also lets you choose some amount of the grouping of the outputs with `--group-outputs`:",
192 "",
193 "The choices are:",
194 "",
195 "* `engine` - all of an engine's output is collected together",
196 "* `type` - where stdout of each engine is grouped, etc. (the default)",
197 "* `order` - same as `type`, but individual displaypub outputs are interleaved.",
198 " That is, it will output the first plot from each engine, then the second from each,",
199 " etc."
200 ]
201 },
202 {
203 "cell_type": "code",
204 "collapsed": false,
205 "input": [
206 "%%px --group-outputs=engine",
207 "x = linspace(0,pi,1000)",
208 "for n in range(id,12, stride):",
209 " print n",
210 " plt.plot(x,sin(n*x))",
211 "plt.title(\"Plot %i\" % id)"
212 ],
213 "language": "python",
214 "outputs": []
215 },
216 {
217 "cell_type": "code",
218 "collapsed": true,
219 "input": [
220 ""
221 ],
222 "language": "python",
223 "outputs": []
224 }
225 ]
226 }
227 ]
228 } No newline at end of file
@@ -1,576 +1,576 b''
1 1 # encoding: utf-8
2 2 """Magic functions for InteractiveShell.
3 3 """
4 4
5 5 #-----------------------------------------------------------------------------
6 6 # Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
7 7 # Copyright (C) 2001 Fernando Perez <fperez@colorado.edu>
8 8 # Copyright (C) 2008 The IPython Development Team
9 9
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-----------------------------------------------------------------------------
13 13
14 14 #-----------------------------------------------------------------------------
15 15 # Imports
16 16 #-----------------------------------------------------------------------------
17 17 # Stdlib
18 18 import os
19 19 import re
20 20 import sys
21 21 import types
22 22 from getopt import getopt, GetoptError
23 23
24 24 # Our own
25 25 from IPython.config.configurable import Configurable
26 26 from IPython.core import oinspect
27 27 from IPython.core.error import UsageError
28 28 from IPython.core.prefilter import ESC_MAGIC
29 29 from IPython.external.decorator import decorator
30 30 from IPython.utils.ipstruct import Struct
31 31 from IPython.utils.process import arg_split
32 32 from IPython.utils.text import dedent
33 33 from IPython.utils.traitlets import Bool, Dict, Instance
34 34 from IPython.utils.warn import error, warn
35 35
36 36 #-----------------------------------------------------------------------------
37 37 # Globals
38 38 #-----------------------------------------------------------------------------
39 39
40 40 # A dict we'll use for each class that has magics, used as temporary storage to
41 41 # pass information between the @line/cell_magic method decorators and the
42 42 # @magics_class class decorator, because the method decorators have no
43 43 # access to the class when they run. See for more details:
44 44 # http://stackoverflow.com/questions/2366713/can-a-python-decorator-of-an-instance-method-access-the-class
45 45
# Temporary per-class storage for the method decorators (see comment above).
magics = {'line': {}, 'cell': {}}

# The two kinds of magic we support, and the full set of accepted spec names.
magic_kinds = ('line', 'cell')
magic_spec = ('line', 'cell', 'line_cell')
50 50
51 51 #-----------------------------------------------------------------------------
52 52 # Utility classes and functions
53 53 #-----------------------------------------------------------------------------
54 54
class Bunch:
    """Empty class used as a simple attribute container."""
    pass
56 56
57 57
def on_off(tag):
    """Return an ON/OFF string for a 1/0 input. Simple utility function."""
    states = ('OFF', 'ON')
    return states[tag]
61 61
62 62
def compress_dhist(dh):
    """Compress a directory history into a new one with at most 20 entries.

    Return a new list made from the first and last 10 elements of dhist after
    removal of duplicates.
    """
    # The most recent 10 entries are always kept verbatim; only the older
    # part of the history is de-duplicated.
    head, tail = dh[:-10], dh[-10:]

    seen = set()
    deduped = []
    for entry in head:
        if entry not in seen:
            seen.add(entry)
            deduped.append(entry)

    return deduped + tail
80 80
81 81
def needs_local_scope(func):
    """Decorator to mark magic functions which need the local scope to run."""
    setattr(func, 'needs_local_scope', True)
    return func
86 86
87 87 #-----------------------------------------------------------------------------
88 88 # Class and method decorators for registering magics
89 89 #-----------------------------------------------------------------------------
90 90
def magics_class(cls):
    """Class decorator for all subclasses of the main Magics class.

    Any class that subclasses Magics *must* also apply this decorator, to
    ensure that all the methods that have been decorated as line/cell magics
    get correctly registered in the class instance.  This is necessary because
    when method decorators run, the class does not exist yet, so they
    temporarily store their information into a module global.  Application of
    this class decorator copies that global data to the class instance and
    clears the global.

    Obviously, this mechanism is not thread-safe, which means that the
    *creation* of subclasses of Magic should only be done in a single-thread
    context.  Instantiation of the classes has no restrictions.  Given that
    these classes are typically created at IPython startup time and before
    user application code becomes active, in practice this should not pose
    any problems.
    """
    cls.registered = True
    # Hand the accumulated magics over to the class, then reset the module
    # global so the next Magics subclass starts from a clean slate.
    cls.magics = {'line': magics['line'], 'cell': magics['cell']}
    magics['line'] = {}
    magics['cell'] = {}
    return cls
115 115
116 116
def record_magic(dct, magic_kind, magic_name, func):
    """Utility function to store a function as a magic of a specific kind.

    Parameters
    ----------
    dct : dict
      A dictionary with 'line' and 'cell' subdicts.

    magic_kind : str
      Kind of magic to be stored.

    magic_name : str
      Key to store the magic as.

    func : function
      Callable object to store.
    """
    # 'line_cell' registers under both subdicts; anything else under its own.
    kinds = ('line', 'cell') if magic_kind == 'line_cell' else (magic_kind,)
    for kind in kinds:
        dct[kind][magic_name] = func
138 138
139 139
def validate_type(magic_kind):
    """Ensure that the given magic_kind is valid.

    Check that the given magic_kind is one of the accepted spec types (stored
    in the global `magic_spec`), raise ValueError otherwise.
    """
    if magic_kind not in magic_spec:
        # Bug fix: the original interpolated only `magic_kinds` (the message
        # then read "must be one of line, cell given") and passed `magic_kind`
        # as a stray second ValueError argument.  Format both values properly,
        # and report the accepted set (`magic_spec`) as the docstring states.
        raise ValueError('magic_kind must be one of %s, %s given' %
                         (magic_spec, magic_kind))
149 149
150 150
151 151 # The docstrings for the decorator below will be fairly similar for the two
152 152 # types (method and function), so we generate them here once and reuse the
153 153 # templates below.
154 154 _docstring_template = \
155 155 """Decorate the given {0} as {1} magic.
156 156
157 157 The decorator can be used with or without arguments, as follows.
158 158
159 159 i) without arguments: it will create a {1} magic named as the {0} being
160 160 decorated::
161 161
162 162 @deco
163 163 def foo(...)
164 164
165 165 will create a {1} magic named `foo`.
166 166
167 167 ii) with one string argument: which will be used as the actual name of the
168 168 resulting magic::
169 169
170 170 @deco('bar')
171 171 def foo(...)
172 172
173 173 will create a {1} magic named `bar`.
174 174 """
175 175
176 176 # These two are decorator factories. While they are conceptually very similar,
177 177 # there are enough differences in the details that it's simpler to have them
178 178 # written as completely standalone functions rather than trying to share code
179 179 # and make a single one with convoluted logic.
180 180
def _method_magic_marker(magic_kind):
    """Decorator factory for methods in Magics subclasses.
    """

    validate_type(magic_kind)

    # Closure capturing magic_kind; a class would be overkill for just this
    # one bit of state.
    def magic_deco(arg):
        call = lambda f, *a, **k: f(*a, **k)

        if callable(arg):
            # "Naked" decorator call (just @foo, no args).  Record the method
            # *name*; it is replaced by the bound method later, when the
            # Magics instance is constructed.
            func = arg
            name = func.func_name
            record_magic(magics, magic_kind, name, name)
            return decorator(call, func)

        if isinstance(arg, basestring):
            # Decorator called with arguments (@foo('bar'))
            name = arg
            def mark(func, *a, **kw):
                record_magic(magics, magic_kind, name, func.func_name)
                return decorator(call, func)
            return mark

        raise TypeError("Decorator can only be called with "
                        "string or function")

    # Ensure the resulting decorator has a usable docstring
    magic_deco.__doc__ = _docstring_template.format('method', magic_kind)
    return magic_deco
213 213
214 214
def _function_magic_marker(magic_kind):
    """Decorator factory for standalone functions.
    """
    validate_type(magic_kind)

    # Closure capturing magic_kind; a class would be overkill for just this
    # one bit of state.
    def magic_deco(arg):
        call = lambda f, *a, **k: f(*a, **k)

        # Find get_ipython() in the caller's namespace
        caller = sys._getframe(1)
        get_ipython = None
        for ns in ('f_locals', 'f_globals', 'f_builtins'):
            get_ipython = getattr(caller, ns).get('get_ipython')
            if get_ipython is not None:
                break
        if get_ipython is None:
            raise NameError('Decorator can only run in context where '
                            '`get_ipython` exists')

        ip = get_ipython()

        if callable(arg):
            # "Naked" decorator call (just @foo, no args)
            func = arg
            name = func.func_name
            ip.register_magic_function(func, magic_kind, name)
            return decorator(call, func)

        if isinstance(arg, basestring):
            # Decorator called with arguments (@foo('bar'))
            name = arg
            def mark(func, *a, **kw):
                ip.register_magic_function(func, magic_kind, name)
                return decorator(call, func)
            return mark

        raise TypeError("Decorator can only be called with "
                        "string or function")

    # Ensure the resulting decorator has a usable docstring
    ds = _docstring_template.format('function', magic_kind)

    ds += dedent("""
    Note: this decorator can only be used in a context where IPython is already
    active, so that the `get_ipython()` call succeeds. You can therefore use
    it in your startup files loaded after IPython initializes, but *not* in the
    IPython configuration file itself, which is executed before IPython is
    fully up and running. Any file located in the `startup` subdirectory of
    your configuration profile will be OK in this sense.
    """)

    magic_deco.__doc__ = ds
    return magic_deco
269 269
270 270
# Create the actual decorators for public use

# These three are used to decorate methods in class definitions; the names
# they record are bound to methods when the Magics instance is built.
line_magic = _method_magic_marker('line')
cell_magic = _method_magic_marker('cell')
line_cell_magic = _method_magic_marker('line_cell')

# These three decorate standalone functions and perform the registration
# immediately, so they can only run where get_ipython() works.
register_line_magic = _function_magic_marker('line')
register_cell_magic = _function_magic_marker('cell')
register_line_cell_magic = _function_magic_marker('line_cell')
283 283
284 284 #-----------------------------------------------------------------------------
285 285 # Core Magic classes
286 286 #-----------------------------------------------------------------------------
287 287
class MagicsManager(Configurable):
    """Object that handles all magic-related functionality for IPython.
    """
    # Non-configurable class attributes

    # A two-level dict, first keyed by magic type, then by magic function, and
    # holding the actual callable object as value. This is the dict used for
    # magic function dispatch
    magics = Dict

    # A registry of the original objects that we've been given holding magics.
    registry = Dict

    # The shell instance this manager is attached to.
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')

    auto_magic = Bool(True, config=True, help=
        "Automatically call line magics without requiring explicit % prefix")

    # Human-readable automagic status, indexed by the auto_magic flag.
    _auto_status = [
        'Automagic is OFF, % prefix IS needed for line magics.',
        'Automagic is ON, % prefix IS NOT needed for line magics.']

    user_magics = Instance('IPython.core.magics.UserMagics')

    def __init__(self, shell=None, config=None, user_magics=None, **traits):

        super(MagicsManager, self).__init__(shell=shell, config=config,
                                           user_magics=user_magics, **traits)
        self.magics = dict(line={}, cell={})
        # Let's add the user_magics to the registry for uniformity, so *all*
        # registered magic containers can be found there.
        self.registry[user_magics.__class__.__name__] = user_magics

    def auto_status(self):
        """Return descriptive string with automagic status."""
        return self._auto_status[self.auto_magic]

    def lsmagic(self):
        """Return a dict of currently available magic functions.

        The return dict has the keys 'line' and 'cell', corresponding to the
        two types of magics we support.  Each value is a list of names.
        """
        return self.magics

    def register(self, *magic_objects):
        """Register one or more instances of Magics.

        Take one or more classes or instances of classes that subclass the main
        `core.Magic` class, and register them with IPython to use the magic
        functions they provide.  The registration process will then ensure that
        any methods that have decorated to provide line and/or cell magics will
        be recognized with the `%x`/`%%x` syntax as a line/cell magic
        respectively.

        If classes are given, they will be instantiated with the default
        constructor.  If your classes need a custom constructor, you should
        instantiate them first and pass the instance.

        The provided arguments can be an arbitrary mix of classes and instances.

        Parameters
        ----------
        magic_objects : one or more classes or instances
        """
        # Start by validating them to ensure they have all had their magic
        # methods registered at the instance level
        for m in magic_objects:
            if not m.registered:
                # Bug fix: the original left the %r placeholder unformatted
                # (no `% m`) and named a nonexistent "@register_macics"
                # decorator; the decorator defined in this module is
                # @magics_class.
                raise ValueError("Class of magics %r was constructed without "
                                 "the @magics_class class decorator" % m)
            if type(m) is type:
                # If we're given an uninstantiated class
                m = m(self.shell)

            # Now that we have an instance, we can register it and update the
            # table of callables
            self.registry[m.__class__.__name__] = m
            for mtype in magic_kinds:
                self.magics[mtype].update(m.magics[mtype])

    def register_function(self, func, magic_kind='line', magic_name=None):
        """Expose a standalone function as magic function for IPython.

        This will create an IPython magic (line, cell or both) from a
        standalone function.  The functions should have the following
        signatures:

        * For line magics: `def f(line)`
        * For cell magics: `def f(line, cell)`
        * For a function that does both: `def f(line, cell=None)`

        In the latter case, the function will be called with `cell==None` when
        invoked as `%f`, and with cell as a string when invoked as `%%f`.

        Parameters
        ----------
        func : callable
          Function to be registered as a magic.

        magic_kind : str
          Kind of magic, one of 'line', 'cell' or 'line_cell'

        magic_name : optional str
          If given, the name the magic will have in the IPython namespace.  By
          default, the name of the function itself is used.
        """

        # Create the new method in the user_magics and register it in the
        # global table
        validate_type(magic_kind)
        magic_name = func.func_name if magic_name is None else magic_name
        setattr(self.user_magics, magic_name, func)
        record_magic(self.magics, magic_kind, magic_name, func)

    def define_magic(self, name, func):
        """[Deprecated] Expose own function as magic function for IPython.

        Example::

            def foo_impl(self, parameter_s=''):
                'My very own magic!. (Use docstrings, IPython reads them).'
                print 'Magic function. Passed parameter is between < >:'
                print '<%s>' % parameter_s
                print 'The self object is:', self

            ip.define_magic('foo',foo_impl)
        """
        # Bind the function to user_magics so it behaves like a method there.
        meth = types.MethodType(func, self.user_magics)
        setattr(self.user_magics, name, meth)
        record_magic(self.magics, 'line', name, meth)
419 419
420 420 # Key base class that provides the central functionality for magics.
421 421
422 422 class Magics(object):
423 423 """Base class for implementing magic functions.
424 424
425 425 Shell functions which can be reached as %function_name. All magic
426 426 functions should accept a string, which they can parse for their own
427 427 needs. This can make some functions easier to type, eg `%cd ../`
428 428 vs. `%cd("../")`
429 429
430 430 Classes providing magic functions need to subclass this class, and they
431 431 MUST:
432 432
433 433 - Use the method decorators `@line_magic` and `@cell_magic` to decorate
434 434 individual methods as magic functions, AND
435 435
436 436 - Use the class decorator `@magics_class` to ensure that the magic
437 437 methods are properly registered at the instance level upon instance
438 438 initialization.
439 439
440 440 See :mod:`magic_functions` for examples of actual implementation classes.
441 441 """
442 442 # Dict holding all command-line options for each magic.
443 443 options_table = None
444 444 # Dict for the mapping of magic names to methods, set by class decorator
445 445 magics = None
446 446 # Flag to check that the class decorator was properly applied
447 447 registered = False
448 448 # Instance of IPython shell
449 449 shell = None
450 450
451 451 def __init__(self, shell):
452 452 if not(self.__class__.registered):
453 453 raise ValueError('Magics subclass without registration - '
454 454 'did you forget to apply @magics_class?')
455 455 self.shell = shell
456 456 self.options_table = {}
457 457 # The method decorators are run when the instance doesn't exist yet, so
458 458 # they can only record the names of the methods they are supposed to
459 459 # grab. Only now, that the instance exists, can we create the proper
460 460 # mapping to bound methods. So we read the info off the original names
461 461 # table and replace each method name by the actual bound method.
462 462 for mtype in magic_kinds:
463 463 tab = self.magics[mtype]
464 464 # must explicitly use keys, as we're mutating this puppy
465 465 for magic_name in tab.keys():
466 466 meth_name = tab[magic_name]
467 467 if isinstance(meth_name, basestring):
468 468 tab[magic_name] = getattr(self, meth_name)
469 469
470 470 def arg_err(self,func):
471 471 """Print docstring if incorrect arguments were passed"""
472 472 print 'Error in arguments:'
473 473 print oinspect.getdoc(func)
474 474
475 475 def format_latex(self, strng):
476 476 """Format a string for latex inclusion."""
477 477
478 478 # Characters that need to be escaped for latex:
479 479 escape_re = re.compile(r'(%|_|\$|#|&)',re.MULTILINE)
480 480 # Magic command names as headers:
481 481 cmd_name_re = re.compile(r'^(%s.*?):' % ESC_MAGIC,
482 482 re.MULTILINE)
483 483 # Magic commands
484 484 cmd_re = re.compile(r'(?P<cmd>%s.+?\b)(?!\}\}:)' % ESC_MAGIC,
485 485 re.MULTILINE)
486 486 # Paragraph continue
487 487 par_re = re.compile(r'\\$',re.MULTILINE)
488 488
489 489 # The "\n" symbol
490 490 newline_re = re.compile(r'\\n')
491 491
492 492 # Now build the string for output:
493 493 #strng = cmd_name_re.sub(r'\n\\texttt{\\textsl{\\large \1}}:',strng)
494 494 strng = cmd_name_re.sub(r'\n\\bigskip\n\\texttt{\\textbf{ \1}}:',
495 495 strng)
496 496 strng = cmd_re.sub(r'\\texttt{\g<cmd>}',strng)
497 497 strng = par_re.sub(r'\\\\',strng)
498 498 strng = escape_re.sub(r'\\\1',strng)
499 499 strng = newline_re.sub(r'\\textbackslash{}n',strng)
500 500 return strng
501 501
502 502 def parse_options(self, arg_str, opt_str, *long_opts, **kw):
503 503 """Parse options passed to an argument string.
504 504
505 505 The interface is similar to that of getopt(), but it returns back a
506 506 Struct with the options as keys and the stripped argument string still
507 507 as a string.
508 508
509 509 arg_str is quoted as a true sys.argv vector by using shlex.split.
510 510 This allows us to easily expand variables, glob files, quote
511 511 arguments, etc.
512 512
513 513 Options:
514 514 -mode: default 'string'. If given as 'list', the argument string is
515 515 returned as a list (split on whitespace) instead of a string.
516 516
517 517 -list_all: put all option values in lists. Normally only options
518 518 appearing more than once are put in a list.
519 519
520 520 -posix (True): whether to split the input line in POSIX mode or not,
521 521 as per the conventions outlined in the shlex module from the
522 522 standard library."""
523 523
524 524 # inject default options at the beginning of the input line
525 525 caller = sys._getframe(1).f_code.co_name
526 526 arg_str = '%s %s' % (self.options_table.get(caller,''),arg_str)
527 527
528 528 mode = kw.get('mode','string')
529 529 if mode not in ['string','list']:
530 530 raise ValueError,'incorrect mode given: %s' % mode
531 531 # Get options
532 532 list_all = kw.get('list_all',0)
533 533 posix = kw.get('posix', os.name == 'posix')
534 534 strict = kw.get('strict', True)
535 535
536 536 # Check if we have more than one argument to warrant extra processing:
537 537 odict = {} # Dictionary with options
538 538 args = arg_str.split()
539 539 if len(args) >= 1:
540 540 # If the list of inputs only has 0 or 1 thing in it, there's no
541 541 # need to look for options
542 542 argv = arg_split(arg_str, posix, strict)
543 543 # Do regular option processing
544 544 try:
545 opts,args = getopt(argv,opt_str,*long_opts)
545 opts,args = getopt(argv, opt_str, long_opts)
546 546 except GetoptError,e:
547 547 raise UsageError('%s ( allowed: "%s" %s)' % (e.msg,opt_str,
548 548 " ".join(long_opts)))
549 549 for o,a in opts:
550 550 if o.startswith('--'):
551 551 o = o[2:]
552 552 else:
553 553 o = o[1:]
554 554 try:
555 555 odict[o].append(a)
556 556 except AttributeError:
557 557 odict[o] = [odict[o],a]
558 558 except KeyError:
559 559 if list_all:
560 560 odict[o] = [a]
561 561 else:
562 562 odict[o] = a
563 563
564 564 # Prepare opts,args for return
565 565 opts = Struct(odict)
566 566 if mode == 'string':
567 567 args = ' '.join(args)
568 568
569 569 return opts,args
570 570
571 571 def default_option(self, fn, optstr):
572 572 """Make an entry in the options_table for fn, with value optstr"""
573 573
574 574 if fn not in self.lsmagic():
575 575 error("%s is not a magic function" % fn)
576 576 self.options_table[fn] = optstr
@@ -1,547 +1,556 b''
1 1 # -*- coding: utf-8 -*-
2 2 """Tests for various magic functions.
3 3
4 4 Needs to be run by nose (to make ipython session available).
5 5 """
6 6 from __future__ import absolute_import
7 7
8 8 #-----------------------------------------------------------------------------
9 9 # Imports
10 10 #-----------------------------------------------------------------------------
11 11
12 12 import io
13 13 import os
14 14 import sys
15 15 from StringIO import StringIO
16 16 from unittest import TestCase
17 17
18 18 import nose.tools as nt
19 19
20 20 from IPython.core import magic
21 21 from IPython.core.magic import (Magics, magics_class, line_magic,
22 22 cell_magic, line_cell_magic,
23 23 register_line_magic, register_cell_magic,
24 24 register_line_cell_magic)
25 25 from IPython.core.magics import execution
26 26 from IPython.nbformat.v3.tests.nbexamples import nb0
27 27 from IPython.nbformat import current
28 28 from IPython.testing import decorators as dec
29 29 from IPython.testing import tools as tt
30 30 from IPython.utils import py3compat
31 31 from IPython.utils.tempdir import TemporaryDirectory
32 32
33 33 #-----------------------------------------------------------------------------
34 34 # Test functions begin
35 35 #-----------------------------------------------------------------------------
36 36
37 37 @magic.magics_class
38 38 class DummyMagics(magic.Magics): pass
39 39
40 40 def test_rehashx():
41 41 # clear up everything
42 42 _ip = get_ipython()
43 43 _ip.alias_manager.alias_table.clear()
44 44 del _ip.db['syscmdlist']
45 45
46 46 _ip.magic('rehashx')
47 47 # Practically ALL ipython development systems will have more than 10 aliases
48 48
49 49 yield (nt.assert_true, len(_ip.alias_manager.alias_table) > 10)
50 50 for key, val in _ip.alias_manager.alias_table.iteritems():
51 51 # we must strip dots from alias names
52 52 nt.assert_true('.' not in key)
53 53
54 54 # rehashx must fill up syscmdlist
55 55 scoms = _ip.db['syscmdlist']
56 56 yield (nt.assert_true, len(scoms) > 10)
57 57
58 58
59 59 def test_magic_parse_options():
60 60 """Test that we don't mangle paths when parsing magic options."""
61 61 ip = get_ipython()
62 62 path = 'c:\\x'
63 63 m = DummyMagics(ip)
64 64 opts = m.parse_options('-f %s' % path,'f:')[0]
65 65 # argv splitting is os-dependent
66 66 if os.name == 'posix':
67 67 expected = 'c:x'
68 68 else:
69 69 expected = path
70 70 nt.assert_equals(opts['f'], expected)
71 71
72 def test_magic_parse_long_options():
73 """Magic.parse_options can handle --foo=bar long options"""
74 ip = get_ipython()
75 m = DummyMagics(ip)
76 opts, _ = m.parse_options('--foo --bar=bubble', 'a', 'foo', 'bar=')
77 nt.assert_true('foo' in opts)
78 nt.assert_true('bar' in opts)
79 nt.assert_true(opts['bar'], "bubble")
80
72 81
73 82 @dec.skip_without('sqlite3')
74 83 def doctest_hist_f():
75 84 """Test %hist -f with temporary filename.
76 85
77 86 In [9]: import tempfile
78 87
79 88 In [10]: tfile = tempfile.mktemp('.py','tmp-ipython-')
80 89
81 90 In [11]: %hist -nl -f $tfile 3
82 91
83 92 In [13]: import os; os.unlink(tfile)
84 93 """
85 94
86 95
87 96 @dec.skip_without('sqlite3')
88 97 def doctest_hist_r():
89 98 """Test %hist -r
90 99
91 100 XXX - This test is not recording the output correctly. For some reason, in
92 101 testing mode the raw history isn't getting populated. No idea why.
93 102 Disabling the output checking for now, though at least we do run it.
94 103
95 104 In [1]: 'hist' in _ip.lsmagic()
96 105 Out[1]: True
97 106
98 107 In [2]: x=1
99 108
100 109 In [3]: %hist -rl 2
101 110 x=1 # random
102 111 %hist -r 2
103 112 """
104 113
105 114
106 115 @dec.skip_without('sqlite3')
107 116 def doctest_hist_op():
108 117 """Test %hist -op
109 118
110 119 In [1]: class b(float):
111 120 ...: pass
112 121 ...:
113 122
114 123 In [2]: class s(object):
115 124 ...: def __str__(self):
116 125 ...: return 's'
117 126 ...:
118 127
119 128 In [3]:
120 129
121 130 In [4]: class r(b):
122 131 ...: def __repr__(self):
123 132 ...: return 'r'
124 133 ...:
125 134
126 135 In [5]: class sr(s,r): pass
127 136 ...:
128 137
129 138 In [6]:
130 139
131 140 In [7]: bb=b()
132 141
133 142 In [8]: ss=s()
134 143
135 144 In [9]: rr=r()
136 145
137 146 In [10]: ssrr=sr()
138 147
139 148 In [11]: 4.5
140 149 Out[11]: 4.5
141 150
142 151 In [12]: str(ss)
143 152 Out[12]: 's'
144 153
145 154 In [13]:
146 155
147 156 In [14]: %hist -op
148 157 >>> class b:
149 158 ... pass
150 159 ...
151 160 >>> class s(b):
152 161 ... def __str__(self):
153 162 ... return 's'
154 163 ...
155 164 >>>
156 165 >>> class r(b):
157 166 ... def __repr__(self):
158 167 ... return 'r'
159 168 ...
160 169 >>> class sr(s,r): pass
161 170 >>>
162 171 >>> bb=b()
163 172 >>> ss=s()
164 173 >>> rr=r()
165 174 >>> ssrr=sr()
166 175 >>> 4.5
167 176 4.5
168 177 >>> str(ss)
169 178 's'
170 179 >>>
171 180 """
172 181
173 182
174 183 @dec.skip_without('sqlite3')
175 184 def test_macro():
176 185 ip = get_ipython()
177 186 ip.history_manager.reset() # Clear any existing history.
178 187 cmds = ["a=1", "def b():\n return a**2", "print(a,b())"]
179 188 for i, cmd in enumerate(cmds, start=1):
180 189 ip.history_manager.store_inputs(i, cmd)
181 190 ip.magic("macro test 1-3")
182 191 nt.assert_equal(ip.user_ns["test"].value, "\n".join(cmds)+"\n")
183 192
184 193 # List macros.
185 194 assert "test" in ip.magic("macro")
186 195
187 196
188 197 @dec.skip_without('sqlite3')
189 198 def test_macro_run():
190 199 """Test that we can run a multi-line macro successfully."""
191 200 ip = get_ipython()
192 201 ip.history_manager.reset()
193 202 cmds = ["a=10", "a+=1", py3compat.doctest_refactor_print("print a"),
194 203 "%macro test 2-3"]
195 204 for cmd in cmds:
196 205 ip.run_cell(cmd, store_history=True)
197 206 nt.assert_equal(ip.user_ns["test"].value,
198 207 py3compat.doctest_refactor_print("a+=1\nprint a\n"))
199 208 with tt.AssertPrints("12"):
200 209 ip.run_cell("test")
201 210 with tt.AssertPrints("13"):
202 211 ip.run_cell("test")
203 212
204 213
205 214 @dec.skipif_not_numpy
206 215 def test_numpy_reset_array_undec():
207 216 "Test '%reset array' functionality"
208 217 _ip.ex('import numpy as np')
209 218 _ip.ex('a = np.empty(2)')
210 219 yield (nt.assert_true, 'a' in _ip.user_ns)
211 220 _ip.magic('reset -f array')
212 221 yield (nt.assert_false, 'a' in _ip.user_ns)
213 222
214 223 def test_reset_out():
215 224 "Test '%reset out' magic"
216 225 _ip.run_cell("parrot = 'dead'", store_history=True)
217 226 # test '%reset -f out', make an Out prompt
218 227 _ip.run_cell("parrot", store_history=True)
219 228 nt.assert_true('dead' in [_ip.user_ns[x] for x in '_','__','___'])
220 229 _ip.magic('reset -f out')
221 230 nt.assert_false('dead' in [_ip.user_ns[x] for x in '_','__','___'])
222 231 nt.assert_true(len(_ip.user_ns['Out']) == 0)
223 232
224 233 def test_reset_in():
225 234 "Test '%reset in' magic"
226 235 # test '%reset -f in'
227 236 _ip.run_cell("parrot", store_history=True)
228 237 nt.assert_true('parrot' in [_ip.user_ns[x] for x in '_i','_ii','_iii'])
229 238 _ip.magic('%reset -f in')
230 239 nt.assert_false('parrot' in [_ip.user_ns[x] for x in '_i','_ii','_iii'])
231 240 nt.assert_true(len(set(_ip.user_ns['In'])) == 1)
232 241
233 242 def test_reset_dhist():
234 243 "Test '%reset dhist' magic"
235 244 _ip.run_cell("tmp = [d for d in _dh]") # copy before clearing
236 245 _ip.magic('cd ' + os.path.dirname(nt.__file__))
237 246 _ip.magic('cd -')
238 247 nt.assert_true(len(_ip.user_ns['_dh']) > 0)
239 248 _ip.magic('reset -f dhist')
240 249 nt.assert_true(len(_ip.user_ns['_dh']) == 0)
241 250 _ip.run_cell("_dh = [d for d in tmp]") #restore
242 251
243 252 def test_reset_in_length():
244 253 "Test that '%reset in' preserves In[] length"
245 254 _ip.run_cell("print 'foo'")
246 255 _ip.run_cell("reset -f in")
247 256 nt.assert_true(len(_ip.user_ns['In']) == _ip.displayhook.prompt_count+1)
248 257
249 258 def test_time():
250 259 _ip.magic('time None')
251 260
252 261 def test_tb_syntaxerror():
253 262 """test %tb after a SyntaxError"""
254 263 ip = get_ipython()
255 264 ip.run_cell("for")
256 265
257 266 # trap and validate stdout
258 267 save_stdout = sys.stdout
259 268 try:
260 269 sys.stdout = StringIO()
261 270 ip.run_cell("%tb")
262 271 out = sys.stdout.getvalue()
263 272 finally:
264 273 sys.stdout = save_stdout
265 274 # trim output, and only check the last line
266 275 last_line = out.rstrip().splitlines()[-1].strip()
267 276 nt.assert_equals(last_line, "SyntaxError: invalid syntax")
268 277
269 278
270 279 @py3compat.doctest_refactor_print
271 280 def doctest_time():
272 281 """
273 282 In [10]: %time None
274 283 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
275 284 Wall time: 0.00 s
276 285
277 286 In [11]: def f(kmjy):
278 287 ....: %time print 2*kmjy
279 288
280 289 In [12]: f(3)
281 290 6
282 291 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
283 292 Wall time: 0.00 s
284 293 """
285 294
286 295
287 296 def test_doctest_mode():
288 297 "Toggle doctest_mode twice, it should be a no-op and run without error"
289 298 _ip.magic('doctest_mode')
290 299 _ip.magic('doctest_mode')
291 300
292 301
293 302 def test_parse_options():
294 303 """Tests for basic options parsing in magics."""
295 304 # These are only the most minimal of tests, more should be added later. At
296 305 # the very least we check that basic text/unicode calls work OK.
297 306 m = DummyMagics(_ip)
298 307 nt.assert_equal(m.parse_options('foo', '')[1], 'foo')
299 308 nt.assert_equal(m.parse_options(u'foo', '')[1], u'foo')
300 309
301 310
302 311 def test_dirops():
303 312 """Test various directory handling operations."""
304 313 # curpath = lambda :os.path.splitdrive(os.getcwdu())[1].replace('\\','/')
305 314 curpath = os.getcwdu
306 315 startdir = os.getcwdu()
307 316 ipdir = os.path.realpath(_ip.ipython_dir)
308 317 try:
309 318 _ip.magic('cd "%s"' % ipdir)
310 319 nt.assert_equal(curpath(), ipdir)
311 320 _ip.magic('cd -')
312 321 nt.assert_equal(curpath(), startdir)
313 322 _ip.magic('pushd "%s"' % ipdir)
314 323 nt.assert_equal(curpath(), ipdir)
315 324 _ip.magic('popd')
316 325 nt.assert_equal(curpath(), startdir)
317 326 finally:
318 327 os.chdir(startdir)
319 328
320 329
321 330 def test_xmode():
322 331 # Calling xmode three times should be a no-op
323 332 xmode = _ip.InteractiveTB.mode
324 333 for i in range(3):
325 334 _ip.magic("xmode")
326 335 nt.assert_equal(_ip.InteractiveTB.mode, xmode)
327 336
328 337 def test_reset_hard():
329 338 monitor = []
330 339 class A(object):
331 340 def __del__(self):
332 341 monitor.append(1)
333 342 def __repr__(self):
334 343 return "<A instance>"
335 344
336 345 _ip.user_ns["a"] = A()
337 346 _ip.run_cell("a")
338 347
339 348 nt.assert_equal(monitor, [])
340 349 _ip.magic("reset -f")
341 350 nt.assert_equal(monitor, [1])
342 351
343 352 class TestXdel(tt.TempFileMixin):
344 353 def test_xdel(self):
345 354 """Test that references from %run are cleared by xdel."""
346 355 src = ("class A(object):\n"
347 356 " monitor = []\n"
348 357 " def __del__(self):\n"
349 358 " self.monitor.append(1)\n"
350 359 "a = A()\n")
351 360 self.mktmp(src)
352 361 # %run creates some hidden references...
353 362 _ip.magic("run %s" % self.fname)
354 363 # ... as does the displayhook.
355 364 _ip.run_cell("a")
356 365
357 366 monitor = _ip.user_ns["A"].monitor
358 367 nt.assert_equal(monitor, [])
359 368
360 369 _ip.magic("xdel a")
361 370
362 371 # Check that a's __del__ method has been called.
363 372 nt.assert_equal(monitor, [1])
364 373
365 374 def doctest_who():
366 375 """doctest for %who
367 376
368 377 In [1]: %reset -f
369 378
370 379 In [2]: alpha = 123
371 380
372 381 In [3]: beta = 'beta'
373 382
374 383 In [4]: %who int
375 384 alpha
376 385
377 386 In [5]: %who str
378 387 beta
379 388
380 389 In [6]: %whos
381 390 Variable Type Data/Info
382 391 ----------------------------
383 392 alpha int 123
384 393 beta str beta
385 394
386 395 In [7]: %who_ls
387 396 Out[7]: ['alpha', 'beta']
388 397 """
389 398
390 399 def test_whos():
391 400 """Check that whos is protected against objects where repr() fails."""
392 401 class A(object):
393 402 def __repr__(self):
394 403 raise Exception()
395 404 _ip.user_ns['a'] = A()
396 405 _ip.magic("whos")
397 406
398 407 @py3compat.u_format
399 408 def doctest_precision():
400 409 """doctest for %precision
401 410
402 411 In [1]: f = get_ipython().display_formatter.formatters['text/plain']
403 412
404 413 In [2]: %precision 5
405 414 Out[2]: {u}'%.5f'
406 415
407 416 In [3]: f.float_format
408 417 Out[3]: {u}'%.5f'
409 418
410 419 In [4]: %precision %e
411 420 Out[4]: {u}'%e'
412 421
413 422 In [5]: f(3.1415927)
414 423 Out[5]: {u}'3.141593e+00'
415 424 """
416 425
417 426 def test_psearch():
418 427 with tt.AssertPrints("dict.fromkeys"):
419 428 _ip.run_cell("dict.fr*?")
420 429
421 430 def test_timeit_shlex():
422 431 """test shlex issues with timeit (#1109)"""
423 432 _ip.ex("def f(*a,**kw): pass")
424 433 _ip.magic('timeit -n1 "this is a bug".count(" ")')
425 434 _ip.magic('timeit -r1 -n1 f(" ", 1)')
426 435 _ip.magic('timeit -r1 -n1 f(" ", 1, " ", 2, " ")')
427 436 _ip.magic('timeit -r1 -n1 ("a " + "b")')
428 437 _ip.magic('timeit -r1 -n1 f("a " + "b")')
429 438 _ip.magic('timeit -r1 -n1 f("a " + "b ")')
430 439
431 440
432 441 def test_timeit_arguments():
433 442 "Test valid timeit arguments, should not cause SyntaxError (GH #1269)"
434 443 _ip.magic("timeit ('#')")
435 444
436 445
437 446 @dec.skipif(execution.profile is None)
438 447 def test_prun_quotes():
439 448 "Test that prun does not clobber string escapes (GH #1302)"
440 449 _ip.magic("prun -q x = '\t'")
441 450 nt.assert_equal(_ip.user_ns['x'], '\t')
442 451
443 452 def test_extension():
444 453 tmpdir = TemporaryDirectory()
445 454 orig_ipython_dir = _ip.ipython_dir
446 455 try:
447 456 _ip.ipython_dir = tmpdir.name
448 457 nt.assert_raises(ImportError, _ip.magic, "load_ext daft_extension")
449 458 url = os.path.join(os.path.dirname(__file__), "daft_extension.py")
450 459 _ip.magic("install_ext %s" % url)
451 460 _ip.user_ns.pop('arq', None)
452 461 _ip.magic("load_ext daft_extension")
453 462 tt.assert_equal(_ip.user_ns['arq'], 185)
454 463 _ip.magic("unload_ext daft_extension")
455 464 assert 'arq' not in _ip.user_ns
456 465 finally:
457 466 _ip.ipython_dir = orig_ipython_dir
458 467
459 468 def test_notebook_export_json():
460 469 with TemporaryDirectory() as td:
461 470 outfile = os.path.join(td, "nb.ipynb")
462 471 _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
463 472 _ip.magic("notebook -e %s" % outfile)
464 473
465 474 def test_notebook_export_py():
466 475 with TemporaryDirectory() as td:
467 476 outfile = os.path.join(td, "nb.py")
468 477 _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
469 478 _ip.magic("notebook -e %s" % outfile)
470 479
471 480 def test_notebook_reformat_py():
472 481 with TemporaryDirectory() as td:
473 482 infile = os.path.join(td, "nb.ipynb")
474 483 with io.open(infile, 'w', encoding='utf-8') as f:
475 484 current.write(nb0, f, 'json')
476 485
477 486 _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
478 487 _ip.magic("notebook -f py %s" % infile)
479 488
480 489 def test_notebook_reformat_json():
481 490 with TemporaryDirectory() as td:
482 491 infile = os.path.join(td, "nb.py")
483 492 with io.open(infile, 'w', encoding='utf-8') as f:
484 493 current.write(nb0, f, 'py')
485 494
486 495 _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
487 496 _ip.magic("notebook -f ipynb %s" % infile)
488 497 _ip.magic("notebook -f json %s" % infile)
489 498
490 499 def test_env():
491 500 env = _ip.magic("env")
492 501 assert isinstance(env, dict), type(env)
493 502
494 503
495 504 class CellMagicTestCase(TestCase):
496 505
497 506 def check_ident(self, magic):
498 507 # Manually called, we get the result
499 508 out = _ip.run_cell_magic(magic, 'a', 'b')
500 509 nt.assert_equals(out, ('a','b'))
501 510 # Via run_cell, it goes into the user's namespace via displayhook
502 511 _ip.run_cell('%%' + magic +' c\nd')
503 512 nt.assert_equals(_ip.user_ns['_'], ('c','d'))
504 513
505 514 def test_cell_magic_func_deco(self):
506 515 "Cell magic using simple decorator"
507 516 @register_cell_magic
508 517 def cellm(line, cell):
509 518 return line, cell
510 519
511 520 self.check_ident('cellm')
512 521
513 522 def test_cell_magic_reg(self):
514 523 "Cell magic manually registered"
515 524 def cellm(line, cell):
516 525 return line, cell
517 526
518 527 _ip.register_magic_function(cellm, 'cell', 'cellm2')
519 528 self.check_ident('cellm2')
520 529
521 530 def test_cell_magic_class(self):
522 531 "Cell magics declared via a class"
523 532 @magics_class
524 533 class MyMagics(Magics):
525 534
526 535 @cell_magic
527 536 def cellm3(self, line, cell):
528 537 return line, cell
529 538
530 539 _ip.register_magics(MyMagics)
531 540 self.check_ident('cellm3')
532 541
533 542 def test_cell_magic_class2(self):
534 543 "Cell magics declared via a class, #2"
535 544 @magics_class
536 545 class MyMagics2(Magics):
537 546
538 547 @cell_magic('cellm4')
539 548 def cellm33(self, line, cell):
540 549 return line, cell
541 550
542 551 _ip.register_magics(MyMagics2)
543 552 self.check_ident('cellm4')
544 553 # Check that nothing is registered as 'cellm33'
545 554 c33 = _ip.find_cell_magic('cellm33')
546 555 nt.assert_equals(c33, None)
547 556
@@ -1,316 +1,343 b''
1 1 # encoding: utf-8
2 2 """
3 3 =============
4 4 parallelmagic
5 5 =============
6 6
7 7 Magic command interface for interactive parallel work.
8 8
9 9 Usage
10 10 =====
11 11
12 12 ``%autopx``
13 13
14 @AUTOPX_DOC@
14 {AUTOPX_DOC}
15 15
16 16 ``%px``
17 17
18 @PX_DOC@
18 {PX_DOC}
19 19
20 20 ``%result``
21 21
22 @RESULT_DOC@
22 {RESULT_DOC}
23 23
24 24 """
25 25
26 26 #-----------------------------------------------------------------------------
27 27 # Copyright (C) 2008 The IPython Development Team
28 28 #
29 29 # Distributed under the terms of the BSD License. The full license is in
30 30 # the file COPYING, distributed as part of this software.
31 31 #-----------------------------------------------------------------------------
32 32
33 33 #-----------------------------------------------------------------------------
34 34 # Imports
35 35 #-----------------------------------------------------------------------------
36 36
37 37 import ast
38 38 import re
39 39
40 from IPython.core.magic import Magics, magics_class, line_magic
40 from IPython.core.error import UsageError
41 from IPython.core.magic import Magics, magics_class, line_magic, cell_magic
41 42 from IPython.testing.skipdoctest import skip_doctest
42 43
43 44 #-----------------------------------------------------------------------------
44 45 # Definitions of magic functions for use with IPython
45 46 #-----------------------------------------------------------------------------
46 47
47 NO_ACTIVE_VIEW = """
48 Use activate() on a DirectView object to activate it for magics.
49 """
48
49 NO_ACTIVE_VIEW = "Use activate() on a DirectView object to use it with magics."
50 50
51 51
52 52 @magics_class
53 53 class ParallelMagics(Magics):
54 54 """A set of magics useful when controlling a parallel IPython cluster.
55 55 """
56
57 def __init__(self, shell):
58 super(ParallelMagics, self).__init__(shell)
59 # A flag showing if autopx is activated or not
60 self.autopx = False
56
57 # A flag showing if autopx is activated or not
58 _autopx = False
59 # the current view used by the magics:
60 active_view = None
61 61
62 62 @skip_doctest
63 63 @line_magic
64 64 def result(self, parameter_s=''):
65 """Print the result of command i on all engines..
65 """Print the result of command i on all engines.
66 66
67 67 To use this a :class:`DirectView` instance must be created
68 68 and then activated by calling its :meth:`activate` method.
69
70 This lets you recall the results of %px computations after
71 asynchronous submission (view.block=False).
69 72
70 73 Then you can do the following::
71 74
72 In [23]: %result
73 Out[23]:
74 <Results List>
75 [0] In [6]: a = 10
76 [1] In [6]: a = 10
77
78 In [22]: %result 6
79 Out[22]:
80 <Results List>
81 [0] In [6]: a = 10
82 [1] In [6]: a = 10
75 In [23]: %px os.getpid()
76 Async parallel execution on engine(s): all
77
78 In [24]: %result
79 [ 8] Out[10]: 60920
80 [ 9] Out[10]: 60921
81 [10] Out[10]: 60922
82 [11] Out[10]: 60923
83 83 """
84
84 85 if self.active_view is None:
85 print NO_ACTIVE_VIEW
86 return
87
86 raise UsageError(NO_ACTIVE_VIEW)
87
88 stride = len(self.active_view)
88 89 try:
89 90 index = int(parameter_s)
90 91 except:
91 index = None
92 result = self.active_view.get_result(index)
93 return result
92 index = -1
93 msg_ids = self.active_view.history[stride * index:(stride * (index + 1)) or None]
94
95 result = self.active_view.get_result(msg_ids)
96
97 result.get()
98 result.display_outputs()
94 99
95 100 @skip_doctest
96 101 @line_magic
97 102 def px(self, parameter_s=''):
98 103 """Executes the given python command in parallel.
99
104
100 105 To use this a :class:`DirectView` instance must be created
101 106 and then activated by calling its :meth:`activate` method.
102 107
103 108 Then you can do the following::
104 109
105 In [24]: %px a = 5
110 In [24]: %px a = os.getpid()
106 111 Parallel execution on engine(s): all
107 Out[24]:
108 <Results List>
109 [0] In [7]: a = 5
110 [1] In [7]: a = 5
112
113 In [25]: %px print a
114 [stdout:0] 1234
115 [stdout:1] 1235
116 [stdout:2] 1236
117 [stdout:3] 1237
111 118 """
119 return self.parallel_execute(parameter_s)
120
121 def parallel_execute(self, cell, block=None, groupby='type'):
 122         """implementation used by %px and the %%px cell magic"""
112 123
113 124 if self.active_view is None:
114 print NO_ACTIVE_VIEW
115 return
116 print "Parallel execution on engine(s): %s" % self.active_view.targets
117 result = self.active_view.execute(parameter_s, block=False)
118 if self.active_view.block:
125 raise UsageError(NO_ACTIVE_VIEW)
126
127 # defaults:
128 block = self.active_view.block if block is None else block
129
130 base = "Parallel" if block else "Async parallel"
131 print base + " execution on engine(s): %s" % self.active_view.targets
132
133 result = self.active_view.execute(cell, silent=False, block=False)
134 if block:
119 135 result.get()
120 self._maybe_display_output(result)
136 result.display_outputs(groupby)
137 else:
138 # return AsyncResult only on non-blocking submission
139 return result
140
141 @skip_doctest
142 @cell_magic('px')
143 def cell_px(self, line='', cell=None):
144 """Executes the given python command in parallel.
145
146 Cell magic usage:
147
 148         %%px [-o] [-e] [--group-outputs=type|engine|order] [--[no]block]
149
150 Options (%%px cell magic only):
151
 152         -o: collate outputs in order (same as group-outputs=order)
153
154 -e: group outputs by engine (same as group-outputs=engine)
155
156 --group-outputs=type [default behavior]:
157 each output type (stdout, stderr, displaypub) for all engines
158 displayed together.
159
160 --group-outputs=order:
161 The same as 'type', but individual displaypub outputs (e.g. plots)
162 will be interleaved, so it will display all of the first plots,
163 then all of the second plots, etc.
164
165 --group-outputs=engine:
166 All of an engine's output is displayed before moving on to the next.
167
168 --[no]block:
169 Whether or not to block for the execution to complete
 170           (and display the results). If unspecified, the active view's
 171           block attribute is used.
171
172
173 To use this a :class:`DirectView` instance must be created
174 and then activated by calling its :meth:`activate` method.
175
176 Then you can do the following::
177
 178             In [24]: %%px --noblock a = os.getpid()
179 Async parallel execution on engine(s): all
180
181 In [25]: %px print a
182 [stdout:0] 1234
183 [stdout:1] 1235
184 [stdout:2] 1236
185 [stdout:3] 1237
186 """
187
188 block = None
189 groupby = 'type'
190 # as a cell magic, we accept args
191 opts, _ = self.parse_options(line, 'oe', 'group-outputs=', 'block', 'noblock')
192
193 if 'group-outputs' in opts:
194 groupby = opts['group-outputs']
195 elif 'o' in opts:
196 groupby = 'order'
197 elif 'e' in opts:
198 groupby = 'engine'
199
200 if 'block' in opts:
201 block = True
202 elif 'noblock' in opts:
203 block = False
204
205 return self.parallel_execute(cell, block=block, groupby=groupby)
121 206
122 207 @skip_doctest
123 208 @line_magic
124 209 def autopx(self, parameter_s=''):
125 210 """Toggles auto parallel mode.
126 211
127 212 To use this a :class:`DirectView` instance must be created
128 213 and then activated by calling its :meth:`activate` method. Once this
129 214 is called, all commands typed at the command line are send to
130 215 the engines to be executed in parallel. To control which engine
131 216 are used, set the ``targets`` attributed of the multiengine client
132 217 before entering ``%autopx`` mode.
133 218
134 219 Then you can do the following::
135 220
136 221 In [25]: %autopx
 137 222             %autopx enabled
138 223
139 224 In [26]: a = 10
140 225 Parallel execution on engine(s): [0,1,2,3]
141 226 In [27]: print a
142 227 Parallel execution on engine(s): [0,1,2,3]
143 228 [stdout:0] 10
144 229 [stdout:1] 10
145 230 [stdout:2] 10
146 231 [stdout:3] 10
147 232
148 233
149 234 In [27]: %autopx
150 235 %autopx disabled
151 236 """
152 if self.autopx:
237 if self._autopx:
153 238 self._disable_autopx()
154 239 else:
155 240 self._enable_autopx()
156 241
157 242 def _enable_autopx(self):
158 243 """Enable %autopx mode by saving the original run_cell and installing
159 244 pxrun_cell.
160 245 """
161 246 if self.active_view is None:
162 print NO_ACTIVE_VIEW
163 return
247 raise UsageError(NO_ACTIVE_VIEW)
164 248
165 # override run_cell and run_code
249 # override run_cell
166 250 self._original_run_cell = self.shell.run_cell
167 251 self.shell.run_cell = self.pxrun_cell
168 self._original_run_code = self.shell.run_code
169 self.shell.run_code = self.pxrun_code
170 252
171 self.autopx = True
253 self._autopx = True
172 254 print "%autopx enabled"
173 255
174 256 def _disable_autopx(self):
175 257 """Disable %autopx by restoring the original InteractiveShell.run_cell.
176 258 """
177 if self.autopx:
259 if self._autopx:
178 260 self.shell.run_cell = self._original_run_cell
179 self.shell.run_code = self._original_run_code
180 self.autopx = False
261 self._autopx = False
181 262 print "%autopx disabled"
182 263
    def _maybe_display_output(self, result):
        """Maybe display the output of a parallel result.

        If self.active_view.block is True, wait for the result
        and display the result.  Otherwise, this is a noop.
        """
        if isinstance(result.stdout, basestring):
            # single result
            stdouts = [result.stdout.rstrip()]
        else:
            stdouts = [s.rstrip() for s in result.stdout]

        targets = self.active_view.targets
        if isinstance(targets, int):
            # normalize a single engine id to a list
            targets = [targets]
        elif targets == 'all':
            targets = self.active_view.client.ids

        if any(stdouts):
            # prefix each engine's stdout with its engine id
            for eid,stdout in zip(targets, stdouts):
                print '[stdout:%i]'%eid, stdout
205
    def pxrun_cell(self, raw_cell, store_history=False, silent=False):
        """drop-in replacement for InteractiveShell.run_cell.

        This executes code remotely, instead of in the local namespace.

        See InteractiveShell.run_cell for details.
        """

        if (not raw_cell) or raw_cell.isspace():
            # nothing to do for empty input
            return

        ipself = self.shell

        with ipself.builtin_trap:
            cell = ipself.prefilter_manager.prefilter_lines(raw_cell)

            # Store raw and processed history
            if store_history:
                ipself.history_manager.store_inputs(ipself.execution_count,
                                                  cell, raw_cell)

            # ipself.logger.log(cell, raw_cell)

            cell_name = ipself.compile.cache(cell, ipself.execution_count)

            try:
                # syntax-check locally before shipping the cell to the engines
                ast.parse(cell, filename=cell_name)
            except (OverflowError, SyntaxError, ValueError, TypeError,
                    MemoryError):
                # Case 1
                ipself.showsyntaxerror()
                ipself.execution_count += 1
                return None
            except NameError:
                # ignore name errors, because we don't know the remote keys
                pass

        if store_history:
            # Write output to the database. Does nothing unless
            # history output logging is enabled.
            ipself.history_manager.store_output(ipself.execution_count)
            # Each cell is a *single* input, regardless of how many lines it has
            ipself.execution_count += 1

        if re.search(r'get_ipython\(\)\.magic\(u?["\']%?autopx', cell):
            # typing %autopx again while active toggles it back off
            self._disable_autopx()
            return False
        else:
            try:
                result = self.active_view.execute(cell, silent=False, block=False)
            except:
                ipself.showtraceback()
                return True
            else:
                if self.active_view.block:
                    try:
                        result.get()
                    except:
                        self.shell.showtraceback()
                        return True
                    else:
                        # republish remote stdout/stderr/display data locally
                        with ipself.builtin_trap:
                            result.display_outputs()
                return False
299 327
300 328
# splice the magic docstrings into the module docstring placeholders
__doc__ = __doc__.format(
                AUTOPX_DOC = ' '*8 + ParallelMagics.autopx.__doc__,
                PX_DOC = ' '*8 + ParallelMagics.px.__doc__,
                RESULT_DOC = ' '*8 + ParallelMagics.result.__doc__
)

# guard so the magics are only registered once per session
_loaded = False
310 337
def load_ipython_extension(ip):
    """Load the extension in IPython."""
    global _loaded
    # register only once per process
    if _loaded:
        return
    ip.register_magics(ParallelMagics)
    _loaded = True
@@ -1,517 +1,651 b''
1 1 """AsyncResult objects for the client
2 2
3 3 Authors:
4 4
5 5 * MinRK
6 6 """
7 7 #-----------------------------------------------------------------------------
8 8 # Copyright (C) 2010-2011 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-----------------------------------------------------------------------------
13 13
14 14 #-----------------------------------------------------------------------------
15 15 # Imports
16 16 #-----------------------------------------------------------------------------
17 17
18 18 import sys
19 19 import time
20 20 from datetime import datetime
21 21
22 22 from zmq import MessageTracker
23 23
24 from IPython.core.display import clear_output
24 from IPython.core.display import clear_output, display
25 25 from IPython.external.decorator import decorator
26 26 from IPython.parallel import error
27 27
28 28 #-----------------------------------------------------------------------------
29 29 # Functions
30 30 #-----------------------------------------------------------------------------
31 31
32 32 def _total_seconds(td):
33 33 """timedelta.total_seconds was added in 2.7"""
34 34 try:
35 35 # Python >= 2.7
36 36 return td.total_seconds()
37 37 except AttributeError:
38 38 # Python 2.6
39 39 return 1e-6 * (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6)
40 40
41 41 #-----------------------------------------------------------------------------
42 42 # Classes
43 43 #-----------------------------------------------------------------------------
44 44
# global empty tracker that's always done:
# used as the default `tracker` for AsyncResult when no send is pending
finished_tracker = MessageTracker()
47 47
@decorator
def check_ready(f, self, *args, **kwargs):
    """Call spin() to sync state prior to calling the method."""
    # a zero-timeout wait syncs ready-state without blocking
    self.wait(0)
    if not self._ready:
        raise error.TimeoutError("result not ready")
    return f(self, *args, **kwargs)
55 55
class AsyncResult(object):
    """Class for representing results of non-blocking calls.

    Provides the same interface as :py:class:`multiprocessing.pool.AsyncResult`.
    """

    # class-level defaults, overridden per instance in __init__
    msg_ids = None
    _targets = None
    _tracker = None
    _single_result = False
66 66
    def __init__(self, client, msg_ids, fname='unknown', targets=None, tracker=None):
        if isinstance(msg_ids, basestring):
            # always a list
            msg_ids = [msg_ids]
        if tracker is None:
            # default to always done
            tracker = finished_tracker
        self._client = client
        self.msg_ids = msg_ids
        self._fname=fname
        self._targets = targets
        self._tracker = tracker
        self._ready = False
        self._success = None
        self._metadata = None
        if len(msg_ids) == 1:
            # one message: scalar result, unless targets was given as a sequence
            self._single_result = not isinstance(targets, (list, tuple))
        else:
            self._single_result = False
86 86
    def __repr__(self):
        if self._ready:
            return "<%s: finished>"%(self.__class__.__name__)
        else:
            # pending: show the name of the call we are waiting on
            return "<%s: %s>"%(self.__class__.__name__,self._fname)
92 92
93 93
    def _reconstruct_result(self, res):
        """Reconstruct our result from actual result list (always a list)

        Override me in subclasses for turning a list of results
        into the expected form.
        """
        if self._single_result:
            # unwrap the one-element list
            return res[0]
        else:
            return res
104 104
    def get(self, timeout=-1):
        """Return the result when it arrives.

        If `timeout` is not ``None`` and the result does not arrive within
        `timeout` seconds then ``TimeoutError`` is raised. If the
        remote call raised an exception then that exception will be reraised
        by get() inside a `RemoteError`.
        """
        if not self.ready():
            self.wait(timeout)

        if self._ready:
            if self._success:
                return self._result
            else:
                # remote exception, collected during wait()
                raise self._exception
        else:
            raise error.TimeoutError("Result not ready.")
123 123
    def ready(self):
        """Return whether the call has completed."""
        if not self._ready:
            # zero-timeout wait syncs state without blocking
            self.wait(0)
        return self._ready
129 129
    def wait(self, timeout=-1):
        """Wait until the result is available or until `timeout` seconds pass.

        This method always returns None.
        """
        if self._ready:
            return
        self._ready = self._client.wait(self.msg_ids, timeout)
        if self._ready:
            try:
                results = map(self._client.results.get, self.msg_ids)
                self._result = results
                if self._single_result:
                    r = results[0]
                    if isinstance(r, Exception):
                        # single remote exception: re-raise it directly
                        raise r
                else:
                    results = error.collect_exceptions(results, self._fname)
                    self._result = self._reconstruct_result(results)
            except Exception, e:
                # cache the failure; get() re-raises it
                self._exception = e
                self._success = False
            else:
                self._success = True
            finally:
                # always capture per-task metadata, success or not
                self._metadata = map(self._client.metadata.get, self.msg_ids)
156 156
157 157
    def successful(self):
        """Return whether the call completed without raising an exception.

        Will raise ``AssertionError`` if the result is not ready.
        """
        # ready() also syncs state via wait(0)
        assert self.ready()
        return self._success
165 165
166 166 #----------------------------------------------------------------
167 167 # Extra methods not in mp.pool.AsyncResult
168 168 #----------------------------------------------------------------
169 169
    def get_dict(self, timeout=-1):
        """Get the results as a dict, keyed by engine_id.

        timeout behavior is described in `get()`.
        """

        results = self.get(timeout)
        engine_ids = [ md['engine_id'] for md in self._metadata ]
        # sort ids by frequency so the most frequent engine is last
        bycount = sorted(engine_ids, key=lambda k: engine_ids.count(k))
        maxcount = bycount.count(bycount[-1])
        if maxcount > 1:
            # some engine ran more than one task: dict keys would collide
            raise ValueError("Cannot build dict, %i jobs ran on engine #%i"%(
                    maxcount, bycount[-1]))

        return dict(zip(engine_ids,results))
185 185
    @property
    def result(self):
        """result property wrapper for `get()` (blocking)."""
        # NOTE(review): previous docstring said `get(timeout=0)`, but this
        # calls get() with its default timeout (-1), i.e. it blocks.
        return self.get()

    # abbreviated alias:
    r = result
193 193
    @property
    @check_ready
    def metadata(self):
        """property for accessing execution metadata."""
        if self._single_result:
            # unwrap the one-element metadata list
            return self._metadata[0]
        else:
            return self._metadata
202 202
    @property
    def result_dict(self):
        """result property as a dict, keyed by engine_id (see get_dict)."""
        return self.get_dict()
207 207
    def __dict__(self):
        # NOTE(review): defining __dict__ as a *method* is unusual -- it is
        # never invoked implicitly by Python and shadows the normal instance
        # __dict__ protocol.  Presumably meant as a dict-conversion hook;
        # verify against callers before relying on it.
        return self.get_dict(0)
210 210
    def abort(self):
        """abort my tasks."""
        # aborting only makes sense while tasks are still outstanding
        assert not self.ready(), "Can't abort, I am already done!"
        return self._client.abort(self.msg_ids, targets=self._targets, block=True)
215 215
    @property
    def sent(self):
        """check whether my messages have been sent."""
        # delegated to the zmq MessageTracker
        return self._tracker.done
220 220
    def wait_for_send(self, timeout=-1):
        """wait for pyzmq send to complete.

        This is necessary when sending arrays that you intend to edit in-place.
        `timeout` is in seconds, and will raise TimeoutError if it is reached
        before the send completes.
        """
        # delegated to the zmq MessageTracker
        return self._tracker.wait(timeout)
229 229
230 230 #-------------------------------------
231 231 # dict-access
232 232 #-------------------------------------
233 233
    @check_ready
    def __getitem__(self, key):
        """getitem returns result value(s) if keyed by int/slice, or metadata if key is str.
        """
        if isinstance(key, int):
            # positional access to a single result
            return error.collect_exceptions([self._result[key]], self._fname)[0]
        elif isinstance(key, slice):
            return error.collect_exceptions(self._result[key], self._fname)
        elif isinstance(key, basestring):
            # string key: look up in per-task metadata
            values = [ md[key] for md in self._metadata ]
            if self._single_result:
                return values[0]
            else:
                return values
        else:
            raise TypeError("Invalid key type %r, must be 'int','slice', or 'str'"%type(key))
250 250
    def __getattr__(self, key):
        """getattr maps to getitem for convenient attr access to metadata."""
        try:
            return self.__getitem__(key)
        except (error.TimeoutError, KeyError):
            # mask lookup failures as AttributeError, per the getattr contract
            raise AttributeError("%r object has no attribute %r"%(
                    self.__class__.__name__, key))
258 258
    # asynchronous iterator:
    def __iter__(self):
        if self._single_result:
            raise TypeError("AsyncResults with a single result are not iterable.")
        try:
            rlist = self.get(0)
        except error.TimeoutError:
            # wait for each result individually, yielding as each arrives
            for msg_id in self.msg_ids:
                ar = AsyncResult(self._client, msg_id, self._fname)
                yield ar.get()
        else:
            # already done
            for r in rlist:
                yield r
274 274
    def __len__(self):
        # one message per task
        return len(self.msg_ids)
277 277
278 278 #-------------------------------------
279 279 # Sugar methods and attributes
280 280 #-------------------------------------
281 281
    def timedelta(self, start, end, start_key=min, end_key=max):
        """compute the difference between two sets of timestamps

        The default behavior is to use the earliest of the first
        and the latest of the second list, but this can be changed
        by passing different start_key/end_key functions.

        Parameters
        ----------

        start : one or more datetime objects (e.g. ar.submitted)
        end : one or more datetime objects (e.g. ar.received)
        start_key : callable
            Function to call on `start` to extract the relevant
            entry [default: min]
        end_key : callable
            Function to call on `end` to extract the relevant
            entry [default: max]

        Returns
        -------

        dt : float
            The time elapsed (in seconds) between the two selected timestamps.
        """
        if not isinstance(start, datetime):
            # handle single_result AsyncResults, where ar.stamp is single object,
            # not a list
            start = start_key(start)
        if not isinstance(end, datetime):
            # handle single_result AsyncResults, where ar.stamp is single object,
            # not a list
            end = end_key(end)
        return _total_seconds(end - start)
316 316
    @property
    def progress(self):
        """the number of tasks which have been completed at this point.

        Fractional progress would be given by 1.0 * ar.progress / len(ar)
        """
        self.wait(0)
        # finished = total - still outstanding
        return len(self) - len(set(self.msg_ids).intersection(self._client.outstanding))
325 325
    @property
    def elapsed(self):
        """elapsed time since initial submission"""
        if self.ready():
            # finished: elapsed time is fixed at the wall time
            return self.wall_time

        now = submitted = datetime.now()
        # find the earliest submission timestamp among our messages
        for msg_id in self.msg_ids:
            if msg_id in self._client.metadata:
                stamp = self._client.metadata[msg_id]['submitted']
                if stamp and stamp < submitted:
                    submitted = stamp
        return _total_seconds(now-submitted)
339 339
    @property
    @check_ready
    def serial_time(self):
        """serial computation time of a parallel calculation

        Computed as the sum of (completed-started) of each task
        """
        t = 0
        for md in self._metadata:
            t += _total_seconds(md['completed'] - md['started'])
        return t
351 351
    @property
    @check_ready
    def wall_time(self):
        """actual computation time of a parallel calculation

        Computed as the time between the latest `received` stamp
        and the earliest `submitted`.

        Only reliable if Client was spinning/waiting when the task finished, because
        the `received` timestamp is created when a result is pulled off of the zmq queue,
        which happens as a result of `client.spin()`.

        For similar comparison of other timestamp pairs, check out AsyncResult.timedelta.

        """
        return self.timedelta(self.submitted, self.received)
368 368
    def wait_interactive(self, interval=1., timeout=None):
        """interactive wait, printing progress at regular intervals"""
        N = len(self)
        tic = time.time()
        while not self.ready() and (timeout is None or time.time() - tic <= timeout):
            self.wait(interval)
            # overwrite the previous progress line in place
            clear_output()
            print "%4i/%i tasks finished after %4i s" % (self.progress, N, self.elapsed),
            sys.stdout.flush()
        print
        print "done"
380
    def _republish_displaypub(self, content, eid):
        """republish individual displaypub content dicts"""
        try:
            ip = get_ipython()
        except NameError:
            # displaypub is meaningless outside IPython
            return
        md = content['metadata'] or {}
        # tag the output with the engine it came from
        md['engine'] = eid
        ip.display_pub.publish(content['source'], content['data'], md)
391
392
    def _display_single_result(self):
        # plain text streams first
        print self.stdout
        print >> sys.stderr, self.stderr

        try:
            get_ipython()
        except NameError:
            # displaypub is meaningless outside IPython
            return

        for output in self.outputs:
            self._republish_displaypub(output, self.engine_id)

        if self.pyout is not None:
            # re-display the result, as the engine would have
            display(self.get())
409
410 @check_ready
411 def display_outputs(self, groupby="type"):
412 """republish the outputs of the computation
413
414 Parameters
415 ----------
416
417 groupby : str [default: type]
418 if 'type':
419 Group outputs by type (show all stdout, then all stderr, etc.):
420
421 [stdout:1] foo
422 [stdout:2] foo
423 [stderr:1] bar
424 [stderr:2] bar
425 if 'engine':
426 Display outputs for each engine before moving on to the next:
427
428 [stdout:1] foo
429 [stderr:1] bar
430 [stdout:2] foo
431 [stderr:2] bar
432
433 if 'order':
434 Like 'type', but further collate individual displaypub
435 outputs. This is meant for cases of each command producing
436 several plots, and you would like to see all of the first
437 plots together, then all of the second plots, and so on.
438 """
439 # flush iopub, just in case
440 self._client._flush_iopub(self._client._iopub_socket)
441 if self._single_result:
442 self._display_single_result()
443 return
444
445 stdouts = [s.rstrip() for s in self.stdout]
446 stderrs = [s.rstrip() for s in self.stderr]
447 pyouts = [p for p in self.pyout]
448 output_lists = self.outputs
449 results = self.get()
450
451 targets = self.engine_id
452
453 if groupby == "engine":
454 for eid,stdout,stderr,outputs,r,pyout in zip(
455 targets, stdouts, stderrs, output_lists, results, pyouts
456 ):
457 if stdout:
458 print '[stdout:%i]' % eid, stdout
459 if stderr:
460 print >> sys.stderr, '[stderr:%i]' % eid, stderr
461
462 try:
463 get_ipython()
464 except NameError:
465 # displaypub is meaningless outside IPython
466 return
467
468 for output in outputs:
469 self._republish_displaypub(output, eid)
470
471 if pyout is not None:
472 display(r)
473
474 elif groupby in ('type', 'order'):
475 # republish stdout:
476 if any(stdouts):
477 for eid,stdout in zip(targets, stdouts):
478 print '[stdout:%i]' % eid, stdout
479
480 # republish stderr:
481 if any(stderrs):
482 for eid,stderr in zip(targets, stderrs):
483 print >> sys.stderr, '[stderr:%i]' % eid, stderr
484
485 try:
486 get_ipython()
487 except NameError:
488 # displaypub is meaningless outside IPython
489 return
490
491 if groupby == 'order':
492 output_dict = dict((eid, outputs) for eid,outputs in zip(targets, output_lists))
493 N = max(len(outputs) for outputs in output_lists)
494 for i in range(N):
495 for eid in targets:
496 outputs = output_dict[eid]
497 if len(outputs) >= N:
498 self._republish_displaypub(outputs[i], eid)
499 else:
500 # republish displaypub output
501 for eid,outputs in zip(targets, output_lists):
502 for output in outputs:
503 self._republish_displaypub(output, eid)
504
505 # finally, add pyout:
506 for eid,r,pyout in zip(targets, results, pyouts):
507 if pyout is not None:
508 display(r)
509
510 else:
511 raise ValueError("groupby must be one of 'type', 'engine', 'collate', not %r" % groupby)
512
513
380 514
381 515
class AsyncMapResult(AsyncResult):
    """Class for representing results of non-blocking gathers.

    This will properly reconstruct the gather.

    This class is iterable at any time, and will wait on results as they come.

    If ordered=False, then the first results to arrive will come first, otherwise
    results will be yielded in the order they were submitted.

    """

    def __init__(self, client, msg_ids, mapObject, fname='', ordered=True):
        AsyncResult.__init__(self, client, msg_ids, fname=fname)
        self._mapObject = mapObject
        # a map always yields a collection, never a scalar result
        self._single_result = False
        self.ordered = ordered

    def _reconstruct_result(self, res):
        """Perform the gather on the actual results."""
        return self._mapObject.joinPartitions(res)

    # asynchronous iterator:
    def __iter__(self):
        # dispatch on ordering mode
        it = self._ordered_iter if self.ordered else self._unordered_iter
        for r in it():
            yield r

    # asynchronous ordered iterator:
    def _ordered_iter(self):
        """iterator for results *as they arrive*, preserving submission order."""
        try:
            rlist = self.get(0)
        except error.TimeoutError:
            # wait for each result individually
            for msg_id in self.msg_ids:
                ar = AsyncResult(self._client, msg_id, self._fname)
                rlist = ar.get()
                try:
                    for r in rlist:
                        yield r
                except TypeError:
                    # flattened, not a list
                    # this could get broken by flattened data that returns iterables
                    # but most calls to map do not expose the `flatten` argument
                    yield rlist
        else:
            # already done
            for r in rlist:
                yield r

    # asynchronous unordered iterator:
    def _unordered_iter(self):
        """iterator for results *as they arrive*, on FCFS basis, ignoring submission order."""
        try:
            rlist = self.get(0)
        except error.TimeoutError:
            pending = set(self.msg_ids)
            while pending:
                try:
                    self._client.wait(pending, 1e-3)
                except error.TimeoutError:
                    # ignore timeout error, because that only means
                    # *some* jobs are outstanding
                    pass
                # update ready set with those no longer outstanding:
                ready = pending.difference(self._client.outstanding)
                # update pending to exclude those that are finished
                pending = pending.difference(ready)
                while ready:
                    msg_id = ready.pop()
                    ar = AsyncResult(self._client, msg_id, self._fname)
                    rlist = ar.get()
                    try:
                        for r in rlist:
                            yield r
                    except TypeError:
                        # flattened, not a list
                        # this could get broken by flattened data that returns iterables
                        # but most calls to map do not expose the `flatten` argument
                        yield rlist
        else:
            # already done
            for r in rlist:
                yield r
467 601
468 602
469 603
class AsyncHubResult(AsyncResult):
    """Class to wrap pending results that must be requested from the Hub.

    Note that waiting/polling on these objects requires polling the Hub over the network,
    so use `AsyncHubResult.wait()` sparingly.
    """

    def wait(self, timeout=-1):
        """wait for result to complete."""
        start = time.time()
        if self._ready:
            return
        # first, wait locally for anything the client is still tracking
        local_ids = filter(lambda msg_id: msg_id in self._client.outstanding, self.msg_ids)
        local_ready = self._client.wait(local_ids, timeout)
        if local_ready:
            remote_ids = filter(lambda msg_id: msg_id not in self._client.results, self.msg_ids)
            if not remote_ids:
                self._ready = True
            else:
                # poll the Hub for the remaining results until done or timeout
                rdict = self._client.result_status(remote_ids, status_only=False)
                pending = rdict['pending']
                while pending and (timeout < 0 or time.time() < start+timeout):
                    rdict = self._client.result_status(remote_ids, status_only=False)
                    pending = rdict['pending']
                    if pending:
                        time.sleep(0.1)
                if not pending:
                    self._ready = True
        if self._ready:
            try:
                results = map(self._client.results.get, self.msg_ids)
                self._result = results
                if self._single_result:
                    r = results[0]
                    if isinstance(r, Exception):
                        # single remote exception: re-raise it directly
                        raise r
                else:
                    results = error.collect_exceptions(results, self._fname)
                    self._result = self._reconstruct_result(results)
            except Exception, e:
                self._exception = e
                self._success = False
            else:
                self._success = True
            finally:
                self._metadata = map(self._client.metadata.get, self.msg_ids)
516 650
517 651 __all__ = ['AsyncResult', 'AsyncMapResult', 'AsyncHubResult'] No newline at end of file
@@ -1,1628 +1,1655 b''
1 1 """A semi-synchronous Client for the ZMQ cluster
2 2
3 3 Authors:
4 4
5 5 * MinRK
6 6 """
7 7 #-----------------------------------------------------------------------------
8 8 # Copyright (C) 2010-2011 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-----------------------------------------------------------------------------
13 13
14 14 #-----------------------------------------------------------------------------
15 15 # Imports
16 16 #-----------------------------------------------------------------------------
17 17
18 18 import os
19 19 import json
20 20 import sys
21 21 from threading import Thread, Event
22 22 import time
23 23 import warnings
24 24 from datetime import datetime
25 25 from getpass import getpass
26 26 from pprint import pprint
27 27
28 28 pjoin = os.path.join
29 29
30 30 import zmq
31 31 # from zmq.eventloop import ioloop, zmqstream
32 32
33 33 from IPython.config.configurable import MultipleInstanceError
34 34 from IPython.core.application import BaseIPythonApplication
35 35
36 from IPython.utils.coloransi import TermColors
36 37 from IPython.utils.jsonutil import rekey
37 38 from IPython.utils.localinterfaces import LOCAL_IPS
38 39 from IPython.utils.path import get_ipython_dir
39 40 from IPython.utils.py3compat import cast_bytes
40 41 from IPython.utils.traitlets import (HasTraits, Integer, Instance, Unicode,
41 42 Dict, List, Bool, Set, Any)
42 43 from IPython.external.decorator import decorator
43 44 from IPython.external.ssh import tunnel
44 45
45 46 from IPython.parallel import Reference
46 47 from IPython.parallel import error
47 48 from IPython.parallel import util
48 49
49 50 from IPython.zmq.session import Session, Message
50 51
51 52 from .asyncresult import AsyncResult, AsyncHubResult
52 53 from IPython.core.profiledir import ProfileDir, ProfileDirError
53 54 from .view import DirectView, LoadBalancedView
54 55
# Python 3 compatibility shim
if sys.version_info[0] >= 3:
    # xrange is used in a couple 'isinstance' tests in py2
    # should be just 'range' in 3k
    xrange = range
59 60
60 61 #--------------------------------------------------------------------------
61 62 # Decorators for Client methods
62 63 #--------------------------------------------------------------------------
63 64
@decorator
def spin_first(f, self, *args, **kwargs):
    """Call spin() to sync state prior to calling the method."""
    # process any incoming messages before the wrapped call runs
    self.spin()
    return f(self, *args, **kwargs)
69 70
70 71
71 72 #--------------------------------------------------------------------------
72 73 # Classes
73 74 #--------------------------------------------------------------------------
74 75
75 76
class ExecuteReply(object):
    """wrapper for finished Execute results"""
    def __init__(self, msg_id, content, metadata):
        self.msg_id = msg_id
        self._content = content
        self.execution_count = content['execution_count']
        self.metadata = metadata

    def __getitem__(self, key):
        return self.metadata[key]

    def __getattr__(self, key):
        # attribute access falls through to metadata keys
        if key not in self.metadata:
            raise AttributeError(key)
        return self.metadata[key]

    def _pyout_data(self, mime):
        """Return the pyout display data for `mime`, or None if absent.

        Consolidates the identical lookup previously duplicated in every
        `_repr_*_` method.
        """
        pyout = self.metadata['pyout'] or {'data':{}}
        return pyout['data'].get(mime)

    def __repr__(self):
        text_out = self._pyout_data('text/plain') or ''
        # truncate long reprs to keep the summary on one line
        if len(text_out) > 32:
            text_out = text_out[:29] + '...'

        return "<ExecuteReply[%i]: %s>" % (self.execution_count, text_out)

    def _repr_pretty_(self, p, cycle):
        text_out = self._pyout_data('text/plain') or ''

        if not text_out:
            return

        try:
            ip = get_ipython()
        except NameError:
            # color output is meaningless outside IPython
            colors = "NoColor"
        else:
            colors = ip.colors

        if colors == "NoColor":
            out = normal = ""
        else:
            out = TermColors.Red
            normal = TermColors.Normal

        p.text(
            u'[%i] ' % self.metadata['engine_id'] +
            out + u'Out[%i]: ' % self.execution_count +
            normal + text_out
        )

    def _repr_html_(self):
        return self._pyout_data("text/html")

    def _repr_latex_(self):
        return self._pyout_data("text/latex")

    def _repr_json_(self):
        return self._pyout_data("application/json")

    def _repr_javascript_(self):
        return self._pyout_data("application/javascript")

    def _repr_png_(self):
        return self._pyout_data("image/png")

    def _repr_jpeg_(self):
        return self._pyout_data("image/jpeg")

    def _repr_svg_(self):
        return self._pyout_data("image/svg+xml")
127 154
128 155
class Metadata(dict):
    """Subclass of dict for initializing metadata values.

    Attribute access works on keys.

    These objects have a strict set of keys - errors will raise if you try
    to add new keys.
    """
    def __init__(self, *args, **kwargs):
        dict.__init__(self)
        # the full set of allowed keys, with their defaults; the dict literal
        # is rebuilt per call, so the mutable defaults ('', []) are not shared
        md = {'msg_id' : None,
              'submitted' : None,
              'started' : None,
              'completed' : None,
              'received' : None,
              'engine_uuid' : None,
              'engine_id' : None,
              'follow' : None,
              'after' : None,
              'status' : None,

              'pyin' : None,
              'pyout' : None,
              'pyerr' : None,
              'stdout' : '',
              'stderr' : '',
              'outputs' : [],
            }
        self.update(md)
        self.update(dict(*args, **kwargs))

    def __getattr__(self, key):
        """getattr aliased to getitem"""
        if key in self.iterkeys():
            return self[key]
        else:
            raise AttributeError(key)

    def __setattr__(self, key, value):
        """setattr aliased to setitem, with strict"""
        if key in self.iterkeys():
            self[key] = value
        else:
            raise AttributeError(key)

    def __setitem__(self, key, value):
        """strict static key enforcement"""
        # only keys established in __init__ may ever be set
        if key in self.iterkeys():
            dict.__setitem__(self, key, value)
        else:
            raise KeyError(key)
181 208
182 209 class Client(HasTraits):
183 210 """A semi-synchronous client to the IPython ZMQ cluster
184 211
185 212 Parameters
186 213 ----------
187 214
188 215 url_or_file : bytes or unicode; zmq url or path to ipcontroller-client.json
189 216 Connection information for the Hub's registration. If a json connector
190 217 file is given, then likely no further configuration is necessary.
191 218 [Default: use profile]
192 219 profile : bytes
193 220 The name of the Cluster profile to be used to find connector information.
194 221 If run from an IPython application, the default profile will be the same
195 222 as the running application, otherwise it will be 'default'.
196 223 context : zmq.Context
197 224 Pass an existing zmq.Context instance, otherwise the client will create its own.
198 225 debug : bool
199 226 flag for lots of message printing for debug purposes
200 227 timeout : int/float
201 228 time (in seconds) to wait for connection replies from the Hub
202 229 [Default: 10]
203 230
204 231 #-------------- session related args ----------------
205 232
206 233 config : Config object
207 234 If specified, this will be relayed to the Session for configuration
208 235 username : str
209 236 set username for the session object
210 237 packer : str (import_string) or callable
211 238 Can be either the simple keyword 'json' or 'pickle', or an import_string to a
212 239 function to serialize messages. Must support same input as
213 240 JSON, and output must be bytes.
214 241 You can pass a callable directly as `pack`
215 242 unpacker : str (import_string) or callable
216 243 The inverse of packer. Only necessary if packer is specified as *not* one
217 244 of 'json' or 'pickle'.
218 245
219 246 #-------------- ssh related args ----------------
220 247 # These are args for configuring the ssh tunnel to be used
221 248 # credentials are used to forward connections over ssh to the Controller
222 249 # Note that the ip given in `addr` needs to be relative to sshserver
223 250 # The most basic case is to leave addr as pointing to localhost (127.0.0.1),
224 251 # and set sshserver as the same machine the Controller is on. However,
225 252 # the only requirement is that sshserver is able to see the Controller
226 253 # (i.e. is within the same trusted network).
227 254
228 255 sshserver : str
229 256 A string of the form passed to ssh, i.e. 'server.tld' or 'user@server.tld:port'
230 257 If keyfile or password is specified, and this is not, it will default to
231 258 the ip given in addr.
232 259 sshkey : str; path to ssh private key file
233 260 This specifies a key to be used in ssh login, default None.
234 261 Regular default ssh keys will be used without specifying this argument.
235 262 password : str
236 263 Your ssh password to sshserver. Note that if this is left None,
237 264 you will be prompted for it if passwordless key based login is unavailable.
238 265 paramiko : bool
239 266 flag for whether to use paramiko instead of shell ssh for tunneling.
240 267 [default: True on win32, False else]
241 268
242 269 ------- exec authentication args -------
243 270 If even localhost is untrusted, you can have some protection against
244 271 unauthorized execution by signing messages with HMAC digests.
245 272 Messages are still sent as cleartext, so if someone can snoop your
246 273 loopback traffic this will not protect your privacy, but will prevent
247 274 unauthorized execution.
248 275
249 276 exec_key : str
250 277 an authentication key or file containing a key
251 278 default: None
252 279
253 280
254 281 Attributes
255 282 ----------
256 283
257 284 ids : list of int engine IDs
258 285 requesting the ids attribute always synchronizes
259 286 the registration state. To request ids without synchronization,
260 287 use semi-private _ids attributes.
261 288
262 289 history : list of msg_ids
263 290 a list of msg_ids, keeping track of all the execution
264 291 messages you have submitted in order.
265 292
266 293 outstanding : set of msg_ids
267 294 a set of msg_ids that have been submitted, but whose
268 295 results have not yet been received.
269 296
270 297 results : dict
271 298 a dict of all our results, keyed by msg_id
272 299
273 300 block : bool
274 301 determines default behavior when block not specified
275 302 in execution methods
276 303
277 304 Methods
278 305 -------
279 306
280 307 spin
281 308 flushes incoming results and registration state changes
282 309 control methods spin, and requesting `ids` also ensures up to date
283 310
284 311 wait
285 312 wait on one or more msg_ids
286 313
287 314 execution methods
288 315 apply
289 316 legacy: execute, run
290 317
291 318 data movement
292 319 push, pull, scatter, gather
293 320
294 321 query methods
295 322 queue_status, get_result, purge, result_status
296 323
297 324 control methods
298 325 abort, shutdown
299 326
300 327 """
301 328
302 329
block = Bool(False)          # default blocking behavior for execution methods
outstanding = Set()          # msg_ids submitted but not yet resolved
results = Instance('collections.defaultdict', (dict,))       # msg_id -> result
metadata = Instance('collections.defaultdict', (Metadata,))  # msg_id -> Metadata
history = List()             # all msg_ids ever submitted, in order
debug = Bool(False)          # verbose message printing
_spin_thread = Any()         # background Thread created by spin_thread()
_stop_spinning = Any()       # threading.Event used to stop the spin thread
311 338
profile = Unicode()
def _profile_default(self):
    """Dynamic default for `profile`: reuse a running IPython app's profile."""
    if not BaseIPythonApplication.initialized():
        return u'default'
    # an IPython app *might* be running, try to get its profile
    try:
        return BaseIPythonApplication.instance().profile
    except (AttributeError, MultipleInstanceError):
        # could be a *different* subclass of config.Application,
        # which would raise one of these two errors.
        return u'default'
324 351
325 352
_outstanding_dict = Instance('collections.defaultdict', (set,))  # engine_uuid -> outstanding msg_ids
_ids = List()                                # registered engine ids (ints)
_connected=Bool(False)                       # guard against re-running _connect()
_ssh=Bool(False)                             # whether connections are tunneled over ssh
_context = Instance('zmq.Context')
_config = Dict()                             # connection info (from JSON file / args)
_engines=Instance(util.ReverseDict, (), {})  # bidirectional id <-> uuid map
# _hub_socket=Instance('zmq.Socket')
_query_socket=Instance('zmq.Socket')         # DEALER to the Hub (registration/queries)
_control_socket=Instance('zmq.Socket')       # DEALER for control (abort/clear/shutdown)
_iopub_socket=Instance('zmq.Socket')         # SUB for engine stdout/stderr/display output
_notification_socket=Instance('zmq.Socket')  # SUB for engine (un)registration events
_mux_socket=Instance('zmq.Socket')           # DEALER to the direct-view multiplexer
_task_socket=Instance('zmq.Socket')          # DEALER to the load-balanced scheduler
_task_scheme=Unicode()                       # task scheduler scheme; 'pure' means raw ZMQ
_closed = False                              # set by close(); makes close() idempotent
_ignored_control_replies=Integer(0)          # control replies to drop (non-blocking calls)
_ignored_hub_replies=Integer(0)
344 371
def __new__(self, *args, **kw):
    # don't raise on positional args
    # (HasTraits.__new__ accepts only keywords; positional args are
    # consumed by our __init__, so they are dropped here)
    return HasTraits.__new__(self, **kw)
348 375
def __init__(self, url_or_file=None, profile=None, profile_dir=None, ipython_dir=None,
        context=None, debug=False, exec_key=None,
        sshserver=None, sshkey=None, password=None, paramiko=None,
        timeout=10, **extra_args
        ):
    """Create a Client and connect it to the Hub.

    See the class docstring for the meaning of the arguments; `extra_args`
    is forwarded to the Session constructor (config, username, packer, ...).
    Raises ValueError if no connection info can be located, and IOError if
    a given connection file does not exist.
    """
    # only pass `profile` to the trait machinery when explicitly given,
    # so the dynamic default (_profile_default) applies otherwise
    if profile:
        super(Client, self).__init__(debug=debug, profile=profile)
    else:
        super(Client, self).__init__(debug=debug)
    if context is None:
        # share the process-global zmq Context unless one was supplied
        context = zmq.Context.instance()
    self._context = context
    self._stop_spinning = Event()

    # resolve connection info: explicit url/file beats the profile dir
    self._setup_profile_dir(self.profile, profile_dir, ipython_dir)
    if self._cd is not None:
        if url_or_file is None:
            url_or_file = pjoin(self._cd.security_dir, 'ipcontroller-client.json')
    if url_or_file is None:
        raise ValueError(
            "I can't find enough information to connect to a hub!"
            " Please specify at least one of url_or_file or profile."
        )

    if not util.is_url(url_or_file):
        # it's not a url, try for a file
        if not os.path.exists(url_or_file):
            # also look relative to the profile's security dir
            if self._cd:
                url_or_file = os.path.join(self._cd.security_dir, url_or_file)
            if not os.path.exists(url_or_file):
                raise IOError("Connection file not found: %r" % url_or_file)
        with open(url_or_file) as f:
            cfg = json.loads(f.read())
    else:
        cfg = {'url':url_or_file}

    # sync defaults from args, json:
    # explicit constructor args take precedence over the JSON file
    if sshserver:
        cfg['ssh'] = sshserver
    if exec_key:
        cfg['exec_key'] = exec_key
    exec_key = cfg['exec_key']
    location = cfg.setdefault('location', None)
    cfg['url'] = util.disambiguate_url(cfg['url'], location)
    url = cfg['url']
    proto,addr,port = util.split_url(url)
    if location is not None and addr == '127.0.0.1':
        # location specified, and connection is expected to be local
        if location not in LOCAL_IPS and not sshserver:
            # load ssh from JSON *only* if the controller is not on
            # this machine
            sshserver=cfg['ssh']
        if location not in LOCAL_IPS and not sshserver:
            # warn if no ssh specified, but SSH is probably needed
            # This is only a warning, because the most likely cause
            # is a local Controller on a laptop whose IP is dynamic
            warnings.warn("""
Controller appears to be listening on localhost, but not on this machine.
If this is true, you should specify Client(...,sshserver='you@%s')
or instruct your controller to listen on an external IP."""%location,
            RuntimeWarning)
    elif not sshserver:
        # otherwise sync with cfg
        sshserver = cfg['ssh']

    self._config = cfg

    # any of the ssh args implies tunneling
    self._ssh = bool(sshserver or sshkey or password)
    if self._ssh and sshserver is None:
        # default to ssh via localhost
        sshserver = url.split('://')[1].split(':')[0]
    if self._ssh and password is None:
        if tunnel.try_passwordless_ssh(sshserver, sshkey, paramiko):
            password=False
        else:
            password = getpass("SSH Password for %s: "%sshserver)
    ssh_kwargs = dict(keyfile=sshkey, password=password, paramiko=paramiko)

    # configure and construct the session
    if exec_key is not None:
        if os.path.isfile(exec_key):
            # exec_key is a path to a key file
            extra_args['keyfile'] = exec_key
        else:
            # exec_key is the key itself; Session wants bytes
            exec_key = cast_bytes(exec_key)
            extra_args['key'] = exec_key
    self.session = Session(**extra_args)

    self._query_socket = self._context.socket(zmq.DEALER)
    self._query_socket.setsockopt(zmq.IDENTITY, self.session.bsession)
    if self._ssh:
        tunnel.tunnel_connection(self._query_socket, url, sshserver, **ssh_kwargs)
    else:
        self._query_socket.connect(url)

    self.session.debug = self.debug

    # dispatch tables for incoming messages, keyed by msg_type
    self._notification_handlers = {'registration_notification' : self._register_engine,
                                'unregistration_notification' : self._unregister_engine,
                                'shutdown_notification' : lambda msg: self.close(),
                                }
    self._queue_handlers = {'execute_reply' : self._handle_execute_reply,
                            'apply_reply' : self._handle_apply_reply}
    self._connect(sshserver, ssh_kwargs, timeout)
452 479
def __del__(self):
    """cleanup sockets, but _not_ context."""
    # the Context may be the shared zmq.Context.instance(), so it is never
    # terminated here; close() is idempotent via self._closed
    self.close()
456 483
def _setup_profile_dir(self, profile, profile_dir, ipython_dir):
    """Locate the profile directory, storing it (or None) on self._cd."""
    if ipython_dir is None:
        ipython_dir = get_ipython_dir()
    self._cd = None
    if profile_dir is not None:
        # an explicit directory wins over a profile name
        try:
            self._cd = ProfileDir.find_profile_dir(profile_dir)
        except ProfileDirError:
            pass
    elif profile is not None:
        try:
            self._cd = ProfileDir.find_profile_dir_by_name(
                ipython_dir, profile)
        except ProfileDirError:
            pass
474 501
def _update_engines(self, engines):
    """Update our engines dict and _ids from a dict of the form: {id:uuid}.

    Parameters
    ----------
    engines : dict
        mapping of engine id (int, or str of an int as sent over the
        wire) to engine uuid.
    """
    # .items() works on both Python 2 and 3; .iteritems() was py2-only
    for k, v in engines.items():
        eid = int(k)
        self._engines[eid] = v
        self._ids.append(eid)
    self._ids = sorted(self._ids)
    # pure ZMQ scheduling requires a contiguous 0..n-1 id range; a gap
    # means an engine died, so task farming must be disabled.
    # list(range(...)) keeps the comparison correct on py3 as well
    # (py2 range() already returned a list, so behavior is unchanged).
    if sorted(self._engines.keys()) != list(range(len(self._engines))) and \
                    self._task_scheme == 'pure' and self._task_socket:
        self._stop_scheduling_tasks()
485 512
def _stop_scheduling_tasks(self):
    """Stop scheduling tasks because an engine has been unregistered
    from a pure ZMQ scheduler.
    """
    self._task_socket.close()
    self._task_socket = None
    msg = ("An engine has been unregistered, and we are using pure "
           "ZMQ task scheduling. Task farming will be disabled.")
    if self.outstanding:
        msg = msg + (" If you were running tasks when this happened, "
                     "some `outstanding` msg_ids may never resolve.")
    warnings.warn(msg, RuntimeWarning)
498 525
def _build_targets(self, targets):
    """Turn valid target IDs or 'all' into two lists:
    (int_ids, uuids).

    Accepts None (all engines), 'all', a single int (negative ints index
    from the end), a slice, or a tuple/list/xrange of ints.
    Raises NoEnginesRegistered when no engines are connected.
    """
    if not self._ids:
        # flush notification socket if no engines yet, just in case
        if not self.ids:
            raise error.NoEnginesRegistered("Can't build targets without any engines")

    if targets is None:
        targets = self._ids
    elif isinstance(targets, basestring):
        if targets.lower() == 'all':
            targets = self._ids
        else:
            raise TypeError("%r not valid str target, must be 'all'"%(targets))
    elif isinstance(targets, int):
        if targets < 0:
            # negative index counts from the end, like list indexing
            targets = self.ids[targets]
        if targets not in self._ids:
            raise IndexError("No such engine: %i"%targets)
        targets = [targets]

    if isinstance(targets, slice):
        indices = range(len(self._ids))[targets]
        ids = self.ids
        targets = [ ids[i] for i in indices ]

    if not isinstance(targets, (tuple, list, xrange)):
        raise TypeError("targets by int/slice/collection of ints only, not %s"%(type(targets)))

    # engine identities (uuids) must be bytes for use as zmq routing ids
    return [cast_bytes(self._engines[t]) for t in targets], list(targets)
531 558
def _connect(self, sshserver, ssh_kwargs, timeout):
    """setup all our socket connections to the cluster. This is called from
    __init__.

    Sends a connection_request to the Hub over the query socket, waits up
    to `timeout` seconds for the reply, then connects one socket per
    service the Hub advertised (mux, task, notification, control, iopub).
    Raises TimeoutError if the Hub does not reply in time.
    """

    # Maybe allow reconnecting?
    if self._connected:
        return
    self._connected=True

    def connect_socket(s, url):
        # resolve ambiguous (0.0.0.0/'*') urls, and tunnel when using ssh
        url = util.disambiguate_url(url, self._config['location'])
        if self._ssh:
            return tunnel.tunnel_connection(s, url, sshserver, **ssh_kwargs)
        else:
            return s.connect(url)

    self.session.send(self._query_socket, 'connection_request')
    # use Poller because zmq.select has wrong units in pyzmq 2.1.7
    poller = zmq.Poller()
    poller.register(self._query_socket, zmq.POLLIN)
    # poll expects milliseconds, timeout is seconds
    evts = poller.poll(timeout*1000)
    if not evts:
        raise error.TimeoutError("Hub connection request timed out")
    idents,msg = self.session.recv(self._query_socket,mode=0)
    if self.debug:
        pprint(msg)
    msg = Message(msg)
    content = msg.content
    self._config['registration'] = dict(content)
    if content.status == 'ok':
        # all DEALER sockets share our session identity
        ident = self.session.bsession
        if content.mux:
            self._mux_socket = self._context.socket(zmq.DEALER)
            self._mux_socket.setsockopt(zmq.IDENTITY, ident)
            connect_socket(self._mux_socket, content.mux)
        if content.task:
            self._task_scheme, task_addr = content.task
            self._task_socket = self._context.socket(zmq.DEALER)
            self._task_socket.setsockopt(zmq.IDENTITY, ident)
            connect_socket(self._task_socket, task_addr)
        if content.notification:
            # subscribe to all engine (un)registration notifications
            self._notification_socket = self._context.socket(zmq.SUB)
            connect_socket(self._notification_socket, content.notification)
            self._notification_socket.setsockopt(zmq.SUBSCRIBE, b'')
        # if content.query:
        #     self._query_socket = self._context.socket(zmq.DEALER)
        #     self._query_socket.setsockopt(zmq.IDENTITY, self.session.bsession)
        #     connect_socket(self._query_socket, content.query)
        if content.control:
            self._control_socket = self._context.socket(zmq.DEALER)
            self._control_socket.setsockopt(zmq.IDENTITY, ident)
            connect_socket(self._control_socket, content.control)
        if content.iopub:
            self._iopub_socket = self._context.socket(zmq.SUB)
            self._iopub_socket.setsockopt(zmq.SUBSCRIBE, b'')
            self._iopub_socket.setsockopt(zmq.IDENTITY, ident)
            connect_socket(self._iopub_socket, content.iopub)
        self._update_engines(dict(content.engines))
    else:
        # allow another attempt after a failed registration
        self._connected = False
        raise Exception("Failed to connect!")
594 621
595 622 #--------------------------------------------------------------------------
596 623 # handlers and callbacks for incoming messages
597 624 #--------------------------------------------------------------------------
598 625
def _unwrap_exception(self, content):
    """unwrap exception, and remap engine_id to int."""
    exc = error.unwrap_exception(content)
    # print exc.traceback
    info = exc.engine_info
    if info:
        # engine_info carries the engine uuid; add the int id as well
        info['engine_id'] = self._engines[info['engine_uuid']]
    return exc
608 635
def _extract_metadata(self, header, parent, content):
    """Build a metadata dict for a reply from its header/parent/content."""
    md = {
        'msg_id'      : parent['msg_id'],
        'received'    : datetime.now(),
        'engine_uuid' : header.get('engine', None),
        'follow'      : parent.get('follow', []),
        'after'       : parent.get('after', []),
        'status'      : content['status'],
    }

    engine_uuid = md['engine_uuid']
    if engine_uuid is not None:
        md['engine_id'] = self._engines.get(engine_uuid, None)

    # copy timestamps over only when present in the source dicts
    for src, src_key, dst_key in ((parent, 'date', 'submitted'),
                                  (header, 'started', 'started'),
                                  (header, 'date', 'completed')):
        if src_key in src:
            md[dst_key] = src[src_key]
    return md
628 655
def _register_engine(self, msg):
    """Register a new engine, and update our connection info."""
    content = msg['content']
    self._update_engines({content['id']: content['queue']})
635 662
def _unregister_engine(self, msg):
    """Unregister an engine that has died."""
    eid = int(msg['content']['id'])
    if eid in self._ids:
        self._ids.remove(eid)
        uuid = self._engines.pop(eid)
        # requeue/error-out anything that engine still owed us
        self._handle_stranded_msgs(eid, uuid)

    # pure ZMQ scheduling cannot survive a gap in the engine id range
    if self._task_socket and self._task_scheme == 'pure':
        self._stop_scheduling_tasks()
648 675
def _handle_stranded_msgs(self, eid, uuid):
    """Handle messages known to be on an engine when the engine unregisters.

    It is possible that this will fire prematurely - that is, an engine will
    go down after completing a result, and the client will be notified
    of the unregistration and later receive the successful result.

    Parameters
    ----------
    eid : int
        id of the dead engine
    uuid : str
        uuid of the dead engine (key into _outstanding_dict)
    """

    outstanding = self._outstanding_dict[uuid]

    for msg_id in list(outstanding):
        if msg_id in self.results:
            # we already have this result; nothing to do
            continue
        try:
            raise error.EngineError("Engine %r died while running task %r"%(eid, msg_id))
        except:
            # bare except is deliberate: wrap_exception() serializes the
            # exception we just raised from sys.exc_info()
            content = error.wrap_exception()
        # build a fake message:
        # shaped like an apply_reply so _handle_apply_reply can record it
        parent = {}
        header = {}
        parent['msg_id'] = msg_id
        header['engine'] = uuid
        header['date'] = datetime.now()
        msg = dict(parent_header=parent, header=header, content=content)
        self._handle_apply_reply(msg)
675 702
def _handle_execute_reply(self, msg):
    """Save the reply to an execute_request into our results.

    execute messages are never actually used. apply is used instead.
    """

    parent = msg['parent_header']
    msg_id = parent['msg_id']
    if msg_id not in self.outstanding:
        if msg_id in self.history:
            print ("got stale result: %s"%msg_id)
        else:
            print ("got unknown result: %s"%msg_id)
    else:
        self.outstanding.remove(msg_id)

    content = msg['content']
    header = msg['header']

    # construct metadata:
    # self.metadata is a defaultdict, so this creates a Metadata if absent
    md = self.metadata[msg_id]
    md.update(self._extract_metadata(header, parent, content))
    # is this redundant?
    self.metadata[msg_id] = md

    # keep the per-engine outstanding set in sync too
    e_outstanding = self._outstanding_dict[md['engine_uuid']]
    if msg_id in e_outstanding:
        e_outstanding.remove(msg_id)

    # construct result:
    if content['status'] == 'ok':
        self.results[msg_id] = ExecuteReply(msg_id, content, md)
    elif content['status'] == 'aborted':
        self.results[msg_id] = error.TaskAborted(msg_id)
    elif content['status'] == 'resubmitted':
        # TODO: handle resubmission
        pass
    else:
        # remote exception: the unwrapped error object becomes the result
        self.results[msg_id] = self._unwrap_exception(content)
715 742
def _handle_apply_reply(self, msg):
    """Save the reply to an apply_request into our results.

    Mirrors _handle_execute_reply, but deserializes the result from the
    message buffers on success.
    """
    parent = msg['parent_header']
    msg_id = parent['msg_id']
    if msg_id not in self.outstanding:
        if msg_id in self.history:
            print ("got stale result: %s"%msg_id)
            print self.results[msg_id]
            print msg
        else:
            print ("got unknown result: %s"%msg_id)
    else:
        self.outstanding.remove(msg_id)
    content = msg['content']
    header = msg['header']

    # construct metadata:
    # self.metadata is a defaultdict, so this creates a Metadata if absent
    md = self.metadata[msg_id]
    md.update(self._extract_metadata(header, parent, content))
    # is this redundant?
    self.metadata[msg_id] = md

    # keep the per-engine outstanding set in sync too
    e_outstanding = self._outstanding_dict[md['engine_uuid']]
    if msg_id in e_outstanding:
        e_outstanding.remove(msg_id)

    # construct result:
    if content['status'] == 'ok':
        self.results[msg_id] = util.unserialize_object(msg['buffers'])[0]
    elif content['status'] == 'aborted':
        self.results[msg_id] = error.TaskAborted(msg_id)
    elif content['status'] == 'resubmitted':
        # TODO: handle resubmission
        pass
    else:
        # remote exception: the unwrapped error object becomes the result
        self.results[msg_id] = self._unwrap_exception(content)
752 779
def _flush_notifications(self):
    """Flush notifications of engine registrations waiting
    in ZMQ queue.

    Dispatches each queued notification to its handler; raises on an
    unrecognized msg_type.
    """
    idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
    while msg is not None:
        if self.debug:
            pprint(msg)
        msg_type = msg['header']['msg_type']
        handler = self._notification_handlers.get(msg_type, None)
        if handler is None:
            # use the already-extracted msg_type: `msg` is a plain dict
            # here, so the old `msg.msg_type` attribute access raised
            # AttributeError instead of this intended error
            raise Exception("Unhandled message type: %s"%msg_type)
        else:
            handler(msg)
        idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
767 794
def _flush_results(self, sock):
    """Flush task or queue results waiting in ZMQ queue.

    Dispatches each queued reply to its handler; raises on an
    unrecognized msg_type.
    """
    idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
    while msg is not None:
        if self.debug:
            pprint(msg)
        msg_type = msg['header']['msg_type']
        handler = self._queue_handlers.get(msg_type, None)
        if handler is None:
            # use the already-extracted msg_type: `msg` is a plain dict
            # here, so the old `msg.msg_type` attribute access raised
            # AttributeError instead of this intended error
            raise Exception("Unhandled message type: %s"%msg_type)
        else:
            handler(msg)
        idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
781 808
def _flush_control(self, sock):
    """Flush replies from the control channel waiting
    in the ZMQ queue.

    Currently: ignore them."""
    if self._ignored_control_replies <= 0:
        return
    while True:
        idents, msg = self.session.recv(sock, mode=zmq.NOBLOCK)
        if msg is None:
            break
        self._ignored_control_replies -= 1
        if self.debug:
            pprint(msg)
795 822
def _flush_ignored_control(self):
    """flush ignored control replies"""
    # each non-blocking control call left one reply queued; drain them all
    while self._ignored_control_replies > 0:
        self.session.recv(self._control_socket)
        self._ignored_control_replies = self._ignored_control_replies - 1
801 828
def _flush_ignored_hub_replies(self):
    """Drop any Hub replies queued on the query socket."""
    msg = True
    while msg is not None:
        ident, msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
806 833
def _flush_iopub(self, sock):
    """Flush replies from the iopub channel waiting
    in the ZMQ queue.

    Accumulates stream/pyin/pyout/pyerr/display_data content into the
    per-msg_id metadata dict.
    """
    idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
    while msg is not None:
        if self.debug:
            pprint(msg)
        parent = msg['parent_header']
        # ignore IOPub messages with no parent.
        # Caused by print statements or warnings from before the first execution.
        # NOTE: handled with `if parent:` rather than `continue`, because a
        # `continue` skipped the recv below and spun forever on the same msg.
        if parent:
            msg_id = parent['msg_id']
            content = msg['content']
            msg_type = msg['header']['msg_type']

            # init metadata (defaultdict creates a Metadata if absent):
            md = self.metadata[msg_id]

            if msg_type == 'stream':
                # append to accumulated stdout/stderr
                name = content['name']
                s = md[name] or ''
                md[name] = s + content['data']
            elif msg_type == 'pyerr':
                md.update({'pyerr' : self._unwrap_exception(content)})
            elif msg_type == 'pyin':
                md.update({'pyin' : content['code']})
            elif msg_type == 'display_data':
                md['outputs'].append(content)
            elif msg_type == 'pyout':
                md['pyout'] = content
            else:
                # unhandled msg_type (status, etc.)
                pass

            # redundant?
            self.metadata[msg_id] = md

        idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
848 875
849 876 #--------------------------------------------------------------------------
850 877 # len, getitem
851 878 #--------------------------------------------------------------------------
852 879
def __len__(self):
    """len(client) returns # of engines."""
    engine_ids = self.ids
    return len(engine_ids)
856 883
def __getitem__(self, key):
    """index access returns DirectView multiplexer objects

    Must be int, slice, or list/tuple/xrange of ints"""
    if isinstance(key, (int, slice, tuple, list, xrange)):
        return self.direct_view(key)
    raise TypeError("key by int/slice/iterable of ints only, not %s"%(type(key)))
865 892
866 893 #--------------------------------------------------------------------------
867 894 # Begin public methods
868 895 #--------------------------------------------------------------------------
869 896
@property
def ids(self):
    """Always up-to-date ids property."""
    self._flush_notifications()
    # hand back a copy so callers cannot mutate our internal list
    return self._ids[:]
876 903
def close(self):
    """Close all our sockets (idempotent); the shared Context is untouched."""
    if self._closed:
        return
    self.stop_spin_thread()
    # every socket attribute follows the '*socket' naming convention
    socket_names = [n for n in dir(self) if n.endswith('socket')]
    for sock in (getattr(self, n) for n in socket_names):
        if isinstance(sock, zmq.Socket) and not sock.closed:
            sock.close()
    self._closed = True
886 913
def _spin_every(self, interval=1):
    """target func for use in spin_thread"""
    while not self._stop_spinning.is_set():
        time.sleep(interval)
        self.spin()
894 921
def spin_thread(self, interval=1):
    """call Client.spin() in a background thread on some regular interval

    This helps ensure that messages don't pile up too much in the zmq queue
    while you are working on other things, or just leaving an idle terminal.

    It also helps limit potential padding of the `received` timestamp
    on AsyncResult objects, used for timings.

    Parameters
    ----------

    interval : float, optional
        The interval on which to spin the client in the background thread
        (simply passed to time.sleep).

    Notes
    -----

    For precision timing, you may want to use this method to put a bound
    on the jitter (in seconds) in `received` timestamps used
    in AsyncResult.wall_time.

    """
    # tear down any previous spinner before starting a new one
    if self._spin_thread is not None:
        self.stop_spin_thread()
    self._stop_spinning.clear()
    self._spin_thread = worker = Thread(target=self._spin_every, args=(interval,))
    # daemonize so a forgotten spin thread never blocks interpreter exit
    worker.daemon = True
    worker.start()
925 952
def stop_spin_thread(self):
    """stop background spin_thread, if any"""
    if self._spin_thread is None:
        return
    self._stop_spinning.set()
    self._spin_thread.join()
    self._spin_thread = None
932 959
def spin(self):
    """Flush any registration notifications and execution results
    waiting in the ZMQ queue.
    """
    if self._notification_socket:
        self._flush_notifications()
    if self._iopub_socket:
        self._flush_iopub(self._iopub_socket)
    # both execution channels share the same result handler
    for result_sock in (self._mux_socket, self._task_socket):
        if result_sock:
            self._flush_results(result_sock)
    if self._control_socket:
        self._flush_control(self._control_socket)
    if self._query_socket:
        self._flush_ignored_hub_replies()
949 976
def wait(self, jobs=None, timeout=-1):
    """waits on one or more `jobs`, for up to `timeout` seconds.

    Parameters
    ----------

    jobs : int, str, or list of ints and/or strs, or one or more AsyncResult objects
        ints are indices to self.history
        strs are msg_ids
        default: wait on all outstanding messages
    timeout : float
        a time in seconds, after which to give up.
        default is -1, which means no timeout

    Returns
    -------

    True : when all msg_ids are done
    False : timeout reached, some msg_ids still outstanding
    """
    tic = time.time()
    if jobs is None:
        theids = self.outstanding
    else:
        # normalize `jobs` into a set of msg_id strings
        if isinstance(jobs, (int, basestring, AsyncResult)):
            jobs = [jobs]
        theids = set()
        for job in jobs:
            if isinstance(job, int):
                # index access
                job = self.history[job]
            elif isinstance(job, AsyncResult):
                # NOTE(review): relies on eager map() (Python 2); under
                # Python 3 this would need a loop or list(map(...))
                map(theids.add, job.msg_ids)
                continue
            theids.add(job)
    if not theids.intersection(self.outstanding):
        return True
    self.spin()
    # poll with a 1ms sleep between spins until done or timed out
    while theids.intersection(self.outstanding):
        if timeout >= 0 and ( time.time()-tic ) > timeout:
            break
        time.sleep(1e-3)
        self.spin()
    return len(theids.intersection(self.outstanding)) == 0
994 1021
995 1022 #--------------------------------------------------------------------------
996 1023 # Control methods
997 1024 #--------------------------------------------------------------------------
998 1025
@spin_first
def clear(self, targets=None, block=None):
    """Clear the namespace in target(s).

    Parameters
    ----------
    targets : int/str/list of ints/strs, optional
        the engines whose namespaces should be cleared (default: all)
    block : bool, optional
        whether to wait for the replies (default: self.block)

    Raises the remote error if any request failed (blocking mode only).
    """
    block = self.block if block is None else block
    targets = self._build_targets(targets)[0]
    for t in targets:
        self.session.send(self._control_socket, 'clear_request', content={}, ident=t)
    error = False
    if block:
        self._flush_ignored_control()
        # collect one reply per target; remember the last failure
        for i in range(len(targets)):
            idents,msg = self.session.recv(self._control_socket,0)
            if self.debug:
                pprint(msg)
            if msg['content']['status'] != 'ok':
                error = self._unwrap_exception(msg['content'])
    else:
        # non-blocking: replies will be drained later by _flush_control
        self._ignored_control_replies += len(targets)
    if error:
        raise error
1019 1046
1020 1047
1021 1048 @spin_first
1022 1049 def abort(self, jobs=None, targets=None, block=None):
1023 1050 """Abort specific jobs from the execution queues of target(s).
1024 1051
1025 1052 This is a mechanism to prevent jobs that have already been submitted
1026 1053 from executing.
1027 1054
1028 1055 Parameters
1029 1056 ----------
1030 1057
1031 1058 jobs : msg_id, list of msg_ids, or AsyncResult
1032 1059 The jobs to be aborted
1033 1060
1034 1061 If unspecified/None: abort all outstanding jobs.
1035 1062
1036 1063 """
1037 1064 block = self.block if block is None else block
1038 1065 jobs = jobs if jobs is not None else list(self.outstanding)
1039 1066 targets = self._build_targets(targets)[0]
1040 1067
1041 1068 msg_ids = []
1042 1069 if isinstance(jobs, (basestring,AsyncResult)):
1043 1070 jobs = [jobs]
1044 1071 bad_ids = filter(lambda obj: not isinstance(obj, (basestring, AsyncResult)), jobs)
1045 1072 if bad_ids:
1046 1073 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1047 1074 for j in jobs:
1048 1075 if isinstance(j, AsyncResult):
1049 1076 msg_ids.extend(j.msg_ids)
1050 1077 else:
1051 1078 msg_ids.append(j)
1052 1079 content = dict(msg_ids=msg_ids)
1053 1080 for t in targets:
1054 1081 self.session.send(self._control_socket, 'abort_request',
1055 1082 content=content, ident=t)
1056 1083 error = False
1057 1084 if block:
1058 1085 self._flush_ignored_control()
1059 1086 for i in range(len(targets)):
1060 1087 idents,msg = self.session.recv(self._control_socket,0)
1061 1088 if self.debug:
1062 1089 pprint(msg)
1063 1090 if msg['content']['status'] != 'ok':
1064 1091 error = self._unwrap_exception(msg['content'])
1065 1092 else:
1066 1093 self._ignored_control_replies += len(targets)
1067 1094 if error:
1068 1095 raise error
1069 1096
    @spin_first
    def shutdown(self, targets=None, restart=False, hub=False, block=None):
        """Terminates one or more engine processes, optionally including the hub.

        Parameters
        ----------
        targets : int/str/list of ints/strs [default: None]
            The engines to shut down.  Forced to 'all' when ``hub`` is True.
        restart : bool [default: False]
            Passed through to the engines in the shutdown request.
        hub : bool [default: False]
            Also shut down the Hub itself (implies waiting for engine replies).
        block : bool [default: self.block]
            Whether to wait for (and raise on) the replies.
        """
        block = self.block if block is None else block
        if hub:
            # the hub cannot outlive its engines; take everything down
            targets = 'all'
        targets = self._build_targets(targets)[0]
        for t in targets:
            self.session.send(self._control_socket, 'shutdown_request',
                        content={'restart':restart},ident=t)
        error = False
        # must collect replies when shutting down the hub, even if not blocking
        if block or hub:
            self._flush_ignored_control()
            for i in range(len(targets)):
                idents,msg = self.session.recv(self._control_socket, 0)
                if self.debug:
                    pprint(msg)
                if msg['content']['status'] != 'ok':
                    error = self._unwrap_exception(msg['content'])
        else:
            self._ignored_control_replies += len(targets)

        if hub:
            # give the engines a moment to go down before asking the hub to exit
            time.sleep(0.25)
            self.session.send(self._query_socket, 'shutdown_request')
            idents,msg = self.session.recv(self._query_socket, 0)
            if self.debug:
                pprint(msg)
            if msg['content']['status'] != 'ok':
                error = self._unwrap_exception(msg['content'])

        if error:
            raise error
1103 1130
1104 1131 #--------------------------------------------------------------------------
1105 1132 # Execution related methods
1106 1133 #--------------------------------------------------------------------------
1107 1134
1108 1135 def _maybe_raise(self, result):
1109 1136 """wrapper for maybe raising an exception if apply failed."""
1110 1137 if isinstance(result, error.RemoteError):
1111 1138 raise result
1112 1139
1113 1140 return result
1114 1141
    def send_apply_request(self, socket, f, args=None, kwargs=None, subheader=None, track=False,
                            ident=None):
        """construct and send an apply message via a socket.

        This is the principal method with which all engine execution is performed by views.

        Parameters
        ----------
        socket : zmq socket
            The socket (mux or task) on which to send the request.
        f : callable or Reference
            The function to be applied remotely.
        args : tuple or list [default: ()]
        kwargs : dict [default: {}]
        subheader : dict [default: {}]
            Extra header entries (e.g. scheduler hints).
        track : bool [default: False]
            Whether to ask zmq to track delivery of the message.
        ident : bytes or list of bytes [default: None]
            Routing identity, when targeting a specific engine.

        Returns
        -------
        The full sent message dict, whose header carries the new msg_id.
        """

        if self._closed:
            raise RuntimeError("Client cannot be used after its sockets have been closed")

        # defaults:
        args = args if args is not None else []
        kwargs = kwargs if kwargs is not None else {}
        subheader = subheader if subheader is not None else {}

        # validate arguments
        if not callable(f) and not isinstance(f, Reference):
            raise TypeError("f must be callable, not %s"%type(f))
        if not isinstance(args, (tuple, list)):
            raise TypeError("args must be tuple or list, not %s"%type(args))
        if not isinstance(kwargs, dict):
            raise TypeError("kwargs must be dict, not %s"%type(kwargs))
        if not isinstance(subheader, dict):
            raise TypeError("subheader must be dict, not %s"%type(subheader))

        # serialize f, args, kwargs into zmq message buffers
        bufs = util.pack_apply_message(f,args,kwargs)

        msg = self.session.send(socket, "apply_request", buffers=bufs, ident=ident,
                            subheader=subheader, track=track)

        msg_id = msg['header']['msg_id']
        self.outstanding.add(msg_id)
        if ident:
            # possibly routed to a specific engine
            if isinstance(ident, list):
                ident = ident[-1]
            if ident in self._engines.values():
                # save for later, in case of engine death
                self._outstanding_dict[ident].add(msg_id)
        self.history.append(msg_id)
        self.metadata[msg_id]['submitted'] = datetime.now()

        return msg
1158 1185
1159 1186 def send_execute_request(self, socket, code, silent=True, subheader=None, ident=None):
1160 1187 """construct and send an execute request via a socket.
1161 1188
1162 1189 """
1163 1190
1164 1191 if self._closed:
1165 1192 raise RuntimeError("Client cannot be used after its sockets have been closed")
1166 1193
1167 1194 # defaults:
1168 1195 subheader = subheader if subheader is not None else {}
1169 1196
1170 1197 # validate arguments
1171 1198 if not isinstance(code, basestring):
1172 1199 raise TypeError("code must be text, not %s" % type(code))
1173 1200 if not isinstance(subheader, dict):
1174 1201 raise TypeError("subheader must be dict, not %s" % type(subheader))
1175 1202
1176 1203 content = dict(code=code, silent=bool(silent), user_variables=[], user_expressions={})
1177 1204
1178 1205
1179 1206 msg = self.session.send(socket, "execute_request", content=content, ident=ident,
1180 1207 subheader=subheader)
1181 1208
1182 1209 msg_id = msg['header']['msg_id']
1183 1210 self.outstanding.add(msg_id)
1184 1211 if ident:
1185 1212 # possibly routed to a specific engine
1186 1213 if isinstance(ident, list):
1187 1214 ident = ident[-1]
1188 1215 if ident in self._engines.values():
1189 1216 # save for later, in case of engine death
1190 1217 self._outstanding_dict[ident].add(msg_id)
1191 1218 self.history.append(msg_id)
1192 1219 self.metadata[msg_id]['submitted'] = datetime.now()
1193 1220
1194 1221 return msg
1195 1222
1196 1223 #--------------------------------------------------------------------------
1197 1224 # construct a View object
1198 1225 #--------------------------------------------------------------------------
1199 1226
    def load_balanced_view(self, targets=None):
        """construct a LoadBalancedView object.

        If no arguments are specified, create a LoadBalancedView
        using all engines.

        Parameters
        ----------

        targets: list,slice,int,etc. [default: use all engines]
            The subset of engines across which to load-balance
        """
        # 'all' means "whatever engines exist at each execution",
        # which is the scheduler's behavior when no targets are given
        if targets == 'all':
            targets = None
        if targets is not None:
            targets = self._build_targets(targets)[1]
        return LoadBalancedView(client=self, socket=self._task_socket, targets=targets)
1217 1244
1218 1245 def direct_view(self, targets='all'):
1219 1246 """construct a DirectView object.
1220 1247
1221 1248 If no targets are specified, create a DirectView using all engines.
1222 1249
1223 1250 rc.direct_view('all') is distinguished from rc[:] in that 'all' will
1224 1251 evaluate the target engines at each execution, whereas rc[:] will connect to
1225 1252 all *current* engines, and that list will not change.
1226 1253
1227 1254 That is, 'all' will always use all engines, whereas rc[:] will not use
1228 1255 engines added after the DirectView is constructed.
1229 1256
1230 1257 Parameters
1231 1258 ----------
1232 1259
1233 1260 targets: list,slice,int,etc. [default: use all engines]
1234 1261 The engines to use for the View
1235 1262 """
1236 1263 single = isinstance(targets, int)
1237 1264 # allow 'all' to be lazily evaluated at each execution
1238 1265 if targets != 'all':
1239 1266 targets = self._build_targets(targets)[1]
1240 1267 if single:
1241 1268 targets = targets[0]
1242 1269 return DirectView(client=self, socket=self._mux_socket, targets=targets)
1243 1270
1244 1271 #--------------------------------------------------------------------------
1245 1272 # Query methods
1246 1273 #--------------------------------------------------------------------------
1247 1274
1248 1275 @spin_first
1249 1276 def get_result(self, indices_or_msg_ids=None, block=None):
1250 1277 """Retrieve a result by msg_id or history index, wrapped in an AsyncResult object.
1251 1278
1252 1279 If the client already has the results, no request to the Hub will be made.
1253 1280
1254 1281 This is a convenient way to construct AsyncResult objects, which are wrappers
1255 1282 that include metadata about execution, and allow for awaiting results that
1256 1283 were not submitted by this Client.
1257 1284
1258 1285 It can also be a convenient way to retrieve the metadata associated with
1259 1286 blocking execution, since it always retrieves
1260 1287
1261 1288 Examples
1262 1289 --------
1263 1290 ::
1264 1291
1265 1292 In [10]: r = client.apply()
1266 1293
1267 1294 Parameters
1268 1295 ----------
1269 1296
1270 1297 indices_or_msg_ids : integer history index, str msg_id, or list of either
1271 1298 The indices or msg_ids of indices to be retrieved
1272 1299
1273 1300 block : bool
1274 1301 Whether to wait for the result to be done
1275 1302
1276 1303 Returns
1277 1304 -------
1278 1305
1279 1306 AsyncResult
1280 1307 A single AsyncResult object will always be returned.
1281 1308
1282 1309 AsyncHubResult
1283 1310 A subclass of AsyncResult that retrieves results from the Hub
1284 1311
1285 1312 """
1286 1313 block = self.block if block is None else block
1287 1314 if indices_or_msg_ids is None:
1288 1315 indices_or_msg_ids = -1
1289 1316
1290 1317 if not isinstance(indices_or_msg_ids, (list,tuple)):
1291 1318 indices_or_msg_ids = [indices_or_msg_ids]
1292 1319
1293 1320 theids = []
1294 1321 for id in indices_or_msg_ids:
1295 1322 if isinstance(id, int):
1296 1323 id = self.history[id]
1297 1324 if not isinstance(id, basestring):
1298 1325 raise TypeError("indices must be str or int, not %r"%id)
1299 1326 theids.append(id)
1300 1327
1301 1328 local_ids = filter(lambda msg_id: msg_id in self.history or msg_id in self.results, theids)
1302 1329 remote_ids = filter(lambda msg_id: msg_id not in local_ids, theids)
1303 1330
1304 1331 if remote_ids:
1305 1332 ar = AsyncHubResult(self, msg_ids=theids)
1306 1333 else:
1307 1334 ar = AsyncResult(self, msg_ids=theids)
1308 1335
1309 1336 if block:
1310 1337 ar.wait()
1311 1338
1312 1339 return ar
1313 1340
    @spin_first
    def resubmit(self, indices_or_msg_ids=None, subheader=None, block=None):
        """Resubmit one or more tasks.

        in-flight tasks may not be resubmitted.

        Parameters
        ----------

        indices_or_msg_ids : integer history index, str msg_id, or list of either
            The indices or msg_ids of indices to be retrieved

        block : bool
            Whether to wait for the result to be done

        Returns
        -------

        AsyncHubResult
            A subclass of AsyncResult that retrieves results from the Hub

        """
        block = self.block if block is None else block
        if indices_or_msg_ids is None:
            # default: resubmit the most recent submission
            indices_or_msg_ids = -1

        if not isinstance(indices_or_msg_ids, (list,tuple)):
            indices_or_msg_ids = [indices_or_msg_ids]

        # normalize history indices into msg_id strings
        theids = []
        for id in indices_or_msg_ids:
            if isinstance(id, int):
                id = self.history[id]
            if not isinstance(id, basestring):
                raise TypeError("indices must be str or int, not %r"%id)
            theids.append(id)

        content = dict(msg_ids = theids)

        self.session.send(self._query_socket, 'resubmit_request', content)

        # block until the reply is readable, then receive it without blocking
        zmq.select([self._query_socket], [], [])
        idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
        if self.debug:
            pprint(msg)
        content = msg['content']
        if content['status'] != 'ok':
            raise self._unwrap_exception(content)
        # the hub assigns fresh msg_ids to resubmitted tasks; map old -> new
        mapping = content['resubmitted']
        new_ids = [ mapping[msg_id] for msg_id in theids ]

        ar = AsyncHubResult(self, msg_ids=new_ids)

        if block:
            ar.wait()

        return ar
1371 1398
1372 1399 @spin_first
1373 1400 def result_status(self, msg_ids, status_only=True):
1374 1401 """Check on the status of the result(s) of the apply request with `msg_ids`.
1375 1402
1376 1403 If status_only is False, then the actual results will be retrieved, else
1377 1404 only the status of the results will be checked.
1378 1405
1379 1406 Parameters
1380 1407 ----------
1381 1408
1382 1409 msg_ids : list of msg_ids
1383 1410 if int:
1384 1411 Passed as index to self.history for convenience.
1385 1412 status_only : bool (default: True)
1386 1413 if False:
1387 1414 Retrieve the actual results of completed tasks.
1388 1415
1389 1416 Returns
1390 1417 -------
1391 1418
1392 1419 results : dict
1393 1420 There will always be the keys 'pending' and 'completed', which will
1394 1421 be lists of msg_ids that are incomplete or complete. If `status_only`
1395 1422 is False, then completed results will be keyed by their `msg_id`.
1396 1423 """
1397 1424 if not isinstance(msg_ids, (list,tuple)):
1398 1425 msg_ids = [msg_ids]
1399 1426
1400 1427 theids = []
1401 1428 for msg_id in msg_ids:
1402 1429 if isinstance(msg_id, int):
1403 1430 msg_id = self.history[msg_id]
1404 1431 if not isinstance(msg_id, basestring):
1405 1432 raise TypeError("msg_ids must be str, not %r"%msg_id)
1406 1433 theids.append(msg_id)
1407 1434
1408 1435 completed = []
1409 1436 local_results = {}
1410 1437
1411 1438 # comment this block out to temporarily disable local shortcut:
1412 1439 for msg_id in theids:
1413 1440 if msg_id in self.results:
1414 1441 completed.append(msg_id)
1415 1442 local_results[msg_id] = self.results[msg_id]
1416 1443 theids.remove(msg_id)
1417 1444
1418 1445 if theids: # some not locally cached
1419 1446 content = dict(msg_ids=theids, status_only=status_only)
1420 1447 msg = self.session.send(self._query_socket, "result_request", content=content)
1421 1448 zmq.select([self._query_socket], [], [])
1422 1449 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1423 1450 if self.debug:
1424 1451 pprint(msg)
1425 1452 content = msg['content']
1426 1453 if content['status'] != 'ok':
1427 1454 raise self._unwrap_exception(content)
1428 1455 buffers = msg['buffers']
1429 1456 else:
1430 1457 content = dict(completed=[],pending=[])
1431 1458
1432 1459 content['completed'].extend(completed)
1433 1460
1434 1461 if status_only:
1435 1462 return content
1436 1463
1437 1464 failures = []
1438 1465 # load cached results into result:
1439 1466 content.update(local_results)
1440 1467
1441 1468 # update cache with results:
1442 1469 for msg_id in sorted(theids):
1443 1470 if msg_id in content['completed']:
1444 1471 rec = content[msg_id]
1445 1472 parent = rec['header']
1446 1473 header = rec['result_header']
1447 1474 rcontent = rec['result_content']
1448 1475 iodict = rec['io']
1449 1476 if isinstance(rcontent, str):
1450 1477 rcontent = self.session.unpack(rcontent)
1451 1478
1452 1479 md = self.metadata[msg_id]
1453 1480 md.update(self._extract_metadata(header, parent, rcontent))
1454 1481 if rec.get('received'):
1455 1482 md['received'] = rec['received']
1456 1483 md.update(iodict)
1457 1484
1458 1485 if rcontent['status'] == 'ok':
1459 1486 res,buffers = util.unserialize_object(buffers)
1460 1487 else:
1461 1488 print rcontent
1462 1489 res = self._unwrap_exception(rcontent)
1463 1490 failures.append(res)
1464 1491
1465 1492 self.results[msg_id] = res
1466 1493 content[msg_id] = res
1467 1494
1468 1495 if len(theids) == 1 and failures:
1469 1496 raise failures[0]
1470 1497
1471 1498 error.collect_exceptions(failures, "result_status")
1472 1499 return content
1473 1500
    @spin_first
    def queue_status(self, targets='all', verbose=False):
        """Fetch the status of engine queues.

        Parameters
        ----------

        targets : int/str/list of ints/strs
            the engines whose states are to be queried.
            default : all
        verbose : bool
            Whether to return lengths only, or lists of ids for each element

        Returns
        -------
        For a single int target, that engine's status dict; otherwise a dict
        keyed by engine id (plus the hub's 'unassigned' count).
        """
        if targets == 'all':
            # allow 'all' to be evaluated on the engine
            engine_ids = None
        else:
            engine_ids = self._build_targets(targets)[1]
        content = dict(targets=engine_ids, verbose=verbose)
        self.session.send(self._query_socket, "queue_request", content=content)
        idents,msg = self.session.recv(self._query_socket, 0)
        if self.debug:
            pprint(msg)
        content = msg['content']
        status = content.pop('status')
        if status != 'ok':
            raise self._unwrap_exception(content)
        # rekey: convert string engine-id keys (from json) back to ints
        content = rekey(content)
        if isinstance(targets, int):
            # single engine requested: unwrap its entry
            return content[targets]
        else:
            return content
1506 1533
1507 1534 @spin_first
1508 1535 def purge_results(self, jobs=[], targets=[]):
1509 1536 """Tell the Hub to forget results.
1510 1537
1511 1538 Individual results can be purged by msg_id, or the entire
1512 1539 history of specific targets can be purged.
1513 1540
1514 1541 Use `purge_results('all')` to scrub everything from the Hub's db.
1515 1542
1516 1543 Parameters
1517 1544 ----------
1518 1545
1519 1546 jobs : str or list of str or AsyncResult objects
1520 1547 the msg_ids whose results should be forgotten.
1521 1548 targets : int/str/list of ints/strs
1522 1549 The targets, by int_id, whose entire history is to be purged.
1523 1550
1524 1551 default : None
1525 1552 """
1526 1553 if not targets and not jobs:
1527 1554 raise ValueError("Must specify at least one of `targets` and `jobs`")
1528 1555 if targets:
1529 1556 targets = self._build_targets(targets)[1]
1530 1557
1531 1558 # construct msg_ids from jobs
1532 1559 if jobs == 'all':
1533 1560 msg_ids = jobs
1534 1561 else:
1535 1562 msg_ids = []
1536 1563 if isinstance(jobs, (basestring,AsyncResult)):
1537 1564 jobs = [jobs]
1538 1565 bad_ids = filter(lambda obj: not isinstance(obj, (basestring, AsyncResult)), jobs)
1539 1566 if bad_ids:
1540 1567 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1541 1568 for j in jobs:
1542 1569 if isinstance(j, AsyncResult):
1543 1570 msg_ids.extend(j.msg_ids)
1544 1571 else:
1545 1572 msg_ids.append(j)
1546 1573
1547 1574 content = dict(engine_ids=targets, msg_ids=msg_ids)
1548 1575 self.session.send(self._query_socket, "purge_request", content=content)
1549 1576 idents, msg = self.session.recv(self._query_socket, 0)
1550 1577 if self.debug:
1551 1578 pprint(msg)
1552 1579 content = msg['content']
1553 1580 if content['status'] != 'ok':
1554 1581 raise self._unwrap_exception(content)
1555 1582
1556 1583 @spin_first
1557 1584 def hub_history(self):
1558 1585 """Get the Hub's history
1559 1586
1560 1587 Just like the Client, the Hub has a history, which is a list of msg_ids.
1561 1588 This will contain the history of all clients, and, depending on configuration,
1562 1589 may contain history across multiple cluster sessions.
1563 1590
1564 1591 Any msg_id returned here is a valid argument to `get_result`.
1565 1592
1566 1593 Returns
1567 1594 -------
1568 1595
1569 1596 msg_ids : list of strs
1570 1597 list of all msg_ids, ordered by task submission time.
1571 1598 """
1572 1599
1573 1600 self.session.send(self._query_socket, "history_request", content={})
1574 1601 idents, msg = self.session.recv(self._query_socket, 0)
1575 1602
1576 1603 if self.debug:
1577 1604 pprint(msg)
1578 1605 content = msg['content']
1579 1606 if content['status'] != 'ok':
1580 1607 raise self._unwrap_exception(content)
1581 1608 else:
1582 1609 return content['history']
1583 1610
    @spin_first
    def db_query(self, query, keys=None):
        """Query the Hub's TaskRecord database

        This will return a list of task record dicts that match `query`

        Parameters
        ----------

        query : mongodb query dict
            The search dict. See mongodb query docs for details.
        keys : list of strs [optional]
            The subset of keys to be returned. The default is to fetch everything but buffers.
            'msg_id' will *always* be included.

        Returns
        -------
        records : list of dicts
            Matching task records, with any requested buffers re-attached.
        """
        if isinstance(keys, basestring):
            keys = [keys]
        content = dict(query=query, keys=keys)
        self.session.send(self._query_socket, "db_request", content=content)
        idents, msg = self.session.recv(self._query_socket, 0)
        if self.debug:
            pprint(msg)
        content = msg['content']
        if content['status'] != 'ok':
            raise self._unwrap_exception(content)

        records = content['records']

        # buffers for all records arrive flattened in msg['buffers'];
        # buffer_lens/result_buffer_lens say how many belong to each record
        buffer_lens = content['buffer_lens']
        result_buffer_lens = content['result_buffer_lens']
        buffers = msg['buffers']
        has_bufs = buffer_lens is not None
        has_rbufs = result_buffer_lens is not None
        for i,rec in enumerate(records):
            # relink buffers: peel this record's share off the front
            if has_bufs:
                blen = buffer_lens[i]
                rec['buffers'], buffers = buffers[:blen],buffers[blen:]
            if has_rbufs:
                blen = result_buffer_lens[i]
                rec['result_buffers'], buffers = buffers[:blen],buffers[blen:]

        return records
1627 1654
# public API of this module
__all__ = [ 'Client' ]
@@ -1,144 +1,213 b''
1 1 """base class for parallel client tests
2 2
3 3 Authors:
4 4
5 5 * Min RK
6 6 """
7 7
8 8 #-------------------------------------------------------------------------------
9 9 # Copyright (C) 2011 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is in
12 12 # the file COPYING, distributed as part of this software.
13 13 #-------------------------------------------------------------------------------
14 14
15 15 import sys
16 16 import tempfile
17 17 import time
18 from StringIO import StringIO
18 19
19 20 from nose import SkipTest
20 21
21 22 import zmq
22 23 from zmq.tests import BaseZMQTestCase
23 24
24 25 from IPython.external.decorator import decorator
25 26
26 27 from IPython.parallel import error
27 28 from IPython.parallel import Client
28 29
29 30 from IPython.parallel.tests import launchers, add_engines
30 31
31 32 # simple tasks for use in apply tests
32 33
def segfault():
    """this will segfault"""
    # writing to address -1 (invalid memory) reliably hard-crashes the process;
    # used to test engine-death handling
    import ctypes
    ctypes.memset(-1,0,1)
37 38
def crash():
    """from stdlib crashers in the test suite"""
    import types
    if sys.platform.startswith('win'):
        # suppress the Windows crash-dialog popup so the test doesn't hang
        ctypes.windll.kernel32.SetErrorMode(0x0002);
        import ctypes
    # hand-crafted invalid bytecode: executing it crashes the interpreter
    args = [ 0, 0, 0, 0, b'\x04\x71\x00\x00', (), (), (), '', '', 1, b'']
    if sys.version_info[0] >= 3:
        # Python3 adds 'kwonlyargcount' as the second argument to Code
        args.insert(1, 0)

    co = types.CodeType(*args)
    exec(co)
51 52
def wait(n):
    """Sleep for *n* seconds, then return *n* (simple task for apply tests)."""
    from time import sleep
    sleep(n)
    return n
57 58
def raiser(eclass):
    """Instantiate *eclass* and raise the resulting exception."""
    exc = eclass()
    raise exc
61 62
def generate_output():
    """function for testing output

    publishes two outputs of each type, and returns
    a rich displayable object.
    """

    import sys
    from IPython.core.display import display, HTML, Math

    # interleave stdout/stderr with display() calls, so tests can verify
    # that ordering and stream routing are preserved
    print "stdout"
    print >> sys.stderr, "stderr"

    display(HTML("<b>HTML</b>"))

    print "stdout2"
    print >> sys.stderr, "stderr2"

    display(Math(r"\alpha=\beta"))

    # a rich object as the return value, to test result display
    return Math("42")
84
62 85 # test decorator for skipping tests when libraries are unavailable
def skip_without(*names):
    """skip a test if some names are not importable"""
    @decorator
    def skip_without_names(f, *args, **kwargs):
        """Run the wrapped test only when every required module imports cleanly."""
        for modname in names:
            try:
                __import__(modname)
            except ImportError:
                # missing dependency: report the test as skipped, not failed
                raise SkipTest
        return f(*args, **kwargs)
    return skip_without_names
75 98
99 #-------------------------------------------------------------------------------
100 # Classes
101 #-------------------------------------------------------------------------------
102
class CapturedIO(object):
    """Pair of StringIO buffers holding captured stdout/stderr.

    The raw buffers are available as ``stdout_io`` / ``stderr_io``;
    the ``stdout`` / ``stderr`` properties expose their current text.
    """

    def __init__(self, stdout, stderr):
        # keep references to the buffers; text is read lazily via properties
        self.stdout_io = stdout
        self.stderr_io = stderr

    @property
    def stdout(self):
        """Text written to the captured stdout so far."""
        return self.stdout_io.getvalue()

    @property
    def stderr(self):
        """Text written to the captured stderr so far."""
        return self.stderr_io.getvalue()
117
118
class capture_output(object):
    """context manager for capturing stdout/err"""

    def __enter__(self):
        # remember the real streams so they can be restored on exit
        self.sys_stdout = sys.stdout
        self.sys_stderr = sys.stderr
        sys.stdout = StringIO()
        sys.stderr = StringIO()
        return CapturedIO(sys.stdout, sys.stderr)

    def __exit__(self, exc_type, exc_value, traceback):
        # always restore the real streams, even when the body raised
        sys.stdout = self.sys_stdout
        sys.stderr = self.sys_stderr
132
133
class ClusterTestCase(BaseZMQTestCase):
    """Base TestCase for tests that talk to a running iptest cluster."""

    def add_engines(self, n=1, block=True):
        """add multiple engines to our cluster"""
        self.engines.extend(add_engines(n))
        if block:
            self.wait_on_engines()

    def minimum_engines(self, n=1, block=True):
        """add engines until there are at least n connected"""
        self.engines.extend(add_engines(n, total=True))
        if block:
            self.wait_on_engines()


    def wait_on_engines(self, timeout=5):
        """wait for our engines to connect."""
        # expected count: engines we started plus those already present at setUp
        n = len(self.engines)+self.base_engine_count
        tic = time.time()
        while time.time()-tic < timeout and len(self.client.ids) < n:
            time.sleep(0.1)

        assert not len(self.client.ids) < n, "waiting for engines timed out"

    def connect_client(self):
        """connect a client with my Context, and track its sockets for cleanup"""
        c = Client(profile='iptest', context=self.context)
        # find all of the client's zmq sockets by naming convention,
        # and register them so BaseZMQTestCase closes them in tearDown
        for name in filter(lambda n:n.endswith('socket'), dir(c)):
            s = getattr(c, name)
            s.setsockopt(zmq.LINGER, 0)
            self.sockets.append(s)
        return c

    def assertRaisesRemote(self, etype, f, *args, **kwargs):
        """Assert that f(*args, **kwargs) raises a RemoteError wrapping etype."""
        try:
            try:
                f(*args, **kwargs)
            except error.CompositeError as e:
                # unpack a CompositeError into its first RemoteError
                e.raise_exception()
        except error.RemoteError as e:
            self.assertEquals(etype.__name__, e.ename, "Should have raised %r, but raised %r"%(etype.__name__, e.ename))
        else:
            self.fail("should have raised a RemoteError")

    def _wait_for(self, f, timeout=10):
        """wait for a condition"""
        tic = time.time()
        while time.time() <= tic + timeout:
            if f():
                return
            time.sleep(0.1)
            # keep pumping the client's incoming messages while we wait
            self.client.spin()
        if not f():
            print "Warning: Awaited condition never arrived"

    def setUp(self):
        BaseZMQTestCase.setUp(self)
        self.client = self.connect_client()
        # start every test with clean engine namespaces:
        self.client.clear(block=True)
        self.base_engine_count=len(self.client.ids)
        self.engines=[]

    def tearDown(self):
        # self.client.clear(block=True)
        # close fds:
        # drop launchers whose engine process has already exited
        for e in filter(lambda e: e.poll() is not None, launchers):
            launchers.remove(e)

        # allow flushing of incoming messages to prevent crash on socket close
        self.client.wait(timeout=2)
        # time.sleep(2)
        self.client.spin()
        self.client.close()
        BaseZMQTestCase.tearDown(self)
        # this will be redundant when pyzmq merges PR #88
        # self.context.term()
        # print tempfile.TemporaryFile().fileno(),
        # sys.stdout.flush()
144 213 No newline at end of file
@@ -1,694 +1,572 b''
1 1 # -*- coding: utf-8 -*-
2 2 """test View objects
3 3
4 4 Authors:
5 5
6 6 * Min RK
7 7 """
8 8 #-------------------------------------------------------------------------------
9 9 # Copyright (C) 2011 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is in
12 12 # the file COPYING, distributed as part of this software.
13 13 #-------------------------------------------------------------------------------
14 14
15 15 #-------------------------------------------------------------------------------
16 16 # Imports
17 17 #-------------------------------------------------------------------------------
18 18
19 19 import sys
20 20 import time
21 21 from tempfile import mktemp
22 22 from StringIO import StringIO
23 23
24 24 import zmq
25 25 from nose import SkipTest
26 26
27 27 from IPython.testing import decorators as dec
28 28 from IPython.testing.ipunittest import ParametricTestCase
29 29
30 30 from IPython import parallel as pmod
31 31 from IPython.parallel import error
32 32 from IPython.parallel import AsyncResult, AsyncHubResult, AsyncMapResult
33 33 from IPython.parallel import DirectView
34 34 from IPython.parallel.util import interactive
35 35
36 36 from IPython.parallel.tests import add_engines
37 37
38 38 from .clienttest import ClusterTestCase, crash, wait, skip_without
39 39
def setup():
    """Module-level nose setup: make sure three engines are available."""
    add_engines(3, total=True)
42 42
43 43 class TestView(ClusterTestCase, ParametricTestCase):
44 44
45 45 def test_z_crash_mux(self):
46 46 """test graceful handling of engine death (direct)"""
47 47 raise SkipTest("crash tests disabled, due to undesirable crash reports")
48 48 # self.add_engines(1)
49 49 eid = self.client.ids[-1]
50 50 ar = self.client[eid].apply_async(crash)
51 51 self.assertRaisesRemote(error.EngineError, ar.get, 10)
52 52 eid = ar.engine_id
53 53 tic = time.time()
54 54 while eid in self.client.ids and time.time()-tic < 5:
55 55 time.sleep(.01)
56 56 self.client.spin()
57 57 self.assertFalse(eid in self.client.ids, "Engine should have died")
58 58
59 59 def test_push_pull(self):
60 60 """test pushing and pulling"""
61 61 data = dict(a=10, b=1.05, c=range(10), d={'e':(1,2),'f':'hi'})
62 62 t = self.client.ids[-1]
63 63 v = self.client[t]
64 64 push = v.push
65 65 pull = v.pull
66 66 v.block=True
67 67 nengines = len(self.client)
68 68 push({'data':data})
69 69 d = pull('data')
70 70 self.assertEquals(d, data)
71 71 self.client[:].push({'data':data})
72 72 d = self.client[:].pull('data', block=True)
73 73 self.assertEquals(d, nengines*[data])
74 74 ar = push({'data':data}, block=False)
75 75 self.assertTrue(isinstance(ar, AsyncResult))
76 76 r = ar.get()
77 77 ar = self.client[:].pull('data', block=False)
78 78 self.assertTrue(isinstance(ar, AsyncResult))
79 79 r = ar.get()
80 80 self.assertEquals(r, nengines*[data])
81 81 self.client[:].push(dict(a=10,b=20))
82 82 r = self.client[:].pull(('a','b'), block=True)
83 83 self.assertEquals(r, nengines*[[10,20]])
84 84
85 85 def test_push_pull_function(self):
86 86 "test pushing and pulling functions"
87 87 def testf(x):
88 88 return 2.0*x
89 89
90 90 t = self.client.ids[-1]
91 91 v = self.client[t]
92 92 v.block=True
93 93 push = v.push
94 94 pull = v.pull
95 95 execute = v.execute
96 96 push({'testf':testf})
97 97 r = pull('testf')
98 98 self.assertEqual(r(1.0), testf(1.0))
99 99 execute('r = testf(10)')
100 100 r = pull('r')
101 101 self.assertEquals(r, testf(10))
102 102 ar = self.client[:].push({'testf':testf}, block=False)
103 103 ar.get()
104 104 ar = self.client[:].pull('testf', block=False)
105 105 rlist = ar.get()
106 106 for r in rlist:
107 107 self.assertEqual(r(1.0), testf(1.0))
108 108 execute("def g(x): return x*x")
109 109 r = pull(('testf','g'))
110 110 self.assertEquals((r[0](10),r[1](10)), (testf(10), 100))
111 111
112 112 def test_push_function_globals(self):
113 113 """test that pushed functions have access to globals"""
114 114 @interactive
115 115 def geta():
116 116 return a
117 117 # self.add_engines(1)
118 118 v = self.client[-1]
119 119 v.block=True
120 120 v['f'] = geta
121 121 self.assertRaisesRemote(NameError, v.execute, 'b=f()')
122 122 v.execute('a=5')
123 123 v.execute('b=f()')
124 124 self.assertEquals(v['b'], 5)
125 125
126 126 def test_push_function_defaults(self):
127 127 """test that pushed functions preserve default args"""
128 128 def echo(a=10):
129 129 return a
130 130 v = self.client[-1]
131 131 v.block=True
132 132 v['f'] = echo
133 133 v.execute('b=f()')
134 134 self.assertEquals(v['b'], 10)
135 135
136 136 def test_get_result(self):
137 137 """test getting results from the Hub."""
138 138 c = pmod.Client(profile='iptest')
139 139 # self.add_engines(1)
140 140 t = c.ids[-1]
141 141 v = c[t]
142 142 v2 = self.client[t]
143 143 ar = v.apply_async(wait, 1)
144 144 # give the monitor time to notice the message
145 145 time.sleep(.25)
146 146 ahr = v2.get_result(ar.msg_ids)
147 147 self.assertTrue(isinstance(ahr, AsyncHubResult))
148 148 self.assertEquals(ahr.get(), ar.get())
149 149 ar2 = v2.get_result(ar.msg_ids)
150 150 self.assertFalse(isinstance(ar2, AsyncHubResult))
151 151 c.spin()
152 152 c.close()
153 153
154 154 def test_run_newline(self):
155 155 """test that run appends newline to files"""
156 156 tmpfile = mktemp()
157 157 with open(tmpfile, 'w') as f:
158 158 f.write("""def g():
159 159 return 5
160 160 """)
161 161 v = self.client[-1]
162 162 v.run(tmpfile, block=True)
163 163 self.assertEquals(v.apply_sync(lambda f: f(), pmod.Reference('g')), 5)
164 164
165 165 def test_apply_tracked(self):
166 166 """test tracking for apply"""
167 167 # self.add_engines(1)
168 168 t = self.client.ids[-1]
169 169 v = self.client[t]
170 170 v.block=False
171 171 def echo(n=1024*1024, **kwargs):
172 172 with v.temp_flags(**kwargs):
173 173 return v.apply(lambda x: x, 'x'*n)
174 174 ar = echo(1, track=False)
175 175 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
176 176 self.assertTrue(ar.sent)
177 177 ar = echo(track=True)
178 178 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
179 179 self.assertEquals(ar.sent, ar._tracker.done)
180 180 ar._tracker.wait()
181 181 self.assertTrue(ar.sent)
182 182
183 183 def test_push_tracked(self):
184 184 t = self.client.ids[-1]
185 185 ns = dict(x='x'*1024*1024)
186 186 v = self.client[t]
187 187 ar = v.push(ns, block=False, track=False)
188 188 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
189 189 self.assertTrue(ar.sent)
190 190
191 191 ar = v.push(ns, block=False, track=True)
192 192 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
193 193 ar._tracker.wait()
194 194 self.assertEquals(ar.sent, ar._tracker.done)
195 195 self.assertTrue(ar.sent)
196 196 ar.get()
197 197
198 198 def test_scatter_tracked(self):
199 199 t = self.client.ids
200 200 x='x'*1024*1024
201 201 ar = self.client[t].scatter('x', x, block=False, track=False)
202 202 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
203 203 self.assertTrue(ar.sent)
204 204
205 205 ar = self.client[t].scatter('x', x, block=False, track=True)
206 206 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
207 207 self.assertEquals(ar.sent, ar._tracker.done)
208 208 ar._tracker.wait()
209 209 self.assertTrue(ar.sent)
210 210 ar.get()
211 211
212 212 def test_remote_reference(self):
213 213 v = self.client[-1]
214 214 v['a'] = 123
215 215 ra = pmod.Reference('a')
216 216 b = v.apply_sync(lambda x: x, ra)
217 217 self.assertEquals(b, 123)
218 218
219 219
220 220 def test_scatter_gather(self):
221 221 view = self.client[:]
222 222 seq1 = range(16)
223 223 view.scatter('a', seq1)
224 224 seq2 = view.gather('a', block=True)
225 225 self.assertEquals(seq2, seq1)
226 226 self.assertRaisesRemote(NameError, view.gather, 'asdf', block=True)
227 227
228 228 @skip_without('numpy')
229 229 def test_scatter_gather_numpy(self):
230 230 import numpy
231 231 from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
232 232 view = self.client[:]
233 233 a = numpy.arange(64)
234 234 view.scatter('a', a)
235 235 b = view.gather('a', block=True)
236 236 assert_array_equal(b, a)
237 237
238 238 def test_scatter_gather_lazy(self):
239 239 """scatter/gather with targets='all'"""
240 240 view = self.client.direct_view(targets='all')
241 241 x = range(64)
242 242 view.scatter('x', x)
243 243 gathered = view.gather('x', block=True)
244 244 self.assertEquals(gathered, x)
245 245
246 246
247 247 @dec.known_failure_py3
248 248 @skip_without('numpy')
249 249 def test_push_numpy_nocopy(self):
250 250 import numpy
251 251 view = self.client[:]
252 252 a = numpy.arange(64)
253 253 view['A'] = a
254 254 @interactive
255 255 def check_writeable(x):
256 256 return x.flags.writeable
257 257
258 258 for flag in view.apply_sync(check_writeable, pmod.Reference('A')):
259 259 self.assertFalse(flag, "array is writeable, push shouldn't have pickled it")
260 260
261 261 view.push(dict(B=a))
262 262 for flag in view.apply_sync(check_writeable, pmod.Reference('B')):
263 263 self.assertFalse(flag, "array is writeable, push shouldn't have pickled it")
264 264
265 265 @skip_without('numpy')
266 266 def test_apply_numpy(self):
267 267 """view.apply(f, ndarray)"""
268 268 import numpy
269 269 from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
270 270
271 271 A = numpy.random.random((100,100))
272 272 view = self.client[-1]
273 273 for dt in [ 'int32', 'uint8', 'float32', 'float64' ]:
274 274 B = A.astype(dt)
275 275 C = view.apply_sync(lambda x:x, B)
276 276 assert_array_equal(B,C)
277 277
278 278 def test_map(self):
279 279 view = self.client[:]
280 280 def f(x):
281 281 return x**2
282 282 data = range(16)
283 283 r = view.map_sync(f, data)
284 284 self.assertEquals(r, map(f, data))
285 285
286 286 def test_map_iterable(self):
287 287 """test map on iterables (direct)"""
288 288 view = self.client[:]
289 289 # 101 is prime, so it won't be evenly distributed
290 290 arr = range(101)
291 291 # ensure it will be an iterator, even in Python 3
292 292 it = iter(arr)
293 293 r = view.map_sync(lambda x:x, arr)
294 294 self.assertEquals(r, list(arr))
295 295
296 296 def test_scatterGatherNonblocking(self):
297 297 data = range(16)
298 298 view = self.client[:]
299 299 view.scatter('a', data, block=False)
300 300 ar = view.gather('a', block=False)
301 301 self.assertEquals(ar.get(), data)
302 302
303 303 @skip_without('numpy')
304 304 def test_scatter_gather_numpy_nonblocking(self):
305 305 import numpy
306 306 from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
307 307 a = numpy.arange(64)
308 308 view = self.client[:]
309 309 ar = view.scatter('a', a, block=False)
310 310 self.assertTrue(isinstance(ar, AsyncResult))
311 311 amr = view.gather('a', block=False)
312 312 self.assertTrue(isinstance(amr, AsyncMapResult))
313 313 assert_array_equal(amr.get(), a)
314 314
315 315 def test_execute(self):
316 316 view = self.client[:]
317 317 # self.client.debug=True
318 318 execute = view.execute
319 319 ar = execute('c=30', block=False)
320 320 self.assertTrue(isinstance(ar, AsyncResult))
321 321 ar = execute('d=[0,1,2]', block=False)
322 322 self.client.wait(ar, 1)
323 323 self.assertEquals(len(ar.get()), len(self.client))
324 324 for c in view['c']:
325 325 self.assertEquals(c, 30)
326 326
327 327 def test_abort(self):
328 328 view = self.client[-1]
329 329 ar = view.execute('import time; time.sleep(1)', block=False)
330 330 ar2 = view.apply_async(lambda : 2)
331 331 ar3 = view.apply_async(lambda : 3)
332 332 view.abort(ar2)
333 333 view.abort(ar3.msg_ids)
334 334 self.assertRaises(error.TaskAborted, ar2.get)
335 335 self.assertRaises(error.TaskAborted, ar3.get)
336 336
337 337 def test_abort_all(self):
338 338 """view.abort() aborts all outstanding tasks"""
339 339 view = self.client[-1]
340 340 ars = [ view.apply_async(time.sleep, 0.25) for i in range(10) ]
341 341 view.abort()
342 342 view.wait(timeout=5)
343 343 for ar in ars[5:]:
344 344 self.assertRaises(error.TaskAborted, ar.get)
345 345
346 346 def test_temp_flags(self):
347 347 view = self.client[-1]
348 348 view.block=True
349 349 with view.temp_flags(block=False):
350 350 self.assertFalse(view.block)
351 351 self.assertTrue(view.block)
352 352
353 353 @dec.known_failure_py3
354 354 def test_importer(self):
355 355 view = self.client[-1]
356 356 view.clear(block=True)
357 357 with view.importer:
358 358 import re
359 359
360 360 @interactive
361 361 def findall(pat, s):
362 362 # this globals() step isn't necessary in real code
363 363 # only to prevent a closure in the test
364 364 re = globals()['re']
365 365 return re.findall(pat, s)
366 366
367 367 self.assertEquals(view.apply_sync(findall, '\w+', 'hello world'), 'hello world'.split())
368 368
369 # parallel magic tests
370
371 def test_magic_px_blocking(self):
372 ip = get_ipython()
373 v = self.client[-1]
374 v.activate()
375 v.block=True
376
377 ip.magic('px a=5')
378 self.assertEquals(v['a'], 5)
379 ip.magic('px a=10')
380 self.assertEquals(v['a'], 10)
381 sio = StringIO()
382 savestdout = sys.stdout
383 sys.stdout = sio
384 # just 'print a' worst ~99% of the time, but this ensures that
385 # the stdout message has arrived when the result is finished:
386 ip.magic('px import sys,time;print (a); sys.stdout.flush();time.sleep(0.2)')
387 sys.stdout = savestdout
388 buf = sio.getvalue()
389 self.assertTrue('[stdout:' in buf, buf)
390 self.assertTrue(buf.rstrip().endswith('10'))
391 self.assertRaisesRemote(ZeroDivisionError, ip.magic, 'px 1/0')
392
393 def test_magic_px_nonblocking(self):
394 ip = get_ipython()
395 v = self.client[-1]
396 v.activate()
397 v.block=False
398
399 ip.magic('px a=5')
400 self.assertEquals(v['a'], 5)
401 ip.magic('px a=10')
402 self.assertEquals(v['a'], 10)
403 sio = StringIO()
404 savestdout = sys.stdout
405 sys.stdout = sio
406 ip.magic('px print a')
407 sys.stdout = savestdout
408 buf = sio.getvalue()
409 self.assertFalse('[stdout:%i]'%v.targets in buf)
410 ip.magic('px 1/0')
411 ar = v.get_result(-1)
412 self.assertRaisesRemote(ZeroDivisionError, ar.get)
413
414 def test_magic_autopx_blocking(self):
415 ip = get_ipython()
416 v = self.client[-1]
417 v.activate()
418 v.block=True
419
420 sio = StringIO()
421 savestdout = sys.stdout
422 sys.stdout = sio
423 ip.magic('autopx')
424 ip.run_cell('\n'.join(('a=5','b=10','c=0')))
425 ip.run_cell('b*=2')
426 ip.run_cell('print (b)')
427 ip.run_cell("b/c")
428 ip.magic('autopx')
429 sys.stdout = savestdout
430 output = sio.getvalue().strip()
431 self.assertTrue(output.startswith('%autopx enabled'))
432 self.assertTrue(output.endswith('%autopx disabled'))
433 self.assertTrue('RemoteError: ZeroDivisionError' in output)
434 ar = v.get_result(-1)
435 self.assertEquals(v['a'], 5)
436 self.assertEquals(v['b'], 20)
437 self.assertRaisesRemote(ZeroDivisionError, ar.get)
438
439 def test_magic_autopx_nonblocking(self):
440 ip = get_ipython()
441 v = self.client[-1]
442 v.activate()
443 v.block=False
444
445 sio = StringIO()
446 savestdout = sys.stdout
447 sys.stdout = sio
448 ip.magic('autopx')
449 ip.run_cell('\n'.join(('a=5','b=10','c=0')))
450 ip.run_cell('print (b)')
451 ip.run_cell('import time; time.sleep(0.1)')
452 ip.run_cell("b/c")
453 ip.run_cell('b*=2')
454 ip.magic('autopx')
455 sys.stdout = savestdout
456 output = sio.getvalue().strip()
457 self.assertTrue(output.startswith('%autopx enabled'))
458 self.assertTrue(output.endswith('%autopx disabled'))
459 self.assertFalse('ZeroDivisionError' in output)
460 ar = v.get_result(-2)
461 self.assertRaisesRemote(ZeroDivisionError, ar.get)
462 # prevent TaskAborted on pulls, due to ZeroDivisionError
463 time.sleep(0.5)
464 self.assertEquals(v['a'], 5)
465 # b*=2 will not fire, due to abort
466 self.assertEquals(v['b'], 10)
467
468 def test_magic_result(self):
469 ip = get_ipython()
470 v = self.client[-1]
471 v.activate()
472 v['a'] = 111
473 ra = v['a']
474
475 ar = ip.magic('result')
476 self.assertEquals(ar.msg_ids, [v.history[-1]])
477 self.assertEquals(ar.get(), 111)
478 ar = ip.magic('result -2')
479 self.assertEquals(ar.msg_ids, [v.history[-2]])
480
481 369 def test_unicode_execute(self):
482 370 """test executing unicode strings"""
483 371 v = self.client[-1]
484 372 v.block=True
485 373 if sys.version_info[0] >= 3:
486 374 code="a='é'"
487 375 else:
488 376 code=u"a=u'é'"
489 377 v.execute(code)
490 378 self.assertEquals(v['a'], u'é')
491 379
492 380 def test_unicode_apply_result(self):
493 381 """test unicode apply results"""
494 382 v = self.client[-1]
495 383 r = v.apply_sync(lambda : u'é')
496 384 self.assertEquals(r, u'é')
497 385
498 386 def test_unicode_apply_arg(self):
499 387 """test passing unicode arguments to apply"""
500 388 v = self.client[-1]
501 389
502 390 @interactive
503 391 def check_unicode(a, check):
504 392 assert isinstance(a, unicode), "%r is not unicode"%a
505 393 assert isinstance(check, bytes), "%r is not bytes"%check
506 394 assert a.encode('utf8') == check, "%s != %s"%(a,check)
507 395
508 396 for s in [ u'é', u'ßø®∫',u'asdf' ]:
509 397 try:
510 398 v.apply_sync(check_unicode, s, s.encode('utf8'))
511 399 except error.RemoteError as e:
512 400 if e.ename == 'AssertionError':
513 401 self.fail(e.evalue)
514 402 else:
515 403 raise e
516 404
517 405 def test_map_reference(self):
518 406 """view.map(<Reference>, *seqs) should work"""
519 407 v = self.client[:]
520 408 v.scatter('n', self.client.ids, flatten=True)
521 409 v.execute("f = lambda x,y: x*y")
522 410 rf = pmod.Reference('f')
523 411 nlist = list(range(10))
524 412 mlist = nlist[::-1]
525 413 expected = [ m*n for m,n in zip(mlist, nlist) ]
526 414 result = v.map_sync(rf, mlist, nlist)
527 415 self.assertEquals(result, expected)
528 416
529 417 def test_apply_reference(self):
530 418 """view.apply(<Reference>, *args) should work"""
531 419 v = self.client[:]
532 420 v.scatter('n', self.client.ids, flatten=True)
533 421 v.execute("f = lambda x: n*x")
534 422 rf = pmod.Reference('f')
535 423 result = v.apply_sync(rf, 5)
536 424 expected = [ 5*id for id in self.client.ids ]
537 425 self.assertEquals(result, expected)
538 426
539 427 def test_eval_reference(self):
540 428 v = self.client[self.client.ids[0]]
541 429 v['g'] = range(5)
542 430 rg = pmod.Reference('g[0]')
543 431 echo = lambda x:x
544 432 self.assertEquals(v.apply_sync(echo, rg), 0)
545 433
546 434 def test_reference_nameerror(self):
547 435 v = self.client[self.client.ids[0]]
548 436 r = pmod.Reference('elvis_has_left')
549 437 echo = lambda x:x
550 438 self.assertRaisesRemote(NameError, v.apply_sync, echo, r)
551 439
552 440 def test_single_engine_map(self):
553 441 e0 = self.client[self.client.ids[0]]
554 442 r = range(5)
555 443 check = [ -1*i for i in r ]
556 444 result = e0.map_sync(lambda x: -1*x, r)
557 445 self.assertEquals(result, check)
558 446
559 447 def test_len(self):
560 448 """len(view) makes sense"""
561 449 e0 = self.client[self.client.ids[0]]
562 450 yield self.assertEquals(len(e0), 1)
563 451 v = self.client[:]
564 452 yield self.assertEquals(len(v), len(self.client.ids))
565 453 v = self.client.direct_view('all')
566 454 yield self.assertEquals(len(v), len(self.client.ids))
567 455 v = self.client[:2]
568 456 yield self.assertEquals(len(v), 2)
569 457 v = self.client[:1]
570 458 yield self.assertEquals(len(v), 1)
571 459 v = self.client.load_balanced_view()
572 460 yield self.assertEquals(len(v), len(self.client.ids))
573 461 # parametric tests seem to require manual closing?
574 462 self.client.close()
575 463
576 464
577 465 # begin execute tests
578 def _wait_for(self, f, timeout=10):
579 tic = time.time()
580 while time.time() <= tic + timeout:
581 if f():
582 return
583 time.sleep(0.1)
584 self.client.spin()
585 if not f():
586 print "Warning: Awaited condition never arrived"
587
588 466
589 467 def test_execute_reply(self):
590 468 e0 = self.client[self.client.ids[0]]
591 469 e0.block = True
592 470 ar = e0.execute("5", silent=False)
593 471 er = ar.get()
594 472 self._wait_for(lambda : bool(er.pyout))
595 473 self.assertEquals(str(er), "<ExecuteReply[%i]: 5>" % er.execution_count)
596 474 self.assertEquals(er.pyout['data']['text/plain'], '5')
597 475
598 476 def test_execute_reply_stdout(self):
599 477 e0 = self.client[self.client.ids[0]]
600 478 e0.block = True
601 479 ar = e0.execute("print (5)", silent=False)
602 480 er = ar.get()
603 481 self._wait_for(lambda : bool(er.stdout))
604 482 self.assertEquals(er.stdout.strip(), '5')
605 483
606 484 def test_execute_pyout(self):
607 485 """execute triggers pyout with silent=False"""
608 486 view = self.client[:]
609 487 ar = view.execute("5", silent=False, block=True)
610 488 self._wait_for(lambda : all(ar.pyout))
611 489
612 490 expected = [{'text/plain' : '5'}] * len(view)
613 491 mimes = [ out['data'] for out in ar.pyout ]
614 492 self.assertEquals(mimes, expected)
615 493
616 494 def test_execute_silent(self):
617 495 """execute does not trigger pyout with silent=True"""
618 496 view = self.client[:]
619 497 ar = view.execute("5", block=True)
620 498 expected = [None] * len(view)
621 499 self.assertEquals(ar.pyout, expected)
622 500
623 501 def test_execute_magic(self):
624 502 """execute accepts IPython commands"""
625 503 view = self.client[:]
626 504 view.execute("a = 5")
627 505 ar = view.execute("%whos", block=True)
628 506 # this will raise, if that failed
629 507 ar.get(5)
630 508 self._wait_for(lambda : all(ar.stdout))
631 509 for stdout in ar.stdout:
632 510 lines = stdout.splitlines()
633 511 self.assertEquals(lines[0].split(), ['Variable', 'Type', 'Data/Info'])
634 512 found = False
635 513 for line in lines[2:]:
636 514 split = line.split()
637 515 if split == ['a', 'int', '5']:
638 516 found = True
639 517 break
640 518 self.assertTrue(found, "whos output wrong: %s" % stdout)
641 519
642 520 def test_execute_displaypub(self):
643 521 """execute tracks display_pub output"""
644 522 view = self.client[:]
645 523 view.execute("from IPython.core.display import *")
646 524 ar = view.execute("[ display(i) for i in range(5) ]", block=True)
647 525
648 526 self._wait_for(lambda : all(len(er.outputs) >= 5 for er in ar))
649 527 expected = [ {u'text/plain' : unicode(j)} for j in range(5) ]
650 528 for outputs in ar.outputs:
651 529 mimes = [ out['data'] for out in outputs ]
652 530 self.assertEquals(mimes, expected)
653 531
654 532 def test_apply_displaypub(self):
655 533 """apply tracks display_pub output"""
656 534 view = self.client[:]
657 535 view.execute("from IPython.core.display import *")
658 536
659 537 @interactive
660 538 def publish():
661 539 [ display(i) for i in range(5) ]
662 540
663 541 ar = view.apply_async(publish)
664 542 ar.get(5)
665 543 self._wait_for(lambda : all(len(out) >= 5 for out in ar.outputs))
666 544 expected = [ {u'text/plain' : unicode(j)} for j in range(5) ]
667 545 for outputs in ar.outputs:
668 546 mimes = [ out['data'] for out in outputs ]
669 547 self.assertEquals(mimes, expected)
670 548
671 549 def test_execute_raises(self):
672 550 """exceptions in execute requests raise appropriately"""
673 551 view = self.client[-1]
674 552 ar = view.execute("1/0")
675 553 self.assertRaisesRemote(ZeroDivisionError, ar.get, 2)
676 554
677 555 @dec.skipif_not_matplotlib
678 556 def test_magic_pylab(self):
679 557 """%pylab works on engines"""
680 558 view = self.client[-1]
681 559 ar = view.execute("%pylab inline")
682 560 # at least check if this raised:
683 561 reply = ar.get(5)
684 562 # include imports, in case user config
685 563 ar = view.execute("plot(rand(100))", silent=False)
686 564 reply = ar.get(5)
687 565 self._wait_for(lambda : all(ar.outputs))
688 566 self.assertEquals(len(reply.outputs), 1)
689 567 output = reply.outputs[0]
690 568 self.assertTrue("data" in output)
691 569 data = output['data']
692 570 self.assertTrue("image/png" in data)
693 571
694 572
@@ -1,73 +1,75 b''
1 1 import __builtin__
2 2 import sys
3 3 from base64 import encodestring
4 4
5 5 from IPython.core.displayhook import DisplayHook
6 6 from IPython.utils.traitlets import Instance, Dict
7 7 from session import extract_header, Session
8 8
class ZMQDisplayHook(object):
    """A simple displayhook that publishes the object's repr over a ZeroMQ
    socket."""
    topic = None

    def __init__(self, session, pub_socket):
        self.session = session
        self.pub_socket = pub_socket
        self.parent_header = {}

    def __call__(self, obj):
        # mirror sys.displayhook: None results are not displayed
        if obj is None:
            return

        __builtin__._ = obj
        # flush std streams so output ordering is preserved relative to pyout
        sys.stdout.flush()
        sys.stderr.flush()
        self.session.send(self.pub_socket, u'pyout', {u'data':repr(obj)},
                          parent=self.parent_header, ident=self.topic)

    def set_parent(self, parent):
        """Set the parent header used for outbound pyout messages."""
        self.parent_header = extract_header(parent)
31 31
32 32
def _encode_binary(format_dict):
    """Return a copy of `format_dict` with raw image bytes base64-encoded.

    Only 'image/png' and 'image/jpeg' entries that are actually `bytes` are
    re-encoded; anything else (including already-encoded text data) passes
    through untouched.  The input dict is never mutated, so repeated calls
    cannot double-encode.
    """
    encoded = format_dict.copy()
    for mime in ('image/png', 'image/jpeg'):
        data = format_dict.get(mime)
        # bytes means raw image data; text means it is already encoded
        if isinstance(data, bytes):
            encoded[mime] = encodestring(data).decode('ascii')
    return encoded
40 43
41 44
class ZMQShellDisplayHook(DisplayHook):
    """A displayhook subclass that publishes data using ZeroMQ. This is intended
    to work with an InteractiveShell instance. It sends a dict of different
    representations of the object."""
    topic = None

    session = Instance(Session)
    pub_socket = Instance('zmq.Socket')
    parent_header = Dict({})

    def set_parent(self, parent):
        """Set the parent for outbound messages."""
        self.parent_header = extract_header(parent)

    def start_displayhook(self):
        # open a fresh pyout message; the write_* hooks below fill it in
        self.msg = self.session.msg(u'pyout', {}, parent=self.parent_header)

    def write_output_prompt(self):
        """Write the output prompt."""
        self.msg['content']['execution_count'] = self.prompt_count

    def write_format_data(self, format_dict):
        # base64-encode raw image buffers without mutating format_dict
        self.msg['content']['data'] = _encode_binary(format_dict)

    def finish_displayhook(self):
        """Finish up all displayhook activities."""
        # flush std streams first so their output precedes the pyout message
        sys.stdout.flush()
        sys.stderr.flush()
        self.session.send(self.pub_socket, self.msg, ident=self.topic)
        self.msg = None
73 75
@@ -1,919 +1,921 b''
1 1 #!/usr/bin/env python
2 2 """A simple interactive kernel that talks to a frontend over 0MQ.
3 3
4 4 Things to do:
5 5
6 6 * Implement `set_parent` logic. Right before doing exec, the Kernel should
7 7 call set_parent on all the PUB objects with the message about to be executed.
8 8 * Implement random port and security key logic.
9 9 * Implement control messages.
10 10 * Implement event loop and poll version.
11 11 """
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Imports
15 15 #-----------------------------------------------------------------------------
16 16 from __future__ import print_function
17 17
18 18 # Standard library imports
19 19 import __builtin__
20 20 import atexit
21 21 import sys
22 22 import time
23 23 import traceback
24 24 import logging
25 25 import uuid
26 26
27 27 from datetime import datetime
28 28 from signal import (
29 29 signal, getsignal, default_int_handler, SIGINT, SIG_IGN
30 30 )
31 31
32 32 # System library imports
33 33 import zmq
34 34 from zmq.eventloop import ioloop
35 35 from zmq.eventloop.zmqstream import ZMQStream
36 36
37 37 # Local imports
38 38 from IPython.core import pylabtools
39 39 from IPython.config.configurable import Configurable
40 40 from IPython.config.application import boolean_flag, catch_config_error
41 41 from IPython.core.application import ProfileDir
42 42 from IPython.core.error import StdinNotImplementedError
43 43 from IPython.core.shellapp import (
44 44 InteractiveShellApp, shell_flags, shell_aliases
45 45 )
46 46 from IPython.utils import io
47 47 from IPython.utils import py3compat
48 48 from IPython.utils.frame import extract_module_locals
49 49 from IPython.utils.jsonutil import json_clean
50 50 from IPython.utils.traitlets import (
51 51 Any, Instance, Float, Dict, CaselessStrEnum, List, Set, Integer, Unicode
52 52 )
53 53
54 54 from entry_point import base_launch_kernel
55 55 from kernelapp import KernelApp, kernel_flags, kernel_aliases
56 56 from serialize import serialize_object, unpack_apply_message
57 57 from session import Session, Message
58 58 from zmqshell import ZMQInteractiveShell
59 59
60 60
61 61 #-----------------------------------------------------------------------------
62 62 # Main kernel class
63 63 #-----------------------------------------------------------------------------
64 64
65 65 class Kernel(Configurable):
66 66
67 67 #---------------------------------------------------------------------------
68 68 # Kernel interface
69 69 #---------------------------------------------------------------------------
70 70
71 71 # attribute to override with a GUI
72 72 eventloop = Any(None)
73 73 def _eventloop_changed(self, name, old, new):
74 74 """schedule call to eventloop from IOLoop"""
75 75 loop = ioloop.IOLoop.instance()
76 76 loop.add_timeout(time.time()+0.1, self.enter_eventloop)
77 77
78 78 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
79 79 session = Instance(Session)
80 80 profile_dir = Instance('IPython.core.profiledir.ProfileDir')
81 81 shell_streams = List()
82 82 control_stream = Instance(ZMQStream)
83 83 iopub_socket = Instance(zmq.Socket)
84 84 stdin_socket = Instance(zmq.Socket)
85 85 log = Instance(logging.Logger)
86 86
87 87 user_module = Any()
88 88 def _user_module_changed(self, name, old, new):
89 89 if self.shell is not None:
90 90 self.shell.user_module = new
91 91
92 92 user_ns = Dict(default_value=None)
93 93 def _user_ns_changed(self, name, old, new):
94 94 if self.shell is not None:
95 95 self.shell.user_ns = new
96 96 self.shell.init_user_ns()
97 97
98 98 # identities:
99 99 int_id = Integer(-1)
100 100 ident = Unicode()
101 101
102 102 def _ident_default(self):
103 103 return unicode(uuid.uuid4())
104 104
105 105
106 106 # Private interface
107 107
108 108 # Time to sleep after flushing the stdout/err buffers in each execute
109 109 # cycle. While this introduces a hard limit on the minimal latency of the
110 110 # execute cycle, it helps prevent output synchronization problems for
111 111 # clients.
112 112 # Units are in seconds. The minimum zmq latency on local host is probably
113 113 # ~150 microseconds, set this to 500us for now. We may need to increase it
114 114 # a little if it's not enough after more interactive testing.
115 115 _execute_sleep = Float(0.0005, config=True)
116 116
117 117 # Frequency of the kernel's event loop.
118 118 # Units are in seconds, kernel subclasses for GUI toolkits may need to
119 119 # adapt to milliseconds.
120 120 _poll_interval = Float(0.05, config=True)
121 121
122 122 # If the shutdown was requested over the network, we leave here the
123 123 # necessary reply message so it can be sent by our registered atexit
124 124 # handler. This ensures that the reply is only sent to clients truly at
125 125 # the end of our shutdown process (which happens after the underlying
126 126 # IPython shell's own shutdown).
127 127 _shutdown_message = None
128 128
129 129 # This is a dict of port number that the kernel is listening on. It is set
130 130 # by record_ports and used by connect_request.
131 131 _recorded_ports = Dict()
132 132
133 133 # set of aborted msg_ids
134 134 aborted = Set()
135 135
136 136
137 137 def __init__(self, **kwargs):
138 138 super(Kernel, self).__init__(**kwargs)
139 139
140 140 # Initialize the InteractiveShell subclass
141 141 self.shell = ZMQInteractiveShell.instance(config=self.config,
142 142 profile_dir = self.profile_dir,
143 143 user_module = self.user_module,
144 144 user_ns = self.user_ns,
145 145 )
146 146 self.shell.displayhook.session = self.session
147 147 self.shell.displayhook.pub_socket = self.iopub_socket
148 148 self.shell.displayhook.topic = self._topic('pyout')
149 149 self.shell.display_pub.session = self.session
150 150 self.shell.display_pub.pub_socket = self.iopub_socket
151 151
152 152 # TMP - hack while developing
153 153 self.shell._reply_content = None
154 154
155 155 # Build dict of handlers for message types
156 156 msg_types = [ 'execute_request', 'complete_request',
157 157 'object_info_request', 'history_request',
158 158 'connect_request', 'shutdown_request',
159 159 'apply_request',
160 160 ]
161 161 self.shell_handlers = {}
162 162 for msg_type in msg_types:
163 163 self.shell_handlers[msg_type] = getattr(self, msg_type)
164 164
165 165 control_msg_types = msg_types + [ 'clear_request', 'abort_request' ]
166 166 self.control_handlers = {}
167 167 for msg_type in control_msg_types:
168 168 self.control_handlers[msg_type] = getattr(self, msg_type)
169 169
170 170 def dispatch_control(self, msg):
171 171 """dispatch control requests"""
172 172 idents,msg = self.session.feed_identities(msg, copy=False)
173 173 try:
174 174 msg = self.session.unserialize(msg, content=True, copy=False)
175 175 except:
176 176 self.log.error("Invalid Control Message", exc_info=True)
177 177 return
178 178
179 179 self.log.debug("Control received: %s", msg)
180 180
181 181 header = msg['header']
182 182 msg_id = header['msg_id']
183 183 msg_type = header['msg_type']
184 184
185 185 handler = self.control_handlers.get(msg_type, None)
186 186 if handler is None:
187 187 self.log.error("UNKNOWN CONTROL MESSAGE TYPE: %r", msg_type)
188 188 else:
189 189 try:
190 190 handler(self.control_stream, idents, msg)
191 191 except Exception:
192 192 self.log.error("Exception in control handler:", exc_info=True)
193 193
194 194 def dispatch_shell(self, stream, msg):
195 195 """dispatch shell requests"""
196 196 # flush control requests first
197 197 if self.control_stream:
198 198 self.control_stream.flush()
199 199
200 200 idents,msg = self.session.feed_identities(msg, copy=False)
201 201 try:
202 202 msg = self.session.unserialize(msg, content=True, copy=False)
203 203 except:
204 204 self.log.error("Invalid Message", exc_info=True)
205 205 return
206 206
207 207 header = msg['header']
208 208 msg_id = header['msg_id']
209 209 msg_type = msg['header']['msg_type']
210 210
211 211 # Print some info about this message and leave a '--->' marker, so it's
212 212 # easier to trace visually the message chain when debugging. Each
213 213 # handler prints its message at the end.
214 214 self.log.debug('\n*** MESSAGE TYPE:%s***', msg_type)
215 215 self.log.debug(' Content: %s\n --->\n ', msg['content'])
216 216
217 217 if msg_id in self.aborted:
218 218 self.aborted.remove(msg_id)
219 219 # is it safe to assume a msg_id will not be resubmitted?
220 220 reply_type = msg_type.split('_')[0] + '_reply'
221 221 status = {'status' : 'aborted'}
222 222 sub = {'engine' : self.ident}
223 223 sub.update(status)
224 224 reply_msg = self.session.send(stream, reply_type, subheader=sub,
225 225 content=status, parent=msg, ident=idents)
226 226 return
227 227
228 228 handler = self.shell_handlers.get(msg_type, None)
229 229 if handler is None:
230 230 self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type)
231 231 else:
232 232 # ensure default_int_handler during handler call
233 233 sig = signal(SIGINT, default_int_handler)
234 234 try:
235 235 handler(stream, idents, msg)
236 236 except Exception:
237 237 self.log.error("Exception in message handler:", exc_info=True)
238 238 finally:
239 239 signal(SIGINT, sig)
240 240
241 241 def enter_eventloop(self):
242 242 """enter eventloop"""
243 243 self.log.info("entering eventloop")
244 244 # restore default_int_handler
245 245 signal(SIGINT, default_int_handler)
246 246 while self.eventloop is not None:
247 247 try:
248 248 self.eventloop(self)
249 249 except KeyboardInterrupt:
250 250 # Ctrl-C shouldn't crash the kernel
251 251 self.log.error("KeyboardInterrupt caught in kernel")
252 252 continue
253 253 else:
254 254 # eventloop exited cleanly, this means we should stop (right?)
255 255 self.eventloop = None
256 256 break
257 257 self.log.info("exiting eventloop")
258 258 # if eventloop exits, IOLoop should stop
259 259 ioloop.IOLoop.instance().stop()
260 260
261 261 def start(self):
262 262 """register dispatchers for streams"""
263 263 self.shell.exit_now = False
264 264 if self.control_stream:
265 265 self.control_stream.on_recv(self.dispatch_control, copy=False)
266 266
267 267 def make_dispatcher(stream):
268 268 def dispatcher(msg):
269 269 return self.dispatch_shell(stream, msg)
270 270 return dispatcher
271 271
272 272 for s in self.shell_streams:
273 273 s.on_recv(make_dispatcher(s), copy=False)
274 274
275 275 def do_one_iteration(self):
276 276 """step eventloop just once"""
277 277 if self.control_stream:
278 278 self.control_stream.flush()
279 279 for stream in self.shell_streams:
280 280 # handle at most one request per iteration
281 281 stream.flush(zmq.POLLIN, 1)
282 282 stream.flush(zmq.POLLOUT)
283 283
284 284
285 285 def record_ports(self, ports):
286 286 """Record the ports that this kernel is using.
287 287
288 288 The creator of the Kernel instance must call this methods if they
289 289 want the :meth:`connect_request` method to return the port numbers.
290 290 """
291 291 self._recorded_ports = ports
292 292
293 293 #---------------------------------------------------------------------------
294 294 # Kernel request handlers
295 295 #---------------------------------------------------------------------------
296 296
297 297 def _make_subheader(self):
298 298 """init subheader dict, for execute/apply_reply"""
299 299 return {
300 300 'dependencies_met' : True,
301 301 'engine' : self.ident,
302 302 'started': datetime.now(),
303 303 }
304 304
305 305 def _publish_pyin(self, code, parent, execution_count):
306 306 """Publish the code request on the pyin stream."""
307 307
308 308 self.session.send(self.iopub_socket, u'pyin',
309 309 {u'code':code, u'execution_count': execution_count},
310 310 parent=parent, ident=self._topic('pyin')
311 311 )
312 312
313 313 def execute_request(self, stream, ident, parent):
314 314
315 315 self.session.send(self.iopub_socket,
316 316 u'status',
317 317 {u'execution_state':u'busy'},
318 318 parent=parent,
319 319 ident=self._topic('status'),
320 320 )
321 321
322 322 try:
323 323 content = parent[u'content']
324 324 code = content[u'code']
325 325 silent = content[u'silent']
326 326 except:
327 327 self.log.error("Got bad msg: ")
328 328 self.log.error("%s", parent)
329 329 return
330 330
331 331 sub = self._make_subheader()
332 332
333 333 shell = self.shell # we'll need this a lot here
334 334
335 335 # Replace raw_input. Note that is not sufficient to replace
336 336 # raw_input in the user namespace.
337 337 if content.get('allow_stdin', False):
338 338 raw_input = lambda prompt='': self._raw_input(prompt, ident, parent)
339 339 else:
340 340 raw_input = lambda prompt='' : self._no_raw_input()
341 341
342 342 if py3compat.PY3:
343 343 __builtin__.input = raw_input
344 344 else:
345 345 __builtin__.raw_input = raw_input
346 346
347 347 # Set the parent message of the display hook and out streams.
348 348 shell.displayhook.set_parent(parent)
349 349 shell.display_pub.set_parent(parent)
350 350 sys.stdout.set_parent(parent)
351 351 sys.stderr.set_parent(parent)
352 352
353 353 # Re-broadcast our input for the benefit of listening clients, and
354 354 # start computing output
355 355 if not silent:
356 356 self._publish_pyin(code, parent, shell.execution_count)
357 357
358 358 reply_content = {}
359 359 try:
360 360 # FIXME: the shell calls the exception handler itself.
361 361 shell.run_cell(code, store_history=not silent, silent=silent)
362 362 except:
363 363 status = u'error'
364 364 # FIXME: this code right now isn't being used yet by default,
365 365 # because the run_cell() call above directly fires off exception
366 366 # reporting. This code, therefore, is only active in the scenario
367 367 # where runlines itself has an unhandled exception. We need to
368 368 # uniformize this, for all exception construction to come from a
369 369 # single location in the codbase.
370 370 etype, evalue, tb = sys.exc_info()
371 371 tb_list = traceback.format_exception(etype, evalue, tb)
372 372 reply_content.update(shell._showtraceback(etype, evalue, tb_list))
373 373 else:
374 374 status = u'ok'
375 375
376 376 reply_content[u'status'] = status
377 377
378 378 # Return the execution counter so clients can display prompts
379 379 reply_content['execution_count'] = shell.execution_count - 1
380 380
381 381 # FIXME - fish exception info out of shell, possibly left there by
382 382 # runlines. We'll need to clean up this logic later.
383 383 if shell._reply_content is not None:
384 384 reply_content.update(shell._reply_content)
385 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='execute')
386 reply_content['engine_info'] = e_info
385 387 # reset after use
386 388 shell._reply_content = None
387 389
388 390 # At this point, we can tell whether the main code execution succeeded
389 391 # or not. If it did, we proceed to evaluate user_variables/expressions
390 392 if reply_content['status'] == 'ok':
391 393 reply_content[u'user_variables'] = \
392 394 shell.user_variables(content.get(u'user_variables', []))
393 395 reply_content[u'user_expressions'] = \
394 396 shell.user_expressions(content.get(u'user_expressions', {}))
395 397 else:
396 398 # If there was an error, don't even try to compute variables or
397 399 # expressions
398 400 reply_content[u'user_variables'] = {}
399 401 reply_content[u'user_expressions'] = {}
400 402
401 403 # Payloads should be retrieved regardless of outcome, so we can both
402 404 # recover partial output (that could have been generated early in a
403 405 # block, before an error) and clear the payload system always.
404 406 reply_content[u'payload'] = shell.payload_manager.read_payload()
405 407 # Be agressive about clearing the payload because we don't want
406 408 # it to sit in memory until the next execute_request comes in.
407 409 shell.payload_manager.clear_payload()
408 410
409 411 # Flush output before sending the reply.
410 412 sys.stdout.flush()
411 413 sys.stderr.flush()
412 414 # FIXME: on rare occasions, the flush doesn't seem to make it to the
413 415 # clients... This seems to mitigate the problem, but we definitely need
414 416 # to better understand what's going on.
415 417 if self._execute_sleep:
416 418 time.sleep(self._execute_sleep)
417 419
418 420 # Send the reply.
419 421 reply_content = json_clean(reply_content)
420 422
421 423 sub['status'] = reply_content['status']
422 424 if reply_content['status'] == 'error' and \
423 425 reply_content['ename'] == 'UnmetDependency':
424 426 sub['dependencies_met'] = False
425 427
426 428 reply_msg = self.session.send(stream, u'execute_reply',
427 429 reply_content, parent, subheader=sub,
428 430 ident=ident)
429 431
430 432 self.log.debug("%s", reply_msg)
431 433
432 434 if not silent and reply_msg['content']['status'] == u'error':
433 435 self._abort_queues()
434 436
435 437 self.session.send(self.iopub_socket,
436 438 u'status',
437 439 {u'execution_state':u'idle'},
438 440 parent=parent,
439 441 ident=self._topic('status'))
440 442
441 443 def complete_request(self, stream, ident, parent):
442 444 txt, matches = self._complete(parent)
443 445 matches = {'matches' : matches,
444 446 'matched_text' : txt,
445 447 'status' : 'ok'}
446 448 matches = json_clean(matches)
447 449 completion_msg = self.session.send(stream, 'complete_reply',
448 450 matches, parent, ident)
449 451 self.log.debug("%s", completion_msg)
450 452
451 453 def object_info_request(self, stream, ident, parent):
452 454 content = parent['content']
453 455 object_info = self.shell.object_inspect(content['oname'],
454 456 detail_level = content.get('detail_level', 0)
455 457 )
456 458 # Before we send this object over, we scrub it for JSON usage
457 459 oinfo = json_clean(object_info)
458 460 msg = self.session.send(stream, 'object_info_reply',
459 461 oinfo, parent, ident)
460 462 self.log.debug("%s", msg)
461 463
462 464 def history_request(self, stream, ident, parent):
463 465 # We need to pull these out, as passing **kwargs doesn't work with
464 466 # unicode keys before Python 2.6.5.
465 467 hist_access_type = parent['content']['hist_access_type']
466 468 raw = parent['content']['raw']
467 469 output = parent['content']['output']
468 470 if hist_access_type == 'tail':
469 471 n = parent['content']['n']
470 472 hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
471 473 include_latest=True)
472 474
473 475 elif hist_access_type == 'range':
474 476 session = parent['content']['session']
475 477 start = parent['content']['start']
476 478 stop = parent['content']['stop']
477 479 hist = self.shell.history_manager.get_range(session, start, stop,
478 480 raw=raw, output=output)
479 481
480 482 elif hist_access_type == 'search':
481 483 pattern = parent['content']['pattern']
482 484 hist = self.shell.history_manager.search(pattern, raw=raw,
483 485 output=output)
484 486
485 487 else:
486 488 hist = []
487 489 hist = list(hist)
488 490 content = {'history' : hist}
489 491 content = json_clean(content)
490 492 msg = self.session.send(stream, 'history_reply',
491 493 content, parent, ident)
492 494 self.log.debug("Sending history reply with %i entries", len(hist))
493 495
494 496 def connect_request(self, stream, ident, parent):
495 497 if self._recorded_ports is not None:
496 498 content = self._recorded_ports.copy()
497 499 else:
498 500 content = {}
499 501 msg = self.session.send(stream, 'connect_reply',
500 502 content, parent, ident)
501 503 self.log.debug("%s", msg)
502 504
503 505 def shutdown_request(self, stream, ident, parent):
504 506 self.shell.exit_now = True
505 507 content = dict(status='ok')
506 508 content.update(parent['content'])
507 509 self.session.send(stream, u'shutdown_reply', content, parent, ident=ident)
508 510 # same content, but different msg_id for broadcasting on IOPub
509 511 self._shutdown_message = self.session.msg(u'shutdown_reply',
510 512 content, parent
511 513 )
512 514
513 515 self._at_shutdown()
514 516 # call sys.exit after a short delay
515 517 loop = ioloop.IOLoop.instance()
516 518 loop.add_timeout(time.time()+0.1, loop.stop)
517 519
518 520 #---------------------------------------------------------------------------
519 521 # Engine methods
520 522 #---------------------------------------------------------------------------
521 523
522 524 def apply_request(self, stream, ident, parent):
523 525 try:
524 526 content = parent[u'content']
525 527 bufs = parent[u'buffers']
526 528 msg_id = parent['header']['msg_id']
527 529 except:
528 530 self.log.error("Got bad msg: %s", parent, exc_info=True)
529 531 return
530 532
531 533 # Set the parent message of the display hook and out streams.
532 534 self.shell.displayhook.set_parent(parent)
533 535 self.shell.display_pub.set_parent(parent)
534 536 sys.stdout.set_parent(parent)
535 537 sys.stderr.set_parent(parent)
536 538
537 539 # pyin_msg = self.session.msg(u'pyin',{u'code':code}, parent=parent)
538 540 # self.iopub_socket.send(pyin_msg)
539 541 # self.session.send(self.iopub_socket, u'pyin', {u'code':code},parent=parent)
540 542 sub = self._make_subheader()
541 543 try:
542 544 working = self.shell.user_ns
543 545
544 546 prefix = "_"+str(msg_id).replace("-","")+"_"
545 547
546 548 f,args,kwargs = unpack_apply_message(bufs, working, copy=False)
547 549
548 550 fname = getattr(f, '__name__', 'f')
549 551
550 552 fname = prefix+"f"
551 553 argname = prefix+"args"
552 554 kwargname = prefix+"kwargs"
553 555 resultname = prefix+"result"
554 556
555 557 ns = { fname : f, argname : args, kwargname : kwargs , resultname : None }
556 558 # print ns
557 559 working.update(ns)
558 560 code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname)
559 561 try:
560 562 exec code in self.shell.user_global_ns, self.shell.user_ns
561 563 result = working.get(resultname)
562 564 finally:
563 565 for key in ns.iterkeys():
564 566 working.pop(key)
565 567
566 568 packed_result,buf = serialize_object(result)
567 569 result_buf = [packed_result]+buf
568 570 except:
569 571 exc_content = self._wrap_exception('apply')
570 572 # exc_msg = self.session.msg(u'pyerr', exc_content, parent)
571 573 self.session.send(self.iopub_socket, u'pyerr', exc_content, parent=parent,
572 574 ident=self._topic('pyerr'))
573 575 reply_content = exc_content
574 576 result_buf = []
575 577
576 578 if exc_content['ename'] == 'UnmetDependency':
577 579 sub['dependencies_met'] = False
578 580 else:
579 581 reply_content = {'status' : 'ok'}
580 582
581 583 # put 'ok'/'error' status in header, for scheduler introspection:
582 584 sub['status'] = reply_content['status']
583 585
584 586 # flush i/o
585 587 sys.stdout.flush()
586 588 sys.stderr.flush()
587 589
588 590 reply_msg = self.session.send(stream, u'apply_reply', reply_content,
589 591 parent=parent, ident=ident,buffers=result_buf, subheader=sub)
590 592
591 593 #---------------------------------------------------------------------------
592 594 # Control messages
593 595 #---------------------------------------------------------------------------
594 596
595 597 def abort_request(self, stream, ident, parent):
596 598 """abort a specifig msg by id"""
597 599 msg_ids = parent['content'].get('msg_ids', None)
598 600 if isinstance(msg_ids, basestring):
599 601 msg_ids = [msg_ids]
600 602 if not msg_ids:
601 603 self.abort_queues()
602 604 for mid in msg_ids:
603 605 self.aborted.add(str(mid))
604 606
605 607 content = dict(status='ok')
606 608 reply_msg = self.session.send(stream, 'abort_reply', content=content,
607 609 parent=parent, ident=ident)
608 610 self.log.debug("%s", reply_msg)
609 611
610 612 def clear_request(self, stream, idents, parent):
611 613 """Clear our namespace."""
612 614 self.shell.reset(False)
613 615 msg = self.session.send(stream, 'clear_reply', ident=idents, parent=parent,
614 616 content = dict(status='ok'))
615 617
616 618
617 619 #---------------------------------------------------------------------------
618 620 # Protected interface
619 621 #---------------------------------------------------------------------------
620 622
621 623
622 624 def _wrap_exception(self, method=None):
623 625 # import here, because _wrap_exception is only used in parallel,
624 626 # and parallel has higher min pyzmq version
625 627 from IPython.parallel.error import wrap_exception
626 628 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method=method)
627 629 content = wrap_exception(e_info)
628 630 return content
629 631
630 632 def _topic(self, topic):
631 633 """prefixed topic for IOPub messages"""
632 634 if self.int_id >= 0:
633 635 base = "engine.%i" % self.int_id
634 636 else:
635 637 base = "kernel.%s" % self.ident
636 638
637 639 return py3compat.cast_bytes("%s.%s" % (base, topic))
638 640
639 641 def _abort_queues(self):
640 642 for stream in self.shell_streams:
641 643 if stream:
642 644 self._abort_queue(stream)
643 645
644 646 def _abort_queue(self, stream):
645 647 poller = zmq.Poller()
646 648 poller.register(stream.socket, zmq.POLLIN)
647 649 while True:
648 650 idents,msg = self.session.recv(stream, zmq.NOBLOCK, content=True)
649 651 if msg is None:
650 652 return
651 653
652 654 self.log.info("Aborting:")
653 655 self.log.info("%s", msg)
654 656 msg_type = msg['header']['msg_type']
655 657 reply_type = msg_type.split('_')[0] + '_reply'
656 658
657 659 status = {'status' : 'aborted'}
658 660 sub = {'engine' : self.ident}
659 661 sub.update(status)
660 662 reply_msg = self.session.send(stream, reply_type, subheader=sub,
661 663 content=status, parent=msg, ident=idents)
662 664 self.log.debug("%s", reply_msg)
663 665 # We need to wait a bit for requests to come in. This can probably
664 666 # be set shorter for true asynchronous clients.
665 667 poller.poll(50)
666 668
667 669
668 670 def _no_raw_input(self):
669 671 """Raise StdinNotImplentedError if active frontend doesn't support
670 672 stdin."""
671 673 raise StdinNotImplementedError("raw_input was called, but this "
672 674 "frontend does not support stdin.")
673 675
674 676 def _raw_input(self, prompt, ident, parent):
675 677 # Flush output before making the request.
676 678 sys.stderr.flush()
677 679 sys.stdout.flush()
678 680
679 681 # Send the input request.
680 682 content = json_clean(dict(prompt=prompt))
681 683 self.session.send(self.stdin_socket, u'input_request', content, parent,
682 684 ident=ident)
683 685
684 686 # Await a response.
685 687 while True:
686 688 try:
687 689 ident, reply = self.session.recv(self.stdin_socket, 0)
688 690 except Exception:
689 691 self.log.warn("Invalid Message:", exc_info=True)
690 692 else:
691 693 break
692 694 try:
693 695 value = reply['content']['value']
694 696 except:
695 697 self.log.error("Got bad raw_input reply: ")
696 698 self.log.error("%s", parent)
697 699 value = ''
698 700 if value == '\x04':
699 701 # EOF
700 702 raise EOFError
701 703 return value
702 704
703 705 def _complete(self, msg):
704 706 c = msg['content']
705 707 try:
706 708 cpos = int(c['cursor_pos'])
707 709 except:
708 710 # If we don't get something that we can convert to an integer, at
709 711 # least attempt the completion guessing the cursor is at the end of
710 712 # the text, if there's any, and otherwise of the line
711 713 cpos = len(c['text'])
712 714 if cpos==0:
713 715 cpos = len(c['line'])
714 716 return self.shell.complete(c['text'], c['line'], cpos)
715 717
716 718 def _object_info(self, context):
717 719 symbol, leftover = self._symbol_from_context(context)
718 720 if symbol is not None and not leftover:
719 721 doc = getattr(symbol, '__doc__', '')
720 722 else:
721 723 doc = ''
722 724 object_info = dict(docstring = doc)
723 725 return object_info
724 726
725 727 def _symbol_from_context(self, context):
726 728 if not context:
727 729 return None, context
728 730
729 731 base_symbol_string = context[0]
730 732 symbol = self.shell.user_ns.get(base_symbol_string, None)
731 733 if symbol is None:
732 734 symbol = __builtin__.__dict__.get(base_symbol_string, None)
733 735 if symbol is None:
734 736 return None, context
735 737
736 738 context = context[1:]
737 739 for i, name in enumerate(context):
738 740 new_symbol = getattr(symbol, name, None)
739 741 if new_symbol is None:
740 742 return symbol, context[i:]
741 743 else:
742 744 symbol = new_symbol
743 745
744 746 return symbol, []
745 747
746 748 def _at_shutdown(self):
747 749 """Actions taken at shutdown by the kernel, called by python's atexit.
748 750 """
749 751 # io.rprint("Kernel at_shutdown") # dbg
750 752 if self._shutdown_message is not None:
751 753 self.session.send(self.iopub_socket, self._shutdown_message, ident=self._topic('shutdown'))
752 754 self.log.debug("%s", self._shutdown_message)
753 755 [ s.flush(zmq.POLLOUT) for s in self.shell_streams ]
754 756
755 757 #-----------------------------------------------------------------------------
756 758 # Aliases and Flags for the IPKernelApp
757 759 #-----------------------------------------------------------------------------
758 760
759 761 flags = dict(kernel_flags)
760 762 flags.update(shell_flags)
761 763
762 764 addflag = lambda *args: flags.update(boolean_flag(*args))
763 765
764 766 flags['pylab'] = (
765 767 {'IPKernelApp' : {'pylab' : 'auto'}},
766 768 """Pre-load matplotlib and numpy for interactive use with
767 769 the default matplotlib backend."""
768 770 )
769 771
770 772 aliases = dict(kernel_aliases)
771 773 aliases.update(shell_aliases)
772 774
773 775 # it's possible we don't want short aliases for *all* of these:
774 776 aliases.update(dict(
775 777 pylab='IPKernelApp.pylab',
776 778 ))
777 779
778 780 #-----------------------------------------------------------------------------
779 781 # The IPKernelApp class
780 782 #-----------------------------------------------------------------------------
781 783
782 784 class IPKernelApp(KernelApp, InteractiveShellApp):
783 785 name = 'ipkernel'
784 786
785 787 aliases = Dict(aliases)
786 788 flags = Dict(flags)
787 789 classes = [Kernel, ZMQInteractiveShell, ProfileDir, Session]
788 790
789 791 # configurables
790 792 pylab = CaselessStrEnum(['tk', 'qt', 'wx', 'gtk', 'osx', 'inline', 'auto'],
791 793 config=True,
792 794 help="""Pre-load matplotlib and numpy for interactive use,
793 795 selecting a particular matplotlib backend and loop integration.
794 796 """
795 797 )
796 798
797 799 @catch_config_error
798 800 def initialize(self, argv=None):
799 801 super(IPKernelApp, self).initialize(argv)
800 802 self.init_path()
801 803 self.init_shell()
802 804 self.init_extensions()
803 805 self.init_code()
804 806
805 807 def init_kernel(self):
806 808
807 809 shell_stream = ZMQStream(self.shell_socket)
808 810
809 811 kernel = Kernel(config=self.config, session=self.session,
810 812 shell_streams=[shell_stream],
811 813 iopub_socket=self.iopub_socket,
812 814 stdin_socket=self.stdin_socket,
813 815 log=self.log,
814 816 profile_dir=self.profile_dir,
815 817 )
816 818 self.kernel = kernel
817 819 kernel.record_ports(self.ports)
818 820 shell = kernel.shell
819 821 if self.pylab:
820 822 try:
821 823 gui, backend = pylabtools.find_gui_and_backend(self.pylab)
822 824 shell.enable_pylab(gui, import_all=self.pylab_import_all)
823 825 except Exception:
824 826 self.log.error("Pylab initialization failed", exc_info=True)
825 827 # print exception straight to stdout, because normally
826 828 # _showtraceback associates the reply with an execution,
827 829 # which means frontends will never draw it, as this exception
828 830 # is not associated with any execute request.
829 831
830 832 # replace pyerr-sending traceback with stdout
831 833 _showtraceback = shell._showtraceback
832 834 def print_tb(etype, evalue, stb):
833 835 print ("Error initializing pylab, pylab mode will not "
834 836 "be active", file=io.stderr)
835 837 print (shell.InteractiveTB.stb2text(stb), file=io.stdout)
836 838 shell._showtraceback = print_tb
837 839
838 840 # send the traceback over stdout
839 841 shell.showtraceback(tb_offset=0)
840 842
841 843 # restore proper _showtraceback method
842 844 shell._showtraceback = _showtraceback
843 845
844 846
845 847 def init_shell(self):
846 848 self.shell = self.kernel.shell
847 849 self.shell.configurables.append(self)
848 850
849 851
850 852 #-----------------------------------------------------------------------------
851 853 # Kernel main and launch functions
852 854 #-----------------------------------------------------------------------------
853 855
854 856 def launch_kernel(*args, **kwargs):
855 857 """Launches a localhost IPython kernel, binding to the specified ports.
856 858
857 859 This function simply calls entry_point.base_launch_kernel with the right
858 860 first command to start an ipkernel. See base_launch_kernel for arguments.
859 861
860 862 Returns
861 863 -------
862 864 A tuple of form:
863 865 (kernel_process, shell_port, iopub_port, stdin_port, hb_port)
864 866 where kernel_process is a Popen object and the ports are integers.
865 867 """
866 868 return base_launch_kernel('from IPython.zmq.ipkernel import main; main()',
867 869 *args, **kwargs)
868 870
869 871
870 872 def embed_kernel(module=None, local_ns=None, **kwargs):
871 873 """Embed and start an IPython kernel in a given scope.
872 874
873 875 Parameters
874 876 ----------
875 877 module : ModuleType, optional
876 878 The module to load into IPython globals (default: caller)
877 879 local_ns : dict, optional
878 880 The namespace to load into IPython user namespace (default: caller)
879 881
880 882 kwargs : various, optional
881 883 Further keyword args are relayed to the KernelApp constructor,
882 884 allowing configuration of the Kernel. Will only have an effect
883 885 on the first embed_kernel call for a given process.
884 886
885 887 """
886 888 # get the app if it exists, or set it up if it doesn't
887 889 if IPKernelApp.initialized():
888 890 app = IPKernelApp.instance()
889 891 else:
890 892 app = IPKernelApp.instance(**kwargs)
891 893 app.initialize([])
892 894 # Undo unnecessary sys module mangling from init_sys_modules.
893 895 # This would not be necessary if we could prevent it
894 896 # in the first place by using a different InteractiveShell
895 897 # subclass, as in the regular embed case.
896 898 main = app.kernel.shell._orig_sys_modules_main_mod
897 899 if main is not None:
898 900 sys.modules[app.kernel.shell._orig_sys_modules_main_name] = main
899 901
900 902 # load the calling scope if not given
901 903 (caller_module, caller_locals) = extract_module_locals(1)
902 904 if module is None:
903 905 module = caller_module
904 906 if local_ns is None:
905 907 local_ns = caller_locals
906 908
907 909 app.kernel.user_module = module
908 910 app.kernel.user_ns = local_ns
909 911 app.start()
910 912
911 913 def main():
912 914 """Run an IPKernel as an application"""
913 915 app = IPKernelApp.instance()
914 916 app.initialize()
915 917 app.start()
916 918
917 919
918 920 if __name__ == '__main__':
919 921 main()
@@ -1,540 +1,539 b''
1 1 """A ZMQ-based subclass of InteractiveShell.
2 2
3 3 This code is meant to ease the refactoring of the base InteractiveShell into
4 4 something with a cleaner architecture for 2-process use, without actually
5 5 breaking InteractiveShell itself. So we're doing something a bit ugly, where
6 6 we subclass and override what we want to fix. Once this is working well, we
7 7 can go back to the base class and refactor the code for a cleaner inheritance
8 8 implementation that doesn't rely on so much monkeypatching.
9 9
10 10 But this lets us maintain a fully working IPython as we develop the new
11 11 machinery. This should thus be thought of as scaffolding.
12 12 """
13 13 #-----------------------------------------------------------------------------
14 14 # Imports
15 15 #-----------------------------------------------------------------------------
16 16 from __future__ import print_function
17 17
18 18 # Stdlib
19 19 import inspect
20 20 import os
21 21 import sys
22 22 import time
23 23 from subprocess import Popen, PIPE
24 24
25 25 # System library imports
26 26 from zmq.eventloop import ioloop
27 27
28 28 # Our own
29 29 from IPython.core.interactiveshell import (
30 30 InteractiveShell, InteractiveShellABC
31 31 )
32 32 from IPython.core import page, pylabtools
33 33 from IPython.core.autocall import ZMQExitAutocall
34 34 from IPython.core.displaypub import DisplayPublisher
35 35 from IPython.core.macro import Macro
36 36 from IPython.core.magics import MacroToEdit
37 37 from IPython.core.payloadpage import install_payload_page
38 38 from IPython.lib.kernel import (
39 39 get_connection_file, get_connection_info, connect_qtconsole
40 40 )
41 41 from IPython.testing.skipdoctest import skip_doctest
42 42 from IPython.utils import io
43 43 from IPython.utils.jsonutil import json_clean
44 44 from IPython.utils.path import get_py_filename
45 45 from IPython.utils.process import arg_split
46 46 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes
47 47 from IPython.utils.warn import warn, error
48 48 from IPython.zmq.displayhook import ZMQShellDisplayHook, _encode_binary
49 49 from IPython.zmq.session import extract_header
50 50 from session import Session
51 51
52 52 #-----------------------------------------------------------------------------
53 53 # Functions and classes
54 54 #-----------------------------------------------------------------------------
55 55
class ZMQDisplayPublisher(DisplayPublisher):
    """A display publisher that publishes data using a ZeroMQ PUB socket."""

    session = Instance(Session)
    pub_socket = Instance('zmq.Socket')
    parent_header = Dict({})
    topic = CBytes(b'displaypub')

    def set_parent(self, parent):
        """Set the parent for outbound messages."""
        self.parent_header = extract_header(parent)

    def _flush_streams(self):
        """flush IO Streams prior to display"""
        for stream in (sys.stdout, sys.stderr):
            stream.flush()

    def publish(self, source, data, metadata=None):
        """Validate and send *data* as a ``display_data`` message."""
        self._flush_streams()
        metadata = {} if metadata is None else metadata
        self._validate_data(source, data, metadata)
        content = {
            'source': source,
            'data': _encode_binary(data),
            'metadata': metadata,
        }
        self.session.send(
            self.pub_socket, u'display_data', json_clean(content),
            parent=self.parent_header, ident=self.topic,
        )

    def clear_output(self, stdout=True, stderr=True, other=True):
        """Ask frontends to clear output; reset local streams with '\\r'."""
        content = dict(stdout=stdout, stderr=stderr, other=other)

        if stdout:
            sys.stdout.write('\r')
        if stderr:
            sys.stderr.write('\r')

        self._flush_streams()

        self.session.send(
            self.pub_socket, u'clear_output', content,
            parent=self.parent_header, ident=self.topic,
        )
102 101
class ZMQInteractiveShell(InteractiveShell):
    """A subclass of InteractiveShell for ZMQ."""

    # Route displayhook/display_pub output over zmq instead of a terminal.
    displayhook_class = Type(ZMQShellDisplayHook)
    display_pub_class = Type(ZMQDisplayPublisher)

    # Override the traitlet in the parent class, because there's no point using
    # readline for the kernel. Can be removed when the readline code is moved
    # to the terminal frontend.
    colors_force = CBool(True)
    readline_use = CBool(False)
    # autoindent has no meaning in a zmqshell, and attempting to enable it
    # will print a warning in the absence of readline.
    autoindent = CBool(False)

    exiter = Instance(ZMQExitAutocall)
    def _exiter_default(self):
        # Default trait value: exit autocall bound to this shell instance.
        return ZMQExitAutocall(self)

    def _exit_now_changed(self, name, old, new):
        """stop eventloop when exit_now fires"""
        if new:
            # Small delay so the reply to the exit request can still go out.
            loop = ioloop.IOLoop.instance()
            loop.add_timeout(time.time()+0.1, loop.stop)

    keepkernel_on_exit = None

    # Over ZeroMQ, GUI control isn't done with PyOS_InputHook as there is no
    # interactive input being read; we provide event loop support in ipkernel
    from .eventloops import enable_gui
    enable_gui = staticmethod(enable_gui)
134 133
135 134 def init_environment(self):
136 135 """Configure the user's environment.
137 136
138 137 """
139 138 env = os.environ
140 139 # These two ensure 'ls' produces nice coloring on BSD-derived systems
141 140 env['TERM'] = 'xterm-color'
142 141 env['CLICOLOR'] = '1'
143 142 # Since normal pagers don't work at all (over pexpect we don't have
144 143 # single-key control of the subprocess), try to disable paging in
145 144 # subprocesses as much as possible.
146 145 env['PAGER'] = 'cat'
147 146 env['GIT_PAGER'] = 'cat'
148 147
149 148 # And install the payload version of page.
150 149 install_payload_page()
151 150
152 151 def auto_rewrite_input(self, cmd):
153 152 """Called to show the auto-rewritten input for autocall and friends.
154 153
155 154 FIXME: this payload is currently not correctly processed by the
156 155 frontend.
157 156 """
158 157 new = self.prompt_manager.render('rewrite') + cmd
159 158 payload = dict(
160 159 source='IPython.zmq.zmqshell.ZMQInteractiveShell.auto_rewrite_input',
161 160 transformed_input=new,
162 161 )
163 162 self.payload_manager.write_payload(payload)
164 163
165 164 def ask_exit(self):
166 165 """Engage the exit actions."""
167 166 self.exit_now = True
168 167 payload = dict(
169 168 source='IPython.zmq.zmqshell.ZMQInteractiveShell.ask_exit',
170 169 exit=True,
171 170 keepkernel=self.keepkernel_on_exit,
172 171 )
173 172 self.payload_manager.write_payload(payload)
174 173
175 174 def _showtraceback(self, etype, evalue, stb):
176 175
177 176 exc_content = {
178 177 u'traceback' : stb,
179 178 u'ename' : unicode(etype.__name__),
180 179 u'evalue' : unicode(evalue)
181 180 }
182 181
183 182 dh = self.displayhook
184 183 # Send exception info over pub socket for other clients than the caller
185 184 # to pick up
186 185 topic = None
187 186 if dh.topic:
188 187 topic = dh.topic.replace(b'pyout', b'pyerr')
189 188
190 189 exc_msg = dh.session.send(dh.pub_socket, u'pyerr', json_clean(exc_content), dh.parent_header, ident=topic)
191 190
192 191 # FIXME - Hack: store exception info in shell object. Right now, the
193 192 # caller is reading this info after the fact, we need to fix this logic
194 193 # to remove this hack. Even uglier, we need to store the error status
195 194 # here, because in the main loop, the logic that sets it is being
196 195 # skipped because runlines swallows the exceptions.
197 196 exc_content[u'status'] = u'error'
198 197 self._reply_content = exc_content
199 198 # /FIXME
200 199
201 200 return exc_content
202 201
203 202 #------------------------------------------------------------------------
204 203 # Magic overrides
205 204 #------------------------------------------------------------------------
206 205 # Once the base class stops inheriting from magic, this code needs to be
207 206 # moved into a separate machinery as well. For now, at least isolate here
208 207 # the magics which this class needs to implement differently from the base
209 208 # class, or that are unique to it.
210 209
    def magic_doctest_mode(self,parameter_s=''):
        """Toggle doctest mode on and off.

        This mode is intended to make IPython behave as much as possible like a
        plain Python shell, from the perspective of how its prompts, exceptions
        and output look. This makes it easy to copy and paste parts of a
        session into doctests. It does so by:

        - Changing the prompts to the classic ``>>>`` ones.
        - Changing the exception reporting mode to 'Plain'.
        - Disabling pretty-printing of output.

        Note that IPython also supports the pasting of code snippets that have
        leading '>>>' and '...' prompts in them. This means that you can paste
        doctests from files or docstrings (even if they have leading
        whitespace), and the code will execute correctly. You can then use
        '%history -t' to see the translated history; this will give you the
        input after removal of all the leading prompts and whitespace, which
        can be pasted back into an editor.

        With these features, you can switch into this mode easily whenever you
        need to do testing and changes to doctests, without having to leave
        your existing IPython session.
        """

        from IPython.utils.ipstruct import Struct

        # Shorthands
        shell = self.shell
        disp_formatter = self.shell.display_formatter
        ptformatter = disp_formatter.formatters['text/plain']
        # dstore is a data store kept in the instance metadata bag to track any
        # changes we make, so we can undo them later.
        dstore = shell.meta.setdefault('doctest_mode', Struct())
        save_dstore = dstore.setdefault

        # save a few values we'll need to recover later
        # (setdefault returns the already-saved value on subsequent toggles,
        # so the original settings survive repeated on/off cycles)
        mode = save_dstore('mode', False)
        save_dstore('rc_pprint', ptformatter.pprint)
        save_dstore('rc_plain_text_only',disp_formatter.plain_text_only)
        save_dstore('xmode', shell.InteractiveTB.mode)

        if mode == False:
            # turn on
            ptformatter.pprint = False
            disp_formatter.plain_text_only = True
            shell.magic_xmode('Plain')
        else:
            # turn off
            ptformatter.pprint = dstore.rc_pprint
            disp_formatter.plain_text_only = dstore.rc_plain_text_only
            shell.magic_xmode(dstore.xmode)

        # Store new mode and inform on console
        dstore.mode = bool(1-int(mode))
        mode_label = ['OFF','ON'][dstore.mode]
        print('Doctest mode is:', mode_label)

        # Send the payload back so that clients can modify their prompt display
        payload = dict(
            source='IPython.zmq.zmqshell.ZMQInteractiveShell.magic_doctest_mode',
            mode=dstore.mode)
        self.payload_manager.write_payload(payload)
274 273
    @skip_doctest
    def magic_edit(self,parameter_s='',last_call=['','']):
        """Bring up an editor and execute the resulting code.

        Usage:
          %edit [options] [args]

        %edit runs an external text editor. You will need to set the command for
        this editor via the ``TerminalInteractiveShell.editor`` option in your
        configuration file before it will work.

        This command allows you to conveniently edit multi-line code right in
        your IPython session.

        If called without arguments, %edit opens up an empty editor with a
        temporary file and will execute the contents of this file when you
        close it (don't forget to save it!).


        Options:

        -n <number>: open the editor at a specified line number. By default,
        the IPython editor hook uses the unix syntax 'editor +N filename', but
        you can configure this by providing your own modified hook if your
        favorite editor supports line-number specifications with a different
        syntax.

        -p: this will call the editor with the same data as the previous time
        it was used, regardless of how long ago (in your current session) it
        was.

        -r: use 'raw' input. This option only applies to input taken from the
        user's history. By default, the 'processed' history is used, so that
        magics are loaded in their transformed version to valid Python. If
        this option is given, the raw input as typed as the command line is
        used instead. When you exit the editor, it will be executed by
        IPython's own processor.

        -x: do not execute the edited code immediately upon exit. This is
        mainly useful if you are editing programs which need to be called with
        command line arguments, which you can then do using %run.


        Arguments:

        If arguments are given, the following possibilities exist:

        - The arguments are numbers or pairs of colon-separated numbers (like
        1 4:8 9). These are interpreted as lines of previous input to be
        loaded into the editor. The syntax is the same of the %macro command.

        - If the argument doesn't start with a number, it is evaluated as a
        variable and its contents loaded into the editor. You can thus edit
        any string which contains python code (including the result of
        previous edits).

        - If the argument is the name of an object (other than a string),
        IPython will try to locate the file where it was defined and open the
        editor at the point where it is defined. You can use `%edit function`
        to load an editor exactly at the point where 'function' is defined,
        edit it and have the file be executed automatically.

        If the object is a macro (see %macro for details), this opens up your
        specified editor with a temporary file containing the macro's data.
        Upon exit, the macro is reloaded with the contents of the file.

        Note: opening at an exact line is only supported under Unix, and some
        editors (like kedit and gedit up to Gnome 2.8) do not understand the
        '+NUMBER' parameter necessary for this feature. Good editors like
        (X)Emacs, vi, jed, pico and joe all do.

        - If the argument is not found as a variable, IPython will look for a
        file with that name (adding .py if necessary) and load it into the
        editor. It will execute its contents with execfile() when you exit,
        loading any code in the file into your interactive namespace.

        After executing your code, %edit will return as output the code you
        typed in the editor (except when it was an existing file). This way
        you can reload the code in further invocations of %edit as a variable,
        via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of
        the output.

        Note that %edit is also available through the alias %ed.

        This is an example of creating a simple function inside the editor and
        then modifying it. First, start up the editor:

        In [1]: ed
        Editing... done. Executing edited code...
        Out[1]: 'def foo():\\n    print "foo() was defined in an editing session"\\n'

        We can then call the function foo():

        In [2]: foo()
        foo() was defined in an editing session

        Now we edit foo. IPython automatically loads the editor with the
        (temporary) file where foo() was previously defined:

        In [3]: ed foo
        Editing... done. Executing edited code...

        And if we call foo() again we get the modified version:

        In [4]: foo()
        foo() has now been changed!

        Here is an example of how to edit a code snippet successive
        times. First we call the editor:

        In [5]: ed
        Editing... done. Executing edited code...
        hello
        Out[5]: "print 'hello'\\n"

        Now we call it again with the previous output (stored in _):

        In [6]: ed _
        Editing... done. Executing edited code...
        hello world
        Out[6]: "print 'hello world'\\n"

        Now we call it with the output #8 (stored in _8, also as Out[8]):

        In [7]: ed _8
        Editing... done. Executing edited code...
        hello again
        Out[7]: "print 'hello again'\\n"
        """

        opts,args = self.parse_options(parameter_s,'prn:')

        try:
            filename, lineno, _ = self._find_edit_target(args, opts, last_call)
        except MacroToEdit as e:
            # TODO: Implement macro editing over 2 processes.
            print("Macro editing not yet implemented in 2-process model.")
            return

        # Make sure we send to the client an absolute path, in case the working
        # directory of client and kernel don't match
        filename = os.path.abspath(filename)

        # The frontend performs the actual editing; the kernel only sends a
        # payload naming the file and line to open.
        payload = {
            'source' : 'IPython.zmq.zmqshell.ZMQInteractiveShell.edit_magic',
            'filename' : filename,
            'line_number' : lineno
        }
        self.payload_manager.write_payload(payload)
424 423
425 424 # A few magics that are adapted to the specifics of using pexpect and a
426 425 # remote terminal
427 426
428 427 def magic_clear(self, arg_s):
429 428 """Clear the terminal."""
430 429 if os.name == 'posix':
431 430 self.shell.system("clear")
432 431 else:
433 432 self.shell.system("cls")
434 433
435 434 if os.name == 'nt':
436 435 # This is the usual name in windows
437 436 magic_cls = magic_clear
438 437
439 438 # Terminal pagers won't work over pexpect, but we do have our own pager
440 439
441 440 def magic_less(self, arg_s):
442 441 """Show a file through the pager.
443 442
444 443 Files ending in .py are syntax-highlighted."""
445 444 cont = open(arg_s).read()
446 445 if arg_s.endswith('.py'):
447 446 cont = self.shell.pycolorize(cont)
448 447 page.page(cont)
449 448
450 449 magic_more = magic_less
451 450
452 451 # Man calls a pager, so we also need to redefine it
453 452 if os.name == 'posix':
454 453 def magic_man(self, arg_s):
455 454 """Find the man page for the given command and display in pager."""
456 455 page.page(self.shell.getoutput('man %s | col -b' % arg_s,
457 456 split=False))
458 457
    # FIXME: this is specific to the GUI, so we should let the gui app load
    # magics at startup that are only for the gui. Once the gui app has proper
    # profile and configuration management, we can have it initialize a kernel
    # with a special config file that provides these.
    def magic_guiref(self, arg_s):
        """Show a basic reference about the GUI console."""
        # Page the reference text; auto_html renders it as HTML where supported.
        from IPython.core.usage import gui_reference
        page.page(gui_reference, auto_html=True)
467 466
468 467 def magic_connect_info(self, arg_s):
469 468 """Print information for connecting other clients to this kernel
470 469
471 470 It will print the contents of this session's connection file, as well as
472 471 shortcuts for local clients.
473 472
474 473 In the simplest case, when called from the most recently launched kernel,
475 474 secondary clients can be connected, simply with:
476 475
477 476 $> ipython <app> --existing
478 477
479 478 """
480 479
481 480 from IPython.core.application import BaseIPythonApplication as BaseIPApp
482 481
483 482 if BaseIPApp.initialized():
484 483 app = BaseIPApp.instance()
485 484 security_dir = app.profile_dir.security_dir
486 485 profile = app.profile
487 486 else:
488 487 profile = 'default'
489 488 security_dir = ''
490 489
491 490 try:
492 491 connection_file = get_connection_file()
493 492 info = get_connection_info(unpack=False)
494 493 except Exception as e:
495 494 error("Could not get connection info: %r" % e)
496 495 return
497 496
498 497 # add profile flag for non-default profile
499 498 profile_flag = "--profile %s" % profile if profile != 'default' else ""
500 499
501 500 # if it's in the security dir, truncate to basename
502 501 if security_dir == os.path.dirname(connection_file):
503 502 connection_file = os.path.basename(connection_file)
504 503
505 504
506 505 print (info + '\n')
507 506 print ("Paste the above JSON into a file, and connect with:\n"
508 507 " $> ipython <app> --existing <file>\n"
509 508 "or, if you are local, you can connect with just:\n"
510 509 " $> ipython <app> --existing {0} {1}\n"
511 510 "or even just:\n"
512 511 " $> ipython <app> --existing {1}\n"
513 512 "if this is the most recent IPython session you have started.".format(
514 513 connection_file, profile_flag
515 514 )
516 515 )
517 516
518 517 def magic_qtconsole(self, arg_s):
519 518 """Open a qtconsole connected to this kernel.
520 519
521 520 Useful for connecting a qtconsole to running notebooks, for better
522 521 debugging.
523 522 """
524 523 try:
525 524 p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix'))
526 525 except Exception as e:
527 526 error("Could not start qtconsole: %r" % e)
528 527 return
529 528
530 529 def set_next_input(self, text):
531 530 """Send the specified text to the frontend to be presented at the next
532 531 input cell."""
533 532 payload = dict(
534 533 source='IPython.zmq.zmqshell.ZMQInteractiveShell.set_next_input',
535 534 text=text
536 535 )
537 536 self.payload_manager.write_payload(payload)
538 537
539 538
# Register ZMQInteractiveShell as a concrete implementation of the
# InteractiveShell ABC, so isinstance checks against the ABC accept it.
InteractiveShellABC.register(ZMQInteractiveShell)
@@ -1,151 +1,149 b''
1 1 .. _parallelmpi:
2 2
3 3 =======================
4 4 Using MPI with IPython
5 5 =======================
6 6
7 7 Often, a parallel algorithm will require moving data between the engines. One
8 8 way of accomplishing this is by doing a pull and then a push using the
9 9 multiengine client. However, this will be slow as all the data has to go
10 10 through the controller to the client and then back through the controller, to
11 11 its final destination.
12 12
13 13 A much better way of moving data between engines is to use a message passing
14 14 library, such as the Message Passing Interface (MPI) [MPI]_. IPython's
15 15 parallel computing architecture has been designed from the ground up to
16 16 integrate with MPI. This document describes how to use MPI with IPython.
17 17
18 18 Additional installation requirements
19 19 ====================================
20 20
21 21 If you want to use MPI with IPython, you will need to install:
22 22
23 23 * A standard MPI implementation such as OpenMPI [OpenMPI]_ or MPICH.
24 24 * The mpi4py [mpi4py]_ package.
25 25
26 26 .. note::
27 27
28 28 The mpi4py package is not a strict requirement. However, you need to
29 29 have *some* way of calling MPI from Python. You also need some way of
30 30 making sure that :func:`MPI_Init` is called when the IPython engines start
31 31 up. There are a number of ways of doing this and a good number of
32 32 associated subtleties. We highly recommend just using mpi4py as it
33 33 takes care of most of these problems. If you want to do something
34 34 different, let us know and we can help you get started.
35 35
36 36 Starting the engines with MPI enabled
37 37 =====================================
38 38
39 39 To use code that calls MPI, there are typically two things that MPI requires.
40 40
41 41 1. The process that wants to call MPI must be started using
42 42 :command:`mpiexec` or a batch system (like PBS) that has MPI support.
43 43 2. Once the process starts, it must call :func:`MPI_Init`.
44 44
45 45 There are a couple of ways that you can start the IPython engines and get
46 46 these things to happen.
47 47
48 48 Automatic starting using :command:`mpiexec` and :command:`ipcluster`
49 49 --------------------------------------------------------------------
50 50
51 51 The easiest approach is to use the `MPI` Launchers in :command:`ipcluster`,
52 52 which will first start a controller and then a set of engines using
53 53 :command:`mpiexec`::
54 54
55 55 $ ipcluster start -n 4 --engines=MPIEngineSetLauncher
56 56
57 57 This approach is best as interrupting :command:`ipcluster` will automatically
58 58 stop and clean up the controller and engines.
59 59
60 60 Manual starting using :command:`mpiexec`
61 61 ----------------------------------------
62 62
63 63 If you want to start the IPython engines using the :command:`mpiexec`, just
64 64 do::
65 65
66 66 $ mpiexec -n 4 ipengine --mpi=mpi4py
67 67
68 68 This requires that you already have a controller running and that the FURL
69 69 files for the engines are in place. We also have built in support for
70 70 PyTrilinos [PyTrilinos]_, which can be used (assuming is installed) by
71 71 starting the engines with::
72 72
73 73 $ mpiexec -n 4 ipengine --mpi=pytrilinos
74 74
75 75 Automatic starting using PBS and :command:`ipcluster`
76 76 ------------------------------------------------------
77 77
78 78 The :command:`ipcluster` command also has built-in integration with PBS. For
79 79 more information on this approach, see our documentation on :ref:`ipcluster
80 80 <parallel_process>`.
81 81
82 82 Actually using MPI
83 83 ==================
84 84
85 85 Once the engines are running with MPI enabled, you are ready to go. You can
86 86 now call any code that uses MPI in the IPython engines. And, all of this can
87 87 be done interactively. Here we show a simple example that uses mpi4py
88 88 [mpi4py]_ version 1.1.0 or later.
89 89
90 90 First, let's define a simple function that uses MPI to calculate the sum of a
91 91 distributed array. Save the following text in a file called :file:`psum.py`:
92 92
93 93 .. sourcecode:: python
94 94
95 95 from mpi4py import MPI
96 96 import numpy as np
97 97
98 98 def psum(a):
99 99 s = np.sum(a)
100 100 rcvBuf = np.array(0.0,'d')
101 101 MPI.COMM_WORLD.Allreduce([s, MPI.DOUBLE],
102 102 [rcvBuf, MPI.DOUBLE],
103 103 op=MPI.SUM)
104 104 return rcvBuf
105 105
106 106 Now, start an IPython cluster::
107 107
108 108 $ ipcluster start --profile=mpi -n 4
109 109
110 110 .. note::
111 111
112 112 It is assumed here that the mpi profile has been set up, as described :ref:`here
113 113 <parallel_process>`.
114 114
115 115 Finally, connect to the cluster and use this function interactively. In this
116 116 case, we create a random array on each engine and sum up all the random arrays
117 117 using our :func:`psum` function:
118 118
119 119 .. sourcecode:: ipython
120 120
121 121 In [1]: from IPython.parallel import Client
122 122
123 In [2]: %load_ext parallel_magic
124
125 123 In [3]: c = Client(profile='mpi')
126 124
127 125 In [4]: view = c[:]
128 126
129 In [5]: view.activate()
129 127 In [5]: view.activate() # enable magics
130 128
131 129 # run the contents of the file on each engine:
132 130 In [6]: view.run('psum.py')
133 131
134 In [6]: px a = np.random.rand(100)
132 In [6]: %px a = np.random.rand(100)
135 133 Parallel execution on engines: [0,1,2,3]
136 134
137 In [8]: px s = psum(a)
135 In [8]: %px s = psum(a)
138 136 Parallel execution on engines: [0,1,2,3]
139 137
140 138 In [9]: view['s']
141 139 Out[9]: [187.451545803,187.451545803,187.451545803,187.451545803]
142 140
143 141 Any Python code that makes calls to MPI can be used in this manner, including
144 142 compiled C, C++ and Fortran libraries that have been exposed to Python.
145 143
146 144 .. [MPI] Message Passing Interface. http://www-unix.mcs.anl.gov/mpi/
147 145 .. [mpi4py] MPI for Python. mpi4py: http://mpi4py.scipy.org/
148 146 .. [OpenMPI] Open MPI. http://www.open-mpi.org/
149 147 .. [PyTrilinos] PyTrilinos. http://trilinos.sandia.gov/packages/pytrilinos/
150 148
151 149
@@ -1,865 +1,942 b''
1 1 .. _parallel_multiengine:
2 2
3 3 ==========================
4 4 IPython's Direct interface
5 5 ==========================
6 6
7 7 The direct, or multiengine, interface represents one possible way of working with a set of
8 8 IPython engines. The basic idea behind the multiengine interface is that the
9 9 capabilities of each engine are directly and explicitly exposed to the user.
10 10 Thus, in the multiengine interface, each engine is given an id that is used to
11 11 identify the engine and give it work to do. This interface is very intuitive
12 12 and is designed with interactive usage in mind, and is the best place for
13 13 new users of IPython to begin.
14 14
15 15 Starting the IPython controller and engines
16 16 ===========================================
17 17
18 18 To follow along with this tutorial, you will need to start the IPython
19 19 controller and four IPython engines. The simplest way of doing this is to use
20 20 the :command:`ipcluster` command::
21 21
22 22 $ ipcluster start -n 4
23 23
24 24 For more detailed information about starting the controller and engines, see
25 25 our :ref:`introduction <parallel_overview>` to using IPython for parallel computing.
26 26
27 27 Creating a ``DirectView`` instance
28 28 ==================================
29 29
30 30 The first step is to import the IPython :mod:`IPython.parallel`
31 31 module and then create a :class:`.Client` instance:
32 32
33 33 .. sourcecode:: ipython
34 34
35 35 In [1]: from IPython.parallel import Client
36 36
37 37 In [2]: rc = Client()
38 38
39 39 This form assumes that the default connection information (stored in
40 40 :file:`ipcontroller-client.json` found in :file:`IPYTHONDIR/profile_default/security`) is
41 41 accurate. If the controller was started on a remote machine, you must copy that connection
42 42 file to the client machine, or enter its contents as arguments to the Client constructor:
43 43
44 44 .. sourcecode:: ipython
45 45
46 46 # If you have copied the json connector file from the controller:
47 47 In [2]: rc = Client('/path/to/ipcontroller-client.json')
48 48 # or to connect with a specific profile you have set up:
49 49 In [3]: rc = Client(profile='mpi')
50 50
51 51
52 52 To make sure there are engines connected to the controller, users can get a list
53 53 of engine ids:
54 54
55 55 .. sourcecode:: ipython
56 56
57 57 In [3]: rc.ids
58 58 Out[3]: [0, 1, 2, 3]
59 59
60 60 Here we see that there are four engines ready to do work for us.
61 61
62 62 For direct execution, we will make use of a :class:`DirectView` object, which can be
63 63 constructed via list-access to the client:
64 64
65 65 .. sourcecode:: ipython
66 66
67 67 In [4]: dview = rc[:] # use all engines
68 68
69 69 .. seealso::
70 70
71 71 For more information, see the in-depth explanation of :ref:`Views <parallel_details>`.
72 72
73 73
74 74 Quick and easy parallelism
75 75 ==========================
76 76
77 77 In many cases, you simply want to apply a Python function to a sequence of
78 78 objects, but *in parallel*. The client interface provides a simple way
79 79 of accomplishing this: using the DirectView's :meth:`~DirectView.map` method.
80 80
81 81 Parallel map
82 82 ------------
83 83
84 84 Python's builtin :func:`map` function allows a function to be applied to a
85 85 sequence element-by-element. This type of code is typically trivial to
86 86 parallelize. In fact, since IPython's interface is all about functions anyway,
87 87 you can just use the builtin :func:`map` with a :class:`RemoteFunction`, or a
88 88 DirectView's :meth:`map` method:
89 89
90 90 .. sourcecode:: ipython
91 91
92 92 In [62]: serial_result = map(lambda x:x**10, range(32))
93 93
94 94 In [63]: parallel_result = dview.map_sync(lambda x: x**10, range(32))
95 95
96 96 In [67]: serial_result==parallel_result
97 97 Out[67]: True
98 98
99 99
100 100 .. note::
101 101
102 102 The :class:`DirectView`'s version of :meth:`map` does
103 103 not do dynamic load balancing. For a load balanced version, use a
104 104 :class:`LoadBalancedView`.
105 105
106 106 .. seealso::
107 107
108 108 :meth:`map` is implemented via :class:`ParallelFunction`.
109 109
110 110 Remote function decorators
111 111 --------------------------
112 112
113 113 Remote functions are just like normal functions, but when they are called,
114 114 they execute on one or more engines, rather than locally. IPython provides
115 115 two decorators:
116 116
117 117 .. sourcecode:: ipython
118 118
119 119 In [10]: @dview.remote(block=True)
120 120 ....: def getpid():
121 121 ....: import os
122 122 ....: return os.getpid()
123 123 ....:
124 124
125 125 In [11]: getpid()
126 126 Out[11]: [12345, 12346, 12347, 12348]
127 127
128 128 The ``@parallel`` decorator creates parallel functions that break up element-wise
129 129 operations and distribute them, reconstructing the result.
130 130
131 131 .. sourcecode:: ipython
132 132
133 133 In [12]: import numpy as np
134 134
135 135 In [13]: A = np.random.random((64,48))
136 136
137 137 In [14]: @dview.parallel(block=True)
138 138 ....: def pmul(A,B):
139 139 ....: return A*B
140 140
141 141 In [15]: C_local = A*A
142 142
143 143 In [16]: C_remote = pmul(A,A)
144 144
145 145 In [17]: (C_local == C_remote).all()
146 146 Out[17]: True
147 147
148 148 Calling a ``@parallel`` function *does not* correspond to map. It is used for splitting
149 149 element-wise operations that operate on a sequence or array. For ``map`` behavior,
150 150 parallel functions do have a map method.
151 151
152 152 ==================== ============================ =============================
153 153 call pfunc(seq) pfunc.map(seq)
154 154 ==================== ============================ =============================
155 155 # of tasks # of engines (1 per engine) # of engines (1 per engine)
156 156 # of remote calls # of engines (1 per engine) ``len(seq)``
157 157 argument to remote ``seq[i:j]`` (sub-sequence) ``seq[i]`` (single element)
158 158 ==================== ============================ =============================
159 159
160 160 A quick example to illustrate the difference in arguments for the two modes:
161 161
162 162 .. sourcecode:: ipython
163 163
164 164 In [16]: @dview.parallel(block=True)
165 165 ....: def echo(x):
166 166 ....: return str(x)
167 167 ....:
168 168
169 169 In [17]: echo(range(5))
170 170 Out[17]: ['[0, 1]', '[2]', '[3]', '[4]']
171 171
172 172 In [18]: echo.map(range(5))
173 173 Out[18]: ['0', '1', '2', '3', '4']
174 174
175 175
176 176 .. seealso::
177 177
178 178 See the :func:`~.remotefunction.parallel` and :func:`~.remotefunction.remote`
179 179 decorators for options.
180 180
181 181 Calling Python functions
182 182 ========================
183 183
184 184 The most basic type of operation that can be performed on the engines is to
185 185 execute Python code or call Python functions. Executing Python code can be
186 186 done in blocking or non-blocking mode (non-blocking is default) using the
187 187 :meth:`.View.execute` method, and calling functions can be done via the
188 188 :meth:`.View.apply` method.
189 189
190 190 apply
191 191 -----
192 192
193 193 The main method for doing remote execution (in fact, all methods that
194 194 communicate with the engines are built on top of it), is :meth:`View.apply`.
195 195
196 196 We strive to provide the cleanest interface we can, so `apply` has the following
197 197 signature:
198 198
199 199 .. sourcecode:: python
200 200
201 201 view.apply(f, *args, **kwargs)
202 202
203 203 There are various ways to call functions with IPython, and these flags are set as
204 204 attributes of the View. The ``DirectView`` has just two of these flags:
205 205
206 206 dv.block : bool
207 207 whether to wait for the result, or return an :class:`AsyncResult` object
208 208 immediately
209 209 dv.track : bool
210 210 whether to instruct pyzmq to track when zeromq is done sending the message.
211 211 This is primarily useful for non-copying sends of numpy arrays that you plan to
212 212 edit in-place. You need to know when it becomes safe to edit the buffer
213 213 without corrupting the message.
214 214 dv.targets : int, list of ints
215 215 which targets this view is associated with.
216 216
217 217
218 218 Creating a view is simple: index-access on a client creates a :class:`.DirectView`.
219 219
220 220 .. sourcecode:: ipython
221 221
222 222 In [4]: view = rc[1:3]
223 223 Out[4]: <DirectView [1, 2]>
224 224
225 225 In [5]: view.apply<tab>
226 226 view.apply view.apply_async view.apply_sync
227 227
228 228 For convenience, you can set block temporarily for a single call with the extra sync/async methods.
229 229
230 230 Blocking execution
231 231 ------------------
232 232
233 233 In blocking mode, the :class:`.DirectView` object (called ``dview`` in
234 234 these examples) submits the command to the controller, which places the
235 235 command in the engines' queues for execution. The :meth:`apply` call then
236 236 blocks until the engines are done executing the command:
237 237
238 238 .. sourcecode:: ipython
239 239
240 240 In [2]: dview = rc[:] # A DirectView of all engines
241 241 In [3]: dview.block=True
242 242 In [4]: dview['a'] = 5
243 243
244 244 In [5]: dview['b'] = 10
245 245
246 246 In [6]: dview.apply(lambda x: a+b+x, 27)
247 247 Out[6]: [42, 42, 42, 42]
248 248
249 249 You can also select blocking execution on a call-by-call basis with the :meth:`apply_sync`
250 250 method:
251 251
252 252 In [7]: dview.block=False
253 253
254 254 In [8]: dview.apply_sync(lambda x: a+b+x, 27)
255 255 Out[8]: [42, 42, 42, 42]
256 256
257 257 Python commands can be executed as strings on specific engines by using a View's ``execute``
258 258 method:
259 259
260 260 .. sourcecode:: ipython
261 261
262 262 In [6]: rc[::2].execute('c=a+b')
263 263
264 264 In [7]: rc[1::2].execute('c=a-b')
265 265
266 266 In [8]: dview['c'] # shorthand for dview.pull('c', block=True)
267 267 Out[8]: [15, -5, 15, -5]
268 268
269 269
270 270 Non-blocking execution
271 271 ----------------------
272 272
273 273 In non-blocking mode, :meth:`apply` submits the command to be executed and
274 274 then returns a :class:`AsyncResult` object immediately. The
275 275 :class:`AsyncResult` object gives you a way of getting a result at a later
276 276 time through its :meth:`get` method.
277 277
278 278 .. seealso::
279 279
280 280 Docs on the :ref:`AsyncResult <parallel_asyncresult>` object.
281 281
282 282 This allows you to quickly submit long running commands without blocking your
283 283 local Python/IPython session:
284 284
285 285 .. sourcecode:: ipython
286 286
287 287 # define our function
288 288 In [6]: def wait(t):
289 289 ....: import time
290 290 ....: tic = time.time()
291 291 ....: time.sleep(t)
292 292 ....: return time.time()-tic
293 293
294 294 # In non-blocking mode
295 295 In [7]: ar = dview.apply_async(wait, 2)
296 296
297 297 # Now block for the result
298 298 In [8]: ar.get()
299 299 Out[8]: [2.0006198883056641, 1.9997570514678955, 1.9996809959411621, 2.0003249645233154]
300 300
301 301 # Again in non-blocking mode
302 302 In [9]: ar = dview.apply_async(wait, 10)
303 303
304 304 # Poll to see if the result is ready
305 305 In [10]: ar.ready()
306 306 Out[10]: False
307 307
308 308 # ask for the result, but wait a maximum of 1 second:
309 309 In [45]: ar.get(1)
310 310 ---------------------------------------------------------------------------
311 311 TimeoutError Traceback (most recent call last)
312 312 /home/you/<ipython-input-45-7cd858bbb8e0> in <module>()
313 313 ----> 1 ar.get(1)
314 314
315 315 /path/to/site-packages/IPython/parallel/asyncresult.pyc in get(self, timeout)
316 316 62 raise self._exception
317 317 63 else:
318 318 ---> 64 raise error.TimeoutError("Result not ready.")
319 319 65
320 320 66 def ready(self):
321 321
322 322 TimeoutError: Result not ready.
323 323
324 324 .. Note::
325 325
326 326 Note the import inside the function. This is a common model, to ensure
327 327 that the appropriate modules are imported where the task is run. You can
328 328 also manually import modules into the engine(s) namespace(s) via
329 329 :meth:`view.execute('import numpy')`.
330 330
331 331 Often, it is desirable to wait until a set of :class:`AsyncResult` objects
332 332 are done. For this, there is the method :meth:`wait`. This method takes a
333 333 tuple of :class:`AsyncResult` objects (or `msg_ids` or indices to the client's History),
334 334 and blocks until all of the associated results are ready:
335 335
336 336 .. sourcecode:: ipython
337 337
338 338 In [72]: dview.block=False
339 339
340 340 # A trivial list of AsyncResults objects
341 341 In [73]: pr_list = [dview.apply_async(wait, 3) for i in range(10)]
342 342
343 343 # Wait until all of them are done
344 344 In [74]: dview.wait(pr_list)
345 345
346 346 # Then, their results are ready using get() or the `.r` attribute
347 347 In [75]: pr_list[0].get()
348 348 Out[75]: [2.9982571601867676, 2.9982588291168213, 2.9987530708312988, 2.9990990161895752]
349 349
350 350
351 351
352 352 The ``block`` and ``targets`` keyword arguments and attributes
353 353 --------------------------------------------------------------
354 354
355 355 Most DirectView methods (excluding :meth:`apply`) accept ``block`` and
356 356 ``targets`` as keyword arguments. As we have seen above, these keyword arguments control the
357 357 blocking mode and which engines the command is applied to. The :class:`View` class also has
358 358 :attr:`block` and :attr:`targets` attributes that control the default behavior when the keyword
359 359 arguments are not provided. Thus the following logic is used for :attr:`block` and :attr:`targets`:
360 360
361 361 * If no keyword argument is provided, the instance attributes are used.
362 362 * Keyword argument, if provided override the instance attributes for
363 363 the duration of a single call.
364 364
365 365 The following examples demonstrate how to use the instance attributes:
366 366
367 367 .. sourcecode:: ipython
368 368
369 369 In [16]: dview.targets = [0,2]
370 370
371 371 In [17]: dview.block = False
372 372
373 373 In [18]: ar = dview.apply(lambda : 10)
374 374
375 375 In [19]: ar.get()
376 376 Out[19]: [10, 10]
377 377
378 378 In [16]: dview.targets = v.client.ids # all engines (4)
379 379
380 380 In [21]: dview.block = True
381 381
382 382 In [22]: dview.apply(lambda : 42)
383 383 Out[22]: [42, 42, 42, 42]
384 384
385 385 The :attr:`block` and :attr:`targets` instance attributes of the
386 386 :class:`.DirectView` also determine the behavior of the parallel magic commands.
387 387
388 388 Parallel magic commands
389 389 -----------------------
390 390
391 391 We provide a few IPython magic commands (``%px``, ``%autopx`` and ``%result``)
392 that make it more pleasant to execute Python commands on the engines
393 interactively. These are simply shortcuts to :meth:`execute` and
394 :meth:`get_result` of the :class:`DirectView`. The ``%px`` magic executes a single
395 Python command on the engines specified by the :attr:`targets` attribute of the
396 :class:`DirectView` instance:
392 that make it a bit more pleasant to execute Python commands on the engines interactively.
393 These are simply shortcuts to :meth:`.DirectView.execute`
394 and :meth:`.AsyncResult.display_outputs` methods respectively.
395 The ``%px`` magic executes a single Python command on the engines
396 specified by the :attr:`targets` attribute of the :class:`DirectView` instance:
397 397
398 398 .. sourcecode:: ipython
399 399
400 400 # Create a DirectView for all targets
401 401 In [22]: dv = rc[:]
402 402
403 403 # Make this DirectView active for parallel magic commands
404 404 In [23]: dv.activate()
405 405
406 406 In [24]: dv.block=True
407 407
408 408 # import numpy here and everywhere
409 409 In [25]: with dv.sync_imports():
410 410 ....: import numpy
411 411 importing numpy on engine(s)
412 412
413 413 In [27]: %px a = numpy.random.rand(2,2)
414 414 Parallel execution on engines: [0, 1, 2, 3]
415 415
416 In [28]: %px ev = numpy.linalg.eigvals(a)
416 In [28]: %px numpy.linalg.eigvals(a)
417 417 Parallel execution on engines: [0, 1, 2, 3]
418 [0] Out[68]: array([ 0.77120707, -0.19448286])
419 [1] Out[68]: array([ 1.10815921, 0.05110369])
420 [2] Out[68]: array([ 0.74625527, -0.37475081])
421 [3] Out[68]: array([ 0.72931905, 0.07159743])
422
423 In [29]: %px print 'hi'
424 Parallel execution on engine(s): [0, 1, 2, 3]
425 [stdout:0] hi
426 [stdout:1] hi
427 [stdout:2] hi
428 [stdout:3] hi
429
430
431 Since engines are IPython as well, you can even run magics remotely:
432
433 .. sourcecode:: ipython
434
435 In [28]: %px %pylab inline
436 Parallel execution on engine(s): [0, 1, 2, 3]
437 [stdout:0]
438 Welcome to pylab, a matplotlib-based Python environment...
439 For more information, type 'help(pylab)'.
440 [stdout:1]
441 Welcome to pylab, a matplotlib-based Python environment...
442 For more information, type 'help(pylab)'.
443 [stdout:2]
444 Welcome to pylab, a matplotlib-based Python environment...
445 For more information, type 'help(pylab)'.
446 [stdout:3]
447 Welcome to pylab, a matplotlib-based Python environment...
448 For more information, type 'help(pylab)'.
449
450 And once in pylab mode with the inline backend,
451 you can make plots and they will be displayed in your frontend
452 if it supports the inline figures (e.g. notebook or qtconsole):
453
454 .. sourcecode:: ipython
455
456 In [40]: %px plot(rand(100))
457 Parallel execution on engine(s): [0, 1, 2, 3]
458 <plot0>
459 <plot1>
460 <plot2>
461 <plot3>
462 [0] Out[79]: [<matplotlib.lines.Line2D at 0x10a6286d0>]
463 [1] Out[79]: [<matplotlib.lines.Line2D at 0x10b9476d0>]
464 [2] Out[79]: [<matplotlib.lines.Line2D at 0x110652750>]
465 [3] Out[79]: [<matplotlib.lines.Line2D at 0x10c6566d0>]
418 466
419 In [28]: dv['ev']
420 Out[28]: [ array([ 1.09522024, -0.09645227]),
421 ....: array([ 1.21435496, -0.35546712]),
422 ....: array([ 0.72180653, 0.07133042]),
423 ....: array([ 1.46384341, 1.04353244e-04])
424 ....: ]
425 467
426 The ``%result`` magic gets the most recent result, or takes an argument
427 specifying the index of the result to be requested. It is simply a shortcut to the
428 :meth:`get_result` method:
468 ``%%px`` Cell Magic
469 *******************
470
471 `%%px` can also be used as a Cell Magic, which accepts ``--[no]block`` flags,
472 and a ``--group-outputs`` argument, which adjust how the outputs of multiple
473 engines are presented.
474
475 .. seealso::
476
477 :meth:`.AsyncResult.display_outputs` for the grouping options.
429 478
430 479 .. sourcecode:: ipython
480
481 In [50]: %%px --block --group-outputs=engine
482 ....: import numpy as np
483 ....: A = np.random.random((2,2))
484 ....: ev = numpy.linalg.eigvals(A)
485 ....: print ev
486 ....: ev.max()
487 ....:
488 Parallel execution on engine(s): [0, 1, 2, 3]
489 [stdout:0] [ 0.60640442 0.95919621]
490 [0] Out[73]: 0.9591962130899806
491 [stdout:1] [ 0.38501813 1.29430871]
492 [1] Out[73]: 1.2943087091452372
493 [stdout:2] [-0.85925141 0.9387692 ]
494 [2] Out[73]: 0.93876920456230284
495 [stdout:3] [ 0.37998269 1.24218246]
496 [3] Out[73]: 1.2421824618493817
497
498 ``%result`` Magic
499 *****************
500
501 If you are using ``%px`` in non-blocking mode, you won't get output.
502 You can use ``%result`` to display the outputs of the latest command,
503 just as is done when ``%px`` is blocking:
504
505 .. sourcecode:: ipython
506
507 In [39]: dv.block = False
431 508
432 In [29]: dv.apply_async(lambda : ev)
509 In [40]: %px print 'hi'
510 Async parallel execution on engine(s): [0, 1, 2, 3]
433 511
434 In [30]: %result
435 Out[30]: [ [ 1.28167017 0.14197338],
436 ....: [-0.14093616 1.27877273],
437 ....: [-0.37023573 1.06779409],
438 ....: [ 0.83664764 -0.25602658] ]
512 In [41]: %result
513 [stdout:0] hi
514 [stdout:1] hi
515 [stdout:2] hi
516 [stdout:3] hi
517
518 ``%result`` simply calls :meth:`.AsyncResult.display_outputs` on the most recent request.
519 You can pass integers as indices if you want a result other than the latest,
520 e.g. ``%result -2``, or ``%result 0`` for the first.
521
522
523 ``%autopx``
524 ***********
439 525
440 526 The ``%autopx`` magic switches to a mode where everything you type is executed
441 on the engines given by the :attr:`targets` attribute:
527 on the engines until you do ``%autopx`` again.
442 528
443 529 .. sourcecode:: ipython
444 530
445 In [30]: dv.block=False
531 In [30]: dv.block=True
446 532
447 533 In [31]: %autopx
448 Auto Parallel Enabled
449 Type %autopx to disable
534 %autopx enabled
450 535
451 536 In [32]: max_evals = []
452 <IPython.parallel.AsyncResult object at 0x17b8a70>
453 537
454 538 In [33]: for i in range(100):
455 539 ....: a = numpy.random.rand(10,10)
456 540 ....: a = a+a.transpose()
457 541 ....: evals = numpy.linalg.eigvals(a)
458 542 ....: max_evals.append(evals[0].real)
459 543 ....:
460 ....:
461 <IPython.parallel.AsyncResult object at 0x17af8f0>
462
463 In [34]: %autopx
464 Auto Parallel Disabled
465 544
466 In [35]: dv.block=True
467
468 In [36]: px ans= "Average max eigenvalue is: %f"%(sum(max_evals)/len(max_evals))
469 Parallel execution on engines: [0, 1, 2, 3]
545 In [34]: print "Average max eigenvalue is: %f" % (sum(max_evals)/len(max_evals))
546 [stdout:0] Average max eigenvalue is: 10.193101
547 [stdout:1] Average max eigenvalue is: 10.064508
548 [stdout:2] Average max eigenvalue is: 10.055724
549 [stdout:3] Average max eigenvalue is: 10.086876
470 550
471 In [37]: dv['ans']
472 Out[37]: [ 'Average max eigenvalue is: 10.1387247332',
473 ....: 'Average max eigenvalue is: 10.2076902286',
474 ....: 'Average max eigenvalue is: 10.1891484655',
475 ....: 'Average max eigenvalue is: 10.1158837784',]
551 In [35]: %autopx
552 Auto Parallel Disabled
476 553
477 554
478 555 Moving Python objects around
479 556 ============================
480 557
481 558 In addition to calling functions and executing code on engines, you can
482 559 transfer Python objects to and from your IPython session and the engines. In
483 560 IPython, these operations are called :meth:`push` (sending an object to the
484 561 engines) and :meth:`pull` (getting an object from the engines).
485 562
486 563 Basic push and pull
487 564 -------------------
488 565
489 566 Here are some examples of how you use :meth:`push` and :meth:`pull`:
490 567
491 568 .. sourcecode:: ipython
492 569
493 570 In [38]: dview.push(dict(a=1.03234,b=3453))
494 571 Out[38]: [None,None,None,None]
495 572
496 573 In [39]: dview.pull('a')
497 574 Out[39]: [ 1.03234, 1.03234, 1.03234, 1.03234]
498 575
499 576 In [40]: dview.pull('b', targets=0)
500 577 Out[40]: 3453
501 578
502 579 In [41]: dview.pull(('a','b'))
503 580 Out[41]: [ [1.03234, 3453], [1.03234, 3453], [1.03234, 3453], [1.03234, 3453] ]
504 581
505 582 In [43]: dview.push(dict(c='speed'))
506 583 Out[43]: [None,None,None,None]
507 584
508 585 In non-blocking mode :meth:`push` and :meth:`pull` also return
509 586 :class:`AsyncResult` objects:
510 587
511 588 .. sourcecode:: ipython
512 589
513 590 In [48]: ar = dview.pull('a', block=False)
514 591
515 592 In [49]: ar.get()
516 593 Out[49]: [1.03234, 1.03234, 1.03234, 1.03234]
517 594
518 595
519 596 Dictionary interface
520 597 --------------------
521 598
522 599 Since a Python namespace is just a :class:`dict`, :class:`DirectView` objects provide
523 600 dictionary-style access by key and methods such as :meth:`get` and
524 601 :meth:`update` for convenience. This make the remote namespaces of the engines
525 602 appear as a local dictionary. Underneath, these methods call :meth:`apply`:
526 603
527 604 .. sourcecode:: ipython
528 605
529 606 In [51]: dview['a']=['foo','bar']
530 607
531 608 In [52]: dview['a']
532 609 Out[52]: [ ['foo', 'bar'], ['foo', 'bar'], ['foo', 'bar'], ['foo', 'bar'] ]
533 610
534 611 Scatter and gather
535 612 ------------------
536 613
537 614 Sometimes it is useful to partition a sequence and push the partitions to
538 615 different engines. In MPI language, this is known as scatter/gather and we
539 616 follow that terminology. However, it is important to remember that in
540 617 IPython's :class:`Client` class, :meth:`scatter` is from the
541 618 interactive IPython session to the engines and :meth:`gather` is from the
542 619 engines back to the interactive IPython session. For scatter/gather operations
543 620 between engines, MPI, pyzmq, or some other direct interconnect should be used.
544 621
545 622 .. sourcecode:: ipython
546 623
547 624 In [58]: dview.scatter('a',range(16))
548 625 Out[58]: [None,None,None,None]
549 626
550 627 In [59]: dview['a']
551 628 Out[59]: [ [0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15] ]
552 629
553 630 In [60]: dview.gather('a')
554 631 Out[60]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
555 632
556 633 Other things to look at
557 634 =======================
558 635
559 636 How to do parallel list comprehensions
560 637 --------------------------------------
561 638
562 639 In many cases list comprehensions are nicer than using the map function. While
563 640 we don't have fully parallel list comprehensions, it is simple to get the
564 641 basic effect using :meth:`scatter` and :meth:`gather`:
565 642
566 643 .. sourcecode:: ipython
567 644
568 645 In [66]: dview.scatter('x',range(64))
569 646
570 647 In [67]: %px y = [i**10 for i in x]
571 648 Parallel execution on engines: [0, 1, 2, 3]
572 649 Out[67]:
573 650
574 651 In [68]: y = dview.gather('y')
575 652
576 653 In [69]: print y
577 654 [0, 1, 1024, 59049, 1048576, 9765625, 60466176, 282475249, 1073741824,...]
578 655
579 656 Remote imports
580 657 --------------
581 658
582 659 Sometimes you will want to import packages both in your interactive session
583 660 and on your remote engines. This can be done with the :class:`ContextManager`
584 661 created by a DirectView's :meth:`sync_imports` method:
585 662
586 663 .. sourcecode:: ipython
587 664
588 665 In [69]: with dview.sync_imports():
589 666 ....: import numpy
590 667 importing numpy on engine(s)
591 668
592 669 Any imports made inside the block will also be performed on the view's engines.
593 670 sync_imports also takes a `local` boolean flag that defaults to True, which specifies
594 671 whether the local imports should also be performed. However, support for `local=False`
595 672 has not been implemented, so only packages that can be imported locally will work
596 673 this way.
597 674
598 675 You can also specify imports via the ``@require`` decorator. This is a decorator
599 676 designed for use in Dependencies, but can be used to handle remote imports as well.
600 677 Modules or module names passed to ``@require`` will be imported before the decorated
601 678 function is called. If they cannot be imported, the decorated function will never
602 679 execute, and will fail with an UnmetDependencyError.
603 680
604 681 .. sourcecode:: ipython
605 682
606 683 In [69]: from IPython.parallel import require
607 684
608 685 In [70]: @require('re'):
609 686 ....: def findall(pat, x):
610 687 ....: # re is guaranteed to be available
611 688 ....: return re.findall(pat, x)
612 689
613 690 # you can also pass modules themselves, that you already have locally:
614 691 In [71]: @require(time):
615 692 ....: def wait(t):
616 693 ....: time.sleep(t)
617 694 ....: return t
618 695
619 696 .. _parallel_exceptions:
620 697
621 698 Parallel exceptions
622 699 -------------------
623 700
624 701 In the multiengine interface, parallel commands can raise Python exceptions,
625 702 just like serial commands. But, it is a little subtle, because a single
626 703 parallel command can actually raise multiple exceptions (one for each engine
627 704 the command was run on). To express this idea, we have a
628 705 :exc:`CompositeError` exception class that will be raised in most cases. The
629 706 :exc:`CompositeError` class is a special type of exception that wraps one or
630 707 more other types of exceptions. Here is how it works:
631 708
632 709 .. sourcecode:: ipython
633 710
634 711 In [76]: dview.block=True
635 712
636 713 In [77]: dview.execute('1/0')
637 714 ---------------------------------------------------------------------------
638 715 CompositeError Traceback (most recent call last)
639 716 /home/user/<ipython-input-10-5d56b303a66c> in <module>()
640 717 ----> 1 dview.execute('1/0')
641 718
642 719 /path/to/site-packages/IPython/parallel/client/view.pyc in execute(self, code, targets, block)
643 720 591 default: self.block
644 721 592 """
645 722 --> 593 return self._really_apply(util._execute, args=(code,), block=block, targets=targets)
646 723 594
647 724 595 def run(self, filename, targets=None, block=None):
648 725
649 726 /home/user/<string> in _really_apply(self, f, args, kwargs, targets, block, track)
650 727
651 728 /path/to/site-packages/IPython/parallel/client/view.pyc in sync_results(f, self, *args, **kwargs)
652 729 55 def sync_results(f, self, *args, **kwargs):
653 730 56 """sync relevant results from self.client to our results attribute."""
654 731 ---> 57 ret = f(self, *args, **kwargs)
655 732 58 delta = self.outstanding.difference(self.client.outstanding)
656 733 59 completed = self.outstanding.intersection(delta)
657 734
658 735 /home/user/<string> in _really_apply(self, f, args, kwargs, targets, block, track)
659 736
660 737 /path/to/site-packages/IPython/parallel/client/view.pyc in save_ids(f, self, *args, **kwargs)
661 738 44 n_previous = len(self.client.history)
662 739 45 try:
663 740 ---> 46 ret = f(self, *args, **kwargs)
664 741 47 finally:
665 742 48 nmsgs = len(self.client.history) - n_previous
666 743
667 744 /path/to/site-packages/IPython/parallel/client/view.pyc in _really_apply(self, f, args, kwargs, targets, block, track)
668 745 529 if block:
669 746 530 try:
670 747 --> 531 return ar.get()
671 748 532 except KeyboardInterrupt:
672 749 533 pass
673 750
674 751 /path/to/site-packages/IPython/parallel/client/asyncresult.pyc in get(self, timeout)
675 752 101 return self._result
676 753 102 else:
677 754 --> 103 raise self._exception
678 755 104 else:
679 756 105 raise error.TimeoutError("Result not ready.")
680 757
681 758 CompositeError: one or more exceptions from call to method: _execute
682 759 [0:apply]: ZeroDivisionError: integer division or modulo by zero
683 760 [1:apply]: ZeroDivisionError: integer division or modulo by zero
684 761 [2:apply]: ZeroDivisionError: integer division or modulo by zero
685 762 [3:apply]: ZeroDivisionError: integer division or modulo by zero
686 763
687 764 Notice how the error message printed when :exc:`CompositeError` is raised has
688 765 information about the individual exceptions that were raised on each engine.
689 766 If you want, you can even raise one of these original exceptions:
690 767
691 768 .. sourcecode:: ipython
692 769
693 770 In [80]: try:
694 771 ....: dview.execute('1/0')
695 772 ....: except parallel.error.CompositeError, e:
696 773 ....: e.raise_exception()
697 774 ....:
698 775 ....:
699 776 ---------------------------------------------------------------------------
700 777 RemoteError Traceback (most recent call last)
701 778 /home/user/<ipython-input-17-8597e7e39858> in <module>()
702 779 2 dview.execute('1/0')
703 780 3 except CompositeError as e:
704 781 ----> 4 e.raise_exception()
705 782
706 783 /path/to/site-packages/IPython/parallel/error.pyc in raise_exception(self, excid)
707 784 266 raise IndexError("an exception with index %i does not exist"%excid)
708 785 267 else:
709 786 --> 268 raise RemoteError(en, ev, etb, ei)
710 787 269
711 788 270
712 789
713 790 RemoteError: ZeroDivisionError(integer division or modulo by zero)
714 791 Traceback (most recent call last):
715 792 File "/path/to/site-packages/IPython/parallel/engine/streamkernel.py", line 330, in apply_request
716 793 exec code in working,working
717 794 File "<string>", line 1, in <module>
718 795 File "/path/to/site-packages/IPython/parallel/util.py", line 354, in _execute
719 796 exec code in globals()
720 797 File "<string>", line 1, in <module>
721 798 ZeroDivisionError: integer division or modulo by zero
722 799
723 800 If you are working in IPython, you can simply type ``%debug`` after one of
724 801 these :exc:`CompositeError` exceptions is raised, and inspect the exception
725 802 instance:
726 803
727 804 .. sourcecode:: ipython
728 805
729 806 In [81]: dview.execute('1/0')
730 807 ---------------------------------------------------------------------------
731 808 CompositeError Traceback (most recent call last)
732 809 /home/user/<ipython-input-10-5d56b303a66c> in <module>()
733 810 ----> 1 dview.execute('1/0')
734 811
735 812 /path/to/site-packages/IPython/parallel/client/view.pyc in execute(self, code, targets, block)
736 813 591 default: self.block
737 814 592 """
738 815 --> 593 return self._really_apply(util._execute, args=(code,), block=block, targets=targets)
739 816 594
740 817 595 def run(self, filename, targets=None, block=None):
741 818
742 819 /home/user/<string> in _really_apply(self, f, args, kwargs, targets, block, track)
743 820
744 821 /path/to/site-packages/IPython/parallel/client/view.pyc in sync_results(f, self, *args, **kwargs)
745 822 55 def sync_results(f, self, *args, **kwargs):
746 823 56 """sync relevant results from self.client to our results attribute."""
747 824 ---> 57 ret = f(self, *args, **kwargs)
748 825 58 delta = self.outstanding.difference(self.client.outstanding)
749 826 59 completed = self.outstanding.intersection(delta)
750 827
751 828 /home/user/<string> in _really_apply(self, f, args, kwargs, targets, block, track)
752 829
753 830 /path/to/site-packages/IPython/parallel/client/view.pyc in save_ids(f, self, *args, **kwargs)
754 831 44 n_previous = len(self.client.history)
755 832 45 try:
756 833 ---> 46 ret = f(self, *args, **kwargs)
757 834 47 finally:
758 835 48 nmsgs = len(self.client.history) - n_previous
759 836
760 837 /path/to/site-packages/IPython/parallel/client/view.pyc in _really_apply(self, f, args, kwargs, targets, block, track)
761 838 529 if block:
762 839 530 try:
763 840 --> 531 return ar.get()
764 841 532 except KeyboardInterrupt:
765 842 533 pass
766 843
767 844 /path/to/site-packages/IPython/parallel/client/asyncresult.pyc in get(self, timeout)
768 845 101 return self._result
769 846 102 else:
770 847 --> 103 raise self._exception
771 848 104 else:
772 849 105 raise error.TimeoutError("Result not ready.")
773 850
774 851 CompositeError: one or more exceptions from call to method: _execute
775 852 [0:apply]: ZeroDivisionError: integer division or modulo by zero
776 853 [1:apply]: ZeroDivisionError: integer division or modulo by zero
777 854 [2:apply]: ZeroDivisionError: integer division or modulo by zero
778 855 [3:apply]: ZeroDivisionError: integer division or modulo by zero
779 856
780 857 In [82]: %debug
781 858 > /path/to/site-packages/IPython/parallel/client/asyncresult.py(103)get()
782 859 102 else:
783 860 --> 103 raise self._exception
784 861 104 else:
785 862
786 863 # With the debugger running, self._exception is the exception instance. We can tab complete
787 864 # on it and see the extra methods that are available.
788 865 ipdb> self._exception.<tab>
789 866 e.__class__ e.__getitem__ e.__new__ e.__setstate__ e.args
790 867 e.__delattr__ e.__getslice__ e.__reduce__ e.__str__ e.elist
791 868 e.__dict__ e.__hash__ e.__reduce_ex__ e.__weakref__ e.message
792 869 e.__doc__ e.__init__ e.__repr__ e._get_engine_str e.print_tracebacks
793 870 e.__getattribute__ e.__module__ e.__setattr__ e._get_traceback e.raise_exception
794 871 ipdb> self._exception.print_tracebacks()
795 872 [0:apply]:
796 873 Traceback (most recent call last):
797 874 File "/path/to/site-packages/IPython/parallel/engine/streamkernel.py", line 330, in apply_request
798 875 exec code in working,working
799 876 File "<string>", line 1, in <module>
800 877 File "/path/to/site-packages/IPython/parallel/util.py", line 354, in _execute
801 878 exec code in globals()
802 879 File "<string>", line 1, in <module>
803 880 ZeroDivisionError: integer division or modulo by zero
804 881
805 882
806 883 [1:apply]:
807 884 Traceback (most recent call last):
808 885 File "/path/to/site-packages/IPython/parallel/engine/streamkernel.py", line 330, in apply_request
809 886 exec code in working,working
810 887 File "<string>", line 1, in <module>
811 888 File "/path/to/site-packages/IPython/parallel/util.py", line 354, in _execute
812 889 exec code in globals()
813 890 File "<string>", line 1, in <module>
814 891 ZeroDivisionError: integer division or modulo by zero
815 892
816 893
817 894 [2:apply]:
818 895 Traceback (most recent call last):
819 896 File "/path/to/site-packages/IPython/parallel/engine/streamkernel.py", line 330, in apply_request
820 897 exec code in working,working
821 898 File "<string>", line 1, in <module>
822 899 File "/path/to/site-packages/IPython/parallel/util.py", line 354, in _execute
823 900 exec code in globals()
824 901 File "<string>", line 1, in <module>
825 902 ZeroDivisionError: integer division or modulo by zero
826 903
827 904
828 905 [3:apply]:
829 906 Traceback (most recent call last):
830 907 File "/path/to/site-packages/IPython/parallel/engine/streamkernel.py", line 330, in apply_request
831 908 exec code in working,working
832 909 File "<string>", line 1, in <module>
833 910 File "/path/to/site-packages/IPython/parallel/util.py", line 354, in _execute
834 911 exec code in globals()
835 912 File "<string>", line 1, in <module>
836 913 ZeroDivisionError: integer division or modulo by zero
837 914
838 915
839 916 All of this same error handling magic even works in non-blocking mode:
840 917
841 918 .. sourcecode:: ipython
842 919
843 920 In [83]: dview.block=False
844 921
845 922 In [84]: ar = dview.execute('1/0')
846 923
847 924 In [85]: ar.get()
848 925 ---------------------------------------------------------------------------
849 926 CompositeError Traceback (most recent call last)
850 927 /home/user/<ipython-input-21-8531eb3d26fb> in <module>()
851 928 ----> 1 ar.get()
852 929
853 930 /path/to/site-packages/IPython/parallel/client/asyncresult.pyc in get(self, timeout)
854 931 101 return self._result
855 932 102 else:
856 933 --> 103 raise self._exception
857 934 104 else:
858 935 105 raise error.TimeoutError("Result not ready.")
859 936
860 937 CompositeError: one or more exceptions from call to method: _execute
861 938 [0:apply]: ZeroDivisionError: integer division or modulo by zero
862 939 [1:apply]: ZeroDivisionError: integer division or modulo by zero
863 940 [2:apply]: ZeroDivisionError: integer division or modulo by zero
864 941 [3:apply]: ZeroDivisionError: integer division or modulo by zero
865 942
General Comments 0
You need to be logged in to leave comments. Login now