##// END OF EJS Templates
Merge pull request #1768 from minrk/parallelmagics...
Fernando Perez -
r7060:a1360828 merge
parent child Browse files
Show More
@@ -0,0 +1,342 b''
1 # -*- coding: utf-8 -*-
2 """Test Parallel magics
3
4 Authors:
5
6 * Min RK
7 """
8 #-------------------------------------------------------------------------------
9 # Copyright (C) 2011 The IPython Development Team
10 #
11 # Distributed under the terms of the BSD License. The full license is in
12 # the file COPYING, distributed as part of this software.
13 #-------------------------------------------------------------------------------
14
15 #-------------------------------------------------------------------------------
16 # Imports
17 #-------------------------------------------------------------------------------
18
19 import sys
20 import time
21
22 import zmq
23 from nose import SkipTest
24
25 from IPython.testing import decorators as dec
26 from IPython.testing.ipunittest import ParametricTestCase
27
28 from IPython import parallel as pmod
29 from IPython.parallel import error
30 from IPython.parallel import AsyncResult
31 from IPython.parallel.util import interactive
32
33 from IPython.parallel.tests import add_engines
34
35 from .clienttest import ClusterTestCase, capture_output, generate_output
36
def setup():
    """Module-level test setup: make sure at least 3 engines are running."""
    add_engines(3, total=True)
39
class TestParallelMagics(ClusterTestCase, ParametricTestCase):
    """Tests for the %px, %%px, %autopx and %result parallel magics."""

    def _check_expected_lines(self, lines, expected, output):
        """Assert ``lines`` and ``expected`` have equal length and that each
        expected entry (a substring, or a tuple of substrings) occurs in the
        corresponding line.

        ``output`` is attached to failure messages for context.  This factors
        out the verification loop previously copy-pasted in each of the
        ``--group-outputs`` tests.
        """
        self.assertEquals(len(lines), len(expected), output)
        for line, expect in zip(lines, expected):
            if isinstance(expect, str):
                expect = [expect]
            for ex in expect:
                self.assertTrue(ex in line, "Expected %r in %r" % (ex, line))

    def test_px_blocking(self):
        """%px in blocking mode executes remotely and displays stdout."""
        ip = get_ipython()
        v = self.client[-1:]
        v.activate()
        v.block = True

        ip.magic('px a=5')
        self.assertEquals(v['a'], [5])
        ip.magic('px a=10')
        self.assertEquals(v['a'], [10])
        # just 'print a' works ~99% of the time, but this ensures that
        # the stdout message has arrived when the result is finished:
        with capture_output() as io:
            ip.magic(
                'px import sys,time;print(a);sys.stdout.flush();time.sleep(0.2)'
            )
        out = io.stdout
        self.assertTrue('[stdout:' in out, out)
        self.assertTrue(out.rstrip().endswith('10'))
        self.assertRaisesRemote(ZeroDivisionError, ip.magic, 'px 1/0')

    def test_cellpx_block_args(self):
        """%%px --[no]block flags work"""
        ip = get_ipython()
        v = self.client[-1:]
        v.activate()
        v.block = False

        for block in (True, False):
            v.block = block

            with capture_output() as io:
                ip.run_cell_magic("px", "", "1")
            if block:
                self.assertTrue(io.stdout.startswith("Parallel"), io.stdout)
            else:
                self.assertTrue(io.stdout.startswith("Async"), io.stdout)

            with capture_output() as io:
                ip.run_cell_magic("px", "--block", "1")
            self.assertTrue(io.stdout.startswith("Parallel"), io.stdout)

            with capture_output() as io:
                ip.run_cell_magic("px", "--noblock", "1")
            self.assertTrue(io.stdout.startswith("Async"), io.stdout)

    def test_cellpx_groupby_engine(self):
        """%%px --group-outputs=engine"""
        ip = get_ipython()
        v = self.client[:]
        v.block = True
        v.activate()

        v['generate_output'] = generate_output

        with capture_output() as io:
            ip.run_cell_magic('px', '--group-outputs=engine', 'generate_output()')

        # all of each engine's output is grouped together
        expected = [
            ('[stdout:', '] stdout'),
            'stdout2',
            'IPython.core.display.HTML',
            'IPython.core.display.Math',
            ('] Out[', 'IPython.core.display.Math')
        ] * len(v)
        lines = io.stdout.strip().splitlines()[1:]
        self._check_expected_lines(lines, expected, io.stdout)

        expected = [
            ('[stderr:', '] stderr'),
            'stderr2',
        ] * len(v)
        lines = io.stderr.strip().splitlines()
        self._check_expected_lines(lines, expected, io.stderr)

    def test_cellpx_groupby_order(self):
        """%%px --group-outputs=order"""
        ip = get_ipython()
        v = self.client[:]
        v.block = True
        v.activate()

        v['generate_output'] = generate_output

        with capture_output() as io:
            ip.run_cell_magic('px', '--group-outputs=order', 'generate_output()')

        # stdout grouped per engine, then each displaypub type interleaved
        expected = []
        expected.extend([
            ('[stdout:', '] stdout'),
            'stdout2',
        ] * len(v))
        expected.extend([
            'IPython.core.display.HTML',
        ] * len(v))
        expected.extend([
            'IPython.core.display.Math',
        ] * len(v))
        expected.extend([
            ('] Out[', 'IPython.core.display.Math')
        ] * len(v))
        lines = io.stdout.strip().splitlines()[1:]
        self._check_expected_lines(lines, expected, io.stdout)

        expected = [
            ('[stderr:', '] stderr'),
            'stderr2',
        ] * len(v)
        lines = io.stderr.strip().splitlines()
        self._check_expected_lines(lines, expected, io.stderr)

    def test_cellpx_groupby_atype(self):
        """%%px --group-outputs=type"""
        ip = get_ipython()
        v = self.client[:]
        v.block = True
        v.activate()

        v['generate_output'] = generate_output

        with capture_output() as io:
            ip.run_cell_magic('px', '--group-outputs=type', 'generate_output()')

        # stdout per engine, then all displaypub output, then Out[] entries
        expected = []
        expected.extend([
            ('[stdout:', '] stdout'),
            'stdout2',
        ] * len(v))
        expected.extend([
            'IPython.core.display.HTML',
            'IPython.core.display.Math',
        ] * len(v))
        expected.extend([
            ('] Out[', 'IPython.core.display.Math')
        ] * len(v))
        lines = io.stdout.strip().splitlines()[1:]
        self._check_expected_lines(lines, expected, io.stdout)

        expected = [
            ('[stderr:', '] stderr'),
            'stderr2',
        ] * len(v)
        lines = io.stderr.strip().splitlines()
        self._check_expected_lines(lines, expected, io.stderr)

    def test_px_nonblocking(self):
        """%px in non-blocking mode returns an AsyncResult."""
        ip = get_ipython()
        v = self.client[-1:]
        v.activate()
        v.block = False

        ip.magic('px a=5')
        self.assertEquals(v['a'], [5])
        ip.magic('px a=10')
        self.assertEquals(v['a'], [10])
        with capture_output() as io:
            ar = ip.magic('px print (a)')
        self.assertTrue(isinstance(ar, AsyncResult))
        self.assertTrue('Async' in io.stdout)
        self.assertFalse('[stdout:' in io.stdout)
        ar = ip.magic('px 1/0')
        self.assertRaisesRemote(ZeroDivisionError, ar.get)

    def test_autopx_blocking(self):
        """%autopx in blocking mode runs every cell remotely."""
        ip = get_ipython()
        v = self.client[-1]
        v.activate()
        v.block = True

        with capture_output() as io:
            ip.magic('autopx')
            ip.run_cell('\n'.join(('a=5','b=12345','c=0')))
            ip.run_cell('b*=2')
            ip.run_cell('print (b)')
            ip.run_cell('b')
            ip.run_cell("b/c")
            ip.magic('autopx')

        output = io.stdout.strip()

        self.assertTrue(output.startswith('%autopx enabled'), output)
        self.assertTrue(output.endswith('%autopx disabled'), output)
        self.assertTrue('RemoteError: ZeroDivisionError' in output, output)
        self.assertTrue('] Out[' in output, output)
        self.assertTrue(': 24690' in output, output)
        ar = v.get_result(-1)
        self.assertEquals(v['a'], 5)
        self.assertEquals(v['b'], 24690)
        self.assertRaisesRemote(ZeroDivisionError, ar.get)

    def test_autopx_nonblocking(self):
        """%autopx in non-blocking mode does not wait for results."""
        ip = get_ipython()
        v = self.client[-1]
        v.activate()
        v.block = False

        with capture_output() as io:
            ip.magic('autopx')
            ip.run_cell('\n'.join(('a=5','b=10','c=0')))
            ip.run_cell('print (b)')
            ip.run_cell('import time; time.sleep(0.1)')
            ip.run_cell("b/c")
            ip.run_cell('b*=2')
            ip.magic('autopx')

        output = io.stdout.strip()

        self.assertTrue(output.startswith('%autopx enabled'))
        self.assertTrue(output.endswith('%autopx disabled'))
        self.assertFalse('ZeroDivisionError' in output)
        ar = v.get_result(-2)
        self.assertRaisesRemote(ZeroDivisionError, ar.get)
        # prevent TaskAborted on pulls, due to ZeroDivisionError
        time.sleep(0.5)
        self.assertEquals(v['a'], 5)
        # b*=2 will not fire, due to abort
        self.assertEquals(v['b'], 10)

    def test_result(self):
        """%result displays the output of a previous %px request by index."""
        ip = get_ipython()
        v = self.client[-1]
        v.activate()
        data = dict(a=111,b=222)
        v.push(data, block=True)

        ip.magic('px a')
        ip.magic('px b')
        for idx, name in [
            ('', 'b'),
            ('-1', 'b'),
            ('2', 'b'),
            ('1', 'a'),
            ('-2', 'a'),
        ]:
            with capture_output() as io:
                ip.magic('result ' + idx)
            output = io.stdout.strip()
            msg = "expected %s output to include %s, but got: %s" % \
                ('%result '+idx, str(data[name]), output)
            self.assertTrue(str(data[name]) in output, msg)

    @dec.skipif_not_matplotlib
    def test_px_pylab(self):
        """%pylab works on engines"""
        ip = get_ipython()
        v = self.client[-1]
        v.block = True
        v.activate()

        with capture_output() as io:
            ip.magic("px %pylab inline")

        self.assertTrue("Welcome to pylab" in io.stdout, io.stdout)
        self.assertTrue("backend_inline" in io.stdout, io.stdout)

        with capture_output() as io:
            ip.magic("px plot(rand(100))")

        self.assertTrue('] Out[' in io.stdout, io.stdout)
        self.assertTrue('matplotlib.lines' in io.stdout, io.stdout)
341
342
@@ -0,0 +1,228 b''
1 {
2 "metadata": {
3 "name": "Parallel Magics"
4 },
5 "nbformat": 3,
6 "worksheets": [
7 {
8 "cells": [
9 {
10 "cell_type": "heading",
11 "level": 1,
12 "source": [
13 "Using Parallel Magics"
14 ]
15 },
16 {
17 "cell_type": "markdown",
18 "source": [
19 "IPython has a few magics for working with your engines.",
20 "",
21 "This assumes you have started an IPython cluster, either with the notebook interface,",
22 "or the `ipcluster/controller/engine` commands."
23 ]
24 },
25 {
26 "cell_type": "code",
27 "collapsed": false,
28 "input": [
29 "from IPython import parallel",
30 "rc = parallel.Client()",
31 "dv = rc[:]",
32 "dv.block = True",
33 "dv"
34 ],
35 "language": "python",
36 "outputs": []
37 },
38 {
39 "cell_type": "markdown",
40 "source": [
41 "The parallel magics come from the `parallelmagics` IPython extension.",
42 "The magics are set to work with a particular View object,",
43 "so to activate them, you call the `activate()` method on a particular view:"
44 ]
45 },
46 {
47 "cell_type": "code",
48 "collapsed": true,
49 "input": [
50 "dv.activate()"
51 ],
52 "language": "python",
53 "outputs": []
54 },
55 {
56 "cell_type": "markdown",
57 "source": [
58 "Now we can execute code remotely with `%px`:"
59 ]
60 },
61 {
62 "cell_type": "code",
63 "collapsed": false,
64 "input": [
65 "%px a=5"
66 ],
67 "language": "python",
68 "outputs": []
69 },
70 {
71 "cell_type": "code",
72 "collapsed": false,
73 "input": [
74 "%px print a"
75 ],
76 "language": "python",
77 "outputs": []
78 },
79 {
80 "cell_type": "code",
81 "collapsed": false,
82 "input": [
83 "%px a"
84 ],
85 "language": "python",
86 "outputs": []
87 },
88 {
89 "cell_type": "markdown",
90 "source": [
91 "You don't have to wait for results:"
92 ]
93 },
94 {
95 "cell_type": "code",
96 "collapsed": true,
97 "input": [
98 "dv.block = False"
99 ],
100 "language": "python",
101 "outputs": []
102 },
103 {
104 "cell_type": "code",
105 "collapsed": false,
106 "input": [
107 "%px import time",
108 "%px time.sleep(5)",
109 "%px time.time()"
110 ],
111 "language": "python",
112 "outputs": []
113 },
114 {
115 "cell_type": "markdown",
116 "source": [
117 "But you will notice that this didn't output the result of the last command.",
118 "For this, we have `%result`, which displays the output of the latest request:"
119 ]
120 },
121 {
122 "cell_type": "code",
123 "collapsed": false,
124 "input": [
125 "%result"
126 ],
127 "language": "python",
128 "outputs": []
129 },
130 {
131 "cell_type": "markdown",
132 "source": [
133 "Remember, an IPython engine is IPython, so you can do magics remotely as well!"
134 ]
135 },
136 {
137 "cell_type": "code",
138 "collapsed": false,
139 "input": [
140 "dv.block = True",
141 "%px %pylab inline"
142 ],
143 "language": "python",
144 "outputs": []
145 },
146 {
147 "cell_type": "markdown",
148 "source": [
149 "`%%px` can also be used as a cell magic, for submitting whole blocks.",
150     "This one accepts `--block` and `--noblock` flags to specify",
151 "the blocking behavior, though the default is unchanged.",
152 ""
153 ]
154 },
155 {
156 "cell_type": "code",
157 "collapsed": true,
158 "input": [
159 "dv.scatter('id', dv.targets, flatten=True)",
160 "dv['stride'] = len(dv)"
161 ],
162 "language": "python",
163 "outputs": []
164 },
165 {
166 "cell_type": "code",
167 "collapsed": false,
168 "input": [
169 "%%px --noblock",
170 "x = linspace(0,pi,1000)",
171 "for n in range(id,12, stride):",
172 " print n",
173 " plt.plot(x,sin(n*x))",
174 "plt.title(\"Plot %i\" % id)"
175 ],
176 "language": "python",
177 "outputs": []
178 },
179 {
180 "cell_type": "code",
181 "collapsed": false,
182 "input": [
183 "%result"
184 ],
185 "language": "python",
186 "outputs": []
187 },
188 {
189 "cell_type": "markdown",
190 "source": [
191 "It also lets you choose some amount of the grouping of the outputs with `--group-outputs`:",
192 "",
193 "The choices are:",
194 "",
195 "* `engine` - all of an engine's output is collected together",
196 "* `type` - where stdout of each engine is grouped, etc. (the default)",
197 "* `order` - same as `type`, but individual displaypub outputs are interleaved.",
198 " That is, it will output the first plot from each engine, then the second from each,",
199 " etc."
200 ]
201 },
202 {
203 "cell_type": "code",
204 "collapsed": false,
205 "input": [
206 "%%px --group-outputs=engine",
207 "x = linspace(0,pi,1000)",
208 "for n in range(id,12, stride):",
209 " print n",
210 " plt.plot(x,sin(n*x))",
211 "plt.title(\"Plot %i\" % id)"
212 ],
213 "language": "python",
214 "outputs": []
215 },
216 {
217 "cell_type": "code",
218 "collapsed": true,
219 "input": [
220 ""
221 ],
222 "language": "python",
223 "outputs": []
224 }
225 ]
226 }
227 ]
228 } No newline at end of file
@@ -1,576 +1,576 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 """Magic functions for InteractiveShell.
2 """Magic functions for InteractiveShell.
3 """
3 """
4
4
5 #-----------------------------------------------------------------------------
5 #-----------------------------------------------------------------------------
6 # Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
6 # Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
7 # Copyright (C) 2001 Fernando Perez <fperez@colorado.edu>
7 # Copyright (C) 2001 Fernando Perez <fperez@colorado.edu>
8 # Copyright (C) 2008 The IPython Development Team
8 # Copyright (C) 2008 The IPython Development Team
9
9
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13
13
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17 # Stdlib
17 # Stdlib
18 import os
18 import os
19 import re
19 import re
20 import sys
20 import sys
21 import types
21 import types
22 from getopt import getopt, GetoptError
22 from getopt import getopt, GetoptError
23
23
24 # Our own
24 # Our own
25 from IPython.config.configurable import Configurable
25 from IPython.config.configurable import Configurable
26 from IPython.core import oinspect
26 from IPython.core import oinspect
27 from IPython.core.error import UsageError
27 from IPython.core.error import UsageError
28 from IPython.core.prefilter import ESC_MAGIC
28 from IPython.core.prefilter import ESC_MAGIC
29 from IPython.external.decorator import decorator
29 from IPython.external.decorator import decorator
30 from IPython.utils.ipstruct import Struct
30 from IPython.utils.ipstruct import Struct
31 from IPython.utils.process import arg_split
31 from IPython.utils.process import arg_split
32 from IPython.utils.text import dedent
32 from IPython.utils.text import dedent
33 from IPython.utils.traitlets import Bool, Dict, Instance
33 from IPython.utils.traitlets import Bool, Dict, Instance
34 from IPython.utils.warn import error, warn
34 from IPython.utils.warn import error, warn
35
35
36 #-----------------------------------------------------------------------------
36 #-----------------------------------------------------------------------------
37 # Globals
37 # Globals
38 #-----------------------------------------------------------------------------
38 #-----------------------------------------------------------------------------
39
39
40 # A dict we'll use for each class that has magics, used as temporary storage to
40 # A dict we'll use for each class that has magics, used as temporary storage to
41 # pass information between the @line/cell_magic method decorators and the
41 # pass information between the @line/cell_magic method decorators and the
42 # @magics_class class decorator, because the method decorators have no
42 # @magics_class class decorator, because the method decorators have no
43 # access to the class when they run. See for more details:
43 # access to the class when they run. See for more details:
44 # http://stackoverflow.com/questions/2366713/can-a-python-decorator-of-an-instance-method-access-the-class
44 # http://stackoverflow.com/questions/2366713/can-a-python-decorator-of-an-instance-method-access-the-class
45
45
# Temporary storage used while a Magics subclass is being defined; the
# @magics_class decorator moves these entries onto the class and clears them.
magics = {'line': {}, 'cell': {}}

# The kinds magics may be stored under, and the full spec accepted by the
# decorator factories (which additionally allows the combined 'line_cell').
magic_kinds = ('line', 'cell')
magic_spec = ('line', 'cell', 'line_cell')
50
50
51 #-----------------------------------------------------------------------------
51 #-----------------------------------------------------------------------------
52 # Utility classes and functions
52 # Utility classes and functions
53 #-----------------------------------------------------------------------------
53 #-----------------------------------------------------------------------------
54
54
class Bunch:
    """Generic container: callers attach arbitrary attributes to instances."""
56
56
57
57
def on_off(tag):
    """Return an ON/OFF string for a 1/0 input. Simple utility function."""
    states = ('OFF', 'ON')
    return states[tag]
61
61
62
62
def compress_dhist(dh):
    """Compress a directory history into a new one with at most 20 entries.

    The last 10 entries are kept verbatim; the earlier portion is
    de-duplicated (first occurrence wins, order preserved) and prepended.
    """
    older, recent = dh[:-10], dh[-10:]

    seen = set()
    deduped = []
    for entry in older:
        if entry in seen:
            continue
        seen.add(entry)
        deduped.append(entry)

    return deduped + recent
80
80
81
81
def needs_local_scope(func):
    """Decorator marking a magic function as needing the local scope to run."""
    func.needs_local_scope = True
    return func
86
86
87 #-----------------------------------------------------------------------------
87 #-----------------------------------------------------------------------------
88 # Class and method decorators for registering magics
88 # Class and method decorators for registering magics
89 #-----------------------------------------------------------------------------
89 #-----------------------------------------------------------------------------
90
90
def magics_class(cls):
    """Class decorator for all subclasses of the main Magics class.

    Any class that subclasses Magics *must* also apply this decorator, to
    ensure that all the methods that have been decorated as line/cell magics
    get correctly registered in the class instance.  This is necessary because
    when the method decorators run, the class does not exist yet, so they
    temporarily stash their information in the module-global `magics` dict.
    Applying this class decorator copies that global data onto the class and
    resets the global for the next class definition.

    Obviously, this mechanism is not thread-safe, which means that the
    *creation* of subclasses of Magic should only be done in a single-thread
    context.  Instantiation of the classes has no restrictions.  Given that
    these classes are typically created at IPython startup time and before
    user application code becomes active, in practice this should not pose
    any problems.
    """
    cls.registered = True
    cls.magics = {'line': magics['line'],
                  'cell': magics['cell']}
    # Reset the temporary global storage for the next @magics_class use.
    for kind in ('line', 'cell'):
        magics[kind] = {}
    return cls
115
115
116
116
def record_magic(dct, magic_kind, magic_name, func):
    """Utility function to store a function as a magic of a specific kind.

    Parameters
    ----------
    dct : dict
        A dictionary with 'line' and 'cell' subdicts.

    magic_kind : str
        Kind of magic to be stored ('line', 'cell' or 'line_cell').

    magic_name : str
        Key to store the magic as.

    func : function
        Callable object to store.
    """
    if magic_kind == 'line_cell':
        # A combined magic is registered under both subdicts.
        for kind in ('line', 'cell'):
            dct[kind][magic_name] = func
    else:
        dct[magic_kind][magic_name] = func
138
138
139
139
def validate_type(magic_kind):
    """Ensure that the given magic_kind is valid.

    Check that the given magic_kind is one of the accepted spec types (stored
    in the global `magic_spec`), raise ValueError otherwise.
    """
    if magic_kind not in magic_spec:
        # Bug fix: the format arguments must be parenthesized as a tuple.
        # Without the parens, '%s, %s given' was applied to `magic_kinds`
        # alone and `magic_kind` became a stray second argument to
        # ValueError, producing a garbled error message.
        raise ValueError('magic_kind must be one of %s, %s given' %
                         (magic_kinds, magic_kind))
148 magic_kinds, magic_kind)
149
149
150
150
151 # The docstrings for the decorator below will be fairly similar for the two
151 # The docstrings for the decorator below will be fairly similar for the two
152 # types (method and function), so we generate them here once and reuse the
152 # types (method and function), so we generate them here once and reuse the
153 # templates below.
153 # templates below.
154 _docstring_template = \
154 _docstring_template = \
155 """Decorate the given {0} as {1} magic.
155 """Decorate the given {0} as {1} magic.
156
156
157 The decorator can be used with or without arguments, as follows.
157 The decorator can be used with or without arguments, as follows.
158
158
159 i) without arguments: it will create a {1} magic named as the {0} being
159 i) without arguments: it will create a {1} magic named as the {0} being
160 decorated::
160 decorated::
161
161
162 @deco
162 @deco
163 def foo(...)
163 def foo(...)
164
164
165 will create a {1} magic named `foo`.
165 will create a {1} magic named `foo`.
166
166
167 ii) with one string argument: which will be used as the actual name of the
167 ii) with one string argument: which will be used as the actual name of the
168 resulting magic::
168 resulting magic::
169
169
170 @deco('bar')
170 @deco('bar')
171 def foo(...)
171 def foo(...)
172
172
173 will create a {1} magic named `bar`.
173 will create a {1} magic named `bar`.
174 """
174 """
175
175
176 # These two are decorator factories. While they are conceptually very similar,
176 # These two are decorator factories. While they are conceptually very similar,
177 # there are enough differences in the details that it's simpler to have them
177 # there are enough differences in the details that it's simpler to have them
178 # written as completely standalone functions rather than trying to share code
178 # written as completely standalone functions rather than trying to share code
179 # and make a single one with convoluted logic.
179 # and make a single one with convoluted logic.
180
180
def _method_magic_marker(magic_kind):
    """Decorator factory for methods in Magics subclasses.
    """

    validate_type(magic_kind)

    # Closure capturing magic_kind; a full class would be overkill for a
    # single bit of state.
    def magic_deco(arg):
        call = lambda f, *a, **k: f(*a, **k)

        if callable(arg):
            # "Naked" decorator call (just @foo, no args)
            func = arg
            name = func.func_name
            wrapped = decorator(call, func)
            # Methods are recorded by *name*; the actual bound method is
            # resolved later on the instance.
            record_magic(magics, magic_kind, name, name)
            return wrapped
        elif isinstance(arg, basestring):
            # Decorator called with arguments (@foo('bar'))
            name = arg
            def mark(func, *a, **kw):
                record_magic(magics, magic_kind, name, func.func_name)
                return decorator(call, func)
            return mark
        else:
            raise TypeError("Decorator can only be called with "
                            "string or function")

    # Ensure the resulting decorator has a usable docstring
    magic_deco.__doc__ = _docstring_template.format('method', magic_kind)
    return magic_deco
213
213
214
214
def _function_magic_marker(magic_kind):
    """Decorator factory for standalone functions.
    """
    validate_type(magic_kind)

    # Closure capturing magic_kind; a full class would be overkill for a
    # single bit of state.
    def magic_deco(arg):
        call = lambda f, *a, **k: f(*a, **k)

        # Find get_ipython() in the caller's namespace
        caller = sys._getframe(1)
        for ns in ('f_locals', 'f_globals', 'f_builtins'):
            get_ipython = getattr(caller, ns).get('get_ipython')
            if get_ipython is not None:
                break
        else:
            raise NameError('Decorator can only run in context where '
                            '`get_ipython` exists')

        ip = get_ipython()

        if callable(arg):
            # "Naked" decorator call (just @foo, no args)
            func = arg
            name = func.func_name
            ip.register_magic_function(func, magic_kind, name)
            return decorator(call, func)
        elif isinstance(arg, basestring):
            # Decorator called with arguments (@foo('bar'))
            name = arg
            def mark(func, *a, **kw):
                ip.register_magic_function(func, magic_kind, name)
                return decorator(call, func)
            return mark
        else:
            raise TypeError("Decorator can only be called with "
                            "string or function")

    # Ensure the resulting decorator has a usable docstring
    ds = _docstring_template.format('function', magic_kind)
    ds += dedent("""
    Note: this decorator can only be used in a context where IPython is already
    active, so that the `get_ipython()` call succeeds. You can therefore use
    it in your startup files loaded after IPython initializes, but *not* in the
    IPython configuration file itself, which is executed before IPython is
    fully up and running. Any file located in the `startup` subdirectory of
    your configuration profile will be OK in this sense.
    """)

    magic_deco.__doc__ = ds
    return magic_deco
269
269
270
270
271 # Create the actual decorators for public use
271 # Create the actual decorators for public use
272
272
273 # These three are used to decorate methods in class definitions
273 # These three are used to decorate methods in class definitions
274 line_magic = _method_magic_marker('line')
274 line_magic = _method_magic_marker('line')
275 cell_magic = _method_magic_marker('cell')
275 cell_magic = _method_magic_marker('cell')
276 line_cell_magic = _method_magic_marker('line_cell')
276 line_cell_magic = _method_magic_marker('line_cell')
277
277
278 # These three decorate standalone functions and perform the decoration
278 # These three decorate standalone functions and perform the decoration
279 # immediately. They can only run where get_ipython() works
279 # immediately. They can only run where get_ipython() works
280 register_line_magic = _function_magic_marker('line')
280 register_line_magic = _function_magic_marker('line')
281 register_cell_magic = _function_magic_marker('cell')
281 register_cell_magic = _function_magic_marker('cell')
282 register_line_cell_magic = _function_magic_marker('line_cell')
282 register_line_cell_magic = _function_magic_marker('line_cell')
283
283
284 #-----------------------------------------------------------------------------
284 #-----------------------------------------------------------------------------
285 # Core Magic classes
285 # Core Magic classes
286 #-----------------------------------------------------------------------------
286 #-----------------------------------------------------------------------------
287
287
288 class MagicsManager(Configurable):
288 class MagicsManager(Configurable):
289 """Object that handles all magic-related functionality for IPython.
289 """Object that handles all magic-related functionality for IPython.
290 """
290 """
291 # Non-configurable class attributes
291 # Non-configurable class attributes
292
292
293 # A two-level dict, first keyed by magic type, then by magic function, and
293 # A two-level dict, first keyed by magic type, then by magic function, and
294 # holding the actual callable object as value. This is the dict used for
294 # holding the actual callable object as value. This is the dict used for
295 # magic function dispatch
295 # magic function dispatch
296 magics = Dict
296 magics = Dict
297
297
298 # A registry of the original objects that we've been given holding magics.
298 # A registry of the original objects that we've been given holding magics.
299 registry = Dict
299 registry = Dict
300
300
301 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
301 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
302
302
303 auto_magic = Bool(True, config=True, help=
303 auto_magic = Bool(True, config=True, help=
304 "Automatically call line magics without requiring explicit % prefix")
304 "Automatically call line magics without requiring explicit % prefix")
305
305
306 _auto_status = [
306 _auto_status = [
307 'Automagic is OFF, % prefix IS needed for line magics.',
307 'Automagic is OFF, % prefix IS needed for line magics.',
308 'Automagic is ON, % prefix IS NOT needed for line magics.']
308 'Automagic is ON, % prefix IS NOT needed for line magics.']
309
309
310 user_magics = Instance('IPython.core.magics.UserMagics')
310 user_magics = Instance('IPython.core.magics.UserMagics')
311
311
312 def __init__(self, shell=None, config=None, user_magics=None, **traits):
312 def __init__(self, shell=None, config=None, user_magics=None, **traits):
313
313
314 super(MagicsManager, self).__init__(shell=shell, config=config,
314 super(MagicsManager, self).__init__(shell=shell, config=config,
315 user_magics=user_magics, **traits)
315 user_magics=user_magics, **traits)
316 self.magics = dict(line={}, cell={})
316 self.magics = dict(line={}, cell={})
317 # Let's add the user_magics to the registry for uniformity, so *all*
317 # Let's add the user_magics to the registry for uniformity, so *all*
318 # registered magic containers can be found there.
318 # registered magic containers can be found there.
319 self.registry[user_magics.__class__.__name__] = user_magics
319 self.registry[user_magics.__class__.__name__] = user_magics
320
320
321 def auto_status(self):
321 def auto_status(self):
322 """Return descriptive string with automagic status."""
322 """Return descriptive string with automagic status."""
323 return self._auto_status[self.auto_magic]
323 return self._auto_status[self.auto_magic]
324
324
325 def lsmagic(self):
325 def lsmagic(self):
326 """Return a dict of currently available magic functions.
326 """Return a dict of currently available magic functions.
327
327
328 The return dict has the keys 'line' and 'cell', corresponding to the
328 The return dict has the keys 'line' and 'cell', corresponding to the
329 two types of magics we support. Each value is a list of names.
329 two types of magics we support. Each value is a list of names.
330 """
330 """
331 return self.magics
331 return self.magics
332
332
333 def register(self, *magic_objects):
333 def register(self, *magic_objects):
334 """Register one or more instances of Magics.
334 """Register one or more instances of Magics.
335
335
336 Take one or more classes or instances of classes that subclass the main
336 Take one or more classes or instances of classes that subclass the main
337 `core.Magic` class, and register them with IPython to use the magic
337 `core.Magic` class, and register them with IPython to use the magic
338 functions they provide. The registration process will then ensure that
338 functions they provide. The registration process will then ensure that
339 any methods that have decorated to provide line and/or cell magics will
339 any methods that have decorated to provide line and/or cell magics will
340 be recognized with the `%x`/`%%x` syntax as a line/cell magic
340 be recognized with the `%x`/`%%x` syntax as a line/cell magic
341 respectively.
341 respectively.
342
342
343 If classes are given, they will be instantiated with the default
343 If classes are given, they will be instantiated with the default
344 constructor. If your classes need a custom constructor, you should
344 constructor. If your classes need a custom constructor, you should
345 instanitate them first and pass the instance.
345 instanitate them first and pass the instance.
346
346
347 The provided arguments can be an arbitrary mix of classes and instances.
347 The provided arguments can be an arbitrary mix of classes and instances.
348
348
349 Parameters
349 Parameters
350 ----------
350 ----------
351 magic_objects : one or more classes or instances
351 magic_objects : one or more classes or instances
352 """
352 """
353 # Start by validating them to ensure they have all had their magic
353 # Start by validating them to ensure they have all had their magic
354 # methods registered at the instance level
354 # methods registered at the instance level
355 for m in magic_objects:
355 for m in magic_objects:
356 if not m.registered:
356 if not m.registered:
357 raise ValueError("Class of magics %r was constructed without "
357 raise ValueError("Class of magics %r was constructed without "
358 "the @register_macics class decorator")
358 "the @register_macics class decorator")
359 if type(m) is type:
359 if type(m) is type:
360 # If we're given an uninstantiated class
360 # If we're given an uninstantiated class
361 m = m(self.shell)
361 m = m(self.shell)
362
362
363 # Now that we have an instance, we can register it and update the
363 # Now that we have an instance, we can register it and update the
364 # table of callables
364 # table of callables
365 self.registry[m.__class__.__name__] = m
365 self.registry[m.__class__.__name__] = m
366 for mtype in magic_kinds:
366 for mtype in magic_kinds:
367 self.magics[mtype].update(m.magics[mtype])
367 self.magics[mtype].update(m.magics[mtype])
368
368
369 def register_function(self, func, magic_kind='line', magic_name=None):
369 def register_function(self, func, magic_kind='line', magic_name=None):
370 """Expose a standalone function as magic function for IPython.
370 """Expose a standalone function as magic function for IPython.
371
371
372 This will create an IPython magic (line, cell or both) from a
372 This will create an IPython magic (line, cell or both) from a
373 standalone function. The functions should have the following
373 standalone function. The functions should have the following
374 signatures:
374 signatures:
375
375
376 * For line magics: `def f(line)`
376 * For line magics: `def f(line)`
377 * For cell magics: `def f(line, cell)`
377 * For cell magics: `def f(line, cell)`
378 * For a function that does both: `def f(line, cell=None)`
378 * For a function that does both: `def f(line, cell=None)`
379
379
380 In the latter case, the function will be called with `cell==None` when
380 In the latter case, the function will be called with `cell==None` when
381 invoked as `%f`, and with cell as a string when invoked as `%%f`.
381 invoked as `%f`, and with cell as a string when invoked as `%%f`.
382
382
383 Parameters
383 Parameters
384 ----------
384 ----------
385 func : callable
385 func : callable
386 Function to be registered as a magic.
386 Function to be registered as a magic.
387
387
388 magic_kind : str
388 magic_kind : str
389 Kind of magic, one of 'line', 'cell' or 'line_cell'
389 Kind of magic, one of 'line', 'cell' or 'line_cell'
390
390
391 magic_name : optional str
391 magic_name : optional str
392 If given, the name the magic will have in the IPython namespace. By
392 If given, the name the magic will have in the IPython namespace. By
393 default, the name of the function itself is used.
393 default, the name of the function itself is used.
394 """
394 """
395
395
396 # Create the new method in the user_magics and register it in the
396 # Create the new method in the user_magics and register it in the
397 # global table
397 # global table
398 validate_type(magic_kind)
398 validate_type(magic_kind)
399 magic_name = func.func_name if magic_name is None else magic_name
399 magic_name = func.func_name if magic_name is None else magic_name
400 setattr(self.user_magics, magic_name, func)
400 setattr(self.user_magics, magic_name, func)
401 record_magic(self.magics, magic_kind, magic_name, func)
401 record_magic(self.magics, magic_kind, magic_name, func)
402
402
403 def define_magic(self, name, func):
403 def define_magic(self, name, func):
404 """[Deprecated] Expose own function as magic function for IPython.
404 """[Deprecated] Expose own function as magic function for IPython.
405
405
406 Example::
406 Example::
407
407
408 def foo_impl(self, parameter_s=''):
408 def foo_impl(self, parameter_s=''):
409 'My very own magic!. (Use docstrings, IPython reads them).'
409 'My very own magic!. (Use docstrings, IPython reads them).'
410 print 'Magic function. Passed parameter is between < >:'
410 print 'Magic function. Passed parameter is between < >:'
411 print '<%s>' % parameter_s
411 print '<%s>' % parameter_s
412 print 'The self object is:', self
412 print 'The self object is:', self
413
413
414 ip.define_magic('foo',foo_impl)
414 ip.define_magic('foo',foo_impl)
415 """
415 """
416 meth = types.MethodType(func, self.user_magics)
416 meth = types.MethodType(func, self.user_magics)
417 setattr(self.user_magics, name, meth)
417 setattr(self.user_magics, name, meth)
418 record_magic(self.magics, 'line', name, meth)
418 record_magic(self.magics, 'line', name, meth)
419
419
420 # Key base class that provides the central functionality for magics.
420 # Key base class that provides the central functionality for magics.
421
421
422 class Magics(object):
422 class Magics(object):
423 """Base class for implementing magic functions.
423 """Base class for implementing magic functions.
424
424
425 Shell functions which can be reached as %function_name. All magic
425 Shell functions which can be reached as %function_name. All magic
426 functions should accept a string, which they can parse for their own
426 functions should accept a string, which they can parse for their own
427 needs. This can make some functions easier to type, eg `%cd ../`
427 needs. This can make some functions easier to type, eg `%cd ../`
428 vs. `%cd("../")`
428 vs. `%cd("../")`
429
429
430 Classes providing magic functions need to subclass this class, and they
430 Classes providing magic functions need to subclass this class, and they
431 MUST:
431 MUST:
432
432
433 - Use the method decorators `@line_magic` and `@cell_magic` to decorate
433 - Use the method decorators `@line_magic` and `@cell_magic` to decorate
434 individual methods as magic functions, AND
434 individual methods as magic functions, AND
435
435
436 - Use the class decorator `@magics_class` to ensure that the magic
436 - Use the class decorator `@magics_class` to ensure that the magic
437 methods are properly registered at the instance level upon instance
437 methods are properly registered at the instance level upon instance
438 initialization.
438 initialization.
439
439
440 See :mod:`magic_functions` for examples of actual implementation classes.
440 See :mod:`magic_functions` for examples of actual implementation classes.
441 """
441 """
442 # Dict holding all command-line options for each magic.
442 # Dict holding all command-line options for each magic.
443 options_table = None
443 options_table = None
444 # Dict for the mapping of magic names to methods, set by class decorator
444 # Dict for the mapping of magic names to methods, set by class decorator
445 magics = None
445 magics = None
446 # Flag to check that the class decorator was properly applied
446 # Flag to check that the class decorator was properly applied
447 registered = False
447 registered = False
448 # Instance of IPython shell
448 # Instance of IPython shell
449 shell = None
449 shell = None
450
450
451 def __init__(self, shell):
451 def __init__(self, shell):
452 if not(self.__class__.registered):
452 if not(self.__class__.registered):
453 raise ValueError('Magics subclass without registration - '
453 raise ValueError('Magics subclass without registration - '
454 'did you forget to apply @magics_class?')
454 'did you forget to apply @magics_class?')
455 self.shell = shell
455 self.shell = shell
456 self.options_table = {}
456 self.options_table = {}
457 # The method decorators are run when the instance doesn't exist yet, so
457 # The method decorators are run when the instance doesn't exist yet, so
458 # they can only record the names of the methods they are supposed to
458 # they can only record the names of the methods they are supposed to
459 # grab. Only now, that the instance exists, can we create the proper
459 # grab. Only now, that the instance exists, can we create the proper
460 # mapping to bound methods. So we read the info off the original names
460 # mapping to bound methods. So we read the info off the original names
461 # table and replace each method name by the actual bound method.
461 # table and replace each method name by the actual bound method.
462 for mtype in magic_kinds:
462 for mtype in magic_kinds:
463 tab = self.magics[mtype]
463 tab = self.magics[mtype]
464 # must explicitly use keys, as we're mutating this puppy
464 # must explicitly use keys, as we're mutating this puppy
465 for magic_name in tab.keys():
465 for magic_name in tab.keys():
466 meth_name = tab[magic_name]
466 meth_name = tab[magic_name]
467 if isinstance(meth_name, basestring):
467 if isinstance(meth_name, basestring):
468 tab[magic_name] = getattr(self, meth_name)
468 tab[magic_name] = getattr(self, meth_name)
469
469
470 def arg_err(self,func):
470 def arg_err(self,func):
471 """Print docstring if incorrect arguments were passed"""
471 """Print docstring if incorrect arguments were passed"""
472 print 'Error in arguments:'
472 print 'Error in arguments:'
473 print oinspect.getdoc(func)
473 print oinspect.getdoc(func)
474
474
475 def format_latex(self, strng):
475 def format_latex(self, strng):
476 """Format a string for latex inclusion."""
476 """Format a string for latex inclusion."""
477
477
478 # Characters that need to be escaped for latex:
478 # Characters that need to be escaped for latex:
479 escape_re = re.compile(r'(%|_|\$|#|&)',re.MULTILINE)
479 escape_re = re.compile(r'(%|_|\$|#|&)',re.MULTILINE)
480 # Magic command names as headers:
480 # Magic command names as headers:
481 cmd_name_re = re.compile(r'^(%s.*?):' % ESC_MAGIC,
481 cmd_name_re = re.compile(r'^(%s.*?):' % ESC_MAGIC,
482 re.MULTILINE)
482 re.MULTILINE)
483 # Magic commands
483 # Magic commands
484 cmd_re = re.compile(r'(?P<cmd>%s.+?\b)(?!\}\}:)' % ESC_MAGIC,
484 cmd_re = re.compile(r'(?P<cmd>%s.+?\b)(?!\}\}:)' % ESC_MAGIC,
485 re.MULTILINE)
485 re.MULTILINE)
486 # Paragraph continue
486 # Paragraph continue
487 par_re = re.compile(r'\\$',re.MULTILINE)
487 par_re = re.compile(r'\\$',re.MULTILINE)
488
488
489 # The "\n" symbol
489 # The "\n" symbol
490 newline_re = re.compile(r'\\n')
490 newline_re = re.compile(r'\\n')
491
491
492 # Now build the string for output:
492 # Now build the string for output:
493 #strng = cmd_name_re.sub(r'\n\\texttt{\\textsl{\\large \1}}:',strng)
493 #strng = cmd_name_re.sub(r'\n\\texttt{\\textsl{\\large \1}}:',strng)
494 strng = cmd_name_re.sub(r'\n\\bigskip\n\\texttt{\\textbf{ \1}}:',
494 strng = cmd_name_re.sub(r'\n\\bigskip\n\\texttt{\\textbf{ \1}}:',
495 strng)
495 strng)
496 strng = cmd_re.sub(r'\\texttt{\g<cmd>}',strng)
496 strng = cmd_re.sub(r'\\texttt{\g<cmd>}',strng)
497 strng = par_re.sub(r'\\\\',strng)
497 strng = par_re.sub(r'\\\\',strng)
498 strng = escape_re.sub(r'\\\1',strng)
498 strng = escape_re.sub(r'\\\1',strng)
499 strng = newline_re.sub(r'\\textbackslash{}n',strng)
499 strng = newline_re.sub(r'\\textbackslash{}n',strng)
500 return strng
500 return strng
501
501
502 def parse_options(self, arg_str, opt_str, *long_opts, **kw):
502 def parse_options(self, arg_str, opt_str, *long_opts, **kw):
503 """Parse options passed to an argument string.
503 """Parse options passed to an argument string.
504
504
505 The interface is similar to that of getopt(), but it returns back a
505 The interface is similar to that of getopt(), but it returns back a
506 Struct with the options as keys and the stripped argument string still
506 Struct with the options as keys and the stripped argument string still
507 as a string.
507 as a string.
508
508
509 arg_str is quoted as a true sys.argv vector by using shlex.split.
509 arg_str is quoted as a true sys.argv vector by using shlex.split.
510 This allows us to easily expand variables, glob files, quote
510 This allows us to easily expand variables, glob files, quote
511 arguments, etc.
511 arguments, etc.
512
512
513 Options:
513 Options:
514 -mode: default 'string'. If given as 'list', the argument string is
514 -mode: default 'string'. If given as 'list', the argument string is
515 returned as a list (split on whitespace) instead of a string.
515 returned as a list (split on whitespace) instead of a string.
516
516
517 -list_all: put all option values in lists. Normally only options
517 -list_all: put all option values in lists. Normally only options
518 appearing more than once are put in a list.
518 appearing more than once are put in a list.
519
519
520 -posix (True): whether to split the input line in POSIX mode or not,
520 -posix (True): whether to split the input line in POSIX mode or not,
521 as per the conventions outlined in the shlex module from the
521 as per the conventions outlined in the shlex module from the
522 standard library."""
522 standard library."""
523
523
524 # inject default options at the beginning of the input line
524 # inject default options at the beginning of the input line
525 caller = sys._getframe(1).f_code.co_name
525 caller = sys._getframe(1).f_code.co_name
526 arg_str = '%s %s' % (self.options_table.get(caller,''),arg_str)
526 arg_str = '%s %s' % (self.options_table.get(caller,''),arg_str)
527
527
528 mode = kw.get('mode','string')
528 mode = kw.get('mode','string')
529 if mode not in ['string','list']:
529 if mode not in ['string','list']:
530 raise ValueError,'incorrect mode given: %s' % mode
530 raise ValueError,'incorrect mode given: %s' % mode
531 # Get options
531 # Get options
532 list_all = kw.get('list_all',0)
532 list_all = kw.get('list_all',0)
533 posix = kw.get('posix', os.name == 'posix')
533 posix = kw.get('posix', os.name == 'posix')
534 strict = kw.get('strict', True)
534 strict = kw.get('strict', True)
535
535
536 # Check if we have more than one argument to warrant extra processing:
536 # Check if we have more than one argument to warrant extra processing:
537 odict = {} # Dictionary with options
537 odict = {} # Dictionary with options
538 args = arg_str.split()
538 args = arg_str.split()
539 if len(args) >= 1:
539 if len(args) >= 1:
540 # If the list of inputs only has 0 or 1 thing in it, there's no
540 # If the list of inputs only has 0 or 1 thing in it, there's no
541 # need to look for options
541 # need to look for options
542 argv = arg_split(arg_str, posix, strict)
542 argv = arg_split(arg_str, posix, strict)
543 # Do regular option processing
543 # Do regular option processing
544 try:
544 try:
545 opts,args = getopt(argv,opt_str,*long_opts)
545 opts,args = getopt(argv, opt_str, long_opts)
546 except GetoptError,e:
546 except GetoptError,e:
547 raise UsageError('%s ( allowed: "%s" %s)' % (e.msg,opt_str,
547 raise UsageError('%s ( allowed: "%s" %s)' % (e.msg,opt_str,
548 " ".join(long_opts)))
548 " ".join(long_opts)))
549 for o,a in opts:
549 for o,a in opts:
550 if o.startswith('--'):
550 if o.startswith('--'):
551 o = o[2:]
551 o = o[2:]
552 else:
552 else:
553 o = o[1:]
553 o = o[1:]
554 try:
554 try:
555 odict[o].append(a)
555 odict[o].append(a)
556 except AttributeError:
556 except AttributeError:
557 odict[o] = [odict[o],a]
557 odict[o] = [odict[o],a]
558 except KeyError:
558 except KeyError:
559 if list_all:
559 if list_all:
560 odict[o] = [a]
560 odict[o] = [a]
561 else:
561 else:
562 odict[o] = a
562 odict[o] = a
563
563
564 # Prepare opts,args for return
564 # Prepare opts,args for return
565 opts = Struct(odict)
565 opts = Struct(odict)
566 if mode == 'string':
566 if mode == 'string':
567 args = ' '.join(args)
567 args = ' '.join(args)
568
568
569 return opts,args
569 return opts,args
570
570
571 def default_option(self, fn, optstr):
571 def default_option(self, fn, optstr):
572 """Make an entry in the options_table for fn, with value optstr"""
572 """Make an entry in the options_table for fn, with value optstr"""
573
573
574 if fn not in self.lsmagic():
574 if fn not in self.lsmagic():
575 error("%s is not a magic function" % fn)
575 error("%s is not a magic function" % fn)
576 self.options_table[fn] = optstr
576 self.options_table[fn] = optstr
@@ -1,547 +1,556 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """Tests for various magic functions.
2 """Tests for various magic functions.
3
3
4 Needs to be run by nose (to make ipython session available).
4 Needs to be run by nose (to make ipython session available).
5 """
5 """
6 from __future__ import absolute_import
6 from __future__ import absolute_import
7
7
8 #-----------------------------------------------------------------------------
8 #-----------------------------------------------------------------------------
9 # Imports
9 # Imports
10 #-----------------------------------------------------------------------------
10 #-----------------------------------------------------------------------------
11
11
12 import io
12 import io
13 import os
13 import os
14 import sys
14 import sys
15 from StringIO import StringIO
15 from StringIO import StringIO
16 from unittest import TestCase
16 from unittest import TestCase
17
17
18 import nose.tools as nt
18 import nose.tools as nt
19
19
20 from IPython.core import magic
20 from IPython.core import magic
21 from IPython.core.magic import (Magics, magics_class, line_magic,
21 from IPython.core.magic import (Magics, magics_class, line_magic,
22 cell_magic, line_cell_magic,
22 cell_magic, line_cell_magic,
23 register_line_magic, register_cell_magic,
23 register_line_magic, register_cell_magic,
24 register_line_cell_magic)
24 register_line_cell_magic)
25 from IPython.core.magics import execution
25 from IPython.core.magics import execution
26 from IPython.nbformat.v3.tests.nbexamples import nb0
26 from IPython.nbformat.v3.tests.nbexamples import nb0
27 from IPython.nbformat import current
27 from IPython.nbformat import current
28 from IPython.testing import decorators as dec
28 from IPython.testing import decorators as dec
29 from IPython.testing import tools as tt
29 from IPython.testing import tools as tt
30 from IPython.utils import py3compat
30 from IPython.utils import py3compat
31 from IPython.utils.tempdir import TemporaryDirectory
31 from IPython.utils.tempdir import TemporaryDirectory
32
32
33 #-----------------------------------------------------------------------------
33 #-----------------------------------------------------------------------------
34 # Test functions begin
34 # Test functions begin
35 #-----------------------------------------------------------------------------
35 #-----------------------------------------------------------------------------
36
36
37 @magic.magics_class
37 @magic.magics_class
38 class DummyMagics(magic.Magics): pass
38 class DummyMagics(magic.Magics): pass
39
39
40 def test_rehashx():
40 def test_rehashx():
41 # clear up everything
41 # clear up everything
42 _ip = get_ipython()
42 _ip = get_ipython()
43 _ip.alias_manager.alias_table.clear()
43 _ip.alias_manager.alias_table.clear()
44 del _ip.db['syscmdlist']
44 del _ip.db['syscmdlist']
45
45
46 _ip.magic('rehashx')
46 _ip.magic('rehashx')
47 # Practically ALL ipython development systems will have more than 10 aliases
47 # Practically ALL ipython development systems will have more than 10 aliases
48
48
49 yield (nt.assert_true, len(_ip.alias_manager.alias_table) > 10)
49 yield (nt.assert_true, len(_ip.alias_manager.alias_table) > 10)
50 for key, val in _ip.alias_manager.alias_table.iteritems():
50 for key, val in _ip.alias_manager.alias_table.iteritems():
51 # we must strip dots from alias names
51 # we must strip dots from alias names
52 nt.assert_true('.' not in key)
52 nt.assert_true('.' not in key)
53
53
54 # rehashx must fill up syscmdlist
54 # rehashx must fill up syscmdlist
55 scoms = _ip.db['syscmdlist']
55 scoms = _ip.db['syscmdlist']
56 yield (nt.assert_true, len(scoms) > 10)
56 yield (nt.assert_true, len(scoms) > 10)
57
57
58
58
59 def test_magic_parse_options():
59 def test_magic_parse_options():
60 """Test that we don't mangle paths when parsing magic options."""
60 """Test that we don't mangle paths when parsing magic options."""
61 ip = get_ipython()
61 ip = get_ipython()
62 path = 'c:\\x'
62 path = 'c:\\x'
63 m = DummyMagics(ip)
63 m = DummyMagics(ip)
64 opts = m.parse_options('-f %s' % path,'f:')[0]
64 opts = m.parse_options('-f %s' % path,'f:')[0]
65 # argv splitting is os-dependent
65 # argv splitting is os-dependent
66 if os.name == 'posix':
66 if os.name == 'posix':
67 expected = 'c:x'
67 expected = 'c:x'
68 else:
68 else:
69 expected = path
69 expected = path
70 nt.assert_equals(opts['f'], expected)
70 nt.assert_equals(opts['f'], expected)
71
71
72 def test_magic_parse_long_options():
73 """Magic.parse_options can handle --foo=bar long options"""
74 ip = get_ipython()
75 m = DummyMagics(ip)
76 opts, _ = m.parse_options('--foo --bar=bubble', 'a', 'foo', 'bar=')
77 nt.assert_true('foo' in opts)
78 nt.assert_true('bar' in opts)
79 nt.assert_true(opts['bar'], "bubble")
80
72
81
73 @dec.skip_without('sqlite3')
82 @dec.skip_without('sqlite3')
74 def doctest_hist_f():
83 def doctest_hist_f():
75 """Test %hist -f with temporary filename.
84 """Test %hist -f with temporary filename.
76
85
77 In [9]: import tempfile
86 In [9]: import tempfile
78
87
79 In [10]: tfile = tempfile.mktemp('.py','tmp-ipython-')
88 In [10]: tfile = tempfile.mktemp('.py','tmp-ipython-')
80
89
81 In [11]: %hist -nl -f $tfile 3
90 In [11]: %hist -nl -f $tfile 3
82
91
83 In [13]: import os; os.unlink(tfile)
92 In [13]: import os; os.unlink(tfile)
84 """
93 """
85
94
86
95
87 @dec.skip_without('sqlite3')
96 @dec.skip_without('sqlite3')
88 def doctest_hist_r():
97 def doctest_hist_r():
89 """Test %hist -r
98 """Test %hist -r
90
99
91 XXX - This test is not recording the output correctly. For some reason, in
100 XXX - This test is not recording the output correctly. For some reason, in
92 testing mode the raw history isn't getting populated. No idea why.
101 testing mode the raw history isn't getting populated. No idea why.
93 Disabling the output checking for now, though at least we do run it.
102 Disabling the output checking for now, though at least we do run it.
94
103
95 In [1]: 'hist' in _ip.lsmagic()
104 In [1]: 'hist' in _ip.lsmagic()
96 Out[1]: True
105 Out[1]: True
97
106
98 In [2]: x=1
107 In [2]: x=1
99
108
100 In [3]: %hist -rl 2
109 In [3]: %hist -rl 2
101 x=1 # random
110 x=1 # random
102 %hist -r 2
111 %hist -r 2
103 """
112 """
104
113
105
114
106 @dec.skip_without('sqlite3')
115 @dec.skip_without('sqlite3')
107 def doctest_hist_op():
116 def doctest_hist_op():
108 """Test %hist -op
117 """Test %hist -op
109
118
110 In [1]: class b(float):
119 In [1]: class b(float):
111 ...: pass
120 ...: pass
112 ...:
121 ...:
113
122
114 In [2]: class s(object):
123 In [2]: class s(object):
115 ...: def __str__(self):
124 ...: def __str__(self):
116 ...: return 's'
125 ...: return 's'
117 ...:
126 ...:
118
127
119 In [3]:
128 In [3]:
120
129
121 In [4]: class r(b):
130 In [4]: class r(b):
122 ...: def __repr__(self):
131 ...: def __repr__(self):
123 ...: return 'r'
132 ...: return 'r'
124 ...:
133 ...:
125
134
126 In [5]: class sr(s,r): pass
135 In [5]: class sr(s,r): pass
127 ...:
136 ...:
128
137
129 In [6]:
138 In [6]:
130
139
131 In [7]: bb=b()
140 In [7]: bb=b()
132
141
133 In [8]: ss=s()
142 In [8]: ss=s()
134
143
135 In [9]: rr=r()
144 In [9]: rr=r()
136
145
137 In [10]: ssrr=sr()
146 In [10]: ssrr=sr()
138
147
139 In [11]: 4.5
148 In [11]: 4.5
140 Out[11]: 4.5
149 Out[11]: 4.5
141
150
142 In [12]: str(ss)
151 In [12]: str(ss)
143 Out[12]: 's'
152 Out[12]: 's'
144
153
145 In [13]:
154 In [13]:
146
155
147 In [14]: %hist -op
156 In [14]: %hist -op
148 >>> class b:
157 >>> class b:
149 ... pass
158 ... pass
150 ...
159 ...
151 >>> class s(b):
160 >>> class s(b):
152 ... def __str__(self):
161 ... def __str__(self):
153 ... return 's'
162 ... return 's'
154 ...
163 ...
155 >>>
164 >>>
156 >>> class r(b):
165 >>> class r(b):
157 ... def __repr__(self):
166 ... def __repr__(self):
158 ... return 'r'
167 ... return 'r'
159 ...
168 ...
160 >>> class sr(s,r): pass
169 >>> class sr(s,r): pass
161 >>>
170 >>>
162 >>> bb=b()
171 >>> bb=b()
163 >>> ss=s()
172 >>> ss=s()
164 >>> rr=r()
173 >>> rr=r()
165 >>> ssrr=sr()
174 >>> ssrr=sr()
166 >>> 4.5
175 >>> 4.5
167 4.5
176 4.5
168 >>> str(ss)
177 >>> str(ss)
169 's'
178 's'
170 >>>
179 >>>
171 """
180 """
172
181
173
182
174 @dec.skip_without('sqlite3')
183 @dec.skip_without('sqlite3')
175 def test_macro():
184 def test_macro():
176 ip = get_ipython()
185 ip = get_ipython()
177 ip.history_manager.reset() # Clear any existing history.
186 ip.history_manager.reset() # Clear any existing history.
178 cmds = ["a=1", "def b():\n return a**2", "print(a,b())"]
187 cmds = ["a=1", "def b():\n return a**2", "print(a,b())"]
179 for i, cmd in enumerate(cmds, start=1):
188 for i, cmd in enumerate(cmds, start=1):
180 ip.history_manager.store_inputs(i, cmd)
189 ip.history_manager.store_inputs(i, cmd)
181 ip.magic("macro test 1-3")
190 ip.magic("macro test 1-3")
182 nt.assert_equal(ip.user_ns["test"].value, "\n".join(cmds)+"\n")
191 nt.assert_equal(ip.user_ns["test"].value, "\n".join(cmds)+"\n")
183
192
184 # List macros.
193 # List macros.
185 assert "test" in ip.magic("macro")
194 assert "test" in ip.magic("macro")
186
195
187
196
188 @dec.skip_without('sqlite3')
197 @dec.skip_without('sqlite3')
189 def test_macro_run():
198 def test_macro_run():
190 """Test that we can run a multi-line macro successfully."""
199 """Test that we can run a multi-line macro successfully."""
191 ip = get_ipython()
200 ip = get_ipython()
192 ip.history_manager.reset()
201 ip.history_manager.reset()
193 cmds = ["a=10", "a+=1", py3compat.doctest_refactor_print("print a"),
202 cmds = ["a=10", "a+=1", py3compat.doctest_refactor_print("print a"),
194 "%macro test 2-3"]
203 "%macro test 2-3"]
195 for cmd in cmds:
204 for cmd in cmds:
196 ip.run_cell(cmd, store_history=True)
205 ip.run_cell(cmd, store_history=True)
197 nt.assert_equal(ip.user_ns["test"].value,
206 nt.assert_equal(ip.user_ns["test"].value,
198 py3compat.doctest_refactor_print("a+=1\nprint a\n"))
207 py3compat.doctest_refactor_print("a+=1\nprint a\n"))
199 with tt.AssertPrints("12"):
208 with tt.AssertPrints("12"):
200 ip.run_cell("test")
209 ip.run_cell("test")
201 with tt.AssertPrints("13"):
210 with tt.AssertPrints("13"):
202 ip.run_cell("test")
211 ip.run_cell("test")
203
212
204
213
205 @dec.skipif_not_numpy
214 @dec.skipif_not_numpy
206 def test_numpy_reset_array_undec():
215 def test_numpy_reset_array_undec():
207 "Test '%reset array' functionality"
216 "Test '%reset array' functionality"
208 _ip.ex('import numpy as np')
217 _ip.ex('import numpy as np')
209 _ip.ex('a = np.empty(2)')
218 _ip.ex('a = np.empty(2)')
210 yield (nt.assert_true, 'a' in _ip.user_ns)
219 yield (nt.assert_true, 'a' in _ip.user_ns)
211 _ip.magic('reset -f array')
220 _ip.magic('reset -f array')
212 yield (nt.assert_false, 'a' in _ip.user_ns)
221 yield (nt.assert_false, 'a' in _ip.user_ns)
213
222
214 def test_reset_out():
223 def test_reset_out():
215 "Test '%reset out' magic"
224 "Test '%reset out' magic"
216 _ip.run_cell("parrot = 'dead'", store_history=True)
225 _ip.run_cell("parrot = 'dead'", store_history=True)
217 # test '%reset -f out', make an Out prompt
226 # test '%reset -f out', make an Out prompt
218 _ip.run_cell("parrot", store_history=True)
227 _ip.run_cell("parrot", store_history=True)
219 nt.assert_true('dead' in [_ip.user_ns[x] for x in '_','__','___'])
228 nt.assert_true('dead' in [_ip.user_ns[x] for x in '_','__','___'])
220 _ip.magic('reset -f out')
229 _ip.magic('reset -f out')
221 nt.assert_false('dead' in [_ip.user_ns[x] for x in '_','__','___'])
230 nt.assert_false('dead' in [_ip.user_ns[x] for x in '_','__','___'])
222 nt.assert_true(len(_ip.user_ns['Out']) == 0)
231 nt.assert_true(len(_ip.user_ns['Out']) == 0)
223
232
224 def test_reset_in():
233 def test_reset_in():
225 "Test '%reset in' magic"
234 "Test '%reset in' magic"
226 # test '%reset -f in'
235 # test '%reset -f in'
227 _ip.run_cell("parrot", store_history=True)
236 _ip.run_cell("parrot", store_history=True)
228 nt.assert_true('parrot' in [_ip.user_ns[x] for x in '_i','_ii','_iii'])
237 nt.assert_true('parrot' in [_ip.user_ns[x] for x in '_i','_ii','_iii'])
229 _ip.magic('%reset -f in')
238 _ip.magic('%reset -f in')
230 nt.assert_false('parrot' in [_ip.user_ns[x] for x in '_i','_ii','_iii'])
239 nt.assert_false('parrot' in [_ip.user_ns[x] for x in '_i','_ii','_iii'])
231 nt.assert_true(len(set(_ip.user_ns['In'])) == 1)
240 nt.assert_true(len(set(_ip.user_ns['In'])) == 1)
232
241
233 def test_reset_dhist():
242 def test_reset_dhist():
234 "Test '%reset dhist' magic"
243 "Test '%reset dhist' magic"
235 _ip.run_cell("tmp = [d for d in _dh]") # copy before clearing
244 _ip.run_cell("tmp = [d for d in _dh]") # copy before clearing
236 _ip.magic('cd ' + os.path.dirname(nt.__file__))
245 _ip.magic('cd ' + os.path.dirname(nt.__file__))
237 _ip.magic('cd -')
246 _ip.magic('cd -')
238 nt.assert_true(len(_ip.user_ns['_dh']) > 0)
247 nt.assert_true(len(_ip.user_ns['_dh']) > 0)
239 _ip.magic('reset -f dhist')
248 _ip.magic('reset -f dhist')
240 nt.assert_true(len(_ip.user_ns['_dh']) == 0)
249 nt.assert_true(len(_ip.user_ns['_dh']) == 0)
241 _ip.run_cell("_dh = [d for d in tmp]") #restore
250 _ip.run_cell("_dh = [d for d in tmp]") #restore
242
251
243 def test_reset_in_length():
252 def test_reset_in_length():
244 "Test that '%reset in' preserves In[] length"
253 "Test that '%reset in' preserves In[] length"
245 _ip.run_cell("print 'foo'")
254 _ip.run_cell("print 'foo'")
246 _ip.run_cell("reset -f in")
255 _ip.run_cell("reset -f in")
247 nt.assert_true(len(_ip.user_ns['In']) == _ip.displayhook.prompt_count+1)
256 nt.assert_true(len(_ip.user_ns['In']) == _ip.displayhook.prompt_count+1)
248
257
249 def test_time():
258 def test_time():
250 _ip.magic('time None')
259 _ip.magic('time None')
251
260
252 def test_tb_syntaxerror():
261 def test_tb_syntaxerror():
253 """test %tb after a SyntaxError"""
262 """test %tb after a SyntaxError"""
254 ip = get_ipython()
263 ip = get_ipython()
255 ip.run_cell("for")
264 ip.run_cell("for")
256
265
257 # trap and validate stdout
266 # trap and validate stdout
258 save_stdout = sys.stdout
267 save_stdout = sys.stdout
259 try:
268 try:
260 sys.stdout = StringIO()
269 sys.stdout = StringIO()
261 ip.run_cell("%tb")
270 ip.run_cell("%tb")
262 out = sys.stdout.getvalue()
271 out = sys.stdout.getvalue()
263 finally:
272 finally:
264 sys.stdout = save_stdout
273 sys.stdout = save_stdout
265 # trim output, and only check the last line
274 # trim output, and only check the last line
266 last_line = out.rstrip().splitlines()[-1].strip()
275 last_line = out.rstrip().splitlines()[-1].strip()
267 nt.assert_equals(last_line, "SyntaxError: invalid syntax")
276 nt.assert_equals(last_line, "SyntaxError: invalid syntax")
268
277
269
278
270 @py3compat.doctest_refactor_print
279 @py3compat.doctest_refactor_print
271 def doctest_time():
280 def doctest_time():
272 """
281 """
273 In [10]: %time None
282 In [10]: %time None
274 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
283 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
275 Wall time: 0.00 s
284 Wall time: 0.00 s
276
285
277 In [11]: def f(kmjy):
286 In [11]: def f(kmjy):
278 ....: %time print 2*kmjy
287 ....: %time print 2*kmjy
279
288
280 In [12]: f(3)
289 In [12]: f(3)
281 6
290 6
282 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
291 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
283 Wall time: 0.00 s
292 Wall time: 0.00 s
284 """
293 """
285
294
286
295
287 def test_doctest_mode():
296 def test_doctest_mode():
288 "Toggle doctest_mode twice, it should be a no-op and run without error"
297 "Toggle doctest_mode twice, it should be a no-op and run without error"
289 _ip.magic('doctest_mode')
298 _ip.magic('doctest_mode')
290 _ip.magic('doctest_mode')
299 _ip.magic('doctest_mode')
291
300
292
301
293 def test_parse_options():
302 def test_parse_options():
294 """Tests for basic options parsing in magics."""
303 """Tests for basic options parsing in magics."""
295 # These are only the most minimal of tests, more should be added later. At
304 # These are only the most minimal of tests, more should be added later. At
296 # the very least we check that basic text/unicode calls work OK.
305 # the very least we check that basic text/unicode calls work OK.
297 m = DummyMagics(_ip)
306 m = DummyMagics(_ip)
298 nt.assert_equal(m.parse_options('foo', '')[1], 'foo')
307 nt.assert_equal(m.parse_options('foo', '')[1], 'foo')
299 nt.assert_equal(m.parse_options(u'foo', '')[1], u'foo')
308 nt.assert_equal(m.parse_options(u'foo', '')[1], u'foo')
300
309
301
310
302 def test_dirops():
311 def test_dirops():
303 """Test various directory handling operations."""
312 """Test various directory handling operations."""
304 # curpath = lambda :os.path.splitdrive(os.getcwdu())[1].replace('\\','/')
313 # curpath = lambda :os.path.splitdrive(os.getcwdu())[1].replace('\\','/')
305 curpath = os.getcwdu
314 curpath = os.getcwdu
306 startdir = os.getcwdu()
315 startdir = os.getcwdu()
307 ipdir = os.path.realpath(_ip.ipython_dir)
316 ipdir = os.path.realpath(_ip.ipython_dir)
308 try:
317 try:
309 _ip.magic('cd "%s"' % ipdir)
318 _ip.magic('cd "%s"' % ipdir)
310 nt.assert_equal(curpath(), ipdir)
319 nt.assert_equal(curpath(), ipdir)
311 _ip.magic('cd -')
320 _ip.magic('cd -')
312 nt.assert_equal(curpath(), startdir)
321 nt.assert_equal(curpath(), startdir)
313 _ip.magic('pushd "%s"' % ipdir)
322 _ip.magic('pushd "%s"' % ipdir)
314 nt.assert_equal(curpath(), ipdir)
323 nt.assert_equal(curpath(), ipdir)
315 _ip.magic('popd')
324 _ip.magic('popd')
316 nt.assert_equal(curpath(), startdir)
325 nt.assert_equal(curpath(), startdir)
317 finally:
326 finally:
318 os.chdir(startdir)
327 os.chdir(startdir)
319
328
320
329
321 def test_xmode():
330 def test_xmode():
322 # Calling xmode three times should be a no-op
331 # Calling xmode three times should be a no-op
323 xmode = _ip.InteractiveTB.mode
332 xmode = _ip.InteractiveTB.mode
324 for i in range(3):
333 for i in range(3):
325 _ip.magic("xmode")
334 _ip.magic("xmode")
326 nt.assert_equal(_ip.InteractiveTB.mode, xmode)
335 nt.assert_equal(_ip.InteractiveTB.mode, xmode)
327
336
328 def test_reset_hard():
337 def test_reset_hard():
329 monitor = []
338 monitor = []
330 class A(object):
339 class A(object):
331 def __del__(self):
340 def __del__(self):
332 monitor.append(1)
341 monitor.append(1)
333 def __repr__(self):
342 def __repr__(self):
334 return "<A instance>"
343 return "<A instance>"
335
344
336 _ip.user_ns["a"] = A()
345 _ip.user_ns["a"] = A()
337 _ip.run_cell("a")
346 _ip.run_cell("a")
338
347
339 nt.assert_equal(monitor, [])
348 nt.assert_equal(monitor, [])
340 _ip.magic("reset -f")
349 _ip.magic("reset -f")
341 nt.assert_equal(monitor, [1])
350 nt.assert_equal(monitor, [1])
342
351
343 class TestXdel(tt.TempFileMixin):
352 class TestXdel(tt.TempFileMixin):
344 def test_xdel(self):
353 def test_xdel(self):
345 """Test that references from %run are cleared by xdel."""
354 """Test that references from %run are cleared by xdel."""
346 src = ("class A(object):\n"
355 src = ("class A(object):\n"
347 " monitor = []\n"
356 " monitor = []\n"
348 " def __del__(self):\n"
357 " def __del__(self):\n"
349 " self.monitor.append(1)\n"
358 " self.monitor.append(1)\n"
350 "a = A()\n")
359 "a = A()\n")
351 self.mktmp(src)
360 self.mktmp(src)
352 # %run creates some hidden references...
361 # %run creates some hidden references...
353 _ip.magic("run %s" % self.fname)
362 _ip.magic("run %s" % self.fname)
354 # ... as does the displayhook.
363 # ... as does the displayhook.
355 _ip.run_cell("a")
364 _ip.run_cell("a")
356
365
357 monitor = _ip.user_ns["A"].monitor
366 monitor = _ip.user_ns["A"].monitor
358 nt.assert_equal(monitor, [])
367 nt.assert_equal(monitor, [])
359
368
360 _ip.magic("xdel a")
369 _ip.magic("xdel a")
361
370
362 # Check that a's __del__ method has been called.
371 # Check that a's __del__ method has been called.
363 nt.assert_equal(monitor, [1])
372 nt.assert_equal(monitor, [1])
364
373
365 def doctest_who():
374 def doctest_who():
366 """doctest for %who
375 """doctest for %who
367
376
368 In [1]: %reset -f
377 In [1]: %reset -f
369
378
370 In [2]: alpha = 123
379 In [2]: alpha = 123
371
380
372 In [3]: beta = 'beta'
381 In [3]: beta = 'beta'
373
382
374 In [4]: %who int
383 In [4]: %who int
375 alpha
384 alpha
376
385
377 In [5]: %who str
386 In [5]: %who str
378 beta
387 beta
379
388
380 In [6]: %whos
389 In [6]: %whos
381 Variable Type Data/Info
390 Variable Type Data/Info
382 ----------------------------
391 ----------------------------
383 alpha int 123
392 alpha int 123
384 beta str beta
393 beta str beta
385
394
386 In [7]: %who_ls
395 In [7]: %who_ls
387 Out[7]: ['alpha', 'beta']
396 Out[7]: ['alpha', 'beta']
388 """
397 """
389
398
390 def test_whos():
399 def test_whos():
391 """Check that whos is protected against objects where repr() fails."""
400 """Check that whos is protected against objects where repr() fails."""
392 class A(object):
401 class A(object):
393 def __repr__(self):
402 def __repr__(self):
394 raise Exception()
403 raise Exception()
395 _ip.user_ns['a'] = A()
404 _ip.user_ns['a'] = A()
396 _ip.magic("whos")
405 _ip.magic("whos")
397
406
398 @py3compat.u_format
407 @py3compat.u_format
399 def doctest_precision():
408 def doctest_precision():
400 """doctest for %precision
409 """doctest for %precision
401
410
402 In [1]: f = get_ipython().display_formatter.formatters['text/plain']
411 In [1]: f = get_ipython().display_formatter.formatters['text/plain']
403
412
404 In [2]: %precision 5
413 In [2]: %precision 5
405 Out[2]: {u}'%.5f'
414 Out[2]: {u}'%.5f'
406
415
407 In [3]: f.float_format
416 In [3]: f.float_format
408 Out[3]: {u}'%.5f'
417 Out[3]: {u}'%.5f'
409
418
410 In [4]: %precision %e
419 In [4]: %precision %e
411 Out[4]: {u}'%e'
420 Out[4]: {u}'%e'
412
421
413 In [5]: f(3.1415927)
422 In [5]: f(3.1415927)
414 Out[5]: {u}'3.141593e+00'
423 Out[5]: {u}'3.141593e+00'
415 """
424 """
416
425
417 def test_psearch():
426 def test_psearch():
418 with tt.AssertPrints("dict.fromkeys"):
427 with tt.AssertPrints("dict.fromkeys"):
419 _ip.run_cell("dict.fr*?")
428 _ip.run_cell("dict.fr*?")
420
429
421 def test_timeit_shlex():
430 def test_timeit_shlex():
422 """test shlex issues with timeit (#1109)"""
431 """test shlex issues with timeit (#1109)"""
423 _ip.ex("def f(*a,**kw): pass")
432 _ip.ex("def f(*a,**kw): pass")
424 _ip.magic('timeit -n1 "this is a bug".count(" ")')
433 _ip.magic('timeit -n1 "this is a bug".count(" ")')
425 _ip.magic('timeit -r1 -n1 f(" ", 1)')
434 _ip.magic('timeit -r1 -n1 f(" ", 1)')
426 _ip.magic('timeit -r1 -n1 f(" ", 1, " ", 2, " ")')
435 _ip.magic('timeit -r1 -n1 f(" ", 1, " ", 2, " ")')
427 _ip.magic('timeit -r1 -n1 ("a " + "b")')
436 _ip.magic('timeit -r1 -n1 ("a " + "b")')
428 _ip.magic('timeit -r1 -n1 f("a " + "b")')
437 _ip.magic('timeit -r1 -n1 f("a " + "b")')
429 _ip.magic('timeit -r1 -n1 f("a " + "b ")')
438 _ip.magic('timeit -r1 -n1 f("a " + "b ")')
430
439
431
440
432 def test_timeit_arguments():
441 def test_timeit_arguments():
433 "Test valid timeit arguments, should not cause SyntaxError (GH #1269)"
442 "Test valid timeit arguments, should not cause SyntaxError (GH #1269)"
434 _ip.magic("timeit ('#')")
443 _ip.magic("timeit ('#')")
435
444
436
445
437 @dec.skipif(execution.profile is None)
446 @dec.skipif(execution.profile is None)
438 def test_prun_quotes():
447 def test_prun_quotes():
439 "Test that prun does not clobber string escapes (GH #1302)"
448 "Test that prun does not clobber string escapes (GH #1302)"
440 _ip.magic("prun -q x = '\t'")
449 _ip.magic("prun -q x = '\t'")
441 nt.assert_equal(_ip.user_ns['x'], '\t')
450 nt.assert_equal(_ip.user_ns['x'], '\t')
442
451
443 def test_extension():
452 def test_extension():
444 tmpdir = TemporaryDirectory()
453 tmpdir = TemporaryDirectory()
445 orig_ipython_dir = _ip.ipython_dir
454 orig_ipython_dir = _ip.ipython_dir
446 try:
455 try:
447 _ip.ipython_dir = tmpdir.name
456 _ip.ipython_dir = tmpdir.name
448 nt.assert_raises(ImportError, _ip.magic, "load_ext daft_extension")
457 nt.assert_raises(ImportError, _ip.magic, "load_ext daft_extension")
449 url = os.path.join(os.path.dirname(__file__), "daft_extension.py")
458 url = os.path.join(os.path.dirname(__file__), "daft_extension.py")
450 _ip.magic("install_ext %s" % url)
459 _ip.magic("install_ext %s" % url)
451 _ip.user_ns.pop('arq', None)
460 _ip.user_ns.pop('arq', None)
452 _ip.magic("load_ext daft_extension")
461 _ip.magic("load_ext daft_extension")
453 tt.assert_equal(_ip.user_ns['arq'], 185)
462 tt.assert_equal(_ip.user_ns['arq'], 185)
454 _ip.magic("unload_ext daft_extension")
463 _ip.magic("unload_ext daft_extension")
455 assert 'arq' not in _ip.user_ns
464 assert 'arq' not in _ip.user_ns
456 finally:
465 finally:
457 _ip.ipython_dir = orig_ipython_dir
466 _ip.ipython_dir = orig_ipython_dir
458
467
459 def test_notebook_export_json():
468 def test_notebook_export_json():
460 with TemporaryDirectory() as td:
469 with TemporaryDirectory() as td:
461 outfile = os.path.join(td, "nb.ipynb")
470 outfile = os.path.join(td, "nb.ipynb")
462 _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
471 _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
463 _ip.magic("notebook -e %s" % outfile)
472 _ip.magic("notebook -e %s" % outfile)
464
473
465 def test_notebook_export_py():
474 def test_notebook_export_py():
466 with TemporaryDirectory() as td:
475 with TemporaryDirectory() as td:
467 outfile = os.path.join(td, "nb.py")
476 outfile = os.path.join(td, "nb.py")
468 _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
477 _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
469 _ip.magic("notebook -e %s" % outfile)
478 _ip.magic("notebook -e %s" % outfile)
470
479
471 def test_notebook_reformat_py():
480 def test_notebook_reformat_py():
472 with TemporaryDirectory() as td:
481 with TemporaryDirectory() as td:
473 infile = os.path.join(td, "nb.ipynb")
482 infile = os.path.join(td, "nb.ipynb")
474 with io.open(infile, 'w', encoding='utf-8') as f:
483 with io.open(infile, 'w', encoding='utf-8') as f:
475 current.write(nb0, f, 'json')
484 current.write(nb0, f, 'json')
476
485
477 _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
486 _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
478 _ip.magic("notebook -f py %s" % infile)
487 _ip.magic("notebook -f py %s" % infile)
479
488
480 def test_notebook_reformat_json():
489 def test_notebook_reformat_json():
481 with TemporaryDirectory() as td:
490 with TemporaryDirectory() as td:
482 infile = os.path.join(td, "nb.py")
491 infile = os.path.join(td, "nb.py")
483 with io.open(infile, 'w', encoding='utf-8') as f:
492 with io.open(infile, 'w', encoding='utf-8') as f:
484 current.write(nb0, f, 'py')
493 current.write(nb0, f, 'py')
485
494
486 _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
495 _ip.ex(py3compat.u_format(u"u = {u}'héllo'"))
487 _ip.magic("notebook -f ipynb %s" % infile)
496 _ip.magic("notebook -f ipynb %s" % infile)
488 _ip.magic("notebook -f json %s" % infile)
497 _ip.magic("notebook -f json %s" % infile)
489
498
490 def test_env():
499 def test_env():
491 env = _ip.magic("env")
500 env = _ip.magic("env")
492 assert isinstance(env, dict), type(env)
501 assert isinstance(env, dict), type(env)
493
502
494
503
495 class CellMagicTestCase(TestCase):
504 class CellMagicTestCase(TestCase):
496
505
497 def check_ident(self, magic):
506 def check_ident(self, magic):
498 # Manually called, we get the result
507 # Manually called, we get the result
499 out = _ip.run_cell_magic(magic, 'a', 'b')
508 out = _ip.run_cell_magic(magic, 'a', 'b')
500 nt.assert_equals(out, ('a','b'))
509 nt.assert_equals(out, ('a','b'))
501 # Via run_cell, it goes into the user's namespace via displayhook
510 # Via run_cell, it goes into the user's namespace via displayhook
502 _ip.run_cell('%%' + magic +' c\nd')
511 _ip.run_cell('%%' + magic +' c\nd')
503 nt.assert_equals(_ip.user_ns['_'], ('c','d'))
512 nt.assert_equals(_ip.user_ns['_'], ('c','d'))
504
513
505 def test_cell_magic_func_deco(self):
514 def test_cell_magic_func_deco(self):
506 "Cell magic using simple decorator"
515 "Cell magic using simple decorator"
507 @register_cell_magic
516 @register_cell_magic
508 def cellm(line, cell):
517 def cellm(line, cell):
509 return line, cell
518 return line, cell
510
519
511 self.check_ident('cellm')
520 self.check_ident('cellm')
512
521
513 def test_cell_magic_reg(self):
522 def test_cell_magic_reg(self):
514 "Cell magic manually registered"
523 "Cell magic manually registered"
515 def cellm(line, cell):
524 def cellm(line, cell):
516 return line, cell
525 return line, cell
517
526
518 _ip.register_magic_function(cellm, 'cell', 'cellm2')
527 _ip.register_magic_function(cellm, 'cell', 'cellm2')
519 self.check_ident('cellm2')
528 self.check_ident('cellm2')
520
529
521 def test_cell_magic_class(self):
530 def test_cell_magic_class(self):
522 "Cell magics declared via a class"
531 "Cell magics declared via a class"
523 @magics_class
532 @magics_class
524 class MyMagics(Magics):
533 class MyMagics(Magics):
525
534
526 @cell_magic
535 @cell_magic
527 def cellm3(self, line, cell):
536 def cellm3(self, line, cell):
528 return line, cell
537 return line, cell
529
538
530 _ip.register_magics(MyMagics)
539 _ip.register_magics(MyMagics)
531 self.check_ident('cellm3')
540 self.check_ident('cellm3')
532
541
533 def test_cell_magic_class2(self):
542 def test_cell_magic_class2(self):
534 "Cell magics declared via a class, #2"
543 "Cell magics declared via a class, #2"
535 @magics_class
544 @magics_class
536 class MyMagics2(Magics):
545 class MyMagics2(Magics):
537
546
538 @cell_magic('cellm4')
547 @cell_magic('cellm4')
539 def cellm33(self, line, cell):
548 def cellm33(self, line, cell):
540 return line, cell
549 return line, cell
541
550
542 _ip.register_magics(MyMagics2)
551 _ip.register_magics(MyMagics2)
543 self.check_ident('cellm4')
552 self.check_ident('cellm4')
544 # Check that nothing is registered as 'cellm33'
553 # Check that nothing is registered as 'cellm33'
545 c33 = _ip.find_cell_magic('cellm33')
554 c33 = _ip.find_cell_magic('cellm33')
546 nt.assert_equals(c33, None)
555 nt.assert_equals(c33, None)
547
556
@@ -1,316 +1,343 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 """
2 """
3 =============
3 =============
4 parallelmagic
4 parallelmagic
5 =============
5 =============
6
6
7 Magic command interface for interactive parallel work.
7 Magic command interface for interactive parallel work.
8
8
9 Usage
9 Usage
10 =====
10 =====
11
11
12 ``%autopx``
12 ``%autopx``
13
13
14 @AUTOPX_DOC@
14 {AUTOPX_DOC}
15
15
16 ``%px``
16 ``%px``
17
17
18 @PX_DOC@
18 {PX_DOC}
19
19
20 ``%result``
20 ``%result``
21
21
22 @RESULT_DOC@
22 {RESULT_DOC}
23
23
24 """
24 """
25
25
26 #-----------------------------------------------------------------------------
26 #-----------------------------------------------------------------------------
27 # Copyright (C) 2008 The IPython Development Team
27 # Copyright (C) 2008 The IPython Development Team
28 #
28 #
29 # Distributed under the terms of the BSD License. The full license is in
29 # Distributed under the terms of the BSD License. The full license is in
30 # the file COPYING, distributed as part of this software.
30 # the file COPYING, distributed as part of this software.
31 #-----------------------------------------------------------------------------
31 #-----------------------------------------------------------------------------
32
32
33 #-----------------------------------------------------------------------------
33 #-----------------------------------------------------------------------------
34 # Imports
34 # Imports
35 #-----------------------------------------------------------------------------
35 #-----------------------------------------------------------------------------
36
36
37 import ast
37 import ast
38 import re
38 import re
39
39
40 from IPython.core.magic import Magics, magics_class, line_magic
40 from IPython.core.error import UsageError
41 from IPython.core.magic import Magics, magics_class, line_magic, cell_magic
41 from IPython.testing.skipdoctest import skip_doctest
42 from IPython.testing.skipdoctest import skip_doctest
42
43
43 #-----------------------------------------------------------------------------
44 #-----------------------------------------------------------------------------
44 # Definitions of magic functions for use with IPython
45 # Definitions of magic functions for use with IPython
45 #-----------------------------------------------------------------------------
46 #-----------------------------------------------------------------------------
46
47
47 NO_ACTIVE_VIEW = """
48
48 Use activate() on a DirectView object to activate it for magics.
49 NO_ACTIVE_VIEW = "Use activate() on a DirectView object to use it with magics."
49 """
50
50
51
51
52 @magics_class
52 @magics_class
53 class ParallelMagics(Magics):
53 class ParallelMagics(Magics):
54 """A set of magics useful when controlling a parallel IPython cluster.
54 """A set of magics useful when controlling a parallel IPython cluster.
55 """
55 """
56
56
57 def __init__(self, shell):
58 super(ParallelMagics, self).__init__(shell)
59 # A flag showing if autopx is activated or not
57 # A flag showing if autopx is activated or not
60 self.autopx = False
58 _autopx = False
59 # the current view used by the magics:
60 active_view = None
61
61
62 @skip_doctest
62 @skip_doctest
63 @line_magic
63 @line_magic
64 def result(self, parameter_s=''):
64 def result(self, parameter_s=''):
65 """Print the result of command i on all engines..
65 """Print the result of command i on all engines.
66
66
67 To use this a :class:`DirectView` instance must be created
67 To use this a :class:`DirectView` instance must be created
68 and then activated by calling its :meth:`activate` method.
68 and then activated by calling its :meth:`activate` method.
69
69
70 This lets you recall the results of %px computations after
71 asynchronous submission (view.block=False).
72
70 Then you can do the following::
73 Then you can do the following::
71
74
72 In [23]: %result
75 In [23]: %px os.getpid()
73 Out[23]:
76 Async parallel execution on engine(s): all
74 <Results List>
77
75 [0] In [6]: a = 10
78 In [24]: %result
76 [1] In [6]: a = 10
79 [ 8] Out[10]: 60920
77
80 [ 9] Out[10]: 60921
78 In [22]: %result 6
81 [10] Out[10]: 60922
79 Out[22]:
82 [11] Out[10]: 60923
80 <Results List>
81 [0] In [6]: a = 10
82 [1] In [6]: a = 10
83 """
83 """
84
84 if self.active_view is None:
85 if self.active_view is None:
85 print NO_ACTIVE_VIEW
86 raise UsageError(NO_ACTIVE_VIEW)
86 return
87
87
88 stride = len(self.active_view)
88 try:
89 try:
89 index = int(parameter_s)
90 index = int(parameter_s)
90 except:
91 except:
91 index = None
92 index = -1
92 result = self.active_view.get_result(index)
93 msg_ids = self.active_view.history[stride * index:(stride * (index + 1)) or None]
93 return result
94
95 result = self.active_view.get_result(msg_ids)
96
97 result.get()
98 result.display_outputs()
94
99
95 @skip_doctest
100 @skip_doctest
96 @line_magic
101 @line_magic
97 def px(self, parameter_s=''):
102 def px(self, parameter_s=''):
98 """Executes the given python command in parallel.
103 """Executes the given python command in parallel.
99
104
100 To use this a :class:`DirectView` instance must be created
105 To use this a :class:`DirectView` instance must be created
101 and then activated by calling its :meth:`activate` method.
106 and then activated by calling its :meth:`activate` method.
102
107
103 Then you can do the following::
108 Then you can do the following::
104
109
105 In [24]: %px a = 5
110 In [24]: %px a = os.getpid()
106 Parallel execution on engine(s): all
111 Parallel execution on engine(s): all
107 Out[24]:
112
108 <Results List>
113 In [25]: %px print a
109 [0] In [7]: a = 5
114 [stdout:0] 1234
110 [1] In [7]: a = 5
115 [stdout:1] 1235
116 [stdout:2] 1236
117 [stdout:3] 1237
111 """
118 """
119 return self.parallel_execute(parameter_s)
120
121 def parallel_execute(self, cell, block=None, groupby='type'):
122 """implementation used by %px and %%parallel"""
112
123
113 if self.active_view is None:
124 if self.active_view is None:
114 print NO_ACTIVE_VIEW
125 raise UsageError(NO_ACTIVE_VIEW)
115 return
126
116 print "Parallel execution on engine(s): %s" % self.active_view.targets
127 # defaults:
117 result = self.active_view.execute(parameter_s, block=False)
128 block = self.active_view.block if block is None else block
118 if self.active_view.block:
129
130 base = "Parallel" if block else "Async parallel"
131 print base + " execution on engine(s): %s" % self.active_view.targets
132
133 result = self.active_view.execute(cell, silent=False, block=False)
134 if block:
119 result.get()
135 result.get()
120 self._maybe_display_output(result)
136 result.display_outputs(groupby)
137 else:
138 # return AsyncResult only on non-blocking submission
139 return result
140
141 @skip_doctest
142 @cell_magic('px')
143 def cell_px(self, line='', cell=None):
144 """Executes the given python command in parallel.
145
146 Cell magic usage:
147
148 %%px [-o] [-e] [--group-options=type|engine|order] [--[no]block]
149
150 Options (%%px cell magic only):
151
152 -o: collate outputs in oder (same as group-outputs=order)
153
154 -e: group outputs by engine (same as group-outputs=engine)
155
156 --group-outputs=type [default behavior]:
157 each output type (stdout, stderr, displaypub) for all engines
158 displayed together.
159
160 --group-outputs=order:
161 The same as 'type', but individual displaypub outputs (e.g. plots)
162 will be interleaved, so it will display all of the first plots,
163 then all of the second plots, etc.
164
165 --group-outputs=engine:
166 All of an engine's output is displayed before moving on to the next.
167
168 --[no]block:
169 Whether or not to block for the execution to complete
170 (and display the results). If unspecified, the active view's
171
172
173 To use this a :class:`DirectView` instance must be created
174 and then activated by calling its :meth:`activate` method.
175
176 Then you can do the following::
177
178 In [24]: %%parallel --noblock a = os.getpid()
179 Async parallel execution on engine(s): all
180
181 In [25]: %px print a
182 [stdout:0] 1234
183 [stdout:1] 1235
184 [stdout:2] 1236
185 [stdout:3] 1237
186 """
187
188 block = None
189 groupby = 'type'
190 # as a cell magic, we accept args
191 opts, _ = self.parse_options(line, 'oe', 'group-outputs=', 'block', 'noblock')
192
193 if 'group-outputs' in opts:
194 groupby = opts['group-outputs']
195 elif 'o' in opts:
196 groupby = 'order'
197 elif 'e' in opts:
198 groupby = 'engine'
199
200 if 'block' in opts:
201 block = True
202 elif 'noblock' in opts:
203 block = False
204
205 return self.parallel_execute(cell, block=block, groupby=groupby)
121
206
122 @skip_doctest
207 @skip_doctest
123 @line_magic
208 @line_magic
124 def autopx(self, parameter_s=''):
209 def autopx(self, parameter_s=''):
125 """Toggles auto parallel mode.
210 """Toggles auto parallel mode.
126
211
127 To use this a :class:`DirectView` instance must be created
212 To use this a :class:`DirectView` instance must be created
128 and then activated by calling its :meth:`activate` method. Once this
213 and then activated by calling its :meth:`activate` method. Once this
129 is called, all commands typed at the command line are send to
214 is called, all commands typed at the command line are send to
130 the engines to be executed in parallel. To control which engine
215 the engines to be executed in parallel. To control which engine
131 are used, set the ``targets`` attributed of the multiengine client
216 are used, set the ``targets`` attributed of the multiengine client
132 before entering ``%autopx`` mode.
217 before entering ``%autopx`` mode.
133
218
134 Then you can do the following::
219 Then you can do the following::
135
220
136 In [25]: %autopx
221 In [25]: %autopx
137 %autopx to enabled
222 %autopx to enabled
138
223
139 In [26]: a = 10
224 In [26]: a = 10
140 Parallel execution on engine(s): [0,1,2,3]
225 Parallel execution on engine(s): [0,1,2,3]
141 In [27]: print a
226 In [27]: print a
142 Parallel execution on engine(s): [0,1,2,3]
227 Parallel execution on engine(s): [0,1,2,3]
143 [stdout:0] 10
228 [stdout:0] 10
144 [stdout:1] 10
229 [stdout:1] 10
145 [stdout:2] 10
230 [stdout:2] 10
146 [stdout:3] 10
231 [stdout:3] 10
147
232
148
233
149 In [27]: %autopx
234 In [27]: %autopx
150 %autopx disabled
235 %autopx disabled
151 """
236 """
152 if self.autopx:
237 if self._autopx:
153 self._disable_autopx()
238 self._disable_autopx()
154 else:
239 else:
155 self._enable_autopx()
240 self._enable_autopx()
156
241
157 def _enable_autopx(self):
242 def _enable_autopx(self):
158 """Enable %autopx mode by saving the original run_cell and installing
243 """Enable %autopx mode by saving the original run_cell and installing
159 pxrun_cell.
244 pxrun_cell.
160 """
245 """
161 if self.active_view is None:
246 if self.active_view is None:
162 print NO_ACTIVE_VIEW
247 raise UsageError(NO_ACTIVE_VIEW)
163 return
164
248
165 # override run_cell and run_code
249 # override run_cell
166 self._original_run_cell = self.shell.run_cell
250 self._original_run_cell = self.shell.run_cell
167 self.shell.run_cell = self.pxrun_cell
251 self.shell.run_cell = self.pxrun_cell
168 self._original_run_code = self.shell.run_code
169 self.shell.run_code = self.pxrun_code
170
252
171 self.autopx = True
253 self._autopx = True
172 print "%autopx enabled"
254 print "%autopx enabled"
173
255
174 def _disable_autopx(self):
256 def _disable_autopx(self):
175 """Disable %autopx by restoring the original InteractiveShell.run_cell.
257 """Disable %autopx by restoring the original InteractiveShell.run_cell.
176 """
258 """
177 if self.autopx:
259 if self._autopx:
178 self.shell.run_cell = self._original_run_cell
260 self.shell.run_cell = self._original_run_cell
179 self.shell.run_code = self._original_run_code
261 self._autopx = False
180 self.autopx = False
181 print "%autopx disabled"
262 print "%autopx disabled"
182
263
183 def _maybe_display_output(self, result):
184 """Maybe display the output of a parallel result.
185
186 If self.active_view.block is True, wait for the result
187 and display the result. Otherwise, this is a noop.
188 """
189 if isinstance(result.stdout, basestring):
190 # single result
191 stdouts = [result.stdout.rstrip()]
192 else:
193 stdouts = [s.rstrip() for s in result.stdout]
194
195 targets = self.active_view.targets
196 if isinstance(targets, int):
197 targets = [targets]
198 elif targets == 'all':
199 targets = self.active_view.client.ids
200
201 if any(stdouts):
202 for eid,stdout in zip(targets, stdouts):
203 print '[stdout:%i]'%eid, stdout
204
205
206 def pxrun_cell(self, raw_cell, store_history=False, silent=False):
264 def pxrun_cell(self, raw_cell, store_history=False, silent=False):
207 """drop-in replacement for InteractiveShell.run_cell.
265 """drop-in replacement for InteractiveShell.run_cell.
208
266
209 This executes code remotely, instead of in the local namespace.
267 This executes code remotely, instead of in the local namespace.
210
268
211 See InteractiveShell.run_cell for details.
269 See InteractiveShell.run_cell for details.
212 """
270 """
213
271
214 if (not raw_cell) or raw_cell.isspace():
272 if (not raw_cell) or raw_cell.isspace():
215 return
273 return
216
274
217 ipself = self.shell
275 ipself = self.shell
218
276
219 with ipself.builtin_trap:
277 with ipself.builtin_trap:
220 cell = ipself.prefilter_manager.prefilter_lines(raw_cell)
278 cell = ipself.prefilter_manager.prefilter_lines(raw_cell)
221
279
222 # Store raw and processed history
280 # Store raw and processed history
223 if store_history:
281 if store_history:
224 ipself.history_manager.store_inputs(ipself.execution_count,
282 ipself.history_manager.store_inputs(ipself.execution_count,
225 cell, raw_cell)
283 cell, raw_cell)
226
284
227 # ipself.logger.log(cell, raw_cell)
285 # ipself.logger.log(cell, raw_cell)
228
286
229 cell_name = ipself.compile.cache(cell, ipself.execution_count)
287 cell_name = ipself.compile.cache(cell, ipself.execution_count)
230
288
231 try:
289 try:
232 ast.parse(cell, filename=cell_name)
290 ast.parse(cell, filename=cell_name)
233 except (OverflowError, SyntaxError, ValueError, TypeError,
291 except (OverflowError, SyntaxError, ValueError, TypeError,
234 MemoryError):
292 MemoryError):
235 # Case 1
293 # Case 1
236 ipself.showsyntaxerror()
294 ipself.showsyntaxerror()
237 ipself.execution_count += 1
295 ipself.execution_count += 1
238 return None
296 return None
239 except NameError:
297 except NameError:
240 # ignore name errors, because we don't know the remote keys
298 # ignore name errors, because we don't know the remote keys
241 pass
299 pass
242
300
243 if store_history:
301 if store_history:
244 # Write output to the database. Does nothing unless
302 # Write output to the database. Does nothing unless
245 # history output logging is enabled.
303 # history output logging is enabled.
246 ipself.history_manager.store_output(ipself.execution_count)
304 ipself.history_manager.store_output(ipself.execution_count)
247 # Each cell is a *single* input, regardless of how many lines it has
305 # Each cell is a *single* input, regardless of how many lines it has
248 ipself.execution_count += 1
306 ipself.execution_count += 1
249 if re.search(r'get_ipython\(\)\.magic\(u?["\']%?autopx', cell):
307 if re.search(r'get_ipython\(\)\.magic\(u?["\']%?autopx', cell):
250 self._disable_autopx()
308 self._disable_autopx()
251 return False
309 return False
252 else:
310 else:
253 try:
311 try:
254 result = self.active_view.execute(cell, silent=False, block=False)
312 result = self.active_view.execute(cell, silent=False, block=False)
255 except:
313 except:
256 ipself.showtraceback()
314 ipself.showtraceback()
257 return True
315 return True
258 else:
316 else:
259 if self.active_view.block:
317 if self.active_view.block:
260 try:
318 try:
261 result.get()
319 result.get()
262 except:
320 except:
263 self.shell.showtraceback()
321 self.shell.showtraceback()
264 return True
322 return True
265 else:
323 else:
266 self._maybe_display_output(result)
324 with ipself.builtin_trap:
267 return False
325 result.display_outputs()
268
269 def pxrun_code(self, code_obj):
270 """drop-in replacement for InteractiveShell.run_code.
271
272 This executes code remotely, instead of in the local namespace.
273
274 See InteractiveShell.run_code for details.
275 """
276 ipself = self.shell
277 # check code object for the autopx magic
278 if 'get_ipython' in code_obj.co_names and 'magic' in code_obj.co_names \
279 and any( [ isinstance(c, basestring) and 'autopx' in c
280 for c in code_obj.co_consts ]):
281 self._disable_autopx()
282 return False
283 else:
284 try:
285 result = self.active_view.execute(code_obj, block=False)
286 except:
287 ipself.showtraceback()
288 return True
289 else:
290 if self.active_view.block:
291 try:
292 result.get()
293 except:
294 self.shell.showtraceback()
295 return True
296 else:
297 self._maybe_display_output(result)
298 return False
326 return False
299
327
300
328
301 __doc__ = __doc__.replace('@AUTOPX_DOC@',
329 __doc__ = __doc__.format(
302 " " + ParallelMagics.autopx.__doc__)
330 AUTOPX_DOC = ' '*8 + ParallelMagics.autopx.__doc__,
303 __doc__ = __doc__.replace('@PX_DOC@',
331 PX_DOC = ' '*8 + ParallelMagics.px.__doc__,
304 " " + ParallelMagics.px.__doc__)
332 RESULT_DOC = ' '*8 + ParallelMagics.result.__doc__
305 __doc__ = __doc__.replace('@RESULT_DOC@',
333 )
306 " " + ParallelMagics.result.__doc__)
307
334
308 _loaded = False
335 _loaded = False
309
336
310
337
311 def load_ipython_extension(ip):
338 def load_ipython_extension(ip):
312 """Load the extension in IPython."""
339 """Load the extension in IPython."""
313 global _loaded
340 global _loaded
314 if not _loaded:
341 if not _loaded:
315 ip.register_magics(ParallelMagics)
342 ip.register_magics(ParallelMagics)
316 _loaded = True
343 _loaded = True
@@ -1,517 +1,651 b''
1 """AsyncResult objects for the client
1 """AsyncResult objects for the client
2
2
3 Authors:
3 Authors:
4
4
5 * MinRK
5 * MinRK
6 """
6 """
7 #-----------------------------------------------------------------------------
7 #-----------------------------------------------------------------------------
8 # Copyright (C) 2010-2011 The IPython Development Team
8 # Copyright (C) 2010-2011 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13
13
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17
17
18 import sys
18 import sys
19 import time
19 import time
20 from datetime import datetime
20 from datetime import datetime
21
21
22 from zmq import MessageTracker
22 from zmq import MessageTracker
23
23
24 from IPython.core.display import clear_output
24 from IPython.core.display import clear_output, display
25 from IPython.external.decorator import decorator
25 from IPython.external.decorator import decorator
26 from IPython.parallel import error
26 from IPython.parallel import error
27
27
28 #-----------------------------------------------------------------------------
28 #-----------------------------------------------------------------------------
29 # Functions
29 # Functions
30 #-----------------------------------------------------------------------------
30 #-----------------------------------------------------------------------------
31
31
32 def _total_seconds(td):
32 def _total_seconds(td):
33 """timedelta.total_seconds was added in 2.7"""
33 """timedelta.total_seconds was added in 2.7"""
34 try:
34 try:
35 # Python >= 2.7
35 # Python >= 2.7
36 return td.total_seconds()
36 return td.total_seconds()
37 except AttributeError:
37 except AttributeError:
38 # Python 2.6
38 # Python 2.6
39 return 1e-6 * (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6)
39 return 1e-6 * (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6)
40
40
41 #-----------------------------------------------------------------------------
41 #-----------------------------------------------------------------------------
42 # Classes
42 # Classes
43 #-----------------------------------------------------------------------------
43 #-----------------------------------------------------------------------------
44
44
45 # global empty tracker that's always done:
45 # global empty tracker that's always done:
46 finished_tracker = MessageTracker()
46 finished_tracker = MessageTracker()
47
47
48 @decorator
48 @decorator
49 def check_ready(f, self, *args, **kwargs):
49 def check_ready(f, self, *args, **kwargs):
50 """Call spin() to sync state prior to calling the method."""
50 """Call spin() to sync state prior to calling the method."""
51 self.wait(0)
51 self.wait(0)
52 if not self._ready:
52 if not self._ready:
53 raise error.TimeoutError("result not ready")
53 raise error.TimeoutError("result not ready")
54 return f(self, *args, **kwargs)
54 return f(self, *args, **kwargs)
55
55
56 class AsyncResult(object):
56 class AsyncResult(object):
57 """Class for representing results of non-blocking calls.
57 """Class for representing results of non-blocking calls.
58
58
59 Provides the same interface as :py:class:`multiprocessing.pool.AsyncResult`.
59 Provides the same interface as :py:class:`multiprocessing.pool.AsyncResult`.
60 """
60 """
61
61
62 msg_ids = None
62 msg_ids = None
63 _targets = None
63 _targets = None
64 _tracker = None
64 _tracker = None
65 _single_result = False
65 _single_result = False
66
66
67 def __init__(self, client, msg_ids, fname='unknown', targets=None, tracker=None):
67 def __init__(self, client, msg_ids, fname='unknown', targets=None, tracker=None):
68 if isinstance(msg_ids, basestring):
68 if isinstance(msg_ids, basestring):
69 # always a list
69 # always a list
70 msg_ids = [msg_ids]
70 msg_ids = [msg_ids]
71 if tracker is None:
71 if tracker is None:
72 # default to always done
72 # default to always done
73 tracker = finished_tracker
73 tracker = finished_tracker
74 self._client = client
74 self._client = client
75 self.msg_ids = msg_ids
75 self.msg_ids = msg_ids
76 self._fname=fname
76 self._fname=fname
77 self._targets = targets
77 self._targets = targets
78 self._tracker = tracker
78 self._tracker = tracker
79 self._ready = False
79 self._ready = False
80 self._success = None
80 self._success = None
81 self._metadata = None
81 self._metadata = None
82 if len(msg_ids) == 1:
82 if len(msg_ids) == 1:
83 self._single_result = not isinstance(targets, (list, tuple))
83 self._single_result = not isinstance(targets, (list, tuple))
84 else:
84 else:
85 self._single_result = False
85 self._single_result = False
86
86
87 def __repr__(self):
87 def __repr__(self):
88 if self._ready:
88 if self._ready:
89 return "<%s: finished>"%(self.__class__.__name__)
89 return "<%s: finished>"%(self.__class__.__name__)
90 else:
90 else:
91 return "<%s: %s>"%(self.__class__.__name__,self._fname)
91 return "<%s: %s>"%(self.__class__.__name__,self._fname)
92
92
93
93
94 def _reconstruct_result(self, res):
94 def _reconstruct_result(self, res):
95 """Reconstruct our result from actual result list (always a list)
95 """Reconstruct our result from actual result list (always a list)
96
96
97 Override me in subclasses for turning a list of results
97 Override me in subclasses for turning a list of results
98 into the expected form.
98 into the expected form.
99 """
99 """
100 if self._single_result:
100 if self._single_result:
101 return res[0]
101 return res[0]
102 else:
102 else:
103 return res
103 return res
104
104
105 def get(self, timeout=-1):
105 def get(self, timeout=-1):
106 """Return the result when it arrives.
106 """Return the result when it arrives.
107
107
108 If `timeout` is not ``None`` and the result does not arrive within
108 If `timeout` is not ``None`` and the result does not arrive within
109 `timeout` seconds then ``TimeoutError`` is raised. If the
109 `timeout` seconds then ``TimeoutError`` is raised. If the
110 remote call raised an exception then that exception will be reraised
110 remote call raised an exception then that exception will be reraised
111 by get() inside a `RemoteError`.
111 by get() inside a `RemoteError`.
112 """
112 """
113 if not self.ready():
113 if not self.ready():
114 self.wait(timeout)
114 self.wait(timeout)
115
115
116 if self._ready:
116 if self._ready:
117 if self._success:
117 if self._success:
118 return self._result
118 return self._result
119 else:
119 else:
120 raise self._exception
120 raise self._exception
121 else:
121 else:
122 raise error.TimeoutError("Result not ready.")
122 raise error.TimeoutError("Result not ready.")
123
123
124 def ready(self):
124 def ready(self):
125 """Return whether the call has completed."""
125 """Return whether the call has completed."""
126 if not self._ready:
126 if not self._ready:
127 self.wait(0)
127 self.wait(0)
128 return self._ready
128 return self._ready
129
129
130 def wait(self, timeout=-1):
130 def wait(self, timeout=-1):
131 """Wait until the result is available or until `timeout` seconds pass.
131 """Wait until the result is available or until `timeout` seconds pass.
132
132
133 This method always returns None.
133 This method always returns None.
134 """
134 """
135 if self._ready:
135 if self._ready:
136 return
136 return
137 self._ready = self._client.wait(self.msg_ids, timeout)
137 self._ready = self._client.wait(self.msg_ids, timeout)
138 if self._ready:
138 if self._ready:
139 try:
139 try:
140 results = map(self._client.results.get, self.msg_ids)
140 results = map(self._client.results.get, self.msg_ids)
141 self._result = results
141 self._result = results
142 if self._single_result:
142 if self._single_result:
143 r = results[0]
143 r = results[0]
144 if isinstance(r, Exception):
144 if isinstance(r, Exception):
145 raise r
145 raise r
146 else:
146 else:
147 results = error.collect_exceptions(results, self._fname)
147 results = error.collect_exceptions(results, self._fname)
148 self._result = self._reconstruct_result(results)
148 self._result = self._reconstruct_result(results)
149 except Exception, e:
149 except Exception, e:
150 self._exception = e
150 self._exception = e
151 self._success = False
151 self._success = False
152 else:
152 else:
153 self._success = True
153 self._success = True
154 finally:
154 finally:
155 self._metadata = map(self._client.metadata.get, self.msg_ids)
155 self._metadata = map(self._client.metadata.get, self.msg_ids)
156
156
157
157
158 def successful(self):
158 def successful(self):
159 """Return whether the call completed without raising an exception.
159 """Return whether the call completed without raising an exception.
160
160
161 Will raise ``AssertionError`` if the result is not ready.
161 Will raise ``AssertionError`` if the result is not ready.
162 """
162 """
163 assert self.ready()
163 assert self.ready()
164 return self._success
164 return self._success
165
165
166 #----------------------------------------------------------------
166 #----------------------------------------------------------------
167 # Extra methods not in mp.pool.AsyncResult
167 # Extra methods not in mp.pool.AsyncResult
168 #----------------------------------------------------------------
168 #----------------------------------------------------------------
169
169
170 def get_dict(self, timeout=-1):
170 def get_dict(self, timeout=-1):
171 """Get the results as a dict, keyed by engine_id.
171 """Get the results as a dict, keyed by engine_id.
172
172
173 timeout behavior is described in `get()`.
173 timeout behavior is described in `get()`.
174 """
174 """
175
175
176 results = self.get(timeout)
176 results = self.get(timeout)
177 engine_ids = [ md['engine_id'] for md in self._metadata ]
177 engine_ids = [ md['engine_id'] for md in self._metadata ]
178 bycount = sorted(engine_ids, key=lambda k: engine_ids.count(k))
178 bycount = sorted(engine_ids, key=lambda k: engine_ids.count(k))
179 maxcount = bycount.count(bycount[-1])
179 maxcount = bycount.count(bycount[-1])
180 if maxcount > 1:
180 if maxcount > 1:
181 raise ValueError("Cannot build dict, %i jobs ran on engine #%i"%(
181 raise ValueError("Cannot build dict, %i jobs ran on engine #%i"%(
182 maxcount, bycount[-1]))
182 maxcount, bycount[-1]))
183
183
184 return dict(zip(engine_ids,results))
184 return dict(zip(engine_ids,results))
185
185
186 @property
186 @property
187 def result(self):
187 def result(self):
188 """result property wrapper for `get(timeout=0)`."""
188 """result property wrapper for `get(timeout=0)`."""
189 return self.get()
189 return self.get()
190
190
191 # abbreviated alias:
191 # abbreviated alias:
192 r = result
192 r = result
193
193
194 @property
194 @property
195 @check_ready
195 @check_ready
196 def metadata(self):
196 def metadata(self):
197 """property for accessing execution metadata."""
197 """property for accessing execution metadata."""
198 if self._single_result:
198 if self._single_result:
199 return self._metadata[0]
199 return self._metadata[0]
200 else:
200 else:
201 return self._metadata
201 return self._metadata
202
202
203 @property
203 @property
204 def result_dict(self):
204 def result_dict(self):
205 """result property as a dict."""
205 """result property as a dict."""
206 return self.get_dict()
206 return self.get_dict()
207
207
208 def __dict__(self):
208 def __dict__(self):
209 return self.get_dict(0)
209 return self.get_dict(0)
210
210
211 def abort(self):
211 def abort(self):
212 """abort my tasks."""
212 """abort my tasks."""
213 assert not self.ready(), "Can't abort, I am already done!"
213 assert not self.ready(), "Can't abort, I am already done!"
214 return self._client.abort(self.msg_ids, targets=self._targets, block=True)
214 return self._client.abort(self.msg_ids, targets=self._targets, block=True)
215
215
216 @property
216 @property
217 def sent(self):
217 def sent(self):
218 """check whether my messages have been sent."""
218 """check whether my messages have been sent."""
219 return self._tracker.done
219 return self._tracker.done
220
220
221 def wait_for_send(self, timeout=-1):
221 def wait_for_send(self, timeout=-1):
222 """wait for pyzmq send to complete.
222 """wait for pyzmq send to complete.
223
223
224 This is necessary when sending arrays that you intend to edit in-place.
224 This is necessary when sending arrays that you intend to edit in-place.
225 `timeout` is in seconds, and will raise TimeoutError if it is reached
225 `timeout` is in seconds, and will raise TimeoutError if it is reached
226 before the send completes.
226 before the send completes.
227 """
227 """
228 return self._tracker.wait(timeout)
228 return self._tracker.wait(timeout)
229
229
230 #-------------------------------------
230 #-------------------------------------
231 # dict-access
231 # dict-access
232 #-------------------------------------
232 #-------------------------------------
233
233
234 @check_ready
234 @check_ready
235 def __getitem__(self, key):
235 def __getitem__(self, key):
236 """getitem returns result value(s) if keyed by int/slice, or metadata if key is str.
236 """getitem returns result value(s) if keyed by int/slice, or metadata if key is str.
237 """
237 """
238 if isinstance(key, int):
238 if isinstance(key, int):
239 return error.collect_exceptions([self._result[key]], self._fname)[0]
239 return error.collect_exceptions([self._result[key]], self._fname)[0]
240 elif isinstance(key, slice):
240 elif isinstance(key, slice):
241 return error.collect_exceptions(self._result[key], self._fname)
241 return error.collect_exceptions(self._result[key], self._fname)
242 elif isinstance(key, basestring):
242 elif isinstance(key, basestring):
243 values = [ md[key] for md in self._metadata ]
243 values = [ md[key] for md in self._metadata ]
244 if self._single_result:
244 if self._single_result:
245 return values[0]
245 return values[0]
246 else:
246 else:
247 return values
247 return values
248 else:
248 else:
249 raise TypeError("Invalid key type %r, must be 'int','slice', or 'str'"%type(key))
249 raise TypeError("Invalid key type %r, must be 'int','slice', or 'str'"%type(key))
250
250
251 def __getattr__(self, key):
251 def __getattr__(self, key):
252 """getattr maps to getitem for convenient attr access to metadata."""
252 """getattr maps to getitem for convenient attr access to metadata."""
253 try:
253 try:
254 return self.__getitem__(key)
254 return self.__getitem__(key)
255 except (error.TimeoutError, KeyError):
255 except (error.TimeoutError, KeyError):
256 raise AttributeError("%r object has no attribute %r"%(
256 raise AttributeError("%r object has no attribute %r"%(
257 self.__class__.__name__, key))
257 self.__class__.__name__, key))
258
258
259 # asynchronous iterator:
259 # asynchronous iterator:
260 def __iter__(self):
260 def __iter__(self):
261 if self._single_result:
261 if self._single_result:
262 raise TypeError("AsyncResults with a single result are not iterable.")
262 raise TypeError("AsyncResults with a single result are not iterable.")
263 try:
263 try:
264 rlist = self.get(0)
264 rlist = self.get(0)
265 except error.TimeoutError:
265 except error.TimeoutError:
266 # wait for each result individually
266 # wait for each result individually
267 for msg_id in self.msg_ids:
267 for msg_id in self.msg_ids:
268 ar = AsyncResult(self._client, msg_id, self._fname)
268 ar = AsyncResult(self._client, msg_id, self._fname)
269 yield ar.get()
269 yield ar.get()
270 else:
270 else:
271 # already done
271 # already done
272 for r in rlist:
272 for r in rlist:
273 yield r
273 yield r
274
274
275 def __len__(self):
275 def __len__(self):
276 return len(self.msg_ids)
276 return len(self.msg_ids)
277
277
278 #-------------------------------------
278 #-------------------------------------
279 # Sugar methods and attributes
279 # Sugar methods and attributes
280 #-------------------------------------
280 #-------------------------------------
281
281
282 def timedelta(self, start, end, start_key=min, end_key=max):
282 def timedelta(self, start, end, start_key=min, end_key=max):
283 """compute the difference between two sets of timestamps
283 """compute the difference between two sets of timestamps
284
284
285 The default behavior is to use the earliest of the first
285 The default behavior is to use the earliest of the first
286 and the latest of the second list, but this can be changed
286 and the latest of the second list, but this can be changed
287 by passing a different
287 by passing a different
288
288
289 Parameters
289 Parameters
290 ----------
290 ----------
291
291
292 start : one or more datetime objects (e.g. ar.submitted)
292 start : one or more datetime objects (e.g. ar.submitted)
293 end : one or more datetime objects (e.g. ar.received)
293 end : one or more datetime objects (e.g. ar.received)
294 start_key : callable
294 start_key : callable
295 Function to call on `start` to extract the relevant
295 Function to call on `start` to extract the relevant
296 entry [defalt: min]
296 entry [defalt: min]
297 end_key : callable
297 end_key : callable
298 Function to call on `end` to extract the relevant
298 Function to call on `end` to extract the relevant
299 entry [default: max]
299 entry [default: max]
300
300
301 Returns
301 Returns
302 -------
302 -------
303
303
304 dt : float
304 dt : float
305 The time elapsed (in seconds) between the two selected timestamps.
305 The time elapsed (in seconds) between the two selected timestamps.
306 """
306 """
307 if not isinstance(start, datetime):
307 if not isinstance(start, datetime):
308 # handle single_result AsyncResults, where ar.stamp is single object,
308 # handle single_result AsyncResults, where ar.stamp is single object,
309 # not a list
309 # not a list
310 start = start_key(start)
310 start = start_key(start)
311 if not isinstance(end, datetime):
311 if not isinstance(end, datetime):
312 # handle single_result AsyncResults, where ar.stamp is single object,
312 # handle single_result AsyncResults, where ar.stamp is single object,
313 # not a list
313 # not a list
314 end = end_key(end)
314 end = end_key(end)
315 return _total_seconds(end - start)
315 return _total_seconds(end - start)
316
316
317 @property
317 @property
318 def progress(self):
318 def progress(self):
319 """the number of tasks which have been completed at this point.
319 """the number of tasks which have been completed at this point.
320
320
321 Fractional progress would be given by 1.0 * ar.progress / len(ar)
321 Fractional progress would be given by 1.0 * ar.progress / len(ar)
322 """
322 """
323 self.wait(0)
323 self.wait(0)
324 return len(self) - len(set(self.msg_ids).intersection(self._client.outstanding))
324 return len(self) - len(set(self.msg_ids).intersection(self._client.outstanding))
325
325
326 @property
326 @property
327 def elapsed(self):
327 def elapsed(self):
328 """elapsed time since initial submission"""
328 """elapsed time since initial submission"""
329 if self.ready():
329 if self.ready():
330 return self.wall_time
330 return self.wall_time
331
331
332 now = submitted = datetime.now()
332 now = submitted = datetime.now()
333 for msg_id in self.msg_ids:
333 for msg_id in self.msg_ids:
334 if msg_id in self._client.metadata:
334 if msg_id in self._client.metadata:
335 stamp = self._client.metadata[msg_id]['submitted']
335 stamp = self._client.metadata[msg_id]['submitted']
336 if stamp and stamp < submitted:
336 if stamp and stamp < submitted:
337 submitted = stamp
337 submitted = stamp
338 return _total_seconds(now-submitted)
338 return _total_seconds(now-submitted)
339
339
340 @property
340 @property
341 @check_ready
341 @check_ready
342 def serial_time(self):
342 def serial_time(self):
343 """serial computation time of a parallel calculation
343 """serial computation time of a parallel calculation
344
344
345 Computed as the sum of (completed-started) of each task
345 Computed as the sum of (completed-started) of each task
346 """
346 """
347 t = 0
347 t = 0
348 for md in self._metadata:
348 for md in self._metadata:
349 t += _total_seconds(md['completed'] - md['started'])
349 t += _total_seconds(md['completed'] - md['started'])
350 return t
350 return t
351
351
352 @property
352 @property
353 @check_ready
353 @check_ready
354 def wall_time(self):
354 def wall_time(self):
355 """actual computation time of a parallel calculation
355 """actual computation time of a parallel calculation
356
356
357 Computed as the time between the latest `received` stamp
357 Computed as the time between the latest `received` stamp
358 and the earliest `submitted`.
358 and the earliest `submitted`.
359
359
360 Only reliable if Client was spinning/waiting when the task finished, because
360 Only reliable if Client was spinning/waiting when the task finished, because
361 the `received` timestamp is created when a result is pulled off of the zmq queue,
361 the `received` timestamp is created when a result is pulled off of the zmq queue,
362 which happens as a result of `client.spin()`.
362 which happens as a result of `client.spin()`.
363
363
364 For similar comparison of other timestamp pairs, check out AsyncResult.timedelta.
364 For similar comparison of other timestamp pairs, check out AsyncResult.timedelta.
365
365
366 """
366 """
367 return self.timedelta(self.submitted, self.received)
367 return self.timedelta(self.submitted, self.received)
368
368
369 def wait_interactive(self, interval=1., timeout=None):
369 def wait_interactive(self, interval=1., timeout=None):
370 """interactive wait, printing progress at regular intervals"""
370 """interactive wait, printing progress at regular intervals"""
371 N = len(self)
371 N = len(self)
372 tic = time.time()
372 tic = time.time()
373 while not self.ready() and (timeout is None or time.time() - tic <= timeout):
373 while not self.ready() and (timeout is None or time.time() - tic <= timeout):
374 self.wait(interval)
374 self.wait(interval)
375 clear_output()
375 clear_output()
376 print "%4i/%i tasks finished after %4i s" % (self.progress, N, self.elapsed),
376 print "%4i/%i tasks finished after %4i s" % (self.progress, N, self.elapsed),
377 sys.stdout.flush()
377 sys.stdout.flush()
378 print
378 print
379 print "done"
379 print "done"
380
380
381 def _republish_displaypub(self, content, eid):
382 """republish individual displaypub content dicts"""
383 try:
384 ip = get_ipython()
385 except NameError:
386 # displaypub is meaningless outside IPython
387 return
388 md = content['metadata'] or {}
389 md['engine'] = eid
390 ip.display_pub.publish(content['source'], content['data'], md)
391
392
393 def _display_single_result(self):
394
395 print self.stdout
396 print >> sys.stderr, self.stderr
397
398 try:
399 get_ipython()
400 except NameError:
401 # displaypub is meaningless outside IPython
402 return
403
404 for output in self.outputs:
405 self._republish_displaypub(output, self.engine_id)
406
407 if self.pyout is not None:
408 display(self.get())
409
410 @check_ready
411 def display_outputs(self, groupby="type"):
412 """republish the outputs of the computation
413
414 Parameters
415 ----------
416
417 groupby : str [default: type]
418 if 'type':
419 Group outputs by type (show all stdout, then all stderr, etc.):
420
421 [stdout:1] foo
422 [stdout:2] foo
423 [stderr:1] bar
424 [stderr:2] bar
425 if 'engine':
426 Display outputs for each engine before moving on to the next:
427
428 [stdout:1] foo
429 [stderr:1] bar
430 [stdout:2] foo
431 [stderr:2] bar
432
433 if 'order':
434 Like 'type', but further collate individual displaypub
435 outputs. This is meant for cases of each command producing
436 several plots, and you would like to see all of the first
437 plots together, then all of the second plots, and so on.
438 """
439 # flush iopub, just in case
440 self._client._flush_iopub(self._client._iopub_socket)
441 if self._single_result:
442 self._display_single_result()
443 return
444
445 stdouts = [s.rstrip() for s in self.stdout]
446 stderrs = [s.rstrip() for s in self.stderr]
447 pyouts = [p for p in self.pyout]
448 output_lists = self.outputs
449 results = self.get()
450
451 targets = self.engine_id
452
453 if groupby == "engine":
454 for eid,stdout,stderr,outputs,r,pyout in zip(
455 targets, stdouts, stderrs, output_lists, results, pyouts
456 ):
457 if stdout:
458 print '[stdout:%i]' % eid, stdout
459 if stderr:
460 print >> sys.stderr, '[stderr:%i]' % eid, stderr
461
462 try:
463 get_ipython()
464 except NameError:
465 # displaypub is meaningless outside IPython
466 return
467
468 for output in outputs:
469 self._republish_displaypub(output, eid)
470
471 if pyout is not None:
472 display(r)
473
474 elif groupby in ('type', 'order'):
475 # republish stdout:
476 if any(stdouts):
477 for eid,stdout in zip(targets, stdouts):
478 print '[stdout:%i]' % eid, stdout
479
480 # republish stderr:
481 if any(stderrs):
482 for eid,stderr in zip(targets, stderrs):
483 print >> sys.stderr, '[stderr:%i]' % eid, stderr
484
485 try:
486 get_ipython()
487 except NameError:
488 # displaypub is meaningless outside IPython
489 return
490
491 if groupby == 'order':
492 output_dict = dict((eid, outputs) for eid,outputs in zip(targets, output_lists))
493 N = max(len(outputs) for outputs in output_lists)
494 for i in range(N):
495 for eid in targets:
496 outputs = output_dict[eid]
497 if len(outputs) >= N:
498 self._republish_displaypub(outputs[i], eid)
499 else:
500 # republish displaypub output
501 for eid,outputs in zip(targets, output_lists):
502 for output in outputs:
503 self._republish_displaypub(output, eid)
504
505 # finally, add pyout:
506 for eid,r,pyout in zip(targets, results, pyouts):
507 if pyout is not None:
508 display(r)
509
510 else:
511 raise ValueError("groupby must be one of 'type', 'engine', 'collate', not %r" % groupby)
512
513
514
381
515
class AsyncMapResult(AsyncResult):
    """Class for representing results of non-blocking gathers.

    This will properly reconstruct the gather.

    This class is iterable at any time, and will wait on results as they come.

    If ordered=False, then the first results to arrive will come first, otherwise
    results will be yielded in the order they were submitted.
    """

    def __init__(self, client, msg_ids, mapObject, fname='', ordered=True):
        AsyncResult.__init__(self, client, msg_ids, fname=fname)
        self._mapObject = mapObject
        self._single_result = False
        self.ordered = ordered

    def _reconstruct_result(self, res):
        """Perform the gather on the actual results."""
        return self._mapObject.joinPartitions(res)

    def __iter__(self):
        """Asynchronous iterator: yields results as they become available."""
        if self.ordered:
            source = self._ordered_iter()
        else:
            source = self._unordered_iter()
        for item in source:
            yield item

    def _yield_chunk(self, chunk):
        """Yield the elements of one message's result.

        A chunk is normally a list of elements, but with a flattened map it
        can be a single bare element; yield it whole in that case.  (This
        could get broken by flattened data that returns iterables, but most
        calls to map do not expose the `flatten` argument.)
        """
        try:
            for element in chunk:
                yield element
        except TypeError:
            # flattened, not a list
            yield chunk

    def _ordered_iter(self):
        """iterator for results *as they arrive*, preserving submission order."""
        try:
            rlist = self.get(0)
        except error.TimeoutError:
            # not all finished yet: block on each submitted message in turn
            for msg_id in self.msg_ids:
                ar = AsyncResult(self._client, msg_id, self._fname)
                chunk = ar.get()
                for element in self._yield_chunk(chunk):
                    yield element
        else:
            # already done
            for element in rlist:
                yield element

    def _unordered_iter(self):
        """iterator for results *as they arrive*, on FCFS basis, ignoring submission order."""
        try:
            rlist = self.get(0)
        except error.TimeoutError:
            pending = set(self.msg_ids)
            while pending:
                try:
                    self._client.wait(pending, 1e-3)
                except error.TimeoutError:
                    # ignore timeout error, because that only means
                    # *some* jobs are outstanding
                    pass
                # anything no longer outstanding has finished:
                finished = pending.difference(self._client.outstanding)
                pending = pending.difference(finished)
                while finished:
                    msg_id = finished.pop()
                    ar = AsyncResult(self._client, msg_id, self._fname)
                    chunk = ar.get()
                    for element in self._yield_chunk(chunk):
                        yield element
        else:
            # already done
            for element in rlist:
                yield element
467
601
468
602
469
603
class AsyncHubResult(AsyncResult):
    """Class to wrap pending results that must be requested from the Hub.

    Note that waiting/polling on these objects requires polling the Hub
    over the network, so use `AsyncHubResult.wait()` sparingly.
    """

    def wait(self, timeout=-1):
        """wait for result to complete."""
        start = time.time()
        if self._ready:
            return
        # first wait locally for whatever our client is still tracking
        local_ids = [m for m in self.msg_ids if m in self._client.outstanding]
        local_ready = self._client.wait(local_ids, timeout)
        if local_ready:
            # anything not in the local results cache must be asked of the Hub
            remote_ids = [m for m in self.msg_ids if m not in self._client.results]
            if not remote_ids:
                self._ready = True
            else:
                # poll the Hub until everything is done or the timeout expires
                rdict = self._client.result_status(remote_ids, status_only=False)
                pending = rdict['pending']
                while pending and (timeout < 0 or time.time() < start + timeout):
                    rdict = self._client.result_status(remote_ids, status_only=False)
                    pending = rdict['pending']
                    if pending:
                        time.sleep(0.1)
                if not pending:
                    self._ready = True
        if self._ready:
            try:
                results = [self._client.results.get(m) for m in self.msg_ids]
                self._result = results
                if self._single_result:
                    r = results[0]
                    if isinstance(r, Exception):
                        raise r
                else:
                    results = error.collect_exceptions(results, self._fname)
                    self._result = self._reconstruct_result(results)
            except Exception as e:
                self._exception = e
                self._success = False
            else:
                self._success = True
            finally:
                # metadata is recorded whether the gather succeeded or not
                self._metadata = [self._client.metadata.get(m) for m in self.msg_ids]
516
650
# public API of this module
__all__ = ['AsyncResult', 'AsyncMapResult', 'AsyncHubResult']
@@ -1,1628 +1,1655 b''
1 """A semi-synchronous Client for the ZMQ cluster
1 """A semi-synchronous Client for the ZMQ cluster
2
2
3 Authors:
3 Authors:
4
4
5 * MinRK
5 * MinRK
6 """
6 """
7 #-----------------------------------------------------------------------------
7 #-----------------------------------------------------------------------------
8 # Copyright (C) 2010-2011 The IPython Development Team
8 # Copyright (C) 2010-2011 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13
13
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17
17
18 import os
18 import os
19 import json
19 import json
20 import sys
20 import sys
21 from threading import Thread, Event
21 from threading import Thread, Event
22 import time
22 import time
23 import warnings
23 import warnings
24 from datetime import datetime
24 from datetime import datetime
25 from getpass import getpass
25 from getpass import getpass
26 from pprint import pprint
26 from pprint import pprint
27
27
28 pjoin = os.path.join
28 pjoin = os.path.join
29
29
30 import zmq
30 import zmq
31 # from zmq.eventloop import ioloop, zmqstream
31 # from zmq.eventloop import ioloop, zmqstream
32
32
33 from IPython.config.configurable import MultipleInstanceError
33 from IPython.config.configurable import MultipleInstanceError
34 from IPython.core.application import BaseIPythonApplication
34 from IPython.core.application import BaseIPythonApplication
35
35
36 from IPython.utils.coloransi import TermColors
36 from IPython.utils.jsonutil import rekey
37 from IPython.utils.jsonutil import rekey
37 from IPython.utils.localinterfaces import LOCAL_IPS
38 from IPython.utils.localinterfaces import LOCAL_IPS
38 from IPython.utils.path import get_ipython_dir
39 from IPython.utils.path import get_ipython_dir
39 from IPython.utils.py3compat import cast_bytes
40 from IPython.utils.py3compat import cast_bytes
40 from IPython.utils.traitlets import (HasTraits, Integer, Instance, Unicode,
41 from IPython.utils.traitlets import (HasTraits, Integer, Instance, Unicode,
41 Dict, List, Bool, Set, Any)
42 Dict, List, Bool, Set, Any)
42 from IPython.external.decorator import decorator
43 from IPython.external.decorator import decorator
43 from IPython.external.ssh import tunnel
44 from IPython.external.ssh import tunnel
44
45
45 from IPython.parallel import Reference
46 from IPython.parallel import Reference
46 from IPython.parallel import error
47 from IPython.parallel import error
47 from IPython.parallel import util
48 from IPython.parallel import util
48
49
49 from IPython.zmq.session import Session, Message
50 from IPython.zmq.session import Session, Message
50
51
51 from .asyncresult import AsyncResult, AsyncHubResult
52 from .asyncresult import AsyncResult, AsyncHubResult
52 from IPython.core.profiledir import ProfileDir, ProfileDirError
53 from IPython.core.profiledir import ProfileDir, ProfileDirError
53 from .view import DirectView, LoadBalancedView
54 from .view import DirectView, LoadBalancedView
54
55
55 if sys.version_info[0] >= 3:
56 if sys.version_info[0] >= 3:
56 # xrange is used in a couple 'isinstance' tests in py2
57 # xrange is used in a couple 'isinstance' tests in py2
57 # should be just 'range' in 3k
58 # should be just 'range' in 3k
58 xrange = range
59 xrange = range
59
60
60 #--------------------------------------------------------------------------
61 #--------------------------------------------------------------------------
61 # Decorators for Client methods
62 # Decorators for Client methods
62 #--------------------------------------------------------------------------
63 #--------------------------------------------------------------------------
63
64
@decorator
def spin_first(f, self, *args, **kwargs):
    """Decorator: call spin() to sync client state before invoking `f`.

    Ensures incoming results and registration changes have been flushed
    prior to running the wrapped method.
    """
    self.spin()
    return f(self, *args, **kwargs)
69
70
70
71
71 #--------------------------------------------------------------------------
72 #--------------------------------------------------------------------------
72 # Classes
73 # Classes
73 #--------------------------------------------------------------------------
74 #--------------------------------------------------------------------------
74
75
75
76
class ExecuteReply(object):
    """Wrapper for finished Execute results.

    Dict-style (``reply[key]``) and attribute-style (``reply.key``) access
    are both aliased to the reply's metadata dict.  The various ``_repr_*_``
    methods expose the pyout mime bundle to IPython's rich display system.
    """
    def __init__(self, msg_id, content, metadata):
        self.msg_id = msg_id
        self._content = content
        self.execution_count = content['execution_count']
        self.metadata = metadata

    def _pyout_data(self):
        """Return the pyout mime-bundle dict ({} when there was no pyout).

        Factored out: every ``_repr_*_`` method needs exactly this lookup.
        """
        pyout = self.metadata['pyout'] or {'data': {}}
        return pyout['data']

    def __getitem__(self, key):
        return self.metadata[key]

    def __getattr__(self, key):
        # attribute access falls through to metadata keys
        if key not in self.metadata:
            raise AttributeError(key)
        return self.metadata[key]

    def __repr__(self):
        text_out = self._pyout_data().get('text/plain', '')
        if len(text_out) > 32:
            # truncate long reprs to keep the summary on one line
            text_out = text_out[:29] + '...'

        return "<ExecuteReply[%i]: %s>" % (self.execution_count, text_out)

    def _repr_pretty_(self, p, cycle):
        """IPython pretty-printer hook: colored ``[engine] Out[n]: value``."""
        text_out = self._pyout_data().get('text/plain', '')

        if not text_out:
            return

        try:
            ip = get_ipython()
        except NameError:
            # outside IPython there is no color configuration
            colors = "NoColor"
        else:
            colors = ip.colors

        if colors == "NoColor":
            out = normal = ""
        else:
            out = TermColors.Red
            normal = TermColors.Normal

        p.text(
            u'[%i] ' % self.metadata['engine_id'] +
            out + u'Out[%i]: ' % self.execution_count +
            normal + text_out
        )

    def _repr_html_(self):
        return self._pyout_data().get("text/html")

    def _repr_latex_(self):
        return self._pyout_data().get("text/latex")

    def _repr_json_(self):
        return self._pyout_data().get("application/json")

    def _repr_javascript_(self):
        return self._pyout_data().get("application/javascript")

    def _repr_png_(self):
        return self._pyout_data().get("image/png")

    def _repr_jpeg_(self):
        return self._pyout_data().get("image/jpeg")

    def _repr_svg_(self):
        return self._pyout_data().get("image/svg+xml")
127
154
128
155
class Metadata(dict):
    """Subclass of dict for initializing metadata values.

    Attribute access works on keys.

    These objects have a strict set of keys - errors will raise if you try
    to add new keys.
    """

    def __init__(self, *args, **kwargs):
        dict.__init__(self)
        # seed the full, fixed key set; mutable defaults are built fresh
        # per instance so they are never shared
        defaults = dict(
            msg_id=None,
            submitted=None,
            started=None,
            completed=None,
            received=None,
            engine_uuid=None,
            engine_id=None,
            follow=None,
            after=None,
            status=None,
            pyin=None,
            pyout=None,
            pyerr=None,
            stdout='',
            stderr='',
            outputs=[],
        )
        # dict.update bypasses our strict __setitem__, so seeding works
        self.update(defaults)
        self.update(dict(*args, **kwargs))

    def __getattr__(self, key):
        """getattr aliased to getitem"""
        if key in self:
            return self[key]
        raise AttributeError(key)

    def __setattr__(self, key, value):
        """setattr aliased to setitem, with strict key enforcement"""
        if key in self:
            self[key] = value
        else:
            raise AttributeError(key)

    def __setitem__(self, key, value):
        """strict static key enforcement"""
        if key in self:
            dict.__setitem__(self, key, value)
        else:
            raise KeyError(key)
180
207
181
208
182 class Client(HasTraits):
209 class Client(HasTraits):
183 """A semi-synchronous client to the IPython ZMQ cluster
210 """A semi-synchronous client to the IPython ZMQ cluster
184
211
185 Parameters
212 Parameters
186 ----------
213 ----------
187
214
188 url_or_file : bytes or unicode; zmq url or path to ipcontroller-client.json
215 url_or_file : bytes or unicode; zmq url or path to ipcontroller-client.json
189 Connection information for the Hub's registration. If a json connector
216 Connection information for the Hub's registration. If a json connector
190 file is given, then likely no further configuration is necessary.
217 file is given, then likely no further configuration is necessary.
191 [Default: use profile]
218 [Default: use profile]
192 profile : bytes
219 profile : bytes
193 The name of the Cluster profile to be used to find connector information.
220 The name of the Cluster profile to be used to find connector information.
194 If run from an IPython application, the default profile will be the same
221 If run from an IPython application, the default profile will be the same
195 as the running application, otherwise it will be 'default'.
222 as the running application, otherwise it will be 'default'.
196 context : zmq.Context
223 context : zmq.Context
197 Pass an existing zmq.Context instance, otherwise the client will create its own.
224 Pass an existing zmq.Context instance, otherwise the client will create its own.
198 debug : bool
225 debug : bool
199 flag for lots of message printing for debug purposes
226 flag for lots of message printing for debug purposes
200 timeout : int/float
227 timeout : int/float
201 time (in seconds) to wait for connection replies from the Hub
228 time (in seconds) to wait for connection replies from the Hub
202 [Default: 10]
229 [Default: 10]
203
230
204 #-------------- session related args ----------------
231 #-------------- session related args ----------------
205
232
206 config : Config object
233 config : Config object
207 If specified, this will be relayed to the Session for configuration
234 If specified, this will be relayed to the Session for configuration
208 username : str
235 username : str
209 set username for the session object
236 set username for the session object
210 packer : str (import_string) or callable
237 packer : str (import_string) or callable
211 Can be either the simple keyword 'json' or 'pickle', or an import_string to a
238 Can be either the simple keyword 'json' or 'pickle', or an import_string to a
212 function to serialize messages. Must support same input as
239 function to serialize messages. Must support same input as
213 JSON, and output must be bytes.
240 JSON, and output must be bytes.
214 You can pass a callable directly as `pack`
241 You can pass a callable directly as `pack`
215 unpacker : str (import_string) or callable
242 unpacker : str (import_string) or callable
216 The inverse of packer. Only necessary if packer is specified as *not* one
243 The inverse of packer. Only necessary if packer is specified as *not* one
217 of 'json' or 'pickle'.
244 of 'json' or 'pickle'.
218
245
219 #-------------- ssh related args ----------------
246 #-------------- ssh related args ----------------
220 # These are args for configuring the ssh tunnel to be used
247 # These are args for configuring the ssh tunnel to be used
221 # credentials are used to forward connections over ssh to the Controller
248 # credentials are used to forward connections over ssh to the Controller
222 # Note that the ip given in `addr` needs to be relative to sshserver
249 # Note that the ip given in `addr` needs to be relative to sshserver
223 # The most basic case is to leave addr as pointing to localhost (127.0.0.1),
250 # The most basic case is to leave addr as pointing to localhost (127.0.0.1),
224 # and set sshserver as the same machine the Controller is on. However,
251 # and set sshserver as the same machine the Controller is on. However,
225 # the only requirement is that sshserver is able to see the Controller
252 # the only requirement is that sshserver is able to see the Controller
226 # (i.e. is within the same trusted network).
253 # (i.e. is within the same trusted network).
227
254
228 sshserver : str
255 sshserver : str
229 A string of the form passed to ssh, i.e. 'server.tld' or 'user@server.tld:port'
256 A string of the form passed to ssh, i.e. 'server.tld' or 'user@server.tld:port'
230 If keyfile or password is specified, and this is not, it will default to
257 If keyfile or password is specified, and this is not, it will default to
231 the ip given in addr.
258 the ip given in addr.
232 sshkey : str; path to ssh private key file
259 sshkey : str; path to ssh private key file
233 This specifies a key to be used in ssh login, default None.
260 This specifies a key to be used in ssh login, default None.
234 Regular default ssh keys will be used without specifying this argument.
261 Regular default ssh keys will be used without specifying this argument.
235 password : str
262 password : str
236 Your ssh password to sshserver. Note that if this is left None,
263 Your ssh password to sshserver. Note that if this is left None,
237 you will be prompted for it if passwordless key based login is unavailable.
264 you will be prompted for it if passwordless key based login is unavailable.
238 paramiko : bool
265 paramiko : bool
239 flag for whether to use paramiko instead of shell ssh for tunneling.
266 flag for whether to use paramiko instead of shell ssh for tunneling.
240 [default: True on win32, False else]
267 [default: True on win32, False else]
241
268
242 ------- exec authentication args -------
269 ------- exec authentication args -------
243 If even localhost is untrusted, you can have some protection against
270 If even localhost is untrusted, you can have some protection against
244 unauthorized execution by signing messages with HMAC digests.
271 unauthorized execution by signing messages with HMAC digests.
245 Messages are still sent as cleartext, so if someone can snoop your
272 Messages are still sent as cleartext, so if someone can snoop your
246 loopback traffic this will not protect your privacy, but will prevent
273 loopback traffic this will not protect your privacy, but will prevent
247 unauthorized execution.
274 unauthorized execution.
248
275
249 exec_key : str
276 exec_key : str
250 an authentication key or file containing a key
277 an authentication key or file containing a key
251 default: None
278 default: None
252
279
253
280
254 Attributes
281 Attributes
255 ----------
282 ----------
256
283
257 ids : list of int engine IDs
284 ids : list of int engine IDs
258 requesting the ids attribute always synchronizes
285 requesting the ids attribute always synchronizes
259 the registration state. To request ids without synchronization,
286 the registration state. To request ids without synchronization,
260 use semi-private _ids attributes.
287 use semi-private _ids attributes.
261
288
262 history : list of msg_ids
289 history : list of msg_ids
263 a list of msg_ids, keeping track of all the execution
290 a list of msg_ids, keeping track of all the execution
264 messages you have submitted in order.
291 messages you have submitted in order.
265
292
266 outstanding : set of msg_ids
293 outstanding : set of msg_ids
267 a set of msg_ids that have been submitted, but whose
294 a set of msg_ids that have been submitted, but whose
268 results have not yet been received.
295 results have not yet been received.
269
296
270 results : dict
297 results : dict
271 a dict of all our results, keyed by msg_id
298 a dict of all our results, keyed by msg_id
272
299
273 block : bool
300 block : bool
274 determines default behavior when block not specified
301 determines default behavior when block not specified
275 in execution methods
302 in execution methods
276
303
277 Methods
304 Methods
278 -------
305 -------
279
306
280 spin
307 spin
281 flushes incoming results and registration state changes
308 flushes incoming results and registration state changes
282 control methods spin, and requesting `ids` also ensures up to date
309 control methods spin, and requesting `ids` also ensures up to date
283
310
284 wait
311 wait
285 wait on one or more msg_ids
312 wait on one or more msg_ids
286
313
287 execution methods
314 execution methods
288 apply
315 apply
289 legacy: execute, run
316 legacy: execute, run
290
317
291 data movement
318 data movement
292 push, pull, scatter, gather
319 push, pull, scatter, gather
293
320
294 query methods
321 query methods
295 queue_status, get_result, purge, result_status
322 queue_status, get_result, purge, result_status
296
323
297 control methods
324 control methods
298 abort, shutdown
325 abort, shutdown
299
326
300 """
327 """
301
328
302
329
303 block = Bool(False)
330 block = Bool(False)
304 outstanding = Set()
331 outstanding = Set()
305 results = Instance('collections.defaultdict', (dict,))
332 results = Instance('collections.defaultdict', (dict,))
306 metadata = Instance('collections.defaultdict', (Metadata,))
333 metadata = Instance('collections.defaultdict', (Metadata,))
307 history = List()
334 history = List()
308 debug = Bool(False)
335 debug = Bool(False)
309 _spin_thread = Any()
336 _spin_thread = Any()
310 _stop_spinning = Any()
337 _stop_spinning = Any()
311
338
312 profile=Unicode()
339 profile=Unicode()
313 def _profile_default(self):
340 def _profile_default(self):
314 if BaseIPythonApplication.initialized():
341 if BaseIPythonApplication.initialized():
315 # an IPython app *might* be running, try to get its profile
342 # an IPython app *might* be running, try to get its profile
316 try:
343 try:
317 return BaseIPythonApplication.instance().profile
344 return BaseIPythonApplication.instance().profile
318 except (AttributeError, MultipleInstanceError):
345 except (AttributeError, MultipleInstanceError):
319 # could be a *different* subclass of config.Application,
346 # could be a *different* subclass of config.Application,
320 # which would raise one of these two errors.
347 # which would raise one of these two errors.
321 return u'default'
348 return u'default'
322 else:
349 else:
323 return u'default'
350 return u'default'
324
351
325
352
    # msg_ids outstanding on each engine, keyed by engine uuid
    _outstanding_dict = Instance('collections.defaultdict', (set,))
    # sorted list of currently-registered engine ids
    _ids = List()
    _connected=Bool(False)
    _ssh=Bool(False)
    # the zmq Context shared by all of our sockets
    _context = Instance('zmq.Context')
    # connection info, merged from constructor args and the JSON file
    _config = Dict()
    # id <-> uuid mapping for engines (ReverseDict allows lookup by either)
    _engines=Instance(util.ReverseDict, (), {})
    # _hub_socket=Instance('zmq.Socket')
    # one socket per channel advertised by the Hub in _connect
    _query_socket=Instance('zmq.Socket')
    _control_socket=Instance('zmq.Socket')
    _iopub_socket=Instance('zmq.Socket')
    _notification_socket=Instance('zmq.Socket')
    _mux_socket=Instance('zmq.Socket')
    _task_socket=Instance('zmq.Socket')
    # task scheduler scheme; 'pure' means a raw ZMQ scheduler
    _task_scheme=Unicode()
    _closed = False
    # counts of replies we have chosen to discard without processing
    _ignored_control_replies=Integer(0)
    _ignored_hub_replies=Integer(0)
344
371
    def __new__(self, *args, **kw):
        # don't raise on positional args
        # (HasTraits.__new__ rejects positionals; __init__ still sees them)
        return HasTraits.__new__(self, **kw)
348
375
    def __init__(self, url_or_file=None, profile=None, profile_dir=None, ipython_dir=None,
            context=None, debug=False, exec_key=None,
            sshserver=None, sshkey=None, password=None, paramiko=None,
            timeout=10, **extra_args
            ):
        """Create a Client connected to a controller.

        Connection info comes from explicit arguments first, then from the
        connection-file JSON (``ipcontroller-client.json``), which is located
        via `profile`/`profile_dir`/`ipython_dir` when `url_or_file` is not
        an explicit url or an existing file.  `extra_args` are forwarded to
        the Session constructor.  Raises ValueError when no connection info
        can be found, IOError when a named connection file does not exist.
        """
        # only pass `profile` up when it was actually given, so the
        # _profile_default trait logic can run otherwise
        if profile:
            super(Client, self).__init__(debug=debug, profile=profile)
        else:
            super(Client, self).__init__(debug=debug)
        if context is None:
            context = zmq.Context.instance()
        self._context = context
        self._stop_spinning = Event()

        self._setup_profile_dir(self.profile, profile_dir, ipython_dir)
        if self._cd is not None:
            if url_or_file is None:
                url_or_file = pjoin(self._cd.security_dir, 'ipcontroller-client.json')
        if url_or_file is None:
            raise ValueError(
                "I can't find enough information to connect to a hub!"
                " Please specify at least one of url_or_file or profile."
            )

        if not util.is_url(url_or_file):
            # it's not a url, try for a file
            if not os.path.exists(url_or_file):
                # retry relative to the profile's security dir
                if self._cd:
                    url_or_file = os.path.join(self._cd.security_dir, url_or_file)
                if not os.path.exists(url_or_file):
                    raise IOError("Connection file not found: %r" % url_or_file)
            with open(url_or_file) as f:
                cfg = json.loads(f.read())
        else:
            cfg = {'url':url_or_file}

        # sync defaults from args, json:
        if sshserver:
            cfg['ssh'] = sshserver
        if exec_key:
            cfg['exec_key'] = exec_key
        exec_key = cfg['exec_key']
        location = cfg.setdefault('location', None)
        cfg['url'] = util.disambiguate_url(cfg['url'], location)
        url = cfg['url']
        proto,addr,port = util.split_url(url)
        if location is not None and addr == '127.0.0.1':
            # location specified, and connection is expected to be local
            if location not in LOCAL_IPS and not sshserver:
                # load ssh from JSON *only* if the controller is not on
                # this machine
                sshserver=cfg['ssh']
            if location not in LOCAL_IPS and not sshserver:
                # warn if no ssh specified, but SSH is probably needed
                # This is only a warning, because the most likely cause
                # is a local Controller on a laptop whose IP is dynamic
                warnings.warn("""
            Controller appears to be listening on localhost, but not on this machine.
            If this is true, you should specify Client(...,sshserver='you@%s')
            or instruct your controller to listen on an external IP."""%location,
                RuntimeWarning)
        elif not sshserver:
            # otherwise sync with cfg
            sshserver = cfg['ssh']

        self._config = cfg

        self._ssh = bool(sshserver or sshkey or password)
        if self._ssh and sshserver is None:
            # default to ssh via localhost
            sshserver = url.split('://')[1].split(':')[0]
        if self._ssh and password is None:
            if tunnel.try_passwordless_ssh(sshserver, sshkey, paramiko):
                # passwordless login works; False suppresses the prompt
                password=False
            else:
                password = getpass("SSH Password for %s: "%sshserver)
        ssh_kwargs = dict(keyfile=sshkey, password=password, paramiko=paramiko)

        # configure and construct the session
        if exec_key is not None:
            if os.path.isfile(exec_key):
                # exec_key may be a path to a keyfile, or the key itself
                extra_args['keyfile'] = exec_key
            else:
                exec_key = cast_bytes(exec_key)
                extra_args['key'] = exec_key
        self.session = Session(**extra_args)

        self._query_socket = self._context.socket(zmq.DEALER)
        self._query_socket.setsockopt(zmq.IDENTITY, self.session.bsession)
        if self._ssh:
            tunnel.tunnel_connection(self._query_socket, url, sshserver, **ssh_kwargs)
        else:
            self._query_socket.connect(url)

        self.session.debug = self.debug

        # dispatch tables for incoming messages, by msg_type
        self._notification_handlers = {'registration_notification' : self._register_engine,
                                    'unregistration_notification' : self._unregister_engine,
                                    'shutdown_notification' : lambda msg: self.close(),
                                    }
        self._queue_handlers = {'execute_reply' : self._handle_execute_reply,
                                'apply_reply' : self._handle_apply_reply}
        self._connect(sshserver, ssh_kwargs, timeout)
452
479
    def __del__(self):
        """cleanup sockets, but _not_ context."""
        # the Context may be the shared global instance, so it must outlive us
        self.close()
456
483
    def _setup_profile_dir(self, profile, profile_dir, ipython_dir):
        """Locate the profile directory and store it on ``self._cd``.

        An explicit `profile_dir` wins over a named `profile` under
        `ipython_dir`; when neither resolves, ``self._cd`` is set to None
        (never raises).
        """
        if ipython_dir is None:
            ipython_dir = get_ipython_dir()
        if profile_dir is not None:
            try:
                self._cd = ProfileDir.find_profile_dir(profile_dir)
                return
            except ProfileDirError:
                pass
        elif profile is not None:
            try:
                self._cd = ProfileDir.find_profile_dir_by_name(
                    ipython_dir, profile)
                return
            except ProfileDirError:
                pass
        # fall through: no usable profile dir was found
        self._cd = None
474
501
475 def _update_engines(self, engines):
502 def _update_engines(self, engines):
476 """Update our engines dict and _ids from a dict of the form: {id:uuid}."""
503 """Update our engines dict and _ids from a dict of the form: {id:uuid}."""
477 for k,v in engines.iteritems():
504 for k,v in engines.iteritems():
478 eid = int(k)
505 eid = int(k)
479 self._engines[eid] = v
506 self._engines[eid] = v
480 self._ids.append(eid)
507 self._ids.append(eid)
481 self._ids = sorted(self._ids)
508 self._ids = sorted(self._ids)
482 if sorted(self._engines.keys()) != range(len(self._engines)) and \
509 if sorted(self._engines.keys()) != range(len(self._engines)) and \
483 self._task_scheme == 'pure' and self._task_socket:
510 self._task_scheme == 'pure' and self._task_socket:
484 self._stop_scheduling_tasks()
511 self._stop_scheduling_tasks()
485
512
486 def _stop_scheduling_tasks(self):
513 def _stop_scheduling_tasks(self):
487 """Stop scheduling tasks because an engine has been unregistered
514 """Stop scheduling tasks because an engine has been unregistered
488 from a pure ZMQ scheduler.
515 from a pure ZMQ scheduler.
489 """
516 """
490 self._task_socket.close()
517 self._task_socket.close()
491 self._task_socket = None
518 self._task_socket = None
492 msg = "An engine has been unregistered, and we are using pure " +\
519 msg = "An engine has been unregistered, and we are using pure " +\
493 "ZMQ task scheduling. Task farming will be disabled."
520 "ZMQ task scheduling. Task farming will be disabled."
494 if self.outstanding:
521 if self.outstanding:
495 msg += " If you were running tasks when this happened, " +\
522 msg += " If you were running tasks when this happened, " +\
496 "some `outstanding` msg_ids may never resolve."
523 "some `outstanding` msg_ids may never resolve."
497 warnings.warn(msg, RuntimeWarning)
524 warnings.warn(msg, RuntimeWarning)
498
525
    def _build_targets(self, targets):
        """Turn valid target IDs or 'all' into two lists:
        (int_ids, uuids).

        Accepts None (all engines), the string 'all', a single int
        (negative ints index from the end), a slice, or a collection of
        ints.  Raises NoEnginesRegistered, TypeError, or IndexError on
        invalid input.
        """
        if not self._ids:
            # flush notification socket if no engines yet, just in case
            # (the `ids` property spins, picking up fresh registrations)
            if not self.ids:
                raise error.NoEnginesRegistered("Can't build targets without any engines")

        if targets is None:
            targets = self._ids
        elif isinstance(targets, basestring):
            if targets.lower() == 'all':
                targets = self._ids
            else:
                raise TypeError("%r not valid str target, must be 'all'"%(targets))
        elif isinstance(targets, int):
            # negative ints index from the end, list-style
            if targets < 0:
                targets = self.ids[targets]
            if targets not in self._ids:
                raise IndexError("No such engine: %i"%targets)
            targets = [targets]

        if isinstance(targets, slice):
            # resolve the slice against the current id list
            indices = range(len(self._ids))[targets]
            ids = self.ids
            targets = [ ids[i] for i in indices ]

        if not isinstance(targets, (tuple, list, xrange)):
            raise TypeError("targets by int/slice/collection of ints only, not %s"%(type(targets)))

        # uuids as bytes (socket identities), ids as a plain list
        return [cast_bytes(self._engines[t]) for t in targets], list(targets)
531
558
    def _connect(self, sshserver, ssh_kwargs, timeout):
        """setup all our socket connections to the cluster. This is called from
        __init__.

        Sends a connection_request over the query socket, waits up to
        `timeout` seconds for the reply, then creates and connects one
        socket per channel the Hub advertises.  Raises TimeoutError when
        the Hub does not answer, and a generic Exception when the reply
        status is not 'ok'.
        """

        # Maybe allow reconnecting?
        if self._connected:
            return
        self._connected=True

        def connect_socket(s, url):
            # resolve the address for this host, and tunnel over SSH if needed
            url = util.disambiguate_url(url, self._config['location'])
            if self._ssh:
                return tunnel.tunnel_connection(s, url, sshserver, **ssh_kwargs)
            else:
                return s.connect(url)

        self.session.send(self._query_socket, 'connection_request')
        # use Poller because zmq.select has wrong units in pyzmq 2.1.7
        poller = zmq.Poller()
        poller.register(self._query_socket, zmq.POLLIN)
        # poll expects milliseconds, timeout is seconds
        evts = poller.poll(timeout*1000)
        if not evts:
            raise error.TimeoutError("Hub connection request timed out")
        idents,msg = self.session.recv(self._query_socket,mode=0)
        if self.debug:
            pprint(msg)
        msg = Message(msg)
        content = msg.content
        self._config['registration'] = dict(content)
        if content.status == 'ok':
            # all sockets share the session's identity
            ident = self.session.bsession
            if content.mux:
                self._mux_socket = self._context.socket(zmq.DEALER)
                self._mux_socket.setsockopt(zmq.IDENTITY, ident)
                connect_socket(self._mux_socket, content.mux)
            if content.task:
                # content.task carries (scheme, address)
                self._task_scheme, task_addr = content.task
                self._task_socket = self._context.socket(zmq.DEALER)
                self._task_socket.setsockopt(zmq.IDENTITY, ident)
                connect_socket(self._task_socket, task_addr)
            if content.notification:
                # SUB socket subscribed to everything
                self._notification_socket = self._context.socket(zmq.SUB)
                connect_socket(self._notification_socket, content.notification)
                self._notification_socket.setsockopt(zmq.SUBSCRIBE, b'')
            # if content.query:
            #     self._query_socket = self._context.socket(zmq.DEALER)
            #     self._query_socket.setsockopt(zmq.IDENTITY, self.session.bsession)
            #     connect_socket(self._query_socket, content.query)
            if content.control:
                self._control_socket = self._context.socket(zmq.DEALER)
                self._control_socket.setsockopt(zmq.IDENTITY, ident)
                connect_socket(self._control_socket, content.control)
            if content.iopub:
                self._iopub_socket = self._context.socket(zmq.SUB)
                self._iopub_socket.setsockopt(zmq.SUBSCRIBE, b'')
                self._iopub_socket.setsockopt(zmq.IDENTITY, ident)
                connect_socket(self._iopub_socket, content.iopub)
            self._update_engines(dict(content.engines))
        else:
            self._connected = False
            raise Exception("Failed to connect!")
594
621
595 #--------------------------------------------------------------------------
622 #--------------------------------------------------------------------------
596 # handlers and callbacks for incoming messages
623 # handlers and callbacks for incoming messages
597 #--------------------------------------------------------------------------
624 #--------------------------------------------------------------------------
598
625
599 def _unwrap_exception(self, content):
626 def _unwrap_exception(self, content):
600 """unwrap exception, and remap engine_id to int."""
627 """unwrap exception, and remap engine_id to int."""
601 e = error.unwrap_exception(content)
628 e = error.unwrap_exception(content)
602 # print e.traceback
629 # print e.traceback
603 if e.engine_info:
630 if e.engine_info:
604 e_uuid = e.engine_info['engine_uuid']
631 e_uuid = e.engine_info['engine_uuid']
605 eid = self._engines[e_uuid]
632 eid = self._engines[e_uuid]
606 e.engine_info['engine_id'] = eid
633 e.engine_info['engine_id'] = eid
607 return e
634 return e
608
635
609 def _extract_metadata(self, header, parent, content):
636 def _extract_metadata(self, header, parent, content):
610 md = {'msg_id' : parent['msg_id'],
637 md = {'msg_id' : parent['msg_id'],
611 'received' : datetime.now(),
638 'received' : datetime.now(),
612 'engine_uuid' : header.get('engine', None),
639 'engine_uuid' : header.get('engine', None),
613 'follow' : parent.get('follow', []),
640 'follow' : parent.get('follow', []),
614 'after' : parent.get('after', []),
641 'after' : parent.get('after', []),
615 'status' : content['status'],
642 'status' : content['status'],
616 }
643 }
617
644
618 if md['engine_uuid'] is not None:
645 if md['engine_uuid'] is not None:
619 md['engine_id'] = self._engines.get(md['engine_uuid'], None)
646 md['engine_id'] = self._engines.get(md['engine_uuid'], None)
620
647
621 if 'date' in parent:
648 if 'date' in parent:
622 md['submitted'] = parent['date']
649 md['submitted'] = parent['date']
623 if 'started' in header:
650 if 'started' in header:
624 md['started'] = header['started']
651 md['started'] = header['started']
625 if 'date' in header:
652 if 'date' in header:
626 md['completed'] = header['date']
653 md['completed'] = header['date']
627 return md
654 return md
628
655
629 def _register_engine(self, msg):
656 def _register_engine(self, msg):
630 """Register a new engine, and update our connection info."""
657 """Register a new engine, and update our connection info."""
631 content = msg['content']
658 content = msg['content']
632 eid = content['id']
659 eid = content['id']
633 d = {eid : content['queue']}
660 d = {eid : content['queue']}
634 self._update_engines(d)
661 self._update_engines(d)
635
662
636 def _unregister_engine(self, msg):
663 def _unregister_engine(self, msg):
637 """Unregister an engine that has died."""
664 """Unregister an engine that has died."""
638 content = msg['content']
665 content = msg['content']
639 eid = int(content['id'])
666 eid = int(content['id'])
640 if eid in self._ids:
667 if eid in self._ids:
641 self._ids.remove(eid)
668 self._ids.remove(eid)
642 uuid = self._engines.pop(eid)
669 uuid = self._engines.pop(eid)
643
670
644 self._handle_stranded_msgs(eid, uuid)
671 self._handle_stranded_msgs(eid, uuid)
645
672
646 if self._task_socket and self._task_scheme == 'pure':
673 if self._task_socket and self._task_scheme == 'pure':
647 self._stop_scheduling_tasks()
674 self._stop_scheduling_tasks()
648
675
    def _handle_stranded_msgs(self, eid, uuid):
        """Handle messages known to be on an engine when the engine unregisters.

        It is possible that this will fire prematurely - that is, an engine will
        go down after completing a result, and the client will be notified
        of the unregistration and later receive the successful result.
        """

        outstanding = self._outstanding_dict[uuid]

        for msg_id in list(outstanding):
            if msg_id in self.results:
                # we already have this result; nothing is stranded
                continue
            try:
                # raise-and-catch so wrap_exception captures a real traceback
                raise error.EngineError("Engine %r died while running task %r"%(eid, msg_id))
            except:
                content = error.wrap_exception()
            # build a fake message:
            parent = {}
            header = {}
            parent['msg_id'] = msg_id
            header['engine'] = uuid
            header['date'] = datetime.now()
            msg = dict(parent_header=parent, header=header, content=content)
            # route through the normal reply path to record the failure
            self._handle_apply_reply(msg)
675
702
    def _handle_execute_reply(self, msg):
        """Save the reply to an execute_request into our results.

        execute messages are never actually used. apply is used instead.
        """

        parent = msg['parent_header']
        msg_id = parent['msg_id']
        if msg_id not in self.outstanding:
            # unexpected reply: report it, but still record the result below
            if msg_id in self.history:
                print ("got stale result: %s"%msg_id)
            else:
                print ("got unknown result: %s"%msg_id)
        else:
            self.outstanding.remove(msg_id)

        content = msg['content']
        header = msg['header']

        # construct metadata:
        md = self.metadata[msg_id]
        md.update(self._extract_metadata(header, parent, content))
        # is this redundant?
        self.metadata[msg_id] = md

        # clear this msg from the per-engine outstanding set as well
        e_outstanding = self._outstanding_dict[md['engine_uuid']]
        if msg_id in e_outstanding:
            e_outstanding.remove(msg_id)

        # construct result:
        if content['status'] == 'ok':
            self.results[msg_id] = ExecuteReply(msg_id, content, md)
        elif content['status'] == 'aborted':
            self.results[msg_id] = error.TaskAborted(msg_id)
        elif content['status'] == 'resubmitted':
            # TODO: handle resubmission
            pass
        else:
            # remote error: store the reconstructed exception as the result
            self.results[msg_id] = self._unwrap_exception(content)
715
742
    def _handle_apply_reply(self, msg):
        """Save the reply to an apply_request into our results."""
        parent = msg['parent_header']
        msg_id = parent['msg_id']
        if msg_id not in self.outstanding:
            # unexpected reply: report it, but still record the result below
            if msg_id in self.history:
                print ("got stale result: %s"%msg_id)
                print self.results[msg_id]
                print msg
            else:
                print ("got unknown result: %s"%msg_id)
        else:
            self.outstanding.remove(msg_id)
        content = msg['content']
        header = msg['header']

        # construct metadata:
        md = self.metadata[msg_id]
        md.update(self._extract_metadata(header, parent, content))
        # is this redundant?
        self.metadata[msg_id] = md

        # clear this msg from the per-engine outstanding set as well
        e_outstanding = self._outstanding_dict[md['engine_uuid']]
        if msg_id in e_outstanding:
            e_outstanding.remove(msg_id)

        # construct result:
        if content['status'] == 'ok':
            # deserialize the actual return value from the message buffers
            self.results[msg_id] = util.unserialize_object(msg['buffers'])[0]
        elif content['status'] == 'aborted':
            self.results[msg_id] = error.TaskAborted(msg_id)
        elif content['status'] == 'resubmitted':
            # TODO: handle resubmission
            pass
        else:
            # remote error: store the reconstructed exception as the result
            self.results[msg_id] = self._unwrap_exception(content)
752
779
753 def _flush_notifications(self):
780 def _flush_notifications(self):
754 """Flush notifications of engine registrations waiting
781 """Flush notifications of engine registrations waiting
755 in ZMQ queue."""
782 in ZMQ queue."""
756 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
783 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
757 while msg is not None:
784 while msg is not None:
758 if self.debug:
785 if self.debug:
759 pprint(msg)
786 pprint(msg)
760 msg_type = msg['header']['msg_type']
787 msg_type = msg['header']['msg_type']
761 handler = self._notification_handlers.get(msg_type, None)
788 handler = self._notification_handlers.get(msg_type, None)
762 if handler is None:
789 if handler is None:
763 raise Exception("Unhandled message type: %s"%msg.msg_type)
790 raise Exception("Unhandled message type: %s"%msg.msg_type)
764 else:
791 else:
765 handler(msg)
792 handler(msg)
766 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
793 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
767
794
768 def _flush_results(self, sock):
795 def _flush_results(self, sock):
769 """Flush task or queue results waiting in ZMQ queue."""
796 """Flush task or queue results waiting in ZMQ queue."""
770 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
797 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
771 while msg is not None:
798 while msg is not None:
772 if self.debug:
799 if self.debug:
773 pprint(msg)
800 pprint(msg)
774 msg_type = msg['header']['msg_type']
801 msg_type = msg['header']['msg_type']
775 handler = self._queue_handlers.get(msg_type, None)
802 handler = self._queue_handlers.get(msg_type, None)
776 if handler is None:
803 if handler is None:
777 raise Exception("Unhandled message type: %s"%msg.msg_type)
804 raise Exception("Unhandled message type: %s"%msg.msg_type)
778 else:
805 else:
779 handler(msg)
806 handler(msg)
780 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
807 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
781
808
782 def _flush_control(self, sock):
809 def _flush_control(self, sock):
783 """Flush replies from the control channel waiting
810 """Flush replies from the control channel waiting
784 in the ZMQ queue.
811 in the ZMQ queue.
785
812
786 Currently: ignore them."""
813 Currently: ignore them."""
787 if self._ignored_control_replies <= 0:
814 if self._ignored_control_replies <= 0:
788 return
815 return
789 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
816 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
790 while msg is not None:
817 while msg is not None:
791 self._ignored_control_replies -= 1
818 self._ignored_control_replies -= 1
792 if self.debug:
819 if self.debug:
793 pprint(msg)
820 pprint(msg)
794 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
821 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
795
822
796 def _flush_ignored_control(self):
823 def _flush_ignored_control(self):
797 """flush ignored control replies"""
824 """flush ignored control replies"""
798 while self._ignored_control_replies > 0:
825 while self._ignored_control_replies > 0:
799 self.session.recv(self._control_socket)
826 self.session.recv(self._control_socket)
800 self._ignored_control_replies -= 1
827 self._ignored_control_replies -= 1
801
828
802 def _flush_ignored_hub_replies(self):
829 def _flush_ignored_hub_replies(self):
803 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
830 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
804 while msg is not None:
831 while msg is not None:
805 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
832 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
806
833
807 def _flush_iopub(self, sock):
834 def _flush_iopub(self, sock):
808 """Flush replies from the iopub channel waiting
835 """Flush replies from the iopub channel waiting
809 in the ZMQ queue.
836 in the ZMQ queue.
810 """
837 """
811 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
838 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
812 while msg is not None:
839 while msg is not None:
813 if self.debug:
840 if self.debug:
814 pprint(msg)
841 pprint(msg)
815 parent = msg['parent_header']
842 parent = msg['parent_header']
816 # ignore IOPub messages with no parent.
843 # ignore IOPub messages with no parent.
817 # Caused by print statements or warnings from before the first execution.
844 # Caused by print statements or warnings from before the first execution.
818 if not parent:
845 if not parent:
819 continue
846 continue
820 msg_id = parent['msg_id']
847 msg_id = parent['msg_id']
821 content = msg['content']
848 content = msg['content']
822 header = msg['header']
849 header = msg['header']
823 msg_type = msg['header']['msg_type']
850 msg_type = msg['header']['msg_type']
824
851
825 # init metadata:
852 # init metadata:
826 md = self.metadata[msg_id]
853 md = self.metadata[msg_id]
827
854
828 if msg_type == 'stream':
855 if msg_type == 'stream':
829 name = content['name']
856 name = content['name']
830 s = md[name] or ''
857 s = md[name] or ''
831 md[name] = s + content['data']
858 md[name] = s + content['data']
832 elif msg_type == 'pyerr':
859 elif msg_type == 'pyerr':
833 md.update({'pyerr' : self._unwrap_exception(content)})
860 md.update({'pyerr' : self._unwrap_exception(content)})
834 elif msg_type == 'pyin':
861 elif msg_type == 'pyin':
835 md.update({'pyin' : content['code']})
862 md.update({'pyin' : content['code']})
836 elif msg_type == 'display_data':
863 elif msg_type == 'display_data':
837 md['outputs'].append(content)
864 md['outputs'].append(content)
838 elif msg_type == 'pyout':
865 elif msg_type == 'pyout':
839 md['pyout'] = content
866 md['pyout'] = content
840 else:
867 else:
841 # unhandled msg_type (status, etc.)
868 # unhandled msg_type (status, etc.)
842 pass
869 pass
843
870
844 # reduntant?
871 # reduntant?
845 self.metadata[msg_id] = md
872 self.metadata[msg_id] = md
846
873
847 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
874 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
848
875
849 #--------------------------------------------------------------------------
876 #--------------------------------------------------------------------------
850 # len, getitem
877 # len, getitem
851 #--------------------------------------------------------------------------
878 #--------------------------------------------------------------------------
852
879
    def __len__(self):
        """len(client) returns # of engines.

        Delegates to the `ids` property, which flushes pending
        registration notifications first, so the count is current.
        """
        return len(self.ids)
856
883
857 def __getitem__(self, key):
884 def __getitem__(self, key):
858 """index access returns DirectView multiplexer objects
885 """index access returns DirectView multiplexer objects
859
886
860 Must be int, slice, or list/tuple/xrange of ints"""
887 Must be int, slice, or list/tuple/xrange of ints"""
861 if not isinstance(key, (int, slice, tuple, list, xrange)):
888 if not isinstance(key, (int, slice, tuple, list, xrange)):
862 raise TypeError("key by int/slice/iterable of ints only, not %s"%(type(key)))
889 raise TypeError("key by int/slice/iterable of ints only, not %s"%(type(key)))
863 else:
890 else:
864 return self.direct_view(key)
891 return self.direct_view(key)
865
892
866 #--------------------------------------------------------------------------
893 #--------------------------------------------------------------------------
867 # Begin public methods
894 # Begin public methods
868 #--------------------------------------------------------------------------
895 #--------------------------------------------------------------------------
869
896
    @property
    def ids(self):
        """Always up-to-date ids property.

        Flushes pending registration notifications before reading, so
        newly (un)registered engines are reflected immediately.
        """
        self._flush_notifications()
        # always copy: callers must not mutate the internal id list
        return list(self._ids)
876
903
877 def close(self):
904 def close(self):
878 if self._closed:
905 if self._closed:
879 return
906 return
880 self.stop_spin_thread()
907 self.stop_spin_thread()
881 snames = filter(lambda n: n.endswith('socket'), dir(self))
908 snames = filter(lambda n: n.endswith('socket'), dir(self))
882 for socket in map(lambda name: getattr(self, name), snames):
909 for socket in map(lambda name: getattr(self, name), snames):
883 if isinstance(socket, zmq.Socket) and not socket.closed:
910 if isinstance(socket, zmq.Socket) and not socket.closed:
884 socket.close()
911 socket.close()
885 self._closed = True
912 self._closed = True
886
913
887 def _spin_every(self, interval=1):
914 def _spin_every(self, interval=1):
888 """target func for use in spin_thread"""
915 """target func for use in spin_thread"""
889 while True:
916 while True:
890 if self._stop_spinning.is_set():
917 if self._stop_spinning.is_set():
891 return
918 return
892 time.sleep(interval)
919 time.sleep(interval)
893 self.spin()
920 self.spin()
894
921
895 def spin_thread(self, interval=1):
922 def spin_thread(self, interval=1):
896 """call Client.spin() in a background thread on some regular interval
923 """call Client.spin() in a background thread on some regular interval
897
924
898 This helps ensure that messages don't pile up too much in the zmq queue
925 This helps ensure that messages don't pile up too much in the zmq queue
899 while you are working on other things, or just leaving an idle terminal.
926 while you are working on other things, or just leaving an idle terminal.
900
927
901 It also helps limit potential padding of the `received` timestamp
928 It also helps limit potential padding of the `received` timestamp
902 on AsyncResult objects, used for timings.
929 on AsyncResult objects, used for timings.
903
930
904 Parameters
931 Parameters
905 ----------
932 ----------
906
933
907 interval : float, optional
934 interval : float, optional
908 The interval on which to spin the client in the background thread
935 The interval on which to spin the client in the background thread
909 (simply passed to time.sleep).
936 (simply passed to time.sleep).
910
937
911 Notes
938 Notes
912 -----
939 -----
913
940
914 For precision timing, you may want to use this method to put a bound
941 For precision timing, you may want to use this method to put a bound
915 on the jitter (in seconds) in `received` timestamps used
942 on the jitter (in seconds) in `received` timestamps used
916 in AsyncResult.wall_time.
943 in AsyncResult.wall_time.
917
944
918 """
945 """
919 if self._spin_thread is not None:
946 if self._spin_thread is not None:
920 self.stop_spin_thread()
947 self.stop_spin_thread()
921 self._stop_spinning.clear()
948 self._stop_spinning.clear()
922 self._spin_thread = Thread(target=self._spin_every, args=(interval,))
949 self._spin_thread = Thread(target=self._spin_every, args=(interval,))
923 self._spin_thread.daemon = True
950 self._spin_thread.daemon = True
924 self._spin_thread.start()
951 self._spin_thread.start()
925
952
926 def stop_spin_thread(self):
953 def stop_spin_thread(self):
927 """stop background spin_thread, if any"""
954 """stop background spin_thread, if any"""
928 if self._spin_thread is not None:
955 if self._spin_thread is not None:
929 self._stop_spinning.set()
956 self._stop_spinning.set()
930 self._spin_thread.join()
957 self._spin_thread.join()
931 self._spin_thread = None
958 self._spin_thread = None
932
959
    def spin(self):
        """Flush any registration notifications and execution results
        waiting in the ZMQ queue.

        Each connected channel is drained non-blocking; sockets that are
        not yet set up (still falsy) are skipped.
        """
        if self._notification_socket:
            self._flush_notifications()
        if self._iopub_socket:
            self._flush_iopub(self._iopub_socket)
        if self._mux_socket:
            self._flush_results(self._mux_socket)
        if self._task_socket:
            self._flush_results(self._task_socket)
        if self._control_socket:
            self._flush_control(self._control_socket)
        if self._query_socket:
            # hub replies are discarded, not dispatched
            self._flush_ignored_hub_replies()
949
976
950 def wait(self, jobs=None, timeout=-1):
977 def wait(self, jobs=None, timeout=-1):
951 """waits on one or more `jobs`, for up to `timeout` seconds.
978 """waits on one or more `jobs`, for up to `timeout` seconds.
952
979
953 Parameters
980 Parameters
954 ----------
981 ----------
955
982
956 jobs : int, str, or list of ints and/or strs, or one or more AsyncResult objects
983 jobs : int, str, or list of ints and/or strs, or one or more AsyncResult objects
957 ints are indices to self.history
984 ints are indices to self.history
958 strs are msg_ids
985 strs are msg_ids
959 default: wait on all outstanding messages
986 default: wait on all outstanding messages
960 timeout : float
987 timeout : float
961 a time in seconds, after which to give up.
988 a time in seconds, after which to give up.
962 default is -1, which means no timeout
989 default is -1, which means no timeout
963
990
964 Returns
991 Returns
965 -------
992 -------
966
993
967 True : when all msg_ids are done
994 True : when all msg_ids are done
968 False : timeout reached, some msg_ids still outstanding
995 False : timeout reached, some msg_ids still outstanding
969 """
996 """
970 tic = time.time()
997 tic = time.time()
971 if jobs is None:
998 if jobs is None:
972 theids = self.outstanding
999 theids = self.outstanding
973 else:
1000 else:
974 if isinstance(jobs, (int, basestring, AsyncResult)):
1001 if isinstance(jobs, (int, basestring, AsyncResult)):
975 jobs = [jobs]
1002 jobs = [jobs]
976 theids = set()
1003 theids = set()
977 for job in jobs:
1004 for job in jobs:
978 if isinstance(job, int):
1005 if isinstance(job, int):
979 # index access
1006 # index access
980 job = self.history[job]
1007 job = self.history[job]
981 elif isinstance(job, AsyncResult):
1008 elif isinstance(job, AsyncResult):
982 map(theids.add, job.msg_ids)
1009 map(theids.add, job.msg_ids)
983 continue
1010 continue
984 theids.add(job)
1011 theids.add(job)
985 if not theids.intersection(self.outstanding):
1012 if not theids.intersection(self.outstanding):
986 return True
1013 return True
987 self.spin()
1014 self.spin()
988 while theids.intersection(self.outstanding):
1015 while theids.intersection(self.outstanding):
989 if timeout >= 0 and ( time.time()-tic ) > timeout:
1016 if timeout >= 0 and ( time.time()-tic ) > timeout:
990 break
1017 break
991 time.sleep(1e-3)
1018 time.sleep(1e-3)
992 self.spin()
1019 self.spin()
993 return len(theids.intersection(self.outstanding)) == 0
1020 return len(theids.intersection(self.outstanding)) == 0
994
1021
995 #--------------------------------------------------------------------------
1022 #--------------------------------------------------------------------------
996 # Control methods
1023 # Control methods
997 #--------------------------------------------------------------------------
1024 #--------------------------------------------------------------------------
998
1025
    @spin_first
    def clear(self, targets=None, block=None):
        """Clear the namespace in target(s).

        Sends a 'clear_request' over the control channel to each target
        engine. When blocking, collects one reply per target and raises
        the last non-ok reply as an exception; otherwise the replies are
        counted as ignored, to be drained later.
        """
        block = self.block if block is None else block
        targets = self._build_targets(targets)[0]
        for t in targets:
            self.session.send(self._control_socket, 'clear_request', content={}, ident=t)
        # `error` holds the last failure reply, if any (shadows the module name locally)
        error = False
        if block:
            self._flush_ignored_control()
            for i in range(len(targets)):
                idents,msg = self.session.recv(self._control_socket,0)
                if self.debug:
                    pprint(msg)
                if msg['content']['status'] != 'ok':
                    error = self._unwrap_exception(msg['content'])
        else:
            # replies will arrive later; remember how many to discard
            self._ignored_control_replies += len(targets)
        if error:
            raise error
1019
1046
1020
1047
    @spin_first
    def abort(self, jobs=None, targets=None, block=None):
        """Abort specific jobs from the execution queues of target(s).

        This is a mechanism to prevent jobs that have already been submitted
        from executing.

        Parameters
        ----------

        jobs : msg_id, list of msg_ids, or AsyncResult
            The jobs to be aborted

            If unspecified/None: abort all outstanding jobs.

        Raises
        ------
        TypeError
            If any job is not a msg_id string or AsyncResult.
        """
        block = self.block if block is None else block
        jobs = jobs if jobs is not None else list(self.outstanding)
        targets = self._build_targets(targets)[0]

        msg_ids = []
        if isinstance(jobs, (basestring,AsyncResult)):
            jobs = [jobs]
        # NOTE(review): truthiness of `bad_ids` assumes Python 2's eager
        # filter (returns a list); under Python 3 filter is a lazy iterator
        # and is always truthy — confirm target interpreter
        bad_ids = filter(lambda obj: not isinstance(obj, (basestring, AsyncResult)), jobs)
        if bad_ids:
            raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
        # flatten AsyncResults into their msg_ids
        for j in jobs:
            if isinstance(j, AsyncResult):
                msg_ids.extend(j.msg_ids)
            else:
                msg_ids.append(j)
        content = dict(msg_ids=msg_ids)
        for t in targets:
            self.session.send(self._control_socket, 'abort_request',
                    content=content, ident=t)
        # last failure reply, if any (shadows the module name locally)
        error = False
        if block:
            self._flush_ignored_control()
            for i in range(len(targets)):
                idents,msg = self.session.recv(self._control_socket,0)
                if self.debug:
                    pprint(msg)
                if msg['content']['status'] != 'ok':
                    error = self._unwrap_exception(msg['content'])
        else:
            # replies will arrive later; remember how many to discard
            self._ignored_control_replies += len(targets)
        if error:
            raise error
1069
1096
    @spin_first
    def shutdown(self, targets=None, restart=False, hub=False, block=None):
        """Terminates one or more engine processes, optionally including the hub.

        Parameters
        ----------

        targets : targets spec [default: None]
            Engines to shut down; forced to 'all' when `hub` is True.
        restart : bool [default: False]
            Passed through in the shutdown_request content.
        hub : bool [default: False]
            Also shut down the Hub itself (implies blocking on engine replies).
        block : bool [default: self.block]
            Wait for replies from the engines.
        """
        block = self.block if block is None else block
        if hub:
            # hub shutdown requires taking all engines down first
            targets = 'all'
        targets = self._build_targets(targets)[0]
        for t in targets:
            self.session.send(self._control_socket, 'shutdown_request',
                        content={'restart':restart},ident=t)
        # last failure reply, if any (shadows the module name locally)
        error = False
        if block or hub:
            self._flush_ignored_control()
            for i in range(len(targets)):
                idents,msg = self.session.recv(self._control_socket, 0)
                if self.debug:
                    pprint(msg)
                if msg['content']['status'] != 'ok':
                    error = self._unwrap_exception(msg['content'])
        else:
            # replies will arrive later; remember how many to discard
            self._ignored_control_replies += len(targets)

        if hub:
            # brief pause so engine shutdowns are underway before the hub's
            time.sleep(0.25)
            self.session.send(self._query_socket, 'shutdown_request')
            idents,msg = self.session.recv(self._query_socket, 0)
            if self.debug:
                pprint(msg)
            if msg['content']['status'] != 'ok':
                error = self._unwrap_exception(msg['content'])

        if error:
            raise error
1103
1130
1104 #--------------------------------------------------------------------------
1131 #--------------------------------------------------------------------------
1105 # Execution related methods
1132 # Execution related methods
1106 #--------------------------------------------------------------------------
1133 #--------------------------------------------------------------------------
1107
1134
1108 def _maybe_raise(self, result):
1135 def _maybe_raise(self, result):
1109 """wrapper for maybe raising an exception if apply failed."""
1136 """wrapper for maybe raising an exception if apply failed."""
1110 if isinstance(result, error.RemoteError):
1137 if isinstance(result, error.RemoteError):
1111 raise result
1138 raise result
1112
1139
1113 return result
1140 return result
1114
1141
    def send_apply_request(self, socket, f, args=None, kwargs=None, subheader=None, track=False,
                            ident=None):
        """construct and send an apply message via a socket.

        This is the principal method with which all engine execution is performed by views.

        Validates f/args/kwargs/subheader, packs them into message buffers,
        sends an 'apply_request', and records the msg_id in the outstanding
        set, history, and per-engine bookkeeping.

        Returns
        -------
        dict : the sent message, as returned by session.send.
        """

        if self._closed:
            raise RuntimeError("Client cannot be used after its sockets have been closed")

        # defaults:
        args = args if args is not None else []
        kwargs = kwargs if kwargs is not None else {}
        subheader = subheader if subheader is not None else {}

        # validate arguments
        if not callable(f) and not isinstance(f, Reference):
            raise TypeError("f must be callable, not %s"%type(f))
        if not isinstance(args, (tuple, list)):
            raise TypeError("args must be tuple or list, not %s"%type(args))
        if not isinstance(kwargs, dict):
            raise TypeError("kwargs must be dict, not %s"%type(kwargs))
        if not isinstance(subheader, dict):
            raise TypeError("subheader must be dict, not %s"%type(subheader))

        # serialize f, args, kwargs into the message's buffer list
        bufs = util.pack_apply_message(f,args,kwargs)

        msg = self.session.send(socket, "apply_request", buffers=bufs, ident=ident,
                            subheader=subheader, track=track)

        msg_id = msg['header']['msg_id']
        self.outstanding.add(msg_id)
        if ident:
            # possibly routed to a specific engine
            if isinstance(ident, list):
                ident = ident[-1]
            if ident in self._engines.values():
                # save for later, in case of engine death
                self._outstanding_dict[ident].add(msg_id)
        self.history.append(msg_id)
        self.metadata[msg_id]['submitted'] = datetime.now()

        return msg
1158
1185
    def send_execute_request(self, socket, code, silent=True, subheader=None, ident=None):
        """construct and send an execute request via a socket.

        Validates `code` and `subheader`, sends an 'execute_request', and
        records the msg_id in the outstanding set, history, and per-engine
        bookkeeping — mirroring send_apply_request.

        Returns
        -------
        dict : the sent message, as returned by session.send.
        """

        if self._closed:
            raise RuntimeError("Client cannot be used after its sockets have been closed")

        # defaults:
        subheader = subheader if subheader is not None else {}

        # validate arguments
        if not isinstance(code, basestring):
            raise TypeError("code must be text, not %s" % type(code))
        if not isinstance(subheader, dict):
            raise TypeError("subheader must be dict, not %s" % type(subheader))

        # user_variables/user_expressions are unused here but required
        # by the execute_request message schema
        content = dict(code=code, silent=bool(silent), user_variables=[], user_expressions={})


        msg = self.session.send(socket, "execute_request", content=content, ident=ident,
                            subheader=subheader)

        msg_id = msg['header']['msg_id']
        self.outstanding.add(msg_id)
        if ident:
            # possibly routed to a specific engine
            if isinstance(ident, list):
                ident = ident[-1]
            if ident in self._engines.values():
                # save for later, in case of engine death
                self._outstanding_dict[ident].add(msg_id)
        self.history.append(msg_id)
        self.metadata[msg_id]['submitted'] = datetime.now()

        return msg
1195
1222
1196 #--------------------------------------------------------------------------
1223 #--------------------------------------------------------------------------
1197 # construct a View object
1224 # construct a View object
1198 #--------------------------------------------------------------------------
1225 #--------------------------------------------------------------------------
1199
1226
    def load_balanced_view(self, targets=None):
        """construct a LoadBalancedView object.

        If no arguments are specified, create a LoadBalancedView
        using all engines.

        Parameters
        ----------

        targets: list,slice,int,etc. [default: use all engines]
            The subset of engines across which to load-balance
        """
        if targets == 'all':
            # None means "all engines" for a LoadBalancedView
            targets = None
        if targets is not None:
            targets = self._build_targets(targets)[1]
        return LoadBalancedView(client=self, socket=self._task_socket, targets=targets)
1217
1244
1218 def direct_view(self, targets='all'):
1245 def direct_view(self, targets='all'):
1219 """construct a DirectView object.
1246 """construct a DirectView object.
1220
1247
1221 If no targets are specified, create a DirectView using all engines.
1248 If no targets are specified, create a DirectView using all engines.
1222
1249
1223 rc.direct_view('all') is distinguished from rc[:] in that 'all' will
1250 rc.direct_view('all') is distinguished from rc[:] in that 'all' will
1224 evaluate the target engines at each execution, whereas rc[:] will connect to
1251 evaluate the target engines at each execution, whereas rc[:] will connect to
1225 all *current* engines, and that list will not change.
1252 all *current* engines, and that list will not change.
1226
1253
1227 That is, 'all' will always use all engines, whereas rc[:] will not use
1254 That is, 'all' will always use all engines, whereas rc[:] will not use
1228 engines added after the DirectView is constructed.
1255 engines added after the DirectView is constructed.
1229
1256
1230 Parameters
1257 Parameters
1231 ----------
1258 ----------
1232
1259
1233 targets: list,slice,int,etc. [default: use all engines]
1260 targets: list,slice,int,etc. [default: use all engines]
1234 The engines to use for the View
1261 The engines to use for the View
1235 """
1262 """
1236 single = isinstance(targets, int)
1263 single = isinstance(targets, int)
1237 # allow 'all' to be lazily evaluated at each execution
1264 # allow 'all' to be lazily evaluated at each execution
1238 if targets != 'all':
1265 if targets != 'all':
1239 targets = self._build_targets(targets)[1]
1266 targets = self._build_targets(targets)[1]
1240 if single:
1267 if single:
1241 targets = targets[0]
1268 targets = targets[0]
1242 return DirectView(client=self, socket=self._mux_socket, targets=targets)
1269 return DirectView(client=self, socket=self._mux_socket, targets=targets)
1243
1270
1244 #--------------------------------------------------------------------------
1271 #--------------------------------------------------------------------------
1245 # Query methods
1272 # Query methods
1246 #--------------------------------------------------------------------------
1273 #--------------------------------------------------------------------------
1247
1274
1248 @spin_first
1275 @spin_first
1249 def get_result(self, indices_or_msg_ids=None, block=None):
1276 def get_result(self, indices_or_msg_ids=None, block=None):
1250 """Retrieve a result by msg_id or history index, wrapped in an AsyncResult object.
1277 """Retrieve a result by msg_id or history index, wrapped in an AsyncResult object.
1251
1278
1252 If the client already has the results, no request to the Hub will be made.
1279 If the client already has the results, no request to the Hub will be made.
1253
1280
1254 This is a convenient way to construct AsyncResult objects, which are wrappers
1281 This is a convenient way to construct AsyncResult objects, which are wrappers
1255 that include metadata about execution, and allow for awaiting results that
1282 that include metadata about execution, and allow for awaiting results that
1256 were not submitted by this Client.
1283 were not submitted by this Client.
1257
1284
1258 It can also be a convenient way to retrieve the metadata associated with
1285 It can also be a convenient way to retrieve the metadata associated with
1259 blocking execution, since it always retrieves
1286 blocking execution, since it always retrieves
1260
1287
1261 Examples
1288 Examples
1262 --------
1289 --------
1263 ::
1290 ::
1264
1291
1265 In [10]: r = client.apply()
1292 In [10]: r = client.apply()
1266
1293
1267 Parameters
1294 Parameters
1268 ----------
1295 ----------
1269
1296
1270 indices_or_msg_ids : integer history index, str msg_id, or list of either
1297 indices_or_msg_ids : integer history index, str msg_id, or list of either
1271 The indices or msg_ids of indices to be retrieved
1298 The indices or msg_ids of indices to be retrieved
1272
1299
1273 block : bool
1300 block : bool
1274 Whether to wait for the result to be done
1301 Whether to wait for the result to be done
1275
1302
1276 Returns
1303 Returns
1277 -------
1304 -------
1278
1305
1279 AsyncResult
1306 AsyncResult
1280 A single AsyncResult object will always be returned.
1307 A single AsyncResult object will always be returned.
1281
1308
1282 AsyncHubResult
1309 AsyncHubResult
1283 A subclass of AsyncResult that retrieves results from the Hub
1310 A subclass of AsyncResult that retrieves results from the Hub
1284
1311
1285 """
1312 """
1286 block = self.block if block is None else block
1313 block = self.block if block is None else block
1287 if indices_or_msg_ids is None:
1314 if indices_or_msg_ids is None:
1288 indices_or_msg_ids = -1
1315 indices_or_msg_ids = -1
1289
1316
1290 if not isinstance(indices_or_msg_ids, (list,tuple)):
1317 if not isinstance(indices_or_msg_ids, (list,tuple)):
1291 indices_or_msg_ids = [indices_or_msg_ids]
1318 indices_or_msg_ids = [indices_or_msg_ids]
1292
1319
1293 theids = []
1320 theids = []
1294 for id in indices_or_msg_ids:
1321 for id in indices_or_msg_ids:
1295 if isinstance(id, int):
1322 if isinstance(id, int):
1296 id = self.history[id]
1323 id = self.history[id]
1297 if not isinstance(id, basestring):
1324 if not isinstance(id, basestring):
1298 raise TypeError("indices must be str or int, not %r"%id)
1325 raise TypeError("indices must be str or int, not %r"%id)
1299 theids.append(id)
1326 theids.append(id)
1300
1327
1301 local_ids = filter(lambda msg_id: msg_id in self.history or msg_id in self.results, theids)
1328 local_ids = filter(lambda msg_id: msg_id in self.history or msg_id in self.results, theids)
1302 remote_ids = filter(lambda msg_id: msg_id not in local_ids, theids)
1329 remote_ids = filter(lambda msg_id: msg_id not in local_ids, theids)
1303
1330
1304 if remote_ids:
1331 if remote_ids:
1305 ar = AsyncHubResult(self, msg_ids=theids)
1332 ar = AsyncHubResult(self, msg_ids=theids)
1306 else:
1333 else:
1307 ar = AsyncResult(self, msg_ids=theids)
1334 ar = AsyncResult(self, msg_ids=theids)
1308
1335
1309 if block:
1336 if block:
1310 ar.wait()
1337 ar.wait()
1311
1338
1312 return ar
1339 return ar
1313
1340
1314 @spin_first
1341 @spin_first
1315 def resubmit(self, indices_or_msg_ids=None, subheader=None, block=None):
1342 def resubmit(self, indices_or_msg_ids=None, subheader=None, block=None):
1316 """Resubmit one or more tasks.
1343 """Resubmit one or more tasks.
1317
1344
1318 in-flight tasks may not be resubmitted.
1345 in-flight tasks may not be resubmitted.
1319
1346
1320 Parameters
1347 Parameters
1321 ----------
1348 ----------
1322
1349
1323 indices_or_msg_ids : integer history index, str msg_id, or list of either
1350 indices_or_msg_ids : integer history index, str msg_id, or list of either
1324 The indices or msg_ids of indices to be retrieved
1351 The indices or msg_ids of indices to be retrieved
1325
1352
1326 block : bool
1353 block : bool
1327 Whether to wait for the result to be done
1354 Whether to wait for the result to be done
1328
1355
1329 Returns
1356 Returns
1330 -------
1357 -------
1331
1358
1332 AsyncHubResult
1359 AsyncHubResult
1333 A subclass of AsyncResult that retrieves results from the Hub
1360 A subclass of AsyncResult that retrieves results from the Hub
1334
1361
1335 """
1362 """
1336 block = self.block if block is None else block
1363 block = self.block if block is None else block
1337 if indices_or_msg_ids is None:
1364 if indices_or_msg_ids is None:
1338 indices_or_msg_ids = -1
1365 indices_or_msg_ids = -1
1339
1366
1340 if not isinstance(indices_or_msg_ids, (list,tuple)):
1367 if not isinstance(indices_or_msg_ids, (list,tuple)):
1341 indices_or_msg_ids = [indices_or_msg_ids]
1368 indices_or_msg_ids = [indices_or_msg_ids]
1342
1369
1343 theids = []
1370 theids = []
1344 for id in indices_or_msg_ids:
1371 for id in indices_or_msg_ids:
1345 if isinstance(id, int):
1372 if isinstance(id, int):
1346 id = self.history[id]
1373 id = self.history[id]
1347 if not isinstance(id, basestring):
1374 if not isinstance(id, basestring):
1348 raise TypeError("indices must be str or int, not %r"%id)
1375 raise TypeError("indices must be str or int, not %r"%id)
1349 theids.append(id)
1376 theids.append(id)
1350
1377
1351 content = dict(msg_ids = theids)
1378 content = dict(msg_ids = theids)
1352
1379
1353 self.session.send(self._query_socket, 'resubmit_request', content)
1380 self.session.send(self._query_socket, 'resubmit_request', content)
1354
1381
1355 zmq.select([self._query_socket], [], [])
1382 zmq.select([self._query_socket], [], [])
1356 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1383 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1357 if self.debug:
1384 if self.debug:
1358 pprint(msg)
1385 pprint(msg)
1359 content = msg['content']
1386 content = msg['content']
1360 if content['status'] != 'ok':
1387 if content['status'] != 'ok':
1361 raise self._unwrap_exception(content)
1388 raise self._unwrap_exception(content)
1362 mapping = content['resubmitted']
1389 mapping = content['resubmitted']
1363 new_ids = [ mapping[msg_id] for msg_id in theids ]
1390 new_ids = [ mapping[msg_id] for msg_id in theids ]
1364
1391
1365 ar = AsyncHubResult(self, msg_ids=new_ids)
1392 ar = AsyncHubResult(self, msg_ids=new_ids)
1366
1393
1367 if block:
1394 if block:
1368 ar.wait()
1395 ar.wait()
1369
1396
1370 return ar
1397 return ar
1371
1398
1372 @spin_first
1399 @spin_first
1373 def result_status(self, msg_ids, status_only=True):
1400 def result_status(self, msg_ids, status_only=True):
1374 """Check on the status of the result(s) of the apply request with `msg_ids`.
1401 """Check on the status of the result(s) of the apply request with `msg_ids`.
1375
1402
1376 If status_only is False, then the actual results will be retrieved, else
1403 If status_only is False, then the actual results will be retrieved, else
1377 only the status of the results will be checked.
1404 only the status of the results will be checked.
1378
1405
1379 Parameters
1406 Parameters
1380 ----------
1407 ----------
1381
1408
1382 msg_ids : list of msg_ids
1409 msg_ids : list of msg_ids
1383 if int:
1410 if int:
1384 Passed as index to self.history for convenience.
1411 Passed as index to self.history for convenience.
1385 status_only : bool (default: True)
1412 status_only : bool (default: True)
1386 if False:
1413 if False:
1387 Retrieve the actual results of completed tasks.
1414 Retrieve the actual results of completed tasks.
1388
1415
1389 Returns
1416 Returns
1390 -------
1417 -------
1391
1418
1392 results : dict
1419 results : dict
1393 There will always be the keys 'pending' and 'completed', which will
1420 There will always be the keys 'pending' and 'completed', which will
1394 be lists of msg_ids that are incomplete or complete. If `status_only`
1421 be lists of msg_ids that are incomplete or complete. If `status_only`
1395 is False, then completed results will be keyed by their `msg_id`.
1422 is False, then completed results will be keyed by their `msg_id`.
1396 """
1423 """
1397 if not isinstance(msg_ids, (list,tuple)):
1424 if not isinstance(msg_ids, (list,tuple)):
1398 msg_ids = [msg_ids]
1425 msg_ids = [msg_ids]
1399
1426
1400 theids = []
1427 theids = []
1401 for msg_id in msg_ids:
1428 for msg_id in msg_ids:
1402 if isinstance(msg_id, int):
1429 if isinstance(msg_id, int):
1403 msg_id = self.history[msg_id]
1430 msg_id = self.history[msg_id]
1404 if not isinstance(msg_id, basestring):
1431 if not isinstance(msg_id, basestring):
1405 raise TypeError("msg_ids must be str, not %r"%msg_id)
1432 raise TypeError("msg_ids must be str, not %r"%msg_id)
1406 theids.append(msg_id)
1433 theids.append(msg_id)
1407
1434
1408 completed = []
1435 completed = []
1409 local_results = {}
1436 local_results = {}
1410
1437
1411 # comment this block out to temporarily disable local shortcut:
1438 # comment this block out to temporarily disable local shortcut:
1412 for msg_id in theids:
1439 for msg_id in theids:
1413 if msg_id in self.results:
1440 if msg_id in self.results:
1414 completed.append(msg_id)
1441 completed.append(msg_id)
1415 local_results[msg_id] = self.results[msg_id]
1442 local_results[msg_id] = self.results[msg_id]
1416 theids.remove(msg_id)
1443 theids.remove(msg_id)
1417
1444
1418 if theids: # some not locally cached
1445 if theids: # some not locally cached
1419 content = dict(msg_ids=theids, status_only=status_only)
1446 content = dict(msg_ids=theids, status_only=status_only)
1420 msg = self.session.send(self._query_socket, "result_request", content=content)
1447 msg = self.session.send(self._query_socket, "result_request", content=content)
1421 zmq.select([self._query_socket], [], [])
1448 zmq.select([self._query_socket], [], [])
1422 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1449 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1423 if self.debug:
1450 if self.debug:
1424 pprint(msg)
1451 pprint(msg)
1425 content = msg['content']
1452 content = msg['content']
1426 if content['status'] != 'ok':
1453 if content['status'] != 'ok':
1427 raise self._unwrap_exception(content)
1454 raise self._unwrap_exception(content)
1428 buffers = msg['buffers']
1455 buffers = msg['buffers']
1429 else:
1456 else:
1430 content = dict(completed=[],pending=[])
1457 content = dict(completed=[],pending=[])
1431
1458
1432 content['completed'].extend(completed)
1459 content['completed'].extend(completed)
1433
1460
1434 if status_only:
1461 if status_only:
1435 return content
1462 return content
1436
1463
1437 failures = []
1464 failures = []
1438 # load cached results into result:
1465 # load cached results into result:
1439 content.update(local_results)
1466 content.update(local_results)
1440
1467
1441 # update cache with results:
1468 # update cache with results:
1442 for msg_id in sorted(theids):
1469 for msg_id in sorted(theids):
1443 if msg_id in content['completed']:
1470 if msg_id in content['completed']:
1444 rec = content[msg_id]
1471 rec = content[msg_id]
1445 parent = rec['header']
1472 parent = rec['header']
1446 header = rec['result_header']
1473 header = rec['result_header']
1447 rcontent = rec['result_content']
1474 rcontent = rec['result_content']
1448 iodict = rec['io']
1475 iodict = rec['io']
1449 if isinstance(rcontent, str):
1476 if isinstance(rcontent, str):
1450 rcontent = self.session.unpack(rcontent)
1477 rcontent = self.session.unpack(rcontent)
1451
1478
1452 md = self.metadata[msg_id]
1479 md = self.metadata[msg_id]
1453 md.update(self._extract_metadata(header, parent, rcontent))
1480 md.update(self._extract_metadata(header, parent, rcontent))
1454 if rec.get('received'):
1481 if rec.get('received'):
1455 md['received'] = rec['received']
1482 md['received'] = rec['received']
1456 md.update(iodict)
1483 md.update(iodict)
1457
1484
1458 if rcontent['status'] == 'ok':
1485 if rcontent['status'] == 'ok':
1459 res,buffers = util.unserialize_object(buffers)
1486 res,buffers = util.unserialize_object(buffers)
1460 else:
1487 else:
1461 print rcontent
1488 print rcontent
1462 res = self._unwrap_exception(rcontent)
1489 res = self._unwrap_exception(rcontent)
1463 failures.append(res)
1490 failures.append(res)
1464
1491
1465 self.results[msg_id] = res
1492 self.results[msg_id] = res
1466 content[msg_id] = res
1493 content[msg_id] = res
1467
1494
1468 if len(theids) == 1 and failures:
1495 if len(theids) == 1 and failures:
1469 raise failures[0]
1496 raise failures[0]
1470
1497
1471 error.collect_exceptions(failures, "result_status")
1498 error.collect_exceptions(failures, "result_status")
1472 return content
1499 return content
1473
1500
1474 @spin_first
1501 @spin_first
1475 def queue_status(self, targets='all', verbose=False):
1502 def queue_status(self, targets='all', verbose=False):
1476 """Fetch the status of engine queues.
1503 """Fetch the status of engine queues.
1477
1504
1478 Parameters
1505 Parameters
1479 ----------
1506 ----------
1480
1507
1481 targets : int/str/list of ints/strs
1508 targets : int/str/list of ints/strs
1482 the engines whose states are to be queried.
1509 the engines whose states are to be queried.
1483 default : all
1510 default : all
1484 verbose : bool
1511 verbose : bool
1485 Whether to return lengths only, or lists of ids for each element
1512 Whether to return lengths only, or lists of ids for each element
1486 """
1513 """
1487 if targets == 'all':
1514 if targets == 'all':
1488 # allow 'all' to be evaluated on the engine
1515 # allow 'all' to be evaluated on the engine
1489 engine_ids = None
1516 engine_ids = None
1490 else:
1517 else:
1491 engine_ids = self._build_targets(targets)[1]
1518 engine_ids = self._build_targets(targets)[1]
1492 content = dict(targets=engine_ids, verbose=verbose)
1519 content = dict(targets=engine_ids, verbose=verbose)
1493 self.session.send(self._query_socket, "queue_request", content=content)
1520 self.session.send(self._query_socket, "queue_request", content=content)
1494 idents,msg = self.session.recv(self._query_socket, 0)
1521 idents,msg = self.session.recv(self._query_socket, 0)
1495 if self.debug:
1522 if self.debug:
1496 pprint(msg)
1523 pprint(msg)
1497 content = msg['content']
1524 content = msg['content']
1498 status = content.pop('status')
1525 status = content.pop('status')
1499 if status != 'ok':
1526 if status != 'ok':
1500 raise self._unwrap_exception(content)
1527 raise self._unwrap_exception(content)
1501 content = rekey(content)
1528 content = rekey(content)
1502 if isinstance(targets, int):
1529 if isinstance(targets, int):
1503 return content[targets]
1530 return content[targets]
1504 else:
1531 else:
1505 return content
1532 return content
1506
1533
1507 @spin_first
1534 @spin_first
1508 def purge_results(self, jobs=[], targets=[]):
1535 def purge_results(self, jobs=[], targets=[]):
1509 """Tell the Hub to forget results.
1536 """Tell the Hub to forget results.
1510
1537
1511 Individual results can be purged by msg_id, or the entire
1538 Individual results can be purged by msg_id, or the entire
1512 history of specific targets can be purged.
1539 history of specific targets can be purged.
1513
1540
1514 Use `purge_results('all')` to scrub everything from the Hub's db.
1541 Use `purge_results('all')` to scrub everything from the Hub's db.
1515
1542
1516 Parameters
1543 Parameters
1517 ----------
1544 ----------
1518
1545
1519 jobs : str or list of str or AsyncResult objects
1546 jobs : str or list of str or AsyncResult objects
1520 the msg_ids whose results should be forgotten.
1547 the msg_ids whose results should be forgotten.
1521 targets : int/str/list of ints/strs
1548 targets : int/str/list of ints/strs
1522 The targets, by int_id, whose entire history is to be purged.
1549 The targets, by int_id, whose entire history is to be purged.
1523
1550
1524 default : None
1551 default : None
1525 """
1552 """
1526 if not targets and not jobs:
1553 if not targets and not jobs:
1527 raise ValueError("Must specify at least one of `targets` and `jobs`")
1554 raise ValueError("Must specify at least one of `targets` and `jobs`")
1528 if targets:
1555 if targets:
1529 targets = self._build_targets(targets)[1]
1556 targets = self._build_targets(targets)[1]
1530
1557
1531 # construct msg_ids from jobs
1558 # construct msg_ids from jobs
1532 if jobs == 'all':
1559 if jobs == 'all':
1533 msg_ids = jobs
1560 msg_ids = jobs
1534 else:
1561 else:
1535 msg_ids = []
1562 msg_ids = []
1536 if isinstance(jobs, (basestring,AsyncResult)):
1563 if isinstance(jobs, (basestring,AsyncResult)):
1537 jobs = [jobs]
1564 jobs = [jobs]
1538 bad_ids = filter(lambda obj: not isinstance(obj, (basestring, AsyncResult)), jobs)
1565 bad_ids = filter(lambda obj: not isinstance(obj, (basestring, AsyncResult)), jobs)
1539 if bad_ids:
1566 if bad_ids:
1540 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1567 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1541 for j in jobs:
1568 for j in jobs:
1542 if isinstance(j, AsyncResult):
1569 if isinstance(j, AsyncResult):
1543 msg_ids.extend(j.msg_ids)
1570 msg_ids.extend(j.msg_ids)
1544 else:
1571 else:
1545 msg_ids.append(j)
1572 msg_ids.append(j)
1546
1573
1547 content = dict(engine_ids=targets, msg_ids=msg_ids)
1574 content = dict(engine_ids=targets, msg_ids=msg_ids)
1548 self.session.send(self._query_socket, "purge_request", content=content)
1575 self.session.send(self._query_socket, "purge_request", content=content)
1549 idents, msg = self.session.recv(self._query_socket, 0)
1576 idents, msg = self.session.recv(self._query_socket, 0)
1550 if self.debug:
1577 if self.debug:
1551 pprint(msg)
1578 pprint(msg)
1552 content = msg['content']
1579 content = msg['content']
1553 if content['status'] != 'ok':
1580 if content['status'] != 'ok':
1554 raise self._unwrap_exception(content)
1581 raise self._unwrap_exception(content)
1555
1582
1556 @spin_first
1583 @spin_first
1557 def hub_history(self):
1584 def hub_history(self):
1558 """Get the Hub's history
1585 """Get the Hub's history
1559
1586
1560 Just like the Client, the Hub has a history, which is a list of msg_ids.
1587 Just like the Client, the Hub has a history, which is a list of msg_ids.
1561 This will contain the history of all clients, and, depending on configuration,
1588 This will contain the history of all clients, and, depending on configuration,
1562 may contain history across multiple cluster sessions.
1589 may contain history across multiple cluster sessions.
1563
1590
1564 Any msg_id returned here is a valid argument to `get_result`.
1591 Any msg_id returned here is a valid argument to `get_result`.
1565
1592
1566 Returns
1593 Returns
1567 -------
1594 -------
1568
1595
1569 msg_ids : list of strs
1596 msg_ids : list of strs
1570 list of all msg_ids, ordered by task submission time.
1597 list of all msg_ids, ordered by task submission time.
1571 """
1598 """
1572
1599
1573 self.session.send(self._query_socket, "history_request", content={})
1600 self.session.send(self._query_socket, "history_request", content={})
1574 idents, msg = self.session.recv(self._query_socket, 0)
1601 idents, msg = self.session.recv(self._query_socket, 0)
1575
1602
1576 if self.debug:
1603 if self.debug:
1577 pprint(msg)
1604 pprint(msg)
1578 content = msg['content']
1605 content = msg['content']
1579 if content['status'] != 'ok':
1606 if content['status'] != 'ok':
1580 raise self._unwrap_exception(content)
1607 raise self._unwrap_exception(content)
1581 else:
1608 else:
1582 return content['history']
1609 return content['history']
1583
1610
1584 @spin_first
1611 @spin_first
1585 def db_query(self, query, keys=None):
1612 def db_query(self, query, keys=None):
1586 """Query the Hub's TaskRecord database
1613 """Query the Hub's TaskRecord database
1587
1614
1588 This will return a list of task record dicts that match `query`
1615 This will return a list of task record dicts that match `query`
1589
1616
1590 Parameters
1617 Parameters
1591 ----------
1618 ----------
1592
1619
1593 query : mongodb query dict
1620 query : mongodb query dict
1594 The search dict. See mongodb query docs for details.
1621 The search dict. See mongodb query docs for details.
1595 keys : list of strs [optional]
1622 keys : list of strs [optional]
1596 The subset of keys to be returned. The default is to fetch everything but buffers.
1623 The subset of keys to be returned. The default is to fetch everything but buffers.
1597 'msg_id' will *always* be included.
1624 'msg_id' will *always* be included.
1598 """
1625 """
1599 if isinstance(keys, basestring):
1626 if isinstance(keys, basestring):
1600 keys = [keys]
1627 keys = [keys]
1601 content = dict(query=query, keys=keys)
1628 content = dict(query=query, keys=keys)
1602 self.session.send(self._query_socket, "db_request", content=content)
1629 self.session.send(self._query_socket, "db_request", content=content)
1603 idents, msg = self.session.recv(self._query_socket, 0)
1630 idents, msg = self.session.recv(self._query_socket, 0)
1604 if self.debug:
1631 if self.debug:
1605 pprint(msg)
1632 pprint(msg)
1606 content = msg['content']
1633 content = msg['content']
1607 if content['status'] != 'ok':
1634 if content['status'] != 'ok':
1608 raise self._unwrap_exception(content)
1635 raise self._unwrap_exception(content)
1609
1636
1610 records = content['records']
1637 records = content['records']
1611
1638
1612 buffer_lens = content['buffer_lens']
1639 buffer_lens = content['buffer_lens']
1613 result_buffer_lens = content['result_buffer_lens']
1640 result_buffer_lens = content['result_buffer_lens']
1614 buffers = msg['buffers']
1641 buffers = msg['buffers']
1615 has_bufs = buffer_lens is not None
1642 has_bufs = buffer_lens is not None
1616 has_rbufs = result_buffer_lens is not None
1643 has_rbufs = result_buffer_lens is not None
1617 for i,rec in enumerate(records):
1644 for i,rec in enumerate(records):
1618 # relink buffers
1645 # relink buffers
1619 if has_bufs:
1646 if has_bufs:
1620 blen = buffer_lens[i]
1647 blen = buffer_lens[i]
1621 rec['buffers'], buffers = buffers[:blen],buffers[blen:]
1648 rec['buffers'], buffers = buffers[:blen],buffers[blen:]
1622 if has_rbufs:
1649 if has_rbufs:
1623 blen = result_buffer_lens[i]
1650 blen = result_buffer_lens[i]
1624 rec['result_buffers'], buffers = buffers[:blen],buffers[blen:]
1651 rec['result_buffers'], buffers = buffers[:blen],buffers[blen:]
1625
1652
1626 return records
1653 return records
1627
1654
# Explicit export list: wildcard imports of this module only pick up Client.
__all__ = [ 'Client' ]
@@ -1,144 +1,213 b''
1 """base class for parallel client tests
1 """base class for parallel client tests
2
2
3 Authors:
3 Authors:
4
4
5 * Min RK
5 * Min RK
6 """
6 """
7
7
8 #-------------------------------------------------------------------------------
8 #-------------------------------------------------------------------------------
9 # Copyright (C) 2011 The IPython Development Team
9 # Copyright (C) 2011 The IPython Development Team
10 #
10 #
11 # Distributed under the terms of the BSD License. The full license is in
11 # Distributed under the terms of the BSD License. The full license is in
12 # the file COPYING, distributed as part of this software.
12 # the file COPYING, distributed as part of this software.
13 #-------------------------------------------------------------------------------
13 #-------------------------------------------------------------------------------
14
14
15 import sys
15 import sys
16 import tempfile
16 import tempfile
17 import time
17 import time
18 from StringIO import StringIO
18
19
19 from nose import SkipTest
20 from nose import SkipTest
20
21
21 import zmq
22 import zmq
22 from zmq.tests import BaseZMQTestCase
23 from zmq.tests import BaseZMQTestCase
23
24
24 from IPython.external.decorator import decorator
25 from IPython.external.decorator import decorator
25
26
26 from IPython.parallel import error
27 from IPython.parallel import error
27 from IPython.parallel import Client
28 from IPython.parallel import Client
28
29
29 from IPython.parallel.tests import launchers, add_engines
30 from IPython.parallel.tests import launchers, add_engines
30
31
31 # simple tasks for use in apply tests
32 # simple tasks for use in apply tests
32
33
def segfault():
    """this will segfault"""
    from ctypes import memset
    # writing to address -1 is an unconditional invalid memory access
    memset(-1, 0, 1)
37
38
def crash():
    """from stdlib crashers in the test suite"""
    import types
    if sys.platform.startswith('win'):
        import ctypes
        # SEM_NOGPFAULTERRORBOX: suppress the Windows crash dialog so the
        # process dies silently instead of hanging the test run
        ctypes.windll.kernel32.SetErrorMode(0x0002);
    # hand-crafted bogus bytecode: executing this code object crashes the
    # interpreter (the argument order/values are deliberate — do not reformat)
    args = [ 0, 0, 0, 0, b'\x04\x71\x00\x00', (), (), (), '', '', 1, b'']
    if sys.version_info[0] >= 3:
        # Python3 adds 'kwonlyargcount' as the second argument to Code
        args.insert(1, 0)

    co = types.CodeType(*args)
    exec(co)
51
52
def wait(n):
    """sleep for a time"""
    from time import sleep
    sleep(n)
    return n
57
58
def raiser(eclass):
    """raise an exception"""
    # instantiate first, then raise, so eclass() errors surface clearly
    err = eclass()
    raise err
61
62
def generate_output():
    """function for testing output

    publishes two outputs of each type, and returns
    a rich displayable object.
    """

    import sys
    from IPython.core.display import display, HTML, Math

    # equivalent to py2 `print` statements, byte-for-byte identical output
    sys.stdout.write("stdout\n")
    sys.stderr.write("stderr\n")

    display(HTML("<b>HTML</b>"))

    sys.stdout.write("stdout2\n")
    sys.stderr.write("stderr2\n")

    display(Math(r"\alpha=\beta"))

    return Math("42")
84
62 # test decorator for skipping tests when libraries are unavailable
85 # test decorator for skipping tests when libraries are unavailable
def skip_without(*names):
    """skip a test if some names are not importable"""
    @decorator
    def skip_without_names(f, *args, **kwargs):
        """decorator to skip tests in the absence of numpy."""
        # probe each required module; any missing import aborts the test
        for modname in names:
            try:
                __import__(modname)
            except ImportError:
                raise SkipTest
        return f(*args, **kwargs)
    return skip_without_names
75
98
99 #-------------------------------------------------------------------------------
100 # Classes
101 #-------------------------------------------------------------------------------
102
class CapturedIO(object):
    """Simple object for containing captured stdout/err StringIO objects"""

    def __init__(self, stdout, stderr):
        # keep the raw StringIO objects; text is read lazily via properties
        self.stdout_io = stdout
        self.stderr_io = stderr

    @property
    def stdout(self):
        """everything written to the captured stdout so far"""
        return self.stdout_io.getvalue()

    @property
    def stderr(self):
        """everything written to the captured stderr so far"""
        return self.stderr_io.getvalue()
117
118
class capture_output(object):
    """context manager for capturing stdout/err"""

    def __enter__(self):
        # remember the real streams so __exit__ can restore them
        self.sys_stdout = sys.stdout
        self.sys_stderr = sys.stderr
        sys.stdout = stdout = StringIO()
        sys.stderr = stderr = StringIO()
        return CapturedIO(stdout, stderr)

    def __exit__(self, exc_type, exc_value, traceback):
        # always restore, even if the body raised
        sys.stdout = self.sys_stdout
        sys.stderr = self.sys_stderr
132
133
class ClusterTestCase(BaseZMQTestCase):
    """Base TestCase for parallel-client tests.

    Manages a Client connection per test, tracks engines added during the
    test, and cleans up zmq sockets on teardown.
    """

    def add_engines(self, n=1, block=True):
        """add multiple engines to our cluster"""
        self.engines.extend(add_engines(n))
        if block:
            self.wait_on_engines()

    def minimum_engines(self, n=1, block=True):
        """add engines until there are at least n connected"""
        self.engines.extend(add_engines(n, total=True))
        if block:
            self.wait_on_engines()


    def wait_on_engines(self, timeout=5):
        """wait for our engines to connect."""
        # expected count = engines we started + engines already present
        n = len(self.engines)+self.base_engine_count
        tic = time.time()
        # poll until the client sees all expected engine ids, or time out
        while time.time()-tic < timeout and len(self.client.ids) < n:
            time.sleep(0.1)

        assert not len(self.client.ids) < n, "waiting for engines timed out"

    def connect_client(self):
        """connect a client with my Context, and track its sockets for cleanup"""
        c = Client(profile='iptest', context=self.context)
        for name in filter(lambda n:n.endswith('socket'), dir(c)):
            s = getattr(c, name)
            # LINGER=0 so teardown never blocks on undelivered messages
            s.setsockopt(zmq.LINGER, 0)
            self.sockets.append(s)
        return c

    def assertRaisesRemote(self, etype, f, *args, **kwargs):
        # Remote errors arrive wrapped (CompositeError/RemoteError), so the
        # expected type is matched by exception *name* (ename), not isinstance.
        try:
            try:
                f(*args, **kwargs)
            except error.CompositeError as e:
                # re-raise one of the wrapped remote exceptions
                e.raise_exception()
        except error.RemoteError as e:
            self.assertEquals(etype.__name__, e.ename, "Should have raised %r, but raised %r"%(etype.__name__, e.ename))
        else:
            self.fail("should have raised a RemoteError")

    def _wait_for(self, f, timeout=10):
        """wait for a condition"""
        tic = time.time()
        while time.time() <= tic + timeout:
            if f():
                return
            time.sleep(0.1)
            # spin the client so pending messages can arrive
            self.client.spin()
        if not f():
            # timing-dependent conditions only warn here; the caller's
            # assertions decide whether the test actually fails
            print "Warning: Awaited condition never arrived"

    def setUp(self):
        BaseZMQTestCase.setUp(self)
        self.client = self.connect_client()
        # start every test with clean engine namespaces:
        self.client.clear(block=True)
        self.base_engine_count=len(self.client.ids)
        self.engines=[]

    def tearDown(self):
        # self.client.clear(block=True)
        # close fds:
        # drop launchers whose processes have already exited
        for e in filter(lambda e: e.poll() is not None, launchers):
            launchers.remove(e)

        # allow flushing of incoming messages to prevent crash on socket close
        self.client.wait(timeout=2)
        # time.sleep(2)
        self.client.spin()
        self.client.close()
        BaseZMQTestCase.tearDown(self)
        # this will be redundant when pyzmq merges PR #88
        # self.context.term()
        # print tempfile.TemporaryFile().fileno(),
        # sys.stdout.flush()
144 No newline at end of file
213
@@ -1,694 +1,572 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """test View objects
2 """test View objects
3
3
4 Authors:
4 Authors:
5
5
6 * Min RK
6 * Min RK
7 """
7 """
8 #-------------------------------------------------------------------------------
8 #-------------------------------------------------------------------------------
9 # Copyright (C) 2011 The IPython Development Team
9 # Copyright (C) 2011 The IPython Development Team
10 #
10 #
11 # Distributed under the terms of the BSD License. The full license is in
11 # Distributed under the terms of the BSD License. The full license is in
12 # the file COPYING, distributed as part of this software.
12 # the file COPYING, distributed as part of this software.
13 #-------------------------------------------------------------------------------
13 #-------------------------------------------------------------------------------
14
14
15 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
16 # Imports
16 # Imports
17 #-------------------------------------------------------------------------------
17 #-------------------------------------------------------------------------------
18
18
19 import sys
19 import sys
20 import time
20 import time
21 from tempfile import mktemp
21 from tempfile import mktemp
22 from StringIO import StringIO
22 from StringIO import StringIO
23
23
24 import zmq
24 import zmq
25 from nose import SkipTest
25 from nose import SkipTest
26
26
27 from IPython.testing import decorators as dec
27 from IPython.testing import decorators as dec
28 from IPython.testing.ipunittest import ParametricTestCase
28 from IPython.testing.ipunittest import ParametricTestCase
29
29
30 from IPython import parallel as pmod
30 from IPython import parallel as pmod
31 from IPython.parallel import error
31 from IPython.parallel import error
32 from IPython.parallel import AsyncResult, AsyncHubResult, AsyncMapResult
32 from IPython.parallel import AsyncResult, AsyncHubResult, AsyncMapResult
33 from IPython.parallel import DirectView
33 from IPython.parallel import DirectView
34 from IPython.parallel.util import interactive
34 from IPython.parallel.util import interactive
35
35
36 from IPython.parallel.tests import add_engines
36 from IPython.parallel.tests import add_engines
37
37
38 from .clienttest import ClusterTestCase, crash, wait, skip_without
38 from .clienttest import ClusterTestCase, crash, wait, skip_without
39
39
40 def setup():
40 def setup():
41 add_engines(3, total=True)
41 add_engines(3, total=True)
42
42
43 class TestView(ClusterTestCase, ParametricTestCase):
43 class TestView(ClusterTestCase, ParametricTestCase):
44
44
45 def test_z_crash_mux(self):
45 def test_z_crash_mux(self):
46 """test graceful handling of engine death (direct)"""
46 """test graceful handling of engine death (direct)"""
47 raise SkipTest("crash tests disabled, due to undesirable crash reports")
47 raise SkipTest("crash tests disabled, due to undesirable crash reports")
48 # self.add_engines(1)
48 # self.add_engines(1)
49 eid = self.client.ids[-1]
49 eid = self.client.ids[-1]
50 ar = self.client[eid].apply_async(crash)
50 ar = self.client[eid].apply_async(crash)
51 self.assertRaisesRemote(error.EngineError, ar.get, 10)
51 self.assertRaisesRemote(error.EngineError, ar.get, 10)
52 eid = ar.engine_id
52 eid = ar.engine_id
53 tic = time.time()
53 tic = time.time()
54 while eid in self.client.ids and time.time()-tic < 5:
54 while eid in self.client.ids and time.time()-tic < 5:
55 time.sleep(.01)
55 time.sleep(.01)
56 self.client.spin()
56 self.client.spin()
57 self.assertFalse(eid in self.client.ids, "Engine should have died")
57 self.assertFalse(eid in self.client.ids, "Engine should have died")
58
58
59 def test_push_pull(self):
59 def test_push_pull(self):
60 """test pushing and pulling"""
60 """test pushing and pulling"""
61 data = dict(a=10, b=1.05, c=range(10), d={'e':(1,2),'f':'hi'})
61 data = dict(a=10, b=1.05, c=range(10), d={'e':(1,2),'f':'hi'})
62 t = self.client.ids[-1]
62 t = self.client.ids[-1]
63 v = self.client[t]
63 v = self.client[t]
64 push = v.push
64 push = v.push
65 pull = v.pull
65 pull = v.pull
66 v.block=True
66 v.block=True
67 nengines = len(self.client)
67 nengines = len(self.client)
68 push({'data':data})
68 push({'data':data})
69 d = pull('data')
69 d = pull('data')
70 self.assertEquals(d, data)
70 self.assertEquals(d, data)
71 self.client[:].push({'data':data})
71 self.client[:].push({'data':data})
72 d = self.client[:].pull('data', block=True)
72 d = self.client[:].pull('data', block=True)
73 self.assertEquals(d, nengines*[data])
73 self.assertEquals(d, nengines*[data])
74 ar = push({'data':data}, block=False)
74 ar = push({'data':data}, block=False)
75 self.assertTrue(isinstance(ar, AsyncResult))
75 self.assertTrue(isinstance(ar, AsyncResult))
76 r = ar.get()
76 r = ar.get()
77 ar = self.client[:].pull('data', block=False)
77 ar = self.client[:].pull('data', block=False)
78 self.assertTrue(isinstance(ar, AsyncResult))
78 self.assertTrue(isinstance(ar, AsyncResult))
79 r = ar.get()
79 r = ar.get()
80 self.assertEquals(r, nengines*[data])
80 self.assertEquals(r, nengines*[data])
81 self.client[:].push(dict(a=10,b=20))
81 self.client[:].push(dict(a=10,b=20))
82 r = self.client[:].pull(('a','b'), block=True)
82 r = self.client[:].pull(('a','b'), block=True)
83 self.assertEquals(r, nengines*[[10,20]])
83 self.assertEquals(r, nengines*[[10,20]])
84
84
85 def test_push_pull_function(self):
85 def test_push_pull_function(self):
86 "test pushing and pulling functions"
86 "test pushing and pulling functions"
87 def testf(x):
87 def testf(x):
88 return 2.0*x
88 return 2.0*x
89
89
90 t = self.client.ids[-1]
90 t = self.client.ids[-1]
91 v = self.client[t]
91 v = self.client[t]
92 v.block=True
92 v.block=True
93 push = v.push
93 push = v.push
94 pull = v.pull
94 pull = v.pull
95 execute = v.execute
95 execute = v.execute
96 push({'testf':testf})
96 push({'testf':testf})
97 r = pull('testf')
97 r = pull('testf')
98 self.assertEqual(r(1.0), testf(1.0))
98 self.assertEqual(r(1.0), testf(1.0))
99 execute('r = testf(10)')
99 execute('r = testf(10)')
100 r = pull('r')
100 r = pull('r')
101 self.assertEquals(r, testf(10))
101 self.assertEquals(r, testf(10))
102 ar = self.client[:].push({'testf':testf}, block=False)
102 ar = self.client[:].push({'testf':testf}, block=False)
103 ar.get()
103 ar.get()
104 ar = self.client[:].pull('testf', block=False)
104 ar = self.client[:].pull('testf', block=False)
105 rlist = ar.get()
105 rlist = ar.get()
106 for r in rlist:
106 for r in rlist:
107 self.assertEqual(r(1.0), testf(1.0))
107 self.assertEqual(r(1.0), testf(1.0))
108 execute("def g(x): return x*x")
108 execute("def g(x): return x*x")
109 r = pull(('testf','g'))
109 r = pull(('testf','g'))
110 self.assertEquals((r[0](10),r[1](10)), (testf(10), 100))
110 self.assertEquals((r[0](10),r[1](10)), (testf(10), 100))
111
111
112 def test_push_function_globals(self):
112 def test_push_function_globals(self):
113 """test that pushed functions have access to globals"""
113 """test that pushed functions have access to globals"""
114 @interactive
114 @interactive
115 def geta():
115 def geta():
116 return a
116 return a
117 # self.add_engines(1)
117 # self.add_engines(1)
118 v = self.client[-1]
118 v = self.client[-1]
119 v.block=True
119 v.block=True
120 v['f'] = geta
120 v['f'] = geta
121 self.assertRaisesRemote(NameError, v.execute, 'b=f()')
121 self.assertRaisesRemote(NameError, v.execute, 'b=f()')
122 v.execute('a=5')
122 v.execute('a=5')
123 v.execute('b=f()')
123 v.execute('b=f()')
124 self.assertEquals(v['b'], 5)
124 self.assertEquals(v['b'], 5)
125
125
126 def test_push_function_defaults(self):
126 def test_push_function_defaults(self):
127 """test that pushed functions preserve default args"""
127 """test that pushed functions preserve default args"""
128 def echo(a=10):
128 def echo(a=10):
129 return a
129 return a
130 v = self.client[-1]
130 v = self.client[-1]
131 v.block=True
131 v.block=True
132 v['f'] = echo
132 v['f'] = echo
133 v.execute('b=f()')
133 v.execute('b=f()')
134 self.assertEquals(v['b'], 10)
134 self.assertEquals(v['b'], 10)
135
135
136 def test_get_result(self):
136 def test_get_result(self):
137 """test getting results from the Hub."""
137 """test getting results from the Hub."""
138 c = pmod.Client(profile='iptest')
138 c = pmod.Client(profile='iptest')
139 # self.add_engines(1)
139 # self.add_engines(1)
140 t = c.ids[-1]
140 t = c.ids[-1]
141 v = c[t]
141 v = c[t]
142 v2 = self.client[t]
142 v2 = self.client[t]
143 ar = v.apply_async(wait, 1)
143 ar = v.apply_async(wait, 1)
144 # give the monitor time to notice the message
144 # give the monitor time to notice the message
145 time.sleep(.25)
145 time.sleep(.25)
146 ahr = v2.get_result(ar.msg_ids)
146 ahr = v2.get_result(ar.msg_ids)
147 self.assertTrue(isinstance(ahr, AsyncHubResult))
147 self.assertTrue(isinstance(ahr, AsyncHubResult))
148 self.assertEquals(ahr.get(), ar.get())
148 self.assertEquals(ahr.get(), ar.get())
149 ar2 = v2.get_result(ar.msg_ids)
149 ar2 = v2.get_result(ar.msg_ids)
150 self.assertFalse(isinstance(ar2, AsyncHubResult))
150 self.assertFalse(isinstance(ar2, AsyncHubResult))
151 c.spin()
151 c.spin()
152 c.close()
152 c.close()
153
153
154 def test_run_newline(self):
154 def test_run_newline(self):
155 """test that run appends newline to files"""
155 """test that run appends newline to files"""
156 tmpfile = mktemp()
156 tmpfile = mktemp()
157 with open(tmpfile, 'w') as f:
157 with open(tmpfile, 'w') as f:
158 f.write("""def g():
158 f.write("""def g():
159 return 5
159 return 5
160 """)
160 """)
161 v = self.client[-1]
161 v = self.client[-1]
162 v.run(tmpfile, block=True)
162 v.run(tmpfile, block=True)
163 self.assertEquals(v.apply_sync(lambda f: f(), pmod.Reference('g')), 5)
163 self.assertEquals(v.apply_sync(lambda f: f(), pmod.Reference('g')), 5)
164
164
165 def test_apply_tracked(self):
165 def test_apply_tracked(self):
166 """test tracking for apply"""
166 """test tracking for apply"""
167 # self.add_engines(1)
167 # self.add_engines(1)
168 t = self.client.ids[-1]
168 t = self.client.ids[-1]
169 v = self.client[t]
169 v = self.client[t]
170 v.block=False
170 v.block=False
171 def echo(n=1024*1024, **kwargs):
171 def echo(n=1024*1024, **kwargs):
172 with v.temp_flags(**kwargs):
172 with v.temp_flags(**kwargs):
173 return v.apply(lambda x: x, 'x'*n)
173 return v.apply(lambda x: x, 'x'*n)
174 ar = echo(1, track=False)
174 ar = echo(1, track=False)
175 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
175 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
176 self.assertTrue(ar.sent)
176 self.assertTrue(ar.sent)
177 ar = echo(track=True)
177 ar = echo(track=True)
178 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
178 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
179 self.assertEquals(ar.sent, ar._tracker.done)
179 self.assertEquals(ar.sent, ar._tracker.done)
180 ar._tracker.wait()
180 ar._tracker.wait()
181 self.assertTrue(ar.sent)
181 self.assertTrue(ar.sent)
182
182
183 def test_push_tracked(self):
183 def test_push_tracked(self):
184 t = self.client.ids[-1]
184 t = self.client.ids[-1]
185 ns = dict(x='x'*1024*1024)
185 ns = dict(x='x'*1024*1024)
186 v = self.client[t]
186 v = self.client[t]
187 ar = v.push(ns, block=False, track=False)
187 ar = v.push(ns, block=False, track=False)
188 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
188 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
189 self.assertTrue(ar.sent)
189 self.assertTrue(ar.sent)
190
190
191 ar = v.push(ns, block=False, track=True)
191 ar = v.push(ns, block=False, track=True)
192 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
192 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
193 ar._tracker.wait()
193 ar._tracker.wait()
194 self.assertEquals(ar.sent, ar._tracker.done)
194 self.assertEquals(ar.sent, ar._tracker.done)
195 self.assertTrue(ar.sent)
195 self.assertTrue(ar.sent)
196 ar.get()
196 ar.get()
197
197
198 def test_scatter_tracked(self):
198 def test_scatter_tracked(self):
199 t = self.client.ids
199 t = self.client.ids
200 x='x'*1024*1024
200 x='x'*1024*1024
201 ar = self.client[t].scatter('x', x, block=False, track=False)
201 ar = self.client[t].scatter('x', x, block=False, track=False)
202 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
202 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
203 self.assertTrue(ar.sent)
203 self.assertTrue(ar.sent)
204
204
205 ar = self.client[t].scatter('x', x, block=False, track=True)
205 ar = self.client[t].scatter('x', x, block=False, track=True)
206 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
206 self.assertTrue(isinstance(ar._tracker, zmq.MessageTracker))
207 self.assertEquals(ar.sent, ar._tracker.done)
207 self.assertEquals(ar.sent, ar._tracker.done)
208 ar._tracker.wait()
208 ar._tracker.wait()
209 self.assertTrue(ar.sent)
209 self.assertTrue(ar.sent)
210 ar.get()
210 ar.get()
211
211
212 def test_remote_reference(self):
212 def test_remote_reference(self):
213 v = self.client[-1]
213 v = self.client[-1]
214 v['a'] = 123
214 v['a'] = 123
215 ra = pmod.Reference('a')
215 ra = pmod.Reference('a')
216 b = v.apply_sync(lambda x: x, ra)
216 b = v.apply_sync(lambda x: x, ra)
217 self.assertEquals(b, 123)
217 self.assertEquals(b, 123)
218
218
219
219
220 def test_scatter_gather(self):
220 def test_scatter_gather(self):
221 view = self.client[:]
221 view = self.client[:]
222 seq1 = range(16)
222 seq1 = range(16)
223 view.scatter('a', seq1)
223 view.scatter('a', seq1)
224 seq2 = view.gather('a', block=True)
224 seq2 = view.gather('a', block=True)
225 self.assertEquals(seq2, seq1)
225 self.assertEquals(seq2, seq1)
226 self.assertRaisesRemote(NameError, view.gather, 'asdf', block=True)
226 self.assertRaisesRemote(NameError, view.gather, 'asdf', block=True)
227
227
228 @skip_without('numpy')
228 @skip_without('numpy')
229 def test_scatter_gather_numpy(self):
229 def test_scatter_gather_numpy(self):
230 import numpy
230 import numpy
231 from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
231 from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
232 view = self.client[:]
232 view = self.client[:]
233 a = numpy.arange(64)
233 a = numpy.arange(64)
234 view.scatter('a', a)
234 view.scatter('a', a)
235 b = view.gather('a', block=True)
235 b = view.gather('a', block=True)
236 assert_array_equal(b, a)
236 assert_array_equal(b, a)
237
237
238 def test_scatter_gather_lazy(self):
238 def test_scatter_gather_lazy(self):
239 """scatter/gather with targets='all'"""
239 """scatter/gather with targets='all'"""
240 view = self.client.direct_view(targets='all')
240 view = self.client.direct_view(targets='all')
241 x = range(64)
241 x = range(64)
242 view.scatter('x', x)
242 view.scatter('x', x)
243 gathered = view.gather('x', block=True)
243 gathered = view.gather('x', block=True)
244 self.assertEquals(gathered, x)
244 self.assertEquals(gathered, x)
245
245
246
246
247 @dec.known_failure_py3
247 @dec.known_failure_py3
248 @skip_without('numpy')
248 @skip_without('numpy')
249 def test_push_numpy_nocopy(self):
249 def test_push_numpy_nocopy(self):
250 import numpy
250 import numpy
251 view = self.client[:]
251 view = self.client[:]
252 a = numpy.arange(64)
252 a = numpy.arange(64)
253 view['A'] = a
253 view['A'] = a
254 @interactive
254 @interactive
255 def check_writeable(x):
255 def check_writeable(x):
256 return x.flags.writeable
256 return x.flags.writeable
257
257
258 for flag in view.apply_sync(check_writeable, pmod.Reference('A')):
258 for flag in view.apply_sync(check_writeable, pmod.Reference('A')):
259 self.assertFalse(flag, "array is writeable, push shouldn't have pickled it")
259 self.assertFalse(flag, "array is writeable, push shouldn't have pickled it")
260
260
261 view.push(dict(B=a))
261 view.push(dict(B=a))
262 for flag in view.apply_sync(check_writeable, pmod.Reference('B')):
262 for flag in view.apply_sync(check_writeable, pmod.Reference('B')):
263 self.assertFalse(flag, "array is writeable, push shouldn't have pickled it")
263 self.assertFalse(flag, "array is writeable, push shouldn't have pickled it")
264
264
265 @skip_without('numpy')
265 @skip_without('numpy')
266 def test_apply_numpy(self):
266 def test_apply_numpy(self):
267 """view.apply(f, ndarray)"""
267 """view.apply(f, ndarray)"""
268 import numpy
268 import numpy
269 from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
269 from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
270
270
271 A = numpy.random.random((100,100))
271 A = numpy.random.random((100,100))
272 view = self.client[-1]
272 view = self.client[-1]
273 for dt in [ 'int32', 'uint8', 'float32', 'float64' ]:
273 for dt in [ 'int32', 'uint8', 'float32', 'float64' ]:
274 B = A.astype(dt)
274 B = A.astype(dt)
275 C = view.apply_sync(lambda x:x, B)
275 C = view.apply_sync(lambda x:x, B)
276 assert_array_equal(B,C)
276 assert_array_equal(B,C)
277
277
278 def test_map(self):
278 def test_map(self):
279 view = self.client[:]
279 view = self.client[:]
280 def f(x):
280 def f(x):
281 return x**2
281 return x**2
282 data = range(16)
282 data = range(16)
283 r = view.map_sync(f, data)
283 r = view.map_sync(f, data)
284 self.assertEquals(r, map(f, data))
284 self.assertEquals(r, map(f, data))
285
285
286 def test_map_iterable(self):
286 def test_map_iterable(self):
287 """test map on iterables (direct)"""
287 """test map on iterables (direct)"""
288 view = self.client[:]
288 view = self.client[:]
289 # 101 is prime, so it won't be evenly distributed
289 # 101 is prime, so it won't be evenly distributed
290 arr = range(101)
290 arr = range(101)
291 # ensure it will be an iterator, even in Python 3
291 # ensure it will be an iterator, even in Python 3
292 it = iter(arr)
292 it = iter(arr)
293 r = view.map_sync(lambda x:x, arr)
293 r = view.map_sync(lambda x:x, arr)
294 self.assertEquals(r, list(arr))
294 self.assertEquals(r, list(arr))
295
295
296 def test_scatterGatherNonblocking(self):
296 def test_scatterGatherNonblocking(self):
297 data = range(16)
297 data = range(16)
298 view = self.client[:]
298 view = self.client[:]
299 view.scatter('a', data, block=False)
299 view.scatter('a', data, block=False)
300 ar = view.gather('a', block=False)
300 ar = view.gather('a', block=False)
301 self.assertEquals(ar.get(), data)
301 self.assertEquals(ar.get(), data)
302
302
303 @skip_without('numpy')
303 @skip_without('numpy')
304 def test_scatter_gather_numpy_nonblocking(self):
304 def test_scatter_gather_numpy_nonblocking(self):
305 import numpy
305 import numpy
306 from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
306 from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
307 a = numpy.arange(64)
307 a = numpy.arange(64)
308 view = self.client[:]
308 view = self.client[:]
309 ar = view.scatter('a', a, block=False)
309 ar = view.scatter('a', a, block=False)
310 self.assertTrue(isinstance(ar, AsyncResult))
310 self.assertTrue(isinstance(ar, AsyncResult))
311 amr = view.gather('a', block=False)
311 amr = view.gather('a', block=False)
312 self.assertTrue(isinstance(amr, AsyncMapResult))
312 self.assertTrue(isinstance(amr, AsyncMapResult))
313 assert_array_equal(amr.get(), a)
313 assert_array_equal(amr.get(), a)
314
314
315 def test_execute(self):
315 def test_execute(self):
316 view = self.client[:]
316 view = self.client[:]
317 # self.client.debug=True
317 # self.client.debug=True
318 execute = view.execute
318 execute = view.execute
319 ar = execute('c=30', block=False)
319 ar = execute('c=30', block=False)
320 self.assertTrue(isinstance(ar, AsyncResult))
320 self.assertTrue(isinstance(ar, AsyncResult))
321 ar = execute('d=[0,1,2]', block=False)
321 ar = execute('d=[0,1,2]', block=False)
322 self.client.wait(ar, 1)
322 self.client.wait(ar, 1)
323 self.assertEquals(len(ar.get()), len(self.client))
323 self.assertEquals(len(ar.get()), len(self.client))
324 for c in view['c']:
324 for c in view['c']:
325 self.assertEquals(c, 30)
325 self.assertEquals(c, 30)
326
326
327 def test_abort(self):
327 def test_abort(self):
328 view = self.client[-1]
328 view = self.client[-1]
329 ar = view.execute('import time; time.sleep(1)', block=False)
329 ar = view.execute('import time; time.sleep(1)', block=False)
330 ar2 = view.apply_async(lambda : 2)
330 ar2 = view.apply_async(lambda : 2)
331 ar3 = view.apply_async(lambda : 3)
331 ar3 = view.apply_async(lambda : 3)
332 view.abort(ar2)
332 view.abort(ar2)
333 view.abort(ar3.msg_ids)
333 view.abort(ar3.msg_ids)
334 self.assertRaises(error.TaskAborted, ar2.get)
334 self.assertRaises(error.TaskAborted, ar2.get)
335 self.assertRaises(error.TaskAborted, ar3.get)
335 self.assertRaises(error.TaskAborted, ar3.get)
336
336
337 def test_abort_all(self):
337 def test_abort_all(self):
338 """view.abort() aborts all outstanding tasks"""
338 """view.abort() aborts all outstanding tasks"""
339 view = self.client[-1]
339 view = self.client[-1]
340 ars = [ view.apply_async(time.sleep, 0.25) for i in range(10) ]
340 ars = [ view.apply_async(time.sleep, 0.25) for i in range(10) ]
341 view.abort()
341 view.abort()
342 view.wait(timeout=5)
342 view.wait(timeout=5)
343 for ar in ars[5:]:
343 for ar in ars[5:]:
344 self.assertRaises(error.TaskAborted, ar.get)
344 self.assertRaises(error.TaskAborted, ar.get)
345
345
346 def test_temp_flags(self):
346 def test_temp_flags(self):
347 view = self.client[-1]
347 view = self.client[-1]
348 view.block=True
348 view.block=True
349 with view.temp_flags(block=False):
349 with view.temp_flags(block=False):
350 self.assertFalse(view.block)
350 self.assertFalse(view.block)
351 self.assertTrue(view.block)
351 self.assertTrue(view.block)
352
352
353 @dec.known_failure_py3
353 @dec.known_failure_py3
354 def test_importer(self):
354 def test_importer(self):
355 view = self.client[-1]
355 view = self.client[-1]
356 view.clear(block=True)
356 view.clear(block=True)
357 with view.importer:
357 with view.importer:
358 import re
358 import re
359
359
360 @interactive
360 @interactive
361 def findall(pat, s):
361 def findall(pat, s):
362 # this globals() step isn't necessary in real code
362 # this globals() step isn't necessary in real code
363 # only to prevent a closure in the test
363 # only to prevent a closure in the test
364 re = globals()['re']
364 re = globals()['re']
365 return re.findall(pat, s)
365 return re.findall(pat, s)
366
366
367 self.assertEquals(view.apply_sync(findall, '\w+', 'hello world'), 'hello world'.split())
367 self.assertEquals(view.apply_sync(findall, '\w+', 'hello world'), 'hello world'.split())
368
368
369 # parallel magic tests
370
371 def test_magic_px_blocking(self):
372 ip = get_ipython()
373 v = self.client[-1]
374 v.activate()
375 v.block=True
376
377 ip.magic('px a=5')
378 self.assertEquals(v['a'], 5)
379 ip.magic('px a=10')
380 self.assertEquals(v['a'], 10)
381 sio = StringIO()
382 savestdout = sys.stdout
383 sys.stdout = sio
384 # just 'print a' worst ~99% of the time, but this ensures that
385 # the stdout message has arrived when the result is finished:
386 ip.magic('px import sys,time;print (a); sys.stdout.flush();time.sleep(0.2)')
387 sys.stdout = savestdout
388 buf = sio.getvalue()
389 self.assertTrue('[stdout:' in buf, buf)
390 self.assertTrue(buf.rstrip().endswith('10'))
391 self.assertRaisesRemote(ZeroDivisionError, ip.magic, 'px 1/0')
392
393 def test_magic_px_nonblocking(self):
394 ip = get_ipython()
395 v = self.client[-1]
396 v.activate()
397 v.block=False
398
399 ip.magic('px a=5')
400 self.assertEquals(v['a'], 5)
401 ip.magic('px a=10')
402 self.assertEquals(v['a'], 10)
403 sio = StringIO()
404 savestdout = sys.stdout
405 sys.stdout = sio
406 ip.magic('px print a')
407 sys.stdout = savestdout
408 buf = sio.getvalue()
409 self.assertFalse('[stdout:%i]'%v.targets in buf)
410 ip.magic('px 1/0')
411 ar = v.get_result(-1)
412 self.assertRaisesRemote(ZeroDivisionError, ar.get)
413
414 def test_magic_autopx_blocking(self):
415 ip = get_ipython()
416 v = self.client[-1]
417 v.activate()
418 v.block=True
419
420 sio = StringIO()
421 savestdout = sys.stdout
422 sys.stdout = sio
423 ip.magic('autopx')
424 ip.run_cell('\n'.join(('a=5','b=10','c=0')))
425 ip.run_cell('b*=2')
426 ip.run_cell('print (b)')
427 ip.run_cell("b/c")
428 ip.magic('autopx')
429 sys.stdout = savestdout
430 output = sio.getvalue().strip()
431 self.assertTrue(output.startswith('%autopx enabled'))
432 self.assertTrue(output.endswith('%autopx disabled'))
433 self.assertTrue('RemoteError: ZeroDivisionError' in output)
434 ar = v.get_result(-1)
435 self.assertEquals(v['a'], 5)
436 self.assertEquals(v['b'], 20)
437 self.assertRaisesRemote(ZeroDivisionError, ar.get)
438
439 def test_magic_autopx_nonblocking(self):
440 ip = get_ipython()
441 v = self.client[-1]
442 v.activate()
443 v.block=False
444
445 sio = StringIO()
446 savestdout = sys.stdout
447 sys.stdout = sio
448 ip.magic('autopx')
449 ip.run_cell('\n'.join(('a=5','b=10','c=0')))
450 ip.run_cell('print (b)')
451 ip.run_cell('import time; time.sleep(0.1)')
452 ip.run_cell("b/c")
453 ip.run_cell('b*=2')
454 ip.magic('autopx')
455 sys.stdout = savestdout
456 output = sio.getvalue().strip()
457 self.assertTrue(output.startswith('%autopx enabled'))
458 self.assertTrue(output.endswith('%autopx disabled'))
459 self.assertFalse('ZeroDivisionError' in output)
460 ar = v.get_result(-2)
461 self.assertRaisesRemote(ZeroDivisionError, ar.get)
462 # prevent TaskAborted on pulls, due to ZeroDivisionError
463 time.sleep(0.5)
464 self.assertEquals(v['a'], 5)
465 # b*=2 will not fire, due to abort
466 self.assertEquals(v['b'], 10)
467
468 def test_magic_result(self):
469 ip = get_ipython()
470 v = self.client[-1]
471 v.activate()
472 v['a'] = 111
473 ra = v['a']
474
475 ar = ip.magic('result')
476 self.assertEquals(ar.msg_ids, [v.history[-1]])
477 self.assertEquals(ar.get(), 111)
478 ar = ip.magic('result -2')
479 self.assertEquals(ar.msg_ids, [v.history[-2]])
480
481 def test_unicode_execute(self):
369 def test_unicode_execute(self):
482 """test executing unicode strings"""
370 """test executing unicode strings"""
483 v = self.client[-1]
371 v = self.client[-1]
484 v.block=True
372 v.block=True
485 if sys.version_info[0] >= 3:
373 if sys.version_info[0] >= 3:
486 code="a='é'"
374 code="a='é'"
487 else:
375 else:
488 code=u"a=u'é'"
376 code=u"a=u'é'"
489 v.execute(code)
377 v.execute(code)
490 self.assertEquals(v['a'], u'é')
378 self.assertEquals(v['a'], u'é')
491
379
492 def test_unicode_apply_result(self):
380 def test_unicode_apply_result(self):
493 """test unicode apply results"""
381 """test unicode apply results"""
494 v = self.client[-1]
382 v = self.client[-1]
495 r = v.apply_sync(lambda : u'é')
383 r = v.apply_sync(lambda : u'é')
496 self.assertEquals(r, u'é')
384 self.assertEquals(r, u'é')
497
385
498 def test_unicode_apply_arg(self):
386 def test_unicode_apply_arg(self):
499 """test passing unicode arguments to apply"""
387 """test passing unicode arguments to apply"""
500 v = self.client[-1]
388 v = self.client[-1]
501
389
502 @interactive
390 @interactive
503 def check_unicode(a, check):
391 def check_unicode(a, check):
504 assert isinstance(a, unicode), "%r is not unicode"%a
392 assert isinstance(a, unicode), "%r is not unicode"%a
505 assert isinstance(check, bytes), "%r is not bytes"%check
393 assert isinstance(check, bytes), "%r is not bytes"%check
506 assert a.encode('utf8') == check, "%s != %s"%(a,check)
394 assert a.encode('utf8') == check, "%s != %s"%(a,check)
507
395
508 for s in [ u'é', u'ßø®∫',u'asdf' ]:
396 for s in [ u'é', u'ßø®∫',u'asdf' ]:
509 try:
397 try:
510 v.apply_sync(check_unicode, s, s.encode('utf8'))
398 v.apply_sync(check_unicode, s, s.encode('utf8'))
511 except error.RemoteError as e:
399 except error.RemoteError as e:
512 if e.ename == 'AssertionError':
400 if e.ename == 'AssertionError':
513 self.fail(e.evalue)
401 self.fail(e.evalue)
514 else:
402 else:
515 raise e
403 raise e
516
404
517 def test_map_reference(self):
405 def test_map_reference(self):
518 """view.map(<Reference>, *seqs) should work"""
406 """view.map(<Reference>, *seqs) should work"""
519 v = self.client[:]
407 v = self.client[:]
520 v.scatter('n', self.client.ids, flatten=True)
408 v.scatter('n', self.client.ids, flatten=True)
521 v.execute("f = lambda x,y: x*y")
409 v.execute("f = lambda x,y: x*y")
522 rf = pmod.Reference('f')
410 rf = pmod.Reference('f')
523 nlist = list(range(10))
411 nlist = list(range(10))
524 mlist = nlist[::-1]
412 mlist = nlist[::-1]
525 expected = [ m*n for m,n in zip(mlist, nlist) ]
413 expected = [ m*n for m,n in zip(mlist, nlist) ]
526 result = v.map_sync(rf, mlist, nlist)
414 result = v.map_sync(rf, mlist, nlist)
527 self.assertEquals(result, expected)
415 self.assertEquals(result, expected)
528
416
529 def test_apply_reference(self):
417 def test_apply_reference(self):
530 """view.apply(<Reference>, *args) should work"""
418 """view.apply(<Reference>, *args) should work"""
531 v = self.client[:]
419 v = self.client[:]
532 v.scatter('n', self.client.ids, flatten=True)
420 v.scatter('n', self.client.ids, flatten=True)
533 v.execute("f = lambda x: n*x")
421 v.execute("f = lambda x: n*x")
534 rf = pmod.Reference('f')
422 rf = pmod.Reference('f')
535 result = v.apply_sync(rf, 5)
423 result = v.apply_sync(rf, 5)
536 expected = [ 5*id for id in self.client.ids ]
424 expected = [ 5*id for id in self.client.ids ]
537 self.assertEquals(result, expected)
425 self.assertEquals(result, expected)
538
426
539 def test_eval_reference(self):
427 def test_eval_reference(self):
540 v = self.client[self.client.ids[0]]
428 v = self.client[self.client.ids[0]]
541 v['g'] = range(5)
429 v['g'] = range(5)
542 rg = pmod.Reference('g[0]')
430 rg = pmod.Reference('g[0]')
543 echo = lambda x:x
431 echo = lambda x:x
544 self.assertEquals(v.apply_sync(echo, rg), 0)
432 self.assertEquals(v.apply_sync(echo, rg), 0)
545
433
546 def test_reference_nameerror(self):
434 def test_reference_nameerror(self):
547 v = self.client[self.client.ids[0]]
435 v = self.client[self.client.ids[0]]
548 r = pmod.Reference('elvis_has_left')
436 r = pmod.Reference('elvis_has_left')
549 echo = lambda x:x
437 echo = lambda x:x
550 self.assertRaisesRemote(NameError, v.apply_sync, echo, r)
438 self.assertRaisesRemote(NameError, v.apply_sync, echo, r)
551
439
552 def test_single_engine_map(self):
440 def test_single_engine_map(self):
553 e0 = self.client[self.client.ids[0]]
441 e0 = self.client[self.client.ids[0]]
554 r = range(5)
442 r = range(5)
555 check = [ -1*i for i in r ]
443 check = [ -1*i for i in r ]
556 result = e0.map_sync(lambda x: -1*x, r)
444 result = e0.map_sync(lambda x: -1*x, r)
557 self.assertEquals(result, check)
445 self.assertEquals(result, check)
558
446
559 def test_len(self):
447 def test_len(self):
560 """len(view) makes sense"""
448 """len(view) makes sense"""
561 e0 = self.client[self.client.ids[0]]
449 e0 = self.client[self.client.ids[0]]
562 yield self.assertEquals(len(e0), 1)
450 yield self.assertEquals(len(e0), 1)
563 v = self.client[:]
451 v = self.client[:]
564 yield self.assertEquals(len(v), len(self.client.ids))
452 yield self.assertEquals(len(v), len(self.client.ids))
565 v = self.client.direct_view('all')
453 v = self.client.direct_view('all')
566 yield self.assertEquals(len(v), len(self.client.ids))
454 yield self.assertEquals(len(v), len(self.client.ids))
567 v = self.client[:2]
455 v = self.client[:2]
568 yield self.assertEquals(len(v), 2)
456 yield self.assertEquals(len(v), 2)
569 v = self.client[:1]
457 v = self.client[:1]
570 yield self.assertEquals(len(v), 1)
458 yield self.assertEquals(len(v), 1)
571 v = self.client.load_balanced_view()
459 v = self.client.load_balanced_view()
572 yield self.assertEquals(len(v), len(self.client.ids))
460 yield self.assertEquals(len(v), len(self.client.ids))
573 # parametric tests seem to require manual closing?
461 # parametric tests seem to require manual closing?
574 self.client.close()
462 self.client.close()
575
463
576
464
577 # begin execute tests
465 # begin execute tests
578 def _wait_for(self, f, timeout=10):
579 tic = time.time()
580 while time.time() <= tic + timeout:
581 if f():
582 return
583 time.sleep(0.1)
584 self.client.spin()
585 if not f():
586 print "Warning: Awaited condition never arrived"
587
588
466
589 def test_execute_reply(self):
467 def test_execute_reply(self):
590 e0 = self.client[self.client.ids[0]]
468 e0 = self.client[self.client.ids[0]]
591 e0.block = True
469 e0.block = True
592 ar = e0.execute("5", silent=False)
470 ar = e0.execute("5", silent=False)
593 er = ar.get()
471 er = ar.get()
594 self._wait_for(lambda : bool(er.pyout))
472 self._wait_for(lambda : bool(er.pyout))
595 self.assertEquals(str(er), "<ExecuteReply[%i]: 5>" % er.execution_count)
473 self.assertEquals(str(er), "<ExecuteReply[%i]: 5>" % er.execution_count)
596 self.assertEquals(er.pyout['data']['text/plain'], '5')
474 self.assertEquals(er.pyout['data']['text/plain'], '5')
597
475
598 def test_execute_reply_stdout(self):
476 def test_execute_reply_stdout(self):
599 e0 = self.client[self.client.ids[0]]
477 e0 = self.client[self.client.ids[0]]
600 e0.block = True
478 e0.block = True
601 ar = e0.execute("print (5)", silent=False)
479 ar = e0.execute("print (5)", silent=False)
602 er = ar.get()
480 er = ar.get()
603 self._wait_for(lambda : bool(er.stdout))
481 self._wait_for(lambda : bool(er.stdout))
604 self.assertEquals(er.stdout.strip(), '5')
482 self.assertEquals(er.stdout.strip(), '5')
605
483
606 def test_execute_pyout(self):
484 def test_execute_pyout(self):
607 """execute triggers pyout with silent=False"""
485 """execute triggers pyout with silent=False"""
608 view = self.client[:]
486 view = self.client[:]
609 ar = view.execute("5", silent=False, block=True)
487 ar = view.execute("5", silent=False, block=True)
610 self._wait_for(lambda : all(ar.pyout))
488 self._wait_for(lambda : all(ar.pyout))
611
489
612 expected = [{'text/plain' : '5'}] * len(view)
490 expected = [{'text/plain' : '5'}] * len(view)
613 mimes = [ out['data'] for out in ar.pyout ]
491 mimes = [ out['data'] for out in ar.pyout ]
614 self.assertEquals(mimes, expected)
492 self.assertEquals(mimes, expected)
615
493
616 def test_execute_silent(self):
494 def test_execute_silent(self):
617 """execute does not trigger pyout with silent=True"""
495 """execute does not trigger pyout with silent=True"""
618 view = self.client[:]
496 view = self.client[:]
619 ar = view.execute("5", block=True)
497 ar = view.execute("5", block=True)
620 expected = [None] * len(view)
498 expected = [None] * len(view)
621 self.assertEquals(ar.pyout, expected)
499 self.assertEquals(ar.pyout, expected)
622
500
623 def test_execute_magic(self):
501 def test_execute_magic(self):
624 """execute accepts IPython commands"""
502 """execute accepts IPython commands"""
625 view = self.client[:]
503 view = self.client[:]
626 view.execute("a = 5")
504 view.execute("a = 5")
627 ar = view.execute("%whos", block=True)
505 ar = view.execute("%whos", block=True)
628 # this will raise, if that failed
506 # this will raise, if that failed
629 ar.get(5)
507 ar.get(5)
630 self._wait_for(lambda : all(ar.stdout))
508 self._wait_for(lambda : all(ar.stdout))
631 for stdout in ar.stdout:
509 for stdout in ar.stdout:
632 lines = stdout.splitlines()
510 lines = stdout.splitlines()
633 self.assertEquals(lines[0].split(), ['Variable', 'Type', 'Data/Info'])
511 self.assertEquals(lines[0].split(), ['Variable', 'Type', 'Data/Info'])
634 found = False
512 found = False
635 for line in lines[2:]:
513 for line in lines[2:]:
636 split = line.split()
514 split = line.split()
637 if split == ['a', 'int', '5']:
515 if split == ['a', 'int', '5']:
638 found = True
516 found = True
639 break
517 break
640 self.assertTrue(found, "whos output wrong: %s" % stdout)
518 self.assertTrue(found, "whos output wrong: %s" % stdout)
641
519
642 def test_execute_displaypub(self):
520 def test_execute_displaypub(self):
643 """execute tracks display_pub output"""
521 """execute tracks display_pub output"""
644 view = self.client[:]
522 view = self.client[:]
645 view.execute("from IPython.core.display import *")
523 view.execute("from IPython.core.display import *")
646 ar = view.execute("[ display(i) for i in range(5) ]", block=True)
524 ar = view.execute("[ display(i) for i in range(5) ]", block=True)
647
525
648 self._wait_for(lambda : all(len(er.outputs) >= 5 for er in ar))
526 self._wait_for(lambda : all(len(er.outputs) >= 5 for er in ar))
649 expected = [ {u'text/plain' : unicode(j)} for j in range(5) ]
527 expected = [ {u'text/plain' : unicode(j)} for j in range(5) ]
650 for outputs in ar.outputs:
528 for outputs in ar.outputs:
651 mimes = [ out['data'] for out in outputs ]
529 mimes = [ out['data'] for out in outputs ]
652 self.assertEquals(mimes, expected)
530 self.assertEquals(mimes, expected)
653
531
654 def test_apply_displaypub(self):
532 def test_apply_displaypub(self):
655 """apply tracks display_pub output"""
533 """apply tracks display_pub output"""
656 view = self.client[:]
534 view = self.client[:]
657 view.execute("from IPython.core.display import *")
535 view.execute("from IPython.core.display import *")
658
536
659 @interactive
537 @interactive
660 def publish():
538 def publish():
661 [ display(i) for i in range(5) ]
539 [ display(i) for i in range(5) ]
662
540
663 ar = view.apply_async(publish)
541 ar = view.apply_async(publish)
664 ar.get(5)
542 ar.get(5)
665 self._wait_for(lambda : all(len(out) >= 5 for out in ar.outputs))
543 self._wait_for(lambda : all(len(out) >= 5 for out in ar.outputs))
666 expected = [ {u'text/plain' : unicode(j)} for j in range(5) ]
544 expected = [ {u'text/plain' : unicode(j)} for j in range(5) ]
667 for outputs in ar.outputs:
545 for outputs in ar.outputs:
668 mimes = [ out['data'] for out in outputs ]
546 mimes = [ out['data'] for out in outputs ]
669 self.assertEquals(mimes, expected)
547 self.assertEquals(mimes, expected)
670
548
671 def test_execute_raises(self):
549 def test_execute_raises(self):
672 """exceptions in execute requests raise appropriately"""
550 """exceptions in execute requests raise appropriately"""
673 view = self.client[-1]
551 view = self.client[-1]
674 ar = view.execute("1/0")
552 ar = view.execute("1/0")
675 self.assertRaisesRemote(ZeroDivisionError, ar.get, 2)
553 self.assertRaisesRemote(ZeroDivisionError, ar.get, 2)
676
554
677 @dec.skipif_not_matplotlib
555 @dec.skipif_not_matplotlib
678 def test_magic_pylab(self):
556 def test_magic_pylab(self):
679 """%pylab works on engines"""
557 """%pylab works on engines"""
680 view = self.client[-1]
558 view = self.client[-1]
681 ar = view.execute("%pylab inline")
559 ar = view.execute("%pylab inline")
682 # at least check if this raised:
560 # at least check if this raised:
683 reply = ar.get(5)
561 reply = ar.get(5)
684 # include imports, in case user config
562 # include imports, in case user config
685 ar = view.execute("plot(rand(100))", silent=False)
563 ar = view.execute("plot(rand(100))", silent=False)
686 reply = ar.get(5)
564 reply = ar.get(5)
687 self._wait_for(lambda : all(ar.outputs))
565 self._wait_for(lambda : all(ar.outputs))
688 self.assertEquals(len(reply.outputs), 1)
566 self.assertEquals(len(reply.outputs), 1)
689 output = reply.outputs[0]
567 output = reply.outputs[0]
690 self.assertTrue("data" in output)
568 self.assertTrue("data" in output)
691 data = output['data']
569 data = output['data']
692 self.assertTrue("image/png" in data)
570 self.assertTrue("image/png" in data)
693
571
694
572
@@ -1,73 +1,75 b''
1 import __builtin__
1 import __builtin__
2 import sys
2 import sys
3 from base64 import encodestring
3 from base64 import encodestring
4
4
5 from IPython.core.displayhook import DisplayHook
5 from IPython.core.displayhook import DisplayHook
6 from IPython.utils.traitlets import Instance, Dict
6 from IPython.utils.traitlets import Instance, Dict
7 from session import extract_header, Session
7 from session import extract_header, Session
8
8
9 class ZMQDisplayHook(object):
9 class ZMQDisplayHook(object):
10 """A simple displayhook that publishes the object's repr over a ZeroMQ
10 """A simple displayhook that publishes the object's repr over a ZeroMQ
11 socket."""
11 socket."""
12 topic=None
12 topic=None
13
13
14 def __init__(self, session, pub_socket):
14 def __init__(self, session, pub_socket):
15 self.session = session
15 self.session = session
16 self.pub_socket = pub_socket
16 self.pub_socket = pub_socket
17 self.parent_header = {}
17 self.parent_header = {}
18
18
19 def __call__(self, obj):
19 def __call__(self, obj):
20 if obj is None:
20 if obj is None:
21 return
21 return
22
22
23 __builtin__._ = obj
23 __builtin__._ = obj
24 sys.stdout.flush()
24 sys.stdout.flush()
25 sys.stderr.flush()
25 sys.stderr.flush()
26 msg = self.session.send(self.pub_socket, u'pyout', {u'data':repr(obj)},
26 msg = self.session.send(self.pub_socket, u'pyout', {u'data':repr(obj)},
27 parent=self.parent_header, ident=self.topic)
27 parent=self.parent_header, ident=self.topic)
28
28
29 def set_parent(self, parent):
29 def set_parent(self, parent):
30 self.parent_header = extract_header(parent)
30 self.parent_header = extract_header(parent)
31
31
32
32
33 def _encode_binary(format_dict):
33 def _encode_binary(format_dict):
34 encoded = format_dict.copy()
34 pngdata = format_dict.get('image/png')
35 pngdata = format_dict.get('image/png')
35 if pngdata is not None:
36 if isinstance(pngdata, bytes):
36 format_dict['image/png'] = encodestring(pngdata).decode('ascii')
37 encoded['image/png'] = encodestring(pngdata).decode('ascii')
37 jpegdata = format_dict.get('image/jpeg')
38 jpegdata = format_dict.get('image/jpeg')
38 if jpegdata is not None:
39 if isinstance(jpegdata, bytes):
39 format_dict['image/jpeg'] = encodestring(jpegdata).decode('ascii')
40 encoded['image/jpeg'] = encodestring(jpegdata).decode('ascii')
41
42 return encoded
40
43
41
44
42 class ZMQShellDisplayHook(DisplayHook):
45 class ZMQShellDisplayHook(DisplayHook):
43 """A displayhook subclass that publishes data using ZeroMQ. This is intended
46 """A displayhook subclass that publishes data using ZeroMQ. This is intended
44 to work with an InteractiveShell instance. It sends a dict of different
47 to work with an InteractiveShell instance. It sends a dict of different
45 representations of the object."""
48 representations of the object."""
46 topic=None
49 topic=None
47
50
48 session = Instance(Session)
51 session = Instance(Session)
49 pub_socket = Instance('zmq.Socket')
52 pub_socket = Instance('zmq.Socket')
50 parent_header = Dict({})
53 parent_header = Dict({})
51
54
52 def set_parent(self, parent):
55 def set_parent(self, parent):
53 """Set the parent for outbound messages."""
56 """Set the parent for outbound messages."""
54 self.parent_header = extract_header(parent)
57 self.parent_header = extract_header(parent)
55
58
56 def start_displayhook(self):
59 def start_displayhook(self):
57 self.msg = self.session.msg(u'pyout', {}, parent=self.parent_header)
60 self.msg = self.session.msg(u'pyout', {}, parent=self.parent_header)
58
61
59 def write_output_prompt(self):
62 def write_output_prompt(self):
60 """Write the output prompt."""
63 """Write the output prompt."""
61 self.msg['content']['execution_count'] = self.prompt_count
64 self.msg['content']['execution_count'] = self.prompt_count
62
65
63 def write_format_data(self, format_dict):
66 def write_format_data(self, format_dict):
64 _encode_binary(format_dict)
67 self.msg['content']['data'] = _encode_binary(format_dict)
65 self.msg['content']['data'] = format_dict
66
68
67 def finish_displayhook(self):
69 def finish_displayhook(self):
68 """Finish up all displayhook activities."""
70 """Finish up all displayhook activities."""
69 sys.stdout.flush()
71 sys.stdout.flush()
70 sys.stderr.flush()
72 sys.stderr.flush()
71 self.session.send(self.pub_socket, self.msg, ident=self.topic)
73 self.session.send(self.pub_socket, self.msg, ident=self.topic)
72 self.msg = None
74 self.msg = None
73
75
@@ -1,919 +1,921 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 """A simple interactive kernel that talks to a frontend over 0MQ.
2 """A simple interactive kernel that talks to a frontend over 0MQ.
3
3
4 Things to do:
4 Things to do:
5
5
6 * Implement `set_parent` logic. Right before doing exec, the Kernel should
6 * Implement `set_parent` logic. Right before doing exec, the Kernel should
7 call set_parent on all the PUB objects with the message about to be executed.
7 call set_parent on all the PUB objects with the message about to be executed.
8 * Implement random port and security key logic.
8 * Implement random port and security key logic.
9 * Implement control messages.
9 * Implement control messages.
10 * Implement event loop and poll version.
10 * Implement event loop and poll version.
11 """
11 """
12
12
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14 # Imports
14 # Imports
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16 from __future__ import print_function
16 from __future__ import print_function
17
17
18 # Standard library imports
18 # Standard library imports
19 import __builtin__
19 import __builtin__
20 import atexit
20 import atexit
21 import sys
21 import sys
22 import time
22 import time
23 import traceback
23 import traceback
24 import logging
24 import logging
25 import uuid
25 import uuid
26
26
27 from datetime import datetime
27 from datetime import datetime
28 from signal import (
28 from signal import (
29 signal, getsignal, default_int_handler, SIGINT, SIG_IGN
29 signal, getsignal, default_int_handler, SIGINT, SIG_IGN
30 )
30 )
31
31
32 # System library imports
32 # System library imports
33 import zmq
33 import zmq
34 from zmq.eventloop import ioloop
34 from zmq.eventloop import ioloop
35 from zmq.eventloop.zmqstream import ZMQStream
35 from zmq.eventloop.zmqstream import ZMQStream
36
36
37 # Local imports
37 # Local imports
38 from IPython.core import pylabtools
38 from IPython.core import pylabtools
39 from IPython.config.configurable import Configurable
39 from IPython.config.configurable import Configurable
40 from IPython.config.application import boolean_flag, catch_config_error
40 from IPython.config.application import boolean_flag, catch_config_error
41 from IPython.core.application import ProfileDir
41 from IPython.core.application import ProfileDir
42 from IPython.core.error import StdinNotImplementedError
42 from IPython.core.error import StdinNotImplementedError
43 from IPython.core.shellapp import (
43 from IPython.core.shellapp import (
44 InteractiveShellApp, shell_flags, shell_aliases
44 InteractiveShellApp, shell_flags, shell_aliases
45 )
45 )
46 from IPython.utils import io
46 from IPython.utils import io
47 from IPython.utils import py3compat
47 from IPython.utils import py3compat
48 from IPython.utils.frame import extract_module_locals
48 from IPython.utils.frame import extract_module_locals
49 from IPython.utils.jsonutil import json_clean
49 from IPython.utils.jsonutil import json_clean
50 from IPython.utils.traitlets import (
50 from IPython.utils.traitlets import (
51 Any, Instance, Float, Dict, CaselessStrEnum, List, Set, Integer, Unicode
51 Any, Instance, Float, Dict, CaselessStrEnum, List, Set, Integer, Unicode
52 )
52 )
53
53
54 from entry_point import base_launch_kernel
54 from entry_point import base_launch_kernel
55 from kernelapp import KernelApp, kernel_flags, kernel_aliases
55 from kernelapp import KernelApp, kernel_flags, kernel_aliases
56 from serialize import serialize_object, unpack_apply_message
56 from serialize import serialize_object, unpack_apply_message
57 from session import Session, Message
57 from session import Session, Message
58 from zmqshell import ZMQInteractiveShell
58 from zmqshell import ZMQInteractiveShell
59
59
60
60
61 #-----------------------------------------------------------------------------
61 #-----------------------------------------------------------------------------
62 # Main kernel class
62 # Main kernel class
63 #-----------------------------------------------------------------------------
63 #-----------------------------------------------------------------------------
64
64
65 class Kernel(Configurable):
65 class Kernel(Configurable):
66
66
67 #---------------------------------------------------------------------------
67 #---------------------------------------------------------------------------
68 # Kernel interface
68 # Kernel interface
69 #---------------------------------------------------------------------------
69 #---------------------------------------------------------------------------
70
70
71 # attribute to override with a GUI
71 # attribute to override with a GUI
72 eventloop = Any(None)
72 eventloop = Any(None)
73 def _eventloop_changed(self, name, old, new):
73 def _eventloop_changed(self, name, old, new):
74 """schedule call to eventloop from IOLoop"""
74 """schedule call to eventloop from IOLoop"""
75 loop = ioloop.IOLoop.instance()
75 loop = ioloop.IOLoop.instance()
76 loop.add_timeout(time.time()+0.1, self.enter_eventloop)
76 loop.add_timeout(time.time()+0.1, self.enter_eventloop)
77
77
78 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
78 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
79 session = Instance(Session)
79 session = Instance(Session)
80 profile_dir = Instance('IPython.core.profiledir.ProfileDir')
80 profile_dir = Instance('IPython.core.profiledir.ProfileDir')
81 shell_streams = List()
81 shell_streams = List()
82 control_stream = Instance(ZMQStream)
82 control_stream = Instance(ZMQStream)
83 iopub_socket = Instance(zmq.Socket)
83 iopub_socket = Instance(zmq.Socket)
84 stdin_socket = Instance(zmq.Socket)
84 stdin_socket = Instance(zmq.Socket)
85 log = Instance(logging.Logger)
85 log = Instance(logging.Logger)
86
86
87 user_module = Any()
87 user_module = Any()
88 def _user_module_changed(self, name, old, new):
88 def _user_module_changed(self, name, old, new):
89 if self.shell is not None:
89 if self.shell is not None:
90 self.shell.user_module = new
90 self.shell.user_module = new
91
91
92 user_ns = Dict(default_value=None)
92 user_ns = Dict(default_value=None)
93 def _user_ns_changed(self, name, old, new):
93 def _user_ns_changed(self, name, old, new):
94 if self.shell is not None:
94 if self.shell is not None:
95 self.shell.user_ns = new
95 self.shell.user_ns = new
96 self.shell.init_user_ns()
96 self.shell.init_user_ns()
97
97
98 # identities:
98 # identities:
99 int_id = Integer(-1)
99 int_id = Integer(-1)
100 ident = Unicode()
100 ident = Unicode()
101
101
102 def _ident_default(self):
102 def _ident_default(self):
103 return unicode(uuid.uuid4())
103 return unicode(uuid.uuid4())
104
104
105
105
106 # Private interface
106 # Private interface
107
107
108 # Time to sleep after flushing the stdout/err buffers in each execute
108 # Time to sleep after flushing the stdout/err buffers in each execute
109 # cycle. While this introduces a hard limit on the minimal latency of the
109 # cycle. While this introduces a hard limit on the minimal latency of the
110 # execute cycle, it helps prevent output synchronization problems for
110 # execute cycle, it helps prevent output synchronization problems for
111 # clients.
111 # clients.
112 # Units are in seconds. The minimum zmq latency on local host is probably
112 # Units are in seconds. The minimum zmq latency on local host is probably
113 # ~150 microseconds, set this to 500us for now. We may need to increase it
113 # ~150 microseconds, set this to 500us for now. We may need to increase it
114 # a little if it's not enough after more interactive testing.
114 # a little if it's not enough after more interactive testing.
115 _execute_sleep = Float(0.0005, config=True)
115 _execute_sleep = Float(0.0005, config=True)
116
116
117 # Frequency of the kernel's event loop.
117 # Frequency of the kernel's event loop.
118 # Units are in seconds, kernel subclasses for GUI toolkits may need to
118 # Units are in seconds, kernel subclasses for GUI toolkits may need to
119 # adapt to milliseconds.
119 # adapt to milliseconds.
120 _poll_interval = Float(0.05, config=True)
120 _poll_interval = Float(0.05, config=True)
121
121
122 # If the shutdown was requested over the network, we leave here the
122 # If the shutdown was requested over the network, we leave here the
123 # necessary reply message so it can be sent by our registered atexit
123 # necessary reply message so it can be sent by our registered atexit
124 # handler. This ensures that the reply is only sent to clients truly at
124 # handler. This ensures that the reply is only sent to clients truly at
125 # the end of our shutdown process (which happens after the underlying
125 # the end of our shutdown process (which happens after the underlying
126 # IPython shell's own shutdown).
126 # IPython shell's own shutdown).
127 _shutdown_message = None
127 _shutdown_message = None
128
128
129 # This is a dict of port number that the kernel is listening on. It is set
129 # This is a dict of port number that the kernel is listening on. It is set
130 # by record_ports and used by connect_request.
130 # by record_ports and used by connect_request.
131 _recorded_ports = Dict()
131 _recorded_ports = Dict()
132
132
133 # set of aborted msg_ids
133 # set of aborted msg_ids
134 aborted = Set()
134 aborted = Set()
135
135
136
136
137 def __init__(self, **kwargs):
137 def __init__(self, **kwargs):
138 super(Kernel, self).__init__(**kwargs)
138 super(Kernel, self).__init__(**kwargs)
139
139
140 # Initialize the InteractiveShell subclass
140 # Initialize the InteractiveShell subclass
141 self.shell = ZMQInteractiveShell.instance(config=self.config,
141 self.shell = ZMQInteractiveShell.instance(config=self.config,
142 profile_dir = self.profile_dir,
142 profile_dir = self.profile_dir,
143 user_module = self.user_module,
143 user_module = self.user_module,
144 user_ns = self.user_ns,
144 user_ns = self.user_ns,
145 )
145 )
146 self.shell.displayhook.session = self.session
146 self.shell.displayhook.session = self.session
147 self.shell.displayhook.pub_socket = self.iopub_socket
147 self.shell.displayhook.pub_socket = self.iopub_socket
148 self.shell.displayhook.topic = self._topic('pyout')
148 self.shell.displayhook.topic = self._topic('pyout')
149 self.shell.display_pub.session = self.session
149 self.shell.display_pub.session = self.session
150 self.shell.display_pub.pub_socket = self.iopub_socket
150 self.shell.display_pub.pub_socket = self.iopub_socket
151
151
152 # TMP - hack while developing
152 # TMP - hack while developing
153 self.shell._reply_content = None
153 self.shell._reply_content = None
154
154
155 # Build dict of handlers for message types
155 # Build dict of handlers for message types
156 msg_types = [ 'execute_request', 'complete_request',
156 msg_types = [ 'execute_request', 'complete_request',
157 'object_info_request', 'history_request',
157 'object_info_request', 'history_request',
158 'connect_request', 'shutdown_request',
158 'connect_request', 'shutdown_request',
159 'apply_request',
159 'apply_request',
160 ]
160 ]
161 self.shell_handlers = {}
161 self.shell_handlers = {}
162 for msg_type in msg_types:
162 for msg_type in msg_types:
163 self.shell_handlers[msg_type] = getattr(self, msg_type)
163 self.shell_handlers[msg_type] = getattr(self, msg_type)
164
164
165 control_msg_types = msg_types + [ 'clear_request', 'abort_request' ]
165 control_msg_types = msg_types + [ 'clear_request', 'abort_request' ]
166 self.control_handlers = {}
166 self.control_handlers = {}
167 for msg_type in control_msg_types:
167 for msg_type in control_msg_types:
168 self.control_handlers[msg_type] = getattr(self, msg_type)
168 self.control_handlers[msg_type] = getattr(self, msg_type)
169
169
170 def dispatch_control(self, msg):
170 def dispatch_control(self, msg):
171 """dispatch control requests"""
171 """dispatch control requests"""
172 idents,msg = self.session.feed_identities(msg, copy=False)
172 idents,msg = self.session.feed_identities(msg, copy=False)
173 try:
173 try:
174 msg = self.session.unserialize(msg, content=True, copy=False)
174 msg = self.session.unserialize(msg, content=True, copy=False)
175 except:
175 except:
176 self.log.error("Invalid Control Message", exc_info=True)
176 self.log.error("Invalid Control Message", exc_info=True)
177 return
177 return
178
178
179 self.log.debug("Control received: %s", msg)
179 self.log.debug("Control received: %s", msg)
180
180
181 header = msg['header']
181 header = msg['header']
182 msg_id = header['msg_id']
182 msg_id = header['msg_id']
183 msg_type = header['msg_type']
183 msg_type = header['msg_type']
184
184
185 handler = self.control_handlers.get(msg_type, None)
185 handler = self.control_handlers.get(msg_type, None)
186 if handler is None:
186 if handler is None:
187 self.log.error("UNKNOWN CONTROL MESSAGE TYPE: %r", msg_type)
187 self.log.error("UNKNOWN CONTROL MESSAGE TYPE: %r", msg_type)
188 else:
188 else:
189 try:
189 try:
190 handler(self.control_stream, idents, msg)
190 handler(self.control_stream, idents, msg)
191 except Exception:
191 except Exception:
192 self.log.error("Exception in control handler:", exc_info=True)
192 self.log.error("Exception in control handler:", exc_info=True)
193
193
194 def dispatch_shell(self, stream, msg):
194 def dispatch_shell(self, stream, msg):
195 """dispatch shell requests"""
195 """dispatch shell requests"""
196 # flush control requests first
196 # flush control requests first
197 if self.control_stream:
197 if self.control_stream:
198 self.control_stream.flush()
198 self.control_stream.flush()
199
199
200 idents,msg = self.session.feed_identities(msg, copy=False)
200 idents,msg = self.session.feed_identities(msg, copy=False)
201 try:
201 try:
202 msg = self.session.unserialize(msg, content=True, copy=False)
202 msg = self.session.unserialize(msg, content=True, copy=False)
203 except:
203 except:
204 self.log.error("Invalid Message", exc_info=True)
204 self.log.error("Invalid Message", exc_info=True)
205 return
205 return
206
206
207 header = msg['header']
207 header = msg['header']
208 msg_id = header['msg_id']
208 msg_id = header['msg_id']
209 msg_type = msg['header']['msg_type']
209 msg_type = msg['header']['msg_type']
210
210
211 # Print some info about this message and leave a '--->' marker, so it's
211 # Print some info about this message and leave a '--->' marker, so it's
212 # easier to trace visually the message chain when debugging. Each
212 # easier to trace visually the message chain when debugging. Each
213 # handler prints its message at the end.
213 # handler prints its message at the end.
214 self.log.debug('\n*** MESSAGE TYPE:%s***', msg_type)
214 self.log.debug('\n*** MESSAGE TYPE:%s***', msg_type)
215 self.log.debug(' Content: %s\n --->\n ', msg['content'])
215 self.log.debug(' Content: %s\n --->\n ', msg['content'])
216
216
217 if msg_id in self.aborted:
217 if msg_id in self.aborted:
218 self.aborted.remove(msg_id)
218 self.aborted.remove(msg_id)
219 # is it safe to assume a msg_id will not be resubmitted?
219 # is it safe to assume a msg_id will not be resubmitted?
220 reply_type = msg_type.split('_')[0] + '_reply'
220 reply_type = msg_type.split('_')[0] + '_reply'
221 status = {'status' : 'aborted'}
221 status = {'status' : 'aborted'}
222 sub = {'engine' : self.ident}
222 sub = {'engine' : self.ident}
223 sub.update(status)
223 sub.update(status)
224 reply_msg = self.session.send(stream, reply_type, subheader=sub,
224 reply_msg = self.session.send(stream, reply_type, subheader=sub,
225 content=status, parent=msg, ident=idents)
225 content=status, parent=msg, ident=idents)
226 return
226 return
227
227
228 handler = self.shell_handlers.get(msg_type, None)
228 handler = self.shell_handlers.get(msg_type, None)
229 if handler is None:
229 if handler is None:
230 self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type)
230 self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type)
231 else:
231 else:
232 # ensure default_int_handler during handler call
232 # ensure default_int_handler during handler call
233 sig = signal(SIGINT, default_int_handler)
233 sig = signal(SIGINT, default_int_handler)
234 try:
234 try:
235 handler(stream, idents, msg)
235 handler(stream, idents, msg)
236 except Exception:
236 except Exception:
237 self.log.error("Exception in message handler:", exc_info=True)
237 self.log.error("Exception in message handler:", exc_info=True)
238 finally:
238 finally:
239 signal(SIGINT, sig)
239 signal(SIGINT, sig)
240
240
241 def enter_eventloop(self):
241 def enter_eventloop(self):
242 """enter eventloop"""
242 """enter eventloop"""
243 self.log.info("entering eventloop")
243 self.log.info("entering eventloop")
244 # restore default_int_handler
244 # restore default_int_handler
245 signal(SIGINT, default_int_handler)
245 signal(SIGINT, default_int_handler)
246 while self.eventloop is not None:
246 while self.eventloop is not None:
247 try:
247 try:
248 self.eventloop(self)
248 self.eventloop(self)
249 except KeyboardInterrupt:
249 except KeyboardInterrupt:
250 # Ctrl-C shouldn't crash the kernel
250 # Ctrl-C shouldn't crash the kernel
251 self.log.error("KeyboardInterrupt caught in kernel")
251 self.log.error("KeyboardInterrupt caught in kernel")
252 continue
252 continue
253 else:
253 else:
254 # eventloop exited cleanly, this means we should stop (right?)
254 # eventloop exited cleanly, this means we should stop (right?)
255 self.eventloop = None
255 self.eventloop = None
256 break
256 break
257 self.log.info("exiting eventloop")
257 self.log.info("exiting eventloop")
258 # if eventloop exits, IOLoop should stop
258 # if eventloop exits, IOLoop should stop
259 ioloop.IOLoop.instance().stop()
259 ioloop.IOLoop.instance().stop()
260
260
261 def start(self):
261 def start(self):
262 """register dispatchers for streams"""
262 """register dispatchers for streams"""
263 self.shell.exit_now = False
263 self.shell.exit_now = False
264 if self.control_stream:
264 if self.control_stream:
265 self.control_stream.on_recv(self.dispatch_control, copy=False)
265 self.control_stream.on_recv(self.dispatch_control, copy=False)
266
266
267 def make_dispatcher(stream):
267 def make_dispatcher(stream):
268 def dispatcher(msg):
268 def dispatcher(msg):
269 return self.dispatch_shell(stream, msg)
269 return self.dispatch_shell(stream, msg)
270 return dispatcher
270 return dispatcher
271
271
272 for s in self.shell_streams:
272 for s in self.shell_streams:
273 s.on_recv(make_dispatcher(s), copy=False)
273 s.on_recv(make_dispatcher(s), copy=False)
274
274
275 def do_one_iteration(self):
275 def do_one_iteration(self):
276 """step eventloop just once"""
276 """step eventloop just once"""
277 if self.control_stream:
277 if self.control_stream:
278 self.control_stream.flush()
278 self.control_stream.flush()
279 for stream in self.shell_streams:
279 for stream in self.shell_streams:
280 # handle at most one request per iteration
280 # handle at most one request per iteration
281 stream.flush(zmq.POLLIN, 1)
281 stream.flush(zmq.POLLIN, 1)
282 stream.flush(zmq.POLLOUT)
282 stream.flush(zmq.POLLOUT)
283
283
284
284
285 def record_ports(self, ports):
285 def record_ports(self, ports):
286 """Record the ports that this kernel is using.
286 """Record the ports that this kernel is using.
287
287
288 The creator of the Kernel instance must call this methods if they
288 The creator of the Kernel instance must call this methods if they
289 want the :meth:`connect_request` method to return the port numbers.
289 want the :meth:`connect_request` method to return the port numbers.
290 """
290 """
291 self._recorded_ports = ports
291 self._recorded_ports = ports
292
292
293 #---------------------------------------------------------------------------
293 #---------------------------------------------------------------------------
294 # Kernel request handlers
294 # Kernel request handlers
295 #---------------------------------------------------------------------------
295 #---------------------------------------------------------------------------
296
296
297 def _make_subheader(self):
297 def _make_subheader(self):
298 """init subheader dict, for execute/apply_reply"""
298 """init subheader dict, for execute/apply_reply"""
299 return {
299 return {
300 'dependencies_met' : True,
300 'dependencies_met' : True,
301 'engine' : self.ident,
301 'engine' : self.ident,
302 'started': datetime.now(),
302 'started': datetime.now(),
303 }
303 }
304
304
305 def _publish_pyin(self, code, parent, execution_count):
305 def _publish_pyin(self, code, parent, execution_count):
306 """Publish the code request on the pyin stream."""
306 """Publish the code request on the pyin stream."""
307
307
308 self.session.send(self.iopub_socket, u'pyin',
308 self.session.send(self.iopub_socket, u'pyin',
309 {u'code':code, u'execution_count': execution_count},
309 {u'code':code, u'execution_count': execution_count},
310 parent=parent, ident=self._topic('pyin')
310 parent=parent, ident=self._topic('pyin')
311 )
311 )
312
312
    def execute_request(self, stream, ident, parent):
        """Handle an ``execute_request``: run a code cell and send the reply.

        Publishes busy/idle status on IOPub around the execution, echoes
        the input as a ``pyin`` message unless the request is silent, and
        replies with status, execution count, user variables/expressions,
        and any payloads produced by the cell.
        """

        # Tell listening frontends we are busy before doing anything else.
        self.session.send(self.iopub_socket,
            u'status',
            {u'execution_state':u'busy'},
            parent=parent,
            ident=self._topic('status'),
        )

        try:
            content = parent[u'content']
            code = content[u'code']
            silent = content[u'silent']
        except:
            # Malformed request: log it and bail out without replying.
            self.log.error("Got bad msg: ")
            self.log.error("%s", parent)
            return

        sub = self._make_subheader()

        shell = self.shell # we'll need this a lot here

        # Replace raw_input. Note that is not sufficient to replace
        # raw_input in the user namespace.
        if content.get('allow_stdin', False):
            raw_input = lambda prompt='': self._raw_input(prompt, ident, parent)
        else:
            # stdin not allowed: any raw_input call raises in the user code.
            raw_input = lambda prompt='' : self._no_raw_input()

        if py3compat.PY3:
            __builtin__.input = raw_input
        else:
            __builtin__.raw_input = raw_input

        # Set the parent message of the display hook and out streams.
        shell.displayhook.set_parent(parent)
        shell.display_pub.set_parent(parent)
        sys.stdout.set_parent(parent)
        sys.stderr.set_parent(parent)

        # Re-broadcast our input for the benefit of listening clients, and
        # start computing output
        if not silent:
            self._publish_pyin(code, parent, shell.execution_count)

        reply_content = {}
        try:
            # FIXME: the shell calls the exception handler itself.
            shell.run_cell(code, store_history=not silent, silent=silent)
        except:
            status = u'error'
            # FIXME: this code right now isn't being used yet by default,
            # because the run_cell() call above directly fires off exception
            # reporting. This code, therefore, is only active in the scenario
            # where runlines itself has an unhandled exception. We need to
            # uniformize this, for all exception construction to come from a
            # single location in the codebase.
            etype, evalue, tb = sys.exc_info()
            tb_list = traceback.format_exception(etype, evalue, tb)
            reply_content.update(shell._showtraceback(etype, evalue, tb_list))
        else:
            status = u'ok'

        reply_content[u'status'] = status

        # Return the execution counter so clients can display prompts
        reply_content['execution_count'] = shell.execution_count - 1

        # FIXME - fish exception info out of shell, possibly left there by
        # runlines. We'll need to clean up this logic later.
        if shell._reply_content is not None:
            reply_content.update(shell._reply_content)
            # Tag the error with which engine produced it, for parallel clients.
            e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='execute')
            reply_content['engine_info'] = e_info
            # reset after use
            shell._reply_content = None

        # At this point, we can tell whether the main code execution succeeded
        # or not. If it did, we proceed to evaluate user_variables/expressions
        if reply_content['status'] == 'ok':
            reply_content[u'user_variables'] = \
                         shell.user_variables(content.get(u'user_variables', []))
            reply_content[u'user_expressions'] = \
                         shell.user_expressions(content.get(u'user_expressions', {}))
        else:
            # If there was an error, don't even try to compute variables or
            # expressions
            reply_content[u'user_variables'] = {}
            reply_content[u'user_expressions'] = {}

        # Payloads should be retrieved regardless of outcome, so we can both
        # recover partial output (that could have been generated early in a
        # block, before an error) and clear the payload system always.
        reply_content[u'payload'] = shell.payload_manager.read_payload()
        # Be agressive about clearing the payload because we don't want
        # it to sit in memory until the next execute_request comes in.
        shell.payload_manager.clear_payload()

        # Flush output before sending the reply.
        sys.stdout.flush()
        sys.stderr.flush()
        # FIXME: on rare occasions, the flush doesn't seem to make it to the
        # clients... This seems to mitigate the problem, but we definitely need
        # to better understand what's going on.
        if self._execute_sleep:
            time.sleep(self._execute_sleep)

        # Send the reply.
        reply_content = json_clean(reply_content)

        # Mirror status into the subheader so schedulers can introspect it.
        sub['status'] = reply_content['status']
        if reply_content['status'] == 'error' and \
                        reply_content['ename'] == 'UnmetDependency':
            sub['dependencies_met'] = False

        reply_msg = self.session.send(stream, u'execute_reply',
                                      reply_content, parent, subheader=sub,
                                      ident=ident)

        self.log.debug("%s", reply_msg)

        # An error in a non-silent cell aborts anything queued behind it.
        if not silent and reply_msg['content']['status'] == u'error':
            self._abort_queues()

        # Flip back to idle so frontends know we're done.
        self.session.send(self.iopub_socket,
                          u'status',
                          {u'execution_state':u'idle'},
                          parent=parent,
                          ident=self._topic('status'))
440
442
441 def complete_request(self, stream, ident, parent):
443 def complete_request(self, stream, ident, parent):
442 txt, matches = self._complete(parent)
444 txt, matches = self._complete(parent)
443 matches = {'matches' : matches,
445 matches = {'matches' : matches,
444 'matched_text' : txt,
446 'matched_text' : txt,
445 'status' : 'ok'}
447 'status' : 'ok'}
446 matches = json_clean(matches)
448 matches = json_clean(matches)
447 completion_msg = self.session.send(stream, 'complete_reply',
449 completion_msg = self.session.send(stream, 'complete_reply',
448 matches, parent, ident)
450 matches, parent, ident)
449 self.log.debug("%s", completion_msg)
451 self.log.debug("%s", completion_msg)
450
452
451 def object_info_request(self, stream, ident, parent):
453 def object_info_request(self, stream, ident, parent):
452 content = parent['content']
454 content = parent['content']
453 object_info = self.shell.object_inspect(content['oname'],
455 object_info = self.shell.object_inspect(content['oname'],
454 detail_level = content.get('detail_level', 0)
456 detail_level = content.get('detail_level', 0)
455 )
457 )
456 # Before we send this object over, we scrub it for JSON usage
458 # Before we send this object over, we scrub it for JSON usage
457 oinfo = json_clean(object_info)
459 oinfo = json_clean(object_info)
458 msg = self.session.send(stream, 'object_info_reply',
460 msg = self.session.send(stream, 'object_info_reply',
459 oinfo, parent, ident)
461 oinfo, parent, ident)
460 self.log.debug("%s", msg)
462 self.log.debug("%s", msg)
461
463
462 def history_request(self, stream, ident, parent):
464 def history_request(self, stream, ident, parent):
463 # We need to pull these out, as passing **kwargs doesn't work with
465 # We need to pull these out, as passing **kwargs doesn't work with
464 # unicode keys before Python 2.6.5.
466 # unicode keys before Python 2.6.5.
465 hist_access_type = parent['content']['hist_access_type']
467 hist_access_type = parent['content']['hist_access_type']
466 raw = parent['content']['raw']
468 raw = parent['content']['raw']
467 output = parent['content']['output']
469 output = parent['content']['output']
468 if hist_access_type == 'tail':
470 if hist_access_type == 'tail':
469 n = parent['content']['n']
471 n = parent['content']['n']
470 hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
472 hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
471 include_latest=True)
473 include_latest=True)
472
474
473 elif hist_access_type == 'range':
475 elif hist_access_type == 'range':
474 session = parent['content']['session']
476 session = parent['content']['session']
475 start = parent['content']['start']
477 start = parent['content']['start']
476 stop = parent['content']['stop']
478 stop = parent['content']['stop']
477 hist = self.shell.history_manager.get_range(session, start, stop,
479 hist = self.shell.history_manager.get_range(session, start, stop,
478 raw=raw, output=output)
480 raw=raw, output=output)
479
481
480 elif hist_access_type == 'search':
482 elif hist_access_type == 'search':
481 pattern = parent['content']['pattern']
483 pattern = parent['content']['pattern']
482 hist = self.shell.history_manager.search(pattern, raw=raw,
484 hist = self.shell.history_manager.search(pattern, raw=raw,
483 output=output)
485 output=output)
484
486
485 else:
487 else:
486 hist = []
488 hist = []
487 hist = list(hist)
489 hist = list(hist)
488 content = {'history' : hist}
490 content = {'history' : hist}
489 content = json_clean(content)
491 content = json_clean(content)
490 msg = self.session.send(stream, 'history_reply',
492 msg = self.session.send(stream, 'history_reply',
491 content, parent, ident)
493 content, parent, ident)
492 self.log.debug("Sending history reply with %i entries", len(hist))
494 self.log.debug("Sending history reply with %i entries", len(hist))
493
495
494 def connect_request(self, stream, ident, parent):
496 def connect_request(self, stream, ident, parent):
495 if self._recorded_ports is not None:
497 if self._recorded_ports is not None:
496 content = self._recorded_ports.copy()
498 content = self._recorded_ports.copy()
497 else:
499 else:
498 content = {}
500 content = {}
499 msg = self.session.send(stream, 'connect_reply',
501 msg = self.session.send(stream, 'connect_reply',
500 content, parent, ident)
502 content, parent, ident)
501 self.log.debug("%s", msg)
503 self.log.debug("%s", msg)
502
504
503 def shutdown_request(self, stream, ident, parent):
505 def shutdown_request(self, stream, ident, parent):
504 self.shell.exit_now = True
506 self.shell.exit_now = True
505 content = dict(status='ok')
507 content = dict(status='ok')
506 content.update(parent['content'])
508 content.update(parent['content'])
507 self.session.send(stream, u'shutdown_reply', content, parent, ident=ident)
509 self.session.send(stream, u'shutdown_reply', content, parent, ident=ident)
508 # same content, but different msg_id for broadcasting on IOPub
510 # same content, but different msg_id for broadcasting on IOPub
509 self._shutdown_message = self.session.msg(u'shutdown_reply',
511 self._shutdown_message = self.session.msg(u'shutdown_reply',
510 content, parent
512 content, parent
511 )
513 )
512
514
513 self._at_shutdown()
515 self._at_shutdown()
514 # call sys.exit after a short delay
516 # call sys.exit after a short delay
515 loop = ioloop.IOLoop.instance()
517 loop = ioloop.IOLoop.instance()
516 loop.add_timeout(time.time()+0.1, loop.stop)
518 loop.add_timeout(time.time()+0.1, loop.stop)
517
519
518 #---------------------------------------------------------------------------
520 #---------------------------------------------------------------------------
519 # Engine methods
521 # Engine methods
520 #---------------------------------------------------------------------------
522 #---------------------------------------------------------------------------
521
523
    def apply_request(self, stream, ident, parent):
        """Handle an ``apply_request``: run a serialized function call.

        Unpacks ``(f, args, kwargs)`` from the message buffers, executes
        the call inside the user namespace under temporary mangled names,
        and replies with the serialized result (or publishes a ``pyerr``
        and replies with the error content on failure).
        """
        try:
            content = parent[u'content']
            bufs = parent[u'buffers']
            msg_id = parent['header']['msg_id']
        except:
            # Malformed request: log it and bail out without replying.
            self.log.error("Got bad msg: %s", parent, exc_info=True)
            return

        # Set the parent message of the display hook and out streams.
        self.shell.displayhook.set_parent(parent)
        self.shell.display_pub.set_parent(parent)
        sys.stdout.set_parent(parent)
        sys.stderr.set_parent(parent)

        # pyin_msg = self.session.msg(u'pyin',{u'code':code}, parent=parent)
        # self.iopub_socket.send(pyin_msg)
        # self.session.send(self.iopub_socket, u'pyin', {u'code':code},parent=parent)
        sub = self._make_subheader()
        try:
            working = self.shell.user_ns

            # Mangle the temporary names with the msg_id so concurrent
            # requests cannot collide in the user namespace.
            prefix = "_"+str(msg_id).replace("-","")+"_"

            f,args,kwargs = unpack_apply_message(bufs, working, copy=False)

            fname = getattr(f, '__name__', 'f')

            fname = prefix+"f"
            argname = prefix+"args"
            kwargname = prefix+"kwargs"
            resultname = prefix+"result"

            ns = { fname : f, argname : args, kwargname : kwargs , resultname : None }
            # print ns
            working.update(ns)
            code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname)
            try:
                exec code in self.shell.user_global_ns, self.shell.user_ns
                result = working.get(resultname)
            finally:
                # Always scrub the temporary names, even if the call raised.
                for key in ns.iterkeys():
                    working.pop(key)

            packed_result,buf = serialize_object(result)
            result_buf = [packed_result]+buf
        except:
            # Wrap and publish the exception so clients (and the scheduler)
            # can see it, then use it as the reply content.
            exc_content = self._wrap_exception('apply')
            # exc_msg = self.session.msg(u'pyerr', exc_content, parent)
            self.session.send(self.iopub_socket, u'pyerr', exc_content, parent=parent,
                                ident=self._topic('pyerr'))
            reply_content = exc_content
            result_buf = []

            if exc_content['ename'] == 'UnmetDependency':
                sub['dependencies_met'] = False
        else:
            reply_content = {'status' : 'ok'}

        # put 'ok'/'error' status in header, for scheduler introspection:
        sub['status'] = reply_content['status']

        # flush i/o
        sys.stdout.flush()
        sys.stderr.flush()

        reply_msg = self.session.send(stream, u'apply_reply', reply_content,
                    parent=parent, ident=ident,buffers=result_buf, subheader=sub)
590
592
591 #---------------------------------------------------------------------------
593 #---------------------------------------------------------------------------
592 # Control messages
594 # Control messages
593 #---------------------------------------------------------------------------
595 #---------------------------------------------------------------------------
594
596
595 def abort_request(self, stream, ident, parent):
597 def abort_request(self, stream, ident, parent):
596 """abort a specifig msg by id"""
598 """abort a specifig msg by id"""
597 msg_ids = parent['content'].get('msg_ids', None)
599 msg_ids = parent['content'].get('msg_ids', None)
598 if isinstance(msg_ids, basestring):
600 if isinstance(msg_ids, basestring):
599 msg_ids = [msg_ids]
601 msg_ids = [msg_ids]
600 if not msg_ids:
602 if not msg_ids:
601 self.abort_queues()
603 self.abort_queues()
602 for mid in msg_ids:
604 for mid in msg_ids:
603 self.aborted.add(str(mid))
605 self.aborted.add(str(mid))
604
606
605 content = dict(status='ok')
607 content = dict(status='ok')
606 reply_msg = self.session.send(stream, 'abort_reply', content=content,
608 reply_msg = self.session.send(stream, 'abort_reply', content=content,
607 parent=parent, ident=ident)
609 parent=parent, ident=ident)
608 self.log.debug("%s", reply_msg)
610 self.log.debug("%s", reply_msg)
609
611
610 def clear_request(self, stream, idents, parent):
612 def clear_request(self, stream, idents, parent):
611 """Clear our namespace."""
613 """Clear our namespace."""
612 self.shell.reset(False)
614 self.shell.reset(False)
613 msg = self.session.send(stream, 'clear_reply', ident=idents, parent=parent,
615 msg = self.session.send(stream, 'clear_reply', ident=idents, parent=parent,
614 content = dict(status='ok'))
616 content = dict(status='ok'))
615
617
616
618
617 #---------------------------------------------------------------------------
619 #---------------------------------------------------------------------------
618 # Protected interface
620 # Protected interface
619 #---------------------------------------------------------------------------
621 #---------------------------------------------------------------------------
620
622
621
623
622 def _wrap_exception(self, method=None):
624 def _wrap_exception(self, method=None):
623 # import here, because _wrap_exception is only used in parallel,
625 # import here, because _wrap_exception is only used in parallel,
624 # and parallel has higher min pyzmq version
626 # and parallel has higher min pyzmq version
625 from IPython.parallel.error import wrap_exception
627 from IPython.parallel.error import wrap_exception
626 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method=method)
628 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method=method)
627 content = wrap_exception(e_info)
629 content = wrap_exception(e_info)
628 return content
630 return content
629
631
630 def _topic(self, topic):
632 def _topic(self, topic):
631 """prefixed topic for IOPub messages"""
633 """prefixed topic for IOPub messages"""
632 if self.int_id >= 0:
634 if self.int_id >= 0:
633 base = "engine.%i" % self.int_id
635 base = "engine.%i" % self.int_id
634 else:
636 else:
635 base = "kernel.%s" % self.ident
637 base = "kernel.%s" % self.ident
636
638
637 return py3compat.cast_bytes("%s.%s" % (base, topic))
639 return py3compat.cast_bytes("%s.%s" % (base, topic))
638
640
639 def _abort_queues(self):
641 def _abort_queues(self):
640 for stream in self.shell_streams:
642 for stream in self.shell_streams:
641 if stream:
643 if stream:
642 self._abort_queue(stream)
644 self._abort_queue(stream)
643
645
    def _abort_queue(self, stream):
        """Drain one shell stream, replying 'aborted' to each queued request."""
        poller = zmq.Poller()
        poller.register(stream.socket, zmq.POLLIN)
        while True:
            idents,msg = self.session.recv(stream, zmq.NOBLOCK, content=True)
            if msg is None:
                # Nothing left waiting on this stream; we're done.
                return

            self.log.info("Aborting:")
            self.log.info("%s", msg)
            msg_type = msg['header']['msg_type']
            # Derive the reply type, e.g. execute_request -> execute_reply.
            reply_type = msg_type.split('_')[0] + '_reply'

            status = {'status' : 'aborted'}
            sub = {'engine' : self.ident}
            sub.update(status)
            reply_msg = self.session.send(stream, reply_type, subheader=sub,
                        content=status, parent=msg, ident=idents)
            self.log.debug("%s", reply_msg)
            # We need to wait a bit for requests to come in. This can probably
            # be set shorter for true asynchronous clients.
            poller.poll(50)
666
668
667
669
668 def _no_raw_input(self):
670 def _no_raw_input(self):
669 """Raise StdinNotImplentedError if active frontend doesn't support
671 """Raise StdinNotImplentedError if active frontend doesn't support
670 stdin."""
672 stdin."""
671 raise StdinNotImplementedError("raw_input was called, but this "
673 raise StdinNotImplementedError("raw_input was called, but this "
672 "frontend does not support stdin.")
674 "frontend does not support stdin.")
673
675
    def _raw_input(self, prompt, ident, parent):
        """Request one line of input from the frontend over the stdin socket.

        Blocks until a reply arrives; raises EOFError on an EOT (^D) reply,
        and returns '' if the reply is malformed.
        """
        # Flush output before making the request.
        sys.stderr.flush()
        sys.stdout.flush()

        # Send the input request.
        content = json_clean(dict(prompt=prompt))
        self.session.send(self.stdin_socket, u'input_request', content, parent,
                          ident=ident)

        # Await a response, skipping over any invalid messages.
        while True:
            try:
                ident, reply = self.session.recv(self.stdin_socket, 0)
            except Exception:
                self.log.warn("Invalid Message:", exc_info=True)
            else:
                break
        try:
            value = reply['content']['value']
        except:
            self.log.error("Got bad raw_input reply: ")
            self.log.error("%s", parent)
            value = ''
        if value == '\x04':
            # EOF (EOT control character, ^D)
            raise EOFError
        return value
702
704
703 def _complete(self, msg):
705 def _complete(self, msg):
704 c = msg['content']
706 c = msg['content']
705 try:
707 try:
706 cpos = int(c['cursor_pos'])
708 cpos = int(c['cursor_pos'])
707 except:
709 except:
708 # If we don't get something that we can convert to an integer, at
710 # If we don't get something that we can convert to an integer, at
709 # least attempt the completion guessing the cursor is at the end of
711 # least attempt the completion guessing the cursor is at the end of
710 # the text, if there's any, and otherwise of the line
712 # the text, if there's any, and otherwise of the line
711 cpos = len(c['text'])
713 cpos = len(c['text'])
712 if cpos==0:
714 if cpos==0:
713 cpos = len(c['line'])
715 cpos = len(c['line'])
714 return self.shell.complete(c['text'], c['line'], cpos)
716 return self.shell.complete(c['text'], c['line'], cpos)
715
717
716 def _object_info(self, context):
718 def _object_info(self, context):
717 symbol, leftover = self._symbol_from_context(context)
719 symbol, leftover = self._symbol_from_context(context)
718 if symbol is not None and not leftover:
720 if symbol is not None and not leftover:
719 doc = getattr(symbol, '__doc__', '')
721 doc = getattr(symbol, '__doc__', '')
720 else:
722 else:
721 doc = ''
723 doc = ''
722 object_info = dict(docstring = doc)
724 object_info = dict(docstring = doc)
723 return object_info
725 return object_info
724
726
725 def _symbol_from_context(self, context):
727 def _symbol_from_context(self, context):
726 if not context:
728 if not context:
727 return None, context
729 return None, context
728
730
729 base_symbol_string = context[0]
731 base_symbol_string = context[0]
730 symbol = self.shell.user_ns.get(base_symbol_string, None)
732 symbol = self.shell.user_ns.get(base_symbol_string, None)
731 if symbol is None:
733 if symbol is None:
732 symbol = __builtin__.__dict__.get(base_symbol_string, None)
734 symbol = __builtin__.__dict__.get(base_symbol_string, None)
733 if symbol is None:
735 if symbol is None:
734 return None, context
736 return None, context
735
737
736 context = context[1:]
738 context = context[1:]
737 for i, name in enumerate(context):
739 for i, name in enumerate(context):
738 new_symbol = getattr(symbol, name, None)
740 new_symbol = getattr(symbol, name, None)
739 if new_symbol is None:
741 if new_symbol is None:
740 return symbol, context[i:]
742 return symbol, context[i:]
741 else:
743 else:
742 symbol = new_symbol
744 symbol = new_symbol
743
745
744 return symbol, []
746 return symbol, []
745
747
746 def _at_shutdown(self):
748 def _at_shutdown(self):
747 """Actions taken at shutdown by the kernel, called by python's atexit.
749 """Actions taken at shutdown by the kernel, called by python's atexit.
748 """
750 """
749 # io.rprint("Kernel at_shutdown") # dbg
751 # io.rprint("Kernel at_shutdown") # dbg
750 if self._shutdown_message is not None:
752 if self._shutdown_message is not None:
751 self.session.send(self.iopub_socket, self._shutdown_message, ident=self._topic('shutdown'))
753 self.session.send(self.iopub_socket, self._shutdown_message, ident=self._topic('shutdown'))
752 self.log.debug("%s", self._shutdown_message)
754 self.log.debug("%s", self._shutdown_message)
753 [ s.flush(zmq.POLLOUT) for s in self.shell_streams ]
755 [ s.flush(zmq.POLLOUT) for s in self.shell_streams ]
754
756
755 #-----------------------------------------------------------------------------
757 #-----------------------------------------------------------------------------
756 # Aliases and Flags for the IPKernelApp
758 # Aliases and Flags for the IPKernelApp
757 #-----------------------------------------------------------------------------
759 #-----------------------------------------------------------------------------
758
760
759 flags = dict(kernel_flags)
761 flags = dict(kernel_flags)
760 flags.update(shell_flags)
762 flags.update(shell_flags)
761
763
762 addflag = lambda *args: flags.update(boolean_flag(*args))
764 addflag = lambda *args: flags.update(boolean_flag(*args))
763
765
764 flags['pylab'] = (
766 flags['pylab'] = (
765 {'IPKernelApp' : {'pylab' : 'auto'}},
767 {'IPKernelApp' : {'pylab' : 'auto'}},
766 """Pre-load matplotlib and numpy for interactive use with
768 """Pre-load matplotlib and numpy for interactive use with
767 the default matplotlib backend."""
769 the default matplotlib backend."""
768 )
770 )
769
771
770 aliases = dict(kernel_aliases)
772 aliases = dict(kernel_aliases)
771 aliases.update(shell_aliases)
773 aliases.update(shell_aliases)
772
774
773 # it's possible we don't want short aliases for *all* of these:
775 # it's possible we don't want short aliases for *all* of these:
774 aliases.update(dict(
776 aliases.update(dict(
775 pylab='IPKernelApp.pylab',
777 pylab='IPKernelApp.pylab',
776 ))
778 ))
777
779
778 #-----------------------------------------------------------------------------
780 #-----------------------------------------------------------------------------
779 # The IPKernelApp class
781 # The IPKernelApp class
780 #-----------------------------------------------------------------------------
782 #-----------------------------------------------------------------------------
781
783
class IPKernelApp(KernelApp, InteractiveShellApp):
    """Kernel application that combines the base KernelApp machinery with
    the InteractiveShellApp mixin (path/shell/extensions/code init)."""

    name = 'ipkernel'

    aliases = Dict(aliases)
    flags = Dict(flags)
    classes = [Kernel, ZMQInteractiveShell, ProfileDir, Session]

    # configurables
    pylab = CaselessStrEnum(['tk', 'qt', 'wx', 'gtk', 'osx', 'inline', 'auto'],
        config=True,
        help="""Pre-load matplotlib and numpy for interactive use,
        selecting a particular matplotlib backend and loop integration.
        """
    )

    @catch_config_error
    def initialize(self, argv=None):
        """Run base initialization, then the shell-app init steps in order."""
        super(IPKernelApp, self).initialize(argv)
        self.init_path()
        self.init_shell()
        self.init_extensions()
        self.init_code()

    def init_kernel(self):
        """Create the Kernel object wired to this app's sockets, and
        optionally activate pylab support."""
        stream = ZMQStream(self.shell_socket)
        kernel = Kernel(config=self.config, session=self.session,
                        shell_streams=[stream],
                        iopub_socket=self.iopub_socket,
                        stdin_socket=self.stdin_socket,
                        log=self.log,
                        profile_dir=self.profile_dir,
        )
        self.kernel = kernel
        kernel.record_ports(self.ports)
        shell = kernel.shell
        if self.pylab:
            try:
                gui, backend = pylabtools.find_gui_and_backend(self.pylab)
                shell.enable_pylab(gui, import_all=self.pylab_import_all)
            except Exception:
                self.log.error("Pylab initialization failed", exc_info=True)
                # print exception straight to stdout, because normally
                # _showtraceback associates the reply with an execution,
                # which means frontends will never draw it, as this exception
                # is not associated with any execute request.

                # replace pyerr-sending traceback with stdout
                saved_showtraceback = shell._showtraceback
                def print_tb(etype, evalue, stb):
                    print ("Error initializing pylab, pylab mode will not "
                           "be active", file=io.stderr)
                    print (shell.InteractiveTB.stb2text(stb), file=io.stdout)
                shell._showtraceback = print_tb

                # send the traceback over stdout
                shell.showtraceback(tb_offset=0)

                # restore proper _showtraceback method
                shell._showtraceback = saved_showtraceback


    def init_shell(self):
        """Expose the kernel's shell on the app and register the app as one
        of the shell's configurable objects."""
        self.shell = self.kernel.shell
        self.shell.configurables.append(self)
848
850
849
851
850 #-----------------------------------------------------------------------------
852 #-----------------------------------------------------------------------------
851 # Kernel main and launch functions
853 # Kernel main and launch functions
852 #-----------------------------------------------------------------------------
854 #-----------------------------------------------------------------------------
853
855
def launch_kernel(*args, **kwargs):
    """Launches a localhost IPython kernel, binding to the specified ports.

    This function simply calls entry_point.base_launch_kernel with the right
    first command to start an ipkernel.  See base_launch_kernel for arguments.

    Returns
    -------
    A tuple of form:
        (kernel_process, shell_port, iopub_port, stdin_port, hb_port)
    where kernel_process is a Popen object and the ports are integers.
    """
    launch_cmd = 'from IPython.zmq.ipkernel import main; main()'
    return base_launch_kernel(launch_cmd, *args, **kwargs)
868
870
869
871
def embed_kernel(module=None, local_ns=None, **kwargs):
    """Embed and start an IPython kernel in a given scope.

    Parameters
    ----------
    module : ModuleType, optional
        The module to load into IPython globals (default: caller)
    local_ns : dict, optional
        The namespace to load into IPython user namespace (default: caller)

    kwargs : various, optional
        Further keyword args are relayed to the KernelApp constructor,
        allowing configuration of the Kernel.  Will only have an effect
        on the first embed_kernel call for a given process.
    """
    # Reuse the singleton app if one exists; otherwise configure a new one.
    if not IPKernelApp.initialized():
        app = IPKernelApp.instance(**kwargs)
        app.initialize([])
        # Undo unnecessary sys module mangling from init_sys_modules.
        # This would not be necessary if we could prevent it
        # in the first place by using a different InteractiveShell
        # subclass, as in the regular embed case.
        orig_main = app.kernel.shell._orig_sys_modules_main_mod
        if orig_main is not None:
            sys.modules[app.kernel.shell._orig_sys_modules_main_name] = orig_main
    else:
        app = IPKernelApp.instance()

    # load the calling scope if not given
    (caller_module, caller_locals) = extract_module_locals(1)
    if module is None:
        module = caller_module
    if local_ns is None:
        local_ns = caller_locals

    app.kernel.user_module = module
    app.kernel.user_ns = local_ns
    app.start()
910
912
def main():
    """Run an IPKernel as an application"""
    kernel_app = IPKernelApp.instance()
    kernel_app.initialize()
    kernel_app.start()


if __name__ == '__main__':
    main()
@@ -1,540 +1,539 b''
1 """A ZMQ-based subclass of InteractiveShell.
1 """A ZMQ-based subclass of InteractiveShell.
2
2
3 This code is meant to ease the refactoring of the base InteractiveShell into
3 This code is meant to ease the refactoring of the base InteractiveShell into
4 something with a cleaner architecture for 2-process use, without actually
4 something with a cleaner architecture for 2-process use, without actually
5 breaking InteractiveShell itself. So we're doing something a bit ugly, where
5 breaking InteractiveShell itself. So we're doing something a bit ugly, where
6 we subclass and override what we want to fix. Once this is working well, we
6 we subclass and override what we want to fix. Once this is working well, we
7 can go back to the base class and refactor the code for a cleaner inheritance
7 can go back to the base class and refactor the code for a cleaner inheritance
8 implementation that doesn't rely on so much monkeypatching.
8 implementation that doesn't rely on so much monkeypatching.
9
9
10 But this lets us maintain a fully working IPython as we develop the new
10 But this lets us maintain a fully working IPython as we develop the new
11 machinery. This should thus be thought of as scaffolding.
11 machinery. This should thus be thought of as scaffolding.
12 """
12 """
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14 # Imports
14 # Imports
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16 from __future__ import print_function
16 from __future__ import print_function
17
17
18 # Stdlib
18 # Stdlib
19 import inspect
19 import inspect
20 import os
20 import os
21 import sys
21 import sys
22 import time
22 import time
23 from subprocess import Popen, PIPE
23 from subprocess import Popen, PIPE
24
24
25 # System library imports
25 # System library imports
26 from zmq.eventloop import ioloop
26 from zmq.eventloop import ioloop
27
27
28 # Our own
28 # Our own
29 from IPython.core.interactiveshell import (
29 from IPython.core.interactiveshell import (
30 InteractiveShell, InteractiveShellABC
30 InteractiveShell, InteractiveShellABC
31 )
31 )
32 from IPython.core import page, pylabtools
32 from IPython.core import page, pylabtools
33 from IPython.core.autocall import ZMQExitAutocall
33 from IPython.core.autocall import ZMQExitAutocall
34 from IPython.core.displaypub import DisplayPublisher
34 from IPython.core.displaypub import DisplayPublisher
35 from IPython.core.macro import Macro
35 from IPython.core.macro import Macro
36 from IPython.core.magics import MacroToEdit
36 from IPython.core.magics import MacroToEdit
37 from IPython.core.payloadpage import install_payload_page
37 from IPython.core.payloadpage import install_payload_page
38 from IPython.lib.kernel import (
38 from IPython.lib.kernel import (
39 get_connection_file, get_connection_info, connect_qtconsole
39 get_connection_file, get_connection_info, connect_qtconsole
40 )
40 )
41 from IPython.testing.skipdoctest import skip_doctest
41 from IPython.testing.skipdoctest import skip_doctest
42 from IPython.utils import io
42 from IPython.utils import io
43 from IPython.utils.jsonutil import json_clean
43 from IPython.utils.jsonutil import json_clean
44 from IPython.utils.path import get_py_filename
44 from IPython.utils.path import get_py_filename
45 from IPython.utils.process import arg_split
45 from IPython.utils.process import arg_split
46 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes
46 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes
47 from IPython.utils.warn import warn, error
47 from IPython.utils.warn import warn, error
48 from IPython.zmq.displayhook import ZMQShellDisplayHook, _encode_binary
48 from IPython.zmq.displayhook import ZMQShellDisplayHook, _encode_binary
49 from IPython.zmq.session import extract_header
49 from IPython.zmq.session import extract_header
50 from session import Session
50 from session import Session
51
51
52 #-----------------------------------------------------------------------------
52 #-----------------------------------------------------------------------------
53 # Functions and classes
53 # Functions and classes
54 #-----------------------------------------------------------------------------
54 #-----------------------------------------------------------------------------
55
55
class ZMQDisplayPublisher(DisplayPublisher):
    """A display publisher that publishes data using a ZeroMQ PUB socket."""

    session = Instance(Session)
    pub_socket = Instance('zmq.Socket')
    parent_header = Dict({})
    # IOPub topic prefix used as the zmq ident for outgoing messages.
    topic = CBytes(b'displaypub')

    def set_parent(self, parent):
        """Set the parent for outbound messages."""
        self.parent_header = extract_header(parent)

    def _flush_streams(self):
        """flush IO Streams prior to display"""
        sys.stdout.flush()
        sys.stderr.flush()

    def publish(self, source, data, metadata=None):
        """Send a display_data message for *data* on the PUB socket.

        Note: use _encode_binary's RETURN value -- the original code called
        it for side effect only and sent the raw, unencoded data dict.
        """
        self._flush_streams()
        if metadata is None:
            metadata = {}
        self._validate_data(source, data, metadata)
        content = {}
        content['source'] = source
        content['data'] = _encode_binary(data)
        content['metadata'] = metadata
        self.session.send(
            self.pub_socket, u'display_data', json_clean(content),
            parent=self.parent_header, ident=self.topic,
        )

    def clear_output(self, stdout=True, stderr=True, other=True):
        """Ask frontends to clear output; carriage-return the local streams
        so partial lines are overwritten, then flush before sending."""
        content = dict(stdout=stdout, stderr=stderr, other=other)

        if stdout:
            print('\r', file=sys.stdout, end='')
        if stderr:
            print('\r', file=sys.stderr, end='')

        self._flush_streams()

        self.session.send(
            self.pub_socket, u'clear_output', content,
            parent=self.parent_header, ident=self.topic,
        )
102
101
103 class ZMQInteractiveShell(InteractiveShell):
102 class ZMQInteractiveShell(InteractiveShell):
104 """A subclass of InteractiveShell for ZMQ."""
103 """A subclass of InteractiveShell for ZMQ."""
105
104
106 displayhook_class = Type(ZMQShellDisplayHook)
105 displayhook_class = Type(ZMQShellDisplayHook)
107 display_pub_class = Type(ZMQDisplayPublisher)
106 display_pub_class = Type(ZMQDisplayPublisher)
108
107
109 # Override the traitlet in the parent class, because there's no point using
108 # Override the traitlet in the parent class, because there's no point using
110 # readline for the kernel. Can be removed when the readline code is moved
109 # readline for the kernel. Can be removed when the readline code is moved
111 # to the terminal frontend.
110 # to the terminal frontend.
112 colors_force = CBool(True)
111 colors_force = CBool(True)
113 readline_use = CBool(False)
112 readline_use = CBool(False)
114 # autoindent has no meaning in a zmqshell, and attempting to enable it
113 # autoindent has no meaning in a zmqshell, and attempting to enable it
115 # will print a warning in the absence of readline.
114 # will print a warning in the absence of readline.
116 autoindent = CBool(False)
115 autoindent = CBool(False)
117
116
# Exit autocall instance; built lazily, bound to this shell.
exiter = Instance(ZMQExitAutocall)
def _exiter_default(self):
    return ZMQExitAutocall(self)
121
120
def _exit_now_changed(self, name, old, new):
    """stop eventloop when exit_now fires"""
    if not new:
        return
    # Give in-flight replies ~100ms to go out before stopping the loop.
    loop = ioloop.IOLoop.instance()
    loop.add_timeout(time.time() + 0.1, loop.stop)
127
126
128 keepkernel_on_exit = None
127 keepkernel_on_exit = None
129
128
130 # Over ZeroMQ, GUI control isn't done with PyOS_InputHook as there is no
129 # Over ZeroMQ, GUI control isn't done with PyOS_InputHook as there is no
131 # interactive input being read; we provide event loop support in ipkernel
130 # interactive input being read; we provide event loop support in ipkernel
132 from .eventloops import enable_gui
131 from .eventloops import enable_gui
133 enable_gui = staticmethod(enable_gui)
132 enable_gui = staticmethod(enable_gui)
134
133
def init_environment(self):
    """Configure the user's environment."""
    environ = os.environ
    # These two ensure 'ls' produces nice coloring on BSD-derived systems
    environ['TERM'] = 'xterm-color'
    environ['CLICOLOR'] = '1'
    # Since normal pagers don't work at all (over pexpect we don't have
    # single-key control of the subprocess), try to disable paging in
    # subprocesses as much as possible.
    environ['PAGER'] = 'cat'
    environ['GIT_PAGER'] = 'cat'

    # And install the payload version of page.
    install_payload_page()
151
150
def auto_rewrite_input(self, cmd):
    """Called to show the auto-rewritten input for autocall and friends.

    FIXME: this payload is currently not correctly processed by the
    frontend.
    """
    rewritten = self.prompt_manager.render('rewrite') + cmd
    self.payload_manager.write_payload(dict(
        source='IPython.zmq.zmqshell.ZMQInteractiveShell.auto_rewrite_input',
        transformed_input=rewritten,
    ))
164
163
def ask_exit(self):
    """Engage the exit actions."""
    # Flip the flag (observed by _exit_now_changed) and tell the frontend,
    # including whether it should keep the kernel process alive.
    self.exit_now = True
    self.payload_manager.write_payload(dict(
        source='IPython.zmq.zmqshell.ZMQInteractiveShell.ask_exit',
        exit=True,
        keepkernel=self.keepkernel_on_exit,
    ))
174
173
def _showtraceback(self, etype, evalue, stb):
    """Publish an exception as a pyerr message instead of printing it.

    Returns the exception content dict, and (hack) stashes it on the shell
    as self._reply_content for the caller to read back.
    """
    exc_content = {
        u'traceback' : stb,
        u'ename' : unicode(etype.__name__),
        u'evalue' : unicode(evalue),
    }

    dh = self.displayhook
    # Send exception info over pub socket for other clients than the caller
    # to pick up
    topic = dh.topic.replace(b'pyout', b'pyerr') if dh.topic else None

    exc_msg = dh.session.send(dh.pub_socket, u'pyerr',
                              json_clean(exc_content), dh.parent_header,
                              ident=topic)

    # FIXME - Hack: store exception info in shell object.  Right now, the
    # caller is reading this info after the fact, we need to fix this logic
    # to remove this hack.  Even uglier, we need to store the error status
    # here, because in the main loop, the logic that sets it is being
    # skipped because runlines swallows the exceptions.
    exc_content[u'status'] = u'error'
    self._reply_content = exc_content
    # /FIXME

    return exc_content
202
201
203 #------------------------------------------------------------------------
202 #------------------------------------------------------------------------
204 # Magic overrides
203 # Magic overrides
205 #------------------------------------------------------------------------
204 #------------------------------------------------------------------------
206 # Once the base class stops inheriting from magic, this code needs to be
205 # Once the base class stops inheriting from magic, this code needs to be
207 # moved into a separate machinery as well. For now, at least isolate here
206 # moved into a separate machinery as well. For now, at least isolate here
208 # the magics which this class needs to implement differently from the base
207 # the magics which this class needs to implement differently from the base
209 # class, or that are unique to it.
208 # class, or that are unique to it.
210
209
def magic_doctest_mode(self,parameter_s=''):
    """Toggle doctest mode on and off.

    This mode is intended to make IPython behave as much as possible like a
    plain Python shell, from the perspective of how its prompts, exceptions
    and output look.  This makes it easy to copy and paste parts of a
    session into doctests.  It does so by:

    - Changing the prompts to the classic ``>>>`` ones.
    - Changing the exception reporting mode to 'Plain'.
    - Disabling pretty-printing of output.

    Note that IPython also supports the pasting of code snippets that have
    leading '>>>' and '...' prompts in them.  This means that you can paste
    doctests from files or docstrings (even if they have leading
    whitespace), and the code will execute correctly.  You can then use
    '%history -t' to see the translated history; this will give you the
    input after removal of all the leading prompts and whitespace, which
    can be pasted back into an editor.

    With these features, you can switch into this mode easily whenever you
    need to do testing and changes to doctests, without having to leave
    your existing IPython session.
    """

    from IPython.utils.ipstruct import Struct

    # Shorthands
    shell = self.shell
    disp_formatter = self.shell.display_formatter
    ptformatter = disp_formatter.formatters['text/plain']
    # dstore is a data store kept in the instance metadata bag to track any
    # changes we make, so we can undo them later.
    dstore = shell.meta.setdefault('doctest_mode', Struct())
    save_dstore = dstore.setdefault

    # Remember current settings so a second toggle can restore them.
    mode = save_dstore('mode', False)
    save_dstore('rc_pprint', ptformatter.pprint)
    save_dstore('rc_plain_text_only', disp_formatter.plain_text_only)
    save_dstore('xmode', shell.InteractiveTB.mode)

    if not mode:
        # turn on: plain output, plain text only, plain tracebacks
        ptformatter.pprint = False
        disp_formatter.plain_text_only = True
        shell.magic_xmode('Plain')
    else:
        # turn off: restore the saved settings
        ptformatter.pprint = dstore.rc_pprint
        disp_formatter.plain_text_only = dstore.rc_plain_text_only
        shell.magic_xmode(dstore.xmode)

    # Store new mode and inform on console
    dstore.mode = bool(1-int(mode))
    mode_label = ['OFF','ON'][dstore.mode]
    print('Doctest mode is:', mode_label)

    # Send the payload back so that clients can modify their prompt display
    payload = dict(
        source='IPython.zmq.zmqshell.ZMQInteractiveShell.magic_doctest_mode',
        mode=dstore.mode)
    self.payload_manager.write_payload(payload)
274
273
275 @skip_doctest
274 @skip_doctest
276 def magic_edit(self,parameter_s='',last_call=['','']):
275 def magic_edit(self,parameter_s='',last_call=['','']):
277 """Bring up an editor and execute the resulting code.
276 """Bring up an editor and execute the resulting code.
278
277
279 Usage:
278 Usage:
280 %edit [options] [args]
279 %edit [options] [args]
281
280
282 %edit runs an external text editor. You will need to set the command for
281 %edit runs an external text editor. You will need to set the command for
283 this editor via the ``TerminalInteractiveShell.editor`` option in your
282 this editor via the ``TerminalInteractiveShell.editor`` option in your
284 configuration file before it will work.
283 configuration file before it will work.
285
284
286 This command allows you to conveniently edit multi-line code right in
285 This command allows you to conveniently edit multi-line code right in
287 your IPython session.
286 your IPython session.
288
287
289 If called without arguments, %edit opens up an empty editor with a
288 If called without arguments, %edit opens up an empty editor with a
290 temporary file and will execute the contents of this file when you
289 temporary file and will execute the contents of this file when you
291 close it (don't forget to save it!).
290 close it (don't forget to save it!).
292
291
293
292
294 Options:
293 Options:
295
294
296 -n <number>: open the editor at a specified line number. By default,
295 -n <number>: open the editor at a specified line number. By default,
297 the IPython editor hook uses the unix syntax 'editor +N filename', but
296 the IPython editor hook uses the unix syntax 'editor +N filename', but
298 you can configure this by providing your own modified hook if your
297 you can configure this by providing your own modified hook if your
299 favorite editor supports line-number specifications with a different
298 favorite editor supports line-number specifications with a different
300 syntax.
299 syntax.
301
300
302 -p: this will call the editor with the same data as the previous time
301 -p: this will call the editor with the same data as the previous time
303 it was used, regardless of how long ago (in your current session) it
302 it was used, regardless of how long ago (in your current session) it
304 was.
303 was.
305
304
306 -r: use 'raw' input. This option only applies to input taken from the
305 -r: use 'raw' input. This option only applies to input taken from the
307 user's history. By default, the 'processed' history is used, so that
306 user's history. By default, the 'processed' history is used, so that
308 magics are loaded in their transformed version to valid Python. If
307 magics are loaded in their transformed version to valid Python. If
309 this option is given, the raw input as typed as the command line is
308 this option is given, the raw input as typed as the command line is
310 used instead. When you exit the editor, it will be executed by
309 used instead. When you exit the editor, it will be executed by
311 IPython's own processor.
310 IPython's own processor.
312
311
313 -x: do not execute the edited code immediately upon exit. This is
312 -x: do not execute the edited code immediately upon exit. This is
314 mainly useful if you are editing programs which need to be called with
313 mainly useful if you are editing programs which need to be called with
315 command line arguments, which you can then do using %run.
314 command line arguments, which you can then do using %run.
316
315
317
316
318 Arguments:
317 Arguments:
319
318
320 If arguments are given, the following possibilities exist:
319 If arguments are given, the following possibilities exist:
321
320
322 - The arguments are numbers or pairs of colon-separated numbers (like
321 - The arguments are numbers or pairs of colon-separated numbers (like
323 1 4:8 9). These are interpreted as lines of previous input to be
322 1 4:8 9). These are interpreted as lines of previous input to be
324 loaded into the editor. The syntax is the same of the %macro command.
323 loaded into the editor. The syntax is the same of the %macro command.
325
324
326 - If the argument doesn't start with a number, it is evaluated as a
325 - If the argument doesn't start with a number, it is evaluated as a
327 variable and its contents loaded into the editor. You can thus edit
326 variable and its contents loaded into the editor. You can thus edit
328 any string which contains python code (including the result of
327 any string which contains python code (including the result of
329 previous edits).
328 previous edits).
330
329
331 - If the argument is the name of an object (other than a string),
330 - If the argument is the name of an object (other than a string),
332 IPython will try to locate the file where it was defined and open the
331 IPython will try to locate the file where it was defined and open the
333 editor at the point where it is defined. You can use `%edit function`
332 editor at the point where it is defined. You can use `%edit function`
334 to load an editor exactly at the point where 'function' is defined,
333 to load an editor exactly at the point where 'function' is defined,
335 edit it and have the file be executed automatically.
334 edit it and have the file be executed automatically.
336
335
337 If the object is a macro (see %macro for details), this opens up your
336 If the object is a macro (see %macro for details), this opens up your
338 specified editor with a temporary file containing the macro's data.
337 specified editor with a temporary file containing the macro's data.
339 Upon exit, the macro is reloaded with the contents of the file.
338 Upon exit, the macro is reloaded with the contents of the file.
340
339
341 Note: opening at an exact line is only supported under Unix, and some
340 Note: opening at an exact line is only supported under Unix, and some
342 editors (like kedit and gedit up to Gnome 2.8) do not understand the
341 editors (like kedit and gedit up to Gnome 2.8) do not understand the
343 '+NUMBER' parameter necessary for this feature. Good editors like
342 '+NUMBER' parameter necessary for this feature. Good editors like
344 (X)Emacs, vi, jed, pico and joe all do.
343 (X)Emacs, vi, jed, pico and joe all do.
345
344
346 - If the argument is not found as a variable, IPython will look for a
345 - If the argument is not found as a variable, IPython will look for a
347 file with that name (adding .py if necessary) and load it into the
346 file with that name (adding .py if necessary) and load it into the
348 editor. It will execute its contents with execfile() when you exit,
347 editor. It will execute its contents with execfile() when you exit,
349 loading any code in the file into your interactive namespace.
348 loading any code in the file into your interactive namespace.
350
349
351 After executing your code, %edit will return as output the code you
350 After executing your code, %edit will return as output the code you
352 typed in the editor (except when it was an existing file). This way
351 typed in the editor (except when it was an existing file). This way
353 you can reload the code in further invocations of %edit as a variable,
352 you can reload the code in further invocations of %edit as a variable,
354 via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of
353 via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of
355 the output.
354 the output.
356
355
357 Note that %edit is also available through the alias %ed.
356 Note that %edit is also available through the alias %ed.
358
357
359 This is an example of creating a simple function inside the editor and
358 This is an example of creating a simple function inside the editor and
360 then modifying it. First, start up the editor:
359 then modifying it. First, start up the editor:
361
360
362 In [1]: ed
361 In [1]: ed
363 Editing... done. Executing edited code...
362 Editing... done. Executing edited code...
364 Out[1]: 'def foo():\n print "foo() was defined in an editing session"\n'
363 Out[1]: 'def foo():\n print "foo() was defined in an editing session"\n'
365
364
366 We can then call the function foo():
365 We can then call the function foo():
367
366
368 In [2]: foo()
367 In [2]: foo()
369 foo() was defined in an editing session
368 foo() was defined in an editing session
370
369
371 Now we edit foo. IPython automatically loads the editor with the
370 Now we edit foo. IPython automatically loads the editor with the
372 (temporary) file where foo() was previously defined:
371 (temporary) file where foo() was previously defined:
373
372
374 In [3]: ed foo
373 In [3]: ed foo
375 Editing... done. Executing edited code...
374 Editing... done. Executing edited code...
376
375
377 And if we call foo() again we get the modified version:
376 And if we call foo() again we get the modified version:
378
377
379 In [4]: foo()
378 In [4]: foo()
380 foo() has now been changed!
379 foo() has now been changed!
381
380
382 Here is an example of how to edit a code snippet successive
381 Here is an example of how to edit a code snippet successive
383 times. First we call the editor:
382 times. First we call the editor:
384
383
385 In [5]: ed
384 In [5]: ed
386 Editing... done. Executing edited code...
385 Editing... done. Executing edited code...
387 hello
386 hello
388 Out[5]: "print 'hello'\n"
387 Out[5]: "print 'hello'\n"
389
388
390 Now we call it again with the previous output (stored in _):
389 Now we call it again with the previous output (stored in _):
391
390
392 In [6]: ed _
391 In [6]: ed _
393 Editing... done. Executing edited code...
392 Editing... done. Executing edited code...
394 hello world
393 hello world
395 Out[6]: "print 'hello world'\n"
394 Out[6]: "print 'hello world'\n"
396
395
397 Now we call it with the output #8 (stored in _8, also as Out[8]):
396 Now we call it with the output #8 (stored in _8, also as Out[8]):
398
397
399 In [7]: ed _8
398 In [7]: ed _8
400 Editing... done. Executing edited code...
399 Editing... done. Executing edited code...
401 hello again
400 hello again
402 Out[7]: "print 'hello again'\n"
401 Out[7]: "print 'hello again'\n"
403 """
402 """
404
403
405 opts,args = self.parse_options(parameter_s,'prn:')
404 opts,args = self.parse_options(parameter_s,'prn:')
406
405
407 try:
406 try:
408 filename, lineno, _ = self._find_edit_target(args, opts, last_call)
407 filename, lineno, _ = self._find_edit_target(args, opts, last_call)
409 except MacroToEdit as e:
408 except MacroToEdit as e:
410 # TODO: Implement macro editing over 2 processes.
409 # TODO: Implement macro editing over 2 processes.
411 print("Macro editing not yet implemented in 2-process model.")
410 print("Macro editing not yet implemented in 2-process model.")
412 return
411 return
413
412
414 # Make sure we send to the client an absolute path, in case the working
413 # Make sure we send to the client an absolute path, in case the working
415 # directory of client and kernel don't match
414 # directory of client and kernel don't match
416 filename = os.path.abspath(filename)
415 filename = os.path.abspath(filename)
417
416
418 payload = {
417 payload = {
419 'source' : 'IPython.zmq.zmqshell.ZMQInteractiveShell.edit_magic',
418 'source' : 'IPython.zmq.zmqshell.ZMQInteractiveShell.edit_magic',
420 'filename' : filename,
419 'filename' : filename,
421 'line_number' : lineno
420 'line_number' : lineno
422 }
421 }
423 self.payload_manager.write_payload(payload)
422 self.payload_manager.write_payload(payload)
424
423
425 # A few magics that are adapted to the specifics of using pexpect and a
424 # A few magics that are adapted to the specifics of using pexpect and a
426 # remote terminal
425 # remote terminal
427
426
428 def magic_clear(self, arg_s):
427 def magic_clear(self, arg_s):
429 """Clear the terminal."""
428 """Clear the terminal."""
430 if os.name == 'posix':
429 if os.name == 'posix':
431 self.shell.system("clear")
430 self.shell.system("clear")
432 else:
431 else:
433 self.shell.system("cls")
432 self.shell.system("cls")
434
433
435 if os.name == 'nt':
434 if os.name == 'nt':
436 # This is the usual name in windows
435 # This is the usual name in windows
437 magic_cls = magic_clear
436 magic_cls = magic_clear
438
437
439 # Terminal pagers won't work over pexpect, but we do have our own pager
438 # Terminal pagers won't work over pexpect, but we do have our own pager
440
439
441 def magic_less(self, arg_s):
440 def magic_less(self, arg_s):
442 """Show a file through the pager.
441 """Show a file through the pager.
443
442
444 Files ending in .py are syntax-highlighted."""
443 Files ending in .py are syntax-highlighted."""
445 cont = open(arg_s).read()
444 cont = open(arg_s).read()
446 if arg_s.endswith('.py'):
445 if arg_s.endswith('.py'):
447 cont = self.shell.pycolorize(cont)
446 cont = self.shell.pycolorize(cont)
448 page.page(cont)
447 page.page(cont)
449
448
450 magic_more = magic_less
449 magic_more = magic_less
451
450
452 # Man calls a pager, so we also need to redefine it
451 # Man calls a pager, so we also need to redefine it
453 if os.name == 'posix':
452 if os.name == 'posix':
454 def magic_man(self, arg_s):
453 def magic_man(self, arg_s):
455 """Find the man page for the given command and display in pager."""
454 """Find the man page for the given command and display in pager."""
456 page.page(self.shell.getoutput('man %s | col -b' % arg_s,
455 page.page(self.shell.getoutput('man %s | col -b' % arg_s,
457 split=False))
456 split=False))
458
457
459 # FIXME: this is specific to the GUI, so we should let the gui app load
458 # FIXME: this is specific to the GUI, so we should let the gui app load
460 # magics at startup that are only for the gui. Once the gui app has proper
459 # magics at startup that are only for the gui. Once the gui app has proper
461 # profile and configuration management, we can have it initialize a kernel
460 # profile and configuration management, we can have it initialize a kernel
462 # with a special config file that provides these.
461 # with a special config file that provides these.
463 def magic_guiref(self, arg_s):
462 def magic_guiref(self, arg_s):
464 """Show a basic reference about the GUI console."""
463 """Show a basic reference about the GUI console."""
465 from IPython.core.usage import gui_reference
464 from IPython.core.usage import gui_reference
466 page.page(gui_reference, auto_html=True)
465 page.page(gui_reference, auto_html=True)
467
466
468 def magic_connect_info(self, arg_s):
467 def magic_connect_info(self, arg_s):
469 """Print information for connecting other clients to this kernel
468 """Print information for connecting other clients to this kernel
470
469
471 It will print the contents of this session's connection file, as well as
470 It will print the contents of this session's connection file, as well as
472 shortcuts for local clients.
471 shortcuts for local clients.
473
472
474 In the simplest case, when called from the most recently launched kernel,
473 In the simplest case, when called from the most recently launched kernel,
475 secondary clients can be connected, simply with:
474 secondary clients can be connected, simply with:
476
475
477 $> ipython <app> --existing
476 $> ipython <app> --existing
478
477
479 """
478 """
480
479
481 from IPython.core.application import BaseIPythonApplication as BaseIPApp
480 from IPython.core.application import BaseIPythonApplication as BaseIPApp
482
481
483 if BaseIPApp.initialized():
482 if BaseIPApp.initialized():
484 app = BaseIPApp.instance()
483 app = BaseIPApp.instance()
485 security_dir = app.profile_dir.security_dir
484 security_dir = app.profile_dir.security_dir
486 profile = app.profile
485 profile = app.profile
487 else:
486 else:
488 profile = 'default'
487 profile = 'default'
489 security_dir = ''
488 security_dir = ''
490
489
491 try:
490 try:
492 connection_file = get_connection_file()
491 connection_file = get_connection_file()
493 info = get_connection_info(unpack=False)
492 info = get_connection_info(unpack=False)
494 except Exception as e:
493 except Exception as e:
495 error("Could not get connection info: %r" % e)
494 error("Could not get connection info: %r" % e)
496 return
495 return
497
496
498 # add profile flag for non-default profile
497 # add profile flag for non-default profile
499 profile_flag = "--profile %s" % profile if profile != 'default' else ""
498 profile_flag = "--profile %s" % profile if profile != 'default' else ""
500
499
501 # if it's in the security dir, truncate to basename
500 # if it's in the security dir, truncate to basename
502 if security_dir == os.path.dirname(connection_file):
501 if security_dir == os.path.dirname(connection_file):
503 connection_file = os.path.basename(connection_file)
502 connection_file = os.path.basename(connection_file)
504
503
505
504
506 print (info + '\n')
505 print (info + '\n')
507 print ("Paste the above JSON into a file, and connect with:\n"
506 print ("Paste the above JSON into a file, and connect with:\n"
508 " $> ipython <app> --existing <file>\n"
507 " $> ipython <app> --existing <file>\n"
509 "or, if you are local, you can connect with just:\n"
508 "or, if you are local, you can connect with just:\n"
510 " $> ipython <app> --existing {0} {1}\n"
509 " $> ipython <app> --existing {0} {1}\n"
511 "or even just:\n"
510 "or even just:\n"
512 " $> ipython <app> --existing {1}\n"
511 " $> ipython <app> --existing {1}\n"
513 "if this is the most recent IPython session you have started.".format(
512 "if this is the most recent IPython session you have started.".format(
514 connection_file, profile_flag
513 connection_file, profile_flag
515 )
514 )
516 )
515 )
517
516
518 def magic_qtconsole(self, arg_s):
517 def magic_qtconsole(self, arg_s):
519 """Open a qtconsole connected to this kernel.
518 """Open a qtconsole connected to this kernel.
520
519
521 Useful for connecting a qtconsole to running notebooks, for better
520 Useful for connecting a qtconsole to running notebooks, for better
522 debugging.
521 debugging.
523 """
522 """
524 try:
523 try:
525 p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix'))
524 p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix'))
526 except Exception as e:
525 except Exception as e:
527 error("Could not start qtconsole: %r" % e)
526 error("Could not start qtconsole: %r" % e)
528 return
527 return
529
528
530 def set_next_input(self, text):
529 def set_next_input(self, text):
531 """Send the specified text to the frontend to be presented at the next
530 """Send the specified text to the frontend to be presented at the next
532 input cell."""
531 input cell."""
533 payload = dict(
532 payload = dict(
534 source='IPython.zmq.zmqshell.ZMQInteractiveShell.set_next_input',
533 source='IPython.zmq.zmqshell.ZMQInteractiveShell.set_next_input',
535 text=text
534 text=text
536 )
535 )
537 self.payload_manager.write_payload(payload)
536 self.payload_manager.write_payload(payload)
538
537
539
538
540 InteractiveShellABC.register(ZMQInteractiveShell)
539 InteractiveShellABC.register(ZMQInteractiveShell)
@@ -1,151 +1,149 b''
1 .. _parallelmpi:
1 .. _parallelmpi:
2
2
3 =======================
3 =======================
4 Using MPI with IPython
4 Using MPI with IPython
5 =======================
5 =======================
6
6
7 Often, a parallel algorithm will require moving data between the engines. One
7 Often, a parallel algorithm will require moving data between the engines. One
8 way of accomplishing this is by doing a pull and then a push using the
8 way of accomplishing this is by doing a pull and then a push using the
9 multiengine client. However, this will be slow as all the data has to go
9 multiengine client. However, this will be slow as all the data has to go
10 through the controller to the client and then back through the controller, to
10 through the controller to the client and then back through the controller, to
11 its final destination.
11 its final destination.
12
12
13 A much better way of moving data between engines is to use a message passing
13 A much better way of moving data between engines is to use a message passing
14 library, such as the Message Passing Interface (MPI) [MPI]_. IPython's
14 library, such as the Message Passing Interface (MPI) [MPI]_. IPython's
15 parallel computing architecture has been designed from the ground up to
15 parallel computing architecture has been designed from the ground up to
16 integrate with MPI. This document describes how to use MPI with IPython.
16 integrate with MPI. This document describes how to use MPI with IPython.
17
17
18 Additional installation requirements
18 Additional installation requirements
19 ====================================
19 ====================================
20
20
21 If you want to use MPI with IPython, you will need to install:
21 If you want to use MPI with IPython, you will need to install:
22
22
23 * A standard MPI implementation such as OpenMPI [OpenMPI]_ or MPICH.
23 * A standard MPI implementation such as OpenMPI [OpenMPI]_ or MPICH.
24 * The mpi4py [mpi4py]_ package.
24 * The mpi4py [mpi4py]_ package.
25
25
26 .. note::
26 .. note::
27
27
28 The mpi4py package is not a strict requirement. However, you need to
28 The mpi4py package is not a strict requirement. However, you need to
29 have *some* way of calling MPI from Python. You also need some way of
29 have *some* way of calling MPI from Python. You also need some way of
30 making sure that :func:`MPI_Init` is called when the IPython engines start
30 making sure that :func:`MPI_Init` is called when the IPython engines start
31 up. There are a number of ways of doing this and a good number of
31 up. There are a number of ways of doing this and a good number of
32 associated subtleties. We highly recommend just using mpi4py as it
32 associated subtleties. We highly recommend just using mpi4py as it
33 takes care of most of these problems. If you want to do something
33 takes care of most of these problems. If you want to do something
34 different, let us know and we can help you get started.
34 different, let us know and we can help you get started.
35
35
36 Starting the engines with MPI enabled
36 Starting the engines with MPI enabled
37 =====================================
37 =====================================
38
38
39 To use code that calls MPI, there are typically two things that MPI requires.
39 To use code that calls MPI, there are typically two things that MPI requires.
40
40
41 1. The process that wants to call MPI must be started using
41 1. The process that wants to call MPI must be started using
42 :command:`mpiexec` or a batch system (like PBS) that has MPI support.
42 :command:`mpiexec` or a batch system (like PBS) that has MPI support.
43 2. Once the process starts, it must call :func:`MPI_Init`.
43 2. Once the process starts, it must call :func:`MPI_Init`.
44
44
45 There are a couple of ways that you can start the IPython engines and get
45 There are a couple of ways that you can start the IPython engines and get
46 these things to happen.
46 these things to happen.
47
47
48 Automatic starting using :command:`mpiexec` and :command:`ipcluster`
48 Automatic starting using :command:`mpiexec` and :command:`ipcluster`
49 --------------------------------------------------------------------
49 --------------------------------------------------------------------
50
50
51 The easiest approach is to use the `MPI` Launchers in :command:`ipcluster`,
51 The easiest approach is to use the `MPI` Launchers in :command:`ipcluster`,
52 which will first start a controller and then a set of engines using
52 which will first start a controller and then a set of engines using
53 :command:`mpiexec`::
53 :command:`mpiexec`::
54
54
55 $ ipcluster start -n 4 --engines=MPIEngineSetLauncher
55 $ ipcluster start -n 4 --engines=MPIEngineSetLauncher
56
56
57 This approach is best as interrupting :command:`ipcluster` will automatically
57 This approach is best as interrupting :command:`ipcluster` will automatically
58 stop and clean up the controller and engines.
58 stop and clean up the controller and engines.
59
59
60 Manual starting using :command:`mpiexec`
60 Manual starting using :command:`mpiexec`
61 ----------------------------------------
61 ----------------------------------------
62
62
63 If you want to start the IPython engines using the :command:`mpiexec`, just
63 If you want to start the IPython engines using the :command:`mpiexec`, just
64 do::
64 do::
65
65
66 $ mpiexec -n 4 ipengine --mpi=mpi4py
66 $ mpiexec -n 4 ipengine --mpi=mpi4py
67
67
68 This requires that you already have a controller running and that the FURL
68 This requires that you already have a controller running and that the FURL
69 files for the engines are in place. We also have built in support for
69 files for the engines are in place. We also have built in support for
70 PyTrilinos [PyTrilinos]_, which can be used (assuming it is installed) by
70 PyTrilinos [PyTrilinos]_, which can be used (assuming it is installed) by
71 starting the engines with::
71 starting the engines with::
72
72
73 $ mpiexec -n 4 ipengine --mpi=pytrilinos
73 $ mpiexec -n 4 ipengine --mpi=pytrilinos
74
74
75 Automatic starting using PBS and :command:`ipcluster`
75 Automatic starting using PBS and :command:`ipcluster`
76 ------------------------------------------------------
76 ------------------------------------------------------
77
77
78 The :command:`ipcluster` command also has built-in integration with PBS. For
78 The :command:`ipcluster` command also has built-in integration with PBS. For
79 more information on this approach, see our documentation on :ref:`ipcluster
79 more information on this approach, see our documentation on :ref:`ipcluster
80 <parallel_process>`.
80 <parallel_process>`.
81
81
82 Actually using MPI
82 Actually using MPI
83 ==================
83 ==================
84
84
85 Once the engines are running with MPI enabled, you are ready to go. You can
85 Once the engines are running with MPI enabled, you are ready to go. You can
86 now call any code that uses MPI in the IPython engines. And, all of this can
86 now call any code that uses MPI in the IPython engines. And, all of this can
87 be done interactively. Here we show a simple example that uses mpi4py
87 be done interactively. Here we show a simple example that uses mpi4py
88 [mpi4py]_ version 1.1.0 or later.
88 [mpi4py]_ version 1.1.0 or later.
89
89
90 First, let's define a simple function that uses MPI to calculate the sum of a
90 First, let's define a simple function that uses MPI to calculate the sum of a
91 distributed array. Save the following text in a file called :file:`psum.py`:
91 distributed array. Save the following text in a file called :file:`psum.py`:
92
92
93 .. sourcecode:: python
93 .. sourcecode:: python
94
94
95 from mpi4py import MPI
95 from mpi4py import MPI
96 import numpy as np
96 import numpy as np
97
97
98 def psum(a):
98 def psum(a):
99 s = np.sum(a)
99 s = np.sum(a)
100 rcvBuf = np.array(0.0,'d')
100 rcvBuf = np.array(0.0,'d')
101 MPI.COMM_WORLD.Allreduce([s, MPI.DOUBLE],
101 MPI.COMM_WORLD.Allreduce([s, MPI.DOUBLE],
102 [rcvBuf, MPI.DOUBLE],
102 [rcvBuf, MPI.DOUBLE],
103 op=MPI.SUM)
103 op=MPI.SUM)
104 return rcvBuf
104 return rcvBuf
105
105
106 Now, start an IPython cluster::
106 Now, start an IPython cluster::
107
107
108 $ ipcluster start --profile=mpi -n 4
108 $ ipcluster start --profile=mpi -n 4
109
109
110 .. note::
110 .. note::
111
111
112 It is assumed here that the mpi profile has been set up, as described :ref:`here
112 It is assumed here that the mpi profile has been set up, as described :ref:`here
113 <parallel_process>`.
113 <parallel_process>`.
114
114
115 Finally, connect to the cluster and use this function interactively. In this
115 Finally, connect to the cluster and use this function interactively. In this
116 case, we create a random array on each engine and sum up all the random arrays
116 case, we create a random array on each engine and sum up all the random arrays
117 using our :func:`psum` function:
117 using our :func:`psum` function:
118
118
119 .. sourcecode:: ipython
119 .. sourcecode:: ipython
120
120
121 In [1]: from IPython.parallel import Client
121 In [1]: from IPython.parallel import Client
122
122
123 In [2]: %load_ext parallel_magic
124
125 In [3]: c = Client(profile='mpi')
123 In [3]: c = Client(profile='mpi')
126
124
127 In [4]: view = c[:]
125 In [4]: view = c[:]
128
126
129 In [5]: view.activate()
127 In [5]: view.activate() # enable magics
130
128
131 # run the contents of the file on each engine:
129 # run the contents of the file on each engine:
132 In [6]: view.run('psum.py')
130 In [6]: view.run('psum.py')
133
131
134 In [6]: px a = np.random.rand(100)
132 In [6]: %px a = np.random.rand(100)
135 Parallel execution on engines: [0,1,2,3]
133 Parallel execution on engines: [0,1,2,3]
136
134
137 In [8]: px s = psum(a)
135 In [8]: %px s = psum(a)
138 Parallel execution on engines: [0,1,2,3]
136 Parallel execution on engines: [0,1,2,3]
139
137
140 In [9]: view['s']
138 In [9]: view['s']
141 Out[9]: [187.451545803,187.451545803,187.451545803,187.451545803]
139 Out[9]: [187.451545803,187.451545803,187.451545803,187.451545803]
142
140
143 Any Python code that makes calls to MPI can be used in this manner, including
141 Any Python code that makes calls to MPI can be used in this manner, including
144 compiled C, C++ and Fortran libraries that have been exposed to Python.
142 compiled C, C++ and Fortran libraries that have been exposed to Python.
145
143
146 .. [MPI] Message Passing Interface. http://www-unix.mcs.anl.gov/mpi/
144 .. [MPI] Message Passing Interface. http://www-unix.mcs.anl.gov/mpi/
147 .. [mpi4py] MPI for Python. mpi4py: http://mpi4py.scipy.org/
145 .. [mpi4py] MPI for Python. mpi4py: http://mpi4py.scipy.org/
148 .. [OpenMPI] Open MPI. http://www.open-mpi.org/
146 .. [OpenMPI] Open MPI. http://www.open-mpi.org/
149 .. [PyTrilinos] PyTrilinos. http://trilinos.sandia.gov/packages/pytrilinos/
147 .. [PyTrilinos] PyTrilinos. http://trilinos.sandia.gov/packages/pytrilinos/
150
148
151
149
@@ -1,865 +1,942 b''
1 .. _parallel_multiengine:
1 .. _parallel_multiengine:
2
2
3 ==========================
3 ==========================
4 IPython's Direct interface
4 IPython's Direct interface
5 ==========================
5 ==========================
6
6
7 The direct, or multiengine, interface represents one possible way of working with a set of
7 The direct, or multiengine, interface represents one possible way of working with a set of
8 IPython engines. The basic idea behind the multiengine interface is that the
8 IPython engines. The basic idea behind the multiengine interface is that the
9 capabilities of each engine are directly and explicitly exposed to the user.
9 capabilities of each engine are directly and explicitly exposed to the user.
10 Thus, in the multiengine interface, each engine is given an id that is used to
10 Thus, in the multiengine interface, each engine is given an id that is used to
11 identify the engine and give it work to do. This interface is very intuitive
11 identify the engine and give it work to do. This interface is very intuitive
12 and is designed with interactive usage in mind, and is the best place for
12 and is designed with interactive usage in mind, and is the best place for
13 new users of IPython to begin.
13 new users of IPython to begin.
14
14
15 Starting the IPython controller and engines
15 Starting the IPython controller and engines
16 ===========================================
16 ===========================================
17
17
18 To follow along with this tutorial, you will need to start the IPython
18 To follow along with this tutorial, you will need to start the IPython
19 controller and four IPython engines. The simplest way of doing this is to use
19 controller and four IPython engines. The simplest way of doing this is to use
20 the :command:`ipcluster` command::
20 the :command:`ipcluster` command::
21
21
22 $ ipcluster start -n 4
22 $ ipcluster start -n 4
23
23
24 For more detailed information about starting the controller and engines, see
24 For more detailed information about starting the controller and engines, see
25 our :ref:`introduction <parallel_overview>` to using IPython for parallel computing.
25 our :ref:`introduction <parallel_overview>` to using IPython for parallel computing.
26
26
27 Creating a ``DirectView`` instance
27 Creating a ``DirectView`` instance
28 ==================================
28 ==================================
29
29
30 The first step is to import the IPython :mod:`IPython.parallel`
30 The first step is to import the IPython :mod:`IPython.parallel`
31 module and then create a :class:`.Client` instance:
31 module and then create a :class:`.Client` instance:
32
32
33 .. sourcecode:: ipython
33 .. sourcecode:: ipython
34
34
35 In [1]: from IPython.parallel import Client
35 In [1]: from IPython.parallel import Client
36
36
37 In [2]: rc = Client()
37 In [2]: rc = Client()
38
38
39 This form assumes that the default connection information (stored in
39 This form assumes that the default connection information (stored in
40 :file:`ipcontroller-client.json` found in :file:`IPYTHONDIR/profile_default/security`) is
40 :file:`ipcontroller-client.json` found in :file:`IPYTHONDIR/profile_default/security`) is
41 accurate. If the controller was started on a remote machine, you must copy that connection
41 accurate. If the controller was started on a remote machine, you must copy that connection
42 file to the client machine, or enter its contents as arguments to the Client constructor:
42 file to the client machine, or enter its contents as arguments to the Client constructor:
43
43
44 .. sourcecode:: ipython
44 .. sourcecode:: ipython
45
45
46 # If you have copied the json connector file from the controller:
46 # If you have copied the json connector file from the controller:
47 In [2]: rc = Client('/path/to/ipcontroller-client.json')
47 In [2]: rc = Client('/path/to/ipcontroller-client.json')
48 # or to connect with a specific profile you have set up:
48 # or to connect with a specific profile you have set up:
49 In [3]: rc = Client(profile='mpi')
49 In [3]: rc = Client(profile='mpi')
50
50
51
51
52 To make sure there are engines connected to the controller, users can get a list
52 To make sure there are engines connected to the controller, users can get a list
53 of engine ids:
53 of engine ids:
54
54
55 .. sourcecode:: ipython
55 .. sourcecode:: ipython
56
56
57 In [3]: rc.ids
57 In [3]: rc.ids
58 Out[3]: [0, 1, 2, 3]
58 Out[3]: [0, 1, 2, 3]
59
59
60 Here we see that there are four engines ready to do work for us.
60 Here we see that there are four engines ready to do work for us.
61
61
62 For direct execution, we will make use of a :class:`DirectView` object, which can be
62 For direct execution, we will make use of a :class:`DirectView` object, which can be
63 constructed via list-access to the client:
63 constructed via list-access to the client:
64
64
65 .. sourcecode:: ipython
65 .. sourcecode:: ipython
66
66
67 In [4]: dview = rc[:] # use all engines
67 In [4]: dview = rc[:] # use all engines
68
68
69 .. seealso::
69 .. seealso::
70
70
71 For more information, see the in-depth explanation of :ref:`Views <parallel_details>`.
71 For more information, see the in-depth explanation of :ref:`Views <parallel_details>`.
72
72
73
73
74 Quick and easy parallelism
74 Quick and easy parallelism
75 ==========================
75 ==========================
76
76
77 In many cases, you simply want to apply a Python function to a sequence of
77 In many cases, you simply want to apply a Python function to a sequence of
78 objects, but *in parallel*. The client interface provides a simple way
78 objects, but *in parallel*. The client interface provides a simple way
79 of accomplishing this: using the DirectView's :meth:`~DirectView.map` method.
79 of accomplishing this: using the DirectView's :meth:`~DirectView.map` method.
80
80
81 Parallel map
81 Parallel map
82 ------------
82 ------------
83
83
84 Python's builtin :func:`map` function allows a function to be applied to a
84 Python's builtin :func:`map` function allows a function to be applied to a
85 sequence element-by-element. This type of code is typically trivial to
85 sequence element-by-element. This type of code is typically trivial to
86 parallelize. In fact, since IPython's interface is all about functions anyway,
86 parallelize. In fact, since IPython's interface is all about functions anyway,
87 you can just use the builtin :func:`map` with a :class:`RemoteFunction`, or a
87 you can just use the builtin :func:`map` with a :class:`RemoteFunction`, or a
88 DirectView's :meth:`map` method:
88 DirectView's :meth:`map` method:
89
89
90 .. sourcecode:: ipython
90 .. sourcecode:: ipython
91
91
92 In [62]: serial_result = map(lambda x:x**10, range(32))
92 In [62]: serial_result = map(lambda x:x**10, range(32))
93
93
94 In [63]: parallel_result = dview.map_sync(lambda x: x**10, range(32))
94 In [63]: parallel_result = dview.map_sync(lambda x: x**10, range(32))
95
95
96 In [67]: serial_result==parallel_result
96 In [67]: serial_result==parallel_result
97 Out[67]: True
97 Out[67]: True
98
98
99
99
100 .. note::
100 .. note::
101
101
102 The :class:`DirectView`'s version of :meth:`map` does
102 The :class:`DirectView`'s version of :meth:`map` does
103 not do dynamic load balancing. For a load balanced version, use a
103 not do dynamic load balancing. For a load balanced version, use a
104 :class:`LoadBalancedView`.
104 :class:`LoadBalancedView`.
105
105
106 .. seealso::
106 .. seealso::
107
107
108 :meth:`map` is implemented via :class:`ParallelFunction`.
108 :meth:`map` is implemented via :class:`ParallelFunction`.
109
109
110 Remote function decorators
110 Remote function decorators
111 --------------------------
111 --------------------------
112
112
113 Remote functions are just like normal functions, but when they are called,
113 Remote functions are just like normal functions, but when they are called,
114 they execute on one or more engines, rather than locally. IPython provides
114 they execute on one or more engines, rather than locally. IPython provides
115 two decorators:
115 two decorators:
116
116
117 .. sourcecode:: ipython
117 .. sourcecode:: ipython
118
118
119 In [10]: @dview.remote(block=True)
119 In [10]: @dview.remote(block=True)
120 ....: def getpid():
120 ....: def getpid():
121 ....: import os
121 ....: import os
122 ....: return os.getpid()
122 ....: return os.getpid()
123 ....:
123 ....:
124
124
125 In [11]: getpid()
125 In [11]: getpid()
126 Out[11]: [12345, 12346, 12347, 12348]
126 Out[11]: [12345, 12346, 12347, 12348]
127
127
128 The ``@parallel`` decorator creates parallel functions that break up element-wise
128 The ``@parallel`` decorator creates parallel functions that break up element-wise
129 operations and distribute them, reconstructing the result.
129 operations and distribute them, reconstructing the result.
130
130
131 .. sourcecode:: ipython
131 .. sourcecode:: ipython
132
132
133 In [12]: import numpy as np
133 In [12]: import numpy as np
134
134
135 In [13]: A = np.random.random((64,48))
135 In [13]: A = np.random.random((64,48))
136
136
137 In [14]: @dview.parallel(block=True)
137 In [14]: @dview.parallel(block=True)
138 ....: def pmul(A,B):
138 ....: def pmul(A,B):
139 ....: return A*B
139 ....: return A*B
140
140
141 In [15]: C_local = A*A
141 In [15]: C_local = A*A
142
142
143 In [16]: C_remote = pmul(A,A)
143 In [16]: C_remote = pmul(A,A)
144
144
145 In [17]: (C_local == C_remote).all()
145 In [17]: (C_local == C_remote).all()
146 Out[17]: True
146 Out[17]: True
147
147
148 Calling a ``@parallel`` function *does not* correspond to map. It is used for splitting
148 Calling a ``@parallel`` function *does not* correspond to map. It is used for splitting
149 element-wise operations that operate on a sequence or array. For ``map`` behavior,
149 element-wise operations that operate on a sequence or array. For ``map`` behavior,
150 parallel functions do have a map method.
150 parallel functions do have a map method.
151
151
152 ==================== ============================ =============================
152 ==================== ============================ =============================
153 call pfunc(seq) pfunc.map(seq)
153 call pfunc(seq) pfunc.map(seq)
154 ==================== ============================ =============================
154 ==================== ============================ =============================
155 # of tasks # of engines (1 per engine) # of engines (1 per engine)
155 # of tasks # of engines (1 per engine) # of engines (1 per engine)
156 # of remote calls # of engines (1 per engine) ``len(seq)``
156 # of remote calls # of engines (1 per engine) ``len(seq)``
157 argument to remote ``seq[i:j]`` (sub-sequence) ``seq[i]`` (single element)
157 argument to remote ``seq[i:j]`` (sub-sequence) ``seq[i]`` (single element)
158 ==================== ============================ =============================
158 ==================== ============================ =============================
159
159
160 A quick example to illustrate the difference in arguments for the two modes:
160 A quick example to illustrate the difference in arguments for the two modes:
161
161
162 .. sourcecode:: ipython
162 .. sourcecode:: ipython
163
163
164 In [16]: @dview.parallel(block=True)
164 In [16]: @dview.parallel(block=True)
165 ....: def echo(x):
165 ....: def echo(x):
166 ....: return str(x)
166 ....: return str(x)
167 ....:
167 ....:
168
168
169 In [17]: echo(range(5))
169 In [17]: echo(range(5))
170 Out[17]: ['[0, 1]', '[2]', '[3]', '[4]']
170 Out[17]: ['[0, 1]', '[2]', '[3]', '[4]']
171
171
172 In [18]: echo.map(range(5))
172 In [18]: echo.map(range(5))
173 Out[18]: ['0', '1', '2', '3', '4']
173 Out[18]: ['0', '1', '2', '3', '4']
174
174
175
175
176 .. seealso::
176 .. seealso::
177
177
178 See the :func:`~.remotefunction.parallel` and :func:`~.remotefunction.remote`
178 See the :func:`~.remotefunction.parallel` and :func:`~.remotefunction.remote`
179 decorators for options.
179 decorators for options.
180
180
181 Calling Python functions
181 Calling Python functions
182 ========================
182 ========================
183
183
184 The most basic type of operation that can be performed on the engines is to
184 The most basic type of operation that can be performed on the engines is to
185 execute Python code or call Python functions. Executing Python code can be
185 execute Python code or call Python functions. Executing Python code can be
186 done in blocking or non-blocking mode (non-blocking is default) using the
186 done in blocking or non-blocking mode (non-blocking is default) using the
187 :meth:`.View.execute` method, and calling functions can be done via the
187 :meth:`.View.execute` method, and calling functions can be done via the
188 :meth:`.View.apply` method.
188 :meth:`.View.apply` method.
189
189
190 apply
190 apply
191 -----
191 -----
192
192
193 The main method for doing remote execution (in fact, all methods that
193 The main method for doing remote execution (in fact, all methods that
194 communicate with the engines are built on top of it), is :meth:`View.apply`.
194 communicate with the engines are built on top of it), is :meth:`View.apply`.
195
195
196 We strive to provide the cleanest interface we can, so `apply` has the following
196 We strive to provide the cleanest interface we can, so `apply` has the following
197 signature:
197 signature:
198
198
199 .. sourcecode:: python
199 .. sourcecode:: python
200
200
201 view.apply(f, *args, **kwargs)
201 view.apply(f, *args, **kwargs)
202
202
203 There are various ways to call functions with IPython, and these flags are set as
203 There are various ways to call functions with IPython, and these flags are set as
204 attributes of the View. The ``DirectView`` has just two of these flags:
204 attributes of the View. The ``DirectView`` has just two of these flags:
205
205
206 dv.block : bool
206 dv.block : bool
207 whether to wait for the result, or return an :class:`AsyncResult` object
207 whether to wait for the result, or return an :class:`AsyncResult` object
208 immediately
208 immediately
209 dv.track : bool
209 dv.track : bool
210 whether to instruct pyzmq to track when zeromq is done sending the message.
210 whether to instruct pyzmq to track when zeromq is done sending the message.
211 This is primarily useful for non-copying sends of numpy arrays that you plan to
211 This is primarily useful for non-copying sends of numpy arrays that you plan to
212 edit in-place. You need to know when it becomes safe to edit the buffer
212 edit in-place. You need to know when it becomes safe to edit the buffer
213 without corrupting the message.
213 without corrupting the message.
214 dv.targets : int, list of ints
214 dv.targets : int, list of ints
215 which targets this view is associated with.
215 which targets this view is associated with.
216
216
217
217
218 Creating a view is simple: index-access on a client creates a :class:`.DirectView`.
218 Creating a view is simple: index-access on a client creates a :class:`.DirectView`.
219
219
220 .. sourcecode:: ipython
220 .. sourcecode:: ipython
221
221
222 In [4]: view = rc[1:3]
222 In [4]: view = rc[1:3]
223 Out[4]: <DirectView [1, 2]>
223 Out[4]: <DirectView [1, 2]>
224
224
225 In [5]: view.apply<tab>
225 In [5]: view.apply<tab>
226 view.apply view.apply_async view.apply_sync
226 view.apply view.apply_async view.apply_sync
227
227
228 For convenience, you can set block temporarily for a single call with the extra sync/async methods.
228 For convenience, you can set block temporarily for a single call with the extra sync/async methods.
229
229
230 Blocking execution
230 Blocking execution
231 ------------------
231 ------------------
232
232
233 In blocking mode, the :class:`.DirectView` object (called ``dview`` in
233 In blocking mode, the :class:`.DirectView` object (called ``dview`` in
234 these examples) submits the command to the controller, which places the
234 these examples) submits the command to the controller, which places the
235 command in the engines' queues for execution. The :meth:`apply` call then
235 command in the engines' queues for execution. The :meth:`apply` call then
236 blocks until the engines are done executing the command:
236 blocks until the engines are done executing the command:
237
237
238 .. sourcecode:: ipython
238 .. sourcecode:: ipython
239
239
240 In [2]: dview = rc[:] # A DirectView of all engines
240 In [2]: dview = rc[:] # A DirectView of all engines
241 In [3]: dview.block=True
241 In [3]: dview.block=True
242 In [4]: dview['a'] = 5
242 In [4]: dview['a'] = 5
243
243
244 In [5]: dview['b'] = 10
244 In [5]: dview['b'] = 10
245
245
246 In [6]: dview.apply(lambda x: a+b+x, 27)
246 In [6]: dview.apply(lambda x: a+b+x, 27)
247 Out[6]: [42, 42, 42, 42]
247 Out[6]: [42, 42, 42, 42]
248
248
249 You can also select blocking execution on a call-by-call basis with the :meth:`apply_sync`
249 You can also select blocking execution on a call-by-call basis with the :meth:`apply_sync`
250 method:
250 method:
251
251
252 In [7]: dview.block=False
252 In [7]: dview.block=False
253
253
254 In [8]: dview.apply_sync(lambda x: a+b+x, 27)
254 In [8]: dview.apply_sync(lambda x: a+b+x, 27)
255 Out[8]: [42, 42, 42, 42]
255 Out[8]: [42, 42, 42, 42]
256
256
257 Python commands can be executed as strings on specific engines by using a View's ``execute``
257 Python commands can be executed as strings on specific engines by using a View's ``execute``
258 method:
258 method:
259
259
260 .. sourcecode:: ipython
260 .. sourcecode:: ipython
261
261
262 In [6]: rc[::2].execute('c=a+b')
262 In [6]: rc[::2].execute('c=a+b')
263
263
264 In [7]: rc[1::2].execute('c=a-b')
264 In [7]: rc[1::2].execute('c=a-b')
265
265
266 In [8]: dview['c'] # shorthand for dview.pull('c', block=True)
266 In [8]: dview['c'] # shorthand for dview.pull('c', block=True)
267 Out[8]: [15, -5, 15, -5]
267 Out[8]: [15, -5, 15, -5]
268
268
269
269
270 Non-blocking execution
270 Non-blocking execution
271 ----------------------
271 ----------------------
272
272
273 In non-blocking mode, :meth:`apply` submits the command to be executed and
273 In non-blocking mode, :meth:`apply` submits the command to be executed and
274 then returns a :class:`AsyncResult` object immediately. The
274 then returns a :class:`AsyncResult` object immediately. The
275 :class:`AsyncResult` object gives you a way of getting a result at a later
275 :class:`AsyncResult` object gives you a way of getting a result at a later
276 time through its :meth:`get` method.
276 time through its :meth:`get` method.
277
277
278 .. seealso::
278 .. seealso::
279
279
280 Docs on the :ref:`AsyncResult <parallel_asyncresult>` object.
280 Docs on the :ref:`AsyncResult <parallel_asyncresult>` object.
281
281
282 This allows you to quickly submit long running commands without blocking your
282 This allows you to quickly submit long running commands without blocking your
283 local Python/IPython session:
283 local Python/IPython session:
284
284
285 .. sourcecode:: ipython
285 .. sourcecode:: ipython
286
286
287 # define our function
287 # define our function
288 In [6]: def wait(t):
288 In [6]: def wait(t):
289 ....: import time
289 ....: import time
290 ....: tic = time.time()
290 ....: tic = time.time()
291 ....: time.sleep(t)
291 ....: time.sleep(t)
292 ....: return time.time()-tic
292 ....: return time.time()-tic
293
293
294 # In non-blocking mode
294 # In non-blocking mode
295 In [7]: ar = dview.apply_async(wait, 2)
295 In [7]: ar = dview.apply_async(wait, 2)
296
296
297 # Now block for the result
297 # Now block for the result
298 In [8]: ar.get()
298 In [8]: ar.get()
299 Out[8]: [2.0006198883056641, 1.9997570514678955, 1.9996809959411621, 2.0003249645233154]
299 Out[8]: [2.0006198883056641, 1.9997570514678955, 1.9996809959411621, 2.0003249645233154]
300
300
301 # Again in non-blocking mode
301 # Again in non-blocking mode
302 In [9]: ar = dview.apply_async(wait, 10)
302 In [9]: ar = dview.apply_async(wait, 10)
303
303
304 # Poll to see if the result is ready
304 # Poll to see if the result is ready
305 In [10]: ar.ready()
305 In [10]: ar.ready()
306 Out[10]: False
306 Out[10]: False
307
307
308 # ask for the result, but wait a maximum of 1 second:
308 # ask for the result, but wait a maximum of 1 second:
309 In [45]: ar.get(1)
309 In [45]: ar.get(1)
310 ---------------------------------------------------------------------------
310 ---------------------------------------------------------------------------
311 TimeoutError Traceback (most recent call last)
311 TimeoutError Traceback (most recent call last)
312 /home/you/<ipython-input-45-7cd858bbb8e0> in <module>()
312 /home/you/<ipython-input-45-7cd858bbb8e0> in <module>()
313 ----> 1 ar.get(1)
313 ----> 1 ar.get(1)
314
314
315 /path/to/site-packages/IPython/parallel/asyncresult.pyc in get(self, timeout)
315 /path/to/site-packages/IPython/parallel/asyncresult.pyc in get(self, timeout)
316 62 raise self._exception
316 62 raise self._exception
317 63 else:
317 63 else:
318 ---> 64 raise error.TimeoutError("Result not ready.")
318 ---> 64 raise error.TimeoutError("Result not ready.")
319 65
319 65
320 66 def ready(self):
320 66 def ready(self):
321
321
322 TimeoutError: Result not ready.
322 TimeoutError: Result not ready.
323
323
324 .. Note::
324 .. Note::
325
325
326 Note the import inside the function. This is a common model, to ensure
326 Note the import inside the function. This is a common model, to ensure
327 that the appropriate modules are imported where the task is run. You can
327 that the appropriate modules are imported where the task is run. You can
328 also manually import modules into the engine(s) namespace(s) via
328 also manually import modules into the engine(s) namespace(s) via
329 :meth:`view.execute('import numpy')`.
329 :meth:`view.execute('import numpy')`.
330
330
331 Often, it is desirable to wait until a set of :class:`AsyncResult` objects
331 Often, it is desirable to wait until a set of :class:`AsyncResult` objects
332 are done. For this, there is the method :meth:`wait`. This method takes a
332 are done. For this, there is the method :meth:`wait`. This method takes a
333 tuple of :class:`AsyncResult` objects (or `msg_ids` or indices to the client's History),
333 tuple of :class:`AsyncResult` objects (or `msg_ids` or indices to the client's History),
334 and blocks until all of the associated results are ready:
334 and blocks until all of the associated results are ready:
335
335
336 .. sourcecode:: ipython
336 .. sourcecode:: ipython
337
337
338 In [72]: dview.block=False
338 In [72]: dview.block=False
339
339
340 # A trivial list of AsyncResults objects
340 # A trivial list of AsyncResults objects
341 In [73]: pr_list = [dview.apply_async(wait, 3) for i in range(10)]
341 In [73]: pr_list = [dview.apply_async(wait, 3) for i in range(10)]
342
342
343 # Wait until all of them are done
343 # Wait until all of them are done
344 In [74]: dview.wait(pr_list)
344 In [74]: dview.wait(pr_list)
345
345
346 # Then, their results are ready using get() or the `.r` attribute
346 # Then, their results are ready using get() or the `.r` attribute
347 In [75]: pr_list[0].get()
347 In [75]: pr_list[0].get()
348 Out[75]: [2.9982571601867676, 2.9982588291168213, 2.9987530708312988, 2.9990990161895752]
348 Out[75]: [2.9982571601867676, 2.9982588291168213, 2.9987530708312988, 2.9990990161895752]
349
349
350
350
351
351
352 The ``block`` and ``targets`` keyword arguments and attributes
352 The ``block`` and ``targets`` keyword arguments and attributes
353 --------------------------------------------------------------
353 --------------------------------------------------------------
354
354
355 Most DirectView methods (excluding :meth:`apply`) accept ``block`` and
355 Most DirectView methods (excluding :meth:`apply`) accept ``block`` and
356 ``targets`` as keyword arguments. As we have seen above, these keyword arguments control the
356 ``targets`` as keyword arguments. As we have seen above, these keyword arguments control the
357 blocking mode and which engines the command is applied to. The :class:`View` class also has
357 blocking mode and which engines the command is applied to. The :class:`View` class also has
358 :attr:`block` and :attr:`targets` attributes that control the default behavior when the keyword
358 :attr:`block` and :attr:`targets` attributes that control the default behavior when the keyword
359 arguments are not provided. Thus the following logic is used for :attr:`block` and :attr:`targets`:
359 arguments are not provided. Thus the following logic is used for :attr:`block` and :attr:`targets`:
360
360
361 * If no keyword argument is provided, the instance attributes are used.
361 * If no keyword argument is provided, the instance attributes are used.
362 * Keyword argument, if provided override the instance attributes for
362 * Keyword argument, if provided override the instance attributes for
363 the duration of a single call.
363 the duration of a single call.
364
364
365 The following examples demonstrate how to use the instance attributes:
365 The following examples demonstrate how to use the instance attributes:
366
366
367 .. sourcecode:: ipython
367 .. sourcecode:: ipython
368
368
369 In [16]: dview.targets = [0,2]
369 In [16]: dview.targets = [0,2]
370
370
371 In [17]: dview.block = False
371 In [17]: dview.block = False
372
372
373 In [18]: ar = dview.apply(lambda : 10)
373 In [18]: ar = dview.apply(lambda : 10)
374
374
375 In [19]: ar.get()
375 In [19]: ar.get()
376 Out[19]: [10, 10]
376 Out[19]: [10, 10]
377
377
378 In [16]: dview.targets = v.client.ids # all engines (4)
378 In [16]: dview.targets = v.client.ids # all engines (4)
379
379
380 In [21]: dview.block = True
380 In [21]: dview.block = True
381
381
382 In [22]: dview.apply(lambda : 42)
382 In [22]: dview.apply(lambda : 42)
383 Out[22]: [42, 42, 42, 42]
383 Out[22]: [42, 42, 42, 42]
384
384
385 The :attr:`block` and :attr:`targets` instance attributes of the
385 The :attr:`block` and :attr:`targets` instance attributes of the
386 :class:`.DirectView` also determine the behavior of the parallel magic commands.
386 :class:`.DirectView` also determine the behavior of the parallel magic commands.
387
387
388 Parallel magic commands
388 Parallel magic commands
389 -----------------------
389 -----------------------
390
390
391 We provide a few IPython magic commands (``%px``, ``%autopx`` and ``%result``)
391 We provide a few IPython magic commands (``%px``, ``%autopx`` and ``%result``)
392 that make it more pleasant to execute Python commands on the engines
392 that make it a bit more pleasant to execute Python commands on the engines interactively.
393 interactively. These are simply shortcuts to :meth:`execute` and
393 These are simply shortcuts to :meth:`.DirectView.execute`
394 :meth:`get_result` of the :class:`DirectView`. The ``%px`` magic executes a single
394 and :meth:`.AsyncResult.display_outputs` methods respectively.
395 Python command on the engines specified by the :attr:`targets` attribute of the
395 The ``%px`` magic executes a single Python command on the engines
396 :class:`DirectView` instance:
396 specified by the :attr:`targets` attribute of the :class:`DirectView` instance:
397
397
398 .. sourcecode:: ipython
398 .. sourcecode:: ipython
399
399
400 # Create a DirectView for all targets
400 # Create a DirectView for all targets
401 In [22]: dv = rc[:]
401 In [22]: dv = rc[:]
402
402
403 # Make this DirectView active for parallel magic commands
403 # Make this DirectView active for parallel magic commands
404 In [23]: dv.activate()
404 In [23]: dv.activate()
405
405
406 In [24]: dv.block=True
406 In [24]: dv.block=True
407
407
408 # import numpy here and everywhere
408 # import numpy here and everywhere
409 In [25]: with dv.sync_imports():
409 In [25]: with dv.sync_imports():
410 ....: import numpy
410 ....: import numpy
411 importing numpy on engine(s)
411 importing numpy on engine(s)
412
412
413 In [27]: %px a = numpy.random.rand(2,2)
413 In [27]: %px a = numpy.random.rand(2,2)
414 Parallel execution on engines: [0, 1, 2, 3]
414 Parallel execution on engines: [0, 1, 2, 3]
415
415
416 In [28]: %px ev = numpy.linalg.eigvals(a)
416 In [28]: %px numpy.linalg.eigvals(a)
417 Parallel execution on engines: [0, 1, 2, 3]
417 Parallel execution on engines: [0, 1, 2, 3]
418 [0] Out[68]: array([ 0.77120707, -0.19448286])
419 [1] Out[68]: array([ 1.10815921, 0.05110369])
420 [2] Out[68]: array([ 0.74625527, -0.37475081])
421 [3] Out[68]: array([ 0.72931905, 0.07159743])
418
422
419 In [28]: dv['ev']
423 In [29]: %px print 'hi'
420 Out[28]: [ array([ 1.09522024, -0.09645227]),
424 Parallel execution on engine(s): [0, 1, 2, 3]
421 ....: array([ 1.21435496, -0.35546712]),
425 [stdout:0] hi
422 ....: array([ 0.72180653, 0.07133042]),
426 [stdout:1] hi
423 ....: array([ 1.46384341, 1.04353244e-04])
427 [stdout:2] hi
424 ....: ]
428 [stdout:3] hi
425
429
426 The ``%result`` magic gets the most recent result, or takes an argument
430
427 specifying the index of the result to be requested. It is simply a shortcut to the
431 Since engines are IPython as well, you can even run magics remotely:
428 :meth:`get_result` method:
432
433 .. sourcecode:: ipython
434
435 In [28]: %px %pylab inline
436 Parallel execution on engine(s): [0, 1, 2, 3]
437 [stdout:0]
438 Welcome to pylab, a matplotlib-based Python environment...
439 For more information, type 'help(pylab)'.
440 [stdout:1]
441 Welcome to pylab, a matplotlib-based Python environment...
442 For more information, type 'help(pylab)'.
443 [stdout:2]
444 Welcome to pylab, a matplotlib-based Python environment...
445 For more information, type 'help(pylab)'.
446 [stdout:3]
447 Welcome to pylab, a matplotlib-based Python environment...
448 For more information, type 'help(pylab)'.
449
450 And once in pylab mode with the inline backend,
451 you can make plots and they will be displayed in your frontend
452 if it supports the inline figures (e.g. notebook or qtconsole):
429
453
430 .. sourcecode:: ipython
454 .. sourcecode:: ipython
431
455
432 In [29]: dv.apply_async(lambda : ev)
456 In [40]: %px plot(rand(100))
457 Parallel execution on engine(s): [0, 1, 2, 3]
458 <plot0>
459 <plot1>
460 <plot2>
461 <plot3>
462 [0] Out[79]: [<matplotlib.lines.Line2D at 0x10a6286d0>]
463 [1] Out[79]: [<matplotlib.lines.Line2D at 0x10b9476d0>]
464 [2] Out[79]: [<matplotlib.lines.Line2D at 0x110652750>]
465 [3] Out[79]: [<matplotlib.lines.Line2D at 0x10c6566d0>]
466
467
468 ``%%px`` Cell Magic
469 *******************
470
471 `%%px` can also be used as a Cell Magic, which accepts ``--[no]block`` flags,
472 and a ``--group-outputs`` argument, which adjust how the outputs of multiple
473 engines are presented.
433
474
434 In [30]: %result
475 .. seealso::
435 Out[30]: [ [ 1.28167017 0.14197338],
476
436 ....: [-0.14093616 1.27877273],
477 :meth:`.AsyncResult.display_outputs` for the grouping options.
437 ....: [-0.37023573 1.06779409],
478
438 ....: [ 0.83664764 -0.25602658] ]
479 .. sourcecode:: ipython
480
481 In [50]: %%px --block --group-outputs=engine
482 ....: import numpy as np
483 ....: A = np.random.random((2,2))
484 ....: ev = numpy.linalg.eigvals(A)
485 ....: print ev
486 ....: ev.max()
487 ....:
488 Parallel execution on engine(s): [0, 1, 2, 3]
489 [stdout:0] [ 0.60640442 0.95919621]
490 [0] Out[73]: 0.9591962130899806
491 [stdout:1] [ 0.38501813 1.29430871]
492 [1] Out[73]: 1.2943087091452372
493 [stdout:2] [-0.85925141 0.9387692 ]
494 [2] Out[73]: 0.93876920456230284
495 [stdout:3] [ 0.37998269 1.24218246]
496 [3] Out[73]: 1.2421824618493817
497
498 ``%result`` Magic
499 *****************
500
501 If you are using ``%px`` in non-blocking mode, you won't get output.
502 You can use ``%result`` to display the outputs of the latest command,
503 just as is done when ``%px`` is blocking:
504
505 .. sourcecode:: ipython
506
507 In [39]: dv.block = False
508
509 In [40]: %px print 'hi'
510 Async parallel execution on engine(s): [0, 1, 2, 3]
511
512 In [41]: %result
513 [stdout:0] hi
514 [stdout:1] hi
515 [stdout:2] hi
516 [stdout:3] hi
517
518 ``%result`` simply calls :meth:`.AsyncResult.display_outputs` on the most recent request.
519 You can pass integers as indices if you want a result other than the latest,
520 e.g. ``%result -2``, or ``%result 0`` for the first.
521
522
523 ``%autopx``
524 ***********
439
525
440 The ``%autopx`` magic switches to a mode where everything you type is executed
526 The ``%autopx`` magic switches to a mode where everything you type is executed
441 on the engines given by the :attr:`targets` attribute:
527 on the engines until you do ``%autopx`` again.
442
528
443 .. sourcecode:: ipython
529 .. sourcecode:: ipython
444
530
445 In [30]: dv.block=False
531 In [30]: dv.block=True
446
532
447 In [31]: %autopx
533 In [31]: %autopx
448 Auto Parallel Enabled
534 %autopx enabled
449 Type %autopx to disable
450
535
451 In [32]: max_evals = []
536 In [32]: max_evals = []
452 <IPython.parallel.AsyncResult object at 0x17b8a70>
453
537
454 In [33]: for i in range(100):
538 In [33]: for i in range(100):
455 ....: a = numpy.random.rand(10,10)
539 ....: a = numpy.random.rand(10,10)
456 ....: a = a+a.transpose()
540 ....: a = a+a.transpose()
457 ....: evals = numpy.linalg.eigvals(a)
541 ....: evals = numpy.linalg.eigvals(a)
458 ....: max_evals.append(evals[0].real)
542 ....: max_evals.append(evals[0].real)
459 ....:
543 ....:
460 ....:
461 <IPython.parallel.AsyncResult object at 0x17af8f0>
462
463 In [34]: %autopx
464 Auto Parallel Disabled
465
544
466 In [35]: dv.block=True
545 In [34]: print "Average max eigenvalue is: %f" % (sum(max_evals)/len(max_evals))
546 [stdout:0] Average max eigenvalue is: 10.193101
547 [stdout:1] Average max eigenvalue is: 10.064508
548 [stdout:2] Average max eigenvalue is: 10.055724
549 [stdout:3] Average max eigenvalue is: 10.086876
467
550
468 In [36]: px ans= "Average max eigenvalue is: %f"%(sum(max_evals)/len(max_evals))
551 In [35]: %autopx
469 Parallel execution on engines: [0, 1, 2, 3]
552 Auto Parallel Disabled
470
471 In [37]: dv['ans']
472 Out[37]: [ 'Average max eigenvalue is: 10.1387247332',
473 ....: 'Average max eigenvalue is: 10.2076902286',
474 ....: 'Average max eigenvalue is: 10.1891484655',
475 ....: 'Average max eigenvalue is: 10.1158837784',]
476
553
477
554
478 Moving Python objects around
555 Moving Python objects around
479 ============================
556 ============================
480
557
481 In addition to calling functions and executing code on engines, you can
558 In addition to calling functions and executing code on engines, you can
482 transfer Python objects to and from your IPython session and the engines. In
559 transfer Python objects to and from your IPython session and the engines. In
483 IPython, these operations are called :meth:`push` (sending an object to the
560 IPython, these operations are called :meth:`push` (sending an object to the
484 engines) and :meth:`pull` (getting an object from the engines).
561 engines) and :meth:`pull` (getting an object from the engines).
485
562
486 Basic push and pull
563 Basic push and pull
487 -------------------
564 -------------------
488
565
489 Here are some examples of how you use :meth:`push` and :meth:`pull`:
566 Here are some examples of how you use :meth:`push` and :meth:`pull`:
490
567
491 .. sourcecode:: ipython
568 .. sourcecode:: ipython
492
569
493 In [38]: dview.push(dict(a=1.03234,b=3453))
570 In [38]: dview.push(dict(a=1.03234,b=3453))
494 Out[38]: [None,None,None,None]
571 Out[38]: [None,None,None,None]
495
572
496 In [39]: dview.pull('a')
573 In [39]: dview.pull('a')
497 Out[39]: [ 1.03234, 1.03234, 1.03234, 1.03234]
574 Out[39]: [ 1.03234, 1.03234, 1.03234, 1.03234]
498
575
499 In [40]: dview.pull('b', targets=0)
576 In [40]: dview.pull('b', targets=0)
500 Out[40]: 3453
577 Out[40]: 3453
501
578
502 In [41]: dview.pull(('a','b'))
579 In [41]: dview.pull(('a','b'))
503 Out[41]: [ [1.03234, 3453], [1.03234, 3453], [1.03234, 3453], [1.03234, 3453] ]
580 Out[41]: [ [1.03234, 3453], [1.03234, 3453], [1.03234, 3453], [1.03234, 3453] ]
504
581
505 In [43]: dview.push(dict(c='speed'))
582 In [43]: dview.push(dict(c='speed'))
506 Out[43]: [None,None,None,None]
583 Out[43]: [None,None,None,None]
507
584
508 In non-blocking mode :meth:`push` and :meth:`pull` also return
585 In non-blocking mode :meth:`push` and :meth:`pull` also return
509 :class:`AsyncResult` objects:
586 :class:`AsyncResult` objects:
510
587
511 .. sourcecode:: ipython
588 .. sourcecode:: ipython
512
589
513 In [48]: ar = dview.pull('a', block=False)
590 In [48]: ar = dview.pull('a', block=False)
514
591
515 In [49]: ar.get()
592 In [49]: ar.get()
516 Out[49]: [1.03234, 1.03234, 1.03234, 1.03234]
593 Out[49]: [1.03234, 1.03234, 1.03234, 1.03234]
517
594
518
595
519 Dictionary interface
596 Dictionary interface
520 --------------------
597 --------------------
521
598
522 Since a Python namespace is just a :class:`dict`, :class:`DirectView` objects provide
599 Since a Python namespace is just a :class:`dict`, :class:`DirectView` objects provide
523 dictionary-style access by key and methods such as :meth:`get` and
600 dictionary-style access by key and methods such as :meth:`get` and
524 :meth:`update` for convenience. This makes the remote namespaces of the engines
601 :meth:`update` for convenience. This makes the remote namespaces of the engines
525 appear as a local dictionary. Underneath, these methods call :meth:`apply`:
602 appear as a local dictionary. Underneath, these methods call :meth:`apply`:
526
603
527 .. sourcecode:: ipython
604 .. sourcecode:: ipython
528
605
529 In [51]: dview['a']=['foo','bar']
606 In [51]: dview['a']=['foo','bar']
530
607
531 In [52]: dview['a']
608 In [52]: dview['a']
532 Out[52]: [ ['foo', 'bar'], ['foo', 'bar'], ['foo', 'bar'], ['foo', 'bar'] ]
609 Out[52]: [ ['foo', 'bar'], ['foo', 'bar'], ['foo', 'bar'], ['foo', 'bar'] ]
533
610
534 Scatter and gather
611 Scatter and gather
535 ------------------
612 ------------------
536
613
537 Sometimes it is useful to partition a sequence and push the partitions to
614 Sometimes it is useful to partition a sequence and push the partitions to
538 different engines. In MPI language, this is known as scatter/gather and we
615 different engines. In MPI language, this is known as scatter/gather and we
539 follow that terminology. However, it is important to remember that in
616 follow that terminology. However, it is important to remember that in
540 IPython's :class:`Client` class, :meth:`scatter` is from the
617 IPython's :class:`Client` class, :meth:`scatter` is from the
541 interactive IPython session to the engines and :meth:`gather` is from the
618 interactive IPython session to the engines and :meth:`gather` is from the
542 engines back to the interactive IPython session. For scatter/gather operations
619 engines back to the interactive IPython session. For scatter/gather operations
543 between engines, MPI, pyzmq, or some other direct interconnect should be used.
620 between engines, MPI, pyzmq, or some other direct interconnect should be used.
544
621
545 .. sourcecode:: ipython
622 .. sourcecode:: ipython
546
623
547 In [58]: dview.scatter('a',range(16))
624 In [58]: dview.scatter('a',range(16))
548 Out[58]: [None,None,None,None]
625 Out[58]: [None,None,None,None]
549
626
550 In [59]: dview['a']
627 In [59]: dview['a']
551 Out[59]: [ [0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15] ]
628 Out[59]: [ [0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15] ]
552
629
553 In [60]: dview.gather('a')
630 In [60]: dview.gather('a')
554 Out[60]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
631 Out[60]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
555
632
556 Other things to look at
633 Other things to look at
557 =======================
634 =======================
558
635
559 How to do parallel list comprehensions
636 How to do parallel list comprehensions
560 --------------------------------------
637 --------------------------------------
561
638
562 In many cases list comprehensions are nicer than using the map function. While
639 In many cases list comprehensions are nicer than using the map function. While
563 we don't have fully parallel list comprehensions, it is simple to get the
640 we don't have fully parallel list comprehensions, it is simple to get the
564 basic effect using :meth:`scatter` and :meth:`gather`:
641 basic effect using :meth:`scatter` and :meth:`gather`:
565
642
566 .. sourcecode:: ipython
643 .. sourcecode:: ipython
567
644
568 In [66]: dview.scatter('x',range(64))
645 In [66]: dview.scatter('x',range(64))
569
646
570 In [67]: %px y = [i**10 for i in x]
647 In [67]: %px y = [i**10 for i in x]
571 Parallel execution on engines: [0, 1, 2, 3]
648 Parallel execution on engines: [0, 1, 2, 3]
572 Out[67]:
649 Out[67]:
573
650
574 In [68]: y = dview.gather('y')
651 In [68]: y = dview.gather('y')
575
652
576 In [69]: print y
653 In [69]: print y
577 [0, 1, 1024, 59049, 1048576, 9765625, 60466176, 282475249, 1073741824,...]
654 [0, 1, 1024, 59049, 1048576, 9765625, 60466176, 282475249, 1073741824,...]
578
655
579 Remote imports
656 Remote imports
580 --------------
657 --------------
581
658
582 Sometimes you will want to import packages both in your interactive session
659 Sometimes you will want to import packages both in your interactive session
583 and on your remote engines. This can be done with the :class:`ContextManager`
660 and on your remote engines. This can be done with the :class:`ContextManager`
584 created by a DirectView's :meth:`sync_imports` method:
661 created by a DirectView's :meth:`sync_imports` method:
585
662
586 .. sourcecode:: ipython
663 .. sourcecode:: ipython
587
664
588 In [69]: with dview.sync_imports():
665 In [69]: with dview.sync_imports():
589 ....: import numpy
666 ....: import numpy
590 importing numpy on engine(s)
667 importing numpy on engine(s)
591
668
592 Any imports made inside the block will also be performed on the view's engines.
669 Any imports made inside the block will also be performed on the view's engines.
593 sync_imports also takes a `local` boolean flag that defaults to True, which specifies
670 sync_imports also takes a `local` boolean flag that defaults to True, which specifies
594 whether the local imports should also be performed. However, support for `local=False`
671 whether the local imports should also be performed. However, support for `local=False`
595 has not been implemented, so only packages that can be imported locally will work
672 has not been implemented, so only packages that can be imported locally will work
596 this way.
673 this way.
597
674
598 You can also specify imports via the ``@require`` decorator. This is a decorator
675 You can also specify imports via the ``@require`` decorator. This is a decorator
599 designed for use in Dependencies, but can be used to handle remote imports as well.
676 designed for use in Dependencies, but can be used to handle remote imports as well.
600 Modules or module names passed to ``@require`` will be imported before the decorated
677 Modules or module names passed to ``@require`` will be imported before the decorated
601 function is called. If they cannot be imported, the decorated function will never
678 function is called. If they cannot be imported, the decorated function will never
602 execute, and will fail with an UnmetDependencyError.
679 execute, and will fail with an UnmetDependencyError.
603
680
604 .. sourcecode:: ipython
681 .. sourcecode:: ipython
605
682
606 In [69]: from IPython.parallel import require
683 In [69]: from IPython.parallel import require
607
684
608 In [70]: @require('re')
685 In [70]: @require('re')
609 ....: def findall(pat, x):
686 ....: def findall(pat, x):
610 ....: # re is guaranteed to be available
687 ....: # re is guaranteed to be available
611 ....: return re.findall(pat, x)
688 ....: return re.findall(pat, x)
612
689
613 # you can also pass modules themselves, that you already have locally:
690 # you can also pass modules themselves, that you already have locally:
614 In [71]: @require(time)
691 In [71]: @require(time)
615 ....: def wait(t):
692 ....: def wait(t):
616 ....: time.sleep(t)
693 ....: time.sleep(t)
617 ....: return t
694 ....: return t
618
695
619 .. _parallel_exceptions:
696 .. _parallel_exceptions:
620
697
621 Parallel exceptions
698 Parallel exceptions
622 -------------------
699 -------------------
623
700
624 In the multiengine interface, parallel commands can raise Python exceptions,
701 In the multiengine interface, parallel commands can raise Python exceptions,
625 just like serial commands. But, it is a little subtle, because a single
702 just like serial commands. But, it is a little subtle, because a single
626 parallel command can actually raise multiple exceptions (one for each engine
703 parallel command can actually raise multiple exceptions (one for each engine
627 the command was run on). To express this idea, we have a
704 the command was run on). To express this idea, we have a
628 :exc:`CompositeError` exception class that will be raised in most cases. The
705 :exc:`CompositeError` exception class that will be raised in most cases. The
629 :exc:`CompositeError` class is a special type of exception that wraps one or
706 :exc:`CompositeError` class is a special type of exception that wraps one or
630 more other types of exceptions. Here is how it works:
707 more other types of exceptions. Here is how it works:
631
708
632 .. sourcecode:: ipython
709 .. sourcecode:: ipython
633
710
634 In [76]: dview.block=True
711 In [76]: dview.block=True
635
712
636 In [77]: dview.execute('1/0')
713 In [77]: dview.execute('1/0')
637 ---------------------------------------------------------------------------
714 ---------------------------------------------------------------------------
638 CompositeError Traceback (most recent call last)
715 CompositeError Traceback (most recent call last)
639 /home/user/<ipython-input-10-5d56b303a66c> in <module>()
716 /home/user/<ipython-input-10-5d56b303a66c> in <module>()
640 ----> 1 dview.execute('1/0')
717 ----> 1 dview.execute('1/0')
641
718
642 /path/to/site-packages/IPython/parallel/client/view.pyc in execute(self, code, targets, block)
719 /path/to/site-packages/IPython/parallel/client/view.pyc in execute(self, code, targets, block)
643 591 default: self.block
720 591 default: self.block
644 592 """
721 592 """
645 --> 593 return self._really_apply(util._execute, args=(code,), block=block, targets=targets)
722 --> 593 return self._really_apply(util._execute, args=(code,), block=block, targets=targets)
646 594
723 594
647 595 def run(self, filename, targets=None, block=None):
724 595 def run(self, filename, targets=None, block=None):
648
725
649 /home/user/<string> in _really_apply(self, f, args, kwargs, targets, block, track)
726 /home/user/<string> in _really_apply(self, f, args, kwargs, targets, block, track)
650
727
651 /path/to/site-packages/IPython/parallel/client/view.pyc in sync_results(f, self, *args, **kwargs)
728 /path/to/site-packages/IPython/parallel/client/view.pyc in sync_results(f, self, *args, **kwargs)
652 55 def sync_results(f, self, *args, **kwargs):
729 55 def sync_results(f, self, *args, **kwargs):
653 56 """sync relevant results from self.client to our results attribute."""
730 56 """sync relevant results from self.client to our results attribute."""
654 ---> 57 ret = f(self, *args, **kwargs)
731 ---> 57 ret = f(self, *args, **kwargs)
655 58 delta = self.outstanding.difference(self.client.outstanding)
732 58 delta = self.outstanding.difference(self.client.outstanding)
656 59 completed = self.outstanding.intersection(delta)
733 59 completed = self.outstanding.intersection(delta)
657
734
658 /home/user/<string> in _really_apply(self, f, args, kwargs, targets, block, track)
735 /home/user/<string> in _really_apply(self, f, args, kwargs, targets, block, track)
659
736
660 /path/to/site-packages/IPython/parallel/client/view.pyc in save_ids(f, self, *args, **kwargs)
737 /path/to/site-packages/IPython/parallel/client/view.pyc in save_ids(f, self, *args, **kwargs)
661 44 n_previous = len(self.client.history)
738 44 n_previous = len(self.client.history)
662 45 try:
739 45 try:
663 ---> 46 ret = f(self, *args, **kwargs)
740 ---> 46 ret = f(self, *args, **kwargs)
664 47 finally:
741 47 finally:
665 48 nmsgs = len(self.client.history) - n_previous
742 48 nmsgs = len(self.client.history) - n_previous
666
743
667 /path/to/site-packages/IPython/parallel/client/view.pyc in _really_apply(self, f, args, kwargs, targets, block, track)
744 /path/to/site-packages/IPython/parallel/client/view.pyc in _really_apply(self, f, args, kwargs, targets, block, track)
668 529 if block:
745 529 if block:
669 530 try:
746 530 try:
670 --> 531 return ar.get()
747 --> 531 return ar.get()
671 532 except KeyboardInterrupt:
748 532 except KeyboardInterrupt:
672 533 pass
749 533 pass
673
750
674 /path/to/site-packages/IPython/parallel/client/asyncresult.pyc in get(self, timeout)
751 /path/to/site-packages/IPython/parallel/client/asyncresult.pyc in get(self, timeout)
675 101 return self._result
752 101 return self._result
676 102 else:
753 102 else:
677 --> 103 raise self._exception
754 --> 103 raise self._exception
678 104 else:
755 104 else:
679 105 raise error.TimeoutError("Result not ready.")
756 105 raise error.TimeoutError("Result not ready.")
680
757
681 CompositeError: one or more exceptions from call to method: _execute
758 CompositeError: one or more exceptions from call to method: _execute
682 [0:apply]: ZeroDivisionError: integer division or modulo by zero
759 [0:apply]: ZeroDivisionError: integer division or modulo by zero
683 [1:apply]: ZeroDivisionError: integer division or modulo by zero
760 [1:apply]: ZeroDivisionError: integer division or modulo by zero
684 [2:apply]: ZeroDivisionError: integer division or modulo by zero
761 [2:apply]: ZeroDivisionError: integer division or modulo by zero
685 [3:apply]: ZeroDivisionError: integer division or modulo by zero
762 [3:apply]: ZeroDivisionError: integer division or modulo by zero
686
763
687 Notice how the error message printed when :exc:`CompositeError` is raised has
764 Notice how the error message printed when :exc:`CompositeError` is raised has
688 information about the individual exceptions that were raised on each engine.
765 information about the individual exceptions that were raised on each engine.
689 If you want, you can even raise one of these original exceptions:
766 If you want, you can even raise one of these original exceptions:
690
767
691 .. sourcecode:: ipython
768 .. sourcecode:: ipython
692
769
693 In [80]: try:
770 In [80]: try:
694 ....: dview.execute('1/0')
771 ....: dview.execute('1/0')
695 ....: except parallel.error.CompositeError, e:
772 ....: except parallel.error.CompositeError, e:
696 ....: e.raise_exception()
773 ....: e.raise_exception()
697 ....:
774 ....:
698 ....:
775 ....:
699 ---------------------------------------------------------------------------
776 ---------------------------------------------------------------------------
700 RemoteError Traceback (most recent call last)
777 RemoteError Traceback (most recent call last)
701 /home/user/<ipython-input-17-8597e7e39858> in <module>()
778 /home/user/<ipython-input-17-8597e7e39858> in <module>()
702 2 dview.execute('1/0')
779 2 dview.execute('1/0')
703 3 except CompositeError as e:
780 3 except CompositeError as e:
704 ----> 4 e.raise_exception()
781 ----> 4 e.raise_exception()
705
782
706 /path/to/site-packages/IPython/parallel/error.pyc in raise_exception(self, excid)
783 /path/to/site-packages/IPython/parallel/error.pyc in raise_exception(self, excid)
707 266 raise IndexError("an exception with index %i does not exist"%excid)
784 266 raise IndexError("an exception with index %i does not exist"%excid)
708 267 else:
785 267 else:
709 --> 268 raise RemoteError(en, ev, etb, ei)
786 --> 268 raise RemoteError(en, ev, etb, ei)
710 269
787 269
711 270
788 270
712
789
713 RemoteError: ZeroDivisionError(integer division or modulo by zero)
790 RemoteError: ZeroDivisionError(integer division or modulo by zero)
714 Traceback (most recent call last):
791 Traceback (most recent call last):
715 File "/path/to/site-packages/IPython/parallel/engine/streamkernel.py", line 330, in apply_request
792 File "/path/to/site-packages/IPython/parallel/engine/streamkernel.py", line 330, in apply_request
716 exec code in working,working
793 exec code in working,working
717 File "<string>", line 1, in <module>
794 File "<string>", line 1, in <module>
718 File "/path/to/site-packages/IPython/parallel/util.py", line 354, in _execute
795 File "/path/to/site-packages/IPython/parallel/util.py", line 354, in _execute
719 exec code in globals()
796 exec code in globals()
720 File "<string>", line 1, in <module>
797 File "<string>", line 1, in <module>
721 ZeroDivisionError: integer division or modulo by zero
798 ZeroDivisionError: integer division or modulo by zero
722
799
723 If you are working in IPython, you can simply type ``%debug`` after one of
800 If you are working in IPython, you can simply type ``%debug`` after one of
724 these :exc:`CompositeError` exceptions is raised, and inspect the exception
801 these :exc:`CompositeError` exceptions is raised, and inspect the exception
725 instance:
802 instance:
726
803
727 .. sourcecode:: ipython
804 .. sourcecode:: ipython
728
805
729 In [81]: dview.execute('1/0')
806 In [81]: dview.execute('1/0')
730 ---------------------------------------------------------------------------
807 ---------------------------------------------------------------------------
731 CompositeError Traceback (most recent call last)
808 CompositeError Traceback (most recent call last)
732 /home/user/<ipython-input-10-5d56b303a66c> in <module>()
809 /home/user/<ipython-input-10-5d56b303a66c> in <module>()
733 ----> 1 dview.execute('1/0')
810 ----> 1 dview.execute('1/0')
734
811
735 /path/to/site-packages/IPython/parallel/client/view.pyc in execute(self, code, targets, block)
812 /path/to/site-packages/IPython/parallel/client/view.pyc in execute(self, code, targets, block)
736 591 default: self.block
813 591 default: self.block
737 592 """
814 592 """
738 --> 593 return self._really_apply(util._execute, args=(code,), block=block, targets=targets)
815 --> 593 return self._really_apply(util._execute, args=(code,), block=block, targets=targets)
739 594
816 594
740 595 def run(self, filename, targets=None, block=None):
817 595 def run(self, filename, targets=None, block=None):
741
818
742 /home/user/<string> in _really_apply(self, f, args, kwargs, targets, block, track)
819 /home/user/<string> in _really_apply(self, f, args, kwargs, targets, block, track)
743
820
744 /path/to/site-packages/IPython/parallel/client/view.pyc in sync_results(f, self, *args, **kwargs)
821 /path/to/site-packages/IPython/parallel/client/view.pyc in sync_results(f, self, *args, **kwargs)
745 55 def sync_results(f, self, *args, **kwargs):
822 55 def sync_results(f, self, *args, **kwargs):
746 56 """sync relevant results from self.client to our results attribute."""
823 56 """sync relevant results from self.client to our results attribute."""
747 ---> 57 ret = f(self, *args, **kwargs)
824 ---> 57 ret = f(self, *args, **kwargs)
748 58 delta = self.outstanding.difference(self.client.outstanding)
825 58 delta = self.outstanding.difference(self.client.outstanding)
749 59 completed = self.outstanding.intersection(delta)
826 59 completed = self.outstanding.intersection(delta)
750
827
751 /home/user/<string> in _really_apply(self, f, args, kwargs, targets, block, track)
828 /home/user/<string> in _really_apply(self, f, args, kwargs, targets, block, track)
752
829
753 /path/to/site-packages/IPython/parallel/client/view.pyc in save_ids(f, self, *args, **kwargs)
830 /path/to/site-packages/IPython/parallel/client/view.pyc in save_ids(f, self, *args, **kwargs)
754 44 n_previous = len(self.client.history)
831 44 n_previous = len(self.client.history)
755 45 try:
832 45 try:
756 ---> 46 ret = f(self, *args, **kwargs)
833 ---> 46 ret = f(self, *args, **kwargs)
757 47 finally:
834 47 finally:
758 48 nmsgs = len(self.client.history) - n_previous
835 48 nmsgs = len(self.client.history) - n_previous
759
836
760 /path/to/site-packages/IPython/parallel/client/view.pyc in _really_apply(self, f, args, kwargs, targets, block, track)
837 /path/to/site-packages/IPython/parallel/client/view.pyc in _really_apply(self, f, args, kwargs, targets, block, track)
761 529 if block:
838 529 if block:
762 530 try:
839 530 try:
763 --> 531 return ar.get()
840 --> 531 return ar.get()
764 532 except KeyboardInterrupt:
841 532 except KeyboardInterrupt:
765 533 pass
842 533 pass
766
843
767 /path/to/site-packages/IPython/parallel/client/asyncresult.pyc in get(self, timeout)
844 /path/to/site-packages/IPython/parallel/client/asyncresult.pyc in get(self, timeout)
768 101 return self._result
845 101 return self._result
769 102 else:
846 102 else:
770 --> 103 raise self._exception
847 --> 103 raise self._exception
771 104 else:
848 104 else:
772 105 raise error.TimeoutError("Result not ready.")
849 105 raise error.TimeoutError("Result not ready.")
773
850
774 CompositeError: one or more exceptions from call to method: _execute
851 CompositeError: one or more exceptions from call to method: _execute
775 [0:apply]: ZeroDivisionError: integer division or modulo by zero
852 [0:apply]: ZeroDivisionError: integer division or modulo by zero
776 [1:apply]: ZeroDivisionError: integer division or modulo by zero
853 [1:apply]: ZeroDivisionError: integer division or modulo by zero
777 [2:apply]: ZeroDivisionError: integer division or modulo by zero
854 [2:apply]: ZeroDivisionError: integer division or modulo by zero
778 [3:apply]: ZeroDivisionError: integer division or modulo by zero
855 [3:apply]: ZeroDivisionError: integer division or modulo by zero
779
856
780 In [82]: %debug
857 In [82]: %debug
781 > /path/to/site-packages/IPython/parallel/client/asyncresult.py(103)get()
858 > /path/to/site-packages/IPython/parallel/client/asyncresult.py(103)get()
782 102 else:
859 102 else:
783 --> 103 raise self._exception
860 --> 103 raise self._exception
784 104 else:
861 104 else:
785
862
786 # With the debugger running, self._exception is the exceptions instance. We can tab complete
863 # With the debugger running, self._exception is the exceptions instance. We can tab complete
787 # on it and see the extra methods that are available.
864 # on it and see the extra methods that are available.
788 ipdb> self._exception.<tab>
865 ipdb> self._exception.<tab>
789 e.__class__ e.__getitem__ e.__new__ e.__setstate__ e.args
866 e.__class__ e.__getitem__ e.__new__ e.__setstate__ e.args
790 e.__delattr__ e.__getslice__ e.__reduce__ e.__str__ e.elist
867 e.__delattr__ e.__getslice__ e.__reduce__ e.__str__ e.elist
791 e.__dict__ e.__hash__ e.__reduce_ex__ e.__weakref__ e.message
868 e.__dict__ e.__hash__ e.__reduce_ex__ e.__weakref__ e.message
792 e.__doc__ e.__init__ e.__repr__ e._get_engine_str e.print_tracebacks
869 e.__doc__ e.__init__ e.__repr__ e._get_engine_str e.print_tracebacks
793 e.__getattribute__ e.__module__ e.__setattr__ e._get_traceback e.raise_exception
870 e.__getattribute__ e.__module__ e.__setattr__ e._get_traceback e.raise_exception
794 ipdb> self._exception.print_tracebacks()
871 ipdb> self._exception.print_tracebacks()
795 [0:apply]:
872 [0:apply]:
796 Traceback (most recent call last):
873 Traceback (most recent call last):
797 File "/path/to/site-packages/IPython/parallel/engine/streamkernel.py", line 330, in apply_request
874 File "/path/to/site-packages/IPython/parallel/engine/streamkernel.py", line 330, in apply_request
798 exec code in working,working
875 exec code in working,working
799 File "<string>", line 1, in <module>
876 File "<string>", line 1, in <module>
800 File "/path/to/site-packages/IPython/parallel/util.py", line 354, in _execute
877 File "/path/to/site-packages/IPython/parallel/util.py", line 354, in _execute
801 exec code in globals()
878 exec code in globals()
802 File "<string>", line 1, in <module>
879 File "<string>", line 1, in <module>
803 ZeroDivisionError: integer division or modulo by zero
880 ZeroDivisionError: integer division or modulo by zero
804
881
805
882
806 [1:apply]:
883 [1:apply]:
807 Traceback (most recent call last):
884 Traceback (most recent call last):
808 File "/path/to/site-packages/IPython/parallel/engine/streamkernel.py", line 330, in apply_request
885 File "/path/to/site-packages/IPython/parallel/engine/streamkernel.py", line 330, in apply_request
809 exec code in working,working
886 exec code in working,working
810 File "<string>", line 1, in <module>
887 File "<string>", line 1, in <module>
811 File "/path/to/site-packages/IPython/parallel/util.py", line 354, in _execute
888 File "/path/to/site-packages/IPython/parallel/util.py", line 354, in _execute
812 exec code in globals()
889 exec code in globals()
813 File "<string>", line 1, in <module>
890 File "<string>", line 1, in <module>
814 ZeroDivisionError: integer division or modulo by zero
891 ZeroDivisionError: integer division or modulo by zero
815
892
816
893
817 [2:apply]:
894 [2:apply]:
818 Traceback (most recent call last):
895 Traceback (most recent call last):
819 File "/path/to/site-packages/IPython/parallel/engine/streamkernel.py", line 330, in apply_request
896 File "/path/to/site-packages/IPython/parallel/engine/streamkernel.py", line 330, in apply_request
820 exec code in working,working
897 exec code in working,working
821 File "<string>", line 1, in <module>
898 File "<string>", line 1, in <module>
822 File "/path/to/site-packages/IPython/parallel/util.py", line 354, in _execute
899 File "/path/to/site-packages/IPython/parallel/util.py", line 354, in _execute
823 exec code in globals()
900 exec code in globals()
824 File "<string>", line 1, in <module>
901 File "<string>", line 1, in <module>
825 ZeroDivisionError: integer division or modulo by zero
902 ZeroDivisionError: integer division or modulo by zero
826
903
827
904
828 [3:apply]:
905 [3:apply]:
829 Traceback (most recent call last):
906 Traceback (most recent call last):
830 File "/path/to/site-packages/IPython/parallel/engine/streamkernel.py", line 330, in apply_request
907 File "/path/to/site-packages/IPython/parallel/engine/streamkernel.py", line 330, in apply_request
831 exec code in working,working
908 exec code in working,working
832 File "<string>", line 1, in <module>
909 File "<string>", line 1, in <module>
833 File "/path/to/site-packages/IPython/parallel/util.py", line 354, in _execute
910 File "/path/to/site-packages/IPython/parallel/util.py", line 354, in _execute
834 exec code in globals()
911 exec code in globals()
835 File "<string>", line 1, in <module>
912 File "<string>", line 1, in <module>
836 ZeroDivisionError: integer division or modulo by zero
913 ZeroDivisionError: integer division or modulo by zero
837
914
838
915
839 All of this same error handling magic even works in non-blocking mode:
916 All of this same error handling magic even works in non-blocking mode:
840
917
841 .. sourcecode:: ipython
918 .. sourcecode:: ipython
842
919
843 In [83]: dview.block=False
920 In [83]: dview.block=False
844
921
845 In [84]: ar = dview.execute('1/0')
922 In [84]: ar = dview.execute('1/0')
846
923
847 In [85]: ar.get()
924 In [85]: ar.get()
848 ---------------------------------------------------------------------------
925 ---------------------------------------------------------------------------
849 CompositeError Traceback (most recent call last)
926 CompositeError Traceback (most recent call last)
850 /home/user/<ipython-input-21-8531eb3d26fb> in <module>()
927 /home/user/<ipython-input-21-8531eb3d26fb> in <module>()
851 ----> 1 ar.get()
928 ----> 1 ar.get()
852
929
853 /path/to/site-packages/IPython/parallel/client/asyncresult.pyc in get(self, timeout)
930 /path/to/site-packages/IPython/parallel/client/asyncresult.pyc in get(self, timeout)
854 101 return self._result
931 101 return self._result
855 102 else:
932 102 else:
856 --> 103 raise self._exception
933 --> 103 raise self._exception
857 104 else:
934 104 else:
858 105 raise error.TimeoutError("Result not ready.")
935 105 raise error.TimeoutError("Result not ready.")
859
936
860 CompositeError: one or more exceptions from call to method: _execute
937 CompositeError: one or more exceptions from call to method: _execute
861 [0:apply]: ZeroDivisionError: integer division or modulo by zero
938 [0:apply]: ZeroDivisionError: integer division or modulo by zero
862 [1:apply]: ZeroDivisionError: integer division or modulo by zero
939 [1:apply]: ZeroDivisionError: integer division or modulo by zero
863 [2:apply]: ZeroDivisionError: integer division or modulo by zero
940 [2:apply]: ZeroDivisionError: integer division or modulo by zero
864 [3:apply]: ZeroDivisionError: integer division or modulo by zero
941 [3:apply]: ZeroDivisionError: integer division or modulo by zero
865
942
General Comments 0
You need to be logged in to leave comments. Login now