remove user_variables...
Author: MinRK
The requested changes are too big and content was truncated.
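The commit drops the shell's `user_variables` API in favour of `user_expressions`, which takes a mapping of keys to expression strings and returns one rich-display reply per key; a missing name now reports a NameError instead of a KeyError. A minimal migration sketch, not part of the diff, assuming an active IPython shell reachable via `get_ipython()` as in the tests below:

    # Hypothetical sketch: the old call took a collection of variable names,
    # the new call takes a {key: expression} mapping.
    ip = get_ipython()          # only available inside a running IPython shell
    names = {'dummy', 'doesnotexist'}

    # Old (removed by this commit):
    #     replies = ip.user_variables(names)
    # New: evaluate each name as an expression, keyed by the same name.
    replies = ip.user_expressions({name: name for name in names})

    for key, reply in replies.items():
        if reply['status'] == 'ok':
            # 'data' is a mimebundle, e.g. 'text/plain', 'text/html', ...
            print(key, reply['data'].get('text/plain'))
        else:
            # unknown names now surface as ename 'NameError' (was 'KeyError')
            print(key, reply['ename'])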
@@ -1,734 +1,721 @@
# -*- coding: utf-8 -*-
"""Tests for the key interactiveshell module.

Historically the main classes in interactiveshell have been under-tested. This
module should grow as many single-method tests as possible to trap many of the
recurring bugs we seem to encounter with high-level interaction.
-
-Authors
--------
-* Fernando Perez
"""
-#-----------------------------------------------------------------------------
-# Copyright (C) 2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------

-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-# stdlib
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
import ast
import os
import signal
import shutil
import sys
import tempfile
import unittest
try:
    from unittest import mock
except ImportError:
    import mock
from os.path import join

-# third-party
import nose.tools as nt

-# Our own
from IPython.core.inputtransformer import InputTransformer
from IPython.testing.decorators import skipif, skip_win32, onlyif_unicode_paths
from IPython.testing import tools as tt
from IPython.utils import io
from IPython.utils import py3compat
from IPython.utils.py3compat import unicode_type, PY3

if PY3:
    from io import StringIO
else:
    from StringIO import StringIO

#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------
# This is used by every single test, no point repeating it ad nauseam
ip = get_ipython()

#-----------------------------------------------------------------------------
# Tests
#-----------------------------------------------------------------------------

class InteractiveShellTestCase(unittest.TestCase):
    def test_naked_string_cells(self):
        """Test that cells with only naked strings are fully executed"""
        # First, single-line inputs
        ip.run_cell('"a"\n')
        self.assertEqual(ip.user_ns['_'], 'a')
        # And also multi-line cells
        ip.run_cell('"""a\nb"""\n')
        self.assertEqual(ip.user_ns['_'], 'a\nb')

    def test_run_empty_cell(self):
        """Just make sure we don't get a horrible error with a blank
        cell of input. Yes, I did overlook that."""
        old_xc = ip.execution_count
        ip.run_cell('')
        self.assertEqual(ip.execution_count, old_xc)

    def test_run_cell_multiline(self):
        """Multi-block, multi-line cells must execute correctly.
        """
        src = '\n'.join(["x=1",
                         "y=2",
                         "if 1:",
                         "    x += 1",
                         "    y += 1",])
        ip.run_cell(src)
        self.assertEqual(ip.user_ns['x'], 2)
        self.assertEqual(ip.user_ns['y'], 3)

    def test_multiline_string_cells(self):
        "Code sprinkled with multiline strings should execute (GH-306)"
        ip.run_cell('tmp=0')
        self.assertEqual(ip.user_ns['tmp'], 0)
        ip.run_cell('tmp=1;"""a\nb"""\n')
        self.assertEqual(ip.user_ns['tmp'], 1)

    def test_dont_cache_with_semicolon(self):
        "Ending a line with semicolon should not cache the returned object (GH-307)"
        oldlen = len(ip.user_ns['Out'])
        for cell in ['1;', '1;1;']:
            ip.run_cell(cell, store_history=True)
            newlen = len(ip.user_ns['Out'])
            self.assertEqual(oldlen, newlen)
        i = 0
        #also test the default caching behavior
        for cell in ['1', '1;1']:
            ip.run_cell(cell, store_history=True)
            newlen = len(ip.user_ns['Out'])
            i += 1
            self.assertEqual(oldlen+i, newlen)

    def test_In_variable(self):
        "Verify that In variable grows with user input (GH-284)"
        oldlen = len(ip.user_ns['In'])
        ip.run_cell('1;', store_history=True)
        newlen = len(ip.user_ns['In'])
        self.assertEqual(oldlen+1, newlen)
        self.assertEqual(ip.user_ns['In'][-1],'1;')

    def test_magic_names_in_string(self):
        ip.run_cell('a = """\n%exit\n"""')
        self.assertEqual(ip.user_ns['a'], '\n%exit\n')

    def test_trailing_newline(self):
        """test that running !(command) does not raise a SyntaxError"""
        ip.run_cell('!(true)\n', False)
        ip.run_cell('!(true)\n\n\n', False)

    def test_gh_597(self):
        """Pretty-printing lists of objects with non-ascii reprs may cause
        problems."""
        class Spam(object):
            def __repr__(self):
                return "\xe9"*50
        import IPython.core.formatters
        f = IPython.core.formatters.PlainTextFormatter()
        f([Spam(),Spam()])


    def test_future_flags(self):
        """Check that future flags are used for parsing code (gh-777)"""
        ip.run_cell('from __future__ import print_function')
        try:
            ip.run_cell('prfunc_return_val = print(1,2, sep=" ")')
            assert 'prfunc_return_val' in ip.user_ns
        finally:
            # Reset compiler flags so we don't mess up other tests.
            ip.compile.reset_compiler_flags()

    def test_future_unicode(self):
        """Check that unicode_literals is imported from __future__ (gh #786)"""
        try:
            ip.run_cell(u'byte_str = "a"')
            assert isinstance(ip.user_ns['byte_str'], str) # string literals are byte strings by default
            ip.run_cell('from __future__ import unicode_literals')
            ip.run_cell(u'unicode_str = "a"')
            assert isinstance(ip.user_ns['unicode_str'], unicode_type) # string literals are now unicode
        finally:
            # Reset compiler flags so we don't mess up other tests.
            ip.compile.reset_compiler_flags()

    def test_can_pickle(self):
        "Can we pickle objects defined interactively (GH-29)"
        ip = get_ipython()
        ip.reset()
        ip.run_cell(("class Mylist(list):\n"
                     "    def __init__(self,x=[]):\n"
                     "        list.__init__(self,x)"))
        ip.run_cell("w=Mylist([1,2,3])")

        from pickle import dumps

        # We need to swap in our main module - this is only necessary
        # inside the test framework, because IPython puts the interactive module
        # in place (but the test framework undoes this).
        _main = sys.modules['__main__']
        sys.modules['__main__'] = ip.user_module
        try:
            res = dumps(ip.user_ns["w"])
        finally:
            sys.modules['__main__'] = _main
        self.assertTrue(isinstance(res, bytes))

    def test_global_ns(self):
        "Code in functions must be able to access variables outside them."
        ip = get_ipython()
        ip.run_cell("a = 10")
        ip.run_cell(("def f(x):\n"
                     "    return x + a"))
        ip.run_cell("b = f(12)")
        self.assertEqual(ip.user_ns["b"], 22)

    def test_bad_custom_tb(self):
        """Check that InteractiveShell is protected from bad custom exception handlers"""
        from IPython.utils import io
        save_stderr = io.stderr
        try:
            # capture stderr
            io.stderr = StringIO()
            ip.set_custom_exc((IOError,), lambda etype,value,tb: 1/0)
            self.assertEqual(ip.custom_exceptions, (IOError,))
            ip.run_cell(u'raise IOError("foo")')
            self.assertEqual(ip.custom_exceptions, ())
            self.assertTrue("Custom TB Handler failed" in io.stderr.getvalue())
        finally:
            io.stderr = save_stderr

    def test_bad_custom_tb_return(self):
        """Check that InteractiveShell is protected from bad return types in custom exception handlers"""
        from IPython.utils import io
        save_stderr = io.stderr
        try:
            # capture stderr
            io.stderr = StringIO()
            ip.set_custom_exc((NameError,),lambda etype,value,tb, tb_offset=None: 1)
            self.assertEqual(ip.custom_exceptions, (NameError,))
            ip.run_cell(u'a=abracadabra')
            self.assertEqual(ip.custom_exceptions, ())
            self.assertTrue("Custom TB Handler failed" in io.stderr.getvalue())
        finally:
            io.stderr = save_stderr

    def test_drop_by_id(self):
        myvars = {"a":object(), "b":object(), "c": object()}
        ip.push(myvars, interactive=False)
        for name in myvars:
            assert name in ip.user_ns, name
            assert name in ip.user_ns_hidden, name
        ip.user_ns['b'] = 12
        ip.drop_by_id(myvars)
        for name in ["a", "c"]:
            assert name not in ip.user_ns, name
            assert name not in ip.user_ns_hidden, name
        assert ip.user_ns['b'] == 12
        ip.reset()

    def test_var_expand(self):
        ip.user_ns['f'] = u'Ca\xf1o'
        self.assertEqual(ip.var_expand(u'echo $f'), u'echo Ca\xf1o')
        self.assertEqual(ip.var_expand(u'echo {f}'), u'echo Ca\xf1o')
        self.assertEqual(ip.var_expand(u'echo {f[:-1]}'), u'echo Ca\xf1')
        self.assertEqual(ip.var_expand(u'echo {1*2}'), u'echo 2')

        ip.user_ns['f'] = b'Ca\xc3\xb1o'
        # This should not raise any exception:
        ip.var_expand(u'echo $f')

    def test_var_expand_local(self):
        """Test local variable expansion in !system and %magic calls"""
        # !system
        ip.run_cell('def test():\n'
                    '    lvar = "ttt"\n'
                    '    ret = !echo {lvar}\n'
                    '    return ret[0]\n')
        res = ip.user_ns['test']()
        nt.assert_in('ttt', res)

        # %magic
        ip.run_cell('def makemacro():\n'
                    '    macroname = "macro_var_expand_locals"\n'
                    '    %macro {macroname} codestr\n')
        ip.user_ns['codestr'] = "str(12)"
        ip.run_cell('makemacro()')
        nt.assert_in('macro_var_expand_locals', ip.user_ns)

    def test_var_expand_self(self):
        """Test variable expansion with the name 'self', which was failing.

        See https://github.com/ipython/ipython/issues/1878#issuecomment-7698218
        """
        ip.run_cell('class cTest:\n'
                    '    classvar="see me"\n'
                    '    def test(self):\n'
                    '        res = !echo Variable: {self.classvar}\n'
                    '        return res[0]\n')
        nt.assert_in('see me', ip.user_ns['cTest']().test())

    def test_bad_var_expand(self):
        """var_expand on invalid formats shouldn't raise"""
        # SyntaxError
        self.assertEqual(ip.var_expand(u"{'a':5}"), u"{'a':5}")
        # NameError
        self.assertEqual(ip.var_expand(u"{asdf}"), u"{asdf}")
        # ZeroDivisionError
        self.assertEqual(ip.var_expand(u"{1/0}"), u"{1/0}")

    def test_silent_postexec(self):
        """run_cell(silent=True) doesn't invoke pre/post_run_cell callbacks"""
        pre_explicit = mock.Mock()
        pre_always = mock.Mock()
        post_explicit = mock.Mock()
        post_always = mock.Mock()

        ip.events.register('pre_run_cell', pre_explicit)
        ip.events.register('pre_execute', pre_always)
        ip.events.register('post_run_cell', post_explicit)
        ip.events.register('post_execute', post_always)

        try:
            ip.run_cell("1", silent=True)
            assert pre_always.called
            assert not pre_explicit.called
            assert post_always.called
            assert not post_explicit.called
            # double-check that non-silent exec did what we expected
            # silent to avoid
            ip.run_cell("1")
            assert pre_explicit.called
            assert post_explicit.called
        finally:
            # remove post-exec
            ip.events.reset_all()

    def test_silent_noadvance(self):
        """run_cell(silent=True) doesn't advance execution_count"""
        ec = ip.execution_count
        # silent should force store_history=False
        ip.run_cell("1", store_history=True, silent=True)

        self.assertEqual(ec, ip.execution_count)
        # double-check that non-silent exec did what we expected
        # silent to avoid
        ip.run_cell("1", store_history=True)
        self.assertEqual(ec+1, ip.execution_count)

    def test_silent_nodisplayhook(self):
        """run_cell(silent=True) doesn't trigger displayhook"""
        d = dict(called=False)

        trap = ip.display_trap
        save_hook = trap.hook

        def failing_hook(*args, **kwargs):
            d['called'] = True

        try:
            trap.hook = failing_hook
            ip.run_cell("1", silent=True)
            self.assertFalse(d['called'])
            # double-check that non-silent exec did what we expected
            # silent to avoid
            ip.run_cell("1")
            self.assertTrue(d['called'])
        finally:
            trap.hook = save_hook

    @skipif(sys.version_info[0] >= 3, "softspace removed in py3")
    def test_print_softspace(self):
        """Verify that softspace is handled correctly when executing multiple
        statements.

        In [1]: print 1; print 2
        1
        2

        In [2]: print 1,; print 2
        1 2
        """

    def test_ofind_line_magic(self):
        from IPython.core.magic import register_line_magic

        @register_line_magic
        def lmagic(line):
            "A line magic"

        # Get info on line magic
        lfind = ip._ofind('lmagic')
        info = dict(found=True, isalias=False, ismagic=True,
                    namespace = 'IPython internal', obj= lmagic.__wrapped__,
                    parent = None)
        nt.assert_equal(lfind, info)

    def test_ofind_cell_magic(self):
        from IPython.core.magic import register_cell_magic

        @register_cell_magic
        def cmagic(line, cell):
            "A cell magic"

        # Get info on cell magic
        find = ip._ofind('cmagic')
        info = dict(found=True, isalias=False, ismagic=True,
                    namespace = 'IPython internal', obj= cmagic.__wrapped__,
                    parent = None)
        nt.assert_equal(find, info)

    def test_custom_exception(self):
        called = []
        def my_handler(shell, etype, value, tb, tb_offset=None):
            called.append(etype)
            shell.showtraceback((etype, value, tb), tb_offset=tb_offset)

        ip.set_custom_exc((ValueError,), my_handler)
        try:
            ip.run_cell("raise ValueError('test')")
            # Check that this was called, and only once.
            self.assertEqual(called, [ValueError])
        finally:
            # Reset the custom exception hook
            ip.set_custom_exc((), None)

    @skipif(sys.version_info[0] >= 3, "no differences with __future__ in py3")
    def test_future_environment(self):
        "Can we run code with & without the shell's __future__ imports?"
        ip.run_cell("from __future__ import division")
        ip.run_cell("a = 1/2", shell_futures=True)
        self.assertEqual(ip.user_ns['a'], 0.5)
        ip.run_cell("b = 1/2", shell_futures=False)
        self.assertEqual(ip.user_ns['b'], 0)

        ip.compile.reset_compiler_flags()
        # This shouldn't leak to the shell's compiler
        ip.run_cell("from __future__ import division \nc=1/2", shell_futures=False)
        self.assertEqual(ip.user_ns['c'], 0.5)
        ip.run_cell("d = 1/2", shell_futures=True)
        self.assertEqual(ip.user_ns['d'], 0)


class TestSafeExecfileNonAsciiPath(unittest.TestCase):

    @onlyif_unicode_paths
    def setUp(self):
        self.BASETESTDIR = tempfile.mkdtemp()
        self.TESTDIR = join(self.BASETESTDIR, u"åäö")
        os.mkdir(self.TESTDIR)
        with open(join(self.TESTDIR, u"åäötestscript.py"), "w") as sfile:
            sfile.write("pass\n")
        self.oldpath = py3compat.getcwd()
        os.chdir(self.TESTDIR)
        self.fname = u"åäötestscript.py"

    def tearDown(self):
        os.chdir(self.oldpath)
        shutil.rmtree(self.BASETESTDIR)

    @onlyif_unicode_paths
    def test_1(self):
        """Test safe_execfile with non-ascii path
        """
        ip.safe_execfile(self.fname, {}, raise_exceptions=True)

class ExitCodeChecks(tt.TempFileMixin):
    def test_exit_code_ok(self):
        self.system('exit 0')
        self.assertEqual(ip.user_ns['_exit_code'], 0)

    def test_exit_code_error(self):
        self.system('exit 1')
        self.assertEqual(ip.user_ns['_exit_code'], 1)

    @skipif(not hasattr(signal, 'SIGALRM'))
    def test_exit_code_signal(self):
        self.mktmp("import signal, time\n"
                   "signal.setitimer(signal.ITIMER_REAL, 0.1)\n"
                   "time.sleep(1)\n")
        self.system("%s %s" % (sys.executable, self.fname))
        self.assertEqual(ip.user_ns['_exit_code'], -signal.SIGALRM)

class TestSystemRaw(unittest.TestCase, ExitCodeChecks):
    system = ip.system_raw

    @onlyif_unicode_paths
    def test_1(self):
        """Test system_raw with non-ascii cmd
        """
        cmd = u'''python -c "'åäö'" '''
        ip.system_raw(cmd)

# TODO: Exit codes are currently ignored on Windows.
class TestSystemPipedExitCode(unittest.TestCase, ExitCodeChecks):
    system = ip.system_piped

    @skip_win32
    def test_exit_code_ok(self):
        ExitCodeChecks.test_exit_code_ok(self)

    @skip_win32
    def test_exit_code_error(self):
        ExitCodeChecks.test_exit_code_error(self)

    @skip_win32
    def test_exit_code_signal(self):
        ExitCodeChecks.test_exit_code_signal(self)

class TestModules(unittest.TestCase, tt.TempFileMixin):
    def test_extraneous_loads(self):
        """Test we're not loading modules on startup that we shouldn't.
        """
        self.mktmp("import sys\n"
                   "print('numpy' in sys.modules)\n"
                   "print('IPython.parallel' in sys.modules)\n"
                   "print('IPython.kernel.zmq' in sys.modules)\n"
                   )
        out = "False\nFalse\nFalse\n"
        tt.ipexec_validate(self.fname, out)

class Negator(ast.NodeTransformer):
    """Negates all number literals in an AST."""
    def visit_Num(self, node):
        node.n = -node.n
        return node

class TestAstTransform(unittest.TestCase):
    def setUp(self):
        self.negator = Negator()
        ip.ast_transformers.append(self.negator)

    def tearDown(self):
        ip.ast_transformers.remove(self.negator)

    def test_run_cell(self):
        with tt.AssertPrints('-34'):
            ip.run_cell('print (12 + 22)')

        # A named reference to a number shouldn't be transformed.
        ip.user_ns['n'] = 55
        with tt.AssertNotPrints('-55'):
            ip.run_cell('print (n)')

    def test_timeit(self):
        called = set()
        def f(x):
            called.add(x)
        ip.push({'f':f})

        with tt.AssertPrints("best of "):
            ip.run_line_magic("timeit", "-n1 f(1)")
        self.assertEqual(called, set([-1]))
        called.clear()

        with tt.AssertPrints("best of "):
            ip.run_cell_magic("timeit", "-n1 f(2)", "f(3)")
        self.assertEqual(called, set([-2, -3]))

    def test_time(self):
        called = []
        def f(x):
            called.append(x)
        ip.push({'f':f})

        # Test with an expression
        with tt.AssertPrints("Wall time: "):
            ip.run_line_magic("time", "f(5+9)")
        self.assertEqual(called, [-14])
        called[:] = []

        # Test with a statement (different code path)
        with tt.AssertPrints("Wall time: "):
            ip.run_line_magic("time", "a = f(-3 + -2)")
        self.assertEqual(called, [5])

    def test_macro(self):
        ip.push({'a':10})
        # The AST transformation makes this do a+=-1
        ip.define_macro("amacro", "a+=1\nprint(a)")

        with tt.AssertPrints("9"):
            ip.run_cell("amacro")
        with tt.AssertPrints("8"):
            ip.run_cell("amacro")

class IntegerWrapper(ast.NodeTransformer):
    """Wraps all integers in a call to Integer()"""
    def visit_Num(self, node):
        if isinstance(node.n, int):
            return ast.Call(func=ast.Name(id='Integer', ctx=ast.Load()),
                            args=[node], keywords=[])
        return node

class TestAstTransform2(unittest.TestCase):
    def setUp(self):
        self.intwrapper = IntegerWrapper()
        ip.ast_transformers.append(self.intwrapper)

        self.calls = []
        def Integer(*args):
            self.calls.append(args)
            return args
        ip.push({"Integer": Integer})

    def tearDown(self):
        ip.ast_transformers.remove(self.intwrapper)
        del ip.user_ns['Integer']

    def test_run_cell(self):
        ip.run_cell("n = 2")
        self.assertEqual(self.calls, [(2,)])

        # This shouldn't throw an error
        ip.run_cell("o = 2.0")
        self.assertEqual(ip.user_ns['o'], 2.0)

    def test_timeit(self):
        called = set()
        def f(x):
            called.add(x)
        ip.push({'f':f})

        with tt.AssertPrints("best of "):
            ip.run_line_magic("timeit", "-n1 f(1)")
        self.assertEqual(called, set([(1,)]))
        called.clear()

        with tt.AssertPrints("best of "):
            ip.run_cell_magic("timeit", "-n1 f(2)", "f(3)")
        self.assertEqual(called, set([(2,), (3,)]))

class ErrorTransformer(ast.NodeTransformer):
    """Throws an error when it sees a number."""
    def visit_Num(self):
        raise ValueError("test")

class TestAstTransformError(unittest.TestCase):
    def test_unregistering(self):
        err_transformer = ErrorTransformer()
        ip.ast_transformers.append(err_transformer)

        with tt.AssertPrints("unregister", channel='stderr'):
            ip.run_cell("1 + 2")

        # This should have been removed.
        nt.assert_not_in(err_transformer, ip.ast_transformers)

def test__IPYTHON__():
    # This shouldn't raise a NameError, that's all
    __IPYTHON__


class DummyRepr(object):
    def __repr__(self):
        return "DummyRepr"

    def _repr_html_(self):
        return "<b>dummy</b>"

    def _repr_javascript_(self):
        return "console.log('hi');", {'key': 'value'}


def test_user_variables():
    # enable all formatters
    ip.display_formatter.active_types = ip.display_formatter.format_types

    ip.user_ns['dummy'] = d = DummyRepr()
    keys = set(['dummy', 'doesnotexist'])
-    r = ip.user_variables(keys)
+    r = ip.user_expressions({ key:key for key in keys})

    nt.assert_equal(keys, set(r.keys()))
    dummy = r['dummy']
    nt.assert_equal(set(['status', 'data', 'metadata']), set(dummy.keys()))
    nt.assert_equal(dummy['status'], 'ok')
    data = dummy['data']
    metadata = dummy['metadata']
    nt.assert_equal(data.get('text/html'), d._repr_html_())
    js, jsmd = d._repr_javascript_()
    nt.assert_equal(data.get('application/javascript'), js)
    nt.assert_equal(metadata.get('application/javascript'), jsmd)

    dne = r['doesnotexist']
    nt.assert_equal(dne['status'], 'error')
-    nt.assert_equal(dne['ename'], 'KeyError')
+    nt.assert_equal(dne['ename'], 'NameError')

    # back to text only
    ip.display_formatter.active_types = ['text/plain']

def test_user_expression():
    # enable all formatters
    ip.display_formatter.active_types = ip.display_formatter.format_types
    query = {
        'a' : '1 + 2',
        'b' : '1/0',
    }
    r = ip.user_expressions(query)
    import pprint
    pprint.pprint(r)
    nt.assert_equal(set(r.keys()), set(query.keys()))
    a = r['a']
    nt.assert_equal(set(['status', 'data', 'metadata']), set(a.keys()))
    nt.assert_equal(a['status'], 'ok')
    data = a['data']
    metadata = a['metadata']
    nt.assert_equal(data.get('text/plain'), '3')

    b = r['b']
    nt.assert_equal(b['status'], 'error')
    nt.assert_equal(b['ename'], 'ZeroDivisionError')

    # back to text only
    ip.display_formatter.active_types = ['text/plain']





class TestSyntaxErrorTransformer(unittest.TestCase):
    """Check that SyntaxError raised by an input transformer is handled by run_cell()"""

    class SyntaxErrorTransformer(InputTransformer):

        def push(self, line):
            pos = line.find('syntaxerror')
            if pos >= 0:
                e = SyntaxError('input contains "syntaxerror"')
                e.text = line
                e.offset = pos + 1
                raise e
            return line

        def reset(self):
            pass

    def setUp(self):
        self.transformer = TestSyntaxErrorTransformer.SyntaxErrorTransformer()
        ip.input_splitter.python_line_transforms.append(self.transformer)
        ip.input_transformer_manager.python_line_transforms.append(self.transformer)

    def tearDown(self):
        ip.input_splitter.python_line_transforms.remove(self.transformer)
        ip.input_transformer_manager.python_line_transforms.remove(self.transformer)

    def test_syntaxerror_input_transformer(self):
        with tt.AssertPrints('1234'):
            ip.run_cell('1234')
        with tt.AssertPrints('SyntaxError: invalid syntax'):
            ip.run_cell('1 2 3') # plain python syntax error
        with tt.AssertPrints('SyntaxError: input contains "syntaxerror"'):
            ip.run_cell('2345 # syntaxerror') # input transformer syntax error
        with tt.AssertPrints('3456'):
            ip.run_cell('3456')

@@ -1,624 +1,621 @@
// Copyright (c) IPython Development Team.
// Distributed under the terms of the Modified BSD License.

//============================================================================
// Kernel
//============================================================================

/**
 * @module IPython
 * @namespace IPython
 * @submodule Kernel
 */

var IPython = (function (IPython) {
    "use strict";

    var utils = IPython.utils;

    // Initialization and connection.
    /**
     * A Kernel Class to communicate with the Python kernel
     * @Class Kernel
     */
    var Kernel = function (kernel_service_url) {
        this.kernel_id = null;
        this.shell_channel = null;
        this.iopub_channel = null;
        this.stdin_channel = null;
        this.kernel_service_url = kernel_service_url;
        this.running = false;
        this.username = "username";
        this.session_id = utils.uuid();
        this._msg_callbacks = {};
        this.post = $.post;

        if (typeof(WebSocket) !== 'undefined') {
            this.WebSocket = WebSocket;
        } else if (typeof(MozWebSocket) !== 'undefined') {
            this.WebSocket = MozWebSocket;
        } else {
            alert('Your browser does not have WebSocket support, please try Chrome, Safari or Firefox ≥ 6. Firefox 4 and 5 are also supported but you have to enable WebSockets in about:config.');
        }

        this.bind_events();
        this.init_iopub_handlers();
        this.comm_manager = new IPython.CommManager(this);
        this.widget_manager = new IPython.WidgetManager(this.comm_manager);

        this.last_msg_id = null;
        this.last_msg_callbacks = {};
    };


    Kernel.prototype._get_msg = function (msg_type, content, metadata) {
        var msg = {
            header : {
                msg_id : utils.uuid(),
                username : this.username,
                session : this.session_id,
                msg_type : msg_type
            },
            metadata : metadata || {},
            content : content,
            parent_header : {}
        };
        return msg;
    };

    Kernel.prototype.bind_events = function () {
        var that = this;
        $([IPython.events]).on('send_input_reply.Kernel', function(evt, data) {
            that.send_input_reply(data);
        });
    };

    // Initialize the iopub handlers

    Kernel.prototype.init_iopub_handlers = function () {
        var output_msg_types = ['stream', 'display_data', 'execute_result', 'error'];
        this._iopub_handlers = {};
        this.register_iopub_handler('status', $.proxy(this._handle_status_message, this));
        this.register_iopub_handler('clear_output', $.proxy(this._handle_clear_output, this));

        for (var i=0; i < output_msg_types.length; i++) {
            this.register_iopub_handler(output_msg_types[i], $.proxy(this._handle_output_message, this));
        }
    };

    /**
     * Start the Python kernel
     * @method start
     */
    Kernel.prototype.start = function (params) {
        params = params || {};
        if (!this.running) {
            var qs = $.param(params);
            this.post(utils.url_join_encode(this.kernel_service_url) + '?' + qs,
                $.proxy(this._kernel_started, this),
                'json'
            );
        }
    };

    /**
     * Restart the python kernel.
     *
     * Emit a 'status_restarting.Kernel' event with
     * the current object as parameter
     *
     * @method restart
     */
    Kernel.prototype.restart = function () {
        $([IPython.events]).trigger('status_restarting.Kernel', {kernel: this});
        if (this.running) {
            this.stop_channels();
            this.post(utils.url_join_encode(this.kernel_url, "restart"),
                $.proxy(this._kernel_started, this),
                'json'
            );
        }
    };


    Kernel.prototype._kernel_started = function (json) {
        console.log("Kernel started: ", json.id);
        this.running = true;
        this.kernel_id = json.id;
        // trailing 's' in https will become wss for secure web sockets
        this.ws_host = location.protocol.replace('http', 'ws') + "//" + location.host;
        this.kernel_url = utils.url_path_join(this.kernel_service_url, this.kernel_id);
        this.start_channels();
    };


    Kernel.prototype._websocket_closed = function(ws_url, early) {
        this.stop_channels();
        $([IPython.events]).trigger('websocket_closed.Kernel',
            {ws_url: ws_url, kernel: this, early: early}
        );
    };

    /**
     * Start the `shell` and `iopub` channels.
144 * Will stop and restart them if they already exist.
144 * Will stop and restart them if they already exist.
145 *
145 *
146 * @method start_channels
146 * @method start_channels
147 */
147 */
148 Kernel.prototype.start_channels = function () {
148 Kernel.prototype.start_channels = function () {
149 var that = this;
149 var that = this;
150 this.stop_channels();
150 this.stop_channels();
151 var ws_host_url = this.ws_host + this.kernel_url;
151 var ws_host_url = this.ws_host + this.kernel_url;
152 console.log("Starting WebSockets:", ws_host_url);
152 console.log("Starting WebSockets:", ws_host_url);
153 this.shell_channel = new this.WebSocket(
153 this.shell_channel = new this.WebSocket(
154 this.ws_host + utils.url_join_encode(this.kernel_url, "shell")
154 this.ws_host + utils.url_join_encode(this.kernel_url, "shell")
155 );
155 );
156 this.stdin_channel = new this.WebSocket(
156 this.stdin_channel = new this.WebSocket(
157 this.ws_host + utils.url_join_encode(this.kernel_url, "stdin")
157 this.ws_host + utils.url_join_encode(this.kernel_url, "stdin")
158 );
158 );
159 this.iopub_channel = new this.WebSocket(
159 this.iopub_channel = new this.WebSocket(
160 this.ws_host + utils.url_join_encode(this.kernel_url, "iopub")
160 this.ws_host + utils.url_join_encode(this.kernel_url, "iopub")
161 );
161 );
162
162
163 var already_called_onclose = false; // only alert once
163 var already_called_onclose = false; // only alert once
164 var ws_closed_early = function(evt){
164 var ws_closed_early = function(evt){
165 if (already_called_onclose){
165 if (already_called_onclose){
166 return;
166 return;
167 }
167 }
168 already_called_onclose = true;
168 already_called_onclose = true;
169 if ( ! evt.wasClean ){
169 if ( ! evt.wasClean ){
170 that._websocket_closed(ws_host_url, true);
170 that._websocket_closed(ws_host_url, true);
171 }
171 }
172 };
172 };
173 var ws_closed_late = function(evt){
173 var ws_closed_late = function(evt){
174 if (already_called_onclose){
174 if (already_called_onclose){
175 return;
175 return;
176 }
176 }
177 already_called_onclose = true;
177 already_called_onclose = true;
178 if ( ! evt.wasClean ){
178 if ( ! evt.wasClean ){
179 that._websocket_closed(ws_host_url, false);
179 that._websocket_closed(ws_host_url, false);
180 }
180 }
181 };
181 };
182 var channels = [this.shell_channel, this.iopub_channel, this.stdin_channel];
182 var channels = [this.shell_channel, this.iopub_channel, this.stdin_channel];
183 for (var i=0; i < channels.length; i++) {
183 for (var i=0; i < channels.length; i++) {
184 channels[i].onopen = $.proxy(this._ws_opened, this);
184 channels[i].onopen = $.proxy(this._ws_opened, this);
185 channels[i].onclose = ws_closed_early;
185 channels[i].onclose = ws_closed_early;
186 }
186 }
187 // switch from early-close to late-close message after 1s
187 // switch from early-close to late-close message after 1s
188 setTimeout(function() {
188 setTimeout(function() {
189 for (var i=0; i < channels.length; i++) {
189 for (var i=0; i < channels.length; i++) {
190 if (channels[i] !== null) {
190 if (channels[i] !== null) {
191 channels[i].onclose = ws_closed_late;
191 channels[i].onclose = ws_closed_late;
192 }
192 }
193 }
193 }
194 }, 1000);
194 }, 1000);
195 this.shell_channel.onmessage = $.proxy(this._handle_shell_reply, this);
195 this.shell_channel.onmessage = $.proxy(this._handle_shell_reply, this);
196 this.iopub_channel.onmessage = $.proxy(this._handle_iopub_message, this);
196 this.iopub_channel.onmessage = $.proxy(this._handle_iopub_message, this);
197 this.stdin_channel.onmessage = $.proxy(this._handle_input_request, this);
197 this.stdin_channel.onmessage = $.proxy(this._handle_input_request, this);
198 };
198 };
199
199
200 /**
200 /**
201 * Handle a websocket entering the open state:
201 * Handle a websocket entering the open state:
202 * sends session and cookie authentication info as the first message.
202 * sends session and cookie authentication info as the first message.
203 * Once all sockets are open, signal the Kernel.status_started event.
203 * Once all sockets are open, signal the Kernel.status_started event.
204 * @method _ws_opened
204 * @method _ws_opened
205 */
205 */
206 Kernel.prototype._ws_opened = function (evt) {
206 Kernel.prototype._ws_opened = function (evt) {
207 // send the session id so the Session object Python-side
207 // send the session id so the Session object Python-side
208 // has the same identity
208 // has the same identity
209 evt.target.send(this.session_id + ':' + document.cookie);
209 evt.target.send(this.session_id + ':' + document.cookie);
210
210
211 var channels = [this.shell_channel, this.iopub_channel, this.stdin_channel];
211 var channels = [this.shell_channel, this.iopub_channel, this.stdin_channel];
212 for (var i=0; i < channels.length; i++) {
212 for (var i=0; i < channels.length; i++) {
213 // if any channel is not ready, don't trigger event.
213 // if any channel is not ready, don't trigger event.
214 if ( !channels[i].readyState ) return;
214 if ( !channels[i].readyState ) return;
215 }
215 }
216 // all channels ready, trigger started event.
216 // all channels ready, trigger started event.
217 $([IPython.events]).trigger('status_started.Kernel', {kernel: this});
217 $([IPython.events]).trigger('status_started.Kernel', {kernel: this});
218 };
218 };
219
219
220 /**
220 /**
221 * Stop the websocket channels.
221 * Stop the websocket channels.
222 * @method stop_channels
222 * @method stop_channels
223 */
223 */
224 Kernel.prototype.stop_channels = function () {
224 Kernel.prototype.stop_channels = function () {
225 var channels = [this.shell_channel, this.iopub_channel, this.stdin_channel];
225 var channels = [this.shell_channel, this.iopub_channel, this.stdin_channel];
226 for (var i=0; i < channels.length; i++) {
226 for (var i=0; i < channels.length; i++) {
227 if ( channels[i] !== null ) {
227 if ( channels[i] !== null ) {
228 channels[i].onclose = null;
228 channels[i].onclose = null;
229 channels[i].close();
229 channels[i].close();
230 }
230 }
231 }
231 }
232 this.shell_channel = this.iopub_channel = this.stdin_channel = null;
232 this.shell_channel = this.iopub_channel = this.stdin_channel = null;
233 };
233 };
234
234
235 // Main public methods.
235 // Main public methods.
236
236
237 // send a message on the Kernel's shell channel
237 // send a message on the Kernel's shell channel
238 Kernel.prototype.send_shell_message = function (msg_type, content, callbacks, metadata) {
238 Kernel.prototype.send_shell_message = function (msg_type, content, callbacks, metadata) {
239 var msg = this._get_msg(msg_type, content, metadata);
239 var msg = this._get_msg(msg_type, content, metadata);
240 this.shell_channel.send(JSON.stringify(msg));
240 this.shell_channel.send(JSON.stringify(msg));
241 this.set_callbacks_for_msg(msg.header.msg_id, callbacks);
241 this.set_callbacks_for_msg(msg.header.msg_id, callbacks);
242 return msg.header.msg_id;
242 return msg.header.msg_id;
243 };
243 };
244
244
245 /**
245 /**
246 * Get kernel info
246 * Get kernel info
247 *
247 *
248 * @param callback {function}
248 * @param callback {function}
249 * @method kernel_info
249 * @method kernel_info
250 *
250 *
251 * When calling this method, pass a callback function that expects one argument.
251 * When calling this method, pass a callback function that expects one argument.
252 * The callback will be passed the complete `kernel_info_reply` message documented
252 * The callback will be passed the complete `kernel_info_reply` message documented
253 * [here](http://ipython.org/ipython-doc/dev/development/messaging.html#kernel-info)
253 * [here](http://ipython.org/ipython-doc/dev/development/messaging.html#kernel-info)
254 */
254 */
255 Kernel.prototype.kernel_info = function (callback) {
255 Kernel.prototype.kernel_info = function (callback) {
256 var callbacks;
256 var callbacks;
257 if (callback) {
257 if (callback) {
258 callbacks = { shell : { reply : callback } };
258 callbacks = { shell : { reply : callback } };
259 }
259 }
260 return this.send_shell_message("kernel_info_request", {}, callbacks);
260 return this.send_shell_message("kernel_info_request", {}, callbacks);
261 };
261 };
262
262
263 /**
263 /**
264 * Get info on an object
264 * Get info on an object
265 *
265 *
266 * @param objname {string}
266 * @param objname {string}
267 * @param callback {function}
267 * @param callback {function}
268 * @method object_info
268 * @method object_info
269 *
269 *
270 * When calling this method, pass a callback function that expects one argument.
270 * When calling this method, pass a callback function that expects one argument.
271 * The callback will be passed the complete `object_info_reply` message documented
271 * The callback will be passed the complete `object_info_reply` message documented
272 * [here](http://ipython.org/ipython-doc/dev/development/messaging.html#object-information)
272 * [here](http://ipython.org/ipython-doc/dev/development/messaging.html#object-information)
273 */
273 */
274 Kernel.prototype.object_info = function (objname, callback) {
274 Kernel.prototype.object_info = function (objname, callback) {
275 var callbacks;
275 var callbacks;
276 if (callback) {
276 if (callback) {
277 callbacks = { shell : { reply : callback } };
277 callbacks = { shell : { reply : callback } };
278 }
278 }
279
279
280 if (typeof objname !== 'undefined' && objname !== null) {
280 if (typeof objname !== 'undefined' && objname !== null) {
281 var content = {
281 var content = {
282 oname : objname.toString(),
282 oname : objname.toString(),
283 detail_level : 0,
283 detail_level : 0,
284 };
284 };
285 return this.send_shell_message("object_info_request", content, callbacks);
285 return this.send_shell_message("object_info_request", content, callbacks);
286 }
286 }
287 return;
287 return;
288 };
288 };
289
289
290 /**
290 /**
291 * Execute given code into kernel, and pass result to callback.
291 * Execute given code into kernel, and pass result to callback.
292 *
292 *
293 * @async
293 * @async
294 * @method execute
294 * @method execute
295 * @param {string} code
295 * @param {string} code
296 * @param [callbacks] {Object} With the following keys (all optional)
296 * @param [callbacks] {Object} With the following keys (all optional)
297 * @param callbacks.shell.reply {function}
297 * @param callbacks.shell.reply {function}
298 * @param callbacks.shell.payload.[payload_name] {function}
298 * @param callbacks.shell.payload.[payload_name] {function}
299 * @param callbacks.iopub.output {function}
299 * @param callbacks.iopub.output {function}
300 * @param callbacks.iopub.clear_output {function}
300 * @param callbacks.iopub.clear_output {function}
301 * @param callbacks.input {function}
301 * @param callbacks.input {function}
302 * @param {object} [options]
302 * @param {object} [options]
303 * @param [options.silent=false] {Boolean}
303 * @param [options.silent=false] {Boolean}
304 * @param [options.user_expressions=empty_dict] {Dict}
304 * @param [options.user_expressions=empty_dict] {Dict}
305 * @param [options.user_variables=empty_list] {List of Strings}
306 * @param [options.allow_stdin=false] {Boolean} true|false
305 * @param [options.allow_stdin=false] {Boolean} true|false
307 *
306 *
308 * @example
307 * @example
309 *
308 *
310 * The options object should contain the options for the execute call. Its default
309 * The options object should contain the options for the execute call. Its default
311 * values are:
310 * values are:
312 *
311 *
313 * options = {
312 * options = {
314 * silent : true,
313 * silent : true,
315 * user_variables : [],
316 * user_expressions : {},
314 * user_expressions : {},
317 * allow_stdin : false
315 * allow_stdin : false
318 * }
316 * }
319 *
317 *
320 * When calling this method pass a callbacks structure of the form:
318 * When calling this method pass a callbacks structure of the form:
321 *
319 *
322 * callbacks = {
320 * callbacks = {
323 * shell : {
321 * shell : {
324 * reply : execute_reply_callback,
322 * reply : execute_reply_callback,
325 * payload : {
323 * payload : {
326 * set_next_input : set_next_input_callback,
324 * set_next_input : set_next_input_callback,
327 * }
325 * }
328 * },
326 * },
329 * iopub : {
327 * iopub : {
330 * output : output_callback,
328 * output : output_callback,
331 * clear_output : clear_output_callback,
329 * clear_output : clear_output_callback,
332 * },
330 * },
333 * input : raw_input_callback
331 * input : raw_input_callback
334 * }
332 * }
335 *
333 *
336 * Each callback will be passed the entire message as a single argument.
334 * Each callback will be passed the entire message as a single argument.
337 * Payload handlers will be passed the corresponding payload and the execute_reply message.
335 * Payload handlers will be passed the corresponding payload and the execute_reply message.
338 */
336 */
339 Kernel.prototype.execute = function (code, callbacks, options) {
337 Kernel.prototype.execute = function (code, callbacks, options) {
340
338
341 var content = {
339 var content = {
342 code : code,
340 code : code,
343 silent : true,
341 silent : true,
344 store_history : false,
342 store_history : false,
345 user_variables : [],
346 user_expressions : {},
343 user_expressions : {},
347 allow_stdin : false
344 allow_stdin : false
348 };
345 };
349 callbacks = callbacks || {};
346 callbacks = callbacks || {};
350 if (callbacks.input !== undefined) {
347 if (callbacks.input !== undefined) {
351 content.allow_stdin = true;
348 content.allow_stdin = true;
352 }
349 }
353 $.extend(true, content, options);
350 $.extend(true, content, options);
354 $([IPython.events]).trigger('execution_request.Kernel', {kernel: this, content:content});
351 $([IPython.events]).trigger('execution_request.Kernel', {kernel: this, content:content});
355 return this.send_shell_message("execute_request", content, callbacks);
352 return this.send_shell_message("execute_request", content, callbacks);
356 };
353 };
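
As a rough, protocol-level sketch (Python, not part of kernel.js; the code string and expression below are made-up examples), the content of an execute_request after this change carries user_expressions but no user_variables:

# Hypothetical illustration of an execute_request 'content' payload
# once user_variables has been removed from the message spec.
content = {
    "code": "a = 1 + 1",                       # made-up example code
    "silent": False,
    "store_history": True,
    "user_expressions": {"doubled": "a * 2"},  # replaces the removed user_variables list
    "allow_stdin": False,
}
assert "user_variables" not in content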
357
354
358 /**
355 /**
359 * When calling this method, pass a function to be called with the `complete_reply` message
356 * When calling this method, pass a function to be called with the `complete_reply` message
360 * as its only argument when it arrives.
357 * as its only argument when it arrives.
361 *
358 *
362 * `complete_reply` is documented
359 * `complete_reply` is documented
363 * [here](http://ipython.org/ipython-doc/dev/development/messaging.html#complete)
360 * [here](http://ipython.org/ipython-doc/dev/development/messaging.html#complete)
364 *
361 *
365 * @method complete
362 * @method complete
366 * @param line {string}
363 * @param line {string}
367 * @param cursor_pos {integer}
364 * @param cursor_pos {integer}
368 * @param callback {function}
365 * @param callback {function}
369 *
366 *
370 */
367 */
371 Kernel.prototype.complete = function (line, cursor_pos, callback) {
368 Kernel.prototype.complete = function (line, cursor_pos, callback) {
372 var callbacks;
369 var callbacks;
373 if (callback) {
370 if (callback) {
374 callbacks = { shell : { reply : callback } };
371 callbacks = { shell : { reply : callback } };
375 }
372 }
376 var content = {
373 var content = {
377 text : '',
374 text : '',
378 line : line,
375 line : line,
379 block : null,
376 block : null,
380 cursor_pos : cursor_pos
377 cursor_pos : cursor_pos
381 };
378 };
382 return this.send_shell_message("complete_request", content, callbacks);
379 return this.send_shell_message("complete_request", content, callbacks);
383 };
380 };
384
381
385
382
386 Kernel.prototype.interrupt = function () {
383 Kernel.prototype.interrupt = function () {
387 if (this.running) {
384 if (this.running) {
388 $([IPython.events]).trigger('status_interrupting.Kernel', {kernel: this});
385 $([IPython.events]).trigger('status_interrupting.Kernel', {kernel: this});
389 this.post(utils.url_join_encode(this.kernel_url, "interrupt"));
386 this.post(utils.url_join_encode(this.kernel_url, "interrupt"));
390 }
387 }
391 };
388 };
392
389
393
390
394 Kernel.prototype.kill = function () {
391 Kernel.prototype.kill = function () {
395 if (this.running) {
392 if (this.running) {
396 this.running = false;
393 this.running = false;
397 var settings = {
394 var settings = {
398 cache : false,
395 cache : false,
399 type : "DELETE",
396 type : "DELETE",
400 error : utils.log_ajax_error,
397 error : utils.log_ajax_error,
401 };
398 };
402 $.ajax(utils.url_join_encode(this.kernel_url), settings);
399 $.ajax(utils.url_join_encode(this.kernel_url), settings);
403 }
400 }
404 };
401 };
405
402
406 Kernel.prototype.send_input_reply = function (input) {
403 Kernel.prototype.send_input_reply = function (input) {
407 var content = {
404 var content = {
408 value : input,
405 value : input,
409 };
406 };
410 $([IPython.events]).trigger('input_reply.Kernel', {kernel: this, content:content});
407 $([IPython.events]).trigger('input_reply.Kernel', {kernel: this, content:content});
411 var msg = this._get_msg("input_reply", content);
408 var msg = this._get_msg("input_reply", content);
412 this.stdin_channel.send(JSON.stringify(msg));
409 this.stdin_channel.send(JSON.stringify(msg));
413 return msg.header.msg_id;
410 return msg.header.msg_id;
414 };
411 };
415
412
416
413
417 // Reply handlers
414 // Reply handlers
418
415
419 Kernel.prototype.register_iopub_handler = function (msg_type, callback) {
416 Kernel.prototype.register_iopub_handler = function (msg_type, callback) {
420 this._iopub_handlers[msg_type] = callback;
417 this._iopub_handlers[msg_type] = callback;
421 };
418 };
422
419
423 Kernel.prototype.get_iopub_handler = function (msg_type) {
420 Kernel.prototype.get_iopub_handler = function (msg_type) {
424 // get iopub handler for a specific message type
421 // get iopub handler for a specific message type
425 return this._iopub_handlers[msg_type];
422 return this._iopub_handlers[msg_type];
426 };
423 };
427
424
428
425
429 Kernel.prototype.get_callbacks_for_msg = function (msg_id) {
426 Kernel.prototype.get_callbacks_for_msg = function (msg_id) {
430 // get callbacks for a specific message
427 // get callbacks for a specific message
431 if (msg_id == this.last_msg_id) {
428 if (msg_id == this.last_msg_id) {
432 return this.last_msg_callbacks;
429 return this.last_msg_callbacks;
433 } else {
430 } else {
434 return this._msg_callbacks[msg_id];
431 return this._msg_callbacks[msg_id];
435 }
432 }
436 };
433 };
437
434
438
435
439 Kernel.prototype.clear_callbacks_for_msg = function (msg_id) {
436 Kernel.prototype.clear_callbacks_for_msg = function (msg_id) {
440 if (this._msg_callbacks[msg_id] !== undefined ) {
437 if (this._msg_callbacks[msg_id] !== undefined ) {
441 delete this._msg_callbacks[msg_id];
438 delete this._msg_callbacks[msg_id];
442 }
439 }
443 };
440 };
444
441
445 Kernel.prototype._finish_shell = function (msg_id) {
442 Kernel.prototype._finish_shell = function (msg_id) {
446 var callbacks = this._msg_callbacks[msg_id];
443 var callbacks = this._msg_callbacks[msg_id];
447 if (callbacks !== undefined) {
444 if (callbacks !== undefined) {
448 callbacks.shell_done = true;
445 callbacks.shell_done = true;
449 if (callbacks.iopub_done) {
446 if (callbacks.iopub_done) {
450 this.clear_callbacks_for_msg(msg_id);
447 this.clear_callbacks_for_msg(msg_id);
451 }
448 }
452 }
449 }
453 };
450 };
454
451
455 Kernel.prototype._finish_iopub = function (msg_id) {
452 Kernel.prototype._finish_iopub = function (msg_id) {
456 var callbacks = this._msg_callbacks[msg_id];
453 var callbacks = this._msg_callbacks[msg_id];
457 if (callbacks !== undefined) {
454 if (callbacks !== undefined) {
458 callbacks.iopub_done = true;
455 callbacks.iopub_done = true;
459 if (callbacks.shell_done) {
456 if (callbacks.shell_done) {
460 this.clear_callbacks_for_msg(msg_id);
457 this.clear_callbacks_for_msg(msg_id);
461 }
458 }
462 }
459 }
463 };
460 };
464
461
465 /* Set callbacks for a particular message.
462 /* Set callbacks for a particular message.
466 * Callbacks should be a struct of the following form:
463 * Callbacks should be a struct of the following form:
467 * shell : {
464 * shell : {
468 *
465 *
469 * }
466 * }
470
467
471 */
468 */
472 Kernel.prototype.set_callbacks_for_msg = function (msg_id, callbacks) {
469 Kernel.prototype.set_callbacks_for_msg = function (msg_id, callbacks) {
473 this.last_msg_id = msg_id;
470 this.last_msg_id = msg_id;
474 if (callbacks) {
471 if (callbacks) {
475 // shallow-copy mapping, because we will modify it at the top level
472 // shallow-copy mapping, because we will modify it at the top level
476 var cbcopy = this._msg_callbacks[msg_id] = this.last_msg_callbacks = {};
473 var cbcopy = this._msg_callbacks[msg_id] = this.last_msg_callbacks = {};
477 cbcopy.shell = callbacks.shell;
474 cbcopy.shell = callbacks.shell;
478 cbcopy.iopub = callbacks.iopub;
475 cbcopy.iopub = callbacks.iopub;
479 cbcopy.input = callbacks.input;
476 cbcopy.input = callbacks.input;
480 cbcopy.shell_done = (!callbacks.shell);
477 cbcopy.shell_done = (!callbacks.shell);
481 cbcopy.iopub_done = (!callbacks.iopub);
478 cbcopy.iopub_done = (!callbacks.iopub);
482 } else {
479 } else {
483 this.last_msg_callbacks = {};
480 this.last_msg_callbacks = {};
484 }
481 }
485 };
482 };
486
483
487
484
488 Kernel.prototype._handle_shell_reply = function (e) {
485 Kernel.prototype._handle_shell_reply = function (e) {
489 var reply = $.parseJSON(e.data);
486 var reply = $.parseJSON(e.data);
490 $([IPython.events]).trigger('shell_reply.Kernel', {kernel: this, reply:reply});
487 $([IPython.events]).trigger('shell_reply.Kernel', {kernel: this, reply:reply});
491 var content = reply.content;
488 var content = reply.content;
492 var metadata = reply.metadata;
489 var metadata = reply.metadata;
493 var parent_id = reply.parent_header.msg_id;
490 var parent_id = reply.parent_header.msg_id;
494 var callbacks = this.get_callbacks_for_msg(parent_id);
491 var callbacks = this.get_callbacks_for_msg(parent_id);
495 if (!callbacks || !callbacks.shell) {
492 if (!callbacks || !callbacks.shell) {
496 return;
493 return;
497 }
494 }
498 var shell_callbacks = callbacks.shell;
495 var shell_callbacks = callbacks.shell;
499
496
500 // signal that shell callbacks are done
497 // signal that shell callbacks are done
501 this._finish_shell(parent_id);
498 this._finish_shell(parent_id);
502
499
503 if (shell_callbacks.reply !== undefined) {
500 if (shell_callbacks.reply !== undefined) {
504 shell_callbacks.reply(reply);
501 shell_callbacks.reply(reply);
505 }
502 }
506 if (content.payload && shell_callbacks.payload) {
503 if (content.payload && shell_callbacks.payload) {
507 this._handle_payloads(content.payload, shell_callbacks.payload, reply);
504 this._handle_payloads(content.payload, shell_callbacks.payload, reply);
508 }
505 }
509 };
506 };
510
507
511
508
512 Kernel.prototype._handle_payloads = function (payloads, payload_callbacks, msg) {
509 Kernel.prototype._handle_payloads = function (payloads, payload_callbacks, msg) {
513 var l = payloads.length;
510 var l = payloads.length;
514 // Payloads are handled via per-source callbacks so that the Kernel
511 // Payloads are handled via per-source callbacks so that the Kernel
515 // does not have to depend on the Notebook or Pager classes.
512 // does not have to depend on the Notebook or Pager classes.
516 for (var i=0; i<l; i++) {
513 for (var i=0; i<l; i++) {
517 var payload = payloads[i];
514 var payload = payloads[i];
518 var callback = payload_callbacks[payload.source];
515 var callback = payload_callbacks[payload.source];
519 if (callback) {
516 if (callback) {
520 callback(payload, msg);
517 callback(payload, msg);
521 }
518 }
522 }
519 }
523 };
520 };
524
521
525 Kernel.prototype._handle_status_message = function (msg) {
522 Kernel.prototype._handle_status_message = function (msg) {
526 var execution_state = msg.content.execution_state;
523 var execution_state = msg.content.execution_state;
527 var parent_id = msg.parent_header.msg_id;
524 var parent_id = msg.parent_header.msg_id;
528
525
529 // dispatch status msg callbacks, if any
526 // dispatch status msg callbacks, if any
530 var callbacks = this.get_callbacks_for_msg(parent_id);
527 var callbacks = this.get_callbacks_for_msg(parent_id);
531 if (callbacks && callbacks.iopub && callbacks.iopub.status) {
528 if (callbacks && callbacks.iopub && callbacks.iopub.status) {
532 try {
529 try {
533 callbacks.iopub.status(msg);
530 callbacks.iopub.status(msg);
534 } catch (e) {
531 } catch (e) {
535 console.log("Exception in status msg handler", e, e.stack);
532 console.log("Exception in status msg handler", e, e.stack);
536 }
533 }
537 }
534 }
538
535
539 if (execution_state === 'busy') {
536 if (execution_state === 'busy') {
540 $([IPython.events]).trigger('status_busy.Kernel', {kernel: this});
537 $([IPython.events]).trigger('status_busy.Kernel', {kernel: this});
541 } else if (execution_state === 'idle') {
538 } else if (execution_state === 'idle') {
542 // signal that iopub callbacks are (probably) done
539 // signal that iopub callbacks are (probably) done
543 // async output may still arrive,
540 // async output may still arrive,
544 // but only for the most recent request
541 // but only for the most recent request
545 this._finish_iopub(parent_id);
542 this._finish_iopub(parent_id);
546
543
547 // trigger status_idle event
544 // trigger status_idle event
548 $([IPython.events]).trigger('status_idle.Kernel', {kernel: this});
545 $([IPython.events]).trigger('status_idle.Kernel', {kernel: this});
549 } else if (execution_state === 'restarting') {
546 } else if (execution_state === 'restarting') {
550 // autorestarting is distinct from restarting,
547 // autorestarting is distinct from restarting,
551 // in that it means the kernel died and the server is restarting it.
548 // in that it means the kernel died and the server is restarting it.
552 // status_restarting sets the notification widget,
549 // status_restarting sets the notification widget,
553 // autorestart shows the more prominent dialog.
550 // autorestart shows the more prominent dialog.
554 $([IPython.events]).trigger('status_autorestarting.Kernel', {kernel: this});
551 $([IPython.events]).trigger('status_autorestarting.Kernel', {kernel: this});
555 $([IPython.events]).trigger('status_restarting.Kernel', {kernel: this});
552 $([IPython.events]).trigger('status_restarting.Kernel', {kernel: this});
556 } else if (execution_state === 'dead') {
553 } else if (execution_state === 'dead') {
557 this.stop_channels();
554 this.stop_channels();
558 $([IPython.events]).trigger('status_dead.Kernel', {kernel: this});
555 $([IPython.events]).trigger('status_dead.Kernel', {kernel: this});
559 }
556 }
560 };
557 };
561
558
562
559
563 // handle clear_output message
560 // handle clear_output message
564 Kernel.prototype._handle_clear_output = function (msg) {
561 Kernel.prototype._handle_clear_output = function (msg) {
565 var callbacks = this.get_callbacks_for_msg(msg.parent_header.msg_id);
562 var callbacks = this.get_callbacks_for_msg(msg.parent_header.msg_id);
566 if (!callbacks || !callbacks.iopub) {
563 if (!callbacks || !callbacks.iopub) {
567 return;
564 return;
568 }
565 }
569 var callback = callbacks.iopub.clear_output;
566 var callback = callbacks.iopub.clear_output;
570 if (callback) {
567 if (callback) {
571 callback(msg);
568 callback(msg);
572 }
569 }
573 };
570 };
574
571
575
572
576 // handle an output message (execute_result, display_data, etc.)
573 // handle an output message (execute_result, display_data, etc.)
577 Kernel.prototype._handle_output_message = function (msg) {
574 Kernel.prototype._handle_output_message = function (msg) {
578 var callbacks = this.get_callbacks_for_msg(msg.parent_header.msg_id);
575 var callbacks = this.get_callbacks_for_msg(msg.parent_header.msg_id);
579 if (!callbacks || !callbacks.iopub) {
576 if (!callbacks || !callbacks.iopub) {
580 return;
577 return;
581 }
578 }
582 var callback = callbacks.iopub.output;
579 var callback = callbacks.iopub.output;
583 if (callback) {
580 if (callback) {
584 callback(msg);
581 callback(msg);
585 }
582 }
586 };
583 };
587
584
588 // dispatch IOPub messages to respective handlers.
585 // dispatch IOPub messages to respective handlers.
589 // each message type should have a handler.
586 // each message type should have a handler.
590 Kernel.prototype._handle_iopub_message = function (e) {
587 Kernel.prototype._handle_iopub_message = function (e) {
591 var msg = $.parseJSON(e.data);
588 var msg = $.parseJSON(e.data);
592
589
593 var handler = this.get_iopub_handler(msg.header.msg_type);
590 var handler = this.get_iopub_handler(msg.header.msg_type);
594 if (handler !== undefined) {
591 if (handler !== undefined) {
595 handler(msg);
592 handler(msg);
596 }
593 }
597 };
594 };
598
595
599
596
600 Kernel.prototype._handle_input_request = function (e) {
597 Kernel.prototype._handle_input_request = function (e) {
601 var request = $.parseJSON(e.data);
598 var request = $.parseJSON(e.data);
602 var header = request.header;
599 var header = request.header;
603 var content = request.content;
600 var content = request.content;
604 var metadata = request.metadata;
601 var metadata = request.metadata;
605 var msg_type = header.msg_type;
602 var msg_type = header.msg_type;
606 if (msg_type !== 'input_request') {
603 if (msg_type !== 'input_request') {
607 console.log("Invalid input request!", request);
604 console.log("Invalid input request!", request);
608 return;
605 return;
609 }
606 }
610 var callbacks = this.get_callbacks_for_msg(request.parent_header.msg_id);
607 var callbacks = this.get_callbacks_for_msg(request.parent_header.msg_id);
611 if (callbacks) {
608 if (callbacks) {
612 if (callbacks.input) {
609 if (callbacks.input) {
613 callbacks.input(request);
610 callbacks.input(request);
614 }
611 }
615 }
612 }
616 };
613 };
617
614
618
615
619 IPython.Kernel = Kernel;
616 IPython.Kernel = Kernel;
620
617
621 return IPython;
618 return IPython;
622
619
623 }(IPython));
620 }(IPython));
624
621
@@ -1,637 +1,618 b''
1 """Base classes to manage a Client's interaction with a running kernel
1 """Base classes to manage a Client's interaction with a running kernel"""
2 """
3
2
4 #-----------------------------------------------------------------------------
3 # Copyright (c) IPython Development Team.
5 # Copyright (C) 2013 The IPython Development Team
4 # Distributed under the terms of the Modified BSD License.
6 #
7 # Distributed under the terms of the BSD License. The full license is in
8 # the file COPYING, distributed as part of this software.
9 #-----------------------------------------------------------------------------
10
11 #-----------------------------------------------------------------------------
12 # Imports
13 #-----------------------------------------------------------------------------
14
5
15 from __future__ import absolute_import
6 from __future__ import absolute_import
16
7
17 # Standard library imports
18 import atexit
8 import atexit
19 import errno
9 import errno
20 from threading import Thread
10 from threading import Thread
21 import time
11 import time
22
12
23 import zmq
13 import zmq
24 # import ZMQError in top-level namespace, to avoid ugly attribute-error messages
14 # import ZMQError in top-level namespace, to avoid ugly attribute-error messages
25 # during garbage collection of threads at exit:
15 # during garbage collection of threads at exit:
26 from zmq import ZMQError
16 from zmq import ZMQError
27 from zmq.eventloop import ioloop, zmqstream
17 from zmq.eventloop import ioloop, zmqstream
28
18
29 # Local imports
19 # Local imports
30 from .channelsabc import (
20 from .channelsabc import (
31 ShellChannelABC, IOPubChannelABC,
21 ShellChannelABC, IOPubChannelABC,
32 HBChannelABC, StdInChannelABC,
22 HBChannelABC, StdInChannelABC,
33 )
23 )
34 from IPython.utils.py3compat import string_types, iteritems
24 from IPython.utils.py3compat import string_types, iteritems
35
25
36 #-----------------------------------------------------------------------------
26 #-----------------------------------------------------------------------------
37 # Constants and exceptions
27 # Constants and exceptions
38 #-----------------------------------------------------------------------------
28 #-----------------------------------------------------------------------------
39
29
40 class InvalidPortNumber(Exception):
30 class InvalidPortNumber(Exception):
41 pass
31 pass
42
32
43 #-----------------------------------------------------------------------------
33 #-----------------------------------------------------------------------------
44 # Utility functions
34 # Utility functions
45 #-----------------------------------------------------------------------------
35 #-----------------------------------------------------------------------------
46
36
47 # some utilities to validate message structure, these might get moved elsewhere
37 # some utilities to validate message structure, these might get moved elsewhere
48 # if they prove to have more generic utility
38 # if they prove to have more generic utility
49
39
50 def validate_string_list(lst):
40 def validate_string_list(lst):
51 """Validate that the input is a list of strings.
41 """Validate that the input is a list of strings.
52
42
53 Raises ValueError if not."""
43 Raises ValueError if not."""
54 if not isinstance(lst, list):
44 if not isinstance(lst, list):
55 raise ValueError('input %r must be a list' % lst)
45 raise ValueError('input %r must be a list' % lst)
56 for x in lst:
46 for x in lst:
57 if not isinstance(x, string_types):
47 if not isinstance(x, string_types):
58 raise ValueError('element %r in list must be a string' % x)
48 raise ValueError('element %r in list must be a string' % x)
59
49
60
50
61 def validate_string_dict(dct):
51 def validate_string_dict(dct):
62 """Validate that the input is a dict with string keys and values.
52 """Validate that the input is a dict with string keys and values.
63
53
64 Raises ValueError if not."""
54 Raises ValueError if not."""
65 for k,v in iteritems(dct):
55 for k,v in iteritems(dct):
66 if not isinstance(k, string_types):
56 if not isinstance(k, string_types):
67 raise ValueError('key %r in dict must be a string' % k)
57 raise ValueError('key %r in dict must be a string' % k)
68 if not isinstance(v, string_types):
58 if not isinstance(v, string_types):
69 raise ValueError('value %r in dict must be a string' % v)
59 raise ValueError('value %r in dict must be a string' % v)
70
60
71
61
72 #-----------------------------------------------------------------------------
62 #-----------------------------------------------------------------------------
73 # ZMQ Socket Channel classes
63 # ZMQ Socket Channel classes
74 #-----------------------------------------------------------------------------
64 #-----------------------------------------------------------------------------
75
65
76 class ZMQSocketChannel(Thread):
66 class ZMQSocketChannel(Thread):
77 """The base class for the channels that use ZMQ sockets."""
67 """The base class for the channels that use ZMQ sockets."""
78 context = None
68 context = None
79 session = None
69 session = None
80 socket = None
70 socket = None
81 ioloop = None
71 ioloop = None
82 stream = None
72 stream = None
83 _address = None
73 _address = None
84 _exiting = False
74 _exiting = False
85 proxy_methods = []
75 proxy_methods = []
86
76
87 def __init__(self, context, session, address):
77 def __init__(self, context, session, address):
88 """Create a channel.
78 """Create a channel.
89
79
90 Parameters
80 Parameters
91 ----------
81 ----------
92 context : :class:`zmq.Context`
82 context : :class:`zmq.Context`
93 The ZMQ context to use.
83 The ZMQ context to use.
94 session : :class:`session.Session`
84 session : :class:`session.Session`
95 The session to use.
85 The session to use.
96 address : zmq url or (ip, port) tuple
86 address : zmq url or (ip, port) tuple
97 The address the kernel is listening on; an (ip, port) tuple is converted to a tcp:// url.
87 The address the kernel is listening on; an (ip, port) tuple is converted to a tcp:// url.
98 """
88 """
99 super(ZMQSocketChannel, self).__init__()
89 super(ZMQSocketChannel, self).__init__()
100 self.daemon = True
90 self.daemon = True
101
91
102 self.context = context
92 self.context = context
103 self.session = session
93 self.session = session
104 if isinstance(address, tuple):
94 if isinstance(address, tuple):
105 if address[1] == 0:
95 if address[1] == 0:
106 message = 'The port number for a channel cannot be 0.'
96 message = 'The port number for a channel cannot be 0.'
107 raise InvalidPortNumber(message)
97 raise InvalidPortNumber(message)
108 address = "tcp://%s:%i" % address
98 address = "tcp://%s:%i" % address
109 self._address = address
99 self._address = address
110 atexit.register(self._notice_exit)
100 atexit.register(self._notice_exit)
111
101
112 def _notice_exit(self):
102 def _notice_exit(self):
113 self._exiting = True
103 self._exiting = True
114
104
115 def _run_loop(self):
105 def _run_loop(self):
116 """Run my loop, ignoring EINTR events in the poller"""
106 """Run my loop, ignoring EINTR events in the poller"""
117 while True:
107 while True:
118 try:
108 try:
119 self.ioloop.start()
109 self.ioloop.start()
120 except ZMQError as e:
110 except ZMQError as e:
121 if e.errno == errno.EINTR:
111 if e.errno == errno.EINTR:
122 continue
112 continue
123 else:
113 else:
124 raise
114 raise
125 except Exception:
115 except Exception:
126 if self._exiting:
116 if self._exiting:
127 break
117 break
128 else:
118 else:
129 raise
119 raise
130 else:
120 else:
131 break
121 break
132
122
133 def stop(self):
123 def stop(self):
134 """Stop the channel's event loop and join its thread.
124 """Stop the channel's event loop and join its thread.
135
125
136 This calls :meth:`~threading.Thread.join` and returns when the thread
126 This calls :meth:`~threading.Thread.join` and returns when the thread
137 terminates. :class:`RuntimeError` will be raised if
127 terminates. :class:`RuntimeError` will be raised if
138 :meth:`~threading.Thread.start` is called again.
128 :meth:`~threading.Thread.start` is called again.
139 """
129 """
140 if self.ioloop is not None:
130 if self.ioloop is not None:
141 self.ioloop.stop()
131 self.ioloop.stop()
142 self.join()
132 self.join()
143 self.close()
133 self.close()
144
134
145 def close(self):
135 def close(self):
146 if self.ioloop is not None:
136 if self.ioloop is not None:
147 try:
137 try:
148 self.ioloop.close(all_fds=True)
138 self.ioloop.close(all_fds=True)
149 except Exception:
139 except Exception:
150 pass
140 pass
151 if self.socket is not None:
141 if self.socket is not None:
152 try:
142 try:
153 self.socket.close(linger=0)
143 self.socket.close(linger=0)
154 except Exception:
144 except Exception:
155 pass
145 pass
156 self.socket = None
146 self.socket = None
157
147
158 @property
148 @property
159 def address(self):
149 def address(self):
160 """Get the channel's address as a zmq url string.
150 """Get the channel's address as a zmq url string.
161
151
162 These URLs have the form: 'tcp://127.0.0.1:5555'.
152 These URLs have the form: 'tcp://127.0.0.1:5555'.
163 """
153 """
164 return self._address
154 return self._address
165
155
166 def _queue_send(self, msg):
156 def _queue_send(self, msg):
167 """Queue a message to be sent from the IOLoop's thread.
157 """Queue a message to be sent from the IOLoop's thread.
168
158
169 Parameters
159 Parameters
170 ----------
160 ----------
171 msg : message to send
161 msg : message to send
172
162
173 This is threadsafe, as it uses IOLoop.add_callback to give the loop's
163 This is threadsafe, as it uses IOLoop.add_callback to give the loop's
174 thread control of the action.
164 thread control of the action.
175 """
165 """
176 def thread_send():
166 def thread_send():
177 self.session.send(self.stream, msg)
167 self.session.send(self.stream, msg)
178 self.ioloop.add_callback(thread_send)
168 self.ioloop.add_callback(thread_send)
179
169
180 def _handle_recv(self, msg):
170 def _handle_recv(self, msg):
181 """Callback for stream.on_recv.
171 """Callback for stream.on_recv.
182
172
183 Unpacks message, and calls handlers with it.
173 Unpacks message, and calls handlers with it.
184 """
174 """
185 ident,smsg = self.session.feed_identities(msg)
175 ident,smsg = self.session.feed_identities(msg)
186 self.call_handlers(self.session.unserialize(smsg))
176 self.call_handlers(self.session.unserialize(smsg))
187
177
188
178
189
179
190 class ShellChannel(ZMQSocketChannel):
180 class ShellChannel(ZMQSocketChannel):
191 """The shell channel for issuing request/replies to the kernel."""
181 """The shell channel for issuing request/replies to the kernel."""
192
182
193 command_queue = None
183 command_queue = None
194 # flag for whether execute requests should be allowed to call raw_input:
184 # flag for whether execute requests should be allowed to call raw_input:
195 allow_stdin = True
185 allow_stdin = True
196 proxy_methods = [
186 proxy_methods = [
197 'execute',
187 'execute',
198 'complete',
188 'complete',
199 'object_info',
189 'object_info',
200 'history',
190 'history',
201 'kernel_info',
191 'kernel_info',
202 'shutdown',
192 'shutdown',
203 ]
193 ]
204
194
205 def __init__(self, context, session, address):
195 def __init__(self, context, session, address):
206 super(ShellChannel, self).__init__(context, session, address)
196 super(ShellChannel, self).__init__(context, session, address)
207 self.ioloop = ioloop.IOLoop()
197 self.ioloop = ioloop.IOLoop()
208
198
209 def run(self):
199 def run(self):
210 """The thread's main activity. Call start() instead."""
200 """The thread's main activity. Call start() instead."""
211 self.socket = self.context.socket(zmq.DEALER)
201 self.socket = self.context.socket(zmq.DEALER)
212 self.socket.linger = 1000
202 self.socket.linger = 1000
213 self.socket.setsockopt(zmq.IDENTITY, self.session.bsession)
203 self.socket.setsockopt(zmq.IDENTITY, self.session.bsession)
214 self.socket.connect(self.address)
204 self.socket.connect(self.address)
215 self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
205 self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
216 self.stream.on_recv(self._handle_recv)
206 self.stream.on_recv(self._handle_recv)
217 self._run_loop()
207 self._run_loop()
218
208
219 def call_handlers(self, msg):
209 def call_handlers(self, msg):
220 """This method is called in the ioloop thread when a message arrives.
210 """This method is called in the ioloop thread when a message arrives.
221
211
222 Subclasses should override this method to handle incoming messages.
212 Subclasses should override this method to handle incoming messages.
223 It is important to remember that this method is called in the ioloop thread,
213 It is important to remember that this method is called in the ioloop thread,
224 so some logic must be done to ensure that the application level
214 so some logic must be done to ensure that the application level
225 handlers are called in the application thread.
215 handlers are called in the application thread.
226 """
216 """
227 raise NotImplementedError('call_handlers must be defined in a subclass.')
217 raise NotImplementedError('call_handlers must be defined in a subclass.')
228
218
229 def execute(self, code, silent=False, store_history=True,
219 def execute(self, code, silent=False, store_history=True,
230 user_variables=None, user_expressions=None, allow_stdin=None):
220 user_expressions=None, allow_stdin=None):
231 """Execute code in the kernel.
221 """Execute code in the kernel.
232
222
233 Parameters
223 Parameters
234 ----------
224 ----------
235 code : str
225 code : str
236 A string of Python code.
226 A string of Python code.
237
227
238 silent : bool, optional (default False)
228 silent : bool, optional (default False)
239 If set, the kernel will execute the code as quietly as possible, and
229 If set, the kernel will execute the code as quietly as possible, and
240 will force store_history to be False.
230 will force store_history to be False.
241
231
242 store_history : bool, optional (default True)
232 store_history : bool, optional (default True)
243 If set, the kernel will store command history. This is forced
233 If set, the kernel will store command history. This is forced
244 to be False if silent is True.
234 to be False if silent is True.
245
235
246 user_variables : list, optional
247 A list of variable names to pull from the user's namespace. They
248 will come back as a dict with these names as keys and their
249 :func:`repr` as values.
250
251 user_expressions : dict, optional
236 user_expressions : dict, optional
252 A dict mapping names to expressions to be evaluated in the user's
237 A dict mapping names to expressions to be evaluated in the user's
253 dict. The expression values are returned as strings formatted using
238 dict. The expression values are returned as strings formatted using
254 :func:`repr`.
239 :func:`repr`.
255
240
256 allow_stdin : bool, optional (default self.allow_stdin)
241 allow_stdin : bool, optional (default self.allow_stdin)
257 Flag for whether the kernel can send stdin requests to frontends.
242 Flag for whether the kernel can send stdin requests to frontends.
258
243
259 Some frontends (e.g. the Notebook) do not support stdin requests.
244 Some frontends (e.g. the Notebook) do not support stdin requests.
260 If raw_input is called from code executed from such a frontend, a
245 If raw_input is called from code executed from such a frontend, a
261 StdinNotImplementedError will be raised.
246 StdinNotImplementedError will be raised.
262
247
263 Returns
248 Returns
264 -------
249 -------
265 The msg_id of the message sent.
250 The msg_id of the message sent.
266 """
251 """
267 if user_variables is None:
268 user_variables = []
269 if user_expressions is None:
252 if user_expressions is None:
270 user_expressions = {}
253 user_expressions = {}
271 if allow_stdin is None:
254 if allow_stdin is None:
272 allow_stdin = self.allow_stdin
255 allow_stdin = self.allow_stdin
273
256
274
257
275 # Don't waste network traffic if inputs are invalid
258 # Don't waste network traffic if inputs are invalid
276 if not isinstance(code, string_types):
259 if not isinstance(code, string_types):
277 raise ValueError('code %r must be a string' % code)
260 raise ValueError('code %r must be a string' % code)
278 validate_string_list(user_variables)
279 validate_string_dict(user_expressions)
261 validate_string_dict(user_expressions)
280
262
281 # Create class for content/msg creation. Related to, but possibly
263 # Create class for content/msg creation. Related to, but possibly
282 # not in Session.
264 # not in Session.
283 content = dict(code=code, silent=silent, store_history=store_history,
265 content = dict(code=code, silent=silent, store_history=store_history,
284 user_variables=user_variables,
285 user_expressions=user_expressions,
266 user_expressions=user_expressions,
286 allow_stdin=allow_stdin,
267 allow_stdin=allow_stdin,
287 )
268 )
288 msg = self.session.msg('execute_request', content)
269 msg = self.session.msg('execute_request', content)
289 self._queue_send(msg)
270 self._queue_send(msg)
290 return msg['header']['msg_id']
271 return msg['header']['msg_id']
291
272
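A minimal usage sketch of the signature above (assuming `shell` is an already-started, connected ShellChannel subclass; the code and expression strings are made up):

# Illustrative only: `shell` is an assumed, running ShellChannel wired to a kernel.
msg_id = shell.execute(
    "x = sum(range(10))",
    silent=False,
    store_history=True,
    user_expressions={"x_squared": "x ** 2"},  # user_variables is no longer a parameter
)
print(msg_id)  # msg_id of the queued execute_request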
292 def complete(self, text, line, cursor_pos, block=None):
273 def complete(self, text, line, cursor_pos, block=None):
293 """Tab complete text in the kernel's namespace.
274 """Tab complete text in the kernel's namespace.
294
275
295 Parameters
276 Parameters
296 ----------
277 ----------
297 text : str
278 text : str
298 The text to complete.
279 The text to complete.
299 line : str
280 line : str
300 The full line of text that is the surrounding context for the
281 The full line of text that is the surrounding context for the
301 text to complete.
282 text to complete.
302 cursor_pos : int
283 cursor_pos : int
303 The position of the cursor in the line where the completion was
284 The position of the cursor in the line where the completion was
304 requested.
285 requested.
305 block : str, optional
286 block : str, optional
306 The full block of code in which the completion is being requested.
287 The full block of code in which the completion is being requested.
307
288
308 Returns
289 Returns
309 -------
290 -------
310 The msg_id of the message sent.
291 The msg_id of the message sent.
311 """
292 """
312 content = dict(text=text, line=line, block=block, cursor_pos=cursor_pos)
293 content = dict(text=text, line=line, block=block, cursor_pos=cursor_pos)
313 msg = self.session.msg('complete_request', content)
294 msg = self.session.msg('complete_request', content)
314 self._queue_send(msg)
295 self._queue_send(msg)
315 return msg['header']['msg_id']
296 return msg['header']['msg_id']
316
297
317 def object_info(self, oname, detail_level=0):
298 def object_info(self, oname, detail_level=0):
318 """Get metadata information about an object in the kernel's namespace.
299 """Get metadata information about an object in the kernel's namespace.
319
300
320 Parameters
301 Parameters
321 ----------
302 ----------
322 oname : str
303 oname : str
323 A string specifying the object name.
304 A string specifying the object name.
324 detail_level : int, optional
305 detail_level : int, optional
325 The level of detail for the introspection (0-2)
306 The level of detail for the introspection (0-2)
326
307
327 Returns
308 Returns
328 -------
309 -------
329 The msg_id of the message sent.
310 The msg_id of the message sent.
330 """
311 """
331 content = dict(oname=oname, detail_level=detail_level)
312 content = dict(oname=oname, detail_level=detail_level)
332 msg = self.session.msg('object_info_request', content)
313 msg = self.session.msg('object_info_request', content)
333 self._queue_send(msg)
314 self._queue_send(msg)
334 return msg['header']['msg_id']
315 return msg['header']['msg_id']
335
316
336 def history(self, raw=True, output=False, hist_access_type='range', **kwargs):
317 def history(self, raw=True, output=False, hist_access_type='range', **kwargs):
337 """Get entries from the kernel's history list.
318 """Get entries from the kernel's history list.
338
319
339 Parameters
320 Parameters
340 ----------
321 ----------
341 raw : bool
322 raw : bool
342 If True, return the raw input.
323 If True, return the raw input.
343 output : bool
324 output : bool
344 If True, then return the output as well.
325 If True, then return the output as well.
345 hist_access_type : str
326 hist_access_type : str
346 'range' (fill in session, start and stop params), 'tail' (fill in n)
327 'range' (fill in session, start and stop params), 'tail' (fill in n)
347 or 'search' (fill in pattern param).
328 or 'search' (fill in pattern param).
348
329
349 session : int
330 session : int
350 For a range request, the session from which to get lines. Session
331 For a range request, the session from which to get lines. Session
351 numbers are positive integers; negative ones count back from the
332 numbers are positive integers; negative ones count back from the
352 current session.
333 current session.
353 start : int
334 start : int
354 The first line number of a history range.
335 The first line number of a history range.
355 stop : int
336 stop : int
356 The final (excluded) line number of a history range.
337 The final (excluded) line number of a history range.
357
338
358 n : int
339 n : int
359 The number of lines of history to get for a tail request.
340 The number of lines of history to get for a tail request.
360
341
361 pattern : str
342 pattern : str
362 The glob-syntax pattern for a search request.
343 The glob-syntax pattern for a search request.
363
344
364 Returns
345 Returns
365 -------
346 -------
366 The msg_id of the message sent.
347 The msg_id of the message sent.
367 """
348 """
368 content = dict(raw=raw, output=output, hist_access_type=hist_access_type,
349 content = dict(raw=raw, output=output, hist_access_type=hist_access_type,
369 **kwargs)
350 **kwargs)
370 msg = self.session.msg('history_request', content)
351 msg = self.session.msg('history_request', content)
371 self._queue_send(msg)
352 self._queue_send(msg)
372 return msg['header']['msg_id']
353 return msg['header']['msg_id']
373
354
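A hedged sketch of the three access types described above (again assuming an already-connected `shell` channel; the numbers and pattern are illustrative):

# Illustrative only: `shell` is an assumed, connected ShellChannel subclass.
shell.history(hist_access_type='range', session=0, start=1, stop=5)  # lines 1-4 of the current session
shell.history(hist_access_type='tail', n=10)                         # the last 10 inputs
shell.history(hist_access_type='search', pattern='import *')         # glob-style search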
374 def kernel_info(self):
355 def kernel_info(self):
375 """Request kernel info."""
356 """Request kernel info."""
376 msg = self.session.msg('kernel_info_request')
357 msg = self.session.msg('kernel_info_request')
377 self._queue_send(msg)
358 self._queue_send(msg)
378 return msg['header']['msg_id']
359 return msg['header']['msg_id']
379
360
380 def shutdown(self, restart=False):
361 def shutdown(self, restart=False):
381 """Request an immediate kernel shutdown.
362 """Request an immediate kernel shutdown.
382
363
383 Upon receipt of the (empty) reply, client code can safely assume that
364 Upon receipt of the (empty) reply, client code can safely assume that
384 the kernel has shut down and it's safe to forcefully terminate it if
365 the kernel has shut down and it's safe to forcefully terminate it if
385 it's still alive.
366 it's still alive.
386
367
387 The kernel will send the reply via a function registered with Python's
368 The kernel will send the reply via a function registered with Python's
388 atexit module, ensuring it's truly done as the kernel is done with all
369 atexit module, ensuring it's truly done as the kernel is done with all
389 normal operation.
370 normal operation.
390 """
371 """
391 # Send quit message to kernel. Once we implement kernel-side setattr,
372 # Send quit message to kernel. Once we implement kernel-side setattr,
392 # this should probably be done that way, but for now this will do.
373 # this should probably be done that way, but for now this will do.
393 msg = self.session.msg('shutdown_request', {'restart':restart})
374 msg = self.session.msg('shutdown_request', {'restart':restart})
394 self._queue_send(msg)
375 self._queue_send(msg)
395 return msg['header']['msg_id']
376 return msg['header']['msg_id']
396
377
397
378
398
379
399 class IOPubChannel(ZMQSocketChannel):
380 class IOPubChannel(ZMQSocketChannel):
400 """The iopub channel which listens for messages that the kernel publishes.
381 """The iopub channel which listens for messages that the kernel publishes.
401
382
402 This channel is where all output is published to frontends.
383 This channel is where all output is published to frontends.
403 """
384 """
404
385
405 def __init__(self, context, session, address):
386 def __init__(self, context, session, address):
406 super(IOPubChannel, self).__init__(context, session, address)
387 super(IOPubChannel, self).__init__(context, session, address)
407 self.ioloop = ioloop.IOLoop()
388 self.ioloop = ioloop.IOLoop()
408
389
409 def run(self):
390 def run(self):
410 """The thread's main activity. Call start() instead."""
391 """The thread's main activity. Call start() instead."""
411 self.socket = self.context.socket(zmq.SUB)
392 self.socket = self.context.socket(zmq.SUB)
412 self.socket.linger = 1000
393 self.socket.linger = 1000
413 self.socket.setsockopt(zmq.SUBSCRIBE,b'')
394 self.socket.setsockopt(zmq.SUBSCRIBE,b'')
414 self.socket.setsockopt(zmq.IDENTITY, self.session.bsession)
395 self.socket.setsockopt(zmq.IDENTITY, self.session.bsession)
415 self.socket.connect(self.address)
396 self.socket.connect(self.address)
416 self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
397 self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
417 self.stream.on_recv(self._handle_recv)
398 self.stream.on_recv(self._handle_recv)
418 self._run_loop()
399 self._run_loop()
419
400
420 def call_handlers(self, msg):
401 def call_handlers(self, msg):
421 """This method is called in the ioloop thread when a message arrives.
402 """This method is called in the ioloop thread when a message arrives.
422
403
423 Subclasses should override this method to handle incoming messages.
404 Subclasses should override this method to handle incoming messages.
424 It is important to remember that this method is called in the ioloop thread,
405 It is important to remember that this method is called in the ioloop thread,
425 so some logic must be done to ensure that the application level
406 so some logic must be done to ensure that the application level
426 handlers are called in the application thread.
407 handlers are called in the application thread.
427 """
408 """
428 raise NotImplementedError('call_handlers must be defined in a subclass.')
409 raise NotImplementedError('call_handlers must be defined in a subclass.')
429
410
430 def flush(self, timeout=1.0):
411 def flush(self, timeout=1.0):
431 """Immediately processes all pending messages on the iopub channel.
412 """Immediately processes all pending messages on the iopub channel.
432
413
433 Callers should use this method to ensure that :meth:`call_handlers`
414 Callers should use this method to ensure that :meth:`call_handlers`
434 has been called for all messages that have been received on the
415 has been called for all messages that have been received on the
435 0MQ SUB socket of this channel.
416 0MQ SUB socket of this channel.
436
417
437 This method is thread safe.
418 This method is thread safe.
438
419
439 Parameters
420 Parameters
440 ----------
421 ----------
441 timeout : float, optional
422 timeout : float, optional
442 The maximum amount of time to spend flushing, in seconds. The
423 The maximum amount of time to spend flushing, in seconds. The
443 default is one second.
424 default is one second.
444 """
425 """
445 # We do the IOLoop callback process twice to ensure that the IOLoop
426 # We do the IOLoop callback process twice to ensure that the IOLoop
446 # gets to perform at least one full poll.
427 # gets to perform at least one full poll.
447 stop_time = time.time() + timeout
428 stop_time = time.time() + timeout
448 for i in range(2):
429 for i in range(2):
449 self._flushed = False
430 self._flushed = False
450 self.ioloop.add_callback(self._flush)
431 self.ioloop.add_callback(self._flush)
451 while not self._flushed and time.time() < stop_time:
432 while not self._flushed and time.time() < stop_time:
452 time.sleep(0.01)
433 time.sleep(0.01)
453
434
454 def _flush(self):
435 def _flush(self):
455 """Callback for :method:`self.flush`."""
436 """Callback for :method:`self.flush`."""
456 self.stream.flush()
437 self.stream.flush()
457 self._flushed = True
438 self._flushed = True
458
439
459
440
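A usage sketch of the flush pattern described above (assuming `iopub` is a started IOPubChannel subclass whose call_handlers() appends incoming messages to a `received` list; both names are assumptions, not part of this module):

# Illustrative only: drain whatever is already queued on the SUB socket,
# then inspect what call_handlers() collected.
iopub.flush(timeout=0.5)
for msg in received:
    print(msg['header']['msg_type'])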
460 class StdInChannel(ZMQSocketChannel):
441 class StdInChannel(ZMQSocketChannel):
461 """The stdin channel to handle raw_input requests that the kernel makes."""
442 """The stdin channel to handle raw_input requests that the kernel makes."""
462
443
463 msg_queue = None
444 msg_queue = None
464 proxy_methods = ['input']
445 proxy_methods = ['input']
465
446
466 def __init__(self, context, session, address):
447 def __init__(self, context, session, address):
467 super(StdInChannel, self).__init__(context, session, address)
448 super(StdInChannel, self).__init__(context, session, address)
468 self.ioloop = ioloop.IOLoop()
449 self.ioloop = ioloop.IOLoop()
469
450
470 def run(self):
451 def run(self):
471 """The thread's main activity. Call start() instead."""
452 """The thread's main activity. Call start() instead."""
472 self.socket = self.context.socket(zmq.DEALER)
453 self.socket = self.context.socket(zmq.DEALER)
473 self.socket.linger = 1000
454 self.socket.linger = 1000
474 self.socket.setsockopt(zmq.IDENTITY, self.session.bsession)
455 self.socket.setsockopt(zmq.IDENTITY, self.session.bsession)
475 self.socket.connect(self.address)
456 self.socket.connect(self.address)
476 self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
457 self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
477 self.stream.on_recv(self._handle_recv)
458 self.stream.on_recv(self._handle_recv)
478 self._run_loop()
459 self._run_loop()
479
460
480 def call_handlers(self, msg):
461 def call_handlers(self, msg):
481 """This method is called in the ioloop thread when a message arrives.
462 """This method is called in the ioloop thread when a message arrives.
482
463
483 Subclasses should override this method to handle incoming messages.
464 Subclasses should override this method to handle incoming messages.
484 It is important to remember that this method is called in the thread
465 It is important to remember that this method is called in the thread
485         so that some logic must be done to ensure that the application level
466         so that some logic must be done to ensure that the application level
486 handlers are called in the application thread.
467 handlers are called in the application thread.
487 """
468 """
488 raise NotImplementedError('call_handlers must be defined in a subclass.')
469 raise NotImplementedError('call_handlers must be defined in a subclass.')
489
470
490 def input(self, string):
471 def input(self, string):
491 """Send a string of raw input to the kernel."""
472 """Send a string of raw input to the kernel."""
492 content = dict(value=string)
473 content = dict(value=string)
493 msg = self.session.msg('input_reply', content)
474 msg = self.session.msg('input_reply', content)
494 self._queue_send(msg)
475 self._queue_send(msg)
495
476
496
477
497 class HBChannel(ZMQSocketChannel):
478 class HBChannel(ZMQSocketChannel):
498 """The heartbeat channel which monitors the kernel heartbeat.
479 """The heartbeat channel which monitors the kernel heartbeat.
499
480
500 Note that the heartbeat channel is paused by default. As long as you start
481 Note that the heartbeat channel is paused by default. As long as you start
501 this channel, the kernel manager will ensure that it is paused and un-paused
482 this channel, the kernel manager will ensure that it is paused and un-paused
502 as appropriate.
483 as appropriate.
503 """
484 """
504
485
505 time_to_dead = 3.0
486 time_to_dead = 3.0
506 socket = None
487 socket = None
507 poller = None
488 poller = None
508 _running = None
489 _running = None
509 _pause = None
490 _pause = None
510 _beating = None
491 _beating = None
511
492
512 def __init__(self, context, session, address):
493 def __init__(self, context, session, address):
513 super(HBChannel, self).__init__(context, session, address)
494 super(HBChannel, self).__init__(context, session, address)
514 self._running = False
495 self._running = False
515         self._pause = True
496         self._pause = True
516 self.poller = zmq.Poller()
497 self.poller = zmq.Poller()
517
498
518 def _create_socket(self):
499 def _create_socket(self):
519 if self.socket is not None:
500 if self.socket is not None:
520 # close previous socket, before opening a new one
501 # close previous socket, before opening a new one
521 self.poller.unregister(self.socket)
502 self.poller.unregister(self.socket)
522 self.socket.close()
503 self.socket.close()
523 self.socket = self.context.socket(zmq.REQ)
504 self.socket = self.context.socket(zmq.REQ)
524 self.socket.linger = 1000
505 self.socket.linger = 1000
525 self.socket.connect(self.address)
506 self.socket.connect(self.address)
526
507
527 self.poller.register(self.socket, zmq.POLLIN)
508 self.poller.register(self.socket, zmq.POLLIN)
528
509
529 def _poll(self, start_time):
510 def _poll(self, start_time):
530 """poll for heartbeat replies until we reach self.time_to_dead.
511 """poll for heartbeat replies until we reach self.time_to_dead.
531
512
532 Ignores interrupts, and returns the result of poll(), which
513 Ignores interrupts, and returns the result of poll(), which
533 will be an empty list if no messages arrived before the timeout,
514 will be an empty list if no messages arrived before the timeout,
534 or the event tuple if there is a message to receive.
515 or the event tuple if there is a message to receive.
535 """
516 """
536
517
537 until_dead = self.time_to_dead - (time.time() - start_time)
518 until_dead = self.time_to_dead - (time.time() - start_time)
538 # ensure poll at least once
519 # ensure poll at least once
539 until_dead = max(until_dead, 1e-3)
520 until_dead = max(until_dead, 1e-3)
540 events = []
521 events = []
541 while True:
522 while True:
542 try:
523 try:
543 events = self.poller.poll(1000 * until_dead)
524 events = self.poller.poll(1000 * until_dead)
544 except ZMQError as e:
525 except ZMQError as e:
545 if e.errno == errno.EINTR:
526 if e.errno == errno.EINTR:
546 # ignore interrupts during heartbeat
527 # ignore interrupts during heartbeat
547 # this may never actually happen
528 # this may never actually happen
548 until_dead = self.time_to_dead - (time.time() - start_time)
529 until_dead = self.time_to_dead - (time.time() - start_time)
549 until_dead = max(until_dead, 1e-3)
530 until_dead = max(until_dead, 1e-3)
550 pass
531 pass
551 else:
532 else:
552 raise
533 raise
553 except Exception:
534 except Exception:
554 if self._exiting:
535 if self._exiting:
555 break
536 break
556 else:
537 else:
557 raise
538 raise
558 else:
539 else:
559 break
540 break
560 return events
541 return events
561
542
562 def run(self):
543 def run(self):
563 """The thread's main activity. Call start() instead."""
544 """The thread's main activity. Call start() instead."""
564 self._create_socket()
545 self._create_socket()
565 self._running = True
546 self._running = True
566 self._beating = True
547 self._beating = True
567
548
568 while self._running:
549 while self._running:
569 if self._pause:
550 if self._pause:
570 # just sleep, and skip the rest of the loop
551 # just sleep, and skip the rest of the loop
571 time.sleep(self.time_to_dead)
552 time.sleep(self.time_to_dead)
572 continue
553 continue
573
554
574 since_last_heartbeat = 0.0
555 since_last_heartbeat = 0.0
575 # io.rprint('Ping from HB channel') # dbg
556 # io.rprint('Ping from HB channel') # dbg
576 # no need to catch EFSM here, because the previous event was
557 # no need to catch EFSM here, because the previous event was
577 # either a recv or connect, which cannot be followed by EFSM
558 # either a recv or connect, which cannot be followed by EFSM
578 self.socket.send(b'ping')
559 self.socket.send(b'ping')
579 request_time = time.time()
560 request_time = time.time()
580 ready = self._poll(request_time)
561 ready = self._poll(request_time)
581 if ready:
562 if ready:
582 self._beating = True
563 self._beating = True
583 # the poll above guarantees we have something to recv
564 # the poll above guarantees we have something to recv
584 self.socket.recv()
565 self.socket.recv()
585 # sleep the remainder of the cycle
566 # sleep the remainder of the cycle
586 remainder = self.time_to_dead - (time.time() - request_time)
567 remainder = self.time_to_dead - (time.time() - request_time)
587 if remainder > 0:
568 if remainder > 0:
588 time.sleep(remainder)
569 time.sleep(remainder)
589 continue
570 continue
590 else:
571 else:
591 # nothing was received within the time limit, signal heart failure
572 # nothing was received within the time limit, signal heart failure
592 self._beating = False
573 self._beating = False
593 since_last_heartbeat = time.time() - request_time
574 since_last_heartbeat = time.time() - request_time
594 self.call_handlers(since_last_heartbeat)
575 self.call_handlers(since_last_heartbeat)
595 # and close/reopen the socket, because the REQ/REP cycle has been broken
576 # and close/reopen the socket, because the REQ/REP cycle has been broken
596 self._create_socket()
577 self._create_socket()
597 continue
578 continue
598
579
599 def pause(self):
580 def pause(self):
600 """Pause the heartbeat."""
581 """Pause the heartbeat."""
601 self._pause = True
582 self._pause = True
602
583
603 def unpause(self):
584 def unpause(self):
604 """Unpause the heartbeat."""
585 """Unpause the heartbeat."""
605 self._pause = False
586 self._pause = False
606
587
607 def is_beating(self):
588 def is_beating(self):
608 """Is the heartbeat running and responsive (and not paused)."""
589 """Is the heartbeat running and responsive (and not paused)."""
609 if self.is_alive() and not self._pause and self._beating:
590 if self.is_alive() and not self._pause and self._beating:
610 return True
591 return True
611 else:
592 else:
612 return False
593 return False
613
594
614 def stop(self):
595 def stop(self):
615 """Stop the channel's event loop and join its thread."""
596 """Stop the channel's event loop and join its thread."""
616 self._running = False
597 self._running = False
617 super(HBChannel, self).stop()
598 super(HBChannel, self).stop()
618
599
619 def call_handlers(self, since_last_heartbeat):
600 def call_handlers(self, since_last_heartbeat):
620 """This method is called in the ioloop thread when a message arrives.
601 """This method is called in the ioloop thread when a message arrives.
621
602
622 Subclasses should override this method to handle incoming messages.
603 Subclasses should override this method to handle incoming messages.
623 It is important to remember that this method is called in the thread
604 It is important to remember that this method is called in the thread
624 so that some logic must be done to ensure that the application level
605 so that some logic must be done to ensure that the application level
625 handlers are called in the application thread.
606 handlers are called in the application thread.
626 """
607 """
627 raise NotImplementedError('call_handlers must be defined in a subclass.')
608 raise NotImplementedError('call_handlers must be defined in a subclass.')
628
609
629
610
630 #-----------------------------------------------------------------------------
611 #-----------------------------------------------------------------------------
631 # ABC Registration
612 # ABC Registration
632 #-----------------------------------------------------------------------------
613 #-----------------------------------------------------------------------------
633
614
634 ShellChannelABC.register(ShellChannel)
615 ShellChannelABC.register(ShellChannel)
635 IOPubChannelABC.register(IOPubChannel)
616 IOPubChannelABC.register(IOPubChannel)
636 HBChannelABC.register(HBChannel)
617 HBChannelABC.register(HBChannel)
637 StdInChannelABC.register(StdInChannel)
618 StdInChannelABC.register(StdInChannel)
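The HBChannel.run() loop shown above is, at its core, a REQ/REP ping guarded by a poll timeout: send b'ping', wait up to time_to_dead seconds for the echo, and treat silence as heart failure. A minimal standalone sketch of that pattern with plain pyzmq follows; the endpoint address is hypothetical and not taken from this diff.

    import time
    import zmq

    HB_ADDR = "tcp://127.0.0.1:5555"   # hypothetical heartbeat endpoint
    TIME_TO_DEAD = 3.0                 # mirrors HBChannel.time_to_dead

    ctx = zmq.Context()
    sock = ctx.socket(zmq.REQ)
    sock.linger = 1000
    sock.connect(HB_ADDR)

    poller = zmq.Poller()
    poller.register(sock, zmq.POLLIN)

    sock.send(b'ping')
    request_time = time.time()
    events = poller.poll(1000 * TIME_TO_DEAD)  # timeout is in milliseconds
    if events:
        sock.recv()        # the ping was echoed back: the kernel is beating
        print("beating")
    else:
        # no reply before the deadline; the REQ/REP cycle is now broken,
        # so a real client would close and recreate the socket here
        print("heart failure after %.1fs" % (time.time() - request_time))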
@@ -1,117 +1,113 b''
1 """Abstract base classes for kernel client channels"""
1 """Abstract base classes for kernel client channels"""
2
2
3 #-----------------------------------------------------------------------------
3 # Copyright (c) IPython Development Team.
4 # Copyright (C) 2013 The IPython Development Team
4 # Distributed under the terms of the Modified BSD License.
5 #
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
8 #-----------------------------------------------------------------------------
9
5
10 import abc
6 import abc
11
7
12 from IPython.utils.py3compat import with_metaclass
8 from IPython.utils.py3compat import with_metaclass
13
9
14
10
15 class ChannelABC(with_metaclass(abc.ABCMeta, object)):
11 class ChannelABC(with_metaclass(abc.ABCMeta, object)):
16 """A base class for all channel ABCs."""
12 """A base class for all channel ABCs."""
17
13
18 @abc.abstractmethod
14 @abc.abstractmethod
19 def start(self):
15 def start(self):
20 pass
16 pass
21
17
22 @abc.abstractmethod
18 @abc.abstractmethod
23 def stop(self):
19 def stop(self):
24 pass
20 pass
25
21
26 @abc.abstractmethod
22 @abc.abstractmethod
27 def is_alive(self):
23 def is_alive(self):
28 pass
24 pass
29
25
30
26
31 class ShellChannelABC(ChannelABC):
27 class ShellChannelABC(ChannelABC):
32 """ShellChannel ABC.
28 """ShellChannel ABC.
33
29
34 The docstrings for this class can be found in the base implementation:
30 The docstrings for this class can be found in the base implementation:
35
31
36 `IPython.kernel.channels.ShellChannel`
32 `IPython.kernel.channels.ShellChannel`
37 """
33 """
38
34
39 @abc.abstractproperty
35 @abc.abstractproperty
40 def allow_stdin(self):
36 def allow_stdin(self):
41 pass
37 pass
42
38
43 @abc.abstractmethod
39 @abc.abstractmethod
44 def execute(self, code, silent=False, store_history=True,
40 def execute(self, code, silent=False, store_history=True,
45 user_variables=None, user_expressions=None, allow_stdin=None):
41 user_expressions=None, allow_stdin=None):
46 pass
42 pass
47
43
48 @abc.abstractmethod
44 @abc.abstractmethod
49 def complete(self, text, line, cursor_pos, block=None):
45 def complete(self, text, line, cursor_pos, block=None):
50 pass
46 pass
51
47
52 @abc.abstractmethod
48 @abc.abstractmethod
53 def object_info(self, oname, detail_level=0):
49 def object_info(self, oname, detail_level=0):
54 pass
50 pass
55
51
56 @abc.abstractmethod
52 @abc.abstractmethod
57 def history(self, raw=True, output=False, hist_access_type='range', **kwargs):
53 def history(self, raw=True, output=False, hist_access_type='range', **kwargs):
58 pass
54 pass
59
55
60 @abc.abstractmethod
56 @abc.abstractmethod
61 def kernel_info(self):
57 def kernel_info(self):
62 pass
58 pass
63
59
64 @abc.abstractmethod
60 @abc.abstractmethod
65 def shutdown(self, restart=False):
61 def shutdown(self, restart=False):
66 pass
62 pass
67
63
68
64
69 class IOPubChannelABC(ChannelABC):
65 class IOPubChannelABC(ChannelABC):
70 """IOPubChannel ABC.
66 """IOPubChannel ABC.
71
67
72 The docstrings for this class can be found in the base implementation:
68 The docstrings for this class can be found in the base implementation:
73
69
74 `IPython.kernel.channels.IOPubChannel`
70 `IPython.kernel.channels.IOPubChannel`
75 """
71 """
76
72
77 @abc.abstractmethod
73 @abc.abstractmethod
78 def flush(self, timeout=1.0):
74 def flush(self, timeout=1.0):
79 pass
75 pass
80
76
81
77
82 class StdInChannelABC(ChannelABC):
78 class StdInChannelABC(ChannelABC):
83 """StdInChannel ABC.
79 """StdInChannel ABC.
84
80
85 The docstrings for this class can be found in the base implementation:
81 The docstrings for this class can be found in the base implementation:
86
82
87 `IPython.kernel.channels.StdInChannel`
83 `IPython.kernel.channels.StdInChannel`
88 """
84 """
89
85
90 @abc.abstractmethod
86 @abc.abstractmethod
91 def input(self, string):
87 def input(self, string):
92 pass
88 pass
93
89
94
90
95 class HBChannelABC(ChannelABC):
91 class HBChannelABC(ChannelABC):
96 """HBChannel ABC.
92 """HBChannel ABC.
97
93
98 The docstrings for this class can be found in the base implementation:
94 The docstrings for this class can be found in the base implementation:
99
95
100 `IPython.kernel.channels.HBChannel`
96 `IPython.kernel.channels.HBChannel`
101 """
97 """
102
98
103 @abc.abstractproperty
99 @abc.abstractproperty
104 def time_to_dead(self):
100 def time_to_dead(self):
105 pass
101 pass
106
102
107 @abc.abstractmethod
103 @abc.abstractmethod
108 def pause(self):
104 def pause(self):
109 pass
105 pass
110
106
111 @abc.abstractmethod
107 @abc.abstractmethod
112 def unpause(self):
108 def unpause(self):
113 pass
109 pass
114
110
115 @abc.abstractmethod
111 @abc.abstractmethod
116 def is_beating(self):
112 def is_beating(self):
117 pass
113 pass
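Because these ABCs only describe an interface, concrete channels are attached as virtual subclasses with register(), exactly as the "ABC Registration" blocks in the channel modules do. A hedged sketch with a made-up class, assuming the IPython.kernel.channelsabc import path used elsewhere in this diff:

    from IPython.kernel.channelsabc import ChannelABC

    class MyChannel(object):
        """Hypothetical channel implementing the three methods ChannelABC requires."""
        _alive = False

        def start(self):
            self._alive = True

        def stop(self):
            self._alive = False

        def is_alive(self):
            return self._alive

    # virtual-subclass registration, mirroring ShellChannelABC.register(ShellChannel)
    ChannelABC.register(MyChannel)
    assert issubclass(MyChannel, ChannelABC)
    assert isinstance(MyChannel(), ChannelABC)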
@@ -1,201 +1,190 b''
1 """ A kernel client for in-process kernels. """
1 """A kernel client for in-process kernels."""
2
2
3 #-----------------------------------------------------------------------------
3 # Copyright (c) IPython Development Team.
4 # Copyright (C) 2012 The IPython Development Team
4 # Distributed under the terms of the Modified BSD License.
5 #
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
8 #-----------------------------------------------------------------------------
9
10 #-----------------------------------------------------------------------------
11 # Imports
12 #-----------------------------------------------------------------------------
13
5
14 # IPython imports
15 from IPython.kernel.channelsabc import (
6 from IPython.kernel.channelsabc import (
16 ShellChannelABC, IOPubChannelABC,
7 ShellChannelABC, IOPubChannelABC,
17 HBChannelABC, StdInChannelABC,
8 HBChannelABC, StdInChannelABC,
18 )
9 )
19
10
20 # Local imports
21 from .socket import DummySocket
11 from .socket import DummySocket
22
12
23 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
24 # Channel classes
14 # Channel classes
25 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
26
16
27 class InProcessChannel(object):
17 class InProcessChannel(object):
28 """Base class for in-process channels."""
18 """Base class for in-process channels."""
29 proxy_methods = []
19 proxy_methods = []
30
20
31 def __init__(self, client=None):
21 def __init__(self, client=None):
32 super(InProcessChannel, self).__init__()
22 super(InProcessChannel, self).__init__()
33 self.client = client
23 self.client = client
34 self._is_alive = False
24 self._is_alive = False
35
25
36 #--------------------------------------------------------------------------
26 #--------------------------------------------------------------------------
37 # Channel interface
27 # Channel interface
38 #--------------------------------------------------------------------------
28 #--------------------------------------------------------------------------
39
29
40 def is_alive(self):
30 def is_alive(self):
41 return self._is_alive
31 return self._is_alive
42
32
43 def start(self):
33 def start(self):
44 self._is_alive = True
34 self._is_alive = True
45
35
46 def stop(self):
36 def stop(self):
47 self._is_alive = False
37 self._is_alive = False
48
38
49 def call_handlers(self, msg):
39 def call_handlers(self, msg):
50 """ This method is called in the main thread when a message arrives.
40 """ This method is called in the main thread when a message arrives.
51
41
52 Subclasses should override this method to handle incoming messages.
42 Subclasses should override this method to handle incoming messages.
53 """
43 """
54 raise NotImplementedError('call_handlers must be defined in a subclass.')
44 raise NotImplementedError('call_handlers must be defined in a subclass.')
55
45
56 #--------------------------------------------------------------------------
46 #--------------------------------------------------------------------------
57 # InProcessChannel interface
47 # InProcessChannel interface
58 #--------------------------------------------------------------------------
48 #--------------------------------------------------------------------------
59
49
60 def call_handlers_later(self, *args, **kwds):
50 def call_handlers_later(self, *args, **kwds):
61 """ Call the message handlers later.
51 """ Call the message handlers later.
62
52
63 The default implementation just calls the handlers immediately, but this
53 The default implementation just calls the handlers immediately, but this
64 method exists so that GUI toolkits can defer calling the handlers until
54 method exists so that GUI toolkits can defer calling the handlers until
65 after the event loop has run, as expected by GUI frontends.
55 after the event loop has run, as expected by GUI frontends.
66 """
56 """
67 self.call_handlers(*args, **kwds)
57 self.call_handlers(*args, **kwds)
68
58
69 def process_events(self):
59 def process_events(self):
70 """ Process any pending GUI events.
60 """ Process any pending GUI events.
71
61
72         This method will never be called from a frontend without an event
62         This method will never be called from a frontend without an event
73 loop (e.g., a terminal frontend).
63 loop (e.g., a terminal frontend).
74 """
64 """
75 raise NotImplementedError
65 raise NotImplementedError
76
66
77
67
78 class InProcessShellChannel(InProcessChannel):
68 class InProcessShellChannel(InProcessChannel):
79 """See `IPython.kernel.channels.ShellChannel` for docstrings."""
69 """See `IPython.kernel.channels.ShellChannel` for docstrings."""
80
70
81 # flag for whether execute requests should be allowed to call raw_input
71 # flag for whether execute requests should be allowed to call raw_input
82 allow_stdin = True
72 allow_stdin = True
83 proxy_methods = [
73 proxy_methods = [
84 'execute',
74 'execute',
85 'complete',
75 'complete',
86 'object_info',
76 'object_info',
87 'history',
77 'history',
88 'shutdown',
78 'shutdown',
89 'kernel_info',
79 'kernel_info',
90 ]
80 ]
91
81
92 #--------------------------------------------------------------------------
82 #--------------------------------------------------------------------------
93 # ShellChannel interface
83 # ShellChannel interface
94 #--------------------------------------------------------------------------
84 #--------------------------------------------------------------------------
95
85
96 def execute(self, code, silent=False, store_history=True,
86 def execute(self, code, silent=False, store_history=True,
97 user_variables=[], user_expressions={}, allow_stdin=None):
87 user_expressions={}, allow_stdin=None):
98 if allow_stdin is None:
88 if allow_stdin is None:
99 allow_stdin = self.allow_stdin
89 allow_stdin = self.allow_stdin
100 content = dict(code=code, silent=silent, store_history=store_history,
90 content = dict(code=code, silent=silent, store_history=store_history,
101 user_variables=user_variables,
102 user_expressions=user_expressions,
91 user_expressions=user_expressions,
103 allow_stdin=allow_stdin)
92 allow_stdin=allow_stdin)
104 msg = self.client.session.msg('execute_request', content)
93 msg = self.client.session.msg('execute_request', content)
105 self._dispatch_to_kernel(msg)
94 self._dispatch_to_kernel(msg)
106 return msg['header']['msg_id']
95 return msg['header']['msg_id']
107
96
108 def complete(self, text, line, cursor_pos, block=None):
97 def complete(self, text, line, cursor_pos, block=None):
109 content = dict(text=text, line=line, block=block, cursor_pos=cursor_pos)
98 content = dict(text=text, line=line, block=block, cursor_pos=cursor_pos)
110 msg = self.client.session.msg('complete_request', content)
99 msg = self.client.session.msg('complete_request', content)
111 self._dispatch_to_kernel(msg)
100 self._dispatch_to_kernel(msg)
112 return msg['header']['msg_id']
101 return msg['header']['msg_id']
113
102
114 def object_info(self, oname, detail_level=0):
103 def object_info(self, oname, detail_level=0):
115 content = dict(oname=oname, detail_level=detail_level)
104 content = dict(oname=oname, detail_level=detail_level)
116 msg = self.client.session.msg('object_info_request', content)
105 msg = self.client.session.msg('object_info_request', content)
117 self._dispatch_to_kernel(msg)
106 self._dispatch_to_kernel(msg)
118 return msg['header']['msg_id']
107 return msg['header']['msg_id']
119
108
120 def history(self, raw=True, output=False, hist_access_type='range', **kwds):
109 def history(self, raw=True, output=False, hist_access_type='range', **kwds):
121 content = dict(raw=raw, output=output,
110 content = dict(raw=raw, output=output,
122 hist_access_type=hist_access_type, **kwds)
111 hist_access_type=hist_access_type, **kwds)
123 msg = self.client.session.msg('history_request', content)
112 msg = self.client.session.msg('history_request', content)
124 self._dispatch_to_kernel(msg)
113 self._dispatch_to_kernel(msg)
125 return msg['header']['msg_id']
114 return msg['header']['msg_id']
126
115
127 def shutdown(self, restart=False):
116 def shutdown(self, restart=False):
128 # FIXME: What to do here?
117 # FIXME: What to do here?
129 raise NotImplementedError('Cannot shutdown in-process kernel')
118 raise NotImplementedError('Cannot shutdown in-process kernel')
130
119
131 def kernel_info(self):
120 def kernel_info(self):
132 """Request kernel info."""
121 """Request kernel info."""
133 msg = self.client.session.msg('kernel_info_request')
122 msg = self.client.session.msg('kernel_info_request')
134 self._dispatch_to_kernel(msg)
123 self._dispatch_to_kernel(msg)
135 return msg['header']['msg_id']
124 return msg['header']['msg_id']
136
125
137 #--------------------------------------------------------------------------
126 #--------------------------------------------------------------------------
138 # Protected interface
127 # Protected interface
139 #--------------------------------------------------------------------------
128 #--------------------------------------------------------------------------
140
129
141 def _dispatch_to_kernel(self, msg):
130 def _dispatch_to_kernel(self, msg):
142 """ Send a message to the kernel and handle a reply.
131 """ Send a message to the kernel and handle a reply.
143 """
132 """
144 kernel = self.client.kernel
133 kernel = self.client.kernel
145 if kernel is None:
134 if kernel is None:
146 raise RuntimeError('Cannot send request. No kernel exists.')
135 raise RuntimeError('Cannot send request. No kernel exists.')
147
136
148 stream = DummySocket()
137 stream = DummySocket()
149 self.client.session.send(stream, msg)
138 self.client.session.send(stream, msg)
150 msg_parts = stream.recv_multipart()
139 msg_parts = stream.recv_multipart()
151 kernel.dispatch_shell(stream, msg_parts)
140 kernel.dispatch_shell(stream, msg_parts)
152
141
153 idents, reply_msg = self.client.session.recv(stream, copy=False)
142 idents, reply_msg = self.client.session.recv(stream, copy=False)
154 self.call_handlers_later(reply_msg)
143 self.call_handlers_later(reply_msg)
155
144
156
145
157 class InProcessIOPubChannel(InProcessChannel):
146 class InProcessIOPubChannel(InProcessChannel):
158 """See `IPython.kernel.channels.IOPubChannel` for docstrings."""
147 """See `IPython.kernel.channels.IOPubChannel` for docstrings."""
159
148
160 def flush(self, timeout=1.0):
149 def flush(self, timeout=1.0):
161 pass
150 pass
162
151
163
152
164 class InProcessStdInChannel(InProcessChannel):
153 class InProcessStdInChannel(InProcessChannel):
165 """See `IPython.kernel.channels.StdInChannel` for docstrings."""
154 """See `IPython.kernel.channels.StdInChannel` for docstrings."""
166
155
167 proxy_methods = ['input']
156 proxy_methods = ['input']
168
157
169 def input(self, string):
158 def input(self, string):
170 kernel = self.client.kernel
159 kernel = self.client.kernel
171 if kernel is None:
160 if kernel is None:
172 raise RuntimeError('Cannot send input reply. No kernel exists.')
161 raise RuntimeError('Cannot send input reply. No kernel exists.')
173 kernel.raw_input_str = string
162 kernel.raw_input_str = string
174
163
175
164
176 class InProcessHBChannel(InProcessChannel):
165 class InProcessHBChannel(InProcessChannel):
177 """See `IPython.kernel.channels.HBChannel` for docstrings."""
166 """See `IPython.kernel.channels.HBChannel` for docstrings."""
178
167
179 time_to_dead = 3.0
168 time_to_dead = 3.0
180
169
181 def __init__(self, *args, **kwds):
170 def __init__(self, *args, **kwds):
182 super(InProcessHBChannel, self).__init__(*args, **kwds)
171 super(InProcessHBChannel, self).__init__(*args, **kwds)
183 self._pause = True
172 self._pause = True
184
173
185 def pause(self):
174 def pause(self):
186 self._pause = True
175 self._pause = True
187
176
188 def unpause(self):
177 def unpause(self):
189 self._pause = False
178 self._pause = False
190
179
191 def is_beating(self):
180 def is_beating(self):
192 return not self._pause
181 return not self._pause
193
182
194 #-----------------------------------------------------------------------------
183 #-----------------------------------------------------------------------------
195 # ABC Registration
184 # ABC Registration
196 #-----------------------------------------------------------------------------
185 #-----------------------------------------------------------------------------
197
186
198 ShellChannelABC.register(InProcessShellChannel)
187 ShellChannelABC.register(InProcessShellChannel)
199 IOPubChannelABC.register(InProcessIOPubChannel)
188 IOPubChannelABC.register(InProcessIOPubChannel)
200 HBChannelABC.register(InProcessHBChannel)
189 HBChannelABC.register(InProcessHBChannel)
201 StdInChannelABC.register(InProcessStdInChannel)
190 StdInChannelABC.register(InProcessStdInChannel)
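For callers, the visible effect of this commit is that execute() loses its user_variables argument; anything that used to be requested that way now goes through user_expressions. A hedged sketch of the content dict the updated InProcessShellChannel.execute builds (values are illustrative):

    # Before: execute(code, ..., user_variables=['x'], user_expressions={...})
    # After:  only user_expressions remains; a bare name works as an expression.
    content = dict(
        code='x = 1',
        silent=False,
        store_history=True,
        user_expressions={'x': 'x', 'doubled': 'x * 2'},  # replaces user_variables=['x']
        allow_stdin=True,
    )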
@@ -1,442 +1,420 b''
1 """Test suite for our zeromq-based message specification."""
1 """Test suite for our zeromq-based message specification."""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import re
6 import re
7 from distutils.version import LooseVersion as V
7 from distutils.version import LooseVersion as V
8 from subprocess import PIPE
8 from subprocess import PIPE
9 try:
9 try:
10 from queue import Empty # Py 3
10 from queue import Empty # Py 3
11 except ImportError:
11 except ImportError:
12 from Queue import Empty # Py 2
12 from Queue import Empty # Py 2
13
13
14 import nose.tools as nt
14 import nose.tools as nt
15
15
16 from IPython.kernel import KernelManager
16 from IPython.kernel import KernelManager
17
17
18 from IPython.utils.traitlets import (
18 from IPython.utils.traitlets import (
19 HasTraits, TraitError, Bool, Unicode, Dict, Integer, List, Enum, Any,
19 HasTraits, TraitError, Bool, Unicode, Dict, Integer, List, Enum, Any,
20 )
20 )
21 from IPython.utils.py3compat import string_types, iteritems
21 from IPython.utils.py3compat import string_types, iteritems
22
22
23 from .utils import TIMEOUT, start_global_kernel, flush_channels, execute
23 from .utils import TIMEOUT, start_global_kernel, flush_channels, execute
24
24
25 #-----------------------------------------------------------------------------
25 #-----------------------------------------------------------------------------
26 # Globals
26 # Globals
27 #-----------------------------------------------------------------------------
27 #-----------------------------------------------------------------------------
28 KC = None
28 KC = None
29
29
30 def setup():
30 def setup():
31 global KC
31 global KC
32 KC = start_global_kernel()
32 KC = start_global_kernel()
33
33
34 #-----------------------------------------------------------------------------
34 #-----------------------------------------------------------------------------
35 # Message Spec References
35 # Message Spec References
36 #-----------------------------------------------------------------------------
36 #-----------------------------------------------------------------------------
37
37
38 class Reference(HasTraits):
38 class Reference(HasTraits):
39
39
40 """
40 """
41     Base class for message spec testing.
41     Base class for message spec testing.
42
42
43 This class is the core of the message specification test. The
43 This class is the core of the message specification test. The
44 idea is that child classes implement trait attributes for each
44 idea is that child classes implement trait attributes for each
45     message key, so that message keys can be tested against these
45     message key, so that message keys can be tested against these
46     traits using the :meth:`check` method.
46     traits using the :meth:`check` method.
47
47
48 """
48 """
49
49
50 def check(self, d):
50 def check(self, d):
51 """validate a dict against our traits"""
51 """validate a dict against our traits"""
52 for key in self.trait_names():
52 for key in self.trait_names():
53 nt.assert_in(key, d)
53 nt.assert_in(key, d)
54 # FIXME: always allow None, probably not a good idea
54 # FIXME: always allow None, probably not a good idea
55 if d[key] is None:
55 if d[key] is None:
56 continue
56 continue
57 try:
57 try:
58 setattr(self, key, d[key])
58 setattr(self, key, d[key])
59 except TraitError as e:
59 except TraitError as e:
60 assert False, str(e)
60 assert False, str(e)
61
61
62 class Version(Unicode):
62 class Version(Unicode):
63 def validate(self, obj, value):
63 def validate(self, obj, value):
64 min_version = self.default_value
64 min_version = self.default_value
65 if V(value) < V(min_version):
65 if V(value) < V(min_version):
66 raise TraitError("bad version: %s < %s" % (value, min_version))
66 raise TraitError("bad version: %s < %s" % (value, min_version))
67
67
68 class RMessage(Reference):
68 class RMessage(Reference):
69 msg_id = Unicode()
69 msg_id = Unicode()
70 msg_type = Unicode()
70 msg_type = Unicode()
71 header = Dict()
71 header = Dict()
72 parent_header = Dict()
72 parent_header = Dict()
73 content = Dict()
73 content = Dict()
74
74
75 def check(self, d):
75 def check(self, d):
76 super(RMessage, self).check(d)
76 super(RMessage, self).check(d)
77 RHeader().check(self.header)
77 RHeader().check(self.header)
78 RHeader().check(self.parent_header)
78 if self.parent_header:
79 RHeader().check(self.parent_header)
79
80
80 class RHeader(Reference):
81 class RHeader(Reference):
81 msg_id = Unicode()
82 msg_id = Unicode()
82 msg_type = Unicode()
83 msg_type = Unicode()
83 session = Unicode()
84 session = Unicode()
84 username = Unicode()
85 username = Unicode()
85 version = Version('5.0')
86 version = Version('5.0')
86
87
87
88
88 class ExecuteReply(Reference):
89 class ExecuteReply(Reference):
89 execution_count = Integer()
90 execution_count = Integer()
90 status = Enum((u'ok', u'error'))
91 status = Enum((u'ok', u'error'))
91
92
92 def check(self, d):
93 def check(self, d):
93 Reference.check(self, d)
94 Reference.check(self, d)
94 if d['status'] == 'ok':
95 if d['status'] == 'ok':
95 ExecuteReplyOkay().check(d)
96 ExecuteReplyOkay().check(d)
96 elif d['status'] == 'error':
97 elif d['status'] == 'error':
97 ExecuteReplyError().check(d)
98 ExecuteReplyError().check(d)
98
99
99
100
100 class ExecuteReplyOkay(Reference):
101 class ExecuteReplyOkay(Reference):
101 payload = List(Dict)
102 payload = List(Dict)
102 user_variables = Dict()
103 user_expressions = Dict()
103 user_expressions = Dict()
104
104
105
105
106 class ExecuteReplyError(Reference):
106 class ExecuteReplyError(Reference):
107 ename = Unicode()
107 ename = Unicode()
108 evalue = Unicode()
108 evalue = Unicode()
109 traceback = List(Unicode)
109 traceback = List(Unicode)
110
110
111
111
112 class OInfoReply(Reference):
112 class OInfoReply(Reference):
113 name = Unicode()
113 name = Unicode()
114 found = Bool()
114 found = Bool()
115 ismagic = Bool()
115 ismagic = Bool()
116 isalias = Bool()
116 isalias = Bool()
117 namespace = Enum((u'builtin', u'magics', u'alias', u'Interactive'))
117 namespace = Enum((u'builtin', u'magics', u'alias', u'Interactive'))
118 type_name = Unicode()
118 type_name = Unicode()
119 string_form = Unicode()
119 string_form = Unicode()
120 base_class = Unicode()
120 base_class = Unicode()
121 length = Integer()
121 length = Integer()
122 file = Unicode()
122 file = Unicode()
123 definition = Unicode()
123 definition = Unicode()
124 argspec = Dict()
124 argspec = Dict()
125 init_definition = Unicode()
125 init_definition = Unicode()
126 docstring = Unicode()
126 docstring = Unicode()
127 init_docstring = Unicode()
127 init_docstring = Unicode()
128 class_docstring = Unicode()
128 class_docstring = Unicode()
129 call_def = Unicode()
129 call_def = Unicode()
130 call_docstring = Unicode()
130 call_docstring = Unicode()
131 source = Unicode()
131 source = Unicode()
132
132
133 def check(self, d):
133 def check(self, d):
134 super(OInfoReply, self).check(d)
134 super(OInfoReply, self).check(d)
135 if d['argspec'] is not None:
135 if d['argspec'] is not None:
136 ArgSpec().check(d['argspec'])
136 ArgSpec().check(d['argspec'])
137
137
138
138
139 class ArgSpec(Reference):
139 class ArgSpec(Reference):
140 args = List(Unicode)
140 args = List(Unicode)
141 varargs = Unicode()
141 varargs = Unicode()
142 varkw = Unicode()
142 varkw = Unicode()
143 defaults = List()
143 defaults = List()
144
144
145
145
146 class Status(Reference):
146 class Status(Reference):
147 execution_state = Enum((u'busy', u'idle', u'starting'))
147 execution_state = Enum((u'busy', u'idle', u'starting'))
148
148
149
149
150 class CompleteReply(Reference):
150 class CompleteReply(Reference):
151 matches = List(Unicode)
151 matches = List(Unicode)
152
152
153
153
154 class KernelInfoReply(Reference):
154 class KernelInfoReply(Reference):
155 protocol_version = Version('5.0')
155 protocol_version = Version('5.0')
156 ipython_version = Version('2.0')
156 ipython_version = Version('2.0')
157 language_version = Version('2.7')
157 language_version = Version('2.7')
158 language = Unicode()
158 language = Unicode()
159
159
160
160
161 # IOPub messages
161 # IOPub messages
162
162
163 class ExecuteInput(Reference):
163 class ExecuteInput(Reference):
164 code = Unicode()
164 code = Unicode()
165 execution_count = Integer()
165 execution_count = Integer()
166
166
167
167
168 Error = ExecuteReplyError
168 Error = ExecuteReplyError
169
169
170
170
171 class Stream(Reference):
171 class Stream(Reference):
172 name = Enum((u'stdout', u'stderr'))
172 name = Enum((u'stdout', u'stderr'))
173 data = Unicode()
173 data = Unicode()
174
174
175
175
176 mime_pat = re.compile(r'\w+/\w+')
176 mime_pat = re.compile(r'\w+/\w+')
177
177
178 class DisplayData(Reference):
178 class DisplayData(Reference):
179 source = Unicode()
179 source = Unicode()
180 metadata = Dict()
180 metadata = Dict()
181 data = Dict()
181 data = Dict()
182 def _data_changed(self, name, old, new):
182 def _data_changed(self, name, old, new):
183 for k,v in iteritems(new):
183 for k,v in iteritems(new):
184 assert mime_pat.match(k)
184 assert mime_pat.match(k)
185 nt.assert_is_instance(v, string_types)
185 nt.assert_is_instance(v, string_types)
186
186
187
187
188 class ExecuteResult(Reference):
188 class ExecuteResult(Reference):
189 execution_count = Integer()
189 execution_count = Integer()
190 data = Dict()
190 data = Dict()
191 def _data_changed(self, name, old, new):
191 def _data_changed(self, name, old, new):
192 for k,v in iteritems(new):
192 for k,v in iteritems(new):
193 assert mime_pat.match(k)
193 assert mime_pat.match(k)
194 nt.assert_is_instance(v, string_types)
194 nt.assert_is_instance(v, string_types)
195
195
196
196
197 references = {
197 references = {
198 'execute_reply' : ExecuteReply(),
198 'execute_reply' : ExecuteReply(),
199 'object_info_reply' : OInfoReply(),
199 'object_info_reply' : OInfoReply(),
200 'status' : Status(),
200 'status' : Status(),
201 'complete_reply' : CompleteReply(),
201 'complete_reply' : CompleteReply(),
202 'kernel_info_reply': KernelInfoReply(),
202 'kernel_info_reply': KernelInfoReply(),
203 'execute_input' : ExecuteInput(),
203 'execute_input' : ExecuteInput(),
204 'execute_result' : ExecuteResult(),
204 'execute_result' : ExecuteResult(),
205 'error' : Error(),
205 'error' : Error(),
206 'stream' : Stream(),
206 'stream' : Stream(),
207 'display_data' : DisplayData(),
207 'display_data' : DisplayData(),
208 'header' : RHeader(),
208 'header' : RHeader(),
209 }
209 }
210 """
210 """
211 Specifications of the `content` part of the reply messages.
211 Specifications of the `content` part of the reply messages.
212 """
212 """
213
213
214
214
215 def validate_message(msg, msg_type=None, parent=None):
215 def validate_message(msg, msg_type=None, parent=None):
216 """validate a message
216 """validate a message
217
217
218     This raises an assertion error as soon as any check fails, so simply
218     This raises an assertion error as soon as any check fails, so simply
219     calling it triggers each test.
219     calling it triggers each test.
220
220
221 If msg_type and/or parent are given, the msg_type and/or parent msg_id
221 If msg_type and/or parent are given, the msg_type and/or parent msg_id
222 are compared with the given values.
222 are compared with the given values.
223 """
223 """
224 RMessage().check(msg)
224 RMessage().check(msg)
225 if msg_type:
225 if msg_type:
226 nt.assert_equal(msg['msg_type'], msg_type)
226 nt.assert_equal(msg['msg_type'], msg_type)
227 if parent:
227 if parent:
228 nt.assert_equal(msg['parent_header']['msg_id'], parent)
228 nt.assert_equal(msg['parent_header']['msg_id'], parent)
229 content = msg['content']
229 content = msg['content']
230 ref = references[msg['msg_type']]
230 ref = references[msg['msg_type']]
231 ref.check(content)
231 ref.check(content)
232
232
233
233
234 #-----------------------------------------------------------------------------
234 #-----------------------------------------------------------------------------
235 # Tests
235 # Tests
236 #-----------------------------------------------------------------------------
236 #-----------------------------------------------------------------------------
237
237
238 # Shell channel
238 # Shell channel
239
239
240 def test_execute():
240 def test_execute():
241 flush_channels()
241 flush_channels()
242
242
243 msg_id = KC.execute(code='x=1')
243 msg_id = KC.execute(code='x=1')
244 reply = KC.get_shell_msg(timeout=TIMEOUT)
244 reply = KC.get_shell_msg(timeout=TIMEOUT)
245 validate_message(reply, 'execute_reply', msg_id)
245 validate_message(reply, 'execute_reply', msg_id)
246
246
247
247
248 def test_execute_silent():
248 def test_execute_silent():
249 flush_channels()
249 flush_channels()
250 msg_id, reply = execute(code='x=1', silent=True)
250 msg_id, reply = execute(code='x=1', silent=True)
251
251
252 # flush status=idle
252 # flush status=idle
253 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
253 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
254 validate_message(status, 'status', msg_id)
254 validate_message(status, 'status', msg_id)
255 nt.assert_equal(status['content']['execution_state'], 'idle')
255 nt.assert_equal(status['content']['execution_state'], 'idle')
256
256
257 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
257 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
258 count = reply['execution_count']
258 count = reply['execution_count']
259
259
260 msg_id, reply = execute(code='x=2', silent=True)
260 msg_id, reply = execute(code='x=2', silent=True)
261
261
262 # flush status=idle
262 # flush status=idle
263 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
263 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
264 validate_message(status, 'status', msg_id)
264 validate_message(status, 'status', msg_id)
265 nt.assert_equal(status['content']['execution_state'], 'idle')
265 nt.assert_equal(status['content']['execution_state'], 'idle')
266
266
267 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
267 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
268 count_2 = reply['execution_count']
268 count_2 = reply['execution_count']
269 nt.assert_equal(count_2, count)
269 nt.assert_equal(count_2, count)
270
270
271
271
272 def test_execute_error():
272 def test_execute_error():
273 flush_channels()
273 flush_channels()
274
274
275 msg_id, reply = execute(code='1/0')
275 msg_id, reply = execute(code='1/0')
276 nt.assert_equal(reply['status'], 'error')
276 nt.assert_equal(reply['status'], 'error')
277 nt.assert_equal(reply['ename'], 'ZeroDivisionError')
277 nt.assert_equal(reply['ename'], 'ZeroDivisionError')
278
278
279 error = KC.iopub_channel.get_msg(timeout=TIMEOUT)
279 error = KC.iopub_channel.get_msg(timeout=TIMEOUT)
280 validate_message(error, 'error', msg_id)
280 validate_message(error, 'error', msg_id)
281
281
282
282
283 def test_execute_inc():
283 def test_execute_inc():
284 """execute request should increment execution_count"""
284 """execute request should increment execution_count"""
285 flush_channels()
285 flush_channels()
286
286
287 msg_id, reply = execute(code='x=1')
287 msg_id, reply = execute(code='x=1')
288 count = reply['execution_count']
288 count = reply['execution_count']
289
289
290 flush_channels()
290 flush_channels()
291
291
292 msg_id, reply = execute(code='x=2')
292 msg_id, reply = execute(code='x=2')
293 count_2 = reply['execution_count']
293 count_2 = reply['execution_count']
294 nt.assert_equal(count_2, count+1)
294 nt.assert_equal(count_2, count+1)
295
295
296
296
297 def test_user_variables():
298 flush_channels()
299
300 msg_id, reply = execute(code='x=1', user_variables=['x'])
301 user_variables = reply['user_variables']
302 nt.assert_equal(user_variables, {u'x': {
303 u'status': u'ok',
304 u'data': {u'text/plain': u'1'},
305 u'metadata': {},
306 }})
307
308
309 def test_user_variables_fail():
310 flush_channels()
311
312 msg_id, reply = execute(code='x=1', user_variables=['nosuchname'])
313 user_variables = reply['user_variables']
314 foo = user_variables['nosuchname']
315 nt.assert_equal(foo['status'], 'error')
316 nt.assert_equal(foo['ename'], 'KeyError')
317
318
319 def test_user_expressions():
297 def test_user_expressions():
320 flush_channels()
298 flush_channels()
321
299
322 msg_id, reply = execute(code='x=1', user_expressions=dict(foo='x+1'))
300 msg_id, reply = execute(code='x=1', user_expressions=dict(foo='x+1'))
323 user_expressions = reply['user_expressions']
301 user_expressions = reply['user_expressions']
324 nt.assert_equal(user_expressions, {u'foo': {
302 nt.assert_equal(user_expressions, {u'foo': {
325 u'status': u'ok',
303 u'status': u'ok',
326 u'data': {u'text/plain': u'2'},
304 u'data': {u'text/plain': u'2'},
327 u'metadata': {},
305 u'metadata': {},
328 }})
306 }})
329
307
330
308
331 def test_user_expressions_fail():
309 def test_user_expressions_fail():
332 flush_channels()
310 flush_channels()
333
311
334 msg_id, reply = execute(code='x=0', user_expressions=dict(foo='nosuchname'))
312 msg_id, reply = execute(code='x=0', user_expressions=dict(foo='nosuchname'))
335 user_expressions = reply['user_expressions']
313 user_expressions = reply['user_expressions']
336 foo = user_expressions['foo']
314 foo = user_expressions['foo']
337 nt.assert_equal(foo['status'], 'error')
315 nt.assert_equal(foo['status'], 'error')
338 nt.assert_equal(foo['ename'], 'NameError')
316 nt.assert_equal(foo['ename'], 'NameError')
339
317
340
318
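The deleted test_user_variables / test_user_variables_fail cases are subsumed by user_expressions: asking for a bare name as an expression yields the same data/metadata payload. A hedged sketch of an equivalent check, reusing this module's execute helper and global KC client (the test name is hypothetical):

    def test_variable_via_user_expressions():
        flush_channels()

        msg_id, reply = execute(code='x=1', user_expressions={'x': 'x'})
        x = reply['user_expressions']['x']
        nt.assert_equal(x['status'], 'ok')
        nt.assert_equal(x['data']['text/plain'], u'1')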
341 def test_oinfo():
319 def test_oinfo():
342 flush_channels()
320 flush_channels()
343
321
344 msg_id = KC.object_info('a')
322 msg_id = KC.object_info('a')
345 reply = KC.get_shell_msg(timeout=TIMEOUT)
323 reply = KC.get_shell_msg(timeout=TIMEOUT)
346 validate_message(reply, 'object_info_reply', msg_id)
324 validate_message(reply, 'object_info_reply', msg_id)
347
325
348
326
349 def test_oinfo_found():
327 def test_oinfo_found():
350 flush_channels()
328 flush_channels()
351
329
352 msg_id, reply = execute(code='a=5')
330 msg_id, reply = execute(code='a=5')
353
331
354 msg_id = KC.object_info('a')
332 msg_id = KC.object_info('a')
355 reply = KC.get_shell_msg(timeout=TIMEOUT)
333 reply = KC.get_shell_msg(timeout=TIMEOUT)
356 validate_message(reply, 'object_info_reply', msg_id)
334 validate_message(reply, 'object_info_reply', msg_id)
357 content = reply['content']
335 content = reply['content']
358 assert content['found']
336 assert content['found']
359 argspec = content['argspec']
337 argspec = content['argspec']
360 nt.assert_is(argspec, None)
338 nt.assert_is(argspec, None)
361
339
362
340
363 def test_oinfo_detail():
341 def test_oinfo_detail():
364 flush_channels()
342 flush_channels()
365
343
366 msg_id, reply = execute(code='ip=get_ipython()')
344 msg_id, reply = execute(code='ip=get_ipython()')
367
345
368 msg_id = KC.object_info('ip.object_inspect', detail_level=2)
346 msg_id = KC.object_info('ip.object_inspect', detail_level=2)
369 reply = KC.get_shell_msg(timeout=TIMEOUT)
347 reply = KC.get_shell_msg(timeout=TIMEOUT)
370 validate_message(reply, 'object_info_reply', msg_id)
348 validate_message(reply, 'object_info_reply', msg_id)
371 content = reply['content']
349 content = reply['content']
372 assert content['found']
350 assert content['found']
373 argspec = content['argspec']
351 argspec = content['argspec']
374 nt.assert_is_instance(argspec, dict, "expected non-empty argspec dict, got %r" % argspec)
352 nt.assert_is_instance(argspec, dict, "expected non-empty argspec dict, got %r" % argspec)
375 nt.assert_equal(argspec['defaults'], [0])
353 nt.assert_equal(argspec['defaults'], [0])
376
354
377
355
378 def test_oinfo_not_found():
356 def test_oinfo_not_found():
379 flush_channels()
357 flush_channels()
380
358
381 msg_id = KC.object_info('dne')
359 msg_id = KC.object_info('dne')
382 reply = KC.get_shell_msg(timeout=TIMEOUT)
360 reply = KC.get_shell_msg(timeout=TIMEOUT)
383 validate_message(reply, 'object_info_reply', msg_id)
361 validate_message(reply, 'object_info_reply', msg_id)
384 content = reply['content']
362 content = reply['content']
385 nt.assert_false(content['found'])
363 nt.assert_false(content['found'])
386
364
387
365
388 def test_complete():
366 def test_complete():
389 flush_channels()
367 flush_channels()
390
368
391 msg_id, reply = execute(code="alpha = albert = 5")
369 msg_id, reply = execute(code="alpha = albert = 5")
392
370
393 msg_id = KC.complete('al', 'al', 2)
371 msg_id = KC.complete('al', 'al', 2)
394 reply = KC.get_shell_msg(timeout=TIMEOUT)
372 reply = KC.get_shell_msg(timeout=TIMEOUT)
395 validate_message(reply, 'complete_reply', msg_id)
373 validate_message(reply, 'complete_reply', msg_id)
396 matches = reply['content']['matches']
374 matches = reply['content']['matches']
397 for name in ('alpha', 'albert'):
375 for name in ('alpha', 'albert'):
398 nt.assert_in(name, matches)
376 nt.assert_in(name, matches)
399
377
400
378
401 def test_kernel_info_request():
379 def test_kernel_info_request():
402 flush_channels()
380 flush_channels()
403
381
404 msg_id = KC.kernel_info()
382 msg_id = KC.kernel_info()
405 reply = KC.get_shell_msg(timeout=TIMEOUT)
383 reply = KC.get_shell_msg(timeout=TIMEOUT)
406 validate_message(reply, 'kernel_info_reply', msg_id)
384 validate_message(reply, 'kernel_info_reply', msg_id)
407
385
408
386
409 def test_single_payload():
387 def test_single_payload():
410 flush_channels()
388 flush_channels()
411 msg_id, reply = execute(code="for i in range(3):\n"+
389 msg_id, reply = execute(code="for i in range(3):\n"+
412 " x=range?\n")
390 " x=range?\n")
413 payload = reply['payload']
391 payload = reply['payload']
414 next_input_pls = [pl for pl in payload if pl["source"] == "set_next_input"]
392 next_input_pls = [pl for pl in payload if pl["source"] == "set_next_input"]
415 nt.assert_equal(len(next_input_pls), 1)
393 nt.assert_equal(len(next_input_pls), 1)
416
394
417
395
418 # IOPub channel
396 # IOPub channel
419
397
420
398
421 def test_stream():
399 def test_stream():
422 flush_channels()
400 flush_channels()
423
401
424 msg_id, reply = execute("print('hi')")
402 msg_id, reply = execute("print('hi')")
425
403
426 stdout = KC.iopub_channel.get_msg(timeout=TIMEOUT)
404 stdout = KC.iopub_channel.get_msg(timeout=TIMEOUT)
427 validate_message(stdout, 'stream', msg_id)
405 validate_message(stdout, 'stream', msg_id)
428 content = stdout['content']
406 content = stdout['content']
429 nt.assert_equal(content['name'], u'stdout')
407 nt.assert_equal(content['name'], u'stdout')
430 nt.assert_equal(content['data'], u'hi\n')
408 nt.assert_equal(content['data'], u'hi\n')
431
409
432
410
433 def test_display_data():
411 def test_display_data():
434 flush_channels()
412 flush_channels()
435
413
436 msg_id, reply = execute("from IPython.core.display import display; display(1)")
414 msg_id, reply = execute("from IPython.core.display import display; display(1)")
437
415
438 display = KC.iopub_channel.get_msg(timeout=TIMEOUT)
416 display = KC.iopub_channel.get_msg(timeout=TIMEOUT)
439 validate_message(display, 'display_data', parent=msg_id)
417 validate_message(display, 'display_data', parent=msg_id)
440 data = display['content']['data']
418 data = display['content']['data']
441 nt.assert_equal(data['text/plain'], u'1')
419 nt.assert_equal(data['text/plain'], u'1')
442
420
@@ -1,797 +1,793 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 """An interactive kernel that talks to frontends over 0MQ."""
2 """An interactive kernel that talks to frontends over 0MQ."""
3
3
4 # Copyright (c) IPython Development Team.
4 # Copyright (c) IPython Development Team.
5 # Distributed under the terms of the Modified BSD License.
5 # Distributed under the terms of the Modified BSD License.
6
6
7 from __future__ import print_function
7 from __future__ import print_function
8
8
9 import sys
9 import sys
10 import time
10 import time
11 import traceback
11 import traceback
12 import logging
12 import logging
13 import uuid
13 import uuid
14
14
15 from datetime import datetime
15 from datetime import datetime
16 from signal import (
16 from signal import (
17 signal, default_int_handler, SIGINT
17 signal, default_int_handler, SIGINT
18 )
18 )
19
19
20 import zmq
20 import zmq
21 from zmq.eventloop import ioloop
21 from zmq.eventloop import ioloop
22 from zmq.eventloop.zmqstream import ZMQStream
22 from zmq.eventloop.zmqstream import ZMQStream
23
23
24 from IPython.config.configurable import Configurable
24 from IPython.config.configurable import Configurable
25 from IPython.core.error import StdinNotImplementedError
25 from IPython.core.error import StdinNotImplementedError
26 from IPython.core import release
26 from IPython.core import release
27 from IPython.utils import py3compat
27 from IPython.utils import py3compat
28 from IPython.utils.py3compat import builtin_mod, unicode_type, string_types
28 from IPython.utils.py3compat import builtin_mod, unicode_type, string_types
29 from IPython.utils.jsonutil import json_clean
29 from IPython.utils.jsonutil import json_clean
30 from IPython.utils.traitlets import (
30 from IPython.utils.traitlets import (
31 Any, Instance, Float, Dict, List, Set, Integer, Unicode,
31 Any, Instance, Float, Dict, List, Set, Integer, Unicode,
32 Type, Bool,
32 Type, Bool,
33 )
33 )
34
34
35 from .serialize import serialize_object, unpack_apply_message
35 from .serialize import serialize_object, unpack_apply_message
36 from .session import Session
36 from .session import Session
37 from .zmqshell import ZMQInteractiveShell
37 from .zmqshell import ZMQInteractiveShell
38
38
39
39
40 #-----------------------------------------------------------------------------
40 #-----------------------------------------------------------------------------
41 # Main kernel class
41 # Main kernel class
42 #-----------------------------------------------------------------------------
42 #-----------------------------------------------------------------------------
43
43
44 protocol_version = release.kernel_protocol_version
44 protocol_version = release.kernel_protocol_version
45 ipython_version = release.version
45 ipython_version = release.version
46 language_version = sys.version.split()[0]
46 language_version = sys.version.split()[0]
47
47
48
48
49 class Kernel(Configurable):
49 class Kernel(Configurable):
50
50
51 #---------------------------------------------------------------------------
51 #---------------------------------------------------------------------------
52 # Kernel interface
52 # Kernel interface
53 #---------------------------------------------------------------------------
53 #---------------------------------------------------------------------------
54
54
55 # attribute to override with a GUI
55 # attribute to override with a GUI
56 eventloop = Any(None)
56 eventloop = Any(None)
57 def _eventloop_changed(self, name, old, new):
57 def _eventloop_changed(self, name, old, new):
58 """schedule call to eventloop from IOLoop"""
58 """schedule call to eventloop from IOLoop"""
59 loop = ioloop.IOLoop.instance()
59 loop = ioloop.IOLoop.instance()
60 loop.add_callback(self.enter_eventloop)
60 loop.add_callback(self.enter_eventloop)
61
61
62 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
62 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
63 shell_class = Type(ZMQInteractiveShell)
63 shell_class = Type(ZMQInteractiveShell)
64
64
65 session = Instance(Session)
65 session = Instance(Session)
66 profile_dir = Instance('IPython.core.profiledir.ProfileDir')
66 profile_dir = Instance('IPython.core.profiledir.ProfileDir')
67 shell_streams = List()
67 shell_streams = List()
68 control_stream = Instance(ZMQStream)
68 control_stream = Instance(ZMQStream)
69 iopub_socket = Instance(zmq.Socket)
69 iopub_socket = Instance(zmq.Socket)
70 stdin_socket = Instance(zmq.Socket)
70 stdin_socket = Instance(zmq.Socket)
71 log = Instance(logging.Logger)
71 log = Instance(logging.Logger)
72
72
73 user_module = Any()
73 user_module = Any()
74 def _user_module_changed(self, name, old, new):
74 def _user_module_changed(self, name, old, new):
75 if self.shell is not None:
75 if self.shell is not None:
76 self.shell.user_module = new
76 self.shell.user_module = new
77
77
78 user_ns = Instance(dict, args=None, allow_none=True)
78 user_ns = Instance(dict, args=None, allow_none=True)
79 def _user_ns_changed(self, name, old, new):
79 def _user_ns_changed(self, name, old, new):
80 if self.shell is not None:
80 if self.shell is not None:
81 self.shell.user_ns = new
81 self.shell.user_ns = new
82 self.shell.init_user_ns()
82 self.shell.init_user_ns()
83
83
84 # identities:
84 # identities:
85 int_id = Integer(-1)
85 int_id = Integer(-1)
86 ident = Unicode()
86 ident = Unicode()
87
87
88 def _ident_default(self):
88 def _ident_default(self):
89 return unicode_type(uuid.uuid4())
89 return unicode_type(uuid.uuid4())
90
90
91 # Private interface
91 # Private interface
92
92
93 _darwin_app_nap = Bool(True, config=True,
93 _darwin_app_nap = Bool(True, config=True,
94 help="""Whether to use appnope for compatiblity with OS X App Nap.
94 help="""Whether to use appnope for compatiblity with OS X App Nap.
95
95
96 Only affects OS X >= 10.9.
96 Only affects OS X >= 10.9.
97 """
97 """
98 )
98 )
99
99
100 # Time to sleep after flushing the stdout/err buffers in each execute
100 # Time to sleep after flushing the stdout/err buffers in each execute
101 # cycle. While this introduces a hard limit on the minimal latency of the
101 # cycle. While this introduces a hard limit on the minimal latency of the
102 # execute cycle, it helps prevent output synchronization problems for
102 # execute cycle, it helps prevent output synchronization problems for
103 # clients.
103 # clients.
104 # Units are in seconds. The minimum zmq latency on localhost is probably
104 # Units are in seconds. The minimum zmq latency on localhost is probably
105 # ~150 microseconds, so set this to 500us for now. We may need to increase it
105 # ~150 microseconds, so set this to 500us for now. We may need to increase it
106 # a little if it's not enough after more interactive testing.
106 # a little if it's not enough after more interactive testing.
107 _execute_sleep = Float(0.0005, config=True)
107 _execute_sleep = Float(0.0005, config=True)
108
108
109 # Frequency of the kernel's event loop.
109 # Frequency of the kernel's event loop.
110 # Units are in seconds, kernel subclasses for GUI toolkits may need to
110 # Units are in seconds, kernel subclasses for GUI toolkits may need to
111 # adapt to milliseconds.
111 # adapt to milliseconds.
112 _poll_interval = Float(0.05, config=True)
112 _poll_interval = Float(0.05, config=True)
113
113
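Both traits above are declared config=True, so they should be tunable from a profile's ipython_config.py. A minimal sketch, assuming the usual traitlets config syntax applies to the Kernel class (not taken from the diff):

c = get_config()
c.Kernel._execute_sleep = 0.001   # seconds slept after flushing stdout/stderr
c.Kernel._poll_interval = 0.05    # seconds between eventloop iterations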
114 # If the shutdown was requested over the network, we leave here the
114 # If the shutdown was requested over the network, we leave here the
115 # necessary reply message so it can be sent by our registered atexit
115 # necessary reply message so it can be sent by our registered atexit
116 # handler. This ensures that the reply is only sent to clients truly at
116 # handler. This ensures that the reply is only sent to clients truly at
117 # the end of our shutdown process (which happens after the underlying
117 # the end of our shutdown process (which happens after the underlying
118 # IPython shell's own shutdown).
118 # IPython shell's own shutdown).
119 _shutdown_message = None
119 _shutdown_message = None
120
120
121 # This is a dict of the port numbers that the kernel is listening on. It is set
121 # This is a dict of the port numbers that the kernel is listening on. It is set
122 # by record_ports and used by connect_request.
122 # by record_ports and used by connect_request.
123 _recorded_ports = Dict()
123 _recorded_ports = Dict()
124
124
125 # A reference to the Python builtin 'raw_input' function.
125 # A reference to the Python builtin 'raw_input' function.
126 # (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3)
126 # (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3)
127 _sys_raw_input = Any()
127 _sys_raw_input = Any()
128 _sys_eval_input = Any()
128 _sys_eval_input = Any()
129
129
130 # set of aborted msg_ids
130 # set of aborted msg_ids
131 aborted = Set()
131 aborted = Set()
132
132
133
133
134 def __init__(self, **kwargs):
134 def __init__(self, **kwargs):
135 super(Kernel, self).__init__(**kwargs)
135 super(Kernel, self).__init__(**kwargs)
136
136
137 # Initialize the InteractiveShell subclass
137 # Initialize the InteractiveShell subclass
138 self.shell = self.shell_class.instance(parent=self,
138 self.shell = self.shell_class.instance(parent=self,
139 profile_dir = self.profile_dir,
139 profile_dir = self.profile_dir,
140 user_module = self.user_module,
140 user_module = self.user_module,
141 user_ns = self.user_ns,
141 user_ns = self.user_ns,
142 kernel = self,
142 kernel = self,
143 )
143 )
144 self.shell.displayhook.session = self.session
144 self.shell.displayhook.session = self.session
145 self.shell.displayhook.pub_socket = self.iopub_socket
145 self.shell.displayhook.pub_socket = self.iopub_socket
146 self.shell.displayhook.topic = self._topic('execute_result')
146 self.shell.displayhook.topic = self._topic('execute_result')
147 self.shell.display_pub.session = self.session
147 self.shell.display_pub.session = self.session
148 self.shell.display_pub.pub_socket = self.iopub_socket
148 self.shell.display_pub.pub_socket = self.iopub_socket
149 self.shell.data_pub.session = self.session
149 self.shell.data_pub.session = self.session
150 self.shell.data_pub.pub_socket = self.iopub_socket
150 self.shell.data_pub.pub_socket = self.iopub_socket
151
151
152 # TMP - hack while developing
152 # TMP - hack while developing
153 self.shell._reply_content = None
153 self.shell._reply_content = None
154
154
155 # Build dict of handlers for message types
155 # Build dict of handlers for message types
156 msg_types = [ 'execute_request', 'complete_request',
156 msg_types = [ 'execute_request', 'complete_request',
157 'object_info_request', 'history_request',
157 'object_info_request', 'history_request',
158 'kernel_info_request',
158 'kernel_info_request',
159 'connect_request', 'shutdown_request',
159 'connect_request', 'shutdown_request',
160 'apply_request',
160 'apply_request',
161 ]
161 ]
162 self.shell_handlers = {}
162 self.shell_handlers = {}
163 for msg_type in msg_types:
163 for msg_type in msg_types:
164 self.shell_handlers[msg_type] = getattr(self, msg_type)
164 self.shell_handlers[msg_type] = getattr(self, msg_type)
165
165
166 comm_msg_types = [ 'comm_open', 'comm_msg', 'comm_close' ]
166 comm_msg_types = [ 'comm_open', 'comm_msg', 'comm_close' ]
167 comm_manager = self.shell.comm_manager
167 comm_manager = self.shell.comm_manager
168 for msg_type in comm_msg_types:
168 for msg_type in comm_msg_types:
169 self.shell_handlers[msg_type] = getattr(comm_manager, msg_type)
169 self.shell_handlers[msg_type] = getattr(comm_manager, msg_type)
170
170
171 control_msg_types = msg_types + [ 'clear_request', 'abort_request' ]
171 control_msg_types = msg_types + [ 'clear_request', 'abort_request' ]
172 self.control_handlers = {}
172 self.control_handlers = {}
173 for msg_type in control_msg_types:
173 for msg_type in control_msg_types:
174 self.control_handlers[msg_type] = getattr(self, msg_type)
174 self.control_handlers[msg_type] = getattr(self, msg_type)
175
175
176
176
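A minimal sketch of the dispatch-table pattern built in __init__ above: message types map to bound methods that all share the (stream, idents, msg) signature and are looked up by the header's msg_type. The MiniDispatcher class and its ping_request handler are invented for illustration only.

class MiniDispatcher(object):
    def __init__(self):
        # same getattr trick as above: handler name == message type
        self.handlers = {}
        for msg_type in ('ping_request',):
            self.handlers[msg_type] = getattr(self, msg_type)

    def ping_request(self, stream, idents, msg):
        print('pong:', msg['header']['msg_id'])

    def dispatch(self, stream, idents, msg):
        handler = self.handlers.get(msg['header']['msg_type'])
        if handler is None:
            print('unknown message type')
        else:
            handler(stream, idents, msg)

# MiniDispatcher().dispatch(None, [], {'header': {'msg_type': 'ping_request', 'msg_id': '1'}})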
177 def dispatch_control(self, msg):
177 def dispatch_control(self, msg):
178 """dispatch control requests"""
178 """dispatch control requests"""
179 idents,msg = self.session.feed_identities(msg, copy=False)
179 idents,msg = self.session.feed_identities(msg, copy=False)
180 try:
180 try:
181 msg = self.session.unserialize(msg, content=True, copy=False)
181 msg = self.session.unserialize(msg, content=True, copy=False)
182 except:
182 except:
183 self.log.error("Invalid Control Message", exc_info=True)
183 self.log.error("Invalid Control Message", exc_info=True)
184 return
184 return
185
185
186 self.log.debug("Control received: %s", msg)
186 self.log.debug("Control received: %s", msg)
187
187
188 header = msg['header']
188 header = msg['header']
189 msg_id = header['msg_id']
189 msg_id = header['msg_id']
190 msg_type = header['msg_type']
190 msg_type = header['msg_type']
191
191
192 handler = self.control_handlers.get(msg_type, None)
192 handler = self.control_handlers.get(msg_type, None)
193 if handler is None:
193 if handler is None:
194 self.log.error("UNKNOWN CONTROL MESSAGE TYPE: %r", msg_type)
194 self.log.error("UNKNOWN CONTROL MESSAGE TYPE: %r", msg_type)
195 else:
195 else:
196 try:
196 try:
197 handler(self.control_stream, idents, msg)
197 handler(self.control_stream, idents, msg)
198 except Exception:
198 except Exception:
199 self.log.error("Exception in control handler:", exc_info=True)
199 self.log.error("Exception in control handler:", exc_info=True)
200
200
201 def dispatch_shell(self, stream, msg):
201 def dispatch_shell(self, stream, msg):
202 """dispatch shell requests"""
202 """dispatch shell requests"""
203 # flush control requests first
203 # flush control requests first
204 if self.control_stream:
204 if self.control_stream:
205 self.control_stream.flush()
205 self.control_stream.flush()
206
206
207 idents,msg = self.session.feed_identities(msg, copy=False)
207 idents,msg = self.session.feed_identities(msg, copy=False)
208 try:
208 try:
209 msg = self.session.unserialize(msg, content=True, copy=False)
209 msg = self.session.unserialize(msg, content=True, copy=False)
210 except:
210 except:
211 self.log.error("Invalid Message", exc_info=True)
211 self.log.error("Invalid Message", exc_info=True)
212 return
212 return
213
213
214 header = msg['header']
214 header = msg['header']
215 msg_id = header['msg_id']
215 msg_id = header['msg_id']
216 msg_type = msg['header']['msg_type']
216 msg_type = msg['header']['msg_type']
217
217
218 # Print some info about this message and leave a '--->' marker, so it's
218 # Print some info about this message and leave a '--->' marker, so it's
219 # easier to visually trace the message chain when debugging. Each
219 # easier to visually trace the message chain when debugging. Each
220 # handler prints its message at the end.
220 # handler prints its message at the end.
221 self.log.debug('\n*** MESSAGE TYPE:%s***', msg_type)
221 self.log.debug('\n*** MESSAGE TYPE:%s***', msg_type)
222 self.log.debug(' Content: %s\n --->\n ', msg['content'])
222 self.log.debug(' Content: %s\n --->\n ', msg['content'])
223
223
224 if msg_id in self.aborted:
224 if msg_id in self.aborted:
225 self.aborted.remove(msg_id)
225 self.aborted.remove(msg_id)
226 # is it safe to assume a msg_id will not be resubmitted?
226 # is it safe to assume a msg_id will not be resubmitted?
227 reply_type = msg_type.split('_')[0] + '_reply'
227 reply_type = msg_type.split('_')[0] + '_reply'
228 status = {'status' : 'aborted'}
228 status = {'status' : 'aborted'}
229 md = {'engine' : self.ident}
229 md = {'engine' : self.ident}
230 md.update(status)
230 md.update(status)
231 reply_msg = self.session.send(stream, reply_type, metadata=md,
231 reply_msg = self.session.send(stream, reply_type, metadata=md,
232 content=status, parent=msg, ident=idents)
232 content=status, parent=msg, ident=idents)
233 return
233 return
234
234
235 handler = self.shell_handlers.get(msg_type, None)
235 handler = self.shell_handlers.get(msg_type, None)
236 if handler is None:
236 if handler is None:
237 self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type)
237 self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type)
238 else:
238 else:
239 # ensure default_int_handler during handler call
239 # ensure default_int_handler during handler call
240 sig = signal(SIGINT, default_int_handler)
240 sig = signal(SIGINT, default_int_handler)
241 try:
241 try:
242 handler(stream, idents, msg)
242 handler(stream, idents, msg)
243 except Exception:
243 except Exception:
244 self.log.error("Exception in message handler:", exc_info=True)
244 self.log.error("Exception in message handler:", exc_info=True)
245 finally:
245 finally:
246 signal(SIGINT, sig)
246 signal(SIGINT, sig)
247
247
248 def enter_eventloop(self):
248 def enter_eventloop(self):
249 """enter eventloop"""
249 """enter eventloop"""
250 self.log.info("entering eventloop %s", self.eventloop)
250 self.log.info("entering eventloop %s", self.eventloop)
251 for stream in self.shell_streams:
251 for stream in self.shell_streams:
252 # flush any pending replies,
252 # flush any pending replies,
253 # which may be skipped by entering the eventloop
253 # which may be skipped by entering the eventloop
254 stream.flush(zmq.POLLOUT)
254 stream.flush(zmq.POLLOUT)
255 # restore default_int_handler
255 # restore default_int_handler
256 signal(SIGINT, default_int_handler)
256 signal(SIGINT, default_int_handler)
257 while self.eventloop is not None:
257 while self.eventloop is not None:
258 try:
258 try:
259 self.eventloop(self)
259 self.eventloop(self)
260 except KeyboardInterrupt:
260 except KeyboardInterrupt:
261 # Ctrl-C shouldn't crash the kernel
261 # Ctrl-C shouldn't crash the kernel
262 self.log.error("KeyboardInterrupt caught in kernel")
262 self.log.error("KeyboardInterrupt caught in kernel")
263 continue
263 continue
264 else:
264 else:
265 # eventloop exited cleanly, this means we should stop (right?)
265 # eventloop exited cleanly, this means we should stop (right?)
266 self.eventloop = None
266 self.eventloop = None
267 break
267 break
268 self.log.info("exiting eventloop")
268 self.log.info("exiting eventloop")
269
269
270 def start(self):
270 def start(self):
271 """register dispatchers for streams"""
271 """register dispatchers for streams"""
272 self.shell.exit_now = False
272 self.shell.exit_now = False
273 if self.control_stream:
273 if self.control_stream:
274 self.control_stream.on_recv(self.dispatch_control, copy=False)
274 self.control_stream.on_recv(self.dispatch_control, copy=False)
275
275
276 def make_dispatcher(stream):
276 def make_dispatcher(stream):
277 def dispatcher(msg):
277 def dispatcher(msg):
278 return self.dispatch_shell(stream, msg)
278 return self.dispatch_shell(stream, msg)
279 return dispatcher
279 return dispatcher
280
280
281 for s in self.shell_streams:
281 for s in self.shell_streams:
282 s.on_recv(make_dispatcher(s), copy=False)
282 s.on_recv(make_dispatcher(s), copy=False)
283
283
284 # publish idle status
284 # publish idle status
285 self._publish_status('starting')
285 self._publish_status('starting')
286
286
287 def do_one_iteration(self):
287 def do_one_iteration(self):
288 """step eventloop just once"""
288 """step eventloop just once"""
289 if self.control_stream:
289 if self.control_stream:
290 self.control_stream.flush()
290 self.control_stream.flush()
291 for stream in self.shell_streams:
291 for stream in self.shell_streams:
292 # handle at most one request per iteration
292 # handle at most one request per iteration
293 stream.flush(zmq.POLLIN, 1)
293 stream.flush(zmq.POLLIN, 1)
294 stream.flush(zmq.POLLOUT)
294 stream.flush(zmq.POLLOUT)
295
295
296
296
297 def record_ports(self, ports):
297 def record_ports(self, ports):
298 """Record the ports that this kernel is using.
298 """Record the ports that this kernel is using.
299
299
300 The creator of the Kernel instance must call this method if they
300 The creator of the Kernel instance must call this method if they
301 want the :meth:`connect_request` method to return the port numbers.
301 want the :meth:`connect_request` method to return the port numbers.
302 """
302 """
303 self._recorded_ports = ports
303 self._recorded_ports = ports
304
304
305 #---------------------------------------------------------------------------
305 #---------------------------------------------------------------------------
306 # Kernel request handlers
306 # Kernel request handlers
307 #---------------------------------------------------------------------------
307 #---------------------------------------------------------------------------
308
308
309 def _make_metadata(self, other=None):
309 def _make_metadata(self, other=None):
310 """init metadata dict, for execute/apply_reply"""
310 """init metadata dict, for execute/apply_reply"""
311 new_md = {
311 new_md = {
312 'dependencies_met' : True,
312 'dependencies_met' : True,
313 'engine' : self.ident,
313 'engine' : self.ident,
314 'started': datetime.now(),
314 'started': datetime.now(),
315 }
315 }
316 if other:
316 if other:
317 new_md.update(other)
317 new_md.update(other)
318 return new_md
318 return new_md
319
319
320 def _publish_execute_input(self, code, parent, execution_count):
320 def _publish_execute_input(self, code, parent, execution_count):
321 """Publish the code request on the iopub stream."""
321 """Publish the code request on the iopub stream."""
322
322
323 self.session.send(self.iopub_socket, u'execute_input',
323 self.session.send(self.iopub_socket, u'execute_input',
324 {u'code':code, u'execution_count': execution_count},
324 {u'code':code, u'execution_count': execution_count},
325 parent=parent, ident=self._topic('execute_input')
325 parent=parent, ident=self._topic('execute_input')
326 )
326 )
327
327
328 def _publish_status(self, status, parent=None):
328 def _publish_status(self, status, parent=None):
329 """send status (busy/idle) on IOPub"""
329 """send status (busy/idle) on IOPub"""
330 self.session.send(self.iopub_socket,
330 self.session.send(self.iopub_socket,
331 u'status',
331 u'status',
332 {u'execution_state': status},
332 {u'execution_state': status},
333 parent=parent,
333 parent=parent,
334 ident=self._topic('status'),
334 ident=self._topic('status'),
335 )
335 )
336
336
337
337
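The helper above brackets every request with a busy/idle pair on IOPub; the content dicts below mirror what _publish_status sends and are shown only for reference:

busy = {'execution_state': 'busy'}    # published before a request is handled
idle = {'execution_state': 'idle'}    # published after the reply has been sent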
338 def execute_request(self, stream, ident, parent):
338 def execute_request(self, stream, ident, parent):
339 """handle an execute_request"""
339 """handle an execute_request"""
340
340
341 self._publish_status(u'busy', parent)
341 self._publish_status(u'busy', parent)
342
342
343 try:
343 try:
344 content = parent[u'content']
344 content = parent[u'content']
345 code = py3compat.cast_unicode_py2(content[u'code'])
345 code = py3compat.cast_unicode_py2(content[u'code'])
346 silent = content[u'silent']
346 silent = content[u'silent']
347 store_history = content.get(u'store_history', not silent)
347 store_history = content.get(u'store_history', not silent)
348 except:
348 except:
349 self.log.error("Got bad msg: ")
349 self.log.error("Got bad msg: ")
350 self.log.error("%s", parent)
350 self.log.error("%s", parent)
351 return
351 return
352
352
353 md = self._make_metadata(parent['metadata'])
353 md = self._make_metadata(parent['metadata'])
354
354
355 shell = self.shell # we'll need this a lot here
355 shell = self.shell # we'll need this a lot here
356
356
357 # Replace raw_input. Note that it is not sufficient to replace
357 # Replace raw_input. Note that it is not sufficient to replace
358 # raw_input in the user namespace.
358 # raw_input in the user namespace.
359 if content.get('allow_stdin', False):
359 if content.get('allow_stdin', False):
360 raw_input = lambda prompt='': self._raw_input(prompt, ident, parent)
360 raw_input = lambda prompt='': self._raw_input(prompt, ident, parent)
361 input = lambda prompt='': eval(raw_input(prompt))
361 input = lambda prompt='': eval(raw_input(prompt))
362 else:
362 else:
363 raw_input = input = lambda prompt='' : self._no_raw_input()
363 raw_input = input = lambda prompt='' : self._no_raw_input()
364
364
365 if py3compat.PY3:
365 if py3compat.PY3:
366 self._sys_raw_input = builtin_mod.input
366 self._sys_raw_input = builtin_mod.input
367 builtin_mod.input = raw_input
367 builtin_mod.input = raw_input
368 else:
368 else:
369 self._sys_raw_input = builtin_mod.raw_input
369 self._sys_raw_input = builtin_mod.raw_input
370 self._sys_eval_input = builtin_mod.input
370 self._sys_eval_input = builtin_mod.input
371 builtin_mod.raw_input = raw_input
371 builtin_mod.raw_input = raw_input
372 builtin_mod.input = input
372 builtin_mod.input = input
373
373
374 # Set the parent message of the display hook and out streams.
374 # Set the parent message of the display hook and out streams.
375 shell.set_parent(parent)
375 shell.set_parent(parent)
376
376
377 # Re-broadcast our input for the benefit of listening clients, and
377 # Re-broadcast our input for the benefit of listening clients, and
378 # start computing output
378 # start computing output
379 if not silent:
379 if not silent:
380 self._publish_execute_input(code, parent, shell.execution_count)
380 self._publish_execute_input(code, parent, shell.execution_count)
381
381
382 reply_content = {}
382 reply_content = {}
383 # FIXME: the shell calls the exception handler itself.
383 # FIXME: the shell calls the exception handler itself.
384 shell._reply_content = None
384 shell._reply_content = None
385 try:
385 try:
386 shell.run_cell(code, store_history=store_history, silent=silent)
386 shell.run_cell(code, store_history=store_history, silent=silent)
387 except:
387 except:
388 status = u'error'
388 status = u'error'
389 # FIXME: this code right now isn't being used yet by default,
389 # FIXME: this code right now isn't being used yet by default,
390 # because the run_cell() call above directly fires off exception
390 # because the run_cell() call above directly fires off exception
391 # reporting. This code, therefore, is only active in the scenario
391 # reporting. This code, therefore, is only active in the scenario
392 # where runlines itself has an unhandled exception. We need to
392 # where runlines itself has an unhandled exception. We need to
393 # unify this, so that all exception construction comes from a
393 # unify this, so that all exception construction comes from a
394 # single location in the codebase.
394 # single location in the codebase.
395 etype, evalue, tb = sys.exc_info()
395 etype, evalue, tb = sys.exc_info()
396 tb_list = traceback.format_exception(etype, evalue, tb)
396 tb_list = traceback.format_exception(etype, evalue, tb)
397 reply_content.update(shell._showtraceback(etype, evalue, tb_list))
397 reply_content.update(shell._showtraceback(etype, evalue, tb_list))
398 else:
398 else:
399 status = u'ok'
399 status = u'ok'
400 finally:
400 finally:
401 # Restore raw_input.
401 # Restore raw_input.
402 if py3compat.PY3:
402 if py3compat.PY3:
403 builtin_mod.input = self._sys_raw_input
403 builtin_mod.input = self._sys_raw_input
404 else:
404 else:
405 builtin_mod.raw_input = self._sys_raw_input
405 builtin_mod.raw_input = self._sys_raw_input
406 builtin_mod.input = self._sys_eval_input
406 builtin_mod.input = self._sys_eval_input
407
407
408 reply_content[u'status'] = status
408 reply_content[u'status'] = status
409
409
410 # Return the execution counter so clients can display prompts
410 # Return the execution counter so clients can display prompts
411 reply_content['execution_count'] = shell.execution_count - 1
411 reply_content['execution_count'] = shell.execution_count - 1
412
412
413 # FIXME - fish exception info out of shell, possibly left there by
413 # FIXME - fish exception info out of shell, possibly left there by
414 # runlines. We'll need to clean up this logic later.
414 # runlines. We'll need to clean up this logic later.
415 if shell._reply_content is not None:
415 if shell._reply_content is not None:
416 reply_content.update(shell._reply_content)
416 reply_content.update(shell._reply_content)
417 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='execute')
417 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='execute')
418 reply_content['engine_info'] = e_info
418 reply_content['engine_info'] = e_info
419 # reset after use
419 # reset after use
420 shell._reply_content = None
420 shell._reply_content = None
421
421
422 if 'traceback' in reply_content:
422 if 'traceback' in reply_content:
423 self.log.info("Exception in execute request:\n%s", '\n'.join(reply_content['traceback']))
423 self.log.info("Exception in execute request:\n%s", '\n'.join(reply_content['traceback']))
424
424
425
425
426 # At this point, we can tell whether the main code execution succeeded
426 # At this point, we can tell whether the main code execution succeeded
427 # or not. If it did, we proceed to evaluate user_variables/expressions
427 # or not. If it did, we proceed to evaluate user_expressions
428 if reply_content['status'] == 'ok':
428 if reply_content['status'] == 'ok':
429 reply_content[u'user_variables'] = \
430 shell.user_variables(content.get(u'user_variables', []))
431 reply_content[u'user_expressions'] = \
429 reply_content[u'user_expressions'] = \
432 shell.user_expressions(content.get(u'user_expressions', {}))
430 shell.user_expressions(content.get(u'user_expressions', {}))
433 else:
431 else:
434 # If there was an error, don't even try to compute variables or
432 # If there was an error, don't even try to compute expressions
435 # expressions
436 reply_content[u'user_variables'] = {}
437 reply_content[u'user_expressions'] = {}
433 reply_content[u'user_expressions'] = {}
438
434
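With user_variables gone, a client can request the same values through user_expressions by mapping each name to itself. A sketch of the two execute_request content payloads (values are illustrative, not taken from the diff):

old_content = {'code': 'x = 1', 'silent': False,
               'user_variables': ['x']}           # removed by this commit
new_content = {'code': 'x = 1', 'silent': False,
               'user_expressions': {'x': 'x'}}    # roughly equivalent result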
439 # Payloads should be retrieved regardless of outcome, so we can both
435 # Payloads should be retrieved regardless of outcome, so we can both
440 # recover partial output (that could have been generated early in a
436 # recover partial output (that could have been generated early in a
441 # block, before an error) and clear the payload system always.
437 # block, before an error) and clear the payload system always.
442 reply_content[u'payload'] = shell.payload_manager.read_payload()
438 reply_content[u'payload'] = shell.payload_manager.read_payload()
443 # Be aggressive about clearing the payload because we don't want
439 # Be aggressive about clearing the payload because we don't want
444 # it to sit in memory until the next execute_request comes in.
440 # it to sit in memory until the next execute_request comes in.
445 shell.payload_manager.clear_payload()
441 shell.payload_manager.clear_payload()
446
442
447 # Flush output before sending the reply.
443 # Flush output before sending the reply.
448 sys.stdout.flush()
444 sys.stdout.flush()
449 sys.stderr.flush()
445 sys.stderr.flush()
450 # FIXME: on rare occasions, the flush doesn't seem to make it to the
446 # FIXME: on rare occasions, the flush doesn't seem to make it to the
451 # clients... This seems to mitigate the problem, but we definitely need
447 # clients... This seems to mitigate the problem, but we definitely need
452 # to better understand what's going on.
448 # to better understand what's going on.
453 if self._execute_sleep:
449 if self._execute_sleep:
454 time.sleep(self._execute_sleep)
450 time.sleep(self._execute_sleep)
455
451
456 # Send the reply.
452 # Send the reply.
457 reply_content = json_clean(reply_content)
453 reply_content = json_clean(reply_content)
458
454
459 md['status'] = reply_content['status']
455 md['status'] = reply_content['status']
460 if reply_content['status'] == 'error' and \
456 if reply_content['status'] == 'error' and \
461 reply_content['ename'] == 'UnmetDependency':
457 reply_content['ename'] == 'UnmetDependency':
462 md['dependencies_met'] = False
458 md['dependencies_met'] = False
463
459
464 reply_msg = self.session.send(stream, u'execute_reply',
460 reply_msg = self.session.send(stream, u'execute_reply',
465 reply_content, parent, metadata=md,
461 reply_content, parent, metadata=md,
466 ident=ident)
462 ident=ident)
467
463
468 self.log.debug("%s", reply_msg)
464 self.log.debug("%s", reply_msg)
469
465
470 if not silent and reply_msg['content']['status'] == u'error':
466 if not silent and reply_msg['content']['status'] == u'error':
471 self._abort_queues()
467 self._abort_queues()
472
468
473 self._publish_status(u'idle', parent)
469 self._publish_status(u'idle', parent)
474
470
475 def complete_request(self, stream, ident, parent):
471 def complete_request(self, stream, ident, parent):
476 txt, matches = self._complete(parent)
472 txt, matches = self._complete(parent)
477 matches = {'matches' : matches,
473 matches = {'matches' : matches,
478 'matched_text' : txt,
474 'matched_text' : txt,
479 'status' : 'ok'}
475 'status' : 'ok'}
480 matches = json_clean(matches)
476 matches = json_clean(matches)
481 completion_msg = self.session.send(stream, 'complete_reply',
477 completion_msg = self.session.send(stream, 'complete_reply',
482 matches, parent, ident)
478 matches, parent, ident)
483 self.log.debug("%s", completion_msg)
479 self.log.debug("%s", completion_msg)
484
480
485 def object_info_request(self, stream, ident, parent):
481 def object_info_request(self, stream, ident, parent):
486 content = parent['content']
482 content = parent['content']
487 object_info = self.shell.object_inspect(content['oname'],
483 object_info = self.shell.object_inspect(content['oname'],
488 detail_level = content.get('detail_level', 0)
484 detail_level = content.get('detail_level', 0)
489 )
485 )
490 # Before we send this object over, we scrub it for JSON usage
486 # Before we send this object over, we scrub it for JSON usage
491 oinfo = json_clean(object_info)
487 oinfo = json_clean(object_info)
492 msg = self.session.send(stream, 'object_info_reply',
488 msg = self.session.send(stream, 'object_info_reply',
493 oinfo, parent, ident)
489 oinfo, parent, ident)
494 self.log.debug("%s", msg)
490 self.log.debug("%s", msg)
495
491
496 def history_request(self, stream, ident, parent):
492 def history_request(self, stream, ident, parent):
497 # We need to pull these out, as passing **kwargs doesn't work with
493 # We need to pull these out, as passing **kwargs doesn't work with
498 # unicode keys before Python 2.6.5.
494 # unicode keys before Python 2.6.5.
499 hist_access_type = parent['content']['hist_access_type']
495 hist_access_type = parent['content']['hist_access_type']
500 raw = parent['content']['raw']
496 raw = parent['content']['raw']
501 output = parent['content']['output']
497 output = parent['content']['output']
502 if hist_access_type == 'tail':
498 if hist_access_type == 'tail':
503 n = parent['content']['n']
499 n = parent['content']['n']
504 hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
500 hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
505 include_latest=True)
501 include_latest=True)
506
502
507 elif hist_access_type == 'range':
503 elif hist_access_type == 'range':
508 session = parent['content']['session']
504 session = parent['content']['session']
509 start = parent['content']['start']
505 start = parent['content']['start']
510 stop = parent['content']['stop']
506 stop = parent['content']['stop']
511 hist = self.shell.history_manager.get_range(session, start, stop,
507 hist = self.shell.history_manager.get_range(session, start, stop,
512 raw=raw, output=output)
508 raw=raw, output=output)
513
509
514 elif hist_access_type == 'search':
510 elif hist_access_type == 'search':
515 n = parent['content'].get('n')
511 n = parent['content'].get('n')
516 unique = parent['content'].get('unique', False)
512 unique = parent['content'].get('unique', False)
517 pattern = parent['content']['pattern']
513 pattern = parent['content']['pattern']
518 hist = self.shell.history_manager.search(
514 hist = self.shell.history_manager.search(
519 pattern, raw=raw, output=output, n=n, unique=unique)
515 pattern, raw=raw, output=output, n=n, unique=unique)
520
516
521 else:
517 else:
522 hist = []
518 hist = []
523 hist = list(hist)
519 hist = list(hist)
524 content = {'history' : hist}
520 content = {'history' : hist}
525 content = json_clean(content)
521 content = json_clean(content)
526 msg = self.session.send(stream, 'history_reply',
522 msg = self.session.send(stream, 'history_reply',
527 content, parent, ident)
523 content, parent, ident)
528 self.log.debug("Sending history reply with %i entries", len(hist))
524 self.log.debug("Sending history reply with %i entries", len(hist))
529
525
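For reference, the three hist_access_type branches above accept request content shaped roughly as follows (keys taken from the code just shown; values are illustrative):

tail_req   = {'hist_access_type': 'tail',   'raw': True, 'output': False, 'n': 10}
range_req  = {'hist_access_type': 'range',  'raw': True, 'output': False,
              'session': 0, 'start': 1, 'stop': 5}
search_req = {'hist_access_type': 'search', 'raw': True, 'output': False,
              'pattern': '*plot*', 'n': 5, 'unique': True}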
530 def connect_request(self, stream, ident, parent):
526 def connect_request(self, stream, ident, parent):
531 if self._recorded_ports is not None:
527 if self._recorded_ports is not None:
532 content = self._recorded_ports.copy()
528 content = self._recorded_ports.copy()
533 else:
529 else:
534 content = {}
530 content = {}
535 msg = self.session.send(stream, 'connect_reply',
531 msg = self.session.send(stream, 'connect_reply',
536 content, parent, ident)
532 content, parent, ident)
537 self.log.debug("%s", msg)
533 self.log.debug("%s", msg)
538
534
539 def kernel_info_request(self, stream, ident, parent):
535 def kernel_info_request(self, stream, ident, parent):
540 vinfo = {
536 vinfo = {
541 'protocol_version': protocol_version,
537 'protocol_version': protocol_version,
542 'ipython_version': ipython_version,
538 'ipython_version': ipython_version,
543 'language_version': language_version,
539 'language_version': language_version,
544 'language': 'python',
540 'language': 'python',
545 }
541 }
546 msg = self.session.send(stream, 'kernel_info_reply',
542 msg = self.session.send(stream, 'kernel_info_reply',
547 vinfo, parent, ident)
543 vinfo, parent, ident)
548 self.log.debug("%s", msg)
544 self.log.debug("%s", msg)
549
545
550 def shutdown_request(self, stream, ident, parent):
546 def shutdown_request(self, stream, ident, parent):
551 self.shell.exit_now = True
547 self.shell.exit_now = True
552 content = dict(status='ok')
548 content = dict(status='ok')
553 content.update(parent['content'])
549 content.update(parent['content'])
554 self.session.send(stream, u'shutdown_reply', content, parent, ident=ident)
550 self.session.send(stream, u'shutdown_reply', content, parent, ident=ident)
555 # same content, but different msg_id for broadcasting on IOPub
551 # same content, but different msg_id for broadcasting on IOPub
556 self._shutdown_message = self.session.msg(u'shutdown_reply',
552 self._shutdown_message = self.session.msg(u'shutdown_reply',
557 content, parent
553 content, parent
558 )
554 )
559
555
560 self._at_shutdown()
556 self._at_shutdown()
561 # call sys.exit after a short delay
557 # call sys.exit after a short delay
562 loop = ioloop.IOLoop.instance()
558 loop = ioloop.IOLoop.instance()
563 loop.add_timeout(time.time()+0.1, loop.stop)
559 loop.add_timeout(time.time()+0.1, loop.stop)
564
560
565 #---------------------------------------------------------------------------
561 #---------------------------------------------------------------------------
566 # Engine methods
562 # Engine methods
567 #---------------------------------------------------------------------------
563 #---------------------------------------------------------------------------
568
564
569 def apply_request(self, stream, ident, parent):
565 def apply_request(self, stream, ident, parent):
570 try:
566 try:
571 content = parent[u'content']
567 content = parent[u'content']
572 bufs = parent[u'buffers']
568 bufs = parent[u'buffers']
573 msg_id = parent['header']['msg_id']
569 msg_id = parent['header']['msg_id']
574 except:
570 except:
575 self.log.error("Got bad msg: %s", parent, exc_info=True)
571 self.log.error("Got bad msg: %s", parent, exc_info=True)
576 return
572 return
577
573
578 self._publish_status(u'busy', parent)
574 self._publish_status(u'busy', parent)
579
575
580 # Set the parent message of the display hook and out streams.
576 # Set the parent message of the display hook and out streams.
581 shell = self.shell
577 shell = self.shell
582 shell.set_parent(parent)
578 shell.set_parent(parent)
583
579
584 # execute_input_msg = self.session.msg(u'execute_input',{u'code':code}, parent=parent)
580 # execute_input_msg = self.session.msg(u'execute_input',{u'code':code}, parent=parent)
585 # self.iopub_socket.send(execute_input_msg)
581 # self.iopub_socket.send(execute_input_msg)
586 # self.session.send(self.iopub_socket, u'execute_input', {u'code':code},parent=parent)
582 # self.session.send(self.iopub_socket, u'execute_input', {u'code':code},parent=parent)
587 md = self._make_metadata(parent['metadata'])
583 md = self._make_metadata(parent['metadata'])
588 try:
584 try:
589 working = shell.user_ns
585 working = shell.user_ns
590
586
591 prefix = "_"+str(msg_id).replace("-","")+"_"
587 prefix = "_"+str(msg_id).replace("-","")+"_"
592
588
593 f,args,kwargs = unpack_apply_message(bufs, working, copy=False)
589 f,args,kwargs = unpack_apply_message(bufs, working, copy=False)
594
590
595 fname = getattr(f, '__name__', 'f')
591 fname = getattr(f, '__name__', 'f')
596
592
597 fname = prefix+"f"
593 fname = prefix+"f"
598 argname = prefix+"args"
594 argname = prefix+"args"
599 kwargname = prefix+"kwargs"
595 kwargname = prefix+"kwargs"
600 resultname = prefix+"result"
596 resultname = prefix+"result"
601
597
602 ns = { fname : f, argname : args, kwargname : kwargs , resultname : None }
598 ns = { fname : f, argname : args, kwargname : kwargs , resultname : None }
603 # print ns
599 # print ns
604 working.update(ns)
600 working.update(ns)
605 code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname)
601 code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname)
606 try:
602 try:
607 exec(code, shell.user_global_ns, shell.user_ns)
603 exec(code, shell.user_global_ns, shell.user_ns)
608 result = working.get(resultname)
604 result = working.get(resultname)
609 finally:
605 finally:
610 for key in ns:
606 for key in ns:
611 working.pop(key)
607 working.pop(key)
612
608
613 result_buf = serialize_object(result,
609 result_buf = serialize_object(result,
614 buffer_threshold=self.session.buffer_threshold,
610 buffer_threshold=self.session.buffer_threshold,
615 item_threshold=self.session.item_threshold,
611 item_threshold=self.session.item_threshold,
616 )
612 )
617
613
618 except:
614 except:
619 # invoke IPython traceback formatting
615 # invoke IPython traceback formatting
620 shell.showtraceback()
616 shell.showtraceback()
621 # FIXME - fish exception info out of shell, possibly left there by
617 # FIXME - fish exception info out of shell, possibly left there by
622 # run_code. We'll need to clean up this logic later.
618 # run_code. We'll need to clean up this logic later.
623 reply_content = {}
619 reply_content = {}
624 if shell._reply_content is not None:
620 if shell._reply_content is not None:
625 reply_content.update(shell._reply_content)
621 reply_content.update(shell._reply_content)
626 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply')
622 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply')
627 reply_content['engine_info'] = e_info
623 reply_content['engine_info'] = e_info
628 # reset after use
624 # reset after use
629 shell._reply_content = None
625 shell._reply_content = None
630
626
631 self.session.send(self.iopub_socket, u'error', reply_content, parent=parent,
627 self.session.send(self.iopub_socket, u'error', reply_content, parent=parent,
632 ident=self._topic('error'))
628 ident=self._topic('error'))
633 self.log.info("Exception in apply request:\n%s", '\n'.join(reply_content['traceback']))
629 self.log.info("Exception in apply request:\n%s", '\n'.join(reply_content['traceback']))
634 result_buf = []
630 result_buf = []
635
631
636 if reply_content['ename'] == 'UnmetDependency':
632 if reply_content['ename'] == 'UnmetDependency':
637 md['dependencies_met'] = False
633 md['dependencies_met'] = False
638 else:
634 else:
639 reply_content = {'status' : 'ok'}
635 reply_content = {'status' : 'ok'}
640
636
641 # put 'ok'/'error' status in header, for scheduler introspection:
637 # put 'ok'/'error' status in header, for scheduler introspection:
642 md['status'] = reply_content['status']
638 md['status'] = reply_content['status']
643
639
644 # flush i/o
640 # flush i/o
645 sys.stdout.flush()
641 sys.stdout.flush()
646 sys.stderr.flush()
642 sys.stderr.flush()
647
643
648 reply_msg = self.session.send(stream, u'apply_reply', reply_content,
644 reply_msg = self.session.send(stream, u'apply_reply', reply_content,
649 parent=parent, ident=ident,buffers=result_buf, metadata=md)
645 parent=parent, ident=ident,buffers=result_buf, metadata=md)
650
646
651 self._publish_status(u'idle', parent)
647 self._publish_status(u'idle', parent)
652
648
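The temporary-namespace trick in apply_request can be hard to follow; here is a self-contained sketch of the same pattern, with invented names standing in for the msg_id-derived prefix and for shell.user_ns:

working = {}                      # stands in for shell.user_ns
prefix = "_demo_"                 # stands in for "_<msg_id>_"
working.update({prefix + "f": len,
                prefix + "args": ([1, 2, 3],),
                prefix + "kwargs": {}})
code = "%sresult = %sf(*%sargs, **%skwargs)" % ((prefix,) * 4)
try:
    exec(code, {}, working)                      # run the call in that namespace
    result = working.get(prefix + "result")      # -> 3
finally:
    for key in [k for k in working if k.startswith(prefix)]:
        working.pop(key)                         # leave the namespace unchanged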
653 #---------------------------------------------------------------------------
649 #---------------------------------------------------------------------------
654 # Control messages
650 # Control messages
655 #---------------------------------------------------------------------------
651 #---------------------------------------------------------------------------
656
652
657 def abort_request(self, stream, ident, parent):
653 def abort_request(self, stream, ident, parent):
658 """abort a specifig msg by id"""
654 """abort a specifig msg by id"""
659 msg_ids = parent['content'].get('msg_ids', None)
655 msg_ids = parent['content'].get('msg_ids', None)
660 if isinstance(msg_ids, string_types):
656 if isinstance(msg_ids, string_types):
661 msg_ids = [msg_ids]
657 msg_ids = [msg_ids]
662 if not msg_ids:
658 if not msg_ids:
663 self._abort_queues()
659 self._abort_queues()
664 for mid in msg_ids:
660 for mid in msg_ids:
665 self.aborted.add(str(mid))
661 self.aborted.add(str(mid))
666
662
667 content = dict(status='ok')
663 content = dict(status='ok')
668 reply_msg = self.session.send(stream, 'abort_reply', content=content,
664 reply_msg = self.session.send(stream, 'abort_reply', content=content,
669 parent=parent, ident=ident)
665 parent=parent, ident=ident)
670 self.log.debug("%s", reply_msg)
666 self.log.debug("%s", reply_msg)
671
667
672 def clear_request(self, stream, idents, parent):
668 def clear_request(self, stream, idents, parent):
673 """Clear our namespace."""
669 """Clear our namespace."""
674 self.shell.reset(False)
670 self.shell.reset(False)
675 msg = self.session.send(stream, 'clear_reply', ident=idents, parent=parent,
671 msg = self.session.send(stream, 'clear_reply', ident=idents, parent=parent,
676 content = dict(status='ok'))
672 content = dict(status='ok'))
677
673
678
674
679 #---------------------------------------------------------------------------
675 #---------------------------------------------------------------------------
680 # Protected interface
676 # Protected interface
681 #---------------------------------------------------------------------------
677 #---------------------------------------------------------------------------
682
678
683 def _wrap_exception(self, method=None):
679 def _wrap_exception(self, method=None):
684 # import here, because _wrap_exception is only used in parallel,
680 # import here, because _wrap_exception is only used in parallel,
685 # and parallel has higher min pyzmq version
681 # and parallel has higher min pyzmq version
686 from IPython.parallel.error import wrap_exception
682 from IPython.parallel.error import wrap_exception
687 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method=method)
683 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method=method)
688 content = wrap_exception(e_info)
684 content = wrap_exception(e_info)
689 return content
685 return content
690
686
691 def _topic(self, topic):
687 def _topic(self, topic):
692 """prefixed topic for IOPub messages"""
688 """prefixed topic for IOPub messages"""
693 if self.int_id >= 0:
689 if self.int_id >= 0:
694 base = "engine.%i" % self.int_id
690 base = "engine.%i" % self.int_id
695 else:
691 else:
696 base = "kernel.%s" % self.ident
692 base = "kernel.%s" % self.ident
697
693
698 return py3compat.cast_bytes("%s.%s" % (base, topic))
694 return py3compat.cast_bytes("%s.%s" % (base, topic))
699
695
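For concreteness, the prefixed IOPub topics produced by _topic() look like the following (identifiers invented for illustration):

engine_topic = b"engine.3.execute_result"   # int_id >= 0 (parallel engine case)
kernel_topic = b"kernel.1a2b3c.status"      # otherwise, based on the kernel ident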
700 def _abort_queues(self):
696 def _abort_queues(self):
701 for stream in self.shell_streams:
697 for stream in self.shell_streams:
702 if stream:
698 if stream:
703 self._abort_queue(stream)
699 self._abort_queue(stream)
704
700
705 def _abort_queue(self, stream):
701 def _abort_queue(self, stream):
706 poller = zmq.Poller()
702 poller = zmq.Poller()
707 poller.register(stream.socket, zmq.POLLIN)
703 poller.register(stream.socket, zmq.POLLIN)
708 while True:
704 while True:
709 idents,msg = self.session.recv(stream, zmq.NOBLOCK, content=True)
705 idents,msg = self.session.recv(stream, zmq.NOBLOCK, content=True)
710 if msg is None:
706 if msg is None:
711 return
707 return
712
708
713 self.log.info("Aborting:")
709 self.log.info("Aborting:")
714 self.log.info("%s", msg)
710 self.log.info("%s", msg)
715 msg_type = msg['header']['msg_type']
711 msg_type = msg['header']['msg_type']
716 reply_type = msg_type.split('_')[0] + '_reply'
712 reply_type = msg_type.split('_')[0] + '_reply'
717
713
718 status = {'status' : 'aborted'}
714 status = {'status' : 'aborted'}
719 md = {'engine' : self.ident}
715 md = {'engine' : self.ident}
720 md.update(status)
716 md.update(status)
721 reply_msg = self.session.send(stream, reply_type, metadata=md,
717 reply_msg = self.session.send(stream, reply_type, metadata=md,
722 content=status, parent=msg, ident=idents)
718 content=status, parent=msg, ident=idents)
723 self.log.debug("%s", reply_msg)
719 self.log.debug("%s", reply_msg)
724 # We need to wait a bit for requests to come in. This can probably
720 # We need to wait a bit for requests to come in. This can probably
725 # be set shorter for true asynchronous clients.
721 # be set shorter for true asynchronous clients.
726 poller.poll(50)
722 poller.poll(50)
727
723
728
724
729 def _no_raw_input(self):
725 def _no_raw_input(self):
730 """Raise StdinNotImplentedError if active frontend doesn't support
726 """Raise StdinNotImplentedError if active frontend doesn't support
731 stdin."""
727 stdin."""
732 raise StdinNotImplementedError("raw_input was called, but this "
728 raise StdinNotImplementedError("raw_input was called, but this "
733 "frontend does not support stdin.")
729 "frontend does not support stdin.")
734
730
735 def _raw_input(self, prompt, ident, parent):
731 def _raw_input(self, prompt, ident, parent):
736 # Flush output before making the request.
732 # Flush output before making the request.
737 sys.stderr.flush()
733 sys.stderr.flush()
738 sys.stdout.flush()
734 sys.stdout.flush()
739 # flush the stdin socket, to purge stale replies
735 # flush the stdin socket, to purge stale replies
740 while True:
736 while True:
741 try:
737 try:
742 self.stdin_socket.recv_multipart(zmq.NOBLOCK)
738 self.stdin_socket.recv_multipart(zmq.NOBLOCK)
743 except zmq.ZMQError as e:
739 except zmq.ZMQError as e:
744 if e.errno == zmq.EAGAIN:
740 if e.errno == zmq.EAGAIN:
745 break
741 break
746 else:
742 else:
747 raise
743 raise
748
744
749 # Send the input request.
745 # Send the input request.
750 content = json_clean(dict(prompt=prompt))
746 content = json_clean(dict(prompt=prompt))
751 self.session.send(self.stdin_socket, u'input_request', content, parent,
747 self.session.send(self.stdin_socket, u'input_request', content, parent,
752 ident=ident)
748 ident=ident)
753
749
754 # Await a response.
750 # Await a response.
755 while True:
751 while True:
756 try:
752 try:
757 ident, reply = self.session.recv(self.stdin_socket, 0)
753 ident, reply = self.session.recv(self.stdin_socket, 0)
758 except Exception:
754 except Exception:
759 self.log.warn("Invalid Message:", exc_info=True)
755 self.log.warn("Invalid Message:", exc_info=True)
760 except KeyboardInterrupt:
756 except KeyboardInterrupt:
761 # re-raise KeyboardInterrupt, to truncate traceback
757 # re-raise KeyboardInterrupt, to truncate traceback
762 raise KeyboardInterrupt
758 raise KeyboardInterrupt
763 else:
759 else:
764 break
760 break
765 try:
761 try:
766 value = py3compat.unicode_to_str(reply['content']['value'])
762 value = py3compat.unicode_to_str(reply['content']['value'])
767 except:
763 except:
768 self.log.error("Got bad raw_input reply: ")
764 self.log.error("Got bad raw_input reply: ")
769 self.log.error("%s", parent)
765 self.log.error("%s", parent)
770 value = ''
766 value = ''
771 if value == '\x04':
767 if value == '\x04':
772 # EOF
768 # EOF
773 raise EOFError
769 raise EOFError
774 return value
770 return value
775
771
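Reduced to message shapes, the stdin round trip implemented above looks roughly like this; only 'prompt' and 'value' are read by the code, the rest is assumed:

input_request = {'msg_type': 'input_request', 'content': {'prompt': 'name: '}}
input_reply   = {'msg_type': 'input_reply',   'content': {'value': 'Ada'}}
# a reply value of '\x04' is treated as EOF and raises EOFError, as above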
776 def _complete(self, msg):
772 def _complete(self, msg):
777 c = msg['content']
773 c = msg['content']
778 try:
774 try:
779 cpos = int(c['cursor_pos'])
775 cpos = int(c['cursor_pos'])
780 except:
776 except:
781 # If we don't get something that we can convert to an integer, at
777 # If we don't get something that we can convert to an integer, at
782 # least attempt the completion guessing the cursor is at the end of
778 # least attempt the completion guessing the cursor is at the end of
783 # the text, if there's any, and otherwise of the line
779 # the text, if there's any, and otherwise of the line
784 cpos = len(c['text'])
780 cpos = len(c['text'])
785 if cpos==0:
781 if cpos==0:
786 cpos = len(c['line'])
782 cpos = len(c['line'])
787 return self.shell.complete(c['text'], c['line'], cpos)
783 return self.shell.complete(c['text'], c['line'], cpos)
788
784
789 def _at_shutdown(self):
785 def _at_shutdown(self):
790 """Actions taken at shutdown by the kernel, called by python's atexit.
786 """Actions taken at shutdown by the kernel, called by python's atexit.
791 """
787 """
792 # io.rprint("Kernel at_shutdown") # dbg
788 # io.rprint("Kernel at_shutdown") # dbg
793 if self._shutdown_message is not None:
789 if self._shutdown_message is not None:
794 self.session.send(self.iopub_socket, self._shutdown_message, ident=self._topic('shutdown'))
790 self.session.send(self.iopub_socket, self._shutdown_message, ident=self._topic('shutdown'))
795 self.log.debug("%s", self._shutdown_message)
791 self.log.debug("%s", self._shutdown_message)
796 [ s.flush(zmq.POLLOUT) for s in self.shell_streams ]
792 [ s.flush(zmq.POLLOUT) for s in self.shell_streams ]
797
793
@@ -1,1863 +1,1863 b''
1 """A semi-synchronous Client for IPython parallel"""
1 """A semi-synchronous Client for IPython parallel"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from __future__ import print_function
6 from __future__ import print_function
7
7
8 import os
8 import os
9 import json
9 import json
10 import sys
10 import sys
11 from threading import Thread, Event
11 from threading import Thread, Event
12 import time
12 import time
13 import warnings
13 import warnings
14 from datetime import datetime
14 from datetime import datetime
15 from getpass import getpass
15 from getpass import getpass
16 from pprint import pprint
16 from pprint import pprint
17
17
18 pjoin = os.path.join
18 pjoin = os.path.join
19
19
20 import zmq
20 import zmq
21
21
22 from IPython.config.configurable import MultipleInstanceError
22 from IPython.config.configurable import MultipleInstanceError
23 from IPython.core.application import BaseIPythonApplication
23 from IPython.core.application import BaseIPythonApplication
24 from IPython.core.profiledir import ProfileDir, ProfileDirError
24 from IPython.core.profiledir import ProfileDir, ProfileDirError
25
25
26 from IPython.utils.capture import RichOutput
26 from IPython.utils.capture import RichOutput
27 from IPython.utils.coloransi import TermColors
27 from IPython.utils.coloransi import TermColors
28 from IPython.utils.jsonutil import rekey, extract_dates, parse_date
28 from IPython.utils.jsonutil import rekey, extract_dates, parse_date
29 from IPython.utils.localinterfaces import localhost, is_local_ip
29 from IPython.utils.localinterfaces import localhost, is_local_ip
30 from IPython.utils.path import get_ipython_dir
30 from IPython.utils.path import get_ipython_dir
31 from IPython.utils.py3compat import cast_bytes, string_types, xrange, iteritems
31 from IPython.utils.py3compat import cast_bytes, string_types, xrange, iteritems
32 from IPython.utils.traitlets import (HasTraits, Integer, Instance, Unicode,
32 from IPython.utils.traitlets import (HasTraits, Integer, Instance, Unicode,
33 Dict, List, Bool, Set, Any)
33 Dict, List, Bool, Set, Any)
34 from IPython.external.decorator import decorator
34 from IPython.external.decorator import decorator
35 from IPython.external.ssh import tunnel
35 from IPython.external.ssh import tunnel
36
36
37 from IPython.parallel import Reference
37 from IPython.parallel import Reference
38 from IPython.parallel import error
38 from IPython.parallel import error
39 from IPython.parallel import util
39 from IPython.parallel import util
40
40
41 from IPython.kernel.zmq.session import Session, Message
41 from IPython.kernel.zmq.session import Session, Message
42 from IPython.kernel.zmq import serialize
42 from IPython.kernel.zmq import serialize
43
43
44 from .asyncresult import AsyncResult, AsyncHubResult
44 from .asyncresult import AsyncResult, AsyncHubResult
45 from .view import DirectView, LoadBalancedView
45 from .view import DirectView, LoadBalancedView
46
46
47 #--------------------------------------------------------------------------
47 #--------------------------------------------------------------------------
48 # Decorators for Client methods
48 # Decorators for Client methods
49 #--------------------------------------------------------------------------
49 #--------------------------------------------------------------------------
50
50
51 @decorator
51 @decorator
52 def spin_first(f, self, *args, **kwargs):
52 def spin_first(f, self, *args, **kwargs):
53 """Call spin() to sync state prior to calling the method."""
53 """Call spin() to sync state prior to calling the method."""
54 self.spin()
54 self.spin()
55 return f(self, *args, **kwargs)
55 return f(self, *args, **kwargs)
56
56
57
57
#--------------------------------------------------------------------------
# Classes
#--------------------------------------------------------------------------


class ExecuteReply(RichOutput):
    """wrapper for finished Execute results"""
    def __init__(self, msg_id, content, metadata):
        self.msg_id = msg_id
        self._content = content
        self.execution_count = content['execution_count']
        self.metadata = metadata

    # RichOutput overrides

    @property
    def source(self):
        execute_result = self.metadata['execute_result']
        if execute_result:
            return execute_result.get('source', '')

    @property
    def data(self):
        execute_result = self.metadata['execute_result']
        if execute_result:
            return execute_result.get('data', {})

    @property
    def _metadata(self):
        execute_result = self.metadata['execute_result']
        if execute_result:
            return execute_result.get('metadata', {})

    def display(self):
        from IPython.display import publish_display_data
        publish_display_data(self.source, self.data, self.metadata)

    def _repr_mime_(self, mime):
        if mime not in self.data:
            return
        data = self.data[mime]
        if mime in self._metadata:
            return data, self._metadata[mime]
        else:
            return data

    def __getitem__(self, key):
        return self.metadata[key]

    def __getattr__(self, key):
        if key not in self.metadata:
            raise AttributeError(key)
        return self.metadata[key]

    def __repr__(self):
        execute_result = self.metadata['execute_result'] or {'data':{}}
        text_out = execute_result['data'].get('text/plain', '')
        if len(text_out) > 32:
            text_out = text_out[:29] + '...'

        return "<ExecuteReply[%i]: %s>" % (self.execution_count, text_out)

    def _repr_pretty_(self, p, cycle):
        execute_result = self.metadata['execute_result'] or {'data':{}}
        text_out = execute_result['data'].get('text/plain', '')

        if not text_out:
            return

        try:
            ip = get_ipython()
        except NameError:
            colors = "NoColor"
        else:
            colors = ip.colors

        if colors == "NoColor":
            out = normal = ""
        else:
            out = TermColors.Red
            normal = TermColors.Normal

        if '\n' in text_out and not text_out.startswith('\n'):
            # add newline for multiline reprs
            text_out = '\n' + text_out

        p.text(
            out + u'Out[%i:%i]: ' % (
                self.metadata['engine_id'], self.execution_count
            ) + normal + text_out
        )


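# Usage sketch for ExecuteReply above (comment only, illustrative; assumes a
# connected Client ``rc`` and a running cluster, which are not defined here):
#
#     >>> ar = rc[:].execute("1 + 1")
#     >>> reply = ar.get()[0]            # one ExecuteReply per engine
#     >>> reply.execution_count          # execution counter on that engine
#     >>> reply['engine_id']             # metadata via item access
#     >>> reply.data                     # display data from the execute_result
#
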
class Metadata(dict):
    """Subclass of dict for initializing metadata values.

    Attribute access works on keys.

    These objects have a strict set of keys - an error will be raised if you
    try to add new keys.
    """
    def __init__(self, *args, **kwargs):
        dict.__init__(self)
        md = {'msg_id' : None,
              'submitted' : None,
              'started' : None,
              'completed' : None,
              'received' : None,
              'engine_uuid' : None,
              'engine_id' : None,
              'follow' : None,
              'after' : None,
              'status' : None,

              'execute_input' : None,
              'execute_result' : None,
              'error' : None,
              'stdout' : '',
              'stderr' : '',
              'outputs' : [],
              'data': {},
              'outputs_ready' : False,
             }
        self.update(md)
        self.update(dict(*args, **kwargs))

    def __getattr__(self, key):
        """getattr aliased to getitem"""
        if key in self:
            return self[key]
        else:
            raise AttributeError(key)

    def __setattr__(self, key, value):
        """setattr aliased to setitem, with strict key enforcement"""
        if key in self:
            self[key] = value
        else:
            raise AttributeError(key)

    def __setitem__(self, key, value):
        """strict static key enforcement"""
        if key in self:
            dict.__setitem__(self, key, value)
        else:
            raise KeyError(key)


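# Usage sketch for Metadata above (comment only): it is a dict with a fixed
# schema, so attribute access reads keys and unknown keys are rejected:
#
#     >>> md = Metadata(status='ok')
#     >>> md.status
#     'ok'
#     >>> md['stdout']        # unset keys keep their defaults
#     ''
#     >>> md['no_such_key'] = 1
#     Traceback (most recent call last):
#         ...
#     KeyError: 'no_such_key'
#
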
class Client(HasTraits):
    """A semi-synchronous client to the IPython ZMQ cluster

    Parameters
    ----------

    url_file : str/unicode; path to ipcontroller-client.json
        This JSON file should contain all the information needed to connect to a cluster,
        and is likely the only argument needed.
        Connection information for the Hub's registration.  If a json connector
        file is given, then likely no further configuration is necessary.
        [Default: use profile]
    profile : str
        The name of the Cluster profile to be used to find connector information.
        If run from an IPython application, the default profile will be the same
        as the running application, otherwise it will be 'default'.
    cluster_id : str
        String id added to runtime files, to prevent name collisions when using
        multiple clusters with a single profile simultaneously.
        When set, will look for files named like: 'ipcontroller-<cluster_id>-client.json'
        Since this is text inserted into filenames, typical recommendations apply:
        Simple character strings are ideal, and spaces are not recommended (but
        should generally work).
    context : zmq.Context
        Pass an existing zmq.Context instance, otherwise the client will create its own.
    debug : bool
        flag for lots of message printing for debug purposes
    timeout : int/float
        time (in seconds) to wait for connection replies from the Hub
        [Default: 10]

    #-------------- session related args ----------------

    config : Config object
        If specified, this will be relayed to the Session for configuration
    username : str
        set username for the session object

    #-------------- ssh related args ----------------
    # These are args for configuring the ssh tunnel to be used
    # credentials are used to forward connections over ssh to the Controller
    # Note that the ip given in `addr` needs to be relative to sshserver
    # The most basic case is to leave addr as pointing to localhost (127.0.0.1),
    # and set sshserver as the same machine the Controller is on. However,
    # the only requirement is that sshserver is able to see the Controller
    # (i.e. is within the same trusted network).

    sshserver : str
        A string of the form passed to ssh, e.g. 'server.tld' or 'user@server.tld:port'
        If keyfile or password is specified, and this is not, it will default to
        the ip given in addr.
    sshkey : str; path to ssh private key file
        This specifies a key to be used in ssh login, default None.
        Regular default ssh keys will be used without specifying this argument.
    password : str
        Your ssh password to sshserver. Note that if this is left None,
        you will be prompted for it if passwordless key based login is unavailable.
    paramiko : bool
        flag for whether to use paramiko instead of shell ssh for tunneling.
        [default: True on win32, False otherwise]


    Attributes
    ----------

    ids : list of int engine IDs
        requesting the ids attribute always synchronizes
        the registration state. To request ids without synchronization,
        use the semi-private _ids attribute.

    history : list of msg_ids
        a list of msg_ids, keeping track of all the execution
        messages you have submitted in order.

    outstanding : set of msg_ids
        a set of msg_ids that have been submitted, but whose
        results have not yet been received.

    results : dict
        a dict of all our results, keyed by msg_id

    block : bool
        determines the default behavior when block is not specified
        in execution methods

    Methods
    -------

    spin
        flushes incoming results and registration state changes
        control methods spin, and requesting `ids` also ensures up-to-date state

    wait
        wait on one or more msg_ids

    execution methods
        apply
        legacy: execute, run

    data movement
        push, pull, scatter, gather

    query methods
        queue_status, get_result, purge, result_status

    control methods
        abort, shutdown

    """


    block = Bool(False)
    outstanding = Set()
    results = Instance('collections.defaultdict', (dict,))
    metadata = Instance('collections.defaultdict', (Metadata,))
    history = List()
    debug = Bool(False)
    _spin_thread = Any()
    _stop_spinning = Any()

    profile = Unicode()
    def _profile_default(self):
        if BaseIPythonApplication.initialized():
            # an IPython app *might* be running, try to get its profile
            try:
                return BaseIPythonApplication.instance().profile
            except (AttributeError, MultipleInstanceError):
                # could be a *different* subclass of config.Application,
                # which would raise one of these two errors.
                return u'default'
        else:
            return u'default'


    _outstanding_dict = Instance('collections.defaultdict', (set,))
    _ids = List()
    _connected = Bool(False)
    _ssh = Bool(False)
    _context = Instance('zmq.Context')
    _config = Dict()
    _engines = Instance(util.ReverseDict, (), {})
    # _hub_socket=Instance('zmq.Socket')
    _query_socket = Instance('zmq.Socket')
    _control_socket = Instance('zmq.Socket')
    _iopub_socket = Instance('zmq.Socket')
    _notification_socket = Instance('zmq.Socket')
    _mux_socket = Instance('zmq.Socket')
    _task_socket = Instance('zmq.Socket')
    _task_scheme = Unicode()
    _closed = False
    _ignored_control_replies = Integer(0)
    _ignored_hub_replies = Integer(0)

    def __new__(self, *args, **kw):
        # don't raise on positional args
        return HasTraits.__new__(self, **kw)

    def __init__(self, url_file=None, profile=None, profile_dir=None, ipython_dir=None,
            context=None, debug=False,
            sshserver=None, sshkey=None, password=None, paramiko=None,
            timeout=10, cluster_id=None, **extra_args
            ):
        if profile:
            super(Client, self).__init__(debug=debug, profile=profile)
        else:
            super(Client, self).__init__(debug=debug)
        if context is None:
            context = zmq.Context.instance()
        self._context = context
        self._stop_spinning = Event()

        if 'url_or_file' in extra_args:
            url_file = extra_args['url_or_file']
            warnings.warn("url_or_file arg no longer supported, use url_file", DeprecationWarning)

        if url_file and util.is_url(url_file):
            raise ValueError("single urls cannot be specified, url-files must be used.")

        self._setup_profile_dir(self.profile, profile_dir, ipython_dir)

        if self._cd is not None:
            if url_file is None:
                if not cluster_id:
                    client_json = 'ipcontroller-client.json'
                else:
                    client_json = 'ipcontroller-%s-client.json' % cluster_id
                url_file = pjoin(self._cd.security_dir, client_json)
        if url_file is None:
            raise ValueError(
                "I can't find enough information to connect to a hub!"
                " Please specify at least one of url_file or profile."
            )

        with open(url_file) as f:
            cfg = json.load(f)

        self._task_scheme = cfg['task_scheme']

        # sync defaults from args, json:
        if sshserver:
            cfg['ssh'] = sshserver

        location = cfg.setdefault('location', None)

        proto,addr = cfg['interface'].split('://')
        addr = util.disambiguate_ip_address(addr, location)
        cfg['interface'] = "%s://%s" % (proto, addr)

        # turn interface,port into full urls:
        for key in ('control', 'task', 'mux', 'iopub', 'notification', 'registration'):
            cfg[key] = cfg['interface'] + ':%i' % cfg[key]

        url = cfg['registration']

        if location is not None and addr == localhost():
            # location specified, and connection is expected to be local
            if not is_local_ip(location) and not sshserver:
                # load ssh from JSON *only* if the controller is not on
                # this machine
                sshserver = cfg['ssh']
            if not is_local_ip(location) and not sshserver:
                # warn if no ssh specified, but SSH is probably needed
                # This is only a warning, because the most likely cause
                # is a local Controller on a laptop whose IP is dynamic
                warnings.warn("""
            Controller appears to be listening on localhost, but not on this machine.
            If this is true, you should specify Client(...,sshserver='you@%s')
            or instruct your controller to listen on an external IP."""%location,
                RuntimeWarning)
        elif not sshserver:
            # otherwise sync with cfg
            sshserver = cfg['ssh']

        self._config = cfg

        self._ssh = bool(sshserver or sshkey or password)
        if self._ssh and sshserver is None:
            # default to ssh via localhost
            sshserver = addr
        if self._ssh and password is None:
            if tunnel.try_passwordless_ssh(sshserver, sshkey, paramiko):
                password = False
            else:
                password = getpass("SSH Password for %s: " % sshserver)
        ssh_kwargs = dict(keyfile=sshkey, password=password, paramiko=paramiko)

        # configure and construct the session
        try:
            extra_args['packer'] = cfg['pack']
            extra_args['unpacker'] = cfg['unpack']
            extra_args['key'] = cast_bytes(cfg['key'])
            extra_args['signature_scheme'] = cfg['signature_scheme']
        except KeyError as exc:
            msg = '\n'.join([
                "Connection file is invalid (missing '{}'), possibly from an old version of IPython.",
                "If you are reusing connection files, remove them and start ipcontroller again."
            ])
            # use exc.args[0] rather than the Python-2-only exc.message
            raise ValueError(msg.format(exc.args[0]))

        self.session = Session(**extra_args)

        self._query_socket = self._context.socket(zmq.DEALER)

        if self._ssh:
            tunnel.tunnel_connection(self._query_socket, cfg['registration'], sshserver, **ssh_kwargs)
        else:
            self._query_socket.connect(cfg['registration'])

        self.session.debug = self.debug

        self._notification_handlers = {'registration_notification' : self._register_engine,
                                       'unregistration_notification' : self._unregister_engine,
                                       'shutdown_notification' : lambda msg: self.close(),
                                      }
        self._queue_handlers = {'execute_reply' : self._handle_execute_reply,
                                'apply_reply' : self._handle_apply_reply}

        try:
            self._connect(sshserver, ssh_kwargs, timeout)
        except:
            self.close(linger=0)
            raise

        # last step: setup magics, if we are in IPython:

        try:
            ip = get_ipython()
        except NameError:
            return
        else:
            if 'px' not in ip.magics_manager.magics:
                # in IPython but we are the first Client.
                # activate a default view for parallel magics.
                self.activate()

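    # Usage sketch for the constructor above (comment only, not executed).
    # The paths, profile name, and ssh host are placeholders; a running cluster
    # (e.g. started with ``ipcluster start``) is assumed:
    #
    #     rc = Client()                                # default profile's connection file
    #     rc = Client(profile='mycluster')             # named profile
    #     rc = Client('/path/to/ipcontroller-client.json',
    #                 sshserver='me@gateway.example.com')
    #
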
    def __del__(self):
        """cleanup sockets, but _not_ context."""
        self.close()

    def _setup_profile_dir(self, profile, profile_dir, ipython_dir):
        if ipython_dir is None:
            ipython_dir = get_ipython_dir()
        if profile_dir is not None:
            try:
                self._cd = ProfileDir.find_profile_dir(profile_dir)
                return
            except ProfileDirError:
                pass
        elif profile is not None:
            try:
                self._cd = ProfileDir.find_profile_dir_by_name(
                    ipython_dir, profile)
                return
            except ProfileDirError:
                pass
        self._cd = None

    def _update_engines(self, engines):
        """Update our engines dict and _ids from a dict of the form: {id:uuid}."""
        for k,v in iteritems(engines):
            eid = int(k)
            if eid not in self._engines:
                self._ids.append(eid)
                self._engines[eid] = v
        self._ids = sorted(self._ids)
        if sorted(self._engines.keys()) != list(range(len(self._engines))) and \
                        self._task_scheme == 'pure' and self._task_socket:
            self._stop_scheduling_tasks()

    def _stop_scheduling_tasks(self):
        """Stop scheduling tasks because an engine has been unregistered
        from a pure ZMQ scheduler.
        """
        self._task_socket.close()
        self._task_socket = None
        msg = "An engine has been unregistered, and we are using pure " +\
              "ZMQ task scheduling. Task farming will be disabled."
        if self.outstanding:
            msg += " If you were running tasks when this happened, " +\
                   "some `outstanding` msg_ids may never resolve."
        warnings.warn(msg, RuntimeWarning)

    def _build_targets(self, targets):
        """Turn valid target IDs or 'all' into two lists:
        (uuids, int_ids).
        """
        if not self._ids:
            # flush notification socket if no engines yet, just in case
            if not self.ids:
                raise error.NoEnginesRegistered("Can't build targets without any engines")

        if targets is None:
            targets = self._ids
        elif isinstance(targets, string_types):
            if targets.lower() == 'all':
                targets = self._ids
            else:
                raise TypeError("%r not valid str target, must be 'all'" % (targets))
        elif isinstance(targets, int):
            if targets < 0:
                targets = self.ids[targets]
            if targets not in self._ids:
                raise IndexError("No such engine: %i" % targets)
            targets = [targets]

        if isinstance(targets, slice):
            indices = list(range(len(self._ids))[targets])
            ids = self.ids
            targets = [ ids[i] for i in indices ]

        if not isinstance(targets, (tuple, list, xrange)):
            raise TypeError("targets by int/slice/collection of ints only, not %s" % (type(targets)))

        return [cast_bytes(self._engines[t]) for t in targets], list(targets)

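    # Usage sketch for _build_targets above (comment only), assuming engines
    # 0-3 are registered; every form returns (uuids_as_bytes, int_ids):
    #
    #     self._build_targets('all')           # every registered engine
    #     self._build_targets(3)               # a single engine id
    #     self._build_targets(-1)              # negative ints index self.ids
    #     self._build_targets(slice(0, 4, 2))  # engines 0 and 2
    #     self._build_targets([0, 2])          # explicit list of ids
    #
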
    def _connect(self, sshserver, ssh_kwargs, timeout):
        """setup all our socket connections to the cluster. This is called from
        __init__."""

        # Maybe allow reconnecting?
        if self._connected:
            return
        self._connected = True

        def connect_socket(s, url):
            if self._ssh:
                return tunnel.tunnel_connection(s, url, sshserver, **ssh_kwargs)
            else:
                return s.connect(url)

        self.session.send(self._query_socket, 'connection_request')
        # use Poller because zmq.select has wrong units in pyzmq 2.1.7
        poller = zmq.Poller()
        poller.register(self._query_socket, zmq.POLLIN)
        # poll expects milliseconds, timeout is seconds
        evts = poller.poll(timeout*1000)
        if not evts:
            raise error.TimeoutError("Hub connection request timed out")
        idents, msg = self.session.recv(self._query_socket, mode=0)
        if self.debug:
            pprint(msg)
        content = msg['content']
        # self._config['registration'] = dict(content)
        cfg = self._config
        if content['status'] == 'ok':
            self._mux_socket = self._context.socket(zmq.DEALER)
            connect_socket(self._mux_socket, cfg['mux'])

            self._task_socket = self._context.socket(zmq.DEALER)
            connect_socket(self._task_socket, cfg['task'])

            self._notification_socket = self._context.socket(zmq.SUB)
            self._notification_socket.setsockopt(zmq.SUBSCRIBE, b'')
            connect_socket(self._notification_socket, cfg['notification'])

            self._control_socket = self._context.socket(zmq.DEALER)
            connect_socket(self._control_socket, cfg['control'])

            self._iopub_socket = self._context.socket(zmq.SUB)
            self._iopub_socket.setsockopt(zmq.SUBSCRIBE, b'')
            connect_socket(self._iopub_socket, cfg['iopub'])

            self._update_engines(dict(content['engines']))
        else:
            self._connected = False
            raise Exception("Failed to connect!")

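    # Descriptive summary of the sockets wired up by _connect above:
    #   _mux_socket          -> 'mux' queue, direct (DirectView) requests
    #   _task_socket         -> 'task' queue, load-balanced (LoadBalancedView) requests
    #   _control_socket      -> 'control' channel, e.g. abort/shutdown
    #   _iopub_socket        -> SUB socket for engine output (stdout/stderr/display)
    #   _notification_socket -> SUB socket for engine (un)registration events
    #   _query_socket        -> Hub queries (connected earlier, in __init__)
    #
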
    #--------------------------------------------------------------------------
    # handlers and callbacks for incoming messages
    #--------------------------------------------------------------------------

    def _unwrap_exception(self, content):
        """unwrap exception, and remap engine_id to int."""
        e = error.unwrap_exception(content)
        # print e.traceback
        if e.engine_info:
            e_uuid = e.engine_info['engine_uuid']
            eid = self._engines[e_uuid]
            e.engine_info['engine_id'] = eid
        return e

    def _extract_metadata(self, msg):
        header = msg['header']
        parent = msg['parent_header']
        msg_meta = msg['metadata']
        content = msg['content']
        md = {'msg_id' : parent['msg_id'],
              'received' : datetime.now(),
              'engine_uuid' : msg_meta.get('engine', None),
              'follow' : msg_meta.get('follow', []),
              'after' : msg_meta.get('after', []),
              'status' : content['status'],
             }

        if md['engine_uuid'] is not None:
            md['engine_id'] = self._engines.get(md['engine_uuid'], None)

        if 'date' in parent:
            md['submitted'] = parent['date']
        if 'started' in msg_meta:
            md['started'] = parse_date(msg_meta['started'])
        if 'date' in header:
            md['completed'] = header['date']
        return md

    def _register_engine(self, msg):
        """Register a new engine, and update our connection info."""
        content = msg['content']
        eid = content['id']
        d = {eid : content['uuid']}
        self._update_engines(d)

    def _unregister_engine(self, msg):
        """Unregister an engine that has died."""
        content = msg['content']
        eid = int(content['id'])
        if eid in self._ids:
            self._ids.remove(eid)
            uuid = self._engines.pop(eid)

            self._handle_stranded_msgs(eid, uuid)

        if self._task_socket and self._task_scheme == 'pure':
            self._stop_scheduling_tasks()

    def _handle_stranded_msgs(self, eid, uuid):
        """Handle messages known to be on an engine when the engine unregisters.

        It is possible that this will fire prematurely - that is, an engine will
        go down after completing a result, and the client will be notified
        of the unregistration and later receive the successful result.
        """

        outstanding = self._outstanding_dict[uuid]

        for msg_id in list(outstanding):
            if msg_id in self.results:
                # we already have this result
                continue
            try:
                raise error.EngineError("Engine %r died while running task %r" % (eid, msg_id))
            except:
                content = error.wrap_exception()
            # build a fake message:
            msg = self.session.msg('apply_reply', content=content)
            msg['parent_header']['msg_id'] = msg_id
            msg['metadata']['engine'] = uuid
            self._handle_apply_reply(msg)

    def _handle_execute_reply(self, msg):
        """Save the reply to an execute_request into our results.

        execute messages are never actually used. apply is used instead.
        """

        parent = msg['parent_header']
        msg_id = parent['msg_id']
        if msg_id not in self.outstanding:
            if msg_id in self.history:
                print("got stale result: %s" % msg_id)
            else:
                print("got unknown result: %s" % msg_id)
        else:
            self.outstanding.remove(msg_id)

        content = msg['content']
        header = msg['header']

        # construct metadata:
        md = self.metadata[msg_id]
        md.update(self._extract_metadata(msg))
        # is this redundant?
        self.metadata[msg_id] = md

        e_outstanding = self._outstanding_dict[md['engine_uuid']]
        if msg_id in e_outstanding:
            e_outstanding.remove(msg_id)

        # construct result:
        if content['status'] == 'ok':
            self.results[msg_id] = ExecuteReply(msg_id, content, md)
        elif content['status'] == 'aborted':
            self.results[msg_id] = error.TaskAborted(msg_id)
        elif content['status'] == 'resubmitted':
            # TODO: handle resubmission
            pass
        else:
            self.results[msg_id] = self._unwrap_exception(content)

    def _handle_apply_reply(self, msg):
        """Save the reply to an apply_request into our results."""
        parent = msg['parent_header']
        msg_id = parent['msg_id']
        if msg_id not in self.outstanding:
            if msg_id in self.history:
                print("got stale result: %s" % msg_id)
                print(self.results[msg_id])
                print(msg)
            else:
                print("got unknown result: %s" % msg_id)
        else:
            self.outstanding.remove(msg_id)
        content = msg['content']
        header = msg['header']

        # construct metadata:
        md = self.metadata[msg_id]
        md.update(self._extract_metadata(msg))
        # is this redundant?
        self.metadata[msg_id] = md

        e_outstanding = self._outstanding_dict[md['engine_uuid']]
        if msg_id in e_outstanding:
            e_outstanding.remove(msg_id)

        # construct result:
        if content['status'] == 'ok':
            self.results[msg_id] = serialize.unserialize_object(msg['buffers'])[0]
        elif content['status'] == 'aborted':
            self.results[msg_id] = error.TaskAborted(msg_id)
        elif content['status'] == 'resubmitted':
            # TODO: handle resubmission
            pass
        else:
            self.results[msg_id] = self._unwrap_exception(content)

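    # Illustrative sketch (comment only): once the reply handlers above have run,
    # results are available by msg_id.  Assuming a connected client ``rc``:
    #
    #     ar = rc[:].apply_async(os.getpid)
    #     rc.spin()                                  # drives _flush_results -> handlers
    #     partial = [rc.results.get(m) for m in ar.msg_ids]
    #
    # Ordinarily you would just call ``ar.get()``; this only shows where the
    # handlers deposit their output.
    #
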
    def _flush_notifications(self):
        """Flush notifications of engine registrations waiting
        in ZMQ queue."""
        idents, msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
        while msg is not None:
            if self.debug:
                pprint(msg)
            msg_type = msg['header']['msg_type']
            handler = self._notification_handlers.get(msg_type, None)
            if handler is None:
                raise Exception("Unhandled message type: %s" % msg_type)
            else:
                handler(msg)
            idents, msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)

    def _flush_results(self, sock):
        """Flush task or queue results waiting in ZMQ queue."""
        idents, msg = self.session.recv(sock, mode=zmq.NOBLOCK)
        while msg is not None:
            if self.debug:
                pprint(msg)
            msg_type = msg['header']['msg_type']
            handler = self._queue_handlers.get(msg_type, None)
            if handler is None:
                raise Exception("Unhandled message type: %s" % msg_type)
            else:
                handler(msg)
            idents, msg = self.session.recv(sock, mode=zmq.NOBLOCK)

    def _flush_control(self, sock):
        """Flush replies from the control channel waiting
        in the ZMQ queue.

        Currently: ignore them."""
        if self._ignored_control_replies <= 0:
            return
        idents, msg = self.session.recv(sock, mode=zmq.NOBLOCK)
        while msg is not None:
            self._ignored_control_replies -= 1
            if self.debug:
                pprint(msg)
            idents, msg = self.session.recv(sock, mode=zmq.NOBLOCK)

    def _flush_ignored_control(self):
        """flush ignored control replies"""
        while self._ignored_control_replies > 0:
            self.session.recv(self._control_socket)
            self._ignored_control_replies -= 1

    def _flush_ignored_hub_replies(self):
        ident, msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
        while msg is not None:
            ident, msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)

    def _flush_iopub(self, sock):
        """Flush replies from the iopub channel waiting
        in the ZMQ queue.
        """
        idents, msg = self.session.recv(sock, mode=zmq.NOBLOCK)
        while msg is not None:
            if self.debug:
                pprint(msg)
            parent = msg['parent_header']
            # ignore IOPub messages with no parent.
            # Caused by print statements or warnings from before the first execution.
            if not parent:
                idents, msg = self.session.recv(sock, mode=zmq.NOBLOCK)
                continue
            msg_id = parent['msg_id']
            content = msg['content']
            header = msg['header']
            msg_type = msg['header']['msg_type']

            # init metadata:
            md = self.metadata[msg_id]

            if msg_type == 'stream':
                name = content['name']
                s = md[name] or ''
                md[name] = s + content['data']
            elif msg_type == 'error':
                md.update({'error' : self._unwrap_exception(content)})
            elif msg_type == 'execute_input':
                md.update({'execute_input' : content['code']})
            elif msg_type == 'display_data':
                md['outputs'].append(content)
            elif msg_type == 'execute_result':
                md['execute_result'] = content
            elif msg_type == 'data_message':
                data, remainder = serialize.unserialize_object(msg['buffers'])
                md['data'].update(data)
            elif msg_type == 'status':
                # idle message comes after all outputs
                if content['execution_state'] == 'idle':
                    md['outputs_ready'] = True
            else:
                # unhandled msg_type
                pass

            # redundant?
            self.metadata[msg_id] = md

            idents, msg = self.session.recv(sock, mode=zmq.NOBLOCK)

    #--------------------------------------------------------------------------
    # len, getitem
    #--------------------------------------------------------------------------

    def __len__(self):
        """len(client) returns # of engines."""
        return len(self.ids)

    def __getitem__(self, key):
        """index access returns DirectView multiplexer objects

        Must be int, slice, or list/tuple/xrange of ints"""
        if not isinstance(key, (int, slice, tuple, list, xrange)):
            raise TypeError("key by int/slice/iterable of ints only, not %s" % (type(key)))
        else:
            return self.direct_view(key)

    def __iter__(self):
        """Since we define __getitem__, Client is iterable

        but unless we also define __iter__, iteration won't work correctly
        unless engine IDs start at zero and are contiguous.
        """
        for eid in self.ids:
            yield self.direct_view(eid)

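    # Indexing/iteration sketch for __getitem__/__iter__ above (comment only),
    # assuming a connected client ``rc``:
    #
    #     rc[0]            # DirectView on engine 0
    #     rc[::2]          # DirectView on every other engine
    #     rc[[0, 2]]       # DirectView on an explicit list of engines
    #     [v for v in rc]  # one single-engine DirectView per engine id
    #
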
    #--------------------------------------------------------------------------
    # Begin public methods
    #--------------------------------------------------------------------------

    @property
    def ids(self):
        """Always up-to-date ids property."""
        self._flush_notifications()
        # always copy:
        return list(self._ids)

    def activate(self, targets='all', suffix=''):
        """Create a DirectView and register it with IPython magics

        Defines the magics `%px, %autopx, %pxresult, %%px`

        Parameters
        ----------

        targets: int, list of ints, or 'all'
            The engines on which the view's magics will run
        suffix: str [default: '']
            The suffix, if any, for the magics.  This allows you to have
            multiple views associated with parallel magics at the same time.

            e.g. ``rc.activate(targets=0, suffix='0')`` will give you
            the magics ``%px0``, ``%pxresult0``, etc. for running magics just
            on engine 0.
        """
        view = self.direct_view(targets)
        view.block = True
        view.activate(suffix)
        return view

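    # Parallel-magics sketch for activate() above (comment only, illustrative
    # IPython session, not a transcript):
    #
    #     In [1]: rc = Client(); dv = rc.activate()
    #     In [2]: %px import os
    #     In [3]: %pxresult
    #
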
956 def close(self, linger=None):
956 def close(self, linger=None):
957 """Close my zmq Sockets
957 """Close my zmq Sockets
958
958
959 If `linger`, set the zmq LINGER socket option,
959 If `linger`, set the zmq LINGER socket option,
960 which allows discarding of messages.
960 which allows discarding of messages.
961 """
961 """
962 if self._closed:
962 if self._closed:
963 return
963 return
964 self.stop_spin_thread()
964 self.stop_spin_thread()
965 snames = [ trait for trait in self.trait_names() if trait.endswith("socket") ]
965 snames = [ trait for trait in self.trait_names() if trait.endswith("socket") ]
966 for name in snames:
966 for name in snames:
967 socket = getattr(self, name)
967 socket = getattr(self, name)
968 if socket is not None and not socket.closed:
968 if socket is not None and not socket.closed:
969 if linger is not None:
969 if linger is not None:
970 socket.close(linger=linger)
970 socket.close(linger=linger)
971 else:
971 else:
972 socket.close()
972 socket.close()
973 self._closed = True
973 self._closed = True
974
974
975 def _spin_every(self, interval=1):
975 def _spin_every(self, interval=1):
976 """target func for use in spin_thread"""
976 """target func for use in spin_thread"""
977 while True:
977 while True:
978 if self._stop_spinning.is_set():
978 if self._stop_spinning.is_set():
979 return
979 return
980 time.sleep(interval)
980 time.sleep(interval)
981 self.spin()
981 self.spin()
982
982
983 def spin_thread(self, interval=1):
983 def spin_thread(self, interval=1):
984 """call Client.spin() in a background thread on some regular interval
984 """call Client.spin() in a background thread on some regular interval
985
985
986 This helps ensure that messages don't pile up too much in the zmq queue
986 This helps ensure that messages don't pile up too much in the zmq queue
987 while you are working on other things, or just leaving an idle terminal.
987 while you are working on other things, or just leaving an idle terminal.
988
988
989 It also helps limit potential padding of the `received` timestamp
989 It also helps limit potential padding of the `received` timestamp
990 on AsyncResult objects, used for timings.
990 on AsyncResult objects, used for timings.
991
991
992 Parameters
992 Parameters
993 ----------
993 ----------
994
994
995 interval : float, optional
995 interval : float, optional
996 The interval on which to spin the client in the background thread
996 The interval on which to spin the client in the background thread
997 (simply passed to time.sleep).
997 (simply passed to time.sleep).
998
998
999 Notes
999 Notes
1000 -----
1000 -----
1001
1001
1002 For precision timing, you may want to use this method to put a bound
1002 For precision timing, you may want to use this method to put a bound
1003 on the jitter (in seconds) in `received` timestamps used
1003 on the jitter (in seconds) in `received` timestamps used
1004 in AsyncResult.wall_time.
1004 in AsyncResult.wall_time.
1005
1005
1006 """
1006 """
1007 if self._spin_thread is not None:
1007 if self._spin_thread is not None:
1008 self.stop_spin_thread()
1008 self.stop_spin_thread()
1009 self._stop_spinning.clear()
1009 self._stop_spinning.clear()
1010 self._spin_thread = Thread(target=self._spin_every, args=(interval,))
1010 self._spin_thread = Thread(target=self._spin_every, args=(interval,))
1011 self._spin_thread.daemon = True
1011 self._spin_thread.daemon = True
1012 self._spin_thread.start()
1012 self._spin_thread.start()
1013
1013
1014 def stop_spin_thread(self):
1014 def stop_spin_thread(self):
1015 """stop background spin_thread, if any"""
1015 """stop background spin_thread, if any"""
1016 if self._spin_thread is not None:
1016 if self._spin_thread is not None:
1017 self._stop_spinning.set()
1017 self._stop_spinning.set()
1018 self._spin_thread.join()
1018 self._spin_thread.join()
1019 self._spin_thread = None
1019 self._spin_thread = None
1020
1020
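# --- Usage sketch (not part of this diff): spinning in the background ---
# spin_thread() keeps flushing incoming replies so `received` timestamps
# (and AsyncResult.wall_time) stay accurate while you do other work.
# Assumes a running cluster; the 10ms interval is arbitrary.
import os
from IPython.parallel import Client

rc = Client()
rc.spin_thread(interval=0.01)
ar = rc[:].apply_async(os.getpid)
ar.get()
print(ar.wall_time)     # jitter bounded by the spin interval
rc.stop_spin_thread()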
1021 def spin(self):
1021 def spin(self):
1022 """Flush any registration notifications and execution results
1022 """Flush any registration notifications and execution results
1023 waiting in the ZMQ queue.
1023 waiting in the ZMQ queue.
1024 """
1024 """
1025 if self._notification_socket:
1025 if self._notification_socket:
1026 self._flush_notifications()
1026 self._flush_notifications()
1027 if self._iopub_socket:
1027 if self._iopub_socket:
1028 self._flush_iopub(self._iopub_socket)
1028 self._flush_iopub(self._iopub_socket)
1029 if self._mux_socket:
1029 if self._mux_socket:
1030 self._flush_results(self._mux_socket)
1030 self._flush_results(self._mux_socket)
1031 if self._task_socket:
1031 if self._task_socket:
1032 self._flush_results(self._task_socket)
1032 self._flush_results(self._task_socket)
1033 if self._control_socket:
1033 if self._control_socket:
1034 self._flush_control(self._control_socket)
1034 self._flush_control(self._control_socket)
1035 if self._query_socket:
1035 if self._query_socket:
1036 self._flush_ignored_hub_replies()
1036 self._flush_ignored_hub_replies()
1037
1037
1038 def wait(self, jobs=None, timeout=-1):
1038 def wait(self, jobs=None, timeout=-1):
1039 """waits on one or more `jobs`, for up to `timeout` seconds.
1039 """waits on one or more `jobs`, for up to `timeout` seconds.
1040
1040
1041 Parameters
1041 Parameters
1042 ----------
1042 ----------
1043
1043
1044 jobs : int, str, or list of ints and/or strs, or one or more AsyncResult objects
1044 jobs : int, str, or list of ints and/or strs, or one or more AsyncResult objects
1045 ints are indices to self.history
1045 ints are indices to self.history
1046 strs are msg_ids
1046 strs are msg_ids
1047 default: wait on all outstanding messages
1047 default: wait on all outstanding messages
1048 timeout : float
1048 timeout : float
1049 a time in seconds, after which to give up.
1049 a time in seconds, after which to give up.
1050 default is -1, which means no timeout
1050 default is -1, which means no timeout
1051
1051
1052 Returns
1052 Returns
1053 -------
1053 -------
1054
1054
1055 True : when all msg_ids are done
1055 True : when all msg_ids are done
1056 False : timeout reached, some msg_ids still outstanding
1056 False : timeout reached, some msg_ids still outstanding
1057 """
1057 """
1058 tic = time.time()
1058 tic = time.time()
1059 if jobs is None:
1059 if jobs is None:
1060 theids = self.outstanding
1060 theids = self.outstanding
1061 else:
1061 else:
1062 if isinstance(jobs, string_types + (int, AsyncResult)):
1062 if isinstance(jobs, string_types + (int, AsyncResult)):
1063 jobs = [jobs]
1063 jobs = [jobs]
1064 theids = set()
1064 theids = set()
1065 for job in jobs:
1065 for job in jobs:
1066 if isinstance(job, int):
1066 if isinstance(job, int):
1067 # index access
1067 # index access
1068 job = self.history[job]
1068 job = self.history[job]
1069 elif isinstance(job, AsyncResult):
1069 elif isinstance(job, AsyncResult):
1070 theids.update(job.msg_ids)
1070 theids.update(job.msg_ids)
1071 continue
1071 continue
1072 theids.add(job)
1072 theids.add(job)
1073 if not theids.intersection(self.outstanding):
1073 if not theids.intersection(self.outstanding):
1074 return True
1074 return True
1075 self.spin()
1075 self.spin()
1076 while theids.intersection(self.outstanding):
1076 while theids.intersection(self.outstanding):
1077 if timeout >= 0 and ( time.time()-tic ) > timeout:
1077 if timeout >= 0 and ( time.time()-tic ) > timeout:
1078 break
1078 break
1079 time.sleep(1e-3)
1079 time.sleep(1e-3)
1080 self.spin()
1080 self.spin()
1081 return len(theids.intersection(self.outstanding)) == 0
1081 return len(theids.intersection(self.outstanding)) == 0
1082
1082
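# --- Usage sketch (not part of this diff): waiting with a timeout ---
# Submits a few tasks and waits up to 5 seconds for them, as described in the
# wait() docstring above. `slow` is a hypothetical helper; assumes a running
# cluster.
from IPython.parallel import Client

def slow(x):
    import time          # imported inside so it exists on the engine
    time.sleep(x)
    return x

rc = Client()
view = rc.load_balanced_view()
ars = [view.apply_async(slow, i) for i in range(3)]
finished = rc.wait(ars, timeout=5)   # True if all done, False on timeout
print(finished, rc.outstanding)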
1083 #--------------------------------------------------------------------------
1083 #--------------------------------------------------------------------------
1084 # Control methods
1084 # Control methods
1085 #--------------------------------------------------------------------------
1085 #--------------------------------------------------------------------------
1086
1086
1087 @spin_first
1087 @spin_first
1088 def clear(self, targets=None, block=None):
1088 def clear(self, targets=None, block=None):
1089 """Clear the namespace in target(s)."""
1089 """Clear the namespace in target(s)."""
1090 block = self.block if block is None else block
1090 block = self.block if block is None else block
1091 targets = self._build_targets(targets)[0]
1091 targets = self._build_targets(targets)[0]
1092 for t in targets:
1092 for t in targets:
1093 self.session.send(self._control_socket, 'clear_request', content={}, ident=t)
1093 self.session.send(self._control_socket, 'clear_request', content={}, ident=t)
1094 error = False
1094 error = False
1095 if block:
1095 if block:
1096 self._flush_ignored_control()
1096 self._flush_ignored_control()
1097 for i in range(len(targets)):
1097 for i in range(len(targets)):
1098 idents,msg = self.session.recv(self._control_socket,0)
1098 idents,msg = self.session.recv(self._control_socket,0)
1099 if self.debug:
1099 if self.debug:
1100 pprint(msg)
1100 pprint(msg)
1101 if msg['content']['status'] != 'ok':
1101 if msg['content']['status'] != 'ok':
1102 error = self._unwrap_exception(msg['content'])
1102 error = self._unwrap_exception(msg['content'])
1103 else:
1103 else:
1104 self._ignored_control_replies += len(targets)
1104 self._ignored_control_replies += len(targets)
1105 if error:
1105 if error:
1106 raise error
1106 raise error
1107
1107
1108
1108
1109 @spin_first
1109 @spin_first
1110 def abort(self, jobs=None, targets=None, block=None):
1110 def abort(self, jobs=None, targets=None, block=None):
1111 """Abort specific jobs from the execution queues of target(s).
1111 """Abort specific jobs from the execution queues of target(s).
1112
1112
1113 This is a mechanism to prevent jobs that have already been submitted
1113 This is a mechanism to prevent jobs that have already been submitted
1114 from executing.
1114 from executing.
1115
1115
1116 Parameters
1116 Parameters
1117 ----------
1117 ----------
1118
1118
1119 jobs : msg_id, list of msg_ids, or AsyncResult
1119 jobs : msg_id, list of msg_ids, or AsyncResult
1120 The jobs to be aborted
1120 The jobs to be aborted
1121
1121
1122 If unspecified/None: abort all outstanding jobs.
1122 If unspecified/None: abort all outstanding jobs.
1123
1123
1124 """
1124 """
1125 block = self.block if block is None else block
1125 block = self.block if block is None else block
1126 jobs = jobs if jobs is not None else list(self.outstanding)
1126 jobs = jobs if jobs is not None else list(self.outstanding)
1127 targets = self._build_targets(targets)[0]
1127 targets = self._build_targets(targets)[0]
1128
1128
1129 msg_ids = []
1129 msg_ids = []
1130 if isinstance(jobs, string_types + (AsyncResult,)):
1130 if isinstance(jobs, string_types + (AsyncResult,)):
1131 jobs = [jobs]
1131 jobs = [jobs]
1132 bad_ids = [obj for obj in jobs if not isinstance(obj, string_types + (AsyncResult,))]
1132 bad_ids = [obj for obj in jobs if not isinstance(obj, string_types + (AsyncResult,))]
1133 if bad_ids:
1133 if bad_ids:
1134 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1134 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1135 for j in jobs:
1135 for j in jobs:
1136 if isinstance(j, AsyncResult):
1136 if isinstance(j, AsyncResult):
1137 msg_ids.extend(j.msg_ids)
1137 msg_ids.extend(j.msg_ids)
1138 else:
1138 else:
1139 msg_ids.append(j)
1139 msg_ids.append(j)
1140 content = dict(msg_ids=msg_ids)
1140 content = dict(msg_ids=msg_ids)
1141 for t in targets:
1141 for t in targets:
1142 self.session.send(self._control_socket, 'abort_request',
1142 self.session.send(self._control_socket, 'abort_request',
1143 content=content, ident=t)
1143 content=content, ident=t)
1144 error = False
1144 error = False
1145 if block:
1145 if block:
1146 self._flush_ignored_control()
1146 self._flush_ignored_control()
1147 for i in range(len(targets)):
1147 for i in range(len(targets)):
1148 idents,msg = self.session.recv(self._control_socket,0)
1148 idents,msg = self.session.recv(self._control_socket,0)
1149 if self.debug:
1149 if self.debug:
1150 pprint(msg)
1150 pprint(msg)
1151 if msg['content']['status'] != 'ok':
1151 if msg['content']['status'] != 'ok':
1152 error = self._unwrap_exception(msg['content'])
1152 error = self._unwrap_exception(msg['content'])
1153 else:
1153 else:
1154 self._ignored_control_replies += len(targets)
1154 self._ignored_control_replies += len(targets)
1155 if error:
1155 if error:
1156 raise error
1156 raise error
1157
1157
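# --- Usage sketch (not part of this diff): aborting queued tasks ---
# abort() removes tasks that are still waiting in an engine queue; tasks that
# have already started cannot be stopped this way. Assumes a running cluster.
import time
from IPython.parallel import Client

rc = Client()
view = rc.load_balanced_view()
ars = [view.apply_async(time.sleep, 10) for _ in range(4)]
rc.abort(jobs=ars[1:])   # keep the first task, abort the rest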
1158 @spin_first
1158 @spin_first
1159 def shutdown(self, targets='all', restart=False, hub=False, block=None):
1159 def shutdown(self, targets='all', restart=False, hub=False, block=None):
1160 """Terminates one or more engine processes, optionally including the hub.
1160 """Terminates one or more engine processes, optionally including the hub.
1161
1161
1162 Parameters
1162 Parameters
1163 ----------
1163 ----------
1164
1164
1165 targets: list of ints or 'all' [default: all]
1165 targets: list of ints or 'all' [default: all]
1166 Which engines to shut down.
1166 Which engines to shut down.
1167 hub: bool [default: False]
1167 hub: bool [default: False]
1168 Whether to include the Hub. hub=True implies targets='all'.
1168 Whether to include the Hub. hub=True implies targets='all'.
1169 block: bool [default: self.block]
1169 block: bool [default: self.block]
1170 Whether to wait for clean shutdown replies or not.
1170 Whether to wait for clean shutdown replies or not.
1171 restart: bool [default: False]
1171 restart: bool [default: False]
1172 NOT IMPLEMENTED
1172 NOT IMPLEMENTED
1173 whether to restart engines after shutting them down.
1173 whether to restart engines after shutting them down.
1174 """
1174 """
1175 from IPython.parallel.error import NoEnginesRegistered
1175 from IPython.parallel.error import NoEnginesRegistered
1176 if restart:
1176 if restart:
1177 raise NotImplementedError("Engine restart is not yet implemented")
1177 raise NotImplementedError("Engine restart is not yet implemented")
1178
1178
1179 block = self.block if block is None else block
1179 block = self.block if block is None else block
1180 if hub:
1180 if hub:
1181 targets = 'all'
1181 targets = 'all'
1182 try:
1182 try:
1183 targets = self._build_targets(targets)[0]
1183 targets = self._build_targets(targets)[0]
1184 except NoEnginesRegistered:
1184 except NoEnginesRegistered:
1185 targets = []
1185 targets = []
1186 for t in targets:
1186 for t in targets:
1187 self.session.send(self._control_socket, 'shutdown_request',
1187 self.session.send(self._control_socket, 'shutdown_request',
1188 content={'restart':restart},ident=t)
1188 content={'restart':restart},ident=t)
1189 error = False
1189 error = False
1190 if block or hub:
1190 if block or hub:
1191 self._flush_ignored_control()
1191 self._flush_ignored_control()
1192 for i in range(len(targets)):
1192 for i in range(len(targets)):
1193 idents,msg = self.session.recv(self._control_socket, 0)
1193 idents,msg = self.session.recv(self._control_socket, 0)
1194 if self.debug:
1194 if self.debug:
1195 pprint(msg)
1195 pprint(msg)
1196 if msg['content']['status'] != 'ok':
1196 if msg['content']['status'] != 'ok':
1197 error = self._unwrap_exception(msg['content'])
1197 error = self._unwrap_exception(msg['content'])
1198 else:
1198 else:
1199 self._ignored_control_replies += len(targets)
1199 self._ignored_control_replies += len(targets)
1200
1200
1201 if hub:
1201 if hub:
1202 time.sleep(0.25)
1202 time.sleep(0.25)
1203 self.session.send(self._query_socket, 'shutdown_request')
1203 self.session.send(self._query_socket, 'shutdown_request')
1204 idents,msg = self.session.recv(self._query_socket, 0)
1204 idents,msg = self.session.recv(self._query_socket, 0)
1205 if self.debug:
1205 if self.debug:
1206 pprint(msg)
1206 pprint(msg)
1207 if msg['content']['status'] != 'ok':
1207 if msg['content']['status'] != 'ok':
1208 error = self._unwrap_exception(msg['content'])
1208 error = self._unwrap_exception(msg['content'])
1209
1209
1210 if error:
1210 if error:
1211 raise error
1211 raise error
1212
1212
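# --- Usage sketch (not part of this diff): shutting the cluster down ---
# Stops every engine and then the Hub itself (hub=True implies targets='all'),
# so this is normally the very last call made against a cluster.
from IPython.parallel import Client

rc = Client()
rc.shutdown(hub=True, block=True)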
1213 #--------------------------------------------------------------------------
1213 #--------------------------------------------------------------------------
1214 # Execution related methods
1214 # Execution related methods
1215 #--------------------------------------------------------------------------
1215 #--------------------------------------------------------------------------
1216
1216
1217 def _maybe_raise(self, result):
1217 def _maybe_raise(self, result):
1218 """wrapper for maybe raising an exception if apply failed."""
1218 """wrapper for maybe raising an exception if apply failed."""
1219 if isinstance(result, error.RemoteError):
1219 if isinstance(result, error.RemoteError):
1220 raise result
1220 raise result
1221
1221
1222 return result
1222 return result
1223
1223
1224 def send_apply_request(self, socket, f, args=None, kwargs=None, metadata=None, track=False,
1224 def send_apply_request(self, socket, f, args=None, kwargs=None, metadata=None, track=False,
1225 ident=None):
1225 ident=None):
1226 """construct and send an apply message via a socket.
1226 """construct and send an apply message via a socket.
1227
1227
1228 This is the principal method with which all engine execution is performed by views.
1228 This is the principal method with which all engine execution is performed by views.
1229 """
1229 """
1230
1230
1231 if self._closed:
1231 if self._closed:
1232 raise RuntimeError("Client cannot be used after its sockets have been closed")
1232 raise RuntimeError("Client cannot be used after its sockets have been closed")
1233
1233
1234 # defaults:
1234 # defaults:
1235 args = args if args is not None else []
1235 args = args if args is not None else []
1236 kwargs = kwargs if kwargs is not None else {}
1236 kwargs = kwargs if kwargs is not None else {}
1237 metadata = metadata if metadata is not None else {}
1237 metadata = metadata if metadata is not None else {}
1238
1238
1239 # validate arguments
1239 # validate arguments
1240 if not callable(f) and not isinstance(f, Reference):
1240 if not callable(f) and not isinstance(f, Reference):
1241 raise TypeError("f must be callable, not %s"%type(f))
1241 raise TypeError("f must be callable, not %s"%type(f))
1242 if not isinstance(args, (tuple, list)):
1242 if not isinstance(args, (tuple, list)):
1243 raise TypeError("args must be tuple or list, not %s"%type(args))
1243 raise TypeError("args must be tuple or list, not %s"%type(args))
1244 if not isinstance(kwargs, dict):
1244 if not isinstance(kwargs, dict):
1245 raise TypeError("kwargs must be dict, not %s"%type(kwargs))
1245 raise TypeError("kwargs must be dict, not %s"%type(kwargs))
1246 if not isinstance(metadata, dict):
1246 if not isinstance(metadata, dict):
1247 raise TypeError("metadata must be dict, not %s"%type(metadata))
1247 raise TypeError("metadata must be dict, not %s"%type(metadata))
1248
1248
1249 bufs = serialize.pack_apply_message(f, args, kwargs,
1249 bufs = serialize.pack_apply_message(f, args, kwargs,
1250 buffer_threshold=self.session.buffer_threshold,
1250 buffer_threshold=self.session.buffer_threshold,
1251 item_threshold=self.session.item_threshold,
1251 item_threshold=self.session.item_threshold,
1252 )
1252 )
1253
1253
1254 msg = self.session.send(socket, "apply_request", buffers=bufs, ident=ident,
1254 msg = self.session.send(socket, "apply_request", buffers=bufs, ident=ident,
1255 metadata=metadata, track=track)
1255 metadata=metadata, track=track)
1256
1256
1257 msg_id = msg['header']['msg_id']
1257 msg_id = msg['header']['msg_id']
1258 self.outstanding.add(msg_id)
1258 self.outstanding.add(msg_id)
1259 if ident:
1259 if ident:
1260 # possibly routed to a specific engine
1260 # possibly routed to a specific engine
1261 if isinstance(ident, list):
1261 if isinstance(ident, list):
1262 ident = ident[-1]
1262 ident = ident[-1]
1263 if ident in self._engines.values():
1263 if ident in self._engines.values():
1264 # save for later, in case of engine death
1264 # save for later, in case of engine death
1265 self._outstanding_dict[ident].add(msg_id)
1265 self._outstanding_dict[ident].add(msg_id)
1266 self.history.append(msg_id)
1266 self.history.append(msg_id)
1267 self.metadata[msg_id]['submitted'] = datetime.now()
1267 self.metadata[msg_id]['submitted'] = datetime.now()
1268
1268
1269 return msg
1269 return msg
1270
1270
1271 def send_execute_request(self, socket, code, silent=True, metadata=None, ident=None):
1271 def send_execute_request(self, socket, code, silent=True, metadata=None, ident=None):
1272 """construct and send an execute request via a socket.
1272 """construct and send an execute request via a socket.
1273
1273
1274 """
1274 """
1275
1275
1276 if self._closed:
1276 if self._closed:
1277 raise RuntimeError("Client cannot be used after its sockets have been closed")
1277 raise RuntimeError("Client cannot be used after its sockets have been closed")
1278
1278
1279 # defaults:
1279 # defaults:
1280 metadata = metadata if metadata is not None else {}
1280 metadata = metadata if metadata is not None else {}
1281
1281
1282 # validate arguments
1282 # validate arguments
1283 if not isinstance(code, string_types):
1283 if not isinstance(code, string_types):
1284 raise TypeError("code must be text, not %s" % type(code))
1284 raise TypeError("code must be text, not %s" % type(code))
1285 if not isinstance(metadata, dict):
1285 if not isinstance(metadata, dict):
1286 raise TypeError("metadata must be dict, not %s" % type(metadata))
1286 raise TypeError("metadata must be dict, not %s" % type(metadata))
1287
1287
1288 content = dict(code=code, silent=bool(silent), user_variables=[], user_expressions={})
1288 content = dict(code=code, silent=bool(silent), user_expressions={})
1289
1289
1290
1290
1291 msg = self.session.send(socket, "execute_request", content=content, ident=ident,
1291 msg = self.session.send(socket, "execute_request", content=content, ident=ident,
1292 metadata=metadata)
1292 metadata=metadata)
1293
1293
1294 msg_id = msg['header']['msg_id']
1294 msg_id = msg['header']['msg_id']
1295 self.outstanding.add(msg_id)
1295 self.outstanding.add(msg_id)
1296 if ident:
1296 if ident:
1297 # possibly routed to a specific engine
1297 # possibly routed to a specific engine
1298 if isinstance(ident, list):
1298 if isinstance(ident, list):
1299 ident = ident[-1]
1299 ident = ident[-1]
1300 if ident in self._engines.values():
1300 if ident in self._engines.values():
1301 # save for later, in case of engine death
1301 # save for later, in case of engine death
1302 self._outstanding_dict[ident].add(msg_id)
1302 self._outstanding_dict[ident].add(msg_id)
1303 self.history.append(msg_id)
1303 self.history.append(msg_id)
1304 self.metadata[msg_id]['submitted'] = datetime.now()
1304 self.metadata[msg_id]['submitted'] = datetime.now()
1305
1305
1306 return msg
1306 return msg
1307
1307
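# --- Usage sketch (not part of this diff): execute requests after this change ---
# View.execute() routes through send_execute_request(); the request content now
# carries only `code`, `silent`, and an empty `user_expressions` dict (the old
# `user_variables` field is gone). Assumes a running cluster.
from IPython.parallel import Client

rc = Client()
dv = rc[:]
ar = dv.execute('import os; pid = os.getpid()', block=False)
ar.get()           # wait for the execute_reply messages
print(dv['pid'])   # pull the resulting values back from every engine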
1308 #--------------------------------------------------------------------------
1308 #--------------------------------------------------------------------------
1309 # construct a View object
1309 # construct a View object
1310 #--------------------------------------------------------------------------
1310 #--------------------------------------------------------------------------
1311
1311
1312 def load_balanced_view(self, targets=None):
1312 def load_balanced_view(self, targets=None):
1313 """construct a DirectView object.
1313 """construct a DirectView object.
1314
1314
1315 If no arguments are specified, create a LoadBalancedView
1315 If no arguments are specified, create a LoadBalancedView
1316 using all engines.
1316 using all engines.
1317
1317
1318 Parameters
1318 Parameters
1319 ----------
1319 ----------
1320
1320
1321 targets: list,slice,int,etc. [default: use all engines]
1321 targets: list,slice,int,etc. [default: use all engines]
1322 The subset of engines across which to load-balance
1322 The subset of engines across which to load-balance
1323 """
1323 """
1324 if targets == 'all':
1324 if targets == 'all':
1325 targets = None
1325 targets = None
1326 if targets is not None:
1326 if targets is not None:
1327 targets = self._build_targets(targets)[1]
1327 targets = self._build_targets(targets)[1]
1328 return LoadBalancedView(client=self, socket=self._task_socket, targets=targets)
1328 return LoadBalancedView(client=self, socket=self._task_socket, targets=targets)
1329
1329
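# --- Usage sketch (not part of this diff): load-balanced execution ---
# Tasks submitted through a LoadBalancedView go to whichever engine is free,
# rather than to fixed targets. Assumes a running cluster.
from IPython.parallel import Client

rc = Client()
lview = rc.load_balanced_view()
ar = lview.map_async(lambda x: x ** 2, range(32))
print(ar.get())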
1330 def direct_view(self, targets='all'):
1330 def direct_view(self, targets='all'):
1331 """construct a DirectView object.
1331 """construct a DirectView object.
1332
1332
1333 If no targets are specified, create a DirectView using all engines.
1333 If no targets are specified, create a DirectView using all engines.
1334
1334
1335 rc.direct_view('all') is distinguished from rc[:] in that 'all' will
1335 rc.direct_view('all') is distinguished from rc[:] in that 'all' will
1336 evaluate the target engines at each execution, whereas rc[:] will connect to
1336 evaluate the target engines at each execution, whereas rc[:] will connect to
1337 all *current* engines, and that list will not change.
1337 all *current* engines, and that list will not change.
1338
1338
1339 That is, 'all' will always use all engines, whereas rc[:] will not use
1339 That is, 'all' will always use all engines, whereas rc[:] will not use
1340 engines added after the DirectView is constructed.
1340 engines added after the DirectView is constructed.
1341
1341
1342 Parameters
1342 Parameters
1343 ----------
1343 ----------
1344
1344
1345 targets: list,slice,int,etc. [default: use all engines]
1345 targets: list,slice,int,etc. [default: use all engines]
1346 The engines to use for the View
1346 The engines to use for the View
1347 """
1347 """
1348 single = isinstance(targets, int)
1348 single = isinstance(targets, int)
1349 # allow 'all' to be lazily evaluated at each execution
1349 # allow 'all' to be lazily evaluated at each execution
1350 if targets != 'all':
1350 if targets != 'all':
1351 targets = self._build_targets(targets)[1]
1351 targets = self._build_targets(targets)[1]
1352 if single:
1352 if single:
1353 targets = targets[0]
1353 targets = targets[0]
1354 return DirectView(client=self, socket=self._mux_socket, targets=targets)
1354 return DirectView(client=self, socket=self._mux_socket, targets=targets)
1355
1355
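# --- Usage sketch (not part of this diff): 'all' vs a frozen engine list ---
# rc.direct_view('all') re-resolves the engine list on every call, while rc[:]
# stays pinned to the engines that existed when it was created, as explained
# above. Assumes a running cluster.
import os
from IPython.parallel import Client

rc = Client()
dv_all = rc.direct_view('all')   # will also use engines added later
dv_now = rc[:]                   # frozen to the current engines
print(dv_all.apply_sync(os.getpid))
print(dv_now.apply_sync(os.getpid))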
1356 #--------------------------------------------------------------------------
1356 #--------------------------------------------------------------------------
1357 # Query methods
1357 # Query methods
1358 #--------------------------------------------------------------------------
1358 #--------------------------------------------------------------------------
1359
1359
1360 @spin_first
1360 @spin_first
1361 def get_result(self, indices_or_msg_ids=None, block=None):
1361 def get_result(self, indices_or_msg_ids=None, block=None):
1362 """Retrieve a result by msg_id or history index, wrapped in an AsyncResult object.
1362 """Retrieve a result by msg_id or history index, wrapped in an AsyncResult object.
1363
1363
1364 If the client already has the results, no request to the Hub will be made.
1364 If the client already has the results, no request to the Hub will be made.
1365
1365
1366 This is a convenient way to construct AsyncResult objects, which are wrappers
1366 This is a convenient way to construct AsyncResult objects, which are wrappers
1367 that include metadata about execution, and allow for awaiting results that
1367 that include metadata about execution, and allow for awaiting results that
1368 were not submitted by this Client.
1368 were not submitted by this Client.
1369
1369
1370 It can also be a convenient way to retrieve the metadata associated with
1370 It can also be a convenient way to retrieve the metadata associated with
1371 blocking execution, since the metadata is always fetched along with the result.
1371 blocking execution, since the metadata is always fetched along with the result.
1372
1372
1373 Examples
1373 Examples
1374 --------
1374 --------
1375 ::
1375 ::
1376
1376
1377 In [10]: ar = client.get_result(msg_id)
1377 In [10]: ar = client.get_result(msg_id)
1378
1378
1379 Parameters
1379 Parameters
1380 ----------
1380 ----------
1381
1381
1382 indices_or_msg_ids : integer history index, str msg_id, or list of either
1382 indices_or_msg_ids : integer history index, str msg_id, or list of either
1383 The history indices or msg_ids of the results to be retrieved
1383 The history indices or msg_ids of the results to be retrieved
1384
1384
1385 block : bool
1385 block : bool
1386 Whether to wait for the result to be done
1386 Whether to wait for the result to be done
1387
1387
1388 Returns
1388 Returns
1389 -------
1389 -------
1390
1390
1391 AsyncResult
1391 AsyncResult
1392 A single AsyncResult object will always be returned.
1392 A single AsyncResult object will always be returned.
1393
1393
1394 AsyncHubResult
1394 AsyncHubResult
1395 A subclass of AsyncResult that retrieves results from the Hub
1395 A subclass of AsyncResult that retrieves results from the Hub
1396
1396
1397 """
1397 """
1398 block = self.block if block is None else block
1398 block = self.block if block is None else block
1399 if indices_or_msg_ids is None:
1399 if indices_or_msg_ids is None:
1400 indices_or_msg_ids = -1
1400 indices_or_msg_ids = -1
1401
1401
1402 single_result = False
1402 single_result = False
1403 if not isinstance(indices_or_msg_ids, (list,tuple)):
1403 if not isinstance(indices_or_msg_ids, (list,tuple)):
1404 indices_or_msg_ids = [indices_or_msg_ids]
1404 indices_or_msg_ids = [indices_or_msg_ids]
1405 single_result = True
1405 single_result = True
1406
1406
1407 theids = []
1407 theids = []
1408 for id in indices_or_msg_ids:
1408 for id in indices_or_msg_ids:
1409 if isinstance(id, int):
1409 if isinstance(id, int):
1410 id = self.history[id]
1410 id = self.history[id]
1411 if not isinstance(id, string_types):
1411 if not isinstance(id, string_types):
1412 raise TypeError("indices must be str or int, not %r"%id)
1412 raise TypeError("indices must be str or int, not %r"%id)
1413 theids.append(id)
1413 theids.append(id)
1414
1414
1415 local_ids = [msg_id for msg_id in theids if (msg_id in self.outstanding or msg_id in self.results)]
1415 local_ids = [msg_id for msg_id in theids if (msg_id in self.outstanding or msg_id in self.results)]
1416 remote_ids = [msg_id for msg_id in theids if msg_id not in local_ids]
1416 remote_ids = [msg_id for msg_id in theids if msg_id not in local_ids]
1417
1417
1418 # given a single msg_id initially, get_result should get the result itself,
1418 # given a single msg_id initially, get_result should get the result itself,
1419 # not a length-one list
1419 # not a length-one list
1420 if single_result:
1420 if single_result:
1421 theids = theids[0]
1421 theids = theids[0]
1422
1422
1423 if remote_ids:
1423 if remote_ids:
1424 ar = AsyncHubResult(self, msg_ids=theids)
1424 ar = AsyncHubResult(self, msg_ids=theids)
1425 else:
1425 else:
1426 ar = AsyncResult(self, msg_ids=theids)
1426 ar = AsyncResult(self, msg_ids=theids)
1427
1427
1428 if block:
1428 if block:
1429 ar.wait()
1429 ar.wait()
1430
1430
1431 return ar
1431 return ar
1432
1432
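# --- Usage sketch (not part of this diff): re-fetching results by msg_id ---
# get_result() wraps a msg_id (or history index) in an AsyncResult, pulling
# from the Hub only if the result is not cached locally. Assumes a running
# cluster.
import os
from IPython.parallel import Client

rc = Client()
msg_id = rc[:].apply_async(os.getpid).msg_ids[0]
ar = rc.get_result(msg_id, block=True)   # -1 would mean "most recent"
print(ar.get())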
1433 @spin_first
1433 @spin_first
1434 def resubmit(self, indices_or_msg_ids=None, metadata=None, block=None):
1434 def resubmit(self, indices_or_msg_ids=None, metadata=None, block=None):
1435 """Resubmit one or more tasks.
1435 """Resubmit one or more tasks.
1436
1436
1437 in-flight tasks may not be resubmitted.
1437 in-flight tasks may not be resubmitted.
1438
1438
1439 Parameters
1439 Parameters
1440 ----------
1440 ----------
1441
1441
1442 indices_or_msg_ids : integer history index, str msg_id, or list of either
1442 indices_or_msg_ids : integer history index, str msg_id, or list of either
1443 The history indices or msg_ids of the tasks to be resubmitted
1443 The history indices or msg_ids of the tasks to be resubmitted
1444
1444
1445 block : bool
1445 block : bool
1446 Whether to wait for the result to be done
1446 Whether to wait for the result to be done
1447
1447
1448 Returns
1448 Returns
1449 -------
1449 -------
1450
1450
1451 AsyncHubResult
1451 AsyncHubResult
1452 A subclass of AsyncResult that retrieves results from the Hub
1452 A subclass of AsyncResult that retrieves results from the Hub
1453
1453
1454 """
1454 """
1455 block = self.block if block is None else block
1455 block = self.block if block is None else block
1456 if indices_or_msg_ids is None:
1456 if indices_or_msg_ids is None:
1457 indices_or_msg_ids = -1
1457 indices_or_msg_ids = -1
1458
1458
1459 if not isinstance(indices_or_msg_ids, (list,tuple)):
1459 if not isinstance(indices_or_msg_ids, (list,tuple)):
1460 indices_or_msg_ids = [indices_or_msg_ids]
1460 indices_or_msg_ids = [indices_or_msg_ids]
1461
1461
1462 theids = []
1462 theids = []
1463 for id in indices_or_msg_ids:
1463 for id in indices_or_msg_ids:
1464 if isinstance(id, int):
1464 if isinstance(id, int):
1465 id = self.history[id]
1465 id = self.history[id]
1466 if not isinstance(id, string_types):
1466 if not isinstance(id, string_types):
1467 raise TypeError("indices must be str or int, not %r"%id)
1467 raise TypeError("indices must be str or int, not %r"%id)
1468 theids.append(id)
1468 theids.append(id)
1469
1469
1470 content = dict(msg_ids = theids)
1470 content = dict(msg_ids = theids)
1471
1471
1472 self.session.send(self._query_socket, 'resubmit_request', content)
1472 self.session.send(self._query_socket, 'resubmit_request', content)
1473
1473
1474 zmq.select([self._query_socket], [], [])
1474 zmq.select([self._query_socket], [], [])
1475 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1475 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1476 if self.debug:
1476 if self.debug:
1477 pprint(msg)
1477 pprint(msg)
1478 content = msg['content']
1478 content = msg['content']
1479 if content['status'] != 'ok':
1479 if content['status'] != 'ok':
1480 raise self._unwrap_exception(content)
1480 raise self._unwrap_exception(content)
1481 mapping = content['resubmitted']
1481 mapping = content['resubmitted']
1482 new_ids = [ mapping[msg_id] for msg_id in theids ]
1482 new_ids = [ mapping[msg_id] for msg_id in theids ]
1483
1483
1484 ar = AsyncHubResult(self, msg_ids=new_ids)
1484 ar = AsyncHubResult(self, msg_ids=new_ids)
1485
1485
1486 if block:
1486 if block:
1487 ar.wait()
1487 ar.wait()
1488
1488
1489 return ar
1489 return ar
1490
1490
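# --- Usage sketch (not part of this diff): resubmitting a finished task ---
# resubmit() re-runs a task recorded in the Hub's database under a new msg_id;
# only tasks that are no longer in flight qualify. Assumes a running cluster
# with a task database enabled.
import os
from IPython.parallel import Client

rc = Client()
lview = rc.load_balanced_view()
ar = lview.apply_async(os.getpid)
ar.get()                                   # make sure it has finished
ar2 = rc.resubmit(ar.msg_ids[0], block=True)
print(ar2.get())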
1491 @spin_first
1491 @spin_first
1492 def result_status(self, msg_ids, status_only=True):
1492 def result_status(self, msg_ids, status_only=True):
1493 """Check on the status of the result(s) of the apply request with `msg_ids`.
1493 """Check on the status of the result(s) of the apply request with `msg_ids`.
1494
1494
1495 If status_only is False, then the actual results will be retrieved, else
1495 If status_only is False, then the actual results will be retrieved, else
1496 only the status of the results will be checked.
1496 only the status of the results will be checked.
1497
1497
1498 Parameters
1498 Parameters
1499 ----------
1499 ----------
1500
1500
1501 msg_ids : list of msg_ids
1501 msg_ids : list of msg_ids
1502 if int:
1502 if int:
1503 Passed as index to self.history for convenience.
1503 Passed as index to self.history for convenience.
1504 status_only : bool (default: True)
1504 status_only : bool (default: True)
1505 if False:
1505 if False:
1506 Retrieve the actual results of completed tasks.
1506 Retrieve the actual results of completed tasks.
1507
1507
1508 Returns
1508 Returns
1509 -------
1509 -------
1510
1510
1511 results : dict
1511 results : dict
1512 There will always be the keys 'pending' and 'completed', which will
1512 There will always be the keys 'pending' and 'completed', which will
1513 be lists of msg_ids that are incomplete or complete. If `status_only`
1513 be lists of msg_ids that are incomplete or complete. If `status_only`
1514 is False, then completed results will be keyed by their `msg_id`.
1514 is False, then completed results will be keyed by their `msg_id`.
1515 """
1515 """
1516 if not isinstance(msg_ids, (list,tuple)):
1516 if not isinstance(msg_ids, (list,tuple)):
1517 msg_ids = [msg_ids]
1517 msg_ids = [msg_ids]
1518
1518
1519 theids = []
1519 theids = []
1520 for msg_id in msg_ids:
1520 for msg_id in msg_ids:
1521 if isinstance(msg_id, int):
1521 if isinstance(msg_id, int):
1522 msg_id = self.history[msg_id]
1522 msg_id = self.history[msg_id]
1523 if not isinstance(msg_id, string_types):
1523 if not isinstance(msg_id, string_types):
1524 raise TypeError("msg_ids must be str, not %r"%msg_id)
1524 raise TypeError("msg_ids must be str, not %r"%msg_id)
1525 theids.append(msg_id)
1525 theids.append(msg_id)
1526
1526
1527 completed = []
1527 completed = []
1528 local_results = {}
1528 local_results = {}
1529
1529
1530 # comment this block out to temporarily disable local shortcut:
1530 # comment this block out to temporarily disable local shortcut:
1531 for msg_id in theids:
1531 for msg_id in theids:
1532 if msg_id in self.results:
1532 if msg_id in self.results:
1533 completed.append(msg_id)
1533 completed.append(msg_id)
1534 local_results[msg_id] = self.results[msg_id]
1534 local_results[msg_id] = self.results[msg_id]
1535 theids.remove(msg_id)
1535 theids.remove(msg_id)
1536
1536
1537 if theids: # some not locally cached
1537 if theids: # some not locally cached
1538 content = dict(msg_ids=theids, status_only=status_only)
1538 content = dict(msg_ids=theids, status_only=status_only)
1539 msg = self.session.send(self._query_socket, "result_request", content=content)
1539 msg = self.session.send(self._query_socket, "result_request", content=content)
1540 zmq.select([self._query_socket], [], [])
1540 zmq.select([self._query_socket], [], [])
1541 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1541 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1542 if self.debug:
1542 if self.debug:
1543 pprint(msg)
1543 pprint(msg)
1544 content = msg['content']
1544 content = msg['content']
1545 if content['status'] != 'ok':
1545 if content['status'] != 'ok':
1546 raise self._unwrap_exception(content)
1546 raise self._unwrap_exception(content)
1547 buffers = msg['buffers']
1547 buffers = msg['buffers']
1548 else:
1548 else:
1549 content = dict(completed=[],pending=[])
1549 content = dict(completed=[],pending=[])
1550
1550
1551 content['completed'].extend(completed)
1551 content['completed'].extend(completed)
1552
1552
1553 if status_only:
1553 if status_only:
1554 return content
1554 return content
1555
1555
1556 failures = []
1556 failures = []
1557 # load cached results into result:
1557 # load cached results into result:
1558 content.update(local_results)
1558 content.update(local_results)
1559
1559
1560 # update cache with results:
1560 # update cache with results:
1561 for msg_id in sorted(theids):
1561 for msg_id in sorted(theids):
1562 if msg_id in content['completed']:
1562 if msg_id in content['completed']:
1563 rec = content[msg_id]
1563 rec = content[msg_id]
1564 parent = extract_dates(rec['header'])
1564 parent = extract_dates(rec['header'])
1565 header = extract_dates(rec['result_header'])
1565 header = extract_dates(rec['result_header'])
1566 rcontent = rec['result_content']
1566 rcontent = rec['result_content']
1567 iodict = rec['io']
1567 iodict = rec['io']
1568 if isinstance(rcontent, str):
1568 if isinstance(rcontent, str):
1569 rcontent = self.session.unpack(rcontent)
1569 rcontent = self.session.unpack(rcontent)
1570
1570
1571 md = self.metadata[msg_id]
1571 md = self.metadata[msg_id]
1572 md_msg = dict(
1572 md_msg = dict(
1573 content=rcontent,
1573 content=rcontent,
1574 parent_header=parent,
1574 parent_header=parent,
1575 header=header,
1575 header=header,
1576 metadata=rec['result_metadata'],
1576 metadata=rec['result_metadata'],
1577 )
1577 )
1578 md.update(self._extract_metadata(md_msg))
1578 md.update(self._extract_metadata(md_msg))
1579 if rec.get('received'):
1579 if rec.get('received'):
1580 md['received'] = parse_date(rec['received'])
1580 md['received'] = parse_date(rec['received'])
1581 md.update(iodict)
1581 md.update(iodict)
1582
1582
1583 if rcontent['status'] == 'ok':
1583 if rcontent['status'] == 'ok':
1584 if header['msg_type'] == 'apply_reply':
1584 if header['msg_type'] == 'apply_reply':
1585 res,buffers = serialize.unserialize_object(buffers)
1585 res,buffers = serialize.unserialize_object(buffers)
1586 elif header['msg_type'] == 'execute_reply':
1586 elif header['msg_type'] == 'execute_reply':
1587 res = ExecuteReply(msg_id, rcontent, md)
1587 res = ExecuteReply(msg_id, rcontent, md)
1588 else:
1588 else:
1589 raise KeyError("unhandled msg type: %r" % header['msg_type'])
1589 raise KeyError("unhandled msg type: %r" % header['msg_type'])
1590 else:
1590 else:
1591 res = self._unwrap_exception(rcontent)
1591 res = self._unwrap_exception(rcontent)
1592 failures.append(res)
1592 failures.append(res)
1593
1593
1594 self.results[msg_id] = res
1594 self.results[msg_id] = res
1595 content[msg_id] = res
1595 content[msg_id] = res
1596
1596
1597 if len(theids) == 1 and failures:
1597 if len(theids) == 1 and failures:
1598 raise failures[0]
1598 raise failures[0]
1599
1599
1600 error.collect_exceptions(failures, "result_status")
1600 error.collect_exceptions(failures, "result_status")
1601 return content
1601 return content
1602
1602
1603 @spin_first
1603 @spin_first
1604 def queue_status(self, targets='all', verbose=False):
1604 def queue_status(self, targets='all', verbose=False):
1605 """Fetch the status of engine queues.
1605 """Fetch the status of engine queues.
1606
1606
1607 Parameters
1607 Parameters
1608 ----------
1608 ----------
1609
1609
1610 targets : int/str/list of ints/strs
1610 targets : int/str/list of ints/strs
1611 the engines whose states are to be queried.
1611 the engines whose states are to be queried.
1612 default : all
1612 default : all
1613 verbose : bool
1613 verbose : bool
1614 Whether to return lengths only, or lists of ids for each element
1614 Whether to return lengths only, or lists of ids for each element
1615 """
1615 """
1616 if targets == 'all':
1616 if targets == 'all':
1617 # allow 'all' to be evaluated on the engine
1617 # allow 'all' to be evaluated on the engine
1618 engine_ids = None
1618 engine_ids = None
1619 else:
1619 else:
1620 engine_ids = self._build_targets(targets)[1]
1620 engine_ids = self._build_targets(targets)[1]
1621 content = dict(targets=engine_ids, verbose=verbose)
1621 content = dict(targets=engine_ids, verbose=verbose)
1622 self.session.send(self._query_socket, "queue_request", content=content)
1622 self.session.send(self._query_socket, "queue_request", content=content)
1623 idents,msg = self.session.recv(self._query_socket, 0)
1623 idents,msg = self.session.recv(self._query_socket, 0)
1624 if self.debug:
1624 if self.debug:
1625 pprint(msg)
1625 pprint(msg)
1626 content = msg['content']
1626 content = msg['content']
1627 status = content.pop('status')
1627 status = content.pop('status')
1628 if status != 'ok':
1628 if status != 'ok':
1629 raise self._unwrap_exception(content)
1629 raise self._unwrap_exception(content)
1630 content = rekey(content)
1630 content = rekey(content)
1631 if isinstance(targets, int):
1631 if isinstance(targets, int):
1632 return content[targets]
1632 return content[targets]
1633 else:
1633 else:
1634 return content
1634 return content
1635
1635
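# --- Usage sketch (not part of this diff): inspecting engine queues ---
# queue_status() reports queue/completed/task counts per engine, plus an
# 'unassigned' entry for scheduler tasks not yet routed. Assumes a running
# cluster; engine 0 is an arbitrary example.
from IPython.parallel import Client

rc = Client()
print(rc.queue_status())                          # counts for every engine
print(rc.queue_status(targets=0, verbose=True))   # actual msg_ids, engine 0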
1636 def _build_msgids_from_target(self, targets=None):
1636 def _build_msgids_from_target(self, targets=None):
1637 """Build a list of msg_ids from the list of engine targets"""
1637 """Build a list of msg_ids from the list of engine targets"""
1638 if not targets: # needed as _build_targets otherwise uses all engines
1638 if not targets: # needed as _build_targets otherwise uses all engines
1639 return []
1639 return []
1640 target_ids = self._build_targets(targets)[0]
1640 target_ids = self._build_targets(targets)[0]
1641 return [md_id for md_id in self.metadata if self.metadata[md_id]["engine_uuid"] in target_ids]
1641 return [md_id for md_id in self.metadata if self.metadata[md_id]["engine_uuid"] in target_ids]
1642
1642
1643 def _build_msgids_from_jobs(self, jobs=None):
1643 def _build_msgids_from_jobs(self, jobs=None):
1644 """Build a list of msg_ids from "jobs" """
1644 """Build a list of msg_ids from "jobs" """
1645 if not jobs:
1645 if not jobs:
1646 return []
1646 return []
1647 msg_ids = []
1647 msg_ids = []
1648 if isinstance(jobs, string_types + (AsyncResult,)):
1648 if isinstance(jobs, string_types + (AsyncResult,)):
1649 jobs = [jobs]
1649 jobs = [jobs]
1650 bad_ids = [obj for obj in jobs if not isinstance(obj, string_types + (AsyncResult,))]
1650 bad_ids = [obj for obj in jobs if not isinstance(obj, string_types + (AsyncResult,))]
1651 if bad_ids:
1651 if bad_ids:
1652 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1652 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1653 for j in jobs:
1653 for j in jobs:
1654 if isinstance(j, AsyncResult):
1654 if isinstance(j, AsyncResult):
1655 msg_ids.extend(j.msg_ids)
1655 msg_ids.extend(j.msg_ids)
1656 else:
1656 else:
1657 msg_ids.append(j)
1657 msg_ids.append(j)
1658 return msg_ids
1658 return msg_ids
1659
1659
1660 def purge_local_results(self, jobs=[], targets=[]):
1660 def purge_local_results(self, jobs=[], targets=[]):
1661 """Clears the client caches of results and their metadata.
1661 """Clears the client caches of results and their metadata.
1662
1662
1663 Individual results can be purged by msg_id, or the entire
1663 Individual results can be purged by msg_id, or the entire
1664 history of specific targets can be purged.
1664 history of specific targets can be purged.
1665
1665
1666 Use `purge_local_results('all')` to scrub everything from the Client's
1666 Use `purge_local_results('all')` to scrub everything from the Client's
1667 results and metadata caches.
1667 results and metadata caches.
1668
1668
1669 After this call all `AsyncResults` are invalid and should be discarded.
1669 After this call all `AsyncResults` are invalid and should be discarded.
1670
1670
1671 If you must "reget" the results, you can still do so by using
1671 If you must "reget" the results, you can still do so by using
1672 `client.get_result(msg_id)` or `client.get_result(asyncresult)`. This will
1672 `client.get_result(msg_id)` or `client.get_result(asyncresult)`. This will
1673 redownload the results from the hub if they are still available
1673 redownload the results from the hub if they are still available
1674 (i.e. `client.purge_hub_results(...)` has not been called).
1674 (i.e. `client.purge_hub_results(...)` has not been called).
1675
1675
1676 Parameters
1676 Parameters
1677 ----------
1677 ----------
1678
1678
1679 jobs : str or list of str or AsyncResult objects
1679 jobs : str or list of str or AsyncResult objects
1680 the msg_ids whose results should be purged.
1680 the msg_ids whose results should be purged.
1681 targets : int/list of ints
1681 targets : int/list of ints
1682 The engines, by integer ID, whose entire result histories are to be purged.
1682 The engines, by integer ID, whose entire result histories are to be purged.
1683
1683
1684 Raises
1684 Raises
1685 ------
1685 ------
1686
1686
1687 RuntimeError : if any of the tasks to be purged are still outstanding.
1687 RuntimeError : if any of the tasks to be purged are still outstanding.
1688
1688
1689 """
1689 """
1690 if not targets and not jobs:
1690 if not targets and not jobs:
1691 raise ValueError("Must specify at least one of `targets` and `jobs`")
1691 raise ValueError("Must specify at least one of `targets` and `jobs`")
1692
1692
1693 if jobs == 'all':
1693 if jobs == 'all':
1694 if self.outstanding:
1694 if self.outstanding:
1695 raise RuntimeError("Can't purge outstanding tasks: %s" % self.outstanding)
1695 raise RuntimeError("Can't purge outstanding tasks: %s" % self.outstanding)
1696 self.results.clear()
1696 self.results.clear()
1697 self.metadata.clear()
1697 self.metadata.clear()
1698 else:
1698 else:
1699 msg_ids = set()
1699 msg_ids = set()
1700 msg_ids.update(self._build_msgids_from_target(targets))
1700 msg_ids.update(self._build_msgids_from_target(targets))
1701 msg_ids.update(self._build_msgids_from_jobs(jobs))
1701 msg_ids.update(self._build_msgids_from_jobs(jobs))
1702 still_outstanding = self.outstanding.intersection(msg_ids)
1702 still_outstanding = self.outstanding.intersection(msg_ids)
1703 if still_outstanding:
1703 if still_outstanding:
1704 raise RuntimeError("Can't purge outstanding tasks: %s" % still_outstanding)
1704 raise RuntimeError("Can't purge outstanding tasks: %s" % still_outstanding)
1705 for mid in msg_ids:
1705 for mid in msg_ids:
1706 self.results.pop(mid)
1706 self.results.pop(mid)
1707 self.metadata.pop(mid)
1707 self.metadata.pop(mid)
1708
1708
1709
1709
1710 @spin_first
1710 @spin_first
1711 def purge_hub_results(self, jobs=[], targets=[]):
1711 def purge_hub_results(self, jobs=[], targets=[]):
1712 """Tell the Hub to forget results.
1712 """Tell the Hub to forget results.
1713
1713
1714 Individual results can be purged by msg_id, or the entire
1714 Individual results can be purged by msg_id, or the entire
1715 history of specific targets can be purged.
1715 history of specific targets can be purged.
1716
1716
1717 Use `purge_hub_results('all')` to scrub everything from the Hub's db.
1717 Use `purge_hub_results('all')` to scrub everything from the Hub's db.
1718
1718
1719 Parameters
1719 Parameters
1720 ----------
1720 ----------
1721
1721
1722 jobs : str or list of str or AsyncResult objects
1722 jobs : str or list of str or AsyncResult objects
1723 the msg_ids whose results should be forgotten.
1723 the msg_ids whose results should be forgotten.
1724 targets : int/str/list of ints/strs
1724 targets : int/str/list of ints/strs
1725 The targets, by int_id, whose entire history is to be purged.
1725 The targets, by int_id, whose entire history is to be purged.
1726
1726
1727 default : None
1727 default : None
1728 """
1728 """
1729 if not targets and not jobs:
1729 if not targets and not jobs:
1730 raise ValueError("Must specify at least one of `targets` and `jobs`")
1730 raise ValueError("Must specify at least one of `targets` and `jobs`")
1731 if targets:
1731 if targets:
1732 targets = self._build_targets(targets)[1]
1732 targets = self._build_targets(targets)[1]
1733
1733
1734 # construct msg_ids from jobs
1734 # construct msg_ids from jobs
1735 if jobs == 'all':
1735 if jobs == 'all':
1736 msg_ids = jobs
1736 msg_ids = jobs
1737 else:
1737 else:
1738 msg_ids = self._build_msgids_from_jobs(jobs)
1738 msg_ids = self._build_msgids_from_jobs(jobs)
1739
1739
1740 content = dict(engine_ids=targets, msg_ids=msg_ids)
1740 content = dict(engine_ids=targets, msg_ids=msg_ids)
1741 self.session.send(self._query_socket, "purge_request", content=content)
1741 self.session.send(self._query_socket, "purge_request", content=content)
1742 idents, msg = self.session.recv(self._query_socket, 0)
1742 idents, msg = self.session.recv(self._query_socket, 0)
1743 if self.debug:
1743 if self.debug:
1744 pprint(msg)
1744 pprint(msg)
1745 content = msg['content']
1745 content = msg['content']
1746 if content['status'] != 'ok':
1746 if content['status'] != 'ok':
1747 raise self._unwrap_exception(content)
1747 raise self._unwrap_exception(content)
1748
1748
1749 def purge_results(self, jobs=[], targets=[]):
1749 def purge_results(self, jobs=[], targets=[]):
1750 """Clears the cached results from both the hub and the local client
1750 """Clears the cached results from both the hub and the local client
1751
1751
1752 Individual results can be purged by msg_id, or the entire
1752 Individual results can be purged by msg_id, or the entire
1753 history of specific targets can be purged.
1753 history of specific targets can be purged.
1754
1754
1755 Use `purge_results('all')` to scrub every cached result from both the Hub's and
1755 Use `purge_results('all')` to scrub every cached result from both the Hub's and
1756 the Client's db.
1756 the Client's db.
1757
1757
1758 Equivalent to calling both `purge_hub_results()` and `purge_local_results()` with
1758 Equivalent to calling both `purge_hub_results()` and `purge_local_results()` with
1759 the same arguments.
1759 the same arguments.
1760
1760
1761 Parameters
1761 Parameters
1762 ----------
1762 ----------
1763
1763
1764 jobs : str or list of str or AsyncResult objects
1764 jobs : str or list of str or AsyncResult objects
1765 the msg_ids whose results should be forgotten.
1765 the msg_ids whose results should be forgotten.
1766 targets : int/str/list of ints/strs
1766 targets : int/str/list of ints/strs
1767 The targets, by int_id, whose entire history is to be purged.
1767 The targets, by int_id, whose entire history is to be purged.
1768
1768
1769 default : None
1769 default : None
1770 """
1770 """
1771 self.purge_local_results(jobs=jobs, targets=targets)
1771 self.purge_local_results(jobs=jobs, targets=targets)
1772 self.purge_hub_results(jobs=jobs, targets=targets)
1772 self.purge_hub_results(jobs=jobs, targets=targets)
1773
1773
1774 def purge_everything(self):
1774 def purge_everything(self):
1775 """Clears all content from previous Tasks from both the hub and the local client
1775 """Clears all content from previous Tasks from both the hub and the local client
1776
1776
1777 In addition to calling `purge_results("all")` it also deletes the history and
1777 In addition to calling `purge_results("all")` it also deletes the history and
1778 other bookkeeping lists.
1778 other bookkeeping lists.
1779 """
1779 """
1780 self.purge_results("all")
1780 self.purge_results("all")
1781 self.history = []
1781 self.history = []
1782 self.session.digest_history.clear()
1782 self.session.digest_history.clear()
1783
1783
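# --- Usage sketch (not part of this diff): clearing cached results ---
# Frees memory held by old results on the client and in the Hub's database;
# existing AsyncResult objects become invalid afterwards. Assumes a running
# cluster.
import os
from IPython.parallel import Client

rc = Client()
ar = rc[:].apply_async(os.getpid)
ar.get()
rc.purge_results(jobs=ar)   # forget just this result, locally and on the Hub
rc.purge_everything()       # or drop every cached result plus the history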
1784 @spin_first
1784 @spin_first
1785 def hub_history(self):
1785 def hub_history(self):
1786 """Get the Hub's history
1786 """Get the Hub's history
1787
1787
1788 Just like the Client, the Hub has a history, which is a list of msg_ids.
1788 Just like the Client, the Hub has a history, which is a list of msg_ids.
1789 This will contain the history of all clients, and, depending on configuration,
1789 This will contain the history of all clients, and, depending on configuration,
1790 may contain history across multiple cluster sessions.
1790 may contain history across multiple cluster sessions.
1791
1791
1792 Any msg_id returned here is a valid argument to `get_result`.
1792 Any msg_id returned here is a valid argument to `get_result`.
1793
1793
1794 Returns
1794 Returns
1795 -------
1795 -------
1796
1796
1797 msg_ids : list of strs
1797 msg_ids : list of strs
1798 list of all msg_ids, ordered by task submission time.
1798 list of all msg_ids, ordered by task submission time.
1799 """
1799 """
1800
1800
1801 self.session.send(self._query_socket, "history_request", content={})
1801 self.session.send(self._query_socket, "history_request", content={})
1802 idents, msg = self.session.recv(self._query_socket, 0)
1802 idents, msg = self.session.recv(self._query_socket, 0)
1803
1803
1804 if self.debug:
1804 if self.debug:
1805 pprint(msg)
1805 pprint(msg)
1806 content = msg['content']
1806 content = msg['content']
1807 if content['status'] != 'ok':
1807 if content['status'] != 'ok':
1808 raise self._unwrap_exception(content)
1808 raise self._unwrap_exception(content)
1809 else:
1809 else:
1810 return content['history']
1810 return content['history']
1811
1811
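# --- Usage sketch (not part of this diff): browsing the Hub's history ---
# The Hub's history can include work submitted by other clients; any msg_id it
# returns is a valid argument to get_result(). Assumes a running cluster.
from IPython.parallel import Client

rc = Client()
hist = rc.hub_history()
if hist:
    ar = rc.get_result(hist[-1], block=True)   # most recent task anywhere
    print(ar.get())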
1812 @spin_first
1812 @spin_first
1813 def db_query(self, query, keys=None):
1813 def db_query(self, query, keys=None):
1814 """Query the Hub's TaskRecord database
1814 """Query the Hub's TaskRecord database
1815
1815
1816 This will return a list of task record dicts that match `query`
1816 This will return a list of task record dicts that match `query`
1817
1817
1818 Parameters
1818 Parameters
1819 ----------
1819 ----------
1820
1820
1821 query : mongodb query dict
1821 query : mongodb query dict
1822 The search dict. See mongodb query docs for details.
1822 The search dict. See mongodb query docs for details.
1823 keys : list of strs [optional]
1823 keys : list of strs [optional]
1824 The subset of keys to be returned. The default is to fetch everything but buffers.
1824 The subset of keys to be returned. The default is to fetch everything but buffers.
1825 'msg_id' will *always* be included.
1825 'msg_id' will *always* be included.
1826 """
1826 """
1827 if isinstance(keys, string_types):
1827 if isinstance(keys, string_types):
1828 keys = [keys]
1828 keys = [keys]
1829 content = dict(query=query, keys=keys)
1829 content = dict(query=query, keys=keys)
1830 self.session.send(self._query_socket, "db_request", content=content)
1830 self.session.send(self._query_socket, "db_request", content=content)
1831 idents, msg = self.session.recv(self._query_socket, 0)
1831 idents, msg = self.session.recv(self._query_socket, 0)
1832 if self.debug:
1832 if self.debug:
1833 pprint(msg)
1833 pprint(msg)
1834 content = msg['content']
1834 content = msg['content']
1835 if content['status'] != 'ok':
1835 if content['status'] != 'ok':
1836 raise self._unwrap_exception(content)
1836 raise self._unwrap_exception(content)
1837
1837
1838 records = content['records']
1838 records = content['records']
1839
1839
1840 buffer_lens = content['buffer_lens']
1840 buffer_lens = content['buffer_lens']
1841 result_buffer_lens = content['result_buffer_lens']
1841 result_buffer_lens = content['result_buffer_lens']
1842 buffers = msg['buffers']
1842 buffers = msg['buffers']
1843 has_bufs = buffer_lens is not None
1843 has_bufs = buffer_lens is not None
1844 has_rbufs = result_buffer_lens is not None
1844 has_rbufs = result_buffer_lens is not None
1845 for i,rec in enumerate(records):
1845 for i,rec in enumerate(records):
1846 # unpack datetime objects
1846 # unpack datetime objects
1847 for hkey in ('header', 'result_header'):
1847 for hkey in ('header', 'result_header'):
1848 if hkey in rec:
1848 if hkey in rec:
1849 rec[hkey] = extract_dates(rec[hkey])
1849 rec[hkey] = extract_dates(rec[hkey])
1850 for dtkey in ('submitted', 'started', 'completed', 'received'):
1850 for dtkey in ('submitted', 'started', 'completed', 'received'):
1851 if dtkey in rec:
1851 if dtkey in rec:
1852 rec[dtkey] = parse_date(rec[dtkey])
1852 rec[dtkey] = parse_date(rec[dtkey])
1853 # relink buffers
1853 # relink buffers
1854 if has_bufs:
1854 if has_bufs:
1855 blen = buffer_lens[i]
1855 blen = buffer_lens[i]
1856 rec['buffers'], buffers = buffers[:blen],buffers[blen:]
1856 rec['buffers'], buffers = buffers[:blen],buffers[blen:]
1857 if has_rbufs:
1857 if has_rbufs:
1858 blen = result_buffer_lens[i]
1858 blen = result_buffer_lens[i]
1859 rec['result_buffers'], buffers = buffers[:blen],buffers[blen:]
1859 rec['result_buffers'], buffers = buffers[:blen],buffers[blen:]
1860
1860
1861 return records
1861 return records
1862
1862
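# --- Usage sketch (not part of this diff): querying the task database ---
# A MongoDB-style query against the Hub's TaskRecord store, restricted to a
# few keys; the field names follow the TaskRecord schema. Assumes a running
# cluster whose database backend supports the '$ne' operator.
from IPython.parallel import Client

rc = Client()
recs = rc.db_query({'completed': {'$ne': None}},
                   keys=['msg_id', 'submitted', 'completed'])
for rec in recs:
    print(rec['msg_id'], rec['completed'] - rec['submitted'])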
1863 __all__ = [ 'Client' ]
1863 __all__ = [ 'Client' ]