Remove unused imports in IPython.kernel
Thomas Kluyver
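The change is mechanical: each hunk below only deletes import lines whose names are never referenced in the module. As a rough illustration (not part of this commit), unused top-level imports can be spotted with the standard-library ast module; the helper below and its name are hypothetical.

# Hypothetical helper, not part of this commit: report module-level imports
# whose bound names are never used anywhere else in the file.
import ast
import sys

def unused_imports(path):
    with open(path) as f:
        tree = ast.parse(f.read(), filename=path)
    imported = {}  # bound name -> line number where it was imported
    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            for alias in node.names:
                # "import zmq.eventloop" binds the top-level name "zmq"
                imported[alias.asname or alias.name.split('.')[0]] = node.lineno
        elif isinstance(node, ast.ImportFrom):
            for alias in node.names:
                imported[alias.asname or alias.name] = node.lineno
    used = set(node.id for node in ast.walk(tree) if isinstance(node, ast.Name))
    return sorted((line, name) for name, line in imported.items() if name not in used)

if __name__ == '__main__':
    for line, name in unused_imports(sys.argv[1]):
        print('%s:%d: %s appears unused' % (sys.argv[1], line, name))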
@@ -1,112 +1,111 @@
 #-------------------------------------------------------------------------------
 # Copyright (C) 2012 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #-------------------------------------------------------------------------------

 #-----------------------------------------------------------------------------
 # Imports
 #-----------------------------------------------------------------------------
 from __future__ import print_function

 # Standard library imports
 import unittest

 # Local imports
 from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
-from IPython.kernel.inprocess.ipkernel import InProcessKernel
 from IPython.kernel.inprocess.manager import InProcessKernelManager

 #-----------------------------------------------------------------------------
 # Test case
 #-----------------------------------------------------------------------------

 class InProcessKernelManagerTestCase(unittest.TestCase):

     def test_interface(self):
         """ Does the in-process kernel manager implement the basic KM interface?
         """
         km = InProcessKernelManager()
         self.assert_(not km.has_kernel)

         km.start_kernel()
         self.assert_(km.has_kernel)
         self.assert_(km.kernel is not None)

         kc = BlockingInProcessKernelClient(kernel=km.kernel)
         self.assert_(not kc.channels_running)

         kc.start_channels()
         self.assert_(kc.channels_running)

         old_kernel = km.kernel
         km.restart_kernel()
         self.assert_(km.kernel is not None)
         self.assertNotEquals(km.kernel, old_kernel)

         km.shutdown_kernel()
         self.assert_(not km.has_kernel)

         self.assertRaises(NotImplementedError, km.interrupt_kernel)
         self.assertRaises(NotImplementedError, km.signal_kernel, 9)

         kc.stop_channels()
         self.assert_(not kc.channels_running)

     def test_execute(self):
         """ Does executing code in an in-process kernel work?
         """
         km = InProcessKernelManager()
         km.start_kernel()
         kc = BlockingInProcessKernelClient(kernel=km.kernel)
         kc.start_channels()
         kc.execute('foo = 1')
         self.assertEquals(km.kernel.shell.user_ns['foo'], 1)

     def test_complete(self):
         """ Does requesting completion from an in-process kernel work?
         """
         km = InProcessKernelManager()
         km.start_kernel()
         kc = BlockingInProcessKernelClient(kernel=km.kernel)
         kc.start_channels()
         km.kernel.shell.push({'my_bar': 0, 'my_baz': 1})
         kc.complete('my_ba', 'my_ba', 5)
         msg = kc.get_shell_msg()
         self.assertEqual(msg['header']['msg_type'], 'complete_reply')
         self.assertEqual(sorted(msg['content']['matches']),
                          ['my_bar', 'my_baz'])

     def test_object_info(self):
         """ Does requesting object information from an in-process kernel work?
         """
         km = InProcessKernelManager()
         km.start_kernel()
         kc = BlockingInProcessKernelClient(kernel=km.kernel)
         kc.start_channels()
         km.kernel.shell.user_ns['foo'] = 1
         kc.object_info('foo')
         msg = kc.get_shell_msg()
         self.assertEquals(msg['header']['msg_type'], 'object_info_reply')
         self.assertEquals(msg['content']['name'], 'foo')
         self.assertEquals(msg['content']['type_name'], 'int')

     def test_history(self):
         """ Does requesting history from an in-process kernel work?
         """
         km = InProcessKernelManager()
         km.start_kernel()
         kc = BlockingInProcessKernelClient(kernel=km.kernel)
         kc.start_channels()
         kc.execute('%who')
         kc.history(hist_access_type='tail', n=1)
         msg = kc.shell_channel.get_msgs()[-1]
         self.assertEquals(msg['header']['msg_type'], 'history_reply')
         history = msg['content']['history']
         self.assertEquals(len(history), 1)
         self.assertEquals(history[0][2], '%who')


 if __name__ == '__main__':
     unittest.main()
@@ -1,63 +1,62 @@
 """A kernel manager with a tornado IOLoop"""

 #-----------------------------------------------------------------------------
 # Copyright (C) 2013 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #-----------------------------------------------------------------------------

 #-----------------------------------------------------------------------------
 # Imports
 #-----------------------------------------------------------------------------

 from __future__ import absolute_import

-import zmq
 from zmq.eventloop import ioloop
 from zmq.eventloop.zmqstream import ZMQStream

 from IPython.utils.traitlets import (
     Instance
 )

 from IPython.kernel.manager import KernelManager
 from .restarter import IOLoopKernelRestarter

 #-----------------------------------------------------------------------------
 # Code
 #-----------------------------------------------------------------------------


 def as_zmqstream(f):
     def wrapped(self, *args, **kwargs):
         socket = f(self, *args, **kwargs)
         return ZMQStream(socket, self.loop)
     return wrapped

 class IOLoopKernelManager(KernelManager):

     loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
     def _loop_default(self):
         return ioloop.IOLoop.instance()

     _restarter = Instance('IPython.kernel.ioloop.IOLoopKernelRestarter')

     def start_restarter(self):
         if self.autorestart and self.has_kernel:
             if self._restarter is None:
                 self._restarter = IOLoopKernelRestarter(
                     kernel_manager=self, loop=self.loop,
                     parent=self, log=self.log
                 )
             self._restarter.start()

     def stop_restarter(self):
         if self.autorestart:
             if self._restarter is not None:
                 self._restarter.stop()

     connect_shell = as_zmqstream(KernelManager.connect_shell)
     connect_iopub = as_zmqstream(KernelManager.connect_iopub)
     connect_stdin = as_zmqstream(KernelManager.connect_stdin)
     connect_hb = as_zmqstream(KernelManager.connect_hb)
@@ -1,55 +1,54 @@
 """A basic in process kernel monitor with autorestarting.

 This watches a kernel's state using KernelManager.is_alive and auto
 restarts the kernel if it dies.
 """

 #-----------------------------------------------------------------------------
 # Copyright (C) 2013 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #-----------------------------------------------------------------------------

 #-----------------------------------------------------------------------------
 # Imports
 #-----------------------------------------------------------------------------

 from __future__ import absolute_import

-import zmq
 from zmq.eventloop import ioloop


 from IPython.kernel.restarter import KernelRestarter
 from IPython.utils.traitlets import (
-    Instance, Float, List,
+    Instance,
 )

 #-----------------------------------------------------------------------------
 # Code
 #-----------------------------------------------------------------------------

 class IOLoopKernelRestarter(KernelRestarter):
     """Monitor and autorestart a kernel."""

     loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
     def _loop_default(self):
         return ioloop.IOLoop.instance()

     _pcallback = None

     def start(self):
         """Start the polling of the kernel."""
         if self._pcallback is None:
             self._pcallback = ioloop.PeriodicCallback(
                 self.poll, 1000*self.time_to_dead, self.loop
             )
         self._pcallback.start()

     def stop(self):
         """Stop the kernel polling."""
         if self._pcallback is not None:
             self._pcallback.stop()
             self._pcallback = None

@@ -1,301 +1,301 @@
 """A kernel manager for multiple kernels

 Authors:

 * Brian Granger
 """

 #-----------------------------------------------------------------------------
 # Copyright (C) 2013 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #-----------------------------------------------------------------------------

 #-----------------------------------------------------------------------------
 # Imports
 #-----------------------------------------------------------------------------

 from __future__ import absolute_import

 import os
 import uuid

 import zmq

 from IPython.config.configurable import LoggingConfigurable
 from IPython.utils.importstring import import_item
 from IPython.utils.traitlets import (
-    Instance, Dict, Unicode, Any, DottedObjectName, Bool
+    Instance, Dict, Unicode, Any, DottedObjectName
 )

 #-----------------------------------------------------------------------------
 # Classes
 #-----------------------------------------------------------------------------

 class DuplicateKernelError(Exception):
     pass



 def kernel_method(f):
     """decorator for proxying MKM.method(kernel_id) to individual KMs by ID"""
     def wrapped(self, kernel_id, *args, **kwargs):
         # get the kernel
         km = self.get_kernel(kernel_id)
         method = getattr(km, f.__name__)
         # call the kernel's method
         r = method(*args, **kwargs)
         # last thing, call anything defined in the actual class method
         # such as logging messages
         f(self, kernel_id, *args, **kwargs)
         # return the method result
         return r
     return wrapped


 class MultiKernelManager(LoggingConfigurable):
     """A class for managing multiple kernels."""

     kernel_manager_class = DottedObjectName(
         "IPython.kernel.ioloop.IOLoopKernelManager", config=True,
         help="""The kernel manager class. This is configurable to allow
         subclassing of the KernelManager for customized behavior.
         """
     )
     def _kernel_manager_class_changed(self, name, old, new):
         self.kernel_manager_factory = import_item(new)

     kernel_manager_factory = Any(help="this is kernel_manager_class after import")
     def _kernel_manager_factory_default(self):
         return import_item(self.kernel_manager_class)

     context = Instance('zmq.Context')
     def _context_default(self):
         return zmq.Context.instance()

     connection_dir = Unicode('')

     _kernels = Dict()

     def list_kernel_ids(self):
         """Return a list of the kernel ids of the active kernels."""
         # Create a copy so we can iterate over kernels in operations
         # that delete keys.
         return list(self._kernels.keys())

     def __len__(self):
         """Return the number of running kernels."""
         return len(self.list_kernel_ids())

     def __contains__(self, kernel_id):
         return kernel_id in self._kernels

     def start_kernel(self, **kwargs):
         """Start a new kernel.

         The caller can pick a kernel_id by passing one in as a keyword arg,
         otherwise one will be picked using a uuid.

         To silence the kernel's stdout/stderr, call this using::

             km.start_kernel(stdout=PIPE, stderr=PIPE)

         """
         kernel_id = kwargs.pop('kernel_id', unicode(uuid.uuid4()))
         if kernel_id in self:
             raise DuplicateKernelError('Kernel already exists: %s' % kernel_id)
         # kernel_manager_factory is the constructor for the KernelManager
         # subclass we are using. It can be configured as any Configurable,
         # including things like its transport and ip.
         km = self.kernel_manager_factory(connection_file=os.path.join(
                     self.connection_dir, "kernel-%s.json" % kernel_id),
                     parent=self, autorestart=True, log=self.log
         )
         km.start_kernel(**kwargs)
         self._kernels[kernel_id] = km
         return kernel_id

     @kernel_method
     def shutdown_kernel(self, kernel_id, now=False):
         """Shutdown a kernel by its kernel uuid.

         Parameters
         ==========
         kernel_id : uuid
             The id of the kernel to shutdown.
         now : bool
             Should the kernel be shutdown forcibly using a signal.
         """
         self.log.info("Kernel shutdown: %s" % kernel_id)
         self.remove_kernel(kernel_id)

     def remove_kernel(self, kernel_id):
         """remove a kernel from our mapping.

         Mainly so that a kernel can be removed if it is already dead,
         without having to call shutdown_kernel.

         The kernel object is returned.
         """
         return self._kernels.pop(kernel_id)

     def shutdown_all(self, now=False):
         """Shutdown all kernels."""
         for kid in self.list_kernel_ids():
             self.shutdown_kernel(kid, now=now)

     @kernel_method
     def interrupt_kernel(self, kernel_id):
         """Interrupt (SIGINT) the kernel by its uuid.

         Parameters
         ==========
         kernel_id : uuid
             The id of the kernel to interrupt.
         """
         self.log.info("Kernel interrupted: %s" % kernel_id)

     @kernel_method
     def signal_kernel(self, kernel_id, signum):
         """Sends a signal to the kernel by its uuid.

         Note that since only SIGTERM is supported on Windows, this function
         is only useful on Unix systems.

         Parameters
         ==========
         kernel_id : uuid
             The id of the kernel to signal.
         """
         self.log.info("Signaled Kernel %s with %s" % (kernel_id, signum))

     @kernel_method
     def restart_kernel(self, kernel_id):
         """Restart a kernel by its uuid, keeping the same ports.

         Parameters
         ==========
         kernel_id : uuid
             The id of the kernel to interrupt.
         """
         self.log.info("Kernel restarted: %s" % kernel_id)

     @kernel_method
     def is_alive(self, kernel_id):
         """Is the kernel alive.

         This calls KernelManager.is_alive() which calls Popen.poll on the
         actual kernel subprocess.

         Parameters
         ==========
         kernel_id : uuid
             The id of the kernel.
         """

     def _check_kernel_id(self, kernel_id):
         """check that a kernel id is valid"""
         if kernel_id not in self:
             raise KeyError("Kernel with id not found: %s" % kernel_id)

     def get_kernel(self, kernel_id):
         """Get the single KernelManager object for a kernel by its uuid.

         Parameters
         ==========
         kernel_id : uuid
             The id of the kernel.
         """
         self._check_kernel_id(kernel_id)
         return self._kernels[kernel_id]

     @kernel_method
     def add_restart_callback(self, kernel_id, callback, event='restart'):
         """add a callback for the KernelRestarter"""

     @kernel_method
     def remove_restart_callback(self, kernel_id, callback, event='restart'):
         """remove a callback for the KernelRestarter"""

     @kernel_method
     def get_connection_info(self, kernel_id):
         """Return a dictionary of connection data for a kernel.

         Parameters
         ==========
         kernel_id : uuid
             The id of the kernel.

         Returns
         =======
         connection_dict : dict
             A dict of the information needed to connect to a kernel.
             This includes the ip address and the integer port
             numbers of the different channels (stdin_port, iopub_port,
             shell_port, hb_port).
         """

     @kernel_method
     def connect_iopub(self, kernel_id, identity=None):
         """Return a zmq Socket connected to the iopub channel.

         Parameters
         ==========
         kernel_id : uuid
             The id of the kernel
         identity : bytes (optional)
             The zmq identity of the socket

         Returns
         =======
         stream : zmq Socket or ZMQStream
         """

     @kernel_method
     def connect_shell(self, kernel_id, identity=None):
         """Return a zmq Socket connected to the shell channel.

         Parameters
         ==========
         kernel_id : uuid
             The id of the kernel
         identity : bytes (optional)
             The zmq identity of the socket

         Returns
         =======
         stream : zmq Socket or ZMQStream
         """

     @kernel_method
     def connect_stdin(self, kernel_id, identity=None):
         """Return a zmq Socket connected to the stdin channel.

         Parameters
         ==========
         kernel_id : uuid
             The id of the kernel
         identity : bytes (optional)
             The zmq identity of the socket

         Returns
         =======
         stream : zmq Socket or ZMQStream
         """

     @kernel_method
     def connect_hb(self, kernel_id, identity=None):
         """Return a zmq Socket connected to the hb channel.

         Parameters
         ==========
         kernel_id : uuid
             The id of the kernel
         identity : bytes (optional)
             The zmq identity of the socket

         Returns
         =======
         stream : zmq Socket or ZMQStream
         """
@@ -1,63 +1,60 @@
 """Tests for kernel utility functions

 Authors
 -------
 * MinRK
 """
 #-----------------------------------------------------------------------------
 # Copyright (c) 2011, the IPython Development Team.
 #
 # Distributed under the terms of the Modified BSD License.
 #
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------

 #-----------------------------------------------------------------------------
 # Imports
 #-----------------------------------------------------------------------------

-# Stdlib imports
-from unittest import TestCase
-
 # Third-party imports
 import nose.tools as nt

 # Our own imports
 from IPython.testing import decorators as dec
 from IPython.kernel.launcher import swallow_argv

 #-----------------------------------------------------------------------------
 # Classes and functions
 #-----------------------------------------------------------------------------

 @dec.parametric
 def test_swallow_argv():
     tests = [
         # expected , argv , aliases, flags
         (['-a', '5'], ['-a', '5'], None, None),
         (['5'], ['-a', '5'], None, ['a']),
         ([], ['-a', '5'], ['a'], None),
         ([], ['-a', '5'], ['a'], ['a']),
         ([], ['--foo'], None, ['foo']),
         ([], ['--foo'], ['foobar'], []),
         ([], ['--foo', '5'], ['foo'], []),
         ([], ['--foo=5'], ['foo'], []),
         (['--foo=5'], ['--foo=5'], [], ['foo']),
         (['5'], ['--foo', '5'], [], ['foo']),
         (['bar'], ['--foo', '5', 'bar'], ['foo'], ['foo']),
         (['bar'], ['--foo=5', 'bar'], ['foo'], ['foo']),
         (['5','bar'], ['--foo', '5', 'bar'], None, ['foo']),
         (['bar'], ['--foo', '5', 'bar'], ['foo'], None),
         (['bar'], ['--foo=5', 'bar'], ['foo'], None),
     ]
     for expected, argv, aliases, flags in tests:
         stripped = swallow_argv(argv, aliases=aliases, flags=flags)
         message = '\n'.join(['',
             "argv: %r" % argv,
             "aliases: %r" % aliases,
             "flags : %r" % flags,
             "expected : %r" % expected,
             "returned : %r" % stripped,
         ])
         yield nt.assert_equal(expected, stripped, message)

@@ -1,511 +1,507 @@
 """Test suite for our zeromq-based messaging specification.
 """
 #-----------------------------------------------------------------------------
 # Copyright (C) 2010-2011 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING.txt, distributed as part of this software.
 #-----------------------------------------------------------------------------

 import re
-import sys
-import time
 from subprocess import PIPE
 from Queue import Empty

 import nose.tools as nt

 from IPython.kernel import KernelManager

-
 from IPython.testing import decorators as dec
-from IPython.utils import io
 from IPython.utils.traitlets import (
     HasTraits, TraitError, Bool, Unicode, Dict, Integer, List, Enum, Any,
 )

 #-----------------------------------------------------------------------------
 # Global setup and utilities
 #-----------------------------------------------------------------------------

 def setup():
     global KM, KC
     KM = KernelManager()
     KM.start_kernel(stdout=PIPE, stderr=PIPE)
     KC = KM.client()
     KC.start_channels()

     # wait for kernel to be ready
     KC.execute("pass")
     KC.get_shell_msg(block=True, timeout=5)
     flush_channels()


 def teardown():
     KC.stop_channels()
     KM.shutdown_kernel()


 def flush_channels(kc=None):
     """flush any messages waiting on the queue"""
     if kc is None:
         kc = KC
     for channel in (kc.shell_channel, kc.iopub_channel):
         while True:
             try:
                 msg = channel.get_msg(block=True, timeout=0.1)
             except Empty:
                 break
             else:
                 list(validate_message(msg))


 def execute(code='', kc=None, **kwargs):
     """wrapper for doing common steps for validating an execution request"""
     msg_id = KC.execute(code=code, **kwargs)
     reply = KC.get_shell_msg(timeout=2)
     list(validate_message(reply, 'execute_reply', msg_id))
     busy = KC.get_iopub_msg(timeout=2)
     list(validate_message(busy, 'status', msg_id))
     nt.assert_equal(busy['content']['execution_state'], 'busy')

     if not kwargs.get('silent'):
         pyin = KC.get_iopub_msg(timeout=2)
         list(validate_message(pyin, 'pyin', msg_id))
         nt.assert_equal(pyin['content']['code'], code)

     return msg_id, reply['content']

 #-----------------------------------------------------------------------------
 # MSG Spec References
 #-----------------------------------------------------------------------------


 class Reference(HasTraits):

     """
     Base class for message spec specification testing.

     This class is the core of the message specification test. The
     idea is that child classes implement trait attributes for each
     message keys, so that message keys can be tested against these
     traits using :meth:`check` method.

     """

     def check(self, d):
         """validate a dict against our traits"""
         for key in self.trait_names():
             yield nt.assert_true(key in d, "Missing key: %r, should be found in %s" % (key, d))
             # FIXME: always allow None, probably not a good idea
             if d[key] is None:
                 continue
             try:
                 setattr(self, key, d[key])
             except TraitError as e:
                 yield nt.assert_true(False, str(e))


 class RMessage(Reference):
     msg_id = Unicode()
     msg_type = Unicode()
     header = Dict()
     parent_header = Dict()
     content = Dict()

 class RHeader(Reference):
     msg_id = Unicode()
     msg_type = Unicode()
     session = Unicode()
     username = Unicode()

 class RContent(Reference):
     status = Enum((u'ok', u'error'))


 class ExecuteReply(Reference):
     execution_count = Integer()
     status = Enum((u'ok', u'error'))

     def check(self, d):
         for tst in Reference.check(self, d):
             yield tst
         if d['status'] == 'ok':
             for tst in ExecuteReplyOkay().check(d):
                 yield tst
         elif d['status'] == 'error':
             for tst in ExecuteReplyError().check(d):
                 yield tst


 class ExecuteReplyOkay(Reference):
     payload = List(Dict)
     user_variables = Dict()
     user_expressions = Dict()


 class ExecuteReplyError(Reference):
     ename = Unicode()
     evalue = Unicode()
     traceback = List(Unicode)


 class OInfoReply(Reference):
     name = Unicode()
     found = Bool()
     ismagic = Bool()
     isalias = Bool()
     namespace = Enum((u'builtin', u'magics', u'alias', u'Interactive'))
     type_name = Unicode()
     string_form = Unicode()
     base_class = Unicode()
     length = Integer()
     file = Unicode()
     definition = Unicode()
     argspec = Dict()
     init_definition = Unicode()
     docstring = Unicode()
     init_docstring = Unicode()
     class_docstring = Unicode()
     call_def = Unicode()
     call_docstring = Unicode()
     source = Unicode()

     def check(self, d):
         for tst in Reference.check(self, d):
             yield tst
         if d['argspec'] is not None:
             for tst in ArgSpec().check(d['argspec']):
                 yield tst


 class ArgSpec(Reference):
     args = List(Unicode)
     varargs = Unicode()
     varkw = Unicode()
     defaults = List()


 class Status(Reference):
     execution_state = Enum((u'busy', u'idle', u'starting'))


 class CompleteReply(Reference):
     matches = List(Unicode)


 def Version(num, trait=Integer):
     return List(trait, default_value=[0] * num, minlen=num, maxlen=num)


 class KernelInfoReply(Reference):

     protocol_version = Version(2)
     ipython_version = Version(4, Any)
     language_version = Version(3)
     language = Unicode()

     def _ipython_version_changed(self, name, old, new):
         for v in new:
             nt.assert_true(
                 isinstance(v, int) or isinstance(v, basestring),
                 'expected int or string as version component, got {0!r}'
                 .format(v))


 # IOPub messages

 class PyIn(Reference):
     code = Unicode()
     execution_count = Integer()


 PyErr = ExecuteReplyError


 class Stream(Reference):
     name = Enum((u'stdout', u'stderr'))
     data = Unicode()


 mime_pat = re.compile(r'\w+/\w+')

 class DisplayData(Reference):
     source = Unicode()
     metadata = Dict()
     data = Dict()
     def _data_changed(self, name, old, new):
         for k,v in new.iteritems():
             nt.assert_true(mime_pat.match(k))
             nt.assert_true(isinstance(v, basestring), "expected string data, got %r" % v)


 class PyOut(Reference):
     execution_count = Integer()
     data = Dict()
     def _data_changed(self, name, old, new):
         for k,v in new.iteritems():
             nt.assert_true(mime_pat.match(k))
             nt.assert_true(isinstance(v, basestring), "expected string data, got %r" % v)


 references = {
     'execute_reply' : ExecuteReply(),
     'object_info_reply' : OInfoReply(),
     'status' : Status(),
     'complete_reply' : CompleteReply(),
     'kernel_info_reply': KernelInfoReply(),
     'pyin' : PyIn(),
     'pyout' : PyOut(),
     'pyerr' : PyErr(),
     'stream' : Stream(),
     'display_data' : DisplayData(),
 }
 """
 Specifications of `content` part of the reply messages.
 """


 def validate_message(msg, msg_type=None, parent=None):
     """validate a message

     This is a generator, and must be iterated through to actually
     trigger each test.

     If msg_type and/or parent are given, the msg_type and/or parent msg_id
     are compared with the given values.
     """
     RMessage().check(msg)
     if msg_type:
         yield nt.assert_equal(msg['msg_type'], msg_type)
     if parent:
         yield nt.assert_equal(msg['parent_header']['msg_id'], parent)
     content = msg['content']
     ref = references[msg['msg_type']]
     for tst in ref.check(content):
         yield tst


 #-----------------------------------------------------------------------------
 # Tests
 #-----------------------------------------------------------------------------

 # Shell channel

 @dec.parametric
 def test_execute():
     flush_channels()

     msg_id = KC.execute(code='x=1')
     reply = KC.get_shell_msg(timeout=2)
     for tst in validate_message(reply, 'execute_reply', msg_id):
         yield tst


 @dec.parametric
 def test_execute_silent():
     flush_channels()
     msg_id, reply = execute(code='x=1', silent=True)

     # flush status=idle
     status = KC.iopub_channel.get_msg(timeout=2)
     for tst in validate_message(status, 'status', msg_id):
         yield tst
     nt.assert_equal(status['content']['execution_state'], 'idle')

     yield nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
     count = reply['execution_count']

     msg_id, reply = execute(code='x=2', silent=True)

     # flush status=idle
     status = KC.iopub_channel.get_msg(timeout=2)
     for tst in validate_message(status, 'status', msg_id):
         yield tst
     yield nt.assert_equal(status['content']['execution_state'], 'idle')

     yield nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
     count_2 = reply['execution_count']
     yield nt.assert_equal(count_2, count)


 @dec.parametric
 def test_execute_error():
     flush_channels()

     msg_id, reply = execute(code='1/0')
     yield nt.assert_equal(reply['status'], 'error')
     yield nt.assert_equal(reply['ename'], 'ZeroDivisionError')

     pyerr = KC.iopub_channel.get_msg(timeout=2)
     for tst in validate_message(pyerr, 'pyerr', msg_id):
         yield tst


 def test_execute_inc():
     """execute request should increment execution_count"""
     flush_channels()

     msg_id, reply = execute(code='x=1')
     count = reply['execution_count']

     flush_channels()

     msg_id, reply = execute(code='x=2')
     count_2 = reply['execution_count']
     nt.assert_equal(count_2, count+1)


 def test_user_variables():
     flush_channels()

     msg_id, reply = execute(code='x=1', user_variables=['x'])
     user_variables = reply['user_variables']
     nt.assert_equal(user_variables, {u'x': {
         u'status': u'ok',
         u'data': {u'text/plain': u'1'},
         u'metadata': {},
     }})


 def test_user_variables_fail():
     flush_channels()

     msg_id, reply = execute(code='x=1', user_variables=['nosuchname'])
     user_variables = reply['user_variables']
     foo = user_variables['nosuchname']
     nt.assert_equal(foo['status'], 'error')
     nt.assert_equal(foo['ename'], 'KeyError')


 def test_user_expressions():
     flush_channels()

     msg_id, reply = execute(code='x=1', user_expressions=dict(foo='x+1'))
     user_expressions = reply['user_expressions']
     nt.assert_equal(user_expressions, {u'foo': {
         u'status': u'ok',
         u'data': {u'text/plain': u'2'},
         u'metadata': {},
     }})


 def test_user_expressions_fail():
     flush_channels()

     msg_id, reply = execute(code='x=0', user_expressions=dict(foo='nosuchname'))
     user_expressions = reply['user_expressions']
     foo = user_expressions['foo']
     nt.assert_equal(foo['status'], 'error')
     nt.assert_equal(foo['ename'], 'NameError')


 @dec.parametric
 def test_oinfo():
     flush_channels()

     msg_id = KC.object_info('a')
     reply = KC.get_shell_msg(timeout=2)
     for tst in validate_message(reply, 'object_info_reply', msg_id):
         yield tst


 @dec.parametric
 def test_oinfo_found():
     flush_channels()

     msg_id, reply = execute(code='a=5')

     msg_id = KC.object_info('a')
     reply = KC.get_shell_msg(timeout=2)
     for tst in validate_message(reply, 'object_info_reply', msg_id):
         yield tst
     content = reply['content']
     yield nt.assert_true(content['found'])
     argspec = content['argspec']
     yield nt.assert_true(argspec is None, "didn't expect argspec dict, got %r" % argspec)


 @dec.parametric
 def test_oinfo_detail():
     flush_channels()

     msg_id, reply = execute(code='ip=get_ipython()')

     msg_id = KC.object_info('ip.object_inspect', detail_level=2)
431 msg_id = KC.object_info('ip.object_inspect', detail_level=2)
436 reply = KC.get_shell_msg(timeout=2)
432 reply = KC.get_shell_msg(timeout=2)
437 for tst in validate_message(reply, 'object_info_reply', msg_id):
433 for tst in validate_message(reply, 'object_info_reply', msg_id):
438 yield tst
434 yield tst
439 content = reply['content']
435 content = reply['content']
440 yield nt.assert_true(content['found'])
436 yield nt.assert_true(content['found'])
441 argspec = content['argspec']
437 argspec = content['argspec']
442 yield nt.assert_true(isinstance(argspec, dict), "expected non-empty argspec dict, got %r" % argspec)
438 yield nt.assert_true(isinstance(argspec, dict), "expected non-empty argspec dict, got %r" % argspec)
443 yield nt.assert_equal(argspec['defaults'], [0])
439 yield nt.assert_equal(argspec['defaults'], [0])
444
440
445
441
446 @dec.parametric
442 @dec.parametric
447 def test_oinfo_not_found():
443 def test_oinfo_not_found():
448 flush_channels()
444 flush_channels()
449
445
450 msg_id = KC.object_info('dne')
446 msg_id = KC.object_info('dne')
451 reply = KC.get_shell_msg(timeout=2)
447 reply = KC.get_shell_msg(timeout=2)
452 for tst in validate_message(reply, 'object_info_reply', msg_id):
448 for tst in validate_message(reply, 'object_info_reply', msg_id):
453 yield tst
449 yield tst
454 content = reply['content']
450 content = reply['content']
455 yield nt.assert_false(content['found'])
451 yield nt.assert_false(content['found'])
456
452
457
453
458 @dec.parametric
454 @dec.parametric
459 def test_complete():
455 def test_complete():
460 flush_channels()
456 flush_channels()
461
457
462 msg_id, reply = execute(code="alpha = albert = 5")
458 msg_id, reply = execute(code="alpha = albert = 5")
463
459
464 msg_id = KC.complete('al', 'al', 2)
460 msg_id = KC.complete('al', 'al', 2)
465 reply = KC.get_shell_msg(timeout=2)
461 reply = KC.get_shell_msg(timeout=2)
466 for tst in validate_message(reply, 'complete_reply', msg_id):
462 for tst in validate_message(reply, 'complete_reply', msg_id):
467 yield tst
463 yield tst
468 matches = reply['content']['matches']
464 matches = reply['content']['matches']
469 for name in ('alpha', 'albert'):
465 for name in ('alpha', 'albert'):
470 yield nt.assert_true(name in matches, "Missing match: %r" % name)
466 yield nt.assert_true(name in matches, "Missing match: %r" % name)
471
467
472
468
473 @dec.parametric
469 @dec.parametric
474 def test_kernel_info_request():
470 def test_kernel_info_request():
475 flush_channels()
471 flush_channels()
476
472
477 msg_id = KC.kernel_info()
473 msg_id = KC.kernel_info()
478 reply = KC.get_shell_msg(timeout=2)
474 reply = KC.get_shell_msg(timeout=2)
479 for tst in validate_message(reply, 'kernel_info_reply', msg_id):
475 for tst in validate_message(reply, 'kernel_info_reply', msg_id):
480 yield tst
476 yield tst
481
477
482
478
483 # IOPub channel
479 # IOPub channel
484
480
485
481
486 @dec.parametric
482 @dec.parametric
487 def test_stream():
483 def test_stream():
488 flush_channels()
484 flush_channels()
489
485
490 msg_id, reply = execute("print('hi')")
486 msg_id, reply = execute("print('hi')")
491
487
492 stdout = KC.iopub_channel.get_msg(timeout=2)
488 stdout = KC.iopub_channel.get_msg(timeout=2)
493 for tst in validate_message(stdout, 'stream', msg_id):
489 for tst in validate_message(stdout, 'stream', msg_id):
494 yield tst
490 yield tst
495 content = stdout['content']
491 content = stdout['content']
496 yield nt.assert_equal(content['name'], u'stdout')
492 yield nt.assert_equal(content['name'], u'stdout')
497 yield nt.assert_equal(content['data'], u'hi\n')
493 yield nt.assert_equal(content['data'], u'hi\n')
498
494
499
495
500 @dec.parametric
496 @dec.parametric
501 def test_display_data():
497 def test_display_data():
502 flush_channels()
498 flush_channels()
503
499
504 msg_id, reply = execute("from IPython.core.display import display; display(1)")
500 msg_id, reply = execute("from IPython.core.display import display; display(1)")
505
501
506 display = KC.iopub_channel.get_msg(timeout=2)
502 display = KC.iopub_channel.get_msg(timeout=2)
507 for tst in validate_message(display, 'display_data', parent=msg_id):
503 for tst in validate_message(display, 'display_data', parent=msg_id):
508 yield tst
504 yield tst
509 data = display['content']['data']
505 data = display['content']['data']
510 yield nt.assert_equal(data['text/plain'], u'1')
506 yield nt.assert_equal(data['text/plain'], u'1')
511
507
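
Aside (not part of the diff): a minimal sketch of the execute round trip the tests above drive. It assumes `KC` is the started blocking kernel client this test module creates elsewhere, and it approximates what the `execute()` helper used above plausibly does; names here are illustrative, not the repository's own helper.

    def execute_roundtrip(KC, code):
        """Send an execute_request and wait for its execute_reply on the shell channel."""
        msg_id = KC.execute(code=code)            # returns the msg_id of the request
        reply = KC.get_shell_msg(timeout=2)       # the reply arrives on the shell channel
        # the reply is correlated to the request through its parent header
        assert reply['parent_header']['msg_id'] == msg_id
        assert reply['header']['msg_type'] == 'execute_reply'
        return msg_id, reply['content']           # content carries 'status' and 'execution_count'

Each test then inspects `reply['content']` (status, execution_count, user_variables, ...) and drains the IOPub channel for the matching pyin/stream/status messages.
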
@@ -1,57 +1,56 b''
1 """The client and server for a basic ping-pong style heartbeat.
1 """The client and server for a basic ping-pong style heartbeat.
2 """
2 """
3
3
4 #-----------------------------------------------------------------------------
4 #-----------------------------------------------------------------------------
5 # Copyright (C) 2008-2011 The IPython Development Team
5 # Copyright (C) 2008-2011 The IPython Development Team
6 #
6 #
7 # Distributed under the terms of the BSD License. The full license is in
7 # Distributed under the terms of the BSD License. The full license is in
8 # the file COPYING, distributed as part of this software.
8 # the file COPYING, distributed as part of this software.
9 #-----------------------------------------------------------------------------
9 #-----------------------------------------------------------------------------
10
10
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12 # Imports
12 # Imports
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15 import os
15 import os
16 import socket
16 import socket
17 import sys
18 from threading import Thread
17 from threading import Thread
19
18
20 import zmq
19 import zmq
21
20
22 from IPython.utils.localinterfaces import LOCALHOST
21 from IPython.utils.localinterfaces import LOCALHOST
23
22
24 #-----------------------------------------------------------------------------
23 #-----------------------------------------------------------------------------
25 # Code
24 # Code
26 #-----------------------------------------------------------------------------
25 #-----------------------------------------------------------------------------
27
26
28
27
29 class Heartbeat(Thread):
28 class Heartbeat(Thread):
30 "A simple ping-pong style heartbeat that runs in a thread."
29 "A simple ping-pong style heartbeat that runs in a thread."
31
30
32 def __init__(self, context, addr=('tcp', LOCALHOST, 0)):
31 def __init__(self, context, addr=('tcp', LOCALHOST, 0)):
33 Thread.__init__(self)
32 Thread.__init__(self)
34 self.context = context
33 self.context = context
35 self.transport, self.ip, self.port = addr
34 self.transport, self.ip, self.port = addr
36 if self.port == 0:
35 if self.port == 0:
37 if addr[0] == 'tcp':
36 if addr[0] == 'tcp':
38 s = socket.socket()
37 s = socket.socket()
39 # '*' means all interfaces to 0MQ, which is '' to socket.socket
38 # '*' means all interfaces to 0MQ, which is '' to socket.socket
40 s.bind(('' if self.ip == '*' else self.ip, 0))
39 s.bind(('' if self.ip == '*' else self.ip, 0))
41 self.port = s.getsockname()[1]
40 self.port = s.getsockname()[1]
42 s.close()
41 s.close()
43 elif addr[0] == 'ipc':
42 elif addr[0] == 'ipc':
44 self.port = 1
43 self.port = 1
45 while os.path.exists("%s-%s" % (self.ip, self.port)):
44 while os.path.exists("%s-%s" % (self.ip, self.port)):
46 self.port = self.port + 1
45 self.port = self.port + 1
47 else:
46 else:
48 raise ValueError("Unrecognized zmq transport: %s" % addr[0])
47 raise ValueError("Unrecognized zmq transport: %s" % addr[0])
49 self.addr = (self.ip, self.port)
48 self.addr = (self.ip, self.port)
50 self.daemon = True
49 self.daemon = True
51
50
52 def run(self):
51 def run(self):
53 self.socket = self.context.socket(zmq.REP)
52 self.socket = self.context.socket(zmq.REP)
54 c = ':' if self.transport == 'tcp' else '-'
53 c = ':' if self.transport == 'tcp' else '-'
55 self.socket.bind('%s://%s' % (self.transport, self.ip) + c + str(self.port))
54 self.socket.bind('%s://%s' % (self.transport, self.ip) + c + str(self.port))
56 zmq.device(zmq.FORWARDER, self.socket, self.socket)
55 zmq.device(zmq.FORWARDER, self.socket, self.socket)
57
56
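
Aside (not taken from the repository): a hedged usage sketch of the Heartbeat thread defined above, pinged from a plain REQ socket. The import path is assumed to be IPython.kernel.zmq.heartbeat for this layout of the tree.

    import zmq
    from IPython.kernel.zmq.heartbeat import Heartbeat   # assumed module path

    ctx = zmq.Context()
    hb = Heartbeat(ctx)            # default addr picks a free TCP port on localhost
    hb.start()                     # daemon thread: binds a REP socket and echoes forever

    ping = ctx.socket(zmq.REQ)
    ping.connect('tcp://%s:%i' % (hb.ip, hb.port))
    ping.send(b'ping')
    assert ping.recv() == b'ping'  # the FORWARDER device echoes whatever it receives

Because the heartbeat is a zmq FORWARDER bound to a single REP socket, any client that sends a message and gets the same bytes back knows the kernel process is still alive.
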
@@ -1,804 +1,803 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 """A simple interactive kernel that talks to a frontend over 0MQ.
2 """A simple interactive kernel that talks to a frontend over 0MQ.
3
3
4 Things to do:
4 Things to do:
5
5
6 * Implement `set_parent` logic. Right before doing exec, the Kernel should
6 * Implement `set_parent` logic. Right before doing exec, the Kernel should
7 call set_parent on all the PUB objects with the message about to be executed.
7 call set_parent on all the PUB objects with the message about to be executed.
8 * Implement random port and security key logic.
8 * Implement random port and security key logic.
9 * Implement control messages.
9 * Implement control messages.
10 * Implement event loop and poll version.
10 * Implement event loop and poll version.
11 """
11 """
12
12
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14 # Imports
14 # Imports
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16 from __future__ import print_function
16 from __future__ import print_function
17
17
18 # Standard library imports
18 # Standard library imports
19 import __builtin__
19 import __builtin__
20 import sys
20 import sys
21 import time
21 import time
22 import traceback
22 import traceback
23 import logging
23 import logging
24 import uuid
24 import uuid
25
25
26 from datetime import datetime
26 from datetime import datetime
27 from signal import (
27 from signal import (
28 signal, getsignal, default_int_handler, SIGINT, SIG_IGN
28 signal, default_int_handler, SIGINT
29 )
29 )
30
30
31 # System library imports
31 # System library imports
32 import zmq
32 import zmq
33 from zmq.eventloop import ioloop
33 from zmq.eventloop import ioloop
34 from zmq.eventloop.zmqstream import ZMQStream
34 from zmq.eventloop.zmqstream import ZMQStream
35
35
36 # Local imports
36 # Local imports
37 from IPython.config.configurable import Configurable
37 from IPython.config.configurable import Configurable
38 from IPython.core.error import StdinNotImplementedError
38 from IPython.core.error import StdinNotImplementedError
39 from IPython.core import release
39 from IPython.core import release
40 from IPython.utils import io
41 from IPython.utils import py3compat
40 from IPython.utils import py3compat
42 from IPython.utils.jsonutil import json_clean
41 from IPython.utils.jsonutil import json_clean
43 from IPython.utils.traitlets import (
42 from IPython.utils.traitlets import (
44 Any, Instance, Float, Dict, CaselessStrEnum, List, Set, Integer, Unicode,
43 Any, Instance, Float, Dict, List, Set, Integer, Unicode,
45 Type
44 Type
46 )
45 )
47
46
48 from serialize import serialize_object, unpack_apply_message
47 from serialize import serialize_object, unpack_apply_message
49 from session import Session
48 from session import Session
50 from zmqshell import ZMQInteractiveShell
49 from zmqshell import ZMQInteractiveShell
51
50
52
51
53 #-----------------------------------------------------------------------------
52 #-----------------------------------------------------------------------------
54 # Main kernel class
53 # Main kernel class
55 #-----------------------------------------------------------------------------
54 #-----------------------------------------------------------------------------
56
55
57 protocol_version = list(release.kernel_protocol_version_info)
56 protocol_version = list(release.kernel_protocol_version_info)
58 ipython_version = list(release.version_info)
57 ipython_version = list(release.version_info)
59 language_version = list(sys.version_info[:3])
58 language_version = list(sys.version_info[:3])
60
59
61
60
62 class Kernel(Configurable):
61 class Kernel(Configurable):
63
62
64 #---------------------------------------------------------------------------
63 #---------------------------------------------------------------------------
65 # Kernel interface
64 # Kernel interface
66 #---------------------------------------------------------------------------
65 #---------------------------------------------------------------------------
67
66
68 # attribute to override with a GUI
67 # attribute to override with a GUI
69 eventloop = Any(None)
68 eventloop = Any(None)
70 def _eventloop_changed(self, name, old, new):
69 def _eventloop_changed(self, name, old, new):
71 """schedule call to eventloop from IOLoop"""
70 """schedule call to eventloop from IOLoop"""
72 loop = ioloop.IOLoop.instance()
71 loop = ioloop.IOLoop.instance()
73 loop.add_timeout(time.time()+0.1, self.enter_eventloop)
72 loop.add_timeout(time.time()+0.1, self.enter_eventloop)
74
73
75 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
74 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
76 shell_class = Type(ZMQInteractiveShell)
75 shell_class = Type(ZMQInteractiveShell)
77
76
78 session = Instance(Session)
77 session = Instance(Session)
79 profile_dir = Instance('IPython.core.profiledir.ProfileDir')
78 profile_dir = Instance('IPython.core.profiledir.ProfileDir')
80 shell_streams = List()
79 shell_streams = List()
81 control_stream = Instance(ZMQStream)
80 control_stream = Instance(ZMQStream)
82 iopub_socket = Instance(zmq.Socket)
81 iopub_socket = Instance(zmq.Socket)
83 stdin_socket = Instance(zmq.Socket)
82 stdin_socket = Instance(zmq.Socket)
84 log = Instance(logging.Logger)
83 log = Instance(logging.Logger)
85
84
86 user_module = Any()
85 user_module = Any()
87 def _user_module_changed(self, name, old, new):
86 def _user_module_changed(self, name, old, new):
88 if self.shell is not None:
87 if self.shell is not None:
89 self.shell.user_module = new
88 self.shell.user_module = new
90
89
91 user_ns = Dict(default_value=None)
90 user_ns = Dict(default_value=None)
92 def _user_ns_changed(self, name, old, new):
91 def _user_ns_changed(self, name, old, new):
93 if self.shell is not None:
92 if self.shell is not None:
94 self.shell.user_ns = new
93 self.shell.user_ns = new
95 self.shell.init_user_ns()
94 self.shell.init_user_ns()
96
95
97 # identities:
96 # identities:
98 int_id = Integer(-1)
97 int_id = Integer(-1)
99 ident = Unicode()
98 ident = Unicode()
100
99
101 def _ident_default(self):
100 def _ident_default(self):
102 return unicode(uuid.uuid4())
101 return unicode(uuid.uuid4())
103
102
104
103
105 # Private interface
104 # Private interface
106
105
107 # Time to sleep after flushing the stdout/err buffers in each execute
106 # Time to sleep after flushing the stdout/err buffers in each execute
108 # cycle. While this introduces a hard limit on the minimal latency of the
107 # cycle. While this introduces a hard limit on the minimal latency of the
109 # execute cycle, it helps prevent output synchronization problems for
108 # execute cycle, it helps prevent output synchronization problems for
110 # clients.
109 # clients.
111 # Units are in seconds. The minimum zmq latency on local host is probably
110 # Units are in seconds. The minimum zmq latency on local host is probably
112 # ~150 microseconds, set this to 500us for now. We may need to increase it
111 # ~150 microseconds, set this to 500us for now. We may need to increase it
113 # a little if it's not enough after more interactive testing.
112 # a little if it's not enough after more interactive testing.
114 _execute_sleep = Float(0.0005, config=True)
113 _execute_sleep = Float(0.0005, config=True)
115
114
116 # Frequency of the kernel's event loop.
115 # Frequency of the kernel's event loop.
117 # Units are in seconds; kernel subclasses for GUI toolkits may need to
116 # Units are in seconds; kernel subclasses for GUI toolkits may need to
118 # adapt to milliseconds.
117 # adapt to milliseconds.
119 _poll_interval = Float(0.05, config=True)
118 _poll_interval = Float(0.05, config=True)
120
119
121 # If the shutdown was requested over the network, we leave here the
120 # If the shutdown was requested over the network, we leave here the
122 # necessary reply message so it can be sent by our registered atexit
121 # necessary reply message so it can be sent by our registered atexit
123 # handler. This ensures that the reply is only sent to clients truly at
122 # handler. This ensures that the reply is only sent to clients truly at
124 # the end of our shutdown process (which happens after the underlying
123 # the end of our shutdown process (which happens after the underlying
125 # IPython shell's own shutdown).
124 # IPython shell's own shutdown).
126 _shutdown_message = None
125 _shutdown_message = None
127
126
128 # This is a dict of port numbers that the kernel is listening on. It is set
127 # This is a dict of port numbers that the kernel is listening on. It is set
129 # by record_ports and used by connect_request.
128 # by record_ports and used by connect_request.
130 _recorded_ports = Dict()
129 _recorded_ports = Dict()
131
130
132 # A reference to the Python builtin 'raw_input' function.
131 # A reference to the Python builtin 'raw_input' function.
133 # (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3)
132 # (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3)
134 _sys_raw_input = Any()
133 _sys_raw_input = Any()
135
134
136 # set of aborted msg_ids
135 # set of aborted msg_ids
137 aborted = Set()
136 aborted = Set()
138
137
139
138
140 def __init__(self, **kwargs):
139 def __init__(self, **kwargs):
141 super(Kernel, self).__init__(**kwargs)
140 super(Kernel, self).__init__(**kwargs)
142
141
143 # Initialize the InteractiveShell subclass
142 # Initialize the InteractiveShell subclass
144 self.shell = self.shell_class.instance(parent=self,
143 self.shell = self.shell_class.instance(parent=self,
145 profile_dir = self.profile_dir,
144 profile_dir = self.profile_dir,
146 user_module = self.user_module,
145 user_module = self.user_module,
147 user_ns = self.user_ns,
146 user_ns = self.user_ns,
148 )
147 )
149 self.shell.displayhook.session = self.session
148 self.shell.displayhook.session = self.session
150 self.shell.displayhook.pub_socket = self.iopub_socket
149 self.shell.displayhook.pub_socket = self.iopub_socket
151 self.shell.displayhook.topic = self._topic('pyout')
150 self.shell.displayhook.topic = self._topic('pyout')
152 self.shell.display_pub.session = self.session
151 self.shell.display_pub.session = self.session
153 self.shell.display_pub.pub_socket = self.iopub_socket
152 self.shell.display_pub.pub_socket = self.iopub_socket
154 self.shell.data_pub.session = self.session
153 self.shell.data_pub.session = self.session
155 self.shell.data_pub.pub_socket = self.iopub_socket
154 self.shell.data_pub.pub_socket = self.iopub_socket
156
155
157 # TMP - hack while developing
156 # TMP - hack while developing
158 self.shell._reply_content = None
157 self.shell._reply_content = None
159
158
160 # Build dict of handlers for message types
159 # Build dict of handlers for message types
161 msg_types = [ 'execute_request', 'complete_request',
160 msg_types = [ 'execute_request', 'complete_request',
162 'object_info_request', 'history_request',
161 'object_info_request', 'history_request',
163 'kernel_info_request',
162 'kernel_info_request',
164 'connect_request', 'shutdown_request',
163 'connect_request', 'shutdown_request',
165 'apply_request',
164 'apply_request',
166 ]
165 ]
167 self.shell_handlers = {}
166 self.shell_handlers = {}
168 for msg_type in msg_types:
167 for msg_type in msg_types:
169 self.shell_handlers[msg_type] = getattr(self, msg_type)
168 self.shell_handlers[msg_type] = getattr(self, msg_type)
170
169
171 control_msg_types = msg_types + [ 'clear_request', 'abort_request' ]
170 control_msg_types = msg_types + [ 'clear_request', 'abort_request' ]
172 self.control_handlers = {}
171 self.control_handlers = {}
173 for msg_type in control_msg_types:
172 for msg_type in control_msg_types:
174 self.control_handlers[msg_type] = getattr(self, msg_type)
173 self.control_handlers[msg_type] = getattr(self, msg_type)
175
174
176 def dispatch_control(self, msg):
175 def dispatch_control(self, msg):
177 """dispatch control requests"""
176 """dispatch control requests"""
178 idents,msg = self.session.feed_identities(msg, copy=False)
177 idents,msg = self.session.feed_identities(msg, copy=False)
179 try:
178 try:
180 msg = self.session.unserialize(msg, content=True, copy=False)
179 msg = self.session.unserialize(msg, content=True, copy=False)
181 except:
180 except:
182 self.log.error("Invalid Control Message", exc_info=True)
181 self.log.error("Invalid Control Message", exc_info=True)
183 return
182 return
184
183
185 self.log.debug("Control received: %s", msg)
184 self.log.debug("Control received: %s", msg)
186
185
187 header = msg['header']
186 header = msg['header']
188 msg_id = header['msg_id']
187 msg_id = header['msg_id']
189 msg_type = header['msg_type']
188 msg_type = header['msg_type']
190
189
191 handler = self.control_handlers.get(msg_type, None)
190 handler = self.control_handlers.get(msg_type, None)
192 if handler is None:
191 if handler is None:
193 self.log.error("UNKNOWN CONTROL MESSAGE TYPE: %r", msg_type)
192 self.log.error("UNKNOWN CONTROL MESSAGE TYPE: %r", msg_type)
194 else:
193 else:
195 try:
194 try:
196 handler(self.control_stream, idents, msg)
195 handler(self.control_stream, idents, msg)
197 except Exception:
196 except Exception:
198 self.log.error("Exception in control handler:", exc_info=True)
197 self.log.error("Exception in control handler:", exc_info=True)
199
198
200 def dispatch_shell(self, stream, msg):
199 def dispatch_shell(self, stream, msg):
201 """dispatch shell requests"""
200 """dispatch shell requests"""
202 # flush control requests first
201 # flush control requests first
203 if self.control_stream:
202 if self.control_stream:
204 self.control_stream.flush()
203 self.control_stream.flush()
205
204
206 idents,msg = self.session.feed_identities(msg, copy=False)
205 idents,msg = self.session.feed_identities(msg, copy=False)
207 try:
206 try:
208 msg = self.session.unserialize(msg, content=True, copy=False)
207 msg = self.session.unserialize(msg, content=True, copy=False)
209 except:
208 except:
210 self.log.error("Invalid Message", exc_info=True)
209 self.log.error("Invalid Message", exc_info=True)
211 return
210 return
212
211
213 header = msg['header']
212 header = msg['header']
214 msg_id = header['msg_id']
213 msg_id = header['msg_id']
215 msg_type = msg['header']['msg_type']
214 msg_type = msg['header']['msg_type']
216
215
217 # Print some info about this message and leave a '--->' marker, so it's
216 # Print some info about this message and leave a '--->' marker, so it's
218 # easier to visually trace the message chain when debugging. Each
217 # easier to visually trace the message chain when debugging. Each
219 # handler prints its message at the end.
218 # handler prints its message at the end.
220 self.log.debug('\n*** MESSAGE TYPE:%s***', msg_type)
219 self.log.debug('\n*** MESSAGE TYPE:%s***', msg_type)
221 self.log.debug(' Content: %s\n --->\n ', msg['content'])
220 self.log.debug(' Content: %s\n --->\n ', msg['content'])
222
221
223 if msg_id in self.aborted:
222 if msg_id in self.aborted:
224 self.aborted.remove(msg_id)
223 self.aborted.remove(msg_id)
225 # is it safe to assume a msg_id will not be resubmitted?
224 # is it safe to assume a msg_id will not be resubmitted?
226 reply_type = msg_type.split('_')[0] + '_reply'
225 reply_type = msg_type.split('_')[0] + '_reply'
227 status = {'status' : 'aborted'}
226 status = {'status' : 'aborted'}
228 md = {'engine' : self.ident}
227 md = {'engine' : self.ident}
229 md.update(status)
228 md.update(status)
230 reply_msg = self.session.send(stream, reply_type, metadata=md,
229 reply_msg = self.session.send(stream, reply_type, metadata=md,
231 content=status, parent=msg, ident=idents)
230 content=status, parent=msg, ident=idents)
232 return
231 return
233
232
234 handler = self.shell_handlers.get(msg_type, None)
233 handler = self.shell_handlers.get(msg_type, None)
235 if handler is None:
234 if handler is None:
236 self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type)
235 self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type)
237 else:
236 else:
238 # ensure default_int_handler during handler call
237 # ensure default_int_handler during handler call
239 sig = signal(SIGINT, default_int_handler)
238 sig = signal(SIGINT, default_int_handler)
240 try:
239 try:
241 handler(stream, idents, msg)
240 handler(stream, idents, msg)
242 except Exception:
241 except Exception:
243 self.log.error("Exception in message handler:", exc_info=True)
242 self.log.error("Exception in message handler:", exc_info=True)
244 finally:
243 finally:
245 signal(SIGINT, sig)
244 signal(SIGINT, sig)
246
245
247 def enter_eventloop(self):
246 def enter_eventloop(self):
248 """enter eventloop"""
247 """enter eventloop"""
249 self.log.info("entering eventloop")
248 self.log.info("entering eventloop")
250 # restore default_int_handler
249 # restore default_int_handler
251 signal(SIGINT, default_int_handler)
250 signal(SIGINT, default_int_handler)
252 while self.eventloop is not None:
251 while self.eventloop is not None:
253 try:
252 try:
254 self.eventloop(self)
253 self.eventloop(self)
255 except KeyboardInterrupt:
254 except KeyboardInterrupt:
256 # Ctrl-C shouldn't crash the kernel
255 # Ctrl-C shouldn't crash the kernel
257 self.log.error("KeyboardInterrupt caught in kernel")
256 self.log.error("KeyboardInterrupt caught in kernel")
258 continue
257 continue
259 else:
258 else:
260 # eventloop exited cleanly, this means we should stop (right?)
259 # eventloop exited cleanly, this means we should stop (right?)
261 self.eventloop = None
260 self.eventloop = None
262 break
261 break
263 self.log.info("exiting eventloop")
262 self.log.info("exiting eventloop")
264
263
265 def start(self):
264 def start(self):
266 """register dispatchers for streams"""
265 """register dispatchers for streams"""
267 self.shell.exit_now = False
266 self.shell.exit_now = False
268 if self.control_stream:
267 if self.control_stream:
269 self.control_stream.on_recv(self.dispatch_control, copy=False)
268 self.control_stream.on_recv(self.dispatch_control, copy=False)
270
269
271 def make_dispatcher(stream):
270 def make_dispatcher(stream):
272 def dispatcher(msg):
271 def dispatcher(msg):
273 return self.dispatch_shell(stream, msg)
272 return self.dispatch_shell(stream, msg)
274 return dispatcher
273 return dispatcher
275
274
276 for s in self.shell_streams:
275 for s in self.shell_streams:
277 s.on_recv(make_dispatcher(s), copy=False)
276 s.on_recv(make_dispatcher(s), copy=False)
278
277
279 # publish starting status
278 # publish starting status
280 self._publish_status('starting')
279 self._publish_status('starting')
281
280
282 def do_one_iteration(self):
281 def do_one_iteration(self):
283 """step eventloop just once"""
282 """step eventloop just once"""
284 if self.control_stream:
283 if self.control_stream:
285 self.control_stream.flush()
284 self.control_stream.flush()
286 for stream in self.shell_streams:
285 for stream in self.shell_streams:
287 # handle at most one request per iteration
286 # handle at most one request per iteration
288 stream.flush(zmq.POLLIN, 1)
287 stream.flush(zmq.POLLIN, 1)
289 stream.flush(zmq.POLLOUT)
288 stream.flush(zmq.POLLOUT)
290
289
291
290
292 def record_ports(self, ports):
291 def record_ports(self, ports):
293 """Record the ports that this kernel is using.
292 """Record the ports that this kernel is using.
294
293
295 The creator of the Kernel instance must call this method if they
294 The creator of the Kernel instance must call this method if they
296 want the :meth:`connect_request` method to return the port numbers.
295 want the :meth:`connect_request` method to return the port numbers.
297 """
296 """
298 self._recorded_ports = ports
297 self._recorded_ports = ports
299
298
300 #---------------------------------------------------------------------------
299 #---------------------------------------------------------------------------
301 # Kernel request handlers
300 # Kernel request handlers
302 #---------------------------------------------------------------------------
301 #---------------------------------------------------------------------------
303
302
304 def _make_metadata(self, other=None):
303 def _make_metadata(self, other=None):
305 """init metadata dict, for execute/apply_reply"""
304 """init metadata dict, for execute/apply_reply"""
306 new_md = {
305 new_md = {
307 'dependencies_met' : True,
306 'dependencies_met' : True,
308 'engine' : self.ident,
307 'engine' : self.ident,
309 'started': datetime.now(),
308 'started': datetime.now(),
310 }
309 }
311 if other:
310 if other:
312 new_md.update(other)
311 new_md.update(other)
313 return new_md
312 return new_md
314
313
315 def _publish_pyin(self, code, parent, execution_count):
314 def _publish_pyin(self, code, parent, execution_count):
316 """Publish the code request on the pyin stream."""
315 """Publish the code request on the pyin stream."""
317
316
318 self.session.send(self.iopub_socket, u'pyin',
317 self.session.send(self.iopub_socket, u'pyin',
319 {u'code':code, u'execution_count': execution_count},
318 {u'code':code, u'execution_count': execution_count},
320 parent=parent, ident=self._topic('pyin')
319 parent=parent, ident=self._topic('pyin')
321 )
320 )
322
321
323 def _publish_status(self, status, parent=None):
322 def _publish_status(self, status, parent=None):
324 """send status (busy/idle) on IOPub"""
323 """send status (busy/idle) on IOPub"""
325 self.session.send(self.iopub_socket,
324 self.session.send(self.iopub_socket,
326 u'status',
325 u'status',
327 {u'execution_state': status},
326 {u'execution_state': status},
328 parent=parent,
327 parent=parent,
329 ident=self._topic('status'),
328 ident=self._topic('status'),
330 )
329 )
331
330
332
331
333 def execute_request(self, stream, ident, parent):
332 def execute_request(self, stream, ident, parent):
334 """handle an execute_request"""
333 """handle an execute_request"""
335
334
336 self._publish_status(u'busy', parent)
335 self._publish_status(u'busy', parent)
337
336
338 try:
337 try:
339 content = parent[u'content']
338 content = parent[u'content']
340 code = content[u'code']
339 code = content[u'code']
341 silent = content[u'silent']
340 silent = content[u'silent']
342 store_history = content.get(u'store_history', not silent)
341 store_history = content.get(u'store_history', not silent)
343 except:
342 except:
344 self.log.error("Got bad msg: ")
343 self.log.error("Got bad msg: ")
345 self.log.error("%s", parent)
344 self.log.error("%s", parent)
346 return
345 return
347
346
348 md = self._make_metadata(parent['metadata'])
347 md = self._make_metadata(parent['metadata'])
349
348
350 shell = self.shell # we'll need this a lot here
349 shell = self.shell # we'll need this a lot here
351
350
352 # Replace raw_input. Note that it is not sufficient to replace
351 # Replace raw_input. Note that it is not sufficient to replace
353 # raw_input in the user namespace.
352 # raw_input in the user namespace.
354 if content.get('allow_stdin', False):
353 if content.get('allow_stdin', False):
355 raw_input = lambda prompt='': self._raw_input(prompt, ident, parent)
354 raw_input = lambda prompt='': self._raw_input(prompt, ident, parent)
356 else:
355 else:
357 raw_input = lambda prompt='' : self._no_raw_input()
356 raw_input = lambda prompt='' : self._no_raw_input()
358
357
359 if py3compat.PY3:
358 if py3compat.PY3:
360 self._sys_raw_input = __builtin__.input
359 self._sys_raw_input = __builtin__.input
361 __builtin__.input = raw_input
360 __builtin__.input = raw_input
362 else:
361 else:
363 self._sys_raw_input = __builtin__.raw_input
362 self._sys_raw_input = __builtin__.raw_input
364 __builtin__.raw_input = raw_input
363 __builtin__.raw_input = raw_input
365
364
366 # Set the parent message of the display hook and out streams.
365 # Set the parent message of the display hook and out streams.
367 shell.displayhook.set_parent(parent)
366 shell.displayhook.set_parent(parent)
368 shell.display_pub.set_parent(parent)
367 shell.display_pub.set_parent(parent)
369 shell.data_pub.set_parent(parent)
368 shell.data_pub.set_parent(parent)
370 try:
369 try:
371 sys.stdout.set_parent(parent)
370 sys.stdout.set_parent(parent)
372 except AttributeError:
371 except AttributeError:
373 pass
372 pass
374 try:
373 try:
375 sys.stderr.set_parent(parent)
374 sys.stderr.set_parent(parent)
376 except AttributeError:
375 except AttributeError:
377 pass
376 pass
378
377
379 # Re-broadcast our input for the benefit of listening clients, and
378 # Re-broadcast our input for the benefit of listening clients, and
380 # start computing output
379 # start computing output
381 if not silent:
380 if not silent:
382 self._publish_pyin(code, parent, shell.execution_count)
381 self._publish_pyin(code, parent, shell.execution_count)
383
382
384 reply_content = {}
383 reply_content = {}
385 try:
384 try:
386 # FIXME: the shell calls the exception handler itself.
385 # FIXME: the shell calls the exception handler itself.
387 shell.run_cell(code, store_history=store_history, silent=silent)
386 shell.run_cell(code, store_history=store_history, silent=silent)
388 except:
387 except:
389 status = u'error'
388 status = u'error'
390 # FIXME: this code right now isn't being used yet by default,
389 # FIXME: this code right now isn't being used yet by default,
391 # because the run_cell() call above directly fires off exception
390 # because the run_cell() call above directly fires off exception
392 # reporting. This code, therefore, is only active in the scenario
391 # reporting. This code, therefore, is only active in the scenario
393 # where runlines itself has an unhandled exception. We need to
392 # where runlines itself has an unhandled exception. We need to
394 # uniformize this, for all exception construction to come from a
393 # uniformize this, for all exception construction to come from a
395 # single location in the codebase.
394 # single location in the codebase.
396 etype, evalue, tb = sys.exc_info()
395 etype, evalue, tb = sys.exc_info()
397 tb_list = traceback.format_exception(etype, evalue, tb)
396 tb_list = traceback.format_exception(etype, evalue, tb)
398 reply_content.update(shell._showtraceback(etype, evalue, tb_list))
397 reply_content.update(shell._showtraceback(etype, evalue, tb_list))
399 else:
398 else:
400 status = u'ok'
399 status = u'ok'
401 finally:
400 finally:
402 # Restore raw_input.
401 # Restore raw_input.
403 if py3compat.PY3:
402 if py3compat.PY3:
404 __builtin__.input = self._sys_raw_input
403 __builtin__.input = self._sys_raw_input
405 else:
404 else:
406 __builtin__.raw_input = self._sys_raw_input
405 __builtin__.raw_input = self._sys_raw_input
407
406
408 reply_content[u'status'] = status
407 reply_content[u'status'] = status
409
408
410 # Return the execution counter so clients can display prompts
409 # Return the execution counter so clients can display prompts
411 reply_content['execution_count'] = shell.execution_count - 1
410 reply_content['execution_count'] = shell.execution_count - 1
412
411
413 # FIXME - fish exception info out of shell, possibly left there by
412 # FIXME - fish exception info out of shell, possibly left there by
414 # runlines. We'll need to clean up this logic later.
413 # runlines. We'll need to clean up this logic later.
415 if shell._reply_content is not None:
414 if shell._reply_content is not None:
416 reply_content.update(shell._reply_content)
415 reply_content.update(shell._reply_content)
417 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='execute')
416 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='execute')
418 reply_content['engine_info'] = e_info
417 reply_content['engine_info'] = e_info
419 # reset after use
418 # reset after use
420 shell._reply_content = None
419 shell._reply_content = None
421
420
422 if 'traceback' in reply_content:
421 if 'traceback' in reply_content:
423 self.log.info("Exception in execute request:\n%s", '\n'.join(reply_content['traceback']))
422 self.log.info("Exception in execute request:\n%s", '\n'.join(reply_content['traceback']))
424
423
425
424
426 # At this point, we can tell whether the main code execution succeeded
425 # At this point, we can tell whether the main code execution succeeded
427 # or not. If it did, we proceed to evaluate user_variables/expressions
426 # or not. If it did, we proceed to evaluate user_variables/expressions
428 if reply_content['status'] == 'ok':
427 if reply_content['status'] == 'ok':
429 reply_content[u'user_variables'] = \
428 reply_content[u'user_variables'] = \
430 shell.user_variables(content.get(u'user_variables', []))
429 shell.user_variables(content.get(u'user_variables', []))
431 reply_content[u'user_expressions'] = \
430 reply_content[u'user_expressions'] = \
432 shell.user_expressions(content.get(u'user_expressions', {}))
431 shell.user_expressions(content.get(u'user_expressions', {}))
433 else:
432 else:
434 # If there was an error, don't even try to compute variables or
433 # If there was an error, don't even try to compute variables or
435 # expressions
434 # expressions
436 reply_content[u'user_variables'] = {}
435 reply_content[u'user_variables'] = {}
437 reply_content[u'user_expressions'] = {}
436 reply_content[u'user_expressions'] = {}
438
437
439 # Payloads should be retrieved regardless of outcome, so we can both
438 # Payloads should be retrieved regardless of outcome, so we can both
440 # recover partial output (that could have been generated early in a
439 # recover partial output (that could have been generated early in a
441 # block, before an error) and clear the payload system always.
440 # block, before an error) and clear the payload system always.
442 reply_content[u'payload'] = shell.payload_manager.read_payload()
441 reply_content[u'payload'] = shell.payload_manager.read_payload()
443 # Be aggressive about clearing the payload because we don't want
442 # Be aggressive about clearing the payload because we don't want
444 # it to sit in memory until the next execute_request comes in.
443 # it to sit in memory until the next execute_request comes in.
445 shell.payload_manager.clear_payload()
444 shell.payload_manager.clear_payload()
446
445
447 # Flush output before sending the reply.
446 # Flush output before sending the reply.
448 sys.stdout.flush()
447 sys.stdout.flush()
449 sys.stderr.flush()
448 sys.stderr.flush()
450 # FIXME: on rare occasions, the flush doesn't seem to make it to the
449 # FIXME: on rare occasions, the flush doesn't seem to make it to the
451 # clients... This seems to mitigate the problem, but we definitely need
450 # clients... This seems to mitigate the problem, but we definitely need
452 # to better understand what's going on.
451 # to better understand what's going on.
453 if self._execute_sleep:
452 if self._execute_sleep:
454 time.sleep(self._execute_sleep)
453 time.sleep(self._execute_sleep)
455
454
456 # Send the reply.
455 # Send the reply.
457 reply_content = json_clean(reply_content)
456 reply_content = json_clean(reply_content)
458
457
459 md['status'] = reply_content['status']
458 md['status'] = reply_content['status']
460 if reply_content['status'] == 'error' and \
459 if reply_content['status'] == 'error' and \
461 reply_content['ename'] == 'UnmetDependency':
460 reply_content['ename'] == 'UnmetDependency':
462 md['dependencies_met'] = False
461 md['dependencies_met'] = False
463
462
464 reply_msg = self.session.send(stream, u'execute_reply',
463 reply_msg = self.session.send(stream, u'execute_reply',
465 reply_content, parent, metadata=md,
464 reply_content, parent, metadata=md,
466 ident=ident)
465 ident=ident)
467
466
468 self.log.debug("%s", reply_msg)
467 self.log.debug("%s", reply_msg)
469
468
470 if not silent and reply_msg['content']['status'] == u'error':
469 if not silent and reply_msg['content']['status'] == u'error':
471 self._abort_queues()
470 self._abort_queues()
472
471
473 self._publish_status(u'idle', parent)
472 self._publish_status(u'idle', parent)
474
473
475 def complete_request(self, stream, ident, parent):
474 def complete_request(self, stream, ident, parent):
476 txt, matches = self._complete(parent)
475 txt, matches = self._complete(parent)
477 matches = {'matches' : matches,
476 matches = {'matches' : matches,
478 'matched_text' : txt,
477 'matched_text' : txt,
479 'status' : 'ok'}
478 'status' : 'ok'}
480 matches = json_clean(matches)
479 matches = json_clean(matches)
481 completion_msg = self.session.send(stream, 'complete_reply',
480 completion_msg = self.session.send(stream, 'complete_reply',
482 matches, parent, ident)
481 matches, parent, ident)
483 self.log.debug("%s", completion_msg)
482 self.log.debug("%s", completion_msg)
484
483
485 def object_info_request(self, stream, ident, parent):
484 def object_info_request(self, stream, ident, parent):
486 content = parent['content']
485 content = parent['content']
487 object_info = self.shell.object_inspect(content['oname'],
486 object_info = self.shell.object_inspect(content['oname'],
488 detail_level = content.get('detail_level', 0)
487 detail_level = content.get('detail_level', 0)
489 )
488 )
490 # Before we send this object over, we scrub it for JSON usage
489 # Before we send this object over, we scrub it for JSON usage
491 oinfo = json_clean(object_info)
490 oinfo = json_clean(object_info)
492 msg = self.session.send(stream, 'object_info_reply',
491 msg = self.session.send(stream, 'object_info_reply',
493 oinfo, parent, ident)
492 oinfo, parent, ident)
494 self.log.debug("%s", msg)
493 self.log.debug("%s", msg)
495
494
496 def history_request(self, stream, ident, parent):
495 def history_request(self, stream, ident, parent):
497 # We need to pull these out, as passing **kwargs doesn't work with
496 # We need to pull these out, as passing **kwargs doesn't work with
498 # unicode keys before Python 2.6.5.
497 # unicode keys before Python 2.6.5.
499 hist_access_type = parent['content']['hist_access_type']
498 hist_access_type = parent['content']['hist_access_type']
500 raw = parent['content']['raw']
499 raw = parent['content']['raw']
501 output = parent['content']['output']
500 output = parent['content']['output']
502 if hist_access_type == 'tail':
501 if hist_access_type == 'tail':
503 n = parent['content']['n']
502 n = parent['content']['n']
504 hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
503 hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
505 include_latest=True)
504 include_latest=True)
506
505
507 elif hist_access_type == 'range':
506 elif hist_access_type == 'range':
508 session = parent['content']['session']
507 session = parent['content']['session']
509 start = parent['content']['start']
508 start = parent['content']['start']
510 stop = parent['content']['stop']
509 stop = parent['content']['stop']
511 hist = self.shell.history_manager.get_range(session, start, stop,
510 hist = self.shell.history_manager.get_range(session, start, stop,
512 raw=raw, output=output)
511 raw=raw, output=output)
513
512
514 elif hist_access_type == 'search':
513 elif hist_access_type == 'search':
515 n = parent['content'].get('n')
514 n = parent['content'].get('n')
516 unique = parent['content'].get('unique', False)
515 unique = parent['content'].get('unique', False)
517 pattern = parent['content']['pattern']
516 pattern = parent['content']['pattern']
518 hist = self.shell.history_manager.search(
517 hist = self.shell.history_manager.search(
519 pattern, raw=raw, output=output, n=n, unique=unique)
518 pattern, raw=raw, output=output, n=n, unique=unique)
520
519
521 else:
520 else:
522 hist = []
521 hist = []
523 hist = list(hist)
522 hist = list(hist)
524 content = {'history' : hist}
523 content = {'history' : hist}
525 content = json_clean(content)
524 content = json_clean(content)
526 msg = self.session.send(stream, 'history_reply',
525 msg = self.session.send(stream, 'history_reply',
527 content, parent, ident)
526 content, parent, ident)
528 self.log.debug("Sending history reply with %i entries", len(hist))
527 self.log.debug("Sending history reply with %i entries", len(hist))
529
528
530 def connect_request(self, stream, ident, parent):
529 def connect_request(self, stream, ident, parent):
531 if self._recorded_ports is not None:
530 if self._recorded_ports is not None:
532 content = self._recorded_ports.copy()
531 content = self._recorded_ports.copy()
533 else:
532 else:
534 content = {}
533 content = {}
535 msg = self.session.send(stream, 'connect_reply',
534 msg = self.session.send(stream, 'connect_reply',
536 content, parent, ident)
535 content, parent, ident)
537 self.log.debug("%s", msg)
536 self.log.debug("%s", msg)
538
537
539 def kernel_info_request(self, stream, ident, parent):
538 def kernel_info_request(self, stream, ident, parent):
540 vinfo = {
539 vinfo = {
541 'protocol_version': protocol_version,
540 'protocol_version': protocol_version,
542 'ipython_version': ipython_version,
541 'ipython_version': ipython_version,
543 'language_version': language_version,
542 'language_version': language_version,
544 'language': 'python',
543 'language': 'python',
545 }
544 }
546 msg = self.session.send(stream, 'kernel_info_reply',
545 msg = self.session.send(stream, 'kernel_info_reply',
547 vinfo, parent, ident)
546 vinfo, parent, ident)
548 self.log.debug("%s", msg)
547 self.log.debug("%s", msg)
549
548
550 def shutdown_request(self, stream, ident, parent):
549 def shutdown_request(self, stream, ident, parent):
551 self.shell.exit_now = True
550 self.shell.exit_now = True
552 content = dict(status='ok')
551 content = dict(status='ok')
553 content.update(parent['content'])
552 content.update(parent['content'])
554 self.session.send(stream, u'shutdown_reply', content, parent, ident=ident)
553 self.session.send(stream, u'shutdown_reply', content, parent, ident=ident)
555 # same content, but different msg_id for broadcasting on IOPub
554 # same content, but different msg_id for broadcasting on IOPub
556 self._shutdown_message = self.session.msg(u'shutdown_reply',
555 self._shutdown_message = self.session.msg(u'shutdown_reply',
557 content, parent
556 content, parent
558 )
557 )
559
558
560 self._at_shutdown()
559 self._at_shutdown()
561 # call sys.exit after a short delay
560 # call sys.exit after a short delay
562 loop = ioloop.IOLoop.instance()
561 loop = ioloop.IOLoop.instance()
563 loop.add_timeout(time.time()+0.1, loop.stop)
562 loop.add_timeout(time.time()+0.1, loop.stop)
564
563
565 #---------------------------------------------------------------------------
564 #---------------------------------------------------------------------------
566 # Engine methods
565 # Engine methods
567 #---------------------------------------------------------------------------
566 #---------------------------------------------------------------------------
568
567
569 def apply_request(self, stream, ident, parent):
568 def apply_request(self, stream, ident, parent):
570 try:
569 try:
571 content = parent[u'content']
570 content = parent[u'content']
572 bufs = parent[u'buffers']
571 bufs = parent[u'buffers']
573 msg_id = parent['header']['msg_id']
572 msg_id = parent['header']['msg_id']
574 except:
573 except:
575 self.log.error("Got bad msg: %s", parent, exc_info=True)
574 self.log.error("Got bad msg: %s", parent, exc_info=True)
576 return
575 return
577
576
578 self._publish_status(u'busy', parent)
577 self._publish_status(u'busy', parent)
579
578
580 # Set the parent message of the display hook and out streams.
579 # Set the parent message of the display hook and out streams.
581 shell = self.shell
580 shell = self.shell
582 shell.displayhook.set_parent(parent)
581 shell.displayhook.set_parent(parent)
583 shell.display_pub.set_parent(parent)
582 shell.display_pub.set_parent(parent)
584 shell.data_pub.set_parent(parent)
583 shell.data_pub.set_parent(parent)
585 try:
584 try:
586 sys.stdout.set_parent(parent)
585 sys.stdout.set_parent(parent)
587 except AttributeError:
586 except AttributeError:
588 pass
587 pass
589 try:
588 try:
590 sys.stderr.set_parent(parent)
589 sys.stderr.set_parent(parent)
591 except AttributeError:
590 except AttributeError:
592 pass
591 pass
593
592
594 # pyin_msg = self.session.msg(u'pyin',{u'code':code}, parent=parent)
593 # pyin_msg = self.session.msg(u'pyin',{u'code':code}, parent=parent)
595 # self.iopub_socket.send(pyin_msg)
594 # self.iopub_socket.send(pyin_msg)
596 # self.session.send(self.iopub_socket, u'pyin', {u'code':code},parent=parent)
595 # self.session.send(self.iopub_socket, u'pyin', {u'code':code},parent=parent)
597 md = self._make_metadata(parent['metadata'])
596 md = self._make_metadata(parent['metadata'])
598 try:
597 try:
599 working = shell.user_ns
598 working = shell.user_ns
600
599
601 prefix = "_"+str(msg_id).replace("-","")+"_"
600 prefix = "_"+str(msg_id).replace("-","")+"_"
602
601
603 f,args,kwargs = unpack_apply_message(bufs, working, copy=False)
602 f,args,kwargs = unpack_apply_message(bufs, working, copy=False)
604
603
605 fname = getattr(f, '__name__', 'f')
604 fname = getattr(f, '__name__', 'f')
606
605
607 fname = prefix+"f"
606 fname = prefix+"f"
608 argname = prefix+"args"
607 argname = prefix+"args"
609 kwargname = prefix+"kwargs"
608 kwargname = prefix+"kwargs"
610 resultname = prefix+"result"
609 resultname = prefix+"result"
611
610
612 ns = { fname : f, argname : args, kwargname : kwargs , resultname : None }
611 ns = { fname : f, argname : args, kwargname : kwargs , resultname : None }
613 # print ns
612 # print ns
614 working.update(ns)
613 working.update(ns)
615 code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname)
614 code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname)
616 try:
615 try:
617 exec code in shell.user_global_ns, shell.user_ns
616 exec code in shell.user_global_ns, shell.user_ns
618 result = working.get(resultname)
617 result = working.get(resultname)
619 finally:
618 finally:
620 for key in ns.iterkeys():
619 for key in ns.iterkeys():
621 working.pop(key)
620 working.pop(key)
622
621
623 result_buf = serialize_object(result,
622 result_buf = serialize_object(result,
624 buffer_threshold=self.session.buffer_threshold,
623 buffer_threshold=self.session.buffer_threshold,
625 item_threshold=self.session.item_threshold,
624 item_threshold=self.session.item_threshold,
626 )
625 )
627
626
628 except:
627 except:
629 # invoke IPython traceback formatting
628 # invoke IPython traceback formatting
630 shell.showtraceback()
629 shell.showtraceback()
631 # FIXME - fish exception info out of shell, possibly left there by
630 # FIXME - fish exception info out of shell, possibly left there by
632 # run_code. We'll need to clean up this logic later.
631 # run_code. We'll need to clean up this logic later.
633 reply_content = {}
632 reply_content = {}
634 if shell._reply_content is not None:
633 if shell._reply_content is not None:
635 reply_content.update(shell._reply_content)
634 reply_content.update(shell._reply_content)
636 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply')
635 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply')
637 reply_content['engine_info'] = e_info
636 reply_content['engine_info'] = e_info
638 # reset after use
637 # reset after use
639 shell._reply_content = None
638 shell._reply_content = None
640
639
641 self.session.send(self.iopub_socket, u'pyerr', reply_content, parent=parent,
640 self.session.send(self.iopub_socket, u'pyerr', reply_content, parent=parent,
642 ident=self._topic('pyerr'))
641 ident=self._topic('pyerr'))
643 self.log.info("Exception in apply request:\n%s", '\n'.join(reply_content['traceback']))
642 self.log.info("Exception in apply request:\n%s", '\n'.join(reply_content['traceback']))
644 result_buf = []
643 result_buf = []
645
644
646 if reply_content['ename'] == 'UnmetDependency':
645 if reply_content['ename'] == 'UnmetDependency':
647 md['dependencies_met'] = False
646 md['dependencies_met'] = False
648 else:
647 else:
649 reply_content = {'status' : 'ok'}
648 reply_content = {'status' : 'ok'}
650
649
651 # put 'ok'/'error' status in header, for scheduler introspection:
650 # put 'ok'/'error' status in header, for scheduler introspection:
652 md['status'] = reply_content['status']
651 md['status'] = reply_content['status']
653
652
654 # flush i/o
653 # flush i/o
655 sys.stdout.flush()
654 sys.stdout.flush()
656 sys.stderr.flush()
655 sys.stderr.flush()
657
656
658 reply_msg = self.session.send(stream, u'apply_reply', reply_content,
657 reply_msg = self.session.send(stream, u'apply_reply', reply_content,
659 parent=parent, ident=ident,buffers=result_buf, metadata=md)
658 parent=parent, ident=ident,buffers=result_buf, metadata=md)
660
659
661 self._publish_status(u'idle', parent)
660 self._publish_status(u'idle', parent)
662
661
663 #---------------------------------------------------------------------------
662 #---------------------------------------------------------------------------
664 # Control messages
663 # Control messages
665 #---------------------------------------------------------------------------
664 #---------------------------------------------------------------------------
666
665
667 def abort_request(self, stream, ident, parent):
666 def abort_request(self, stream, ident, parent):
668 """abort a specifig msg by id"""
667 """abort a specifig msg by id"""
669 msg_ids = parent['content'].get('msg_ids', None)
668 msg_ids = parent['content'].get('msg_ids', None)
670 if isinstance(msg_ids, basestring):
669 if isinstance(msg_ids, basestring):
671 msg_ids = [msg_ids]
670 msg_ids = [msg_ids]
672 if not msg_ids:
671 if not msg_ids:
673 self.abort_queues()
672 self.abort_queues()
674 for mid in msg_ids:
673 for mid in msg_ids:
675 self.aborted.add(str(mid))
674 self.aborted.add(str(mid))
676
675
677 content = dict(status='ok')
676 content = dict(status='ok')
678 reply_msg = self.session.send(stream, 'abort_reply', content=content,
677 reply_msg = self.session.send(stream, 'abort_reply', content=content,
679 parent=parent, ident=ident)
678 parent=parent, ident=ident)
680 self.log.debug("%s", reply_msg)
679 self.log.debug("%s", reply_msg)
681
680
682 def clear_request(self, stream, idents, parent):
681 def clear_request(self, stream, idents, parent):
683 """Clear our namespace."""
682 """Clear our namespace."""
684 self.shell.reset(False)
683 self.shell.reset(False)
685 msg = self.session.send(stream, 'clear_reply', ident=idents, parent=parent,
684 msg = self.session.send(stream, 'clear_reply', ident=idents, parent=parent,
686 content = dict(status='ok'))
685 content = dict(status='ok'))
687
686
688
687
689 #---------------------------------------------------------------------------
688 #---------------------------------------------------------------------------
690 # Protected interface
689 # Protected interface
691 #---------------------------------------------------------------------------
690 #---------------------------------------------------------------------------
692
691
693 def _wrap_exception(self, method=None):
692 def _wrap_exception(self, method=None):
694 # import here, because _wrap_exception is only used in parallel,
693 # import here, because _wrap_exception is only used in parallel,
695 # and parallel has higher min pyzmq version
694 # and parallel has higher min pyzmq version
696 from IPython.parallel.error import wrap_exception
695 from IPython.parallel.error import wrap_exception
697 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method=method)
696 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method=method)
698 content = wrap_exception(e_info)
697 content = wrap_exception(e_info)
699 return content
698 return content
700
699
701 def _topic(self, topic):
700 def _topic(self, topic):
702 """prefixed topic for IOPub messages"""
701 """prefixed topic for IOPub messages"""
703 if self.int_id >= 0:
702 if self.int_id >= 0:
704 base = "engine.%i" % self.int_id
703 base = "engine.%i" % self.int_id
705 else:
704 else:
706 base = "kernel.%s" % self.ident
705 base = "kernel.%s" % self.ident
707
706
708 return py3compat.cast_bytes("%s.%s" % (base, topic))
707 return py3compat.cast_bytes("%s.%s" % (base, topic))
709
708
710 def _abort_queues(self):
709 def _abort_queues(self):
711 for stream in self.shell_streams:
710 for stream in self.shell_streams:
712 if stream:
711 if stream:
713 self._abort_queue(stream)
712 self._abort_queue(stream)
714
713
715 def _abort_queue(self, stream):
714 def _abort_queue(self, stream):
716 poller = zmq.Poller()
715 poller = zmq.Poller()
717 poller.register(stream.socket, zmq.POLLIN)
716 poller.register(stream.socket, zmq.POLLIN)
718 while True:
717 while True:
719 idents,msg = self.session.recv(stream, zmq.NOBLOCK, content=True)
718 idents,msg = self.session.recv(stream, zmq.NOBLOCK, content=True)
720 if msg is None:
719 if msg is None:
721 return
720 return
722
721
723 self.log.info("Aborting:")
722 self.log.info("Aborting:")
724 self.log.info("%s", msg)
723 self.log.info("%s", msg)
725 msg_type = msg['header']['msg_type']
724 msg_type = msg['header']['msg_type']
726 reply_type = msg_type.split('_')[0] + '_reply'
725 reply_type = msg_type.split('_')[0] + '_reply'
727
726
728 status = {'status' : 'aborted'}
727 status = {'status' : 'aborted'}
729 md = {'engine' : self.ident}
728 md = {'engine' : self.ident}
730 md.update(status)
729 md.update(status)
731 reply_msg = self.session.send(stream, reply_type, metadata=md,
730 reply_msg = self.session.send(stream, reply_type, metadata=md,
732 content=status, parent=msg, ident=idents)
731 content=status, parent=msg, ident=idents)
733 self.log.debug("%s", reply_msg)
732 self.log.debug("%s", reply_msg)
734 # We need to wait a bit for requests to come in. This can probably
733 # We need to wait a bit for requests to come in. This can probably
735 # be set shorter for true asynchronous clients.
734 # be set shorter for true asynchronous clients.
736 poller.poll(50)
735 poller.poll(50)
737
736
738
737
739 def _no_raw_input(self):
738 def _no_raw_input(self):
740 """Raise StdinNotImplentedError if active frontend doesn't support
739 """Raise StdinNotImplentedError if active frontend doesn't support
741 stdin."""
740 stdin."""
742 raise StdinNotImplementedError("raw_input was called, but this "
741 raise StdinNotImplementedError("raw_input was called, but this "
743 "frontend does not support stdin.")
742 "frontend does not support stdin.")
744
743
745 def _raw_input(self, prompt, ident, parent):
744 def _raw_input(self, prompt, ident, parent):
746 # Flush output before making the request.
745 # Flush output before making the request.
747 sys.stderr.flush()
746 sys.stderr.flush()
748 sys.stdout.flush()
747 sys.stdout.flush()
749 # flush the stdin socket, to purge stale replies
748 # flush the stdin socket, to purge stale replies
750 while True:
749 while True:
751 try:
750 try:
752 self.stdin_socket.recv_multipart(zmq.NOBLOCK)
751 self.stdin_socket.recv_multipart(zmq.NOBLOCK)
753 except zmq.ZMQError as e:
752 except zmq.ZMQError as e:
754 if e.errno == zmq.EAGAIN:
753 if e.errno == zmq.EAGAIN:
755 break
754 break
756 else:
755 else:
757 raise
756 raise
758
757
759 # Send the input request.
758 # Send the input request.
760 content = json_clean(dict(prompt=prompt))
759 content = json_clean(dict(prompt=prompt))
761 self.session.send(self.stdin_socket, u'input_request', content, parent,
760 self.session.send(self.stdin_socket, u'input_request', content, parent,
762 ident=ident)
761 ident=ident)
763
762
764 # Await a response.
763 # Await a response.
765 while True:
764 while True:
766 try:
765 try:
767 ident, reply = self.session.recv(self.stdin_socket, 0)
766 ident, reply = self.session.recv(self.stdin_socket, 0)
768 except Exception:
767 except Exception:
769 self.log.warn("Invalid Message:", exc_info=True)
768 self.log.warn("Invalid Message:", exc_info=True)
770 else:
769 else:
771 break
770 break
772 try:
771 try:
773 value = reply['content']['value']
772 value = reply['content']['value']
774 except:
773 except:
775 self.log.error("Got bad raw_input reply: ")
774 self.log.error("Got bad raw_input reply: ")
776 self.log.error("%s", parent)
775 self.log.error("%s", parent)
777 value = ''
776 value = ''
778 if value == '\x04':
777 if value == '\x04':
779 # EOF
778 # EOF
780 raise EOFError
779 raise EOFError
781 return value
780 return value
782
781
783 def _complete(self, msg):
782 def _complete(self, msg):
784 c = msg['content']
783 c = msg['content']
785 try:
784 try:
786 cpos = int(c['cursor_pos'])
785 cpos = int(c['cursor_pos'])
787 except:
786 except:
788 # If we don't get something that we can convert to an integer, at
787 # If we don't get something that we can convert to an integer, at
789 # least attempt the completion, guessing that the cursor is at the end
788 # least attempt the completion, guessing that the cursor is at the end
790 # of the text if there is any, and otherwise at the end of the line
789 # of the text if there is any, and otherwise at the end of the line
791 cpos = len(c['text'])
790 cpos = len(c['text'])
792 if cpos==0:
791 if cpos==0:
793 cpos = len(c['line'])
792 cpos = len(c['line'])
794 return self.shell.complete(c['text'], c['line'], cpos)
793 return self.shell.complete(c['text'], c['line'], cpos)
795
794
796 def _at_shutdown(self):
795 def _at_shutdown(self):
797 """Actions taken at shutdown by the kernel, called by python's atexit.
796 """Actions taken at shutdown by the kernel, called by python's atexit.
798 """
797 """
799 # io.rprint("Kernel at_shutdown") # dbg
798 # io.rprint("Kernel at_shutdown") # dbg
800 if self._shutdown_message is not None:
799 if self._shutdown_message is not None:
801 self.session.send(self.iopub_socket, self._shutdown_message, ident=self._topic('shutdown'))
800 self.session.send(self.iopub_socket, self._shutdown_message, ident=self._topic('shutdown'))
802 self.log.debug("%s", self._shutdown_message)
801 self.log.debug("%s", self._shutdown_message)
803 [ s.flush(zmq.POLLOUT) for s in self.shell_streams ]
802 [ s.flush(zmq.POLLOUT) for s in self.shell_streams ]
804
803
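A brief, hedged illustration of the IOPub topic scheme built by _topic() above; the engine id is hypothetical, and py3compat.cast_bytes is the same helper the method itself uses:

from IPython.utils.py3compat import cast_bytes

int_id = 3                                   # hypothetical registered engine id
assert cast_bytes("engine.%i.%s" % (int_id, "pyerr")) == b"engine.3.pyerr"
# an unregistered kernel (int_id < 0) instead publishes on b"kernel.<uuid>.<topic>"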
@@ -1,23 +1,21 b''
1 import logging
2 from logging import INFO, DEBUG, WARN, ERROR, FATAL
1 from logging import INFO, DEBUG, WARN, ERROR, FATAL
3
2
4 import zmq
5 from zmq.log.handlers import PUBHandler
3 from zmq.log.handlers import PUBHandler
6
4
7 class EnginePUBHandler(PUBHandler):
5 class EnginePUBHandler(PUBHandler):
8 """A simple PUBHandler subclass that sets root_topic"""
6 """A simple PUBHandler subclass that sets root_topic"""
9 engine=None
7 engine=None
10
8
11 def __init__(self, engine, *args, **kwargs):
9 def __init__(self, engine, *args, **kwargs):
12 PUBHandler.__init__(self,*args, **kwargs)
10 PUBHandler.__init__(self,*args, **kwargs)
13 self.engine = engine
11 self.engine = engine
14
12
15 @property
13 @property
16 def root_topic(self):
14 def root_topic(self):
17 """this is a property, in case the handler is created
15 """this is a property, in case the handler is created
18 before the engine gets registered with an id"""
16 before the engine gets registered with an id"""
19 if isinstance(getattr(self.engine, 'id', None), int):
17 if isinstance(getattr(self.engine, 'id', None), int):
20 return "engine.%i"%self.engine.id
18 return "engine.%i"%self.engine.id
21 else:
19 else:
22 return "engine"
20 return "engine"
23
21
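A minimal, hedged sketch of attaching the handler above to a logger; the PUB endpoint and logger name are illustrative assumptions:

import logging
import zmq

pub = zmq.Context.instance().socket(zmq.PUB)
pub.connect("tcp://127.0.0.1:20202")          # hypothetical log endpoint
handler = EnginePUBHandler(None, pub)         # no engine yet -> root topic "engine"
handler.setLevel(logging.INFO)
logging.getLogger("engine-log").addHandler(handler)
# once handler.engine has an integer .id, records go out under "engine.<id>"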
@@ -1,215 +1,212 b''
1 """Produce SVG versions of active plots for display by the rich Qt frontend.
1 """Produce SVG versions of active plots for display by the rich Qt frontend.
2 """
2 """
3 #-----------------------------------------------------------------------------
3 #-----------------------------------------------------------------------------
4 # Imports
4 # Imports
5 #-----------------------------------------------------------------------------
5 #-----------------------------------------------------------------------------
6 from __future__ import print_function
6 from __future__ import print_function
7
7
8 # Standard library imports
9 import sys
10
11 # Third-party imports
8 # Third-party imports
12 import matplotlib
9 import matplotlib
13 from matplotlib.backends.backend_agg import new_figure_manager, FigureCanvasAgg
10 from matplotlib.backends.backend_agg import new_figure_manager, FigureCanvasAgg
14 from matplotlib._pylab_helpers import Gcf
11 from matplotlib._pylab_helpers import Gcf
15
12
16 # Local imports.
13 # Local imports.
17 from IPython.config.configurable import SingletonConfigurable
14 from IPython.config.configurable import SingletonConfigurable
18 from IPython.core.display import display
15 from IPython.core.display import display
19 from IPython.core.displaypub import publish_display_data
16 from IPython.core.displaypub import publish_display_data
20 from IPython.core.pylabtools import print_figure, select_figure_format
17 from IPython.core.pylabtools import print_figure, select_figure_format
21 from IPython.utils.traitlets import Dict, Instance, CaselessStrEnum, Bool
18 from IPython.utils.traitlets import Dict, Instance, CaselessStrEnum, Bool
22 from IPython.utils.warn import warn
19 from IPython.utils.warn import warn
23
20
24 #-----------------------------------------------------------------------------
21 #-----------------------------------------------------------------------------
25 # Configurable for inline backend options
22 # Configurable for inline backend options
26 #-----------------------------------------------------------------------------
23 #-----------------------------------------------------------------------------
27 # inherit from InlineBackendConfig for deprecation purposes
24 # inherit from InlineBackendConfig for deprecation purposes
28 class InlineBackendConfig(SingletonConfigurable):
25 class InlineBackendConfig(SingletonConfigurable):
29 pass
26 pass
30
27
31 class InlineBackend(InlineBackendConfig):
28 class InlineBackend(InlineBackendConfig):
32 """An object to store configuration of the inline backend."""
29 """An object to store configuration of the inline backend."""
33
30
34 def _config_changed(self, name, old, new):
31 def _config_changed(self, name, old, new):
35 # warn on change of renamed config section
32 # warn on change of renamed config section
36 if new.InlineBackendConfig != old.InlineBackendConfig:
33 if new.InlineBackendConfig != old.InlineBackendConfig:
37 warn("InlineBackendConfig has been renamed to InlineBackend")
34 warn("InlineBackendConfig has been renamed to InlineBackend")
38 super(InlineBackend, self)._config_changed(name, old, new)
35 super(InlineBackend, self)._config_changed(name, old, new)
39
36
40 # The typical default figure size is too large for inline use,
37 # The typical default figure size is too large for inline use,
41 # so we shrink the figure size to 6x4, and tweak fonts to
38 # so we shrink the figure size to 6x4, and tweak fonts to
42 # make that fit.
39 # make that fit.
43 rc = Dict({'figure.figsize': (6.0,4.0),
40 rc = Dict({'figure.figsize': (6.0,4.0),
44 # play nicely with white background in the Qt and notebook frontend
41 # play nicely with white background in the Qt and notebook frontend
45 'figure.facecolor': 'white',
42 'figure.facecolor': 'white',
46 'figure.edgecolor': 'white',
43 'figure.edgecolor': 'white',
47 # 12pt labels get cut off on 6x4 logplots, so use 10pt.
44 # 12pt labels get cut off on 6x4 logplots, so use 10pt.
48 'font.size': 10,
45 'font.size': 10,
49 # 72 dpi matches SVG/qtconsole
46 # 72 dpi matches SVG/qtconsole
50 # this only affects PNG export, as SVG has no dpi setting
47 # this only affects PNG export, as SVG has no dpi setting
51 'savefig.dpi': 72,
48 'savefig.dpi': 72,
52 # 10pt still needs a little more room on the xlabel:
49 # 10pt still needs a little more room on the xlabel:
53 'figure.subplot.bottom' : .125
50 'figure.subplot.bottom' : .125
54 }, config=True,
51 }, config=True,
55 help="""Subset of matplotlib rcParams that should be different for the
52 help="""Subset of matplotlib rcParams that should be different for the
56 inline backend."""
53 inline backend."""
57 )
54 )
58
55
59 figure_format = CaselessStrEnum(['svg', 'png', 'retina'], default_value='png', config=True,
56 figure_format = CaselessStrEnum(['svg', 'png', 'retina'], default_value='png', config=True,
60 help="The image format for figures with the inline backend.")
57 help="The image format for figures with the inline backend.")
61
58
62 def _figure_format_changed(self, name, old, new):
59 def _figure_format_changed(self, name, old, new):
63 if self.shell is None:
60 if self.shell is None:
64 return
61 return
65 else:
62 else:
66 select_figure_format(self.shell, new)
63 select_figure_format(self.shell, new)
67
64
68 close_figures = Bool(True, config=True,
65 close_figures = Bool(True, config=True,
69 help="""Close all figures at the end of each cell.
66 help="""Close all figures at the end of each cell.
70
67
71 When True, ensures that each cell starts with no active figures, but it
68 When True, ensures that each cell starts with no active figures, but it
72 also means that one must keep track of references in order to edit or
69 also means that one must keep track of references in order to edit or
73 redraw figures in subsequent cells. This mode is ideal for the notebook,
70 redraw figures in subsequent cells. This mode is ideal for the notebook,
74 where residual plots from other cells might be surprising.
71 where residual plots from other cells might be surprising.
75
72
76 When False, one must call figure() to create new figures. This means
73 When False, one must call figure() to create new figures. This means
77 that gcf() and getfigs() can reference figures created in other cells,
74 that gcf() and getfigs() can reference figures created in other cells,
78 and the active figure can continue to be edited with pylab/pyplot
75 and the active figure can continue to be edited with pylab/pyplot
79 methods that reference the current active figure. This mode facilitates
76 methods that reference the current active figure. This mode facilitates
80 iterative editing of figures, and behaves most consistently with
77 iterative editing of figures, and behaves most consistently with
81 other matplotlib backends, but figure barriers between cells must
78 other matplotlib backends, but figure barriers between cells must
82 be explicit.
79 be explicit.
83 """)
80 """)
84
81
85 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
82 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
86
83
87
84
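All of the traits above are config=True, so they can be set from an IPython configuration file; a hedged sketch with illustrative values:

# ipython_config.py -- illustrative values only
c = get_config()
c.InlineBackend.figure_format = 'svg'             # 'svg', 'png', or 'retina'
c.InlineBackend.close_figures = False             # keep figures editable across cells
c.InlineBackend.rc = {'figure.figsize': (8.0, 5.0), 'font.size': 12}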
88 #-----------------------------------------------------------------------------
85 #-----------------------------------------------------------------------------
89 # Functions
86 # Functions
90 #-----------------------------------------------------------------------------
87 #-----------------------------------------------------------------------------
91
88
92 def show(close=None):
89 def show(close=None):
93 """Show all figures as SVG/PNG payloads sent to the IPython clients.
90 """Show all figures as SVG/PNG payloads sent to the IPython clients.
94
91
95 Parameters
92 Parameters
96 ----------
93 ----------
97 close : bool, optional
94 close : bool, optional
98 If true, a ``plt.close('all')`` call is automatically issued after
95 If true, a ``plt.close('all')`` call is automatically issued after
99 sending all the figures. If this is set, the figures will be entirely
96 sending all the figures. If this is set, the figures will be entirely
100 removed from the internal list of figures.
97 removed from the internal list of figures.
101 """
98 """
102 if close is None:
99 if close is None:
103 close = InlineBackend.instance().close_figures
100 close = InlineBackend.instance().close_figures
104 try:
101 try:
105 for figure_manager in Gcf.get_all_fig_managers():
102 for figure_manager in Gcf.get_all_fig_managers():
106 display(figure_manager.canvas.figure)
103 display(figure_manager.canvas.figure)
107 finally:
104 finally:
108 show._to_draw = []
105 show._to_draw = []
109 if close:
106 if close:
110 matplotlib.pyplot.close('all')
107 matplotlib.pyplot.close('all')
111
108
112
109
113
110
114 # This flag will be reset by draw_if_interactive when called
111 # This flag will be reset by draw_if_interactive when called
115 show._draw_called = False
112 show._draw_called = False
116 # list of figures to draw when flush_figures is called
113 # list of figures to draw when flush_figures is called
117 show._to_draw = []
114 show._to_draw = []
118
115
119
116
120 def draw_if_interactive():
117 def draw_if_interactive():
121 """
118 """
122 Called after every pylab drawing command.
119 Called after every pylab drawing command.
123 """
120 """
124 # signal that the current active figure should be sent at the end of
121 # signal that the current active figure should be sent at the end of
125 # execution. Also sets the _draw_called flag, signaling that there will be
122 # execution. Also sets the _draw_called flag, signaling that there will be
126 # something to send. At the end of the code execution, a separate call to
123 # something to send. At the end of the code execution, a separate call to
127 # flush_figures() will act upon these values
124 # flush_figures() will act upon these values
128
125
129 fig = Gcf.get_active().canvas.figure
126 fig = Gcf.get_active().canvas.figure
130
127
131 # Hack: matplotlib FigureManager objects in interactive backends (at least
128 # Hack: matplotlib FigureManager objects in interactive backends (at least
132 # in some of them) monkeypatch the figure object and add a .show() method
129 # in some of them) monkeypatch the figure object and add a .show() method
133 # to it. This applies the same monkeypatch in order to support user code
130 # to it. This applies the same monkeypatch in order to support user code
134 # that might expect `.show()` to be part of the official API of figure
131 # that might expect `.show()` to be part of the official API of figure
135 # objects.
132 # objects.
136 # For further reference:
133 # For further reference:
137 # https://github.com/ipython/ipython/issues/1612
134 # https://github.com/ipython/ipython/issues/1612
138 # https://github.com/matplotlib/matplotlib/issues/835
135 # https://github.com/matplotlib/matplotlib/issues/835
139
136
140 if not hasattr(fig, 'show'):
137 if not hasattr(fig, 'show'):
141 # Queue up `fig` for display
138 # Queue up `fig` for display
142 fig.show = lambda *a: display(fig)
139 fig.show = lambda *a: display(fig)
143
140
144 # If matplotlib was manually set to non-interactive mode, this function
141 # If matplotlib was manually set to non-interactive mode, this function
145 # should be a no-op (otherwise we'll generate duplicate plots, since a user
142 # should be a no-op (otherwise we'll generate duplicate plots, since a user
146 # who set ioff() manually expects to make separate draw/show calls).
143 # who set ioff() manually expects to make separate draw/show calls).
147 if not matplotlib.is_interactive():
144 if not matplotlib.is_interactive():
148 return
145 return
149
146
150 # ensure current figure will be drawn, and each subsequent call
147 # ensure current figure will be drawn, and each subsequent call
151 # of draw_if_interactive() moves the active figure to ensure it is
148 # of draw_if_interactive() moves the active figure to ensure it is
152 # drawn last
149 # drawn last
153 try:
150 try:
154 show._to_draw.remove(fig)
151 show._to_draw.remove(fig)
155 except ValueError:
152 except ValueError:
156 # ensure it only appears in the draw list once
153 # ensure it only appears in the draw list once
157 pass
154 pass
158 # Queue up the figure for drawing in next show() call
155 # Queue up the figure for drawing in next show() call
159 show._to_draw.append(fig)
156 show._to_draw.append(fig)
160 show._draw_called = True
157 show._draw_called = True
161
158
162
159
163 def flush_figures():
160 def flush_figures():
164 """Send all figures that changed
161 """Send all figures that changed
165
162
166 This is meant to be called automatically and will call show() if, during
163 This is meant to be called automatically and will call show() if, during
167 prior code execution, there had been any calls to draw_if_interactive.
164 prior code execution, there had been any calls to draw_if_interactive.
168
165
169 This function is meant to be used as a post_execute callback in IPython,
166 This function is meant to be used as a post_execute callback in IPython,
170 so user-caused errors are handled with showtraceback() instead of being
167 so user-caused errors are handled with showtraceback() instead of being
171 allowed to raise. If this function is not called from within IPython,
168 allowed to raise. If this function is not called from within IPython,
172 then these exceptions will raise.
169 then these exceptions will raise.
173 """
170 """
174 if not show._draw_called:
171 if not show._draw_called:
175 return
172 return
176
173
177 if InlineBackend.instance().close_figures:
174 if InlineBackend.instance().close_figures:
178 # ignore the tracking, just draw and close all figures
175 # ignore the tracking, just draw and close all figures
179 try:
176 try:
180 return show(True)
177 return show(True)
181 except Exception as e:
178 except Exception as e:
182 # safely show traceback if in IPython, else raise
179 # safely show traceback if in IPython, else raise
183 try:
180 try:
184 get_ipython
181 get_ipython
185 except NameError:
182 except NameError:
186 raise e
183 raise e
187 else:
184 else:
188 get_ipython().showtraceback()
185 get_ipython().showtraceback()
189 return
186 return
190 try:
187 try:
191 # exclude any figures that were closed:
188 # exclude any figures that were closed:
192 active = set([fm.canvas.figure for fm in Gcf.get_all_fig_managers()])
189 active = set([fm.canvas.figure for fm in Gcf.get_all_fig_managers()])
193 for fig in [ fig for fig in show._to_draw if fig in active ]:
190 for fig in [ fig for fig in show._to_draw if fig in active ]:
194 try:
191 try:
195 display(fig)
192 display(fig)
196 except Exception as e:
193 except Exception as e:
197 # safely show traceback if in IPython, else raise
194 # safely show traceback if in IPython, else raise
198 try:
195 try:
199 get_ipython
196 get_ipython
200 except NameError:
197 except NameError:
201 raise e
198 raise e
202 else:
199 else:
203 get_ipython().showtraceback()
200 get_ipython().showtraceback()
204 break
201 break
205 finally:
202 finally:
206 # clear flags for next round
203 # clear flags for next round
207 show._to_draw = []
204 show._to_draw = []
208 show._draw_called = False
205 show._draw_called = False
209
206
210
207
211 # Changes to matplotlib in version 1.2 require an mpl backend to supply a default
208 # Changes to matplotlib in version 1.2 require an mpl backend to supply a default
212 # figure canvas. This is set here to an Agg canvas
209 # figure canvas. This is set here to an Agg canvas
213 # See https://github.com/matplotlib/matplotlib/pull/1125
210 # See https://github.com/matplotlib/matplotlib/pull/1125
214 FigureCanvas = FigureCanvasAgg
211 FigureCanvas = FigureCanvasAgg
215
212
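A hedged usage sketch of show() as defined above, assuming the inline backend is active inside an IPython kernel (the import path is assumed for this layout of IPython.kernel):

import matplotlib.pyplot as plt
from IPython.kernel.zmq.pylab.backend_inline import show   # path assumed

plt.plot([1, 2, 3])
show(close=True)   # publish every active figure as a display payload, then close them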
@@ -1,205 +1,198 b''
1 """serialization utilities for apply messages
1 """serialization utilities for apply messages
2
2
3 Authors:
3 Authors:
4
4
5 * Min RK
5 * Min RK
6 """
6 """
7 #-----------------------------------------------------------------------------
7 #-----------------------------------------------------------------------------
8 # Copyright (C) 2010-2011 The IPython Development Team
8 # Copyright (C) 2010-2011 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13
13
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17
17
18 # Standard library imports
19 import logging
20 import os
21 import re
22 import socket
23 import sys
24
25 try:
18 try:
26 import cPickle
19 import cPickle
27 pickle = cPickle
20 pickle = cPickle
28 except:
21 except:
29 cPickle = None
22 cPickle = None
30 import pickle
23 import pickle
31
24
32
25
33 # IPython imports
26 # IPython imports
34 from IPython.utils import py3compat
27 from IPython.utils import py3compat
35 from IPython.utils.data import flatten
28 from IPython.utils.data import flatten
36 from IPython.utils.pickleutil import (
29 from IPython.utils.pickleutil import (
37 can, uncan, can_sequence, uncan_sequence, CannedObject,
30 can, uncan, can_sequence, uncan_sequence, CannedObject,
38 istype, sequence_types,
31 istype, sequence_types,
39 )
32 )
40
33
41 if py3compat.PY3:
34 if py3compat.PY3:
42 buffer = memoryview
35 buffer = memoryview
43
36
44 #-----------------------------------------------------------------------------
37 #-----------------------------------------------------------------------------
45 # Serialization Functions
38 # Serialization Functions
46 #-----------------------------------------------------------------------------
39 #-----------------------------------------------------------------------------
47
40
48 # default values for the thresholds:
41 # default values for the thresholds:
49 MAX_ITEMS = 64
42 MAX_ITEMS = 64
50 MAX_BYTES = 1024
43 MAX_BYTES = 1024
51
44
52 def _extract_buffers(obj, threshold=MAX_BYTES):
45 def _extract_buffers(obj, threshold=MAX_BYTES):
53 """extract buffers larger than a certain threshold"""
46 """extract buffers larger than a certain threshold"""
54 buffers = []
47 buffers = []
55 if isinstance(obj, CannedObject) and obj.buffers:
48 if isinstance(obj, CannedObject) and obj.buffers:
56 for i,buf in enumerate(obj.buffers):
49 for i,buf in enumerate(obj.buffers):
57 if len(buf) > threshold:
50 if len(buf) > threshold:
58 # buffer larger than threshold, prevent pickling
51 # buffer larger than threshold, prevent pickling
59 obj.buffers[i] = None
52 obj.buffers[i] = None
60 buffers.append(buf)
53 buffers.append(buf)
61 elif isinstance(buf, buffer):
54 elif isinstance(buf, buffer):
62 # buffer too small for separate send, coerce to bytes
55 # buffer too small for separate send, coerce to bytes
63 # because pickling buffer objects just results in broken pointers
56 # because pickling buffer objects just results in broken pointers
64 obj.buffers[i] = bytes(buf)
57 obj.buffers[i] = bytes(buf)
65 return buffers
58 return buffers
66
59
67 def _restore_buffers(obj, buffers):
60 def _restore_buffers(obj, buffers):
68 """restore buffers extracted by """
61 """restore buffers extracted by """
69 if isinstance(obj, CannedObject) and obj.buffers:
62 if isinstance(obj, CannedObject) and obj.buffers:
70 for i,buf in enumerate(obj.buffers):
63 for i,buf in enumerate(obj.buffers):
71 if buf is None:
64 if buf is None:
72 obj.buffers[i] = buffers.pop(0)
65 obj.buffers[i] = buffers.pop(0)
73
66
74 def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
67 def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
75 """Serialize an object into a list of sendable buffers.
68 """Serialize an object into a list of sendable buffers.
76
69
77 Parameters
70 Parameters
78 ----------
71 ----------
79
72
80 obj : object
73 obj : object
81 The object to be serialized
74 The object to be serialized
82 buffer_threshold : int
75 buffer_threshold : int
83 The threshold (in bytes) for pulling out data buffers
76 The threshold (in bytes) for pulling out data buffers
84 to avoid pickling them.
77 to avoid pickling them.
85 item_threshold : int
78 item_threshold : int
86 The maximum number of items over which canning will iterate.
79 The maximum number of items over which canning will iterate.
87 Containers (lists, dicts) larger than this will be pickled without
80 Containers (lists, dicts) larger than this will be pickled without
88 introspection.
81 introspection.
89
82
90 Returns
83 Returns
91 -------
84 -------
92 [bufs] : list of buffers representing the serialized object.
85 [bufs] : list of buffers representing the serialized object.
93 """
86 """
94 buffers = []
87 buffers = []
95 if istype(obj, sequence_types) and len(obj) < item_threshold:
88 if istype(obj, sequence_types) and len(obj) < item_threshold:
96 cobj = can_sequence(obj)
89 cobj = can_sequence(obj)
97 for c in cobj:
90 for c in cobj:
98 buffers.extend(_extract_buffers(c, buffer_threshold))
91 buffers.extend(_extract_buffers(c, buffer_threshold))
99 elif istype(obj, dict) and len(obj) < item_threshold:
92 elif istype(obj, dict) and len(obj) < item_threshold:
100 cobj = {}
93 cobj = {}
101 for k in sorted(obj.iterkeys()):
94 for k in sorted(obj.iterkeys()):
102 c = can(obj[k])
95 c = can(obj[k])
103 buffers.extend(_extract_buffers(c, buffer_threshold))
96 buffers.extend(_extract_buffers(c, buffer_threshold))
104 cobj[k] = c
97 cobj[k] = c
105 else:
98 else:
106 cobj = can(obj)
99 cobj = can(obj)
107 buffers.extend(_extract_buffers(cobj, buffer_threshold))
100 buffers.extend(_extract_buffers(cobj, buffer_threshold))
108
101
109 buffers.insert(0, pickle.dumps(cobj,-1))
102 buffers.insert(0, pickle.dumps(cobj,-1))
110 return buffers
103 return buffers
111
104
112 def unserialize_object(buffers, g=None):
105 def unserialize_object(buffers, g=None):
113 """reconstruct an object serialized by serialize_object from data buffers.
106 """reconstruct an object serialized by serialize_object from data buffers.
114
107
115 Parameters
108 Parameters
116 ----------
109 ----------
117
110
118 buffers : list of buffers/bytes
111 buffers : list of buffers/bytes
119
112
120 g : globals to be used when uncanning
113 g : globals to be used when uncanning
121
114
122 Returns
115 Returns
123 -------
116 -------
124
117
125 (newobj, bufs) : unpacked object, and the list of remaining unused buffers.
118 (newobj, bufs) : unpacked object, and the list of remaining unused buffers.
126 """
119 """
127 bufs = list(buffers)
120 bufs = list(buffers)
128 pobj = bufs.pop(0)
121 pobj = bufs.pop(0)
129 if not isinstance(pobj, bytes):
122 if not isinstance(pobj, bytes):
130 # a zmq message
123 # a zmq message
131 pobj = bytes(pobj)
124 pobj = bytes(pobj)
132 canned = pickle.loads(pobj)
125 canned = pickle.loads(pobj)
133 if istype(canned, sequence_types) and len(canned) < MAX_ITEMS:
126 if istype(canned, sequence_types) and len(canned) < MAX_ITEMS:
134 for c in canned:
127 for c in canned:
135 _restore_buffers(c, bufs)
128 _restore_buffers(c, bufs)
136 newobj = uncan_sequence(canned, g)
129 newobj = uncan_sequence(canned, g)
137 elif isinstance(canned, dict) and len(canned) < MAX_ITEMS:
130 elif isinstance(canned, dict) and len(canned) < MAX_ITEMS:
138 newobj = {}
131 newobj = {}
139 for k in sorted(canned.iterkeys()):
132 for k in sorted(canned.iterkeys()):
140 c = canned[k]
133 c = canned[k]
141 _restore_buffers(c, bufs)
134 _restore_buffers(c, bufs)
142 newobj[k] = uncan(c, g)
135 newobj[k] = uncan(c, g)
143 else:
136 else:
144 _restore_buffers(canned, bufs)
137 _restore_buffers(canned, bufs)
145 newobj = uncan(canned, g)
138 newobj = uncan(canned, g)
146
139
147 return newobj, bufs
140 return newobj, bufs
148
141
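A hedged round-trip sketch for the two functions above; the module path matches the import used elsewhere in this changeset:

from IPython.kernel.zmq.serialize import serialize_object, unserialize_object

obj = {'a': range(3), 'b': 'hello'}
bufs = serialize_object(obj)              # [pickled canned object] + extracted buffers
newobj, leftover = unserialize_object(bufs)
assert newobj == obj and leftover == []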
149 def pack_apply_message(f, args, kwargs, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
142 def pack_apply_message(f, args, kwargs, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
150 """pack up a function, args, and kwargs to be sent over the wire
143 """pack up a function, args, and kwargs to be sent over the wire
151
144
152 Each element of args/kwargs will be canned for special treatment,
145 Each element of args/kwargs will be canned for special treatment,
153 but inspection will not go any deeper than that.
146 but inspection will not go any deeper than that.
154
147
155 Any object whose data is larger than `buffer_threshold` will not have its data copied
148 Any object whose data is larger than `buffer_threshold` will not have its data copied
156 (only numpy arrays and bytes/buffers support zero-copy)
149 (only numpy arrays and bytes/buffers support zero-copy)
157
150
158 Message will be a list of bytes/buffers of the format:
151 Message will be a list of bytes/buffers of the format:
159
152
160 [ cf, pinfo, <arg_bufs>, <kwarg_bufs> ]
153 [ cf, pinfo, <arg_bufs>, <kwarg_bufs> ]
161
154
162 With length at least two + len(args) + len(kwargs)
155 With length at least two + len(args) + len(kwargs)
163 """
156 """
164
157
165 arg_bufs = flatten(serialize_object(arg, buffer_threshold, item_threshold) for arg in args)
158 arg_bufs = flatten(serialize_object(arg, buffer_threshold, item_threshold) for arg in args)
166
159
167 kw_keys = sorted(kwargs.keys())
160 kw_keys = sorted(kwargs.keys())
168 kwarg_bufs = flatten(serialize_object(kwargs[key], buffer_threshold, item_threshold) for key in kw_keys)
161 kwarg_bufs = flatten(serialize_object(kwargs[key], buffer_threshold, item_threshold) for key in kw_keys)
169
162
170 info = dict(nargs=len(args), narg_bufs=len(arg_bufs), kw_keys=kw_keys)
163 info = dict(nargs=len(args), narg_bufs=len(arg_bufs), kw_keys=kw_keys)
171
164
172 msg = [pickle.dumps(can(f),-1)]
165 msg = [pickle.dumps(can(f),-1)]
173 msg.append(pickle.dumps(info, -1))
166 msg.append(pickle.dumps(info, -1))
174 msg.extend(arg_bufs)
167 msg.extend(arg_bufs)
175 msg.extend(kwarg_bufs)
168 msg.extend(kwarg_bufs)
176
169
177 return msg
170 return msg
178
171
179 def unpack_apply_message(bufs, g=None, copy=True):
172 def unpack_apply_message(bufs, g=None, copy=True):
180 """unpack f,args,kwargs from buffers packed by pack_apply_message()
173 """unpack f,args,kwargs from buffers packed by pack_apply_message()
181 Returns: original f,args,kwargs"""
174 Returns: original f,args,kwargs"""
182 bufs = list(bufs) # allow us to pop
175 bufs = list(bufs) # allow us to pop
183 assert len(bufs) >= 2, "not enough buffers!"
176 assert len(bufs) >= 2, "not enough buffers!"
184 if not copy:
177 if not copy:
185 for i in range(2):
178 for i in range(2):
186 bufs[i] = bufs[i].bytes
179 bufs[i] = bufs[i].bytes
187 f = uncan(pickle.loads(bufs.pop(0)), g)
180 f = uncan(pickle.loads(bufs.pop(0)), g)
188 info = pickle.loads(bufs.pop(0))
181 info = pickle.loads(bufs.pop(0))
189 arg_bufs, kwarg_bufs = bufs[:info['narg_bufs']], bufs[info['narg_bufs']:]
182 arg_bufs, kwarg_bufs = bufs[:info['narg_bufs']], bufs[info['narg_bufs']:]
190
183
191 args = []
184 args = []
192 for i in range(info['nargs']):
185 for i in range(info['nargs']):
193 arg, arg_bufs = unserialize_object(arg_bufs, g)
186 arg, arg_bufs = unserialize_object(arg_bufs, g)
194 args.append(arg)
187 args.append(arg)
195 args = tuple(args)
188 args = tuple(args)
196 assert not arg_bufs, "Shouldn't be any arg bufs left over"
189 assert not arg_bufs, "Shouldn't be any arg bufs left over"
197
190
198 kwargs = {}
191 kwargs = {}
199 for key in info['kw_keys']:
192 for key in info['kw_keys']:
200 kwarg, kwarg_bufs = unserialize_object(kwarg_bufs, g)
193 kwarg, kwarg_bufs = unserialize_object(kwarg_bufs, g)
201 kwargs[key] = kwarg
194 kwargs[key] = kwarg
202 assert not kwarg_bufs, "Shouldn't be any kwarg bufs left over"
195 assert not kwarg_bufs, "Shouldn't be any kwarg bufs left over"
203
196
204 return f,args,kwargs
197 return f,args,kwargs
205
198
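And the matching hedged round trip through pack_apply_message()/unpack_apply_message(), the two halves of an apply request on the wire:

from IPython.kernel.zmq.serialize import pack_apply_message, unpack_apply_message

def add(a, b=1):
    return a + b

bufs = pack_apply_message(add, (5,), {'b': 2})   # [canned f, pinfo, arg bufs, kwarg bufs]
f, args, kwargs = unpack_apply_message(bufs)
assert f(*args, **kwargs) == 7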
@@ -1,807 +1,806 b''
1 """Session object for building, serializing, sending, and receiving messages in
1 """Session object for building, serializing, sending, and receiving messages in
2 IPython. The Session object supports serialization, HMAC signatures, and
2 IPython. The Session object supports serialization, HMAC signatures, and
3 metadata on messages.
3 metadata on messages.
4
4
5 Also defined here are utilities for working with Sessions:
5 Also defined here are utilities for working with Sessions:
6 * A SessionFactory to be used as a base class for configurables that work with
6 * A SessionFactory to be used as a base class for configurables that work with
7 Sessions.
7 Sessions.
8 * A Message object for convenience that allows attribute-access to the msg dict.
8 * A Message object for convenience that allows attribute-access to the msg dict.
9
9
10 Authors:
10 Authors:
11
11
12 * Min RK
12 * Min RK
13 * Brian Granger
13 * Brian Granger
14 * Fernando Perez
14 * Fernando Perez
15 """
15 """
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17 # Copyright (C) 2010-2011 The IPython Development Team
17 # Copyright (C) 2010-2011 The IPython Development Team
18 #
18 #
19 # Distributed under the terms of the BSD License. The full license is in
19 # Distributed under the terms of the BSD License. The full license is in
20 # the file COPYING, distributed as part of this software.
20 # the file COPYING, distributed as part of this software.
21 #-----------------------------------------------------------------------------
21 #-----------------------------------------------------------------------------
22
22
23 #-----------------------------------------------------------------------------
23 #-----------------------------------------------------------------------------
24 # Imports
24 # Imports
25 #-----------------------------------------------------------------------------
25 #-----------------------------------------------------------------------------
26
26
27 import hmac
27 import hmac
28 import logging
28 import logging
29 import os
29 import os
30 import pprint
30 import pprint
31 import random
31 import random
32 import uuid
32 import uuid
33 from datetime import datetime
33 from datetime import datetime
34
34
35 try:
35 try:
36 import cPickle
36 import cPickle
37 pickle = cPickle
37 pickle = cPickle
38 except:
38 except:
39 cPickle = None
39 cPickle = None
40 import pickle
40 import pickle
41
41
42 import zmq
42 import zmq
43 from zmq.utils import jsonapi
43 from zmq.utils import jsonapi
44 from zmq.eventloop.ioloop import IOLoop
44 from zmq.eventloop.ioloop import IOLoop
45 from zmq.eventloop.zmqstream import ZMQStream
45 from zmq.eventloop.zmqstream import ZMQStream
46
46
47 from IPython.config.application import Application, boolean_flag
48 from IPython.config.configurable import Configurable, LoggingConfigurable
47 from IPython.config.configurable import Configurable, LoggingConfigurable
49 from IPython.utils import io
48 from IPython.utils import io
50 from IPython.utils.importstring import import_item
49 from IPython.utils.importstring import import_item
51 from IPython.utils.jsonutil import extract_dates, squash_dates, date_default
50 from IPython.utils.jsonutil import extract_dates, squash_dates, date_default
52 from IPython.utils.py3compat import str_to_bytes, str_to_unicode
51 from IPython.utils.py3compat import str_to_bytes, str_to_unicode
53 from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,
52 from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,
54 DottedObjectName, CUnicode, Dict, Integer)
53 DottedObjectName, CUnicode, Dict, Integer)
55 from IPython.kernel.zmq.serialize import MAX_ITEMS, MAX_BYTES
54 from IPython.kernel.zmq.serialize import MAX_ITEMS, MAX_BYTES
56
55
57 #-----------------------------------------------------------------------------
56 #-----------------------------------------------------------------------------
58 # utility functions
57 # utility functions
59 #-----------------------------------------------------------------------------
58 #-----------------------------------------------------------------------------
60
59
61 def squash_unicode(obj):
60 def squash_unicode(obj):
62 """coerce unicode back to bytestrings."""
61 """coerce unicode back to bytestrings."""
63 if isinstance(obj,dict):
62 if isinstance(obj,dict):
64 for key in obj.keys():
63 for key in obj.keys():
65 obj[key] = squash_unicode(obj[key])
64 obj[key] = squash_unicode(obj[key])
66 if isinstance(key, unicode):
65 if isinstance(key, unicode):
67 obj[squash_unicode(key)] = obj.pop(key)
66 obj[squash_unicode(key)] = obj.pop(key)
68 elif isinstance(obj, list):
67 elif isinstance(obj, list):
69 for i,v in enumerate(obj):
68 for i,v in enumerate(obj):
70 obj[i] = squash_unicode(v)
69 obj[i] = squash_unicode(v)
71 elif isinstance(obj, unicode):
70 elif isinstance(obj, unicode):
72 obj = obj.encode('utf8')
71 obj = obj.encode('utf8')
73 return obj
72 return obj
74
73
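For example (Python 2 semantics, where bytes is str), a hedged one-liner:

assert squash_unicode({u'a': [u'b', 1]}) == {b'a': [b'b', 1]}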
75 #-----------------------------------------------------------------------------
74 #-----------------------------------------------------------------------------
76 # globals and defaults
75 # globals and defaults
77 #-----------------------------------------------------------------------------
76 #-----------------------------------------------------------------------------
78
77
79 # ISO8601-ify datetime objects
78 # ISO8601-ify datetime objects
80 json_packer = lambda obj: jsonapi.dumps(obj, default=date_default)
79 json_packer = lambda obj: jsonapi.dumps(obj, default=date_default)
81 json_unpacker = lambda s: extract_dates(jsonapi.loads(s))
80 json_unpacker = lambda s: extract_dates(jsonapi.loads(s))
82
81
83 pickle_packer = lambda o: pickle.dumps(o,-1)
82 pickle_packer = lambda o: pickle.dumps(o,-1)
84 pickle_unpacker = pickle.loads
83 pickle_unpacker = pickle.loads
85
84
86 default_packer = json_packer
85 default_packer = json_packer
87 default_unpacker = json_unpacker
86 default_unpacker = json_unpacker
88
87
89 DELIM = b"<IDS|MSG>"
88 DELIM = b"<IDS|MSG>"
90 # singleton dummy tracker, which will always report as done
89 # singleton dummy tracker, which will always report as done
91 DONE = zmq.MessageTracker()
90 DONE = zmq.MessageTracker()
92
91
93 #-----------------------------------------------------------------------------
92 #-----------------------------------------------------------------------------
94 # Mixin tools for apps that use Sessions
93 # Mixin tools for apps that use Sessions
95 #-----------------------------------------------------------------------------
94 #-----------------------------------------------------------------------------
96
95
97 session_aliases = dict(
96 session_aliases = dict(
98 ident = 'Session.session',
97 ident = 'Session.session',
99 user = 'Session.username',
98 user = 'Session.username',
100 keyfile = 'Session.keyfile',
99 keyfile = 'Session.keyfile',
101 )
100 )
102
101
103 session_flags = {
102 session_flags = {
104 'secure' : ({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())),
103 'secure' : ({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())),
105 'keyfile' : '' }},
104 'keyfile' : '' }},
106 """Use HMAC digests for authentication of messages.
105 """Use HMAC digests for authentication of messages.
107 Setting this flag will generate a new UUID to use as the HMAC key.
106 Setting this flag will generate a new UUID to use as the HMAC key.
108 """),
107 """),
109 'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }},
108 'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }},
110 """Don't authenticate messages."""),
109 """Don't authenticate messages."""),
111 }
110 }
112
111
113 def default_secure(cfg):
112 def default_secure(cfg):
114 """Set the default behavior for a config environment to be secure.
113 """Set the default behavior for a config environment to be secure.
115
114
116 If Session.key/keyfile have not been set, set Session.key to
115 If Session.key/keyfile have not been set, set Session.key to
117 a new random UUID.
116 a new random UUID.
118 """
117 """
119
118
120 if 'Session' in cfg:
119 if 'Session' in cfg:
121 if 'key' in cfg.Session or 'keyfile' in cfg.Session:
120 if 'key' in cfg.Session or 'keyfile' in cfg.Session:
122 return
121 return
123 # key/keyfile not specified, generate new UUID:
122 # key/keyfile not specified, generate new UUID:
124 cfg.Session.key = str_to_bytes(str(uuid.uuid4()))
123 cfg.Session.key = str_to_bytes(str(uuid.uuid4()))
125
124
126
125
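A hedged sketch of using default_secure() before building a Session (the Config import path is assumed for this IPython version):

from IPython.config.loader import Config   # path assumed

cfg = Config()
default_secure(cfg)                 # fills in Session.key with a fresh UUID if unset
session = Session(config=cfg)
assert session.auth is not None     # messages will now carry HMAC signatures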
127 #-----------------------------------------------------------------------------
126 #-----------------------------------------------------------------------------
128 # Classes
127 # Classes
129 #-----------------------------------------------------------------------------
128 #-----------------------------------------------------------------------------
130
129
131 class SessionFactory(LoggingConfigurable):
130 class SessionFactory(LoggingConfigurable):
132 """The Base class for configurables that have a Session, Context, logger,
131 """The Base class for configurables that have a Session, Context, logger,
133 and IOLoop.
132 and IOLoop.
134 """
133 """
135
134
136 logname = Unicode('')
135 logname = Unicode('')
137 def _logname_changed(self, name, old, new):
136 def _logname_changed(self, name, old, new):
138 self.log = logging.getLogger(new)
137 self.log = logging.getLogger(new)
139
138
140 # not configurable:
139 # not configurable:
141 context = Instance('zmq.Context')
140 context = Instance('zmq.Context')
142 def _context_default(self):
141 def _context_default(self):
143 return zmq.Context.instance()
142 return zmq.Context.instance()
144
143
145 session = Instance('IPython.kernel.zmq.session.Session')
144 session = Instance('IPython.kernel.zmq.session.Session')
146
145
147 loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
146 loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
148 def _loop_default(self):
147 def _loop_default(self):
149 return IOLoop.instance()
148 return IOLoop.instance()
150
149
151 def __init__(self, **kwargs):
150 def __init__(self, **kwargs):
152 super(SessionFactory, self).__init__(**kwargs)
151 super(SessionFactory, self).__init__(**kwargs)
153
152
154 if self.session is None:
153 if self.session is None:
155 # construct the session
154 # construct the session
156 self.session = Session(**kwargs)
155 self.session = Session(**kwargs)
157
156
158
157
159 class Message(object):
158 class Message(object):
160 """A simple message object that maps dict keys to attributes.
159 """A simple message object that maps dict keys to attributes.
161
160
162 A Message can be created from a dict and a dict from a Message instance
161 A Message can be created from a dict and a dict from a Message instance
163 simply by calling dict(msg_obj)."""
162 simply by calling dict(msg_obj)."""
164
163
165 def __init__(self, msg_dict):
164 def __init__(self, msg_dict):
166 dct = self.__dict__
165 dct = self.__dict__
167 for k, v in dict(msg_dict).iteritems():
166 for k, v in dict(msg_dict).iteritems():
168 if isinstance(v, dict):
167 if isinstance(v, dict):
169 v = Message(v)
168 v = Message(v)
170 dct[k] = v
169 dct[k] = v
171
170
172 # Having this iterator lets dict(msg_obj) work out of the box.
171 # Having this iterator lets dict(msg_obj) work out of the box.
173 def __iter__(self):
172 def __iter__(self):
174 return iter(self.__dict__.iteritems())
173 return iter(self.__dict__.iteritems())
175
174
176 def __repr__(self):
175 def __repr__(self):
177 return repr(self.__dict__)
176 return repr(self.__dict__)
178
177
179 def __str__(self):
178 def __str__(self):
180 return pprint.pformat(self.__dict__)
179 return pprint.pformat(self.__dict__)
181
180
182 def __contains__(self, k):
181 def __contains__(self, k):
183 return k in self.__dict__
182 return k in self.__dict__
184
183
185 def __getitem__(self, k):
184 def __getitem__(self, k):
186 return self.__dict__[k]
185 return self.__dict__[k]
187
186
188
187
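A hedged, purely illustrative use of Message:

m = Message({'header': {'msg_id': 'abc'}, 'content': {'code': 'pass'}})
print(m.header.msg_id)   # -> 'abc'; nested dicts become Message objects
d = dict(m)              # back to a plain dict of the top-level keys
assert 'content' in d and 'content' in m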
189 def msg_header(msg_id, msg_type, username, session):
188 def msg_header(msg_id, msg_type, username, session):
190 date = datetime.now()
189 date = datetime.now()
191 return locals()
190 return locals()
192
191
193 def extract_header(msg_or_header):
192 def extract_header(msg_or_header):
194 """Given a message or header, return the header."""
193 """Given a message or header, return the header."""
195 if not msg_or_header:
194 if not msg_or_header:
196 return {}
195 return {}
197 try:
196 try:
198 # See if msg_or_header is the entire message.
197 # See if msg_or_header is the entire message.
199 h = msg_or_header['header']
198 h = msg_or_header['header']
200 except KeyError:
199 except KeyError:
201 try:
200 try:
202 # See if msg_or_header is just the header
201 # See if msg_or_header is just the header
203 h = msg_or_header['msg_id']
202 h = msg_or_header['msg_id']
204 except KeyError:
203 except KeyError:
205 raise
204 raise
206 else:
205 else:
207 h = msg_or_header
206 h = msg_or_header
208 if not isinstance(h, dict):
207 if not isinstance(h, dict):
209 h = dict(h)
208 h = dict(h)
210 return h
209 return h
211
210
212 class Session(Configurable):
211 class Session(Configurable):
213 """Object for handling serialization and sending of messages.
212 """Object for handling serialization and sending of messages.
214
213
215 The Session object handles building messages and sending them
214 The Session object handles building messages and sending them
216 with ZMQ sockets or ZMQStream objects. Objects can communicate with each
215 with ZMQ sockets or ZMQStream objects. Objects can communicate with each
217 other over the network via Session objects, and only need to work with the
216 other over the network via Session objects, and only need to work with the
218 dict-based IPython message spec. The Session will handle
217 dict-based IPython message spec. The Session will handle
219 serialization/deserialization, security, and metadata.
218 serialization/deserialization, security, and metadata.
220
219
221 Sessions support configurable serialization via packer/unpacker traits,
220 Sessions support configurable serialization via packer/unpacker traits,
222 and signing with HMAC digests via the key/keyfile traits.
221 and signing with HMAC digests via the key/keyfile traits.
223
222
224 Parameters
223 Parameters
225 ----------
224 ----------
226
225
227 debug : bool
226 debug : bool
228 whether to trigger extra debugging statements
227 whether to trigger extra debugging statements
229 packer/unpacker : str : 'json', 'pickle' or import_string
228 packer/unpacker : str : 'json', 'pickle' or import_string
230 importstrings for methods to serialize message parts. If just
229 importstrings for methods to serialize message parts. If just
231 'json' or 'pickle', predefined JSON and pickle packers will be used.
230 'json' or 'pickle', predefined JSON and pickle packers will be used.
232 Otherwise, the entire importstring must be used.
231 Otherwise, the entire importstring must be used.
233
232
234 The functions must accept at least valid JSON input, and output *bytes*.
233 The functions must accept at least valid JSON input, and output *bytes*.
235
234
236 For example, to use msgpack:
235 For example, to use msgpack:
237 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
236 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
238 pack/unpack : callables
237 pack/unpack : callables
239 You can also set the pack/unpack callables for serialization directly.
238 You can also set the pack/unpack callables for serialization directly.
240 session : bytes
239 session : bytes
241 the ID of this Session object. The default is to generate a new UUID.
240 the ID of this Session object. The default is to generate a new UUID.
242 username : unicode
241 username : unicode
243 username added to message headers. The default is to ask the OS.
242 username added to message headers. The default is to ask the OS.
244 key : bytes
243 key : bytes
245 The key used to initialize an HMAC signature. If unset, messages
244 The key used to initialize an HMAC signature. If unset, messages
246 will not be signed or checked.
245 will not be signed or checked.
247 keyfile : filepath
246 keyfile : filepath
248 The file containing a key. If this is set, `key` will be initialized
247 The file containing a key. If this is set, `key` will be initialized
249 to the contents of the file.
248 to the contents of the file.
250
249
251 """
250 """
252
251
253 debug=Bool(False, config=True, help="""Debug output in the Session""")
252 debug=Bool(False, config=True, help="""Debug output in the Session""")
254
253
255 packer = DottedObjectName('json',config=True,
254 packer = DottedObjectName('json',config=True,
256 help="""The name of the packer for serializing messages.
255 help="""The name of the packer for serializing messages.
257 Should be one of 'json', 'pickle', or an import name
256 Should be one of 'json', 'pickle', or an import name
258 for a custom callable serializer.""")
257 for a custom callable serializer.""")
259 def _packer_changed(self, name, old, new):
258 def _packer_changed(self, name, old, new):
260 if new.lower() == 'json':
259 if new.lower() == 'json':
261 self.pack = json_packer
260 self.pack = json_packer
262 self.unpack = json_unpacker
261 self.unpack = json_unpacker
263 self.unpacker = new
262 self.unpacker = new
264 elif new.lower() == 'pickle':
263 elif new.lower() == 'pickle':
265 self.pack = pickle_packer
264 self.pack = pickle_packer
266 self.unpack = pickle_unpacker
265 self.unpack = pickle_unpacker
267 self.unpacker = new
266 self.unpacker = new
268 else:
267 else:
269 self.pack = import_item(str(new))
268 self.pack = import_item(str(new))
270
269
271 unpacker = DottedObjectName('json', config=True,
270 unpacker = DottedObjectName('json', config=True,
272 help="""The name of the unpacker for unserializing messages.
271 help="""The name of the unpacker for unserializing messages.
273 Only used with custom functions for `packer`.""")
272 Only used with custom functions for `packer`.""")
274 def _unpacker_changed(self, name, old, new):
273 def _unpacker_changed(self, name, old, new):
275 if new.lower() == 'json':
274 if new.lower() == 'json':
276 self.pack = json_packer
275 self.pack = json_packer
277 self.unpack = json_unpacker
276 self.unpack = json_unpacker
278 self.packer = new
277 self.packer = new
279 elif new.lower() == 'pickle':
278 elif new.lower() == 'pickle':
280 self.pack = pickle_packer
279 self.pack = pickle_packer
281 self.unpack = pickle_unpacker
280 self.unpack = pickle_unpacker
282 self.packer = new
281 self.packer = new
283 else:
282 else:
284 self.unpack = import_item(str(new))
283 self.unpack = import_item(str(new))
285
284
286 session = CUnicode(u'', config=True,
285 session = CUnicode(u'', config=True,
287 help="""The UUID identifying this session.""")
286 help="""The UUID identifying this session.""")
288 def _session_default(self):
287 def _session_default(self):
289 u = unicode(uuid.uuid4())
288 u = unicode(uuid.uuid4())
290 self.bsession = u.encode('ascii')
289 self.bsession = u.encode('ascii')
291 return u
290 return u
292
291
293 def _session_changed(self, name, old, new):
292 def _session_changed(self, name, old, new):
294 self.bsession = self.session.encode('ascii')
293 self.bsession = self.session.encode('ascii')
295
294
296 # bsession is the session as bytes
295 # bsession is the session as bytes
297 bsession = CBytes(b'')
296 bsession = CBytes(b'')
298
297
299 username = Unicode(str_to_unicode(os.environ.get('USER', 'username')),
298 username = Unicode(str_to_unicode(os.environ.get('USER', 'username')),
300 help="""Username for the Session. Default is your system username.""",
299 help="""Username for the Session. Default is your system username.""",
301 config=True)
300 config=True)
302
301
303 metadata = Dict({}, config=True,
302 metadata = Dict({}, config=True,
304 help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")
303 help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")
305
304
306 # message signature related traits:
305 # message signature related traits:
307
306
308 key = CBytes(b'', config=True,
307 key = CBytes(b'', config=True,
309 help="""execution key, for extra authentication.""")
308 help="""execution key, for extra authentication.""")
310 def _key_changed(self, name, old, new):
309 def _key_changed(self, name, old, new):
311 if new:
310 if new:
312 self.auth = hmac.HMAC(new)
311 self.auth = hmac.HMAC(new)
313 else:
312 else:
314 self.auth = None
313 self.auth = None
315
314
316 auth = Instance(hmac.HMAC)
315 auth = Instance(hmac.HMAC)
317
316
318 digest_history = Set()
317 digest_history = Set()
319 digest_history_size = Integer(2**16, config=True,
318 digest_history_size = Integer(2**16, config=True,
320 help="""The maximum number of digests to remember.
319 help="""The maximum number of digests to remember.
321
320
322 The digest history will be culled when it exceeds this value.
321 The digest history will be culled when it exceeds this value.
323 """
322 """
324 )
323 )
325
324
326 keyfile = Unicode('', config=True,
325 keyfile = Unicode('', config=True,
327 help="""path to file containing execution key.""")
326 help="""path to file containing execution key.""")
328 def _keyfile_changed(self, name, old, new):
327 def _keyfile_changed(self, name, old, new):
329 with open(new, 'rb') as f:
328 with open(new, 'rb') as f:
330 self.key = f.read().strip()
329 self.key = f.read().strip()
331
330
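A hedged sketch of the key/keyfile behaviour defined above; the temporary file and key value are illustrative only.

    import os
    import tempfile
    from IPython.kernel.zmq.session import Session

    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(b'example-key')
    s = Session(keyfile=f.name)       # _keyfile_changed reads the file into `key`
    assert s.key == b'example-key'    # and _key_changed builds the HMAC in `auth`
    os.unlink(f.name)
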
332 # for protecting against sends from forks
331 # for protecting against sends from forks
333 pid = Integer()
332 pid = Integer()
334
333
335 # serialization traits:
334 # serialization traits:
336
335
337 pack = Any(default_packer) # the actual packer function
336 pack = Any(default_packer) # the actual packer function
338 def _pack_changed(self, name, old, new):
337 def _pack_changed(self, name, old, new):
339 if not callable(new):
338 if not callable(new):
340 raise TypeError("packer must be callable, not %s"%type(new))
339 raise TypeError("packer must be callable, not %s"%type(new))
341
340
342 unpack = Any(default_unpacker) # the actual unpacker function
341 unpack = Any(default_unpacker) # the actual unpacker function
343 def _unpack_changed(self, name, old, new):
342 def _unpack_changed(self, name, old, new):
344 # unpack is not checked against pack - it only needs to be callable
343 # unpack is not checked against pack - it only needs to be callable
345 if not callable(new):
344 if not callable(new):
346 raise TypeError("unpacker must be callable, not %s"%type(new))
345 raise TypeError("unpacker must be callable, not %s"%type(new))
347
346
348 # thresholds:
347 # thresholds:
349 copy_threshold = Integer(2**16, config=True,
348 copy_threshold = Integer(2**16, config=True,
350 help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
349 help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
351 buffer_threshold = Integer(MAX_BYTES, config=True,
350 buffer_threshold = Integer(MAX_BYTES, config=True,
352 help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.")
351 help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.")
353 item_threshold = Integer(MAX_ITEMS, config=True,
352 item_threshold = Integer(MAX_ITEMS, config=True,
354 help="""The maximum number of items for a container to be introspected for custom serialization.
353 help="""The maximum number of items for a container to be introspected for custom serialization.
355 Containers larger than this are pickled outright.
354 Containers larger than this are pickled outright.
356 """
355 """
357 )
356 )
358
357
359
358
360 def __init__(self, **kwargs):
359 def __init__(self, **kwargs):
361 """create a Session object
360 """create a Session object
362
361
363 Parameters
362 Parameters
364 ----------
363 ----------
365
364
366 debug : bool
365 debug : bool
367 whether to trigger extra debugging statements
366 whether to trigger extra debugging statements
368 packer/unpacker : str : 'json', 'pickle' or import_string
367 packer/unpacker : str : 'json', 'pickle' or import_string
369 importstrings for methods to serialize message parts. If just
368 importstrings for methods to serialize message parts. If just
370 'json' or 'pickle', predefined JSON and pickle packers will be used.
369 'json' or 'pickle', predefined JSON and pickle packers will be used.
371 Otherwise, the entire importstring must be used.
370 Otherwise, the entire importstring must be used.
372
371
373 The functions must accept at least valid JSON input, and output
372 The functions must accept at least valid JSON input, and output
374 *bytes*.
373 *bytes*.
375
374
376 For example, to use msgpack:
375 For example, to use msgpack:
377 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
376 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
378 pack/unpack : callables
377 pack/unpack : callables
379 You can also set the pack/unpack callables for serialization
378 You can also set the pack/unpack callables for serialization
380 directly.
379 directly.
381 session : unicode (must be ascii)
380 session : unicode (must be ascii)
382 the ID of this Session object. The default is to generate a new
381 the ID of this Session object. The default is to generate a new
383 UUID.
382 UUID.
384 bsession : bytes
383 bsession : bytes
385 The session as bytes
384 The session as bytes
386 username : unicode
385 username : unicode
387 username added to message headers. The default is to ask the OS.
386 username added to message headers. The default is to ask the OS.
388 key : bytes
387 key : bytes
389 The key used to initialize an HMAC signature. If unset, messages
388 The key used to initialize an HMAC signature. If unset, messages
390 will not be signed or checked.
389 will not be signed or checked.
391 keyfile : filepath
390 keyfile : filepath
392 The file containing a key. If this is set, `key` will be
391 The file containing a key. If this is set, `key` will be
393 initialized to the contents of the file.
392 initialized to the contents of the file.
394 """
393 """
395 super(Session, self).__init__(**kwargs)
394 super(Session, self).__init__(**kwargs)
396 self._check_packers()
395 self._check_packers()
397 self.none = self.pack({})
396 self.none = self.pack({})
398 # accessing self.session triggers _session_default() if necessary, so bsession is defined:
397 # accessing self.session triggers _session_default() if necessary, so bsession is defined:
399 self.session
398 self.session
400 self.pid = os.getpid()
399 self.pid = os.getpid()
401
400
402 @property
401 @property
403 def msg_id(self):
402 def msg_id(self):
404 """always return new uuid"""
403 """always return new uuid"""
405 return str(uuid.uuid4())
404 return str(uuid.uuid4())
406
405
407 def _check_packers(self):
406 def _check_packers(self):
408 """check packers for binary data and datetime support."""
407 """check packers for binary data and datetime support."""
409 pack = self.pack
408 pack = self.pack
410 unpack = self.unpack
409 unpack = self.unpack
411
410
412 # check simple serialization
411 # check simple serialization
413 msg = dict(a=[1,'hi'])
412 msg = dict(a=[1,'hi'])
414 try:
413 try:
415 packed = pack(msg)
414 packed = pack(msg)
416 except Exception:
415 except Exception:
417 raise ValueError("packer could not serialize a simple message")
416 raise ValueError("packer could not serialize a simple message")
418
417
419 # ensure packed message is bytes
418 # ensure packed message is bytes
420 if not isinstance(packed, bytes):
419 if not isinstance(packed, bytes):
421 raise ValueError("message packed to %r, but bytes are required"%type(packed))
420 raise ValueError("message packed to %r, but bytes are required"%type(packed))
422
421
423 # check that unpack is pack's inverse
422 # check that unpack is pack's inverse
424 try:
423 try:
425 unpacked = unpack(packed)
424 unpacked = unpack(packed)
426 except Exception:
425 except Exception:
427 raise ValueError("unpacker could not handle the packer's output")
426 raise ValueError("unpacker could not handle the packer's output")
428
427
429 # check datetime support
428 # check datetime support
430 msg = dict(t=datetime.now())
429 msg = dict(t=datetime.now())
431 try:
430 try:
432 unpacked = unpack(pack(msg))
431 unpacked = unpack(pack(msg))
433 except Exception:
432 except Exception:
434 self.pack = lambda o: pack(squash_dates(o))
433 self.pack = lambda o: pack(squash_dates(o))
435 self.unpack = lambda s: extract_dates(unpack(s))
434 self.unpack = lambda s: extract_dates(unpack(s))
436
435
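The datetime fallback installed above can be sketched in isolation; this is a hedged example using the plain json module rather than this module's default packer, and it assumes the ISO8601 helpers in IPython.utils.jsonutil behave as described.

    import json
    from datetime import datetime
    from IPython.utils.jsonutil import squash_dates, extract_dates

    pack = lambda o: json.dumps(squash_dates(o))     # datetimes -> ISO8601 strings
    unpack = lambda s: extract_dates(json.loads(s))  # ISO8601 strings -> datetimes
    restored = unpack(pack({'t': datetime(2013, 1, 1, 12, 30, 45, 123456)}))
    # restored['t'] should be a datetime again, not a string
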
437 def msg_header(self, msg_type):
436 def msg_header(self, msg_type):
438 return msg_header(self.msg_id, msg_type, self.username, self.session)
437 return msg_header(self.msg_id, msg_type, self.username, self.session)
439
438
440 def msg(self, msg_type, content=None, parent=None, header=None, metadata=None):
439 def msg(self, msg_type, content=None, parent=None, header=None, metadata=None):
441 """Return the nested message dict.
440 """Return the nested message dict.
442
441
443 This format is different from what is sent over the wire. The
442 This format is different from what is sent over the wire. The
444 serialize/unserialize methods convert this nested message dict to the wire
443 serialize/unserialize methods convert this nested message dict to the wire
445 format, which is a list of message parts.
444 format, which is a list of message parts.
446 """
445 """
447 msg = {}
446 msg = {}
448 header = self.msg_header(msg_type) if header is None else header
447 header = self.msg_header(msg_type) if header is None else header
449 msg['header'] = header
448 msg['header'] = header
450 msg['msg_id'] = header['msg_id']
449 msg['msg_id'] = header['msg_id']
451 msg['msg_type'] = header['msg_type']
450 msg['msg_type'] = header['msg_type']
452 msg['parent_header'] = {} if parent is None else extract_header(parent)
451 msg['parent_header'] = {} if parent is None else extract_header(parent)
453 msg['content'] = {} if content is None else content
452 msg['content'] = {} if content is None else content
454 msg['metadata'] = self.metadata.copy()
453 msg['metadata'] = self.metadata.copy()
455 if metadata is not None:
454 if metadata is not None:
456 msg['metadata'].update(metadata)
455 msg['metadata'].update(metadata)
457 return msg
456 return msg
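For illustration, a hedged sketch of the nested dict msg() returns (the message type and content are arbitrary):

    from IPython.kernel.zmq.session import Session

    s = Session(key=b'example-key')
    m = s.msg('execute_request', content={'code': 'pass'})
    # m carries: header, msg_id, msg_type, parent_header, content, metadata
    assert m['msg_type'] == 'execute_request'
    assert m['parent_header'] == {}
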
458
457
459 def sign(self, msg_list):
458 def sign(self, msg_list):
460 """Sign a message with HMAC digest. If no auth, return b''.
459 """Sign a message with HMAC digest. If no auth, return b''.
461
460
462 Parameters
461 Parameters
463 ----------
462 ----------
464 msg_list : list
463 msg_list : list
465 The [p_header,p_parent,p_content] part of the message list.
464 The [p_header,p_parent,p_content] part of the message list.
466 """
465 """
467 if self.auth is None:
466 if self.auth is None:
468 return b''
467 return b''
469 h = self.auth.copy()
468 h = self.auth.copy()
470 for m in msg_list:
469 for m in msg_list:
471 h.update(m)
470 h.update(m)
472 return str_to_bytes(h.hexdigest())
471 return str_to_bytes(h.hexdigest())
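A hedged sketch of what sign() computes, reproduced with the standard hmac module; both sides use hmac.HMAC(key) with its default digest, matching the HMAC built in _key_changed above.

    import hmac
    from IPython.kernel.zmq.session import Session

    key = b'example-key'
    parts = [b'{"a": 1}', b'{}', b'{}', b'{}']

    mac = hmac.HMAC(key)              # same construction as in _key_changed
    for p in parts:
        mac.update(p)

    assert Session(key=key).sign(parts) == mac.hexdigest().encode('ascii')
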
473
472
474 def serialize(self, msg, ident=None):
473 def serialize(self, msg, ident=None):
475 """Serialize the message components to bytes.
474 """Serialize the message components to bytes.
476
475
477 This is roughly the inverse of unserialize. The serialize/unserialize
476 This is roughly the inverse of unserialize. The serialize/unserialize
478 methods work with full message lists, whereas pack/unpack work with
477 methods work with full message lists, whereas pack/unpack work with
479 the individual message parts in the message list.
478 the individual message parts in the message list.
480
479
481 Parameters
480 Parameters
482 ----------
481 ----------
483 msg : dict or Message
482 msg : dict or Message
484 The nested message dict as returned by the self.msg method.
483 The nested message dict as returned by the self.msg method.
485
484
486 Returns
485 Returns
487 -------
486 -------
488 msg_list : list
487 msg_list : list
489 The list of bytes objects to be sent with the format:
488 The list of bytes objects to be sent with the format:
490 [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_metadata,p_content,
489 [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_metadata,p_content,
491 buffer1,buffer2,...]. In this list, the p_* entities are
490 buffer1,buffer2,...]. In this list, the p_* entities are
492 the packed or serialized versions, so if JSON is used, these
491 the packed or serialized versions, so if JSON is used, these
493 are utf8 encoded JSON strings.
492 are utf8 encoded JSON strings.
494 """
493 """
495 content = msg.get('content', {})
494 content = msg.get('content', {})
496 if content is None:
495 if content is None:
497 content = self.none
496 content = self.none
498 elif isinstance(content, dict):
497 elif isinstance(content, dict):
499 content = self.pack(content)
498 content = self.pack(content)
500 elif isinstance(content, bytes):
499 elif isinstance(content, bytes):
501 # content is already packed, as in a relayed message
500 # content is already packed, as in a relayed message
502 pass
501 pass
503 elif isinstance(content, unicode):
502 elif isinstance(content, unicode):
504 # should be bytes, but JSON often spits out unicode
503 # should be bytes, but JSON often spits out unicode
505 content = content.encode('utf8')
504 content = content.encode('utf8')
506 else:
505 else:
507 raise TypeError("Content incorrect type: %s"%type(content))
506 raise TypeError("Content incorrect type: %s"%type(content))
508
507
509 real_message = [self.pack(msg['header']),
508 real_message = [self.pack(msg['header']),
510 self.pack(msg['parent_header']),
509 self.pack(msg['parent_header']),
511 self.pack(msg['metadata']),
510 self.pack(msg['metadata']),
512 content,
511 content,
513 ]
512 ]
514
513
515 to_send = []
514 to_send = []
516
515
517 if isinstance(ident, list):
516 if isinstance(ident, list):
518 # accept list of idents
517 # accept list of idents
519 to_send.extend(ident)
518 to_send.extend(ident)
520 elif ident is not None:
519 elif ident is not None:
521 to_send.append(ident)
520 to_send.append(ident)
522 to_send.append(DELIM)
521 to_send.append(DELIM)
523
522
524 signature = self.sign(real_message)
523 signature = self.sign(real_message)
525 to_send.append(signature)
524 to_send.append(signature)
526
525
527 to_send.extend(real_message)
526 to_send.extend(real_message)
528
527
529 return to_send
528 return to_send
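A hedged sketch of the resulting wire layout (the ident, key, and message type are illustrative):

    from IPython.kernel.zmq.session import DELIM, Session

    s = Session(key=b'example-key')
    wire = s.serialize(s.msg('kernel_info_request'), ident=b'client-1')

    # [ident, DELIM, HMAC, p_header, p_parent, p_metadata, p_content]
    assert wire[0] == b'client-1'
    assert wire[1] == DELIM
    assert len(wire) == 7
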
530
529
531 def send(self, stream, msg_or_type, content=None, parent=None, ident=None,
530 def send(self, stream, msg_or_type, content=None, parent=None, ident=None,
532 buffers=None, track=False, header=None, metadata=None):
531 buffers=None, track=False, header=None, metadata=None):
533 """Build and send a message via stream or socket.
532 """Build and send a message via stream or socket.
534
533
535 The message format used by this function internally is as follows:
534 The message format used by this function internally is as follows:
536
535
537 [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_metadata,p_content,
536 [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_metadata,p_content,
538 buffer1,buffer2,...]
537 buffer1,buffer2,...]
539
538
540 The serialize/unserialize methods convert the nested message dict into this
539 The serialize/unserialize methods convert the nested message dict into this
541 format.
540 format.
542
541
543 Parameters
542 Parameters
544 ----------
543 ----------
545
544
546 stream : zmq.Socket or ZMQStream
545 stream : zmq.Socket or ZMQStream
547 The socket-like object used to send the data.
546 The socket-like object used to send the data.
548 msg_or_type : str or Message/dict
547 msg_or_type : str or Message/dict
549 Normally, msg_or_type will be a msg_type unless a message is being
548 Normally, msg_or_type will be a msg_type unless a message is being
550 sent more than once. If a header is supplied, this can be set to
549 sent more than once. If a header is supplied, this can be set to
551 None and the msg_type will be pulled from the header.
550 None and the msg_type will be pulled from the header.
552
551
553 content : dict or None
552 content : dict or None
554 The content of the message (ignored if msg_or_type is a message).
553 The content of the message (ignored if msg_or_type is a message).
555 header : dict or None
554 header : dict or None
556 The header dict for the message (ignored if msg_or_type is a message).
555 The header dict for the message (ignored if msg_or_type is a message).
557 parent : Message or dict or None
556 parent : Message or dict or None
558 The parent or parent header describing the parent of this message
557 The parent or parent header describing the parent of this message
559 (ignored if msg_or_type is a message).
558 (ignored if msg_or_type is a message).
560 ident : bytes or list of bytes
559 ident : bytes or list of bytes
561 The zmq.IDENTITY routing path.
560 The zmq.IDENTITY routing path.
562 metadata : dict or None
561 metadata : dict or None
563 The metadata describing the message
562 The metadata describing the message
564 buffers : list or None
563 buffers : list or None
565 The already-serialized buffers to be appended to the message.
564 The already-serialized buffers to be appended to the message.
566 track : bool
565 track : bool
567 Whether to track. Only for use with Sockets, because ZMQStream
566 Whether to track. Only for use with Sockets, because ZMQStream
568 objects cannot track messages.
567 objects cannot track messages.
569
568
570
569
571 Returns
570 Returns
572 -------
571 -------
573 msg : dict
572 msg : dict
574 The constructed message.
573 The constructed message.
575 """
574 """
576 if not isinstance(stream, zmq.Socket):
575 if not isinstance(stream, zmq.Socket):
577 # ZMQStreams and dummy sockets do not support tracking.
576 # ZMQStreams and dummy sockets do not support tracking.
578 track = False
577 track = False
579
578
580 if isinstance(msg_or_type, (Message, dict)):
579 if isinstance(msg_or_type, (Message, dict)):
581 # We got a Message or message dict, not a msg_type so don't
580 # We got a Message or message dict, not a msg_type so don't
582 # build a new Message.
581 # build a new Message.
583 msg = msg_or_type
582 msg = msg_or_type
584 else:
583 else:
585 msg = self.msg(msg_or_type, content=content, parent=parent,
584 msg = self.msg(msg_or_type, content=content, parent=parent,
586 header=header, metadata=metadata)
585 header=header, metadata=metadata)
587 if not os.getpid() == self.pid:
586 if not os.getpid() == self.pid:
588 io.rprint("WARNING: attempted to send message from fork")
587 io.rprint("WARNING: attempted to send message from fork")
589 io.rprint(msg)
588 io.rprint(msg)
590 return
589 return
591 buffers = [] if buffers is None else buffers
590 buffers = [] if buffers is None else buffers
592 to_send = self.serialize(msg, ident)
591 to_send = self.serialize(msg, ident)
593 to_send.extend(buffers)
592 to_send.extend(buffers)
594 longest = max([ len(s) for s in to_send ])
593 longest = max([ len(s) for s in to_send ])
595 copy = (longest < self.copy_threshold)
594 copy = (longest < self.copy_threshold)
596
595
597 if buffers and track and not copy:
596 if buffers and track and not copy:
598 # only really track when we are doing zero-copy buffers
597 # only really track when we are doing zero-copy buffers
599 tracker = stream.send_multipart(to_send, copy=False, track=True)
598 tracker = stream.send_multipart(to_send, copy=False, track=True)
600 else:
599 else:
601 # use dummy tracker, which will be done immediately
600 # use dummy tracker, which will be done immediately
602 tracker = DONE
601 tracker = DONE
603 stream.send_multipart(to_send, copy=copy)
602 stream.send_multipart(to_send, copy=copy)
604
603
605 if self.debug:
604 if self.debug:
606 pprint.pprint(msg)
605 pprint.pprint(msg)
607 pprint.pprint(to_send)
606 pprint.pprint(to_send)
608 pprint.pprint(buffers)
607 pprint.pprint(buffers)
609
608
610 msg['tracker'] = tracker
609 msg['tracker'] = tracker
611
610
612 return msg
611 return msg
613
612
614 def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
613 def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
615 """Send a raw message via ident path.
614 """Send a raw message via ident path.
616
615
617 This method is used to send an already serialized message.
616 This method is used to send an already serialized message.
618
617
619 Parameters
618 Parameters
620 ----------
619 ----------
621 stream : ZMQStream or Socket
620 stream : ZMQStream or Socket
622 The ZMQ stream or socket to use for sending the message.
621 The ZMQ stream or socket to use for sending the message.
623 msg_list : list
622 msg_list : list
624 The serialized list of messages to send. This only includes the
623 The serialized list of messages to send. This only includes the
625 [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of
624 [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of
626 the message.
625 the message.
627 ident : ident or list
626 ident : ident or list
628 A single ident or a list of idents to use in sending.
627 A single ident or a list of idents to use in sending.
629 """
628 """
630 to_send = []
629 to_send = []
631 if isinstance(ident, bytes):
630 if isinstance(ident, bytes):
632 ident = [ident]
631 ident = [ident]
633 if ident is not None:
632 if ident is not None:
634 to_send.extend(ident)
633 to_send.extend(ident)
635
634
636 to_send.append(DELIM)
635 to_send.append(DELIM)
637 to_send.append(self.sign(msg_list))
636 to_send.append(self.sign(msg_list))
638 to_send.extend(msg_list)
637 to_send.extend(msg_list)
639 stream.send_multipart(to_send, flags, copy=copy)
638 stream.send_multipart(to_send, flags, copy=copy)
640
639
641 def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
640 def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
642 """Receive and unpack a message.
641 """Receive and unpack a message.
643
642
644 Parameters
643 Parameters
645 ----------
644 ----------
646 socket : ZMQStream or Socket
645 socket : ZMQStream or Socket
647 The socket or stream to use in receiving.
646 The socket or stream to use in receiving.
648
647
649 Returns
648 Returns
650 -------
649 -------
651 [idents], msg
650 [idents], msg
652 [idents] is a list of idents and msg is a nested message dict of
651 [idents] is a list of idents and msg is a nested message dict of
653 same format as self.msg returns.
652 same format as self.msg returns.
654 """
653 """
655 if isinstance(socket, ZMQStream):
654 if isinstance(socket, ZMQStream):
656 socket = socket.socket
655 socket = socket.socket
657 try:
656 try:
658 msg_list = socket.recv_multipart(mode, copy=copy)
657 msg_list = socket.recv_multipart(mode, copy=copy)
659 except zmq.ZMQError as e:
658 except zmq.ZMQError as e:
660 if e.errno == zmq.EAGAIN:
659 if e.errno == zmq.EAGAIN:
661 # We can convert EAGAIN to None as we know in this case
660 # We can convert EAGAIN to None as we know in this case
662 # recv_multipart won't return None.
661 # recv_multipart won't return None.
663 return None,None
662 return None,None
664 else:
663 else:
665 raise
664 raise
666 # split multipart message into identity list and message dict
665 # split multipart message into identity list and message dict
667 # invalid large messages can cause very expensive string comparisons
666 # invalid large messages can cause very expensive string comparisons
668 idents, msg_list = self.feed_identities(msg_list, copy)
667 idents, msg_list = self.feed_identities(msg_list, copy)
669 try:
668 try:
670 return idents, self.unserialize(msg_list, content=content, copy=copy)
669 return idents, self.unserialize(msg_list, content=content, copy=copy)
671 except Exception as e:
670 except Exception as e:
672 # TODO: handle it
671 # TODO: handle it
673 raise e
672 raise e
674
673
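An end-to-end hedged sketch of send() and recv(); pyzmq is assumed, and the PAIR sockets and loopback port are only for illustration.

    import zmq
    from IPython.kernel.zmq.session import Session

    ctx = zmq.Context.instance()
    a = ctx.socket(zmq.PAIR)
    b = ctx.socket(zmq.PAIR)
    port = a.bind_to_random_port('tcp://127.0.0.1')
    b.connect('tcp://127.0.0.1:%i' % port)

    s = Session(key=b'example-key')
    s.send(a, 'execute_request', content={'code': 'pass'})
    idents, msg = s.recv(b, mode=0)   # mode=0 blocks until the message arrives
    # msg['header']['msg_type'] == u'execute_request'
    a.close()
    b.close()
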
675 def feed_identities(self, msg_list, copy=True):
674 def feed_identities(self, msg_list, copy=True):
676 """Split the identities from the rest of the message.
675 """Split the identities from the rest of the message.
677
676
678 Feed until DELIM is reached, then return the prefix as idents and
677 Feed until DELIM is reached, then return the prefix as idents and
679 remainder as msg_list. This is easily broken by setting an IDENT to DELIM,
678 remainder as msg_list. This is easily broken by setting an IDENT to DELIM,
680 but that would be silly.
679 but that would be silly.
681
680
682 Parameters
681 Parameters
683 ----------
682 ----------
684 msg_list : a list of Message or bytes objects
683 msg_list : a list of Message or bytes objects
685 The message to be split.
684 The message to be split.
686 copy : bool
685 copy : bool
687 flag determining whether msg_list holds plain bytes (True) or zmq.Message objects (False)
686 flag determining whether msg_list holds plain bytes (True) or zmq.Message objects (False)
688
687
689 Returns
688 Returns
690 -------
689 -------
691 (idents, msg_list) : two lists
690 (idents, msg_list) : two lists
692 idents will always be a list of bytes, each of which is a ZMQ
691 idents will always be a list of bytes, each of which is a ZMQ
693 identity. msg_list will be a list of bytes or zmq.Messages of the
692 identity. msg_list will be a list of bytes or zmq.Messages of the
694 form [HMAC,p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] and
693 form [HMAC,p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] and
695 should be unpackable/unserializable via self.unserialize at this
694 should be unpackable/unserializable via self.unserialize at this
696 point.
695 point.
697 """
696 """
698 if copy:
697 if copy:
699 idx = msg_list.index(DELIM)
698 idx = msg_list.index(DELIM)
700 return msg_list[:idx], msg_list[idx+1:]
699 return msg_list[:idx], msg_list[idx+1:]
701 else:
700 else:
702 failed = True
701 failed = True
703 for idx,m in enumerate(msg_list):
702 for idx,m in enumerate(msg_list):
704 if m.bytes == DELIM:
703 if m.bytes == DELIM:
705 failed = False
704 failed = False
706 break
705 break
707 if failed:
706 if failed:
708 raise ValueError("DELIM not in msg_list")
707 raise ValueError("DELIM not in msg_list")
709 idents, msg_list = msg_list[:idx], msg_list[idx+1:]
708 idents, msg_list = msg_list[:idx], msg_list[idx+1:]
710 return [m.bytes for m in idents], msg_list
709 return [m.bytes for m in idents], msg_list
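A hedged sketch of the split performed above; the byte strings stand in for real packed parts.

    from IPython.kernel.zmq.session import DELIM, Session

    parts = [b'routing-id', DELIM, b'sig', b'hdr', b'parent', b'md', b'content']
    idents, rest = Session().feed_identities(parts)
    assert idents == [b'routing-id']
    assert rest == [b'sig', b'hdr', b'parent', b'md', b'content']
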
711
710
712 def _add_digest(self, signature):
711 def _add_digest(self, signature):
713 """add a digest to history to protect against replay attacks"""
712 """add a digest to history to protect against replay attacks"""
714 if self.digest_history_size == 0:
713 if self.digest_history_size == 0:
715 # no history, never add digests
714 # no history, never add digests
716 return
715 return
717
716
718 self.digest_history.add(signature)
717 self.digest_history.add(signature)
719 if len(self.digest_history) > self.digest_history_size:
718 if len(self.digest_history) > self.digest_history_size:
720 # threshold reached, cull 10%
719 # threshold reached, cull 10%
721 self._cull_digest_history()
720 self._cull_digest_history()
722
721
723 def _cull_digest_history(self):
722 def _cull_digest_history(self):
724 """cull the digest history
723 """cull the digest history
725
724
726 Removes a randomly selected 10% of the digest history
725 Removes a randomly selected 10% of the digest history
727 """
726 """
728 current = len(self.digest_history)
727 current = len(self.digest_history)
729 n_to_cull = max(int(current // 10), current - self.digest_history_size)
728 n_to_cull = max(int(current // 10), current - self.digest_history_size)
730 if n_to_cull >= current:
729 if n_to_cull >= current:
731 self.digest_history = set()
730 self.digest_history = set()
732 return
731 return
733 to_cull = random.sample(self.digest_history, n_to_cull)
732 to_cull = random.sample(self.digest_history, n_to_cull)
734 self.digest_history.difference_update(to_cull)
733 self.digest_history.difference_update(to_cull)
735
734
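A hedged sketch of the replay protection these helpers provide, using unserialize() as defined just below (key and message type are arbitrary):

    from IPython.kernel.zmq.session import Session

    s = Session(key=b'example-key')
    idents, parts = s.feed_identities(s.serialize(s.msg('kernel_info_request')))

    s.unserialize(parts)          # first delivery: the HMAC digest is recorded
    try:
        s.unserialize(parts)      # replaying the identical parts is rejected
    except ValueError as e:
        print(e)                  # Duplicate Signature: ...
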
736 def unserialize(self, msg_list, content=True, copy=True):
735 def unserialize(self, msg_list, content=True, copy=True):
737 """Unserialize a msg_list to a nested message dict.
736 """Unserialize a msg_list to a nested message dict.
738
737
739 This is roughly the inverse of serialize. The serialize/unserialize
738 This is roughly the inverse of serialize. The serialize/unserialize
740 methods work with full message lists, whereas pack/unpack work with
739 methods work with full message lists, whereas pack/unpack work with
741 the individual message parts in the message list.
740 the individual message parts in the message list.
742
741
743 Parameters
742 Parameters
744 ----------
743 ----------
745 msg_list : list of bytes or Message objects
744 msg_list : list of bytes or Message objects
746 The list of message parts of the form [HMAC,p_header,p_parent,
745 The list of message parts of the form [HMAC,p_header,p_parent,
747 p_metadata,p_content,buffer1,buffer2,...].
746 p_metadata,p_content,buffer1,buffer2,...].
748 content : bool (True)
747 content : bool (True)
749 Whether to unpack the content dict (True), or leave it packed
748 Whether to unpack the content dict (True), or leave it packed
750 (False).
749 (False).
751 copy : bool (True)
750 copy : bool (True)
752 Whether to return the bytes (True), or the non-copying Message
751 Whether to return the bytes (True), or the non-copying Message
753 object in each place (False).
752 object in each place (False).
754
753
755 Returns
754 Returns
756 -------
755 -------
757 msg : dict
756 msg : dict
758 The nested message dict with top-level keys [header, parent_header,
757 The nested message dict with top-level keys [header, parent_header,
759 metadata, content, buffers].
758 metadata, content, buffers].
760 """
759 """
761 minlen = 5
760 minlen = 5
762 message = {}
761 message = {}
763 if not copy:
762 if not copy:
764 for i in range(minlen):
763 for i in range(minlen):
765 msg_list[i] = msg_list[i].bytes
764 msg_list[i] = msg_list[i].bytes
766 if self.auth is not None:
765 if self.auth is not None:
767 signature = msg_list[0]
766 signature = msg_list[0]
768 if not signature:
767 if not signature:
769 raise ValueError("Unsigned Message")
768 raise ValueError("Unsigned Message")
770 if signature in self.digest_history:
769 if signature in self.digest_history:
771 raise ValueError("Duplicate Signature: %r" % signature)
770 raise ValueError("Duplicate Signature: %r" % signature)
772 self._add_digest(signature)
771 self._add_digest(signature)
773 check = self.sign(msg_list[1:5])
772 check = self.sign(msg_list[1:5])
774 if not signature == check:
773 if not signature == check:
775 raise ValueError("Invalid Signature: %r" % signature)
774 raise ValueError("Invalid Signature: %r" % signature)
776 if not len(msg_list) >= minlen:
775 if not len(msg_list) >= minlen:
777 raise TypeError("malformed message, must have at least %i elements"%minlen)
776 raise TypeError("malformed message, must have at least %i elements"%minlen)
778 header = self.unpack(msg_list[1])
777 header = self.unpack(msg_list[1])
779 message['header'] = header
778 message['header'] = header
780 message['msg_id'] = header['msg_id']
779 message['msg_id'] = header['msg_id']
781 message['msg_type'] = header['msg_type']
780 message['msg_type'] = header['msg_type']
782 message['parent_header'] = self.unpack(msg_list[2])
781 message['parent_header'] = self.unpack(msg_list[2])
783 message['metadata'] = self.unpack(msg_list[3])
782 message['metadata'] = self.unpack(msg_list[3])
784 if content:
783 if content:
785 message['content'] = self.unpack(msg_list[4])
784 message['content'] = self.unpack(msg_list[4])
786 else:
785 else:
787 message['content'] = msg_list[4]
786 message['content'] = msg_list[4]
788
787
789 message['buffers'] = msg_list[5:]
788 message['buffers'] = msg_list[5:]
790 return message
789 return message
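Putting the pieces together, a hedged round-trip sketch (the content value is arbitrary):

    from IPython.kernel.zmq.session import Session

    s = Session(key=b'example-key')
    wire = s.serialize(s.msg('execute_request', content={'code': '1+1'}))
    idents, parts = s.feed_identities(wire)
    msg = s.unserialize(parts)
    assert msg['content']['code'] == '1+1'
    assert msg['parent_header'] == {}
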
791
790
792 def test_msg2obj():
791 def test_msg2obj():
793 am = dict(x=1)
792 am = dict(x=1)
794 ao = Message(am)
793 ao = Message(am)
795 assert ao.x == am['x']
794 assert ao.x == am['x']
796
795
797 am['y'] = dict(z=1)
796 am['y'] = dict(z=1)
798 ao = Message(am)
797 ao = Message(am)
799 assert ao.y.z == am['y']['z']
798 assert ao.y.z == am['y']['z']
800
799
801 k1, k2 = 'y', 'z'
800 k1, k2 = 'y', 'z'
802 assert ao[k1][k2] == am[k1][k2]
801 assert ao[k1][k2] == am[k1][k2]
803
802
804 am2 = dict(ao)
803 am2 = dict(ao)
805 assert am['x'] == am2['x']
804 assert am['x'] == am2['x']
806 assert am['y']['z'] == am2['y']['z']
805 assert am['y']['z'] == am2['y']['z']
807
806
@@ -1,597 +1,597 b''
1 """A ZMQ-based subclass of InteractiveShell.
1 """A ZMQ-based subclass of InteractiveShell.
2
2
3 This code is meant to ease the refactoring of the base InteractiveShell into
3 This code is meant to ease the refactoring of the base InteractiveShell into
4 something with a cleaner architecture for 2-process use, without actually
4 something with a cleaner architecture for 2-process use, without actually
5 breaking InteractiveShell itself. So we're doing something a bit ugly, where
5 breaking InteractiveShell itself. So we're doing something a bit ugly, where
6 we subclass and override what we want to fix. Once this is working well, we
6 we subclass and override what we want to fix. Once this is working well, we
7 can go back to the base class and refactor the code for a cleaner inheritance
7 can go back to the base class and refactor the code for a cleaner inheritance
8 implementation that doesn't rely on so much monkeypatching.
8 implementation that doesn't rely on so much monkeypatching.
9
9
10 But this lets us maintain a fully working IPython as we develop the new
10 But this lets us maintain a fully working IPython as we develop the new
11 machinery. This should thus be thought of as scaffolding.
11 machinery. This should thus be thought of as scaffolding.
12 """
12 """
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14 # Imports
14 # Imports
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16 from __future__ import print_function
16 from __future__ import print_function
17
17
18 # Stdlib
18 # Stdlib
19 import os
19 import os
20 import sys
20 import sys
21 import time
21 import time
22
22
23 # System library imports
23 # System library imports
24 from zmq.eventloop import ioloop
24 from zmq.eventloop import ioloop
25
25
26 # Our own
26 # Our own
27 from IPython.core.interactiveshell import (
27 from IPython.core.interactiveshell import (
28 InteractiveShell, InteractiveShellABC
28 InteractiveShell, InteractiveShellABC
29 )
29 )
30 from IPython.core import page
30 from IPython.core import page
31 from IPython.core.autocall import ZMQExitAutocall
31 from IPython.core.autocall import ZMQExitAutocall
32 from IPython.core.displaypub import DisplayPublisher
32 from IPython.core.displaypub import DisplayPublisher
33 from IPython.core.error import UsageError
33 from IPython.core.error import UsageError
34 from IPython.core.magics import MacroToEdit, CodeMagics
34 from IPython.core.magics import MacroToEdit, CodeMagics
35 from IPython.core.magic import magics_class, line_magic, Magics
35 from IPython.core.magic import magics_class, line_magic, Magics
36 from IPython.core.payloadpage import install_payload_page
36 from IPython.core.payloadpage import install_payload_page
37 from IPython.display import display, Javascript
37 from IPython.display import display, Javascript
38 from IPython.kernel.inprocess.socket import SocketABC
38 from IPython.kernel.inprocess.socket import SocketABC
39 from IPython.kernel import (
39 from IPython.kernel import (
40 get_connection_file, get_connection_info, connect_qtconsole
40 get_connection_file, get_connection_info, connect_qtconsole
41 )
41 )
42 from IPython.testing.skipdoctest import skip_doctest
42 from IPython.testing.skipdoctest import skip_doctest
43 from IPython.utils import io, openpy
43 from IPython.utils import openpy
44 from IPython.utils.jsonutil import json_clean, encode_images
44 from IPython.utils.jsonutil import json_clean, encode_images
45 from IPython.utils.process import arg_split
45 from IPython.utils.process import arg_split
46 from IPython.utils import py3compat
46 from IPython.utils import py3compat
47 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes
47 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes
48 from IPython.utils.warn import warn, error
48 from IPython.utils.warn import error
49 from IPython.kernel.zmq.displayhook import ZMQShellDisplayHook
49 from IPython.kernel.zmq.displayhook import ZMQShellDisplayHook
50 from IPython.kernel.zmq.datapub import ZMQDataPublisher
50 from IPython.kernel.zmq.datapub import ZMQDataPublisher
51 from IPython.kernel.zmq.session import extract_header
51 from IPython.kernel.zmq.session import extract_header
52 from session import Session
52 from session import Session
53
53
54 #-----------------------------------------------------------------------------
54 #-----------------------------------------------------------------------------
55 # Functions and classes
55 # Functions and classes
56 #-----------------------------------------------------------------------------
56 #-----------------------------------------------------------------------------
57
57
58 class ZMQDisplayPublisher(DisplayPublisher):
58 class ZMQDisplayPublisher(DisplayPublisher):
59 """A display publisher that publishes data using a ZeroMQ PUB socket."""
59 """A display publisher that publishes data using a ZeroMQ PUB socket."""
60
60
61 session = Instance(Session)
61 session = Instance(Session)
62 pub_socket = Instance(SocketABC)
62 pub_socket = Instance(SocketABC)
63 parent_header = Dict({})
63 parent_header = Dict({})
64 topic = CBytes(b'displaypub')
64 topic = CBytes(b'displaypub')
65
65
66 def set_parent(self, parent):
66 def set_parent(self, parent):
67 """Set the parent for outbound messages."""
67 """Set the parent for outbound messages."""
68 self.parent_header = extract_header(parent)
68 self.parent_header = extract_header(parent)
69
69
70 def _flush_streams(self):
70 def _flush_streams(self):
71 """flush IO Streams prior to display"""
71 """flush IO Streams prior to display"""
72 sys.stdout.flush()
72 sys.stdout.flush()
73 sys.stderr.flush()
73 sys.stderr.flush()
74
74
75 def publish(self, source, data, metadata=None):
75 def publish(self, source, data, metadata=None):
76 self._flush_streams()
76 self._flush_streams()
77 if metadata is None:
77 if metadata is None:
78 metadata = {}
78 metadata = {}
79 self._validate_data(source, data, metadata)
79 self._validate_data(source, data, metadata)
80 content = {}
80 content = {}
81 content['source'] = source
81 content['source'] = source
82 content['data'] = encode_images(data)
82 content['data'] = encode_images(data)
83 content['metadata'] = metadata
83 content['metadata'] = metadata
84 self.session.send(
84 self.session.send(
85 self.pub_socket, u'display_data', json_clean(content),
85 self.pub_socket, u'display_data', json_clean(content),
86 parent=self.parent_header, ident=self.topic,
86 parent=self.parent_header, ident=self.topic,
87 )
87 )
88
88
89 def clear_output(self, stdout=True, stderr=True, other=True):
89 def clear_output(self, stdout=True, stderr=True, other=True):
90 content = dict(stdout=stdout, stderr=stderr, other=other)
90 content = dict(stdout=stdout, stderr=stderr, other=other)
91
91
92 if stdout:
92 if stdout:
93 print('\r', file=sys.stdout, end='')
93 print('\r', file=sys.stdout, end='')
94 if stderr:
94 if stderr:
95 print('\r', file=sys.stderr, end='')
95 print('\r', file=sys.stderr, end='')
96
96
97 self._flush_streams()
97 self._flush_streams()
98
98
99 self.session.send(
99 self.session.send(
100 self.pub_socket, u'clear_output', content,
100 self.pub_socket, u'clear_output', content,
101 parent=self.parent_header, ident=self.topic,
101 parent=self.parent_header, ident=self.topic,
102 )
102 )
103
103
104 @magics_class
104 @magics_class
105 class KernelMagics(Magics):
105 class KernelMagics(Magics):
106 #------------------------------------------------------------------------
106 #------------------------------------------------------------------------
107 # Magic overrides
107 # Magic overrides
108 #------------------------------------------------------------------------
108 #------------------------------------------------------------------------
109 # Once the base class stops inheriting from magic, this code needs to be
109 # Once the base class stops inheriting from magic, this code needs to be
110 # moved into a separate machinery as well. For now, at least isolate here
110 # moved into a separate machinery as well. For now, at least isolate here
111 # the magics which this class needs to implement differently from the base
111 # the magics which this class needs to implement differently from the base
112 # class, or that are unique to it.
112 # class, or that are unique to it.
113
113
114 @line_magic
114 @line_magic
115 def doctest_mode(self, parameter_s=''):
115 def doctest_mode(self, parameter_s=''):
116 """Toggle doctest mode on and off.
116 """Toggle doctest mode on and off.
117
117
118 This mode is intended to make IPython behave as much as possible like a
118 This mode is intended to make IPython behave as much as possible like a
119 plain Python shell, from the perspective of how its prompts, exceptions
119 plain Python shell, from the perspective of how its prompts, exceptions
120 and output look. This makes it easy to copy and paste parts of a
120 and output look. This makes it easy to copy and paste parts of a
121 session into doctests. It does so by:
121 session into doctests. It does so by:
122
122
123 - Changing the prompts to the classic ``>>>`` ones.
123 - Changing the prompts to the classic ``>>>`` ones.
124 - Changing the exception reporting mode to 'Plain'.
124 - Changing the exception reporting mode to 'Plain'.
125 - Disabling pretty-printing of output.
125 - Disabling pretty-printing of output.
126
126
127 Note that IPython also supports the pasting of code snippets that have
127 Note that IPython also supports the pasting of code snippets that have
128 leading '>>>' and '...' prompts in them. This means that you can paste
128 leading '>>>' and '...' prompts in them. This means that you can paste
129 doctests from files or docstrings (even if they have leading
129 doctests from files or docstrings (even if they have leading
130 whitespace), and the code will execute correctly. You can then use
130 whitespace), and the code will execute correctly. You can then use
131 '%history -t' to see the translated history; this will give you the
131 '%history -t' to see the translated history; this will give you the
132 input after removal of all the leading prompts and whitespace, which
132 input after removal of all the leading prompts and whitespace, which
133 can be pasted back into an editor.
133 can be pasted back into an editor.
134
134
135 With these features, you can switch into this mode easily whenever you
135 With these features, you can switch into this mode easily whenever you
136 need to do testing and changes to doctests, without having to leave
136 need to do testing and changes to doctests, without having to leave
137 your existing IPython session.
137 your existing IPython session.
138 """
138 """
139
139
140 from IPython.utils.ipstruct import Struct
140 from IPython.utils.ipstruct import Struct
141
141
142 # Shorthands
142 # Shorthands
143 shell = self.shell
143 shell = self.shell
144 disp_formatter = self.shell.display_formatter
144 disp_formatter = self.shell.display_formatter
145 ptformatter = disp_formatter.formatters['text/plain']
145 ptformatter = disp_formatter.formatters['text/plain']
146 # dstore is a data store kept in the instance metadata bag to track any
146 # dstore is a data store kept in the instance metadata bag to track any
147 # changes we make, so we can undo them later.
147 # changes we make, so we can undo them later.
148 dstore = shell.meta.setdefault('doctest_mode', Struct())
148 dstore = shell.meta.setdefault('doctest_mode', Struct())
149 save_dstore = dstore.setdefault
149 save_dstore = dstore.setdefault
150
150
151 # save a few values we'll need to recover later
151 # save a few values we'll need to recover later
152 mode = save_dstore('mode', False)
152 mode = save_dstore('mode', False)
153 save_dstore('rc_pprint', ptformatter.pprint)
153 save_dstore('rc_pprint', ptformatter.pprint)
154 save_dstore('rc_active_types',disp_formatter.active_types)
154 save_dstore('rc_active_types',disp_formatter.active_types)
155 save_dstore('xmode', shell.InteractiveTB.mode)
155 save_dstore('xmode', shell.InteractiveTB.mode)
156
156
157 if mode == False:
157 if mode == False:
158 # turn on
158 # turn on
159 ptformatter.pprint = False
159 ptformatter.pprint = False
160 disp_formatter.active_types = ['text/plain']
160 disp_formatter.active_types = ['text/plain']
161 shell.magic('xmode Plain')
161 shell.magic('xmode Plain')
162 else:
162 else:
163 # turn off
163 # turn off
164 ptformatter.pprint = dstore.rc_pprint
164 ptformatter.pprint = dstore.rc_pprint
165 disp_formatter.active_types = dstore.rc_active_types
165 disp_formatter.active_types = dstore.rc_active_types
166 shell.magic("xmode " + dstore.xmode)
166 shell.magic("xmode " + dstore.xmode)
167
167
168 # Store new mode and inform on console
168 # Store new mode and inform on console
169 dstore.mode = bool(1-int(mode))
169 dstore.mode = bool(1-int(mode))
170 mode_label = ['OFF','ON'][dstore.mode]
170 mode_label = ['OFF','ON'][dstore.mode]
171 print('Doctest mode is:', mode_label)
171 print('Doctest mode is:', mode_label)
172
172
173 # Send the payload back so that clients can modify their prompt display
173 # Send the payload back so that clients can modify their prompt display
174 payload = dict(
174 payload = dict(
175 source='IPython.kernel.zmq.zmqshell.ZMQInteractiveShell.doctest_mode',
175 source='IPython.kernel.zmq.zmqshell.ZMQInteractiveShell.doctest_mode',
176 mode=dstore.mode)
176 mode=dstore.mode)
177 shell.payload_manager.write_payload(payload)
177 shell.payload_manager.write_payload(payload)
178
178
179
179
180 _find_edit_target = CodeMagics._find_edit_target
180 _find_edit_target = CodeMagics._find_edit_target
181
181
182 @skip_doctest
182 @skip_doctest
183 @line_magic
183 @line_magic
184 def edit(self, parameter_s='', last_call=['','']):
184 def edit(self, parameter_s='', last_call=['','']):
185 """Bring up an editor and execute the resulting code.
185 """Bring up an editor and execute the resulting code.
186
186
187 Usage:
187 Usage:
188 %edit [options] [args]
188 %edit [options] [args]
189
189
190 %edit runs an external text editor. You will need to set the command for
190 %edit runs an external text editor. You will need to set the command for
191 this editor via the ``TerminalInteractiveShell.editor`` option in your
191 this editor via the ``TerminalInteractiveShell.editor`` option in your
192 configuration file before it will work.
192 configuration file before it will work.
193
193
194 This command allows you to conveniently edit multi-line code right in
194 This command allows you to conveniently edit multi-line code right in
195 your IPython session.
195 your IPython session.
196
196
197 If called without arguments, %edit opens up an empty editor with a
197 If called without arguments, %edit opens up an empty editor with a
198 temporary file and will execute the contents of this file when you
198 temporary file and will execute the contents of this file when you
199 close it (don't forget to save it!).
199 close it (don't forget to save it!).
200
200
201
201
202 Options:
202 Options:
203
203
204 -n <number>: open the editor at a specified line number. By default,
204 -n <number>: open the editor at a specified line number. By default,
205 the IPython editor hook uses the unix syntax 'editor +N filename', but
205 the IPython editor hook uses the unix syntax 'editor +N filename', but
206 you can configure this by providing your own modified hook if your
206 you can configure this by providing your own modified hook if your
207 favorite editor supports line-number specifications with a different
207 favorite editor supports line-number specifications with a different
208 syntax.
208 syntax.
209
209
210 -p: this will call the editor with the same data as the previous time
210 -p: this will call the editor with the same data as the previous time
211 it was used, regardless of how long ago (in your current session) it
211 it was used, regardless of how long ago (in your current session) it
212 was.
212 was.
213
213
214 -r: use 'raw' input. This option only applies to input taken from the
214 -r: use 'raw' input. This option only applies to input taken from the
215 user's history. By default, the 'processed' history is used, so that
215 user's history. By default, the 'processed' history is used, so that
216 magics are loaded in their transformed version to valid Python. If
216 magics are loaded in their transformed version to valid Python. If
217 this option is given, the raw input as typed at the command line is
216 this option is given, the raw input as typed at the command line is
218 used instead. When you exit the editor, it will be executed by
218 used instead. When you exit the editor, it will be executed by
219 IPython's own processor.
219 IPython's own processor.
220
220
221 -x: do not execute the edited code immediately upon exit. This is
221 -x: do not execute the edited code immediately upon exit. This is
222 mainly useful if you are editing programs which need to be called with
222 mainly useful if you are editing programs which need to be called with
223 command line arguments, which you can then do using %run.
223 command line arguments, which you can then do using %run.
224
224
225
225
226 Arguments:
226 Arguments:
227
227
228 If arguments are given, the following possibilities exist:
227 If arguments are given, the following possibilities exist:
229
229
230 - The arguments are numbers or pairs of colon-separated numbers (like
230 - The arguments are numbers or pairs of colon-separated numbers (like
231 1 4:8 9). These are interpreted as lines of previous input to be
231 1 4:8 9). These are interpreted as lines of previous input to be
232 loaded into the editor. The syntax is the same as the %macro command.
231 loaded into the editor. The syntax is the same as the %macro command.
233
233
234 - If the argument doesn't start with a number, it is evaluated as a
234 - If the argument doesn't start with a number, it is evaluated as a
235 variable and its contents loaded into the editor. You can thus edit
235 variable and its contents loaded into the editor. You can thus edit
236 any string which contains python code (including the result of
236 any string which contains python code (including the result of
237 previous edits).
237 previous edits).
238
238
239 - If the argument is the name of an object (other than a string),
239 - If the argument is the name of an object (other than a string),
240 IPython will try to locate the file where it was defined and open the
240 IPython will try to locate the file where it was defined and open the
241 editor at the point where it is defined. You can use `%edit function`
241 editor at the point where it is defined. You can use `%edit function`
242 to load an editor exactly at the point where 'function' is defined,
242 to load an editor exactly at the point where 'function' is defined,
243 edit it and have the file be executed automatically.
243 edit it and have the file be executed automatically.
244
244
245 If the object is a macro (see %macro for details), this opens up your
245 If the object is a macro (see %macro for details), this opens up your
246 specified editor with a temporary file containing the macro's data.
246 specified editor with a temporary file containing the macro's data.
247 Upon exit, the macro is reloaded with the contents of the file.
247 Upon exit, the macro is reloaded with the contents of the file.
248
248
249 Note: opening at an exact line is only supported under Unix, and some
249 Note: opening at an exact line is only supported under Unix, and some
250 editors (like kedit and gedit up to Gnome 2.8) do not understand the
250 editors (like kedit and gedit up to Gnome 2.8) do not understand the
251 '+NUMBER' parameter necessary for this feature. Good editors like
251 '+NUMBER' parameter necessary for this feature. Good editors like
252 (X)Emacs, vi, jed, pico and joe all do.
252 (X)Emacs, vi, jed, pico and joe all do.
253
253
254 - If the argument is not found as a variable, IPython will look for a
254 - If the argument is not found as a variable, IPython will look for a
255 file with that name (adding .py if necessary) and load it into the
255 file with that name (adding .py if necessary) and load it into the
256 editor. It will execute its contents with execfile() when you exit,
256 editor. It will execute its contents with execfile() when you exit,
257 loading any code in the file into your interactive namespace.
257 loading any code in the file into your interactive namespace.
258
258
259 After executing your code, %edit will return as output the code you
259 After executing your code, %edit will return as output the code you
260 typed in the editor (except when it was an existing file). This way
260 typed in the editor (except when it was an existing file). This way
261 you can reload the code in further invocations of %edit as a variable,
261 you can reload the code in further invocations of %edit as a variable,
262 via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of
262 via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of
263 the output.
263 the output.
264
264
265 Note that %edit is also available through the alias %ed.
265 Note that %edit is also available through the alias %ed.
266
266
267 This is an example of creating a simple function inside the editor and
267 This is an example of creating a simple function inside the editor and
268 then modifying it. First, start up the editor:
268 then modifying it. First, start up the editor:
269
269
270 In [1]: ed
270 In [1]: ed
271 Editing... done. Executing edited code...
271 Editing... done. Executing edited code...
272 Out[1]: 'def foo():\n print "foo() was defined in an editing session"\n'
271 Out[1]: 'def foo():\n print "foo() was defined in an editing session"\n'
273
273
274 We can then call the function foo():
274 We can then call the function foo():
275
275
276 In [2]: foo()
276 In [2]: foo()
277 foo() was defined in an editing session
277 foo() was defined in an editing session
278
278
279 Now we edit foo. IPython automatically loads the editor with the
279 Now we edit foo. IPython automatically loads the editor with the
280 (temporary) file where foo() was previously defined:
280 (temporary) file where foo() was previously defined:
281
281
282 In [3]: ed foo
282 In [3]: ed foo
283 Editing... done. Executing edited code...
283 Editing... done. Executing edited code...
284
284
285 And if we call foo() again we get the modified version:
285 And if we call foo() again we get the modified version:
286
286
287 In [4]: foo()
287 In [4]: foo()
288 foo() has now been changed!
288 foo() has now been changed!
289
289
290 Here is an example of how to edit a code snippet successive
290 Here is an example of how to edit a code snippet successive
291 times. First we call the editor:
291 times. First we call the editor:
292
292
293 In [5]: ed
293 In [5]: ed
294 Editing... done. Executing edited code...
294 Editing... done. Executing edited code...
295 hello
295 hello
296 Out[5]: "print 'hello'\n"
296 Out[5]: "print 'hello'\n"
297
297
298 Now we call it again with the previous output (stored in _):
298 Now we call it again with the previous output (stored in _):
299
299
300 In [6]: ed _
300 In [6]: ed _
301 Editing... done. Executing edited code...
301 Editing... done. Executing edited code...
302 hello world
302 hello world
303 Out[6]: "print 'hello world'\n"
303 Out[6]: "print 'hello world'\n"
304
304
305 Now we call it with the output #8 (stored in _8, also as Out[8]):
305 Now we call it with the output #8 (stored in _8, also as Out[8]):
306
306
307 In [7]: ed _8
307 In [7]: ed _8
308 Editing... done. Executing edited code...
308 Editing... done. Executing edited code...
309 hello again
309 hello again
310 Out[7]: "print 'hello again'\n"
310 Out[7]: "print 'hello again'\n"
311 """
311 """
312
312
313 opts,args = self.parse_options(parameter_s,'prn:')
313 opts,args = self.parse_options(parameter_s,'prn:')
314
314
315 try:
315 try:
316 filename, lineno, _ = CodeMagics._find_edit_target(self.shell, args, opts, last_call)
316 filename, lineno, _ = CodeMagics._find_edit_target(self.shell, args, opts, last_call)
317 except MacroToEdit as e:
317 except MacroToEdit as e:
318 # TODO: Implement macro editing over 2 processes.
318 # TODO: Implement macro editing over 2 processes.
319 print("Macro editing not yet implemented in 2-process model.")
319 print("Macro editing not yet implemented in 2-process model.")
320 return
320 return
321
321
322 # Make sure we send to the client an absolute path, in case the working
322 # Make sure we send to the client an absolute path, in case the working
323 # directory of client and kernel don't match
323 # directory of client and kernel don't match
324 filename = os.path.abspath(filename)
324 filename = os.path.abspath(filename)
325
325
326 payload = {
326 payload = {
327 'source' : 'IPython.kernel.zmq.zmqshell.ZMQInteractiveShell.edit_magic',
327 'source' : 'IPython.kernel.zmq.zmqshell.ZMQInteractiveShell.edit_magic',
328 'filename' : filename,
328 'filename' : filename,
329 'line_number' : lineno
329 'line_number' : lineno
330 }
330 }
331 self.shell.payload_manager.write_payload(payload)
331 self.shell.payload_manager.write_payload(payload)
332
332
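# Editorial sketch (illustration only, not part of this diff): how a frontend
# might act on the 'edit_magic' payload written above. The handler name and the
# launch_editor callable are hypothetical; real clients read payloads from the
# 'payload' list in the execute_reply message content.
def _handle_edit_payloads(reply_content, launch_editor):
    """Open an editor for every edit_magic payload in an execute_reply."""
    for pl in reply_content.get('payload', []):
        if pl.get('source', '').endswith('edit_magic'):
            # the filename is absolute (see the os.path.abspath call above)
            launch_editor(pl['filename'], pl.get('line_number'))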
333 # A few magics that are adapted to the specifics of using pexpect and a
333 # A few magics that are adapted to the specifics of using pexpect and a
334 # remote terminal
334 # remote terminal
335
335
336 @line_magic
336 @line_magic
337 def clear(self, arg_s):
337 def clear(self, arg_s):
338 """Clear the terminal."""
338 """Clear the terminal."""
339 if os.name == 'posix':
339 if os.name == 'posix':
340 self.shell.system("clear")
340 self.shell.system("clear")
341 else:
341 else:
342 self.shell.system("cls")
342 self.shell.system("cls")
343
343
344 if os.name == 'nt':
344 if os.name == 'nt':
345 # This is the usual name in windows
345 # This is the usual name in windows
346 cls = line_magic('cls')(clear)
346 cls = line_magic('cls')(clear)
347
347
348 # Terminal pagers won't work over pexpect, but we do have our own pager
348 # Terminal pagers won't work over pexpect, but we do have our own pager
349
349
350 @line_magic
350 @line_magic
351 def less(self, arg_s):
351 def less(self, arg_s):
352 """Show a file through the pager.
352 """Show a file through the pager.
353
353
354 Files ending in .py are syntax-highlighted."""
354 Files ending in .py are syntax-highlighted."""
355 if not arg_s:
355 if not arg_s:
356 raise UsageError('Missing filename.')
356 raise UsageError('Missing filename.')
357
357
358 cont = open(arg_s).read()
358 cont = open(arg_s).read()
359 if arg_s.endswith('.py'):
359 if arg_s.endswith('.py'):
360 cont = self.shell.pycolorize(openpy.read_py_file(arg_s, skip_encoding_cookie=False))
360 cont = self.shell.pycolorize(openpy.read_py_file(arg_s, skip_encoding_cookie=False))
361 else:
361 else:
362 cont = open(arg_s).read()
362 cont = open(arg_s).read()
363 page.page(cont)
363 page.page(cont)
364
364
365 more = line_magic('more')(less)
365 more = line_magic('more')(less)
366
366
367 # Man calls a pager, so we also need to redefine it
367 # Man calls a pager, so we also need to redefine it
368 if os.name == 'posix':
368 if os.name == 'posix':
369 @line_magic
369 @line_magic
370 def man(self, arg_s):
370 def man(self, arg_s):
371 """Find the man page for the given command and display in pager."""
371 """Find the man page for the given command and display in pager."""
372 page.page(self.shell.getoutput('man %s | col -b' % arg_s,
372 page.page(self.shell.getoutput('man %s | col -b' % arg_s,
373 split=False))
373 split=False))
374
374
375 @line_magic
375 @line_magic
376 def connect_info(self, arg_s):
376 def connect_info(self, arg_s):
377 """Print information for connecting other clients to this kernel
377 """Print information for connecting other clients to this kernel
378
378
379 It will print the contents of this session's connection file, as well as
379 It will print the contents of this session's connection file, as well as
380 shortcuts for local clients.
380 shortcuts for local clients.
381
381
382 In the simplest case, when called from the most recently launched kernel,
382 In the simplest case, when called from the most recently launched kernel,
383 secondary clients can be connected simply with:
383 secondary clients can be connected simply with:
384
384
385 $> ipython <app> --existing
385 $> ipython <app> --existing
386
386
387 """
387 """
388
388
389 from IPython.core.application import BaseIPythonApplication as BaseIPApp
389 from IPython.core.application import BaseIPythonApplication as BaseIPApp
390
390
391 if BaseIPApp.initialized():
391 if BaseIPApp.initialized():
392 app = BaseIPApp.instance()
392 app = BaseIPApp.instance()
393 security_dir = app.profile_dir.security_dir
393 security_dir = app.profile_dir.security_dir
394 profile = app.profile
394 profile = app.profile
395 else:
395 else:
396 profile = 'default'
396 profile = 'default'
397 security_dir = ''
397 security_dir = ''
398
398
399 try:
399 try:
400 connection_file = get_connection_file()
400 connection_file = get_connection_file()
401 info = get_connection_info(unpack=False)
401 info = get_connection_info(unpack=False)
402 except Exception as e:
402 except Exception as e:
403 error("Could not get connection info: %r" % e)
403 error("Could not get connection info: %r" % e)
404 return
404 return
405
405
406 # add profile flag for non-default profile
406 # add profile flag for non-default profile
407 profile_flag = "--profile %s" % profile if profile != 'default' else ""
407 profile_flag = "--profile %s" % profile if profile != 'default' else ""
408
408
409 # if it's in the security dir, truncate to basename
409 # if it's in the security dir, truncate to basename
410 if security_dir == os.path.dirname(connection_file):
410 if security_dir == os.path.dirname(connection_file):
411 connection_file = os.path.basename(connection_file)
411 connection_file = os.path.basename(connection_file)
412
412
413
413
414 print (info + '\n')
414 print (info + '\n')
415 print ("Paste the above JSON into a file, and connect with:\n"
415 print ("Paste the above JSON into a file, and connect with:\n"
416 " $> ipython <app> --existing <file>\n"
416 " $> ipython <app> --existing <file>\n"
417 "or, if you are local, you can connect with just:\n"
417 "or, if you are local, you can connect with just:\n"
418 " $> ipython <app> --existing {0} {1}\n"
418 " $> ipython <app> --existing {0} {1}\n"
419 "or even just:\n"
419 "or even just:\n"
420 " $> ipython <app> --existing {1}\n"
420 " $> ipython <app> --existing {1}\n"
421 "if this is the most recent IPython session you have started.".format(
421 "if this is the most recent IPython session you have started.".format(
422 connection_file, profile_flag
422 connection_file, profile_flag
423 )
423 )
424 )
424 )
425
425
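# Editorial sketch (illustration only, not part of this diff): the same details
# that %connect_info prints can be read programmatically with the helpers used
# above; the import path below is an assumption and may differ between versions.
def _show_connection_info():
    from IPython.kernel.connect import get_connection_file, get_connection_info
    print(get_connection_file())              # path to this kernel's connection JSON file
    print(get_connection_info(unpack=False))  # the JSON contents, as a string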
426 @line_magic
426 @line_magic
427 def qtconsole(self, arg_s):
427 def qtconsole(self, arg_s):
428 """Open a qtconsole connected to this kernel.
428 """Open a qtconsole connected to this kernel.
429
429
430 Useful for connecting a qtconsole to running notebooks, for better
430 Useful for connecting a qtconsole to running notebooks, for better
431 debugging.
431 debugging.
432 """
432 """
433
433
434 # %qtconsole should imply bind_kernel for engines:
434 # %qtconsole should imply bind_kernel for engines:
435 try:
435 try:
436 from IPython.parallel import bind_kernel
436 from IPython.parallel import bind_kernel
437 except ImportError:
437 except ImportError:
438 # technically possible, because parallel has higher pyzmq min-version
438 # technically possible, because parallel has higher pyzmq min-version
439 pass
439 pass
440 else:
440 else:
441 bind_kernel()
441 bind_kernel()
442
442
443 try:
443 try:
444 p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix'))
444 p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix'))
445 except Exception as e:
445 except Exception as e:
446 error("Could not start qtconsole: %r" % e)
446 error("Could not start qtconsole: %r" % e)
447 return
447 return
448
448
449 @line_magic
449 @line_magic
450 def autosave(self, arg_s):
450 def autosave(self, arg_s):
451 """Set the autosave interval in the notebook (in seconds).
451 """Set the autosave interval in the notebook (in seconds).
452
452
453 The default value is 120, or two minutes.
453 The default value is 120, or two minutes.
454 ``%autosave 0`` will disable autosave.
454 ``%autosave 0`` will disable autosave.
455
455
456 This magic only has an effect when called from the notebook interface.
456 This magic only has an effect when called from the notebook interface.
457 It has no effect when called in a startup file.
457 It has no effect when called in a startup file.
458 """
458 """
459
459
460 try:
460 try:
461 interval = int(arg_s)
461 interval = int(arg_s)
462 except ValueError:
462 except ValueError:
463 raise UsageError("%%autosave requires an integer, got %r" % arg_s)
463 raise UsageError("%%autosave requires an integer, got %r" % arg_s)
464
464
465 # javascript wants milliseconds
465 # javascript wants milliseconds
466 milliseconds = 1000 * interval
466 milliseconds = 1000 * interval
467 display(Javascript("IPython.notebook.set_autosave_interval(%i)" % milliseconds),
467 display(Javascript("IPython.notebook.set_autosave_interval(%i)" % milliseconds),
468 include=['application/javascript']
468 include=['application/javascript']
469 )
469 )
470 if interval:
470 if interval:
471 print("Autosaving every %i seconds" % interval)
471 print("Autosaving every %i seconds" % interval)
472 else:
472 else:
473 print("Autosave disabled")
473 print("Autosave disabled")
474
474
475
475
476 class ZMQInteractiveShell(InteractiveShell):
476 class ZMQInteractiveShell(InteractiveShell):
477 """A subclass of InteractiveShell for ZMQ."""
477 """A subclass of InteractiveShell for ZMQ."""
478
478
479 displayhook_class = Type(ZMQShellDisplayHook)
479 displayhook_class = Type(ZMQShellDisplayHook)
480 display_pub_class = Type(ZMQDisplayPublisher)
480 display_pub_class = Type(ZMQDisplayPublisher)
481 data_pub_class = Type(ZMQDataPublisher)
481 data_pub_class = Type(ZMQDataPublisher)
482
482
483 # Override the traitlet in the parent class, because there's no point using
483 # Override the traitlet in the parent class, because there's no point using
484 # readline for the kernel. Can be removed when the readline code is moved
484 # readline for the kernel. Can be removed when the readline code is moved
485 # to the terminal frontend.
485 # to the terminal frontend.
486 colors_force = CBool(True)
486 colors_force = CBool(True)
487 readline_use = CBool(False)
487 readline_use = CBool(False)
488 # autoindent has no meaning in a zmqshell, and attempting to enable it
488 # autoindent has no meaning in a zmqshell, and attempting to enable it
489 # will print a warning in the absence of readline.
489 # will print a warning in the absence of readline.
490 autoindent = CBool(False)
490 autoindent = CBool(False)
491
491
492 exiter = Instance(ZMQExitAutocall)
492 exiter = Instance(ZMQExitAutocall)
493 def _exiter_default(self):
493 def _exiter_default(self):
494 return ZMQExitAutocall(self)
494 return ZMQExitAutocall(self)
495
495
496 def _exit_now_changed(self, name, old, new):
496 def _exit_now_changed(self, name, old, new):
497 """stop eventloop when exit_now fires"""
497 """stop eventloop when exit_now fires"""
498 if new:
498 if new:
499 loop = ioloop.IOLoop.instance()
499 loop = ioloop.IOLoop.instance()
500 loop.add_timeout(time.time()+0.1, loop.stop)
500 loop.add_timeout(time.time()+0.1, loop.stop)
501
501
502 keepkernel_on_exit = None
502 keepkernel_on_exit = None
503
503
504 # Over ZeroMQ, GUI control isn't done with PyOS_InputHook as there is no
504 # Over ZeroMQ, GUI control isn't done with PyOS_InputHook as there is no
505 # interactive input being read; we provide event loop support in ipkernel
505 # interactive input being read; we provide event loop support in ipkernel
506 from .eventloops import enable_gui
506 from .eventloops import enable_gui
507 enable_gui = staticmethod(enable_gui)
507 enable_gui = staticmethod(enable_gui)
508
508
509 def init_environment(self):
509 def init_environment(self):
510 """Configure the user's environment.
510 """Configure the user's environment.
511
511
512 """
512 """
513 env = os.environ
513 env = os.environ
514 # These two ensure 'ls' produces nice coloring on BSD-derived systems
514 # These two ensure 'ls' produces nice coloring on BSD-derived systems
515 env['TERM'] = 'xterm-color'
515 env['TERM'] = 'xterm-color'
516 env['CLICOLOR'] = '1'
516 env['CLICOLOR'] = '1'
517 # Since normal pagers don't work at all (over pexpect we don't have
517 # Since normal pagers don't work at all (over pexpect we don't have
518 # single-key control of the subprocess), try to disable paging in
518 # single-key control of the subprocess), try to disable paging in
519 # subprocesses as much as possible.
519 # subprocesses as much as possible.
520 env['PAGER'] = 'cat'
520 env['PAGER'] = 'cat'
521 env['GIT_PAGER'] = 'cat'
521 env['GIT_PAGER'] = 'cat'
522
522
523 # And install the payload version of page.
523 # And install the payload version of page.
524 install_payload_page()
524 install_payload_page()
525
525
526 def auto_rewrite_input(self, cmd):
526 def auto_rewrite_input(self, cmd):
527 """Called to show the auto-rewritten input for autocall and friends.
527 """Called to show the auto-rewritten input for autocall and friends.
528
528
529 FIXME: this payload is currently not correctly processed by the
529 FIXME: this payload is currently not correctly processed by the
530 frontend.
530 frontend.
531 """
531 """
532 new = self.prompt_manager.render('rewrite') + cmd
532 new = self.prompt_manager.render('rewrite') + cmd
533 payload = dict(
533 payload = dict(
534 source='IPython.kernel.zmq.zmqshell.ZMQInteractiveShell.auto_rewrite_input',
534 source='IPython.kernel.zmq.zmqshell.ZMQInteractiveShell.auto_rewrite_input',
535 transformed_input=new,
535 transformed_input=new,
536 )
536 )
537 self.payload_manager.write_payload(payload)
537 self.payload_manager.write_payload(payload)
538
538
539 def ask_exit(self):
539 def ask_exit(self):
540 """Engage the exit actions."""
540 """Engage the exit actions."""
541 self.exit_now = True
541 self.exit_now = True
542 payload = dict(
542 payload = dict(
543 source='IPython.kernel.zmq.zmqshell.ZMQInteractiveShell.ask_exit',
543 source='IPython.kernel.zmq.zmqshell.ZMQInteractiveShell.ask_exit',
544 exit=True,
544 exit=True,
545 keepkernel=self.keepkernel_on_exit,
545 keepkernel=self.keepkernel_on_exit,
546 )
546 )
547 self.payload_manager.write_payload(payload)
547 self.payload_manager.write_payload(payload)
548
548
549 def _showtraceback(self, etype, evalue, stb):
549 def _showtraceback(self, etype, evalue, stb):
550
550
551 exc_content = {
551 exc_content = {
552 u'traceback' : stb,
552 u'traceback' : stb,
553 u'ename' : unicode(etype.__name__),
553 u'ename' : unicode(etype.__name__),
554 u'evalue' : py3compat.safe_unicode(evalue),
554 u'evalue' : py3compat.safe_unicode(evalue),
555 }
555 }
556
556
557 dh = self.displayhook
557 dh = self.displayhook
558 # Send exception info over pub socket for other clients than the caller
558 # Send exception info over pub socket for other clients than the caller
559 # to pick up
559 # to pick up
560 topic = None
560 topic = None
561 if dh.topic:
561 if dh.topic:
562 topic = dh.topic.replace(b'pyout', b'pyerr')
562 topic = dh.topic.replace(b'pyout', b'pyerr')
563
563
564 exc_msg = dh.session.send(dh.pub_socket, u'pyerr', json_clean(exc_content), dh.parent_header, ident=topic)
564 exc_msg = dh.session.send(dh.pub_socket, u'pyerr', json_clean(exc_content), dh.parent_header, ident=topic)
565
565
566 # FIXME - Hack: store exception info in shell object. Right now, the
566 # FIXME - Hack: store exception info in shell object. Right now, the
567 # caller is reading this info after the fact; we need to fix this logic
567 # caller is reading this info after the fact; we need to fix this logic
568 # to remove this hack. Even uglier, we need to store the error status
568 # to remove this hack. Even uglier, we need to store the error status
569 # here, because in the main loop, the logic that sets it is being
569 # here, because in the main loop, the logic that sets it is being
570 # skipped because runlines swallows the exceptions.
570 # skipped because runlines swallows the exceptions.
571 exc_content[u'status'] = u'error'
571 exc_content[u'status'] = u'error'
572 self._reply_content = exc_content
572 self._reply_content = exc_content
573 # /FIXME
573 # /FIXME
574
574
575 return exc_content
575 return exc_content
576
576
577 def set_next_input(self, text):
577 def set_next_input(self, text):
578 """Send the specified text to the frontend to be presented at the next
578 """Send the specified text to the frontend to be presented at the next
579 input cell."""
579 input cell."""
580 payload = dict(
580 payload = dict(
581 source='IPython.kernel.zmq.zmqshell.ZMQInteractiveShell.set_next_input',
581 source='IPython.kernel.zmq.zmqshell.ZMQInteractiveShell.set_next_input',
582 text=text
582 text=text
583 )
583 )
584 self.payload_manager.write_payload(payload)
584 self.payload_manager.write_payload(payload)
585
585
586 #-------------------------------------------------------------------------
586 #-------------------------------------------------------------------------
587 # Things related to magics
587 # Things related to magics
588 #-------------------------------------------------------------------------
588 #-------------------------------------------------------------------------
589
589
590 def init_magics(self):
590 def init_magics(self):
591 super(ZMQInteractiveShell, self).init_magics()
591 super(ZMQInteractiveShell, self).init_magics()
592 self.register_magics(KernelMagics)
592 self.register_magics(KernelMagics)
593 self.magics_manager.register_alias('ed', 'edit')
593 self.magics_manager.register_alias('ed', 'edit')
594
594
595
595
596
596
597 InteractiveShellABC.register(ZMQInteractiveShell)
597 InteractiveShellABC.register(ZMQInteractiveShell)
@@ -1,353 +1,354 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """Pickle related utilities. Perhaps this should be called 'can'."""
3 """Pickle related utilities. Perhaps this should be called 'can'."""
4
4
5 __docformat__ = "restructuredtext en"
5 __docformat__ = "restructuredtext en"
6
6
7 #-------------------------------------------------------------------------------
7 #-------------------------------------------------------------------------------
8 # Copyright (C) 2008-2011 The IPython Development Team
8 # Copyright (C) 2008-2011 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-------------------------------------------------------------------------------
12 #-------------------------------------------------------------------------------
13
13
14 #-------------------------------------------------------------------------------
14 #-------------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-------------------------------------------------------------------------------
16 #-------------------------------------------------------------------------------
17
17
18 import copy
18 import copy
19 import logging
19 import logging
20 import sys
20 import sys
21 from types import FunctionType
21 from types import FunctionType
22
22
23 try:
23 try:
24 import cPickle as pickle
24 import cPickle as pickle
25 except ImportError:
25 except ImportError:
26 import pickle
26 import pickle
27
27
28 try:
28 try:
29 import numpy
29 import numpy
30 except:
30 except:
31 numpy = None
31 numpy = None
32
32
33 import codeutil # This registers a hook when it's imported
33 import py3compat
34 import py3compat
34 from importstring import import_item
35 from importstring import import_item
35
36
36 from IPython.config import Application
37 from IPython.config import Application
37
38
38 if py3compat.PY3:
39 if py3compat.PY3:
39 buffer = memoryview
40 buffer = memoryview
40 class_type = type
41 class_type = type
41 else:
42 else:
42 from types import ClassType
43 from types import ClassType
43 class_type = (type, ClassType)
44 class_type = (type, ClassType)
44
45
45 #-------------------------------------------------------------------------------
46 #-------------------------------------------------------------------------------
46 # Classes
47 # Classes
47 #-------------------------------------------------------------------------------
48 #-------------------------------------------------------------------------------
48
49
49
50
50 class CannedObject(object):
51 class CannedObject(object):
51 def __init__(self, obj, keys=[], hook=None):
52 def __init__(self, obj, keys=[], hook=None):
52 """can an object for safe pickling
53 """can an object for safe pickling
53
54
54 Parameters
55 Parameters
55 ==========
56 ==========
56
57
57 obj:
58 obj:
58 The object to be canned
59 The object to be canned
59 keys: list (optional)
60 keys: list (optional)
60 list of attribute names that will be explicitly canned / uncanned
61 list of attribute names that will be explicitly canned / uncanned
61 hook: callable (optional)
62 hook: callable (optional)
62 An optional extra callable,
63 An optional extra callable,
63 which can do additional processing of the uncanned object.
64 which can do additional processing of the uncanned object.
64
65
65 large data may be offloaded into the buffers list,
66 large data may be offloaded into the buffers list,
66 used for zero-copy transfers.
67 used for zero-copy transfers.
67 """
68 """
68 self.keys = keys
69 self.keys = keys
69 self.obj = copy.copy(obj)
70 self.obj = copy.copy(obj)
70 self.hook = can(hook)
71 self.hook = can(hook)
71 for key in keys:
72 for key in keys:
72 setattr(self.obj, key, can(getattr(obj, key)))
73 setattr(self.obj, key, can(getattr(obj, key)))
73
74
74 self.buffers = []
75 self.buffers = []
75
76
76 def get_object(self, g=None):
77 def get_object(self, g=None):
77 if g is None:
78 if g is None:
78 g = {}
79 g = {}
79 obj = self.obj
80 obj = self.obj
80 for key in self.keys:
81 for key in self.keys:
81 setattr(obj, key, uncan(getattr(obj, key), g))
82 setattr(obj, key, uncan(getattr(obj, key), g))
82
83
83 if self.hook:
84 if self.hook:
84 self.hook = uncan(self.hook, g)
85 self.hook = uncan(self.hook, g)
85 self.hook(obj, g)
86 self.hook(obj, g)
86 return self.obj
87 return self.obj
87
88
88
89
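# Editorial sketch (illustration only, not part of this diff): using the 'keys'
# argument to can an attribute that would not pickle on its own. _Task is a
# hypothetical container; can()/uncan() are defined further down in this module.
def _canned_keys_example():
    class _Task(object):
        pass
    t = _Task()
    t.f = lambda x: x * 2
    c = CannedObject(t, keys=('f',))       # t.f is replaced by a CannedFunction
    restored = c.get_object(g=globals())   # 'f' is uncanned back into a function
    return restored.f(21)                  # -> 42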
89 class Reference(CannedObject):
90 class Reference(CannedObject):
90 """object for wrapping a remote reference by name."""
91 """object for wrapping a remote reference by name."""
91 def __init__(self, name):
92 def __init__(self, name):
92 if not isinstance(name, basestring):
93 if not isinstance(name, basestring):
93 raise TypeError("illegal name: %r"%name)
94 raise TypeError("illegal name: %r"%name)
94 self.name = name
95 self.name = name
95 self.buffers = []
96 self.buffers = []
96
97
97 def __repr__(self):
98 def __repr__(self):
98 return "<Reference: %r>"%self.name
99 return "<Reference: %r>"%self.name
99
100
100 def get_object(self, g=None):
101 def get_object(self, g=None):
101 if g is None:
102 if g is None:
102 g = {}
103 g = {}
103
104
104 return eval(self.name, g)
105 return eval(self.name, g)
105
106
106
107
107 class CannedFunction(CannedObject):
108 class CannedFunction(CannedObject):
108
109
109 def __init__(self, f):
110 def __init__(self, f):
110 self._check_type(f)
111 self._check_type(f)
111 self.code = f.func_code
112 self.code = f.func_code
112 if f.func_defaults:
113 if f.func_defaults:
113 self.defaults = [ can(fd) for fd in f.func_defaults ]
114 self.defaults = [ can(fd) for fd in f.func_defaults ]
114 else:
115 else:
115 self.defaults = None
116 self.defaults = None
116 self.module = f.__module__ or '__main__'
117 self.module = f.__module__ or '__main__'
117 self.__name__ = f.__name__
118 self.__name__ = f.__name__
118 self.buffers = []
119 self.buffers = []
119
120
120 def _check_type(self, obj):
121 def _check_type(self, obj):
121 assert isinstance(obj, FunctionType), "Not a function type"
122 assert isinstance(obj, FunctionType), "Not a function type"
122
123
123 def get_object(self, g=None):
124 def get_object(self, g=None):
124 # try to load function back into its module:
125 # try to load function back into its module:
125 if not self.module.startswith('__'):
126 if not self.module.startswith('__'):
126 __import__(self.module)
127 __import__(self.module)
127 g = sys.modules[self.module].__dict__
128 g = sys.modules[self.module].__dict__
128
129
129 if g is None:
130 if g is None:
130 g = {}
131 g = {}
131 if self.defaults:
132 if self.defaults:
132 defaults = tuple(uncan(cfd, g) for cfd in self.defaults)
133 defaults = tuple(uncan(cfd, g) for cfd in self.defaults)
133 else:
134 else:
134 defaults = None
135 defaults = None
135 newFunc = FunctionType(self.code, g, self.__name__, defaults)
136 newFunc = FunctionType(self.code, g, self.__name__, defaults)
136 return newFunc
137 return newFunc
137
138
138 class CannedClass(CannedObject):
139 class CannedClass(CannedObject):
139
140
140 def __init__(self, cls):
141 def __init__(self, cls):
141 self._check_type(cls)
142 self._check_type(cls)
142 self.name = cls.__name__
143 self.name = cls.__name__
143 self.old_style = not isinstance(cls, type)
144 self.old_style = not isinstance(cls, type)
144 self._canned_dict = {}
145 self._canned_dict = {}
145 for k,v in cls.__dict__.items():
146 for k,v in cls.__dict__.items():
146 if k not in ('__weakref__', '__dict__'):
147 if k not in ('__weakref__', '__dict__'):
147 self._canned_dict[k] = can(v)
148 self._canned_dict[k] = can(v)
148 if self.old_style:
149 if self.old_style:
149 mro = []
150 mro = []
150 else:
151 else:
151 mro = cls.mro()
152 mro = cls.mro()
152
153
153 self.parents = [ can(c) for c in mro[1:] ]
154 self.parents = [ can(c) for c in mro[1:] ]
154 self.buffers = []
155 self.buffers = []
155
156
156 def _check_type(self, obj):
157 def _check_type(self, obj):
157 assert isinstance(obj, class_type), "Not a class type"
158 assert isinstance(obj, class_type), "Not a class type"
158
159
159 def get_object(self, g=None):
160 def get_object(self, g=None):
160 parents = tuple(uncan(p, g) for p in self.parents)
161 parents = tuple(uncan(p, g) for p in self.parents)
161 return type(self.name, parents, uncan_dict(self._canned_dict, g=g))
162 return type(self.name, parents, uncan_dict(self._canned_dict, g=g))
162
163
163 class CannedArray(CannedObject):
164 class CannedArray(CannedObject):
164 def __init__(self, obj):
165 def __init__(self, obj):
165 self.shape = obj.shape
166 self.shape = obj.shape
166 self.dtype = obj.dtype.descr if obj.dtype.fields else obj.dtype.str
167 self.dtype = obj.dtype.descr if obj.dtype.fields else obj.dtype.str
167 if sum(obj.shape) == 0:
168 if sum(obj.shape) == 0:
168 # just pickle it
169 # just pickle it
169 self.buffers = [pickle.dumps(obj, -1)]
170 self.buffers = [pickle.dumps(obj, -1)]
170 else:
171 else:
171 # ensure contiguous
172 # ensure contiguous
172 obj = numpy.ascontiguousarray(obj, dtype=None)
173 obj = numpy.ascontiguousarray(obj, dtype=None)
173 self.buffers = [buffer(obj)]
174 self.buffers = [buffer(obj)]
174
175
175 def get_object(self, g=None):
176 def get_object(self, g=None):
176 data = self.buffers[0]
177 data = self.buffers[0]
177 if sum(self.shape) == 0:
178 if sum(self.shape) == 0:
178 # no shape, we just pickled it
179 # no shape, we just pickled it
179 return pickle.loads(data)
180 return pickle.loads(data)
180 else:
181 else:
181 return numpy.frombuffer(data, dtype=self.dtype).reshape(self.shape)
182 return numpy.frombuffer(data, dtype=self.dtype).reshape(self.shape)
182
183
183
184
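# Editorial sketch (illustration only, not part of this diff): CannedArray keeps
# the raw array memory in self.buffers[0] (zero-copy via buffer/memoryview) and
# only the shape and dtype as picklable metadata.
def _canned_array_example():
    import numpy as np
    a = np.arange(6, dtype='float64').reshape(2, 3)
    c = CannedArray(a)
    b = c.get_object()        # rebuilt from the buffer: same values, same dtype
    assert (a == b).all()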
184 class CannedBytes(CannedObject):
185 class CannedBytes(CannedObject):
185 wrap = bytes
186 wrap = bytes
186 def __init__(self, obj):
187 def __init__(self, obj):
187 self.buffers = [obj]
188 self.buffers = [obj]
188
189
189 def get_object(self, g=None):
190 def get_object(self, g=None):
190 data = self.buffers[0]
191 data = self.buffers[0]
191 return self.wrap(data)
192 return self.wrap(data)
192
193
193 class CannedBuffer(CannedBytes):
194 class CannedBuffer(CannedBytes):
194 wrap = buffer
195 wrap = buffer
195
196
196 #-------------------------------------------------------------------------------
197 #-------------------------------------------------------------------------------
197 # Functions
198 # Functions
198 #-------------------------------------------------------------------------------
199 #-------------------------------------------------------------------------------
199
200
200 def _logger():
201 def _logger():
201 """get the logger for the current Application
202 """get the logger for the current Application
202
203
203 the root logger will be used if no Application is running
204 the root logger will be used if no Application is running
204 """
205 """
205 if Application.initialized():
206 if Application.initialized():
206 logger = Application.instance().log
207 logger = Application.instance().log
207 else:
208 else:
208 logger = logging.getLogger()
209 logger = logging.getLogger()
209 if not logger.handlers:
210 if not logger.handlers:
210 logging.basicConfig()
211 logging.basicConfig()
211
212
212 return logger
213 return logger
213
214
214 def _import_mapping(mapping, original=None):
215 def _import_mapping(mapping, original=None):
215 """import any string-keys in a type mapping
216 """import any string-keys in a type mapping
216
217
217 """
218 """
218 log = _logger()
219 log = _logger()
219 log.debug("Importing canning map")
220 log.debug("Importing canning map")
220 for key,value in mapping.items():
221 for key,value in mapping.items():
221 if isinstance(key, basestring):
222 if isinstance(key, basestring):
222 try:
223 try:
223 cls = import_item(key)
224 cls = import_item(key)
224 except Exception:
225 except Exception:
225 if original and key not in original:
226 if original and key not in original:
226 # only message on user-added classes
227 # only message on user-added classes
227 log.error("canning class not importable: %r", key, exc_info=True)
228 log.error("canning class not importable: %r", key, exc_info=True)
228 mapping.pop(key)
229 mapping.pop(key)
229 else:
230 else:
230 mapping[cls] = mapping.pop(key)
231 mapping[cls] = mapping.pop(key)
231
232
232 def istype(obj, check):
233 def istype(obj, check):
233 """like isinstance(obj, check), but strict
234 """like isinstance(obj, check), but strict
234
235
235 This won't catch subclasses.
236 This won't catch subclasses.
236 """
237 """
237 if isinstance(check, tuple):
238 if isinstance(check, tuple):
238 for cls in check:
239 for cls in check:
239 if type(obj) is cls:
240 if type(obj) is cls:
240 return True
241 return True
241 return False
242 return False
242 else:
243 else:
243 return type(obj) is check
244 return type(obj) is check
244
245
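# Editorial sketch (illustration only, not part of this diff): istype() is
# deliberately stricter than isinstance(), so subclass instances fall through
# to other entries in the can/uncan maps below.
def _istype_example():
    # bool is a subclass of int: isinstance() matches, istype() does not
    assert isinstance(True, int) is True
    assert istype(True, int) is False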
245 def can(obj):
246 def can(obj):
246 """prepare an object for pickling"""
247 """prepare an object for pickling"""
247
248
248 import_needed = False
249 import_needed = False
249
250
250 for cls,canner in can_map.iteritems():
251 for cls,canner in can_map.iteritems():
251 if isinstance(cls, basestring):
252 if isinstance(cls, basestring):
252 import_needed = True
253 import_needed = True
253 break
254 break
254 elif istype(obj, cls):
255 elif istype(obj, cls):
255 return canner(obj)
256 return canner(obj)
256
257
257 if import_needed:
258 if import_needed:
258 # perform can_map imports, then try again
259 # perform can_map imports, then try again
259 # this will usually only happen once
260 # this will usually only happen once
260 _import_mapping(can_map, _original_can_map)
261 _import_mapping(can_map, _original_can_map)
261 return can(obj)
262 return can(obj)
262
263
263 return obj
264 return obj
264
265
265 def can_class(obj):
266 def can_class(obj):
266 if isinstance(obj, class_type) and obj.__module__ == '__main__':
267 if isinstance(obj, class_type) and obj.__module__ == '__main__':
267 return CannedClass(obj)
268 return CannedClass(obj)
268 else:
269 else:
269 return obj
270 return obj
270
271
271 def can_dict(obj):
272 def can_dict(obj):
272 """can the *values* of a dict"""
273 """can the *values* of a dict"""
273 if istype(obj, dict):
274 if istype(obj, dict):
274 newobj = {}
275 newobj = {}
275 for k, v in obj.iteritems():
276 for k, v in obj.iteritems():
276 newobj[k] = can(v)
277 newobj[k] = can(v)
277 return newobj
278 return newobj
278 else:
279 else:
279 return obj
280 return obj
280
281
281 sequence_types = (list, tuple, set)
282 sequence_types = (list, tuple, set)
282
283
283 def can_sequence(obj):
284 def can_sequence(obj):
284 """can the elements of a sequence"""
285 """can the elements of a sequence"""
285 if istype(obj, sequence_types):
286 if istype(obj, sequence_types):
286 t = type(obj)
287 t = type(obj)
287 return t([can(i) for i in obj])
288 return t([can(i) for i in obj])
288 else:
289 else:
289 return obj
290 return obj
290
291
291 def uncan(obj, g=None):
292 def uncan(obj, g=None):
292 """invert canning"""
293 """invert canning"""
293
294
294 import_needed = False
295 import_needed = False
295 for cls,uncanner in uncan_map.iteritems():
296 for cls,uncanner in uncan_map.iteritems():
296 if isinstance(cls, basestring):
297 if isinstance(cls, basestring):
297 import_needed = True
298 import_needed = True
298 break
299 break
299 elif isinstance(obj, cls):
300 elif isinstance(obj, cls):
300 return uncanner(obj, g)
301 return uncanner(obj, g)
301
302
302 if import_needed:
303 if import_needed:
303 # perform uncan_map imports, then try again
304 # perform uncan_map imports, then try again
304 # this will usually only happen once
305 # this will usually only happen once
305 _import_mapping(uncan_map, _original_uncan_map)
306 _import_mapping(uncan_map, _original_uncan_map)
306 return uncan(obj, g)
307 return uncan(obj, g)
307
308
308 return obj
309 return obj
309
310
310 def uncan_dict(obj, g=None):
311 def uncan_dict(obj, g=None):
311 if istype(obj, dict):
312 if istype(obj, dict):
312 newobj = {}
313 newobj = {}
313 for k, v in obj.iteritems():
314 for k, v in obj.iteritems():
314 newobj[k] = uncan(v,g)
315 newobj[k] = uncan(v,g)
315 return newobj
316 return newobj
316 else:
317 else:
317 return obj
318 return obj
318
319
319 def uncan_sequence(obj, g=None):
320 def uncan_sequence(obj, g=None):
320 if istype(obj, sequence_types):
321 if istype(obj, sequence_types):
321 t = type(obj)
322 t = type(obj)
322 return t([uncan(i,g) for i in obj])
323 return t([uncan(i,g) for i in obj])
323 else:
324 else:
324 return obj
325 return obj
325
326
326 def _uncan_dependent_hook(dep, g=None):
327 def _uncan_dependent_hook(dep, g=None):
327 dep.check_dependency()
328 dep.check_dependency()
328
329
329 def can_dependent(obj):
330 def can_dependent(obj):
330 return CannedObject(obj, keys=('f', 'df'), hook=_uncan_dependent_hook)
331 return CannedObject(obj, keys=('f', 'df'), hook=_uncan_dependent_hook)
331
332
332 #-------------------------------------------------------------------------------
333 #-------------------------------------------------------------------------------
333 # API dictionaries
334 # API dictionaries
334 #-------------------------------------------------------------------------------
335 #-------------------------------------------------------------------------------
335
336
336 # These dicts can be extended for custom serialization of new objects
337 # These dicts can be extended for custom serialization of new objects
337
338
338 can_map = {
339 can_map = {
339 'IPython.parallel.dependent' : can_dependent,
340 'IPython.parallel.dependent' : can_dependent,
340 'numpy.ndarray' : CannedArray,
341 'numpy.ndarray' : CannedArray,
341 FunctionType : CannedFunction,
342 FunctionType : CannedFunction,
342 bytes : CannedBytes,
343 bytes : CannedBytes,
343 buffer : CannedBuffer,
344 buffer : CannedBuffer,
344 class_type : can_class,
345 class_type : can_class,
345 }
346 }
346
347
347 uncan_map = {
348 uncan_map = {
348 CannedObject : lambda obj, g: obj.get_object(g),
349 CannedObject : lambda obj, g: obj.get_object(g),
349 }
350 }
350
351
351 # for use in _import_mapping:
352 # for use in _import_mapping:
352 _original_can_map = can_map.copy()
353 _original_can_map = can_map.copy()
353 _original_uncan_map = uncan_map.copy()
354 _original_uncan_map = uncan_map.copy()
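# Editorial sketch (illustration only, not part of this diff): extending the
# maps above for a custom type. Point and can_point are hypothetical names.
class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

def can_point(obj):
    # reuse the generic CannedObject machinery; both coordinates get canned
    return CannedObject(obj, keys=('x', 'y'))

can_map[Point] = can_point
# no uncan_map entry is needed: the default CannedObject entry already calls
# get_object() on the result when it is uncanned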