##// END OF EJS Templates
Remove unused imports in IPython.kernel
Thomas Kluyver
Show More
@@ -1,112 +1,111 b''
1 1 #-------------------------------------------------------------------------------
2 2 # Copyright (C) 2012 The IPython Development Team
3 3 #
4 4 # Distributed under the terms of the BSD License. The full license is in
5 5 # the file COPYING, distributed as part of this software.
6 6 #-------------------------------------------------------------------------------
7 7
8 8 #-----------------------------------------------------------------------------
9 9 # Imports
10 10 #-----------------------------------------------------------------------------
11 11 from __future__ import print_function
12 12
13 13 # Standard library imports
14 14 import unittest
15 15
16 16 # Local imports
17 17 from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
18 from IPython.kernel.inprocess.ipkernel import InProcessKernel
19 18 from IPython.kernel.inprocess.manager import InProcessKernelManager
20 19
21 20 #-----------------------------------------------------------------------------
22 21 # Test case
23 22 #-----------------------------------------------------------------------------
24 23
25 24 class InProcessKernelManagerTestCase(unittest.TestCase):
26 25
27 26 def test_interface(self):
28 27 """ Does the in-process kernel manager implement the basic KM interface?
29 28 """
30 29 km = InProcessKernelManager()
31 30 self.assert_(not km.has_kernel)
32 31
33 32 km.start_kernel()
34 33 self.assert_(km.has_kernel)
35 34 self.assert_(km.kernel is not None)
36 35
37 36 kc = BlockingInProcessKernelClient(kernel=km.kernel)
38 37 self.assert_(not kc.channels_running)
39 38
40 39 kc.start_channels()
41 40 self.assert_(kc.channels_running)
42 41
43 42 old_kernel = km.kernel
44 43 km.restart_kernel()
45 44 self.assert_(km.kernel is not None)
46 45 self.assertNotEquals(km.kernel, old_kernel)
47 46
48 47 km.shutdown_kernel()
49 48 self.assert_(not km.has_kernel)
50 49
51 50 self.assertRaises(NotImplementedError, km.interrupt_kernel)
52 51 self.assertRaises(NotImplementedError, km.signal_kernel, 9)
53 52
54 53 kc.stop_channels()
55 54 self.assert_(not kc.channels_running)
56 55
57 56 def test_execute(self):
58 57 """ Does executing code in an in-process kernel work?
59 58 """
60 59 km = InProcessKernelManager()
61 60 km.start_kernel()
62 61 kc = BlockingInProcessKernelClient(kernel=km.kernel)
63 62 kc.start_channels()
64 63 kc.execute('foo = 1')
65 64 self.assertEquals(km.kernel.shell.user_ns['foo'], 1)
66 65
67 66 def test_complete(self):
68 67 """ Does requesting completion from an in-process kernel work?
69 68 """
70 69 km = InProcessKernelManager()
71 70 km.start_kernel()
72 71 kc = BlockingInProcessKernelClient(kernel=km.kernel)
73 72 kc.start_channels()
74 73 km.kernel.shell.push({'my_bar': 0, 'my_baz': 1})
75 74 kc.complete('my_ba', 'my_ba', 5)
76 75 msg = kc.get_shell_msg()
77 76 self.assertEqual(msg['header']['msg_type'], 'complete_reply')
78 77 self.assertEqual(sorted(msg['content']['matches']),
79 78 ['my_bar', 'my_baz'])
80 79
81 80 def test_object_info(self):
82 81 """ Does requesting object information from an in-process kernel work?
83 82 """
84 83 km = InProcessKernelManager()
85 84 km.start_kernel()
86 85 kc = BlockingInProcessKernelClient(kernel=km.kernel)
87 86 kc.start_channels()
88 87 km.kernel.shell.user_ns['foo'] = 1
89 88 kc.object_info('foo')
90 89 msg = kc.get_shell_msg()
91 90 self.assertEquals(msg['header']['msg_type'], 'object_info_reply')
92 91 self.assertEquals(msg['content']['name'], 'foo')
93 92 self.assertEquals(msg['content']['type_name'], 'int')
94 93
95 94 def test_history(self):
96 95 """ Does requesting history from an in-process kernel work?
97 96 """
98 97 km = InProcessKernelManager()
99 98 km.start_kernel()
100 99 kc = BlockingInProcessKernelClient(kernel=km.kernel)
101 100 kc.start_channels()
102 101 kc.execute('%who')
103 102 kc.history(hist_access_type='tail', n=1)
104 103 msg = kc.shell_channel.get_msgs()[-1]
105 104 self.assertEquals(msg['header']['msg_type'], 'history_reply')
106 105 history = msg['content']['history']
107 106 self.assertEquals(len(history), 1)
108 107 self.assertEquals(history[0][2], '%who')
109 108
110 109
111 110 if __name__ == '__main__':
112 111 unittest.main()
@@ -1,63 +1,62 b''
1 1 """A kernel manager with a tornado IOLoop"""
2 2
3 3 #-----------------------------------------------------------------------------
4 4 # Copyright (C) 2013 The IPython Development Team
5 5 #
6 6 # Distributed under the terms of the BSD License. The full license is in
7 7 # the file COPYING, distributed as part of this software.
8 8 #-----------------------------------------------------------------------------
9 9
10 10 #-----------------------------------------------------------------------------
11 11 # Imports
12 12 #-----------------------------------------------------------------------------
13 13
14 14 from __future__ import absolute_import
15 15
16 import zmq
17 16 from zmq.eventloop import ioloop
18 17 from zmq.eventloop.zmqstream import ZMQStream
19 18
20 19 from IPython.utils.traitlets import (
21 20 Instance
22 21 )
23 22
24 23 from IPython.kernel.manager import KernelManager
25 24 from .restarter import IOLoopKernelRestarter
26 25
27 26 #-----------------------------------------------------------------------------
28 27 # Code
29 28 #-----------------------------------------------------------------------------
30 29
31 30
32 31 def as_zmqstream(f):
33 32 def wrapped(self, *args, **kwargs):
34 33 socket = f(self, *args, **kwargs)
35 34 return ZMQStream(socket, self.loop)
36 35 return wrapped
37 36
38 37 class IOLoopKernelManager(KernelManager):
39 38
40 39 loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
41 40 def _loop_default(self):
42 41 return ioloop.IOLoop.instance()
43 42
44 43 _restarter = Instance('IPython.kernel.ioloop.IOLoopKernelRestarter')
45 44
46 45 def start_restarter(self):
47 46 if self.autorestart and self.has_kernel:
48 47 if self._restarter is None:
49 48 self._restarter = IOLoopKernelRestarter(
50 49 kernel_manager=self, loop=self.loop,
51 50 parent=self, log=self.log
52 51 )
53 52 self._restarter.start()
54 53
55 54 def stop_restarter(self):
56 55 if self.autorestart:
57 56 if self._restarter is not None:
58 57 self._restarter.stop()
59 58
60 59 connect_shell = as_zmqstream(KernelManager.connect_shell)
61 60 connect_iopub = as_zmqstream(KernelManager.connect_iopub)
62 61 connect_stdin = as_zmqstream(KernelManager.connect_stdin)
63 62 connect_hb = as_zmqstream(KernelManager.connect_hb)
@@ -1,55 +1,54 b''
1 1 """A basic in process kernel monitor with autorestarting.
2 2
3 3 This watches a kernel's state using KernelManager.is_alive and auto
4 4 restarts the kernel if it dies.
5 5 """
6 6
7 7 #-----------------------------------------------------------------------------
8 8 # Copyright (C) 2013 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-----------------------------------------------------------------------------
13 13
14 14 #-----------------------------------------------------------------------------
15 15 # Imports
16 16 #-----------------------------------------------------------------------------
17 17
18 18 from __future__ import absolute_import
19 19
20 import zmq
21 20 from zmq.eventloop import ioloop
22 21
23 22
24 23 from IPython.kernel.restarter import KernelRestarter
25 24 from IPython.utils.traitlets import (
26 Instance, Float, List,
25 Instance,
27 26 )
28 27
29 28 #-----------------------------------------------------------------------------
30 29 # Code
31 30 #-----------------------------------------------------------------------------
32 31
33 32 class IOLoopKernelRestarter(KernelRestarter):
34 33 """Monitor and autorestart a kernel."""
35 34
36 35 loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
37 36 def _loop_default(self):
38 37 return ioloop.IOLoop.instance()
39 38
40 39 _pcallback = None
41 40
42 41 def start(self):
43 42 """Start the polling of the kernel."""
44 43 if self._pcallback is None:
45 44 self._pcallback = ioloop.PeriodicCallback(
46 45 self.poll, 1000*self.time_to_dead, self.loop
47 46 )
48 47 self._pcallback.start()
49 48
50 49 def stop(self):
51 50 """Stop the kernel polling."""
52 51 if self._pcallback is not None:
53 52 self._pcallback.stop()
54 53 self._pcallback = None
55 54
@@ -1,301 +1,301 b''
1 1 """A kernel manager for multiple kernels
2 2
3 3 Authors:
4 4
5 5 * Brian Granger
6 6 """
7 7
8 8 #-----------------------------------------------------------------------------
9 9 # Copyright (C) 2013 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is in
12 12 # the file COPYING, distributed as part of this software.
13 13 #-----------------------------------------------------------------------------
14 14
15 15 #-----------------------------------------------------------------------------
16 16 # Imports
17 17 #-----------------------------------------------------------------------------
18 18
19 19 from __future__ import absolute_import
20 20
21 21 import os
22 22 import uuid
23 23
24 24 import zmq
25 25
26 26 from IPython.config.configurable import LoggingConfigurable
27 27 from IPython.utils.importstring import import_item
28 28 from IPython.utils.traitlets import (
29 Instance, Dict, Unicode, Any, DottedObjectName, Bool
29 Instance, Dict, Unicode, Any, DottedObjectName
30 30 )
31 31
32 32 #-----------------------------------------------------------------------------
33 33 # Classes
34 34 #-----------------------------------------------------------------------------
35 35
36 36 class DuplicateKernelError(Exception):
37 37 pass
38 38
39 39
40 40
41 41 def kernel_method(f):
42 42 """decorator for proxying MKM.method(kernel_id) to individual KMs by ID"""
43 43 def wrapped(self, kernel_id, *args, **kwargs):
44 44 # get the kernel
45 45 km = self.get_kernel(kernel_id)
46 46 method = getattr(km, f.__name__)
47 47 # call the kernel's method
48 48 r = method(*args, **kwargs)
49 49 # last thing, call anything defined in the actual class method
50 50 # such as logging messages
51 51 f(self, kernel_id, *args, **kwargs)
52 52 # return the method result
53 53 return r
54 54 return wrapped
55 55
56 56
57 57 class MultiKernelManager(LoggingConfigurable):
58 58 """A class for managing multiple kernels."""
59 59
60 60 kernel_manager_class = DottedObjectName(
61 61 "IPython.kernel.ioloop.IOLoopKernelManager", config=True,
62 62 help="""The kernel manager class. This is configurable to allow
63 63 subclassing of the KernelManager for customized behavior.
64 64 """
65 65 )
66 66 def _kernel_manager_class_changed(self, name, old, new):
67 67 self.kernel_manager_factory = import_item(new)
68 68
69 69 kernel_manager_factory = Any(help="this is kernel_manager_class after import")
70 70 def _kernel_manager_factory_default(self):
71 71 return import_item(self.kernel_manager_class)
72 72
73 73 context = Instance('zmq.Context')
74 74 def _context_default(self):
75 75 return zmq.Context.instance()
76 76
77 77 connection_dir = Unicode('')
78 78
79 79 _kernels = Dict()
80 80
81 81 def list_kernel_ids(self):
82 82 """Return a list of the kernel ids of the active kernels."""
83 83 # Create a copy so we can iterate over kernels in operations
84 84 # that delete keys.
85 85 return list(self._kernels.keys())
86 86
87 87 def __len__(self):
88 88 """Return the number of running kernels."""
89 89 return len(self.list_kernel_ids())
90 90
91 91 def __contains__(self, kernel_id):
92 92 return kernel_id in self._kernels
93 93
94 94 def start_kernel(self, **kwargs):
95 95 """Start a new kernel.
96 96
97 97 The caller can pick a kernel_id by passing one in as a keyword arg,
98 98 otherwise one will be picked using a uuid.
99 99
100 100 To silence the kernel's stdout/stderr, call this using::
101 101
102 102 km.start_kernel(stdout=PIPE, stderr=PIPE)
103 103
104 104 """
105 105 kernel_id = kwargs.pop('kernel_id', unicode(uuid.uuid4()))
106 106 if kernel_id in self:
107 107 raise DuplicateKernelError('Kernel already exists: %s' % kernel_id)
108 108 # kernel_manager_factory is the constructor for the KernelManager
109 109 # subclass we are using. It can be configured as any Configurable,
110 110 # including things like its transport and ip.
111 111 km = self.kernel_manager_factory(connection_file=os.path.join(
112 112 self.connection_dir, "kernel-%s.json" % kernel_id),
113 113 parent=self, autorestart=True, log=self.log
114 114 )
115 115 km.start_kernel(**kwargs)
116 116 self._kernels[kernel_id] = km
117 117 return kernel_id
118 118
119 119 @kernel_method
120 120 def shutdown_kernel(self, kernel_id, now=False):
121 121 """Shutdown a kernel by its kernel uuid.
122 122
123 123 Parameters
124 124 ==========
125 125 kernel_id : uuid
126 126 The id of the kernel to shutdown.
127 127 now : bool
128 128 Should the kernel be shutdown forcibly using a signal.
129 129 """
130 130 self.log.info("Kernel shutdown: %s" % kernel_id)
131 131 self.remove_kernel(kernel_id)
132 132
133 133 def remove_kernel(self, kernel_id):
134 134 """remove a kernel from our mapping.
135 135
136 136 Mainly so that a kernel can be removed if it is already dead,
137 137 without having to call shutdown_kernel.
138 138
139 139 The kernel object is returned.
140 140 """
141 141 return self._kernels.pop(kernel_id)
142 142
143 143 def shutdown_all(self, now=False):
144 144 """Shutdown all kernels."""
145 145 for kid in self.list_kernel_ids():
146 146 self.shutdown_kernel(kid, now=now)
147 147
148 148 @kernel_method
149 149 def interrupt_kernel(self, kernel_id):
150 150 """Interrupt (SIGINT) the kernel by its uuid.
151 151
152 152 Parameters
153 153 ==========
154 154 kernel_id : uuid
155 155 The id of the kernel to interrupt.
156 156 """
157 157 self.log.info("Kernel interrupted: %s" % kernel_id)
158 158
159 159 @kernel_method
160 160 def signal_kernel(self, kernel_id, signum):
161 161 """Sends a signal to the kernel by its uuid.
162 162
163 163 Note that since only SIGTERM is supported on Windows, this function
164 164 is only useful on Unix systems.
165 165
166 166 Parameters
167 167 ==========
168 168 kernel_id : uuid
169 169 The id of the kernel to signal.
170 170 """
171 171 self.log.info("Signaled Kernel %s with %s" % (kernel_id, signum))
172 172
173 173 @kernel_method
174 174 def restart_kernel(self, kernel_id):
175 175 """Restart a kernel by its uuid, keeping the same ports.
176 176
177 177 Parameters
178 178 ==========
179 179 kernel_id : uuid
180 180 The id of the kernel to interrupt.
181 181 """
182 182 self.log.info("Kernel restarted: %s" % kernel_id)
183 183
184 184 @kernel_method
185 185 def is_alive(self, kernel_id):
186 186 """Is the kernel alive.
187 187
188 188 This calls KernelManager.is_alive() which calls Popen.poll on the
189 189 actual kernel subprocess.
190 190
191 191 Parameters
192 192 ==========
193 193 kernel_id : uuid
194 194 The id of the kernel.
195 195 """
196 196
197 197 def _check_kernel_id(self, kernel_id):
198 198 """check that a kernel id is valid"""
199 199 if kernel_id not in self:
200 200 raise KeyError("Kernel with id not found: %s" % kernel_id)
201 201
202 202 def get_kernel(self, kernel_id):
203 203 """Get the single KernelManager object for a kernel by its uuid.
204 204
205 205 Parameters
206 206 ==========
207 207 kernel_id : uuid
208 208 The id of the kernel.
209 209 """
210 210 self._check_kernel_id(kernel_id)
211 211 return self._kernels[kernel_id]
212 212
213 213 @kernel_method
214 214 def add_restart_callback(self, kernel_id, callback, event='restart'):
215 215 """add a callback for the KernelRestarter"""
216 216
217 217 @kernel_method
218 218 def remove_restart_callback(self, kernel_id, callback, event='restart'):
219 219 """remove a callback for the KernelRestarter"""
220 220
221 221 @kernel_method
222 222 def get_connection_info(self, kernel_id):
223 223 """Return a dictionary of connection data for a kernel.
224 224
225 225 Parameters
226 226 ==========
227 227 kernel_id : uuid
228 228 The id of the kernel.
229 229
230 230 Returns
231 231 =======
232 232 connection_dict : dict
233 233 A dict of the information needed to connect to a kernel.
234 234 This includes the ip address and the integer port
235 235 numbers of the different channels (stdin_port, iopub_port,
236 236 shell_port, hb_port).
237 237 """
238 238
239 239 @kernel_method
240 240 def connect_iopub(self, kernel_id, identity=None):
241 241 """Return a zmq Socket connected to the iopub channel.
242 242
243 243 Parameters
244 244 ==========
245 245 kernel_id : uuid
246 246 The id of the kernel
247 247 identity : bytes (optional)
248 248 The zmq identity of the socket
249 249
250 250 Returns
251 251 =======
252 252 stream : zmq Socket or ZMQStream
253 253 """
254 254
255 255 @kernel_method
256 256 def connect_shell(self, kernel_id, identity=None):
257 257 """Return a zmq Socket connected to the shell channel.
258 258
259 259 Parameters
260 260 ==========
261 261 kernel_id : uuid
262 262 The id of the kernel
263 263 identity : bytes (optional)
264 264 The zmq identity of the socket
265 265
266 266 Returns
267 267 =======
268 268 stream : zmq Socket or ZMQStream
269 269 """
270 270
271 271 @kernel_method
272 272 def connect_stdin(self, kernel_id, identity=None):
273 273 """Return a zmq Socket connected to the stdin channel.
274 274
275 275 Parameters
276 276 ==========
277 277 kernel_id : uuid
278 278 The id of the kernel
279 279 identity : bytes (optional)
280 280 The zmq identity of the socket
281 281
282 282 Returns
283 283 =======
284 284 stream : zmq Socket or ZMQStream
285 285 """
286 286
287 287 @kernel_method
288 288 def connect_hb(self, kernel_id, identity=None):
289 289 """Return a zmq Socket connected to the hb channel.
290 290
291 291 Parameters
292 292 ==========
293 293 kernel_id : uuid
294 294 The id of the kernel
295 295 identity : bytes (optional)
296 296 The zmq identity of the socket
297 297
298 298 Returns
299 299 =======
300 300 stream : zmq Socket or ZMQStream
301 301 """
@@ -1,63 +1,60 b''
1 1 """Tests for kernel utility functions
2 2
3 3 Authors
4 4 -------
5 5 * MinRK
6 6 """
7 7 #-----------------------------------------------------------------------------
8 8 # Copyright (c) 2011, the IPython Development Team.
9 9 #
10 10 # Distributed under the terms of the Modified BSD License.
11 11 #
12 12 # The full license is in the file COPYING.txt, distributed with this software.
13 13 #-----------------------------------------------------------------------------
14 14
15 15 #-----------------------------------------------------------------------------
16 16 # Imports
17 17 #-----------------------------------------------------------------------------
18 18
19 # Stdlib imports
20 from unittest import TestCase
21
22 19 # Third-party imports
23 20 import nose.tools as nt
24 21
25 22 # Our own imports
26 23 from IPython.testing import decorators as dec
27 24 from IPython.kernel.launcher import swallow_argv
28 25
29 26 #-----------------------------------------------------------------------------
30 27 # Classes and functions
31 28 #-----------------------------------------------------------------------------
32 29
33 30 @dec.parametric
34 31 def test_swallow_argv():
35 32 tests = [
36 33 # expected , argv , aliases, flags
37 34 (['-a', '5'], ['-a', '5'], None, None),
38 35 (['5'], ['-a', '5'], None, ['a']),
39 36 ([], ['-a', '5'], ['a'], None),
40 37 ([], ['-a', '5'], ['a'], ['a']),
41 38 ([], ['--foo'], None, ['foo']),
42 39 ([], ['--foo'], ['foobar'], []),
43 40 ([], ['--foo', '5'], ['foo'], []),
44 41 ([], ['--foo=5'], ['foo'], []),
45 42 (['--foo=5'], ['--foo=5'], [], ['foo']),
46 43 (['5'], ['--foo', '5'], [], ['foo']),
47 44 (['bar'], ['--foo', '5', 'bar'], ['foo'], ['foo']),
48 45 (['bar'], ['--foo=5', 'bar'], ['foo'], ['foo']),
49 46 (['5','bar'], ['--foo', '5', 'bar'], None, ['foo']),
50 47 (['bar'], ['--foo', '5', 'bar'], ['foo'], None),
51 48 (['bar'], ['--foo=5', 'bar'], ['foo'], None),
52 49 ]
53 50 for expected, argv, aliases, flags in tests:
54 51 stripped = swallow_argv(argv, aliases=aliases, flags=flags)
55 52 message = '\n'.join(['',
56 53 "argv: %r" % argv,
57 54 "aliases: %r" % aliases,
58 55 "flags : %r" % flags,
59 56 "expected : %r" % expected,
60 57 "returned : %r" % stripped,
61 58 ])
62 59 yield nt.assert_equal(expected, stripped, message)
63 60
@@ -1,511 +1,507 b''
1 1 """Test suite for our zeromq-based messaging specification.
2 2 """
3 3 #-----------------------------------------------------------------------------
4 4 # Copyright (C) 2010-2011 The IPython Development Team
5 5 #
6 6 # Distributed under the terms of the BSD License. The full license is in
7 7 # the file COPYING.txt, distributed as part of this software.
8 8 #-----------------------------------------------------------------------------
9 9
10 10 import re
11 import sys
12 import time
13 11 from subprocess import PIPE
14 12 from Queue import Empty
15 13
16 14 import nose.tools as nt
17 15
18 16 from IPython.kernel import KernelManager
19 17
20
21 18 from IPython.testing import decorators as dec
22 from IPython.utils import io
23 19 from IPython.utils.traitlets import (
24 20 HasTraits, TraitError, Bool, Unicode, Dict, Integer, List, Enum, Any,
25 21 )
26 22
27 23 #-----------------------------------------------------------------------------
28 24 # Global setup and utilities
29 25 #-----------------------------------------------------------------------------
30 26
31 27 def setup():
32 28 global KM, KC
33 29 KM = KernelManager()
34 30 KM.start_kernel(stdout=PIPE, stderr=PIPE)
35 31 KC = KM.client()
36 32 KC.start_channels()
37 33
38 34 # wait for kernel to be ready
39 35 KC.execute("pass")
40 36 KC.get_shell_msg(block=True, timeout=5)
41 37 flush_channels()
42 38
43 39
44 40 def teardown():
45 41 KC.stop_channels()
46 42 KM.shutdown_kernel()
47 43
48 44
49 45 def flush_channels(kc=None):
50 46 """flush any messages waiting on the queue"""
51 47 if kc is None:
52 48 kc = KC
53 49 for channel in (kc.shell_channel, kc.iopub_channel):
54 50 while True:
55 51 try:
56 52 msg = channel.get_msg(block=True, timeout=0.1)
57 53 except Empty:
58 54 break
59 55 else:
60 56 list(validate_message(msg))
61 57
62 58
63 59 def execute(code='', kc=None, **kwargs):
64 60 """wrapper for doing common steps for validating an execution request"""
65 61 msg_id = KC.execute(code=code, **kwargs)
66 62 reply = KC.get_shell_msg(timeout=2)
67 63 list(validate_message(reply, 'execute_reply', msg_id))
68 64 busy = KC.get_iopub_msg(timeout=2)
69 65 list(validate_message(busy, 'status', msg_id))
70 66 nt.assert_equal(busy['content']['execution_state'], 'busy')
71 67
72 68 if not kwargs.get('silent'):
73 69 pyin = KC.get_iopub_msg(timeout=2)
74 70 list(validate_message(pyin, 'pyin', msg_id))
75 71 nt.assert_equal(pyin['content']['code'], code)
76 72
77 73 return msg_id, reply['content']
78 74
79 75 #-----------------------------------------------------------------------------
80 76 # MSG Spec References
81 77 #-----------------------------------------------------------------------------
82 78
83 79
84 80 class Reference(HasTraits):
85 81
86 82 """
87 83 Base class for message spec specification testing.
88 84
89 85 This class is the core of the message specification test. The
90 86 idea is that child classes implement trait attributes for each
91 87 message keys, so that message keys can be tested against these
92 88 traits using :meth:`check` method.
93 89
94 90 """
95 91
96 92 def check(self, d):
97 93 """validate a dict against our traits"""
98 94 for key in self.trait_names():
99 95 yield nt.assert_true(key in d, "Missing key: %r, should be found in %s" % (key, d))
100 96 # FIXME: always allow None, probably not a good idea
101 97 if d[key] is None:
102 98 continue
103 99 try:
104 100 setattr(self, key, d[key])
105 101 except TraitError as e:
106 102 yield nt.assert_true(False, str(e))
107 103
108 104
109 105 class RMessage(Reference):
110 106 msg_id = Unicode()
111 107 msg_type = Unicode()
112 108 header = Dict()
113 109 parent_header = Dict()
114 110 content = Dict()
115 111
116 112 class RHeader(Reference):
117 113 msg_id = Unicode()
118 114 msg_type = Unicode()
119 115 session = Unicode()
120 116 username = Unicode()
121 117
122 118 class RContent(Reference):
123 119 status = Enum((u'ok', u'error'))
124 120
125 121
126 122 class ExecuteReply(Reference):
127 123 execution_count = Integer()
128 124 status = Enum((u'ok', u'error'))
129 125
130 126 def check(self, d):
131 127 for tst in Reference.check(self, d):
132 128 yield tst
133 129 if d['status'] == 'ok':
134 130 for tst in ExecuteReplyOkay().check(d):
135 131 yield tst
136 132 elif d['status'] == 'error':
137 133 for tst in ExecuteReplyError().check(d):
138 134 yield tst
139 135
140 136
141 137 class ExecuteReplyOkay(Reference):
142 138 payload = List(Dict)
143 139 user_variables = Dict()
144 140 user_expressions = Dict()
145 141
146 142
147 143 class ExecuteReplyError(Reference):
148 144 ename = Unicode()
149 145 evalue = Unicode()
150 146 traceback = List(Unicode)
151 147
152 148
153 149 class OInfoReply(Reference):
154 150 name = Unicode()
155 151 found = Bool()
156 152 ismagic = Bool()
157 153 isalias = Bool()
158 154 namespace = Enum((u'builtin', u'magics', u'alias', u'Interactive'))
159 155 type_name = Unicode()
160 156 string_form = Unicode()
161 157 base_class = Unicode()
162 158 length = Integer()
163 159 file = Unicode()
164 160 definition = Unicode()
165 161 argspec = Dict()
166 162 init_definition = Unicode()
167 163 docstring = Unicode()
168 164 init_docstring = Unicode()
169 165 class_docstring = Unicode()
170 166 call_def = Unicode()
171 167 call_docstring = Unicode()
172 168 source = Unicode()
173 169
174 170 def check(self, d):
175 171 for tst in Reference.check(self, d):
176 172 yield tst
177 173 if d['argspec'] is not None:
178 174 for tst in ArgSpec().check(d['argspec']):
179 175 yield tst
180 176
181 177
182 178 class ArgSpec(Reference):
183 179 args = List(Unicode)
184 180 varargs = Unicode()
185 181 varkw = Unicode()
186 182 defaults = List()
187 183
188 184
189 185 class Status(Reference):
190 186 execution_state = Enum((u'busy', u'idle', u'starting'))
191 187
192 188
193 189 class CompleteReply(Reference):
194 190 matches = List(Unicode)
195 191
196 192
197 193 def Version(num, trait=Integer):
198 194 return List(trait, default_value=[0] * num, minlen=num, maxlen=num)
199 195
200 196
201 197 class KernelInfoReply(Reference):
202 198
203 199 protocol_version = Version(2)
204 200 ipython_version = Version(4, Any)
205 201 language_version = Version(3)
206 202 language = Unicode()
207 203
208 204 def _ipython_version_changed(self, name, old, new):
209 205 for v in new:
210 206 nt.assert_true(
211 207 isinstance(v, int) or isinstance(v, basestring),
212 208 'expected int or string as version component, got {0!r}'
213 209 .format(v))
214 210
215 211
216 212 # IOPub messages
217 213
218 214 class PyIn(Reference):
219 215 code = Unicode()
220 216 execution_count = Integer()
221 217
222 218
223 219 PyErr = ExecuteReplyError
224 220
225 221
226 222 class Stream(Reference):
227 223 name = Enum((u'stdout', u'stderr'))
228 224 data = Unicode()
229 225
230 226
231 227 mime_pat = re.compile(r'\w+/\w+')
232 228
233 229 class DisplayData(Reference):
234 230 source = Unicode()
235 231 metadata = Dict()
236 232 data = Dict()
237 233 def _data_changed(self, name, old, new):
238 234 for k,v in new.iteritems():
239 235 nt.assert_true(mime_pat.match(k))
240 236 nt.assert_true(isinstance(v, basestring), "expected string data, got %r" % v)
241 237
242 238
243 239 class PyOut(Reference):
244 240 execution_count = Integer()
245 241 data = Dict()
246 242 def _data_changed(self, name, old, new):
247 243 for k,v in new.iteritems():
248 244 nt.assert_true(mime_pat.match(k))
249 245 nt.assert_true(isinstance(v, basestring), "expected string data, got %r" % v)
250 246
251 247
252 248 references = {
253 249 'execute_reply' : ExecuteReply(),
254 250 'object_info_reply' : OInfoReply(),
255 251 'status' : Status(),
256 252 'complete_reply' : CompleteReply(),
257 253 'kernel_info_reply': KernelInfoReply(),
258 254 'pyin' : PyIn(),
259 255 'pyout' : PyOut(),
260 256 'pyerr' : PyErr(),
261 257 'stream' : Stream(),
262 258 'display_data' : DisplayData(),
263 259 }
264 260 """
265 261 Specifications of `content` part of the reply messages.
266 262 """
267 263
268 264
269 265 def validate_message(msg, msg_type=None, parent=None):
270 266 """validate a message
271 267
272 268 This is a generator, and must be iterated through to actually
273 269 trigger each test.
274 270
275 271 If msg_type and/or parent are given, the msg_type and/or parent msg_id
276 272 are compared with the given values.
277 273 """
278 274 RMessage().check(msg)
279 275 if msg_type:
280 276 yield nt.assert_equal(msg['msg_type'], msg_type)
281 277 if parent:
282 278 yield nt.assert_equal(msg['parent_header']['msg_id'], parent)
283 279 content = msg['content']
284 280 ref = references[msg['msg_type']]
285 281 for tst in ref.check(content):
286 282 yield tst
287 283
288 284
289 285 #-----------------------------------------------------------------------------
290 286 # Tests
291 287 #-----------------------------------------------------------------------------
292 288
293 289 # Shell channel
294 290
295 291 @dec.parametric
296 292 def test_execute():
297 293 flush_channels()
298 294
299 295 msg_id = KC.execute(code='x=1')
300 296 reply = KC.get_shell_msg(timeout=2)
301 297 for tst in validate_message(reply, 'execute_reply', msg_id):
302 298 yield tst
303 299
304 300
305 301 @dec.parametric
306 302 def test_execute_silent():
307 303 flush_channels()
308 304 msg_id, reply = execute(code='x=1', silent=True)
309 305
310 306 # flush status=idle
311 307 status = KC.iopub_channel.get_msg(timeout=2)
312 308 for tst in validate_message(status, 'status', msg_id):
313 309 yield tst
314 310 nt.assert_equal(status['content']['execution_state'], 'idle')
315 311
316 312 yield nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
317 313 count = reply['execution_count']
318 314
319 315 msg_id, reply = execute(code='x=2', silent=True)
320 316
321 317 # flush status=idle
322 318 status = KC.iopub_channel.get_msg(timeout=2)
323 319 for tst in validate_message(status, 'status', msg_id):
324 320 yield tst
325 321 yield nt.assert_equal(status['content']['execution_state'], 'idle')
326 322
327 323 yield nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
328 324 count_2 = reply['execution_count']
329 325 yield nt.assert_equal(count_2, count)
330 326
331 327
332 328 @dec.parametric
333 329 def test_execute_error():
334 330 flush_channels()
335 331
336 332 msg_id, reply = execute(code='1/0')
337 333 yield nt.assert_equal(reply['status'], 'error')
338 334 yield nt.assert_equal(reply['ename'], 'ZeroDivisionError')
339 335
340 336 pyerr = KC.iopub_channel.get_msg(timeout=2)
341 337 for tst in validate_message(pyerr, 'pyerr', msg_id):
342 338 yield tst
343 339
344 340
345 341 def test_execute_inc():
346 342 """execute request should increment execution_count"""
347 343 flush_channels()
348 344
349 345 msg_id, reply = execute(code='x=1')
350 346 count = reply['execution_count']
351 347
352 348 flush_channels()
353 349
354 350 msg_id, reply = execute(code='x=2')
355 351 count_2 = reply['execution_count']
356 352 nt.assert_equal(count_2, count+1)
357 353
358 354
359 355 def test_user_variables():
360 356 flush_channels()
361 357
362 358 msg_id, reply = execute(code='x=1', user_variables=['x'])
363 359 user_variables = reply['user_variables']
364 360 nt.assert_equal(user_variables, {u'x': {
365 361 u'status': u'ok',
366 362 u'data': {u'text/plain': u'1'},
367 363 u'metadata': {},
368 364 }})
369 365
370 366
def test_user_variables_fail():
    """An unknown user_variable yields a per-name error entry, not a crash."""
    flush_channels()

    reply = execute(code='x=1', user_variables=['nosuchname'])[1]
    entry = reply['user_variables']['nosuchname']
    nt.assert_equal(entry['status'], 'error')
    nt.assert_equal(entry['ename'], 'KeyError')
379 375
380 376
def test_user_expressions():
    """user_expressions are evaluated and their reprs returned in the reply."""
    flush_channels()

    reply = execute(code='x=1', user_expressions=dict(foo='x+1'))[1]
    nt.assert_equal(reply['user_expressions'], {u'foo': {
        u'status': u'ok',
        u'data': {u'text/plain': u'2'},
        u'metadata': {},
    }})
391 387
392 388
def test_user_expressions_fail():
    """A user_expression referencing an undefined name reports NameError."""
    flush_channels()

    reply = execute(code='x=0', user_expressions=dict(foo='nosuchname'))[1]
    entry = reply['user_expressions']['foo']
    nt.assert_equal(entry['status'], 'error')
    nt.assert_equal(entry['ename'], 'NameError')
401 397
402 398
@dec.parametric
def test_oinfo():
    """object_info on any name returns a well-formed object_info_reply."""
    flush_channels()

    req_id = KC.object_info('a')
    reply = KC.get_shell_msg(timeout=2)
    for check in validate_message(reply, 'object_info_reply', req_id):
        yield check
411 407
412 408
@dec.parametric
def test_oinfo_found():
    """object_info on a defined variable is found, with no argspec."""
    flush_channels()

    execute(code='a=5')

    req_id = KC.object_info('a')
    reply = KC.get_shell_msg(timeout=2)
    for check in validate_message(reply, 'object_info_reply', req_id):
        yield check
    content = reply['content']
    yield nt.assert_true(content['found'])
    # An integer is not callable, so no argspec should be reported.
    argspec = content['argspec']
    yield nt.assert_true(argspec is None, "didn't expect argspec dict, got %r" % argspec)
427 423
428 424
@dec.parametric
def test_oinfo_detail():
    """detail_level=2 object_info on a method includes a full argspec."""
    flush_channels()

    execute(code='ip=get_ipython()')

    req_id = KC.object_info('ip.object_inspect', detail_level=2)
    reply = KC.get_shell_msg(timeout=2)
    for check in validate_message(reply, 'object_info_reply', req_id):
        yield check
    content = reply['content']
    yield nt.assert_true(content['found'])
    argspec = content['argspec']
    yield nt.assert_true(isinstance(argspec, dict), "expected non-empty argspec dict, got %r" % argspec)
    # object_inspect's signature has detail_level=0 as its only default.
    yield nt.assert_equal(argspec['defaults'], [0])
444 440
445 441
@dec.parametric
def test_oinfo_not_found():
    """object_info on an undefined name reports found=False."""
    flush_channels()

    req_id = KC.object_info('dne')
    reply = KC.get_shell_msg(timeout=2)
    for check in validate_message(reply, 'object_info_reply', req_id):
        yield check
    yield nt.assert_false(reply['content']['found'])
456 452
457 453
@dec.parametric
def test_complete():
    """complete_request matches every name sharing the given prefix."""
    flush_channels()

    execute(code="alpha = albert = 5")

    req_id = KC.complete('al', 'al', 2)
    reply = KC.get_shell_msg(timeout=2)
    for check in validate_message(reply, 'complete_reply', req_id):
        yield check
    matches = reply['content']['matches']
    for name in ('alpha', 'albert'):
        yield nt.assert_true(name in matches, "Missing match: %r" % name)
471 467
472 468
@dec.parametric
def test_kernel_info_request():
    """kernel_info returns a well-formed kernel_info_reply."""
    flush_channels()

    req_id = KC.kernel_info()
    reply = KC.get_shell_msg(timeout=2)
    for check in validate_message(reply, 'kernel_info_reply', req_id):
        yield check
481 477
482 478
483 479 # IOPub channel
484 480
485 481
@dec.parametric
def test_stream():
    """print output arrives as a 'stream' message named stdout."""
    flush_channels()

    msg_id, reply = execute("print('hi')")

    stdout_msg = KC.iopub_channel.get_msg(timeout=2)
    for check in validate_message(stdout_msg, 'stream', msg_id):
        yield check
    content = stdout_msg['content']
    yield nt.assert_equal(content['name'], u'stdout')
    yield nt.assert_equal(content['data'], u'hi\n')
498 494
499 495
@dec.parametric
def test_display_data():
    """display() publishes a display_data message on IOPub."""
    flush_channels()

    msg_id, reply = execute("from IPython.core.display import display; display(1)")

    display_msg = KC.iopub_channel.get_msg(timeout=2)
    for check in validate_message(display_msg, 'display_data', parent=msg_id):
        yield check
    mimebundle = display_msg['content']['data']
    yield nt.assert_equal(mimebundle['text/plain'], u'1')
511 507
@@ -1,57 +1,56 b''
1 1 """The client and server for a basic ping-pong style heartbeat.
2 2 """
3 3
4 4 #-----------------------------------------------------------------------------
5 5 # Copyright (C) 2008-2011 The IPython Development Team
6 6 #
7 7 # Distributed under the terms of the BSD License. The full license is in
8 8 # the file COPYING, distributed as part of this software.
9 9 #-----------------------------------------------------------------------------
10 10
11 11 #-----------------------------------------------------------------------------
12 12 # Imports
13 13 #-----------------------------------------------------------------------------
14 14
15 15 import os
16 16 import socket
17 import sys
18 17 from threading import Thread
19 18
20 19 import zmq
21 20
22 21 from IPython.utils.localinterfaces import LOCALHOST
23 22
24 23 #-----------------------------------------------------------------------------
25 24 # Code
26 25 #-----------------------------------------------------------------------------
27 26
28 27
class Heartbeat(Thread):
    "A simple ping-pong style heartbeat that runs in a thread."

    def __init__(self, context, addr=('tcp', LOCALHOST, 0)):
        Thread.__init__(self)
        self.context = context
        self.transport, self.ip, self.port = addr
        # A port of 0 means "pick one for me".
        if self.port == 0:
            if addr[0] == 'tcp':
                # Ask the OS for a free TCP port, then release it for 0MQ.
                probe = socket.socket()
                # '*' means all interfaces to 0MQ, which is '' to socket.socket
                bind_ip = '' if self.ip == '*' else self.ip
                probe.bind((bind_ip, 0))
                self.port = probe.getsockname()[1]
                probe.close()
            elif addr[0] == 'ipc':
                # Probe for the first ipc endpoint path not already in use.
                candidate = 1
                while os.path.exists("%s-%s" % (self.ip, candidate)):
                    candidate = candidate + 1
                self.port = candidate
            else:
                raise ValueError("Unrecognized zmq transport: %s" % addr[0])
        self.addr = (self.ip, self.port)
        self.daemon = True

    def run(self):
        self.socket = self.context.socket(zmq.REP)
        # tcp endpoints are host:port; ipc endpoints are path-port.
        sep = ':' if self.transport == 'tcp' else '-'
        endpoint = '%s://%s' % (self.transport, self.ip) + sep + str(self.port)
        self.socket.bind(endpoint)
        # Echo every ping straight back on the same REP socket, forever.
        zmq.device(zmq.FORWARDER, self.socket, self.socket)
57 56
@@ -1,804 +1,803 b''
1 1 #!/usr/bin/env python
2 2 """A simple interactive kernel that talks to a frontend over 0MQ.
3 3
4 4 Things to do:
5 5
6 6 * Implement `set_parent` logic. Right before doing exec, the Kernel should
7 7 call set_parent on all the PUB objects with the message about to be executed.
8 8 * Implement random port and security key logic.
9 9 * Implement control messages.
10 10 * Implement event loop and poll version.
11 11 """
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Imports
15 15 #-----------------------------------------------------------------------------
16 16 from __future__ import print_function
17 17
18 18 # Standard library imports
19 19 import __builtin__
20 20 import sys
21 21 import time
22 22 import traceback
23 23 import logging
24 24 import uuid
25 25
26 26 from datetime import datetime
27 27 from signal import (
28 signal, getsignal, default_int_handler, SIGINT, SIG_IGN
28 signal, default_int_handler, SIGINT
29 29 )
30 30
31 31 # System library imports
32 32 import zmq
33 33 from zmq.eventloop import ioloop
34 34 from zmq.eventloop.zmqstream import ZMQStream
35 35
36 36 # Local imports
37 37 from IPython.config.configurable import Configurable
38 38 from IPython.core.error import StdinNotImplementedError
39 39 from IPython.core import release
40 from IPython.utils import io
41 40 from IPython.utils import py3compat
42 41 from IPython.utils.jsonutil import json_clean
43 42 from IPython.utils.traitlets import (
44 Any, Instance, Float, Dict, CaselessStrEnum, List, Set, Integer, Unicode,
43 Any, Instance, Float, Dict, List, Set, Integer, Unicode,
45 44 Type
46 45 )
47 46
48 47 from serialize import serialize_object, unpack_apply_message
49 48 from session import Session
50 49 from zmqshell import ZMQInteractiveShell
51 50
52 51
53 52 #-----------------------------------------------------------------------------
54 53 # Main kernel class
55 54 #-----------------------------------------------------------------------------
56 55
# Version info advertised to frontends in kernel_info replies
# (see kernel_info_request below).
protocol_version = list(release.kernel_protocol_version_info)
ipython_version = list(release.version_info)
language_version = list(sys.version_info[:3])
60 59
61 60
62 61 class Kernel(Configurable):
63 62
    #---------------------------------------------------------------------------
    # Kernel interface
    #---------------------------------------------------------------------------

    # attribute to override with a GUI eventloop function; None means the
    # plain ZMQ poll loop is used (see enter_eventloop).
    eventloop = Any(None)
    def _eventloop_changed(self, name, old, new):
        """schedule call to eventloop from IOLoop"""
        # Traitlets observer: defer entering the new eventloop via the IOLoop
        # so the currently-running message handler can finish first.
        loop = ioloop.IOLoop.instance()
        loop.add_timeout(time.time()+0.1, self.enter_eventloop)
74 73
    # The interactive shell this kernel drives, and the class used to build it.
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
    shell_class = Type(ZMQInteractiveShell)

    # Messaging plumbing: the session handles (de)serialization; the streams
    # and sockets carry shell / control / iopub / stdin traffic.
    session = Instance(Session)
    profile_dir = Instance('IPython.core.profiledir.ProfileDir')
    shell_streams = List()
    control_stream = Instance(ZMQStream)
    iopub_socket = Instance(zmq.Socket)
    stdin_socket = Instance(zmq.Socket)
    log = Instance(logging.Logger)
85 84
    # Module whose namespace user code runs in.
    user_module = Any()
    def _user_module_changed(self, name, old, new):
        # Keep the shell's user_module in sync when ours is reassigned.
        if self.shell is not None:
            self.shell.user_module = new
90 89
    # Namespace dict for user code; None lets the shell build its own.
    user_ns = Dict(default_value=None)
    def _user_ns_changed(self, name, old, new):
        # Propagate a new user namespace to the shell and re-seed it.
        if self.shell is not None:
            self.shell.user_ns = new
            self.shell.init_user_ns()
96 95
    # identities:
    int_id = Integer(-1)    # used in IOPub topics as "engine.<n>" when >= 0
    ident = Unicode()       # zmq identity, used as topic prefix otherwise

    def _ident_default(self):
        # Fresh random identity unless one was configured explicitly.
        return unicode(uuid.uuid4())
103 102
104 103
    # Private interface

    # Time to sleep after flushing the stdout/err buffers in each execute
    # cycle. While this introduces a hard limit on the minimal latency of the
    # execute cycle, it helps prevent output synchronization problems for
    # clients.
    # Units are in seconds. The minimum zmq latency on local host is probably
    # ~150 microseconds, set this to 500us for now. We may need to increase it
    # a little if it's not enough after more interactive testing.
    _execute_sleep = Float(0.0005, config=True)

    # Frequency of the kernel's event loop.
    # Units are in seconds, kernel subclasses for GUI toolkits may need to
    # adapt to milliseconds.
    _poll_interval = Float(0.05, config=True)

    # If the shutdown was requested over the network, we leave here the
    # necessary reply message so it can be sent by our registered atexit
    # handler. This ensures that the reply is only sent to clients truly at
    # the end of our shutdown process (which happens after the underlying
    # IPython shell's own shutdown).
    _shutdown_message = None

    # This is a dict of port number that the kernel is listening on. It is set
    # by record_ports and used by connect_request.
    _recorded_ports = Dict()

    # A reference to the Python builtin 'raw_input' function.
    # (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3)
    # Saved/restored around execute_request so user code sees our replacement.
    _sys_raw_input = Any()

    # set of aborted msg_ids: filled by abort_request, consulted by
    # dispatch_shell to short-circuit cancelled requests.
    aborted = Set()
138 137
139 138
    def __init__(self, **kwargs):
        """Create the kernel: build the shell, wire its output publishers
        to our IOPub socket, and register shell/control message handlers."""
        super(Kernel, self).__init__(**kwargs)

        # Initialize the InteractiveShell subclass
        self.shell = self.shell_class.instance(parent=self,
            profile_dir = self.profile_dir,
            user_module = self.user_module,
            user_ns = self.user_ns,
        )
        # Route displayhook / display_pub / data_pub output over IOPub.
        self.shell.displayhook.session = self.session
        self.shell.displayhook.pub_socket = self.iopub_socket
        self.shell.displayhook.topic = self._topic('pyout')
        self.shell.display_pub.session = self.session
        self.shell.display_pub.pub_socket = self.iopub_socket
        self.shell.data_pub.session = self.session
        self.shell.data_pub.pub_socket = self.iopub_socket

        # TMP - hack while developing
        self.shell._reply_content = None

        # Build dict of handlers for message types
        msg_types = [ 'execute_request', 'complete_request',
                      'object_info_request', 'history_request',
                      'kernel_info_request',
                      'connect_request', 'shutdown_request',
                      'apply_request',
                    ]
        self.shell_handlers = {}
        for msg_type in msg_types:
            self.shell_handlers[msg_type] = getattr(self, msg_type)

        # The control channel handles everything the shell channel does,
        # plus clear_request and abort_request.
        control_msg_types = msg_types + [ 'clear_request', 'abort_request' ]
        self.control_handlers = {}
        for msg_type in control_msg_types:
            self.control_handlers[msg_type] = getattr(self, msg_type)
175 174
176 175 def dispatch_control(self, msg):
177 176 """dispatch control requests"""
178 177 idents,msg = self.session.feed_identities(msg, copy=False)
179 178 try:
180 179 msg = self.session.unserialize(msg, content=True, copy=False)
181 180 except:
182 181 self.log.error("Invalid Control Message", exc_info=True)
183 182 return
184 183
185 184 self.log.debug("Control received: %s", msg)
186 185
187 186 header = msg['header']
188 187 msg_id = header['msg_id']
189 188 msg_type = header['msg_type']
190 189
191 190 handler = self.control_handlers.get(msg_type, None)
192 191 if handler is None:
193 192 self.log.error("UNKNOWN CONTROL MESSAGE TYPE: %r", msg_type)
194 193 else:
195 194 try:
196 195 handler(self.control_stream, idents, msg)
197 196 except Exception:
198 197 self.log.error("Exception in control handler:", exc_info=True)
199 198
    def dispatch_shell(self, stream, msg):
        """dispatch shell requests"""
        # flush control requests first, so aborts/clears win races with work
        if self.control_stream:
            self.control_stream.flush()

        idents,msg = self.session.feed_identities(msg, copy=False)
        try:
            msg = self.session.unserialize(msg, content=True, copy=False)
        except:
            self.log.error("Invalid Message", exc_info=True)
            return

        header = msg['header']
        msg_id = header['msg_id']
        msg_type = msg['header']['msg_type']

        # Print some info about this message and leave a '--->' marker, so it's
        # easier to trace visually the message chain when debugging.  Each
        # handler prints its message at the end.
        self.log.debug('\n*** MESSAGE TYPE:%s***', msg_type)
        self.log.debug('   Content: %s\n   --->\n   ', msg['content'])

        if msg_id in self.aborted:
            # Request was cancelled via abort_request: reply with status
            # 'aborted' instead of running the handler.
            self.aborted.remove(msg_id)
            # is it safe to assume a msg_id will not be resubmitted?
            reply_type = msg_type.split('_')[0] + '_reply'
            status = {'status' : 'aborted'}
            md = {'engine' : self.ident}
            md.update(status)
            reply_msg = self.session.send(stream, reply_type, metadata=md,
                        content=status, parent=msg, ident=idents)
            return

        handler = self.shell_handlers.get(msg_type, None)
        if handler is None:
            self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type)
        else:
            # ensure default_int_handler during handler call, and restore
            # whatever SIGINT handler was installed before when done
            sig = signal(SIGINT, default_int_handler)
            try:
                handler(stream, idents, msg)
            except Exception:
                self.log.error("Exception in message handler:", exc_info=True)
            finally:
                signal(SIGINT, sig)
246 245
247 246 def enter_eventloop(self):
248 247 """enter eventloop"""
249 248 self.log.info("entering eventloop")
250 249 # restore default_int_handler
251 250 signal(SIGINT, default_int_handler)
252 251 while self.eventloop is not None:
253 252 try:
254 253 self.eventloop(self)
255 254 except KeyboardInterrupt:
256 255 # Ctrl-C shouldn't crash the kernel
257 256 self.log.error("KeyboardInterrupt caught in kernel")
258 257 continue
259 258 else:
260 259 # eventloop exited cleanly, this means we should stop (right?)
261 260 self.eventloop = None
262 261 break
263 262 self.log.info("exiting eventloop")
264 263
265 264 def start(self):
266 265 """register dispatchers for streams"""
267 266 self.shell.exit_now = False
268 267 if self.control_stream:
269 268 self.control_stream.on_recv(self.dispatch_control, copy=False)
270 269
271 270 def make_dispatcher(stream):
272 271 def dispatcher(msg):
273 272 return self.dispatch_shell(stream, msg)
274 273 return dispatcher
275 274
276 275 for s in self.shell_streams:
277 276 s.on_recv(make_dispatcher(s), copy=False)
278 277
279 278 # publish idle status
280 279 self._publish_status('starting')
281 280
282 281 def do_one_iteration(self):
283 282 """step eventloop just once"""
284 283 if self.control_stream:
285 284 self.control_stream.flush()
286 285 for stream in self.shell_streams:
287 286 # handle at most one request per iteration
288 287 stream.flush(zmq.POLLIN, 1)
289 288 stream.flush(zmq.POLLOUT)
290 289
291 290
292 291 def record_ports(self, ports):
293 292 """Record the ports that this kernel is using.
294 293
295 294 The creator of the Kernel instance must call this methods if they
296 295 want the :meth:`connect_request` method to return the port numbers.
297 296 """
298 297 self._recorded_ports = ports
299 298
300 299 #---------------------------------------------------------------------------
301 300 # Kernel request handlers
302 301 #---------------------------------------------------------------------------
303 302
304 303 def _make_metadata(self, other=None):
305 304 """init metadata dict, for execute/apply_reply"""
306 305 new_md = {
307 306 'dependencies_met' : True,
308 307 'engine' : self.ident,
309 308 'started': datetime.now(),
310 309 }
311 310 if other:
312 311 new_md.update(other)
313 312 return new_md
314 313
315 314 def _publish_pyin(self, code, parent, execution_count):
316 315 """Publish the code request on the pyin stream."""
317 316
318 317 self.session.send(self.iopub_socket, u'pyin',
319 318 {u'code':code, u'execution_count': execution_count},
320 319 parent=parent, ident=self._topic('pyin')
321 320 )
322 321
323 322 def _publish_status(self, status, parent=None):
324 323 """send status (busy/idle) on IOPub"""
325 324 self.session.send(self.iopub_socket,
326 325 u'status',
327 326 {u'execution_state': status},
328 327 parent=parent,
329 328 ident=self._topic('status'),
330 329 )
331 330
332 331
    def execute_request(self, stream, ident, parent):
        """handle an execute_request

        Runs the submitted code through the shell, publishing pyin and
        busy/idle status on IOPub, and replies with status, execution_count,
        user_variables/user_expressions results, and any payloads.
        """

        self._publish_status(u'busy', parent)

        try:
            content = parent[u'content']
            code = content[u'code']
            silent = content[u'silent']
            store_history = content.get(u'store_history', not silent)
        except:
            self.log.error("Got bad msg: ")
            self.log.error("%s", parent)
            return

        md = self._make_metadata(parent['metadata'])

        shell = self.shell # we'll need this a lot here

        # Replace raw_input. Note that is not sufficient to replace
        # raw_input in the user namespace.
        if content.get('allow_stdin', False):
            raw_input = lambda prompt='': self._raw_input(prompt, ident, parent)
        else:
            # frontend can't answer stdin requests: raise instead of hanging
            raw_input = lambda prompt='' : self._no_raw_input()

        if py3compat.PY3:
            self._sys_raw_input = __builtin__.input
            __builtin__.input = raw_input
        else:
            self._sys_raw_input = __builtin__.raw_input
            __builtin__.raw_input = raw_input

        # Set the parent message of the display hook and out streams.
        shell.displayhook.set_parent(parent)
        shell.display_pub.set_parent(parent)
        shell.data_pub.set_parent(parent)
        # stdout/stderr may be plain streams without set_parent (e.g. when
        # not redirected to zmq); ignore them in that case.
        try:
            sys.stdout.set_parent(parent)
        except AttributeError:
            pass
        try:
            sys.stderr.set_parent(parent)
        except AttributeError:
            pass

        # Re-broadcast our input for the benefit of listening clients, and
        # start computing output
        if not silent:
            self._publish_pyin(code, parent, shell.execution_count)

        reply_content = {}
        try:
            # FIXME: the shell calls the exception handler itself.
            shell.run_cell(code, store_history=store_history, silent=silent)
        except:
            status = u'error'
            # FIXME: this code right now isn't being used yet by default,
            # because the run_cell() call above directly fires off exception
            # reporting.  This code, therefore, is only active in the scenario
            # where runlines itself has an unhandled exception.  We need to
            # uniformize this, for all exception construction to come from a
            # single location in the codbase.
            etype, evalue, tb = sys.exc_info()
            tb_list = traceback.format_exception(etype, evalue, tb)
            reply_content.update(shell._showtraceback(etype, evalue, tb_list))
        else:
            status = u'ok'
        finally:
            # Restore raw_input.
            if py3compat.PY3:
                __builtin__.input = self._sys_raw_input
            else:
                __builtin__.raw_input = self._sys_raw_input

        reply_content[u'status'] = status

        # Return the execution counter so clients can display prompts
        reply_content['execution_count'] = shell.execution_count - 1

        # FIXME - fish exception info out of shell, possibly left there by
        # runlines.  We'll need to clean up this logic later.
        if shell._reply_content is not None:
            reply_content.update(shell._reply_content)
            e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='execute')
            reply_content['engine_info'] = e_info
            # reset after use
            shell._reply_content = None

        if 'traceback' in reply_content:
            self.log.info("Exception in execute request:\n%s", '\n'.join(reply_content['traceback']))


        # At this point, we can tell whether the main code execution succeeded
        # or not.  If it did, we proceed to evaluate user_variables/expressions
        if reply_content['status'] == 'ok':
            reply_content[u'user_variables'] = \
                         shell.user_variables(content.get(u'user_variables', []))
            reply_content[u'user_expressions'] = \
                         shell.user_expressions(content.get(u'user_expressions', {}))
        else:
            # If there was an error, don't even try to compute variables or
            # expressions
            reply_content[u'user_variables'] = {}
            reply_content[u'user_expressions'] = {}

        # Payloads should be retrieved regardless of outcome, so we can both
        # recover partial output (that could have been generated early in a
        # block, before an error) and clear the payload system always.
        reply_content[u'payload'] = shell.payload_manager.read_payload()
        # Be agressive about clearing the payload because we don't want
        # it to sit in memory until the next execute_request comes in.
        shell.payload_manager.clear_payload()

        # Flush output before sending the reply.
        sys.stdout.flush()
        sys.stderr.flush()
        # FIXME: on rare occasions, the flush doesn't seem to make it to the
        # clients... This seems to mitigate the problem, but we definitely need
        # to better understand what's going on.
        if self._execute_sleep:
            time.sleep(self._execute_sleep)

        # Send the reply.
        reply_content = json_clean(reply_content)

        md['status'] = reply_content['status']
        if reply_content['status'] == 'error' and \
                        reply_content['ename'] == 'UnmetDependency':
                md['dependencies_met'] = False

        reply_msg = self.session.send(stream, u'execute_reply',
                                      reply_content, parent, metadata=md,
                                      ident=ident)

        self.log.debug("%s", reply_msg)

        # An error in non-silent execution aborts any queued requests.
        if not silent and reply_msg['content']['status'] == u'error':
            self._abort_queues()

        self._publish_status(u'idle', parent)
474 473
475 474 def complete_request(self, stream, ident, parent):
476 475 txt, matches = self._complete(parent)
477 476 matches = {'matches' : matches,
478 477 'matched_text' : txt,
479 478 'status' : 'ok'}
480 479 matches = json_clean(matches)
481 480 completion_msg = self.session.send(stream, 'complete_reply',
482 481 matches, parent, ident)
483 482 self.log.debug("%s", completion_msg)
484 483
485 484 def object_info_request(self, stream, ident, parent):
486 485 content = parent['content']
487 486 object_info = self.shell.object_inspect(content['oname'],
488 487 detail_level = content.get('detail_level', 0)
489 488 )
490 489 # Before we send this object over, we scrub it for JSON usage
491 490 oinfo = json_clean(object_info)
492 491 msg = self.session.send(stream, 'object_info_reply',
493 492 oinfo, parent, ident)
494 493 self.log.debug("%s", msg)
495 494
496 495 def history_request(self, stream, ident, parent):
497 496 # We need to pull these out, as passing **kwargs doesn't work with
498 497 # unicode keys before Python 2.6.5.
499 498 hist_access_type = parent['content']['hist_access_type']
500 499 raw = parent['content']['raw']
501 500 output = parent['content']['output']
502 501 if hist_access_type == 'tail':
503 502 n = parent['content']['n']
504 503 hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
505 504 include_latest=True)
506 505
507 506 elif hist_access_type == 'range':
508 507 session = parent['content']['session']
509 508 start = parent['content']['start']
510 509 stop = parent['content']['stop']
511 510 hist = self.shell.history_manager.get_range(session, start, stop,
512 511 raw=raw, output=output)
513 512
514 513 elif hist_access_type == 'search':
515 514 n = parent['content'].get('n')
516 515 unique = parent['content'].get('unique', False)
517 516 pattern = parent['content']['pattern']
518 517 hist = self.shell.history_manager.search(
519 518 pattern, raw=raw, output=output, n=n, unique=unique)
520 519
521 520 else:
522 521 hist = []
523 522 hist = list(hist)
524 523 content = {'history' : hist}
525 524 content = json_clean(content)
526 525 msg = self.session.send(stream, 'history_reply',
527 526 content, parent, ident)
528 527 self.log.debug("Sending history reply with %i entries", len(hist))
529 528
530 529 def connect_request(self, stream, ident, parent):
531 530 if self._recorded_ports is not None:
532 531 content = self._recorded_ports.copy()
533 532 else:
534 533 content = {}
535 534 msg = self.session.send(stream, 'connect_reply',
536 535 content, parent, ident)
537 536 self.log.debug("%s", msg)
538 537
539 538 def kernel_info_request(self, stream, ident, parent):
540 539 vinfo = {
541 540 'protocol_version': protocol_version,
542 541 'ipython_version': ipython_version,
543 542 'language_version': language_version,
544 543 'language': 'python',
545 544 }
546 545 msg = self.session.send(stream, 'kernel_info_reply',
547 546 vinfo, parent, ident)
548 547 self.log.debug("%s", msg)
549 548
550 549 def shutdown_request(self, stream, ident, parent):
551 550 self.shell.exit_now = True
552 551 content = dict(status='ok')
553 552 content.update(parent['content'])
554 553 self.session.send(stream, u'shutdown_reply', content, parent, ident=ident)
555 554 # same content, but different msg_id for broadcasting on IOPub
556 555 self._shutdown_message = self.session.msg(u'shutdown_reply',
557 556 content, parent
558 557 )
559 558
560 559 self._at_shutdown()
561 560 # call sys.exit after a short delay
562 561 loop = ioloop.IOLoop.instance()
563 562 loop.add_timeout(time.time()+0.1, loop.stop)
564 563
565 564 #---------------------------------------------------------------------------
566 565 # Engine methods
567 566 #---------------------------------------------------------------------------
568 567
    def apply_request(self, stream, ident, parent):
        """Handle an apply_request: unpack a function call from the message
        buffers, run it in the user namespace, and reply with the serialized
        result (used by IPython.parallel engines)."""
        try:
            content = parent[u'content']
            bufs = parent[u'buffers']
            msg_id = parent['header']['msg_id']
        except:
            self.log.error("Got bad msg: %s", parent, exc_info=True)
            return

        self._publish_status(u'busy', parent)

        # Set the parent message of the display hook and out streams.
        shell = self.shell
        shell.displayhook.set_parent(parent)
        shell.display_pub.set_parent(parent)
        shell.data_pub.set_parent(parent)
        try:
            sys.stdout.set_parent(parent)
        except AttributeError:
            pass
        try:
            sys.stderr.set_parent(parent)
        except AttributeError:
            pass

        # pyin_msg = self.session.msg(u'pyin',{u'code':code}, parent=parent)
        # self.iopub_socket.send(pyin_msg)
        # self.session.send(self.iopub_socket, u'pyin', {u'code':code},parent=parent)
        md = self._make_metadata(parent['metadata'])
        try:
            working = shell.user_ns

            # Unique prefix so the temporary names can't collide with user names.
            prefix = "_"+str(msg_id).replace("-","")+"_"

            f,args,kwargs = unpack_apply_message(bufs, working, copy=False)

            fname = getattr(f, '__name__', 'f')

            # NOTE(review): fname is immediately overwritten below, so the
            # getattr above looks like a dead store — confirm before removing.
            fname = prefix+"f"
            argname = prefix+"args"
            kwargname = prefix+"kwargs"
            resultname = prefix+"result"

            ns = { fname : f, argname : args, kwargname : kwargs , resultname : None }
            # print ns
            working.update(ns)
            code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname)
            try:
                exec code in shell.user_global_ns, shell.user_ns
                result = working.get(resultname)
            finally:
                # Always scrub the temporary names back out of the namespace.
                for key in ns.iterkeys():
                    working.pop(key)

            result_buf = serialize_object(result,
                buffer_threshold=self.session.buffer_threshold,
                item_threshold=self.session.item_threshold,
            )

        except:
            # invoke IPython traceback formatting
            shell.showtraceback()
            # FIXME - fish exception info out of shell, possibly left there by
            # run_code.  We'll need to clean up this logic later.
            reply_content = {}
            if shell._reply_content is not None:
                reply_content.update(shell._reply_content)
                e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply')
                reply_content['engine_info'] = e_info
                # reset after use
                shell._reply_content = None

            self.session.send(self.iopub_socket, u'pyerr', reply_content, parent=parent,
                                ident=self._topic('pyerr'))
            self.log.info("Exception in apply request:\n%s", '\n'.join(reply_content['traceback']))
            result_buf = []

            if reply_content['ename'] == 'UnmetDependency':
                md['dependencies_met'] = False
        else:
            reply_content = {'status' : 'ok'}

        # put 'ok'/'error' status in header, for scheduler introspection:
        md['status'] = reply_content['status']

        # flush i/o
        sys.stdout.flush()
        sys.stderr.flush()

        reply_msg = self.session.send(stream, u'apply_reply', reply_content,
                    parent=parent, ident=ident,buffers=result_buf, metadata=md)

        self._publish_status(u'idle', parent)
662 661
663 662 #---------------------------------------------------------------------------
664 663 # Control messages
665 664 #---------------------------------------------------------------------------
666 665
def abort_request(self, stream, ident, parent):
    """Abort one or more specific messages by id.

    If the request names no msg_ids, all queued requests are aborted
    instead. Replies with an 'abort_reply' message on *stream*.
    """
    msg_ids = parent['content'].get('msg_ids', None)
    if isinstance(msg_ids, basestring):
        msg_ids = [msg_ids]
    if not msg_ids:
        # no explicit ids given: abort everything currently queued.
        # NOTE: previously fell through to `for mid in msg_ids`, which
        # raised TypeError when msg_ids was None.
        self.abort_queues()
    else:
        for mid in msg_ids:
            self.aborted.add(str(mid))

    content = dict(status='ok')
    reply_msg = self.session.send(stream, 'abort_reply', content=content,
            parent=parent, ident=ident)
    self.log.debug("%s", reply_msg)
681 680
def clear_request(self, stream, idents, parent):
    """Reset the user namespace in response to a clear_request."""
    self.shell.reset(False)
    reply = self.session.send(stream, 'clear_reply', ident=idents,
                              parent=parent, content=dict(status='ok'))
687 686
688 687
689 688 #---------------------------------------------------------------------------
690 689 # Protected interface
691 690 #---------------------------------------------------------------------------
692 691
def _wrap_exception(self, method=None):
    """Package the current exception for sending over the wire.

    The import lives here because _wrap_exception is only used in
    parallel, and parallel has a higher minimum pyzmq version.
    """
    from IPython.parallel.error import wrap_exception
    e_info = dict(engine_uuid=self.ident, engine_id=self.int_id,
                  method=method)
    return wrap_exception(e_info)
700 699
def _topic(self, topic):
    """Return *topic* prefixed with this kernel/engine's identity (bytes)."""
    if self.int_id >= 0:
        prefix = "engine.%i" % self.int_id
    else:
        prefix = "kernel.%s" % self.ident
    return py3compat.cast_bytes("%s.%s" % (prefix, topic))
709 708
710 709 def _abort_queues(self):
711 710 for stream in self.shell_streams:
712 711 if stream:
713 712 self._abort_queue(stream)
714 713
def _abort_queue(self, stream):
    """Drain *stream*, replying 'aborted' to every pending request.

    Non-blocking: returns as soon as no more messages are queued.
    """
    poller = zmq.Poller()
    poller.register(stream.socket, zmq.POLLIN)
    while True:
        # NOBLOCK recv: msg comes back None once the queue is drained
        idents,msg = self.session.recv(stream, zmq.NOBLOCK, content=True)
        if msg is None:
            return

        self.log.info("Aborting:")
        self.log.info("%s", msg)
        msg_type = msg['header']['msg_type']
        # e.g. 'execute_request' -> 'execute_reply'
        reply_type = msg_type.split('_')[0] + '_reply'

        status = {'status' : 'aborted'}
        md = {'engine' : self.ident}
        md.update(status)
        reply_msg = self.session.send(stream, reply_type, metadata=md,
                    content=status, parent=msg, ident=idents)
        self.log.debug("%s", reply_msg)
        # We need to wait a bit for requests to come in. This can probably
        # be set shorter for true asynchronous clients.
        poller.poll(50)
737 736
738 737
def _no_raw_input(self):
    """Substitute for _raw_input when the frontend has no stdin support.

    Always raises StdinNotImplementedError.
    """
    raise StdinNotImplementedError("raw_input was called, but this "
                                   "frontend does not support stdin.")
744 743
def _raw_input(self, prompt, ident, parent):
    """Forward a raw_input call to the frontend over the stdin socket.

    Blocks until a reply arrives; returns the entered value, or raises
    EOFError if the frontend signalled EOF ('\\x04').
    """
    # Flush output before making the request.
    sys.stderr.flush()
    sys.stdout.flush()
    # flush the stdin socket, to purge stale replies
    while True:
        try:
            self.stdin_socket.recv_multipart(zmq.NOBLOCK)
        except zmq.ZMQError as e:
            if e.errno == zmq.EAGAIN:
                # queue is empty: done purging
                break
            else:
                raise

    # Send the input request.
    content = json_clean(dict(prompt=prompt))
    self.session.send(self.stdin_socket, u'input_request', content, parent,
                      ident=ident)

    # Await a response, discarding invalid messages until one parses.
    while True:
        try:
            ident, reply = self.session.recv(self.stdin_socket, 0)
        except Exception:
            self.log.warn("Invalid Message:", exc_info=True)
        else:
            break
    try:
        value = reply['content']['value']
    except:
        # malformed reply: log it and fall back to an empty string
        self.log.error("Got bad raw_input reply: ")
        self.log.error("%s", parent)
        value = ''
    if value == '\x04':
        # EOF
        raise EOFError
    return value
782 781
783 782 def _complete(self, msg):
784 783 c = msg['content']
785 784 try:
786 785 cpos = int(c['cursor_pos'])
787 786 except:
788 787 # If we don't get something that we can convert to an integer, at
789 788 # least attempt the completion guessing the cursor is at the end of
790 789 # the text, if there's any, and otherwise of the line
791 790 cpos = len(c['text'])
792 791 if cpos==0:
793 792 cpos = len(c['line'])
794 793 return self.shell.complete(c['text'], c['line'], cpos)
795 794
def _at_shutdown(self):
    """Actions taken at shutdown by the kernel, called by python's atexit.
    """
    # io.rprint("Kernel at_shutdown") # dbg
    if self._shutdown_message is not None:
        # re-broadcast the shutdown notification on IOPub so clients see it
        self.session.send(self.iopub_socket, self._shutdown_message, ident=self._topic('shutdown'))
        self.log.debug("%s", self._shutdown_message)
    # flush any outgoing messages before the process exits
    [ s.flush(zmq.POLLOUT) for s in self.shell_streams ]
804 803
@@ -1,23 +1,21 b''
1 import logging
2 1 from logging import INFO, DEBUG, WARN, ERROR, FATAL
3 2
4 import zmq
5 3 from zmq.log.handlers import PUBHandler
6 4
class EnginePUBHandler(PUBHandler):
    """A PUBHandler subclass whose root_topic reflects the engine id."""
    engine=None

    def __init__(self, engine, *args, **kwargs):
        PUBHandler.__init__(self,*args, **kwargs)
        self.engine = engine

    @property
    def root_topic(self):
        """this is a property, in case the handler is created
        before the engine gets registered with an id"""
        engine_id = getattr(self.engine, 'id', None)
        if isinstance(engine_id, int):
            return "engine.%i" % engine_id
        return "engine"
23 21
@@ -1,215 +1,212 b''
1 1 """Produce SVG versions of active plots for display by the rich Qt frontend.
2 2 """
3 3 #-----------------------------------------------------------------------------
4 4 # Imports
5 5 #-----------------------------------------------------------------------------
6 6 from __future__ import print_function
7 7
8 # Standard library imports
9 import sys
10
11 8 # Third-party imports
12 9 import matplotlib
13 10 from matplotlib.backends.backend_agg import new_figure_manager, FigureCanvasAgg
14 11 from matplotlib._pylab_helpers import Gcf
15 12
16 13 # Local imports.
17 14 from IPython.config.configurable import SingletonConfigurable
18 15 from IPython.core.display import display
19 16 from IPython.core.displaypub import publish_display_data
20 17 from IPython.core.pylabtools import print_figure, select_figure_format
21 18 from IPython.utils.traitlets import Dict, Instance, CaselessStrEnum, Bool
22 19 from IPython.utils.warn import warn
23 20
24 21 #-----------------------------------------------------------------------------
25 22 # Configurable for inline backend options
26 23 #-----------------------------------------------------------------------------
27 24 # inherit from InlineBackendConfig for deprecation purposes
class InlineBackendConfig(SingletonConfigurable):
    # Deprecated placeholder: retained so existing configuration that targets
    # InlineBackendConfig still loads; InlineBackend subclasses it below.
    pass
30 27
class InlineBackend(InlineBackendConfig):
    """An object to store configuration of the inline backend."""

    def _config_changed(self, name, old, new):
        # warn on change of renamed config section
        if new.InlineBackendConfig != old.InlineBackendConfig:
            warn("InlineBackendConfig has been renamed to InlineBackend")
        super(InlineBackend, self)._config_changed(name, old, new)

    # The typical default figure size is too large for inline use,
    # so we shrink the figure size to 6x4, and tweak fonts to
    # make that fit.
    rc = Dict({'figure.figsize': (6.0,4.0),
        # play nicely with white background in the Qt and notebook frontend
        'figure.facecolor': 'white',
        'figure.edgecolor': 'white',
        # 12pt labels get cutoff on 6x4 logplots, so use 10pt.
        'font.size': 10,
        # 72 dpi matches SVG/qtconsole
        # this only affects PNG export, as SVG has no dpi setting
        'savefig.dpi': 72,
        # 10pt still needs a little more room on the xlabel:
        'figure.subplot.bottom' : .125
        }, config=True,
        help="""Subset of matplotlib rcParams that should be different for the
        inline backend."""
    )

    figure_format = CaselessStrEnum(['svg', 'png', 'retina'], default_value='png', config=True,
        help="The image format for figures with the inline backend.")

    def _figure_format_changed(self, name, old, new):
        # propagate the new format into the shell's display machinery, if any
        if self.shell is None:
            return
        else:
            select_figure_format(self.shell, new)

    close_figures = Bool(True, config=True,
        help="""Close all figures at the end of each cell.

        When True, ensures that each cell starts with no active figures, but it
        also means that one must keep track of references in order to edit or
        redraw figures in subsequent cells. This mode is ideal for the notebook,
        where residual plots from other cells might be surprising.

        When False, one must call figure() to create new figures. This means
        that gcf() and getfigs() can reference figures created in other cells,
        and the active figure can continue to be edited with pylab/pyplot
        methods that reference the current active figure. This mode facilitates
        iterative editing of figures, and behaves most consistently with
        other matplotlib backends, but figure barriers between cells must
        be explicit.
        """)

    # set when the inline backend is activated in an interactive shell
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
86 83
87 84
88 85 #-----------------------------------------------------------------------------
89 86 # Functions
90 87 #-----------------------------------------------------------------------------
91 88
def show(close=None):
    """Show all figures as SVG/PNG payloads sent to the IPython clients.

    Parameters
    ----------
    close : bool, optional
        If true, a ``plt.close('all')`` call is automatically issued after
        sending all the figures. If this is set, the figures will entirely
        removed from the internal list of figures.
    """
    if close is None:
        # fall back to the configured default
        close = InlineBackend.instance().close_figures
    try:
        for figure_manager in Gcf.get_all_fig_managers():
            display(figure_manager.canvas.figure)
    finally:
        # reset the pending-draw queue even if display raised
        show._to_draw = []
        if close:
            matplotlib.pyplot.close('all')
111 108
112 109
113 110
# Module-level state attached to show():
# set True by draw_if_interactive, reset by flush_figures
show._draw_called = False
# list of figures to draw when flush_figures is called
show._to_draw = []
118 115
119 116
def draw_if_interactive():
    """Queue the active figure for display at the end of execution.

    Is called after every pylab drawing command.
    """
    # signal that the current active figure should be sent at the end of
    # execution. Also sets the _draw_called flag, signaling that there will be
    # something to send. At the end of the code execution, a separate call to
    # flush_figures() will act upon these values

    fig = Gcf.get_active().canvas.figure

    # Hack: matplotlib FigureManager objects in interacive backends (at least
    # in some of them) monkeypatch the figure object and add a .show() method
    # to it. This applies the same monkeypatch in order to support user code
    # that might expect `.show()` to be part of the official API of figure
    # objects.
    # For further reference:
    # https://github.com/ipython/ipython/issues/1612
    # https://github.com/matplotlib/matplotlib/issues/835

    if not hasattr(fig, 'show'):
        # Queue up `fig` for display
        fig.show = lambda *a: display(fig)

    # If matplotlib was manually set to non-interactive mode, this function
    # should be a no-op (otherwise we'll generate duplicate plots, since a user
    # who set ioff() manually expects to make separate draw/show calls).
    if not matplotlib.is_interactive():
        return

    # ensure current figure will be drawn, and each subsequent call
    # of draw_if_interactive() moves the active figure to ensure it is
    # drawn last
    try:
        show._to_draw.remove(fig)
    except ValueError:
        # ensure it only appears in the draw list once
        pass
    # Queue up the figure for drawing in next show() call
    show._to_draw.append(fig)
    show._draw_called = True
161 158
162 159
def flush_figures():
    """Send all figures that changed

    This is meant to be called automatically and will call show() if, during
    prior code execution, there had been any calls to draw_if_interactive.

    This function is meant to be used as a post_execute callback in IPython,
    so user-caused errors are handled with showtraceback() instead of being
    allowed to raise. If this function is not called from within IPython,
    then these exceptions will raise.
    """
    if not show._draw_called:
        # nothing was drawn this cell; nothing to send
        return

    if InlineBackend.instance().close_figures:
        # ignore the tracking, just draw and close all figures
        try:
            return show(True)
        except Exception as e:
            # safely show traceback if in IPython, else raise
            try:
                get_ipython
            except NameError:
                raise e
            else:
                get_ipython().showtraceback()
                return
    try:
        # exclude any figures that were closed:
        active = set([fm.canvas.figure for fm in Gcf.get_all_fig_managers()])
        for fig in [ fig for fig in show._to_draw if fig in active ]:
            try:
                display(fig)
            except Exception as e:
                # safely show traceback if in IPython, else raise
                try:
                    get_ipython
                except NameError:
                    raise e
                else:
                    get_ipython().showtraceback()
                    break
    finally:
        # clear flags for next round
        show._to_draw = []
        show._draw_called = False
209 206
210 207
# Changes to matplotlib in version 1.2 requires a mpl backend to supply a default
# figurecanvas. This is set here to a Agg canvas
# See https://github.com/matplotlib/matplotlib/pull/1125
# (the name `FigureCanvas` is what mpl looks up on the backend module)
FigureCanvas = FigureCanvasAgg
215 212
@@ -1,205 +1,198 b''
1 1 """serialization utilities for apply messages
2 2
3 3 Authors:
4 4
5 5 * Min RK
6 6 """
7 7 #-----------------------------------------------------------------------------
8 8 # Copyright (C) 2010-2011 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-----------------------------------------------------------------------------
13 13
14 14 #-----------------------------------------------------------------------------
15 15 # Imports
16 16 #-----------------------------------------------------------------------------
17 17
18 # Standard library imports
19 import logging
20 import os
21 import re
22 import socket
23 import sys
24
# Prefer the fast C implementation on Python 2; fall back to the pure-Python
# module (the only one on Python 3). Catch ImportError specifically rather
# than a bare except, which would mask unrelated failures.
try:
    import cPickle
    pickle = cPickle
except ImportError:
    cPickle = None
    import pickle
31 24
32 25
33 26 # IPython imports
34 27 from IPython.utils import py3compat
35 28 from IPython.utils.data import flatten
36 29 from IPython.utils.pickleutil import (
37 30 can, uncan, can_sequence, uncan_sequence, CannedObject,
38 31 istype, sequence_types,
39 32 )
40 33
41 34 if py3compat.PY3:
42 35 buffer = memoryview
43 36
44 37 #-----------------------------------------------------------------------------
45 38 # Serialization Functions
46 39 #-----------------------------------------------------------------------------
47 40
48 41 # default values for the thresholds:
49 42 MAX_ITEMS = 64
50 43 MAX_BYTES = 1024
51 44
def _extract_buffers(obj, threshold=MAX_BYTES):
    """Pull out and return buffers larger than *threshold* from a CannedObject."""
    extracted = []
    if isinstance(obj, CannedObject) and obj.buffers:
        for idx, buf in enumerate(obj.buffers):
            if len(buf) > threshold:
                # large buffer: ship separately rather than pickling it
                obj.buffers[idx] = None
                extracted.append(buf)
            elif isinstance(buf, buffer):
                # small buffer objects pickle as broken pointers,
                # so coerce them to bytes for inline transport
                obj.buffers[idx] = bytes(buf)
    return extracted
66 59
def _restore_buffers(obj, buffers):
    """Reinsert buffers previously removed by _extract_buffers."""
    if isinstance(obj, CannedObject) and obj.buffers:
        for idx, buf in enumerate(obj.buffers):
            if buf is None:
                obj.buffers[idx] = buffers.pop(0)
73 66
def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
    """Serialize an object into a list of sendable buffers.

    Parameters
    ----------
    obj : object
        The object to be serialized
    buffer_threshold : int
        The threshold (in bytes) for pulling out data buffers
        to avoid pickling them.
    item_threshold : int
        The maximum number of items over which canning will iterate.
        Containers (lists, dicts) larger than this will be pickled without
        introspection.

    Returns
    -------
    [bufs] : list of buffers representing the serialized object.
    """
    buffers = []
    if istype(obj, sequence_types) and len(obj) < item_threshold:
        canned = can_sequence(obj)
        for element in canned:
            buffers.extend(_extract_buffers(element, buffer_threshold))
    elif istype(obj, dict) and len(obj) < item_threshold:
        canned = {}
        for key in sorted(obj.iterkeys()):
            item = can(obj[key])
            buffers.extend(_extract_buffers(item, buffer_threshold))
            canned[key] = item
    else:
        canned = can(obj)
        buffers.extend(_extract_buffers(canned, buffer_threshold))

    # the pickled canned object always rides in the first buffer
    buffers.insert(0, pickle.dumps(canned, -1))
    return buffers
111 104
def unserialize_object(buffers, g=None):
    """Reconstruct an object serialized by serialize_object from data buffers.

    Parameters
    ----------
    bufs : list of buffers/bytes
    g : globals to be used when uncanning

    Returns
    -------
    (newobj, bufs) : unpacked object, and the list of remaining unused buffers.
    """
    bufs = list(buffers)
    pobj = bufs.pop(0)
    if not isinstance(pobj, bytes):
        # a zmq message: coerce it to bytes before unpickling
        pobj = bytes(pobj)
    canned = pickle.loads(pobj)
    if istype(canned, sequence_types) and len(canned) < MAX_ITEMS:
        for item in canned:
            _restore_buffers(item, bufs)
        newobj = uncan_sequence(canned, g)
    elif isinstance(canned, dict) and len(canned) < MAX_ITEMS:
        newobj = {}
        for key in sorted(canned.iterkeys()):
            item = canned[key]
            _restore_buffers(item, bufs)
            newobj[key] = uncan(item, g)
    else:
        _restore_buffers(canned, bufs)
        newobj = uncan(canned, g)

    return newobj, bufs
148 141
def pack_apply_message(f, args, kwargs, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
    """pack up a function, args, and kwargs to be sent over the wire

    Each element of args/kwargs will be canned for special treatment,
    but inspection will not go any deeper than that.

    Any object whose data is larger than `threshold` will not have their data copied
    (only numpy arrays and bytes/buffers support zero-copy)

    Message will be a list of bytes/buffers of the format:

    [ cf, pinfo, <arg_bufs>, <kwarg_bufs> ]

    With length at least two + len(args) + len(kwargs)
    """

    arg_bufs = flatten(serialize_object(a, buffer_threshold, item_threshold)
                       for a in args)

    kw_keys = sorted(kwargs.keys())
    kwarg_bufs = flatten(serialize_object(kwargs[key], buffer_threshold, item_threshold)
                         for key in kw_keys)

    # header info describing how many buffers belong to args vs kwargs
    info = dict(nargs=len(args), narg_bufs=len(arg_bufs), kw_keys=kw_keys)

    msg = [pickle.dumps(can(f), -1), pickle.dumps(info, -1)]
    msg.extend(arg_bufs)
    msg.extend(kwarg_bufs)

    return msg
178 171
def unpack_apply_message(bufs, g=None, copy=True):
    """unpack f,args,kwargs from buffers packed by pack_apply_message()
    Returns: original f,args,kwargs"""
    bufs = list(bufs)  # allow us to pop
    assert len(bufs) >= 2, "not enough buffers!"
    if not copy:
        # zmq Message objects: extract the raw bytes of the two header frames
        for i in range(2):
            bufs[i] = bufs[i].bytes
    f = uncan(pickle.loads(bufs.pop(0)), g)
    info = pickle.loads(bufs.pop(0))
    arg_bufs = bufs[:info['narg_bufs']]
    kwarg_bufs = bufs[info['narg_bufs']:]

    args = []
    for _ in range(info['nargs']):
        arg, arg_bufs = unserialize_object(arg_bufs, g)
        args.append(arg)
    args = tuple(args)
    assert not arg_bufs, "Shouldn't be any arg bufs left over"

    kwargs = {}
    for key in info['kw_keys']:
        value, kwarg_bufs = unserialize_object(kwarg_bufs, g)
        kwargs[key] = value
    assert not kwarg_bufs, "Shouldn't be any kwarg bufs left over"

    return f,args,kwargs
205 198
@@ -1,807 +1,806 b''
1 1 """Session object for building, serializing, sending, and receiving messages in
2 2 IPython. The Session object supports serialization, HMAC signatures, and
3 3 metadata on messages.
4 4
5 5 Also defined here are utilities for working with Sessions:
6 6 * A SessionFactory to be used as a base class for configurables that work with
7 7 Sessions.
8 8 * A Message object for convenience that allows attribute-access to the msg dict.
9 9
10 10 Authors:
11 11
12 12 * Min RK
13 13 * Brian Granger
14 14 * Fernando Perez
15 15 """
16 16 #-----------------------------------------------------------------------------
17 17 # Copyright (C) 2010-2011 The IPython Development Team
18 18 #
19 19 # Distributed under the terms of the BSD License. The full license is in
20 20 # the file COPYING, distributed as part of this software.
21 21 #-----------------------------------------------------------------------------
22 22
23 23 #-----------------------------------------------------------------------------
24 24 # Imports
25 25 #-----------------------------------------------------------------------------
26 26
27 27 import hmac
28 28 import logging
29 29 import os
30 30 import pprint
31 31 import random
32 32 import uuid
33 33 from datetime import datetime
34 34
35 35 try:
36 36 import cPickle
37 37 pickle = cPickle
38 38 except:
39 39 cPickle = None
40 40 import pickle
41 41
42 42 import zmq
43 43 from zmq.utils import jsonapi
44 44 from zmq.eventloop.ioloop import IOLoop
45 45 from zmq.eventloop.zmqstream import ZMQStream
46 46
47 from IPython.config.application import Application, boolean_flag
48 47 from IPython.config.configurable import Configurable, LoggingConfigurable
49 48 from IPython.utils import io
50 49 from IPython.utils.importstring import import_item
51 50 from IPython.utils.jsonutil import extract_dates, squash_dates, date_default
52 51 from IPython.utils.py3compat import str_to_bytes, str_to_unicode
53 52 from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,
54 53 DottedObjectName, CUnicode, Dict, Integer)
55 54 from IPython.kernel.zmq.serialize import MAX_ITEMS, MAX_BYTES
56 55
57 56 #-----------------------------------------------------------------------------
58 57 # utility functions
59 58 #-----------------------------------------------------------------------------
60 59
def squash_unicode(obj):
    """Recursively coerce unicode back to bytestrings (in place for containers)."""
    if isinstance(obj, dict):
        for key in obj.keys():
            obj[key] = squash_unicode(obj[key])
            if isinstance(key, unicode):
                # re-key with the bytes version of the key
                obj[squash_unicode(key)] = obj.pop(key)
    elif isinstance(obj, list):
        for idx, item in enumerate(obj):
            obj[idx] = squash_unicode(item)
    elif isinstance(obj, unicode):
        obj = obj.encode('utf8')
    return obj
74 73
75 74 #-----------------------------------------------------------------------------
76 75 # globals and defaults
77 76 #-----------------------------------------------------------------------------
78 77
# ISO8601-ify datetime objects
json_packer = lambda obj: jsonapi.dumps(obj, default=date_default)
json_unpacker = lambda s: extract_dates(jsonapi.loads(s))

# pickle-based alternatives (protocol -1 == highest available)
pickle_packer = lambda o: pickle.dumps(o,-1)
pickle_unpacker = pickle.loads

# JSON is the default wire format
default_packer = json_packer
default_unpacker = json_unpacker

# frame delimiter separating routing identities from message parts
DELIM = b"<IDS|MSG>"
# singleton dummy tracker, which will always report as done
DONE = zmq.MessageTracker()
92 91
93 92 #-----------------------------------------------------------------------------
94 93 # Mixin tools for apps that use Sessions
95 94 #-----------------------------------------------------------------------------
96 95
# command-line aliases for Session traits
session_aliases = dict(
    ident = 'Session.session',
    user = 'Session.username',
    keyfile = 'Session.keyfile',
)

# command-line flags toggling HMAC message authentication
session_flags  = {
    'secure' : ({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())),
                            'keyfile' : '' }},
        """Use HMAC digests for authentication of messages.
        Setting this flag will generate a new UUID to use as the HMAC key.
        """),
    'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }},
        """Don't authenticate messages."""),
}
112 111
def default_secure(cfg):
    """Set the default behavior for a config environment to be secure.

    If Session.key/keyfile have not been set, set Session.key to
    a new random UUID.
    """
    if 'Session' in cfg and ('key' in cfg.Session or 'keyfile' in cfg.Session):
        # explicitly configured already; leave it alone
        return
    # key/keyfile not specified, generate new UUID:
    cfg.Session.key = str_to_bytes(str(uuid.uuid4()))
125 124
126 125
127 126 #-----------------------------------------------------------------------------
128 127 # Classes
129 128 #-----------------------------------------------------------------------------
130 129
class SessionFactory(LoggingConfigurable):
    """The Base class for configurables that have a Session, Context, logger,
    and IOLoop.
    """

    # name for this object's logger
    logname = Unicode('')
    def _logname_changed(self, name, old, new):
        # swap in the logger matching the new name
        self.log = logging.getLogger(new)

    # not configurable:
    context = Instance('zmq.Context')
    def _context_default(self):
        # share the process-wide zmq Context
        return zmq.Context.instance()

    session = Instance('IPython.kernel.zmq.session.Session')

    loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
    def _loop_default(self):
        return IOLoop.instance()

    def __init__(self, **kwargs):
        super(SessionFactory, self).__init__(**kwargs)

        if self.session is None:
            # construct the session
            self.session = Session(**kwargs)
157 156
158 157
class Message(object):
    """A simple message object that maps dict keys to attributes.

    A Message can be created from a dict and a dict from a Message instance
    simply by calling dict(msg_obj)."""

    def __init__(self, msg_dict):
        attrs = self.__dict__
        for key, value in dict(msg_dict).iteritems():
            if isinstance(value, dict):
                # nested dicts become nested Messages
                value = Message(value)
            attrs[key] = value

    # Having this iterator lets dict(msg_obj) work out of the box.
    def __iter__(self):
        return iter(self.__dict__.iteritems())

    def __repr__(self):
        return repr(self.__dict__)

    def __str__(self):
        return pprint.pformat(self.__dict__)

    def __contains__(self, k):
        return k in self.__dict__

    def __getitem__(self, k):
        return self.__dict__[k]
187 186
188 187
def msg_header(msg_id, msg_type, username, session):
    """Build a message header dict with a fresh timestamp.

    Replaces the previous ``return locals()``, which was brittle: any
    temporary variable added to the function would leak into the header.
    """
    return dict(msg_id=msg_id, msg_type=msg_type, username=username,
                session=session, date=datetime.now())
192 191
def extract_header(msg_or_header):
    """Given a message or header, return the header."""
    if not msg_or_header:
        return {}
    try:
        # See if msg_or_header is the entire message.
        h = msg_or_header['header']
    except KeyError:
        # Not a full message: it must at least look like a header itself.
        msg_or_header['msg_id']  # raises KeyError if it is neither
        h = msg_or_header
    if not isinstance(h, dict):
        h = dict(h)
    return h
211 210
212 211 class Session(Configurable):
213 212 """Object for handling serialization and sending of messages.
214 213
215 214 The Session object handles building messages and sending them
216 215 with ZMQ sockets or ZMQStream objects. Objects can communicate with each
217 216 other over the network via Session objects, and only need to work with the
218 217 dict-based IPython message spec. The Session will handle
219 218 serialization/deserialization, security, and metadata.
220 219
221 220 Sessions support configurable serialiization via packer/unpacker traits,
222 221 and signing with HMAC digests via the key/keyfile traits.
223 222
224 223 Parameters
225 224 ----------
226 225
227 226 debug : bool
228 227 whether to trigger extra debugging statements
229 228 packer/unpacker : str : 'json', 'pickle' or import_string
230 229 importstrings for methods to serialize message parts. If just
231 230 'json' or 'pickle', predefined JSON and pickle packers will be used.
232 231 Otherwise, the entire importstring must be used.
233 232
234 233 The functions must accept at least valid JSON input, and output *bytes*.
235 234
236 235 For example, to use msgpack:
237 236 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
238 237 pack/unpack : callables
239 238 You can also set the pack/unpack callables for serialization directly.
240 239 session : bytes
241 240 the ID of this Session object. The default is to generate a new UUID.
242 241 username : unicode
243 242 username added to message headers. The default is to ask the OS.
244 243 key : bytes
245 244 The key used to initialize an HMAC signature. If unset, messages
246 245 will not be signed or checked.
247 246 keyfile : filepath
248 247 The file containing a key. If this is set, `key` will be initialized
249 248 to the contents of the file.
250 249
251 250 """
252 251
253 252 debug=Bool(False, config=True, help="""Debug output in the Session""")
254 253
255 254 packer = DottedObjectName('json',config=True,
256 255 help="""The name of the packer for serializing messages.
257 256 Should be one of 'json', 'pickle', or an import name
258 257 for a custom callable serializer.""")
def _packer_changed(self, name, old, new):
    """Swap in the pack/unpack callables matching the new packer name."""
    lowered = new.lower()
    if lowered == 'json':
        self.pack = json_packer
        self.unpack = json_unpacker
        self.unpacker = new
    elif lowered == 'pickle':
        self.pack = pickle_packer
        self.unpack = pickle_unpacker
        self.unpacker = new
    else:
        # custom serializer: resolve the import string
        self.pack = import_item(str(new))
270 269
271 270 unpacker = DottedObjectName('json', config=True,
272 271 help="""The name of the unpacker for unserializing messages.
273 272 Only used with custom functions for `packer`.""")
def _unpacker_changed(self, name, old, new):
    """Swap in the pack/unpack callables matching the new unpacker name."""
    lowered = new.lower()
    if lowered == 'json':
        self.pack = json_packer
        self.unpack = json_unpacker
        self.packer = new
    elif lowered == 'pickle':
        self.pack = pickle_packer
        self.unpack = pickle_unpacker
        self.packer = new
    else:
        # custom serializer: resolve the import string
        self.unpack = import_item(str(new))
285 284
    session = CUnicode(u'', config=True,
        help="""The UUID identifying this session.""")
    def _session_default(self):
        # Lazily generate a fresh UUID the first time .session is read,
        # keeping the cached bytes form (bsession) in sync.
        u = unicode(uuid.uuid4())
        self.bsession = u.encode('ascii')
        return u

    def _session_changed(self, name, old, new):
        # keep the cached bytes form in sync with the unicode trait
        self.bsession = self.session.encode('ascii')

    # bsession is the session as bytes
    bsession = CBytes(b'')

    username = Unicode(str_to_unicode(os.environ.get('USER', 'username')),
        help="""Username for the Session. Default is your system username.""",
        config=True)

    metadata = Dict({}, config=True,
        help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")
305 304
306 305 # message signature related traits:
307 306
308 307 key = CBytes(b'', config=True,
309 308 help="""execution key, for extra authentication.""")
310 309 def _key_changed(self, name, old, new):
311 310 if new:
312 311 self.auth = hmac.HMAC(new)
313 312 else:
314 313 self.auth = None
315 314
316 315 auth = Instance(hmac.HMAC)
317 316
318 317 digest_history = Set()
319 318 digest_history_size = Integer(2**16, config=True,
320 319 help="""The maximum number of digests to remember.
321 320
322 321 The digest history will be culled when it exceeds this value.
323 322 """
324 323 )
325 324
    keyfile = Unicode('', config=True,
        help="""path to file containing execution key.""")
    def _keyfile_changed(self, name, old, new):
        # Load the signing key from the file; assigning self.key
        # triggers _key_changed, which builds the HMAC prototype.
        with open(new, 'rb') as f:
            self.key = f.read().strip()

    # pid of the creating process, for protecting against sends from forks
    pid = Integer()
334 333
    # serialization traits:

    pack = Any(default_packer) # the actual packer function
    def _pack_changed(self, name, old, new):
        # validate eagerly so a bad packer fails at assignment, not at send
        if not callable(new):
            raise TypeError("packer must be callable, not %s"%type(new))

    unpack = Any(default_unpacker) # the actual unpacker function
    def _unpack_changed(self, name, old, new):
        # unpacker is not checked - it is assumed to be
        if not callable(new):
            raise TypeError("unpacker must be callable, not %s"%type(new))

    # thresholds:
    copy_threshold = Integer(2**16, config=True,
        help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
    buffer_threshold = Integer(MAX_BYTES, config=True,
        help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.")
    item_threshold = Integer(MAX_ITEMS, config=True,
        help="""The maximum number of items for a container to be introspected for custom serialization.
        Containers larger than this are pickled outright.
        """
    )
358 357
359 358
    def __init__(self, **kwargs):
        """create a Session object

        Parameters
        ----------

        debug : bool
            whether to trigger extra debugging statements
        packer/unpacker : str : 'json', 'pickle' or import_string
            importstrings for methods to serialize message parts.  If just
            'json' or 'pickle', predefined JSON and pickle packers will be used.
            Otherwise, the entire importstring must be used.

            The functions must accept at least valid JSON input, and output
            *bytes*.

            For example, to use msgpack:
            packer = 'msgpack.packb', unpacker='msgpack.unpackb'
        pack/unpack : callables
            You can also set the pack/unpack callables for serialization
            directly.
        session : unicode (must be ascii)
            the ID of this Session object.  The default is to generate a new
            UUID.
        bsession : bytes
            The session as bytes
        username : unicode
            username added to message headers.  The default is to ask the OS.
        key : bytes
            The key used to initialize an HMAC signature.  If unset, messages
            will not be signed or checked.
        keyfile : filepath
            The file containing a key.  If this is set, `key` will be
            initialized to the contents of the file.
        """
        super(Session, self).__init__(**kwargs)
        self._check_packers()
        # cache the packed form of an empty dict, used when content is None
        self.none = self.pack({})
        # ensure self._session_default() has run if necessary, so bsession is defined:
        self.session
        # remember the creating process, to refuse sends from forked children
        self.pid = os.getpid()
401 400
402 401 @property
403 402 def msg_id(self):
404 403 """always return new uuid"""
405 404 return str(uuid.uuid4())
406 405
407 406 def _check_packers(self):
408 407 """check packers for binary data and datetime support."""
409 408 pack = self.pack
410 409 unpack = self.unpack
411 410
412 411 # check simple serialization
413 412 msg = dict(a=[1,'hi'])
414 413 try:
415 414 packed = pack(msg)
416 415 except Exception:
417 416 raise ValueError("packer could not serialize a simple message")
418 417
419 418 # ensure packed message is bytes
420 419 if not isinstance(packed, bytes):
421 420 raise ValueError("message packed to %r, but bytes are required"%type(packed))
422 421
423 422 # check that unpack is pack's inverse
424 423 try:
425 424 unpacked = unpack(packed)
426 425 except Exception:
427 426 raise ValueError("unpacker could not handle the packer's output")
428 427
429 428 # check datetime support
430 429 msg = dict(t=datetime.now())
431 430 try:
432 431 unpacked = unpack(pack(msg))
433 432 except Exception:
434 433 self.pack = lambda o: pack(squash_dates(o))
435 434 self.unpack = lambda s: extract_dates(unpack(s))
436 435
    def msg_header(self, msg_type):
        # Delegate to the module-level msg_header() helper (the method name
        # shadows it only in class scope, not inside this body), filling in
        # a fresh msg_id and this session's username/session identity.
        return msg_header(self.msg_id, msg_type, self.username, self.session)
439 438
440 439 def msg(self, msg_type, content=None, parent=None, header=None, metadata=None):
441 440 """Return the nested message dict.
442 441
443 442 This format is different from what is sent over the wire. The
444 443 serialize/unserialize methods converts this nested message dict to the wire
445 444 format, which is a list of message parts.
446 445 """
447 446 msg = {}
448 447 header = self.msg_header(msg_type) if header is None else header
449 448 msg['header'] = header
450 449 msg['msg_id'] = header['msg_id']
451 450 msg['msg_type'] = header['msg_type']
452 451 msg['parent_header'] = {} if parent is None else extract_header(parent)
453 452 msg['content'] = {} if content is None else content
454 453 msg['metadata'] = self.metadata.copy()
455 454 if metadata is not None:
456 455 msg['metadata'].update(metadata)
457 456 return msg
458 457
459 458 def sign(self, msg_list):
460 459 """Sign a message with HMAC digest. If no auth, return b''.
461 460
462 461 Parameters
463 462 ----------
464 463 msg_list : list
465 464 The [p_header,p_parent,p_content] part of the message list.
466 465 """
467 466 if self.auth is None:
468 467 return b''
469 468 h = self.auth.copy()
470 469 for m in msg_list:
471 470 h.update(m)
472 471 return str_to_bytes(h.hexdigest())
473 472
    def serialize(self, msg, ident=None):
        """Serialize the message components to bytes.

        This is roughly the inverse of unserialize. The serialize/unserialize
        methods work with full message lists, whereas pack/unpack work with
        the individual message parts in the message list.

        Parameters
        ----------
        msg : dict or Message
            The nested message dict as returned by the self.msg method.

        Returns
        -------
        msg_list : list
            The list of bytes objects to be sent with the format:
            [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_metadata,p_content,
             buffer1,buffer2,...]. In this list, the p_* entities are
            the packed or serialized versions, so if JSON is used, these
            are utf8 encoded JSON strings.
        """
        content = msg.get('content', {})
        if content is None:
            # reuse the pre-packed empty dict cached in __init__
            content = self.none
        elif isinstance(content, dict):
            content = self.pack(content)
        elif isinstance(content, bytes):
            # content is already packed, as in a relayed message
            pass
        elif isinstance(content, unicode):
            # should be bytes, but JSON often spits out unicode
            content = content.encode('utf8')
        else:
            raise TypeError("Content incorrect type: %s"%type(content))

        # the four frames that get signed: header, parent, metadata, content
        real_message = [self.pack(msg['header']),
                        self.pack(msg['parent_header']),
                        self.pack(msg['metadata']),
                        content,
        ]

        to_send = []

        if isinstance(ident, list):
            # accept list of idents
            to_send.extend(ident)
        elif ident is not None:
            to_send.append(ident)
        # DELIM separates the routing prefix from the signed frames
        to_send.append(DELIM)

        # the HMAC covers only the four real message frames, not the idents
        signature = self.sign(real_message)
        to_send.append(signature)

        to_send.extend(real_message)

        return to_send
530 529
    def send(self, stream, msg_or_type, content=None, parent=None, ident=None,
             buffers=None, track=False, header=None, metadata=None):
        """Build and send a message via stream or socket.

        The message format used by this function internally is as follows:

        [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_content,
         buffer1,buffer2,...]

        The serialize/unserialize methods convert the nested message dict into this
        format.

        Parameters
        ----------

        stream : zmq.Socket or ZMQStream
            The socket-like object used to send the data.
        msg_or_type : str or Message/dict
            Normally, msg_or_type will be a msg_type unless a message is being
            sent more than once. If a header is supplied, this can be set to
            None and the msg_type will be pulled from the header.

        content : dict or None
            The content of the message (ignored if msg_or_type is a message).
        header : dict or None
            The header dict for the message (ignored if msg_to_type is a message).
        parent : Message or dict or None
            The parent or parent header describing the parent of this message
            (ignored if msg_or_type is a message).
        ident : bytes or list of bytes
            The zmq.IDENTITY routing path.
        metadata : dict or None
            The metadata describing the message
        buffers : list or None
            The already-serialized buffers to be appended to the message.
        track : bool
            Whether to track.  Only for use with Sockets, because ZMQStream
            objects cannot track messages.


        Returns
        -------
        msg : dict
            The constructed message.
        """
        if not isinstance(stream, zmq.Socket):
            # ZMQStreams and dummy sockets do not support tracking.
            track = False

        if isinstance(msg_or_type, (Message, dict)):
            # We got a Message or message dict, not a msg_type so don't
            # build a new Message.
            msg = msg_or_type
        else:
            msg = self.msg(msg_or_type, content=content, parent=parent,
                           header=header, metadata=metadata)
        if not os.getpid() == self.pid:
            # refuse to send from a forked child: the socket is shared with
            # the parent and interleaved writes would corrupt the stream
            io.rprint("WARNING: attempted to send message from fork")
            io.rprint(msg)
            return
        buffers = [] if buffers is None else buffers
        to_send = self.serialize(msg, ident)
        to_send.extend(buffers)
        longest = max([ len(s) for s in to_send ])
        # zero-copy only pays off for large frames; copy the small ones
        copy = (longest < self.copy_threshold)

        if buffers and track and not copy:
            # only really track when we are doing zero-copy buffers
            tracker = stream.send_multipart(to_send, copy=False, track=True)
        else:
            # use dummy tracker, which will be done immediately
            tracker = DONE
            stream.send_multipart(to_send, copy=copy)

        if self.debug:
            pprint.pprint(msg)
            pprint.pprint(to_send)
            pprint.pprint(buffers)

        # attach the tracker (real MessageTracker or DONE) for callers
        # that want to await delivery of zero-copy buffers
        msg['tracker'] = tracker

        return msg
613 612
614 613 def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
615 614 """Send a raw message via ident path.
616 615
617 616 This method is used to send a already serialized message.
618 617
619 618 Parameters
620 619 ----------
621 620 stream : ZMQStream or Socket
622 621 The ZMQ stream or socket to use for sending the message.
623 622 msg_list : list
624 623 The serialized list of messages to send. This only includes the
625 624 [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of
626 625 the message.
627 626 ident : ident or list
628 627 A single ident or a list of idents to use in sending.
629 628 """
630 629 to_send = []
631 630 if isinstance(ident, bytes):
632 631 ident = [ident]
633 632 if ident is not None:
634 633 to_send.extend(ident)
635 634
636 635 to_send.append(DELIM)
637 636 to_send.append(self.sign(msg_list))
638 637 to_send.extend(msg_list)
639 638 stream.send_multipart(msg_list, flags, copy=copy)
640 639
641 640 def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
642 641 """Receive and unpack a message.
643 642
644 643 Parameters
645 644 ----------
646 645 socket : ZMQStream or Socket
647 646 The socket or stream to use in receiving.
648 647
649 648 Returns
650 649 -------
651 650 [idents], msg
652 651 [idents] is a list of idents and msg is a nested message dict of
653 652 same format as self.msg returns.
654 653 """
655 654 if isinstance(socket, ZMQStream):
656 655 socket = socket.socket
657 656 try:
658 657 msg_list = socket.recv_multipart(mode, copy=copy)
659 658 except zmq.ZMQError as e:
660 659 if e.errno == zmq.EAGAIN:
661 660 # We can convert EAGAIN to None as we know in this case
662 661 # recv_multipart won't return None.
663 662 return None,None
664 663 else:
665 664 raise
666 665 # split multipart message into identity list and message dict
667 666 # invalid large messages can cause very expensive string comparisons
668 667 idents, msg_list = self.feed_identities(msg_list, copy)
669 668 try:
670 669 return idents, self.unserialize(msg_list, content=content, copy=copy)
671 670 except Exception as e:
672 671 # TODO: handle it
673 672 raise e
674 673
675 674 def feed_identities(self, msg_list, copy=True):
676 675 """Split the identities from the rest of the message.
677 676
678 677 Feed until DELIM is reached, then return the prefix as idents and
679 678 remainder as msg_list. This is easily broken by setting an IDENT to DELIM,
680 679 but that would be silly.
681 680
682 681 Parameters
683 682 ----------
684 683 msg_list : a list of Message or bytes objects
685 684 The message to be split.
686 685 copy : bool
687 686 flag determining whether the arguments are bytes or Messages
688 687
689 688 Returns
690 689 -------
691 690 (idents, msg_list) : two lists
692 691 idents will always be a list of bytes, each of which is a ZMQ
693 692 identity. msg_list will be a list of bytes or zmq.Messages of the
694 693 form [HMAC,p_header,p_parent,p_content,buffer1,buffer2,...] and
695 694 should be unpackable/unserializable via self.unserialize at this
696 695 point.
697 696 """
698 697 if copy:
699 698 idx = msg_list.index(DELIM)
700 699 return msg_list[:idx], msg_list[idx+1:]
701 700 else:
702 701 failed = True
703 702 for idx,m in enumerate(msg_list):
704 703 if m.bytes == DELIM:
705 704 failed = False
706 705 break
707 706 if failed:
708 707 raise ValueError("DELIM not in msg_list")
709 708 idents, msg_list = msg_list[:idx], msg_list[idx+1:]
710 709 return [m.bytes for m in idents], msg_list
711 710
712 711 def _add_digest(self, signature):
713 712 """add a digest to history to protect against replay attacks"""
714 713 if self.digest_history_size == 0:
715 714 # no history, never add digests
716 715 return
717 716
718 717 self.digest_history.add(signature)
719 718 if len(self.digest_history) > self.digest_history_size:
720 719 # threshold reached, cull 10%
721 720 self._cull_digest_history()
722 721
723 722 def _cull_digest_history(self):
724 723 """cull the digest history
725 724
726 725 Removes a randomly selected 10% of the digest history
727 726 """
728 727 current = len(self.digest_history)
729 728 n_to_cull = max(int(current // 10), current - self.digest_history_size)
730 729 if n_to_cull >= current:
731 730 self.digest_history = set()
732 731 return
733 732 to_cull = random.sample(self.digest_history, n_to_cull)
734 733 self.digest_history.difference_update(to_cull)
735 734
    def unserialize(self, msg_list, content=True, copy=True):
        """Unserialize a msg_list to a nested message dict.

        This is roughly the inverse of serialize. The serialize/unserialize
        methods work with full message lists, whereas pack/unpack work with
        the individual message parts in the message list.

        Parameters:
        -----------
        msg_list : list of bytes or Message objects
            The list of message parts of the form [HMAC,p_header,p_parent,
            p_metadata,p_content,buffer1,buffer2,...].
        content : bool (True)
            Whether to unpack the content dict (True), or leave it packed
            (False).
        copy : bool (True)
            Whether to return the bytes (True), or the non-copying Message
            object in each place (False).

        Returns
        -------
        msg : dict
            The nested message dict with top-level keys [header, parent_header,
            content, buffers].
        """
        minlen = 5
        message = {}
        if not copy:
            # non-copying recv yields zmq.Message frames; extract the raw
            # bytes of the five mandatory frames for signing/unpacking
            for i in range(minlen):
                msg_list[i] = msg_list[i].bytes
        if self.auth is not None:
            signature = msg_list[0]
            if not signature:
                raise ValueError("Unsigned Message")
            if signature in self.digest_history:
                # replay protection: a given signature may only be seen once
                raise ValueError("Duplicate Signature: %r" % signature)
            self._add_digest(signature)
            # NOTE(review): the signature check runs before the minlen sanity
            # check below; msg_list[1:5] on a short list silently signs fewer
            # frames (slicing does not raise) — confirm this ordering is
            # intentional.
            check = self.sign(msg_list[1:5])
            if not signature == check:
                raise ValueError("Invalid Signature: %r" % signature)
        if not len(msg_list) >= minlen:
            raise TypeError("malformed message, must have at least %i elements"%minlen)
        header = self.unpack(msg_list[1])
        message['header'] = header
        message['msg_id'] = header['msg_id']
        message['msg_type'] = header['msg_type']
        message['parent_header'] = self.unpack(msg_list[2])
        message['metadata'] = self.unpack(msg_list[3])
        if content:
            message['content'] = self.unpack(msg_list[4])
        else:
            # leave content packed, e.g. for cheap relaying of messages
            message['content'] = msg_list[4]

        message['buffers'] = msg_list[5:]
        return message
791 790
def test_msg2obj():
    """Message attribute access mirrors the dict it wraps."""
    source = dict(x=1)
    wrapped = Message(source)
    assert wrapped.x == source['x']

    source['y'] = dict(z=1)
    wrapped = Message(source)
    assert wrapped.y.z == source['y']['z']

    outer, inner = 'y', 'z'
    assert wrapped[outer][inner] == source[outer][inner]

    # round-trip back to a plain dict preserves the values
    plain = dict(wrapped)
    assert source['x'] == plain['x']
    assert source['y']['z'] == plain['y']['z']
807 806
@@ -1,597 +1,597 b''
1 1 """A ZMQ-based subclass of InteractiveShell.
2 2
3 3 This code is meant to ease the refactoring of the base InteractiveShell into
4 4 something with a cleaner architecture for 2-process use, without actually
5 5 breaking InteractiveShell itself. So we're doing something a bit ugly, where
6 6 we subclass and override what we want to fix. Once this is working well, we
7 7 can go back to the base class and refactor the code for a cleaner inheritance
8 8 implementation that doesn't rely on so much monkeypatching.
9 9
10 10 But this lets us maintain a fully working IPython as we develop the new
11 11 machinery. This should thus be thought of as scaffolding.
12 12 """
13 13 #-----------------------------------------------------------------------------
14 14 # Imports
15 15 #-----------------------------------------------------------------------------
16 16 from __future__ import print_function
17 17
18 18 # Stdlib
19 19 import os
20 20 import sys
21 21 import time
22 22
23 23 # System library imports
24 24 from zmq.eventloop import ioloop
25 25
26 26 # Our own
27 27 from IPython.core.interactiveshell import (
28 28 InteractiveShell, InteractiveShellABC
29 29 )
30 30 from IPython.core import page
31 31 from IPython.core.autocall import ZMQExitAutocall
32 32 from IPython.core.displaypub import DisplayPublisher
33 33 from IPython.core.error import UsageError
34 34 from IPython.core.magics import MacroToEdit, CodeMagics
35 35 from IPython.core.magic import magics_class, line_magic, Magics
36 36 from IPython.core.payloadpage import install_payload_page
37 37 from IPython.display import display, Javascript
38 38 from IPython.kernel.inprocess.socket import SocketABC
39 39 from IPython.kernel import (
40 40 get_connection_file, get_connection_info, connect_qtconsole
41 41 )
42 42 from IPython.testing.skipdoctest import skip_doctest
43 from IPython.utils import io, openpy
43 from IPython.utils import openpy
44 44 from IPython.utils.jsonutil import json_clean, encode_images
45 45 from IPython.utils.process import arg_split
46 46 from IPython.utils import py3compat
47 47 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes
48 from IPython.utils.warn import warn, error
48 from IPython.utils.warn import error
49 49 from IPython.kernel.zmq.displayhook import ZMQShellDisplayHook
50 50 from IPython.kernel.zmq.datapub import ZMQDataPublisher
51 51 from IPython.kernel.zmq.session import extract_header
52 52 from session import Session
53 53
54 54 #-----------------------------------------------------------------------------
55 55 # Functions and classes
56 56 #-----------------------------------------------------------------------------
57 57
class ZMQDisplayPublisher(DisplayPublisher):
    """A display publisher that publishes data using a ZeroMQ PUB socket."""

    # the Session used to build and sign outgoing messages
    session = Instance(Session)
    # the PUB socket (or in-process equivalent) messages are sent on
    pub_socket = Instance(SocketABC)
    # header of the request this output belongs to
    parent_header = Dict({})
    # zmq topic prefix used as the routing ident
    topic = CBytes(b'displaypub')

    def set_parent(self, parent):
        """Set the parent for outbound messages."""
        self.parent_header = extract_header(parent)

    def _flush_streams(self):
        """flush IO Streams prior to display"""
        sys.stdout.flush()
        sys.stderr.flush()

    def publish(self, source, data, metadata=None):
        """Validate and send a 'display_data' message on the pub socket.

        `source` names the origin of the data, `data` holds the
        representations (run through encode_images before sending), and
        `metadata` (optional dict) describes the data.
        """
        # flush first so stream output appears before the display message
        self._flush_streams()
        if metadata is None:
            metadata = {}
        self._validate_data(source, data, metadata)
        content = {}
        content['source'] = source
        content['data'] = encode_images(data)
        content['metadata'] = metadata
        self.session.send(
            self.pub_socket, u'display_data', json_clean(content),
            parent=self.parent_header, ident=self.topic,
        )

    def clear_output(self, stdout=True, stderr=True, other=True):
        """Send a 'clear_output' message, optionally resetting stdout/stderr."""
        content = dict(stdout=stdout, stderr=stderr, other=other)

        # carriage returns reset the local stream cursor to column 0
        if stdout:
            print('\r', file=sys.stdout, end='')
        if stderr:
            print('\r', file=sys.stderr, end='')

        self._flush_streams()

        self.session.send(
            self.pub_socket, u'clear_output', content,
            parent=self.parent_header, ident=self.topic,
        )
103 103
104 104 @magics_class
105 105 class KernelMagics(Magics):
106 106 #------------------------------------------------------------------------
107 107 # Magic overrides
108 108 #------------------------------------------------------------------------
109 109 # Once the base class stops inheriting from magic, this code needs to be
110 110 # moved into a separate machinery as well. For now, at least isolate here
111 111 # the magics which this class needs to implement differently from the base
112 112 # class, or that are unique to it.
113 113
    @line_magic
    def doctest_mode(self, parameter_s=''):
        """Toggle doctest mode on and off.

        This mode is intended to make IPython behave as much as possible like a
        plain Python shell, from the perspective of how its prompts, exceptions
        and output look.  This makes it easy to copy and paste parts of a
        session into doctests.  It does so by:

        - Changing the prompts to the classic ``>>>`` ones.
        - Changing the exception reporting mode to 'Plain'.
        - Disabling pretty-printing of output.

        Note that IPython also supports the pasting of code snippets that have
        leading '>>>' and '...' prompts in them.  This means that you can paste
        doctests from files or docstrings (even if they have leading
        whitespace), and the code will execute correctly.  You can then use
        '%history -t' to see the translated history; this will give you the
        input after removal of all the leading prompts and whitespace, which
        can be pasted back into an editor.

        With these features, you can switch into this mode easily whenever you
        need to do testing and changes to doctests, without having to leave
        your existing IPython session.
        """

        from IPython.utils.ipstruct import Struct

        # Shorthands
        shell = self.shell
        disp_formatter = self.shell.display_formatter
        ptformatter = disp_formatter.formatters['text/plain']
        # dstore is a data store kept in the instance metadata bag to track any
        # changes we make, so we can undo them later.
        dstore = shell.meta.setdefault('doctest_mode', Struct())
        save_dstore = dstore.setdefault

        # save a few values we'll need to recover later
        # (setdefault only records them the first time, so the originals
        # survive repeated toggles)
        mode = save_dstore('mode', False)
        save_dstore('rc_pprint', ptformatter.pprint)
        save_dstore('rc_active_types',disp_formatter.active_types)
        save_dstore('xmode', shell.InteractiveTB.mode)

        if mode == False:
            # turn on
            ptformatter.pprint = False
            disp_formatter.active_types = ['text/plain']
            shell.magic('xmode Plain')
        else:
            # turn off: restore the values saved above
            ptformatter.pprint = dstore.rc_pprint
            disp_formatter.active_types = dstore.rc_active_types
            shell.magic("xmode " + dstore.xmode)

        # Store new mode and inform on console
        dstore.mode = bool(1-int(mode))
        mode_label = ['OFF','ON'][dstore.mode]
        print('Doctest mode is:', mode_label)

        # Send the payload back so that clients can modify their prompt display
        payload = dict(
            source='IPython.kernel.zmq.zmqshell.ZMQInteractiveShell.doctest_mode',
            mode=dstore.mode)
        shell.payload_manager.write_payload(payload)
178 178
179 179
    # Reuse CodeMagics' resolution of %edit targets (history ranges,
    # variables, objects, filenames) rather than reimplementing it here.
    _find_edit_target = CodeMagics._find_edit_target
181 181
182 182 @skip_doctest
183 183 @line_magic
184 184 def edit(self, parameter_s='', last_call=['','']):
185 185 """Bring up an editor and execute the resulting code.
186 186
187 187 Usage:
188 188 %edit [options] [args]
189 189
190 190 %edit runs an external text editor. You will need to set the command for
191 191 this editor via the ``TerminalInteractiveShell.editor`` option in your
192 192 configuration file before it will work.
193 193
194 194 This command allows you to conveniently edit multi-line code right in
195 195 your IPython session.
196 196
197 197 If called without arguments, %edit opens up an empty editor with a
198 198 temporary file and will execute the contents of this file when you
199 199 close it (don't forget to save it!).
200 200
201 201
202 202 Options:
203 203
204 204 -n <number>: open the editor at a specified line number. By default,
205 205 the IPython editor hook uses the unix syntax 'editor +N filename', but
206 206 you can configure this by providing your own modified hook if your
207 207 favorite editor supports line-number specifications with a different
208 208 syntax.
209 209
210 210 -p: this will call the editor with the same data as the previous time
211 211 it was used, regardless of how long ago (in your current session) it
212 212 was.
213 213
214 214 -r: use 'raw' input. This option only applies to input taken from the
215 215 user's history. By default, the 'processed' history is used, so that
216 216 magics are loaded in their transformed version to valid Python. If
217 217 this option is given, the raw input as typed as the command line is
218 218 used instead. When you exit the editor, it will be executed by
219 219 IPython's own processor.
220 220
221 221 -x: do not execute the edited code immediately upon exit. This is
222 222 mainly useful if you are editing programs which need to be called with
223 223 command line arguments, which you can then do using %run.
224 224
225 225
226 226 Arguments:
227 227
228 228 If arguments are given, the following possibilites exist:
229 229
230 230 - The arguments are numbers or pairs of colon-separated numbers (like
231 231 1 4:8 9). These are interpreted as lines of previous input to be
232 232 loaded into the editor. The syntax is the same of the %macro command.
233 233
234 234 - If the argument doesn't start with a number, it is evaluated as a
235 235 variable and its contents loaded into the editor. You can thus edit
236 236 any string which contains python code (including the result of
237 237 previous edits).
238 238
239 239 - If the argument is the name of an object (other than a string),
240 240 IPython will try to locate the file where it was defined and open the
241 241 editor at the point where it is defined. You can use `%edit function`
242 242 to load an editor exactly at the point where 'function' is defined,
243 243 edit it and have the file be executed automatically.
244 244
245 245 If the object is a macro (see %macro for details), this opens up your
246 246 specified editor with a temporary file containing the macro's data.
247 247 Upon exit, the macro is reloaded with the contents of the file.
248 248
249 249 Note: opening at an exact line is only supported under Unix, and some
250 250 editors (like kedit and gedit up to Gnome 2.8) do not understand the
251 251 '+NUMBER' parameter necessary for this feature. Good editors like
252 252 (X)Emacs, vi, jed, pico and joe all do.
253 253
254 254 - If the argument is not found as a variable, IPython will look for a
255 255 file with that name (adding .py if necessary) and load it into the
256 256 editor. It will execute its contents with execfile() when you exit,
257 257 loading any code in the file into your interactive namespace.
258 258
259 259 After executing your code, %edit will return as output the code you
260 260 typed in the editor (except when it was an existing file). This way
261 261 you can reload the code in further invocations of %edit as a variable,
262 262 via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of
263 263 the output.
264 264
265 265 Note that %edit is also available through the alias %ed.
266 266
267 267 This is an example of creating a simple function inside the editor and
268 268 then modifying it. First, start up the editor:
269 269
270 270 In [1]: ed
271 271 Editing... done. Executing edited code...
272 272 Out[1]: 'def foo():n print "foo() was defined in an editing session"n'
273 273
274 274 We can then call the function foo():
275 275
276 276 In [2]: foo()
277 277 foo() was defined in an editing session
278 278
279 279 Now we edit foo. IPython automatically loads the editor with the
280 280 (temporary) file where foo() was previously defined:
281 281
282 282 In [3]: ed foo
283 283 Editing... done. Executing edited code...
284 284
285 285 And if we call foo() again we get the modified version:
286 286
287 287 In [4]: foo()
288 288 foo() has now been changed!
289 289
290 290 Here is an example of how to edit a code snippet successive
291 291 times. First we call the editor:
292 292
293 293 In [5]: ed
294 294 Editing... done. Executing edited code...
295 295 hello
296 296 Out[5]: "print 'hello'n"
297 297
298 298 Now we call it again with the previous output (stored in _):
299 299
300 300 In [6]: ed _
301 301 Editing... done. Executing edited code...
302 302 hello world
303 303 Out[6]: "print 'hello world'n"
304 304
305 305 Now we call it with the output #8 (stored in _8, also as Out[8]):
306 306
307 307 In [7]: ed _8
308 308 Editing... done. Executing edited code...
309 309 hello again
310 310 Out[7]: "print 'hello again'n"
311 311 """
312 312
313 313 opts,args = self.parse_options(parameter_s,'prn:')
314 314
315 315 try:
316 316 filename, lineno, _ = CodeMagics._find_edit_target(self.shell, args, opts, last_call)
317 317 except MacroToEdit as e:
318 318 # TODO: Implement macro editing over 2 processes.
319 319 print("Macro editing not yet implemented in 2-process model.")
320 320 return
321 321
322 322 # Make sure we send to the client an absolute path, in case the working
323 323 # directory of client and kernel don't match
324 324 filename = os.path.abspath(filename)
325 325
326 326 payload = {
327 327 'source' : 'IPython.kernel.zmq.zmqshell.ZMQInteractiveShell.edit_magic',
328 328 'filename' : filename,
329 329 'line_number' : lineno
330 330 }
331 331 self.shell.payload_manager.write_payload(payload)
332 332
333 333 # A few magics that are adapted to the specifics of using pexpect and a
334 334 # remote terminal
335 335
336 336 @line_magic
337 337 def clear(self, arg_s):
338 338 """Clear the terminal."""
339 339 if os.name == 'posix':
340 340 self.shell.system("clear")
341 341 else:
342 342 self.shell.system("cls")
343 343
    if os.name == 'nt':
        # This is the usual name in windows
        # Register the same implementation a second time under %cls so
        # Windows users get the spelling they expect.
        cls = line_magic('cls')(clear)
347 347
348 348 # Terminal pagers won't work over pexpect, but we do have our own pager
349 349
350 350 @line_magic
351 351 def less(self, arg_s):
352 352 """Show a file through the pager.
353 353
354 354 Files ending in .py are syntax-highlighted."""
355 355 if not arg_s:
356 356 raise UsageError('Missing filename.')
357 357
358 358 cont = open(arg_s).read()
359 359 if arg_s.endswith('.py'):
360 360 cont = self.shell.pycolorize(openpy.read_py_file(arg_s, skip_encoding_cookie=False))
361 361 else:
362 362 cont = open(arg_s).read()
363 363 page.page(cont)
364 364
    # Alias: %more behaves exactly like %less.
    more = line_magic('more')(less)

    # Man calls a pager, so we also need to redefine it
    if os.name == 'posix':
        @line_magic
        def man(self, arg_s):
            """Find the man page for the given command and display in pager."""
            # 'col -b' strips man's backspace/overstrike formatting so the
            # payload pager receives plain text.
            page.page(self.shell.getoutput('man %s | col -b' % arg_s,
                                           split=False))
374 374
375 375 @line_magic
376 376 def connect_info(self, arg_s):
377 377 """Print information for connecting other clients to this kernel
378 378
379 379 It will print the contents of this session's connection file, as well as
380 380 shortcuts for local clients.
381 381
382 382 In the simplest case, when called from the most recently launched kernel,
383 383 secondary clients can be connected, simply with:
384 384
385 385 $> ipython <app> --existing
386 386
387 387 """
388 388
389 389 from IPython.core.application import BaseIPythonApplication as BaseIPApp
390 390
391 391 if BaseIPApp.initialized():
392 392 app = BaseIPApp.instance()
393 393 security_dir = app.profile_dir.security_dir
394 394 profile = app.profile
395 395 else:
396 396 profile = 'default'
397 397 security_dir = ''
398 398
399 399 try:
400 400 connection_file = get_connection_file()
401 401 info = get_connection_info(unpack=False)
402 402 except Exception as e:
403 403 error("Could not get connection info: %r" % e)
404 404 return
405 405
406 406 # add profile flag for non-default profile
407 407 profile_flag = "--profile %s" % profile if profile != 'default' else ""
408 408
409 409 # if it's in the security dir, truncate to basename
410 410 if security_dir == os.path.dirname(connection_file):
411 411 connection_file = os.path.basename(connection_file)
412 412
413 413
414 414 print (info + '\n')
415 415 print ("Paste the above JSON into a file, and connect with:\n"
416 416 " $> ipython <app> --existing <file>\n"
417 417 "or, if you are local, you can connect with just:\n"
418 418 " $> ipython <app> --existing {0} {1}\n"
419 419 "or even just:\n"
420 420 " $> ipython <app> --existing {1}\n"
421 421 "if this is the most recent IPython session you have started.".format(
422 422 connection_file, profile_flag
423 423 )
424 424 )
425 425
426 426 @line_magic
427 427 def qtconsole(self, arg_s):
428 428 """Open a qtconsole connected to this kernel.
429 429
430 430 Useful for connecting a qtconsole to running notebooks, for better
431 431 debugging.
432 432 """
433 433
434 434 # %qtconsole should imply bind_kernel for engines:
435 435 try:
436 436 from IPython.parallel import bind_kernel
437 437 except ImportError:
438 438 # technically possible, because parallel has higher pyzmq min-version
439 439 pass
440 440 else:
441 441 bind_kernel()
442 442
443 443 try:
444 444 p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix'))
445 445 except Exception as e:
446 446 error("Could not start qtconsole: %r" % e)
447 447 return
448 448
449 449 @line_magic
450 450 def autosave(self, arg_s):
451 451 """Set the autosave interval in the notebook (in seconds).
452 452
453 453 The default value is 120, or two minutes.
454 454 ``%autosave 0`` will disable autosave.
455 455
456 456 This magic only has an effect when called from the notebook interface.
457 457 It has no effect when called in a startup file.
458 458 """
459 459
460 460 try:
461 461 interval = int(arg_s)
462 462 except ValueError:
463 463 raise UsageError("%%autosave requires an integer, got %r" % arg_s)
464 464
465 465 # javascript wants milliseconds
466 466 milliseconds = 1000 * interval
467 467 display(Javascript("IPython.notebook.set_autosave_interval(%i)" % milliseconds),
468 468 include=['application/javascript']
469 469 )
470 470 if interval:
471 471 print("Autosaving every %i seconds" % interval)
472 472 else:
473 473 print("Autosave disabled")
474 474
475 475
class ZMQInteractiveShell(InteractiveShell):
    """A subclass of InteractiveShell for ZMQ."""

    # ZMQ-aware display machinery: results and rich output are published
    # over the kernel's sockets instead of written to a terminal.
    displayhook_class = Type(ZMQShellDisplayHook)
    display_pub_class = Type(ZMQDisplayPublisher)
    data_pub_class = Type(ZMQDataPublisher)

    # Override the traitlet in the parent class, because there's no point using
    # readline for the kernel. Can be removed when the readline code is moved
    # to the terminal frontend.
    colors_force = CBool(True)
    readline_use = CBool(False)
    # autoindent has no meaning in a zmqshell, and attempting to enable it
    # will print a warning in the absence of readline.
    autoindent = CBool(False)

    exiter = Instance(ZMQExitAutocall)
    def _exiter_default(self):
        # Dynamic default for the ``exiter`` trait (traitlets naming
        # convention: _<name>_default).
        return ZMQExitAutocall(self)

    def _exit_now_changed(self, name, old, new):
        """stop eventloop when exit_now fires"""
        # Traitlets change handler for ``exit_now``; the 0.1s delay gives
        # the reply to the exit request a chance to be sent first.
        if new:
            loop = ioloop.IOLoop.instance()
            loop.add_timeout(time.time()+0.1, loop.stop)

    # Frontend hint for what to do with the kernel on exit; None means
    # "no preference expressed".
    keepkernel_on_exit = None

    # Over ZeroMQ, GUI control isn't done with PyOS_InputHook as there is no
    # interactive input being read; we provide event loop support in ipkernel
    from .eventloops import enable_gui
    enable_gui = staticmethod(enable_gui)

    def init_environment(self):
        """Configure the user's environment.

        """
        env = os.environ
        # These two ensure 'ls' produces nice coloring on BSD-derived systems
        env['TERM'] = 'xterm-color'
        env['CLICOLOR'] = '1'
        # Since normal pagers don't work at all (over pexpect we don't have
        # single-key control of the subprocess), try to disable paging in
        # subprocesses as much as possible.
        env['PAGER'] = 'cat'
        env['GIT_PAGER'] = 'cat'

        # And install the payload version of page.
        install_payload_page()

    def auto_rewrite_input(self, cmd):
        """Called to show the auto-rewritten input for autocall and friends.

        FIXME: this payload is currently not correctly processed by the
        frontend.
        """
        new = self.prompt_manager.render('rewrite') + cmd
        payload = dict(
            source='IPython.kernel.zmq.zmqshell.ZMQInteractiveShell.auto_rewrite_input',
            transformed_input=new,
        )
        self.payload_manager.write_payload(payload)

    def ask_exit(self):
        """Engage the exit actions."""
        # Record the exit request as a payload; the frontend decides what
        # to do with the kernel based on ``keepkernel``.
        self.exit_now = True
        payload = dict(
            source='IPython.kernel.zmq.zmqshell.ZMQInteractiveShell.ask_exit',
            exit=True,
            keepkernel=self.keepkernel_on_exit,
        )
        self.payload_manager.write_payload(payload)

    def _showtraceback(self, etype, evalue, stb):
        # Publish a formatted traceback over the pub socket and stash it
        # for the caller (see the FIXME hack below).

        exc_content = {
            u'traceback' : stb,
            u'ename' : unicode(etype.__name__),
            u'evalue' : py3compat.safe_unicode(evalue),
        }

        dh = self.displayhook
        # Send exception info over pub socket for other clients than the caller
        # to pick up
        topic = None
        if dh.topic:
            # Reuse the displayhook's topic, swapping pyout -> pyerr.
            topic = dh.topic.replace(b'pyout', b'pyerr')

        exc_msg = dh.session.send(dh.pub_socket, u'pyerr', json_clean(exc_content), dh.parent_header, ident=topic)

        # FIXME - Hack: store exception info in shell object. Right now, the
        # caller is reading this info after the fact, we need to fix this logic
        # to remove this hack. Even uglier, we need to store the error status
        # here, because in the main loop, the logic that sets it is being
        # skipped because runlines swallows the exceptions.
        exc_content[u'status'] = u'error'
        self._reply_content = exc_content
        # /FIXME

        return exc_content

    def set_next_input(self, text):
        """Send the specified text to the frontend to be presented at the next
        input cell."""
        payload = dict(
            source='IPython.kernel.zmq.zmqshell.ZMQInteractiveShell.set_next_input',
            text=text
        )
        self.payload_manager.write_payload(payload)

    #-------------------------------------------------------------------------
    # Things related to magics
    #-------------------------------------------------------------------------

    def init_magics(self):
        # Register the kernel-specific magics on top of the base set, and
        # alias %ed to %edit.
        super(ZMQInteractiveShell, self).init_magics()
        self.register_magics(KernelMagics)
        self.magics_manager.register_alias('ed', 'edit')



InteractiveShellABC.register(ZMQInteractiveShell)
@@ -1,353 +1,354 b''
1 1 # encoding: utf-8
2 2
3 3 """Pickle related utilities. Perhaps this should be called 'can'."""
4 4
5 5 __docformat__ = "restructuredtext en"
6 6
7 7 #-------------------------------------------------------------------------------
8 8 # Copyright (C) 2008-2011 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-------------------------------------------------------------------------------
13 13
14 14 #-------------------------------------------------------------------------------
15 15 # Imports
16 16 #-------------------------------------------------------------------------------
17 17
18 18 import copy
19 19 import logging
20 20 import sys
21 21 from types import FunctionType
22 22
23 23 try:
24 24 import cPickle as pickle
25 25 except ImportError:
26 26 import pickle
27 27
28 28 try:
29 29 import numpy
30 30 except:
31 31 numpy = None
32 32
33 import codeutil # This registers a hook when it's imported
33 34 import py3compat
34 35 from importstring import import_item
35 36
36 37 from IPython.config import Application
37 38
38 39 if py3compat.PY3:
39 40 buffer = memoryview
40 41 class_type = type
41 42 else:
42 43 from types import ClassType
43 44 class_type = (type, ClassType)
44 45
45 46 #-------------------------------------------------------------------------------
46 47 # Classes
47 48 #-------------------------------------------------------------------------------
48 49
49 50
class CannedObject(object):
    def __init__(self, obj, keys=None, hook=None):
        """can an object for safe pickling

        Parameters
        ==========

        obj:
            The object to be canned
        keys: list (optional)
            list of attribute names that will be explicitly canned / uncanned
        hook: callable (optional)
            An optional extra callable,
            which can do additional processing of the uncanned object.

        large data may be offloaded into the buffers list,
        used for zero-copy transfers.
        """
        # Fix: the original default was the mutable ``keys=[]``, a single
        # list object shared by every call; use the None-sentinel idiom.
        self.keys = keys if keys is not None else []
        # Shallow copy so canned attributes don't clobber the caller's object.
        self.obj = copy.copy(obj)
        self.hook = can(hook)
        for key in self.keys:
            setattr(self.obj, key, can(getattr(obj, key)))

        self.buffers = []

    def get_object(self, g=None):
        """Invert canning: uncan the listed attributes, run the hook, and
        return the restored object.  `g` is the namespace for uncanning."""
        if g is None:
            g = {}
        obj = self.obj
        for key in self.keys:
            setattr(obj, key, uncan(getattr(obj, key), g))

        if self.hook:
            # The hook was canned in __init__; uncan it once, then invoke.
            self.hook = uncan(self.hook, g)
            self.hook(obj, g)
        return self.obj
87 88
88 89
class Reference(CannedObject):
    """object for wrapping a remote reference by name."""

    def __init__(self, name):
        # A Reference carries only a name, to be resolved on the far side.
        if not isinstance(name, basestring):
            raise TypeError("illegal name: %r"%name)
        self.name = name
        self.buffers = []

    def __repr__(self):
        return "<Reference: %r>"%self.name

    def get_object(self, g=None):
        # Resolve the stored name in the supplied namespace.
        namespace = {} if g is None else g
        return eval(self.name, namespace)
105 106
106 107
class CannedFunction(CannedObject):
    """Can a plain Python function: code object, canned defaults, and the
    name of its defining module (Python 2 function attributes)."""

    def __init__(self, f):
        self._check_type(f)
        self.code = f.func_code
        # Can each default value individually; None marks "no defaults".
        defaults = f.func_defaults
        self.defaults = [can(fd) for fd in defaults] if defaults else None
        self.module = f.__module__ or '__main__'
        self.__name__ = f.__name__
        self.buffers = []

    def _check_type(self, obj):
        assert isinstance(obj, FunctionType), "Not a function type"

    def get_object(self, g=None):
        # try to load function back into its module:
        if not self.module.startswith('__'):
            __import__(self.module)
            g = sys.modules[self.module].__dict__

        if g is None:
            g = {}
        if self.defaults is None:
            defaults = None
        else:
            defaults = tuple(uncan(cfd, g) for cfd in self.defaults)
        return FunctionType(self.code, g, self.__name__, defaults)
137 138
class CannedClass(CannedObject):
    # Cans a class (typically one defined interactively in __main__) by
    # canning its dict and its canned parents, so it can be rebuilt with
    # type() on the other side.

    def __init__(self, cls):
        self._check_type(cls)
        self.name = cls.__name__
        # Python 2 old-style classes are not instances of ``type``.
        self.old_style = not isinstance(cls, type)
        self._canned_dict = {}
        for k,v in cls.__dict__.items():
            # __weakref__/__dict__ descriptors cannot be meaningfully
            # transferred; skip them.
            if k not in ('__weakref__', '__dict__'):
                self._canned_dict[k] = can(v)
        if self.old_style:
            # old-style classes have no mro() method
            mro = []
        else:
            mro = cls.mro()

        # mro[0] is the class itself; can only the ancestors.
        self.parents = [ can(c) for c in mro[1:] ]
        self.buffers = []

    def _check_type(self, obj):
        assert isinstance(obj, class_type), "Not a class type"

    def get_object(self, g=None):
        # Rebuild the class from its uncanned parents and class dict.
        parents = tuple(uncan(p, g) for p in self.parents)
        return type(self.name, parents, uncan_dict(self._canned_dict, g=g))
162 163
class CannedArray(CannedObject):
    # Cans a numpy ndarray, shipping its raw data as a (zero-copy where
    # possible) buffer plus enough metadata (shape, dtype) to rebuild it.
    def __init__(self, obj):
        self.shape = obj.shape
        # Structured dtypes need the full field descriptor list; plain
        # dtypes round-trip via their string form.
        self.dtype = obj.dtype.descr if obj.dtype.fields else obj.dtype.str
        if sum(obj.shape) == 0:
            # just pickle it
            self.buffers = [pickle.dumps(obj, -1)]
        else:
            # ensure contiguous
            obj = numpy.ascontiguousarray(obj, dtype=None)
            # NOTE(review): ``buffer`` is aliased to memoryview on Python 3
            # (see module header), so this is a view, not a copy — confirm.
            self.buffers = [buffer(obj)]

    def get_object(self, g=None):
        data = self.buffers[0]
        if sum(self.shape) == 0:
            # no shape, we just pickled it
            return pickle.loads(data)
        else:
            return numpy.frombuffer(data, dtype=self.dtype).reshape(self.shape)
182 183
183 184
class CannedBytes(CannedObject):
    """Canning wrapper for raw byte strings.

    The payload rides in ``self.buffers`` (zero-copy friendly); ``wrap``
    decides the type handed back on uncan.
    """
    wrap = bytes

    def __init__(self, obj):
        self.buffers = [obj]

    def get_object(self, g=None):
        return self.wrap(self.buffers[0])
192 193
# Fix: this was declared with ``def`` instead of ``class``, so calling
# CannedBuffer(obj) from can_map executed ``wrap = buffer`` and returned
# None — canning a buffer silently produced None.  As a subclass it
# reuses CannedBytes' machinery, wrapping the payload back in a buffer.
class CannedBuffer(CannedBytes):
    wrap = buffer
195 196
196 197 #-------------------------------------------------------------------------------
197 198 # Functions
198 199 #-------------------------------------------------------------------------------
199 200
def _logger():
    """get the logger for the current Application

    the root logger will be used if no Application is running
    """
    if not Application.initialized():
        logger = logging.getLogger()
        if not logger.handlers:
            # Make sure log records are actually emitted somewhere.
            logging.basicConfig()
    else:
        logger = Application.instance().log

    return logger
213 214
def _import_mapping(mapping, original=None):
    """import any string-keys in a type mapping

    Replaces dotted-name string keys with the classes they import to;
    unimportable keys are dropped (logging an error only for keys the
    user added beyond `original`).
    """
    log = _logger()
    log.debug("Importing canning map")
    for key, value in mapping.items():
        if not isinstance(key, basestring):
            continue
        try:
            cls = import_item(key)
        except Exception:
            if original and key not in original:
                # only message on user-added classes
                log.error("cannning class not importable: %r", key, exc_info=True)
            mapping.pop(key)
        else:
            mapping[cls] = mapping.pop(key)
231 232
def istype(obj, check):
    """like isinstance(obj, check), but strict

    This won't catch subclasses.
    """
    # Identity comparison on type() deliberately excludes subclasses.
    if isinstance(check, tuple):
        return any(type(obj) is cls for cls in check)
    return type(obj) is check
244 245
def can(obj):
    """prepare an object for pickling"""

    import_needed = False

    for cls,canner in can_map.iteritems():
        if isinstance(cls, basestring):
            # Some map keys are still dotted import-path strings; they
            # must be resolved before type checks can work.
            import_needed = True
            break
        elif istype(obj, cls):
            return canner(obj)

    if import_needed:
        # perform can_map imports, then try again
        # this will usually only happen once
        _import_mapping(can_map, _original_can_map)
        return can(obj)

    return obj
264 265
def can_class(obj):
    """Can a class defined interactively in __main__; importable classes
    pickle fine by reference and pass through untouched."""
    if isinstance(obj, class_type) and obj.__module__ == '__main__':
        return CannedClass(obj)
    return obj
270 271
def can_dict(obj):
    """can the *values* of a dict"""
    if not istype(obj, dict):
        return obj
    # Keys pass through; only values are canned.
    return dict((k, can(v)) for k, v in obj.iteritems())
280 281
# Exact sequence types whose elements are canned/uncanned recursively.
sequence_types = (list, tuple, set)

def can_sequence(obj):
    """can the elements of a sequence"""
    if not istype(obj, sequence_types):
        return obj
    # Preserve the exact container type.
    return type(obj)(can(item) for item in obj)
290 291
def uncan(obj, g=None):
    """invert canning"""

    import_needed = False
    for cls,uncanner in uncan_map.iteritems():
        if isinstance(cls, basestring):
            # Dotted-name string keys must be imported before isinstance
            # checks can match.
            import_needed = True
            break
        elif isinstance(obj, cls):
            return uncanner(obj, g)

    if import_needed:
        # perform uncan_map imports, then try again
        # this will usually only happen once
        _import_mapping(uncan_map, _original_uncan_map)
        return uncan(obj, g)

    return obj
309 310
def uncan_dict(obj, g=None):
    """Uncan the *values* of a dict; non-dicts pass through untouched."""
    if not istype(obj, dict):
        return obj
    return dict((k, uncan(v, g)) for k, v in obj.iteritems())
318 319
def uncan_sequence(obj, g=None):
    """Uncan the elements of a list/tuple/set, preserving the container type."""
    if not istype(obj, sequence_types):
        return obj
    return type(obj)(uncan(item, g) for item in obj)
325 326
def _uncan_dependent_hook(dep, g=None):
    # Post-uncan hook: re-evaluate the dependency on the receiving side.
    dep.check_dependency()

def can_dependent(obj):
    # Can an IPython.parallel dependent, explicitly canning its 'f' and
    # 'df' attributes and re-checking the dependency on uncan.
    return CannedObject(obj, keys=('f', 'df'), hook=_uncan_dependent_hook)
331 332
332 333 #-------------------------------------------------------------------------------
333 334 # API dictionaries
334 335 #-------------------------------------------------------------------------------
335 336
# These dicts can be extended for custom serialization of new objects

can_map = {
    # String keys are dotted import paths, resolved lazily by
    # _import_mapping so importing this module stays cheap.
    'IPython.parallel.dependent' : can_dependent,
    'numpy.ndarray' : CannedArray,
    FunctionType : CannedFunction,
    bytes : CannedBytes,
    buffer : CannedBuffer,
    class_type : can_class,
}

uncan_map = {
    # All canned wrappers restore themselves via get_object().
    CannedObject : lambda obj, g: obj.get_object(g),
}

# for use in _import_mapping:
_original_can_map = can_map.copy()
_original_uncan_map = uncan_map.copy()
General Comments 0
You need to be logged in to leave comments. Login now