s/IPython.kernel/jupyter_client in jupyter_client
Min RK
@@ -1,11 +1,8 @@
1 """IPython kernels and associated utilities"""
1 """IPython kernels and associated utilities"""
2
2
3 # just for friendlier zmq version check
4 from . import zmq
5
6 from .connect import *
3 from .connect import *
7 from .launcher import *
4 from .launcher import *
8 from .client import KernelClient
5 from .client import KernelClient
9 from .manager import KernelManager, run_kernel
6 from .manager import KernelManager, run_kernel
10 from .blocking import BlockingKernelClient
7 from .blocking import BlockingKernelClient
11 from .multikernelmanager import MultiKernelManager
8 from .multikernelmanager import MultiKernelManager
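The hunk above trims the package `__init__` down to its public re-exports. For orientation only (this sketch is not part of the changeset), the pieces exported here are typically combined as below, assuming a default kernel spec is installed and that `KernelManager.client()` returns a blocking client wired to the same connection info:

```python
# Hedged sketch: exercises the names re-exported by the package __init__ above.
from jupyter_client import KernelManager

km = KernelManager()
km.start_kernel()              # launch the kernel process and write its connection file

kc = km.client()               # blocking client sharing the manager's connection info
kc.start_channels()            # open the shell/iopub/stdin/hb channels
kc.wait_for_ready()            # block until the kernel answers kernel_info (see next hunk)

msg_id = kc.execute("print('hello')")
reply = kc.get_shell_msg(timeout=5)   # execute_reply for the request sent above

kc.stop_channels()
km.shutdown_kernel()
```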
@@ -1,38 +1,38 @@
1 """Implements a fully blocking kernel client.
1 """Implements a fully blocking kernel client.
2
2
3 Useful for test suites and blocking terminal interfaces.
3 Useful for test suites and blocking terminal interfaces.
4 """
4 """
5 # Copyright (c) IPython Development Team.
5 # Copyright (c) IPython Development Team.
6 # Distributed under the terms of the Modified BSD License.
6 # Distributed under the terms of the Modified BSD License.
7
7
8 try:
8 try:
9 from queue import Empty # Python 3
9 from queue import Empty # Python 3
10 except ImportError:
10 except ImportError:
11 from Queue import Empty # Python 2
11 from Queue import Empty # Python 2
12
12
13 from IPython.utils.traitlets import Type
13 from IPython.utils.traitlets import Type
14 from IPython.kernel.channels import HBChannel
14 from jupyter_client.channels import HBChannel
15 from IPython.kernel.client import KernelClient
15 from jupyter_client.client import KernelClient
16 from .channels import ZMQSocketChannel
16 from .channels import ZMQSocketChannel
17
17
18 class BlockingKernelClient(KernelClient):
18 class BlockingKernelClient(KernelClient):
19 def wait_for_ready(self):
19 def wait_for_ready(self):
20 # Wait for kernel info reply on shell channel
20 # Wait for kernel info reply on shell channel
21 while True:
21 while True:
22 msg = self.shell_channel.get_msg(block=True)
22 msg = self.shell_channel.get_msg(block=True)
23 if msg['msg_type'] == 'kernel_info_reply':
23 if msg['msg_type'] == 'kernel_info_reply':
24 self._handle_kernel_info_reply(msg)
24 self._handle_kernel_info_reply(msg)
25 break
25 break
26
26
27 # Flush IOPub channel
27 # Flush IOPub channel
28 while True:
28 while True:
29 try:
29 try:
30 msg = self.iopub_channel.get_msg(block=True, timeout=0.2)
30 msg = self.iopub_channel.get_msg(block=True, timeout=0.2)
31 except Empty:
31 except Empty:
32 break
32 break
33
33
34 # The classes to use for the various channels
34 # The classes to use for the various channels
35 shell_channel_class = Type(ZMQSocketChannel)
35 shell_channel_class = Type(ZMQSocketChannel)
36 iopub_channel_class = Type(ZMQSocketChannel)
36 iopub_channel_class = Type(ZMQSocketChannel)
37 stdin_channel_class = Type(ZMQSocketChannel)
37 stdin_channel_class = Type(ZMQSocketChannel)
38 hb_channel_class = Type(HBChannel)
38 hb_channel_class = Type(HBChannel)
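As context (an illustrative sketch, not part of the diff), `BlockingKernelClient` is usually pointed at an already-running kernel via its connection file; the file name below is hypothetical, and `load_connection_file`, `start_channels`, and the message accessors come from the base classes touched elsewhere in this changeset:

```python
from jupyter_client.blocking import BlockingKernelClient  # path implied by the imports above

kc = BlockingKernelClient()
kc.load_connection_file('kernel-12345.json')  # hypothetical file written by a running kernel
kc.start_channels()
kc.wait_for_ready()          # blocks for kernel_info_reply, then drains iopub (as defined above)

kc.execute("a = 1 + 1")
reply = kc.get_shell_msg(timeout=5)
print(reply['content']['status'])    # 'ok' on success

kc.stop_channels()
```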
@@ -1,49 +1,49 @@
1 """Abstract base classes for kernel client channels"""
1 """Abstract base classes for kernel client channels"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import abc
6 import abc
7
7
8 from IPython.utils.py3compat import with_metaclass
8 from IPython.utils.py3compat import with_metaclass
9
9
10
10
11 class ChannelABC(with_metaclass(abc.ABCMeta, object)):
11 class ChannelABC(with_metaclass(abc.ABCMeta, object)):
12 """A base class for all channel ABCs."""
12 """A base class for all channel ABCs."""
13
13
14 @abc.abstractmethod
14 @abc.abstractmethod
15 def start(self):
15 def start(self):
16 pass
16 pass
17
17
18 @abc.abstractmethod
18 @abc.abstractmethod
19 def stop(self):
19 def stop(self):
20 pass
20 pass
21
21
22 @abc.abstractmethod
22 @abc.abstractmethod
23 def is_alive(self):
23 def is_alive(self):
24 pass
24 pass
25
25
26
26
27 class HBChannelABC(ChannelABC):
27 class HBChannelABC(ChannelABC):
28 """HBChannel ABC.
28 """HBChannel ABC.
29
29
30 The docstrings for this class can be found in the base implementation:
30 The docstrings for this class can be found in the base implementation:
31
31
32 `IPython.kernel.channels.HBChannel`
32 `jupyter_client.channels.HBChannel`
33 """
33 """
34
34
35 @abc.abstractproperty
35 @abc.abstractproperty
36 def time_to_dead(self):
36 def time_to_dead(self):
37 pass
37 pass
38
38
39 @abc.abstractmethod
39 @abc.abstractmethod
40 def pause(self):
40 def pause(self):
41 pass
41 pass
42
42
43 @abc.abstractmethod
43 @abc.abstractmethod
44 def unpause(self):
44 def unpause(self):
45 pass
45 pass
46
46
47 @abc.abstractmethod
47 @abc.abstractmethod
48 def is_beating(self):
48 def is_beating(self):
49 pass
49 pass
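To make the contract concrete (a sketch under the assumption that this module lands at `jupyter_client.channelsabc`, as the relative imports elsewhere in the diff suggest), a minimal channel only needs `start`, `stop`, and `is_alive`:

```python
from jupyter_client.channelsabc import ChannelABC

class DummyChannel(ChannelABC):
    """A no-op channel that satisfies the ABC; handy for tests."""
    def __init__(self):
        self._running = False

    def start(self):
        self._running = True

    def stop(self):
        self._running = False

    def is_alive(self):
        return self._running

ch = DummyChannel()
ch.start()
assert ch.is_alive()
```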
@@ -1,390 +1,390 @@
1 """Base class to manage the interaction with a running kernel"""
1 """Base class to manage the interaction with a running kernel"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from __future__ import absolute_import
6 from __future__ import absolute_import
7 from IPython.kernel.channels import major_protocol_version
7 from jupyter_client.channels import major_protocol_version
8 from IPython.utils.py3compat import string_types, iteritems
8 from IPython.utils.py3compat import string_types, iteritems
9
9
10 import zmq
10 import zmq
11
11
12 from IPython.utils.traitlets import (
12 from IPython.utils.traitlets import (
13 Any, Instance, Type,
13 Any, Instance, Type,
14 )
14 )
15
15
16 from .channelsabc import (ChannelABC, HBChannelABC)
16 from .channelsabc import (ChannelABC, HBChannelABC)
17 from .clientabc import KernelClientABC
17 from .clientabc import KernelClientABC
18 from .connect import ConnectionFileMixin
18 from .connect import ConnectionFileMixin
19
19
20
20
21 # some utilities to validate message structure, these might get moved elsewhere
21 # some utilities to validate message structure, these might get moved elsewhere
22 # if they prove to have more generic utility
22 # if they prove to have more generic utility
23
23
24 def validate_string_dict(dct):
24 def validate_string_dict(dct):
25 """Validate that the input is a dict with string keys and values.
25 """Validate that the input is a dict with string keys and values.
26
26
27 Raises ValueError if not."""
27 Raises ValueError if not."""
28 for k,v in iteritems(dct):
28 for k,v in iteritems(dct):
29 if not isinstance(k, string_types):
29 if not isinstance(k, string_types):
30 raise ValueError('key %r in dict must be a string' % k)
30 raise ValueError('key %r in dict must be a string' % k)
31 if not isinstance(v, string_types):
31 if not isinstance(v, string_types):
32 raise ValueError('value %r in dict must be a string' % v)
32 raise ValueError('value %r in dict must be a string' % v)
33
33
34
34
35 class KernelClient(ConnectionFileMixin):
35 class KernelClient(ConnectionFileMixin):
36 """Communicates with a single kernel on any host via zmq channels.
36 """Communicates with a single kernel on any host via zmq channels.
37
37
38 There are four channels associated with each kernel:
38 There are four channels associated with each kernel:
39
39
40 * shell: for request/reply calls to the kernel.
40 * shell: for request/reply calls to the kernel.
41 * iopub: for the kernel to publish results to frontends.
41 * iopub: for the kernel to publish results to frontends.
42 * hb: for monitoring the kernel's heartbeat.
42 * hb: for monitoring the kernel's heartbeat.
43 * stdin: for frontends to reply to raw_input calls in the kernel.
43 * stdin: for frontends to reply to raw_input calls in the kernel.
44
44
45 The methods of the channels are exposed as methods of the client itself
45 The methods of the channels are exposed as methods of the client itself
46 (KernelClient.execute, complete, history, etc.).
46 (KernelClient.execute, complete, history, etc.).
47 See the channels themselves for documentation of these methods.
47 See the channels themselves for documentation of these methods.
48
48
49 """
49 """
50
50
51 # The PyZMQ Context to use for communication with the kernel.
51 # The PyZMQ Context to use for communication with the kernel.
52 context = Instance(zmq.Context)
52 context = Instance(zmq.Context)
53 def _context_default(self):
53 def _context_default(self):
54 return zmq.Context.instance()
54 return zmq.Context.instance()
55
55
56 # The classes to use for the various channels
56 # The classes to use for the various channels
57 shell_channel_class = Type(ChannelABC)
57 shell_channel_class = Type(ChannelABC)
58 iopub_channel_class = Type(ChannelABC)
58 iopub_channel_class = Type(ChannelABC)
59 stdin_channel_class = Type(ChannelABC)
59 stdin_channel_class = Type(ChannelABC)
60 hb_channel_class = Type(HBChannelABC)
60 hb_channel_class = Type(HBChannelABC)
61
61
62 # Protected traits
62 # Protected traits
63 _shell_channel = Any
63 _shell_channel = Any
64 _iopub_channel = Any
64 _iopub_channel = Any
65 _stdin_channel = Any
65 _stdin_channel = Any
66 _hb_channel = Any
66 _hb_channel = Any
67
67
68 # flag for whether execute requests should be allowed to call raw_input:
68 # flag for whether execute requests should be allowed to call raw_input:
69 allow_stdin = True
69 allow_stdin = True
70
70
71 #--------------------------------------------------------------------------
71 #--------------------------------------------------------------------------
72 # Channel proxy methods
72 # Channel proxy methods
73 #--------------------------------------------------------------------------
73 #--------------------------------------------------------------------------
74
74
75 def _get_msg(channel, *args, **kwargs):
75 def _get_msg(channel, *args, **kwargs):
76 return channel.get_msg(*args, **kwargs)
76 return channel.get_msg(*args, **kwargs)
77
77
78 def get_shell_msg(self, *args, **kwargs):
78 def get_shell_msg(self, *args, **kwargs):
79 """Get a message from the shell channel"""
79 """Get a message from the shell channel"""
80 return self.shell_channel.get_msg(*args, **kwargs)
80 return self.shell_channel.get_msg(*args, **kwargs)
81
81
82 def get_iopub_msg(self, *args, **kwargs):
82 def get_iopub_msg(self, *args, **kwargs):
83 """Get a message from the iopub channel"""
83 """Get a message from the iopub channel"""
84 return self.iopub_channel.get_msg(*args, **kwargs)
84 return self.iopub_channel.get_msg(*args, **kwargs)
85
85
86 def get_stdin_msg(self, *args, **kwargs):
86 def get_stdin_msg(self, *args, **kwargs):
87 """Get a message from the stdin channel"""
87 """Get a message from the stdin channel"""
88 return self.stdin_channel.get_msg(*args, **kwargs)
88 return self.stdin_channel.get_msg(*args, **kwargs)
89
89
90 #--------------------------------------------------------------------------
90 #--------------------------------------------------------------------------
91 # Channel management methods
91 # Channel management methods
92 #--------------------------------------------------------------------------
92 #--------------------------------------------------------------------------
93
93
94 def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
94 def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
95 """Starts the channels for this kernel.
95 """Starts the channels for this kernel.
96
96
97 This will create the channels if they do not exist and then start
97 This will create the channels if they do not exist and then start
98 them (their activity runs in a thread). If port numbers of 0 are
98 them (their activity runs in a thread). If port numbers of 0 are
99 being used (random ports) then you must first call
99 being used (random ports) then you must first call
100 :meth:`start_kernel`. If the channels have been stopped and you
100 :meth:`start_kernel`. If the channels have been stopped and you
101 call this, :class:`RuntimeError` will be raised.
101 call this, :class:`RuntimeError` will be raised.
102 """
102 """
103 if shell:
103 if shell:
104 self.shell_channel.start()
104 self.shell_channel.start()
105 self.kernel_info()
105 self.kernel_info()
106 if iopub:
106 if iopub:
107 self.iopub_channel.start()
107 self.iopub_channel.start()
108 if stdin:
108 if stdin:
109 self.stdin_channel.start()
109 self.stdin_channel.start()
110 self.allow_stdin = True
110 self.allow_stdin = True
111 else:
111 else:
112 self.allow_stdin = False
112 self.allow_stdin = False
113 if hb:
113 if hb:
114 self.hb_channel.start()
114 self.hb_channel.start()
115
115
116 def stop_channels(self):
116 def stop_channels(self):
117 """Stops all the running channels for this kernel.
117 """Stops all the running channels for this kernel.
118
118
119 This stops their event loops and joins their threads.
119 This stops their event loops and joins their threads.
120 """
120 """
121 if self.shell_channel.is_alive():
121 if self.shell_channel.is_alive():
122 self.shell_channel.stop()
122 self.shell_channel.stop()
123 if self.iopub_channel.is_alive():
123 if self.iopub_channel.is_alive():
124 self.iopub_channel.stop()
124 self.iopub_channel.stop()
125 if self.stdin_channel.is_alive():
125 if self.stdin_channel.is_alive():
126 self.stdin_channel.stop()
126 self.stdin_channel.stop()
127 if self.hb_channel.is_alive():
127 if self.hb_channel.is_alive():
128 self.hb_channel.stop()
128 self.hb_channel.stop()
129
129
130 @property
130 @property
131 def channels_running(self):
131 def channels_running(self):
132 """Are any of the channels created and running?"""
132 """Are any of the channels created and running?"""
133 return (self.shell_channel.is_alive() or self.iopub_channel.is_alive() or
133 return (self.shell_channel.is_alive() or self.iopub_channel.is_alive() or
134 self.stdin_channel.is_alive() or self.hb_channel.is_alive())
134 self.stdin_channel.is_alive() or self.hb_channel.is_alive())
135
135
136 ioloop = None # Overridden in subclasses that use pyzmq event loop
136 ioloop = None # Overridden in subclasses that use pyzmq event loop
137
137
138 @property
138 @property
139 def shell_channel(self):
139 def shell_channel(self):
140 """Get the shell channel object for this kernel."""
140 """Get the shell channel object for this kernel."""
141 if self._shell_channel is None:
141 if self._shell_channel is None:
142 url = self._make_url('shell')
142 url = self._make_url('shell')
143 self.log.debug("connecting shell channel to %s", url)
143 self.log.debug("connecting shell channel to %s", url)
144 socket = self.connect_shell(identity=self.session.bsession)
144 socket = self.connect_shell(identity=self.session.bsession)
145 self._shell_channel = self.shell_channel_class(
145 self._shell_channel = self.shell_channel_class(
146 socket, self.session, self.ioloop
146 socket, self.session, self.ioloop
147 )
147 )
148 return self._shell_channel
148 return self._shell_channel
149
149
150 @property
150 @property
151 def iopub_channel(self):
151 def iopub_channel(self):
152 """Get the iopub channel object for this kernel."""
152 """Get the iopub channel object for this kernel."""
153 if self._iopub_channel is None:
153 if self._iopub_channel is None:
154 url = self._make_url('iopub')
154 url = self._make_url('iopub')
155 self.log.debug("connecting iopub channel to %s", url)
155 self.log.debug("connecting iopub channel to %s", url)
156 socket = self.connect_iopub()
156 socket = self.connect_iopub()
157 self._iopub_channel = self.iopub_channel_class(
157 self._iopub_channel = self.iopub_channel_class(
158 socket, self.session, self.ioloop
158 socket, self.session, self.ioloop
159 )
159 )
160 return self._iopub_channel
160 return self._iopub_channel
161
161
162 @property
162 @property
163 def stdin_channel(self):
163 def stdin_channel(self):
164 """Get the stdin channel object for this kernel."""
164 """Get the stdin channel object for this kernel."""
165 if self._stdin_channel is None:
165 if self._stdin_channel is None:
166 url = self._make_url('stdin')
166 url = self._make_url('stdin')
167 self.log.debug("connecting stdin channel to %s", url)
167 self.log.debug("connecting stdin channel to %s", url)
168 socket = self.connect_stdin(identity=self.session.bsession)
168 socket = self.connect_stdin(identity=self.session.bsession)
169 self._stdin_channel = self.stdin_channel_class(
169 self._stdin_channel = self.stdin_channel_class(
170 socket, self.session, self.ioloop
170 socket, self.session, self.ioloop
171 )
171 )
172 return self._stdin_channel
172 return self._stdin_channel
173
173
174 @property
174 @property
175 def hb_channel(self):
175 def hb_channel(self):
176 """Get the hb channel object for this kernel."""
176 """Get the hb channel object for this kernel."""
177 if self._hb_channel is None:
177 if self._hb_channel is None:
178 url = self._make_url('hb')
178 url = self._make_url('hb')
179 self.log.debug("connecting heartbeat channel to %s", url)
179 self.log.debug("connecting heartbeat channel to %s", url)
180 self._hb_channel = self.hb_channel_class(
180 self._hb_channel = self.hb_channel_class(
181 self.context, self.session, url
181 self.context, self.session, url
182 )
182 )
183 return self._hb_channel
183 return self._hb_channel
184
184
185 def is_alive(self):
185 def is_alive(self):
186 """Is the kernel process still running?"""
186 """Is the kernel process still running?"""
187 if self._hb_channel is not None:
187 if self._hb_channel is not None:
188 # We didn't start the kernel with this KernelManager so we
188 # We didn't start the kernel with this KernelManager so we
189 # use the heartbeat.
189 # use the heartbeat.
190 return self._hb_channel.is_beating()
190 return self._hb_channel.is_beating()
191 else:
191 else:
192 # no heartbeat and not local, we can't tell if it's running,
192 # no heartbeat and not local, we can't tell if it's running,
193 # so naively return True
193 # so naively return True
194 return True
194 return True
195
195
196
196
197 # Methods to send specific messages on channels
197 # Methods to send specific messages on channels
198 def execute(self, code, silent=False, store_history=True,
198 def execute(self, code, silent=False, store_history=True,
199 user_expressions=None, allow_stdin=None, stop_on_error=True):
199 user_expressions=None, allow_stdin=None, stop_on_error=True):
200 """Execute code in the kernel.
200 """Execute code in the kernel.
201
201
202 Parameters
202 Parameters
203 ----------
203 ----------
204 code : str
204 code : str
205 A string of Python code.
205 A string of Python code.
206
206
207 silent : bool, optional (default False)
207 silent : bool, optional (default False)
208 If set, the kernel will execute the code as quietly as possible, and
208 If set, the kernel will execute the code as quietly as possible, and
209 will force store_history to be False.
209 will force store_history to be False.
210
210
211 store_history : bool, optional (default True)
211 store_history : bool, optional (default True)
212 If set, the kernel will store command history. This is forced
212 If set, the kernel will store command history. This is forced
213 to be False if silent is True.
213 to be False if silent is True.
214
214
215 user_expressions : dict, optional
215 user_expressions : dict, optional
216 A dict mapping names to expressions to be evaluated in the user's
216 A dict mapping names to expressions to be evaluated in the user's
217 dict. The expression values are returned as strings formatted using
217 dict. The expression values are returned as strings formatted using
218 :func:`repr`.
218 :func:`repr`.
219
219
220 allow_stdin : bool, optional (default self.allow_stdin)
220 allow_stdin : bool, optional (default self.allow_stdin)
221 Flag for whether the kernel can send stdin requests to frontends.
221 Flag for whether the kernel can send stdin requests to frontends.
222
222
223 Some frontends (e.g. the Notebook) do not support stdin requests.
223 Some frontends (e.g. the Notebook) do not support stdin requests.
224 If raw_input is called from code executed from such a frontend, a
224 If raw_input is called from code executed from such a frontend, a
225 StdinNotImplementedError will be raised.
225 StdinNotImplementedError will be raised.
226
226
227 stop_on_error : bool, optional (default True)
227 stop_on_error : bool, optional (default True)
228 Flag whether to abort the execution queue, if an exception is encountered.
228 Flag whether to abort the execution queue, if an exception is encountered.
229
229
230 Returns
230 Returns
231 -------
231 -------
232 The msg_id of the message sent.
232 The msg_id of the message sent.
233 """
233 """
234 if user_expressions is None:
234 if user_expressions is None:
235 user_expressions = {}
235 user_expressions = {}
236 if allow_stdin is None:
236 if allow_stdin is None:
237 allow_stdin = self.allow_stdin
237 allow_stdin = self.allow_stdin
238
238
239
239
240 # Don't waste network traffic if inputs are invalid
240 # Don't waste network traffic if inputs are invalid
241 if not isinstance(code, string_types):
241 if not isinstance(code, string_types):
242 raise ValueError('code %r must be a string' % code)
242 raise ValueError('code %r must be a string' % code)
243 validate_string_dict(user_expressions)
243 validate_string_dict(user_expressions)
244
244
245 # Create class for content/msg creation. Related to, but possibly
245 # Create class for content/msg creation. Related to, but possibly
246 # not in Session.
246 # not in Session.
247 content = dict(code=code, silent=silent, store_history=store_history,
247 content = dict(code=code, silent=silent, store_history=store_history,
248 user_expressions=user_expressions,
248 user_expressions=user_expressions,
249 allow_stdin=allow_stdin, stop_on_error=stop_on_error
249 allow_stdin=allow_stdin, stop_on_error=stop_on_error
250 )
250 )
251 msg = self.session.msg('execute_request', content)
251 msg = self.session.msg('execute_request', content)
252 self.shell_channel.send(msg)
252 self.shell_channel.send(msg)
253 return msg['header']['msg_id']
253 return msg['header']['msg_id']
254
254
255 def complete(self, code, cursor_pos=None):
255 def complete(self, code, cursor_pos=None):
256 """Tab complete text in the kernel's namespace.
256 """Tab complete text in the kernel's namespace.
257
257
258 Parameters
258 Parameters
259 ----------
259 ----------
260 code : str
260 code : str
261 The context in which completion is requested.
261 The context in which completion is requested.
262 Can be anything between a variable name and an entire cell.
262 Can be anything between a variable name and an entire cell.
263 cursor_pos : int, optional
263 cursor_pos : int, optional
264 The position of the cursor in the block of code where the completion was requested.
264 The position of the cursor in the block of code where the completion was requested.
265 Default: ``len(code)``
265 Default: ``len(code)``
266
266
267 Returns
267 Returns
268 -------
268 -------
269 The msg_id of the message sent.
269 The msg_id of the message sent.
270 """
270 """
271 if cursor_pos is None:
271 if cursor_pos is None:
272 cursor_pos = len(code)
272 cursor_pos = len(code)
273 content = dict(code=code, cursor_pos=cursor_pos)
273 content = dict(code=code, cursor_pos=cursor_pos)
274 msg = self.session.msg('complete_request', content)
274 msg = self.session.msg('complete_request', content)
275 self.shell_channel.send(msg)
275 self.shell_channel.send(msg)
276 return msg['header']['msg_id']
276 return msg['header']['msg_id']
277
277
278 def inspect(self, code, cursor_pos=None, detail_level=0):
278 def inspect(self, code, cursor_pos=None, detail_level=0):
279 """Get metadata information about an object in the kernel's namespace.
279 """Get metadata information about an object in the kernel's namespace.
280
280
281 It is up to the kernel to determine the appropriate object to inspect.
281 It is up to the kernel to determine the appropriate object to inspect.
282
282
283 Parameters
283 Parameters
284 ----------
284 ----------
285 code : str
285 code : str
286 The context in which info is requested.
286 The context in which info is requested.
287 Can be anything between a variable name and an entire cell.
287 Can be anything between a variable name and an entire cell.
288 cursor_pos : int, optional
288 cursor_pos : int, optional
289 The position of the cursor in the block of code where the info was requested.
289 The position of the cursor in the block of code where the info was requested.
290 Default: ``len(code)``
290 Default: ``len(code)``
291 detail_level : int, optional
291 detail_level : int, optional
292 The level of detail for the introspection (0-2)
292 The level of detail for the introspection (0-2)
293
293
294 Returns
294 Returns
295 -------
295 -------
296 The msg_id of the message sent.
296 The msg_id of the message sent.
297 """
297 """
298 if cursor_pos is None:
298 if cursor_pos is None:
299 cursor_pos = len(code)
299 cursor_pos = len(code)
300 content = dict(code=code, cursor_pos=cursor_pos,
300 content = dict(code=code, cursor_pos=cursor_pos,
301 detail_level=detail_level,
301 detail_level=detail_level,
302 )
302 )
303 msg = self.session.msg('inspect_request', content)
303 msg = self.session.msg('inspect_request', content)
304 self.shell_channel.send(msg)
304 self.shell_channel.send(msg)
305 return msg['header']['msg_id']
305 return msg['header']['msg_id']
306
306
307 def history(self, raw=True, output=False, hist_access_type='range', **kwargs):
307 def history(self, raw=True, output=False, hist_access_type='range', **kwargs):
308 """Get entries from the kernel's history list.
308 """Get entries from the kernel's history list.
309
309
310 Parameters
310 Parameters
311 ----------
311 ----------
312 raw : bool
312 raw : bool
313 If True, return the raw input.
313 If True, return the raw input.
314 output : bool
314 output : bool
315 If True, then return the output as well.
315 If True, then return the output as well.
316 hist_access_type : str
316 hist_access_type : str
317 'range' (fill in session, start and stop params), 'tail' (fill in n)
317 'range' (fill in session, start and stop params), 'tail' (fill in n)
318 or 'search' (fill in pattern param).
318 or 'search' (fill in pattern param).
319
319
320 session : int
320 session : int
321 For a range request, the session from which to get lines. Session
321 For a range request, the session from which to get lines. Session
322 numbers are positive integers; negative ones count back from the
322 numbers are positive integers; negative ones count back from the
323 current session.
323 current session.
324 start : int
324 start : int
325 The first line number of a history range.
325 The first line number of a history range.
326 stop : int
326 stop : int
327 The final (excluded) line number of a history range.
327 The final (excluded) line number of a history range.
328
328
329 n : int
329 n : int
330 The number of lines of history to get for a tail request.
330 The number of lines of history to get for a tail request.
331
331
332 pattern : str
332 pattern : str
333 The glob-syntax pattern for a search request.
333 The glob-syntax pattern for a search request.
334
334
335 Returns
335 Returns
336 -------
336 -------
337 The msg_id of the message sent.
337 The msg_id of the message sent.
338 """
338 """
339 content = dict(raw=raw, output=output, hist_access_type=hist_access_type,
339 content = dict(raw=raw, output=output, hist_access_type=hist_access_type,
340 **kwargs)
340 **kwargs)
341 msg = self.session.msg('history_request', content)
341 msg = self.session.msg('history_request', content)
342 self.shell_channel.send(msg)
342 self.shell_channel.send(msg)
343 return msg['header']['msg_id']
343 return msg['header']['msg_id']
344
344
345 def kernel_info(self):
345 def kernel_info(self):
346 """Request kernel info."""
346 """Request kernel info."""
347 msg = self.session.msg('kernel_info_request')
347 msg = self.session.msg('kernel_info_request')
348 self.shell_channel.send(msg)
348 self.shell_channel.send(msg)
349 return msg['header']['msg_id']
349 return msg['header']['msg_id']
350
350
351 def _handle_kernel_info_reply(self, msg):
351 def _handle_kernel_info_reply(self, msg):
352 """handle kernel info reply
352 """handle kernel info reply
353
353
354 sets protocol adaptation version. This might
354 sets protocol adaptation version. This might
355 be run from a separate thread.
355 be run from a separate thread.
356 """
356 """
357 adapt_version = int(msg['content']['protocol_version'].split('.')[0])
357 adapt_version = int(msg['content']['protocol_version'].split('.')[0])
358 if adapt_version != major_protocol_version:
358 if adapt_version != major_protocol_version:
359 self.session.adapt_version = adapt_version
359 self.session.adapt_version = adapt_version
360
360
361 def shutdown(self, restart=False):
361 def shutdown(self, restart=False):
362 """Request an immediate kernel shutdown.
362 """Request an immediate kernel shutdown.
363
363
364 Upon receipt of the (empty) reply, client code can safely assume that
364 Upon receipt of the (empty) reply, client code can safely assume that
365 the kernel has shut down and it's safe to forcefully terminate it if
365 the kernel has shut down and it's safe to forcefully terminate it if
366 it's still alive.
366 it's still alive.
367
367
368 The kernel will send the reply via a function registered with Python's
368 The kernel will send the reply via a function registered with Python's
369 atexit module, ensuring it's truly done as the kernel is done with all
369 atexit module, ensuring it's truly done as the kernel is done with all
370 normal operation.
370 normal operation.
371 """
371 """
372 # Send quit message to kernel. Once we implement kernel-side setattr,
372 # Send quit message to kernel. Once we implement kernel-side setattr,
373 # this should probably be done that way, but for now this will do.
373 # this should probably be done that way, but for now this will do.
374 msg = self.session.msg('shutdown_request', {'restart':restart})
374 msg = self.session.msg('shutdown_request', {'restart':restart})
375 self.shell_channel.send(msg)
375 self.shell_channel.send(msg)
376 return msg['header']['msg_id']
376 return msg['header']['msg_id']
377
377
378 def is_complete(self, code):
378 def is_complete(self, code):
379 msg = self.session.msg('is_complete_request', {'code': code})
379 msg = self.session.msg('is_complete_request', {'code': code})
380 self.shell_channel.send(msg)
380 self.shell_channel.send(msg)
381 return msg['header']['msg_id']
381 return msg['header']['msg_id']
382
382
383 def input(self, string):
383 def input(self, string):
384 """Send a string of raw input to the kernel."""
384 """Send a string of raw input to the kernel."""
385 content = dict(value=string)
385 content = dict(value=string)
386 msg = self.session.msg('input_reply', content)
386 msg = self.session.msg('input_reply', content)
387 self.stdin_channel.send(msg)
387 self.stdin_channel.send(msg)
388
388
389
389
390 KernelClientABC.register(KernelClient)
390 KernelClientABC.register(KernelClient)
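The proxy methods defined above (`execute`, `get_shell_msg`, `get_iopub_msg`) return and consume raw protocol messages. A common pattern, shown here as a hedged sketch rather than anything in the diff, is to pair a request's `msg_id` with its shell reply and its iopub output via `parent_header`:

```python
try:
    from queue import Empty        # Python 3
except ImportError:
    from Queue import Empty        # Python 2

def run_and_collect(kc, code, timeout=5):
    """Send an execute_request and gather its shell reply plus iopub output."""
    msg_id = kc.execute(code)
    while True:                                   # skip unrelated replies, if any
        reply = kc.get_shell_msg(timeout=timeout)
        if reply['parent_header'].get('msg_id') == msg_id:
            break
    outputs = []
    while True:
        try:
            msg = kc.get_iopub_msg(timeout=0.2)   # drain whatever iopub produced
        except Empty:
            break
        if msg['parent_header'].get('msg_id') == msg_id:
            outputs.append(msg)
    return reply, outputs
```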
@@ -1,80 +1,80 @@
1 """Abstract base class for kernel clients"""
1 """Abstract base class for kernel clients"""
2
2
3 #-----------------------------------------------------------------------------
3 #-----------------------------------------------------------------------------
4 # Copyright (C) 2013 The IPython Development Team
4 # Copyright (C) 2013 The IPython Development Team
5 #
5 #
6 # Distributed under the terms of the BSD License. The full license is in
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
7 # the file COPYING, distributed as part of this software.
8 #-----------------------------------------------------------------------------
8 #-----------------------------------------------------------------------------
9
9
10 #-----------------------------------------------------------------------------
10 #-----------------------------------------------------------------------------
11 # Imports
11 # Imports
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13
13
14 import abc
14 import abc
15
15
16 from IPython.utils.py3compat import with_metaclass
16 from IPython.utils.py3compat import with_metaclass
17
17
18 #-----------------------------------------------------------------------------
18 #-----------------------------------------------------------------------------
19 # Main kernel client class
19 # Main kernel client class
20 #-----------------------------------------------------------------------------
20 #-----------------------------------------------------------------------------
21
21
22 class KernelClientABC(with_metaclass(abc.ABCMeta, object)):
22 class KernelClientABC(with_metaclass(abc.ABCMeta, object)):
23 """KernelManager ABC.
23 """KernelManager ABC.
24
24
25 The docstrings for this class can be found in the base implementation:
25 The docstrings for this class can be found in the base implementation:
26
26
27 `IPython.kernel.client.KernelClient`
27 `jupyter_client.client.KernelClient`
28 """
28 """
29
29
30 @abc.abstractproperty
30 @abc.abstractproperty
31 def kernel(self):
31 def kernel(self):
32 pass
32 pass
33
33
34 @abc.abstractproperty
34 @abc.abstractproperty
35 def shell_channel_class(self):
35 def shell_channel_class(self):
36 pass
36 pass
37
37
38 @abc.abstractproperty
38 @abc.abstractproperty
39 def iopub_channel_class(self):
39 def iopub_channel_class(self):
40 pass
40 pass
41
41
42 @abc.abstractproperty
42 @abc.abstractproperty
43 def hb_channel_class(self):
43 def hb_channel_class(self):
44 pass
44 pass
45
45
46 @abc.abstractproperty
46 @abc.abstractproperty
47 def stdin_channel_class(self):
47 def stdin_channel_class(self):
48 pass
48 pass
49
49
50 #--------------------------------------------------------------------------
50 #--------------------------------------------------------------------------
51 # Channel management methods
51 # Channel management methods
52 #--------------------------------------------------------------------------
52 #--------------------------------------------------------------------------
53
53
54 @abc.abstractmethod
54 @abc.abstractmethod
55 def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
55 def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
56 pass
56 pass
57
57
58 @abc.abstractmethod
58 @abc.abstractmethod
59 def stop_channels(self):
59 def stop_channels(self):
60 pass
60 pass
61
61
62 @abc.abstractproperty
62 @abc.abstractproperty
63 def channels_running(self):
63 def channels_running(self):
64 pass
64 pass
65
65
66 @abc.abstractproperty
66 @abc.abstractproperty
67 def shell_channel(self):
67 def shell_channel(self):
68 pass
68 pass
69
69
70 @abc.abstractproperty
70 @abc.abstractproperty
71 def iopub_channel(self):
71 def iopub_channel(self):
72 pass
72 pass
73
73
74 @abc.abstractproperty
74 @abc.abstractproperty
75 def stdin_channel(self):
75 def stdin_channel(self):
76 pass
76 pass
77
77
78 @abc.abstractproperty
78 @abc.abstractproperty
79 def hb_channel(self):
79 def hb_channel(self):
80 pass
80 pass
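Like the channel ABCs, `KernelClientABC` is hooked up by virtual registration (the `KernelClientABC.register(KernelClient)` call at the end of the client module) rather than by inheritance, so `isinstance` checks succeed without coupling the concrete class to the ABC. A small check, assuming the module paths implied by the relative imports in this changeset:

```python
from jupyter_client.client import KernelClient
from jupyter_client.clientabc import KernelClientABC

print(issubclass(KernelClient, KernelClientABC))    # True via abc registration
print(isinstance(KernelClient(), KernelClientABC))  # True for instances as well
```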
@@ -1,576 +1,576 @@
1 """Utilities for connecting to kernels
1 """Utilities for connecting to kernels
2
2
3 The :class:`ConnectionFileMixin` class in this module encapsulates the logic
3 The :class:`ConnectionFileMixin` class in this module encapsulates the logic
4 related to writing and reading connection files.
4 related to writing and reading connection files.
5 """
5 """
6 # Copyright (c) IPython Development Team.
6 # Copyright (c) IPython Development Team.
7 # Distributed under the terms of the Modified BSD License.
7 # Distributed under the terms of the Modified BSD License.
8
8
9 #-----------------------------------------------------------------------------
9 #-----------------------------------------------------------------------------
10 # Imports
10 # Imports
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12
12
13 from __future__ import absolute_import
13 from __future__ import absolute_import
14
14
15 import glob
15 import glob
16 import json
16 import json
17 import os
17 import os
18 import socket
18 import socket
19 import sys
19 import sys
20 from getpass import getpass
20 from getpass import getpass
21 from subprocess import Popen, PIPE
21 from subprocess import Popen, PIPE
22 import tempfile
22 import tempfile
23
23
24 import zmq
24 import zmq
25
25
26 # IPython imports
26 # IPython imports
27 from IPython.config import LoggingConfigurable
27 from IPython.config import LoggingConfigurable
28 from IPython.core.profiledir import ProfileDir
28 from IPython.core.profiledir import ProfileDir
29 from IPython.utils.localinterfaces import localhost
29 from IPython.utils.localinterfaces import localhost
30 from IPython.utils.path import filefind, get_ipython_dir
30 from IPython.utils.path import filefind, get_ipython_dir
31 from IPython.utils.py3compat import (str_to_bytes, bytes_to_str, cast_bytes_py2,
31 from IPython.utils.py3compat import (str_to_bytes, bytes_to_str, cast_bytes_py2,
32 string_types)
32 string_types)
33 from IPython.utils.traitlets import (
33 from IPython.utils.traitlets import (
34 Bool, Integer, Unicode, CaselessStrEnum, Instance,
34 Bool, Integer, Unicode, CaselessStrEnum, Instance,
35 )
35 )
36
36
37
37
38 #-----------------------------------------------------------------------------
38 #-----------------------------------------------------------------------------
39 # Working with Connection Files
39 # Working with Connection Files
40 #-----------------------------------------------------------------------------
40 #-----------------------------------------------------------------------------
41
41
42 def write_connection_file(fname=None, shell_port=0, iopub_port=0, stdin_port=0, hb_port=0,
42 def write_connection_file(fname=None, shell_port=0, iopub_port=0, stdin_port=0, hb_port=0,
43 control_port=0, ip='', key=b'', transport='tcp',
43 control_port=0, ip='', key=b'', transport='tcp',
44 signature_scheme='hmac-sha256',
44 signature_scheme='hmac-sha256',
45 ):
45 ):
46 """Generates a JSON config file, including the selection of random ports.
46 """Generates a JSON config file, including the selection of random ports.
47
47
48 Parameters
48 Parameters
49 ----------
49 ----------
50
50
51 fname : unicode
51 fname : unicode
52 The path to the file to write
52 The path to the file to write
53
53
54 shell_port : int, optional
54 shell_port : int, optional
55 The port to use for ROUTER (shell) channel.
55 The port to use for ROUTER (shell) channel.
56
56
57 iopub_port : int, optional
57 iopub_port : int, optional
58 The port to use for the SUB channel.
58 The port to use for the SUB channel.
59
59
60 stdin_port : int, optional
60 stdin_port : int, optional
61 The port to use for the ROUTER (raw input) channel.
61 The port to use for the ROUTER (raw input) channel.
62
62
63 control_port : int, optional
63 control_port : int, optional
64 The port to use for the ROUTER (control) channel.
64 The port to use for the ROUTER (control) channel.
65
65
66 hb_port : int, optional
66 hb_port : int, optional
67 The port to use for the heartbeat REP channel.
67 The port to use for the heartbeat REP channel.
68
68
69 ip : str, optional
69 ip : str, optional
70 The ip address the kernel will bind to.
70 The ip address the kernel will bind to.
71
71
72 key : str, optional
72 key : str, optional
73 The Session key used for message authentication.
73 The Session key used for message authentication.
74
74
75 signature_scheme : str, optional
75 signature_scheme : str, optional
76 The scheme used for message authentication.
76 The scheme used for message authentication.
77 This has the form 'digest-hash', where 'digest'
77 This has the form 'digest-hash', where 'digest'
78 is the scheme used for digests, and 'hash' is the name of the hash function
78 is the scheme used for digests, and 'hash' is the name of the hash function
79 used by the digest scheme.
79 used by the digest scheme.
80 Currently, 'hmac' is the only supported digest scheme,
80 Currently, 'hmac' is the only supported digest scheme,
81 and 'sha256' is the default hash function.
81 and 'sha256' is the default hash function.
82
82
83 """
83 """
84 if not ip:
84 if not ip:
85 ip = localhost()
85 ip = localhost()
86 # default to temporary connector file
86 # default to temporary connector file
87 if not fname:
87 if not fname:
88 fd, fname = tempfile.mkstemp('.json')
88 fd, fname = tempfile.mkstemp('.json')
89 os.close(fd)
89 os.close(fd)
90
90
91 # Find open ports as necessary.
91 # Find open ports as necessary.
92
92
93 ports = []
93 ports = []
94 ports_needed = int(shell_port <= 0) + \
94 ports_needed = int(shell_port <= 0) + \
95 int(iopub_port <= 0) + \
95 int(iopub_port <= 0) + \
96 int(stdin_port <= 0) + \
96 int(stdin_port <= 0) + \
97 int(control_port <= 0) + \
97 int(control_port <= 0) + \
98 int(hb_port <= 0)
98 int(hb_port <= 0)
99 if transport == 'tcp':
99 if transport == 'tcp':
100 for i in range(ports_needed):
100 for i in range(ports_needed):
101 sock = socket.socket()
101 sock = socket.socket()
102 # struct.pack('ii', (0,0)) is 8 null bytes
102 # struct.pack('ii', (0,0)) is 8 null bytes
103 sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, b'\0' * 8)
103 sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, b'\0' * 8)
104 sock.bind(('', 0))
104 sock.bind(('', 0))
105 ports.append(sock)
105 ports.append(sock)
106 for i, sock in enumerate(ports):
106 for i, sock in enumerate(ports):
107 port = sock.getsockname()[1]
107 port = sock.getsockname()[1]
108 sock.close()
108 sock.close()
109 ports[i] = port
109 ports[i] = port
110 else:
110 else:
111 N = 1
111 N = 1
112 for i in range(ports_needed):
112 for i in range(ports_needed):
113 while os.path.exists("%s-%s" % (ip, str(N))):
113 while os.path.exists("%s-%s" % (ip, str(N))):
114 N += 1
114 N += 1
115 ports.append(N)
115 ports.append(N)
116 N += 1
116 N += 1
117 if shell_port <= 0:
117 if shell_port <= 0:
118 shell_port = ports.pop(0)
118 shell_port = ports.pop(0)
119 if iopub_port <= 0:
119 if iopub_port <= 0:
120 iopub_port = ports.pop(0)
120 iopub_port = ports.pop(0)
121 if stdin_port <= 0:
121 if stdin_port <= 0:
122 stdin_port = ports.pop(0)
122 stdin_port = ports.pop(0)
123 if control_port <= 0:
123 if control_port <= 0:
124 control_port = ports.pop(0)
124 control_port = ports.pop(0)
125 if hb_port <= 0:
125 if hb_port <= 0:
126 hb_port = ports.pop(0)
126 hb_port = ports.pop(0)
127
127
128 cfg = dict( shell_port=shell_port,
128 cfg = dict( shell_port=shell_port,
129 iopub_port=iopub_port,
129 iopub_port=iopub_port,
130 stdin_port=stdin_port,
130 stdin_port=stdin_port,
131 control_port=control_port,
131 control_port=control_port,
132 hb_port=hb_port,
132 hb_port=hb_port,
133 )
133 )
134 cfg['ip'] = ip
134 cfg['ip'] = ip
135 cfg['key'] = bytes_to_str(key)
135 cfg['key'] = bytes_to_str(key)
136 cfg['transport'] = transport
136 cfg['transport'] = transport
137 cfg['signature_scheme'] = signature_scheme
137 cfg['signature_scheme'] = signature_scheme
138
138
139 with open(fname, 'w') as f:
139 with open(fname, 'w') as f:
140 f.write(json.dumps(cfg, indent=2))
140 f.write(json.dumps(cfg, indent=2))
141
141
142 return fname, cfg
142 return fname, cfg
143
143
144
144
145 def get_connection_file(app=None):
145 def get_connection_file(app=None):
146 """Return the path to the connection file of an app
146 """Return the path to the connection file of an app
147
147
148 Parameters
148 Parameters
149 ----------
149 ----------
150 app : IPKernelApp instance [optional]
150 app : IPKernelApp instance [optional]
151 If unspecified, the currently running app will be used
151 If unspecified, the currently running app will be used
152 """
152 """
153 if app is None:
153 if app is None:
154 from IPython.kernel.zmq.kernelapp import IPKernelApp
154 from jupyter_client.kernelapp import IPKernelApp
155 if not IPKernelApp.initialized():
155 if not IPKernelApp.initialized():
156 raise RuntimeError("app not specified, and not in a running Kernel")
156 raise RuntimeError("app not specified, and not in a running Kernel")
157
157
158 app = IPKernelApp.instance()
158 app = IPKernelApp.instance()
159 return filefind(app.connection_file, ['.', app.profile_dir.security_dir])
159 return filefind(app.connection_file, ['.', app.profile_dir.security_dir])
160
160
161
161
162 def find_connection_file(filename='kernel-*.json', profile=None):
162 def find_connection_file(filename='kernel-*.json', profile=None):
163 """find a connection file, and return its absolute path.
163 """find a connection file, and return its absolute path.
164
164
165 The current working directory and the profile's security
165 The current working directory and the profile's security
166 directory will be searched for the file if it is not given by
166 directory will be searched for the file if it is not given by
167 absolute path.
167 absolute path.
168
168
169 If profile is unspecified, then the current running application's
169 If profile is unspecified, then the current running application's
170 profile will be used, or 'default', if not run from IPython.
170 profile will be used, or 'default', if not run from IPython.
171
171
172 If the argument does not match an existing file, it will be interpreted as a
172 If the argument does not match an existing file, it will be interpreted as a
173 fileglob, and the matching file in the profile's security dir with
173 fileglob, and the matching file in the profile's security dir with
174 the latest access time will be used.
174 the latest access time will be used.
175
175
176 Parameters
176 Parameters
177 ----------
177 ----------
178 filename : str
178 filename : str
179 The connection file or fileglob to search for.
179 The connection file or fileglob to search for.
180 profile : str [optional]
180 profile : str [optional]
181 The name of the profile to use when searching for the connection file,
181 The name of the profile to use when searching for the connection file,
182 if different from the current IPython session or 'default'.
182 if different from the current IPython session or 'default'.
183
183
184 Returns
184 Returns
185 -------
185 -------
186 str : The absolute path of the connection file.
186 str : The absolute path of the connection file.
187 """
187 """
188 from IPython.core.application import BaseIPythonApplication as IPApp
188 from IPython.core.application import BaseIPythonApplication as IPApp
189 try:
189 try:
190 # quick check for absolute path, before going through logic
190 # quick check for absolute path, before going through logic
191 return filefind(filename)
191 return filefind(filename)
192 except IOError:
192 except IOError:
193 pass
193 pass
194
194
195 if profile is None:
195 if profile is None:
196 # profile unspecified, check if running from an IPython app
196 # profile unspecified, check if running from an IPython app
197 if IPApp.initialized():
197 if IPApp.initialized():
198 app = IPApp.instance()
198 app = IPApp.instance()
199 profile_dir = app.profile_dir
199 profile_dir = app.profile_dir
200 else:
200 else:
201 # not running in IPython, use default profile
201 # not running in IPython, use default profile
202 profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), 'default')
202 profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), 'default')
203 else:
203 else:
204 # find profiledir by profile name:
204 # find profiledir by profile name:
205 profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile)
205 profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile)
206 security_dir = profile_dir.security_dir
206 security_dir = profile_dir.security_dir
207
207
208 try:
208 try:
209 # first, try explicit name
209 # first, try explicit name
210 return filefind(filename, ['.', security_dir])
210 return filefind(filename, ['.', security_dir])
211 except IOError:
211 except IOError:
212 pass
212 pass
213
213
214 # not found by full name
214 # not found by full name
215
215
216 if '*' in filename:
216 if '*' in filename:
217 # given as a glob already
217 # given as a glob already
218 pat = filename
218 pat = filename
219 else:
219 else:
220 # accept any substring match
220 # accept any substring match
221 pat = '*%s*' % filename
221 pat = '*%s*' % filename
222 matches = glob.glob( os.path.join(security_dir, pat) )
222 matches = glob.glob( os.path.join(security_dir, pat) )
223 if not matches:
223 if not matches:
224 raise IOError("Could not find %r in %r" % (filename, security_dir))
224 raise IOError("Could not find %r in %r" % (filename, security_dir))
225 elif len(matches) == 1:
225 elif len(matches) == 1:
226 return matches[0]
226 return matches[0]
227 else:
227 else:
228 # get most recent match, by access time:
228 # get most recent match, by access time:
229 return sorted(matches, key=lambda f: os.stat(f).st_atime)[-1]
229 return sorted(matches, key=lambda f: os.stat(f).st_atime)[-1]
230
230
231
231
232 def get_connection_info(connection_file=None, unpack=False, profile=None):
232 def get_connection_info(connection_file=None, unpack=False, profile=None):
233 """Return the connection information for the current Kernel.
233 """Return the connection information for the current Kernel.
234
234
235 Parameters
235 Parameters
236 ----------
236 ----------
237 connection_file : str [optional]
237 connection_file : str [optional]
238 The connection file to be used. Can be given by absolute path, or
238 The connection file to be used. Can be given by absolute path, or
239 IPython will search in the security directory of a given profile.
239 IPython will search in the security directory of a given profile.
240 If run from IPython,
240 If run from IPython,
241
241
242 If unspecified, the connection file for the currently running
242 If unspecified, the connection file for the currently running
243 IPython Kernel will be used, which is only allowed from inside a kernel.
243 IPython Kernel will be used, which is only allowed from inside a kernel.
244 unpack : bool [default: False]
244 unpack : bool [default: False]
245 if True, return the unpacked dict, otherwise just the string contents
245 if True, return the unpacked dict, otherwise just the string contents
246 of the file.
246 of the file.
247 profile : str [optional]
247 profile : str [optional]
248 The name of the profile to use when searching for the connection file,
248 The name of the profile to use when searching for the connection file,
249 if different from the current IPython session or 'default'.
249 if different from the current IPython session or 'default'.
250
250
251
251
252 Returns
252 Returns
253 -------
253 -------
254 The connection dictionary of the current kernel, as string or dict,
254 The connection dictionary of the current kernel, as string or dict,
255 depending on `unpack`.
255 depending on `unpack`.
256 """
256 """
257 if connection_file is None:
257 if connection_file is None:
258 # get connection file from current kernel
258 # get connection file from current kernel
259 cf = get_connection_file()
259 cf = get_connection_file()
260 else:
260 else:
261 # connection file specified, allow shortnames:
261 # connection file specified, allow shortnames:
262 cf = find_connection_file(connection_file, profile=profile)
262 cf = find_connection_file(connection_file, profile=profile)
263
263
264 with open(cf) as f:
264 with open(cf) as f:
265 info = f.read()
265 info = f.read()
266
266
267 if unpack:
267 if unpack:
268 info = json.loads(info)
268 info = json.loads(info)
269 # ensure key is bytes:
269 # ensure key is bytes:
270 info['key'] = str_to_bytes(info.get('key', ''))
270 info['key'] = str_to_bytes(info.get('key', ''))
271 return info
271 return info
272
272
273
273
274 def connect_qtconsole(connection_file=None, argv=None, profile=None):
274 def connect_qtconsole(connection_file=None, argv=None, profile=None):
275 """Connect a qtconsole to the current kernel.
275 """Connect a qtconsole to the current kernel.
276
276
277 This is useful for connecting a second qtconsole to a kernel, or to a
277 This is useful for connecting a second qtconsole to a kernel, or to a
278 local notebook.
278 local notebook.
279
279
280 Parameters
280 Parameters
281 ----------
281 ----------
282 connection_file : str [optional]
282 connection_file : str [optional]
283 The connection file to be used. Can be given by absolute path, or
283 The connection file to be used. Can be given by absolute path, or
284 IPython will search in the security directory of a given profile.
284 IPython will search in the security directory of a given profile.
285 If run from IPython,
285 If run from IPython,
286
286
287 If unspecified, the connection file for the currently running
287 If unspecified, the connection file for the currently running
288 IPython Kernel will be used, which is only allowed from inside a kernel.
288 IPython Kernel will be used, which is only allowed from inside a kernel.
289 argv : list [optional]
289 argv : list [optional]
290 Any extra args to be passed to the console.
290 Any extra args to be passed to the console.
291 profile : str [optional]
291 profile : str [optional]
292 The name of the profile to use when searching for the connection file,
292 The name of the profile to use when searching for the connection file,
293 if different from the current IPython session or 'default'.
293 if different from the current IPython session or 'default'.
294
294
295
295
296 Returns
296 Returns
297 -------
297 -------
298 :class:`subprocess.Popen` instance running the qtconsole frontend
298 :class:`subprocess.Popen` instance running the qtconsole frontend
299 """
299 """
300 argv = [] if argv is None else argv
300 argv = [] if argv is None else argv
301
301
302 if connection_file is None:
302 if connection_file is None:
303 # get connection file from current kernel
303 # get connection file from current kernel
304 cf = get_connection_file()
304 cf = get_connection_file()
305 else:
305 else:
306 cf = find_connection_file(connection_file, profile=profile)
306 cf = find_connection_file(connection_file, profile=profile)
307
307
308 cmd = ';'.join([
308 cmd = ';'.join([
309 "from IPython.qt.console import qtconsoleapp",
309 "from IPython.qt.console import qtconsoleapp",
310 "qtconsoleapp.main()"
310 "qtconsoleapp.main()"
311 ])
311 ])
312
312
313 return Popen([sys.executable, '-c', cmd, '--existing', cf] + argv,
313 return Popen([sys.executable, '-c', cmd, '--existing', cf] + argv,
314 stdout=PIPE, stderr=PIPE, close_fds=(sys.platform != 'win32'),
314 stdout=PIPE, stderr=PIPE, close_fds=(sys.platform != 'win32'),
315 )
315 )
316
316
317
317
318 def tunnel_to_kernel(connection_info, sshserver, sshkey=None):
318 def tunnel_to_kernel(connection_info, sshserver, sshkey=None):
319 """tunnel connections to a kernel via ssh
319 """tunnel connections to a kernel via ssh
320
320
321 This will open four SSH tunnels from localhost on this machine to the
321 This will open four SSH tunnels from localhost on this machine to the
322 ports associated with the kernel. They can be either direct
322 ports associated with the kernel. They can be either direct
323 localhost-localhost tunnels, or if an intermediate server is necessary,
323 localhost-localhost tunnels, or if an intermediate server is necessary,
324 the kernel must be listening on a public IP.
324 the kernel must be listening on a public IP.
325
325
326 Parameters
326 Parameters
327 ----------
327 ----------
328 connection_info : dict or str (path)
328 connection_info : dict or str (path)
329 Either a connection dict, or the path to a JSON connection file
329 Either a connection dict, or the path to a JSON connection file
330 sshserver : str
330 sshserver : str
331 The ssh server to use to tunnel to the kernel. Can be a full
331 The ssh server to use to tunnel to the kernel. Can be a full
332 `user@server:port` string. ssh config aliases are respected.
332 `user@server:port` string. ssh config aliases are respected.
333 sshkey : str [optional]
333 sshkey : str [optional]
334 Path to file containing ssh key to use for authentication.
334 Path to file containing ssh key to use for authentication.
335 Only necessary if your ssh config does not already associate
335 Only necessary if your ssh config does not already associate
336 a keyfile with the host.
336 a keyfile with the host.
337
337
338 Returns
338 Returns
339 -------
339 -------
340
340
341 (shell, iopub, stdin, hb) : ints
341 (shell, iopub, stdin, hb) : ints
342 The four ports on localhost that have been forwarded to the kernel.
342 The four ports on localhost that have been forwarded to the kernel.
343 """
343 """
344 from zmq.ssh import tunnel
344 from zmq.ssh import tunnel
345 if isinstance(connection_info, string_types):
345 if isinstance(connection_info, string_types):
346 # it's a path, unpack it
346 # it's a path, unpack it
347 with open(connection_info) as f:
347 with open(connection_info) as f:
348 connection_info = json.loads(f.read())
348 connection_info = json.loads(f.read())
349
349
350 cf = connection_info
350 cf = connection_info
351
351
352 lports = tunnel.select_random_ports(4)
352 lports = tunnel.select_random_ports(4)
353 rports = cf['shell_port'], cf['iopub_port'], cf['stdin_port'], cf['hb_port']
353 rports = cf['shell_port'], cf['iopub_port'], cf['stdin_port'], cf['hb_port']
354
354
355 remote_ip = cf['ip']
355 remote_ip = cf['ip']
356
356
357 if tunnel.try_passwordless_ssh(sshserver, sshkey):
357 if tunnel.try_passwordless_ssh(sshserver, sshkey):
358 password=False
358 password=False
359 else:
359 else:
360 password = getpass("SSH Password for %s: " % cast_bytes_py2(sshserver))
360 password = getpass("SSH Password for %s: " % cast_bytes_py2(sshserver))
361
361
362 for lp,rp in zip(lports, rports):
362 for lp,rp in zip(lports, rports):
363 tunnel.ssh_tunnel(lp, rp, sshserver, remote_ip, sshkey, password)
363 tunnel.ssh_tunnel(lp, rp, sshserver, remote_ip, sshkey, password)
364
364
365 return tuple(lports)
365 return tuple(lports)
366
366
367
367
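As a quick orientation, the helper above might be used along these lines. This is only a sketch: the connection-file path, SSH host, and key location are invented, and it assumes this module is importable as jupyter_client.connect after the rename.

    from jupyter_client.connect import tunnel_to_kernel

    # Hypothetical kernel connection file and SSH gateway:
    cf = "/home/user/.ipython/profile_default/security/kernel-1234.json"
    shell, iopub, stdin, hb = tunnel_to_kernel(cf, "user@gateway.example.com",
                                               sshkey="/home/user/.ssh/id_rsa")
    # The returned ports are local; a client can now connect to e.g.
    # "tcp://127.0.0.1:%i" % shell instead of the remote kernel address.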
368 #-----------------------------------------------------------------------------
368 #-----------------------------------------------------------------------------
369 # Mixin for classes that work with connection files
369 # Mixin for classes that work with connection files
370 #-----------------------------------------------------------------------------
370 #-----------------------------------------------------------------------------
371
371
372 channel_socket_types = {
372 channel_socket_types = {
373 'hb' : zmq.REQ,
373 'hb' : zmq.REQ,
374 'shell' : zmq.DEALER,
374 'shell' : zmq.DEALER,
375 'iopub' : zmq.SUB,
375 'iopub' : zmq.SUB,
376 'stdin' : zmq.DEALER,
376 'stdin' : zmq.DEALER,
377 'control': zmq.DEALER,
377 'control': zmq.DEALER,
378 }
378 }
379
379
380 port_names = [ "%s_port" % channel for channel in ('shell', 'stdin', 'iopub', 'hb', 'control')]
380 port_names = [ "%s_port" % channel for channel in ('shell', 'stdin', 'iopub', 'hb', 'control')]
381
381
382 class ConnectionFileMixin(LoggingConfigurable):
382 class ConnectionFileMixin(LoggingConfigurable):
383 """Mixin for configurable classes that work with connection files"""
383 """Mixin for configurable classes that work with connection files"""
384
384
385 # The addresses for the communication channels
385 # The addresses for the communication channels
386 connection_file = Unicode('', config=True,
386 connection_file = Unicode('', config=True,
387 help="""JSON file in which to store connection info [default: kernel-<pid>.json]
387 help="""JSON file in which to store connection info [default: kernel-<pid>.json]
388
388
389 This file will contain the IP, ports, and authentication key needed to connect
389 This file will contain the IP, ports, and authentication key needed to connect
390 clients to this kernel. By default, this file will be created in the security dir
390 clients to this kernel. By default, this file will be created in the security dir
391 of the current profile, but can be specified by absolute path.
391 of the current profile, but can be specified by absolute path.
392 """)
392 """)
393 _connection_file_written = Bool(False)
393 _connection_file_written = Bool(False)
394
394
395 transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True)
395 transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True)
396
396
397 ip = Unicode(config=True,
397 ip = Unicode(config=True,
398 help="""Set the kernel\'s IP address [default localhost].
398 help="""Set the kernel\'s IP address [default localhost].
399 If the IP address is something other than localhost, then
399 If the IP address is something other than localhost, then
400 consoles on other machines will be able to connect
400 consoles on other machines will be able to connect
401 to the kernel, so be careful!"""
401 to the kernel, so be careful!"""
402 )
402 )
403
403
404 def _ip_default(self):
404 def _ip_default(self):
405 if self.transport == 'ipc':
405 if self.transport == 'ipc':
406 if self.connection_file:
406 if self.connection_file:
407 return os.path.splitext(self.connection_file)[0] + '-ipc'
407 return os.path.splitext(self.connection_file)[0] + '-ipc'
408 else:
408 else:
409 return 'kernel-ipc'
409 return 'kernel-ipc'
410 else:
410 else:
411 return localhost()
411 return localhost()
412
412
413 def _ip_changed(self, name, old, new):
413 def _ip_changed(self, name, old, new):
414 if new == '*':
414 if new == '*':
415 self.ip = '0.0.0.0'
415 self.ip = '0.0.0.0'
416
416
417 # protected traits
417 # protected traits
418
418
419 hb_port = Integer(0, config=True,
419 hb_port = Integer(0, config=True,
420 help="set the heartbeat port [default: random]")
420 help="set the heartbeat port [default: random]")
421 shell_port = Integer(0, config=True,
421 shell_port = Integer(0, config=True,
422 help="set the shell (ROUTER) port [default: random]")
422 help="set the shell (ROUTER) port [default: random]")
423 iopub_port = Integer(0, config=True,
423 iopub_port = Integer(0, config=True,
424 help="set the iopub (PUB) port [default: random]")
424 help="set the iopub (PUB) port [default: random]")
425 stdin_port = Integer(0, config=True,
425 stdin_port = Integer(0, config=True,
426 help="set the stdin (ROUTER) port [default: random]")
426 help="set the stdin (ROUTER) port [default: random]")
427 control_port = Integer(0, config=True,
427 control_port = Integer(0, config=True,
428 help="set the control (ROUTER) port [default: random]")
428 help="set the control (ROUTER) port [default: random]")
429
429
430 @property
430 @property
431 def ports(self):
431 def ports(self):
432 return [ getattr(self, name) for name in port_names ]
432 return [ getattr(self, name) for name in port_names ]
433
433
434 # The Session to use for communication with the kernel.
434 # The Session to use for communication with the kernel.
435 session = Instance('IPython.kernel.zmq.session.Session')
435 session = Instance('jupyter_client.session.Session')
436 def _session_default(self):
436 def _session_default(self):
437 from IPython.kernel.zmq.session import Session
437 from jupyter_client.session import Session
438 return Session(parent=self)
438 return Session(parent=self)
439
439
440 #--------------------------------------------------------------------------
440 #--------------------------------------------------------------------------
441 # Connection and ipc file management
441 # Connection and ipc file management
442 #--------------------------------------------------------------------------
442 #--------------------------------------------------------------------------
443
443
444 def get_connection_info(self):
444 def get_connection_info(self):
445 """return the connection info as a dict"""
445 """return the connection info as a dict"""
446 return dict(
446 return dict(
447 transport=self.transport,
447 transport=self.transport,
448 ip=self.ip,
448 ip=self.ip,
449 shell_port=self.shell_port,
449 shell_port=self.shell_port,
450 iopub_port=self.iopub_port,
450 iopub_port=self.iopub_port,
451 stdin_port=self.stdin_port,
451 stdin_port=self.stdin_port,
452 hb_port=self.hb_port,
452 hb_port=self.hb_port,
453 control_port=self.control_port,
453 control_port=self.control_port,
454 signature_scheme=self.session.signature_scheme,
454 signature_scheme=self.session.signature_scheme,
455 key=self.session.key,
455 key=self.session.key,
456 )
456 )
457
457
458 def cleanup_connection_file(self):
458 def cleanup_connection_file(self):
459 """Cleanup connection file *if we wrote it*
459 """Cleanup connection file *if we wrote it*
460
460
461 Will not raise if the connection file was already removed somehow.
461 Will not raise if the connection file was already removed somehow.
462 """
462 """
463 if self._connection_file_written:
463 if self._connection_file_written:
464 # cleanup connection files on full shutdown of kernel we started
464 # cleanup connection files on full shutdown of kernel we started
465 self._connection_file_written = False
465 self._connection_file_written = False
466 try:
466 try:
467 os.remove(self.connection_file)
467 os.remove(self.connection_file)
468 except (IOError, OSError, AttributeError):
468 except (IOError, OSError, AttributeError):
469 pass
469 pass
470
470
471 def cleanup_ipc_files(self):
471 def cleanup_ipc_files(self):
472 """Cleanup ipc files if we wrote them."""
472 """Cleanup ipc files if we wrote them."""
473 if self.transport != 'ipc':
473 if self.transport != 'ipc':
474 return
474 return
475 for port in self.ports:
475 for port in self.ports:
476 ipcfile = "%s-%i" % (self.ip, port)
476 ipcfile = "%s-%i" % (self.ip, port)
477 try:
477 try:
478 os.remove(ipcfile)
478 os.remove(ipcfile)
479 except (IOError, OSError):
479 except (IOError, OSError):
480 pass
480 pass
481
481
482 def write_connection_file(self):
482 def write_connection_file(self):
483 """Write connection info to JSON dict in self.connection_file."""
483 """Write connection info to JSON dict in self.connection_file."""
484 if self._connection_file_written and os.path.exists(self.connection_file):
484 if self._connection_file_written and os.path.exists(self.connection_file):
485 return
485 return
486
486
487 self.connection_file, cfg = write_connection_file(self.connection_file,
487 self.connection_file, cfg = write_connection_file(self.connection_file,
488 transport=self.transport, ip=self.ip, key=self.session.key,
488 transport=self.transport, ip=self.ip, key=self.session.key,
489 stdin_port=self.stdin_port, iopub_port=self.iopub_port,
489 stdin_port=self.stdin_port, iopub_port=self.iopub_port,
490 shell_port=self.shell_port, hb_port=self.hb_port,
490 shell_port=self.shell_port, hb_port=self.hb_port,
491 control_port=self.control_port,
491 control_port=self.control_port,
492 signature_scheme=self.session.signature_scheme,
492 signature_scheme=self.session.signature_scheme,
493 )
493 )
494 # write_connection_file also sets default ports:
494 # write_connection_file also sets default ports:
495 for name in port_names:
495 for name in port_names:
496 setattr(self, name, cfg[name])
496 setattr(self, name, cfg[name])
497
497
498 self._connection_file_written = True
498 self._connection_file_written = True
499
499
500 def load_connection_file(self):
500 def load_connection_file(self):
501 """Load connection info from JSON dict in self.connection_file."""
501 """Load connection info from JSON dict in self.connection_file."""
502 self.log.debug(u"Loading connection file %s", self.connection_file)
502 self.log.debug(u"Loading connection file %s", self.connection_file)
503 with open(self.connection_file) as f:
503 with open(self.connection_file) as f:
504 cfg = json.load(f)
504 cfg = json.load(f)
505 self.transport = cfg.get('transport', self.transport)
505 self.transport = cfg.get('transport', self.transport)
506 self.ip = cfg.get('ip', self._ip_default())
506 self.ip = cfg.get('ip', self._ip_default())
507
507
508 for name in port_names:
508 for name in port_names:
509 if getattr(self, name) == 0 and name in cfg:
509 if getattr(self, name) == 0 and name in cfg:
510 # not overridden by config or cl_args
510 # not overridden by config or cl_args
511 setattr(self, name, cfg[name])
511 setattr(self, name, cfg[name])
512
512
513 if 'key' in cfg:
513 if 'key' in cfg:
514 self.session.key = str_to_bytes(cfg['key'])
514 self.session.key = str_to_bytes(cfg['key'])
515 if 'signature_scheme' in cfg:
515 if 'signature_scheme' in cfg:
516 self.session.signature_scheme = cfg['signature_scheme']
516 self.session.signature_scheme = cfg['signature_scheme']
517
517
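To make the file format concrete, the JSON handled by write_connection_file and load_connection_file above is a flat dict along these lines; every value here is invented.

    connection_info = {
        "transport": "tcp",
        "ip": "127.0.0.1",
        "shell_port": 53794,
        "iopub_port": 53795,
        "stdin_port": 53796,
        "hb_port": 53797,
        "control_port": 53798,
        "signature_scheme": "hmac-sha256",
        "key": "a0436f6c-1916-498b-8eb9-e81ab9368e84",
    }

Note that load_connection_file only fills in ports that are still 0, so explicit configuration and command-line arguments take precedence over the file.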
518 #--------------------------------------------------------------------------
518 #--------------------------------------------------------------------------
519 # Creating connected sockets
519 # Creating connected sockets
520 #--------------------------------------------------------------------------
520 #--------------------------------------------------------------------------
521
521
522 def _make_url(self, channel):
522 def _make_url(self, channel):
523 """Make a ZeroMQ URL for a given channel."""
523 """Make a ZeroMQ URL for a given channel."""
524 transport = self.transport
524 transport = self.transport
525 ip = self.ip
525 ip = self.ip
526 port = getattr(self, '%s_port' % channel)
526 port = getattr(self, '%s_port' % channel)
527
527
528 if transport == 'tcp':
528 if transport == 'tcp':
529 return "tcp://%s:%i" % (ip, port)
529 return "tcp://%s:%i" % (ip, port)
530 else:
530 else:
531 return "%s://%s-%s" % (transport, ip, port)
531 return "%s://%s-%s" % (transport, ip, port)
532
532
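A small sketch of the two URL forms produced by _make_url, assuming km is any configured instance of this mixin (for example a KernelManager); the port value is arbitrary.

    km.transport, km.ip, km.shell_port = 'tcp', '127.0.0.1', 53794
    km._make_url('shell')   # -> 'tcp://127.0.0.1:53794'

    km.transport, km.ip = 'ipc', 'kernel-demo'   # for ipc, `ip` is a file-path prefix
    km._make_url('shell')   # -> 'ipc://kernel-demo-53794'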
533 def _create_connected_socket(self, channel, identity=None):
533 def _create_connected_socket(self, channel, identity=None):
534 """Create a zmq Socket and connect it to the kernel."""
534 """Create a zmq Socket and connect it to the kernel."""
535 url = self._make_url(channel)
535 url = self._make_url(channel)
536 socket_type = channel_socket_types[channel]
536 socket_type = channel_socket_types[channel]
537 self.log.debug("Connecting to: %s" % url)
537 self.log.debug("Connecting to: %s" % url)
538 sock = self.context.socket(socket_type)
538 sock = self.context.socket(socket_type)
539 # set linger to 1s to prevent hangs at exit
539 # set linger to 1s to prevent hangs at exit
540 sock.linger = 1000
540 sock.linger = 1000
541 if identity:
541 if identity:
542 sock.identity = identity
542 sock.identity = identity
543 sock.connect(url)
543 sock.connect(url)
544 return sock
544 return sock
545
545
546 def connect_iopub(self, identity=None):
546 def connect_iopub(self, identity=None):
547 """return zmq Socket connected to the IOPub channel"""
547 """return zmq Socket connected to the IOPub channel"""
548 sock = self._create_connected_socket('iopub', identity=identity)
548 sock = self._create_connected_socket('iopub', identity=identity)
549 sock.setsockopt(zmq.SUBSCRIBE, b'')
549 sock.setsockopt(zmq.SUBSCRIBE, b'')
550 return sock
550 return sock
551
551
552 def connect_shell(self, identity=None):
552 def connect_shell(self, identity=None):
553 """return zmq Socket connected to the Shell channel"""
553 """return zmq Socket connected to the Shell channel"""
554 return self._create_connected_socket('shell', identity=identity)
554 return self._create_connected_socket('shell', identity=identity)
555
555
556 def connect_stdin(self, identity=None):
556 def connect_stdin(self, identity=None):
557 """return zmq Socket connected to the StdIn channel"""
557 """return zmq Socket connected to the StdIn channel"""
558 return self._create_connected_socket('stdin', identity=identity)
558 return self._create_connected_socket('stdin', identity=identity)
559
559
560 def connect_hb(self, identity=None):
560 def connect_hb(self, identity=None):
561 """return zmq Socket connected to the Heartbeat channel"""
561 """return zmq Socket connected to the Heartbeat channel"""
562 return self._create_connected_socket('hb', identity=identity)
562 return self._create_connected_socket('hb', identity=identity)
563
563
564 def connect_control(self, identity=None):
564 def connect_control(self, identity=None):
565 """return zmq Socket connected to the Control channel"""
565 """return zmq Socket connected to the Control channel"""
566 return self._create_connected_socket('control', identity=identity)
566 return self._create_connected_socket('control', identity=identity)
567
567
568
568
569 __all__ = [
569 __all__ = [
570 'write_connection_file',
570 'write_connection_file',
571 'get_connection_file',
571 'get_connection_file',
572 'find_connection_file',
572 'find_connection_file',
573 'get_connection_info',
573 'get_connection_info',
574 'connect_qtconsole',
574 'connect_qtconsole',
575 'tunnel_to_kernel',
575 'tunnel_to_kernel',
576 ]
576 ]
@@ -1,62 +1,62 b''
1 """A kernel manager with a tornado IOLoop"""
1 """A kernel manager with a tornado IOLoop"""
2
2
3 #-----------------------------------------------------------------------------
3 #-----------------------------------------------------------------------------
4 # Copyright (C) 2013 The IPython Development Team
4 # Copyright (C) 2013 The IPython Development Team
5 #
5 #
6 # Distributed under the terms of the BSD License. The full license is in
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
7 # the file COPYING, distributed as part of this software.
8 #-----------------------------------------------------------------------------
8 #-----------------------------------------------------------------------------
9
9
10 #-----------------------------------------------------------------------------
10 #-----------------------------------------------------------------------------
11 # Imports
11 # Imports
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13
13
14 from __future__ import absolute_import
14 from __future__ import absolute_import
15
15
16 from zmq.eventloop import ioloop
16 from zmq.eventloop import ioloop
17 from zmq.eventloop.zmqstream import ZMQStream
17 from zmq.eventloop.zmqstream import ZMQStream
18
18
19 from IPython.utils.traitlets import (
19 from IPython.utils.traitlets import (
20 Instance
20 Instance
21 )
21 )
22
22
23 from IPython.kernel.manager import KernelManager
23 from jupyter_client.manager import KernelManager
24 from .restarter import IOLoopKernelRestarter
24 from .restarter import IOLoopKernelRestarter
25
25
26 #-----------------------------------------------------------------------------
26 #-----------------------------------------------------------------------------
27 # Code
27 # Code
28 #-----------------------------------------------------------------------------
28 #-----------------------------------------------------------------------------
29
29
30
30
31 def as_zmqstream(f):
31 def as_zmqstream(f):
32 def wrapped(self, *args, **kwargs):
32 def wrapped(self, *args, **kwargs):
33 socket = f(self, *args, **kwargs)
33 socket = f(self, *args, **kwargs)
34 return ZMQStream(socket, self.loop)
34 return ZMQStream(socket, self.loop)
35 return wrapped
35 return wrapped
36
36
37 class IOLoopKernelManager(KernelManager):
37 class IOLoopKernelManager(KernelManager):
38
38
39 loop = Instance('zmq.eventloop.ioloop.IOLoop')
39 loop = Instance('zmq.eventloop.ioloop.IOLoop')
40 def _loop_default(self):
40 def _loop_default(self):
41 return ioloop.IOLoop.instance()
41 return ioloop.IOLoop.instance()
42
42
43 _restarter = Instance('IPython.kernel.ioloop.IOLoopKernelRestarter', allow_none=True)
43 _restarter = Instance('jupyter_client.ioloop.IOLoopKernelRestarter', allow_none=True)
44
44
45 def start_restarter(self):
45 def start_restarter(self):
46 if self.autorestart and self.has_kernel:
46 if self.autorestart and self.has_kernel:
47 if self._restarter is None:
47 if self._restarter is None:
48 self._restarter = IOLoopKernelRestarter(
48 self._restarter = IOLoopKernelRestarter(
49 kernel_manager=self, loop=self.loop,
49 kernel_manager=self, loop=self.loop,
50 parent=self, log=self.log
50 parent=self, log=self.log
51 )
51 )
52 self._restarter.start()
52 self._restarter.start()
53
53
54 def stop_restarter(self):
54 def stop_restarter(self):
55 if self.autorestart:
55 if self.autorestart:
56 if self._restarter is not None:
56 if self._restarter is not None:
57 self._restarter.stop()
57 self._restarter.stop()
58
58
59 connect_shell = as_zmqstream(KernelManager.connect_shell)
59 connect_shell = as_zmqstream(KernelManager.connect_shell)
60 connect_iopub = as_zmqstream(KernelManager.connect_iopub)
60 connect_iopub = as_zmqstream(KernelManager.connect_iopub)
61 connect_stdin = as_zmqstream(KernelManager.connect_stdin)
61 connect_stdin = as_zmqstream(KernelManager.connect_stdin)
62 connect_hb = as_zmqstream(KernelManager.connect_hb)
62 connect_hb = as_zmqstream(KernelManager.connect_hb)
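For illustration, the effect of the as_zmqstream wrapping is that the connect_* methods hand back tornado-friendly streams instead of bare sockets. Here km is a hypothetical IOLoopKernelManager that has already started a kernel.

    def on_iopub(msg_parts):
        # msg_parts is the raw multipart ZMQ message (a list of bytes frames)
        print("iopub traffic:", msg_parts)

    stream = km.connect_iopub()      # ZMQStream rather than a plain zmq socket
    stream.on_recv(on_iopub)         # callback fires once km.loop is running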
@@ -1,53 +1,53 b''
1 """A basic in process kernel monitor with autorestarting.
1 """A basic in process kernel monitor with autorestarting.
2
2
3 This watches a kernel's state using KernelManager.is_alive and auto
3 This watches a kernel's state using KernelManager.is_alive and auto
4 restarts the kernel if it dies.
4 restarts the kernel if it dies.
5 """
5 """
6
6
7 #-----------------------------------------------------------------------------
7 #-----------------------------------------------------------------------------
8 # Copyright (C) 2013 The IPython Development Team
8 # Copyright (C) 2013 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13
13
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 from zmq.eventloop import ioloop
20 from zmq.eventloop import ioloop
21
21
22
22
23 from IPython.kernel.restarter import KernelRestarter
23 from jupyter_client.restarter import KernelRestarter
24 from IPython.utils.traitlets import (
24 from IPython.utils.traitlets import (
25 Instance,
25 Instance,
26 )
26 )
27
27
28 #-----------------------------------------------------------------------------
28 #-----------------------------------------------------------------------------
29 # Code
29 # Code
30 #-----------------------------------------------------------------------------
30 #-----------------------------------------------------------------------------
31
31
32 class IOLoopKernelRestarter(KernelRestarter):
32 class IOLoopKernelRestarter(KernelRestarter):
33 """Monitor and autorestart a kernel."""
33 """Monitor and autorestart a kernel."""
34
34
35 loop = Instance('zmq.eventloop.ioloop.IOLoop')
35 loop = Instance('zmq.eventloop.ioloop.IOLoop')
36 def _loop_default(self):
36 def _loop_default(self):
37 return ioloop.IOLoop.instance()
37 return ioloop.IOLoop.instance()
38
38
39 _pcallback = None
39 _pcallback = None
40
40
41 def start(self):
41 def start(self):
42 """Start the polling of the kernel."""
42 """Start the polling of the kernel."""
43 if self._pcallback is None:
43 if self._pcallback is None:
44 self._pcallback = ioloop.PeriodicCallback(
44 self._pcallback = ioloop.PeriodicCallback(
45 self.poll, 1000*self.time_to_dead, self.loop
45 self.poll, 1000*self.time_to_dead, self.loop
46 )
46 )
47 self._pcallback.start()
47 self._pcallback.start()
48
48
49 def stop(self):
49 def stop(self):
50 """Stop the kernel polling."""
50 """Stop the kernel polling."""
51 if self._pcallback is not None:
51 if self._pcallback is not None:
52 self._pcallback.stop()
52 self._pcallback.stop()
53 self._pcallback = None
53 self._pcallback = None
@@ -1,226 +1,226 b''
1 """Utilities for launching kernels
1 """Utilities for launching kernels
2 """
2 """
3
3
4 # Copyright (c) IPython Development Team.
4 # Copyright (c) IPython Development Team.
5 # Distributed under the terms of the Modified BSD License.
5 # Distributed under the terms of the Modified BSD License.
6
6
7 import os
7 import os
8 import sys
8 import sys
9 from subprocess import Popen, PIPE
9 from subprocess import Popen, PIPE
10
10
11 from IPython.utils.encoding import getdefaultencoding
11 from IPython.utils.encoding import getdefaultencoding
12 from IPython.utils.py3compat import cast_bytes_py2
12 from IPython.utils.py3compat import cast_bytes_py2
13
13
14
14
15 def swallow_argv(argv, aliases=None, flags=None):
15 def swallow_argv(argv, aliases=None, flags=None):
16 """strip frontend-specific aliases and flags from an argument list
16 """strip frontend-specific aliases and flags from an argument list
17
17
18 For use primarily in frontend apps that want to pass a subset of command-line
18 For use primarily in frontend apps that want to pass a subset of command-line
19 arguments through to a subprocess, where frontend-specific flags and aliases
19 arguments through to a subprocess, where frontend-specific flags and aliases
20 should be removed from the list.
20 should be removed from the list.
21
21
22 Parameters
22 Parameters
23 ----------
23 ----------
24
24
25 argv : list(str)
25 argv : list(str)
26 The starting argv, to be filtered
26 The starting argv, to be filtered
27 aliases : container of aliases (dict, list, set, etc.)
27 aliases : container of aliases (dict, list, set, etc.)
28 The frontend-specific aliases to be removed
28 The frontend-specific aliases to be removed
29 flags : container of flags (dict, list, set, etc.)
29 flags : container of flags (dict, list, set, etc.)
30 The frontend-specific flags to be removed
30 The frontend-specific flags to be removed
31
31
32 Returns
32 Returns
33 -------
33 -------
34
34
35 argv : list(str)
35 argv : list(str)
36 The argv list, excluding flags and aliases that have been stripped
36 The argv list, excluding flags and aliases that have been stripped
37 """
37 """
38
38
39 if aliases is None:
39 if aliases is None:
40 aliases = set()
40 aliases = set()
41 if flags is None:
41 if flags is None:
42 flags = set()
42 flags = set()
43
43
44 stripped = list(argv) # copy
44 stripped = list(argv) # copy
45
45
46 swallow_next = False
46 swallow_next = False
47 was_flag = False
47 was_flag = False
48 for a in argv:
48 for a in argv:
49 if a == '--':
49 if a == '--':
50 break
50 break
51 if swallow_next:
51 if swallow_next:
52 swallow_next = False
52 swallow_next = False
53 # last arg was an alias, remove the next one
53 # last arg was an alias, remove the next one
54 # *unless* the last alias has a no-arg flag version, in which
54 # *unless* the last alias has a no-arg flag version, in which
55 # case, don't swallow the next arg if it's also a flag:
55 # case, don't swallow the next arg if it's also a flag:
56 if not (was_flag and a.startswith('-')):
56 if not (was_flag and a.startswith('-')):
57 stripped.remove(a)
57 stripped.remove(a)
58 continue
58 continue
59 if a.startswith('-'):
59 if a.startswith('-'):
60 split = a.lstrip('-').split('=')
60 split = a.lstrip('-').split('=')
61 name = split[0]
61 name = split[0]
62 # we use startswith because argparse accepts any arg to be specified
62 # we use startswith because argparse accepts any arg to be specified
63 # by any leading section, as long as it is unique,
63 # by any leading section, as long as it is unique,
64 # so `--no-br` means `--no-browser` in the notebook, etc.
64 # so `--no-br` means `--no-browser` in the notebook, etc.
65 if any(alias.startswith(name) for alias in aliases):
65 if any(alias.startswith(name) for alias in aliases):
66 stripped.remove(a)
66 stripped.remove(a)
67 if len(split) == 1:
67 if len(split) == 1:
68 # alias passed with arg via space
68 # alias passed with arg via space
69 swallow_next = True
69 swallow_next = True
70 # could have been a flag that matches an alias, e.g. `existing`
70 # could have been a flag that matches an alias, e.g. `existing`
71 # in which case, we might not swallow the next arg
71 # in which case, we might not swallow the next arg
72 was_flag = name in flags
72 was_flag = name in flags
73 elif len(split) == 1 and any(flag.startswith(name) for flag in flags):
73 elif len(split) == 1 and any(flag.startswith(name) for flag in flags):
74 # strip flag, but don't swallow next, as flags don't take args
74 # strip flag, but don't swallow next, as flags don't take args
75 stripped.remove(a)
75 stripped.remove(a)
76
76
77 # return shortened list
77 # return shortened list
78 return stripped
78 return stripped
79
79
80
80
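An invented example of the stripping behaviour described in the docstring above: '--profile' is a frontend alias that takes a value, '--no-browser' is a bare frontend flag, and the positional argument is left alone.

    swallow_argv(['--profile', 'demo', '--no-browser', 'script.py'],
                 aliases={'profile'}, flags={'no-browser'})
    # -> ['script.py']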
81 def make_ipkernel_cmd(mod='IPython.kernel', executable=None, extra_arguments=[], **kw):
81 def make_ipkernel_cmd(mod='IPython.kernel', executable=None, extra_arguments=[], **kw):
82 """Build Popen command list for launching an IPython kernel.
82 """Build Popen command list for launching an IPython kernel.
83
83
84 Parameters
84 Parameters
85 ----------
85 ----------
86 mod : str, optional (default 'IPython.kernel')
86 mod : str, optional (default 'IPython.kernel')
87 A string of an IPython module whose __main__ starts an IPython kernel
87 A string of an IPython module whose __main__ starts an IPython kernel
88
88
89 executable : str, optional (default sys.executable)
89 executable : str, optional (default sys.executable)
90 The Python executable to use for the kernel process.
90 The Python executable to use for the kernel process.
91
91
92 extra_arguments : list, optional
92 extra_arguments : list, optional
93 A list of extra arguments to pass when executing the launch code.
93 A list of extra arguments to pass when executing the launch code.
94
94
95 Returns
95 Returns
96 -------
96 -------
97
97
98 A Popen command list
98 A Popen command list
99 """
99 """
100 if executable is None:
100 if executable is None:
101 executable = sys.executable
101 executable = sys.executable
102 arguments = [ executable, '-m', mod, '-f', '{connection_file}' ]
102 arguments = [ executable, '-m', mod, '-f', '{connection_file}' ]
103 arguments.extend(extra_arguments)
103 arguments.extend(extra_arguments)
104
104
105 return arguments
105 return arguments
106
106
107
107
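For reference, the list built above looks roughly like this; the interpreter path obviously depends on the environment.

    make_ipkernel_cmd(extra_arguments=['--debug'])
    # -> ['/usr/bin/python', '-m', 'IPython.kernel', '-f', '{connection_file}', '--debug']
    # The '{connection_file}' placeholder is substituted later (see
    # KernelManager.format_kernel_cmd) before the process is launched.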
108 def launch_kernel(cmd, stdin=None, stdout=None, stderr=None, env=None,
108 def launch_kernel(cmd, stdin=None, stdout=None, stderr=None, env=None,
109 independent=False,
109 independent=False,
110 cwd=None,
110 cwd=None,
111 **kw
111 **kw
112 ):
112 ):
113 """ Launches a localhost kernel, binding to the specified ports.
113 """ Launches a localhost kernel, binding to the specified ports.
114
114
115 Parameters
115 Parameters
116 ----------
116 ----------
117 cmd : Popen list,
117 cmd : Popen list,
118 A command list (as passed to Popen) that launches a kernel entry point.
118 A command list (as passed to Popen) that launches a kernel entry point.
119
119
120 stdin, stdout, stderr : optional (default None)
120 stdin, stdout, stderr : optional (default None)
121 Standard streams, as defined in subprocess.Popen.
121 Standard streams, as defined in subprocess.Popen.
122
122
123 independent : bool, optional (default False)
123 independent : bool, optional (default False)
124 If set, the kernel process is guaranteed to survive if this process
124 If set, the kernel process is guaranteed to survive if this process
125 dies. If not set, an effort is made to ensure that the kernel is killed
125 dies. If not set, an effort is made to ensure that the kernel is killed
126 when this process dies. Note that in this case it is still good practice
126 when this process dies. Note that in this case it is still good practice
127 to kill kernels manually before exiting.
127 to kill kernels manually before exiting.
128
128
129 cwd : path, optional
129 cwd : path, optional
130 The working dir of the kernel process (default: cwd of this process).
130 The working dir of the kernel process (default: cwd of this process).
131
131
132 Returns
132 Returns
133 -------
133 -------
134
134
135 Popen instance for the kernel subprocess
135 Popen instance for the kernel subprocess
136 """
136 """
137
137
138 # Popen will fail (sometimes with a deadlock) if stdin, stdout, and stderr
138 # Popen will fail (sometimes with a deadlock) if stdin, stdout, and stderr
139 # are invalid. Unfortunately, there is in general no way to detect whether
139 # are invalid. Unfortunately, there is in general no way to detect whether
140 # they are valid. The following two blocks redirect them to (temporary)
140 # they are valid. The following two blocks redirect them to (temporary)
141 # pipes in certain important cases.
141 # pipes in certain important cases.
142
142
143 # If this process has been backgrounded, our stdin is invalid. Since there
143 # If this process has been backgrounded, our stdin is invalid. Since there
144 # is no compelling reason for the kernel to inherit our stdin anyway, we'll
144 # is no compelling reason for the kernel to inherit our stdin anyway, we'll
145 # play it safe and always redirect.
145 # play it safe and always redirect.
146 redirect_in = True
146 redirect_in = True
147 _stdin = PIPE if stdin is None else stdin
147 _stdin = PIPE if stdin is None else stdin
148
148
149 # If this process is running on pythonw, we know that stdin, stdout, and
149 # If this process is running on pythonw, we know that stdin, stdout, and
150 # stderr are all invalid.
150 # stderr are all invalid.
151 redirect_out = sys.executable.endswith('pythonw.exe')
151 redirect_out = sys.executable.endswith('pythonw.exe')
152 if redirect_out:
152 if redirect_out:
153 blackhole = open(os.devnull, 'w')
153 blackhole = open(os.devnull, 'w')
154 _stdout = blackhole if stdout is None else stdout
154 _stdout = blackhole if stdout is None else stdout
155 _stderr = blackhole if stderr is None else stderr
155 _stderr = blackhole if stderr is None else stderr
156 else:
156 else:
157 _stdout, _stderr = stdout, stderr
157 _stdout, _stderr = stdout, stderr
158
158
159 env = env if (env is not None) else os.environ.copy()
159 env = env if (env is not None) else os.environ.copy()
160
160
161 encoding = getdefaultencoding(prefer_stream=False)
161 encoding = getdefaultencoding(prefer_stream=False)
162 kwargs = dict(
162 kwargs = dict(
163 stdin=_stdin,
163 stdin=_stdin,
164 stdout=_stdout,
164 stdout=_stdout,
165 stderr=_stderr,
165 stderr=_stderr,
166 cwd=cwd,
166 cwd=cwd,
167 env=env,
167 env=env,
168 )
168 )
169
169
170 # Spawn a kernel.
170 # Spawn a kernel.
171 if sys.platform == 'win32':
171 if sys.platform == 'win32':
172 # Popen on Python 2 on Windows cannot handle unicode args or cwd
172 # Popen on Python 2 on Windows cannot handle unicode args or cwd
173 cmd = [ cast_bytes_py2(c, encoding) for c in cmd ]
173 cmd = [ cast_bytes_py2(c, encoding) for c in cmd ]
174 if cwd:
174 if cwd:
175 cwd = cast_bytes_py2(cwd, sys.getfilesystemencoding() or 'ascii')
175 cwd = cast_bytes_py2(cwd, sys.getfilesystemencoding() or 'ascii')
176 kwargs['cwd'] = cwd
176 kwargs['cwd'] = cwd
177
177
178 from IPython.kernel.zmq.parentpoller import ParentPollerWindows
178 from jupyter_client.parentpoller import ParentPollerWindows
179 # Create a Win32 event for interrupting the kernel
179 # Create a Win32 event for interrupting the kernel
180 # and store it in an environment variable.
180 # and store it in an environment variable.
181 interrupt_event = ParentPollerWindows.create_interrupt_event()
181 interrupt_event = ParentPollerWindows.create_interrupt_event()
182 env["JPY_INTERRUPT_EVENT"] = str(interrupt_event)
182 env["JPY_INTERRUPT_EVENT"] = str(interrupt_event)
183 # deprecated old env name:
183 # deprecated old env name:
184 env["IPY_INTERRUPT_EVENT"] = env["JPY_INTERRUPT_EVENT"]
184 env["IPY_INTERRUPT_EVENT"] = env["JPY_INTERRUPT_EVENT"]
185
185
186 try:
186 try:
187 from _winapi import DuplicateHandle, GetCurrentProcess, \
187 from _winapi import DuplicateHandle, GetCurrentProcess, \
188 DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP
188 DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP
189 except:
189 except:
190 from _subprocess import DuplicateHandle, GetCurrentProcess, \
190 from _subprocess import DuplicateHandle, GetCurrentProcess, \
191 DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP
191 DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP
192 # Launch the kernel process
192 # Launch the kernel process
193 if independent:
193 if independent:
194 kwargs['creationflags'] = CREATE_NEW_PROCESS_GROUP
194 kwargs['creationflags'] = CREATE_NEW_PROCESS_GROUP
195 else:
195 else:
196 pid = GetCurrentProcess()
196 pid = GetCurrentProcess()
197 handle = DuplicateHandle(pid, pid, pid, 0,
197 handle = DuplicateHandle(pid, pid, pid, 0,
198 True, # Inheritable by new processes.
198 True, # Inheritable by new processes.
199 DUPLICATE_SAME_ACCESS)
199 DUPLICATE_SAME_ACCESS)
200 env['JPY_PARENT_PID'] = str(int(handle))
200 env['JPY_PARENT_PID'] = str(int(handle))
201
201
202 proc = Popen(cmd, **kwargs)
202 proc = Popen(cmd, **kwargs)
203
203
204 # Attach the interrupt event to the Popen object so it can be used later.
204 # Attach the interrupt event to the Popen object so it can be used later.
205 proc.win32_interrupt_event = interrupt_event
205 proc.win32_interrupt_event = interrupt_event
206
206
207 else:
207 else:
208 if independent:
208 if independent:
209 kwargs['preexec_fn'] = lambda: os.setsid()
209 kwargs['preexec_fn'] = lambda: os.setsid()
210 else:
210 else:
211 env['JPY_PARENT_PID'] = str(os.getpid())
211 env['JPY_PARENT_PID'] = str(os.getpid())
212
212
213 proc = Popen(cmd, **kwargs)
213 proc = Popen(cmd, **kwargs)
214
214
215 # Clean up pipes created to work around Popen bug.
215 # Clean up pipes created to work around Popen bug.
216 if redirect_in:
216 if redirect_in:
217 if stdin is None:
217 if stdin is None:
218 proc.stdin.close()
218 proc.stdin.close()
219
219
220 return proc
220 return proc
221
221
222 __all__ = [
222 __all__ = [
223 'swallow_argv',
223 'swallow_argv',
224 'make_ipkernel_cmd',
224 'make_ipkernel_cmd',
225 'launch_kernel',
225 'launch_kernel',
226 ]
226 ]
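Putting the two helpers together, a minimal sketch of launching a kernel by hand; the connection-file path is invented, the jupyter_client.launcher module path is assumed, and in normal use KernelManager drives this instead.

    from jupyter_client import launch_kernel
    from jupyter_client.launcher import make_ipkernel_cmd   # assumed module path

    cmd = make_ipkernel_cmd()
    # Fill in the placeholder ourselves, since no KernelManager is involved here:
    cmd = [arg.replace('{connection_file}', '/tmp/kernel-demo.json') for arg in cmd]
    proc = launch_kernel(cmd)        # subprocess.Popen instance
    proc.terminate()                 # or connect a client via the connection file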
@@ -1,442 +1,442 b''
1 """Base class to manage a running kernel"""
1 """Base class to manage a running kernel"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from __future__ import absolute_import
6 from __future__ import absolute_import
7
7
8 from contextlib import contextmanager
8 from contextlib import contextmanager
9 import os
9 import os
10 import re
10 import re
11 import signal
11 import signal
12 import sys
12 import sys
13 import time
13 import time
14 import warnings
14 import warnings
15 try:
15 try:
16 from queue import Empty # Py 3
16 from queue import Empty # Py 3
17 except ImportError:
17 except ImportError:
18 from Queue import Empty # Py 2
18 from Queue import Empty # Py 2
19
19
20 import zmq
20 import zmq
21
21
22 from IPython.utils.importstring import import_item
22 from IPython.utils.importstring import import_item
23 from IPython.utils.localinterfaces import is_local_ip, local_ips
23 from IPython.utils.localinterfaces import is_local_ip, local_ips
24 from IPython.utils.path import get_ipython_dir
24 from IPython.utils.path import get_ipython_dir
25 from IPython.utils.traitlets import (
25 from IPython.utils.traitlets import (
26 Any, Instance, Unicode, List, Bool, Type, DottedObjectName
26 Any, Instance, Unicode, List, Bool, Type, DottedObjectName
27 )
27 )
28 from IPython.kernel import (
28 from jupyter_client import (
29 launch_kernel,
29 launch_kernel,
30 kernelspec,
30 kernelspec,
31 )
31 )
32 from .connect import ConnectionFileMixin
32 from .connect import ConnectionFileMixin
33 from .zmq.session import Session
33 from .session import Session
34 from .managerabc import (
34 from .managerabc import (
35 KernelManagerABC
35 KernelManagerABC
36 )
36 )
37
37
38
38
39 class KernelManager(ConnectionFileMixin):
39 class KernelManager(ConnectionFileMixin):
40 """Manages a single kernel in a subprocess on this host.
40 """Manages a single kernel in a subprocess on this host.
41
41
42 This version starts kernels with Popen.
42 This version starts kernels with Popen.
43 """
43 """
44
44
45 # The PyZMQ Context to use for communication with the kernel.
45 # The PyZMQ Context to use for communication with the kernel.
46 context = Instance(zmq.Context)
46 context = Instance(zmq.Context)
47 def _context_default(self):
47 def _context_default(self):
48 return zmq.Context.instance()
48 return zmq.Context.instance()
49
49
50 # the class to create with our `client` method
50 # the class to create with our `client` method
51 client_class = DottedObjectName('IPython.kernel.blocking.BlockingKernelClient')
51 client_class = DottedObjectName('jupyter_client.blocking.BlockingKernelClient')
52 client_factory = Type(allow_none=True)
52 client_factory = Type(allow_none=True)
53 def _client_class_changed(self, name, old, new):
53 def _client_class_changed(self, name, old, new):
54 self.client_factory = import_item(str(new))
54 self.client_factory = import_item(str(new))
55
55
56 # The kernel process with which the KernelManager is communicating.
56 # The kernel process with which the KernelManager is communicating.
57 # generally a Popen instance
57 # generally a Popen instance
58 kernel = Any()
58 kernel = Any()
59
59
60 kernel_spec_manager = Instance(kernelspec.KernelSpecManager)
60 kernel_spec_manager = Instance(kernelspec.KernelSpecManager)
61
61
62 def _kernel_spec_manager_default(self):
62 def _kernel_spec_manager_default(self):
63 return kernelspec.KernelSpecManager(ipython_dir=self.ipython_dir)
63 return kernelspec.KernelSpecManager(ipython_dir=self.ipython_dir)
64
64
65 kernel_name = Unicode(kernelspec.NATIVE_KERNEL_NAME)
65 kernel_name = Unicode(kernelspec.NATIVE_KERNEL_NAME)
66
66
67 kernel_spec = Instance(kernelspec.KernelSpec)
67 kernel_spec = Instance(kernelspec.KernelSpec)
68
68
69 def _kernel_spec_default(self):
69 def _kernel_spec_default(self):
70 return self.kernel_spec_manager.get_kernel_spec(self.kernel_name)
70 return self.kernel_spec_manager.get_kernel_spec(self.kernel_name)
71
71
72 def _kernel_name_changed(self, name, old, new):
72 def _kernel_name_changed(self, name, old, new):
73 if new == 'python':
73 if new == 'python':
74 self.kernel_name = kernelspec.NATIVE_KERNEL_NAME
74 self.kernel_name = kernelspec.NATIVE_KERNEL_NAME
75 # This triggered another run of this function, so we can exit now
75 # This triggered another run of this function, so we can exit now
76 return
76 return
77 self.kernel_spec = self.kernel_spec_manager.get_kernel_spec(new)
77 self.kernel_spec = self.kernel_spec_manager.get_kernel_spec(new)
78 self.ipython_kernel = new in {'python', 'python2', 'python3'}
78 self.ipython_kernel = new in {'python', 'python2', 'python3'}
79
79
80 kernel_cmd = List(Unicode, config=True,
80 kernel_cmd = List(Unicode, config=True,
81 help="""DEPRECATED: Use kernel_name instead.
81 help="""DEPRECATED: Use kernel_name instead.
82
82
83 The Popen Command to launch the kernel.
83 The Popen Command to launch the kernel.
84 Override this if you have a custom kernel.
84 Override this if you have a custom kernel.
85 If kernel_cmd is specified in a configuration file,
85 If kernel_cmd is specified in a configuration file,
86 IPython does not pass any arguments to the kernel,
86 IPython does not pass any arguments to the kernel,
87 because it cannot make any assumptions about the
87 because it cannot make any assumptions about the
88 arguments that the kernel understands. In particular,
88 arguments that the kernel understands. In particular,
89 this means that the kernel does not receive the
89 this means that the kernel does not receive the
90 option --debug if it is given on the IPython command line.
90 option --debug if it is given on the IPython command line.
91 """
91 """
92 )
92 )
93
93
94 def _kernel_cmd_changed(self, name, old, new):
94 def _kernel_cmd_changed(self, name, old, new):
95 warnings.warn("Setting kernel_cmd is deprecated, use kernel_spec to "
95 warnings.warn("Setting kernel_cmd is deprecated, use kernel_spec to "
96 "start different kernels.")
96 "start different kernels.")
97 self.ipython_kernel = False
97 self.ipython_kernel = False
98
98
99 ipython_kernel = Bool(True)
99 ipython_kernel = Bool(True)
100
100
101 ipython_dir = Unicode()
101 ipython_dir = Unicode()
102 def _ipython_dir_default(self):
102 def _ipython_dir_default(self):
103 return get_ipython_dir()
103 return get_ipython_dir()
104
104
105 # Protected traits
105 # Protected traits
106 _launch_args = Any()
106 _launch_args = Any()
107 _control_socket = Any()
107 _control_socket = Any()
108
108
109 _restarter = Any()
109 _restarter = Any()
110
110
111 autorestart = Bool(False, config=True,
111 autorestart = Bool(False, config=True,
112 help="""Should we autorestart the kernel if it dies."""
112 help="""Should we autorestart the kernel if it dies."""
113 )
113 )
114
114
115 def __del__(self):
115 def __del__(self):
116 self._close_control_socket()
116 self._close_control_socket()
117 self.cleanup_connection_file()
117 self.cleanup_connection_file()
118
118
119 #--------------------------------------------------------------------------
119 #--------------------------------------------------------------------------
120 # Kernel restarter
120 # Kernel restarter
121 #--------------------------------------------------------------------------
121 #--------------------------------------------------------------------------
122
122
123 def start_restarter(self):
123 def start_restarter(self):
124 pass
124 pass
125
125
126 def stop_restarter(self):
126 def stop_restarter(self):
127 pass
127 pass
128
128
129 def add_restart_callback(self, callback, event='restart'):
129 def add_restart_callback(self, callback, event='restart'):
130 """register a callback to be called when a kernel is restarted"""
130 """register a callback to be called when a kernel is restarted"""
131 if self._restarter is None:
131 if self._restarter is None:
132 return
132 return
133 self._restarter.add_callback(callback, event)
133 self._restarter.add_callback(callback, event)
134
134
135 def remove_restart_callback(self, callback, event='restart'):
135 def remove_restart_callback(self, callback, event='restart'):
136 """unregister a callback to be called when a kernel is restarted"""
136 """unregister a callback to be called when a kernel is restarted"""
137 if self._restarter is None:
137 if self._restarter is None:
138 return
138 return
139 self._restarter.remove_callback(callback, event)
139 self._restarter.remove_callback(callback, event)
140
140
141 #--------------------------------------------------------------------------
141 #--------------------------------------------------------------------------
142 # create a Client connected to our Kernel
142 # create a Client connected to our Kernel
143 #--------------------------------------------------------------------------
143 #--------------------------------------------------------------------------
144
144
145 def client(self, **kwargs):
145 def client(self, **kwargs):
146 """Create a client configured to connect to our kernel"""
146 """Create a client configured to connect to our kernel"""
147 if self.client_factory is None:
147 if self.client_factory is None:
148 self.client_factory = import_item(self.client_class)
148 self.client_factory = import_item(self.client_class)
149
149
150 kw = {}
150 kw = {}
151 kw.update(self.get_connection_info())
151 kw.update(self.get_connection_info())
152 kw.update(dict(
152 kw.update(dict(
153 connection_file=self.connection_file,
153 connection_file=self.connection_file,
154 session=self.session,
154 session=self.session,
155 parent=self,
155 parent=self,
156 ))
156 ))
157
157
158 # add kwargs last, for manual overrides
158 # add kwargs last, for manual overrides
159 kw.update(kwargs)
159 kw.update(kwargs)
160 return self.client_factory(**kw)
160 return self.client_factory(**kw)
161
161
162 #--------------------------------------------------------------------------
162 #--------------------------------------------------------------------------
163 # Kernel management
163 # Kernel management
164 #--------------------------------------------------------------------------
164 #--------------------------------------------------------------------------
165
165
166 def format_kernel_cmd(self, extra_arguments=None):
166 def format_kernel_cmd(self, extra_arguments=None):
167 """replace templated args (e.g. {connection_file})"""
167 """replace templated args (e.g. {connection_file})"""
168 extra_arguments = extra_arguments or []
168 extra_arguments = extra_arguments or []
169 if self.kernel_cmd:
169 if self.kernel_cmd:
170 cmd = self.kernel_cmd + extra_arguments
170 cmd = self.kernel_cmd + extra_arguments
171 else:
171 else:
172 cmd = self.kernel_spec.argv + extra_arguments
172 cmd = self.kernel_spec.argv + extra_arguments
173
173
174 ns = dict(connection_file=self.connection_file)
174 ns = dict(connection_file=self.connection_file)
175 ns.update(self._launch_args)
175 ns.update(self._launch_args)
176
176
177 pat = re.compile(r'\{([A-Za-z0-9_]+)\}')
177 pat = re.compile(r'\{([A-Za-z0-9_]+)\}')
178 def from_ns(match):
178 def from_ns(match):
179 """Get the key out of ns if it's there, otherwise no change."""
179 """Get the key out of ns if it's there, otherwise no change."""
180 return ns.get(match.group(1), match.group())
180 return ns.get(match.group(1), match.group())
181
181
182 return [ pat.sub(from_ns, arg) for arg in cmd ]
182 return [ pat.sub(from_ns, arg) for arg in cmd ]
183
183
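The placeholder substitution above amounts to the following; the namespace values and kernel argv are invented.

    import re

    pat = re.compile(r'\{([A-Za-z0-9_]+)\}')
    ns = {'connection_file': '/tmp/kernel-abc.json'}
    cmd = ['python', '-m', 'some_kernel', '-f', '{connection_file}', '--log={log_level}']

    [pat.sub(lambda m: ns.get(m.group(1), m.group()), arg) for arg in cmd]
    # -> ['python', '-m', 'some_kernel', '-f', '/tmp/kernel-abc.json', '--log={log_level}']
    # Unknown placeholders (here '{log_level}') are deliberately left untouched.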
184 def _launch_kernel(self, kernel_cmd, **kw):
184 def _launch_kernel(self, kernel_cmd, **kw):
185 """actually launch the kernel
185 """actually launch the kernel
186
186
187 override in a subclass to launch kernel subprocesses differently
187 override in a subclass to launch kernel subprocesses differently
188 """
188 """
189 return launch_kernel(kernel_cmd, **kw)
189 return launch_kernel(kernel_cmd, **kw)
190
190
191 # Control socket used for polite kernel shutdown
191 # Control socket used for polite kernel shutdown
192
192
193 def _connect_control_socket(self):
193 def _connect_control_socket(self):
194 if self._control_socket is None:
194 if self._control_socket is None:
195 self._control_socket = self.connect_control()
195 self._control_socket = self.connect_control()
196 self._control_socket.linger = 100
196 self._control_socket.linger = 100
197
197
198 def _close_control_socket(self):
198 def _close_control_socket(self):
199 if self._control_socket is None:
199 if self._control_socket is None:
200 return
200 return
201 self._control_socket.close()
201 self._control_socket.close()
202 self._control_socket = None
202 self._control_socket = None
203
203
204 def start_kernel(self, **kw):
204 def start_kernel(self, **kw):
205 """Starts a kernel on this host in a separate process.
205 """Starts a kernel on this host in a separate process.
206
206
207 If random ports (port=0) are being used, this method must be called
207 If random ports (port=0) are being used, this method must be called
208 before the channels are created.
208 before the channels are created.
209
209
210 Parameters
210 Parameters
211 ----------
211 ----------
212 **kw : optional
212 **kw : optional
213 keyword arguments that are passed down to build the kernel_cmd
213 keyword arguments that are passed down to build the kernel_cmd
214 and launching the kernel (e.g. Popen kwargs).
214 and launching the kernel (e.g. Popen kwargs).
215 """
215 """
216 if self.transport == 'tcp' and not is_local_ip(self.ip):
216 if self.transport == 'tcp' and not is_local_ip(self.ip):
217 raise RuntimeError("Can only launch a kernel on a local interface. "
217 raise RuntimeError("Can only launch a kernel on a local interface. "
218 "Make sure that the '*_address' attributes are "
218 "Make sure that the '*_address' attributes are "
219 "configured properly. "
219 "configured properly. "
220 "Currently valid addresses are: %s" % local_ips()
220 "Currently valid addresses are: %s" % local_ips()
221 )
221 )
222
222
223 # write connection file / get default ports
223 # write connection file / get default ports
224 self.write_connection_file()
224 self.write_connection_file()
225
225
226 # save kwargs for use in restart
226 # save kwargs for use in restart
227 self._launch_args = kw.copy()
227 self._launch_args = kw.copy()
228 # build the Popen cmd
228 # build the Popen cmd
229 extra_arguments = kw.pop('extra_arguments', [])
229 extra_arguments = kw.pop('extra_arguments', [])
230 kernel_cmd = self.format_kernel_cmd(extra_arguments=extra_arguments)
230 kernel_cmd = self.format_kernel_cmd(extra_arguments=extra_arguments)
231 if self.kernel_cmd:
231 if self.kernel_cmd:
232 # If kernel_cmd has been set manually, don't refer to a kernel spec
232 # If kernel_cmd has been set manually, don't refer to a kernel spec
233 env = os.environ
233 env = os.environ
234 else:
234 else:
235 # Environment variables from kernel spec are added to os.environ
235 # Environment variables from kernel spec are added to os.environ
236 env = os.environ.copy()
236 env = os.environ.copy()
237 env.update(self.kernel_spec.env or {})
237 env.update(self.kernel_spec.env or {})
238 # launch the kernel subprocess
238 # launch the kernel subprocess
239 self.kernel = self._launch_kernel(kernel_cmd, env=env,
239 self.kernel = self._launch_kernel(kernel_cmd, env=env,
240 **kw)
240 **kw)
241 self.start_restarter()
241 self.start_restarter()
242 self._connect_control_socket()
242 self._connect_control_socket()
243
243
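Taken together with the client() factory above, a minimal end-to-end sketch of the manager workflow might look like this, assuming the default (native) kernel is installed; error handling and timeouts are omitted.

    from jupyter_client.manager import KernelManager

    km = KernelManager()
    km.start_kernel()                       # writes the connection file, spawns the process

    kc = km.client()                        # BlockingKernelClient by default
    kc.start_channels()
    kc.wait_for_ready()                     # drain kernel_info reply and startup IOPub traffic

    msg_id = kc.execute("print('hello')")   # execute_request on the shell channel

    kc.stop_channels()
    km.shutdown_kernel()                    # polite shutdown, then kill if it doesn't comply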
244 def request_shutdown(self, restart=False):
244 def request_shutdown(self, restart=False):
245 """Send a shutdown request via control channel
245 """Send a shutdown request via control channel
246
246
247 On Windows, this just kills kernels instead, because the shutdown
247 On Windows, this just kills kernels instead, because the shutdown
248 messages don't work.
248 messages don't work.
249 """
249 """
250 content = dict(restart=restart)
250 content = dict(restart=restart)
251 msg = self.session.msg("shutdown_request", content=content)
251 msg = self.session.msg("shutdown_request", content=content)
252 self.session.send(self._control_socket, msg)
252 self.session.send(self._control_socket, msg)
253
253
254 def finish_shutdown(self, waittime=1, pollinterval=0.1):
254 def finish_shutdown(self, waittime=1, pollinterval=0.1):
255 """Wait for kernel shutdown, then kill process if it doesn't shutdown.
255 """Wait for kernel shutdown, then kill process if it doesn't shutdown.
256
256
257 This does not send shutdown requests - use :meth:`request_shutdown`
257 This does not send shutdown requests - use :meth:`request_shutdown`
258 first.
258 first.
259 """
259 """
260 for i in range(int(waittime/pollinterval)):
260 for i in range(int(waittime/pollinterval)):
261 if self.is_alive():
261 if self.is_alive():
262 time.sleep(pollinterval)
262 time.sleep(pollinterval)
263 else:
263 else:
264 break
264 break
265 else:
265 else:
266 # OK, we've waited long enough.
266 # OK, we've waited long enough.
267 if self.has_kernel:
267 if self.has_kernel:
268 self._kill_kernel()
268 self._kill_kernel()
269
269
270 def cleanup(self, connection_file=True):
270 def cleanup(self, connection_file=True):
271 """Clean up resources when the kernel is shut down"""
271 """Clean up resources when the kernel is shut down"""
272 if connection_file:
272 if connection_file:
273 self.cleanup_connection_file()
273 self.cleanup_connection_file()
274
274
275 self.cleanup_ipc_files()
275 self.cleanup_ipc_files()
276 self._close_control_socket()
276 self._close_control_socket()
277
277
278 def shutdown_kernel(self, now=False, restart=False):
278 def shutdown_kernel(self, now=False, restart=False):
279 """Attempts to the stop the kernel process cleanly.
279 """Attempts to the stop the kernel process cleanly.
280
280
281 This attempts to shut down the kernel cleanly by:
281 This attempts to shut down the kernel cleanly by:
282
282
283 1. Sending it a shutdown message over the shell channel.
283 1. Sending it a shutdown message over the shell channel.
284 2. If that fails, the kernel is shutdown forcibly by sending it
284 2. If that fails, the kernel is shutdown forcibly by sending it
285 a signal.
285 a signal.
286
286
287 Parameters
287 Parameters
288 ----------
288 ----------
289 now : bool
289 now : bool
290 Should the kernel be forcibly killed *now*. This skips the
290 Should the kernel be forcibly killed *now*. This skips the
291 first, nice shutdown attempt.
291 first, nice shutdown attempt.
292 restart : bool
292 restart : bool
293 Whether this kernel will be restarted after it is shut down. When this
293 Whether this kernel will be restarted after it is shut down. When this
294 is True, connection files will not be cleaned up.
294 is True, connection files will not be cleaned up.
295 """
295 """
296 # Stop monitoring for restarting while we shutdown.
296 # Stop monitoring for restarting while we shutdown.
297 self.stop_restarter()
297 self.stop_restarter()
298
298
299 if now:
299 if now:
300 self._kill_kernel()
300 self._kill_kernel()
301 else:
301 else:
302 self.request_shutdown(restart=restart)
302 self.request_shutdown(restart=restart)
303 # Don't send any additional kernel kill messages immediately, to give
303 # Don't send any additional kernel kill messages immediately, to give
304 # the kernel a chance to properly execute shutdown actions. Wait for at
304 # the kernel a chance to properly execute shutdown actions. Wait for at
305 # most 1s, checking every 0.1s.
305 # most 1s, checking every 0.1s.
306 self.finish_shutdown()
306 self.finish_shutdown()
307
307
308 self.cleanup(connection_file=not restart)
308 self.cleanup(connection_file=not restart)
309
309
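A brief usage sketch of the shutdown path above (hedged; it assumes a locally installed default kernel and the package-level KernelManager import): a plain call runs the graceful sequence (request_shutdown, then finish_shutdown's ~1s poll, then cleanup), while now=True jumps straight to _kill_kernel.

    from jupyter_client import KernelManager

    km = KernelManager()
    km.start_kernel()
    # ... use the kernel ...
    km.shutdown_kernel()            # graceful: shutdown_request, wait up to ~1s, kill only if needed
    # km.shutdown_kernel(now=True)  # forceful: skip the request and kill immediately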
310 def restart_kernel(self, now=False, **kw):
310 def restart_kernel(self, now=False, **kw):
311 """Restarts a kernel with the arguments that were used to launch it.
311 """Restarts a kernel with the arguments that were used to launch it.
312
312
313 If the old kernel was launched with random ports, the same ports will be
313 If the old kernel was launched with random ports, the same ports will be
314 used for the new kernel. The same connection file is used again.
314 used for the new kernel. The same connection file is used again.
315
315
316 Parameters
316 Parameters
317 ----------
317 ----------
318 now : bool, optional
318 now : bool, optional
319 If True, the kernel is forcefully restarted *immediately*, without
319 If True, the kernel is forcefully restarted *immediately*, without
320 having a chance to do any cleanup action. Otherwise the kernel is
320 having a chance to do any cleanup action. Otherwise the kernel is
321 given 1s to clean up before a forceful restart is issued.
321 given 1s to clean up before a forceful restart is issued.
322
322
323 In all cases the kernel is restarted, the only difference is whether
323 In all cases the kernel is restarted, the only difference is whether
324 it is given a chance to perform a clean shutdown or not.
324 it is given a chance to perform a clean shutdown or not.
325
325
326 **kw : optional
326 **kw : optional
327 Any options specified here will overwrite those used to launch the
327 Any options specified here will overwrite those used to launch the
328 kernel.
328 kernel.
329 """
329 """
330 if self._launch_args is None:
330 if self._launch_args is None:
331 raise RuntimeError("Cannot restart the kernel. "
331 raise RuntimeError("Cannot restart the kernel. "
332 "No previous call to 'start_kernel'.")
332 "No previous call to 'start_kernel'.")
333 else:
333 else:
334 # Stop currently running kernel.
334 # Stop currently running kernel.
335 self.shutdown_kernel(now=now, restart=True)
335 self.shutdown_kernel(now=now, restart=True)
336
336
337 # Start new kernel.
337 # Start new kernel.
338 self._launch_args.update(kw)
338 self._launch_args.update(kw)
339 self.start_kernel(**self._launch_args)
339 self.start_kernel(**self._launch_args)
340
340
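Continuing that sketch, a restart reuses the stored launch arguments, so the connection file and ports stay the same; keyword overrides are merged in before relaunching (hypothetical km as above):

    km.restart_kernel()           # clean shutdown (up to ~1s), then relaunch on the same ports
    km.restart_kernel(now=True)   # kill immediately, then relaunch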
341 @property
341 @property
342 def has_kernel(self):
342 def has_kernel(self):
343 """Has a kernel been started that we are managing."""
343 """Has a kernel been started that we are managing."""
344 return self.kernel is not None
344 return self.kernel is not None
345
345
346 def _kill_kernel(self):
346 def _kill_kernel(self):
347 """Kill the running kernel.
347 """Kill the running kernel.
348
348
349 This is a private method, callers should use shutdown_kernel(now=True).
349 This is a private method, callers should use shutdown_kernel(now=True).
350 """
350 """
351 if self.has_kernel:
351 if self.has_kernel:
352
352
353 # Signal the kernel to terminate (sends SIGKILL on Unix and calls
353 # Signal the kernel to terminate (sends SIGKILL on Unix and calls
354 # TerminateProcess() on Win32).
354 # TerminateProcess() on Win32).
355 try:
355 try:
356 self.kernel.kill()
356 self.kernel.kill()
357 except OSError as e:
357 except OSError as e:
358 # In Windows, we will get an Access Denied error if the process
358 # In Windows, we will get an Access Denied error if the process
359 # has already terminated. Ignore it.
359 # has already terminated. Ignore it.
360 if sys.platform == 'win32':
360 if sys.platform == 'win32':
361 if e.winerror != 5:
361 if e.winerror != 5:
362 raise
362 raise
363 # On Unix, we may get an ESRCH error if the process has already
363 # On Unix, we may get an ESRCH error if the process has already
364 # terminated. Ignore it.
364 # terminated. Ignore it.
365 else:
365 else:
366 from errno import ESRCH
366 from errno import ESRCH
367 if e.errno != ESRCH:
367 if e.errno != ESRCH:
368 raise
368 raise
369
369
370 # Block until the kernel terminates.
370 # Block until the kernel terminates.
371 self.kernel.wait()
371 self.kernel.wait()
372 self.kernel = None
372 self.kernel = None
373 else:
373 else:
374 raise RuntimeError("Cannot kill kernel. No kernel is running!")
374 raise RuntimeError("Cannot kill kernel. No kernel is running!")
375
375
376 def interrupt_kernel(self):
376 def interrupt_kernel(self):
377 """Interrupts the kernel by sending it a signal.
377 """Interrupts the kernel by sending it a signal.
378
378
379 Unlike ``signal_kernel``, this operation is well supported on all
379 Unlike ``signal_kernel``, this operation is well supported on all
380 platforms.
380 platforms.
381 """
381 """
382 if self.has_kernel:
382 if self.has_kernel:
383 if sys.platform == 'win32':
383 if sys.platform == 'win32':
384 from .zmq.parentpoller import ParentPollerWindows as Poller
384 from .parentpoller import ParentPollerWindows as Poller
385 Poller.send_interrupt(self.kernel.win32_interrupt_event)
385 Poller.send_interrupt(self.kernel.win32_interrupt_event)
386 else:
386 else:
387 self.kernel.send_signal(signal.SIGINT)
387 self.kernel.send_signal(signal.SIGINT)
388 else:
388 else:
389 raise RuntimeError("Cannot interrupt kernel. No kernel is running!")
389 raise RuntimeError("Cannot interrupt kernel. No kernel is running!")
390
390
391 def signal_kernel(self, signum):
391 def signal_kernel(self, signum):
392 """Sends a signal to the kernel.
392 """Sends a signal to the kernel.
393
393
394 Note that since only SIGTERM is supported on Windows, this function is
394 Note that since only SIGTERM is supported on Windows, this function is
395 only useful on Unix systems.
395 only useful on Unix systems.
396 """
396 """
397 if self.has_kernel:
397 if self.has_kernel:
398 self.kernel.send_signal(signum)
398 self.kernel.send_signal(signum)
399 else:
399 else:
400 raise RuntimeError("Cannot signal kernel. No kernel is running!")
400 raise RuntimeError("Cannot signal kernel. No kernel is running!")
401
401
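A minimal sketch of the two signalling paths (same hypothetical km): interrupt_kernel is portable, while signal_kernel is effectively Unix-only.

    import signal

    km.interrupt_kernel()             # SIGINT on Unix, interrupt event on Windows
    km.signal_kernel(signal.SIGTERM)  # Unix only; arbitrary signals are not portable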
402 def is_alive(self):
402 def is_alive(self):
403 """Is the kernel process still running?"""
403 """Is the kernel process still running?"""
404 if self.has_kernel:
404 if self.has_kernel:
405 if self.kernel.poll() is None:
405 if self.kernel.poll() is None:
406 return True
406 return True
407 else:
407 else:
408 return False
408 return False
409 else:
409 else:
410 # we don't have a kernel
410 # we don't have a kernel
411 return False
411 return False
412
412
413
413
414 KernelManagerABC.register(KernelManager)
414 KernelManagerABC.register(KernelManager)
415
415
416
416
417 def start_new_kernel(startup_timeout=60, kernel_name='python', **kwargs):
417 def start_new_kernel(startup_timeout=60, kernel_name='python', **kwargs):
418 """Start a new kernel, and return its Manager and Client"""
418 """Start a new kernel, and return its Manager and Client"""
419 km = KernelManager(kernel_name=kernel_name)
419 km = KernelManager(kernel_name=kernel_name)
420 km.start_kernel(**kwargs)
420 km.start_kernel(**kwargs)
421 kc = km.client()
421 kc = km.client()
422 kc.start_channels()
422 kc.start_channels()
423 kc.wait_for_ready()
423 kc.wait_for_ready()
424
424
425 return km, kc
425 return km, kc
426
426
427 @contextmanager
427 @contextmanager
428 def run_kernel(**kwargs):
428 def run_kernel(**kwargs):
429 """Context manager to create a kernel in a subprocess.
429 """Context manager to create a kernel in a subprocess.
430
430
431 The kernel is shut down when the context exits.
431 The kernel is shut down when the context exits.
432
432
433 Returns
433 Returns
434 -------
434 -------
435 kernel_client: connected KernelClient instance
435 kernel_client: connected KernelClient instance
436 """
436 """
437 km, kc = start_new_kernel(**kwargs)
437 km, kc = start_new_kernel(**kwargs)
438 try:
438 try:
439 yield kc
439 yield kc
440 finally:
440 finally:
441 kc.stop_channels()
441 kc.stop_channels()
442 km.shutdown_kernel(now=True)
442 km.shutdown_kernel(now=True)
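A hedged usage example for the two helpers above (it assumes the default 'python' kernel is installed): run_kernel yields a ready BlockingKernelClient, so code can be executed and the reply collected without managing the manager/client lifecycle by hand.

    from jupyter_client import run_kernel

    with run_kernel(kernel_name='python') as kc:
        msg_id = kc.execute("print('hello')")
        reply = kc.get_shell_msg(timeout=5)   # execute_reply (assuming no other shell traffic)
        print(reply['content']['status'])     # 'ok' on success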
@@ -1,53 +1,53 b''
1 """Abstract base class for kernel managers."""
1 """Abstract base class for kernel managers."""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import abc
6 import abc
7
7
8 from IPython.utils.py3compat import with_metaclass
8 from IPython.utils.py3compat import with_metaclass
9
9
10
10
11 class KernelManagerABC(with_metaclass(abc.ABCMeta, object)):
11 class KernelManagerABC(with_metaclass(abc.ABCMeta, object)):
12 """KernelManager ABC.
12 """KernelManager ABC.
13
13
14 The docstrings for this class can be found in the base implementation:
14 The docstrings for this class can be found in the base implementation:
15
15
16 `IPython.kernel.kernelmanager.KernelManager`
16 `jupyter_client.kernelmanager.KernelManager`
17 """
17 """
18
18
19 @abc.abstractproperty
19 @abc.abstractproperty
20 def kernel(self):
20 def kernel(self):
21 pass
21 pass
22
22
23 #--------------------------------------------------------------------------
23 #--------------------------------------------------------------------------
24 # Kernel management
24 # Kernel management
25 #--------------------------------------------------------------------------
25 #--------------------------------------------------------------------------
26
26
27 @abc.abstractmethod
27 @abc.abstractmethod
28 def start_kernel(self, **kw):
28 def start_kernel(self, **kw):
29 pass
29 pass
30
30
31 @abc.abstractmethod
31 @abc.abstractmethod
32 def shutdown_kernel(self, now=False, restart=False):
32 def shutdown_kernel(self, now=False, restart=False):
33 pass
33 pass
34
34
35 @abc.abstractmethod
35 @abc.abstractmethod
36 def restart_kernel(self, now=False, **kw):
36 def restart_kernel(self, now=False, **kw):
37 pass
37 pass
38
38
39 @abc.abstractproperty
39 @abc.abstractproperty
40 def has_kernel(self):
40 def has_kernel(self):
41 pass
41 pass
42
42
43 @abc.abstractmethod
43 @abc.abstractmethod
44 def interrupt_kernel(self):
44 def interrupt_kernel(self):
45 pass
45 pass
46
46
47 @abc.abstractmethod
47 @abc.abstractmethod
48 def signal_kernel(self, signum):
48 def signal_kernel(self, signum):
49 pass
49 pass
50
50
51 @abc.abstractmethod
51 @abc.abstractmethod
52 def is_alive(self):
52 def is_alive(self):
53 pass
53 pass
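A toy illustration of the contract (a hypothetical stand-in, not a real manager): any class implementing these methods and properties can subclass the ABC directly, or be registered with KernelManagerABC.register as the default KernelManager is.

    class NullKernelManager(KernelManagerABC):
        """Toy manager that never launches a real process."""
        _proc = None

        @property
        def kernel(self):
            return self._proc

        @property
        def has_kernel(self):
            return self._proc is not None

        def start_kernel(self, **kw):
            self._proc = object()   # stand-in for a subprocess handle

        def shutdown_kernel(self, now=False, restart=False):
            self._proc = None

        def restart_kernel(self, now=False, **kw):
            self.shutdown_kernel(now=now, restart=True)
            self.start_kernel(**kw)

        def interrupt_kernel(self):
            pass

        def signal_kernel(self, signum):
            pass

        def is_alive(self):
            return self.has_kernel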
@@ -1,319 +1,319 b''
1 """A kernel manager for multiple kernels"""
1 """A kernel manager for multiple kernels"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from __future__ import absolute_import
6 from __future__ import absolute_import
7
7
8 import os
8 import os
9 import uuid
9 import uuid
10
10
11 import zmq
11 import zmq
12
12
13 from IPython.config.configurable import LoggingConfigurable
13 from IPython.config.configurable import LoggingConfigurable
14 from IPython.utils.importstring import import_item
14 from IPython.utils.importstring import import_item
15 from IPython.utils.traitlets import (
15 from IPython.utils.traitlets import (
16 Instance, Dict, List, Unicode, Any, DottedObjectName
16 Instance, Dict, List, Unicode, Any, DottedObjectName
17 )
17 )
18 from IPython.utils.py3compat import unicode_type
18 from IPython.utils.py3compat import unicode_type
19
19
20 from .kernelspec import NATIVE_KERNEL_NAME
20 from .kernelspec import NATIVE_KERNEL_NAME
21
21
22 class DuplicateKernelError(Exception):
22 class DuplicateKernelError(Exception):
23 pass
23 pass
24
24
25
25
26 def kernel_method(f):
26 def kernel_method(f):
27 """decorator for proxying MKM.method(kernel_id) to individual KMs by ID"""
27 """decorator for proxying MKM.method(kernel_id) to individual KMs by ID"""
28 def wrapped(self, kernel_id, *args, **kwargs):
28 def wrapped(self, kernel_id, *args, **kwargs):
29 # get the kernel
29 # get the kernel
30 km = self.get_kernel(kernel_id)
30 km = self.get_kernel(kernel_id)
31 method = getattr(km, f.__name__)
31 method = getattr(km, f.__name__)
32 # call the kernel's method
32 # call the kernel's method
33 r = method(*args, **kwargs)
33 r = method(*args, **kwargs)
34 # last thing, call anything defined in the actual class method
34 # last thing, call anything defined in the actual class method
35 # such as logging messages
35 # such as logging messages
36 f(self, kernel_id, *args, **kwargs)
36 f(self, kernel_id, *args, **kwargs)
37 # return the method result
37 # return the method result
38 return r
38 return r
39 return wrapped
39 return wrapped
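A self-contained toy showing what the decorator does (illustrative only; the container and method names are made up): the wrapped call looks up the managed object via get_kernel, forwards the call to it, then runs the decorated body, which in MultiKernelManager is usually just a log statement.

    from jupyter_client.multikernelmanager import kernel_method

    class ToyMulti(object):
        def __init__(self):
            self._items = {'a': []}
        def get_kernel(self, kernel_id):        # wrapped() resolves the target with get_kernel
            return self._items[kernel_id]
        @kernel_method
        def append(self, kernel_id, value):
            print("appended %r to %s" % (value, kernel_id))

    t = ToyMulti()
    t.append('a', 1)       # forwards to list.append(1), then runs the print above
    print(t._items['a'])   # [1]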
40
40
41
41
42 class MultiKernelManager(LoggingConfigurable):
42 class MultiKernelManager(LoggingConfigurable):
43 """A class for managing multiple kernels."""
43 """A class for managing multiple kernels."""
44
44
45 ipython_kernel_argv = List(Unicode)
45 ipython_kernel_argv = List(Unicode)
46
46
47 default_kernel_name = Unicode(NATIVE_KERNEL_NAME, config=True,
47 default_kernel_name = Unicode(NATIVE_KERNEL_NAME, config=True,
48 help="The name of the default kernel to start"
48 help="The name of the default kernel to start"
49 )
49 )
50
50
51 kernel_manager_class = DottedObjectName(
51 kernel_manager_class = DottedObjectName(
52 "IPython.kernel.ioloop.IOLoopKernelManager", config=True,
52 "jupyter_client.ioloop.IOLoopKernelManager", config=True,
53 help="""The kernel manager class. This is configurable to allow
53 help="""The kernel manager class. This is configurable to allow
54 subclassing of the KernelManager for customized behavior.
54 subclassing of the KernelManager for customized behavior.
55 """
55 """
56 )
56 )
57 def _kernel_manager_class_changed(self, name, old, new):
57 def _kernel_manager_class_changed(self, name, old, new):
58 self.kernel_manager_factory = import_item(new)
58 self.kernel_manager_factory = import_item(new)
59
59
60 kernel_manager_factory = Any(help="this is kernel_manager_class after import")
60 kernel_manager_factory = Any(help="this is kernel_manager_class after import")
61 def _kernel_manager_factory_default(self):
61 def _kernel_manager_factory_default(self):
62 return import_item(self.kernel_manager_class)
62 return import_item(self.kernel_manager_class)
63
63
64 context = Instance('zmq.Context')
64 context = Instance('zmq.Context')
65 def _context_default(self):
65 def _context_default(self):
66 return zmq.Context.instance()
66 return zmq.Context.instance()
67
67
68 connection_dir = Unicode('')
68 connection_dir = Unicode('')
69
69
70 _kernels = Dict()
70 _kernels = Dict()
71
71
72 def list_kernel_ids(self):
72 def list_kernel_ids(self):
73 """Return a list of the kernel ids of the active kernels."""
73 """Return a list of the kernel ids of the active kernels."""
74 # Create a copy so we can iterate over kernels in operations
74 # Create a copy so we can iterate over kernels in operations
75 # that delete keys.
75 # that delete keys.
76 return list(self._kernels.keys())
76 return list(self._kernels.keys())
77
77
78 def __len__(self):
78 def __len__(self):
79 """Return the number of running kernels."""
79 """Return the number of running kernels."""
80 return len(self.list_kernel_ids())
80 return len(self.list_kernel_ids())
81
81
82 def __contains__(self, kernel_id):
82 def __contains__(self, kernel_id):
83 return kernel_id in self._kernels
83 return kernel_id in self._kernels
84
84
85 def start_kernel(self, kernel_name=None, **kwargs):
85 def start_kernel(self, kernel_name=None, **kwargs):
86 """Start a new kernel.
86 """Start a new kernel.
87
87
88 The caller can pick a kernel_id by passing one in as a keyword arg,
88 The caller can pick a kernel_id by passing one in as a keyword arg,
89 otherwise one will be picked using a uuid.
89 otherwise one will be picked using a uuid.
90
90
91 To silence the kernel's stdout/stderr, call this using::
91 To silence the kernel's stdout/stderr, call this using::
92
92
93 km.start_kernel(stdout=PIPE, stderr=PIPE)
93 km.start_kernel(stdout=PIPE, stderr=PIPE)
94
94
95 """
95 """
96 kernel_id = kwargs.pop('kernel_id', unicode_type(uuid.uuid4()))
96 kernel_id = kwargs.pop('kernel_id', unicode_type(uuid.uuid4()))
97 if kernel_id in self:
97 if kernel_id in self:
98 raise DuplicateKernelError('Kernel already exists: %s' % kernel_id)
98 raise DuplicateKernelError('Kernel already exists: %s' % kernel_id)
99
99
100 if kernel_name is None:
100 if kernel_name is None:
101 kernel_name = self.default_kernel_name
101 kernel_name = self.default_kernel_name
102 # kernel_manager_factory is the constructor for the KernelManager
102 # kernel_manager_factory is the constructor for the KernelManager
103 # subclass we are using. It can be configured as any Configurable,
103 # subclass we are using. It can be configured as any Configurable,
104 # including things like its transport and ip.
104 # including things like its transport and ip.
105 km = self.kernel_manager_factory(connection_file=os.path.join(
105 km = self.kernel_manager_factory(connection_file=os.path.join(
106 self.connection_dir, "kernel-%s.json" % kernel_id),
106 self.connection_dir, "kernel-%s.json" % kernel_id),
107 parent=self, autorestart=True, log=self.log, kernel_name=kernel_name,
107 parent=self, autorestart=True, log=self.log, kernel_name=kernel_name,
108 )
108 )
109 # FIXME: remove special treatment of IPython kernels
109 # FIXME: remove special treatment of IPython kernels
110 if km.ipython_kernel:
110 if km.ipython_kernel:
111 kwargs.setdefault('extra_arguments', self.ipython_kernel_argv)
111 kwargs.setdefault('extra_arguments', self.ipython_kernel_argv)
112 km.start_kernel(**kwargs)
112 km.start_kernel(**kwargs)
113 self._kernels[kernel_id] = km
113 self._kernels[kernel_id] = km
114 return kernel_id
114 return kernel_id
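A hedged usage sketch (assuming the default IPython kernel is installed): start a kernel with silenced output, as the docstring suggests, then drive it by id through the proxied methods.

    from subprocess import PIPE
    from jupyter_client.multikernelmanager import MultiKernelManager

    mkm = MultiKernelManager()
    kid = mkm.start_kernel(stdout=PIPE, stderr=PIPE)
    print(kid in mkm, len(mkm))        # True 1
    mkm.interrupt_kernel(kid)          # proxied to that kernel's KernelManager
    mkm.shutdown_kernel(kid, now=True)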
115
115
116 @kernel_method
116 @kernel_method
117 def shutdown_kernel(self, kernel_id, now=False, restart=False):
117 def shutdown_kernel(self, kernel_id, now=False, restart=False):
118 """Shutdown a kernel by its kernel uuid.
118 """Shutdown a kernel by its kernel uuid.
119
119
120 Parameters
120 Parameters
121 ==========
121 ==========
122 kernel_id : uuid
122 kernel_id : uuid
123 The id of the kernel to shutdown.
123 The id of the kernel to shutdown.
124 now : bool
124 now : bool
125 Whether the kernel should be shut down forcibly using a signal.
125 Whether the kernel should be shut down forcibly using a signal.
126 restart : bool
126 restart : bool
127 Will the kernel be restarted?
127 Will the kernel be restarted?
128 """
128 """
129 self.log.info("Kernel shutdown: %s" % kernel_id)
129 self.log.info("Kernel shutdown: %s" % kernel_id)
130 self.remove_kernel(kernel_id)
130 self.remove_kernel(kernel_id)
131
131
132 @kernel_method
132 @kernel_method
133 def request_shutdown(self, kernel_id, restart=False):
133 def request_shutdown(self, kernel_id, restart=False):
134 """Ask a kernel to shut down by its kernel uuid"""
134 """Ask a kernel to shut down by its kernel uuid"""
135
135
136 @kernel_method
136 @kernel_method
137 def finish_shutdown(self, kernel_id, waittime=1, pollinterval=0.1):
137 def finish_shutdown(self, kernel_id, waittime=1, pollinterval=0.1):
138 """Wait for a kernel to finish shutting down, and kill it if it doesn't
138 """Wait for a kernel to finish shutting down, and kill it if it doesn't
139 """
139 """
140 self.log.info("Kernel shutdown: %s" % kernel_id)
140 self.log.info("Kernel shutdown: %s" % kernel_id)
141
141
142 @kernel_method
142 @kernel_method
143 def cleanup(self, kernel_id, connection_file=True):
143 def cleanup(self, kernel_id, connection_file=True):
144 """Clean up a kernel's resources"""
144 """Clean up a kernel's resources"""
145
145
146 def remove_kernel(self, kernel_id):
146 def remove_kernel(self, kernel_id):
147 """remove a kernel from our mapping.
147 """remove a kernel from our mapping.
148
148
149 Mainly so that a kernel can be removed if it is already dead,
149 Mainly so that a kernel can be removed if it is already dead,
150 without having to call shutdown_kernel.
150 without having to call shutdown_kernel.
151
151
152 The kernel object is returned.
152 The kernel object is returned.
153 """
153 """
154 return self._kernels.pop(kernel_id)
154 return self._kernels.pop(kernel_id)
155
155
156 def shutdown_all(self, now=False):
156 def shutdown_all(self, now=False):
157 """Shutdown all kernels."""
157 """Shutdown all kernels."""
158 kids = self.list_kernel_ids()
158 kids = self.list_kernel_ids()
159 for kid in kids:
159 for kid in kids:
160 self.request_shutdown(kid)
160 self.request_shutdown(kid)
161 for kid in kids:
161 for kid in kids:
162 self.finish_shutdown(kid)
162 self.finish_shutdown(kid)
163 self.cleanup(kid)
163 self.cleanup(kid)
164 self.remove_kernel(kid)
164 self.remove_kernel(kid)
165
165
166 @kernel_method
166 @kernel_method
167 def interrupt_kernel(self, kernel_id):
167 def interrupt_kernel(self, kernel_id):
168 """Interrupt (SIGINT) the kernel by its uuid.
168 """Interrupt (SIGINT) the kernel by its uuid.
169
169
170 Parameters
170 Parameters
171 ==========
171 ==========
172 kernel_id : uuid
172 kernel_id : uuid
173 The id of the kernel to interrupt.
173 The id of the kernel to interrupt.
174 """
174 """
175 self.log.info("Kernel interrupted: %s" % kernel_id)
175 self.log.info("Kernel interrupted: %s" % kernel_id)
176
176
177 @kernel_method
177 @kernel_method
178 def signal_kernel(self, kernel_id, signum):
178 def signal_kernel(self, kernel_id, signum):
179 """Sends a signal to the kernel by its uuid.
179 """Sends a signal to the kernel by its uuid.
180
180
181 Note that since only SIGTERM is supported on Windows, this function
181 Note that since only SIGTERM is supported on Windows, this function
182 is only useful on Unix systems.
182 is only useful on Unix systems.
183
183
184 Parameters
184 Parameters
185 ==========
185 ==========
186 kernel_id : uuid
186 kernel_id : uuid
187 The id of the kernel to signal.
187 The id of the kernel to signal.
188 """
188 """
189 self.log.info("Signaled Kernel %s with %s" % (kernel_id, signum))
189 self.log.info("Signaled Kernel %s with %s" % (kernel_id, signum))
190
190
191 @kernel_method
191 @kernel_method
192 def restart_kernel(self, kernel_id, now=False):
192 def restart_kernel(self, kernel_id, now=False):
193 """Restart a kernel by its uuid, keeping the same ports.
193 """Restart a kernel by its uuid, keeping the same ports.
194
194
195 Parameters
195 Parameters
196 ==========
196 ==========
197 kernel_id : uuid
197 kernel_id : uuid
198 The id of the kernel to interrupt.
198 The id of the kernel to interrupt.
199 """
199 """
200 self.log.info("Kernel restarted: %s" % kernel_id)
200 self.log.info("Kernel restarted: %s" % kernel_id)
201
201
202 @kernel_method
202 @kernel_method
203 def is_alive(self, kernel_id):
203 def is_alive(self, kernel_id):
204 """Is the kernel alive.
204 """Is the kernel alive.
205
205
206 This calls KernelManager.is_alive() which calls Popen.poll on the
206 This calls KernelManager.is_alive() which calls Popen.poll on the
207 actual kernel subprocess.
207 actual kernel subprocess.
208
208
209 Parameters
209 Parameters
210 ==========
210 ==========
211 kernel_id : uuid
211 kernel_id : uuid
212 The id of the kernel.
212 The id of the kernel.
213 """
213 """
214
214
215 def _check_kernel_id(self, kernel_id):
215 def _check_kernel_id(self, kernel_id):
216 """check that a kernel id is valid"""
216 """check that a kernel id is valid"""
217 if kernel_id not in self:
217 if kernel_id not in self:
218 raise KeyError("Kernel with id not found: %s" % kernel_id)
218 raise KeyError("Kernel with id not found: %s" % kernel_id)
219
219
220 def get_kernel(self, kernel_id):
220 def get_kernel(self, kernel_id):
221 """Get the single KernelManager object for a kernel by its uuid.
221 """Get the single KernelManager object for a kernel by its uuid.
222
222
223 Parameters
223 Parameters
224 ==========
224 ==========
225 kernel_id : uuid
225 kernel_id : uuid
226 The id of the kernel.
226 The id of the kernel.
227 """
227 """
228 self._check_kernel_id(kernel_id)
228 self._check_kernel_id(kernel_id)
229 return self._kernels[kernel_id]
229 return self._kernels[kernel_id]
230
230
231 @kernel_method
231 @kernel_method
232 def add_restart_callback(self, kernel_id, callback, event='restart'):
232 def add_restart_callback(self, kernel_id, callback, event='restart'):
233 """add a callback for the KernelRestarter"""
233 """add a callback for the KernelRestarter"""
234
234
235 @kernel_method
235 @kernel_method
236 def remove_restart_callback(self, kernel_id, callback, event='restart'):
236 def remove_restart_callback(self, kernel_id, callback, event='restart'):
237 """remove a callback for the KernelRestarter"""
237 """remove a callback for the KernelRestarter"""
238
238
239 @kernel_method
239 @kernel_method
240 def get_connection_info(self, kernel_id):
240 def get_connection_info(self, kernel_id):
241 """Return a dictionary of connection data for a kernel.
241 """Return a dictionary of connection data for a kernel.
242
242
243 Parameters
243 Parameters
244 ==========
244 ==========
245 kernel_id : uuid
245 kernel_id : uuid
246 The id of the kernel.
246 The id of the kernel.
247
247
248 Returns
248 Returns
249 =======
249 =======
250 connection_dict : dict
250 connection_dict : dict
251 A dict of the information needed to connect to a kernel.
251 A dict of the information needed to connect to a kernel.
252 This includes the ip address and the integer port
252 This includes the ip address and the integer port
253 numbers of the different channels (stdin_port, iopub_port,
253 numbers of the different channels (stdin_port, iopub_port,
254 shell_port, hb_port).
254 shell_port, hb_port).
255 """
255 """
256
256
257 @kernel_method
257 @kernel_method
258 def connect_iopub(self, kernel_id, identity=None):
258 def connect_iopub(self, kernel_id, identity=None):
259 """Return a zmq Socket connected to the iopub channel.
259 """Return a zmq Socket connected to the iopub channel.
260
260
261 Parameters
261 Parameters
262 ==========
262 ==========
263 kernel_id : uuid
263 kernel_id : uuid
264 The id of the kernel
264 The id of the kernel
265 identity : bytes (optional)
265 identity : bytes (optional)
266 The zmq identity of the socket
266 The zmq identity of the socket
267
267
268 Returns
268 Returns
269 =======
269 =======
270 stream : zmq Socket or ZMQStream
270 stream : zmq Socket or ZMQStream
271 """
271 """
272
272
273 @kernel_method
273 @kernel_method
274 def connect_shell(self, kernel_id, identity=None):
274 def connect_shell(self, kernel_id, identity=None):
275 """Return a zmq Socket connected to the shell channel.
275 """Return a zmq Socket connected to the shell channel.
276
276
277 Parameters
277 Parameters
278 ==========
278 ==========
279 kernel_id : uuid
279 kernel_id : uuid
280 The id of the kernel
280 The id of the kernel
281 identity : bytes (optional)
281 identity : bytes (optional)
282 The zmq identity of the socket
282 The zmq identity of the socket
283
283
284 Returns
284 Returns
285 =======
285 =======
286 stream : zmq Socket or ZMQStream
286 stream : zmq Socket or ZMQStream
287 """
287 """
288
288
289 @kernel_method
289 @kernel_method
290 def connect_stdin(self, kernel_id, identity=None):
290 def connect_stdin(self, kernel_id, identity=None):
291 """Return a zmq Socket connected to the stdin channel.
291 """Return a zmq Socket connected to the stdin channel.
292
292
293 Parameters
293 Parameters
294 ==========
294 ==========
295 kernel_id : uuid
295 kernel_id : uuid
296 The id of the kernel
296 The id of the kernel
297 identity : bytes (optional)
297 identity : bytes (optional)
298 The zmq identity of the socket
298 The zmq identity of the socket
299
299
300 Returns
300 Returns
301 =======
301 =======
302 stream : zmq Socket or ZMQStream
302 stream : zmq Socket or ZMQStream
303 """
303 """
304
304
305 @kernel_method
305 @kernel_method
306 def connect_hb(self, kernel_id, identity=None):
306 def connect_hb(self, kernel_id, identity=None):
307 """Return a zmq Socket connected to the hb channel.
307 """Return a zmq Socket connected to the hb channel.
308
308
309 Parameters
309 Parameters
310 ==========
310 ==========
311 kernel_id : uuid
311 kernel_id : uuid
312 The id of the kernel
312 The id of the kernel
313 identity : bytes (optional)
313 identity : bytes (optional)
314 The zmq identity of the socket
314 The zmq identity of the socket
315
315
316 Returns
316 Returns
317 =======
317 =======
318 stream : zmq Socket or ZMQStream
318 stream : zmq Socket or ZMQStream
319 """
319 """
@@ -1,111 +1,111 b''
1 """A basic kernel monitor with autorestarting.
1 """A basic kernel monitor with autorestarting.
2
2
3 This watches a kernel's state using KernelManager.is_alive and auto
3 This watches a kernel's state using KernelManager.is_alive and auto
4 restarts the kernel if it dies.
4 restarts the kernel if it dies.
5
5
6 It is an incomplete base class, and must be subclassed.
6 It is an incomplete base class, and must be subclassed.
7 """
7 """
8
8
9 # Copyright (c) IPython Development Team.
9 # Copyright (c) IPython Development Team.
10 # Distributed under the terms of the Modified BSD License.
10 # Distributed under the terms of the Modified BSD License.
11
11
12 from IPython.config.configurable import LoggingConfigurable
12 from IPython.config.configurable import LoggingConfigurable
13 from IPython.utils.traitlets import (
13 from IPython.utils.traitlets import (
14 Instance, Float, Dict, Bool, Integer,
14 Instance, Float, Dict, Bool, Integer,
15 )
15 )
16
16
17
17
18 class KernelRestarter(LoggingConfigurable):
18 class KernelRestarter(LoggingConfigurable):
19 """Monitor and autorestart a kernel."""
19 """Monitor and autorestart a kernel."""
20
20
21 kernel_manager = Instance('IPython.kernel.KernelManager')
21 kernel_manager = Instance('jupyter_client.KernelManager')
22
22
23 debug = Bool(False, config=True,
23 debug = Bool(False, config=True,
24 help="""Whether to include every poll event in debugging output.
24 help="""Whether to include every poll event in debugging output.
25
25
26 Has to be set explicitly, because there will be *a lot* of output.
26 Has to be set explicitly, because there will be *a lot* of output.
27 """
27 """
28 )
28 )
29
29
30 time_to_dead = Float(3.0, config=True,
30 time_to_dead = Float(3.0, config=True,
31 help="""Kernel heartbeat interval in seconds."""
31 help="""Kernel heartbeat interval in seconds."""
32 )
32 )
33
33
34 restart_limit = Integer(5, config=True,
34 restart_limit = Integer(5, config=True,
35 help="""The number of consecutive autorestarts before the kernel is presumed dead."""
35 help="""The number of consecutive autorestarts before the kernel is presumed dead."""
36 )
36 )
37 _restarting = Bool(False)
37 _restarting = Bool(False)
38 _restart_count = Integer(0)
38 _restart_count = Integer(0)
39
39
40 callbacks = Dict()
40 callbacks = Dict()
41 def _callbacks_default(self):
41 def _callbacks_default(self):
42 return dict(restart=[], dead=[])
42 return dict(restart=[], dead=[])
43
43
44 def start(self):
44 def start(self):
45 """Start the polling of the kernel."""
45 """Start the polling of the kernel."""
46 raise NotImplementedError("Must be implemented in a subclass")
46 raise NotImplementedError("Must be implemented in a subclass")
47
47
48 def stop(self):
48 def stop(self):
49 """Stop the kernel polling."""
49 """Stop the kernel polling."""
50 raise NotImplementedError("Must be implemented in a subclass")
50 raise NotImplementedError("Must be implemented in a subclass")
51
51
52 def add_callback(self, f, event='restart'):
52 def add_callback(self, f, event='restart'):
53 """register a callback to fire on a particular event
53 """register a callback to fire on a particular event
54
54
55 Possible values for event:
55 Possible values for event:
56
56
57 'restart' (default): kernel has died, and will be restarted.
57 'restart' (default): kernel has died, and will be restarted.
58 'dead': restart has failed, kernel will be left dead.
58 'dead': restart has failed, kernel will be left dead.
59
59
60 """
60 """
61 self.callbacks[event].append(f)
61 self.callbacks[event].append(f)
62
62
63 def remove_callback(self, f, event='restart'):
63 def remove_callback(self, f, event='restart'):
64 """unregister a callback to fire on a particular event
64 """unregister a callback to fire on a particular event
65
65
66 Possible values for event:
66 Possible values for event:
67
67
68 'restart' (default): kernel has died, and will be restarted.
68 'restart' (default): kernel has died, and will be restarted.
69 'dead': restart has failed, kernel will be left dead.
69 'dead': restart has failed, kernel will be left dead.
70
70
71 """
71 """
72 try:
72 try:
73 self.callbacks[event].remove(f)
73 self.callbacks[event].remove(f)
74 except ValueError:
74 except ValueError:
75 pass
75 pass
76
76
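Callbacks are normally registered through the owning KernelManager, which forwards them to its restarter; a hedged sketch (hypothetical km with autorestart enabled, as when created by MultiKernelManager above):

    def on_restart():
        print("kernel died; autorestart in progress")

    def on_dead():
        print("restart limit reached; kernel left dead")

    km.add_restart_callback(on_restart, event='restart')
    km.add_restart_callback(on_dead, event='dead')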
77 def _fire_callbacks(self, event):
77 def _fire_callbacks(self, event):
78 """fire our callbacks for a particular event"""
78 """fire our callbacks for a particular event"""
79 for callback in self.callbacks[event]:
79 for callback in self.callbacks[event]:
80 try:
80 try:
81 callback()
81 callback()
82 except Exception as e:
82 except Exception as e:
83 self.log.error("KernelRestarter: %s callback %r failed", event, callback, exc_info=True)
83 self.log.error("KernelRestarter: %s callback %r failed", event, callback, exc_info=True)
84
84
85 def poll(self):
85 def poll(self):
86 if self.debug:
86 if self.debug:
87 self.log.debug('Polling kernel...')
87 self.log.debug('Polling kernel...')
88 if not self.kernel_manager.is_alive():
88 if not self.kernel_manager.is_alive():
89 if self._restarting:
89 if self._restarting:
90 self._restart_count += 1
90 self._restart_count += 1
91 else:
91 else:
92 self._restart_count = 1
92 self._restart_count = 1
93
93
94 if self._restart_count >= self.restart_limit:
94 if self._restart_count >= self.restart_limit:
95 self.log.warn("KernelRestarter: restart failed")
95 self.log.warn("KernelRestarter: restart failed")
96 self._fire_callbacks('dead')
96 self._fire_callbacks('dead')
97 self._restarting = False
97 self._restarting = False
98 self._restart_count = 0
98 self._restart_count = 0
99 self.stop()
99 self.stop()
100 else:
100 else:
101 self.log.info('KernelRestarter: restarting kernel (%i/%i)',
101 self.log.info('KernelRestarter: restarting kernel (%i/%i)',
102 self._restart_count,
102 self._restart_count,
103 self.restart_limit
103 self.restart_limit
104 )
104 )
105 self._fire_callbacks('restart')
105 self._fire_callbacks('restart')
106 self.kernel_manager.restart_kernel(now=True)
106 self.kernel_manager.restart_kernel(now=True)
107 self._restarting = True
107 self._restarting = True
108 else:
108 else:
109 if self._restarting:
109 if self._restarting:
110 self.log.debug("KernelRestarter: restart apparently succeeded")
110 self.log.debug("KernelRestarter: restart apparently succeeded")
111 self._restarting = False
111 self._restarting = False
@@ -1,881 +1,881 b''
1 """Session object for building, serializing, sending, and receiving messages in
1 """Session object for building, serializing, sending, and receiving messages in
2 IPython. The Session object supports serialization, HMAC signatures, and
2 IPython. The Session object supports serialization, HMAC signatures, and
3 metadata on messages.
3 metadata on messages.
4
4
5 Also defined here are utilities for working with Sessions:
5 Also defined here are utilities for working with Sessions:
6 * A SessionFactory to be used as a base class for configurables that work with
6 * A SessionFactory to be used as a base class for configurables that work with
7 Sessions.
7 Sessions.
8 * A Message object for convenience that allows attribute-access to the msg dict.
8 * A Message object for convenience that allows attribute-access to the msg dict.
9 """
9 """
10
10
11 # Copyright (c) IPython Development Team.
11 # Copyright (c) IPython Development Team.
12 # Distributed under the terms of the Modified BSD License.
12 # Distributed under the terms of the Modified BSD License.
13
13
14 import hashlib
14 import hashlib
15 import hmac
15 import hmac
16 import logging
16 import logging
17 import os
17 import os
18 import pprint
18 import pprint
19 import random
19 import random
20 import uuid
20 import uuid
21 import warnings
21 import warnings
22 from datetime import datetime
22 from datetime import datetime
23
23
24 try:
24 try:
25 import cPickle
25 import cPickle
26 pickle = cPickle
26 pickle = cPickle
27 except:
27 except:
28 cPickle = None
28 cPickle = None
29 import pickle
29 import pickle
30
30
31 try:
31 try:
32 # We are using compare_digest to limit the surface of timing attacks
32 # We are using compare_digest to limit the surface of timing attacks
33 from hmac import compare_digest
33 from hmac import compare_digest
34 except ImportError:
34 except ImportError:
35 # Python < 2.7.7: When digests don't match no feedback is provided,
35 # Python < 2.7.7: When digests don't match no feedback is provided,
36 # limiting the surface of attack
36 # limiting the surface of attack
37 def compare_digest(a,b): return a == b
37 def compare_digest(a,b): return a == b
38
38
39 import zmq
39 import zmq
40 from zmq.utils import jsonapi
40 from zmq.utils import jsonapi
41 from zmq.eventloop.ioloop import IOLoop
41 from zmq.eventloop.ioloop import IOLoop
42 from zmq.eventloop.zmqstream import ZMQStream
42 from zmq.eventloop.zmqstream import ZMQStream
43
43
44 from IPython.core.release import kernel_protocol_version
44 from IPython.core.release import kernel_protocol_version
45 from IPython.config.configurable import Configurable, LoggingConfigurable
45 from IPython.config.configurable import Configurable, LoggingConfigurable
46 from IPython.utils import io
46 from IPython.utils import io
47 from IPython.utils.importstring import import_item
47 from IPython.utils.importstring import import_item
48 from IPython.utils.jsonutil import extract_dates, squash_dates, date_default
48 from IPython.utils.jsonutil import extract_dates, squash_dates, date_default
49 from IPython.utils.py3compat import (str_to_bytes, str_to_unicode, unicode_type,
49 from IPython.utils.py3compat import (str_to_bytes, str_to_unicode, unicode_type,
50 iteritems)
50 iteritems)
51 from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,
51 from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,
52 DottedObjectName, CUnicode, Dict, Integer,
52 DottedObjectName, CUnicode, Dict, Integer,
53 TraitError,
53 TraitError,
54 )
54 )
55 from IPython.utils.pickleutil import PICKLE_PROTOCOL
55 from IPython.utils.pickleutil import PICKLE_PROTOCOL
56 from IPython.kernel.adapter import adapt
57 from IPython.kernel.zmq.serialize import MAX_ITEMS, MAX_BYTES
56 from IPython.kernel.zmq.serialize import MAX_ITEMS, MAX_BYTES
57 from jupyter_client.adapter import adapt
58
58
59 #-----------------------------------------------------------------------------
59 #-----------------------------------------------------------------------------
60 # utility functions
60 # utility functions
61 #-----------------------------------------------------------------------------
61 #-----------------------------------------------------------------------------
62
62
63 def squash_unicode(obj):
63 def squash_unicode(obj):
64 """coerce unicode back to bytestrings."""
64 """coerce unicode back to bytestrings."""
65 if isinstance(obj,dict):
65 if isinstance(obj,dict):
66 for key in obj.keys():
66 for key in obj.keys():
67 obj[key] = squash_unicode(obj[key])
67 obj[key] = squash_unicode(obj[key])
68 if isinstance(key, unicode_type):
68 if isinstance(key, unicode_type):
69 obj[squash_unicode(key)] = obj.pop(key)
69 obj[squash_unicode(key)] = obj.pop(key)
70 elif isinstance(obj, list):
70 elif isinstance(obj, list):
71 for i,v in enumerate(obj):
71 for i,v in enumerate(obj):
72 obj[i] = squash_unicode(v)
72 obj[i] = squash_unicode(v)
73 elif isinstance(obj, unicode_type):
73 elif isinstance(obj, unicode_type):
74 obj = obj.encode('utf8')
74 obj = obj.encode('utf8')
75 return obj
75 return obj
76
76
77 #-----------------------------------------------------------------------------
77 #-----------------------------------------------------------------------------
78 # globals and defaults
78 # globals and defaults
79 #-----------------------------------------------------------------------------
79 #-----------------------------------------------------------------------------
80
80
81 # ISO8601-ify datetime objects
81 # ISO8601-ify datetime objects
82 # allow unicode
82 # allow unicode
83 # disallow nan, because it's not actually valid JSON
83 # disallow nan, because it's not actually valid JSON
84 json_packer = lambda obj: jsonapi.dumps(obj, default=date_default,
84 json_packer = lambda obj: jsonapi.dumps(obj, default=date_default,
85 ensure_ascii=False, allow_nan=False,
85 ensure_ascii=False, allow_nan=False,
86 )
86 )
87 json_unpacker = lambda s: jsonapi.loads(s)
87 json_unpacker = lambda s: jsonapi.loads(s)
88
88
89 pickle_packer = lambda o: pickle.dumps(squash_dates(o), PICKLE_PROTOCOL)
89 pickle_packer = lambda o: pickle.dumps(squash_dates(o), PICKLE_PROTOCOL)
90 pickle_unpacker = pickle.loads
90 pickle_unpacker = pickle.loads
91
91
92 default_packer = json_packer
92 default_packer = json_packer
93 default_unpacker = json_unpacker
93 default_unpacker = json_unpacker
94
94
95 DELIM = b"<IDS|MSG>"
95 DELIM = b"<IDS|MSG>"
96 # singleton dummy tracker, which will always report as done
96 # singleton dummy tracker, which will always report as done
97 DONE = zmq.MessageTracker()
97 DONE = zmq.MessageTracker()
98
98
99 #-----------------------------------------------------------------------------
99 #-----------------------------------------------------------------------------
100 # Mixin tools for apps that use Sessions
100 # Mixin tools for apps that use Sessions
101 #-----------------------------------------------------------------------------
101 #-----------------------------------------------------------------------------
102
102
103 session_aliases = dict(
103 session_aliases = dict(
104 ident = 'Session.session',
104 ident = 'Session.session',
105 user = 'Session.username',
105 user = 'Session.username',
106 keyfile = 'Session.keyfile',
106 keyfile = 'Session.keyfile',
107 )
107 )
108
108
109 session_flags = {
109 session_flags = {
110 'secure' : ({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())),
110 'secure' : ({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())),
111 'keyfile' : '' }},
111 'keyfile' : '' }},
112 """Use HMAC digests for authentication of messages.
112 """Use HMAC digests for authentication of messages.
113 Setting this flag will generate a new UUID to use as the HMAC key.
113 Setting this flag will generate a new UUID to use as the HMAC key.
114 """),
114 """),
115 'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }},
115 'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }},
116 """Don't authenticate messages."""),
116 """Don't authenticate messages."""),
117 }
117 }
118
118
119 def default_secure(cfg):
119 def default_secure(cfg):
120 """Set the default behavior for a config environment to be secure.
120 """Set the default behavior for a config environment to be secure.
121
121
122 If Session.key/keyfile have not been set, set Session.key to
122 If Session.key/keyfile have not been set, set Session.key to
123 a new random UUID.
123 a new random UUID.
124 """
124 """
125 warnings.warn("default_secure is deprecated", DeprecationWarning)
125 warnings.warn("default_secure is deprecated", DeprecationWarning)
126 if 'Session' in cfg:
126 if 'Session' in cfg:
127 if 'key' in cfg.Session or 'keyfile' in cfg.Session:
127 if 'key' in cfg.Session or 'keyfile' in cfg.Session:
128 return
128 return
129 # key/keyfile not specified, generate new UUID:
129 # key/keyfile not specified, generate new UUID:
130 cfg.Session.key = str_to_bytes(str(uuid.uuid4()))
130 cfg.Session.key = str_to_bytes(str(uuid.uuid4()))
131
131
132
132
133 #-----------------------------------------------------------------------------
133 #-----------------------------------------------------------------------------
134 # Classes
134 # Classes
135 #-----------------------------------------------------------------------------
135 #-----------------------------------------------------------------------------
136
136
137 class SessionFactory(LoggingConfigurable):
137 class SessionFactory(LoggingConfigurable):
138 """The Base class for configurables that have a Session, Context, logger,
138 """The Base class for configurables that have a Session, Context, logger,
139 and IOLoop.
139 and IOLoop.
140 """
140 """
141
141
142 logname = Unicode('')
142 logname = Unicode('')
143 def _logname_changed(self, name, old, new):
143 def _logname_changed(self, name, old, new):
144 self.log = logging.getLogger(new)
144 self.log = logging.getLogger(new)
145
145
146 # not configurable:
146 # not configurable:
147 context = Instance('zmq.Context')
147 context = Instance('zmq.Context')
148 def _context_default(self):
148 def _context_default(self):
149 return zmq.Context.instance()
149 return zmq.Context.instance()
150
150
151 session = Instance('IPython.kernel.zmq.session.Session',
151 session = Instance('jupyter_client.session.Session',
152 allow_none=True)
152 allow_none=True)
153
153
154 loop = Instance('zmq.eventloop.ioloop.IOLoop')
154 loop = Instance('zmq.eventloop.ioloop.IOLoop')
155 def _loop_default(self):
155 def _loop_default(self):
156 return IOLoop.instance()
156 return IOLoop.instance()
157
157
158 def __init__(self, **kwargs):
158 def __init__(self, **kwargs):
159 super(SessionFactory, self).__init__(**kwargs)
159 super(SessionFactory, self).__init__(**kwargs)
160
160
161 if self.session is None:
161 if self.session is None:
162 # construct the session
162 # construct the session
163 self.session = Session(**kwargs)
163 self.session = Session(**kwargs)
164
164
165
165
166 class Message(object):
166 class Message(object):
167 """A simple message object that maps dict keys to attributes.
167 """A simple message object that maps dict keys to attributes.
168
168
169 A Message can be created from a dict and a dict from a Message instance
169 A Message can be created from a dict and a dict from a Message instance
170 simply by calling dict(msg_obj)."""
170 simply by calling dict(msg_obj)."""
171
171
172 def __init__(self, msg_dict):
172 def __init__(self, msg_dict):
173 dct = self.__dict__
173 dct = self.__dict__
174 for k, v in iteritems(dict(msg_dict)):
174 for k, v in iteritems(dict(msg_dict)):
175 if isinstance(v, dict):
175 if isinstance(v, dict):
176 v = Message(v)
176 v = Message(v)
177 dct[k] = v
177 dct[k] = v
178
178
179 # Having this iterator lets dict(msg_obj) work out of the box.
179 # Having this iterator lets dict(msg_obj) work out of the box.
180 def __iter__(self):
180 def __iter__(self):
181 return iter(iteritems(self.__dict__))
181 return iter(iteritems(self.__dict__))
182
182
183 def __repr__(self):
183 def __repr__(self):
184 return repr(self.__dict__)
184 return repr(self.__dict__)
185
185
186 def __str__(self):
186 def __str__(self):
187 return pprint.pformat(self.__dict__)
187 return pprint.pformat(self.__dict__)
188
188
189 def __contains__(self, k):
189 def __contains__(self, k):
190 return k in self.__dict__
190 return k in self.__dict__
191
191
192 def __getitem__(self, k):
192 def __getitem__(self, k):
193 return self.__dict__[k]
193 return self.__dict__[k]
194
194
195
195
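A quick round-trip sketch of the convenience wrapper (illustrative values only):

    m = Message({'header': {'msg_id': 'abc', 'msg_type': 'execute_request'},
                 'content': {'code': 'pass'}})
    print(m.header.msg_type)    # attribute access; nested dicts become Messages too
    print(dict(m)['content'])   # the __iter__ above makes dict(m) work out of the box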
196 def msg_header(msg_id, msg_type, username, session):
196 def msg_header(msg_id, msg_type, username, session):
197 date = datetime.now()
197 date = datetime.now()
198 version = kernel_protocol_version
198 version = kernel_protocol_version
199 return locals()
199 return locals()
200
200
201 def extract_header(msg_or_header):
201 def extract_header(msg_or_header):
202 """Given a message or header, return the header."""
202 """Given a message or header, return the header."""
203 if not msg_or_header:
203 if not msg_or_header:
204 return {}
204 return {}
205 try:
205 try:
206 # See if msg_or_header is the entire message.
206 # See if msg_or_header is the entire message.
207 h = msg_or_header['header']
207 h = msg_or_header['header']
208 except KeyError:
208 except KeyError:
209 try:
209 try:
210 # See if msg_or_header is just the header
210 # See if msg_or_header is just the header
211 h = msg_or_header['msg_id']
211 h = msg_or_header['msg_id']
212 except KeyError:
212 except KeyError:
213 raise
213 raise
214 else:
214 else:
215 h = msg_or_header
215 h = msg_or_header
216 if not isinstance(h, dict):
216 if not isinstance(h, dict):
217 h = dict(h)
217 h = dict(h)
218 return h
218 return h
219
219
220 class Session(Configurable):
220 class Session(Configurable):
221 """Object for handling serialization and sending of messages.
221 """Object for handling serialization and sending of messages.
222
222
223 The Session object handles building messages and sending them
223 The Session object handles building messages and sending them
224 with ZMQ sockets or ZMQStream objects. Objects can communicate with each
224 with ZMQ sockets or ZMQStream objects. Objects can communicate with each
225 other over the network via Session objects, and only need to work with the
225 other over the network via Session objects, and only need to work with the
226 dict-based IPython message spec. The Session will handle
226 dict-based IPython message spec. The Session will handle
227 serialization/deserialization, security, and metadata.
227 serialization/deserialization, security, and metadata.
228
228
229 Sessions support configurable serialization via packer/unpacker traits,
229 Sessions support configurable serialization via packer/unpacker traits,
230 and signing with HMAC digests via the key/keyfile traits.
230 and signing with HMAC digests via the key/keyfile traits.
231
231
232 Parameters
232 Parameters
233 ----------
233 ----------
234
234
235 debug : bool
235 debug : bool
236 whether to trigger extra debugging statements
236 whether to trigger extra debugging statements
237 packer/unpacker : str : 'json', 'pickle' or import_string
237 packer/unpacker : str : 'json', 'pickle' or import_string
238 importstrings for methods to serialize message parts. If just
238 importstrings for methods to serialize message parts. If just
239 'json' or 'pickle', predefined JSON and pickle packers will be used.
239 'json' or 'pickle', predefined JSON and pickle packers will be used.
240 Otherwise, the entire importstring must be used.
240 Otherwise, the entire importstring must be used.
241
241
242 The functions must accept at least valid JSON input, and output *bytes*.
242 The functions must accept at least valid JSON input, and output *bytes*.
243
243
244 For example, to use msgpack:
244 For example, to use msgpack:
245 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
245 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
246 pack/unpack : callables
246 pack/unpack : callables
247 You can also set the pack/unpack callables for serialization directly.
247 You can also set the pack/unpack callables for serialization directly.
248 session : bytes
248 session : bytes
249 the ID of this Session object. The default is to generate a new UUID.
249 the ID of this Session object. The default is to generate a new UUID.
250 username : unicode
250 username : unicode
251 username added to message headers. The default is to ask the OS.
251 username added to message headers. The default is to ask the OS.
252 key : bytes
252 key : bytes
253 The key used to initialize an HMAC signature. If unset, messages
253 The key used to initialize an HMAC signature. If unset, messages
254 will not be signed or checked.
254 will not be signed or checked.
255 keyfile : filepath
255 keyfile : filepath
256 The file containing a key. If this is set, `key` will be initialized
256 The file containing a key. If this is set, `key` will be initialized
257 to the contents of the file.
257 to the contents of the file.
258
258
259 """
259 """
260
260
261 debug=Bool(False, config=True, help="""Debug output in the Session""")
261 debug=Bool(False, config=True, help="""Debug output in the Session""")
262
262
263 packer = DottedObjectName('json',config=True,
263 packer = DottedObjectName('json',config=True,
264 help="""The name of the packer for serializing messages.
264 help="""The name of the packer for serializing messages.
265 Should be one of 'json', 'pickle', or an import name
265 Should be one of 'json', 'pickle', or an import name
266 for a custom callable serializer.""")
266 for a custom callable serializer.""")
267 def _packer_changed(self, name, old, new):
267 def _packer_changed(self, name, old, new):
268 if new.lower() == 'json':
268 if new.lower() == 'json':
269 self.pack = json_packer
269 self.pack = json_packer
270 self.unpack = json_unpacker
270 self.unpack = json_unpacker
271 self.unpacker = new
271 self.unpacker = new
272 elif new.lower() == 'pickle':
272 elif new.lower() == 'pickle':
273 self.pack = pickle_packer
273 self.pack = pickle_packer
274 self.unpack = pickle_unpacker
274 self.unpack = pickle_unpacker
275 self.unpacker = new
275 self.unpacker = new
276 else:
276 else:
277 self.pack = import_item(str(new))
277 self.pack = import_item(str(new))
278
278
279 unpacker = DottedObjectName('json', config=True,
279 unpacker = DottedObjectName('json', config=True,
280 help="""The name of the unpacker for unserializing messages.
280 help="""The name of the unpacker for unserializing messages.
281 Only used with custom functions for `packer`.""")
281 Only used with custom functions for `packer`.""")
282 def _unpacker_changed(self, name, old, new):
282 def _unpacker_changed(self, name, old, new):
283 if new.lower() == 'json':
283 if new.lower() == 'json':
284 self.pack = json_packer
284 self.pack = json_packer
285 self.unpack = json_unpacker
285 self.unpack = json_unpacker
286 self.packer = new
286 self.packer = new
287 elif new.lower() == 'pickle':
287 elif new.lower() == 'pickle':
288 self.pack = pickle_packer
288 self.pack = pickle_packer
289 self.unpack = pickle_unpacker
289 self.unpack = pickle_unpacker
290 self.packer = new
290 self.packer = new
291 else:
291 else:
292 self.unpack = import_item(str(new))
292 self.unpack = import_item(str(new))
293
293
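Following the docstring's msgpack example, the packer/unpacker traits can be set when constructing a Session; a hedged sketch (it assumes the third-party msgpack package is available, which this module does not require):

    from jupyter_client.session import Session

    # dotted names are imported lazily via import_item; the callables must accept
    # at least JSON-compatible input and return bytes
    s = Session(packer='msgpack.packb', unpacker='msgpack.unpackb')

    # signing: setting a key (re)builds the HMAC via _new_auth, using signature_scheme
    signed = Session(key=b'secret-key', signature_scheme='hmac-sha256')
    print(signed.auth is not None)   # True; messages will carry HMAC-SHA256 signatures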
294 session = CUnicode(u'', config=True,
294 session = CUnicode(u'', config=True,
295 help="""The UUID identifying this session.""")
295 help="""The UUID identifying this session.""")
296 def _session_default(self):
296 def _session_default(self):
297 u = unicode_type(uuid.uuid4())
297 u = unicode_type(uuid.uuid4())
298 self.bsession = u.encode('ascii')
298 self.bsession = u.encode('ascii')
299 return u
299 return u
300
300
301 def _session_changed(self, name, old, new):
301 def _session_changed(self, name, old, new):
302 self.bsession = self.session.encode('ascii')
302 self.bsession = self.session.encode('ascii')
303
303
304 # bsession is the session as bytes
304 # bsession is the session as bytes
305 bsession = CBytes(b'')
305 bsession = CBytes(b'')
306
306
307 username = Unicode(str_to_unicode(os.environ.get('USER', 'username')),
307 username = Unicode(str_to_unicode(os.environ.get('USER', 'username')),
308 help="""Username for the Session. Default is your system username.""",
308 help="""Username for the Session. Default is your system username.""",
309 config=True)
309 config=True)
310
310
311 metadata = Dict({}, config=True,
311 metadata = Dict({}, config=True,
312 help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")
312 help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")
313
313
314 # if 0, no adapting to do.
314 # if 0, no adapting to do.
315 adapt_version = Integer(0)
315 adapt_version = Integer(0)
316
316
317 # message signature related traits:
317 # message signature related traits:
318
318
319 key = CBytes(config=True,
319 key = CBytes(config=True,
320 help="""execution key, for signing messages.""")
320 help="""execution key, for signing messages.""")
321 def _key_default(self):
321 def _key_default(self):
322 return str_to_bytes(str(uuid.uuid4()))
322 return str_to_bytes(str(uuid.uuid4()))
323
323
324 def _key_changed(self):
324 def _key_changed(self):
325 self._new_auth()
325 self._new_auth()
326
326
327 signature_scheme = Unicode('hmac-sha256', config=True,
327 signature_scheme = Unicode('hmac-sha256', config=True,
328 help="""The digest scheme used to construct the message signatures.
328 help="""The digest scheme used to construct the message signatures.
329 Must have the form 'hmac-HASH'.""")
329 Must have the form 'hmac-HASH'.""")
330 def _signature_scheme_changed(self, name, old, new):
330 def _signature_scheme_changed(self, name, old, new):
331 if not new.startswith('hmac-'):
331 if not new.startswith('hmac-'):
332 raise TraitError("signature_scheme must start with 'hmac-', got %r" % new)
332 raise TraitError("signature_scheme must start with 'hmac-', got %r" % new)
333 hash_name = new.split('-', 1)[1]
333 hash_name = new.split('-', 1)[1]
334 try:
334 try:
335 self.digest_mod = getattr(hashlib, hash_name)
335 self.digest_mod = getattr(hashlib, hash_name)
336 except AttributeError:
336 except AttributeError:
337 raise TraitError("hashlib has no such attribute: %s" % hash_name)
337 raise TraitError("hashlib has no such attribute: %s" % hash_name)
338 self._new_auth()
338 self._new_auth()
339
339
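# Sketch (editor addition): how a 'hmac-HASH' scheme name resolves to a
# hashlib constructor, mirroring the trait handler above; the scheme string
# here is just an example value.
import hashlib

scheme = 'hmac-sha256'
hash_name = scheme.split('-', 1)[1]
digest_mod = getattr(hashlib, hash_name)   # hashlib.sha256
assert digest_mod().name == 'sha256'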
340 digest_mod = Any()
340 digest_mod = Any()
341 def _digest_mod_default(self):
341 def _digest_mod_default(self):
342 return hashlib.sha256
342 return hashlib.sha256
343
343
344 auth = Instance(hmac.HMAC, allow_none=True)
344 auth = Instance(hmac.HMAC, allow_none=True)
345
345
346 def _new_auth(self):
346 def _new_auth(self):
347 if self.key:
347 if self.key:
348 self.auth = hmac.HMAC(self.key, digestmod=self.digest_mod)
348 self.auth = hmac.HMAC(self.key, digestmod=self.digest_mod)
349 else:
349 else:
350 self.auth = None
350 self.auth = None
351
351
352 digest_history = Set()
352 digest_history = Set()
353 digest_history_size = Integer(2**16, config=True,
353 digest_history_size = Integer(2**16, config=True,
354 help="""The maximum number of digests to remember.
354 help="""The maximum number of digests to remember.
355
355
356 The digest history will be culled when it exceeds this value.
356 The digest history will be culled when it exceeds this value.
357 """
357 """
358 )
358 )
359
359
360 keyfile = Unicode('', config=True,
360 keyfile = Unicode('', config=True,
361 help="""path to file containing execution key.""")
361 help="""path to file containing execution key.""")
362 def _keyfile_changed(self, name, old, new):
362 def _keyfile_changed(self, name, old, new):
363 with open(new, 'rb') as f:
363 with open(new, 'rb') as f:
364 self.key = f.read().strip()
364 self.key = f.read().strip()
365
365
366 # for protecting against sends from forks
366 # for protecting against sends from forks
367 pid = Integer()
367 pid = Integer()
368
368
369 # serialization traits:
369 # serialization traits:
370
370
371 pack = Any(default_packer) # the actual packer function
371 pack = Any(default_packer) # the actual packer function
372 def _pack_changed(self, name, old, new):
372 def _pack_changed(self, name, old, new):
373 if not callable(new):
373 if not callable(new):
374 raise TypeError("packer must be callable, not %s"%type(new))
374 raise TypeError("packer must be callable, not %s"%type(new))
375
375
376 unpack = Any(default_unpacker) # the actual unpacker function
376 unpack = Any(default_unpacker) # the actual unpacker function
377 def _unpack_changed(self, name, old, new):
377 def _unpack_changed(self, name, old, new):
378 # the unpacker is not checked against pack - it only needs to be callable
378 # the unpacker is not checked against pack - it only needs to be callable
379 if not callable(new):
379 if not callable(new):
380 raise TypeError("unpacker must be callable, not %s"%type(new))
380 raise TypeError("unpacker must be callable, not %s"%type(new))
381
381
382 # thresholds:
382 # thresholds:
383 copy_threshold = Integer(2**16, config=True,
383 copy_threshold = Integer(2**16, config=True,
384 help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
384 help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
385 buffer_threshold = Integer(MAX_BYTES, config=True,
385 buffer_threshold = Integer(MAX_BYTES, config=True,
386 help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.")
386 help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.")
387 item_threshold = Integer(MAX_ITEMS, config=True,
387 item_threshold = Integer(MAX_ITEMS, config=True,
388 help="""The maximum number of items for a container to be introspected for custom serialization.
388 help="""The maximum number of items for a container to be introspected for custom serialization.
389 Containers larger than this are pickled outright.
389 Containers larger than this are pickled outright.
390 """
390 """
391 )
391 )
392
392
393
393
394 def __init__(self, **kwargs):
394 def __init__(self, **kwargs):
395 """create a Session object
395 """create a Session object
396
396
397 Parameters
397 Parameters
398 ----------
398 ----------
399
399
400 debug : bool
400 debug : bool
401 whether to trigger extra debugging statements
401 whether to trigger extra debugging statements
402 packer/unpacker : str : 'json', 'pickle' or import_string
402 packer/unpacker : str : 'json', 'pickle' or import_string
403 import strings for the functions used to serialize message parts. If just
403 import strings for the functions used to serialize message parts. If just
404 'json' or 'pickle', predefined JSON and pickle packers will be used.
404 'json' or 'pickle', predefined JSON and pickle packers will be used.
405 Otherwise, the full import string must be used.
405 Otherwise, the full import string must be used.
406
406
407 The functions must accept at least valid JSON input, and output
407 The functions must accept at least valid JSON input, and output
408 *bytes*.
408 *bytes*.
409
409
410 For example, to use msgpack:
410 For example, to use msgpack:
411 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
411 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
412 pack/unpack : callables
412 pack/unpack : callables
413 You can also set the pack/unpack callables for serialization
413 You can also set the pack/unpack callables for serialization
414 directly.
414 directly.
415 session : unicode (must be ascii)
415 session : unicode (must be ascii)
416 the ID of this Session object. The default is to generate a new
416 the ID of this Session object. The default is to generate a new
417 UUID.
417 UUID.
418 bsession : bytes
418 bsession : bytes
419 The session as bytes
419 The session as bytes
420 username : unicode
420 username : unicode
421 username added to message headers. The default is to ask the OS.
421 username added to message headers. The default is to ask the OS.
422 key : bytes
422 key : bytes
423 The key used to initialize an HMAC signature. If unset, messages
423 The key used to initialize an HMAC signature. If unset, messages
424 will not be signed or checked.
424 will not be signed or checked.
425 signature_scheme : str
425 signature_scheme : str
426 The message digest scheme. Currently must be of the form 'hmac-HASH',
426 The message digest scheme. Currently must be of the form 'hmac-HASH',
427 where 'HASH' is a hashing function available in Python's hashlib.
427 where 'HASH' is a hashing function available in Python's hashlib.
428 The default is 'hmac-sha256'.
428 The default is 'hmac-sha256'.
429 This is ignored if 'key' is empty.
429 This is ignored if 'key' is empty.
430 keyfile : filepath
430 keyfile : filepath
431 The file containing a key. If this is set, `key` will be
431 The file containing a key. If this is set, `key` will be
432 initialized to the contents of the file.
432 initialized to the contents of the file.
433 """
433 """
434 super(Session, self).__init__(**kwargs)
434 super(Session, self).__init__(**kwargs)
435 self._check_packers()
435 self._check_packers()
436 self.none = self.pack({})
436 self.none = self.pack({})
437 # access self.session to trigger self._session_default() if necessary, so bsession is defined:
437 # access self.session to trigger self._session_default() if necessary, so bsession is defined:
438 self.session
438 self.session
439 self.pid = os.getpid()
439 self.pid = os.getpid()
440 self._new_auth()
440 self._new_auth()
441
441
442 @property
442 @property
443 def msg_id(self):
443 def msg_id(self):
444 """always return new uuid"""
444 """always return new uuid"""
445 return str(uuid.uuid4())
445 return str(uuid.uuid4())
446
446
447 def _check_packers(self):
447 def _check_packers(self):
448 """check packers for datetime support."""
448 """check packers for datetime support."""
449 pack = self.pack
449 pack = self.pack
450 unpack = self.unpack
450 unpack = self.unpack
451
451
452 # check simple serialization
452 # check simple serialization
453 msg = dict(a=[1,'hi'])
453 msg = dict(a=[1,'hi'])
454 try:
454 try:
455 packed = pack(msg)
455 packed = pack(msg)
456 except Exception as e:
456 except Exception as e:
457 msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}"
457 msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}"
458 if self.packer == 'json':
458 if self.packer == 'json':
459 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
459 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
460 else:
460 else:
461 jsonmsg = ""
461 jsonmsg = ""
462 raise ValueError(
462 raise ValueError(
463 msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg)
463 msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg)
464 )
464 )
465
465
466 # ensure packed message is bytes
466 # ensure packed message is bytes
467 if not isinstance(packed, bytes):
467 if not isinstance(packed, bytes):
468 raise ValueError("message packed to %r, but bytes are required"%type(packed))
468 raise ValueError("message packed to %r, but bytes are required"%type(packed))
469
469
470 # check that unpack is pack's inverse
470 # check that unpack is pack's inverse
471 try:
471 try:
472 unpacked = unpack(packed)
472 unpacked = unpack(packed)
473 assert unpacked == msg
473 assert unpacked == msg
474 except Exception as e:
474 except Exception as e:
475 msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}"
475 msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}"
476 if self.packer == 'json':
476 if self.packer == 'json':
477 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
477 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
478 else:
478 else:
479 jsonmsg = ""
479 jsonmsg = ""
480 raise ValueError(
480 raise ValueError(
481 msg.format(packer=self.packer, unpacker=self.unpacker, e=e, jsonmsg=jsonmsg)
481 msg.format(packer=self.packer, unpacker=self.unpacker, e=e, jsonmsg=jsonmsg)
482 )
482 )
483
483
484 # check datetime support
484 # check datetime support
485 msg = dict(t=datetime.now())
485 msg = dict(t=datetime.now())
486 try:
486 try:
487 unpacked = unpack(pack(msg))
487 unpacked = unpack(pack(msg))
488 if isinstance(unpacked['t'], datetime):
488 if isinstance(unpacked['t'], datetime):
489 raise ValueError("Shouldn't deserialize to datetime")
489 raise ValueError("Shouldn't deserialize to datetime")
490 except Exception:
490 except Exception:
491 self.pack = lambda o: pack(squash_dates(o))
491 self.pack = lambda o: pack(squash_dates(o))
492 self.unpack = lambda s: unpack(s)
492 self.unpack = lambda s: unpack(s)
493
493
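# Sketch (editor addition): why the datetime check above exists. Plain
# stdlib json cannot serialize datetime objects, so pack is wrapped with
# squash_dates when the round trip fails.
import json
from datetime import datetime

try:
    json.dumps({'t': datetime.now()})
except TypeError:
    pass  # expected: stdlib json has no datetime support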
494 def msg_header(self, msg_type):
494 def msg_header(self, msg_type):
495 return msg_header(self.msg_id, msg_type, self.username, self.session)
495 return msg_header(self.msg_id, msg_type, self.username, self.session)
496
496
497 def msg(self, msg_type, content=None, parent=None, header=None, metadata=None):
497 def msg(self, msg_type, content=None, parent=None, header=None, metadata=None):
498 """Return the nested message dict.
498 """Return the nested message dict.
499
499
500 This format is different from what is sent over the wire. The
500 This format is different from what is sent over the wire. The
501 serialize/deserialize methods convert this nested message dict to the wire
501 serialize/deserialize methods convert this nested message dict to the wire
502 format, which is a list of message parts.
502 format, which is a list of message parts.
503 """
503 """
504 msg = {}
504 msg = {}
505 header = self.msg_header(msg_type) if header is None else header
505 header = self.msg_header(msg_type) if header is None else header
506 msg['header'] = header
506 msg['header'] = header
507 msg['msg_id'] = header['msg_id']
507 msg['msg_id'] = header['msg_id']
508 msg['msg_type'] = header['msg_type']
508 msg['msg_type'] = header['msg_type']
509 msg['parent_header'] = {} if parent is None else extract_header(parent)
509 msg['parent_header'] = {} if parent is None else extract_header(parent)
510 msg['content'] = {} if content is None else content
510 msg['content'] = {} if content is None else content
511 msg['metadata'] = self.metadata.copy()
511 msg['metadata'] = self.metadata.copy()
512 if metadata is not None:
512 if metadata is not None:
513 msg['metadata'].update(metadata)
513 msg['metadata'].update(metadata)
514 return msg
514 return msg
515
515
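# Sketch (editor addition): the approximate shape of the nested dict returned
# by msg(). All values are placeholders; the real header may carry additional
# fields such as a date and protocol version.
example_msg = {
    'header': {'msg_id': '<uuid>', 'msg_type': 'execute_request',
               'username': 'user', 'session': '<uuid>'},
    'msg_id': '<uuid>',            # mirrors header['msg_id']
    'msg_type': 'execute_request',
    'parent_header': {},
    'content': {'code': 'a=5', 'silent': False},
    'metadata': {},
}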
516 def sign(self, msg_list):
516 def sign(self, msg_list):
517 """Sign a message with HMAC digest. If no auth, return b''.
517 """Sign a message with HMAC digest. If no auth, return b''.
518
518
519 Parameters
519 Parameters
520 ----------
520 ----------
521 msg_list : list
521 msg_list : list
522 The [p_header,p_parent,p_content] part of the message list.
522 The [p_header,p_parent,p_content] part of the message list.
523 """
523 """
524 if self.auth is None:
524 if self.auth is None:
525 return b''
525 return b''
526 h = self.auth.copy()
526 h = self.auth.copy()
527 for m in msg_list:
527 for m in msg_list:
528 h.update(m)
528 h.update(m)
529 return str_to_bytes(h.hexdigest())
529 return str_to_bytes(h.hexdigest())
530
530
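# Sketch (editor addition) of the HMAC computation performed by sign(),
# assuming sha256 and a throwaway key; the frame contents are placeholders
# for p_header, p_parent, p_metadata and p_content.
import hashlib
import hmac

key = b'secret-key'
frames = [b'{"msg_id": "1"}', b'{}', b'{}', b'{}']
h = hmac.HMAC(key, digestmod=hashlib.sha256)
for frame in frames:
    h.update(frame)
signature = h.hexdigest().encode('ascii')   # what sign() returns via str_to_bytes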
531 def serialize(self, msg, ident=None):
531 def serialize(self, msg, ident=None):
532 """Serialize the message components to bytes.
532 """Serialize the message components to bytes.
533
533
534 This is roughly the inverse of deserialize. The serialize/deserialize
534 This is roughly the inverse of deserialize. The serialize/deserialize
535 methods work with full message lists, whereas pack/unpack work with
535 methods work with full message lists, whereas pack/unpack work with
536 the individual message parts in the message list.
536 the individual message parts in the message list.
537
537
538 Parameters
538 Parameters
539 ----------
539 ----------
540 msg : dict or Message
540 msg : dict or Message
541 The next message dict as returned by the self.msg method.
541 The next message dict as returned by the self.msg method.
542
542
543 Returns
543 Returns
544 -------
544 -------
545 msg_list : list
545 msg_list : list
546 The list of bytes objects to be sent with the format::
546 The list of bytes objects to be sent with the format::
547
547
548 [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent,
548 [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent,
549 p_metadata, p_content, buffer1, buffer2, ...]
549 p_metadata, p_content, buffer1, buffer2, ...]
550
550
551 In this list, the ``p_*`` entities are the packed or serialized
551 In this list, the ``p_*`` entities are the packed or serialized
552 versions, so if JSON is used, these are utf8 encoded JSON strings.
552 versions, so if JSON is used, these are utf8 encoded JSON strings.
553 """
553 """
554 content = msg.get('content', {})
554 content = msg.get('content', {})
555 if content is None:
555 if content is None:
556 content = self.none
556 content = self.none
557 elif isinstance(content, dict):
557 elif isinstance(content, dict):
558 content = self.pack(content)
558 content = self.pack(content)
559 elif isinstance(content, bytes):
559 elif isinstance(content, bytes):
560 # content is already packed, as in a relayed message
560 # content is already packed, as in a relayed message
561 pass
561 pass
562 elif isinstance(content, unicode_type):
562 elif isinstance(content, unicode_type):
563 # should be bytes, but JSON often spits out unicode
563 # should be bytes, but JSON often spits out unicode
564 content = content.encode('utf8')
564 content = content.encode('utf8')
565 else:
565 else:
566 raise TypeError("Content incorrect type: %s"%type(content))
566 raise TypeError("Content incorrect type: %s"%type(content))
567
567
568 real_message = [self.pack(msg['header']),
568 real_message = [self.pack(msg['header']),
569 self.pack(msg['parent_header']),
569 self.pack(msg['parent_header']),
570 self.pack(msg['metadata']),
570 self.pack(msg['metadata']),
571 content,
571 content,
572 ]
572 ]
573
573
574 to_send = []
574 to_send = []
575
575
576 if isinstance(ident, list):
576 if isinstance(ident, list):
577 # accept list of idents
577 # accept list of idents
578 to_send.extend(ident)
578 to_send.extend(ident)
579 elif ident is not None:
579 elif ident is not None:
580 to_send.append(ident)
580 to_send.append(ident)
581 to_send.append(DELIM)
581 to_send.append(DELIM)
582
582
583 signature = self.sign(real_message)
583 signature = self.sign(real_message)
584 to_send.append(signature)
584 to_send.append(signature)
585
585
586 to_send.extend(real_message)
586 to_send.extend(real_message)
587
587
588 return to_send
588 return to_send
589
589
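# Sketch (editor addition) of the frame order produced by serialize(), under
# the assumption that DELIM is the usual b'<IDS|MSG>' delimiter; all values
# are placeholders.
wire_frames = [
    b'client-identity',     # zero or more zmq routing idents
    b'<IDS|MSG>',           # DELIM separating idents from the signed frames
    b'badf00d...',          # HMAC hex digest of the four frames below
    b'{"msg_id": "..."}',   # p_header
    b'{}',                  # p_parent
    b'{}',                  # p_metadata
    b'{}',                  # p_content
]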
590 def send(self, stream, msg_or_type, content=None, parent=None, ident=None,
590 def send(self, stream, msg_or_type, content=None, parent=None, ident=None,
591 buffers=None, track=False, header=None, metadata=None):
591 buffers=None, track=False, header=None, metadata=None):
592 """Build and send a message via stream or socket.
592 """Build and send a message via stream or socket.
593
593
594 The message format used by this function internally is as follows:
594 The message format used by this function internally is as follows:
595
595
596 [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_metadata,p_content,
596 [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_metadata,p_content,
597 buffer1,buffer2,...]
597 buffer1,buffer2,...]
598
598
599 The serialize/deserialize methods convert the nested message dict into this
599 The serialize/deserialize methods convert the nested message dict into this
600 format.
600 format.
601
601
602 Parameters
602 Parameters
603 ----------
603 ----------
604
604
605 stream : zmq.Socket or ZMQStream
605 stream : zmq.Socket or ZMQStream
606 The socket-like object used to send the data.
606 The socket-like object used to send the data.
607 msg_or_type : str or Message/dict
607 msg_or_type : str or Message/dict
608 Normally, msg_or_type will be a msg_type unless a message is being
608 Normally, msg_or_type will be a msg_type unless a message is being
609 sent more than once. If a header is supplied, this can be set to
609 sent more than once. If a header is supplied, this can be set to
610 None and the msg_type will be pulled from the header.
610 None and the msg_type will be pulled from the header.
611
611
612 content : dict or None
612 content : dict or None
613 The content of the message (ignored if msg_or_type is a message).
613 The content of the message (ignored if msg_or_type is a message).
614 header : dict or None
614 header : dict or None
615 The header dict for the message (ignored if msg_or_type is a message).
615 The header dict for the message (ignored if msg_or_type is a message).
616 parent : Message or dict or None
616 parent : Message or dict or None
617 The parent or parent header describing the parent of this message
617 The parent or parent header describing the parent of this message
618 (ignored if msg_or_type is a message).
618 (ignored if msg_or_type is a message).
619 ident : bytes or list of bytes
619 ident : bytes or list of bytes
620 The zmq.IDENTITY routing path.
620 The zmq.IDENTITY routing path.
621 metadata : dict or None
621 metadata : dict or None
622 The metadata describing the message
622 The metadata describing the message
623 buffers : list or None
623 buffers : list or None
624 The already-serialized buffers to be appended to the message.
624 The already-serialized buffers to be appended to the message.
625 track : bool
625 track : bool
626 Whether to track. Only for use with Sockets, because ZMQStream
626 Whether to track. Only for use with Sockets, because ZMQStream
627 objects cannot track messages.
627 objects cannot track messages.
628
628
629
629
630 Returns
630 Returns
631 -------
631 -------
632 msg : dict
632 msg : dict
633 The constructed message.
633 The constructed message.
634 """
634 """
635 if not isinstance(stream, zmq.Socket):
635 if not isinstance(stream, zmq.Socket):
636 # ZMQStreams and dummy sockets do not support tracking.
636 # ZMQStreams and dummy sockets do not support tracking.
637 track = False
637 track = False
638
638
639 if isinstance(msg_or_type, (Message, dict)):
639 if isinstance(msg_or_type, (Message, dict)):
640 # We got a Message or message dict, not a msg_type so don't
640 # We got a Message or message dict, not a msg_type so don't
641 # build a new Message.
641 # build a new Message.
642 msg = msg_or_type
642 msg = msg_or_type
643 buffers = buffers or msg.get('buffers', [])
643 buffers = buffers or msg.get('buffers', [])
644 else:
644 else:
645 msg = self.msg(msg_or_type, content=content, parent=parent,
645 msg = self.msg(msg_or_type, content=content, parent=parent,
646 header=header, metadata=metadata)
646 header=header, metadata=metadata)
647 if not os.getpid() == self.pid:
647 if not os.getpid() == self.pid:
648 io.rprint("WARNING: attempted to send message from fork")
648 io.rprint("WARNING: attempted to send message from fork")
649 io.rprint(msg)
649 io.rprint(msg)
650 return
650 return
651 buffers = [] if buffers is None else buffers
651 buffers = [] if buffers is None else buffers
652 if self.adapt_version:
652 if self.adapt_version:
653 msg = adapt(msg, self.adapt_version)
653 msg = adapt(msg, self.adapt_version)
654 to_send = self.serialize(msg, ident)
654 to_send = self.serialize(msg, ident)
655 to_send.extend(buffers)
655 to_send.extend(buffers)
656 longest = max([ len(s) for s in to_send ])
656 longest = max([ len(s) for s in to_send ])
657 copy = (longest < self.copy_threshold)
657 copy = (longest < self.copy_threshold)
658
658
659 if buffers and track and not copy:
659 if buffers and track and not copy:
660 # only really track when we are doing zero-copy buffers
660 # only really track when we are doing zero-copy buffers
661 tracker = stream.send_multipart(to_send, copy=False, track=True)
661 tracker = stream.send_multipart(to_send, copy=False, track=True)
662 else:
662 else:
663 # use dummy tracker, which will be done immediately
663 # use dummy tracker, which will be done immediately
664 tracker = DONE
664 tracker = DONE
665 stream.send_multipart(to_send, copy=copy)
665 stream.send_multipart(to_send, copy=copy)
666
666
667 if self.debug:
667 if self.debug:
668 pprint.pprint(msg)
668 pprint.pprint(msg)
669 pprint.pprint(to_send)
669 pprint.pprint(to_send)
670 pprint.pprint(buffers)
670 pprint.pprint(buffers)
671
671
672 msg['tracker'] = tracker
672 msg['tracker'] = tracker
673
673
674 return msg
674 return msg
675
675
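# Hedged usage sketch (editor addition) for send(); assumes pyzmq and
# jupyter_client are importable. A PUB socket is used only because it never
# blocks when no peer is connected.
import zmq
from jupyter_client.session import Session

ctx = zmq.Context.instance()
sock = ctx.socket(zmq.PUB)
sock.bind('inproc://session-send-sketch')
session = Session(key=b'secret')
sent = session.send(sock, 'execute_request',
                    content={'code': 'a=5', 'silent': False})
assert sent['header']['msg_type'] == 'execute_request'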
676 def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
676 def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
677 """Send a raw message via ident path.
677 """Send a raw message via ident path.
678
678
679 This method is used to send an already serialized message.
679 This method is used to send an already serialized message.
680
680
681 Parameters
681 Parameters
682 ----------
682 ----------
683 stream : ZMQStream or Socket
683 stream : ZMQStream or Socket
684 The ZMQ stream or socket to use for sending the message.
684 The ZMQ stream or socket to use for sending the message.
685 msg_list : list
685 msg_list : list
686 The serialized list of messages to send. This only includes the
686 The serialized list of messages to send. This only includes the
687 [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of
687 [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of
688 the message.
688 the message.
689 ident : ident or list
689 ident : ident or list
690 A single ident or a list of idents to use in sending.
690 A single ident or a list of idents to use in sending.
691 """
691 """
692 to_send = []
692 to_send = []
693 if isinstance(ident, bytes):
693 if isinstance(ident, bytes):
694 ident = [ident]
694 ident = [ident]
695 if ident is not None:
695 if ident is not None:
696 to_send.extend(ident)
696 to_send.extend(ident)
697
697
698 to_send.append(DELIM)
698 to_send.append(DELIM)
699 to_send.append(self.sign(msg_list))
699 to_send.append(self.sign(msg_list))
700 to_send.extend(msg_list)
700 to_send.extend(msg_list)
701 stream.send_multipart(to_send, flags, copy=copy)
701 stream.send_multipart(to_send, flags, copy=copy)
702
702
703 def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
703 def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
704 """Receive and unpack a message.
704 """Receive and unpack a message.
705
705
706 Parameters
706 Parameters
707 ----------
707 ----------
708 socket : ZMQStream or Socket
708 socket : ZMQStream or Socket
709 The socket or stream to use in receiving.
709 The socket or stream to use in receiving.
710
710
711 Returns
711 Returns
712 -------
712 -------
713 [idents], msg
713 [idents], msg
714 [idents] is a list of idents and msg is a nested message dict of
714 [idents] is a list of idents and msg is a nested message dict of
715 the same format as self.msg returns.
715 the same format as self.msg returns.
716 """
716 """
717 if isinstance(socket, ZMQStream):
717 if isinstance(socket, ZMQStream):
718 socket = socket.socket
718 socket = socket.socket
719 try:
719 try:
720 msg_list = socket.recv_multipart(mode, copy=copy)
720 msg_list = socket.recv_multipart(mode, copy=copy)
721 except zmq.ZMQError as e:
721 except zmq.ZMQError as e:
722 if e.errno == zmq.EAGAIN:
722 if e.errno == zmq.EAGAIN:
723 # We can convert EAGAIN to None as we know in this case
723 # We can convert EAGAIN to None as we know in this case
724 # recv_multipart won't return None.
724 # recv_multipart won't return None.
725 return None,None
725 return None,None
726 else:
726 else:
727 raise
727 raise
728 # split multipart message into identity list and message dict
728 # split multipart message into identity list and message dict
729 # invalid large messages can cause very expensive string comparisons
729 # invalid large messages can cause very expensive string comparisons
730 idents, msg_list = self.feed_identities(msg_list, copy)
730 idents, msg_list = self.feed_identities(msg_list, copy)
731 try:
731 try:
732 return idents, self.deserialize(msg_list, content=content, copy=copy)
732 return idents, self.deserialize(msg_list, content=content, copy=copy)
733 except Exception as e:
733 except Exception as e:
734 # TODO: handle it
734 # TODO: handle it
735 raise e
735 raise e
736
736
737 def feed_identities(self, msg_list, copy=True):
737 def feed_identities(self, msg_list, copy=True):
738 """Split the identities from the rest of the message.
738 """Split the identities from the rest of the message.
739
739
740 Feed until DELIM is reached, then return the prefix as idents and
740 Feed until DELIM is reached, then return the prefix as idents and
741 remainder as msg_list. This is easily broken by setting an IDENT to DELIM,
741 remainder as msg_list. This is easily broken by setting an IDENT to DELIM,
742 but that would be silly.
742 but that would be silly.
743
743
744 Parameters
744 Parameters
745 ----------
745 ----------
746 msg_list : a list of Message or bytes objects
746 msg_list : a list of Message or bytes objects
747 The message to be split.
747 The message to be split.
748 copy : bool
748 copy : bool
749 Flag determining whether msg_list holds bytes (True) or zmq.Message objects (False)
749 Flag determining whether msg_list holds bytes (True) or zmq.Message objects (False)
750
750
751 Returns
751 Returns
752 -------
752 -------
753 (idents, msg_list) : two lists
753 (idents, msg_list) : two lists
754 idents will always be a list of bytes, each of which is a ZMQ
754 idents will always be a list of bytes, each of which is a ZMQ
755 identity. msg_list will be a list of bytes or zmq.Messages of the
755 identity. msg_list will be a list of bytes or zmq.Messages of the
756 form [HMAC,p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] and
756 form [HMAC,p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] and
757 should be unpackable/deserializable via self.deserialize at this
757 should be unpackable/deserializable via self.deserialize at this
758 point.
758 point.
759 """
759 """
760 if copy:
760 if copy:
761 idx = msg_list.index(DELIM)
761 idx = msg_list.index(DELIM)
762 return msg_list[:idx], msg_list[idx+1:]
762 return msg_list[:idx], msg_list[idx+1:]
763 else:
763 else:
764 failed = True
764 failed = True
765 for idx,m in enumerate(msg_list):
765 for idx,m in enumerate(msg_list):
766 if m.bytes == DELIM:
766 if m.bytes == DELIM:
767 failed = False
767 failed = False
768 break
768 break
769 if failed:
769 if failed:
770 raise ValueError("DELIM not in msg_list")
770 raise ValueError("DELIM not in msg_list")
771 idents, msg_list = msg_list[:idx], msg_list[idx+1:]
771 idents, msg_list = msg_list[:idx], msg_list[idx+1:]
772 return [m.bytes for m in idents], msg_list
772 return [m.bytes for m in idents], msg_list
773
773
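# Sketch (editor addition) of the copy=True branch above, assuming
# DELIM == b'<IDS|MSG>'; frame values are placeholders.
msg_list = [b'ident-1', b'ident-2', b'<IDS|MSG>', b'sig', b'h', b'p', b'm', b'c']
idx = msg_list.index(b'<IDS|MSG>')
idents, frames = msg_list[:idx], msg_list[idx + 1:]
assert idents == [b'ident-1', b'ident-2']
assert frames[0] == b'sig'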
774 def _add_digest(self, signature):
774 def _add_digest(self, signature):
775 """add a digest to history to protect against replay attacks"""
775 """add a digest to history to protect against replay attacks"""
776 if self.digest_history_size == 0:
776 if self.digest_history_size == 0:
777 # no history, never add digests
777 # no history, never add digests
778 return
778 return
779
779
780 self.digest_history.add(signature)
780 self.digest_history.add(signature)
781 if len(self.digest_history) > self.digest_history_size:
781 if len(self.digest_history) > self.digest_history_size:
782 # threshold reached, cull 10%
782 # threshold reached, cull 10%
783 self._cull_digest_history()
783 self._cull_digest_history()
784
784
785 def _cull_digest_history(self):
785 def _cull_digest_history(self):
786 """cull the digest history
786 """cull the digest history
787
787
788 Removes a randomly selected 10% of the digest history
788 Removes a randomly selected 10% of the digest history
789 """
789 """
790 current = len(self.digest_history)
790 current = len(self.digest_history)
791 n_to_cull = max(int(current // 10), current - self.digest_history_size)
791 n_to_cull = max(int(current // 10), current - self.digest_history_size)
792 if n_to_cull >= current:
792 if n_to_cull >= current:
793 self.digest_history = set()
793 self.digest_history = set()
794 return
794 return
795 to_cull = random.sample(self.digest_history, n_to_cull)
795 to_cull = random.sample(self.digest_history, n_to_cull)
796 self.digest_history.difference_update(to_cull)
796 self.digest_history.difference_update(to_cull)
797
797
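# Sketch (editor addition) of the 10% cull on a plain set of fake digests.
import random

history = {('digest-%04d' % i).encode() for i in range(1000)}
n_to_cull = len(history) // 10
to_cull = random.sample(sorted(history), n_to_cull)   # sample needs a sequence on Python 3.11+
history.difference_update(to_cull)
assert len(history) == 900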
798 def deserialize(self, msg_list, content=True, copy=True):
798 def deserialize(self, msg_list, content=True, copy=True):
799 """Unserialize a msg_list to a nested message dict.
799 """Unserialize a msg_list to a nested message dict.
800
800
801 This is roughly the inverse of serialize. The serialize/deserialize
801 This is roughly the inverse of serialize. The serialize/deserialize
802 methods work with full message lists, whereas pack/unpack work with
802 methods work with full message lists, whereas pack/unpack work with
803 the individual message parts in the message list.
803 the individual message parts in the message list.
804
804
805 Parameters
805 Parameters
806 ----------
806 ----------
807 msg_list : list of bytes or Message objects
807 msg_list : list of bytes or Message objects
808 The list of message parts of the form [HMAC,p_header,p_parent,
808 The list of message parts of the form [HMAC,p_header,p_parent,
809 p_metadata,p_content,buffer1,buffer2,...].
809 p_metadata,p_content,buffer1,buffer2,...].
810 content : bool (True)
810 content : bool (True)
811 Whether to unpack the content dict (True), or leave it packed
811 Whether to unpack the content dict (True), or leave it packed
812 (False).
812 (False).
813 copy : bool (True)
813 copy : bool (True)
814 Whether msg_list contains bytes (True) or the non-copying Message
814 Whether msg_list contains bytes (True) or the non-copying Message
815 objects in each place (False).
815 objects in each place (False).
816
816
817 Returns
817 Returns
818 -------
818 -------
819 msg : dict
819 msg : dict
820 The nested message dict with top-level keys [header, parent_header,
820 The nested message dict with top-level keys [header, parent_header,
821 metadata, content, buffers]. The buffers are returned as memoryviews.
821 metadata, content, buffers]. The buffers are returned as memoryviews.
822 """
822 """
823 minlen = 5
823 minlen = 5
824 message = {}
824 message = {}
825 if not copy:
825 if not copy:
826 # pyzmq didn't copy the first parts of the message, so we'll do it
826 # pyzmq didn't copy the first parts of the message, so we'll do it
827 for i in range(minlen):
827 for i in range(minlen):
828 msg_list[i] = msg_list[i].bytes
828 msg_list[i] = msg_list[i].bytes
829 if self.auth is not None:
829 if self.auth is not None:
830 signature = msg_list[0]
830 signature = msg_list[0]
831 if not signature:
831 if not signature:
832 raise ValueError("Unsigned Message")
832 raise ValueError("Unsigned Message")
833 if signature in self.digest_history:
833 if signature in self.digest_history:
834 raise ValueError("Duplicate Signature: %r" % signature)
834 raise ValueError("Duplicate Signature: %r" % signature)
835 self._add_digest(signature)
835 self._add_digest(signature)
836 check = self.sign(msg_list[1:5])
836 check = self.sign(msg_list[1:5])
837 if not compare_digest(signature, check):
837 if not compare_digest(signature, check):
838 raise ValueError("Invalid Signature: %r" % signature)
838 raise ValueError("Invalid Signature: %r" % signature)
839 if not len(msg_list) >= minlen:
839 if not len(msg_list) >= minlen:
840 raise TypeError("malformed message, must have at least %i elements"%minlen)
840 raise TypeError("malformed message, must have at least %i elements"%minlen)
841 header = self.unpack(msg_list[1])
841 header = self.unpack(msg_list[1])
842 message['header'] = extract_dates(header)
842 message['header'] = extract_dates(header)
843 message['msg_id'] = header['msg_id']
843 message['msg_id'] = header['msg_id']
844 message['msg_type'] = header['msg_type']
844 message['msg_type'] = header['msg_type']
845 message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
845 message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
846 message['metadata'] = self.unpack(msg_list[3])
846 message['metadata'] = self.unpack(msg_list[3])
847 if content:
847 if content:
848 message['content'] = self.unpack(msg_list[4])
848 message['content'] = self.unpack(msg_list[4])
849 else:
849 else:
850 message['content'] = msg_list[4]
850 message['content'] = msg_list[4]
851 buffers = [memoryview(b) for b in msg_list[5:]]
851 buffers = [memoryview(b) for b in msg_list[5:]]
852 if buffers and buffers[0].shape is None:
852 if buffers and buffers[0].shape is None:
853 # force copy to workaround pyzmq #646
853 # force copy to workaround pyzmq #646
854 buffers = [memoryview(b.bytes) for b in msg_list[5:]]
854 buffers = [memoryview(b.bytes) for b in msg_list[5:]]
855 message['buffers'] = buffers
855 message['buffers'] = buffers
856 # adapt to the current version
856 # adapt to the current version
857 return adapt(message)
857 return adapt(message)
858
858
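# Hedged round-trip sketch (editor addition): serialize a message and feed it
# back through deserialize; assumes jupyter_client is importable, as in the
# tests further down this diff.
from jupyter_client.session import Session

s = Session(key=b'secret')
msg = s.msg('kernel_info_request', content={})
frames = s.serialize(msg)            # [DELIM, HMAC, p_header, p_parent, p_metadata, p_content]
idents, wire = s.feed_identities(frames)
restored = s.deserialize(wire)
assert restored['header']['msg_id'] == msg['header']['msg_id']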
859 def unserialize(self, *args, **kwargs):
859 def unserialize(self, *args, **kwargs):
860 warnings.warn(
860 warnings.warn(
861 "Session.unserialize is deprecated. Use Session.deserialize.",
861 "Session.unserialize is deprecated. Use Session.deserialize.",
862 DeprecationWarning,
862 DeprecationWarning,
863 )
863 )
864 return self.deserialize(*args, **kwargs)
864 return self.deserialize(*args, **kwargs)
865
865
866
866
867 def test_msg2obj():
867 def test_msg2obj():
868 am = dict(x=1)
868 am = dict(x=1)
869 ao = Message(am)
869 ao = Message(am)
870 assert ao.x == am['x']
870 assert ao.x == am['x']
871
871
872 am['y'] = dict(z=1)
872 am['y'] = dict(z=1)
873 ao = Message(am)
873 ao = Message(am)
874 assert ao.y.z == am['y']['z']
874 assert ao.y.z == am['y']['z']
875
875
876 k1, k2 = 'y', 'z'
876 k1, k2 = 'y', 'z'
877 assert ao[k1][k2] == am[k1][k2]
877 assert ao[k1][k2] == am[k1][k2]
878
878
879 am2 = dict(ao)
879 am2 = dict(ao)
880 assert am['x'] == am2['x']
880 assert am['x'] == am2['x']
881 assert am['y']['z'] == am2['y']['z']
881 assert am['y']['z'] == am2['y']['z']
@@ -1,375 +1,375 b''
1 """Tests for adapting IPython msg spec versions"""
1 """Tests for adapting IPython msg spec versions"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import copy
6 import copy
7 import json
7 import json
8 from unittest import TestCase
8 from unittest import TestCase
9 import nose.tools as nt
9 import nose.tools as nt
10
10
11 from IPython.kernel.adapter import adapt, V4toV5, V5toV4, code_to_line
11 from jupyter_client.adapter import adapt, V4toV5, V5toV4, code_to_line
12 from IPython.kernel.zmq.session import Session
12 from jupyter_client.session import Session
13
13
14
14
15 def test_default_version():
15 def test_default_version():
16 s = Session()
16 s = Session()
17 msg = s.msg("msg_type")
17 msg = s.msg("msg_type")
18 msg['header'].pop('version')
18 msg['header'].pop('version')
19 original = copy.deepcopy(msg)
19 original = copy.deepcopy(msg)
20 adapted = adapt(original)
20 adapted = adapt(original)
21 nt.assert_equal(adapted['header']['version'], V4toV5.version)
21 nt.assert_equal(adapted['header']['version'], V4toV5.version)
22
22
23 def test_code_to_line_no_code():
23 def test_code_to_line_no_code():
24 line, pos = code_to_line("", 0)
24 line, pos = code_to_line("", 0)
25 nt.assert_equal(line, "")
25 nt.assert_equal(line, "")
26 nt.assert_equal(pos, 0)
26 nt.assert_equal(pos, 0)
27
27
28 class AdapterTest(TestCase):
28 class AdapterTest(TestCase):
29
29
30 def setUp(self):
30 def setUp(self):
31 self.session = Session()
31 self.session = Session()
32
32
33 def adapt(self, msg, version=None):
33 def adapt(self, msg, version=None):
34 original = copy.deepcopy(msg)
34 original = copy.deepcopy(msg)
35 adapted = adapt(msg, version or self.to_version)
35 adapted = adapt(msg, version or self.to_version)
36 return original, adapted
36 return original, adapted
37
37
38 def check_header(self, msg):
38 def check_header(self, msg):
39 pass
39 pass
40
40
41
41
42 class V4toV5TestCase(AdapterTest):
42 class V4toV5TestCase(AdapterTest):
43 from_version = 4
43 from_version = 4
44 to_version = 5
44 to_version = 5
45
45
46 def msg(self, msg_type, content):
46 def msg(self, msg_type, content):
47 """Create a v4 msg (same as v5, minus version header)"""
47 """Create a v4 msg (same as v5, minus version header)"""
48 msg = self.session.msg(msg_type, content)
48 msg = self.session.msg(msg_type, content)
49 msg['header'].pop('version')
49 msg['header'].pop('version')
50 return msg
50 return msg
51
51
52 def test_same_version(self):
52 def test_same_version(self):
53 msg = self.msg("execute_result",
53 msg = self.msg("execute_result",
54 content={'status' : 'ok'}
54 content={'status' : 'ok'}
55 )
55 )
56 original, adapted = self.adapt(msg, self.from_version)
56 original, adapted = self.adapt(msg, self.from_version)
57
57
58 self.assertEqual(original, adapted)
58 self.assertEqual(original, adapted)
59
59
60 def test_no_adapt(self):
60 def test_no_adapt(self):
61 msg = self.msg("input_reply", {'value' : 'some text'})
61 msg = self.msg("input_reply", {'value' : 'some text'})
62 v4, v5 = self.adapt(msg)
62 v4, v5 = self.adapt(msg)
63 self.assertEqual(v5['header']['version'], V4toV5.version)
63 self.assertEqual(v5['header']['version'], V4toV5.version)
64 v5['header'].pop('version')
64 v5['header'].pop('version')
65 self.assertEqual(v4, v5)
65 self.assertEqual(v4, v5)
66
66
67 def test_rename_type(self):
67 def test_rename_type(self):
68 for v5_type, v4_type in [
68 for v5_type, v4_type in [
69 ('execute_result', 'pyout'),
69 ('execute_result', 'pyout'),
70 ('execute_input', 'pyin'),
70 ('execute_input', 'pyin'),
71 ('error', 'pyerr'),
71 ('error', 'pyerr'),
72 ]:
72 ]:
73 msg = self.msg(v4_type, {'key' : 'value'})
73 msg = self.msg(v4_type, {'key' : 'value'})
74 v4, v5 = self.adapt(msg)
74 v4, v5 = self.adapt(msg)
75 self.assertEqual(v5['header']['version'], V4toV5.version)
75 self.assertEqual(v5['header']['version'], V4toV5.version)
76 self.assertEqual(v5['header']['msg_type'], v5_type)
76 self.assertEqual(v5['header']['msg_type'], v5_type)
77 self.assertEqual(v4['content'], v5['content'])
77 self.assertEqual(v4['content'], v5['content'])
78
78
79 def test_execute_request(self):
79 def test_execute_request(self):
80 msg = self.msg("execute_request", {
80 msg = self.msg("execute_request", {
81 'code' : 'a=5',
81 'code' : 'a=5',
82 'silent' : False,
82 'silent' : False,
83 'user_expressions' : {'a' : 'apple'},
83 'user_expressions' : {'a' : 'apple'},
84 'user_variables' : ['b'],
84 'user_variables' : ['b'],
85 })
85 })
86 v4, v5 = self.adapt(msg)
86 v4, v5 = self.adapt(msg)
87 self.assertEqual(v4['header']['msg_type'], v5['header']['msg_type'])
87 self.assertEqual(v4['header']['msg_type'], v5['header']['msg_type'])
88 v4c = v4['content']
88 v4c = v4['content']
89 v5c = v5['content']
89 v5c = v5['content']
90 self.assertEqual(v5c['user_expressions'], {'a' : 'apple', 'b': 'b'})
90 self.assertEqual(v5c['user_expressions'], {'a' : 'apple', 'b': 'b'})
91 self.assertNotIn('user_variables', v5c)
91 self.assertNotIn('user_variables', v5c)
92 self.assertEqual(v5c['code'], v4c['code'])
92 self.assertEqual(v5c['code'], v4c['code'])
93
93
94 def test_execute_reply(self):
94 def test_execute_reply(self):
95 msg = self.msg("execute_reply", {
95 msg = self.msg("execute_reply", {
96 'status': 'ok',
96 'status': 'ok',
97 'execution_count': 7,
97 'execution_count': 7,
98 'user_variables': {'a': 1},
98 'user_variables': {'a': 1},
99 'user_expressions': {'a+a': 2},
99 'user_expressions': {'a+a': 2},
100 'payload': [{'source':'page', 'text':'blah'}]
100 'payload': [{'source':'page', 'text':'blah'}]
101 })
101 })
102 v4, v5 = self.adapt(msg)
102 v4, v5 = self.adapt(msg)
103 v5c = v5['content']
103 v5c = v5['content']
104 self.assertNotIn('user_variables', v5c)
104 self.assertNotIn('user_variables', v5c)
105 self.assertEqual(v5c['user_expressions'], {'a': 1, 'a+a': 2})
105 self.assertEqual(v5c['user_expressions'], {'a': 1, 'a+a': 2})
106 self.assertEqual(v5c['payload'], [{'source': 'page',
106 self.assertEqual(v5c['payload'], [{'source': 'page',
107 'data': {'text/plain': 'blah'}}
107 'data': {'text/plain': 'blah'}}
108 ])
108 ])
109
109
110 def test_complete_request(self):
110 def test_complete_request(self):
111 msg = self.msg("complete_request", {
111 msg = self.msg("complete_request", {
112 'text' : 'a.is',
112 'text' : 'a.is',
113 'line' : 'foo = a.is',
113 'line' : 'foo = a.is',
114 'block' : None,
114 'block' : None,
115 'cursor_pos' : 10,
115 'cursor_pos' : 10,
116 })
116 })
117 v4, v5 = self.adapt(msg)
117 v4, v5 = self.adapt(msg)
118 v4c = v4['content']
118 v4c = v4['content']
119 v5c = v5['content']
119 v5c = v5['content']
120 for key in ('text', 'line', 'block'):
120 for key in ('text', 'line', 'block'):
121 self.assertNotIn(key, v5c)
121 self.assertNotIn(key, v5c)
122 self.assertEqual(v5c['cursor_pos'], v4c['cursor_pos'])
122 self.assertEqual(v5c['cursor_pos'], v4c['cursor_pos'])
123 self.assertEqual(v5c['code'], v4c['line'])
123 self.assertEqual(v5c['code'], v4c['line'])
124
124
125 def test_complete_reply(self):
125 def test_complete_reply(self):
126 msg = self.msg("complete_reply", {
126 msg = self.msg("complete_reply", {
127 'matched_text' : 'a.is',
127 'matched_text' : 'a.is',
128 'matches' : ['a.isalnum',
128 'matches' : ['a.isalnum',
129 'a.isalpha',
129 'a.isalpha',
130 'a.isdigit',
130 'a.isdigit',
131 'a.islower',
131 'a.islower',
132 ],
132 ],
133 })
133 })
134 v4, v5 = self.adapt(msg)
134 v4, v5 = self.adapt(msg)
135 v4c = v4['content']
135 v4c = v4['content']
136 v5c = v5['content']
136 v5c = v5['content']
137
137
138 self.assertEqual(v5c['matches'], v4c['matches'])
138 self.assertEqual(v5c['matches'], v4c['matches'])
139 self.assertEqual(v5c['metadata'], {})
139 self.assertEqual(v5c['metadata'], {})
140 self.assertEqual(v5c['cursor_start'], -4)
140 self.assertEqual(v5c['cursor_start'], -4)
141 self.assertEqual(v5c['cursor_end'], None)
141 self.assertEqual(v5c['cursor_end'], None)
142
142
143 def test_object_info_request(self):
143 def test_object_info_request(self):
144 msg = self.msg("object_info_request", {
144 msg = self.msg("object_info_request", {
145 'oname' : 'foo',
145 'oname' : 'foo',
146 'detail_level' : 1,
146 'detail_level' : 1,
147 })
147 })
148 v4, v5 = self.adapt(msg)
148 v4, v5 = self.adapt(msg)
149 self.assertEqual(v5['header']['msg_type'], 'inspect_request')
149 self.assertEqual(v5['header']['msg_type'], 'inspect_request')
150 v4c = v4['content']
150 v4c = v4['content']
151 v5c = v5['content']
151 v5c = v5['content']
152 self.assertEqual(v5c['code'], v4c['oname'])
152 self.assertEqual(v5c['code'], v4c['oname'])
153 self.assertEqual(v5c['cursor_pos'], len(v4c['oname']))
153 self.assertEqual(v5c['cursor_pos'], len(v4c['oname']))
154 self.assertEqual(v5c['detail_level'], v4c['detail_level'])
154 self.assertEqual(v5c['detail_level'], v4c['detail_level'])
155
155
156 def test_object_info_reply(self):
156 def test_object_info_reply(self):
157 msg = self.msg("object_info_reply", {
157 msg = self.msg("object_info_reply", {
158 'oname' : 'foo',
158 'oname' : 'foo',
159 'found' : True,
159 'found' : True,
160 'status' : 'ok',
160 'status' : 'ok',
161 'definition' : 'foo(a=5)',
161 'definition' : 'foo(a=5)',
162 'docstring' : "the docstring",
162 'docstring' : "the docstring",
163 })
163 })
164 v4, v5 = self.adapt(msg)
164 v4, v5 = self.adapt(msg)
165 self.assertEqual(v5['header']['msg_type'], 'inspect_reply')
165 self.assertEqual(v5['header']['msg_type'], 'inspect_reply')
166 v4c = v4['content']
166 v4c = v4['content']
167 v5c = v5['content']
167 v5c = v5['content']
168 self.assertEqual(sorted(v5c), [ 'data', 'found', 'metadata', 'name', 'status'])
168 self.assertEqual(sorted(v5c), [ 'data', 'found', 'metadata', 'name', 'status'])
169 text = v5c['data']['text/plain']
169 text = v5c['data']['text/plain']
170 self.assertEqual(text, '\n'.join([v4c['definition'], v4c['docstring']]))
170 self.assertEqual(text, '\n'.join([v4c['definition'], v4c['docstring']]))
171
171
172 def test_kernel_info_reply(self):
172 def test_kernel_info_reply(self):
173 msg = self.msg("kernel_info_reply", {
173 msg = self.msg("kernel_info_reply", {
174 'language': 'python',
174 'language': 'python',
175 'language_version': [2,8,0],
175 'language_version': [2,8,0],
176 'ipython_version': [1,2,3],
176 'ipython_version': [1,2,3],
177 })
177 })
178 v4, v5 = self.adapt(msg)
178 v4, v5 = self.adapt(msg)
179 v4c = v4['content']
179 v4c = v4['content']
180 v5c = v5['content']
180 v5c = v5['content']
181 self.assertEqual(v5c, {
181 self.assertEqual(v5c, {
182 'protocol_version': '4.1',
182 'protocol_version': '4.1',
183 'implementation': 'ipython',
183 'implementation': 'ipython',
184 'implementation_version': '1.2.3',
184 'implementation_version': '1.2.3',
185 'language_info': {
185 'language_info': {
186 'name': 'python',
186 'name': 'python',
187 'version': '2.8.0',
187 'version': '2.8.0',
188 },
188 },
189 'banner' : '',
189 'banner' : '',
190 })
190 })
191
191
192 # iopub channel
192 # iopub channel
193
193
194 def test_display_data(self):
194 def test_display_data(self):
195 jsondata = dict(a=5)
195 jsondata = dict(a=5)
196 msg = self.msg("display_data", {
196 msg = self.msg("display_data", {
197 'data' : {
197 'data' : {
198 'text/plain' : 'some text',
198 'text/plain' : 'some text',
199 'application/json' : json.dumps(jsondata)
199 'application/json' : json.dumps(jsondata)
200 },
200 },
201 'metadata' : {'text/plain' : { 'key' : 'value' }},
201 'metadata' : {'text/plain' : { 'key' : 'value' }},
202 })
202 })
203 v4, v5 = self.adapt(msg)
203 v4, v5 = self.adapt(msg)
204 v4c = v4['content']
204 v4c = v4['content']
205 v5c = v5['content']
205 v5c = v5['content']
206 self.assertEqual(v5c['metadata'], v4c['metadata'])
206 self.assertEqual(v5c['metadata'], v4c['metadata'])
207 self.assertEqual(v5c['data']['text/plain'], v4c['data']['text/plain'])
207 self.assertEqual(v5c['data']['text/plain'], v4c['data']['text/plain'])
208 self.assertEqual(v5c['data']['application/json'], jsondata)
208 self.assertEqual(v5c['data']['application/json'], jsondata)
209
209
210 # stdin channel
210 # stdin channel
211
211
212 def test_input_request(self):
212 def test_input_request(self):
213 msg = self.msg('input_request', {'prompt': "$>"})
213 msg = self.msg('input_request', {'prompt': "$>"})
214 v4, v5 = self.adapt(msg)
214 v4, v5 = self.adapt(msg)
215 self.assertEqual(v5['content']['prompt'], v4['content']['prompt'])
215 self.assertEqual(v5['content']['prompt'], v4['content']['prompt'])
216 self.assertFalse(v5['content']['password'])
216 self.assertFalse(v5['content']['password'])
217
217
218
218
219 class V5toV4TestCase(AdapterTest):
219 class V5toV4TestCase(AdapterTest):
220 from_version = 5
220 from_version = 5
221 to_version = 4
221 to_version = 4
222
222
223 def msg(self, msg_type, content):
223 def msg(self, msg_type, content):
224 return self.session.msg(msg_type, content)
224 return self.session.msg(msg_type, content)
225
225
226 def test_same_version(self):
226 def test_same_version(self):
227 msg = self.msg("execute_result",
227 msg = self.msg("execute_result",
228 content={'status' : 'ok'}
228 content={'status' : 'ok'}
229 )
229 )
230 original, adapted = self.adapt(msg, self.from_version)
230 original, adapted = self.adapt(msg, self.from_version)
231
231
232 self.assertEqual(original, adapted)
232 self.assertEqual(original, adapted)
233
233
234 def test_no_adapt(self):
234 def test_no_adapt(self):
235 msg = self.msg("input_reply", {'value' : 'some text'})
235 msg = self.msg("input_reply", {'value' : 'some text'})
236 v5, v4 = self.adapt(msg)
236 v5, v4 = self.adapt(msg)
237 self.assertNotIn('version', v4['header'])
237 self.assertNotIn('version', v4['header'])
238 v5['header'].pop('version')
238 v5['header'].pop('version')
239 self.assertEqual(v4, v5)
239 self.assertEqual(v4, v5)
240
240
241 def test_rename_type(self):
241 def test_rename_type(self):
242 for v5_type, v4_type in [
242 for v5_type, v4_type in [
243 ('execute_result', 'pyout'),
243 ('execute_result', 'pyout'),
244 ('execute_input', 'pyin'),
244 ('execute_input', 'pyin'),
245 ('error', 'pyerr'),
245 ('error', 'pyerr'),
246 ]:
246 ]:
247 msg = self.msg(v5_type, {'key' : 'value'})
247 msg = self.msg(v5_type, {'key' : 'value'})
248 v5, v4 = self.adapt(msg)
248 v5, v4 = self.adapt(msg)
249 self.assertEqual(v4['header']['msg_type'], v4_type)
249 self.assertEqual(v4['header']['msg_type'], v4_type)
250 nt.assert_not_in('version', v4['header'])
250 nt.assert_not_in('version', v4['header'])
251 self.assertEqual(v4['content'], v5['content'])
251 self.assertEqual(v4['content'], v5['content'])
252
252
253 def test_execute_request(self):
253 def test_execute_request(self):
254 msg = self.msg("execute_request", {
254 msg = self.msg("execute_request", {
255 'code' : 'a=5',
255 'code' : 'a=5',
256 'silent' : False,
256 'silent' : False,
257 'user_expressions' : {'a' : 'apple'},
257 'user_expressions' : {'a' : 'apple'},
258 })
258 })
259 v5, v4 = self.adapt(msg)
259 v5, v4 = self.adapt(msg)
260 self.assertEqual(v4['header']['msg_type'], v5['header']['msg_type'])
260 self.assertEqual(v4['header']['msg_type'], v5['header']['msg_type'])
261 v4c = v4['content']
261 v4c = v4['content']
262 v5c = v5['content']
262 v5c = v5['content']
263 self.assertEqual(v4c['user_variables'], [])
263 self.assertEqual(v4c['user_variables'], [])
264 self.assertEqual(v5c['code'], v4c['code'])
264 self.assertEqual(v5c['code'], v4c['code'])
265
265
266 def test_complete_request(self):
266 def test_complete_request(self):
267 msg = self.msg("complete_request", {
267 msg = self.msg("complete_request", {
268 'code' : 'def foo():\n'
268 'code' : 'def foo():\n'
269 ' a.is\n'
269 ' a.is\n'
270 'foo()',
270 'foo()',
271 'cursor_pos': 19,
271 'cursor_pos': 19,
272 })
272 })
273 v5, v4 = self.adapt(msg)
273 v5, v4 = self.adapt(msg)
274 v4c = v4['content']
274 v4c = v4['content']
275 v5c = v5['content']
275 v5c = v5['content']
276 self.assertNotIn('code', v4c)
276 self.assertNotIn('code', v4c)
277 self.assertEqual(v4c['line'], v5c['code'].splitlines(True)[1])
277 self.assertEqual(v4c['line'], v5c['code'].splitlines(True)[1])
278 self.assertEqual(v4c['cursor_pos'], 8)
278 self.assertEqual(v4c['cursor_pos'], 8)
279 self.assertEqual(v4c['text'], '')
279 self.assertEqual(v4c['text'], '')
280 self.assertEqual(v4c['block'], None)
280 self.assertEqual(v4c['block'], None)
281
281
282 def test_complete_reply(self):
282 def test_complete_reply(self):
283 msg = self.msg("complete_reply", {
283 msg = self.msg("complete_reply", {
284 'cursor_start' : 10,
284 'cursor_start' : 10,
285 'cursor_end' : 14,
285 'cursor_end' : 14,
286 'matches' : ['a.isalnum',
286 'matches' : ['a.isalnum',
287 'a.isalpha',
287 'a.isalpha',
288 'a.isdigit',
288 'a.isdigit',
289 'a.islower',
289 'a.islower',
290 ],
290 ],
291 'metadata' : {},
291 'metadata' : {},
292 })
292 })
293 v5, v4 = self.adapt(msg)
293 v5, v4 = self.adapt(msg)
294 v4c = v4['content']
294 v4c = v4['content']
295 v5c = v5['content']
295 v5c = v5['content']
296 self.assertEqual(v4c['matched_text'], 'a.is')
296 self.assertEqual(v4c['matched_text'], 'a.is')
297 self.assertEqual(v4c['matches'], v5c['matches'])
297 self.assertEqual(v4c['matches'], v5c['matches'])
298
298
299 def test_inspect_request(self):
299 def test_inspect_request(self):
300 msg = self.msg("inspect_request", {
300 msg = self.msg("inspect_request", {
301 'code' : 'def foo():\n'
301 'code' : 'def foo():\n'
302 ' apple\n'
302 ' apple\n'
303 'bar()',
303 'bar()',
304 'cursor_pos': 18,
304 'cursor_pos': 18,
305 'detail_level' : 1,
305 'detail_level' : 1,
306 })
306 })
307 v5, v4 = self.adapt(msg)
307 v5, v4 = self.adapt(msg)
308 self.assertEqual(v4['header']['msg_type'], 'object_info_request')
308 self.assertEqual(v4['header']['msg_type'], 'object_info_request')
309 v4c = v4['content']
309 v4c = v4['content']
310 v5c = v5['content']
310 v5c = v5['content']
311 self.assertEqual(v4c['oname'], 'apple')
311 self.assertEqual(v4c['oname'], 'apple')
312 self.assertEqual(v5c['detail_level'], v4c['detail_level'])
312 self.assertEqual(v5c['detail_level'], v4c['detail_level'])
313
313
314 def test_inspect_reply(self):
314 def test_inspect_reply(self):
315 msg = self.msg("inspect_reply", {
315 msg = self.msg("inspect_reply", {
316 'name' : 'foo',
316 'name' : 'foo',
317 'found' : True,
317 'found' : True,
318 'data' : {'text/plain' : 'some text'},
318 'data' : {'text/plain' : 'some text'},
319 'metadata' : {},
319 'metadata' : {},
320 })
320 })
321 v5, v4 = self.adapt(msg)
321 v5, v4 = self.adapt(msg)
322 self.assertEqual(v4['header']['msg_type'], 'object_info_reply')
322 self.assertEqual(v4['header']['msg_type'], 'object_info_reply')
323 v4c = v4['content']
323 v4c = v4['content']
324 v5c = v5['content']
324 v5c = v5['content']
325 self.assertEqual(sorted(v4c), ['found', 'oname'])
325 self.assertEqual(sorted(v4c), ['found', 'oname'])
326 self.assertEqual(v4c['found'], False)
326 self.assertEqual(v4c['found'], False)
327
327
328 def test_kernel_info_reply(self):
328 def test_kernel_info_reply(self):
329 msg = self.msg("kernel_info_reply", {
329 msg = self.msg("kernel_info_reply", {
330 'protocol_version': '5.0',
330 'protocol_version': '5.0',
331 'implementation': 'ipython',
331 'implementation': 'ipython',
332 'implementation_version': '1.2.3',
332 'implementation_version': '1.2.3',
333 'language_info': {
333 'language_info': {
334 'name': 'python',
334 'name': 'python',
335 'version': '2.8.0',
335 'version': '2.8.0',
336 'mimetype': 'text/x-python',
336 'mimetype': 'text/x-python',
337 },
337 },
338 'banner' : 'the banner',
338 'banner' : 'the banner',
339 })
339 })
340 v5, v4 = self.adapt(msg)
340 v5, v4 = self.adapt(msg)
341 v4c = v4['content']
341 v4c = v4['content']
342 v5c = v5['content']
342 v5c = v5['content']
343 info = v5c['language_info']
343 info = v5c['language_info']
344 self.assertEqual(v4c, {
344 self.assertEqual(v4c, {
345 'protocol_version': [5,0],
345 'protocol_version': [5,0],
346 'language': 'python',
346 'language': 'python',
347 'language_version': [2,8,0],
347 'language_version': [2,8,0],
348 'ipython_version': [1,2,3],
348 'ipython_version': [1,2,3],
349 })
349 })
350
350
351 # iopub channel
351 # iopub channel
352
352
353 def test_display_data(self):
353 def test_display_data(self):
354 jsondata = dict(a=5)
354 jsondata = dict(a=5)
355 msg = self.msg("display_data", {
355 msg = self.msg("display_data", {
356 'data' : {
356 'data' : {
357 'text/plain' : 'some text',
357 'text/plain' : 'some text',
358 'application/json' : jsondata,
358 'application/json' : jsondata,
359 },
359 },
360 'metadata' : {'text/plain' : { 'key' : 'value' }},
360 'metadata' : {'text/plain' : { 'key' : 'value' }},
361 })
361 })
362 v5, v4 = self.adapt(msg)
362 v5, v4 = self.adapt(msg)
363 v4c = v4['content']
363 v4c = v4['content']
364 v5c = v5['content']
364 v5c = v5['content']
365 self.assertEqual(v5c['metadata'], v4c['metadata'])
365 self.assertEqual(v5c['metadata'], v4c['metadata'])
366 self.assertEqual(v5c['data']['text/plain'], v4c['data']['text/plain'])
366 self.assertEqual(v5c['data']['text/plain'], v4c['data']['text/plain'])
367 self.assertEqual(v4c['data']['application/json'], json.dumps(jsondata))
367 self.assertEqual(v4c['data']['application/json'], json.dumps(jsondata))
368
368
369 # stdin channel
369 # stdin channel
370
370
371 def test_input_request(self):
371 def test_input_request(self):
372 msg = self.msg('input_request', {'prompt': "$>", 'password' : True})
372 msg = self.msg('input_request', {'prompt': "$>", 'password' : True})
373 v5, v4 = self.adapt(msg)
373 v5, v4 = self.adapt(msg)
374 self.assertEqual(v5['content']['prompt'], v4['content']['prompt'])
374 self.assertEqual(v5['content']['prompt'], v4['content']['prompt'])
375 self.assertNotIn('password', v4['content'])
375 self.assertNotIn('password', v4['content'])
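The tests above drive the v5/v4 message adapter through a test harness; a minimal standalone sketch of the same adaptation, assuming the adapter module is importable as jupyter_client.adapter at this point in the rename and that adapt() takes a message dict plus a target major protocol version:

from jupyter_client.session import Session
from jupyter_client.adapter import adapt

session = Session()
# Build a v5 inspect_request and downgrade it to the v4 wire format;
# the adapter renames it to object_info_request and derives 'oname'
# from the code around cursor_pos.
msg = session.msg('inspect_request', content={
    'code': 'print("hi")',
    'cursor_pos': 3,
    'detail_level': 0,
})
v4_msg = adapt(msg, to_version=4)
print(v4_msg['header']['msg_type'], v4_msg['content'].get('oname'))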
@@ -1,141 +1,141 b''
1 """Tests for kernel connection utilities
1 """Tests for kernel connection utilities
2
2
3 Authors
3 Authors
4 -------
4 -------
5 * MinRK
5 * MinRK
6 """
6 """
7 #-----------------------------------------------------------------------------
7 #-----------------------------------------------------------------------------
8 # Copyright (c) 2013, the IPython Development Team.
8 # Copyright (c) 2013, the IPython Development Team.
9 #
9 #
10 # Distributed under the terms of the Modified BSD License.
10 # Distributed under the terms of the Modified BSD License.
11 #
11 #
12 # The full license is in the file COPYING.txt, distributed with this software.
12 # The full license is in the file COPYING.txt, distributed with this software.
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16 # Imports
16 # Imports
17 #-----------------------------------------------------------------------------
17 #-----------------------------------------------------------------------------
18
18
19 import json
19 import json
20 import os
20 import os
21
21
22 import nose.tools as nt
22 import nose.tools as nt
23
23
24 from IPython.config import Config
24 from IPython.config import Config
25 from IPython.consoleapp import IPythonConsoleApp
25 from IPython.consoleapp import IPythonConsoleApp
26 from IPython.core.application import BaseIPythonApplication
26 from IPython.core.application import BaseIPythonApplication
27 from IPython.utils.tempdir import TemporaryDirectory, TemporaryWorkingDirectory
27 from IPython.utils.tempdir import TemporaryDirectory, TemporaryWorkingDirectory
28 from IPython.utils.py3compat import str_to_bytes
28 from IPython.utils.py3compat import str_to_bytes
29 from IPython.kernel import connect
29 from jupyter_client import connect
30 from IPython.kernel.zmq.session import Session
30 from jupyter_client.session import Session
31
31
32 #-----------------------------------------------------------------------------
32 #-----------------------------------------------------------------------------
33 # Classes and functions
33 # Classes and functions
34 #-----------------------------------------------------------------------------
34 #-----------------------------------------------------------------------------
35
35
36 class DummyConsoleApp(BaseIPythonApplication, IPythonConsoleApp):
36 class DummyConsoleApp(BaseIPythonApplication, IPythonConsoleApp):
37 def initialize(self, argv=[]):
37 def initialize(self, argv=[]):
38 BaseIPythonApplication.initialize(self, argv=argv)
38 BaseIPythonApplication.initialize(self, argv=argv)
39 self.init_connection_file()
39 self.init_connection_file()
40
40
41 sample_info = dict(ip='1.2.3.4', transport='ipc',
41 sample_info = dict(ip='1.2.3.4', transport='ipc',
42 shell_port=1, hb_port=2, iopub_port=3, stdin_port=4, control_port=5,
42 shell_port=1, hb_port=2, iopub_port=3, stdin_port=4, control_port=5,
43 key=b'abc123', signature_scheme='hmac-md5',
43 key=b'abc123', signature_scheme='hmac-md5',
44 )
44 )
45
45
46 def test_write_connection_file():
46 def test_write_connection_file():
47 with TemporaryDirectory() as d:
47 with TemporaryDirectory() as d:
48 cf = os.path.join(d, 'kernel.json')
48 cf = os.path.join(d, 'kernel.json')
49 connect.write_connection_file(cf, **sample_info)
49 connect.write_connection_file(cf, **sample_info)
50 nt.assert_true(os.path.exists(cf))
50 nt.assert_true(os.path.exists(cf))
51 with open(cf, 'r') as f:
51 with open(cf, 'r') as f:
52 info = json.load(f)
52 info = json.load(f)
53 info['key'] = str_to_bytes(info['key'])
53 info['key'] = str_to_bytes(info['key'])
54 nt.assert_equal(info, sample_info)
54 nt.assert_equal(info, sample_info)
55
55
56
56
57 def test_load_connection_file_session():
57 def test_load_connection_file_session():
58 """test load_connection_file() after """
58 """test load_connection_file() after """
59 session = Session()
59 session = Session()
60 app = DummyConsoleApp(session=Session())
60 app = DummyConsoleApp(session=Session())
61 app.initialize(argv=[])
61 app.initialize(argv=[])
62 session = app.session
62 session = app.session
63
63
64 with TemporaryDirectory() as d:
64 with TemporaryDirectory() as d:
65 cf = os.path.join(d, 'kernel.json')
65 cf = os.path.join(d, 'kernel.json')
66 connect.write_connection_file(cf, **sample_info)
66 connect.write_connection_file(cf, **sample_info)
67 app.connection_file = cf
67 app.connection_file = cf
68 app.load_connection_file()
68 app.load_connection_file()
69
69
70 nt.assert_equal(session.key, sample_info['key'])
70 nt.assert_equal(session.key, sample_info['key'])
71 nt.assert_equal(session.signature_scheme, sample_info['signature_scheme'])
71 nt.assert_equal(session.signature_scheme, sample_info['signature_scheme'])
72
72
73
73
74 def test_app_load_connection_file():
74 def test_app_load_connection_file():
75 """test `ipython console --existing` loads a connection file"""
75 """test `ipython console --existing` loads a connection file"""
76 with TemporaryDirectory() as d:
76 with TemporaryDirectory() as d:
77 cf = os.path.join(d, 'kernel.json')
77 cf = os.path.join(d, 'kernel.json')
78 connect.write_connection_file(cf, **sample_info)
78 connect.write_connection_file(cf, **sample_info)
79 app = DummyConsoleApp(connection_file=cf)
79 app = DummyConsoleApp(connection_file=cf)
80 app.initialize(argv=[])
80 app.initialize(argv=[])
81
81
82 for attr, expected in sample_info.items():
82 for attr, expected in sample_info.items():
83 if attr in ('key', 'signature_scheme'):
83 if attr in ('key', 'signature_scheme'):
84 continue
84 continue
85 value = getattr(app, attr)
85 value = getattr(app, attr)
86 nt.assert_equal(value, expected, "app.%s = %s != %s" % (attr, value, expected))
86 nt.assert_equal(value, expected, "app.%s = %s != %s" % (attr, value, expected))
87
87
88 def test_get_connection_file():
88 def test_get_connection_file():
89 cfg = Config()
89 cfg = Config()
90 with TemporaryWorkingDirectory() as d:
90 with TemporaryWorkingDirectory() as d:
91 cfg.ProfileDir.location = d
91 cfg.ProfileDir.location = d
92 cf = 'kernel.json'
92 cf = 'kernel.json'
93 app = DummyConsoleApp(config=cfg, connection_file=cf)
93 app = DummyConsoleApp(config=cfg, connection_file=cf)
94 app.initialize(argv=[])
94 app.initialize(argv=[])
95
95
96 profile_cf = os.path.join(app.profile_dir.location, 'security', cf)
96 profile_cf = os.path.join(app.profile_dir.location, 'security', cf)
97 nt.assert_equal(profile_cf, app.connection_file)
97 nt.assert_equal(profile_cf, app.connection_file)
98 with open(profile_cf, 'w') as f:
98 with open(profile_cf, 'w') as f:
99 f.write("{}")
99 f.write("{}")
100 nt.assert_true(os.path.exists(profile_cf))
100 nt.assert_true(os.path.exists(profile_cf))
101 nt.assert_equal(connect.get_connection_file(app), profile_cf)
101 nt.assert_equal(connect.get_connection_file(app), profile_cf)
102
102
103 app.connection_file = cf
103 app.connection_file = cf
104 nt.assert_equal(connect.get_connection_file(app), profile_cf)
104 nt.assert_equal(connect.get_connection_file(app), profile_cf)
105
105
106 def test_find_connection_file():
106 def test_find_connection_file():
107 cfg = Config()
107 cfg = Config()
108 with TemporaryDirectory() as d:
108 with TemporaryDirectory() as d:
109 cfg.ProfileDir.location = d
109 cfg.ProfileDir.location = d
110 cf = 'kernel.json'
110 cf = 'kernel.json'
111 app = DummyConsoleApp(config=cfg, connection_file=cf)
111 app = DummyConsoleApp(config=cfg, connection_file=cf)
112 app.initialize(argv=[])
112 app.initialize(argv=[])
113 BaseIPythonApplication._instance = app
113 BaseIPythonApplication._instance = app
114
114
115 profile_cf = os.path.join(app.profile_dir.location, 'security', cf)
115 profile_cf = os.path.join(app.profile_dir.location, 'security', cf)
116 with open(profile_cf, 'w') as f:
116 with open(profile_cf, 'w') as f:
117 f.write("{}")
117 f.write("{}")
118
118
119 for query in (
119 for query in (
120 'kernel.json',
120 'kernel.json',
121 'kern*',
121 'kern*',
122 '*ernel*',
122 '*ernel*',
123 'k*',
123 'k*',
124 ):
124 ):
125 nt.assert_equal(connect.find_connection_file(query), profile_cf)
125 nt.assert_equal(connect.find_connection_file(query), profile_cf)
126
126
127 BaseIPythonApplication._instance = None
127 BaseIPythonApplication._instance = None
128
128
129 def test_get_connection_info():
129 def test_get_connection_info():
130 with TemporaryDirectory() as d:
130 with TemporaryDirectory() as d:
131 cf = os.path.join(d, 'kernel.json')
131 cf = os.path.join(d, 'kernel.json')
132 connect.write_connection_file(cf, **sample_info)
132 connect.write_connection_file(cf, **sample_info)
133 json_info = connect.get_connection_info(cf)
133 json_info = connect.get_connection_info(cf)
134 info = connect.get_connection_info(cf, unpack=True)
134 info = connect.get_connection_info(cf, unpack=True)
135
135
136 nt.assert_equal(type(json_info), type(""))
136 nt.assert_equal(type(json_info), type(""))
137 nt.assert_equal(info, sample_info)
137 nt.assert_equal(info, sample_info)
138
138
139 info2 = json.loads(json_info)
139 info2 = json.loads(json_info)
140 info2['key'] = str_to_bytes(info2['key'])
140 info2['key'] = str_to_bytes(info2['key'])
141 nt.assert_equal(info2, sample_info)
141 nt.assert_equal(info2, sample_info)
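Outside the test suite, the same connection-file helpers can be driven directly. A small sketch using only calls exercised above; the temporary directory and key are illustrative:

import json
import os
import tempfile

from jupyter_client import connect

d = tempfile.mkdtemp()
cf = os.path.join(d, 'kernel.json')

# Write a connection file (ports left at 0 are auto-assigned), then read it
# back both as raw JSON and as an unpacked dict.
connect.write_connection_file(cf, ip='127.0.0.1', transport='tcp',
                              key=b'secret', signature_scheme='hmac-sha256')
with open(cf) as f:
    print(json.load(f)['transport'])

info = connect.get_connection_info(cf, unpack=True)
print(info['shell_port'], info['signature_scheme'])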
@@ -1,53 +1,53 b''
1 """Tests for the notebook kernel and session manager"""
1 """Tests for the notebook kernel and session manager"""
2
2
3 from subprocess import PIPE
3 from subprocess import PIPE
4 import time
4 import time
5 from unittest import TestCase
5 from unittest import TestCase
6
6
7 from IPython.testing import decorators as dec
7 from IPython.testing import decorators as dec
8
8
9 from IPython.config.loader import Config
9 from IPython.config.loader import Config
10 from IPython.kernel import KernelManager
10 from jupyter_client import KernelManager
11
11
12 class TestKernelManager(TestCase):
12 class TestKernelManager(TestCase):
13
13
14 def _get_tcp_km(self):
14 def _get_tcp_km(self):
15 c = Config()
15 c = Config()
16 km = KernelManager(config=c)
16 km = KernelManager(config=c)
17 return km
17 return km
18
18
19 def _get_ipc_km(self):
19 def _get_ipc_km(self):
20 c = Config()
20 c = Config()
21 c.KernelManager.transport = 'ipc'
21 c.KernelManager.transport = 'ipc'
22 c.KernelManager.ip = 'test'
22 c.KernelManager.ip = 'test'
23 km = KernelManager(config=c)
23 km = KernelManager(config=c)
24 return km
24 return km
25
25
26 def _run_lifecycle(self, km):
26 def _run_lifecycle(self, km):
27 km.start_kernel(stdout=PIPE, stderr=PIPE)
27 km.start_kernel(stdout=PIPE, stderr=PIPE)
28 self.assertTrue(km.is_alive())
28 self.assertTrue(km.is_alive())
29 km.restart_kernel(now=True)
29 km.restart_kernel(now=True)
30 self.assertTrue(km.is_alive())
30 self.assertTrue(km.is_alive())
31 km.interrupt_kernel()
31 km.interrupt_kernel()
32 self.assertTrue(isinstance(km, KernelManager))
32 self.assertTrue(isinstance(km, KernelManager))
33 km.shutdown_kernel(now=True)
33 km.shutdown_kernel(now=True)
34
34
35 def test_tcp_lifecycle(self):
35 def test_tcp_lifecycle(self):
36 km = self._get_tcp_km()
36 km = self._get_tcp_km()
37 self._run_lifecycle(km)
37 self._run_lifecycle(km)
38
38
39 @dec.skip_win32
39 @dec.skip_win32
40 def test_ipc_lifecycle(self):
40 def test_ipc_lifecycle(self):
41 km = self._get_ipc_km()
41 km = self._get_ipc_km()
42 self._run_lifecycle(km)
42 self._run_lifecycle(km)
43
43
44 def test_get_connect_info(self):
44 def test_get_connect_info(self):
45 km = self._get_tcp_km()
45 km = self._get_tcp_km()
46 cinfo = km.get_connection_info()
46 cinfo = km.get_connection_info()
47 keys = sorted(cinfo.keys())
47 keys = sorted(cinfo.keys())
48 expected = sorted([
48 expected = sorted([
49 'ip', 'transport',
49 'ip', 'transport',
50 'hb_port', 'shell_port', 'stdin_port', 'iopub_port', 'control_port',
50 'hb_port', 'shell_port', 'stdin_port', 'iopub_port', 'control_port',
51 'key', 'signature_scheme',
51 'key', 'signature_scheme',
52 ])
52 ])
53 self.assertEqual(keys, expected)
53 self.assertEqual(keys, expected)
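For reference, the lifecycle these tests walk through looks like this when driven by hand; a sketch assuming a default IPython kernel is installed and launchable:

from subprocess import PIPE
from jupyter_client import KernelManager

km = KernelManager()
km.start_kernel(stdout=PIPE, stderr=PIPE)   # launch the kernel subprocess
try:
    assert km.is_alive()
    km.interrupt_kernel()                   # SIGINT or the platform equivalent
    print(sorted(km.get_connection_info()))
finally:
    km.shutdown_kernel(now=True)            # 'now' skips the graceful shutdown wait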
@@ -1,64 +1,64 b''
1 import json
1 import json
2 import os
2 import os
3 from os.path import join as pjoin
3 from os.path import join as pjoin
4 import unittest
4 import unittest
5
5
6 from IPython.testing.decorators import onlyif
6 from IPython.testing.decorators import onlyif
7 from IPython.utils.tempdir import TemporaryDirectory
7 from IPython.utils.tempdir import TemporaryDirectory
8 from IPython.kernel import kernelspec
8 from jupyter_client import kernelspec
9
9
10 sample_kernel_json = {'argv':['cat', '{connection_file}'],
10 sample_kernel_json = {'argv':['cat', '{connection_file}'],
11 'display_name':'Test kernel',
11 'display_name':'Test kernel',
12 }
12 }
13
13
14 class KernelSpecTests(unittest.TestCase):
14 class KernelSpecTests(unittest.TestCase):
15 def setUp(self):
15 def setUp(self):
16 td = TemporaryDirectory()
16 td = TemporaryDirectory()
17 self.addCleanup(td.cleanup)
17 self.addCleanup(td.cleanup)
18 self.sample_kernel_dir = pjoin(td.name, 'kernels', 'Sample')
18 self.sample_kernel_dir = pjoin(td.name, 'kernels', 'Sample')
19 os.makedirs(self.sample_kernel_dir)
19 os.makedirs(self.sample_kernel_dir)
20 json_file = pjoin(self.sample_kernel_dir, 'kernel.json')
20 json_file = pjoin(self.sample_kernel_dir, 'kernel.json')
21 with open(json_file, 'w') as f:
21 with open(json_file, 'w') as f:
22 json.dump(sample_kernel_json, f)
22 json.dump(sample_kernel_json, f)
23
23
24 self.ksm = kernelspec.KernelSpecManager(ipython_dir=td.name)
24 self.ksm = kernelspec.KernelSpecManager(ipython_dir=td.name)
25
25
26 td2 = TemporaryDirectory()
26 td2 = TemporaryDirectory()
27 self.addCleanup(td2.cleanup)
27 self.addCleanup(td2.cleanup)
28 self.installable_kernel = td2.name
28 self.installable_kernel = td2.name
29 with open(pjoin(self.installable_kernel, 'kernel.json'), 'w') as f:
29 with open(pjoin(self.installable_kernel, 'kernel.json'), 'w') as f:
30 json.dump(sample_kernel_json, f)
30 json.dump(sample_kernel_json, f)
31
31
32 def test_find_kernel_specs(self):
32 def test_find_kernel_specs(self):
33 kernels = self.ksm.find_kernel_specs()
33 kernels = self.ksm.find_kernel_specs()
34 self.assertEqual(kernels['sample'], self.sample_kernel_dir)
34 self.assertEqual(kernels['sample'], self.sample_kernel_dir)
35
35
36 def test_get_kernel_spec(self):
36 def test_get_kernel_spec(self):
37 ks = self.ksm.get_kernel_spec('SAMPLE') # Case insensitive
37 ks = self.ksm.get_kernel_spec('SAMPLE') # Case insensitive
38 self.assertEqual(ks.resource_dir, self.sample_kernel_dir)
38 self.assertEqual(ks.resource_dir, self.sample_kernel_dir)
39 self.assertEqual(ks.argv, sample_kernel_json['argv'])
39 self.assertEqual(ks.argv, sample_kernel_json['argv'])
40 self.assertEqual(ks.display_name, sample_kernel_json['display_name'])
40 self.assertEqual(ks.display_name, sample_kernel_json['display_name'])
41 self.assertEqual(ks.env, {})
41 self.assertEqual(ks.env, {})
42
42
43 def test_install_kernel_spec(self):
43 def test_install_kernel_spec(self):
44 self.ksm.install_kernel_spec(self.installable_kernel,
44 self.ksm.install_kernel_spec(self.installable_kernel,
45 kernel_name='tstinstalled',
45 kernel_name='tstinstalled',
46 user=True)
46 user=True)
47 self.assertIn('tstinstalled', self.ksm.find_kernel_specs())
47 self.assertIn('tstinstalled', self.ksm.find_kernel_specs())
48
48
49 with self.assertRaises(OSError):
49 with self.assertRaises(OSError):
50 self.ksm.install_kernel_spec(self.installable_kernel,
50 self.ksm.install_kernel_spec(self.installable_kernel,
51 kernel_name='tstinstalled',
51 kernel_name='tstinstalled',
52 user=True)
52 user=True)
53
53
54 # Smoketest that this succeeds
54 # Smoketest that this succeeds
55 self.ksm.install_kernel_spec(self.installable_kernel,
55 self.ksm.install_kernel_spec(self.installable_kernel,
56 kernel_name='tstinstalled',
56 kernel_name='tstinstalled',
57 replace=True, user=True)
57 replace=True, user=True)
58
58
59 @onlyif(os.name != 'nt' and not os.access('/usr/local/share', os.W_OK), "needs Unix system without root privileges")
59 @onlyif(os.name != 'nt' and not os.access('/usr/local/share', os.W_OK), "needs Unix system without root privileges")
60 def test_cant_install_kernel_spec(self):
60 def test_cant_install_kernel_spec(self):
61 with self.assertRaises(OSError):
61 with self.assertRaises(OSError):
62 self.ksm.install_kernel_spec(self.installable_kernel,
62 self.ksm.install_kernel_spec(self.installable_kernel,
63 kernel_name='tstinstalled',
63 kernel_name='tstinstalled',
64 user=False)
64 user=False)
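A short sketch of the KernelSpecManager calls the tests rely on, run against whatever kernelspecs are installed locally; the 'python3' name is only an example and may not exist on a given machine:

from jupyter_client import kernelspec

ksm = kernelspec.KernelSpecManager()

# Map of kernel name -> resource directory, discovered on disk
for name, resource_dir in ksm.find_kernel_specs().items():
    print(name, '->', resource_dir)

# Load one spec and inspect the command line it would launch
ks = ksm.get_kernel_spec('python3')
print(ks.display_name, ks.argv)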
@@ -1,57 +1,57 b''
1 """Tests for kernel utility functions
1 """Tests for kernel utility functions
2
2
3 Authors
3 Authors
4 -------
4 -------
5 * MinRK
5 * MinRK
6 """
6 """
7 #-----------------------------------------------------------------------------
7 #-----------------------------------------------------------------------------
8 # Copyright (c) 2011, the IPython Development Team.
8 # Copyright (c) 2011, the IPython Development Team.
9 #
9 #
10 # Distributed under the terms of the Modified BSD License.
10 # Distributed under the terms of the Modified BSD License.
11 #
11 #
12 # The full license is in the file COPYING.txt, distributed with this software.
12 # The full license is in the file COPYING.txt, distributed with this software.
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16 # Imports
16 # Imports
17 #-----------------------------------------------------------------------------
17 #-----------------------------------------------------------------------------
18
18
19 # Third-party imports
19 # Third-party imports
20 import nose.tools as nt
20 import nose.tools as nt
21
21
22 # Our own imports
22 # Our own imports
23 from IPython.kernel.launcher import swallow_argv
23 from jupyter_client.launcher import swallow_argv
24
24
25 #-----------------------------------------------------------------------------
25 #-----------------------------------------------------------------------------
26 # Classes and functions
26 # Classes and functions
27 #-----------------------------------------------------------------------------
27 #-----------------------------------------------------------------------------
28
28
29 def test_swallow_argv():
29 def test_swallow_argv():
30 tests = [
30 tests = [
31 # expected , argv , aliases, flags
31 # expected , argv , aliases, flags
32 (['-a', '5'], ['-a', '5'], None, None),
32 (['-a', '5'], ['-a', '5'], None, None),
33 (['5'], ['-a', '5'], None, ['a']),
33 (['5'], ['-a', '5'], None, ['a']),
34 ([], ['-a', '5'], ['a'], None),
34 ([], ['-a', '5'], ['a'], None),
35 ([], ['-a', '5'], ['a'], ['a']),
35 ([], ['-a', '5'], ['a'], ['a']),
36 ([], ['--foo'], None, ['foo']),
36 ([], ['--foo'], None, ['foo']),
37 ([], ['--foo'], ['foobar'], []),
37 ([], ['--foo'], ['foobar'], []),
38 ([], ['--foo', '5'], ['foo'], []),
38 ([], ['--foo', '5'], ['foo'], []),
39 ([], ['--foo=5'], ['foo'], []),
39 ([], ['--foo=5'], ['foo'], []),
40 (['--foo=5'], ['--foo=5'], [], ['foo']),
40 (['--foo=5'], ['--foo=5'], [], ['foo']),
41 (['5'], ['--foo', '5'], [], ['foo']),
41 (['5'], ['--foo', '5'], [], ['foo']),
42 (['bar'], ['--foo', '5', 'bar'], ['foo'], ['foo']),
42 (['bar'], ['--foo', '5', 'bar'], ['foo'], ['foo']),
43 (['bar'], ['--foo=5', 'bar'], ['foo'], ['foo']),
43 (['bar'], ['--foo=5', 'bar'], ['foo'], ['foo']),
44 (['5','bar'], ['--foo', '5', 'bar'], None, ['foo']),
44 (['5','bar'], ['--foo', '5', 'bar'], None, ['foo']),
45 (['bar'], ['--foo', '5', 'bar'], ['foo'], None),
45 (['bar'], ['--foo', '5', 'bar'], ['foo'], None),
46 (['bar'], ['--foo=5', 'bar'], ['foo'], None),
46 (['bar'], ['--foo=5', 'bar'], ['foo'], None),
47 ]
47 ]
48 for expected, argv, aliases, flags in tests:
48 for expected, argv, aliases, flags in tests:
49 stripped = swallow_argv(argv, aliases=aliases, flags=flags)
49 stripped = swallow_argv(argv, aliases=aliases, flags=flags)
50 message = '\n'.join(['',
50 message = '\n'.join(['',
51 "argv: %r" % argv,
51 "argv: %r" % argv,
52 "aliases: %r" % aliases,
52 "aliases: %r" % aliases,
53 "flags : %r" % flags,
53 "flags : %r" % flags,
54 "expected : %r" % expected,
54 "expected : %r" % expected,
55 "returned : %r" % stripped,
55 "returned : %r" % stripped,
56 ])
56 ])
57 nt.assert_equal(expected, stripped, message)
57 nt.assert_equal(expected, stripped, message)
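The table above encodes the stripping rules: aliases consume their value, flags do not. A hypothetical console invocation makes the difference concrete; the option names here are illustrative, not a fixed CLI:

from jupyter_client.launcher import swallow_argv

argv = ['--existing', 'kernel-1234.json', '--profile', 'demo', 'script.py']

# 'existing' is treated as a flag (no value), 'profile' as an alias (takes a value)
forwarded = swallow_argv(argv, aliases=['profile'], flags=['existing'])
print(forwarded)   # expected: ['kernel-1234.json', 'script.py']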
@@ -1,86 +1,86 b''
1 """Tests for the notebook kernel and session manager."""
1 """Tests for the notebook kernel and session manager."""
2
2
3 from subprocess import PIPE
3 from subprocess import PIPE
4 import time
4 import time
5 from unittest import TestCase
5 from unittest import TestCase
6
6
7 from IPython.testing import decorators as dec
7 from IPython.testing import decorators as dec
8
8
9 from IPython.config.loader import Config
9 from IPython.config.loader import Config
10 from IPython.utils.localinterfaces import localhost
10 from IPython.utils.localinterfaces import localhost
11 from IPython.kernel import KernelManager
11 from jupyter_client import KernelManager
12 from IPython.kernel.multikernelmanager import MultiKernelManager
12 from jupyter_client.multikernelmanager import MultiKernelManager
13
13
14 class TestKernelManager(TestCase):
14 class TestKernelManager(TestCase):
15
15
16 def _get_tcp_km(self):
16 def _get_tcp_km(self):
17 c = Config()
17 c = Config()
18 km = MultiKernelManager(config=c)
18 km = MultiKernelManager(config=c)
19 return km
19 return km
20
20
21 def _get_ipc_km(self):
21 def _get_ipc_km(self):
22 c = Config()
22 c = Config()
23 c.KernelManager.transport = 'ipc'
23 c.KernelManager.transport = 'ipc'
24 c.KernelManager.ip = 'test'
24 c.KernelManager.ip = 'test'
25 km = MultiKernelManager(config=c)
25 km = MultiKernelManager(config=c)
26 return km
26 return km
27
27
28 def _run_lifecycle(self, km):
28 def _run_lifecycle(self, km):
29 kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
29 kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
30 self.assertTrue(km.is_alive(kid))
30 self.assertTrue(km.is_alive(kid))
31 self.assertTrue(kid in km)
31 self.assertTrue(kid in km)
32 self.assertTrue(kid in km.list_kernel_ids())
32 self.assertTrue(kid in km.list_kernel_ids())
33 self.assertEqual(len(km),1)
33 self.assertEqual(len(km),1)
34 km.restart_kernel(kid, now=True)
34 km.restart_kernel(kid, now=True)
35 self.assertTrue(km.is_alive(kid))
35 self.assertTrue(km.is_alive(kid))
36 self.assertTrue(kid in km.list_kernel_ids())
36 self.assertTrue(kid in km.list_kernel_ids())
37 km.interrupt_kernel(kid)
37 km.interrupt_kernel(kid)
38 k = km.get_kernel(kid)
38 k = km.get_kernel(kid)
39 self.assertTrue(isinstance(k, KernelManager))
39 self.assertTrue(isinstance(k, KernelManager))
40 km.shutdown_kernel(kid, now=True)
40 km.shutdown_kernel(kid, now=True)
41 self.assertTrue(not kid in km)
41 self.assertTrue(not kid in km)
42
42
43 def _run_cinfo(self, km, transport, ip):
43 def _run_cinfo(self, km, transport, ip):
44 kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
44 kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
45 k = km.get_kernel(kid)
45 k = km.get_kernel(kid)
46 cinfo = km.get_connection_info(kid)
46 cinfo = km.get_connection_info(kid)
47 self.assertEqual(transport, cinfo['transport'])
47 self.assertEqual(transport, cinfo['transport'])
48 self.assertEqual(ip, cinfo['ip'])
48 self.assertEqual(ip, cinfo['ip'])
49 self.assertTrue('stdin_port' in cinfo)
49 self.assertTrue('stdin_port' in cinfo)
50 self.assertTrue('iopub_port' in cinfo)
50 self.assertTrue('iopub_port' in cinfo)
51 stream = km.connect_iopub(kid)
51 stream = km.connect_iopub(kid)
52 stream.close()
52 stream.close()
53 self.assertTrue('shell_port' in cinfo)
53 self.assertTrue('shell_port' in cinfo)
54 stream = km.connect_shell(kid)
54 stream = km.connect_shell(kid)
55 stream.close()
55 stream.close()
56 self.assertTrue('hb_port' in cinfo)
56 self.assertTrue('hb_port' in cinfo)
57 stream = km.connect_hb(kid)
57 stream = km.connect_hb(kid)
58 stream.close()
58 stream.close()
59 km.shutdown_kernel(kid, now=True)
59 km.shutdown_kernel(kid, now=True)
60
60
61 def test_tcp_lifecycle(self):
61 def test_tcp_lifecycle(self):
62 km = self._get_tcp_km()
62 km = self._get_tcp_km()
63 self._run_lifecycle(km)
63 self._run_lifecycle(km)
64
64
65 def test_shutdown_all(self):
65 def test_shutdown_all(self):
66 km = self._get_tcp_km()
66 km = self._get_tcp_km()
67 kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
67 kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
68 self.assertIn(kid, km)
68 self.assertIn(kid, km)
69 km.shutdown_all()
69 km.shutdown_all()
70 self.assertNotIn(kid, km)
70 self.assertNotIn(kid, km)
71 # shutdown again is okay, because we have no kernels
71 # shutdown again is okay, because we have no kernels
72 km.shutdown_all()
72 km.shutdown_all()
73
73
74 def test_tcp_cinfo(self):
74 def test_tcp_cinfo(self):
75 km = self._get_tcp_km()
75 km = self._get_tcp_km()
76 self._run_cinfo(km, 'tcp', localhost())
76 self._run_cinfo(km, 'tcp', localhost())
77
77
78 @dec.skip_win32
78 @dec.skip_win32
79 def test_ipc_lifecycle(self):
79 def test_ipc_lifecycle(self):
80 km = self._get_ipc_km()
80 km = self._get_ipc_km()
81 self._run_lifecycle(km)
81 self._run_lifecycle(km)
82
82
83 @dec.skip_win32
83 @dec.skip_win32
84 def test_ipc_cinfo(self):
84 def test_ipc_cinfo(self):
85 km = self._get_ipc_km()
85 km = self._get_ipc_km()
86 self._run_cinfo(km, 'ipc', 'test')
86 self._run_cinfo(km, 'ipc', 'test')
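The multi-kernel variant keys everything by a kernel id; a condensed sketch of the lifecycle above, again assuming an installed default kernel:

from subprocess import PIPE
from jupyter_client.multikernelmanager import MultiKernelManager

mkm = MultiKernelManager()
kid = mkm.start_kernel(stdout=PIPE, stderr=PIPE)   # returns the kernel id
try:
    assert kid in mkm and mkm.is_alive(kid)
    print(mkm.get_connection_info(kid)['transport'])
finally:
    mkm.shutdown_all()                             # stops every managed kernel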
@@ -1,40 +1,40 b''
1 """Test the IPython.kernel public API
1 """Test the jupyter_client public API
2
2
3 Authors
3 Authors
4 -------
4 -------
5 * MinRK
5 * MinRK
6 """
6 """
7 #-----------------------------------------------------------------------------
7 #-----------------------------------------------------------------------------
8 # Copyright (c) 2013, the IPython Development Team.
8 # Copyright (c) 2013, the IPython Development Team.
9 #
9 #
10 # Distributed under the terms of the Modified BSD License.
10 # Distributed under the terms of the Modified BSD License.
11 #
11 #
12 # The full license is in the file COPYING.txt, distributed with this software.
12 # The full license is in the file COPYING.txt, distributed with this software.
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15 import nose.tools as nt
15 import nose.tools as nt
16
16
17 from IPython.kernel import launcher, connect
17 from jupyter_client import launcher, connect
18 from IPython import kernel
18 from IPython import kernel
19
19
20 #-----------------------------------------------------------------------------
20 #-----------------------------------------------------------------------------
21 # Classes and functions
21 # Classes and functions
22 #-----------------------------------------------------------------------------
22 #-----------------------------------------------------------------------------
23
23
24 def test_kms():
24 def test_kms():
25 for base in ("", "Multi"):
25 for base in ("", "Multi"):
26 KM = base + "KernelManager"
26 KM = base + "KernelManager"
27 nt.assert_in(KM, dir(kernel))
27 nt.assert_in(KM, dir(kernel))
28
28
29 def test_kcs():
29 def test_kcs():
30 for base in ("", "Blocking"):
30 for base in ("", "Blocking"):
31 KM = base + "KernelClient"
31 KM = base + "KernelClient"
32 nt.assert_in(KM, dir(kernel))
32 nt.assert_in(KM, dir(kernel))
33
33
34 def test_launcher():
34 def test_launcher():
35 for name in launcher.__all__:
35 for name in launcher.__all__:
36 nt.assert_in(name, dir(kernel))
36 nt.assert_in(name, dir(kernel))
37
37
38 def test_connect():
38 def test_connect():
39 for name in connect.__all__:
39 for name in connect.__all__:
40 nt.assert_in(name, dir(kernel))
40 nt.assert_in(name, dir(kernel))
@@ -1,318 +1,318 b''
1 """test building messages with Session"""
1 """test building messages with Session"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import hmac
6 import hmac
7 import os
7 import os
8 import uuid
8 import uuid
9 from datetime import datetime
9 from datetime import datetime
10
10
11 import zmq
11 import zmq
12
12
13 from zmq.tests import BaseZMQTestCase
13 from zmq.tests import BaseZMQTestCase
14 from zmq.eventloop.zmqstream import ZMQStream
14 from zmq.eventloop.zmqstream import ZMQStream
15
15
16 from IPython.kernel.zmq import session as ss
16 from jupyter_client import session as ss
17
17
18 from IPython.testing.decorators import skipif, module_not_available
18 from IPython.testing.decorators import skipif, module_not_available
19 from IPython.utils.py3compat import string_types
19 from IPython.utils.py3compat import string_types
20 from IPython.utils import jsonutil
20 from IPython.utils import jsonutil
21
21
22 def _bad_packer(obj):
22 def _bad_packer(obj):
23 raise TypeError("I don't work")
23 raise TypeError("I don't work")
24
24
25 def _bad_unpacker(bytes):
25 def _bad_unpacker(bytes):
26 raise TypeError("I don't work either")
26 raise TypeError("I don't work either")
27
27
28 class SessionTestCase(BaseZMQTestCase):
28 class SessionTestCase(BaseZMQTestCase):
29
29
30 def setUp(self):
30 def setUp(self):
31 BaseZMQTestCase.setUp(self)
31 BaseZMQTestCase.setUp(self)
32 self.session = ss.Session()
32 self.session = ss.Session()
33
33
34
34
35 class TestSession(SessionTestCase):
35 class TestSession(SessionTestCase):
36
36
37 def test_msg(self):
37 def test_msg(self):
38 """message format"""
38 """message format"""
39 msg = self.session.msg('execute')
39 msg = self.session.msg('execute')
40 thekeys = set('header parent_header metadata content msg_type msg_id'.split())
40 thekeys = set('header parent_header metadata content msg_type msg_id'.split())
41 s = set(msg.keys())
41 s = set(msg.keys())
42 self.assertEqual(s, thekeys)
42 self.assertEqual(s, thekeys)
43 self.assertTrue(isinstance(msg['content'],dict))
43 self.assertTrue(isinstance(msg['content'],dict))
44 self.assertTrue(isinstance(msg['metadata'],dict))
44 self.assertTrue(isinstance(msg['metadata'],dict))
45 self.assertTrue(isinstance(msg['header'],dict))
45 self.assertTrue(isinstance(msg['header'],dict))
46 self.assertTrue(isinstance(msg['parent_header'],dict))
46 self.assertTrue(isinstance(msg['parent_header'],dict))
47 self.assertTrue(isinstance(msg['msg_id'],str))
47 self.assertTrue(isinstance(msg['msg_id'],str))
48 self.assertTrue(isinstance(msg['msg_type'],str))
48 self.assertTrue(isinstance(msg['msg_type'],str))
49 self.assertEqual(msg['header']['msg_type'], 'execute')
49 self.assertEqual(msg['header']['msg_type'], 'execute')
50 self.assertEqual(msg['msg_type'], 'execute')
50 self.assertEqual(msg['msg_type'], 'execute')
51
51
52 def test_serialize(self):
52 def test_serialize(self):
53 msg = self.session.msg('execute', content=dict(a=10, b=1.1))
53 msg = self.session.msg('execute', content=dict(a=10, b=1.1))
54 msg_list = self.session.serialize(msg, ident=b'foo')
54 msg_list = self.session.serialize(msg, ident=b'foo')
55 ident, msg_list = self.session.feed_identities(msg_list)
55 ident, msg_list = self.session.feed_identities(msg_list)
56 new_msg = self.session.deserialize(msg_list)
56 new_msg = self.session.deserialize(msg_list)
57 self.assertEqual(ident[0], b'foo')
57 self.assertEqual(ident[0], b'foo')
58 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
58 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
59 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
59 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
60 self.assertEqual(new_msg['header'],msg['header'])
60 self.assertEqual(new_msg['header'],msg['header'])
61 self.assertEqual(new_msg['content'],msg['content'])
61 self.assertEqual(new_msg['content'],msg['content'])
62 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
62 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
63 self.assertEqual(new_msg['metadata'],msg['metadata'])
63 self.assertEqual(new_msg['metadata'],msg['metadata'])
64 # ensure floats don't come out as Decimal:
64 # ensure floats don't come out as Decimal:
65 self.assertEqual(type(new_msg['content']['b']), type(msg['content']['b']))
65 self.assertEqual(type(new_msg['content']['b']), type(msg['content']['b']))
66
66
67 def test_default_secure(self):
67 def test_default_secure(self):
68 self.assertIsInstance(self.session.key, bytes)
68 self.assertIsInstance(self.session.key, bytes)
69 self.assertIsInstance(self.session.auth, hmac.HMAC)
69 self.assertIsInstance(self.session.auth, hmac.HMAC)
70
70
71 def test_send(self):
71 def test_send(self):
72 ctx = zmq.Context.instance()
72 ctx = zmq.Context.instance()
73 A = ctx.socket(zmq.PAIR)
73 A = ctx.socket(zmq.PAIR)
74 B = ctx.socket(zmq.PAIR)
74 B = ctx.socket(zmq.PAIR)
75 A.bind("inproc://test")
75 A.bind("inproc://test")
76 B.connect("inproc://test")
76 B.connect("inproc://test")
77
77
78 msg = self.session.msg('execute', content=dict(a=10))
78 msg = self.session.msg('execute', content=dict(a=10))
79 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
79 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
80
80
81 ident, msg_list = self.session.feed_identities(B.recv_multipart())
81 ident, msg_list = self.session.feed_identities(B.recv_multipart())
82 new_msg = self.session.deserialize(msg_list)
82 new_msg = self.session.deserialize(msg_list)
83 self.assertEqual(ident[0], b'foo')
83 self.assertEqual(ident[0], b'foo')
84 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
84 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
85 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
85 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
86 self.assertEqual(new_msg['header'],msg['header'])
86 self.assertEqual(new_msg['header'],msg['header'])
87 self.assertEqual(new_msg['content'],msg['content'])
87 self.assertEqual(new_msg['content'],msg['content'])
88 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
88 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
89 self.assertEqual(new_msg['metadata'],msg['metadata'])
89 self.assertEqual(new_msg['metadata'],msg['metadata'])
90 self.assertEqual(new_msg['buffers'],[b'bar'])
90 self.assertEqual(new_msg['buffers'],[b'bar'])
91
91
92 content = msg['content']
92 content = msg['content']
93 header = msg['header']
93 header = msg['header']
94 header['msg_id'] = self.session.msg_id
94 header['msg_id'] = self.session.msg_id
95 parent = msg['parent_header']
95 parent = msg['parent_header']
96 metadata = msg['metadata']
96 metadata = msg['metadata']
97 msg_type = header['msg_type']
97 msg_type = header['msg_type']
98 self.session.send(A, None, content=content, parent=parent,
98 self.session.send(A, None, content=content, parent=parent,
99 header=header, metadata=metadata, ident=b'foo', buffers=[b'bar'])
99 header=header, metadata=metadata, ident=b'foo', buffers=[b'bar'])
100 ident, msg_list = self.session.feed_identities(B.recv_multipart())
100 ident, msg_list = self.session.feed_identities(B.recv_multipart())
101 new_msg = self.session.deserialize(msg_list)
101 new_msg = self.session.deserialize(msg_list)
102 self.assertEqual(ident[0], b'foo')
102 self.assertEqual(ident[0], b'foo')
103 self.assertEqual(new_msg['msg_id'],header['msg_id'])
103 self.assertEqual(new_msg['msg_id'],header['msg_id'])
104 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
104 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
105 self.assertEqual(new_msg['header'],msg['header'])
105 self.assertEqual(new_msg['header'],msg['header'])
106 self.assertEqual(new_msg['content'],msg['content'])
106 self.assertEqual(new_msg['content'],msg['content'])
107 self.assertEqual(new_msg['metadata'],msg['metadata'])
107 self.assertEqual(new_msg['metadata'],msg['metadata'])
108 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
108 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
109 self.assertEqual(new_msg['buffers'],[b'bar'])
109 self.assertEqual(new_msg['buffers'],[b'bar'])
110
110
111 header['msg_id'] = self.session.msg_id
111 header['msg_id'] = self.session.msg_id
112
112
113 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
113 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
114 ident, new_msg = self.session.recv(B)
114 ident, new_msg = self.session.recv(B)
115 self.assertEqual(ident[0], b'foo')
115 self.assertEqual(ident[0], b'foo')
116 self.assertEqual(new_msg['msg_id'],header['msg_id'])
116 self.assertEqual(new_msg['msg_id'],header['msg_id'])
117 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
117 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
118 self.assertEqual(new_msg['header'],msg['header'])
118 self.assertEqual(new_msg['header'],msg['header'])
119 self.assertEqual(new_msg['content'],msg['content'])
119 self.assertEqual(new_msg['content'],msg['content'])
120 self.assertEqual(new_msg['metadata'],msg['metadata'])
120 self.assertEqual(new_msg['metadata'],msg['metadata'])
121 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
121 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
122 self.assertEqual(new_msg['buffers'],[b'bar'])
122 self.assertEqual(new_msg['buffers'],[b'bar'])
123
123
124 A.close()
124 A.close()
125 B.close()
125 B.close()
126 ctx.term()
126 ctx.term()
127
127
128 def test_args(self):
128 def test_args(self):
129 """initialization arguments for Session"""
129 """initialization arguments for Session"""
130 s = self.session
130 s = self.session
131 self.assertTrue(s.pack is ss.default_packer)
131 self.assertTrue(s.pack is ss.default_packer)
132 self.assertTrue(s.unpack is ss.default_unpacker)
132 self.assertTrue(s.unpack is ss.default_unpacker)
133 self.assertEqual(s.username, os.environ.get('USER', u'username'))
133 self.assertEqual(s.username, os.environ.get('USER', u'username'))
134
134
135 s = ss.Session()
135 s = ss.Session()
136 self.assertEqual(s.username, os.environ.get('USER', u'username'))
136 self.assertEqual(s.username, os.environ.get('USER', u'username'))
137
137
138 self.assertRaises(TypeError, ss.Session, pack='hi')
138 self.assertRaises(TypeError, ss.Session, pack='hi')
139 self.assertRaises(TypeError, ss.Session, unpack='hi')
139 self.assertRaises(TypeError, ss.Session, unpack='hi')
140 u = str(uuid.uuid4())
140 u = str(uuid.uuid4())
141 s = ss.Session(username=u'carrot', session=u)
141 s = ss.Session(username=u'carrot', session=u)
142 self.assertEqual(s.session, u)
142 self.assertEqual(s.session, u)
143 self.assertEqual(s.username, u'carrot')
143 self.assertEqual(s.username, u'carrot')
144
144
145 def test_tracking(self):
145 def test_tracking(self):
146 """test tracking messages"""
146 """test tracking messages"""
147 a,b = self.create_bound_pair(zmq.PAIR, zmq.PAIR)
147 a,b = self.create_bound_pair(zmq.PAIR, zmq.PAIR)
148 s = self.session
148 s = self.session
149 s.copy_threshold = 1
149 s.copy_threshold = 1
150 stream = ZMQStream(a)
150 stream = ZMQStream(a)
151 msg = s.send(a, 'hello', track=False)
151 msg = s.send(a, 'hello', track=False)
152 self.assertTrue(msg['tracker'] is ss.DONE)
152 self.assertTrue(msg['tracker'] is ss.DONE)
153 msg = s.send(a, 'hello', track=True)
153 msg = s.send(a, 'hello', track=True)
154 self.assertTrue(isinstance(msg['tracker'], zmq.MessageTracker))
154 self.assertTrue(isinstance(msg['tracker'], zmq.MessageTracker))
155 M = zmq.Message(b'hi there', track=True)
155 M = zmq.Message(b'hi there', track=True)
156 msg = s.send(a, 'hello', buffers=[M], track=True)
156 msg = s.send(a, 'hello', buffers=[M], track=True)
157 t = msg['tracker']
157 t = msg['tracker']
158 self.assertTrue(isinstance(t, zmq.MessageTracker))
158 self.assertTrue(isinstance(t, zmq.MessageTracker))
159 self.assertRaises(zmq.NotDone, t.wait, .1)
159 self.assertRaises(zmq.NotDone, t.wait, .1)
160 del M
160 del M
161 t.wait(1) # should complete without raising now that M has been released
161 t.wait(1) # should complete without raising now that M has been released
162
162
163
163
164 def test_unique_msg_ids(self):
164 def test_unique_msg_ids(self):
165 """test that messages receive unique ids"""
165 """test that messages receive unique ids"""
166 ids = set()
166 ids = set()
167 for i in range(2**12):
167 for i in range(2**12):
168 h = self.session.msg_header('test')
168 h = self.session.msg_header('test')
169 msg_id = h['msg_id']
169 msg_id = h['msg_id']
170 self.assertTrue(msg_id not in ids)
170 self.assertTrue(msg_id not in ids)
171 ids.add(msg_id)
171 ids.add(msg_id)
172
172
173 def test_feed_identities(self):
173 def test_feed_identities(self):
174 """scrub the front for zmq IDENTITIES"""
174 """scrub the front for zmq IDENTITIES"""
175 theids = "engine client other".split()
175 theids = "engine client other".split()
176 content = dict(code='whoda',stuff=object())
176 content = dict(code='whoda',stuff=object())
177 themsg = self.session.msg('execute',content=content)
177 themsg = self.session.msg('execute',content=content)
178 pmsg = theids
178 pmsg = theids
179
179
180 def test_session_id(self):
180 def test_session_id(self):
181 session = ss.Session()
181 session = ss.Session()
182 # get bs before us
182 # get bs before us
183 bs = session.bsession
183 bs = session.bsession
184 us = session.session
184 us = session.session
185 self.assertEqual(us.encode('ascii'), bs)
185 self.assertEqual(us.encode('ascii'), bs)
186 session = ss.Session()
186 session = ss.Session()
187 # get us before bs
187 # get us before bs
188 us = session.session
188 us = session.session
189 bs = session.bsession
189 bs = session.bsession
190 self.assertEqual(us.encode('ascii'), bs)
190 self.assertEqual(us.encode('ascii'), bs)
191 # change propagates:
191 # change propagates:
192 session.session = 'something else'
192 session.session = 'something else'
193 bs = session.bsession
193 bs = session.bsession
194 us = session.session
194 us = session.session
195 self.assertEqual(us.encode('ascii'), bs)
195 self.assertEqual(us.encode('ascii'), bs)
196 session = ss.Session(session='stuff')
196 session = ss.Session(session='stuff')
197 # get us before bs
197 # get us before bs
198 self.assertEqual(session.bsession, session.session.encode('ascii'))
198 self.assertEqual(session.bsession, session.session.encode('ascii'))
199 self.assertEqual(b'stuff', session.bsession)
199 self.assertEqual(b'stuff', session.bsession)
200
200
201 def test_zero_digest_history(self):
201 def test_zero_digest_history(self):
202 session = ss.Session(digest_history_size=0)
202 session = ss.Session(digest_history_size=0)
203 for i in range(11):
203 for i in range(11):
204 session._add_digest(uuid.uuid4().bytes)
204 session._add_digest(uuid.uuid4().bytes)
205 self.assertEqual(len(session.digest_history), 0)
205 self.assertEqual(len(session.digest_history), 0)
206
206
207 def test_cull_digest_history(self):
207 def test_cull_digest_history(self):
208 session = ss.Session(digest_history_size=100)
208 session = ss.Session(digest_history_size=100)
209 for i in range(100):
209 for i in range(100):
210 session._add_digest(uuid.uuid4().bytes)
210 session._add_digest(uuid.uuid4().bytes)
211 self.assertTrue(len(session.digest_history) == 100)
211 self.assertTrue(len(session.digest_history) == 100)
212 session._add_digest(uuid.uuid4().bytes)
212 session._add_digest(uuid.uuid4().bytes)
213 self.assertTrue(len(session.digest_history) == 91)
213 self.assertTrue(len(session.digest_history) == 91)
214 for i in range(9):
214 for i in range(9):
215 session._add_digest(uuid.uuid4().bytes)
215 session._add_digest(uuid.uuid4().bytes)
216 self.assertTrue(len(session.digest_history) == 100)
216 self.assertTrue(len(session.digest_history) == 100)
217 session._add_digest(uuid.uuid4().bytes)
217 session._add_digest(uuid.uuid4().bytes)
218 self.assertTrue(len(session.digest_history) == 91)
218 self.assertTrue(len(session.digest_history) == 91)
219
219
220 def test_bad_pack(self):
220 def test_bad_pack(self):
221 try:
221 try:
222 session = ss.Session(pack=_bad_packer)
222 session = ss.Session(pack=_bad_packer)
223 except ValueError as e:
223 except ValueError as e:
224 self.assertIn("could not serialize", str(e))
224 self.assertIn("could not serialize", str(e))
225 self.assertIn("don't work", str(e))
225 self.assertIn("don't work", str(e))
226 else:
226 else:
227 self.fail("Should have raised ValueError")
227 self.fail("Should have raised ValueError")
228
228
229 def test_bad_unpack(self):
229 def test_bad_unpack(self):
230 try:
230 try:
231 session = ss.Session(unpack=_bad_unpacker)
231 session = ss.Session(unpack=_bad_unpacker)
232 except ValueError as e:
232 except ValueError as e:
233 self.assertIn("could not handle output", str(e))
233 self.assertIn("could not handle output", str(e))
234 self.assertIn("don't work either", str(e))
234 self.assertIn("don't work either", str(e))
235 else:
235 else:
236 self.fail("Should have raised ValueError")
236 self.fail("Should have raised ValueError")
237
237
238 def test_bad_packer(self):
238 def test_bad_packer(self):
239 try:
239 try:
240 session = ss.Session(packer=__name__ + '._bad_packer')
240 session = ss.Session(packer=__name__ + '._bad_packer')
241 except ValueError as e:
241 except ValueError as e:
242 self.assertIn("could not serialize", str(e))
242 self.assertIn("could not serialize", str(e))
243 self.assertIn("don't work", str(e))
243 self.assertIn("don't work", str(e))
244 else:
244 else:
245 self.fail("Should have raised ValueError")
245 self.fail("Should have raised ValueError")
246
246
247 def test_bad_unpacker(self):
247 def test_bad_unpacker(self):
248 try:
248 try:
249 session = ss.Session(unpacker=__name__ + '._bad_unpacker')
249 session = ss.Session(unpacker=__name__ + '._bad_unpacker')
250 except ValueError as e:
250 except ValueError as e:
251 self.assertIn("could not handle output", str(e))
251 self.assertIn("could not handle output", str(e))
252 self.assertIn("don't work either", str(e))
252 self.assertIn("don't work either", str(e))
253 else:
253 else:
254 self.fail("Should have raised ValueError")
254 self.fail("Should have raised ValueError")
255
255
256 def test_bad_roundtrip(self):
256 def test_bad_roundtrip(self):
257 with self.assertRaises(ValueError):
257 with self.assertRaises(ValueError):
258 session = ss.Session(unpack=lambda b: 5)
258 session = ss.Session(unpack=lambda b: 5)
259
259
260 def _datetime_test(self, session):
260 def _datetime_test(self, session):
261 content = dict(t=datetime.now())
261 content = dict(t=datetime.now())
262 metadata = dict(t=datetime.now())
262 metadata = dict(t=datetime.now())
263 p = session.msg('msg')
263 p = session.msg('msg')
264 msg = session.msg('msg', content=content, metadata=metadata, parent=p['header'])
264 msg = session.msg('msg', content=content, metadata=metadata, parent=p['header'])
265 smsg = session.serialize(msg)
265 smsg = session.serialize(msg)
266 msg2 = session.deserialize(session.feed_identities(smsg)[1])
266 msg2 = session.deserialize(session.feed_identities(smsg)[1])
267 assert isinstance(msg2['header']['date'], datetime)
267 assert isinstance(msg2['header']['date'], datetime)
268 self.assertEqual(msg['header'], msg2['header'])
268 self.assertEqual(msg['header'], msg2['header'])
269 self.assertEqual(msg['parent_header'], msg2['parent_header'])
269 self.assertEqual(msg['parent_header'], msg2['parent_header'])
270 self.assertEqual(msg['parent_header'], msg2['parent_header'])
270 self.assertEqual(msg['parent_header'], msg2['parent_header'])
271 assert isinstance(msg['content']['t'], datetime)
271 assert isinstance(msg['content']['t'], datetime)
272 assert isinstance(msg['metadata']['t'], datetime)
272 assert isinstance(msg['metadata']['t'], datetime)
273 assert isinstance(msg2['content']['t'], string_types)
273 assert isinstance(msg2['content']['t'], string_types)
274 assert isinstance(msg2['metadata']['t'], string_types)
274 assert isinstance(msg2['metadata']['t'], string_types)
275 self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
275 self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
276 self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
276 self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
277
277
278 def test_datetimes(self):
278 def test_datetimes(self):
279 self._datetime_test(self.session)
279 self._datetime_test(self.session)
280
280
281 def test_datetimes_pickle(self):
281 def test_datetimes_pickle(self):
282 session = ss.Session(packer='pickle')
282 session = ss.Session(packer='pickle')
283 self._datetime_test(session)
283 self._datetime_test(session)
284
284
285 @skipif(module_not_available('msgpack'))
285 @skipif(module_not_available('msgpack'))
286 def test_datetimes_msgpack(self):
286 def test_datetimes_msgpack(self):
287 import msgpack
287 import msgpack
288
288
289 session = ss.Session(
289 session = ss.Session(
290 pack=msgpack.packb,
290 pack=msgpack.packb,
291 unpack=lambda buf: msgpack.unpackb(buf, encoding='utf8'),
291 unpack=lambda buf: msgpack.unpackb(buf, encoding='utf8'),
292 )
292 )
293 self._datetime_test(session)
293 self._datetime_test(session)
294
294
295 def test_send_raw(self):
295 def test_send_raw(self):
296 ctx = zmq.Context.instance()
296 ctx = zmq.Context.instance()
297 A = ctx.socket(zmq.PAIR)
297 A = ctx.socket(zmq.PAIR)
298 B = ctx.socket(zmq.PAIR)
298 B = ctx.socket(zmq.PAIR)
299 A.bind("inproc://test")
299 A.bind("inproc://test")
300 B.connect("inproc://test")
300 B.connect("inproc://test")
301
301
302 msg = self.session.msg('execute', content=dict(a=10))
302 msg = self.session.msg('execute', content=dict(a=10))
303 msg_list = [self.session.pack(msg[part]) for part in
303 msg_list = [self.session.pack(msg[part]) for part in
304 ['header', 'parent_header', 'metadata', 'content']]
304 ['header', 'parent_header', 'metadata', 'content']]
305 self.session.send_raw(A, msg_list, ident=b'foo')
305 self.session.send_raw(A, msg_list, ident=b'foo')
306
306
307 ident, new_msg_list = self.session.feed_identities(B.recv_multipart())
307 ident, new_msg_list = self.session.feed_identities(B.recv_multipart())
308 new_msg = self.session.deserialize(new_msg_list)
308 new_msg = self.session.deserialize(new_msg_list)
309 self.assertEqual(ident[0], b'foo')
309 self.assertEqual(ident[0], b'foo')
310 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
310 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
311 self.assertEqual(new_msg['header'],msg['header'])
311 self.assertEqual(new_msg['header'],msg['header'])
312 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
312 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
313 self.assertEqual(new_msg['content'],msg['content'])
313 self.assertEqual(new_msg['content'],msg['content'])
314 self.assertEqual(new_msg['metadata'],msg['metadata'])
314 self.assertEqual(new_msg['metadata'],msg['metadata'])
315
315
316 A.close()
316 A.close()
317 B.close()
317 B.close()
318 ctx.term()
318 ctx.term()
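test_send above is the canonical round-trip; stripped of its assertions it reduces to the following sketch, where the key and the inproc address are arbitrary:

import zmq
from jupyter_client.session import Session

ctx = zmq.Context.instance()
a = ctx.socket(zmq.PAIR)
b = ctx.socket(zmq.PAIR)
a.bind("inproc://session-demo")
b.connect("inproc://session-demo")

# Sign and send a message on one socket, then verify and rebuild it on the other
session = Session(key=b'supersecret')
session.send(a, 'execute_request', content={'code': 'pass'})
ident, raw = session.feed_identities(b.recv_multipart())
msg = session.deserialize(raw)
print(msg['header']['msg_type'], msg['content'])

a.close()
b.close()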
@@ -1,230 +1,230 b''
1 """ Defines a KernelClient that provides thread-safe sockets with async callbacks on message replies.
1 """ Defines a KernelClient that provides thread-safe sockets with async callbacks on message replies.
2 """
2 """
3 from __future__ import absolute_import
3 from __future__ import absolute_import
4 import atexit
4 import atexit
5 import errno
5 import errno
6 from threading import Thread
6 from threading import Thread
7 import time
7 import time
8
8
9 import zmq
9 import zmq
10 # import ZMQError in top-level namespace, to avoid ugly attribute-error messages
10 # import ZMQError in top-level namespace, to avoid ugly attribute-error messages
11 # during garbage collection of threads at exit:
11 # during garbage collection of threads at exit:
12 from zmq import ZMQError
12 from zmq import ZMQError
13 from zmq.eventloop import ioloop, zmqstream
13 from zmq.eventloop import ioloop, zmqstream
14
14
15 # Local imports
15 # Local imports
16 from IPython.utils.traitlets import Type, Instance
16 from IPython.utils.traitlets import Type, Instance
17 from IPython.kernel.channels import HBChannel
17 from jupyter_client.channels import HBChannel
18 from IPython.kernel import KernelClient
18 from jupyter_client import KernelClient
19 from IPython.kernel.channels import HBChannel
19 from jupyter_client.channels import HBChannel
20
20
21 class ThreadedZMQSocketChannel(object):
21 class ThreadedZMQSocketChannel(object):
22 """A ZMQ socket invoking a callback in the ioloop"""
22 """A ZMQ socket invoking a callback in the ioloop"""
23 session = None
23 session = None
24 socket = None
24 socket = None
25 ioloop = None
25 ioloop = None
26 stream = None
26 stream = None
27 _inspect = None
27 _inspect = None
28
28
29 def __init__(self, socket, session, loop):
29 def __init__(self, socket, session, loop):
30 """Create a channel.
30 """Create a channel.
31
31
32 Parameters
32 Parameters
33 ----------
33 ----------
34 socket : :class:`zmq.Socket`
34 socket : :class:`zmq.Socket`
35 The ZMQ socket to use.
35 The ZMQ socket to use.
36 session : :class:`session.Session`
36 session : :class:`session.Session`
37 The session to use.
37 The session to use.
38 loop
38 loop
39 A pyzmq ioloop to connect the socket to using a ZMQStream
39 A pyzmq ioloop to connect the socket to using a ZMQStream
40 """
40 """
41 super(ThreadedZMQSocketChannel, self).__init__()
41 super(ThreadedZMQSocketChannel, self).__init__()
42
42
43 self.socket = socket
43 self.socket = socket
44 self.session = session
44 self.session = session
45 self.ioloop = loop
45 self.ioloop = loop
46
46
47 self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
47 self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
48 self.stream.on_recv(self._handle_recv)
48 self.stream.on_recv(self._handle_recv)
49
49
50 _is_alive = False
50 _is_alive = False
51 def is_alive(self):
51 def is_alive(self):
52 return self._is_alive
52 return self._is_alive
53
53
54 def start(self):
54 def start(self):
55 self._is_alive = True
55 self._is_alive = True
56
56
57 def stop(self):
57 def stop(self):
58 self._is_alive = False
58 self._is_alive = False
59
59
60 def close(self):
60 def close(self):
61 if self.socket is not None:
61 if self.socket is not None:
62 try:
62 try:
63 self.socket.close(linger=0)
63 self.socket.close(linger=0)
64 except Exception:
64 except Exception:
65 pass
65 pass
66 self.socket = None
66 self.socket = None
67
67
68 def send(self, msg):
68 def send(self, msg):
69 """Queue a message to be sent from the IOLoop's thread.
69 """Queue a message to be sent from the IOLoop's thread.
70
70
71 Parameters
71 Parameters
72 ----------
72 ----------
73 msg : message to send
73 msg : message to send
74
74
75 This is threadsafe, as it uses IOLoop.add_callback to give the loop's
75 This is threadsafe, as it uses IOLoop.add_callback to give the loop's
76 thread control of the action.
76 thread control of the action.
77 """
77 """
78 def thread_send():
78 def thread_send():
79 self.session.send(self.stream, msg)
79 self.session.send(self.stream, msg)
80 self.ioloop.add_callback(thread_send)
80 self.ioloop.add_callback(thread_send)
81
81
82 def _handle_recv(self, msg):
82 def _handle_recv(self, msg):
83 """Callback for stream.on_recv.
83 """Callback for stream.on_recv.
84
84
85 Unpacks message, and calls handlers with it.
85 Unpacks message, and calls handlers with it.
86 """
86 """
87 ident,smsg = self.session.feed_identities(msg)
87 ident,smsg = self.session.feed_identities(msg)
88 msg = self.session.deserialize(smsg)
88 msg = self.session.deserialize(smsg)
89 # let client inspect messages
89 # let client inspect messages
90 if self._inspect:
90 if self._inspect:
91 self._inspect(msg)
91 self._inspect(msg)
92 self.call_handlers(msg)
92 self.call_handlers(msg)
93
93
94 def call_handlers(self, msg):
94 def call_handlers(self, msg):
95 """This method is called in the ioloop thread when a message arrives.
95 """This method is called in the ioloop thread when a message arrives.
96
96
97 Subclasses should override this method to handle incoming messages.
97 Subclasses should override this method to handle incoming messages.
98 It is important to remember that this method is called in the ioloop
98 It is important to remember that this method is called in the ioloop
99 thread, so some logic must be done to ensure that the application-level
99 thread, so some logic must be done to ensure that the application-level
100 handlers are called in the application thread.
100 handlers are called in the application thread.
101 """
101 """
102 pass
102 pass
103
103
104 def process_events(self):
104 def process_events(self):
105 """Subclasses should override this with a method
105 """Subclasses should override this with a method
106 processing any pending GUI events.
106 processing any pending GUI events.
107 """
107 """
108 pass
108 pass
109
109
110
110
111 def flush(self, timeout=1.0):
111 def flush(self, timeout=1.0):
112 """Immediately processes all pending messages on this channel.
112 """Immediately processes all pending messages on this channel.
113
113
114 This is only used for the IOPub channel.
114 This is only used for the IOPub channel.
115
115
116 Callers should use this method to ensure that :meth:`call_handlers`
116 Callers should use this method to ensure that :meth:`call_handlers`
117 has been called for all messages that have been received on the
117 has been called for all messages that have been received on the
118 0MQ SUB socket of this channel.
118 0MQ SUB socket of this channel.
119
119
120 This method is thread safe.
120 This method is thread safe.
121
121
122 Parameters
122 Parameters
123 ----------
123 ----------
124 timeout : float, optional
124 timeout : float, optional
125 The maximum amount of time to spend flushing, in seconds. The
125 The maximum amount of time to spend flushing, in seconds. The
126 default is one second.
126 default is one second.
127 """
127 """
128 # We do the IOLoop callback process twice to ensure that the IOLoop
128 # We do the IOLoop callback process twice to ensure that the IOLoop
129 # gets to perform at least one full poll.
129 # gets to perform at least one full poll.
130 stop_time = time.time() + timeout
130 stop_time = time.time() + timeout
131 for i in range(2):
131 for i in range(2):
132 self._flushed = False
132 self._flushed = False
133 self.ioloop.add_callback(self._flush)
133 self.ioloop.add_callback(self._flush)
134 while not self._flushed and time.time() < stop_time:
134 while not self._flushed and time.time() < stop_time:
135 time.sleep(0.01)
135 time.sleep(0.01)
136
136
137 def _flush(self):
137 def _flush(self):
138 """Callback for :meth:`self.flush`."""
138 """Callback for :meth:`self.flush`."""
139 self.stream.flush()
139 self.stream.flush()
140 self._flushed = True
140 self._flushed = True
141
141
142
142
143 class IOLoopThread(Thread):
143 class IOLoopThread(Thread):
144 """Run a pyzmq ioloop in a thread to send and receive messages
144 """Run a pyzmq ioloop in a thread to send and receive messages
145 """
145 """
146 def __init__(self, loop):
146 def __init__(self, loop):
147 super(IOLoopThread, self).__init__()
147 super(IOLoopThread, self).__init__()
148 self.daemon = True
148 self.daemon = True
149 atexit.register(self._notice_exit)
149 atexit.register(self._notice_exit)
150 self.ioloop = loop or ioloop.IOLoop()
150 self.ioloop = loop or ioloop.IOLoop()
151
151
152 def _notice_exit(self):
152 def _notice_exit(self):
153 self._exiting = True
153 self._exiting = True
154
154
155 def run(self):
155 def run(self):
156 """Run my loop, ignoring EINTR events in the poller"""
156 """Run my loop, ignoring EINTR events in the poller"""
157 while True:
157 while True:
158 try:
158 try:
159 self.ioloop.start()
159 self.ioloop.start()
160 except ZMQError as e:
160 except ZMQError as e:
161 if e.errno == errno.EINTR:
161 if e.errno == errno.EINTR:
162 continue
162 continue
163 else:
163 else:
164 raise
164 raise
165 except Exception:
165 except Exception:
166 if self._exiting:
166 if self._exiting:
167 break
167 break
168 else:
168 else:
169 raise
169 raise
170 else:
170 else:
171 break
171 break
172
172
173 def stop(self):
173 def stop(self):
174 """Stop the channel's event loop and join its thread.
174 """Stop the channel's event loop and join its thread.
175
175
176 This calls :meth:`~threading.Thread.join` and returns when the thread
176 This calls :meth:`~threading.Thread.join` and returns when the thread
177 terminates. :class:`RuntimeError` will be raised if
177 terminates. :class:`RuntimeError` will be raised if
178 :meth:`~threading.Thread.start` is called again.
178 :meth:`~threading.Thread.start` is called again.
179 """
179 """
180 if self.ioloop is not None:
180 if self.ioloop is not None:
181 self.ioloop.stop()
181 self.ioloop.stop()
182 self.join()
182 self.join()
183 self.close()
183 self.close()
184
184
185 def close(self):
185 def close(self):
186 if self.ioloop is not None:
186 if self.ioloop is not None:
187 try:
187 try:
188 self.ioloop.close(all_fds=True)
188 self.ioloop.close(all_fds=True)
189 except Exception:
189 except Exception:
190 pass
190 pass
191
191
192
192
193 class ThreadedKernelClient(KernelClient):
193 class ThreadedKernelClient(KernelClient):
194 """ A KernelClient that provides thread-safe sockets with async callbacks on message replies.
194 """ A KernelClient that provides thread-safe sockets with async callbacks on message replies.
195 """
195 """
196
196
197 _ioloop = None
197 _ioloop = None
198 @property
198 @property
199 def ioloop(self):
199 def ioloop(self):
200 if self._ioloop is None:
200 if self._ioloop is None:
201 self._ioloop = ioloop.IOLoop()
201 self._ioloop = ioloop.IOLoop()
202 return self._ioloop
202 return self._ioloop
203
203
204 ioloop_thread = Instance(IOLoopThread)
204 ioloop_thread = Instance(IOLoopThread)
205
205
206 def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
206 def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
207 if shell:
207 if shell:
208 self.shell_channel._inspect = self._check_kernel_info_reply
208 self.shell_channel._inspect = self._check_kernel_info_reply
209
209
210 self.ioloop_thread = IOLoopThread(self.ioloop)
210 self.ioloop_thread = IOLoopThread(self.ioloop)
211 self.ioloop_thread.start()
211 self.ioloop_thread.start()
212
212
213 super(ThreadedKernelClient, self).start_channels(shell, iopub, stdin, hb)
213 super(ThreadedKernelClient, self).start_channels(shell, iopub, stdin, hb)
214
214
215 def _check_kernel_info_reply(self, msg):
215 def _check_kernel_info_reply(self, msg):
216 """This is run in the ioloop thread when the kernel info reply is received
216 """This is run in the ioloop thread when the kernel info reply is received
217 """
217 """
218 if msg['msg_type'] == 'kernel_info_reply':
218 if msg['msg_type'] == 'kernel_info_reply':
219 self._handle_kernel_info_reply(msg)
219 self._handle_kernel_info_reply(msg)
220 self.shell_channel._inspect = None
220 self.shell_channel._inspect = None
221
221
222 def stop_channels(self):
222 def stop_channels(self):
223 super(ThreadedKernelClient, self).stop_channels()
223 super(ThreadedKernelClient, self).stop_channels()
224 if self.ioloop_thread.is_alive():
224 if self.ioloop_thread.is_alive():
225 self.ioloop_thread.stop()
225 self.ioloop_thread.stop()
226
226
227 iopub_channel_class = Type(ThreadedZMQSocketChannel)
227 iopub_channel_class = Type(ThreadedZMQSocketChannel)
228 shell_channel_class = Type(ThreadedZMQSocketChannel)
228 shell_channel_class = Type(ThreadedZMQSocketChannel)
229 stdin_channel_class = Type(ThreadedZMQSocketChannel)
229 stdin_channel_class = Type(ThreadedZMQSocketChannel)
230 hb_channel_class = Type(HBChannel)
230 hb_channel_class = Type(HBChannel)
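To make the subclassing contract above concrete, here is a rough usage sketch under stated assumptions, not part of the change itself: it assumes a kernel is already running and reachable via a connection file, and the names QueueingChannel, QueueingKernelClient, and kernel-1234.json are hypothetical. Because call_handlers runs in the ioloop thread, the sketch only enqueues messages there and lets the application thread drain the queue.

try:
    from queue import Queue   # Python 3
except ImportError:
    from Queue import Queue   # Python 2

from IPython.utils.traitlets import Type
from jupyter_client.threaded import ThreadedKernelClient, ThreadedZMQSocketChannel

class QueueingChannel(ThreadedZMQSocketChannel):
    """Collect incoming messages in a thread-safe queue.

    call_handlers() is invoked in the ioloop thread, so it must not block;
    the application thread consumes self.messages at its own pace.
    """
    def __init__(self, socket, session, loop):
        super(QueueingChannel, self).__init__(socket, session, loop)
        self.messages = Queue()

    def call_handlers(self, msg):
        # Runs in the ioloop thread: only enqueue, never do real work here.
        self.messages.put(msg)

class QueueingKernelClient(ThreadedKernelClient):
    shell_channel_class = Type(QueueingChannel)
    iopub_channel_class = Type(QueueingChannel)
    stdin_channel_class = Type(QueueingChannel)

# Hypothetical connection file path; adjust for your environment.
client = QueueingKernelClient(connection_file='kernel-1234.json')
client.load_connection_file()
client.start_channels()
client.execute('1 + 1')

# Drain shell replies (e.g. kernel_info_reply) until the execute_reply arrives.
reply = client.shell_channel.messages.get(timeout=10)
while reply['msg_type'] != 'execute_reply':
    reply = client.shell_channel.messages.get(timeout=10)

client.stop_channels()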