##// END OF EJS Templates
s/IPython.kernel/jupyter_client in jupyter_client
Min RK -
Show More
@@ -1,11 +1,8 b''
1 1 """IPython kernels and associated utilities"""
2 2
3 # just for friendlier zmq version check
4 from . import zmq
5
6 3 from .connect import *
7 4 from .launcher import *
8 5 from .client import KernelClient
9 6 from .manager import KernelManager, run_kernel
10 7 from .blocking import BlockingKernelClient
11 8 from .multikernelmanager import MultiKernelManager
@@ -1,38 +1,38 b''
1 1 """Implements a fully blocking kernel client.
2 2
3 3 Useful for test suites and blocking terminal interfaces.
4 4 """
5 5 # Copyright (c) IPython Development Team.
6 6 # Distributed under the terms of the Modified BSD License.
7 7
8 8 try:
9 9 from queue import Empty # Python 3
10 10 except ImportError:
11 11 from Queue import Empty # Python 2
12 12
13 13 from IPython.utils.traitlets import Type
14 from IPython.kernel.channels import HBChannel
15 from IPython.kernel.client import KernelClient
14 from jupyter_client.channels import HBChannel
15 from jupyter_client.client import KernelClient
16 16 from .channels import ZMQSocketChannel
17 17
class BlockingKernelClient(KernelClient):
    """A KernelClient whose channel operations block the calling thread.

    Useful for test suites and blocking terminal interfaces.
    """

    def wait_for_ready(self, timeout=None):
        """Block until the kernel is ready, then flush pending IOPub messages.

        Waits for a ``kernel_info_reply`` on the shell channel (so the
        session can adapt to the kernel's protocol version), then drains
        any messages already queued on the IOPub channel.

        Parameters
        ----------
        timeout : float, optional
            Maximum number of seconds to wait for the kernel_info reply.
            The default (None) preserves the original behavior of waiting
            indefinitely.

        Raises
        ------
        RuntimeError
            If ``timeout`` is given and no kernel_info_reply arrives in time.
        """
        import time  # local import: keeps the module import block unchanged

        if timeout is None:
            deadline = float('inf')
        else:
            deadline = time.time() + timeout

        # Wait for kernel info reply on shell channel
        while True:
            try:
                # Poll with a short timeout so we can enforce the deadline.
                msg = self.shell_channel.get_msg(block=True, timeout=1)
            except Empty:
                pass
            else:
                if msg['msg_type'] == 'kernel_info_reply':
                    self._handle_kernel_info_reply(msg)
                    break
            if time.time() > deadline:
                raise RuntimeError(
                    "Kernel didn't respond to kernel_info_request "
                    "in %s seconds" % timeout
                )

        # Flush IOPub channel: discard anything already queued.
        while True:
            try:
                msg = self.iopub_channel.get_msg(block=True, timeout=0.2)
            except Empty:
                break

    # The classes to use for the various channels
    shell_channel_class = Type(ZMQSocketChannel)
    iopub_channel_class = Type(ZMQSocketChannel)
    stdin_channel_class = Type(ZMQSocketChannel)
    hb_channel_class = Type(HBChannel)
@@ -1,49 +1,49 b''
1 1 """Abstract base classes for kernel client channels"""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 import abc
7 7
8 8 from IPython.utils.py3compat import with_metaclass
9 9
10 10
class ChannelABC(with_metaclass(abc.ABCMeta, object)):
    """A base class for all channel ABCs.

    Concrete channel implementations must provide the start/stop/is_alive
    lifecycle methods declared here.
    """

    @abc.abstractmethod
    def start(self):
        """Start the channel."""
        pass

    @abc.abstractmethod
    def stop(self):
        """Stop the channel."""
        pass

    @abc.abstractmethod
    def is_alive(self):
        """Return whether the channel is currently running."""
        pass
25 25
26 26
class HBChannelABC(ChannelABC):
    """HBChannel ABC.

    The docstrings for this class can be found in the base implementation:

    `jupyter_client.channels.HBChannel`
    """

    @abc.abstractproperty
    def time_to_dead(self):
        # Interval-related property; see the base implementation referenced
        # in the class docstring for its exact semantics.
        pass

    @abc.abstractmethod
    def pause(self):
        """Pause heartbeat monitoring."""
        pass

    @abc.abstractmethod
    def unpause(self):
        """Resume heartbeat monitoring."""
        pass

    @abc.abstractmethod
    def is_beating(self):
        """Return whether a heartbeat is currently being detected."""
        pass
@@ -1,390 +1,390 b''
1 1 """Base class to manage the interaction with a running kernel"""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 from __future__ import absolute_import
7 from IPython.kernel.channels import major_protocol_version
7 from jupyter_client.channels import major_protocol_version
8 8 from IPython.utils.py3compat import string_types, iteritems
9 9
10 10 import zmq
11 11
12 12 from IPython.utils.traitlets import (
13 13 Any, Instance, Type,
14 14 )
15 15
16 16 from .channelsabc import (ChannelABC, HBChannelABC)
17 17 from .clientabc import KernelClientABC
18 18 from .connect import ConnectionFileMixin
19 19
20 20
21 21 # some utilities to validate message structure, these might get moved elsewhere
22 22 # if they prove to have more generic utility
23 23
def validate_string_dict(dct):
    """Validate that the input is a dict with string keys and values.

    Raises ValueError if not."""
    for key in dct:
        value = dct[key]
        if not isinstance(key, string_types):
            raise ValueError('key %r in dict must be a string' % key)
        if not isinstance(value, string_types):
            raise ValueError('value %r in dict must be a string' % value)
33 33
34 34
class KernelClient(ConnectionFileMixin):
    """Communicates with a single kernel on any host via zmq channels.

    There are four channels associated with each kernel:

    * shell: for request/reply calls to the kernel.
    * iopub: for the kernel to publish results to frontends.
    * hb: for monitoring the kernel's heartbeat.
    * stdin: for frontends to reply to raw_input calls in the kernel.

    The methods of the channels are exposed as methods of the client itself
    (KernelClient.execute, complete, history, etc.).
    See the channels themselves for documentation of these methods.

    """

    # The PyZMQ Context to use for communication with the kernel.
    context = Instance(zmq.Context)
    def _context_default(self):
        # Share the process-wide zmq context unless one is explicitly set.
        return zmq.Context.instance()

    # The classes to use for the various channels.  Subclasses override
    # these with concrete channel implementations.
    shell_channel_class = Type(ChannelABC)
    iopub_channel_class = Type(ChannelABC)
    stdin_channel_class = Type(ChannelABC)
    hb_channel_class = Type(HBChannelABC)

    # Protected traits: lazily-created channel instances, populated by the
    # corresponding properties below.
    _shell_channel = Any
    _iopub_channel = Any
    _stdin_channel = Any
    _hb_channel = Any

    # flag for whether execute requests should be allowed to call raw_input:
    allow_stdin = True

    #--------------------------------------------------------------------------
    # Channel proxy methods
    #--------------------------------------------------------------------------

    # NOTE(review): defined without ``self`` -- the first positional argument
    # is the channel object itself.  Nothing in this class calls it (the
    # get_*_msg proxies below call channel.get_msg directly); confirm whether
    # it is still needed before removing.
    def _get_msg(channel, *args, **kwargs):
        return channel.get_msg(*args, **kwargs)

    def get_shell_msg(self, *args, **kwargs):
        """Get a message from the shell channel"""
        return self.shell_channel.get_msg(*args, **kwargs)

    def get_iopub_msg(self, *args, **kwargs):
        """Get a message from the iopub channel"""
        return self.iopub_channel.get_msg(*args, **kwargs)

    def get_stdin_msg(self, *args, **kwargs):
        """Get a message from the stdin channel"""
        return self.stdin_channel.get_msg(*args, **kwargs)

    #--------------------------------------------------------------------------
    # Channel management methods
    #--------------------------------------------------------------------------

    def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
        """Starts the channels for this kernel.

        This will create the channels if they do not exist and then start
        them (their activity runs in a thread). If port numbers of 0 are
        being used (random ports) then you must first call
        :meth:`start_kernel`. If the channels have been stopped and you
        call this, :class:`RuntimeError` will be raised.
        """
        if shell:
            self.shell_channel.start()
            # Ask for kernel info so the session can adapt to the kernel's
            # protocol version (see _handle_kernel_info_reply).
            self.kernel_info()
        if iopub:
            self.iopub_channel.start()
        if stdin:
            self.stdin_channel.start()
            self.allow_stdin = True
        else:
            # Without a stdin channel the kernel must not be allowed to
            # issue raw_input requests.
            self.allow_stdin = False
        if hb:
            self.hb_channel.start()

    def stop_channels(self):
        """Stops all the running channels for this kernel.

        This stops their event loops and joins their threads.
        """
        if self.shell_channel.is_alive():
            self.shell_channel.stop()
        if self.iopub_channel.is_alive():
            self.iopub_channel.stop()
        if self.stdin_channel.is_alive():
            self.stdin_channel.stop()
        if self.hb_channel.is_alive():
            self.hb_channel.stop()

    @property
    def channels_running(self):
        """Are any of the channels created and running?"""
        return (self.shell_channel.is_alive() or self.iopub_channel.is_alive() or
                self.stdin_channel.is_alive() or self.hb_channel.is_alive())

    ioloop = None  # Overridden in subclasses that use pyzmq event loop

    @property
    def shell_channel(self):
        """Get the shell channel object for this kernel."""
        if self._shell_channel is None:
            url = self._make_url('shell')
            self.log.debug("connecting shell channel to %s", url)
            socket = self.connect_shell(identity=self.session.bsession)
            self._shell_channel = self.shell_channel_class(
                socket, self.session, self.ioloop
            )
        return self._shell_channel

    @property
    def iopub_channel(self):
        """Get the iopub channel object for this kernel."""
        if self._iopub_channel is None:
            url = self._make_url('iopub')
            self.log.debug("connecting iopub channel to %s", url)
            socket = self.connect_iopub()
            self._iopub_channel = self.iopub_channel_class(
                socket, self.session, self.ioloop
            )
        return self._iopub_channel

    @property
    def stdin_channel(self):
        """Get the stdin channel object for this kernel."""
        if self._stdin_channel is None:
            url = self._make_url('stdin')
            self.log.debug("connecting stdin channel to %s", url)
            socket = self.connect_stdin(identity=self.session.bsession)
            self._stdin_channel = self.stdin_channel_class(
                socket, self.session, self.ioloop
            )
        return self._stdin_channel

    @property
    def hb_channel(self):
        """Get the hb channel object for this kernel."""
        if self._hb_channel is None:
            url = self._make_url('hb')
            self.log.debug("connecting heartbeat channel to %s", url)
            # Unlike the other channels, hb is constructed from the context
            # and url rather than a pre-connected socket.
            self._hb_channel = self.hb_channel_class(
                self.context, self.session, url
            )
        return self._hb_channel

    def is_alive(self):
        """Is the kernel process still running?"""
        if self._hb_channel is not None:
            # We didn't start the kernel with this KernelManager so we
            # use the heartbeat.
            return self._hb_channel.is_beating()
        else:
            # no heartbeat and not local, we can't tell if it's running,
            # so naively return True
            return True


    # Methods to send specific messages on channels
    def execute(self, code, silent=False, store_history=True,
                user_expressions=None, allow_stdin=None, stop_on_error=True):
        """Execute code in the kernel.

        Parameters
        ----------
        code : str
            A string of Python code.

        silent : bool, optional (default False)
            If set, the kernel will execute the code as quietly possible, and
            will force store_history to be False.

        store_history : bool, optional (default True)
            If set, the kernel will store command history.  This is forced
            to be False if silent is True.

        user_expressions : dict, optional
            A dict mapping names to expressions to be evaluated in the user's
            dict. The expression values are returned as strings formatted using
            :func:`repr`.

        allow_stdin : bool, optional (default self.allow_stdin)
            Flag for whether the kernel can send stdin requests to frontends.

            Some frontends (e.g. the Notebook) do not support stdin requests.
            If raw_input is called from code executed from such a frontend, a
            StdinNotImplementedError will be raised.

        stop_on_error: bool, optional (default True)
            Flag whether to abort the execution queue, if an exception is encountered.

        Returns
        -------
        The msg_id of the message sent.
        """
        if user_expressions is None:
            user_expressions = {}
        if allow_stdin is None:
            allow_stdin = self.allow_stdin


        # Don't waste network traffic if inputs are invalid
        if not isinstance(code, string_types):
            raise ValueError('code %r must be a string' % code)
        validate_string_dict(user_expressions)

        # Create class for content/msg creation. Related to, but possibly
        # not in Session.
        content = dict(code=code, silent=silent, store_history=store_history,
                       user_expressions=user_expressions,
                       allow_stdin=allow_stdin, stop_on_error=stop_on_error
                       )
        msg = self.session.msg('execute_request', content)
        self.shell_channel.send(msg)
        return msg['header']['msg_id']

    def complete(self, code, cursor_pos=None):
        """Tab complete text in the kernel's namespace.

        Parameters
        ----------
        code : str
            The context in which completion is requested.
            Can be anything between a variable name and an entire cell.
        cursor_pos : int, optional
            The position of the cursor in the block of code where the completion was requested.
            Default: ``len(code)``

        Returns
        -------
        The msg_id of the message sent.
        """
        if cursor_pos is None:
            cursor_pos = len(code)
        content = dict(code=code, cursor_pos=cursor_pos)
        msg = self.session.msg('complete_request', content)
        self.shell_channel.send(msg)
        return msg['header']['msg_id']

    def inspect(self, code, cursor_pos=None, detail_level=0):
        """Get metadata information about an object in the kernel's namespace.

        It is up to the kernel to determine the appropriate object to inspect.

        Parameters
        ----------
        code : str
            The context in which info is requested.
            Can be anything between a variable name and an entire cell.
        cursor_pos : int, optional
            The position of the cursor in the block of code where the info was requested.
            Default: ``len(code)``
        detail_level : int, optional
            The level of detail for the introspection (0-2)

        Returns
        -------
        The msg_id of the message sent.
        """
        if cursor_pos is None:
            cursor_pos = len(code)
        content = dict(code=code, cursor_pos=cursor_pos,
                       detail_level=detail_level,
                       )
        msg = self.session.msg('inspect_request', content)
        self.shell_channel.send(msg)
        return msg['header']['msg_id']

    def history(self, raw=True, output=False, hist_access_type='range', **kwargs):
        """Get entries from the kernel's history list.

        Parameters
        ----------
        raw : bool
            If True, return the raw input.
        output : bool
            If True, then return the output as well.
        hist_access_type : str
            'range' (fill in session, start and stop params), 'tail' (fill in n)
            or 'search' (fill in pattern param).

        session : int
            For a range request, the session from which to get lines. Session
            numbers are positive integers; negative ones count back from the
            current session.
        start : int
            The first line number of a history range.
        stop : int
            The final (excluded) line number of a history range.

        n : int
            The number of lines of history to get for a tail request.

        pattern : str
            The glob-syntax pattern for a search request.

        Returns
        -------
        The msg_id of the message sent.
        """
        content = dict(raw=raw, output=output, hist_access_type=hist_access_type,
                       **kwargs)
        msg = self.session.msg('history_request', content)
        self.shell_channel.send(msg)
        return msg['header']['msg_id']

    def kernel_info(self):
        """Request kernel info."""
        msg = self.session.msg('kernel_info_request')
        self.shell_channel.send(msg)
        return msg['header']['msg_id']

    def _handle_kernel_info_reply(self, msg):
        """handle kernel info reply

        sets protocol adaptation version. This might
        be run from a separate thread.
        """
        # Only the major protocol version matters for adaptation.
        adapt_version = int(msg['content']['protocol_version'].split('.')[0])
        if adapt_version != major_protocol_version:
            self.session.adapt_version = adapt_version

    def shutdown(self, restart=False):
        """Request an immediate kernel shutdown.

        Upon receipt of the (empty) reply, client code can safely assume that
        the kernel has shut down and it's safe to forcefully terminate it if
        it's still alive.

        The kernel will send the reply via a function registered with Python's
        atexit module, ensuring it's truly done as the kernel is done with all
        normal operation.
        """
        # Send quit message to kernel. Once we implement kernel-side setattr,
        # this should probably be done that way, but for now this will do.
        msg = self.session.msg('shutdown_request', {'restart':restart})
        self.shell_channel.send(msg)
        return msg['header']['msg_id']

    def is_complete(self, code):
        """Ask the kernel whether ``code`` is a complete block of input."""
        msg = self.session.msg('is_complete_request', {'code': code})
        self.shell_channel.send(msg)
        return msg['header']['msg_id']

    def input(self, string):
        """Send a string of raw input to the kernel."""
        content = dict(value=string)
        msg = self.session.msg('input_reply', content)
        self.stdin_channel.send(msg)


# Register as the concrete implementation of the client ABC.
KernelClientABC.register(KernelClient)
@@ -1,80 +1,80 b''
1 1 """Abstract base class for kernel clients"""
2 2
3 3 #-----------------------------------------------------------------------------
4 4 # Copyright (C) 2013 The IPython Development Team
5 5 #
6 6 # Distributed under the terms of the BSD License. The full license is in
7 7 # the file COPYING, distributed as part of this software.
8 8 #-----------------------------------------------------------------------------
9 9
10 10 #-----------------------------------------------------------------------------
11 11 # Imports
12 12 #-----------------------------------------------------------------------------
13 13
14 14 import abc
15 15
16 16 from IPython.utils.py3compat import with_metaclass
17 17
18 18 #-----------------------------------------------------------------------------
19 19 # Main kernel client class
20 20 #-----------------------------------------------------------------------------
21 21
class KernelClientABC(with_metaclass(abc.ABCMeta, object)):
    """KernelManager ABC.

    The docstrings for this class can be found in the base implementation:

    `jupyter_client.client.KernelClient`
    """

    @abc.abstractproperty
    def kernel(self):
        pass

    # The four channel classes a concrete client must provide.
    @abc.abstractproperty
    def shell_channel_class(self):
        pass

    @abc.abstractproperty
    def iopub_channel_class(self):
        pass

    @abc.abstractproperty
    def hb_channel_class(self):
        pass

    @abc.abstractproperty
    def stdin_channel_class(self):
        pass

    #--------------------------------------------------------------------------
    # Channel management methods
    #--------------------------------------------------------------------------

    @abc.abstractmethod
    def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
        pass

    @abc.abstractmethod
    def stop_channels(self):
        pass

    @abc.abstractproperty
    def channels_running(self):
        pass

    # The four channel instances themselves.
    @abc.abstractproperty
    def shell_channel(self):
        pass

    @abc.abstractproperty
    def iopub_channel(self):
        pass

    @abc.abstractproperty
    def stdin_channel(self):
        pass

    @abc.abstractproperty
    def hb_channel(self):
        pass
@@ -1,576 +1,576 b''
1 1 """Utilities for connecting to kernels
2 2
3 3 The :class:`ConnectionFileMixin` class in this module encapsulates the logic
4 4 related to writing and reading connections files.
5 5 """
6 6 # Copyright (c) IPython Development Team.
7 7 # Distributed under the terms of the Modified BSD License.
8 8
9 9 #-----------------------------------------------------------------------------
10 10 # Imports
11 11 #-----------------------------------------------------------------------------
12 12
13 13 from __future__ import absolute_import
14 14
15 15 import glob
16 16 import json
17 17 import os
18 18 import socket
19 19 import sys
20 20 from getpass import getpass
21 21 from subprocess import Popen, PIPE
22 22 import tempfile
23 23
24 24 import zmq
25 25
26 26 # IPython imports
27 27 from IPython.config import LoggingConfigurable
28 28 from IPython.core.profiledir import ProfileDir
29 29 from IPython.utils.localinterfaces import localhost
30 30 from IPython.utils.path import filefind, get_ipython_dir
31 31 from IPython.utils.py3compat import (str_to_bytes, bytes_to_str, cast_bytes_py2,
32 32 string_types)
33 33 from IPython.utils.traitlets import (
34 34 Bool, Integer, Unicode, CaselessStrEnum, Instance,
35 35 )
36 36
37 37
38 38 #-----------------------------------------------------------------------------
39 39 # Working with Connection Files
40 40 #-----------------------------------------------------------------------------
41 41
def write_connection_file(fname=None, shell_port=0, iopub_port=0, stdin_port=0, hb_port=0,
                          control_port=0, ip='', key=b'', transport='tcp',
                          signature_scheme='hmac-sha256',
                          ):
    """Generates a JSON config file, including the selection of random ports.

    Parameters
    ----------

    fname : unicode
        The path to the file to write

    shell_port : int, optional
        The port to use for ROUTER (shell) channel.

    iopub_port : int, optional
        The port to use for the SUB channel.

    stdin_port : int, optional
        The port to use for the ROUTER (raw input) channel.

    control_port : int, optional
        The port to use for the ROUTER (control) channel.

    hb_port : int, optional
        The port to use for the heartbeat REP channel.

    ip  : str, optional
        The ip address the kernel will bind to.

    key : str, optional
        The Session key used for message authentication.

    signature_scheme : str, optional
        The scheme used for message authentication.
        This has the form 'digest-hash', where 'digest'
        is the scheme used for digests, and 'hash' is the name of the hash function
        used by the digest scheme.
        Currently, 'hmac' is the only supported digest scheme,
        and 'sha256' is the default hash function.

    Returns
    -------
    (fname, cfg) : (str, dict)
        The path of the file written, and the connection dictionary.
    """
    if not ip:
        ip = localhost()
    # default to temporary connector file
    if not fname:
        fd, fname = tempfile.mkstemp('.json')
        os.close(fd)

    # Find open ports as necessary.

    ports = []
    # Count how many ports we must pick: a port value <= 0 means "random".
    ports_needed = int(shell_port <= 0) + \
                   int(iopub_port <= 0) + \
                   int(stdin_port <= 0) + \
                   int(control_port <= 0) + \
                   int(hb_port <= 0)
    if transport == 'tcp':
        # Bind all sockets first, then read the OS-assigned port numbers,
        # so the picks are distinct.
        for i in range(ports_needed):
            sock = socket.socket()
            # struct.pack('ii', (0,0)) is 8 null bytes
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, b'\0' * 8)
            sock.bind(('', 0))
            ports.append(sock)
        # NOTE(review): the sockets are closed before the kernel binds the
        # ports, so another process could grab one in between -- TODO confirm
        # this TOCTOU window is acceptable.
        for i, sock in enumerate(ports):
            port = sock.getsockname()[1]
            sock.close()
            ports[i] = port
    else:
        # Non-tcp transport: "ports" are numeric suffixes on the ip/path;
        # pick the lowest unused ones.
        N = 1
        for i in range(ports_needed):
            while os.path.exists("%s-%s" % (ip, str(N))):
                N += 1
            ports.append(N)
            N += 1
    # Assign the picked ports to whichever channels asked for random ones,
    # in a fixed order.
    if shell_port <= 0:
        shell_port = ports.pop(0)
    if iopub_port <= 0:
        iopub_port = ports.pop(0)
    if stdin_port <= 0:
        stdin_port = ports.pop(0)
    if control_port <= 0:
        control_port = ports.pop(0)
    if hb_port <= 0:
        hb_port = ports.pop(0)

    cfg = dict( shell_port=shell_port,
                iopub_port=iopub_port,
                stdin_port=stdin_port,
                control_port=control_port,
                hb_port=hb_port,
              )
    cfg['ip'] = ip
    # key may be bytes; the JSON file stores it as a string.
    cfg['key'] = bytes_to_str(key)
    cfg['transport'] = transport
    cfg['signature_scheme'] = signature_scheme

    with open(fname, 'w') as f:
        f.write(json.dumps(cfg, indent=2))

    return fname, cfg
143 143
144 144
def get_connection_file(app=None):
    """Return the path to the connection file of an app

    Parameters
    ----------
    app : IPKernelApp instance [optional]
        If unspecified, the currently running app will be used
    """
    if app is None:
        # Fall back on the kernel app we are running inside, if any.
        from jupyter_client.kernelapp import IPKernelApp
        if not IPKernelApp.initialized():
            raise RuntimeError("app not specified, and not in a running Kernel")
        app = IPKernelApp.instance()

    search_path = ['.', app.profile_dir.security_dir]
    return filefind(app.connection_file, search_path)
160 160
161 161
def find_connection_file(filename='kernel-*.json', profile=None):
    """find a connection file, and return its absolute path.

    The current working directory and the profile's security
    directory will be searched for the file if it is not given by
    absolute path.

    If profile is unspecified, then the current running application's
    profile will be used, or 'default', if not run from IPython.

    If the argument does not match an existing file, it will be interpreted as a
    fileglob, and the matching file in the profile's security dir with
    the latest access time will be used.

    Parameters
    ----------
    filename : str
        The connection file or fileglob to search for.
    profile : str [optional]
        The name of the profile to use when searching for the connection file,
        if different from the current IPython session or 'default'.

    Returns
    -------
    str : The absolute path of the connection file.
    """
    from IPython.core.application import BaseIPythonApplication as IPApp

    # Quick check for an absolute/direct path before any profile logic.
    try:
        return filefind(filename)
    except IOError:
        pass

    # Resolve the profile directory to search in.
    if profile is not None:
        # explicit profile name given
        profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile)
    elif IPApp.initialized():
        # profile unspecified, but we are running from an IPython app
        profile_dir = IPApp.instance().profile_dir
    else:
        # not running in IPython, use default profile
        profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), 'default')
    security_dir = profile_dir.security_dir

    # Try the explicit name in cwd and the security dir.
    try:
        return filefind(filename, ['.', security_dir])
    except IOError:
        pass

    # Not found by full name: treat the argument as a glob.
    if '*' in filename:
        pat = filename
    else:
        # accept any substring match
        pat = '*%s*' % filename
    matches = glob.glob(os.path.join(security_dir, pat))

    if not matches:
        raise IOError("Could not find %r in %r" % (filename, security_dir))
    if len(matches) == 1:
        return matches[0]
    # Several matches: take the most recently accessed one.
    return sorted(matches, key=lambda f: os.stat(f).st_atime)[-1]
230 230
231 231
def get_connection_info(connection_file=None, unpack=False, profile=None):
    """Return the connection information for the current Kernel.

    Parameters
    ----------
    connection_file : str [optional]
        The connection file to be used. Can be given by absolute path, or
        IPython will search in the security directory of a given profile.
        If run from IPython,

        If unspecified, the connection file for the currently running
        IPython Kernel will be used, which is only allowed from inside a kernel.
    unpack : bool [default: False]
        if True, return the unpacked dict, otherwise just the string contents
        of the file.
    profile : str [optional]
        The name of the profile to use when searching for the connection file,
        if different from the current IPython session or 'default'.


    Returns
    -------
    The connection dictionary of the current kernel, as string or dict,
    depending on `unpack`.
    """
    if connection_file is None:
        # No file given: use the connection file of the current kernel.
        path = get_connection_file()
    else:
        # connection file specified, allow shortnames:
        path = find_connection_file(connection_file, profile=profile)

    with open(path) as f:
        contents = f.read()

    if not unpack:
        return contents

    info = json.loads(contents)
    # ensure key is bytes:
    info['key'] = str_to_bytes(info.get('key', ''))
    return info
272 272
273 273
def connect_qtconsole(connection_file=None, argv=None, profile=None):
    """Connect a qtconsole to the current kernel.

    This is useful for connecting a second qtconsole to a kernel, or to a
    local notebook.

    Parameters
    ----------
    connection_file : str [optional]
        The connection file to be used. Can be given by absolute path, or
        IPython will search in the security directory of a given profile.
        If run from IPython,

        If unspecified, the connection file for the currently running
        IPython Kernel will be used, which is only allowed from inside a kernel.
    argv : list [optional]
        Any extra args to be passed to the console.
    profile : str [optional]
        The name of the profile to use when searching for the connection file,
        if different from the current IPython session or 'default'.


    Returns
    -------
    :class:`subprocess.Popen` instance running the qtconsole frontend
    """
    if argv is None:
        argv = []

    if connection_file is None:
        # get connection file from current kernel
        cf = get_connection_file()
    else:
        cf = find_connection_file(connection_file, profile=profile)

    # Launch the console as a subprocess pointed at the existing kernel.
    cmd = "from IPython.qt.console import qtconsoleapp;qtconsoleapp.main()"

    args = [sys.executable, '-c', cmd, '--existing', cf]
    args.extend(argv)
    return Popen(args,
                 stdout=PIPE, stderr=PIPE,
                 close_fds=(sys.platform != 'win32'),
                 )
316 316
317 317
def tunnel_to_kernel(connection_info, sshserver, sshkey=None):
    """tunnel connections to a kernel via ssh

    This will open four SSH tunnels from localhost on this machine to the
    ports associated with the kernel.  They can be either direct
    localhost-localhost tunnels, or if an intermediate server is necessary,
    the kernel must be listening on a public IP.

    Parameters
    ----------
    connection_info : dict or str (path)
        Either a connection dict, or the path to a JSON connection file
    sshserver : str
        The ssh sever to use to tunnel to the kernel. Can be a full
        `user@server:port` string. ssh config aliases are respected.
    sshkey : str [optional]
        Path to file containing ssh key to use for authentication.
        Only necessary if your ssh config does not already associate
        a keyfile with the host.

    Returns
    -------

    (shell, iopub, stdin, hb) : ints
        The four ports on localhost that have been forwarded to the kernel.
    """
    from zmq.ssh import tunnel
    if isinstance(connection_info, string_types):
        # it's a path, unpack it
        with open(connection_info) as f:
            connection_info = json.loads(f.read())

    cf = connection_info

    # Pick four free local ports to forward, one per channel.
    lports = tunnel.select_random_ports(4)
    rports = cf['shell_port'], cf['iopub_port'], cf['stdin_port'], cf['hb_port']

    remote_ip = cf['ip']

    # Only prompt for a password if passwordless ssh is not available.
    if tunnel.try_passwordless_ssh(sshserver, sshkey):
        password=False
    else:
        password = getpass("SSH Password for %s: " % cast_bytes_py2(sshserver))

    # Open one tunnel per (local, remote) port pair.
    for lp,rp in zip(lports, rports):
        tunnel.ssh_tunnel(lp, rp, sshserver, remote_ip, sshkey, password)

    return tuple(lports)
366 366
367 367
368 368 #-----------------------------------------------------------------------------
369 369 # Mixin for classes that work with connection files
370 370 #-----------------------------------------------------------------------------
371 371
372 372 channel_socket_types = {
373 373 'hb' : zmq.REQ,
374 374 'shell' : zmq.DEALER,
375 375 'iopub' : zmq.SUB,
376 376 'stdin' : zmq.DEALER,
377 377 'control': zmq.DEALER,
378 378 }
379 379
380 380 port_names = [ "%s_port" % channel for channel in ('shell', 'stdin', 'iopub', 'hb', 'control')]
381 381
382 382 class ConnectionFileMixin(LoggingConfigurable):
383 383 """Mixin for configurable classes that work with connection files"""
384 384
385 385 # The addresses for the communication channels
386 386 connection_file = Unicode('', config=True,
387 387 help="""JSON file in which to store connection info [default: kernel-<pid>.json]
388 388
389 389 This file will contain the IP, ports, and authentication key needed to connect
390 390 clients to this kernel. By default, this file will be created in the security dir
391 391 of the current profile, but can be specified by absolute path.
392 392 """)
393 393 _connection_file_written = Bool(False)
394 394
395 395 transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True)
396 396
397 397 ip = Unicode(config=True,
398 398 help="""Set the kernel\'s IP address [default localhost].
399 399 If the IP address is something other than localhost, then
400 400 Consoles on other machines will be able to connect
401 401 to the Kernel, so be careful!"""
402 402 )
403 403
404 404 def _ip_default(self):
405 405 if self.transport == 'ipc':
406 406 if self.connection_file:
407 407 return os.path.splitext(self.connection_file)[0] + '-ipc'
408 408 else:
409 409 return 'kernel-ipc'
410 410 else:
411 411 return localhost()
412 412
413 413 def _ip_changed(self, name, old, new):
414 414 if new == '*':
415 415 self.ip = '0.0.0.0'
416 416
417 417 # protected traits
418 418
419 419 hb_port = Integer(0, config=True,
420 420 help="set the heartbeat port [default: random]")
421 421 shell_port = Integer(0, config=True,
422 422 help="set the shell (ROUTER) port [default: random]")
423 423 iopub_port = Integer(0, config=True,
424 424 help="set the iopub (PUB) port [default: random]")
425 425 stdin_port = Integer(0, config=True,
426 426 help="set the stdin (ROUTER) port [default: random]")
427 427 control_port = Integer(0, config=True,
428 428 help="set the control (ROUTER) port [default: random]")
429 429
430 430 @property
431 431 def ports(self):
432 432 return [ getattr(self, name) for name in port_names ]
433 433
434 434 # The Session to use for communication with the kernel.
435 session = Instance('IPython.kernel.zmq.session.Session')
435 session = Instance('jupyter_client.session.Session')
436 436 def _session_default(self):
437 from IPython.kernel.zmq.session import Session
437 from jupyter_client.session import Session
438 438 return Session(parent=self)
439 439
440 440 #--------------------------------------------------------------------------
441 441 # Connection and ipc file management
442 442 #--------------------------------------------------------------------------
443 443
444 444 def get_connection_info(self):
445 445 """return the connection info as a dict"""
446 446 return dict(
447 447 transport=self.transport,
448 448 ip=self.ip,
449 449 shell_port=self.shell_port,
450 450 iopub_port=self.iopub_port,
451 451 stdin_port=self.stdin_port,
452 452 hb_port=self.hb_port,
453 453 control_port=self.control_port,
454 454 signature_scheme=self.session.signature_scheme,
455 455 key=self.session.key,
456 456 )
457 457
458 458 def cleanup_connection_file(self):
459 459 """Cleanup connection file *if we wrote it*
460 460
461 461 Will not raise if the connection file was already removed somehow.
462 462 """
463 463 if self._connection_file_written:
464 464 # cleanup connection files on full shutdown of kernel we started
465 465 self._connection_file_written = False
466 466 try:
467 467 os.remove(self.connection_file)
468 468 except (IOError, OSError, AttributeError):
469 469 pass
470 470
471 471 def cleanup_ipc_files(self):
472 472 """Cleanup ipc files if we wrote them."""
473 473 if self.transport != 'ipc':
474 474 return
475 475 for port in self.ports:
476 476 ipcfile = "%s-%i" % (self.ip, port)
477 477 try:
478 478 os.remove(ipcfile)
479 479 except (IOError, OSError):
480 480 pass
481 481
482 482 def write_connection_file(self):
483 483 """Write connection info to JSON dict in self.connection_file."""
484 484 if self._connection_file_written and os.path.exists(self.connection_file):
485 485 return
486 486
487 487 self.connection_file, cfg = write_connection_file(self.connection_file,
488 488 transport=self.transport, ip=self.ip, key=self.session.key,
489 489 stdin_port=self.stdin_port, iopub_port=self.iopub_port,
490 490 shell_port=self.shell_port, hb_port=self.hb_port,
491 491 control_port=self.control_port,
492 492 signature_scheme=self.session.signature_scheme,
493 493 )
494 494 # write_connection_file also sets default ports:
495 495 for name in port_names:
496 496 setattr(self, name, cfg[name])
497 497
498 498 self._connection_file_written = True
499 499
500 500 def load_connection_file(self):
501 501 """Load connection info from JSON dict in self.connection_file."""
502 502 self.log.debug(u"Loading connection file %s", self.connection_file)
503 503 with open(self.connection_file) as f:
504 504 cfg = json.load(f)
505 505 self.transport = cfg.get('transport', self.transport)
506 506 self.ip = cfg.get('ip', self._ip_default())
507 507
508 508 for name in port_names:
509 509 if getattr(self, name) == 0 and name in cfg:
510 510 # not overridden by config or cl_args
511 511 setattr(self, name, cfg[name])
512 512
513 513 if 'key' in cfg:
514 514 self.session.key = str_to_bytes(cfg['key'])
515 515 if 'signature_scheme' in cfg:
516 516 self.session.signature_scheme = cfg['signature_scheme']
517 517
518 518 #--------------------------------------------------------------------------
519 519 # Creating connected sockets
520 520 #--------------------------------------------------------------------------
521 521
522 522 def _make_url(self, channel):
523 523 """Make a ZeroMQ URL for a given channel."""
524 524 transport = self.transport
525 525 ip = self.ip
526 526 port = getattr(self, '%s_port' % channel)
527 527
528 528 if transport == 'tcp':
529 529 return "tcp://%s:%i" % (ip, port)
530 530 else:
531 531 return "%s://%s-%s" % (transport, ip, port)
532 532
533 533 def _create_connected_socket(self, channel, identity=None):
534 534 """Create a zmq Socket and connect it to the kernel."""
535 535 url = self._make_url(channel)
536 536 socket_type = channel_socket_types[channel]
537 537 self.log.debug("Connecting to: %s" % url)
538 538 sock = self.context.socket(socket_type)
539 539 # set linger to 1s to prevent hangs at exit
540 540 sock.linger = 1000
541 541 if identity:
542 542 sock.identity = identity
543 543 sock.connect(url)
544 544 return sock
545 545
546 546 def connect_iopub(self, identity=None):
547 547 """return zmq Socket connected to the IOPub channel"""
548 548 sock = self._create_connected_socket('iopub', identity=identity)
549 549 sock.setsockopt(zmq.SUBSCRIBE, b'')
550 550 return sock
551 551
552 552 def connect_shell(self, identity=None):
553 553 """return zmq Socket connected to the Shell channel"""
554 554 return self._create_connected_socket('shell', identity=identity)
555 555
556 556 def connect_stdin(self, identity=None):
557 557 """return zmq Socket connected to the StdIn channel"""
558 558 return self._create_connected_socket('stdin', identity=identity)
559 559
560 560 def connect_hb(self, identity=None):
561 561 """return zmq Socket connected to the Heartbeat channel"""
562 562 return self._create_connected_socket('hb', identity=identity)
563 563
564 564 def connect_control(self, identity=None):
565 565 """return zmq Socket connected to the Control channel"""
566 566 return self._create_connected_socket('control', identity=identity)
567 567
568 568
569 569 __all__ = [
570 570 'write_connection_file',
571 571 'get_connection_file',
572 572 'find_connection_file',
573 573 'get_connection_info',
574 574 'connect_qtconsole',
575 575 'tunnel_to_kernel',
576 576 ]
@@ -1,62 +1,62 b''
1 1 """A kernel manager with a tornado IOLoop"""
2 2
3 3 #-----------------------------------------------------------------------------
4 4 # Copyright (C) 2013 The IPython Development Team
5 5 #
6 6 # Distributed under the terms of the BSD License. The full license is in
7 7 # the file COPYING, distributed as part of this software.
8 8 #-----------------------------------------------------------------------------
9 9
10 10 #-----------------------------------------------------------------------------
11 11 # Imports
12 12 #-----------------------------------------------------------------------------
13 13
14 14 from __future__ import absolute_import
15 15
16 16 from zmq.eventloop import ioloop
17 17 from zmq.eventloop.zmqstream import ZMQStream
18 18
19 19 from IPython.utils.traitlets import (
20 20 Instance
21 21 )
22 22
23 from IPython.kernel.manager import KernelManager
23 from jupyter_client.manager import KernelManager
24 24 from .restarter import IOLoopKernelRestarter
25 25
26 26 #-----------------------------------------------------------------------------
27 27 # Code
28 28 #-----------------------------------------------------------------------------
29 29
30 30
31 31 def as_zmqstream(f):
32 32 def wrapped(self, *args, **kwargs):
33 33 socket = f(self, *args, **kwargs)
34 34 return ZMQStream(socket, self.loop)
35 35 return wrapped
36 36
37 37 class IOLoopKernelManager(KernelManager):
38 38
39 39 loop = Instance('zmq.eventloop.ioloop.IOLoop')
40 40 def _loop_default(self):
41 41 return ioloop.IOLoop.instance()
42 42
43 _restarter = Instance('IPython.kernel.ioloop.IOLoopKernelRestarter', allow_none=True)
43 _restarter = Instance('jupyter_client.ioloop.IOLoopKernelRestarter', allow_none=True)
44 44
45 45 def start_restarter(self):
46 46 if self.autorestart and self.has_kernel:
47 47 if self._restarter is None:
48 48 self._restarter = IOLoopKernelRestarter(
49 49 kernel_manager=self, loop=self.loop,
50 50 parent=self, log=self.log
51 51 )
52 52 self._restarter.start()
53 53
54 54 def stop_restarter(self):
55 55 if self.autorestart:
56 56 if self._restarter is not None:
57 57 self._restarter.stop()
58 58
59 59 connect_shell = as_zmqstream(KernelManager.connect_shell)
60 60 connect_iopub = as_zmqstream(KernelManager.connect_iopub)
61 61 connect_stdin = as_zmqstream(KernelManager.connect_stdin)
62 62 connect_hb = as_zmqstream(KernelManager.connect_hb)
@@ -1,53 +1,53 b''
1 1 """A basic in process kernel monitor with autorestarting.
2 2
3 3 This watches a kernel's state using KernelManager.is_alive and auto
4 4 restarts the kernel if it dies.
5 5 """
6 6
7 7 #-----------------------------------------------------------------------------
8 8 # Copyright (C) 2013 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-----------------------------------------------------------------------------
13 13
14 14 #-----------------------------------------------------------------------------
15 15 # Imports
16 16 #-----------------------------------------------------------------------------
17 17
18 18 from __future__ import absolute_import
19 19
20 20 from zmq.eventloop import ioloop
21 21
22 22
23 from IPython.kernel.restarter import KernelRestarter
23 from jupyter_client.restarter import KernelRestarter
24 24 from IPython.utils.traitlets import (
25 25 Instance,
26 26 )
27 27
28 28 #-----------------------------------------------------------------------------
29 29 # Code
30 30 #-----------------------------------------------------------------------------
31 31
32 32 class IOLoopKernelRestarter(KernelRestarter):
33 33 """Monitor and autorestart a kernel."""
34 34
35 35 loop = Instance('zmq.eventloop.ioloop.IOLoop')
36 36 def _loop_default(self):
37 37 return ioloop.IOLoop.instance()
38 38
39 39 _pcallback = None
40 40
41 41 def start(self):
42 42 """Start the polling of the kernel."""
43 43 if self._pcallback is None:
44 44 self._pcallback = ioloop.PeriodicCallback(
45 45 self.poll, 1000*self.time_to_dead, self.loop
46 46 )
47 47 self._pcallback.start()
48 48
49 49 def stop(self):
50 50 """Stop the kernel polling."""
51 51 if self._pcallback is not None:
52 52 self._pcallback.stop()
53 53 self._pcallback = None
@@ -1,226 +1,226 b''
1 1 """Utilities for launching kernels
2 2 """
3 3
4 4 # Copyright (c) IPython Development Team.
5 5 # Distributed under the terms of the Modified BSD License.
6 6
7 7 import os
8 8 import sys
9 9 from subprocess import Popen, PIPE
10 10
11 11 from IPython.utils.encoding import getdefaultencoding
12 12 from IPython.utils.py3compat import cast_bytes_py2
13 13
14 14
15 15 def swallow_argv(argv, aliases=None, flags=None):
16 16 """strip frontend-specific aliases and flags from an argument list
17 17
18 18 For use primarily in frontend apps that want to pass a subset of command-line
19 19 arguments through to a subprocess, where frontend-specific flags and aliases
20 20 should be removed from the list.
21 21
22 22 Parameters
23 23 ----------
24 24
25 25 argv : list(str)
26 26 The starting argv, to be filtered
27 27 aliases : container of aliases (dict, list, set, etc.)
28 28 The frontend-specific aliases to be removed
29 29 flags : container of flags (dict, list, set, etc.)
30 30 The frontend-specific flags to be removed
31 31
32 32 Returns
33 33 -------
34 34
35 35 argv : list(str)
36 36 The argv list, excluding flags and aliases that have been stripped
37 37 """
38 38
39 39 if aliases is None:
40 40 aliases = set()
41 41 if flags is None:
42 42 flags = set()
43 43
44 44 stripped = list(argv) # copy
45 45
46 46 swallow_next = False
47 47 was_flag = False
48 48 for a in argv:
49 49 if a == '--':
50 50 break
51 51 if swallow_next:
52 52 swallow_next = False
53 53 # last arg was an alias, remove the next one
54 54 # *unless* the last alias has a no-arg flag version, in which
55 55 # case, don't swallow the next arg if it's also a flag:
56 56 if not (was_flag and a.startswith('-')):
57 57 stripped.remove(a)
58 58 continue
59 59 if a.startswith('-'):
60 60 split = a.lstrip('-').split('=')
61 61 name = split[0]
62 62 # we use startswith because argparse accepts any arg to be specified
63 63 # by any leading section, as long as it is unique,
64 64 # so `--no-br` means `--no-browser` in the notebook, etc.
65 65 if any(alias.startswith(name) for alias in aliases):
66 66 stripped.remove(a)
67 67 if len(split) == 1:
68 68 # alias passed with arg via space
69 69 swallow_next = True
70 70 # could have been a flag that matches an alias, e.g. `existing`
71 71 # in which case, we might not swallow the next arg
72 72 was_flag = name in flags
73 73 elif len(split) == 1 and any(flag.startswith(name) for flag in flags):
74 74 # strip flag, but don't swallow next, as flags don't take args
75 75 stripped.remove(a)
76 76
77 77 # return shortened list
78 78 return stripped
79 79
80 80
81 81 def make_ipkernel_cmd(mod='IPython.kernel', executable=None, extra_arguments=[], **kw):
82 82 """Build Popen command list for launching an IPython kernel.
83 83
84 84 Parameters
85 85 ----------
86 86 mod : str, optional (default 'IPython.kernel')
87 87 A string of an IPython module whose __main__ starts an IPython kernel
88 88
89 89 executable : str, optional (default sys.executable)
90 90 The Python executable to use for the kernel process.
91 91
92 92 extra_arguments : list, optional
93 93 A list of extra arguments to pass when executing the launch code.
94 94
95 95 Returns
96 96 -------
97 97
98 98 A Popen command list
99 99 """
100 100 if executable is None:
101 101 executable = sys.executable
102 102 arguments = [ executable, '-m', mod, '-f', '{connection_file}' ]
103 103 arguments.extend(extra_arguments)
104 104
105 105 return arguments
106 106
107 107
108 108 def launch_kernel(cmd, stdin=None, stdout=None, stderr=None, env=None,
109 109 independent=False,
110 110 cwd=None,
111 111 **kw
112 112 ):
113 113 """ Launches a localhost kernel, binding to the specified ports.
114 114
115 115 Parameters
116 116 ----------
117 117 cmd : Popen list,
118 118 A string of Python code that imports and executes a kernel entry point.
119 119
120 120 stdin, stdout, stderr : optional (default None)
121 121 Standards streams, as defined in subprocess.Popen.
122 122
123 123 independent : bool, optional (default False)
124 124 If set, the kernel process is guaranteed to survive if this process
125 125 dies. If not set, an effort is made to ensure that the kernel is killed
126 126 when this process dies. Note that in this case it is still good practice
127 127 to kill kernels manually before exiting.
128 128
129 129 cwd : path, optional
130 130 The working dir of the kernel process (default: cwd of this process).
131 131
132 132 Returns
133 133 -------
134 134
135 135 Popen instance for the kernel subprocess
136 136 """
137 137
138 138 # Popen will fail (sometimes with a deadlock) if stdin, stdout, and stderr
139 139 # are invalid. Unfortunately, there is in general no way to detect whether
140 140 # they are valid. The following two blocks redirect them to (temporary)
141 141 # pipes in certain important cases.
142 142
143 143 # If this process has been backgrounded, our stdin is invalid. Since there
144 144 # is no compelling reason for the kernel to inherit our stdin anyway, we'll
145 145 play it safe and always redirect.
146 146 redirect_in = True
147 147 _stdin = PIPE if stdin is None else stdin
148 148
149 149 # If this process in running on pythonw, we know that stdin, stdout, and
150 150 # stderr are all invalid.
151 151 redirect_out = sys.executable.endswith('pythonw.exe')
152 152 if redirect_out:
153 153 blackhole = open(os.devnull, 'w')
154 154 _stdout = blackhole if stdout is None else stdout
155 155 _stderr = blackhole if stderr is None else stderr
156 156 else:
157 157 _stdout, _stderr = stdout, stderr
158 158
159 159 env = env if (env is not None) else os.environ.copy()
160 160
161 161 encoding = getdefaultencoding(prefer_stream=False)
162 162 kwargs = dict(
163 163 stdin=_stdin,
164 164 stdout=_stdout,
165 165 stderr=_stderr,
166 166 cwd=cwd,
167 167 env=env,
168 168 )
169 169
170 170 # Spawn a kernel.
171 171 if sys.platform == 'win32':
172 172 # Popen on Python 2 on Windows cannot handle unicode args or cwd
173 173 cmd = [ cast_bytes_py2(c, encoding) for c in cmd ]
174 174 if cwd:
175 175 cwd = cast_bytes_py2(cwd, sys.getfilesystemencoding() or 'ascii')
176 176 kwargs['cwd'] = cwd
177 177
178 from IPython.kernel.zmq.parentpoller import ParentPollerWindows
178 from jupyter_client.parentpoller import ParentPollerWindows
179 179 # Create a Win32 event for interrupting the kernel
180 180 # and store it in an environment variable.
181 181 interrupt_event = ParentPollerWindows.create_interrupt_event()
182 182 env["JPY_INTERRUPT_EVENT"] = str(interrupt_event)
183 183 # deprecated old env name:
184 184 env["IPY_INTERRUPT_EVENT"] = env["JPY_INTERRUPT_EVENT"]
185 185
186 186 try:
187 187 from _winapi import DuplicateHandle, GetCurrentProcess, \
188 188 DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP
189 189 except:
190 190 from _subprocess import DuplicateHandle, GetCurrentProcess, \
191 191 DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP
192 192 # Launch the kernel process
193 193 if independent:
194 194 kwargs['creationflags'] = CREATE_NEW_PROCESS_GROUP
195 195 else:
196 196 pid = GetCurrentProcess()
197 197 handle = DuplicateHandle(pid, pid, pid, 0,
198 198 True, # Inheritable by new processes.
199 199 DUPLICATE_SAME_ACCESS)
200 200 env['JPY_PARENT_PID'] = str(int(handle))
201 201
202 202 proc = Popen(cmd, **kwargs)
203 203
204 204 # Attach the interrupt event to the Popen object so it can be used later.
205 205 proc.win32_interrupt_event = interrupt_event
206 206
207 207 else:
208 208 if independent:
209 209 kwargs['preexec_fn'] = lambda: os.setsid()
210 210 else:
211 211 env['JPY_PARENT_PID'] = str(os.getpid())
212 212
213 213 proc = Popen(cmd, **kwargs)
214 214
215 215 # Clean up pipes created to work around Popen bug.
216 216 if redirect_in:
217 217 if stdin is None:
218 218 proc.stdin.close()
219 219
220 220 return proc
221 221
222 222 __all__ = [
223 223 'swallow_argv',
224 224 'make_ipkernel_cmd',
225 225 'launch_kernel',
226 226 ]
@@ -1,442 +1,442 b''
1 1 """Base class to manage a running kernel"""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 from __future__ import absolute_import
7 7
8 8 from contextlib import contextmanager
9 9 import os
10 10 import re
11 11 import signal
12 12 import sys
13 13 import time
14 14 import warnings
15 15 try:
16 16 from queue import Empty # Py 3
17 17 except ImportError:
18 18 from Queue import Empty # Py 2
19 19
20 20 import zmq
21 21
22 22 from IPython.utils.importstring import import_item
23 23 from IPython.utils.localinterfaces import is_local_ip, local_ips
24 24 from IPython.utils.path import get_ipython_dir
25 25 from IPython.utils.traitlets import (
26 26 Any, Instance, Unicode, List, Bool, Type, DottedObjectName
27 27 )
28 from IPython.kernel import (
28 from jupyter_client import (
29 29 launch_kernel,
30 30 kernelspec,
31 31 )
32 32 from .connect import ConnectionFileMixin
33 from .zmq.session import Session
33 from .session import Session
34 34 from .managerabc import (
35 35 KernelManagerABC
36 36 )
37 37
38 38
39 39 class KernelManager(ConnectionFileMixin):
40 40 """Manages a single kernel in a subprocess on this host.
41 41
42 42 This version starts kernels with Popen.
43 43 """
44 44
45 45 # The PyZMQ Context to use for communication with the kernel.
46 46 context = Instance(zmq.Context)
47 47 def _context_default(self):
48 48 return zmq.Context.instance()
49 49
50 50 # the class to create with our `client` method
51 client_class = DottedObjectName('IPython.kernel.blocking.BlockingKernelClient')
51 client_class = DottedObjectName('jupyter_client.blocking.BlockingKernelClient')
52 52 client_factory = Type(allow_none=True)
53 53 def _client_class_changed(self, name, old, new):
54 54 self.client_factory = import_item(str(new))
55 55
56 56 # The kernel process with which the KernelManager is communicating.
57 57 # generally a Popen instance
58 58 kernel = Any()
59 59
60 60 kernel_spec_manager = Instance(kernelspec.KernelSpecManager)
61 61
62 62 def _kernel_spec_manager_default(self):
63 63 return kernelspec.KernelSpecManager(ipython_dir=self.ipython_dir)
64 64
65 65 kernel_name = Unicode(kernelspec.NATIVE_KERNEL_NAME)
66 66
67 67 kernel_spec = Instance(kernelspec.KernelSpec)
68 68
69 69 def _kernel_spec_default(self):
70 70 return self.kernel_spec_manager.get_kernel_spec(self.kernel_name)
71 71
72 72 def _kernel_name_changed(self, name, old, new):
73 73 if new == 'python':
74 74 self.kernel_name = kernelspec.NATIVE_KERNEL_NAME
75 75 # This triggered another run of this function, so we can exit now
76 76 return
77 77 self.kernel_spec = self.kernel_spec_manager.get_kernel_spec(new)
78 78 self.ipython_kernel = new in {'python', 'python2', 'python3'}
79 79
80 80 kernel_cmd = List(Unicode, config=True,
81 81 help="""DEPRECATED: Use kernel_name instead.
82 82
83 83 The Popen Command to launch the kernel.
84 84 Override this if you have a custom kernel.
85 85 If kernel_cmd is specified in a configuration file,
86 86 IPython does not pass any arguments to the kernel,
87 87 because it cannot make any assumptions about the
88 88 arguments that the kernel understands. In particular,
89 89 this means that the kernel does not receive the
90 90 option --debug if it given on the IPython command line.
91 91 """
92 92 )
93 93
94 94 def _kernel_cmd_changed(self, name, old, new):
95 95 warnings.warn("Setting kernel_cmd is deprecated, use kernel_spec to "
96 96 "start different kernels.")
97 97 self.ipython_kernel = False
98 98
99 99 ipython_kernel = Bool(True)
100 100
101 101 ipython_dir = Unicode()
102 102 def _ipython_dir_default(self):
103 103 return get_ipython_dir()
104 104
105 105 # Protected traits
106 106 _launch_args = Any()
107 107 _control_socket = Any()
108 108
109 109 _restarter = Any()
110 110
111 111 autorestart = Bool(False, config=True,
112 112 help="""Should we autorestart the kernel if it dies."""
113 113 )
114 114
115 115 def __del__(self):
116 116 self._close_control_socket()
117 117 self.cleanup_connection_file()
118 118
119 119 #--------------------------------------------------------------------------
120 120 # Kernel restarter
121 121 #--------------------------------------------------------------------------
122 122
123 123 def start_restarter(self):
124 124 pass
125 125
126 126 def stop_restarter(self):
127 127 pass
128 128
129 129 def add_restart_callback(self, callback, event='restart'):
130 130 """register a callback to be called when a kernel is restarted"""
131 131 if self._restarter is None:
132 132 return
133 133 self._restarter.add_callback(callback, event)
134 134
135 135 def remove_restart_callback(self, callback, event='restart'):
136 136 """unregister a callback to be called when a kernel is restarted"""
137 137 if self._restarter is None:
138 138 return
139 139 self._restarter.remove_callback(callback, event)
140 140
141 141 #--------------------------------------------------------------------------
142 142 # create a Client connected to our Kernel
143 143 #--------------------------------------------------------------------------
144 144
145 145 def client(self, **kwargs):
146 146 """Create a client configured to connect to our kernel"""
147 147 if self.client_factory is None:
148 148 self.client_factory = import_item(self.client_class)
149 149
150 150 kw = {}
151 151 kw.update(self.get_connection_info())
152 152 kw.update(dict(
153 153 connection_file=self.connection_file,
154 154 session=self.session,
155 155 parent=self,
156 156 ))
157 157
158 158 # add kwargs last, for manual overrides
159 159 kw.update(kwargs)
160 160 return self.client_factory(**kw)
161 161
162 162 #--------------------------------------------------------------------------
163 163 # Kernel management
164 164 #--------------------------------------------------------------------------
165 165
166 166 def format_kernel_cmd(self, extra_arguments=None):
167 167 """replace templated args (e.g. {connection_file})"""
168 168 extra_arguments = extra_arguments or []
169 169 if self.kernel_cmd:
170 170 cmd = self.kernel_cmd + extra_arguments
171 171 else:
172 172 cmd = self.kernel_spec.argv + extra_arguments
173 173
174 174 ns = dict(connection_file=self.connection_file)
175 175 ns.update(self._launch_args)
176 176
177 177 pat = re.compile(r'\{([A-Za-z0-9_]+)\}')
178 178 def from_ns(match):
179 179 """Get the key out of ns if it's there, otherwise no change."""
180 180 return ns.get(match.group(1), match.group())
181 181
182 182 return [ pat.sub(from_ns, arg) for arg in cmd ]
183 183
184 184 def _launch_kernel(self, kernel_cmd, **kw):
185 185 """actually launch the kernel
186 186
187 187 override in a subclass to launch kernel subprocesses differently
188 188 """
189 189 return launch_kernel(kernel_cmd, **kw)
190 190
191 191 # Control socket used for polite kernel shutdown
192 192
193 193 def _connect_control_socket(self):
194 194 if self._control_socket is None:
195 195 self._control_socket = self.connect_control()
196 196 self._control_socket.linger = 100
197 197
198 198 def _close_control_socket(self):
199 199 if self._control_socket is None:
200 200 return
201 201 self._control_socket.close()
202 202 self._control_socket = None
203 203
204 204 def start_kernel(self, **kw):
205 205 """Starts a kernel on this host in a separate process.
206 206
207 207 If random ports (port=0) are being used, this method must be called
208 208 before the channels are created.
209 209
210 210 Parameters
211 211 ----------
212 212 **kw : optional
213 213 keyword arguments that are passed down to build the kernel_cmd
214 214 and launching the kernel (e.g. Popen kwargs).
215 215 """
216 216 if self.transport == 'tcp' and not is_local_ip(self.ip):
217 217 raise RuntimeError("Can only launch a kernel on a local interface. "
218 218 "Make sure that the '*_address' attributes are "
219 219 "configured properly. "
220 220 "Currently valid addresses are: %s" % local_ips()
221 221 )
222 222
223 223 # write connection file / get default ports
224 224 self.write_connection_file()
225 225
226 226 # save kwargs for use in restart
227 227 self._launch_args = kw.copy()
228 228 # build the Popen cmd
229 229 extra_arguments = kw.pop('extra_arguments', [])
230 230 kernel_cmd = self.format_kernel_cmd(extra_arguments=extra_arguments)
231 231 if self.kernel_cmd:
232 232 # If kernel_cmd has been set manually, don't refer to a kernel spec
233 233 env = os.environ
234 234 else:
235 235 # Environment variables from kernel spec are added to os.environ
236 236 env = os.environ.copy()
237 237 env.update(self.kernel_spec.env or {})
238 238 # launch the kernel subprocess
239 239 self.kernel = self._launch_kernel(kernel_cmd, env=env,
240 240 **kw)
241 241 self.start_restarter()
242 242 self._connect_control_socket()
243 243
244 244 def request_shutdown(self, restart=False):
245 245 """Send a shutdown request via control channel
246 246
247 247 On Windows, this just kills kernels instead, because the shutdown
248 248 messages don't work.
249 249 """
250 250 content = dict(restart=restart)
251 251 msg = self.session.msg("shutdown_request", content=content)
252 252 self.session.send(self._control_socket, msg)
253 253
254 254 def finish_shutdown(self, waittime=1, pollinterval=0.1):
255 255 """Wait for kernel shutdown, then kill process if it doesn't shutdown.
256 256
257 257 This does not send shutdown requests - use :meth:`request_shutdown`
258 258 first.
259 259 """
260 260 for i in range(int(waittime/pollinterval)):
261 261 if self.is_alive():
262 262 time.sleep(pollinterval)
263 263 else:
264 264 break
265 265 else:
266 266 # OK, we've waited long enough.
267 267 if self.has_kernel:
268 268 self._kill_kernel()
269 269
270 270 def cleanup(self, connection_file=True):
271 271 """Clean up resources when the kernel is shut down"""
272 272 if connection_file:
273 273 self.cleanup_connection_file()
274 274
275 275 self.cleanup_ipc_files()
276 276 self._close_control_socket()
277 277
278 278 def shutdown_kernel(self, now=False, restart=False):
279 279 """Attempts to stop the kernel process cleanly.
280 280
281 281 This attempts to shut down the kernel cleanly by:
282 282
283 283 1. Sending it a shutdown message over the shell channel.
284 284 2. If that fails, the kernel is shutdown forcibly by sending it
285 285 a signal.
286 286
287 287 Parameters
288 288 ----------
289 289 now : bool
290 290 Should the kernel be forcible killed *now*. This skips the
291 291 first, nice shutdown attempt.
292 292 restart: bool
293 293 Will this kernel be restarted after it is shutdown. When this
294 294 is True, connection files will not be cleaned up.
295 295 """
296 296 # Stop monitoring for restarting while we shutdown.
297 297 self.stop_restarter()
298 298
299 299 if now:
300 300 self._kill_kernel()
301 301 else:
302 302 self.request_shutdown(restart=restart)
303 303 # Don't send any additional kernel kill messages immediately, to give
304 304 # the kernel a chance to properly execute shutdown actions. Wait for at
305 305 # most 1s, checking every 0.1s.
306 306 self.finish_shutdown()
307 307
308 308 self.cleanup(connection_file=not restart)
309 309
310 310 def restart_kernel(self, now=False, **kw):
311 311 """Restarts a kernel with the arguments that were used to launch it.
312 312
313 313 If the old kernel was launched with random ports, the same ports will be
314 314 used for the new kernel. The same connection file is used again.
315 315
316 316 Parameters
317 317 ----------
318 318 now : bool, optional
319 319 If True, the kernel is forcefully restarted *immediately*, without
320 320 having a chance to do any cleanup action. Otherwise the kernel is
321 321 given 1s to clean up before a forceful restart is issued.
322 322
323 323 In all cases the kernel is restarted, the only difference is whether
324 324 it is given a chance to perform a clean shutdown or not.
325 325
326 326 **kw : optional
327 327 Any options specified here will overwrite those used to launch the
328 328 kernel.
329 329 """
330 330 if self._launch_args is None:
331 331 raise RuntimeError("Cannot restart the kernel. "
332 332 "No previous call to 'start_kernel'.")
333 333 else:
334 334 # Stop currently running kernel.
335 335 self.shutdown_kernel(now=now, restart=True)
336 336
337 337 # Start new kernel.
338 338 self._launch_args.update(kw)
339 339 self.start_kernel(**self._launch_args)
340 340
    @property
    def has_kernel(self):
        """Has a kernel been started that we are managing."""
        # True once start_kernel has assigned a subprocess to self.kernel;
        # False again after _kill_kernel resets it to None.
        return self.kernel is not None
345 345
    def _kill_kernel(self):
        """Kill the running kernel.

        This is a private method, callers should use shutdown_kernel(now=True).
        """
        if self.has_kernel:

            # Signal the kernel to terminate (sends SIGKILL on Unix and calls
            # TerminateProcess() on Win32).
            try:
                self.kernel.kill()
            except OSError as e:
                # In Windows, we will get an Access Denied error if the process
                # has already terminated. Ignore it.
                if sys.platform == 'win32':
                    # winerror 5 is ERROR_ACCESS_DENIED
                    if e.winerror != 5:
                        raise
                # On Unix, we may get an ESRCH error if the process has already
                # terminated. Ignore it.
                else:
                    from errno import ESRCH
                    if e.errno != ESRCH:
                        raise

            # Block until the kernel terminates, to avoid leaving a zombie.
            self.kernel.wait()
            self.kernel = None
        else:
            raise RuntimeError("Cannot kill kernel. No kernel is running!")
375 375
376 376 def interrupt_kernel(self):
377 377 """Interrupts the kernel by sending it a signal.
378 378
379 379 Unlike ``signal_kernel``, this operation is well supported on all
380 380 platforms.
381 381 """
382 382 if self.has_kernel:
383 383 if sys.platform == 'win32':
384 from .zmq.parentpoller import ParentPollerWindows as Poller
384 from .parentpoller import ParentPollerWindows as Poller
385 385 Poller.send_interrupt(self.kernel.win32_interrupt_event)
386 386 else:
387 387 self.kernel.send_signal(signal.SIGINT)
388 388 else:
389 389 raise RuntimeError("Cannot interrupt kernel. No kernel is running!")
390 390
391 391 def signal_kernel(self, signum):
392 392 """Sends a signal to the kernel.
393 393
394 394 Note that since only SIGTERM is supported on Windows, this function is
395 395 only useful on Unix systems.
396 396 """
397 397 if self.has_kernel:
398 398 self.kernel.send_signal(signum)
399 399 else:
400 400 raise RuntimeError("Cannot signal kernel. No kernel is running!")
401 401
402 402 def is_alive(self):
403 403 """Is the kernel process still running?"""
404 404 if self.has_kernel:
405 405 if self.kernel.poll() is None:
406 406 return True
407 407 else:
408 408 return False
409 409 else:
410 410 # we don't have a kernel
411 411 return False
412 412
413 413
414 414 KernelManagerABC.register(KernelManager)
415 415
416 416
def start_new_kernel(startup_timeout=60, kernel_name='python', **kwargs):
    """Start a new kernel, and return its Manager and Client"""
    manager = KernelManager(kernel_name=kernel_name)
    manager.start_kernel(**kwargs)
    client = manager.client()
    client.start_channels()
    # Block until the kernel has answered kernel_info and IOPub is flushed.
    client.wait_for_ready()
    return manager, client
426 426
@contextmanager
def run_kernel(**kwargs):
    """Context manager to create a kernel in a subprocess.

    The kernel is shut down when the context exits.

    Returns
    -------
    kernel_client: connected KernelClient instance
    """
    manager, client = start_new_kernel(**kwargs)
    try:
        yield client
    finally:
        # Always tear down channels and the kernel process, even on error.
        client.stop_channels()
        manager.shutdown_kernel(now=True)
@@ -1,53 +1,53 b''
1 1 """Abstract base class for kernel managers."""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 import abc
7 7
8 8 from IPython.utils.py3compat import with_metaclass
9 9
10 10
class KernelManagerABC(with_metaclass(abc.ABCMeta, object)):
    """KernelManager ABC.

    The docstrings for this class can be found in the base implementation:

    `jupyter_client.kernelmanager.KernelManager`
    """

    @abc.abstractproperty
    def kernel(self):
        """The kernel process object, or None if no kernel is running."""
        pass

    #--------------------------------------------------------------------------
    # Kernel management
    #--------------------------------------------------------------------------

    @abc.abstractmethod
    def start_kernel(self, **kw):
        """Launch a new kernel process."""
        pass

    @abc.abstractmethod
    def shutdown_kernel(self, now=False, restart=False):
        """Stop the kernel process, cleanly if possible."""
        pass

    @abc.abstractmethod
    def restart_kernel(self, now=False, **kw):
        """Restart the kernel, reusing its launch arguments."""
        pass

    @abc.abstractproperty
    def has_kernel(self):
        """Whether a kernel process is currently being managed."""
        pass

    @abc.abstractmethod
    def interrupt_kernel(self):
        """Interrupt the kernel (e.g. SIGINT)."""
        pass

    @abc.abstractmethod
    def signal_kernel(self, signum):
        """Send an arbitrary signal to the kernel process."""
        pass

    @abc.abstractmethod
    def is_alive(self):
        """Whether the kernel process is still running."""
        pass
@@ -1,319 +1,319 b''
1 1 """A kernel manager for multiple kernels"""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 from __future__ import absolute_import
7 7
8 8 import os
9 9 import uuid
10 10
11 11 import zmq
12 12
13 13 from IPython.config.configurable import LoggingConfigurable
14 14 from IPython.utils.importstring import import_item
15 15 from IPython.utils.traitlets import (
16 16 Instance, Dict, List, Unicode, Any, DottedObjectName
17 17 )
18 18 from IPython.utils.py3compat import unicode_type
19 19
20 20 from .kernelspec import NATIVE_KERNEL_NAME
21 21
class DuplicateKernelError(Exception):
    """Raised when a kernel is started with a kernel_id that is already in use."""
    pass
24 24
25 25
def kernel_method(f):
    """decorator for proxying MKM.method(kernel_id) to individual KMs by ID

    The wrapped method looks up the KernelManager for ``kernel_id``,
    invokes the same-named method on it, then runs the decorated body
    (typically just logging) and returns the KernelManager's result.
    """
    from functools import wraps

    # preserve f's __name__/__doc__ on the proxy, so introspection and
    # the getattr(km, f.__name__) lookup below stay consistent
    @wraps(f)
    def wrapped(self, kernel_id, *args, **kwargs):
        # get the kernel
        km = self.get_kernel(kernel_id)
        method = getattr(km, f.__name__)
        # call the kernel's method
        r = method(*args, **kwargs)
        # last thing, call anything defined in the actual class method
        # such as logging messages
        f(self, kernel_id, *args, **kwargs)
        # return the method result
        return r
    return wrapped
40 40
41 41
class MultiKernelManager(LoggingConfigurable):
    """A class for managing multiple kernels.

    Methods decorated with ``@kernel_method`` are proxied to the
    per-kernel ``KernelManager`` instances; the bodies here only perform
    logging or MultiKernelManager-level bookkeeping.
    """

    # extra argv passed to IPython kernels only (see FIXME in start_kernel)
    ipython_kernel_argv = List(Unicode)

    default_kernel_name = Unicode(NATIVE_KERNEL_NAME, config=True,
        help="The name of the default kernel to start"
    )

    kernel_manager_class = DottedObjectName(
        "jupyter_client.ioloop.IOLoopKernelManager", config=True,
        help="""The kernel manager class. This is configurable to allow
        subclassing of the KernelManager for customized behavior.
        """
    )
    def _kernel_manager_class_changed(self, name, old, new):
        self.kernel_manager_factory = import_item(new)

    kernel_manager_factory = Any(help="this is kernel_manager_class after import")
    def _kernel_manager_factory_default(self):
        return import_item(self.kernel_manager_class)

    # shared zmq context for all managed kernels
    context = Instance('zmq.Context')
    def _context_default(self):
        return zmq.Context.instance()

    # directory in which per-kernel connection files are written
    connection_dir = Unicode('')

    # maps kernel_id (uuid string) -> KernelManager instance
    _kernels = Dict()

    def list_kernel_ids(self):
        """Return a list of the kernel ids of the active kernels."""
        # Create a copy so we can iterate over kernels in operations
        # that delete keys.
        return list(self._kernels.keys())

    def __len__(self):
        """Return the number of running kernels."""
        return len(self.list_kernel_ids())

    def __contains__(self, kernel_id):
        # membership test: `kernel_id in multi_kernel_manager`
        return kernel_id in self._kernels

    def start_kernel(self, kernel_name=None, **kwargs):
        """Start a new kernel.

        The caller can pick a kernel_id by passing one in as a keyword arg,
        otherwise one will be picked using a uuid.

        To silence the kernel's stdout/stderr, call this using::

            km.start_kernel(stdout=PIPE, stderr=PIPE)

        Returns the kernel_id of the newly started kernel.
        """
        kernel_id = kwargs.pop('kernel_id', unicode_type(uuid.uuid4()))
        if kernel_id in self:
            raise DuplicateKernelError('Kernel already exists: %s' % kernel_id)

        if kernel_name is None:
            kernel_name = self.default_kernel_name
        # kernel_manager_factory is the constructor for the KernelManager
        # subclass we are using. It can be configured as any Configurable,
        # including things like its transport and ip.
        km = self.kernel_manager_factory(connection_file=os.path.join(
                    self.connection_dir, "kernel-%s.json" % kernel_id),
                    parent=self, autorestart=True, log=self.log, kernel_name=kernel_name,
        )
        # FIXME: remove special treatment of IPython kernels
        if km.ipython_kernel:
            kwargs.setdefault('extra_arguments', self.ipython_kernel_argv)
        km.start_kernel(**kwargs)
        self._kernels[kernel_id] = km
        return kernel_id

    @kernel_method
    def shutdown_kernel(self, kernel_id, now=False, restart=False):
        """Shutdown a kernel by its kernel uuid.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to shutdown.
        now : bool
            Should the kernel be shutdown forcibly using a signal.
        restart : bool
            Will the kernel be restarted?
        """
        self.log.info("Kernel shutdown: %s" % kernel_id)
        self.remove_kernel(kernel_id)

    @kernel_method
    def request_shutdown(self, kernel_id, restart=False):
        """Ask a kernel to shut down by its kernel uuid"""

    @kernel_method
    def finish_shutdown(self, kernel_id, waittime=1, pollinterval=0.1):
        """Wait for a kernel to finish shutting down, and kill it if it doesn't
        """
        self.log.info("Kernel shutdown: %s" % kernel_id)

    @kernel_method
    def cleanup(self, kernel_id, connection_file=True):
        """Clean up a kernel's resources"""

    def remove_kernel(self, kernel_id):
        """remove a kernel from our mapping.

        Mainly so that a kernel can be removed if it is already dead,
        without having to call shutdown_kernel.

        The kernel object is returned.
        """
        return self._kernels.pop(kernel_id)

    def shutdown_all(self, now=False):
        """Shutdown all kernels."""
        kids = self.list_kernel_ids()
        # request all shutdowns first, then wait, so kernels shut down
        # concurrently rather than one at a time
        for kid in kids:
            self.request_shutdown(kid)
        for kid in kids:
            self.finish_shutdown(kid)
            self.cleanup(kid)
            self.remove_kernel(kid)

    @kernel_method
    def interrupt_kernel(self, kernel_id):
        """Interrupt (SIGINT) the kernel by its uuid.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to interrupt.
        """
        self.log.info("Kernel interrupted: %s" % kernel_id)

    @kernel_method
    def signal_kernel(self, kernel_id, signum):
        """Sends a signal to the kernel by its uuid.

        Note that since only SIGTERM is supported on Windows, this function
        is only useful on Unix systems.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to signal.
        """
        self.log.info("Signaled Kernel %s with %s" % (kernel_id, signum))

    @kernel_method
    def restart_kernel(self, kernel_id, now=False):
        """Restart a kernel by its uuid, keeping the same ports.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to restart.
        """
        self.log.info("Kernel restarted: %s" % kernel_id)

    @kernel_method
    def is_alive(self, kernel_id):
        """Is the kernel alive.

        This calls KernelManager.is_alive() which calls Popen.poll on the
        actual kernel subprocess.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel.
        """

    def _check_kernel_id(self, kernel_id):
        """check that a kernel id is valid"""
        if kernel_id not in self:
            raise KeyError("Kernel with id not found: %s" % kernel_id)

    def get_kernel(self, kernel_id):
        """Get the single KernelManager object for a kernel by its uuid.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel.
        """
        self._check_kernel_id(kernel_id)
        return self._kernels[kernel_id]

    @kernel_method
    def add_restart_callback(self, kernel_id, callback, event='restart'):
        """add a callback for the KernelRestarter"""

    @kernel_method
    def remove_restart_callback(self, kernel_id, callback, event='restart'):
        """remove a callback for the KernelRestarter"""

    @kernel_method
    def get_connection_info(self, kernel_id):
        """Return a dictionary of connection data for a kernel.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel.

        Returns
        =======
        connection_dict : dict
            A dict of the information needed to connect to a kernel.
            This includes the ip address and the integer port
            numbers of the different channels (stdin_port, iopub_port,
            shell_port, hb_port).
        """

    @kernel_method
    def connect_iopub(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the iopub channel.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket

        Returns
        =======
        stream : zmq Socket or ZMQStream
        """

    @kernel_method
    def connect_shell(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the shell channel.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket

        Returns
        =======
        stream : zmq Socket or ZMQStream
        """

    @kernel_method
    def connect_stdin(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the stdin channel.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket

        Returns
        =======
        stream : zmq Socket or ZMQStream
        """

    @kernel_method
    def connect_hb(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the hb channel.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket

        Returns
        =======
        stream : zmq Socket or ZMQStream
        """
@@ -1,111 +1,111 b''
1 1 """A basic kernel monitor with autorestarting.
2 2
3 3 This watches a kernel's state using KernelManager.is_alive and auto
4 4 restarts the kernel if it dies.
5 5
6 6 It is an incomplete base class, and must be subclassed.
7 7 """
8 8
9 9 # Copyright (c) IPython Development Team.
10 10 # Distributed under the terms of the Modified BSD License.
11 11
12 12 from IPython.config.configurable import LoggingConfigurable
13 13 from IPython.utils.traitlets import (
14 14 Instance, Float, Dict, Bool, Integer,
15 15 )
16 16
17 17
class KernelRestarter(LoggingConfigurable):
    """Monitor and autorestart a kernel."""

    kernel_manager = Instance('jupyter_client.KernelManager')

    debug = Bool(False, config=True,
        help="""Whether to include every poll event in debugging output.

        Has to be set explicitly, because there will be *a lot* of output.
        """
    )

    time_to_dead = Float(3.0, config=True,
        help="""Kernel heartbeat interval in seconds."""
    )

    restart_limit = Integer(5, config=True,
        help="""The number of consecutive autorestarts before the kernel is presumed dead."""
    )
    # True while a restart we issued has not yet been confirmed successful
    _restarting = Bool(False)
    # number of consecutive restarts without an intervening successful poll
    _restart_count = Integer(0)

    callbacks = Dict()
    def _callbacks_default(self):
        return dict(restart=[], dead=[])

    def start(self):
        """Start the polling of the kernel."""
        raise NotImplementedError("Must be implemented in a subclass")

    def stop(self):
        """Stop the kernel polling."""
        raise NotImplementedError("Must be implemented in a subclass")

    def add_callback(self, f, event='restart'):
        """register a callback to fire on a particular event

        Possible values for event:

          'restart' (default): kernel has died, and will be restarted.
          'dead': restart has failed, kernel will be left dead.

        """
        self.callbacks[event].append(f)

    def remove_callback(self, f, event='restart'):
        """unregister a callback to fire on a particular event

        Possible values for event:

          'restart' (default): kernel has died, and will be restarted.
          'dead': restart has failed, kernel will be left dead.

        """
        try:
            self.callbacks[event].remove(f)
        except ValueError:
            # callback was not registered; ignore
            pass

    def _fire_callbacks(self, event):
        """fire our callbacks for a particular event"""
        for callback in self.callbacks[event]:
            try:
                callback()
            except Exception as e:
                # a failing callback must not break polling or other callbacks
                self.log.error("KernelRestarter: %s callback %r failed", event, callback, exc_info=True)

    def poll(self):
        # Check the kernel once; restart it if it has died, and give up
        # (firing 'dead' callbacks) after restart_limit consecutive failures.
        if self.debug:
            self.log.debug('Polling kernel...')
        if not self.kernel_manager.is_alive():
            if self._restarting:
                self._restart_count += 1
            else:
                self._restart_count = 1

            if self._restart_count >= self.restart_limit:
                self.log.warn("KernelRestarter: restart failed")
                self._fire_callbacks('dead')
                self._restarting = False
                self._restart_count = 0
                self.stop()
            else:
                self.log.info('KernelRestarter: restarting kernel (%i/%i)',
                    self._restart_count,
                    self.restart_limit
                )
                self._fire_callbacks('restart')
                self.kernel_manager.restart_kernel(now=True)
                self._restarting = True
        else:
            if self._restarting:
                self.log.debug("KernelRestarter: restart apparently succeeded")
            self._restarting = False
@@ -1,881 +1,881 b''
1 1 """Session object for building, serializing, sending, and receiving messages in
2 2 IPython. The Session object supports serialization, HMAC signatures, and
3 3 metadata on messages.
4 4
5 5 Also defined here are utilities for working with Sessions:
6 6 * A SessionFactory to be used as a base class for configurables that work with
7 7 Sessions.
8 8 * A Message object for convenience that allows attribute-access to the msg dict.
9 9 """
10 10
11 11 # Copyright (c) IPython Development Team.
12 12 # Distributed under the terms of the Modified BSD License.
13 13
14 14 import hashlib
15 15 import hmac
16 16 import logging
17 17 import os
18 18 import pprint
19 19 import random
20 20 import uuid
21 21 import warnings
22 22 from datetime import datetime
23 23
24 24 try:
25 25 import cPickle
26 26 pickle = cPickle
27 27 except:
28 28 cPickle = None
29 29 import pickle
30 30
31 31 try:
32 32 # We are using compare_digest to limit the surface of timing attacks
33 33 from hmac import compare_digest
34 34 except ImportError:
35 35 # Python < 2.7.7: When digests don't match no feedback is provided,
36 36 # limiting the surface of attack
37 37 def compare_digest(a,b): return a == b
38 38
39 39 import zmq
40 40 from zmq.utils import jsonapi
41 41 from zmq.eventloop.ioloop import IOLoop
42 42 from zmq.eventloop.zmqstream import ZMQStream
43 43
44 44 from IPython.core.release import kernel_protocol_version
45 45 from IPython.config.configurable import Configurable, LoggingConfigurable
46 46 from IPython.utils import io
47 47 from IPython.utils.importstring import import_item
48 48 from IPython.utils.jsonutil import extract_dates, squash_dates, date_default
49 49 from IPython.utils.py3compat import (str_to_bytes, str_to_unicode, unicode_type,
50 50 iteritems)
51 51 from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,
52 52 DottedObjectName, CUnicode, Dict, Integer,
53 53 TraitError,
54 54 )
55 55 from IPython.utils.pickleutil import PICKLE_PROTOCOL
56 from IPython.kernel.adapter import adapt
57 56 from IPython.kernel.zmq.serialize import MAX_ITEMS, MAX_BYTES
57 from jupyter_client.adapter import adapt
58 58
59 59 #-----------------------------------------------------------------------------
60 60 # utility functions
61 61 #-----------------------------------------------------------------------------
62 62
def squash_unicode(obj):
    """coerce unicode back to bytestrings.

    Recursively walks dicts and lists *in place*, encoding every unicode
    string (keys and values) as utf8 bytes, and returns the object.
    """
    if isinstance(obj,dict):
        # NOTE(review): mutating the dict while iterating obj.keys() relies on
        # Python 2 semantics where keys() is a list snapshot — confirm before
        # running on Python 3, where this would raise RuntimeError.
        for key in obj.keys():
            obj[key] = squash_unicode(obj[key])
            if isinstance(key, unicode_type):
                # re-key the entry under the encoded key
                obj[squash_unicode(key)] = obj.pop(key)
    elif isinstance(obj, list):
        for i,v in enumerate(obj):
            obj[i] = squash_unicode(v)
    elif isinstance(obj, unicode_type):
        obj = obj.encode('utf8')
    return obj
76 76
77 77 #-----------------------------------------------------------------------------
78 78 # globals and defaults
79 79 #-----------------------------------------------------------------------------
80 80
# ISO8601-ify datetime objects
# allow unicode
# disallow nan, because it's not actually valid JSON
json_packer = lambda obj: jsonapi.dumps(obj, default=date_default,
    ensure_ascii=False, allow_nan=False,
)
json_unpacker = lambda s: jsonapi.loads(s)

# pickle fallback: squash_dates converts datetimes before pickling
pickle_packer = lambda o: pickle.dumps(squash_dates(o), PICKLE_PROTOCOL)
pickle_unpacker = pickle.loads

# JSON is the default wire format for messages
default_packer = json_packer
default_unpacker = json_unpacker

# delimiter frame separating zmq routing identities from message frames
DELIM = b"<IDS|MSG>"
# singleton dummy tracker, which will always report as done
DONE = zmq.MessageTracker()
98 98
99 99 #-----------------------------------------------------------------------------
100 100 # Mixin tools for apps that use Sessions
101 101 #-----------------------------------------------------------------------------
102 102
103 103 session_aliases = dict(
104 104 ident = 'Session.session',
105 105 user = 'Session.username',
106 106 keyfile = 'Session.keyfile',
107 107 )
108 108
109 109 session_flags = {
110 110 'secure' : ({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())),
111 111 'keyfile' : '' }},
112 112 """Use HMAC digests for authentication of messages.
113 113 Setting this flag will generate a new UUID to use as the HMAC key.
114 114 """),
115 115 'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }},
116 116 """Don't authenticate messages."""),
117 117 }
118 118
def default_secure(cfg):
    """Set the default behavior for a config environment to be secure.

    If Session.key/keyfile have not been set, set Session.key to
    a new random UUID.

    .. deprecated:: emits a DeprecationWarning on every call.
    """
    warnings.warn("default_secure is deprecated", DeprecationWarning)
    already_keyed = 'Session' in cfg and (
        'key' in cfg.Session or 'keyfile' in cfg.Session
    )
    if not already_keyed:
        # key/keyfile not specified, generate new UUID:
        cfg.Session.key = str_to_bytes(str(uuid.uuid4()))
131 131
132 132
133 133 #-----------------------------------------------------------------------------
134 134 # Classes
135 135 #-----------------------------------------------------------------------------
136 136
class SessionFactory(LoggingConfigurable):
    """The Base class for configurables that have a Session, Context, logger,
    and IOLoop.
    """

    logname = Unicode('')
    def _logname_changed(self, name, old, new):
        # keep self.log pointed at the logger named by logname
        self.log = logging.getLogger(new)

    # not configurable:
    context = Instance('zmq.Context')
    def _context_default(self):
        return zmq.Context.instance()

    session = Instance('jupyter_client.session.Session',
                       allow_none=True)

    loop = Instance('zmq.eventloop.ioloop.IOLoop')
    def _loop_default(self):
        return IOLoop.instance()

    def __init__(self, **kwargs):
        super(SessionFactory, self).__init__(**kwargs)

        if self.session is None:
            # construct the session
            self.session = Session(**kwargs)
164 164
165 165
class Message(object):
    """A simple message object that maps dict keys to attributes.

    A Message can be created from a dict and a dict from a Message instance
    simply by calling dict(msg_obj)."""

    def __init__(self, msg_dict):
        for key, value in iteritems(dict(msg_dict)):
            # wrap nested dicts so attribute access works recursively
            if isinstance(value, dict):
                value = Message(value)
            self.__dict__[key] = value

    def __iter__(self):
        # Having this iterator lets dict(msg_obj) work out of the box.
        return iter(iteritems(self.__dict__))

    def __repr__(self):
        return repr(self.__dict__)

    def __str__(self):
        return pprint.pformat(self.__dict__)

    def __contains__(self, k):
        return k in self.__dict__

    def __getitem__(self, k):
        return self.__dict__[k]
195 195
def msg_header(msg_id, msg_type, username, session):
    """Create a new message header dict.

    Returns the four identifying fields plus a ``date`` timestamp
    (datetime.now()) and the kernel protocol ``version``.
    """
    # explicit dict instead of the fragile `return locals()` idiom,
    # where any temporary variable would leak into the header
    return dict(
        msg_id=msg_id,
        msg_type=msg_type,
        username=username,
        session=session,
        date=datetime.now(),
        version=kernel_protocol_version,
    )
200 200
def extract_header(msg_or_header):
    """Given a message or header, return the header."""
    if not msg_or_header:
        return {}
    try:
        # assume we were given a full message
        header = msg_or_header['header']
    except KeyError:
        # not a full message: accept it as a header if it looks like one,
        # otherwise let the KeyError propagate
        msg_or_header['msg_id']
        header = msg_or_header
    if not isinstance(header, dict):
        header = dict(header)
    return header
219 219
220 220 class Session(Configurable):
221 221 """Object for handling serialization and sending of messages.
222 222
223 223 The Session object handles building messages and sending them
224 224 with ZMQ sockets or ZMQStream objects. Objects can communicate with each
225 225 other over the network via Session objects, and only need to work with the
226 226 dict-based IPython message spec. The Session will handle
227 227 serialization/deserialization, security, and metadata.
228 228
229 229 Sessions support configurable serialization via packer/unpacker traits,
230 230 and signing with HMAC digests via the key/keyfile traits.
231 231
232 232 Parameters
233 233 ----------
234 234
235 235 debug : bool
236 236 whether to trigger extra debugging statements
237 237 packer/unpacker : str : 'json', 'pickle' or import_string
238 238 importstrings for methods to serialize message parts. If just
239 239 'json' or 'pickle', predefined JSON and pickle packers will be used.
240 240 Otherwise, the entire importstring must be used.
241 241
242 242 The functions must accept at least valid JSON input, and output *bytes*.
243 243
244 244 For example, to use msgpack:
245 245 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
246 246 pack/unpack : callables
247 247 You can also set the pack/unpack callables for serialization directly.
248 248 session : bytes
249 249 the ID of this Session object. The default is to generate a new UUID.
250 250 username : unicode
251 251 username added to message headers. The default is to ask the OS.
252 252 key : bytes
253 253 The key used to initialize an HMAC signature. If unset, messages
254 254 will not be signed or checked.
255 255 keyfile : filepath
256 256 The file containing a key. If this is set, `key` will be initialized
257 257 to the contents of the file.
258 258
259 259 """
260 260
261 261 debug=Bool(False, config=True, help="""Debug output in the Session""")
262 262
263 263 packer = DottedObjectName('json',config=True,
264 264 help="""The name of the packer for serializing messages.
265 265 Should be one of 'json', 'pickle', or an import name
266 266 for a custom callable serializer.""")
267 267 def _packer_changed(self, name, old, new):
268 268 if new.lower() == 'json':
269 269 self.pack = json_packer
270 270 self.unpack = json_unpacker
271 271 self.unpacker = new
272 272 elif new.lower() == 'pickle':
273 273 self.pack = pickle_packer
274 274 self.unpack = pickle_unpacker
275 275 self.unpacker = new
276 276 else:
277 277 self.pack = import_item(str(new))
278 278
279 279 unpacker = DottedObjectName('json', config=True,
280 280 help="""The name of the unpacker for unserializing messages.
281 281 Only used with custom functions for `packer`.""")
282 282 def _unpacker_changed(self, name, old, new):
283 283 if new.lower() == 'json':
284 284 self.pack = json_packer
285 285 self.unpack = json_unpacker
286 286 self.packer = new
287 287 elif new.lower() == 'pickle':
288 288 self.pack = pickle_packer
289 289 self.unpack = pickle_unpacker
290 290 self.packer = new
291 291 else:
292 292 self.unpack = import_item(str(new))
293 293
294 294 session = CUnicode(u'', config=True,
295 295 help="""The UUID identifying this session.""")
296 296 def _session_default(self):
297 297 u = unicode_type(uuid.uuid4())
298 298 self.bsession = u.encode('ascii')
299 299 return u
300 300
301 301 def _session_changed(self, name, old, new):
302 302 self.bsession = self.session.encode('ascii')
303 303
304 304 # bsession is the session as bytes
305 305 bsession = CBytes(b'')
306 306
# Human-readable sender identity placed in message headers.
username = Unicode(str_to_unicode(os.environ.get('USER', 'username')),
    help="""Username for the Session. Default is your system username.""",
    config=True)

# Default top-level metadata merged into every message built by self.msg().
metadata = Dict({}, config=True,
    help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")

# if 0, no adapting to do.
adapt_version = Integer(0)

# message signature related traits:

key = CBytes(config=True,
    help="""execution key, for signing messages.""")
def _key_default(self):
    # Random key by default, so messages are signed unless explicitly
    # configured otherwise.
    return str_to_bytes(str(uuid.uuid4()))

def _key_changed(self):
    # Rebuild the HMAC signer whenever the key changes.
    self._new_auth()
326 326
signature_scheme = Unicode('hmac-sha256', config=True,
    help="""The digest scheme used to construct the message signatures.
    Must have the form 'hmac-HASH'.""")
def _signature_scheme_changed(self, name, old, new):
    # Validate the 'hmac-HASH' form and resolve HASH against hashlib.
    if not new.startswith('hmac-'):
        raise TraitError("signature_scheme must start with 'hmac-', got %r" % new)
    hash_name = new.split('-', 1)[1]
    try:
        self.digest_mod = getattr(hashlib, hash_name)
    except AttributeError:
        raise TraitError("hashlib has no such attribute: %s" % hash_name)
    # New digest function means the cached HMAC must be rebuilt.
    self._new_auth()

# Hash constructor used for signing (e.g. hashlib.sha256).
digest_mod = Any()
def _digest_mod_default(self):
    return hashlib.sha256
343 343
# HMAC instance used to sign/verify messages; None disables signing.
auth = Instance(hmac.HMAC, allow_none=True)

def _new_auth(self):
    """Rebuild the HMAC signer from the current key.

    An empty key disables signing entirely (auth becomes None).
    """
    key = self.key
    self.auth = hmac.HMAC(key, digestmod=self.digest_mod) if key else None
351 351
# Signatures already seen, kept for replay protection (see _add_digest).
digest_history = Set()
digest_history_size = Integer(2**16, config=True,
    help="""The maximum number of digests to remember.

    The digest history will be culled when it exceeds this value.
    """
)

keyfile = Unicode('', config=True,
    help="""path to file containing execution key.""")
def _keyfile_changed(self, name, old, new):
    # Load the key (stripped of surrounding whitespace) from the file;
    # assigning self.key triggers _key_changed -> _new_auth.
    with open(new, 'rb') as f:
        self.key = f.read().strip()
365 365
# for protecting against sends from forks
pid = Integer()

# serialization traits:

pack = Any(default_packer) # the actual packer function
def _pack_changed(self, name, old, new):
    if not callable(new):
        raise TypeError("packer must be callable, not %s"%type(new))

unpack = Any(default_unpacker) # the actual unpacker function
def _unpack_changed(self, name, old, new):
    # unpacker output is not validated - it is assumed to be the
    # inverse of `pack` (see _check_packers for the runtime check)
    if not callable(new):
        raise TypeError("unpacker must be callable, not %s"%type(new))

# thresholds:
copy_threshold = Integer(2**16, config=True,
    help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
buffer_threshold = Integer(MAX_BYTES, config=True,
    help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.")
item_threshold = Integer(MAX_ITEMS, config=True,
    help="""The maximum number of items for a container to be introspected for custom serialization.
    Containers larger than this are pickled outright.
    """
)
392 392
393 393
def __init__(self, **kwargs):
    """create a Session object

    Parameters
    ----------

    debug : bool
        whether to trigger extra debugging statements
    packer/unpacker : str : 'json', 'pickle' or import_string
        importstrings for methods to serialize message parts.  If just
        'json' or 'pickle', predefined JSON and pickle packers will be used.
        Otherwise, the entire importstring must be used.

        The functions must accept at least valid JSON input, and output
        *bytes*.

        For example, to use msgpack:
        packer = 'msgpack.packb', unpacker='msgpack.unpackb'
    pack/unpack : callables
        You can also set the pack/unpack callables for serialization
        directly.
    session : unicode (must be ascii)
        the ID of this Session object.  The default is to generate a new
        UUID.
    bsession : bytes
        The session as bytes
    username : unicode
        username added to message headers.  The default is to ask the OS.
    key : bytes
        The key used to initialize an HMAC signature.  If unset, messages
        will not be signed or checked.
    signature_scheme : str
        The message digest scheme. Currently must be of the form 'hmac-HASH',
        where 'HASH' is a hashing function available in Python's hashlib.
        The default is 'hmac-sha256'.
        This is ignored if 'key' is empty.
    keyfile : filepath
        The file containing a key.  If this is set, `key` will be
        initialized to the contents of the file.
    """
    super(Session, self).__init__(**kwargs)
    # validate pack/unpack early, and wrap them for datetime support
    self._check_packers()
    # cached packed empty dict, used whenever a message's content is None
    self.none = self.pack({})
    # ensure self._session_default() if necessary, so bsession is defined:
    self.session
    # remember our pid so sends from forked children can be detected
    self.pid = os.getpid()
    self._new_auth()
441 441
@property
def msg_id(self):
    """Return a fresh uuid4 string; every access yields a new id."""
    new_id = uuid.uuid4()
    return str(new_id)
446 446
def _check_packers(self):
    """check packers for datetime support.

    Validates that pack/unpack round-trip a simple dict, produce bytes,
    and handle datetimes; if datetimes are not handled natively, wraps
    `pack` with squash_dates so they are stringified before packing.
    """
    pack = self.pack
    unpack = self.unpack

    # check simple serialization
    msg = dict(a=[1,'hi'])
    try:
        packed = pack(msg)
    except Exception as e:
        msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}"
        if self.packer == 'json':
            # surface which json implementation pyzmq is using, since a
            # misbehaving jsonmod is the usual culprit
            jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
        else:
            jsonmsg = ""
        raise ValueError(
            msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg)
        )

    # ensure packed message is bytes
    if not isinstance(packed, bytes):
        raise ValueError("message packed to %r, but bytes are required"%type(packed))

    # check that unpack is pack's inverse
    try:
        unpacked = unpack(packed)
        assert unpacked == msg
    except Exception as e:
        msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}"
        if self.packer == 'json':
            jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
        else:
            jsonmsg = ""
        raise ValueError(
            msg.format(packer=self.packer, unpacker=self.unpacker, e=e, jsonmsg=jsonmsg)
        )

    # check datetime support
    msg = dict(t=datetime.now())
    try:
        unpacked = unpack(pack(msg))
        if isinstance(unpacked['t'], datetime):
            # deliberately raise into the except-branch below: a packer that
            # round-trips datetimes natively still must NOT, because the wire
            # format expects ISO8601 strings
            raise ValueError("Shouldn't deserialize to datetime")
    except Exception:
        # fall back to stringifying datetimes before packing
        self.pack = lambda o: pack(squash_dates(o))
        self.unpack = lambda s: unpack(s)
493 493
def msg_header(self, msg_type):
    """Create a header dict for a message of type `msg_type`.

    Delegates to the module-level msg_header() function (which this
    method name shadows), filling in this session's identity.
    """
    return msg_header(self.msg_id, msg_type, self.username, self.session)
496 496
def msg(self, msg_type, content=None, parent=None, header=None, metadata=None):
    """Return the nested message dict.

    This format is different from what is sent over the wire. The
    serialize/deserialize methods convert this nested message dict to the
    wire format, which is a list of message parts.
    """
    if header is None:
        header = self.msg_header(msg_type)
    # session-level defaults first, then per-message overrides
    md = self.metadata.copy()
    if metadata is not None:
        md.update(metadata)
    return {
        'header': header,
        'msg_id': header['msg_id'],
        'msg_type': header['msg_type'],
        'parent_header': {} if parent is None else extract_header(parent),
        'content': {} if content is None else content,
        'metadata': md,
    }
515 515
def sign(self, msg_list):
    """Sign a message with HMAC digest. If no auth, return b''.

    Parameters
    ----------
    msg_list : list
        The [p_header,p_parent,p_content] part of the message list.
    """
    if self.auth is None:
        return b''
    # never mutate the cached HMAC; always work on a copy
    mac = self.auth.copy()
    for part in msg_list:
        mac.update(part)
    return str_to_bytes(mac.hexdigest())
530 530
def serialize(self, msg, ident=None):
    """Serialize the message components to bytes.

    This is roughly the inverse of deserialize. The serialize/deserialize
    methods work with full message lists, whereas pack/unpack work with
    the individual message parts in the message list.

    Parameters
    ----------
    msg : dict or Message
        The next message dict as returned by the self.msg method.

    Returns
    -------
    msg_list : list
        The list of bytes objects to be sent with the format::

            [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent,
             p_metadata, p_content, buffer1, buffer2, ...]

        In this list, the ``p_*`` entities are the packed or serialized
        versions, so if JSON is used, these are utf8 encoded JSON strings.
    """
    content = msg.get('content', {})
    if content is None:
        # pre-packed empty dict, cached in __init__
        content = self.none
    elif isinstance(content, dict):
        content = self.pack(content)
    elif isinstance(content, unicode_type):
        # should be bytes, but JSON often spits out unicode
        content = content.encode('utf8')
    elif not isinstance(content, bytes):
        raise TypeError("Content incorrect type: %s"%type(content))
    # (bytes passes through untouched: already packed, as in a relayed message)

    payload = [
        self.pack(msg['header']),
        self.pack(msg['parent_header']),
        self.pack(msg['metadata']),
        content,
    ]

    to_send = []
    if isinstance(ident, list):
        # accept list of idents
        to_send.extend(ident)
    elif ident is not None:
        to_send.append(ident)
    to_send.append(DELIM)
    to_send.append(self.sign(payload))
    to_send.extend(payload)
    return to_send
589 589
def send(self, stream, msg_or_type, content=None, parent=None, ident=None,
         buffers=None, track=False, header=None, metadata=None):
    """Build and send a message via stream or socket.

    The message format used by this function internally is as follows:

    [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_content,
     buffer1,buffer2,...]

    The serialize/deserialize methods convert the nested message dict into this
    format.

    Parameters
    ----------

    stream : zmq.Socket or ZMQStream
        The socket-like object used to send the data.
    msg_or_type : str or Message/dict
        Normally, msg_or_type will be a msg_type unless a message is being
        sent more than once. If a header is supplied, this can be set to
        None and the msg_type will be pulled from the header.

    content : dict or None
        The content of the message (ignored if msg_or_type is a message).
    header : dict or None
        The header dict for the message (ignored if msg_to_type is a message).
    parent : Message or dict or None
        The parent or parent header describing the parent of this message
        (ignored if msg_or_type is a message).
    ident : bytes or list of bytes
        The zmq.IDENTITY routing path.
    metadata : dict or None
        The metadata describing the message
    buffers : list or None
        The already-serialized buffers to be appended to the message.
    track : bool
        Whether to track.  Only for use with Sockets, because ZMQStream
        objects cannot track messages.


    Returns
    -------
    msg : dict
        The constructed message.
    """
    if not isinstance(stream, zmq.Socket):
        # ZMQStreams and dummy sockets do not support tracking.
        track = False

    if isinstance(msg_or_type, (Message, dict)):
        # We got a Message or message dict, not a msg_type so don't
        # build a new Message.
        msg = msg_or_type
        buffers = buffers or msg.get('buffers', [])
    else:
        msg = self.msg(msg_or_type, content=content, parent=parent,
                       header=header, metadata=metadata)
    if not os.getpid() == self.pid:
        # refuse to send from a forked child: zmq sockets are not fork-safe
        io.rprint("WARNING: attempted to send message from fork")
        io.rprint(msg)
        return
    buffers = [] if buffers is None else buffers
    if self.adapt_version:
        msg = adapt(msg, self.adapt_version)
    to_send = self.serialize(msg, ident)
    to_send.extend(buffers)
    # only consider zero-copy sends when some part exceeds copy_threshold
    longest = max([ len(s) for s in to_send ])
    copy = (longest < self.copy_threshold)

    if buffers and track and not copy:
        # only really track when we are doing zero-copy buffers
        tracker = stream.send_multipart(to_send, copy=False, track=True)
    else:
        # use dummy tracker, which will be done immediately
        tracker = DONE
        stream.send_multipart(to_send, copy=copy)

    if self.debug:
        pprint.pprint(msg)
        pprint.pprint(to_send)
        pprint.pprint(buffers)

    msg['tracker'] = tracker

    return msg
675 675
def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
    """Send an already-serialized message along the ident path.

    Parameters
    ----------
    stream : ZMQStream or Socket
        The ZMQ stream or socket to use for sending the message.
    msg_list : list
        The serialized list of messages to send. This only includes the
        [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of
        the message.
    ident : ident or list
        A single ident or a list of idents to use in sending.
    """
    if isinstance(ident, bytes):
        ident = [ident]
    parts = list(ident) if ident is not None else []
    parts.append(DELIM)
    # sign the serialized parts just like a freshly built message
    parts.append(self.sign(msg_list))
    parts.extend(msg_list)
    stream.send_multipart(parts, flags, copy=copy)
702 702
def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
    """Receive and unpack a message.

    Parameters
    ----------
    socket : ZMQStream or Socket
        The socket or stream to use in receiving.

    Returns
    -------
    [idents], msg
        [idents] is a list of idents and msg is a nested message dict of
        same format as self.msg returns.  Returns (None, None) when no
        message is available (EAGAIN in non-blocking mode).
    """
    if isinstance(socket, ZMQStream):
        socket = socket.socket
    try:
        msg_list = socket.recv_multipart(mode, copy=copy)
    except zmq.ZMQError as e:
        if e.errno == zmq.EAGAIN:
            # We can convert EAGAIN to None as we know in this case
            # recv_multipart won't return None.
            return None,None
        else:
            raise
    # split multipart message into identity list and message dict
    # invalid large messages can cause very expensive string comparisons
    idents, msg_list = self.feed_identities(msg_list, copy)
    try:
        return idents, self.deserialize(msg_list, content=content, copy=copy)
    except Exception:
        # TODO: handle it
        # BUG FIX: use a bare `raise` instead of `raise e` so the original
        # traceback is preserved intact (Python 2 `raise e` restarted the
        # traceback here, hiding where deserialization actually failed).
        raise
736 736
def feed_identities(self, msg_list, copy=True):
    """Split the zmq identity prefix from the rest of the message.

    Feed until DELIM is reached, then return the prefix as idents and
    remainder as msg_list. This is easily broken by setting an IDENT to DELIM,
    but that would be silly.

    Parameters
    ----------
    msg_list : a list of Message or bytes objects
        The message to be split.
    copy : bool
        flag determining whether the arguments are bytes or Messages

    Returns
    -------
    (idents, msg_list) : two lists
        idents will always be a list of bytes, each of which is a ZMQ
        identity. msg_list will be a list of bytes or zmq.Messages of the
        form [HMAC,p_header,p_parent,p_content,buffer1,buffer2,...] and
        should be unpackable/unserializable via self.deserialize at this
        point.
    """
    if copy:
        idx = msg_list.index(DELIM)
        return msg_list[:idx], msg_list[idx + 1:]
    # zero-copy path: frames must be compared through their .bytes view
    delim_idx = None
    for i, frame in enumerate(msg_list):
        if frame.bytes == DELIM:
            delim_idx = i
            break
    if delim_idx is None:
        raise ValueError("DELIM not in msg_list")
    idents = [frame.bytes for frame in msg_list[:delim_idx]]
    return idents, msg_list[delim_idx + 1:]
773 773
774 774 def _add_digest(self, signature):
775 775 """add a digest to history to protect against replay attacks"""
776 776 if self.digest_history_size == 0:
777 777 # no history, never add digests
778 778 return
779 779
780 780 self.digest_history.add(signature)
781 781 if len(self.digest_history) > self.digest_history_size:
782 782 # threshold reached, cull 10%
783 783 self._cull_digest_history()
784 784
785 785 def _cull_digest_history(self):
786 786 """cull the digest history
787 787
788 788 Removes a randomly selected 10% of the digest history
789 789 """
790 790 current = len(self.digest_history)
791 791 n_to_cull = max(int(current // 10), current - self.digest_history_size)
792 792 if n_to_cull >= current:
793 793 self.digest_history = set()
794 794 return
795 795 to_cull = random.sample(self.digest_history, n_to_cull)
796 796 self.digest_history.difference_update(to_cull)
797 797
def deserialize(self, msg_list, content=True, copy=True):
    """Unserialize a msg_list to a nested message dict.

    This is roughly the inverse of serialize. The serialize/deserialize
    methods work with full message lists, whereas pack/unpack work with
    the individual message parts in the message list.

    Parameters
    ----------
    msg_list : list of bytes or Message objects
        The list of message parts of the form [HMAC,p_header,p_parent,
        p_metadata,p_content,buffer1,buffer2,...].
    content : bool (True)
        Whether to unpack the content dict (True), or leave it packed
        (False).
    copy : bool (True)
        Whether msg_list contains bytes (True) or the non-copying Message
        objects in each place (False).

    Returns
    -------
    msg : dict
        The nested message dict with top-level keys [header, parent_header,
        content, buffers].  The buffers are returned as memoryviews.
    """
    minlen = 5
    message = {}
    if not copy:
        # pyzmq didn't copy the first parts of the message, so we'll do it
        for i in range(minlen):
            msg_list[i] = msg_list[i].bytes
    if self.auth is not None:
        signature = msg_list[0]
        if not signature:
            raise ValueError("Unsigned Message")
        if signature in self.digest_history:
            raise ValueError("Duplicate Signature: %r" % signature)
        # record the signature before verification, so a replay of a bad
        # message is rejected as a duplicate as well
        self._add_digest(signature)
        check = self.sign(msg_list[1:5])
        # constant-time comparison to avoid timing side channels
        if not compare_digest(signature, check):
            raise ValueError("Invalid Signature: %r" % signature)
    # NOTE(review): this length check runs after msg_list[0..4] have been
    # indexed above, so a too-short message raises IndexError first —
    # confirm whether this TypeError path is ever reachable.
    if not len(msg_list) >= minlen:
        raise TypeError("malformed message, must have at least %i elements"%minlen)
    header = self.unpack(msg_list[1])
    message['header'] = extract_dates(header)
    message['msg_id'] = header['msg_id']
    message['msg_type'] = header['msg_type']
    message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
    message['metadata'] = self.unpack(msg_list[3])
    if content:
        message['content'] = self.unpack(msg_list[4])
    else:
        message['content'] = msg_list[4]
    buffers = [memoryview(b) for b in msg_list[5:]]
    if buffers and buffers[0].shape is None:
        # force copy to workaround pyzmq #646
        buffers = [memoryview(b.bytes) for b in msg_list[5:]]
    message['buffers'] = buffers
    # adapt to the current version
    return adapt(message)
858 858
def unserialize(self, *args, **kwargs):
    """Deprecated alias for :meth:`deserialize`."""
    deprecation_msg = "Session.unserialize is deprecated. Use Session.deserialize."
    warnings.warn(deprecation_msg, DeprecationWarning)
    return self.deserialize(*args, **kwargs)
865 865
866 866
def test_msg2obj():
    """Message should mirror a (nested) dict via attribute and item access."""
    source = {'x': 1}
    message = Message(source)
    assert message.x == source['x']

    source['y'] = {'z': 1}
    message = Message(source)
    assert message.y.z == source['y']['z']

    outer, inner = 'y', 'z'
    assert message[outer][inner] == source[outer][inner]

    # round-trip back to a plain dict
    roundtrip = dict(message)
    assert roundtrip['x'] == source['x']
    assert roundtrip['y']['z'] == source['y']['z']
@@ -1,375 +1,375 b''
1 1 """Tests for adapting IPython msg spec versions"""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 import copy
7 7 import json
8 8 from unittest import TestCase
9 9 import nose.tools as nt
10 10
11 from IPython.kernel.adapter import adapt, V4toV5, V5toV4, code_to_line
12 from IPython.kernel.zmq.session import Session
11 from jupyter_client.adapter import adapt, V4toV5, V5toV4, code_to_line
12 from jupyter_client.session import Session
13 13
14 14
def test_default_version():
    """A message lacking a version header should be adapted as v4."""
    session = Session()
    message = session.msg("msg_type")
    del message['header']['version']
    adapted = adapt(copy.deepcopy(message))
    nt.assert_equal(adapted['header']['version'], V4toV5.version)
22 22
def test_code_to_line_no_code():
    """Empty code should yield an empty line with cursor position 0."""
    result_line, result_pos = code_to_line("", 0)
    nt.assert_equal(result_line, "")
    nt.assert_equal(result_pos, 0)
27 27
class AdapterTest(TestCase):
    """Common scaffolding for message-adapter round-trip tests.

    Subclasses set `from_version`/`to_version` and use `self.adapt`
    to obtain (original, adapted) message pairs.
    """

    def setUp(self):
        self.session = Session()

    def adapt(self, msg, version=None):
        # deep-copy first: the adapter mutates the message in place
        original = copy.deepcopy(msg)
        adapted = adapt(msg, version or self.to_version)
        return original, adapted

    def check_header(self, msg):
        pass
40 40
41 41
class V4toV5TestCase(AdapterTest):
    """Checks for adapting v4 messages up to the v5 message spec."""

    from_version = 4
    to_version = 5

    def msg(self, msg_type, content):
        """Create a v4 msg (same as v5, minus version header)"""
        msg = self.session.msg(msg_type, content)
        msg['header'].pop('version')
        return msg

    def test_same_version(self):
        msg = self.msg("execute_result",
            content={'status' : 'ok'}
        )
        original, adapted = self.adapt(msg, self.from_version)

        self.assertEqual(original, adapted)

    def test_no_adapt(self):
        msg = self.msg("input_reply", {'value' : 'some text'})
        v4, v5 = self.adapt(msg)
        self.assertEqual(v5['header']['version'], V4toV5.version)
        v5['header'].pop('version')
        self.assertEqual(v4, v5)

    def test_rename_type(self):
        # v4 -> v5 renamed several iopub message types
        for v5_type, v4_type in [
                ('execute_result', 'pyout'),
                ('execute_input', 'pyin'),
                ('error', 'pyerr'),
            ]:
            msg = self.msg(v4_type, {'key' : 'value'})
            v4, v5 = self.adapt(msg)
            self.assertEqual(v5['header']['version'], V4toV5.version)
            self.assertEqual(v5['header']['msg_type'], v5_type)
            self.assertEqual(v4['content'], v5['content'])

    def test_execute_request(self):
        # v4 user_variables are folded into v5 user_expressions
        msg = self.msg("execute_request", {
            'code' : 'a=5',
            'silent' : False,
            'user_expressions' : {'a' : 'apple'},
            'user_variables' : ['b'],
        })
        v4, v5 = self.adapt(msg)
        self.assertEqual(v4['header']['msg_type'], v5['header']['msg_type'])
        v4c = v4['content']
        v5c = v5['content']
        self.assertEqual(v5c['user_expressions'], {'a' : 'apple', 'b': 'b'})
        self.assertNotIn('user_variables', v5c)
        self.assertEqual(v5c['code'], v4c['code'])

    def test_execute_reply(self):
        msg = self.msg("execute_reply", {
            'status': 'ok',
            'execution_count': 7,
            'user_variables': {'a': 1},
            'user_expressions': {'a+a': 2},
            'payload': [{'source':'page', 'text':'blah'}]
        })
        v4, v5 = self.adapt(msg)
        v5c = v5['content']
        self.assertNotIn('user_variables', v5c)
        self.assertEqual(v5c['user_expressions'], {'a': 1, 'a+a': 2})
        # v4 page payload text becomes a v5 mimebundle
        self.assertEqual(v5c['payload'], [{'source': 'page',
            'data': {'text/plain': 'blah'}}
        ])

    def test_complete_request(self):
        # v4 text/line/block collapse into v5 code + cursor_pos
        msg = self.msg("complete_request", {
            'text' : 'a.is',
            'line' : 'foo = a.is',
            'block' : None,
            'cursor_pos' : 10,
        })
        v4, v5 = self.adapt(msg)
        v4c = v4['content']
        v5c = v5['content']
        for key in ('text', 'line', 'block'):
            self.assertNotIn(key, v5c)
        self.assertEqual(v5c['cursor_pos'], v4c['cursor_pos'])
        self.assertEqual(v5c['code'], v4c['line'])

    def test_complete_reply(self):
        msg = self.msg("complete_reply", {
            'matched_text' : 'a.is',
            'matches' : ['a.isalnum',
                         'a.isalpha',
                         'a.isdigit',
                         'a.islower',
                         ],
            })
        v4, v5 = self.adapt(msg)
        v4c = v4['content']
        v5c = v5['content']

        self.assertEqual(v5c['matches'], v4c['matches'])
        self.assertEqual(v5c['metadata'], {})
        # cursor_start is derived from the length of matched_text
        self.assertEqual(v5c['cursor_start'], -4)
        self.assertEqual(v5c['cursor_end'], None)

    def test_object_info_request(self):
        msg = self.msg("object_info_request", {
            'oname' : 'foo',
            'detail_level' : 1,
        })
        v4, v5 = self.adapt(msg)
        # object_info was renamed to inspect in v5
        self.assertEqual(v5['header']['msg_type'], 'inspect_request')
        v4c = v4['content']
        v5c = v5['content']
        self.assertEqual(v5c['code'], v4c['oname'])
        self.assertEqual(v5c['cursor_pos'], len(v4c['oname']))
        self.assertEqual(v5c['detail_level'], v4c['detail_level'])

    def test_object_info_reply(self):
        msg = self.msg("object_info_reply", {
            'oname' : 'foo',
            'found' : True,
            'status' : 'ok',
            'definition' : 'foo(a=5)',
            'docstring' : "the docstring",
        })
        v4, v5 = self.adapt(msg)
        self.assertEqual(v5['header']['msg_type'], 'inspect_reply')
        v4c = v4['content']
        v5c = v5['content']
        self.assertEqual(sorted(v5c), [ 'data', 'found', 'metadata', 'name', 'status'])
        # definition + docstring are joined into the text/plain mimebundle
        text = v5c['data']['text/plain']
        self.assertEqual(text, '\n'.join([v4c['definition'], v4c['docstring']]))

    def test_kernel_info_reply(self):
        msg = self.msg("kernel_info_reply", {
            'language': 'python',
            'language_version': [2,8,0],
            'ipython_version': [1,2,3],
        })
        v4, v5 = self.adapt(msg)
        v4c = v4['content']
        v5c = v5['content']
        self.assertEqual(v5c, {
            'protocol_version': '4.1',
            'implementation': 'ipython',
            'implementation_version': '1.2.3',
            'language_info': {
                'name': 'python',
                'version': '2.8.0',
            },
            'banner' : '',
        })

    # iopub channel

    def test_display_data(self):
        jsondata = dict(a=5)
        msg = self.msg("display_data", {
            'data' : {
                'text/plain' : 'some text',
                'application/json' : json.dumps(jsondata)
            },
            'metadata' : {'text/plain' : { 'key' : 'value' }},
        })
        v4, v5 = self.adapt(msg)
        v4c = v4['content']
        v5c = v5['content']
        self.assertEqual(v5c['metadata'], v4c['metadata'])
        self.assertEqual(v5c['data']['text/plain'], v4c['data']['text/plain'])
        # v5 carries JSON data as actual objects, not serialized strings
        self.assertEqual(v5c['data']['application/json'], jsondata)

    # stdin channel

    def test_input_request(self):
        msg = self.msg('input_request', {'prompt': "$>"})
        v4, v5 = self.adapt(msg)
        self.assertEqual(v5['content']['prompt'], v4['content']['prompt'])
        # the v5 `password` field is introduced with a False default
        self.assertFalse(v5['content']['password'])
217 217
218 218
class V5toV4TestCase(AdapterTest):
    """Checks for adapting v5 messages down to the v4 message spec."""

    from_version = 5
    to_version = 4

    def msg(self, msg_type, content):
        return self.session.msg(msg_type, content)

    def test_same_version(self):
        msg = self.msg("execute_result",
            content={'status' : 'ok'}
        )
        original, adapted = self.adapt(msg, self.from_version)

        self.assertEqual(original, adapted)

    def test_no_adapt(self):
        msg = self.msg("input_reply", {'value' : 'some text'})
        v5, v4 = self.adapt(msg)
        # v4 headers carry no version field at all
        self.assertNotIn('version', v4['header'])
        v5['header'].pop('version')
        self.assertEqual(v4, v5)

    def test_rename_type(self):
        # v5 names map back to the legacy v4 iopub type names
        for v5_type, v4_type in [
                ('execute_result', 'pyout'),
                ('execute_input', 'pyin'),
                ('error', 'pyerr'),
            ]:
            msg = self.msg(v5_type, {'key' : 'value'})
            v5, v4 = self.adapt(msg)
            self.assertEqual(v4['header']['msg_type'], v4_type)
            nt.assert_not_in('version', v4['header'])
            self.assertEqual(v4['content'], v5['content'])

    def test_execute_request(self):
        msg = self.msg("execute_request", {
            'code' : 'a=5',
            'silent' : False,
            'user_expressions' : {'a' : 'apple'},
        })
        v5, v4 = self.adapt(msg)
        self.assertEqual(v4['header']['msg_type'], v5['header']['msg_type'])
        v4c = v4['content']
        v5c = v5['content']
        # the removed-in-v5 user_variables field is restored (empty) for v4
        self.assertEqual(v4c['user_variables'], [])
        self.assertEqual(v5c['code'], v4c['code'])

    def test_complete_request(self):
        # v5 code + cursor_pos expand back into v4 line/cursor_pos/text/block
        msg = self.msg("complete_request", {
            'code' : 'def foo():\n'
                     '    a.is\n'
                     'foo()',
            'cursor_pos': 19,
        })
        v5, v4 = self.adapt(msg)
        v4c = v4['content']
        v5c = v5['content']
        self.assertNotIn('code', v4c)
        self.assertEqual(v4c['line'], v5c['code'].splitlines(True)[1])
        self.assertEqual(v4c['cursor_pos'], 8)
        self.assertEqual(v4c['text'], '')
        self.assertEqual(v4c['block'], None)

    def test_complete_reply(self):
        msg = self.msg("complete_reply", {
            'cursor_start' : 10,
            'cursor_end' : 14,
            'matches' : ['a.isalnum',
                         'a.isalpha',
                         'a.isdigit',
                         'a.islower',
                         ],
            'metadata' : {},
            })
        v5, v4 = self.adapt(msg)
        v4c = v4['content']
        v5c = v5['content']
        # matched_text is reconstructed from the common match prefix
        self.assertEqual(v4c['matched_text'], 'a.is')
        self.assertEqual(v4c['matches'], v5c['matches'])

    def test_inspect_request(self):
        msg = self.msg("inspect_request", {
            'code' : 'def foo():\n'
                     '    apple\n'
                     'bar()',
            'cursor_pos': 18,
            'detail_level' : 1,
        })
        v5, v4 = self.adapt(msg)
        # inspect maps back to the legacy object_info message pair
        self.assertEqual(v4['header']['msg_type'], 'object_info_request')
        v4c = v4['content']
        v5c = v5['content']
        # oname is the token under the cursor
        self.assertEqual(v4c['oname'], 'apple')
        self.assertEqual(v5c['detail_level'], v4c['detail_level'])

    def test_inspect_reply(self):
        msg = self.msg("inspect_reply", {
            'name' : 'foo',
            'found' : True,
            'data' : {'text/plain' : 'some text'},
            'metadata' : {},
        })
        v5, v4 = self.adapt(msg)
        self.assertEqual(v4['header']['msg_type'], 'object_info_reply')
        v4c = v4['content']
        v5c = v5['content']
        self.assertEqual(sorted(v4c), ['found', 'oname'])
        # mimebundle replies cannot be fully downgraded, so found is dropped
        self.assertEqual(v4c['found'], False)

    def test_kernel_info_reply(self):
        msg = self.msg("kernel_info_reply", {
            'protocol_version': '5.0',
            'implementation': 'ipython',
            'implementation_version': '1.2.3',
            'language_info': {
                'name': 'python',
                'version': '2.8.0',
                'mimetype': 'text/x-python',
            },
            'banner' : 'the banner',
        })
        v5, v4 = self.adapt(msg)
        v4c = v4['content']
        v5c = v5['content']
        info = v5c['language_info']
        # version strings become the v4 list-of-ints form
        self.assertEqual(v4c, {
            'protocol_version': [5,0],
            'language': 'python',
            'language_version': [2,8,0],
            'ipython_version': [1,2,3],
        })

    # iopub channel

    def test_display_data(self):
        jsondata = dict(a=5)
        msg = self.msg("display_data", {
            'data' : {
                'text/plain' : 'some text',
                'application/json' : jsondata,
            },
            'metadata' : {'text/plain' : { 'key' : 'value' }},
        })
        v5, v4 = self.adapt(msg)
        v4c = v4['content']
        v5c = v5['content']
        self.assertEqual(v5c['metadata'], v4c['metadata'])
        self.assertEqual(v5c['data']['text/plain'], v4c['data']['text/plain'])
        # v4 serialized JSON mimetypes as strings
        self.assertEqual(v4c['data']['application/json'], json.dumps(jsondata))

    # stdin channel

    def test_input_request(self):
        msg = self.msg('input_request', {'prompt': "$>", 'password' : True})
        v5, v4 = self.adapt(msg)
        self.assertEqual(v5['content']['prompt'], v4['content']['prompt'])
        # the v5-only password field is stripped for v4 frontends
        self.assertNotIn('password', v4['content'])
@@ -1,141 +1,141 b''
1 1 """Tests for kernel connection utilities
2 2
3 3 Authors
4 4 -------
5 5 * MinRK
6 6 """
7 7 #-----------------------------------------------------------------------------
8 8 # Copyright (c) 2013, the IPython Development Team.
9 9 #
10 10 # Distributed under the terms of the Modified BSD License.
11 11 #
12 12 # The full license is in the file COPYING.txt, distributed with this software.
13 13 #-----------------------------------------------------------------------------
14 14
15 15 #-----------------------------------------------------------------------------
16 16 # Imports
17 17 #-----------------------------------------------------------------------------
18 18
19 19 import json
20 20 import os
21 21
22 22 import nose.tools as nt
23 23
24 24 from IPython.config import Config
25 25 from IPython.consoleapp import IPythonConsoleApp
26 26 from IPython.core.application import BaseIPythonApplication
27 27 from IPython.utils.tempdir import TemporaryDirectory, TemporaryWorkingDirectory
28 28 from IPython.utils.py3compat import str_to_bytes
29 from IPython.kernel import connect
30 from IPython.kernel.zmq.session import Session
29 from jupyter_client import connect
30 from jupyter_client.session import Session
31 31
32 32 #-----------------------------------------------------------------------------
33 33 # Classes and functions
34 34 #-----------------------------------------------------------------------------
35 35
36 36 class DummyConsoleApp(BaseIPythonApplication, IPythonConsoleApp):
37 37 def initialize(self, argv=[]):
38 38 BaseIPythonApplication.initialize(self, argv=argv)
39 39 self.init_connection_file()
40 40
41 41 sample_info = dict(ip='1.2.3.4', transport='ipc',
42 42 shell_port=1, hb_port=2, iopub_port=3, stdin_port=4, control_port=5,
43 43 key=b'abc123', signature_scheme='hmac-md5',
44 44 )
45 45
46 46 def test_write_connection_file():
47 47 with TemporaryDirectory() as d:
48 48 cf = os.path.join(d, 'kernel.json')
49 49 connect.write_connection_file(cf, **sample_info)
50 50 nt.assert_true(os.path.exists(cf))
51 51 with open(cf, 'r') as f:
52 52 info = json.load(f)
53 53 info['key'] = str_to_bytes(info['key'])
54 54 nt.assert_equal(info, sample_info)
55 55
56 56
57 57 def test_load_connection_file_session():
58 58 """test load_connection_file() after """
59 59 session = Session()
60 60 app = DummyConsoleApp(session=Session())
61 61 app.initialize(argv=[])
62 62 session = app.session
63 63
64 64 with TemporaryDirectory() as d:
65 65 cf = os.path.join(d, 'kernel.json')
66 66 connect.write_connection_file(cf, **sample_info)
67 67 app.connection_file = cf
68 68 app.load_connection_file()
69 69
70 70 nt.assert_equal(session.key, sample_info['key'])
71 71 nt.assert_equal(session.signature_scheme, sample_info['signature_scheme'])
72 72
73 73
74 74 def test_app_load_connection_file():
75 75 """test `ipython console --existing` loads a connection file"""
76 76 with TemporaryDirectory() as d:
77 77 cf = os.path.join(d, 'kernel.json')
78 78 connect.write_connection_file(cf, **sample_info)
79 79 app = DummyConsoleApp(connection_file=cf)
80 80 app.initialize(argv=[])
81 81
82 82 for attr, expected in sample_info.items():
83 83 if attr in ('key', 'signature_scheme'):
84 84 continue
85 85 value = getattr(app, attr)
86 86 nt.assert_equal(value, expected, "app.%s = %s != %s" % (attr, value, expected))
87 87
88 88 def test_get_connection_file():
89 89 cfg = Config()
90 90 with TemporaryWorkingDirectory() as d:
91 91 cfg.ProfileDir.location = d
92 92 cf = 'kernel.json'
93 93 app = DummyConsoleApp(config=cfg, connection_file=cf)
94 94 app.initialize(argv=[])
95 95
96 96 profile_cf = os.path.join(app.profile_dir.location, 'security', cf)
97 97 nt.assert_equal(profile_cf, app.connection_file)
98 98 with open(profile_cf, 'w') as f:
99 99 f.write("{}")
100 100 nt.assert_true(os.path.exists(profile_cf))
101 101 nt.assert_equal(connect.get_connection_file(app), profile_cf)
102 102
103 103 app.connection_file = cf
104 104 nt.assert_equal(connect.get_connection_file(app), profile_cf)
105 105
106 106 def test_find_connection_file():
107 107 cfg = Config()
108 108 with TemporaryDirectory() as d:
109 109 cfg.ProfileDir.location = d
110 110 cf = 'kernel.json'
111 111 app = DummyConsoleApp(config=cfg, connection_file=cf)
112 112 app.initialize(argv=[])
113 113 BaseIPythonApplication._instance = app
114 114
115 115 profile_cf = os.path.join(app.profile_dir.location, 'security', cf)
116 116 with open(profile_cf, 'w') as f:
117 117 f.write("{}")
118 118
119 119 for query in (
120 120 'kernel.json',
121 121 'kern*',
122 122 '*ernel*',
123 123 'k*',
124 124 ):
125 125 nt.assert_equal(connect.find_connection_file(query), profile_cf)
126 126
127 127 BaseIPythonApplication._instance = None
128 128
129 129 def test_get_connection_info():
130 130 with TemporaryDirectory() as d:
131 131 cf = os.path.join(d, 'kernel.json')
132 132 connect.write_connection_file(cf, **sample_info)
133 133 json_info = connect.get_connection_info(cf)
134 134 info = connect.get_connection_info(cf, unpack=True)
135 135
136 136 nt.assert_equal(type(json_info), type(""))
137 137 nt.assert_equal(info, sample_info)
138 138
139 139 info2 = json.loads(json_info)
140 140 info2['key'] = str_to_bytes(info2['key'])
141 141 nt.assert_equal(info2, sample_info)
@@ -1,53 +1,53 b''
1 1 """Tests for the notebook kernel and session manager"""
2 2
3 3 from subprocess import PIPE
4 4 import time
5 5 from unittest import TestCase
6 6
7 7 from IPython.testing import decorators as dec
8 8
9 9 from IPython.config.loader import Config
10 from IPython.kernel import KernelManager
10 from jupyter_client import KernelManager
11 11
12 12 class TestKernelManager(TestCase):
13 13
14 14 def _get_tcp_km(self):
15 15 c = Config()
16 16 km = KernelManager(config=c)
17 17 return km
18 18
19 19 def _get_ipc_km(self):
20 20 c = Config()
21 21 c.KernelManager.transport = 'ipc'
22 22 c.KernelManager.ip = 'test'
23 23 km = KernelManager(config=c)
24 24 return km
25 25
26 26 def _run_lifecycle(self, km):
27 27 km.start_kernel(stdout=PIPE, stderr=PIPE)
28 28 self.assertTrue(km.is_alive())
29 29 km.restart_kernel(now=True)
30 30 self.assertTrue(km.is_alive())
31 31 km.interrupt_kernel()
32 32 self.assertTrue(isinstance(km, KernelManager))
33 33 km.shutdown_kernel(now=True)
34 34
35 35 def test_tcp_lifecycle(self):
36 36 km = self._get_tcp_km()
37 37 self._run_lifecycle(km)
38 38
39 39 @dec.skip_win32
40 40 def test_ipc_lifecycle(self):
41 41 km = self._get_ipc_km()
42 42 self._run_lifecycle(km)
43 43
44 44 def test_get_connect_info(self):
45 45 km = self._get_tcp_km()
46 46 cinfo = km.get_connection_info()
47 47 keys = sorted(cinfo.keys())
48 48 expected = sorted([
49 49 'ip', 'transport',
50 50 'hb_port', 'shell_port', 'stdin_port', 'iopub_port', 'control_port',
51 51 'key', 'signature_scheme',
52 52 ])
53 53 self.assertEqual(keys, expected)
@@ -1,64 +1,64 b''
1 1 import json
2 2 import os
3 3 from os.path import join as pjoin
4 4 import unittest
5 5
6 6 from IPython.testing.decorators import onlyif
7 7 from IPython.utils.tempdir import TemporaryDirectory
8 from IPython.kernel import kernelspec
8 from jupyter_client import kernelspec
9 9
10 10 sample_kernel_json = {'argv':['cat', '{connection_file}'],
11 11 'display_name':'Test kernel',
12 12 }
13 13
14 14 class KernelSpecTests(unittest.TestCase):
15 15 def setUp(self):
16 16 td = TemporaryDirectory()
17 17 self.addCleanup(td.cleanup)
18 18 self.sample_kernel_dir = pjoin(td.name, 'kernels', 'Sample')
19 19 os.makedirs(self.sample_kernel_dir)
20 20 json_file = pjoin(self.sample_kernel_dir, 'kernel.json')
21 21 with open(json_file, 'w') as f:
22 22 json.dump(sample_kernel_json, f)
23 23
24 24 self.ksm = kernelspec.KernelSpecManager(ipython_dir=td.name)
25 25
26 26 td2 = TemporaryDirectory()
27 27 self.addCleanup(td2.cleanup)
28 28 self.installable_kernel = td2.name
29 29 with open(pjoin(self.installable_kernel, 'kernel.json'), 'w') as f:
30 30 json.dump(sample_kernel_json, f)
31 31
32 32 def test_find_kernel_specs(self):
33 33 kernels = self.ksm.find_kernel_specs()
34 34 self.assertEqual(kernels['sample'], self.sample_kernel_dir)
35 35
36 36 def test_get_kernel_spec(self):
37 37 ks = self.ksm.get_kernel_spec('SAMPLE') # Case insensitive
38 38 self.assertEqual(ks.resource_dir, self.sample_kernel_dir)
39 39 self.assertEqual(ks.argv, sample_kernel_json['argv'])
40 40 self.assertEqual(ks.display_name, sample_kernel_json['display_name'])
41 41 self.assertEqual(ks.env, {})
42 42
43 43 def test_install_kernel_spec(self):
44 44 self.ksm.install_kernel_spec(self.installable_kernel,
45 45 kernel_name='tstinstalled',
46 46 user=True)
47 47 self.assertIn('tstinstalled', self.ksm.find_kernel_specs())
48 48
49 49 with self.assertRaises(OSError):
50 50 self.ksm.install_kernel_spec(self.installable_kernel,
51 51 kernel_name='tstinstalled',
52 52 user=True)
53 53
54 54 # Smoketest that this succeeds
55 55 self.ksm.install_kernel_spec(self.installable_kernel,
56 56 kernel_name='tstinstalled',
57 57 replace=True, user=True)
58 58
59 59 @onlyif(os.name != 'nt' and not os.access('/usr/local/share', os.W_OK), "needs Unix system without root privileges")
60 60 def test_cant_install_kernel_spec(self):
61 61 with self.assertRaises(OSError):
62 62 self.ksm.install_kernel_spec(self.installable_kernel,
63 63 kernel_name='tstinstalled',
64 64 user=False)
@@ -1,57 +1,57 b''
1 1 """Tests for kernel utility functions
2 2
3 3 Authors
4 4 -------
5 5 * MinRK
6 6 """
7 7 #-----------------------------------------------------------------------------
8 8 # Copyright (c) 2011, the IPython Development Team.
9 9 #
10 10 # Distributed under the terms of the Modified BSD License.
11 11 #
12 12 # The full license is in the file COPYING.txt, distributed with this software.
13 13 #-----------------------------------------------------------------------------
14 14
15 15 #-----------------------------------------------------------------------------
16 16 # Imports
17 17 #-----------------------------------------------------------------------------
18 18
19 19 # Third-party imports
20 20 import nose.tools as nt
21 21
22 22 # Our own imports
23 from IPython.kernel.launcher import swallow_argv
23 from jupyter_client.launcher import swallow_argv
24 24
25 25 #-----------------------------------------------------------------------------
26 26 # Classes and functions
27 27 #-----------------------------------------------------------------------------
28 28
29 29 def test_swallow_argv():
30 30 tests = [
31 31 # expected , argv , aliases, flags
32 32 (['-a', '5'], ['-a', '5'], None, None),
33 33 (['5'], ['-a', '5'], None, ['a']),
34 34 ([], ['-a', '5'], ['a'], None),
35 35 ([], ['-a', '5'], ['a'], ['a']),
36 36 ([], ['--foo'], None, ['foo']),
37 37 ([], ['--foo'], ['foobar'], []),
38 38 ([], ['--foo', '5'], ['foo'], []),
39 39 ([], ['--foo=5'], ['foo'], []),
40 40 (['--foo=5'], ['--foo=5'], [], ['foo']),
41 41 (['5'], ['--foo', '5'], [], ['foo']),
42 42 (['bar'], ['--foo', '5', 'bar'], ['foo'], ['foo']),
43 43 (['bar'], ['--foo=5', 'bar'], ['foo'], ['foo']),
44 44 (['5','bar'], ['--foo', '5', 'bar'], None, ['foo']),
45 45 (['bar'], ['--foo', '5', 'bar'], ['foo'], None),
46 46 (['bar'], ['--foo=5', 'bar'], ['foo'], None),
47 47 ]
48 48 for expected, argv, aliases, flags in tests:
49 49 stripped = swallow_argv(argv, aliases=aliases, flags=flags)
50 50 message = '\n'.join(['',
51 51 "argv: %r" % argv,
52 52 "aliases: %r" % aliases,
53 53 "flags : %r" % flags,
54 54 "expected : %r" % expected,
55 55 "returned : %r" % stripped,
56 56 ])
57 57 nt.assert_equal(expected, stripped, message)
@@ -1,86 +1,86 b''
1 1 """Tests for the notebook kernel and session manager."""
2 2
3 3 from subprocess import PIPE
4 4 import time
5 5 from unittest import TestCase
6 6
7 7 from IPython.testing import decorators as dec
8 8
9 9 from IPython.config.loader import Config
10 10 from IPython.utils.localinterfaces import localhost
11 from IPython.kernel import KernelManager
12 from IPython.kernel.multikernelmanager import MultiKernelManager
11 from jupyter_client import KernelManager
12 from jupyter_client.multikernelmanager import MultiKernelManager
13 13
14 14 class TestKernelManager(TestCase):
15 15
16 16 def _get_tcp_km(self):
17 17 c = Config()
18 18 km = MultiKernelManager(config=c)
19 19 return km
20 20
21 21 def _get_ipc_km(self):
22 22 c = Config()
23 23 c.KernelManager.transport = 'ipc'
24 24 c.KernelManager.ip = 'test'
25 25 km = MultiKernelManager(config=c)
26 26 return km
27 27
28 28 def _run_lifecycle(self, km):
29 29 kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
30 30 self.assertTrue(km.is_alive(kid))
31 31 self.assertTrue(kid in km)
32 32 self.assertTrue(kid in km.list_kernel_ids())
33 33 self.assertEqual(len(km),1)
34 34 km.restart_kernel(kid, now=True)
35 35 self.assertTrue(km.is_alive(kid))
36 36 self.assertTrue(kid in km.list_kernel_ids())
37 37 km.interrupt_kernel(kid)
38 38 k = km.get_kernel(kid)
39 39 self.assertTrue(isinstance(k, KernelManager))
40 40 km.shutdown_kernel(kid, now=True)
41 41 self.assertTrue(not kid in km)
42 42
43 43 def _run_cinfo(self, km, transport, ip):
44 44 kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
45 45 k = km.get_kernel(kid)
46 46 cinfo = km.get_connection_info(kid)
47 47 self.assertEqual(transport, cinfo['transport'])
48 48 self.assertEqual(ip, cinfo['ip'])
49 49 self.assertTrue('stdin_port' in cinfo)
50 50 self.assertTrue('iopub_port' in cinfo)
51 51 stream = km.connect_iopub(kid)
52 52 stream.close()
53 53 self.assertTrue('shell_port' in cinfo)
54 54 stream = km.connect_shell(kid)
55 55 stream.close()
56 56 self.assertTrue('hb_port' in cinfo)
57 57 stream = km.connect_hb(kid)
58 58 stream.close()
59 59 km.shutdown_kernel(kid, now=True)
60 60
61 61 def test_tcp_lifecycle(self):
62 62 km = self._get_tcp_km()
63 63 self._run_lifecycle(km)
64 64
65 65 def test_shutdown_all(self):
66 66 km = self._get_tcp_km()
67 67 kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
68 68 self.assertIn(kid, km)
69 69 km.shutdown_all()
70 70 self.assertNotIn(kid, km)
71 71 # shutdown again is okay, because we have no kernels
72 72 km.shutdown_all()
73 73
74 74 def test_tcp_cinfo(self):
75 75 km = self._get_tcp_km()
76 76 self._run_cinfo(km, 'tcp', localhost())
77 77
78 78 @dec.skip_win32
79 79 def test_ipc_lifecycle(self):
80 80 km = self._get_ipc_km()
81 81 self._run_lifecycle(km)
82 82
83 83 @dec.skip_win32
84 84 def test_ipc_cinfo(self):
85 85 km = self._get_ipc_km()
86 86 self._run_cinfo(km, 'ipc', 'test')
@@ -1,40 +1,40 b''
1 """Test the IPython.kernel public API
1 """Test the jupyter_client public API
2 2
3 3 Authors
4 4 -------
5 5 * MinRK
6 6 """
7 7 #-----------------------------------------------------------------------------
8 8 # Copyright (c) 2013, the IPython Development Team.
9 9 #
10 10 # Distributed under the terms of the Modified BSD License.
11 11 #
12 12 # The full license is in the file COPYING.txt, distributed with this software.
13 13 #-----------------------------------------------------------------------------
14 14
15 15 import nose.tools as nt
16 16
17 from IPython.kernel import launcher, connect
17 from jupyter_client import launcher, connect
18 18 from IPython import kernel
19 19
20 20 #-----------------------------------------------------------------------------
21 21 # Classes and functions
22 22 #-----------------------------------------------------------------------------
23 23
24 24 def test_kms():
25 25 for base in ("", "Multi"):
26 26 KM = base + "KernelManager"
27 27 nt.assert_in(KM, dir(kernel))
28 28
29 29 def test_kcs():
30 30 for base in ("", "Blocking"):
31 31 KM = base + "KernelClient"
32 32 nt.assert_in(KM, dir(kernel))
33 33
34 34 def test_launcher():
35 35 for name in launcher.__all__:
36 36 nt.assert_in(name, dir(kernel))
37 37
38 38 def test_connect():
39 39 for name in connect.__all__:
40 40 nt.assert_in(name, dir(kernel))
@@ -1,318 +1,318 b''
1 1 """test building messages with Session"""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 import hmac
7 7 import os
8 8 import uuid
9 9 from datetime import datetime
10 10
11 11 import zmq
12 12
13 13 from zmq.tests import BaseZMQTestCase
14 14 from zmq.eventloop.zmqstream import ZMQStream
15 15
16 from IPython.kernel.zmq import session as ss
16 from jupyter_client import session as ss
17 17
18 18 from IPython.testing.decorators import skipif, module_not_available
19 19 from IPython.utils.py3compat import string_types
20 20 from IPython.utils import jsonutil
21 21
22 22 def _bad_packer(obj):
23 23 raise TypeError("I don't work")
24 24
25 25 def _bad_unpacker(bytes):
26 26 raise TypeError("I don't work either")
27 27
28 28 class SessionTestCase(BaseZMQTestCase):
29 29
30 30 def setUp(self):
31 31 BaseZMQTestCase.setUp(self)
32 32 self.session = ss.Session()
33 33
34 34
35 35 class TestSession(SessionTestCase):
36 36
37 37 def test_msg(self):
38 38 """message format"""
39 39 msg = self.session.msg('execute')
40 40 thekeys = set('header parent_header metadata content msg_type msg_id'.split())
41 41 s = set(msg.keys())
42 42 self.assertEqual(s, thekeys)
43 43 self.assertTrue(isinstance(msg['content'],dict))
44 44 self.assertTrue(isinstance(msg['metadata'],dict))
45 45 self.assertTrue(isinstance(msg['header'],dict))
46 46 self.assertTrue(isinstance(msg['parent_header'],dict))
47 47 self.assertTrue(isinstance(msg['msg_id'],str))
48 48 self.assertTrue(isinstance(msg['msg_type'],str))
49 49 self.assertEqual(msg['header']['msg_type'], 'execute')
50 50 self.assertEqual(msg['msg_type'], 'execute')
51 51
52 52 def test_serialize(self):
53 53 msg = self.session.msg('execute', content=dict(a=10, b=1.1))
54 54 msg_list = self.session.serialize(msg, ident=b'foo')
55 55 ident, msg_list = self.session.feed_identities(msg_list)
56 56 new_msg = self.session.deserialize(msg_list)
57 57 self.assertEqual(ident[0], b'foo')
58 58 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
59 59 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
60 60 self.assertEqual(new_msg['header'],msg['header'])
61 61 self.assertEqual(new_msg['content'],msg['content'])
62 62 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
63 63 self.assertEqual(new_msg['metadata'],msg['metadata'])
64 64 # ensure floats don't come out as Decimal:
65 65 self.assertEqual(type(new_msg['content']['b']),type(new_msg['content']['b']))
66 66
67 67 def test_default_secure(self):
68 68 self.assertIsInstance(self.session.key, bytes)
69 69 self.assertIsInstance(self.session.auth, hmac.HMAC)
70 70
71 71 def test_send(self):
72 72 ctx = zmq.Context.instance()
73 73 A = ctx.socket(zmq.PAIR)
74 74 B = ctx.socket(zmq.PAIR)
75 75 A.bind("inproc://test")
76 76 B.connect("inproc://test")
77 77
78 78 msg = self.session.msg('execute', content=dict(a=10))
79 79 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
80 80
81 81 ident, msg_list = self.session.feed_identities(B.recv_multipart())
82 82 new_msg = self.session.deserialize(msg_list)
83 83 self.assertEqual(ident[0], b'foo')
84 84 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
85 85 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
86 86 self.assertEqual(new_msg['header'],msg['header'])
87 87 self.assertEqual(new_msg['content'],msg['content'])
88 88 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
89 89 self.assertEqual(new_msg['metadata'],msg['metadata'])
90 90 self.assertEqual(new_msg['buffers'],[b'bar'])
91 91
92 92 content = msg['content']
93 93 header = msg['header']
94 94 header['msg_id'] = self.session.msg_id
95 95 parent = msg['parent_header']
96 96 metadata = msg['metadata']
97 97 msg_type = header['msg_type']
98 98 self.session.send(A, None, content=content, parent=parent,
99 99 header=header, metadata=metadata, ident=b'foo', buffers=[b'bar'])
100 100 ident, msg_list = self.session.feed_identities(B.recv_multipart())
101 101 new_msg = self.session.deserialize(msg_list)
102 102 self.assertEqual(ident[0], b'foo')
103 103 self.assertEqual(new_msg['msg_id'],header['msg_id'])
104 104 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
105 105 self.assertEqual(new_msg['header'],msg['header'])
106 106 self.assertEqual(new_msg['content'],msg['content'])
107 107 self.assertEqual(new_msg['metadata'],msg['metadata'])
108 108 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
109 109 self.assertEqual(new_msg['buffers'],[b'bar'])
110 110
111 111 header['msg_id'] = self.session.msg_id
112 112
113 113 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
114 114 ident, new_msg = self.session.recv(B)
115 115 self.assertEqual(ident[0], b'foo')
116 116 self.assertEqual(new_msg['msg_id'],header['msg_id'])
117 117 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
118 118 self.assertEqual(new_msg['header'],msg['header'])
119 119 self.assertEqual(new_msg['content'],msg['content'])
120 120 self.assertEqual(new_msg['metadata'],msg['metadata'])
121 121 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
122 122 self.assertEqual(new_msg['buffers'],[b'bar'])
123 123
124 124 A.close()
125 125 B.close()
126 126 ctx.term()
127 127
128 128 def test_args(self):
129 129 """initialization arguments for Session"""
130 130 s = self.session
131 131 self.assertTrue(s.pack is ss.default_packer)
132 132 self.assertTrue(s.unpack is ss.default_unpacker)
133 133 self.assertEqual(s.username, os.environ.get('USER', u'username'))
134 134
135 135 s = ss.Session()
136 136 self.assertEqual(s.username, os.environ.get('USER', u'username'))
137 137
138 138 self.assertRaises(TypeError, ss.Session, pack='hi')
139 139 self.assertRaises(TypeError, ss.Session, unpack='hi')
140 140 u = str(uuid.uuid4())
141 141 s = ss.Session(username=u'carrot', session=u)
142 142 self.assertEqual(s.session, u)
143 143 self.assertEqual(s.username, u'carrot')
144 144
145 145 def test_tracking(self):
146 146 """test tracking messages"""
147 147 a,b = self.create_bound_pair(zmq.PAIR, zmq.PAIR)
148 148 s = self.session
149 149 s.copy_threshold = 1
150 150 stream = ZMQStream(a)
151 151 msg = s.send(a, 'hello', track=False)
152 152 self.assertTrue(msg['tracker'] is ss.DONE)
153 153 msg = s.send(a, 'hello', track=True)
154 154 self.assertTrue(isinstance(msg['tracker'], zmq.MessageTracker))
155 155 M = zmq.Message(b'hi there', track=True)
156 156 msg = s.send(a, 'hello', buffers=[M], track=True)
157 157 t = msg['tracker']
158 158 self.assertTrue(isinstance(t, zmq.MessageTracker))
159 159 self.assertRaises(zmq.NotDone, t.wait, .1)
160 160 del M
161 161 t.wait(1) # this will raise
162 162
163 163
164 164 def test_unique_msg_ids(self):
165 165 """test that messages receive unique ids"""
166 166 ids = set()
167 167 for i in range(2**12):
168 168 h = self.session.msg_header('test')
169 169 msg_id = h['msg_id']
170 170 self.assertTrue(msg_id not in ids)
171 171 ids.add(msg_id)
172 172
173 173 def test_feed_identities(self):
174 174 """scrub the front for zmq IDENTITIES"""
175 175 theids = "engine client other".split()
176 176 content = dict(code='whoda',stuff=object())
177 177 themsg = self.session.msg('execute',content=content)
178 178 pmsg = theids
179 179
180 180 def test_session_id(self):
181 181 session = ss.Session()
182 182 # get bs before us
183 183 bs = session.bsession
184 184 us = session.session
185 185 self.assertEqual(us.encode('ascii'), bs)
186 186 session = ss.Session()
187 187 # get us before bs
188 188 us = session.session
189 189 bs = session.bsession
190 190 self.assertEqual(us.encode('ascii'), bs)
191 191 # change propagates:
192 192 session.session = 'something else'
193 193 bs = session.bsession
194 194 us = session.session
195 195 self.assertEqual(us.encode('ascii'), bs)
196 196 session = ss.Session(session='stuff')
197 197 # get us before bs
198 198 self.assertEqual(session.bsession, session.session.encode('ascii'))
199 199 self.assertEqual(b'stuff', session.bsession)
200 200
201 201 def test_zero_digest_history(self):
202 202 session = ss.Session(digest_history_size=0)
203 203 for i in range(11):
204 204 session._add_digest(uuid.uuid4().bytes)
205 205 self.assertEqual(len(session.digest_history), 0)
206 206
207 207 def test_cull_digest_history(self):
208 208 session = ss.Session(digest_history_size=100)
209 209 for i in range(100):
210 210 session._add_digest(uuid.uuid4().bytes)
211 211 self.assertTrue(len(session.digest_history) == 100)
212 212 session._add_digest(uuid.uuid4().bytes)
213 213 self.assertTrue(len(session.digest_history) == 91)
214 214 for i in range(9):
215 215 session._add_digest(uuid.uuid4().bytes)
216 216 self.assertTrue(len(session.digest_history) == 100)
217 217 session._add_digest(uuid.uuid4().bytes)
218 218 self.assertTrue(len(session.digest_history) == 91)
219 219
220 220 def test_bad_pack(self):
221 221 try:
222 222 session = ss.Session(pack=_bad_packer)
223 223 except ValueError as e:
224 224 self.assertIn("could not serialize", str(e))
225 225 self.assertIn("don't work", str(e))
226 226 else:
227 227 self.fail("Should have raised ValueError")
228 228
229 229 def test_bad_unpack(self):
230 230 try:
231 231 session = ss.Session(unpack=_bad_unpacker)
232 232 except ValueError as e:
233 233 self.assertIn("could not handle output", str(e))
234 234 self.assertIn("don't work either", str(e))
235 235 else:
236 236 self.fail("Should have raised ValueError")
237 237
238 238 def test_bad_packer(self):
239 239 try:
240 240 session = ss.Session(packer=__name__ + '._bad_packer')
241 241 except ValueError as e:
242 242 self.assertIn("could not serialize", str(e))
243 243 self.assertIn("don't work", str(e))
244 244 else:
245 245 self.fail("Should have raised ValueError")
246 246
247 247 def test_bad_unpacker(self):
248 248 try:
249 249 session = ss.Session(unpacker=__name__ + '._bad_unpacker')
250 250 except ValueError as e:
251 251 self.assertIn("could not handle output", str(e))
252 252 self.assertIn("don't work either", str(e))
253 253 else:
254 254 self.fail("Should have raised ValueError")
255 255
256 256 def test_bad_roundtrip(self):
257 257 with self.assertRaises(ValueError):
258 258 session = ss.Session(unpack=lambda b: 5)
259 259
260 260 def _datetime_test(self, session):
261 261 content = dict(t=datetime.now())
262 262 metadata = dict(t=datetime.now())
263 263 p = session.msg('msg')
264 264 msg = session.msg('msg', content=content, metadata=metadata, parent=p['header'])
265 265 smsg = session.serialize(msg)
266 266 msg2 = session.deserialize(session.feed_identities(smsg)[1])
267 267 assert isinstance(msg2['header']['date'], datetime)
268 268 self.assertEqual(msg['header'], msg2['header'])
269 269 self.assertEqual(msg['parent_header'], msg2['parent_header'])
270 270 self.assertEqual(msg['parent_header'], msg2['parent_header'])
271 271 assert isinstance(msg['content']['t'], datetime)
272 272 assert isinstance(msg['metadata']['t'], datetime)
273 273 assert isinstance(msg2['content']['t'], string_types)
274 274 assert isinstance(msg2['metadata']['t'], string_types)
275 275 self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
276 276 self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
277 277
278 278 def test_datetimes(self):
279 279 self._datetime_test(self.session)
280 280
281 281 def test_datetimes_pickle(self):
282 282 session = ss.Session(packer='pickle')
283 283 self._datetime_test(session)
284 284
285 285 @skipif(module_not_available('msgpack'))
286 286 def test_datetimes_msgpack(self):
287 287 import msgpack
288 288
289 289 session = ss.Session(
290 290 pack=msgpack.packb,
291 291 unpack=lambda buf: msgpack.unpackb(buf, encoding='utf8'),
292 292 )
293 293 self._datetime_test(session)
294 294
295 295 def test_send_raw(self):
296 296 ctx = zmq.Context.instance()
297 297 A = ctx.socket(zmq.PAIR)
298 298 B = ctx.socket(zmq.PAIR)
299 299 A.bind("inproc://test")
300 300 B.connect("inproc://test")
301 301
302 302 msg = self.session.msg('execute', content=dict(a=10))
303 303 msg_list = [self.session.pack(msg[part]) for part in
304 304 ['header', 'parent_header', 'metadata', 'content']]
305 305 self.session.send_raw(A, msg_list, ident=b'foo')
306 306
307 307 ident, new_msg_list = self.session.feed_identities(B.recv_multipart())
308 308 new_msg = self.session.deserialize(new_msg_list)
309 309 self.assertEqual(ident[0], b'foo')
310 310 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
311 311 self.assertEqual(new_msg['header'],msg['header'])
312 312 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
313 313 self.assertEqual(new_msg['content'],msg['content'])
314 314 self.assertEqual(new_msg['metadata'],msg['metadata'])
315 315
316 316 A.close()
317 317 B.close()
318 318 ctx.term()
@@ -1,230 +1,230 b''
1 1 """ Defines a KernelClient that provides thread-safe sockets with async callbacks on message replies.
2 2 """
3 3 from __future__ import absolute_import
4 4 import atexit
5 5 import errno
6 6 from threading import Thread
7 7 import time
8 8
9 9 import zmq
10 10 # import ZMQError in top-level namespace, to avoid ugly attribute-error messages
11 11 # during garbage collection of threads at exit:
12 12 from zmq import ZMQError
13 13 from zmq.eventloop import ioloop, zmqstream
14 14
15 15 # Local imports
16 16 from IPython.utils.traitlets import Type, Instance
17 from IPython.kernel.channels import HBChannel
18 from IPython.kernel import KernelClient
19 from IPython.kernel.channels import HBChannel
17 from jupyter_client.channels import HBChannel
18 from jupyter_client import KernelClient
19 from jupyter_client.channels import HBChannel
20 20
class ThreadedZMQSocketChannel(object):
    """A ZMQ socket invoking a callback in the ioloop"""
    # Class-level defaults; __init__ assigns the real objects per instance.
    session = None   # Session used to (de)serialize messages
    socket = None    # the zmq.Socket this channel wraps
    ioloop = None    # pyzmq ioloop whose thread owns all socket I/O
    stream = None    # ZMQStream wrapping `socket` on `ioloop`
    _inspect = None  # optional callable invoked on each incoming message

    def __init__(self, socket, session, loop):
        """Create a channel.

        Parameters
        ----------
        socket : :class:`zmq.Socket`
            The ZMQ socket to use.
        session : :class:`session.Session`
            The session to use.
        loop
            A pyzmq ioloop to connect the socket to using a ZMQStream
        """
        super(ThreadedZMQSocketChannel, self).__init__()

        self.socket = socket
        self.session = session
        self.ioloop = loop

        # Route every received multipart message through _handle_recv,
        # which runs on the ioloop's thread.
        self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
        self.stream.on_recv(self._handle_recv)

    _is_alive = False
    def is_alive(self):
        # Plain flag toggled by start()/stop(); this channel has no thread
        # of its own — the shared ioloop thread does the work.
        return self._is_alive

    def start(self):
        self._is_alive = True

    def stop(self):
        self._is_alive = False

    def close(self):
        # Best-effort close: swallow errors (e.g. already-closed socket)
        # and drop the reference so close() is idempotent.
        if self.socket is not None:
            try:
                self.socket.close(linger=0)
            except Exception:
                pass
            self.socket = None

    def send(self, msg):
        """Queue a message to be sent from the IOLoop's thread.

        Parameters
        ----------
        msg : message to send

        This is threadsafe, as it uses IOLoop.add_callback to give the loop's
        thread control of the action.
        """
        def thread_send():
            self.session.send(self.stream, msg)
        self.ioloop.add_callback(thread_send)

    def _handle_recv(self, msg):
        """Callback for stream.on_recv.

        Unpacks message, and calls handlers with it.
        """
        # Strip routing identities, then deserialize the remaining frames.
        ident,smsg = self.session.feed_identities(msg)
        msg = self.session.deserialize(smsg)
        # let client inspect messages
        if self._inspect:
            self._inspect(msg)
        self.call_handlers(msg)

    def call_handlers(self, msg):
        """This method is called in the ioloop thread when a message arrives.

        Subclasses should override this method to handle incoming messages.
        It is important to remember that this method is called in the thread
        so that some logic must be done to ensure that the application level
        handlers are called in the application thread.
        """
        pass

    def process_events(self):
        """Subclasses should override this with a method
        processing any pending GUI events.
        """
        pass


    def flush(self, timeout=1.0):
        """Immediately processes all pending messages on this channel.

        This is only used for the IOPub channel.

        Callers should use this method to ensure that :meth:`call_handlers`
        has been called for all messages that have been received on the
        0MQ SUB socket of this channel.

        This method is thread safe.

        Parameters
        ----------
        timeout : float, optional
            The maximum amount of time to spend flushing, in seconds. The
            default is one second.
        """
        # We do the IOLoop callback process twice to ensure that the IOLoop
        # gets to perform at least one full poll.
        stop_time = time.time() + timeout
        for i in range(2):
            # Hand _flush to the loop thread, then busy-wait (with short
            # sleeps) until it has run or the deadline passes.
            self._flushed = False
            self.ioloop.add_callback(self._flush)
            while not self._flushed and time.time() < stop_time:
                time.sleep(0.01)

    def _flush(self):
        """Callback for :method:`self.flush`."""
        # Runs on the ioloop thread; signals completion via _flushed.
        self.stream.flush()
        self._flushed = True
141 141
142 142
class IOLoopThread(Thread):
    """Run a pyzmq ioloop in a thread to send and receive messages
    """
    # Must exist as a class attribute: run() reads self._exiting in its
    # except clause, which can fire long before the atexit hook
    # (_notice_exit) has ever set it. Without this default, an exception
    # during normal operation raised AttributeError and masked the real
    # traceback.
    _exiting = False

    def __init__(self, loop):
        """
        Parameters
        ----------
        loop
            A pyzmq ioloop to run; a fresh IOLoop is created when falsy.
        """
        super(IOLoopThread, self).__init__()
        # Daemonize so a forgotten loop thread never blocks interpreter exit.
        self.daemon = True
        atexit.register(self._notice_exit)
        self.ioloop = loop or ioloop.IOLoop()

    def _notice_exit(self):
        # atexit hook: mark that the interpreter is shutting down so run()
        # can treat shutdown-time errors as benign.
        self._exiting = True

    def run(self):
        """Run my loop, ignoring EINTR events in the poller"""
        while True:
            try:
                self.ioloop.start()
            except ZMQError as e:
                if e.errno == errno.EINTR:
                    # Interrupted system call: just restart the loop.
                    continue
                else:
                    raise
            except Exception:
                if self._exiting:
                    # Errors while the interpreter is tearing down are
                    # expected noise; exit quietly.
                    break
                else:
                    raise
            else:
                # Loop stopped normally (ioloop.stop() was called).
                break

    def stop(self):
        """Stop the channel's event loop and join its thread.

        This calls :meth:`~threading.Thread.join` and returns when the thread
        terminates. :class:`RuntimeError` will be raised if
        :meth:`~threading.Thread.start` is called again.
        """
        if self.ioloop is not None:
            self.ioloop.stop()
        self.join()
        self.close()

    def close(self):
        # Best-effort: release the loop's file descriptors; ignore errors
        # if the loop is already closed.
        if self.ioloop is not None:
            try:
                self.ioloop.close(all_fds=True)
            except Exception:
                pass
191 191
192 192
class ThreadedKernelClient(KernelClient):
    """ A KernelClient that provides thread-safe sockets with async callbacks on message replies.
    """

    _ioloop = None
    @property
    def ioloop(self):
        """The pyzmq ioloop shared by all channels, created on first access."""
        if self._ioloop is None:
            self._ioloop = ioloop.IOLoop()
        return self._ioloop

    ioloop_thread = Instance(IOLoopThread)

    def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
        """Start the loop thread, then let the base class open the channels."""
        # Watch the shell channel for the kernel-info reply before handing
        # control to the base class.
        if shell:
            self.shell_channel._inspect = self._check_kernel_info_reply

        loop_thread = IOLoopThread(self.ioloop)
        self.ioloop_thread = loop_thread
        loop_thread.start()

        super(ThreadedKernelClient, self).start_channels(shell, iopub, stdin, hb)

    def _check_kernel_info_reply(self, msg):
        """Runs in the ioloop thread when the kernel info reply is received.

        Handles the reply once, then stops inspecting shell messages.
        """
        if msg['msg_type'] != 'kernel_info_reply':
            return
        self._handle_kernel_info_reply(msg)
        self.shell_channel._inspect = None

    def stop_channels(self):
        """Close the channels, then shut down the loop thread if still running."""
        super(ThreadedKernelClient, self).stop_channels()
        loop_thread = self.ioloop_thread
        if loop_thread.is_alive():
            loop_thread.stop()

    # The classes to use for the various channels
    iopub_channel_class = Type(ThreadedZMQSocketChannel)
    shell_channel_class = Type(ThreadedZMQSocketChannel)
    stdin_channel_class = Type(ThreadedZMQSocketChannel)
    hb_channel_class = Type(HBChannel)
General Comments 0
You need to be logged in to leave comments. Login now