##// END OF EJS Templates
Merge pull request #8175 from minrk/bigsplit-kernel...
Min RK -
r20964:b5170531 merge
parent child Browse files
Show More
This diff has been collapsed as it changes many lines, (883 lines changed) Show them Hide them
@@ -0,0 +1,883 b''
1 """Session object for building, serializing, sending, and receiving messages in
2 IPython. The Session object supports serialization, HMAC signatures, and
3 metadata on messages.
4
5 Also defined here are utilities for working with Sessions:
6 * A SessionFactory to be used as a base class for configurables that work with
7 Sessions.
8 * A Message object for convenience that allows attribute-access to the msg dict.
9 """
10
11 # Copyright (c) IPython Development Team.
12 # Distributed under the terms of the Modified BSD License.
13
14 import hashlib
15 import hmac
16 import logging
17 import os
18 import pprint
19 import random
20 import uuid
21 import warnings
22 from datetime import datetime
23
try:
    import cPickle
    pickle = cPickle
except ImportError:
    # Python 3: cPickle was merged into the stdlib pickle module
    cPickle = None
    import pickle

try:
    # We are using compare_digest to limit the surface of timing attacks
    from hmac import compare_digest
except ImportError:
    # Python < 2.7.7: When digests don't match no feedback is provided,
    # limiting the surface of attack
    def compare_digest(a,b): return a == b
38
39 import zmq
40 from zmq.utils import jsonapi
41 from zmq.eventloop.ioloop import IOLoop
42 from zmq.eventloop.zmqstream import ZMQStream
43
44 from IPython.core.release import kernel_protocol_version
45 from IPython.config.configurable import Configurable, LoggingConfigurable
46 from IPython.utils import io
47 from IPython.utils.importstring import import_item
48 from IPython.utils.jsonutil import extract_dates, squash_dates, date_default
49 from IPython.utils.py3compat import (str_to_bytes, str_to_unicode, unicode_type,
50 iteritems)
51 from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,
52 DottedObjectName, CUnicode, Dict, Integer,
53 TraitError,
54 )
55 from IPython.utils.pickleutil import PICKLE_PROTOCOL
56 from jupyter_client.adapter import adapt
57
58 #-----------------------------------------------------------------------------
59 # utility functions
60 #-----------------------------------------------------------------------------
61
def squash_unicode(obj):
    """Coerce unicode back to bytestrings, recursively.

    Dicts and lists are modified in place; unicode dict keys are
    re-inserted as their bytes equivalents.
    """
    if isinstance(obj, dict):
        # Iterate over a snapshot of the keys: the loop body pops unicode
        # keys and inserts bytes keys, and mutating a dict while iterating
        # its live key view is an error on Python 3.
        for key in list(obj.keys()):
            obj[key] = squash_unicode(obj[key])
            if isinstance(key, unicode_type):
                obj[squash_unicode(key)] = obj.pop(key)
    elif isinstance(obj, list):
        for i, v in enumerate(obj):
            obj[i] = squash_unicode(v)
    elif isinstance(obj, unicode_type):
        obj = obj.encode('utf8')
    return obj
75
76 #-----------------------------------------------------------------------------
77 # globals and defaults
78 #-----------------------------------------------------------------------------
79
# default values for the thresholds:
MAX_ITEMS = 64
MAX_BYTES = 1024

def json_packer(obj):
    """Serialize *obj* to JSON bytes.

    ISO8601-ifies datetime objects (via date_default), allows unicode,
    and disallows nan, because nan is not actually valid JSON.
    """
    return jsonapi.dumps(obj, default=date_default,
        ensure_ascii=False, allow_nan=False,
    )

def json_unpacker(s):
    """Deserialize JSON bytes/str *s* to a Python object."""
    return jsonapi.loads(s)

def pickle_packer(o):
    """Serialize *o* with pickle, squashing datetimes to ISO8601 first."""
    return pickle.dumps(squash_dates(o), PICKLE_PROTOCOL)

pickle_unpacker = pickle.loads

default_packer = json_packer
default_unpacker = json_unpacker

DELIM = b"<IDS|MSG>"
# singleton dummy tracker, which will always report as done
DONE = zmq.MessageTracker()
101
102 #-----------------------------------------------------------------------------
103 # Mixin tools for apps that use Sessions
104 #-----------------------------------------------------------------------------
105
# command-line aliases mapping short names to Session config traits
session_aliases = dict(
    ident = 'Session.session',
    user = 'Session.username',
    keyfile = 'Session.keyfile',
)

# command-line flags toggling HMAC message authentication on/off
session_flags  = {
    'secure' : ({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())),
                            'keyfile' : '' }},
        """Use HMAC digests for authentication of messages.
        Setting this flag will generate a new UUID to use as the HMAC key.
        """),
    'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }},
        """Don't authenticate messages."""),
}
121
def default_secure(cfg):
    """Set the default behavior for a config environment to be secure.

    If Session.key/keyfile have not been set, set Session.key to
    a new random UUID.
    """
    warnings.warn("default_secure is deprecated", DeprecationWarning)
    # respect an explicitly configured key or keyfile
    if 'Session' in cfg and ('key' in cfg.Session or 'keyfile' in cfg.Session):
        return
    # key/keyfile not specified, generate new UUID:
    cfg.Session.key = str_to_bytes(str(uuid.uuid4()))
134
135
136 #-----------------------------------------------------------------------------
137 # Classes
138 #-----------------------------------------------------------------------------
139
class SessionFactory(LoggingConfigurable):
    """The Base class for configurables that have a Session, Context, logger,
    and IOLoop.
    """

    # name used to look up this object's logger
    logname = Unicode('')
    def _logname_changed(self, name, old, new):
        # swap in a logger matching the new name
        self.log = logging.getLogger(new)

    # not configurable:
    context = Instance('zmq.Context')
    def _context_default(self):
        # share the process-global zmq context by default
        return zmq.Context.instance()

    session = Instance('jupyter_client.session.Session')

    loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
    def _loop_default(self):
        return IOLoop.instance()

    def __init__(self, **kwargs):
        super(SessionFactory, self).__init__(**kwargs)

        if self.session is None:
            # construct the session; kwargs are forwarded so Session traits
            # (key, packer, etc.) can be passed through the factory
            self.session = Session(**kwargs)
166
167
class Message(object):
    """A simple message object that maps dict keys to attributes.

    A Message can be created from a dict and a dict from a Message instance
    simply by calling dict(msg_obj)."""

    def __init__(self, msg_dict):
        # promote nested dicts to Messages so attribute access works
        # at every level of the message
        for key, value in iteritems(dict(msg_dict)):
            self.__dict__[key] = Message(value) if isinstance(value, dict) else value

    def __iter__(self):
        # yielding (key, value) pairs lets dict(msg_obj) work out of the box
        return iter(iteritems(self.__dict__))

    def __repr__(self):
        return repr(self.__dict__)

    def __str__(self):
        return pprint.pformat(self.__dict__)

    def __contains__(self, key):
        return key in self.__dict__

    def __getitem__(self, key):
        return self.__dict__[key]
197
def msg_header(msg_id, msg_type, username, session):
    """Build a new message header dict.

    In addition to the four identifying fields, the header is stamped
    with the current time and the kernel protocol version.
    """
    return dict(
        msg_id=msg_id,
        msg_type=msg_type,
        username=username,
        session=session,
        date=datetime.now(),
        version=kernel_protocol_version,
    )
202
def extract_header(msg_or_header):
    """Given a message or header, return the header."""
    if not msg_or_header:
        return {}
    try:
        # See if msg_or_header is the entire message.
        h = msg_or_header['header']
    except KeyError:
        # Not a full message: it must itself be a header.
        msg_or_header['msg_id']  # raises KeyError if it is neither
        h = msg_or_header
    # always hand back a real dict
    return h if isinstance(h, dict) else dict(h)
221
class Session(Configurable):
    """Object for handling serialization and sending of messages.

    The Session object handles building messages and sending them
    with ZMQ sockets or ZMQStream objects.  Objects can communicate with each
    other over the network via Session objects, and only need to work with the
    dict-based IPython message spec.  The Session will handle
    serialization/deserialization, security, and metadata.

    Sessions support configurable serialization via packer/unpacker traits,
    and signing with HMAC digests via the key/keyfile traits.

    Parameters
    ----------

    debug : bool
        whether to trigger extra debugging statements
    packer/unpacker : str : 'json', 'pickle' or import_string
        importstrings for methods to serialize message parts.  If just
        'json' or 'pickle', predefined JSON and pickle packers will be used.
        Otherwise, the entire importstring must be used.

        The functions must accept at least valid JSON input, and output *bytes*.

        For example, to use msgpack:
        packer = 'msgpack.packb', unpacker='msgpack.unpackb'
    pack/unpack : callables
        You can also set the pack/unpack callables for serialization directly.
    session : bytes
        the ID of this Session object.  The default is to generate a new UUID.
    username : unicode
        username added to message headers.  The default is to ask the OS.
    key : bytes
        The key used to initialize an HMAC signature.  If unset, messages
        will not be signed or checked.
    keyfile : filepath
        The file containing a key.  If this is set, `key` will be initialized
        to the contents of the file.

    """

    debug=Bool(False, config=True, help="""Debug output in the Session""")

    packer = DottedObjectName('json',config=True,
            help="""The name of the packer for serializing messages.
            Should be one of 'json', 'pickle', or an import name
            for a custom callable serializer.""")
    def _packer_changed(self, name, old, new):
        # For the predefined schemes, keep pack/unpack AND the unpacker
        # trait in sync; a custom packer only sets ``pack``.
        if new.lower() == 'json':
            self.pack = json_packer
            self.unpack = json_unpacker
            self.unpacker = new
        elif new.lower() == 'pickle':
            self.pack = pickle_packer
            self.unpack = pickle_unpacker
            self.unpacker = new
        else:
            self.pack = import_item(str(new))

    unpacker = DottedObjectName('json', config=True,
        help="""The name of the unpacker for unserializing messages.
        Only used with custom functions for `packer`.""")
    def _unpacker_changed(self, name, old, new):
        # mirror image of _packer_changed
        if new.lower() == 'json':
            self.pack = json_packer
            self.unpack = json_unpacker
            self.packer = new
        elif new.lower() == 'pickle':
            self.pack = pickle_packer
            self.unpack = pickle_unpacker
            self.packer = new
        else:
            self.unpack = import_item(str(new))

    session = CUnicode(u'', config=True,
        help="""The UUID identifying this session.""")
    def _session_default(self):
        # generating the default also initializes bsession as a side effect
        u = unicode_type(uuid.uuid4())
        self.bsession = u.encode('ascii')
        return u

    def _session_changed(self, name, old, new):
        # keep the bytes form in sync with the unicode form
        self.bsession = self.session.encode('ascii')

    # bsession is the session as bytes
    bsession = CBytes(b'')

    username = Unicode(str_to_unicode(os.environ.get('USER', 'username')),
        help="""Username for the Session. Default is your system username.""",
        config=True)

    metadata = Dict({}, config=True,
        help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")

    # if 0, no adapting to do.
    adapt_version = Integer(0)

    # message signature related traits:

    key = CBytes(config=True,
        help="""execution key, for signing messages.""")
    def _key_default(self):
        return str_to_bytes(str(uuid.uuid4()))

    def _key_changed(self):
        # a new key invalidates the current HMAC object
        self._new_auth()

    signature_scheme = Unicode('hmac-sha256', config=True,
        help="""The digest scheme used to construct the message signatures.
        Must have the form 'hmac-HASH'.""")
    def _signature_scheme_changed(self, name, old, new):
        if not new.startswith('hmac-'):
            raise TraitError("signature_scheme must start with 'hmac-', got %r" % new)
        hash_name = new.split('-', 1)[1]
        try:
            # look up the named hash constructor in hashlib
            self.digest_mod = getattr(hashlib, hash_name)
        except AttributeError:
            raise TraitError("hashlib has no such attribute: %s" % hash_name)
        self._new_auth()

    digest_mod = Any()
    def _digest_mod_default(self):
        return hashlib.sha256

    # the HMAC object used for signing; None means "do not sign"
    auth = Instance(hmac.HMAC)

    def _new_auth(self):
        # rebuild the HMAC prototype from the current key/digest, or
        # disable signing entirely when the key is empty
        if self.key:
            self.auth = hmac.HMAC(self.key, digestmod=self.digest_mod)
        else:
            self.auth = None

    digest_history = Set()
    digest_history_size = Integer(2**16, config=True,
        help="""The maximum number of digests to remember.

        The digest history will be culled when it exceeds this value.
        """
    )

    keyfile = Unicode('', config=True,
        help="""path to file containing execution key.""")
    def _keyfile_changed(self, name, old, new):
        # setting keyfile loads the key from disk
        with open(new, 'rb') as f:
            self.key = f.read().strip()

    # for protecting against sends from forks
    pid = Integer()

    # serialization traits:

    pack = Any(default_packer) # the actual packer function
    def _pack_changed(self, name, old, new):
        if not callable(new):
            raise TypeError("packer must be callable, not %s"%type(new))

    unpack = Any(default_unpacker) # the actual packer function
    def _unpack_changed(self, name, old, new):
        # unpacker is not checked - it is assumed to be
        if not callable(new):
            raise TypeError("unpacker must be callable, not %s"%type(new))

    # thresholds:
    copy_threshold = Integer(2**16, config=True,
        help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
    buffer_threshold = Integer(MAX_BYTES, config=True,
        help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.")
    item_threshold = Integer(MAX_ITEMS, config=True,
        help="""The maximum number of items for a container to be introspected for custom serialization.
        Containers larger than this are pickled outright.
        """
    )


    def __init__(self, **kwargs):
        """create a Session object

        Parameters
        ----------

        debug : bool
            whether to trigger extra debugging statements
        packer/unpacker : str : 'json', 'pickle' or import_string
            importstrings for methods to serialize message parts.  If just
            'json' or 'pickle', predefined JSON and pickle packers will be used.
            Otherwise, the entire importstring must be used.

            The functions must accept at least valid JSON input, and output
            *bytes*.

            For example, to use msgpack:
            packer = 'msgpack.packb', unpacker='msgpack.unpackb'
        pack/unpack : callables
            You can also set the pack/unpack callables for serialization
            directly.
        session : unicode (must be ascii)
            the ID of this Session object.  The default is to generate a new
            UUID.
        bsession : bytes
            The session as bytes
        username : unicode
            username added to message headers.  The default is to ask the OS.
        key : bytes
            The key used to initialize an HMAC signature.  If unset, messages
            will not be signed or checked.
        signature_scheme : str
            The message digest scheme. Currently must be of the form 'hmac-HASH',
            where 'HASH' is a hashing function available in Python's hashlib.
            The default is 'hmac-sha256'.
            This is ignored if 'key' is empty.
        keyfile : filepath
            The file containing a key.  If this is set, `key` will be
            initialized to the contents of the file.
        """
        super(Session, self).__init__(**kwargs)
        self._check_packers()
        # cache the packed empty dict, used for empty content
        self.none = self.pack({})
        # ensure self._session_default() if necessary, so bsession is defined:
        self.session
        # remember the creating pid, to detect sends from forked children
        self.pid = os.getpid()
        self._new_auth()

    @property
    def msg_id(self):
        """always return new uuid"""
        return str(uuid.uuid4())

    def _check_packers(self):
        """check packers for datetime support."""
        pack = self.pack
        unpack = self.unpack

        # check simple serialization
        msg = dict(a=[1,'hi'])
        try:
            packed = pack(msg)
        except Exception as e:
            msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}"
            if self.packer == 'json':
                jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
            else:
                jsonmsg = ""
            raise ValueError(
                msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg)
            )

        # ensure packed message is bytes
        if not isinstance(packed, bytes):
            raise ValueError("message packed to %r, but bytes are required"%type(packed))

        # check that unpack is pack's inverse
        try:
            unpacked = unpack(packed)
            assert unpacked == msg
        except Exception as e:
            msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}"
            if self.packer == 'json':
                jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
            else:
                jsonmsg = ""
            raise ValueError(
                msg.format(packer=self.packer, unpacker=self.unpacker, e=e, jsonmsg=jsonmsg)
            )

        # check datetime support
        # datetimes must round-trip as ISO8601 strings, not datetime objects;
        # if the packer keeps datetimes, wrap pack/unpack in squashing shims.
        msg = dict(t=datetime.now())
        try:
            unpacked = unpack(pack(msg))
            if isinstance(unpacked['t'], datetime):
                raise ValueError("Shouldn't deserialize to datetime")
        except Exception:
            self.pack = lambda o: pack(squash_dates(o))
            self.unpack = lambda s: unpack(s)

    def msg_header(self, msg_type):
        # delegate to the module-level msg_header with this session's identity
        return msg_header(self.msg_id, msg_type, self.username, self.session)

    def msg(self, msg_type, content=None, parent=None, header=None, metadata=None):
        """Return the nested message dict.

        This format is different from what is sent over the wire. The
        serialize/deserialize methods converts this nested message dict to the wire
        format, which is a list of message parts.
        """
        msg = {}
        header = self.msg_header(msg_type) if header is None else header
        msg['header'] = header
        msg['msg_id'] = header['msg_id']
        msg['msg_type'] = header['msg_type']
        msg['parent_header'] = {} if parent is None else extract_header(parent)
        msg['content'] = {} if content is None else content
        # per-message metadata is layered on top of the session default
        msg['metadata'] = self.metadata.copy()
        if metadata is not None:
            msg['metadata'].update(metadata)
        return msg

    def sign(self, msg_list):
        """Sign a message with HMAC digest. If no auth, return b''.

        Parameters
        ----------
        msg_list : list
            The [p_header,p_parent,p_content] part of the message list.
        """
        if self.auth is None:
            return b''
        # copy the HMAC prototype so the stored key state is never consumed
        h = self.auth.copy()
        for m in msg_list:
            h.update(m)
        return str_to_bytes(h.hexdigest())

    def serialize(self, msg, ident=None):
        """Serialize the message components to bytes.

        This is roughly the inverse of deserialize. The serialize/deserialize
        methods work with full message lists, whereas pack/unpack work with
        the individual message parts in the message list.

        Parameters
        ----------
        msg : dict or Message
            The next message dict as returned by the self.msg method.

        Returns
        -------
        msg_list : list
            The list of bytes objects to be sent with the format::

                [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent,
                 p_metadata, p_content, buffer1, buffer2, ...]

            In this list, the ``p_*`` entities are the packed or serialized
            versions, so if JSON is used, these are utf8 encoded JSON strings.
        """
        content = msg.get('content', {})
        if content is None:
            content = self.none
        elif isinstance(content, dict):
            content = self.pack(content)
        elif isinstance(content, bytes):
            # content is already packed, as in a relayed message
            pass
        elif isinstance(content, unicode_type):
            # should be bytes, but JSON often spits out unicode
            content = content.encode('utf8')
        else:
            raise TypeError("Content incorrect type: %s"%type(content))

        real_message = [self.pack(msg['header']),
                        self.pack(msg['parent_header']),
                        self.pack(msg['metadata']),
                        content,
        ]

        to_send = []

        if isinstance(ident, list):
            # accept list of idents
            to_send.extend(ident)
        elif ident is not None:
            to_send.append(ident)
        # DELIM separates routing idents from the signed payload
        to_send.append(DELIM)

        signature = self.sign(real_message)
        to_send.append(signature)

        to_send.extend(real_message)

        return to_send

    def send(self, stream, msg_or_type, content=None, parent=None, ident=None,
             buffers=None, track=False, header=None, metadata=None):
        """Build and send a message via stream or socket.

        The message format used by this function internally is as follows:

        [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_content,
         buffer1,buffer2,...]

        The serialize/deserialize methods convert the nested message dict into this
        format.

        Parameters
        ----------

        stream : zmq.Socket or ZMQStream
            The socket-like object used to send the data.
        msg_or_type : str or Message/dict
            Normally, msg_or_type will be a msg_type unless a message is being
            sent more than once. If a header is supplied, this can be set to
            None and the msg_type will be pulled from the header.

        content : dict or None
            The content of the message (ignored if msg_or_type is a message).
        header : dict or None
            The header dict for the message (ignored if msg_to_type is a message).
        parent : Message or dict or None
            The parent or parent header describing the parent of this message
            (ignored if msg_or_type is a message).
        ident : bytes or list of bytes
            The zmq.IDENTITY routing path.
        metadata : dict or None
            The metadata describing the message
        buffers : list or None
            The already-serialized buffers to be appended to the message.
        track : bool
            Whether to track.  Only for use with Sockets, because ZMQStream
            objects cannot track messages.


        Returns
        -------
        msg : dict
            The constructed message.
        """
        if not isinstance(stream, zmq.Socket):
            # ZMQStreams and dummy sockets do not support tracking.
            track = False

        if isinstance(msg_or_type, (Message, dict)):
            # We got a Message or message dict, not a msg_type so don't
            # build a new Message.
            msg = msg_or_type
            buffers = buffers or msg.get('buffers', [])
        else:
            msg = self.msg(msg_or_type, content=content, parent=parent,
                           header=header, metadata=metadata)
        if not os.getpid() == self.pid:
            # refuse to send from a forked child: sharing the parent's zmq
            # socket across a fork is unsafe
            io.rprint("WARNING: attempted to send message from fork")
            io.rprint(msg)
            return
        buffers = [] if buffers is None else buffers
        if self.adapt_version:
            msg = adapt(msg, self.adapt_version)
        to_send = self.serialize(msg, ident)
        to_send.extend(buffers)
        longest = max([ len(s) for s in to_send ])
        # zero-copy only pays off for large parts
        copy = (longest < self.copy_threshold)

        if buffers and track and not copy:
            # only really track when we are doing zero-copy buffers
            tracker = stream.send_multipart(to_send, copy=False, track=True)
        else:
            # use dummy tracker, which will be done immediately
            tracker = DONE
            stream.send_multipart(to_send, copy=copy)

        if self.debug:
            pprint.pprint(msg)
            pprint.pprint(to_send)
            pprint.pprint(buffers)

        msg['tracker'] = tracker

        return msg

    def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
        """Send a raw message via ident path.

        This method is used to send a already serialized message.

        Parameters
        ----------
        stream : ZMQStream or Socket
            The ZMQ stream or socket to use for sending the message.
        msg_list : list
            The serialized list of messages to send. This only includes the
            [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of
            the message.
        ident : ident or list
            A single ident or a list of idents to use in sending.
        """
        to_send = []
        if isinstance(ident, bytes):
            ident = [ident]
        if ident is not None:
            to_send.extend(ident)

        to_send.append(DELIM)
        # re-sign the payload with this session's key
        to_send.append(self.sign(msg_list))
        to_send.extend(msg_list)
        stream.send_multipart(to_send, flags, copy=copy)

    def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
        """Receive and unpack a message.

        Parameters
        ----------
        socket : ZMQStream or Socket
            The socket or stream to use in receiving.

        Returns
        -------
        [idents], msg
            [idents] is a list of idents and msg is a nested message dict of
            same format as self.msg returns.
        """
        if isinstance(socket, ZMQStream):
            socket = socket.socket
        try:
            msg_list = socket.recv_multipart(mode, copy=copy)
        except zmq.ZMQError as e:
            if e.errno == zmq.EAGAIN:
                # We can convert EAGAIN to None as we know in this case
                # recv_multipart won't return None.
                return None,None
            else:
                raise
        # split multipart message into identity list and message dict
        # invalid large messages can cause very expensive string comparisons
        idents, msg_list = self.feed_identities(msg_list, copy)
        try:
            return idents, self.deserialize(msg_list, content=content, copy=copy)
        except Exception as e:
            # TODO: handle it
            raise e

    def feed_identities(self, msg_list, copy=True):
        """Split the identities from the rest of the message.

        Feed until DELIM is reached, then return the prefix as idents and
        remainder as msg_list. This is easily broken by setting an IDENT to DELIM,
        but that would be silly.

        Parameters
        ----------
        msg_list : a list of Message or bytes objects
            The message to be split.
        copy : bool
            flag determining whether the arguments are bytes or Messages

        Returns
        -------
        (idents, msg_list) : two lists
            idents will always be a list of bytes, each of which is a ZMQ
            identity. msg_list will be a list of bytes or zmq.Messages of the
            form [HMAC,p_header,p_parent,p_content,buffer1,buffer2,...] and
            should be unpackable/unserializable via self.deserialize at this
            point.
        """
        if copy:
            # parts are plain bytes: list.index finds the delimiter directly
            idx = msg_list.index(DELIM)
            return msg_list[:idx], msg_list[idx+1:]
        else:
            # parts are zmq.Message objects: compare their .bytes manually
            failed = True
            for idx,m in enumerate(msg_list):
                if m.bytes == DELIM:
                    failed = False
                    break
            if failed:
                raise ValueError("DELIM not in msg_list")
            idents, msg_list = msg_list[:idx], msg_list[idx+1:]
            return [m.bytes for m in idents], msg_list

    def _add_digest(self, signature):
        """add a digest to history to protect against replay attacks"""
        if self.digest_history_size == 0:
            # no history, never add digests
            return

        self.digest_history.add(signature)
        if len(self.digest_history) > self.digest_history_size:
            # threshold reached, cull 10%
            self._cull_digest_history()

    def _cull_digest_history(self):
        """cull the digest history

        Removes a randomly selected 10% of the digest history
        """
        current = len(self.digest_history)
        n_to_cull = max(int(current // 10), current - self.digest_history_size)
        if n_to_cull >= current:
            self.digest_history = set()
            return
        to_cull = random.sample(self.digest_history, n_to_cull)
        self.digest_history.difference_update(to_cull)

    def deserialize(self, msg_list, content=True, copy=True):
        """Unserialize a msg_list to a nested message dict.

        This is roughly the inverse of serialize. The serialize/deserialize
        methods work with full message lists, whereas pack/unpack work with
        the individual message parts in the message list.

        Parameters
        ----------
        msg_list : list of bytes or Message objects
            The list of message parts of the form [HMAC,p_header,p_parent,
            p_metadata,p_content,buffer1,buffer2,...].
        content : bool (True)
            Whether to unpack the content dict (True), or leave it packed
            (False).
        copy : bool (True)
            Whether msg_list contains bytes (True) or the non-copying Message
            objects in each place (False).

        Returns
        -------
        msg : dict
            The nested message dict with top-level keys [header, parent_header,
            content, buffers].  The buffers are returned as memoryviews.
        """
        minlen = 5
        message = {}
        if not copy:
            # pyzmq didn't copy the first parts of the message, so we'll do it
            for i in range(minlen):
                msg_list[i] = msg_list[i].bytes
        if self.auth is not None:
            # verify the signature (and reject replays) BEFORE unpacking
            # anything from the message
            signature = msg_list[0]
            if not signature:
                raise ValueError("Unsigned Message")
            if signature in self.digest_history:
                raise ValueError("Duplicate Signature: %r" % signature)
            self._add_digest(signature)
            check = self.sign(msg_list[1:5])
            if not compare_digest(signature, check):
                raise ValueError("Invalid Signature: %r" % signature)
        if not len(msg_list) >= minlen:
            raise TypeError("malformed message, must have at least %i elements"%minlen)
        header = self.unpack(msg_list[1])
        message['header'] = extract_dates(header)
        message['msg_id'] = header['msg_id']
        message['msg_type'] = header['msg_type']
        message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
        message['metadata'] = self.unpack(msg_list[3])
        if content:
            message['content'] = self.unpack(msg_list[4])
        else:
            # leave the content packed, as requested
            message['content'] = msg_list[4]
        buffers = [memoryview(b) for b in msg_list[5:]]
        if buffers and buffers[0].shape is None:
            # force copy to workaround pyzmq #646
            buffers = [memoryview(b.bytes) for b in msg_list[5:]]
        message['buffers'] = buffers
        # adapt to the current version
        return adapt(message)

    def unserialize(self, *args, **kwargs):
        # deprecated alias kept for backward compatibility
        warnings.warn(
            "Session.unserialize is deprecated. Use Session.deserialize.",
            DeprecationWarning,
        )
        return self.deserialize(*args, **kwargs)
867
868
def test_msg2obj():
    """Smoke-test round-tripping between dicts and Message objects."""
    src = dict(x=1)
    obj = Message(src)
    assert obj.x == src['x']

    # nested dicts become nested Messages
    src['y'] = dict(z=1)
    obj = Message(src)
    assert obj.y.z == src['y']['z']

    k1, k2 = 'y', 'z'
    assert obj[k1][k2] == src[k1][k2]

    # dict() recovers the original mapping
    roundtripped = dict(obj)
    assert src['x'] == roundtripped['x']
    assert src['y']['z'] == roundtripped['y']['z']
@@ -0,0 +1,1 b''
1 from .connect import * No newline at end of file
@@ -0,0 +1,3 b''
# Entry point so the kernel package can be launched as a script
# (e.g. ``python -m ipython_kernel``).
if __name__ == '__main__':
    from ipython_kernel import kernelapp as app
    app.launch_new_instance()
This diff has been collapsed as it changes many lines, (576 lines changed) Show them Hide them
@@ -0,0 +1,576 b''
1 """Utilities for connecting to kernels
2
3 The :class:`ConnectionFileMixin` class in this module encapsulates the logic
4 related to writing and reading connections files.
5 """
6 # Copyright (c) IPython Development Team.
7 # Distributed under the terms of the Modified BSD License.
8
9 #-----------------------------------------------------------------------------
10 # Imports
11 #-----------------------------------------------------------------------------
12
13 from __future__ import absolute_import
14
15 import glob
16 import json
17 import os
18 import socket
19 import sys
20 from getpass import getpass
21 from subprocess import Popen, PIPE
22 import tempfile
23
24 import zmq
25
26 # IPython imports
27 from IPython.config import LoggingConfigurable
28 from IPython.core.profiledir import ProfileDir
29 from IPython.utils.localinterfaces import localhost
30 from IPython.utils.path import filefind, get_ipython_dir
31 from IPython.utils.py3compat import (str_to_bytes, bytes_to_str, cast_bytes_py2,
32 string_types)
33 from IPython.utils.traitlets import (
34 Bool, Integer, Unicode, CaselessStrEnum, Instance,
35 )
36
37
38 #-----------------------------------------------------------------------------
39 # Working with Connection Files
40 #-----------------------------------------------------------------------------
41
def write_connection_file(fname=None, shell_port=0, iopub_port=0, stdin_port=0, hb_port=0,
                          control_port=0, ip='', key=b'', transport='tcp',
                          signature_scheme='hmac-sha256',
                          ):
    """Generates a JSON config file, including the selection of random ports.

    Parameters
    ----------

    fname : unicode
        The path to the file to write

    shell_port : int, optional
        The port to use for ROUTER (shell) channel.

    iopub_port : int, optional
        The port to use for the SUB channel.

    stdin_port : int, optional
        The port to use for the ROUTER (raw input) channel.

    control_port : int, optional
        The port to use for the ROUTER (control) channel.

    hb_port : int, optional
        The port to use for the heartbeat REP channel.

    ip : str, optional
        The ip address the kernel will bind to.

    key : str, optional
        The Session key used for message authentication.

    signature_scheme : str, optional
        The scheme used for message authentication.
        This has the form 'digest-hash', where 'digest'
        is the scheme used for digests, and 'hash' is the name of the hash function
        used by the digest scheme.
        Currently, 'hmac' is the only supported digest scheme,
        and 'sha256' is the default hash function.

    Returns
    -------
    (fname, cfg) : (str, dict)
        The path the file was written to, and the connection dict itself.
    """
    if not ip:
        ip = localhost()
    # default to temporary connector file
    if not fname:
        fd, fname = tempfile.mkstemp('.json')
        os.close(fd)

    # Find open ports as necessary.

    ports = []
    # one reserved port for every channel the caller left unset (<= 0)
    ports_needed = int(shell_port <= 0) + \
                   int(iopub_port <= 0) + \
                   int(stdin_port <= 0) + \
                   int(control_port <= 0) + \
                   int(hb_port <= 0)
    if transport == 'tcp':
        # bind *all* sockets before closing any, so the OS hands out
        # ports_needed distinct ports
        for i in range(ports_needed):
            sock = socket.socket()
            # struct.pack('ii', (0,0)) is 8 null bytes
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, b'\0' * 8)
            sock.bind(('', 0))
            ports.append(sock)
        # NOTE(review): ports are released here, before the kernel binds
        # them, so another process could grab one in between (inherent race).
        for i, sock in enumerate(ports):
            port = sock.getsockname()[1]
            sock.close()
            ports[i] = port
    else:
        # ipc transport: "ports" are numeric suffixes on "<ip>-<N>" file
        # paths; pick suffixes whose path does not exist yet
        N = 1
        for i in range(ports_needed):
            while os.path.exists("%s-%s" % (ip, str(N))):
                N += 1
            ports.append(N)
            N += 1
    # consume the reserved ports, in fixed channel order, for every
    # channel the caller did not set explicitly
    if shell_port <= 0:
        shell_port = ports.pop(0)
    if iopub_port <= 0:
        iopub_port = ports.pop(0)
    if stdin_port <= 0:
        stdin_port = ports.pop(0)
    if control_port <= 0:
        control_port = ports.pop(0)
    if hb_port <= 0:
        hb_port = ports.pop(0)

    cfg = dict( shell_port=shell_port,
                iopub_port=iopub_port,
                stdin_port=stdin_port,
                control_port=control_port,
                hb_port=hb_port,
              )
    cfg['ip'] = ip
    # JSON cannot carry bytes; the key is stored as str
    cfg['key'] = bytes_to_str(key)
    cfg['transport'] = transport
    cfg['signature_scheme'] = signature_scheme

    with open(fname, 'w') as f:
        f.write(json.dumps(cfg, indent=2))

    return fname, cfg
143
144
def get_connection_file(app=None):
    """Return the path to the connection file of an app

    Parameters
    ----------
    app : IPKernelApp instance [optional]
        If unspecified, the currently running app will be used
    """
    if app is None:
        # no app given: fall back to the kernel app of this process
        from ipython_kernel.kernelapp import IPKernelApp
        if not IPKernelApp.initialized():
            raise RuntimeError("app not specified, and not in a running Kernel")
        app = IPKernelApp.instance()
    # search cwd first, then the profile's security dir
    search_path = ['.', app.profile_dir.security_dir]
    return filefind(app.connection_file, search_path)
160
161
def find_connection_file(filename='kernel-*.json', profile=None):
    """find a connection file, and return its absolute path.

    The current working directory and the profile's security
    directory will be searched for the file if it is not given by
    absolute path.

    If profile is unspecified, then the current running application's
    profile will be used, or 'default', if not run from IPython.

    If the argument does not match an existing file, it will be interpreted as a
    fileglob, and the matching file in the profile's security dir with
    the latest access time will be used.

    Parameters
    ----------
    filename : str
        The connection file or fileglob to search for.
    profile : str [optional]
        The name of the profile to use when searching for the connection file,
        if different from the current IPython session or 'default'.

    Returns
    -------
    str : The absolute path of the connection file.

    Raises
    ------
    IOError
        If nothing matches *filename* in any searched location.
    """
    from IPython.core.application import BaseIPythonApplication as IPApp
    try:
        # quick check for absolute path, before going through logic
        return filefind(filename)
    except IOError:
        pass

    # resolve the security dir of the requested (or current) profile
    if profile is None:
        # profile unspecified, check if running from an IPython app
        if IPApp.initialized():
            app = IPApp.instance()
            profile_dir = app.profile_dir
        else:
            # not running in IPython, use default profile
            profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), 'default')
    else:
        # find profiledir by profile name:
        profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile)
    security_dir = profile_dir.security_dir

    try:
        # first, try explicit name
        return filefind(filename, ['.', security_dir])
    except IOError:
        pass

    # not found by full name

    if '*' in filename:
        # given as a glob already
        pat = filename
    else:
        # accept any substring match
        pat = '*%s*' % filename
    matches = glob.glob( os.path.join(security_dir, pat) )
    if not matches:
        raise IOError("Could not find %r in %r" % (filename, security_dir))
    elif len(matches) == 1:
        return matches[0]
    else:
        # get most recent match, by access time:
        return sorted(matches, key=lambda f: os.stat(f).st_atime)[-1]
230
231
def get_connection_info(connection_file=None, unpack=False, profile=None):
    """Return the connection information for the current Kernel.

    Parameters
    ----------
    connection_file : str [optional]
        The connection file to be used. Can be given by absolute path, or
        IPython will search in the security directory of a given profile.
        If run from IPython,

        If unspecified, the connection file for the currently running
        IPython Kernel will be used, which is only allowed from inside a kernel.
    unpack : bool [default: False]
        if True, return the unpacked dict, otherwise just the string contents
        of the file.
    profile : str [optional]
        The name of the profile to use when searching for the connection file,
        if different from the current IPython session or 'default'.


    Returns
    -------
    The connection dictionary of the current kernel, as string or dict,
    depending on `unpack`.
    """
    if connection_file is None:
        # no file given: use the connection file of this kernel process
        cf = get_connection_file()
    else:
        # connection file specified, allow shortnames:
        cf = find_connection_file(connection_file, profile=profile)

    with open(cf) as f:
        raw = f.read()

    if not unpack:
        return raw

    info = json.loads(raw)
    # ensure key is bytes:
    info['key'] = str_to_bytes(info.get('key', ''))
    return info
272
273
def connect_qtconsole(connection_file=None, argv=None, profile=None):
    """Connect a qtconsole to the current kernel.

    This is useful for connecting a second qtconsole to a kernel, or to a
    local notebook.

    Parameters
    ----------
    connection_file : str [optional]
        The connection file to be used. Can be given by absolute path, or
        IPython will search in the security directory of a given profile.
        If run from IPython,

        If unspecified, the connection file for the currently running
        IPython Kernel will be used, which is only allowed from inside a kernel.
    argv : list [optional]
        Any extra args to be passed to the console.
    profile : str [optional]
        The name of the profile to use when searching for the connection file,
        if different from the current IPython session or 'default'.


    Returns
    -------
    :class:`subprocess.Popen` instance running the qtconsole frontend
    """
    extra_args = list(argv) if argv else []

    if connection_file is None:
        # get connection file from current kernel
        cf = get_connection_file()
    else:
        cf = find_connection_file(connection_file, profile=profile)

    # one-liner handed to ``python -c`` in the child process
    cmd = "from IPython.qt.console import qtconsoleapp;qtconsoleapp.main()"

    child_argv = [sys.executable, '-c', cmd, '--existing', cf] + extra_args
    return Popen(child_argv,
        stdout=PIPE, stderr=PIPE, close_fds=(sys.platform != 'win32'),
    )
316
317
def tunnel_to_kernel(connection_info, sshserver, sshkey=None):
    """tunnel connections to a kernel via ssh

    This will open four SSH tunnels from localhost on this machine to the
    ports associated with the kernel.  They can be either direct
    localhost-localhost tunnels, or if an intermediate server is necessary,
    the kernel must be listening on a public IP.

    Parameters
    ----------
    connection_info : dict or str (path)
        Either a connection dict, or the path to a JSON connection file
    sshserver : str
        The ssh sever to use to tunnel to the kernel. Can be a full
        `user@server:port` string. ssh config aliases are respected.
    sshkey : str [optional]
        Path to file containing ssh key to use for authentication.
        Only necessary if your ssh config does not already associate
        a keyfile with the host.

    Returns
    -------

    (shell, iopub, stdin, hb) : ints
        The four ports on localhost that have been forwarded to the kernel.
    """
    from zmq.ssh import tunnel
    if isinstance(connection_info, string_types):
        # it's a path, unpack it
        with open(connection_info) as f:
            connection_info = json.loads(f.read())

    cf = connection_info

    # NOTE(review): only shell/iopub/stdin/hb are tunneled here;
    # the control port is not forwarded.
    lports = tunnel.select_random_ports(4)
    rports = cf['shell_port'], cf['iopub_port'], cf['stdin_port'], cf['hb_port']

    remote_ip = cf['ip']

    # only prompt for a password when passwordless auth is unavailable
    if tunnel.try_passwordless_ssh(sshserver, sshkey):
        password=False
    else:
        password = getpass("SSH Password for %s: " % cast_bytes_py2(sshserver))

    # open one tunnel per (local port, remote port) pair
    for lp,rp in zip(lports, rports):
        tunnel.ssh_tunnel(lp, rp, sshserver, remote_ip, sshkey, password)

    return tuple(lports)
366
367
368 #-----------------------------------------------------------------------------
369 # Mixin for classes that work with connection files
370 #-----------------------------------------------------------------------------
371
# zmq socket type used on the *client* side of each kernel channel
# (the hb REQ pairs with the kernel's heartbeat REP -- see the
# write_connection_file docs above)
channel_socket_types = {
    'hb' : zmq.REQ,
    'shell' : zmq.DEALER,
    'iopub' : zmq.SUB,
    'stdin' : zmq.DEALER,
    'control': zmq.DEALER,
}

# names of the port traits on ConnectionFileMixin, in the order used when
# reading/writing connection files
port_names = [ "%s_port" % channel for channel in ('shell', 'stdin', 'iopub', 'hb', 'control')]
381
class ConnectionFileMixin(LoggingConfigurable):
    """Mixin for configurable classes that work with connection files"""

    # The addresses for the communication channels
    connection_file = Unicode('', config=True,
        help="""JSON file in which to store connection info [default: kernel-<pid>.json]

    This file will contain the IP, ports, and authentication key needed to connect
    clients to this kernel. By default, this file will be created in the security dir
    of the current profile, but can be specified by absolute path.
    """)
    # set True once *this instance* has written the connection file,
    # so cleanup only removes files we created
    _connection_file_written = Bool(False)

    transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True)

    ip = Unicode(config=True,
        help="""Set the kernel\'s IP address [default localhost].
        If the IP address is something other than localhost, then
        Consoles on other machines will be able to connect
        to the Kernel, so be careful!"""
    )

    def _ip_default(self):
        # for ipc transport, "ip" is actually a file path prefix,
        # derived from the connection file name when available
        if self.transport == 'ipc':
            if self.connection_file:
                return os.path.splitext(self.connection_file)[0] + '-ipc'
            else:
                return 'kernel-ipc'
        else:
            return localhost()

    def _ip_changed(self, name, old, new):
        # '*' is shorthand for "listen on all interfaces"
        if new == '*':
            self.ip = '0.0.0.0'

    # protected traits

    hb_port = Integer(0, config=True,
            help="set the heartbeat port [default: random]")
    shell_port = Integer(0, config=True,
            help="set the shell (ROUTER) port [default: random]")
    iopub_port = Integer(0, config=True,
            help="set the iopub (PUB) port [default: random]")
    stdin_port = Integer(0, config=True,
            help="set the stdin (ROUTER) port [default: random]")
    control_port = Integer(0, config=True,
            help="set the control (ROUTER) port [default: random]")

    @property
    def ports(self):
        # the five channel ports, in port_names order
        return [ getattr(self, name) for name in port_names ]

    # The Session to use for communication with the kernel.
    session = Instance('jupyter_client.session.Session')
    def _session_default(self):
        from jupyter_client.session import Session
        return Session(parent=self)

    #--------------------------------------------------------------------------
    # Connection and ipc file management
    #--------------------------------------------------------------------------

    def get_connection_info(self):
        """return the connection info as a dict"""
        return dict(
            transport=self.transport,
            ip=self.ip,
            shell_port=self.shell_port,
            iopub_port=self.iopub_port,
            stdin_port=self.stdin_port,
            hb_port=self.hb_port,
            control_port=self.control_port,
            signature_scheme=self.session.signature_scheme,
            key=self.session.key,
        )

    def cleanup_connection_file(self):
        """Cleanup connection file *if we wrote it*

        Will not raise if the connection file was already removed somehow.
        """
        if self._connection_file_written:
            # cleanup connection files on full shutdown of kernel we started
            self._connection_file_written = False
            try:
                os.remove(self.connection_file)
            except (IOError, OSError, AttributeError):
                pass

    def cleanup_ipc_files(self):
        """Cleanup ipc files if we wrote them."""
        if self.transport != 'ipc':
            return
        for port in self.ports:
            # ipc "ports" are files named "<ip>-<port>"
            ipcfile = "%s-%i" % (self.ip, port)
            try:
                os.remove(ipcfile)
            except (IOError, OSError):
                pass

    def write_connection_file(self):
        """Write connection info to JSON dict in self.connection_file."""
        # idempotent: don't rewrite a file we already produced
        if self._connection_file_written and os.path.exists(self.connection_file):
            return

        self.connection_file, cfg = write_connection_file(self.connection_file,
            transport=self.transport, ip=self.ip, key=self.session.key,
            stdin_port=self.stdin_port, iopub_port=self.iopub_port,
            shell_port=self.shell_port, hb_port=self.hb_port,
            control_port=self.control_port,
            signature_scheme=self.session.signature_scheme,
        )
        # write_connection_file also sets default ports:
        for name in port_names:
            setattr(self, name, cfg[name])

        self._connection_file_written = True

    def load_connection_file(self):
        """Load connection info from JSON dict in self.connection_file."""
        self.log.debug(u"Loading connection file %s", self.connection_file)
        with open(self.connection_file) as f:
            cfg = json.load(f)
        self.transport = cfg.get('transport', self.transport)
        self.ip = cfg.get('ip', self._ip_default())

        for name in port_names:
            if getattr(self, name) == 0 and name in cfg:
                # not overridden by config or cl_args
                setattr(self, name, cfg[name])

        if 'key' in cfg:
            self.session.key = str_to_bytes(cfg['key'])
        if 'signature_scheme' in cfg:
            self.session.signature_scheme = cfg['signature_scheme']

    #--------------------------------------------------------------------------
    # Creating connected sockets
    #--------------------------------------------------------------------------

    def _make_url(self, channel):
        """Make a ZeroMQ URL for a given channel."""
        transport = self.transport
        ip = self.ip
        port = getattr(self, '%s_port' % channel)

        if transport == 'tcp':
            return "tcp://%s:%i" % (ip, port)
        else:
            return "%s://%s-%s" % (transport, ip, port)

    def _create_connected_socket(self, channel, identity=None):
        """Create a zmq Socket and connect it to the kernel."""
        url = self._make_url(channel)
        socket_type = channel_socket_types[channel]
        self.log.debug("Connecting to: %s" % url)
        # NOTE(review): relies on self.context being supplied by the
        # subclass/consumer -- no context trait is defined on this mixin.
        sock = self.context.socket(socket_type)
        # set linger to 1s to prevent hangs at exit
        sock.linger = 1000
        if identity:
            sock.identity = identity
        sock.connect(url)
        return sock

    def connect_iopub(self, identity=None):
        """return zmq Socket connected to the IOPub channel"""
        sock = self._create_connected_socket('iopub', identity=identity)
        # subscribe to all topics
        sock.setsockopt(zmq.SUBSCRIBE, b'')
        return sock

    def connect_shell(self, identity=None):
        """return zmq Socket connected to the Shell channel"""
        return self._create_connected_socket('shell', identity=identity)

    def connect_stdin(self, identity=None):
        """return zmq Socket connected to the StdIn channel"""
        return self._create_connected_socket('stdin', identity=identity)

    def connect_hb(self, identity=None):
        """return zmq Socket connected to the Heartbeat channel"""
        return self._create_connected_socket('hb', identity=identity)

    def connect_control(self, identity=None):
        """return zmq Socket connected to the Control channel"""
        return self._create_connected_socket('control', identity=identity)
567
568
# public API of this module
__all__ = [
    'write_connection_file',
    'get_connection_file',
    'find_connection_file',
    'get_connection_info',
    'connect_qtconsole',
    'tunnel_to_kernel',
]
@@ -0,0 +1,226 b''
1 """Utilities for launching kernels
2 """
3
4 # Copyright (c) IPython Development Team.
5 # Distributed under the terms of the Modified BSD License.
6
7 import os
8 import sys
9 from subprocess import Popen, PIPE
10
11 from IPython.utils.encoding import getdefaultencoding
12 from IPython.utils.py3compat import cast_bytes_py2
13
14
def swallow_argv(argv, aliases=None, flags=None):
    """strip frontend-specific aliases and flags from an argument list

    For use primarily in frontend apps that want to pass a subset of command-line
    arguments through to a subprocess, where frontend-specific flags and aliases
    should be removed from the list.

    Parameters
    ----------

    argv : list(str)
        The starting argv, to be filtered
    aliases : container of aliases (dict, list, set, etc.)
        The frontend-specific aliases to be removed
    flags : container of flags (dict, list, set, etc.)
        The frontend-specific flags to be removed

    Returns
    -------

    argv : list(str)
        The argv list, excluding flags and aliases that have been stripped
    """
    aliases = set() if aliases is None else aliases
    flags = set() if flags is None else flags

    remaining = list(argv)  # work on a copy

    pending_value = False   # previous arg was an alias expecting a value
    alias_was_flag = False  # ...but that alias also has a no-arg flag form

    for arg in argv:
        if arg == '--':
            # everything after a bare '--' passes through untouched
            break
        if pending_value:
            pending_value = False
            # drop the alias's space-separated value -- unless the alias
            # doubles as a flag and the "value" is really the next option
            if not (alias_was_flag and arg.startswith('-')):
                remaining.remove(arg)
            continue
        if not arg.startswith('-'):
            continue
        # argparse accepts any unique leading section of an option name,
        # so `--no-br` means `--no-browser` in the notebook, etc. --
        # hence the startswith matching below.
        name, sep, _value = arg.lstrip('-').partition('=')
        if any(alias.startswith(name) for alias in aliases):
            remaining.remove(arg)
            if not sep:
                # value was not attached with '=': swallow the next arg
                pending_value = True
            # remember whether this alias also exists as a flag, e.g.
            # `existing`, so we don't swallow a following option
            alias_was_flag = name in flags
        elif not sep and any(flag.startswith(name) for flag in flags):
            # pure flag: strip it; flags never take a value
            remaining.remove(arg)

    return remaining
79
80
def make_ipkernel_cmd(mod='ipython_kernel', executable=None, extra_arguments=None, **kw):
    """Build Popen command list for launching an IPython kernel.

    Parameters
    ----------
    mod : str, optional (default 'ipython_kernel')
        A string of an IPython module whose __main__ starts an IPython kernel

    executable : str, optional (default sys.executable)
        The Python executable to use for the kernel process.

    extra_arguments : list, optional
        A list of extra arguments to pass when executing the launch code.

    Returns
    -------

    A Popen command list
    """
    # avoid the mutable-default-argument trap: None means "no extras"
    if extra_arguments is None:
        extra_arguments = []
    if executable is None:
        executable = sys.executable
    # '-f {connection_file}' is a placeholder filled in by the launcher
    arguments = [ executable, '-m', mod, '-f', '{connection_file}' ]
    arguments.extend(extra_arguments)

    return arguments
106
107
def launch_kernel(cmd, stdin=None, stdout=None, stderr=None, env=None,
                  independent=False,
                  cwd=None,
                  **kw
                  ):
    """ Launches a localhost kernel, binding to the specified ports.

    Parameters
    ----------
    cmd : Popen list,
        A string of Python code that imports and executes a kernel entry point.

    stdin, stdout, stderr : optional (default None)
        Standards streams, as defined in subprocess.Popen.

    env : dict, optional
        Environment variables for the kernel process;
        defaults to a copy of this process's environment.

    independent : bool, optional (default False)
        If set, the kernel process is guaranteed to survive if this process
        dies. If not set, an effort is made to ensure that the kernel is killed
        when this process dies. Note that in this case it is still good practice
        to kill kernels manually before exiting.

    cwd : path, optional
        The working dir of the kernel process (default: cwd of this process).

    Returns
    -------

    Popen instance for the kernel subprocess
    """

    # Popen will fail (sometimes with a deadlock) if stdin, stdout, and stderr
    # are invalid. Unfortunately, there is in general no way to detect whether
    # they are valid.  The following two blocks redirect them to (temporary)
    # pipes in certain important cases.

    # If this process has been backgrounded, our stdin is invalid. Since there
    # is no compelling reason for the kernel to inherit our stdin anyway, we'll
    # play it safe and always redirect.
    redirect_in = True
    _stdin = PIPE if stdin is None else stdin

    # If this process in running on pythonw, we know that stdin, stdout, and
    # stderr are all invalid.
    redirect_out = sys.executable.endswith('pythonw.exe')
    if redirect_out:
        blackhole = open(os.devnull, 'w')
        _stdout = blackhole if stdout is None else stdout
        _stderr = blackhole if stderr is None else stderr
    else:
        _stdout, _stderr = stdout, stderr

    # copy the environment rather than mutating the caller's (or ours)
    env = env if (env is not None) else os.environ.copy()

    encoding = getdefaultencoding(prefer_stream=False)
    kwargs = dict(
        stdin=_stdin,
        stdout=_stdout,
        stderr=_stderr,
        cwd=cwd,
        env=env,
    )

    # Spawn a kernel.
    if sys.platform == 'win32':
        # Popen on Python 2 on Windows cannot handle unicode args or cwd
        cmd = [ cast_bytes_py2(c, encoding) for c in cmd ]
        if cwd:
            cwd = cast_bytes_py2(cwd, sys.getfilesystemencoding() or 'ascii')
            kwargs['cwd'] = cwd

        from jupyter_client.parentpoller import ParentPollerWindows
        # Create a Win32 event for interrupting the kernel
        # and store it in an environment variable.
        interrupt_event = ParentPollerWindows.create_interrupt_event()
        env["JPY_INTERRUPT_EVENT"] = str(interrupt_event)
        # deprecated old env name:
        env["IPY_INTERRUPT_EVENT"] = env["JPY_INTERRUPT_EVENT"]

        try:
            # Python 3 moved these APIs from _subprocess to _winapi
            from _winapi import DuplicateHandle, GetCurrentProcess, \
                DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP
        except:
            from _subprocess import DuplicateHandle, GetCurrentProcess, \
                DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP
        # Launch the kernel process
        if independent:
            kwargs['creationflags'] = CREATE_NEW_PROCESS_GROUP
        else:
            # pass the kernel an inheritable handle to this process via
            # JPY_PARENT_PID (consumed by the parentpoller machinery)
            pid = GetCurrentProcess()
            handle = DuplicateHandle(pid, pid, pid, 0,
                                     True, # Inheritable by new processes.
                                     DUPLICATE_SAME_ACCESS)
            env['JPY_PARENT_PID'] = str(int(handle))

        proc = Popen(cmd, **kwargs)

        # Attach the interrupt event to the Popen object so it can be used later.
        proc.win32_interrupt_event = interrupt_event

    else:
        if independent:
            # start the kernel in its own session so our death can't kill it
            kwargs['preexec_fn'] = lambda: os.setsid()
        else:
            env['JPY_PARENT_PID'] = str(os.getpid())

        proc = Popen(cmd, **kwargs)

    # Clean up pipes created to work around Popen bug.
    if redirect_in:
        if stdin is None:
            proc.stdin.close()

    return proc
221
# public API of this module
__all__ = [
    'swallow_argv',
    'make_ipkernel_cmd',
    'launch_kernel',
]
@@ -0,0 +1,1 b''
1 from jupyter_client.session import *
@@ -0,0 +1,8 b''
1 """IPython kernels and associated utilities"""
2
3 from .connect import *
4 from .launcher import *
5 from .client import KernelClient
6 from .manager import KernelManager, run_kernel
7 from .blocking import BlockingKernelClient
8 from .multikernelmanager import MultiKernelManager
@@ -0,0 +1,374 b''
1 """Adapters for IPython msg spec versions."""
2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
6 import json
7
8 from IPython.core.release import kernel_protocol_version_info
9 from IPython.utils.tokenutil import token_at_cursor
10
11
def code_to_line(code, cursor_pos):
    """Collapse a multiline code block and cursor position to the single
    line containing the cursor, with the offset rebased to that line.

    For adapting ``complete_`` and ``object_info_request``.
    """
    if not code:
        return "", 0
    offset = cursor_pos
    for line in code.splitlines(True):
        # stop at the line that contains the cursor
        if offset <= len(line):
            break
        offset -= len(line)
    return line, offset
27
28
class Adapter(object):
    """Base class for adapting messages

    Override message_type(msg) methods to create adapters.
    """

    # msg_type renames applied by update_msg_type, overridden by subclasses
    msg_type_map = {}

    def update_header(self, msg):
        """Hook: adjust the header. Default is a no-op."""
        return msg

    def update_metadata(self, msg):
        """Hook: adjust the metadata. Default is a no-op."""
        return msg

    def update_msg_type(self, msg):
        """Rename the msg_type (in both header and top level) per msg_type_map."""
        header = msg['header']
        current_type = header['msg_type']
        if current_type in self.msg_type_map:
            renamed = self.msg_type_map[current_type]
            msg['msg_type'] = header['msg_type'] = renamed
        return msg

    def handle_reply_status_error(self, msg):
        """This will be called *instead of* the regular handler

        on any reply with status != ok
        """
        return msg

    def __call__(self, msg):
        # run the generic update passes in order
        for update in (self.update_header, self.update_metadata, self.update_msg_type):
            msg = update(msg)
        msg_type = msg['header']['msg_type']

        handler = getattr(self, msg_type, None)
        if handler is None:
            # no per-type adapter method: pass the message through unchanged
            return msg

        # handle status=error replies separately (no change, at present)
        if msg['content'].get('status', None) in {'error', 'aborted'}:
            return self.handle_reply_status_error(msg)
        return handler(msg)
71
72 def _version_str_to_list(version):
73 """convert a version string to a list of ints
74
75 non-int segments are excluded
76 """
77 v = []
78 for part in version.split('.'):
79 try:
80 v.append(int(part))
81 except ValueError:
82 pass
83 return v
84
class V5toV4(Adapter):
    """Adapt msg protocol v5 to v4"""

    version = '4.1'

    # v5 -> v4 message type renames
    msg_type_map = {
        'execute_result' : 'pyout',
        'execute_input' : 'pyin',
        'error' : 'pyerr',
        'inspect_request' : 'object_info_request',
        'inspect_reply' : 'object_info_reply',
    }

    def update_header(self, msg):
        # v4 headers carry no 'version' field
        msg['header'].pop('version', None)
        return msg

    # shell channel

    def kernel_info_reply(self, msg):
        """Flatten a v5 kernel_info_reply into the v4 form."""
        v4c = {}
        content = msg['content']
        # version strings become lists of ints in v4
        for key in ('language_version', 'protocol_version'):
            if key in content:
                v4c[key] = _version_str_to_list(content[key])
        if content.get('implementation', '') == 'ipython' \
            and 'implementation_version' in content:
            v4c['ipython_version'] = _version_str_to_list(content['implementation_version'])
        language_info = content.get('language_info', {})
        language = language_info.get('name', '')
        v4c.setdefault('language', language)
        if 'version' in language_info:
            v4c.setdefault('language_version', _version_str_to_list(language_info['version']))
        msg['content'] = v4c
        return msg

    def execute_request(self, msg):
        # v4 expects a user_variables list alongside user_expressions
        content = msg['content']
        content.setdefault('user_variables', [])
        return msg

    def execute_reply(self, msg):
        content = msg['content']
        content.setdefault('user_variables', {})
        # TODO: handle payloads
        return msg

    def complete_request(self, msg):
        """Collapse v5's (code, cursor_pos) to the single-line v4 request."""
        content = msg['content']
        code = content['code']
        cursor_pos = content['cursor_pos']
        line, cursor_pos = code_to_line(code, cursor_pos)

        new_content = msg['content'] = {}
        new_content['text'] = ''
        new_content['line'] = line
        new_content['block'] = None
        new_content['cursor_pos'] = cursor_pos
        return msg

    def complete_reply(self, msg):
        """Reconstruct v4's matched_text from v5's cursor offsets."""
        content = msg['content']
        cursor_start = content.pop('cursor_start')
        cursor_end = content.pop('cursor_end')
        match_len = cursor_end - cursor_start
        matches = content['matches']
        # guard against an empty completion list: the previous
        # unconditional matches[0] raised IndexError with no matches
        content['matched_text'] = matches[0][:match_len] if matches else ''
        content.pop('metadata', None)
        return msg

    def object_info_request(self, msg):
        content = msg['content']
        code = content['code']
        cursor_pos = content['cursor_pos']

        # v4 asks about a name (oname), not a cursor position
        new_content = msg['content'] = {}
        new_content['oname'] = token_at_cursor(code, cursor_pos)
        new_content['detail_level'] = content['detail_level']
        return msg

    def object_info_reply(self, msg):
        """inspect_reply can't be easily backward compatible"""
        msg['content'] = {'found' : False, 'oname' : 'unknown'}
        return msg

    # iopub channel

    def stream(self, msg):
        # v4 names the stream text 'data'
        content = msg['content']
        content['data'] = content.pop('text')
        return msg

    def display_data(self, msg):
        content = msg['content']
        content.setdefault("source", "display")
        data = content['data']
        if 'application/json' in data:
            try:
                # v4 transports JSON mimedata as a serialized string
                data['application/json'] = json.dumps(data['application/json'])
            except Exception:
                # warn?
                pass
        return msg

    # stdin channel

    def input_request(self, msg):
        # v4 input requests have no password field
        msg['content'].pop('password', None)
        return msg
194
195
class V4toV5(Adapter):
    """Convert msg spec V4 to V5"""
    version = '5.0'

    # invert the V5->V4 message-type renames
    msg_type_map = {v: k for k, v in V5toV4.msg_type_map.items()}

    def update_header(self, msg):
        """Stamp outgoing headers with the v5 protocol version."""
        msg['header']['version'] = self.version
        return msg

    # shell channel

    def kernel_info_reply(self, msg):
        """Build the v5 implementation/language_info fields from v4 content."""
        content = msg['content']
        # v4 sent version tuples; v5 wants dotted strings
        for field in ('protocol_version', 'ipython_version'):
            if field in content:
                content[field] = '.'.join(str(part) for part in content[field])

        content.setdefault('protocol_version', '4.1')

        # only IPython kernels reported an ipython_version
        if content['language'].startswith('python') and 'ipython_version' in content:
            content['implementation'] = 'ipython'
            content['implementation_version'] = content.pop('ipython_version')

        language = content.pop('language')
        language_info = content.setdefault('language_info', {})
        language_info.setdefault('name', language)
        if 'language_version' in content:
            version_str = '.'.join(str(part) for part in content.pop('language_version'))
            language_info.setdefault('version', version_str)

        content['banner'] = ''
        return msg

    def execute_request(self, msg):
        """Fold the deprecated user_variables into user_expressions."""
        content = msg['content']
        expressions = content.setdefault('user_expressions', {})
        for name in content.pop('user_variables', []):
            # evaluating a name yields the variable's value
            expressions[name] = name
        return msg

    def execute_reply(self, msg):
        """Merge user_variables results and mime-bundle pager payloads."""
        content = msg['content']
        expressions = content.setdefault('user_expressions', {})
        variables = content.pop('user_variables', {})
        if variables:
            expressions.update(variables)

        # pager payloads became a mime bundle in v5
        for payload in content.get('payload', []):
            if payload.get('source', None) == 'page' and ('text' in payload):
                payload.setdefault('data', {})['text/plain'] = payload.pop('text')

        return msg

    def complete_request(self, msg):
        """v4 'line' becomes v5 'code'; cursor_pos carries over unchanged."""
        old = msg['content']
        msg['content'] = {
            'code': old['line'],
            'cursor_pos': old['cursor_pos'],
        }
        return msg

    def complete_reply(self, msg):
        """Upgrade a complete_reply, synthesizing cursor offsets.

        complete_reply needs more context than we have to get cursor_start
        and end. Use end=None to indicate the current cursor position and a
        negative offset for start relative to the cursor; start=None means
        start == end (there is no -0).
        """
        old = msg['content']
        new = msg['content'] = {'status': 'ok'}
        new['matches'] = old['matches']
        matched = old['matched_text']
        new['cursor_start'] = -len(matched) if matched else None
        new['cursor_end'] = None
        new['metadata'] = {}
        return msg

    def inspect_request(self, msg):
        """Turn a v4 object_info_request name into v5 code + cursor_pos."""
        old = msg['content']
        oname = old['oname']
        msg['content'] = {
            'code': oname,
            'cursor_pos': len(oname),
            'detail_level': old['detail_level'],
        }
        return msg

    def inspect_reply(self, msg):
        """inspect_reply can't be easily backward compatible.

        Assemble a v5 text/plain mime bundle from the first available v4
        definition field and the first available docstring field.
        """
        old = msg['content']
        new = msg['content'] = {'status': 'ok'}
        found = new['found'] = old['found']
        new['name'] = old['oname']
        bundle = new['data'] = {}
        new['metadata'] = {}
        if found:
            lines = []
            for group in (('call_def', 'init_definition', 'definition'),
                          ('call_docstring', 'init_docstring', 'docstring')):
                for field in group:
                    if old.get(field, False):
                        lines.append(old[field])
                        break
            if not lines:
                lines.append("<empty docstring>")
            bundle['text/plain'] = '\n'.join(lines)
        return msg

    # iopub channel

    def stream(self, msg):
        """Rename the v4 'data' field to the v5 'text' field."""
        body = msg['content']
        body['text'] = body.pop('data')
        return msg

    def display_data(self, msg):
        """Parse JSON payloads back into objects; drop the v4 'source' field."""
        body = msg['content']
        body.pop("source", None)
        bundle = body['data']
        if 'application/json' in bundle:
            try:
                bundle['application/json'] = json.loads(bundle['application/json'])
            except Exception:
                # best-effort: leave unparseable data as a string
                pass
        return msg

    # stdin channel

    def input_request(self, msg):
        """v4 had no password flag; default it to False."""
        if 'password' not in msg['content']:
            msg['content']['password'] = False
        return msg
337
338
339
def adapt(msg, to_version=kernel_protocol_version_info[0]):
    """Adapt a single message to a target version

    Parameters
    ----------

    msg : dict
        An IPython message.
    to_version : int, optional
        The target major version.
        If unspecified, adapt to the current version for IPython.

    Returns
    -------

    msg : dict
        An IPython message appropriate in the new version.
    """
    try:
        from_version = int(msg['header']['version'].split('.')[0])
    except KeyError:
        # headers predating the 'version' key are assumed to be v4
        from_version = 4
    adapter = adapters.get((from_version, to_version))
    # no registered adapter means no conversion is needed (or possible)
    return msg if adapter is None else adapter(msg)
368
369
# Registry of adapter instances, keyed by (from_major, to_major) version
# pairs; one adapter per major-version transition.
adapters = {
    (5, 4): V5toV4(),
    (4, 5): V4toV5(),
}
@@ -0,0 +1,203 b''
1 """Base classes to manage a Client's interaction with a running kernel"""
2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
6 from __future__ import absolute_import
7
8 import atexit
9 import errno
10 from threading import Thread
11 import time
12
13 import zmq
14 # import ZMQError in top-level namespace, to avoid ugly attribute-error messages
15 # during garbage collection of threads at exit:
16 from zmq import ZMQError
17
18 from IPython.core.release import kernel_protocol_version_info
19
20 from .channelsabc import HBChannelABC
21
22 #-----------------------------------------------------------------------------
23 # Constants and exceptions
24 #-----------------------------------------------------------------------------
25
26 major_protocol_version = kernel_protocol_version_info[0]
27
28 class InvalidPortNumber(Exception):
29 pass
30
class HBChannel(Thread):
    """The heartbeat channel which monitors the kernel heartbeat.

    Note that the heartbeat channel is paused by default. As long as you start
    this channel, the kernel manager will ensure that it is paused and un-paused
    as appropriate.
    """
    # zmq.Context used to create the REQ socket
    context = None
    # Session object; kept for API symmetry with the other channels
    session = None
    # zmq REQ socket; recreated whenever the REQ/REP cycle is broken
    socket = None
    # zmq url the kernel's heartbeat REP socket listens on
    address = None
    # class-level flag flipped by the atexit hook; checked in _poll so that
    # errors raised during interpreter teardown are swallowed
    _exiting = False

    # seconds without a pong before the heart is declared dead
    time_to_dead = 1.
    poller = None
    # loop control: set False by stop() to end run()
    _running = None
    # when True, run() just sleeps instead of pinging
    _pause = None
    # last observed heartbeat status (True = kernel responded in time)
    _beating = None

    def __init__(self, context=None, session=None, address=None):
        """Create the heartbeat monitor thread.

        Parameters
        ----------
        context : :class:`zmq.Context`
            The ZMQ context to use.
        session : :class:`session.Session`
            The session to use.
        address : zmq url
            Standard (ip, port) tuple that the kernel is listening on.

        Raises
        ------
        InvalidPortNumber
            If a tuple address is given with port 0 (random ports must be
            resolved before constructing the channel).
        """
        super(HBChannel, self).__init__()
        # daemonize so a live heartbeat thread never blocks interpreter exit
        self.daemon = True

        self.context = context
        self.session = session
        if isinstance(address, tuple):
            if address[1] == 0:
                message = 'The port number for a channel cannot be 0.'
                raise InvalidPortNumber(message)
            address = "tcp://%s:%i" % address
        self.address = address
        # record interpreter shutdown, so _poll can tell teardown errors
        # apart from real failures
        atexit.register(self._notice_exit)

        self._running = False
        self._pause = True
        self.poller = zmq.Poller()

    def _notice_exit(self):
        # atexit hook: note that the interpreter is going down (see _poll)
        self._exiting = True

    def _create_socket(self):
        """(Re)create the REQ socket and register it with the poller."""
        if self.socket is not None:
            # close previous socket, before opening a new one
            self.poller.unregister(self.socket)
            self.socket.close()
        self.socket = self.context.socket(zmq.REQ)
        self.socket.linger = 1000
        self.socket.connect(self.address)

        self.poller.register(self.socket, zmq.POLLIN)

    def _poll(self, start_time):
        """poll for heartbeat replies until we reach self.time_to_dead.

        Ignores interrupts, and returns the result of poll(), which
        will be an empty list if no messages arrived before the timeout,
        or the event tuple if there is a message to receive.
        """

        until_dead = self.time_to_dead - (time.time() - start_time)
        # ensure poll at least once
        until_dead = max(until_dead, 1e-3)
        events = []
        while True:
            try:
                # Poller.poll takes a timeout in milliseconds
                events = self.poller.poll(1000 * until_dead)
            except ZMQError as e:
                if e.errno == errno.EINTR:
                    # ignore interrupts during heartbeat
                    # this may never actually happen
                    until_dead = self.time_to_dead - (time.time() - start_time)
                    until_dead = max(until_dead, 1e-3)
                    pass
                else:
                    raise
            except Exception:
                if self._exiting:
                    # interpreter is shutting down; don't propagate errors
                    # from half-torn-down zmq state
                    break
                else:
                    raise
            else:
                break
        return events

    def run(self):
        """The thread's main activity. Call start() instead."""
        self._create_socket()
        self._running = True
        self._beating = True

        while self._running:
            if self._pause:
                # just sleep, and skip the rest of the loop
                time.sleep(self.time_to_dead)
                continue

            since_last_heartbeat = 0.0
            # io.rprint('Ping from HB channel') # dbg
            # no need to catch EFSM here, because the previous event was
            # either a recv or connect, which cannot be followed by EFSM
            self.socket.send(b'ping')
            request_time = time.time()
            ready = self._poll(request_time)
            if ready:
                self._beating = True
                # the poll above guarantees we have something to recv
                self.socket.recv()
                # sleep the remainder of the cycle
                remainder = self.time_to_dead - (time.time() - request_time)
                if remainder > 0:
                    time.sleep(remainder)
                continue
            else:
                # nothing was received within the time limit, signal heart failure
                self._beating = False
                since_last_heartbeat = time.time() - request_time
                self.call_handlers(since_last_heartbeat)
                # and close/reopen the socket, because the REQ/REP cycle has been broken
                self._create_socket()
                continue

    def pause(self):
        """Pause the heartbeat."""
        self._pause = True

    def unpause(self):
        """Unpause the heartbeat."""
        self._pause = False

    def is_beating(self):
        """Is the heartbeat running and responsive (and not paused)."""
        if self.is_alive() and not self._pause and self._beating:
            return True
        else:
            return False

    def stop(self):
        """Stop the channel's event loop and join its thread."""
        self._running = False
        # run() observes _running at most time_to_dead seconds later
        self.join()
        self.close()

    def close(self):
        """Close the socket, ignoring errors from already-dead contexts."""
        if self.socket is not None:
            try:
                self.socket.close(linger=0)
            except Exception:
                pass
            self.socket = None

    def call_handlers(self, since_last_heartbeat):
        """This method is called in the ioloop thread when a message arrives.

        Subclasses should override this method to handle incoming messages.
        It is important to remember that this method is called in the thread
        so that some logic must be done to ensure that the application level
        handlers are called in the application thread.
        """
        pass
201
202
203 HBChannelABC.register(HBChannel)
@@ -0,0 +1,49 b''
1 """Abstract base classes for kernel client channels"""
2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
6 import abc
7
8 from IPython.utils.py3compat import with_metaclass
9
10
class ChannelABC(with_metaclass(abc.ABCMeta, object)):
    """A base class for all channel ABCs."""

    @abc.abstractmethod
    def start(self):
        """Begin the channel's activity."""

    @abc.abstractmethod
    def stop(self):
        """Halt the channel's activity."""

    @abc.abstractmethod
    def is_alive(self):
        """Return whether the channel is currently running."""
25
26
class HBChannelABC(ChannelABC):
    """HBChannel ABC.

    The docstrings for this class can be found in the base implementation:

    `jupyter_client.channels.HBChannel`
    """

    @abc.abstractproperty
    def time_to_dead(self):
        """Seconds of silence before the heartbeat is considered dead."""

    @abc.abstractmethod
    def pause(self):
        """Pause the heartbeat."""

    @abc.abstractmethod
    def unpause(self):
        """Resume the heartbeat."""

    @abc.abstractmethod
    def is_beating(self):
        """Return whether the heartbeat is running and responsive."""
@@ -0,0 +1,390 b''
1 """Base class to manage the interaction with a running kernel"""
2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
6 from __future__ import absolute_import
7 from jupyter_client.channels import major_protocol_version
8 from IPython.utils.py3compat import string_types, iteritems
9
10 import zmq
11
12 from IPython.utils.traitlets import (
13 Any, Instance, Type,
14 )
15
16 from .channelsabc import (ChannelABC, HBChannelABC)
17 from .clientabc import KernelClientABC
18 from .connect import ConnectionFileMixin
19
20
21 # some utilities to validate message structure, these might get moved elsewhere
22 # if they prove to have more generic utility
23
def validate_string_dict(dct):
    """Validate that the input is a dict with string keys and values.

    Raises ValueError if not."""
    for key, value in iteritems(dct):
        if not isinstance(key, string_types):
            raise ValueError('key %r in dict must be a string' % key)
        if not isinstance(value, string_types):
            raise ValueError('value %r in dict must be a string' % value)
33
34
class KernelClient(ConnectionFileMixin):
    """Communicates with a single kernel on any host via zmq channels.

    There are four channels associated with each kernel:

    * shell: for request/reply calls to the kernel.
    * iopub: for the kernel to publish results to frontends.
    * hb: for monitoring the kernel's heartbeat.
    * stdin: for frontends to reply to raw_input calls in the kernel.

    The methods of the channels are exposed as methods of the client itself
    (KernelClient.execute, complete, history, etc.).
    See the channels themselves for documentation of these methods.

    """

    # The PyZMQ Context to use for communication with the kernel.
    context = Instance(zmq.Context)
    def _context_default(self):
        # shared process-wide context unless the user supplies one
        return zmq.Context.instance()

    # The classes to use for the various channels
    shell_channel_class = Type(ChannelABC)
    iopub_channel_class = Type(ChannelABC)
    stdin_channel_class = Type(ChannelABC)
    hb_channel_class = Type(HBChannelABC)

    # Protected traits: lazily-created channel instances (see the
    # corresponding properties below)
    _shell_channel = Any
    _iopub_channel = Any
    _stdin_channel = Any
    _hb_channel = Any

    # flag for whether execute requests should be allowed to call raw_input:
    allow_stdin = True

    #--------------------------------------------------------------------------
    # Channel proxy methods
    #--------------------------------------------------------------------------

    # NOTE(review): this helper has no `self` parameter (the channel takes
    # its place) and nothing in this class calls it — presumably legacy;
    # confirm before removing.
    def _get_msg(channel, *args, **kwargs):
        return channel.get_msg(*args, **kwargs)

    def get_shell_msg(self, *args, **kwargs):
        """Get a message from the shell channel"""
        return self.shell_channel.get_msg(*args, **kwargs)

    def get_iopub_msg(self, *args, **kwargs):
        """Get a message from the iopub channel"""
        return self.iopub_channel.get_msg(*args, **kwargs)

    def get_stdin_msg(self, *args, **kwargs):
        """Get a message from the stdin channel"""
        return self.stdin_channel.get_msg(*args, **kwargs)

    #--------------------------------------------------------------------------
    # Channel management methods
    #--------------------------------------------------------------------------

    def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
        """Starts the channels for this kernel.

        This will create the channels if they do not exist and then start
        them (their activity runs in a thread). If port numbers of 0 are
        being used (random ports) then you must first call
        :meth:`start_kernel`. If the channels have been stopped and you
        call this, :class:`RuntimeError` will be raised.
        """
        if shell:
            self.shell_channel.start()
            # immediately request kernel info, so the reply can configure
            # protocol adaptation (see _handle_kernel_info_reply)
            self.kernel_info()
        if iopub:
            self.iopub_channel.start()
        if stdin:
            self.stdin_channel.start()
            self.allow_stdin = True
        else:
            # without a stdin channel, the kernel must not issue input requests
            self.allow_stdin = False
        if hb:
            self.hb_channel.start()

    def stop_channels(self):
        """Stops all the running channels for this kernel.

        This stops their event loops and joins their threads.
        """
        if self.shell_channel.is_alive():
            self.shell_channel.stop()
        if self.iopub_channel.is_alive():
            self.iopub_channel.stop()
        if self.stdin_channel.is_alive():
            self.stdin_channel.stop()
        if self.hb_channel.is_alive():
            self.hb_channel.stop()

    @property
    def channels_running(self):
        """Are any of the channels created and running?"""
        return (self.shell_channel.is_alive() or self.iopub_channel.is_alive() or
                self.stdin_channel.is_alive() or self.hb_channel.is_alive())

    ioloop = None  # Overridden in subclasses that use pyzmq event loop

    @property
    def shell_channel(self):
        """Get the shell channel object for this kernel."""
        if self._shell_channel is None:
            url = self._make_url('shell')
            self.log.debug("connecting shell channel to %s", url)
            # identity ties the ROUTER socket's replies to this session
            socket = self.connect_shell(identity=self.session.bsession)
            self._shell_channel = self.shell_channel_class(
                socket, self.session, self.ioloop
            )
        return self._shell_channel

    @property
    def iopub_channel(self):
        """Get the iopub channel object for this kernel."""
        if self._iopub_channel is None:
            url = self._make_url('iopub')
            self.log.debug("connecting iopub channel to %s", url)
            socket = self.connect_iopub()
            self._iopub_channel = self.iopub_channel_class(
                socket, self.session, self.ioloop
            )
        return self._iopub_channel

    @property
    def stdin_channel(self):
        """Get the stdin channel object for this kernel."""
        if self._stdin_channel is None:
            url = self._make_url('stdin')
            self.log.debug("connecting stdin channel to %s", url)
            socket = self.connect_stdin(identity=self.session.bsession)
            self._stdin_channel = self.stdin_channel_class(
                socket, self.session, self.ioloop
            )
        return self._stdin_channel

    @property
    def hb_channel(self):
        """Get the hb channel object for this kernel."""
        if self._hb_channel is None:
            url = self._make_url('hb')
            self.log.debug("connecting heartbeat channel to %s", url)
            # the HB channel creates its own socket from context + url
            self._hb_channel = self.hb_channel_class(
                self.context, self.session, url
            )
        return self._hb_channel

    def is_alive(self):
        """Is the kernel process still running?"""
        if self._hb_channel is not None:
            # We didn't start the kernel with this KernelManager so we
            # use the heartbeat.
            return self._hb_channel.is_beating()
        else:
            # no heartbeat and not local, we can't tell if it's running,
            # so naively return True
            return True


    # Methods to send specific messages on channels
    def execute(self, code, silent=False, store_history=True,
                user_expressions=None, allow_stdin=None, stop_on_error=True):
        """Execute code in the kernel.

        Parameters
        ----------
        code : str
            A string of Python code.

        silent : bool, optional (default False)
            If set, the kernel will execute the code as quietly possible, and
            will force store_history to be False.

        store_history : bool, optional (default True)
            If set, the kernel will store command history.  This is forced
            to be False if silent is True.

        user_expressions : dict, optional
            A dict mapping names to expressions to be evaluated in the user's
            dict. The expression values are returned as strings formatted using
            :func:`repr`.

        allow_stdin : bool, optional (default self.allow_stdin)
            Flag for whether the kernel can send stdin requests to frontends.

            Some frontends (e.g. the Notebook) do not support stdin requests.
            If raw_input is called from code executed from such a frontend, a
            StdinNotImplementedError will be raised.

        stop_on_error: bool, optional (default True)
            Flag whether to abort the execution queue, if an exception is encountered.

        Returns
        -------
        The msg_id of the message sent.

        Raises
        ------
        ValueError
            If ``code`` is not a string, or ``user_expressions`` contains
            non-string keys or values.
        """
        if user_expressions is None:
            user_expressions = {}
        if allow_stdin is None:
            allow_stdin = self.allow_stdin


        # Don't waste network traffic if inputs are invalid
        if not isinstance(code, string_types):
            raise ValueError('code %r must be a string' % code)
        validate_string_dict(user_expressions)

        # Create class for content/msg creation. Related to, but possibly
        # not in Session.
        content = dict(code=code, silent=silent, store_history=store_history,
                       user_expressions=user_expressions,
                       allow_stdin=allow_stdin, stop_on_error=stop_on_error
                       )
        msg = self.session.msg('execute_request', content)
        self.shell_channel.send(msg)
        return msg['header']['msg_id']

    def complete(self, code, cursor_pos=None):
        """Tab complete text in the kernel's namespace.

        Parameters
        ----------
        code : str
            The context in which completion is requested.
            Can be anything between a variable name and an entire cell.
        cursor_pos : int, optional
            The position of the cursor in the block of code where the completion was requested.
            Default: ``len(code)``

        Returns
        -------
        The msg_id of the message sent.
        """
        if cursor_pos is None:
            cursor_pos = len(code)
        content = dict(code=code, cursor_pos=cursor_pos)
        msg = self.session.msg('complete_request', content)
        self.shell_channel.send(msg)
        return msg['header']['msg_id']

    def inspect(self, code, cursor_pos=None, detail_level=0):
        """Get metadata information about an object in the kernel's namespace.

        It is up to the kernel to determine the appropriate object to inspect.

        Parameters
        ----------
        code : str
            The context in which info is requested.
            Can be anything between a variable name and an entire cell.
        cursor_pos : int, optional
            The position of the cursor in the block of code where the info was requested.
            Default: ``len(code)``
        detail_level : int, optional
            The level of detail for the introspection (0-2)

        Returns
        -------
        The msg_id of the message sent.
        """
        if cursor_pos is None:
            cursor_pos = len(code)
        content = dict(code=code, cursor_pos=cursor_pos,
                       detail_level=detail_level,
                       )
        msg = self.session.msg('inspect_request', content)
        self.shell_channel.send(msg)
        return msg['header']['msg_id']

    def history(self, raw=True, output=False, hist_access_type='range', **kwargs):
        """Get entries from the kernel's history list.

        Parameters
        ----------
        raw : bool
            If True, return the raw input.
        output : bool
            If True, then return the output as well.
        hist_access_type : str
            'range' (fill in session, start and stop params), 'tail' (fill in n)
             or 'search' (fill in pattern param).

        session : int
            For a range request, the session from which to get lines. Session
            numbers are positive integers; negative ones count back from the
            current session.
        start : int
            The first line number of a history range.
        stop : int
            The final (excluded) line number of a history range.

        n : int
            The number of lines of history to get for a tail request.

        pattern : str
            The glob-syntax pattern for a search request.

        Returns
        -------
        The msg_id of the message sent.
        """
        content = dict(raw=raw, output=output, hist_access_type=hist_access_type,
                       **kwargs)
        msg = self.session.msg('history_request', content)
        self.shell_channel.send(msg)
        return msg['header']['msg_id']

    def kernel_info(self):
        """Request kernel info."""
        msg = self.session.msg('kernel_info_request')
        self.shell_channel.send(msg)
        return msg['header']['msg_id']

    def _handle_kernel_info_reply(self, msg):
        """handle kernel info reply

        sets protocol adaptation version. This might
        be run from a separate thread.
        """
        adapt_version = int(msg['content']['protocol_version'].split('.')[0])
        if adapt_version != major_protocol_version:
            # the Session will adapt all subsequent messages to this version
            self.session.adapt_version = adapt_version

    def shutdown(self, restart=False):
        """Request an immediate kernel shutdown.

        Upon receipt of the (empty) reply, client code can safely assume that
        the kernel has shut down and it's safe to forcefully terminate it if
        it's still alive.

        The kernel will send the reply via a function registered with Python's
        atexit module, ensuring it's truly done as the kernel is done with all
        normal operation.
        """
        # Send quit message to kernel. Once we implement kernel-side setattr,
        # this should probably be done that way, but for now this will do.
        msg = self.session.msg('shutdown_request', {'restart':restart})
        self.shell_channel.send(msg)
        return msg['header']['msg_id']

    def is_complete(self, code):
        """Ask the kernel whether ``code`` is complete and ready to execute.

        Returns the msg_id of the message sent.
        """
        msg = self.session.msg('is_complete_request', {'code': code})
        self.shell_channel.send(msg)
        return msg['header']['msg_id']

    def input(self, string):
        """Send a string of raw input to the kernel."""
        content = dict(value=string)
        msg = self.session.msg('input_reply', content)
        self.stdin_channel.send(msg)
388
389
390 KernelClientABC.register(KernelClient)
@@ -0,0 +1,80 b''
1 """Abstract base class for kernel clients"""
2
3 #-----------------------------------------------------------------------------
4 # Copyright (C) 2013 The IPython Development Team
5 #
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
8 #-----------------------------------------------------------------------------
9
10 #-----------------------------------------------------------------------------
11 # Imports
12 #-----------------------------------------------------------------------------
13
14 import abc
15
16 from IPython.utils.py3compat import with_metaclass
17
18 #-----------------------------------------------------------------------------
19 # Main kernel client class
20 #-----------------------------------------------------------------------------
21
class KernelClientABC(with_metaclass(abc.ABCMeta, object)):
    """KernelManager ABC.

    The docstrings for this class can be found in the base implementation:

    `jupyter_client.client.KernelClient`
    """

    @abc.abstractproperty
    def kernel(self):
        """The kernel this client communicates with."""

    @abc.abstractproperty
    def shell_channel_class(self):
        """The class used to build the shell channel."""

    @abc.abstractproperty
    def iopub_channel_class(self):
        """The class used to build the iopub channel."""

    @abc.abstractproperty
    def hb_channel_class(self):
        """The class used to build the heartbeat channel."""

    @abc.abstractproperty
    def stdin_channel_class(self):
        """The class used to build the stdin channel."""

    #--------------------------------------------------------------------------
    # Channel management methods
    #--------------------------------------------------------------------------

    @abc.abstractmethod
    def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
        """Start the selected channels."""

    @abc.abstractmethod
    def stop_channels(self):
        """Stop all running channels."""

    @abc.abstractproperty
    def channels_running(self):
        """Whether any channel is created and running."""

    @abc.abstractproperty
    def shell_channel(self):
        """The shell channel instance."""

    @abc.abstractproperty
    def iopub_channel(self):
        """The iopub channel instance."""

    @abc.abstractproperty
    def stdin_channel(self):
        """The stdin channel instance."""

    @abc.abstractproperty
    def hb_channel(self):
        """The heartbeat channel instance."""
This diff has been collapsed as it changes many lines, (576 lines changed) Show them Hide them
@@ -0,0 +1,576 b''
1 """Utilities for connecting to kernels
2
3 The :class:`ConnectionFileMixin` class in this module encapsulates the logic
4 related to writing and reading connections files.
5 """
6 # Copyright (c) IPython Development Team.
7 # Distributed under the terms of the Modified BSD License.
8
9 #-----------------------------------------------------------------------------
10 # Imports
11 #-----------------------------------------------------------------------------
12
13 from __future__ import absolute_import
14
15 import glob
16 import json
17 import os
18 import socket
19 import sys
20 from getpass import getpass
21 from subprocess import Popen, PIPE
22 import tempfile
23
24 import zmq
25
26 # IPython imports
27 from IPython.config import LoggingConfigurable
28 from IPython.core.profiledir import ProfileDir
29 from IPython.utils.localinterfaces import localhost
30 from IPython.utils.path import filefind, get_ipython_dir
31 from IPython.utils.py3compat import (str_to_bytes, bytes_to_str, cast_bytes_py2,
32 string_types)
33 from IPython.utils.traitlets import (
34 Bool, Integer, Unicode, CaselessStrEnum, Instance,
35 )
36
37
38 #-----------------------------------------------------------------------------
39 # Working with Connection Files
40 #-----------------------------------------------------------------------------
41
def write_connection_file(fname=None, shell_port=0, iopub_port=0, stdin_port=0, hb_port=0,
                          control_port=0, ip='', key=b'', transport='tcp',
                          signature_scheme='hmac-sha256',
                          ):
    """Generates a JSON config file, including the selection of random ports.

    Parameters
    ----------

    fname : unicode
        The path to the file to write

    shell_port : int, optional
        The port to use for ROUTER (shell) channel.

    iopub_port : int, optional
        The port to use for the SUB channel.

    stdin_port : int, optional
        The port to use for the ROUTER (raw input) channel.

    control_port : int, optional
        The port to use for the ROUTER (control) channel.

    hb_port : int, optional
        The port to use for the heartbeat REP channel.

    ip : str, optional
        The ip address the kernel will bind to.

    key : str, optional
        The Session key used for message authentication.

    signature_scheme : str, optional
        The scheme used for message authentication.
        This has the form 'digest-hash', where 'digest'
        is the scheme used for digests, and 'hash' is the name of the hash function
        used by the digest scheme.
        Currently, 'hmac' is the only supported digest scheme,
        and 'sha256' is the default hash function.

    Returns
    -------
    (fname, cfg) : (str, dict)
        The path of the file that was written, and the config dict that was
        written to it (including any randomly assigned ports).
    """
    if not ip:
        ip = localhost()
    # default to temporary connector file
    if not fname:
        fd, fname = tempfile.mkstemp('.json')
        os.close(fd)

    # Find open ports as necessary.

    ports = []
    # count how many ports we must pick; any port given as <= 0 is "assign one"
    ports_needed = int(shell_port <= 0) + \
                   int(iopub_port <= 0) + \
                   int(stdin_port <= 0) + \
                   int(control_port <= 0) + \
                   int(hb_port <= 0)
    if transport == 'tcp':
        # bind ephemeral sockets first, then read back the OS-assigned ports.
        # Binding all sockets before closing any reduces the chance of the
        # same port being handed out twice.
        for i in range(ports_needed):
            sock = socket.socket()
            # struct.pack('ii', (0,0)) is 8 null bytes
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, b'\0' * 8)
            sock.bind(('', 0))
            ports.append(sock)
        for i, sock in enumerate(ports):
            port = sock.getsockname()[1]
            sock.close()
            ports[i] = port
    else:
        # ipc transport: "ports" are integer suffixes of filesystem paths;
        # skip suffixes whose files already exist
        N = 1
        for i in range(ports_needed):
            while os.path.exists("%s-%s" % (ip, str(N))):
                N += 1
            ports.append(N)
            N += 1
    # assign the picked ports in the same order they were counted above
    if shell_port <= 0:
        shell_port = ports.pop(0)
    if iopub_port <= 0:
        iopub_port = ports.pop(0)
    if stdin_port <= 0:
        stdin_port = ports.pop(0)
    if control_port <= 0:
        control_port = ports.pop(0)
    if hb_port <= 0:
        hb_port = ports.pop(0)

    cfg = dict( shell_port=shell_port,
                iopub_port=iopub_port,
                stdin_port=stdin_port,
                control_port=control_port,
                hb_port=hb_port,
              )
    cfg['ip'] = ip
    cfg['key'] = bytes_to_str(key)
    cfg['transport'] = transport
    cfg['signature_scheme'] = signature_scheme

    with open(fname, 'w') as f:
        f.write(json.dumps(cfg, indent=2))

    return fname, cfg
143
144
def get_connection_file(app=None):
    """Return the path to the connection file of an app

    Parameters
    ----------
    app : IPKernelApp instance [optional]
        If unspecified, the currently running app will be used
    """
    if app is None:
        # no app given: we must be running inside a kernel
        from ipython_kernel.kernelapp import IPKernelApp
        if not IPKernelApp.initialized():
            raise RuntimeError("app not specified, and not in a running Kernel")
        app = IPKernelApp.instance()
    search_path = ['.', app.profile_dir.security_dir]
    return filefind(app.connection_file, search_path)
160
161
def find_connection_file(filename='kernel-*.json', profile=None):
    """find a connection file, and return its absolute path.

    The file is searched for in the current working directory and the
    profile's security directory unless an absolute path is given.

    If profile is unspecified, the profile of the currently running IPython
    application is used, falling back on 'default' outside of IPython.

    When no file matches exactly, the argument is treated as a fileglob
    against the profile's security dir, and the most recently accessed
    match wins.

    Parameters
    ----------
    filename : str
        The connection file or fileglob to search for.
    profile : str [optional]
        The name of the profile to use when searching for the connection file,
        if different from the current IPython session or 'default'.

    Returns
    -------
    str : The absolute path of the connection file.
    """
    from IPython.core.application import BaseIPythonApplication as IPApp

    # quick check for absolute path, before going through logic
    try:
        return filefind(filename)
    except IOError:
        pass

    # resolve the security dir from the requested (or current) profile
    if profile is not None:
        profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile)
    elif IPApp.initialized():
        profile_dir = IPApp.instance().profile_dir
    else:
        # not running in IPython, use default profile
        profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), 'default')
    security_dir = profile_dir.security_dir

    # next, try the explicit name relative to cwd / security dir
    try:
        return filefind(filename, ['.', security_dir])
    except IOError:
        pass

    # not found by full name: fall back on glob matching
    pat = filename if '*' in filename else '*%s*' % filename
    matches = glob.glob(os.path.join(security_dir, pat))
    if not matches:
        raise IOError("Could not find %r in %r" % (filename, security_dir))
    if len(matches) == 1:
        return matches[0]
    # several matches: take the one with the latest access time
    return sorted(matches, key=lambda f: os.stat(f).st_atime)[-1]
230
231
def get_connection_info(connection_file=None, unpack=False, profile=None):
    """Return the connection information for the current Kernel.

    Parameters
    ----------
    connection_file : str [optional]
        The connection file to be used. Can be given by absolute path, or
        IPython will search in the security directory of a given profile.

        If unspecified, the connection file for the currently running
        IPython Kernel will be used, which is only allowed from inside a kernel.
    unpack : bool [default: False]
        if True, return the unpacked dict, otherwise just the string contents
        of the file.
    profile : str [optional]
        The name of the profile to use when searching for the connection file,
        if different from the current IPython session or 'default'.

    Returns
    -------
    The connection dictionary of the current kernel, as string or dict,
    depending on `unpack`.
    """
    if connection_file is None:
        # inside a kernel: use its own connection file
        cf = get_connection_file()
    else:
        # connection file specified, allow shortnames:
        cf = find_connection_file(connection_file, profile=profile)

    with open(cf) as f:
        raw = f.read()

    if not unpack:
        return raw

    info = json.loads(raw)
    # the signing key must be bytes for use with Session
    info['key'] = str_to_bytes(info.get('key', ''))
    return info
272
273
def connect_qtconsole(connection_file=None, argv=None, profile=None):
    """Connect a qtconsole to the current kernel.

    This is useful for connecting a second qtconsole to a kernel, or to a
    local notebook.

    Parameters
    ----------
    connection_file : str [optional]
        The connection file to be used. Can be given by absolute path, or
        IPython will search in the security directory of a given profile.

        If unspecified, the connection file for the currently running
        IPython Kernel will be used, which is only allowed from inside a kernel.
    argv : list [optional]
        Any extra args to be passed to the console.
    profile : str [optional]
        The name of the profile to use when searching for the connection file,
        if different from the current IPython session or 'default'.

    Returns
    -------
    :class:`subprocess.Popen` instance running the qtconsole frontend
    """
    if argv is None:
        argv = []

    if connection_file is None:
        # get connection file from current kernel
        cf = get_connection_file()
    else:
        cf = find_connection_file(connection_file, profile=profile)

    # one-liner handed to the child python process
    cmd = "from IPython.qt.console import qtconsoleapp;qtconsoleapp.main()"

    kwargs = dict(stdout=PIPE, stderr=PIPE)
    # close inherited fds in the child everywhere except Windows
    kwargs['close_fds'] = sys.platform != 'win32'
    return Popen([sys.executable, '-c', cmd, '--existing', cf] + argv, **kwargs)
316
317
def tunnel_to_kernel(connection_info, sshserver, sshkey=None):
    """tunnel connections to a kernel via ssh

    This will open four SSH tunnels from localhost on this machine to the
    ports associated with the kernel. They can be either direct
    localhost-localhost tunnels, or if an intermediate server is necessary,
    the kernel must be listening on a public IP.

    Parameters
    ----------
    connection_info : dict or str (path)
        Either a connection dict, or the path to a JSON connection file
    sshserver : str
        The ssh sever to use to tunnel to the kernel. Can be a full
        `user@server:port` string. ssh config aliases are respected.
    sshkey : str [optional]
        Path to file containing ssh key to use for authentication.
        Only necessary if your ssh config does not already associate
        a keyfile with the host.

    Returns
    -------

    (shell, iopub, stdin, hb) : ints
        The four ports on localhost that have been forwarded to the kernel.
    """
    from zmq.ssh import tunnel

    if isinstance(connection_info, string_types):
        # a path was given: load the connection dict from the file
        with open(connection_info) as f:
            connection_info = json.loads(f.read())

    cf = connection_info
    remote_ip = cf['ip']
    rports = [cf[name] for name in ('shell_port', 'iopub_port', 'stdin_port', 'hb_port')]
    lports = tunnel.select_random_ports(4)

    # only prompt for a password when key-based auth is unavailable
    if tunnel.try_passwordless_ssh(sshserver, sshkey):
        password = False
    else:
        password = getpass("SSH Password for %s: " % cast_bytes_py2(sshserver))

    for local_port, remote_port in zip(lports, rports):
        tunnel.ssh_tunnel(local_port, remote_port, sshserver, remote_ip, sshkey, password)

    return tuple(lports)
366
367
368 #-----------------------------------------------------------------------------
369 # Mixin for classes that work with connection files
370 #-----------------------------------------------------------------------------
371
# zmq socket type used on the client side of each channel
# (the kernel side binds the matching peer types: heartbeat REP,
# shell/stdin/control ROUTER, iopub PUB -- see the port help strings below)
channel_socket_types = {
    'hb' : zmq.REQ,
    'shell' : zmq.DEALER,
    'iopub' : zmq.SUB,
    'stdin' : zmq.DEALER,
    'control': zmq.DEALER,
}

# trait names of the five port attributes on ConnectionFileMixin
port_names = [ "%s_port" % channel for channel in ('shell', 'stdin', 'iopub', 'hb', 'control')]
381
class ConnectionFileMixin(LoggingConfigurable):
    """Mixin for configurable classes that work with connection files

    NOTE(review): `_create_connected_socket` relies on ``self.context`` (a zmq
    Context), which is not defined in this mixin -- the inheriting class must
    provide it.
    """

    # The addresses for the communication channels
    connection_file = Unicode('', config=True,
        help="""JSON file in which to store connection info [default: kernel-<pid>.json]

        This file will contain the IP, ports, and authentication key needed to connect
        clients to this kernel. By default, this file will be created in the security dir
        of the current profile, but can be specified by absolute path.
        """)
    # set to True by write_connection_file, so cleanup only removes files we own
    _connection_file_written = Bool(False)

    transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True)

    ip = Unicode(config=True,
        help="""Set the kernel\'s IP address [default localhost].
        If the IP address is something other than localhost, then
        Consoles on other machines will be able to connect
        to the Kernel, so be careful!"""
    )

    def _ip_default(self):
        # dynamic default: ipc transport addresses are file-path prefixes,
        # tcp uses the local loopback interface
        if self.transport == 'ipc':
            if self.connection_file:
                return os.path.splitext(self.connection_file)[0] + '-ipc'
            else:
                return 'kernel-ipc'
        else:
            return localhost()

    def _ip_changed(self, name, old, new):
        # normalize zmq's '*' wildcard to the socket-API equivalent
        if new == '*':
            self.ip = '0.0.0.0'

    # protected traits

    hb_port = Integer(0, config=True,
            help="set the heartbeat port [default: random]")
    shell_port = Integer(0, config=True,
            help="set the shell (ROUTER) port [default: random]")
    iopub_port = Integer(0, config=True,
            help="set the iopub (PUB) port [default: random]")
    stdin_port = Integer(0, config=True,
            help="set the stdin (ROUTER) port [default: random]")
    control_port = Integer(0, config=True,
            help="set the control (ROUTER) port [default: random]")

    @property
    def ports(self):
        """The five port numbers, in port_names order."""
        return [ getattr(self, name) for name in port_names ]

    # The Session to use for communication with the kernel.
    session = Instance('jupyter_client.session.Session')
    def _session_default(self):
        from jupyter_client.session import Session
        return Session(parent=self)

    #--------------------------------------------------------------------------
    # Connection and ipc file management
    #--------------------------------------------------------------------------

    def get_connection_info(self):
        """return the connection info as a dict"""
        return dict(
            transport=self.transport,
            ip=self.ip,
            shell_port=self.shell_port,
            iopub_port=self.iopub_port,
            stdin_port=self.stdin_port,
            hb_port=self.hb_port,
            control_port=self.control_port,
            signature_scheme=self.session.signature_scheme,
            key=self.session.key,
        )

    def cleanup_connection_file(self):
        """Cleanup connection file *if we wrote it*

        Will not raise if the connection file was already removed somehow.
        """
        if self._connection_file_written:
            # cleanup connection files on full shutdown of kernel we started
            self._connection_file_written = False
            try:
                os.remove(self.connection_file)
            except (IOError, OSError, AttributeError):
                pass

    def cleanup_ipc_files(self):
        """Cleanup ipc files if we wrote them."""
        if self.transport != 'ipc':
            return
        for port in self.ports:
            # ipc "ports" are suffixes of filesystem paths (see _make_url)
            ipcfile = "%s-%i" % (self.ip, port)
            try:
                os.remove(ipcfile)
            except (IOError, OSError):
                pass

    def write_connection_file(self):
        """Write connection info to JSON dict in self.connection_file."""
        # no-op if we already wrote it and it is still there
        if self._connection_file_written and os.path.exists(self.connection_file):
            return

        self.connection_file, cfg = write_connection_file(self.connection_file,
            transport=self.transport, ip=self.ip, key=self.session.key,
            stdin_port=self.stdin_port, iopub_port=self.iopub_port,
            shell_port=self.shell_port, hb_port=self.hb_port,
            control_port=self.control_port,
            signature_scheme=self.session.signature_scheme,
        )
        # write_connection_file also sets default ports:
        for name in port_names:
            setattr(self, name, cfg[name])

        self._connection_file_written = True

    def load_connection_file(self):
        """Load connection info from JSON dict in self.connection_file."""
        self.log.debug(u"Loading connection file %s", self.connection_file)
        with open(self.connection_file) as f:
            cfg = json.load(f)
        self.transport = cfg.get('transport', self.transport)
        self.ip = cfg.get('ip', self._ip_default())

        for name in port_names:
            if getattr(self, name) == 0 and name in cfg:
                # not overridden by config or cl_args
                setattr(self, name, cfg[name])

        # key in the file is a str; Session wants bytes
        if 'key' in cfg:
            self.session.key = str_to_bytes(cfg['key'])
        if 'signature_scheme' in cfg:
            self.session.signature_scheme = cfg['signature_scheme']

    #--------------------------------------------------------------------------
    # Creating connected sockets
    #--------------------------------------------------------------------------

    def _make_url(self, channel):
        """Make a ZeroMQ URL for a given channel."""
        transport = self.transport
        ip = self.ip
        port = getattr(self, '%s_port' % channel)

        if transport == 'tcp':
            return "tcp://%s:%i" % (ip, port)
        else:
            # e.g. ipc://<path-prefix>-<port>
            return "%s://%s-%s" % (transport, ip, port)

    def _create_connected_socket(self, channel, identity=None):
        """Create a zmq Socket and connect it to the kernel."""
        url = self._make_url(channel)
        socket_type = channel_socket_types[channel]
        self.log.debug("Connecting to: %s" % url)
        sock = self.context.socket(socket_type)
        # set linger to 1s to prevent hangs at exit
        sock.linger = 1000
        if identity:
            sock.identity = identity
        sock.connect(url)
        return sock

    def connect_iopub(self, identity=None):
        """return zmq Socket connected to the IOPub channel"""
        sock = self._create_connected_socket('iopub', identity=identity)
        # subscribe to everything
        sock.setsockopt(zmq.SUBSCRIBE, b'')
        return sock

    def connect_shell(self, identity=None):
        """return zmq Socket connected to the Shell channel"""
        return self._create_connected_socket('shell', identity=identity)

    def connect_stdin(self, identity=None):
        """return zmq Socket connected to the StdIn channel"""
        return self._create_connected_socket('stdin', identity=identity)

    def connect_hb(self, identity=None):
        """return zmq Socket connected to the Heartbeat channel"""
        return self._create_connected_socket('hb', identity=identity)

    def connect_control(self, identity=None):
        """return zmq Socket connected to the Control channel"""
        return self._create_connected_socket('control', identity=identity)
567
568
# Explicit public API for ``from ... import *``.
# NOTE(review): ConnectionFileMixin is not listed, so star-imports will not
# pick it up; import it by name instead.
__all__ = [
    'write_connection_file',
    'get_connection_file',
    'find_connection_file',
    'get_connection_info',
    'connect_qtconsole',
    'tunnel_to_kernel',
]
@@ -0,0 +1,237 b''
1 import io
2 import json
3 import os
4 import shutil
5 import sys
6
pjoin = os.path.join  # short alias, used throughout this module
8
9 from IPython.utils.path import get_ipython_dir
10 from IPython.utils.py3compat import PY3
11 from IPython.utils.traitlets import HasTraits, List, Unicode, Dict, Any, Set
12 from IPython.config import Configurable
13 from .launcher import make_ipkernel_cmd
14
# System-wide kernel spec directories. Later entries take priority over
# earlier ones (see KernelSpecManager.kernel_dirs).
if os.name == 'nt':
    programdata = os.environ.get('PROGRAMDATA', None)
    if programdata:
        SYSTEM_KERNEL_DIRS = [pjoin(programdata, 'jupyter', 'kernels')]
    else: # PROGRAMDATA is not defined by default on XP.
        SYSTEM_KERNEL_DIRS = []
else:
    SYSTEM_KERNEL_DIRS = ["/usr/share/jupyter/kernels",
                          "/usr/local/share/jupyter/kernels",
                         ]

# Name of the kernel that runs on the same Python as this process.
NATIVE_KERNEL_NAME = 'python3' if PY3 else 'python2'
27
def _pythonfirst(s):
    """Sort key function that will put strings starting with 'python' first.

    Kernel names may not contain spaces, so prefixing a space guarantees
    these keys sort before every real kernel name.
    """
    # NATIVE_KERNEL_NAME is 'python2' or 'python3', so the original separate
    # equality branch for it was redundant: startswith('python') covers it.
    if s.startswith('python'):
        # Space is not valid in kernel names, so this should sort first
        return ' ' + s
    return s
36
class KernelSpec(HasTraits):
    """In-memory representation of one kernel.json spec."""

    # command-line argv used to launch the kernel
    argv = List()
    # human-readable name of the kernel
    display_name = Unicode()
    # language the kernel implements
    language = Unicode()
    # extra environment variables for the kernel process
    env = Dict()
    # directory containing kernel.json (and any accompanying resource files)
    resource_dir = Unicode()

    @classmethod
    def from_resource_dir(cls, resource_dir):
        """Create a KernelSpec object by reading kernel.json

        Pass the path to the *directory* containing kernel.json.
        """
        kernel_file = pjoin(resource_dir, 'kernel.json')
        with io.open(kernel_file, 'r', encoding='utf-8') as f:
            kernel_dict = json.load(f)
        return cls(resource_dir=resource_dir, **kernel_dict)

    def to_dict(self):
        """Return the spec as a dict; resource_dir is excluded, matching
        what is stored in kernel.json."""
        d = dict(argv=self.argv,
                 env=self.env,
                 display_name=self.display_name,
                 language=self.language,
                )

        return d

    def to_json(self):
        """Serialize the spec dict to a JSON string."""
        return json.dumps(self.to_dict())
66
def _is_kernel_dir(path):
    """Return True if ``path`` is a directory that contains a kernel.json."""
    spec_file = os.path.join(path, 'kernel.json')
    return os.path.isdir(path) and os.path.isfile(spec_file)
70
def _list_kernels_in(dir):
    """Return a mapping of kernel names to resource directories from dir.

    Names are lowercased. If dir is None or does not exist, returns an
    empty dict.
    """
    if dir is None or not os.path.isdir(dir):
        return {}
    found = {}
    for entry in os.listdir(dir):
        path = os.path.join(dir, entry)
        if _is_kernel_dir(path):
            found[entry.lower()] = path
    return found
80
class NoSuchKernel(KeyError):
    """Raised when a kernel name cannot be resolved to a kernel spec."""

    def __init__(self, name):
        # Pass the name on to KeyError so str()/repr() of the exception
        # identify the missing kernel instead of being empty.
        super(NoSuchKernel, self).__init__(name)
        self.name = name
84
class KernelSpecManager(Configurable):
    """Find, load, and install kernel specs.

    Specs are searched for in ``kernel_dirs``; later directories take
    priority over earlier ones, and the native kernel always wins.
    """

    # root IPython directory; the user kernels dir lives under it
    ipython_dir = Unicode()
    def _ipython_dir_default(self):
        return get_ipython_dir()

    # per-user kernels directory
    user_kernel_dir = Unicode()
    def _user_kernel_dir_default(self):
        return pjoin(self.ipython_dir, 'kernels')

    @property
    def env_kernel_dir(self):
        """Kernels installed in the current Python environment (sys.prefix)."""
        return pjoin(sys.prefix, 'share', 'jupyter', 'kernels')

    whitelist = Set(config=True,
        help="""Whitelist of allowed kernel names.

        By default, all installed kernels are allowed.
        """
    )
    kernel_dirs = List(
        help="List of kernel directories to search. Later ones take priority over earlier."
    )
    def _kernel_dirs_default(self):
        dirs = SYSTEM_KERNEL_DIRS[:]
        if self.env_kernel_dir not in dirs:
            dirs.append(self.env_kernel_dir)
        dirs.append(self.user_kernel_dir)
        return dirs

    @property
    def _native_kernel_dict(self):
        """The kernel spec (as a dict) for the native kernel.

        The native kernel is the kernel using the same Python runtime as this
        process.
        """
        return {
            'argv': make_ipkernel_cmd(),
            'display_name': 'Python %i' % (3 if PY3 else 2),
            'language': 'python',
        }

    @property
    def _native_kernel_resource_dir(self):
        # resource files shipped alongside this package
        return pjoin(os.path.dirname(__file__), 'resources')

    def find_kernel_specs(self):
        """Returns a dict mapping kernel names to resource directories."""
        d = {}
        for kernel_dir in self.kernel_dirs:
            # later dirs win on name collisions
            d.update(_list_kernels_in(kernel_dir))

        # the native kernel always overrides an installed spec of the same name
        d[NATIVE_KERNEL_NAME] = self._native_kernel_resource_dir
        if self.whitelist:
            # filter if there's a whitelist
            d = {name:spec for name,spec in d.items() if name in self.whitelist}
        return d
        # TODO: Caching?

    def get_kernel_spec(self, kernel_name):
        """Returns a :class:`KernelSpec` instance for the given kernel_name.

        Raises :exc:`NoSuchKernel` if the given kernel name is not found.
        """
        # 'python' and the native name short-circuit to the native kernel,
        # subject to the whitelist
        if kernel_name in {'python', NATIVE_KERNEL_NAME} and \
                (not self.whitelist or kernel_name in self.whitelist):
            return KernelSpec(resource_dir=self._native_kernel_resource_dir,
                              **self._native_kernel_dict)

        d = self.find_kernel_specs()
        try:
            # names are stored lowercased (see _list_kernels_in)
            resource_dir = d[kernel_name.lower()]
        except KeyError:
            raise NoSuchKernel(kernel_name)
        return KernelSpec.from_resource_dir(resource_dir)

    def _get_destination_dir(self, kernel_name, user=False):
        # user=True -> per-user dir; otherwise the last (highest-priority)
        # system kernel dir
        if user:
            return os.path.join(self.user_kernel_dir, kernel_name)
        else:
            if SYSTEM_KERNEL_DIRS:
                return os.path.join(SYSTEM_KERNEL_DIRS[-1], kernel_name)
            else:
                raise EnvironmentError("No system kernel directory is available")


    def install_kernel_spec(self, source_dir, kernel_name=None, user=False,
                            replace=False):
        """Install a kernel spec by copying its directory.

        If ``kernel_name`` is not given, the basename of ``source_dir`` will
        be used.

        If ``user`` is False, it will attempt to install into the systemwide
        kernel registry. If the process does not have appropriate permissions,
        an :exc:`OSError` will be raised.

        If ``replace`` is True, this will replace an existing kernel of the same
        name. Otherwise, if the destination already exists, an :exc:`OSError`
        will be raised.
        """
        if not kernel_name:
            kernel_name = os.path.basename(source_dir)
        # kernel names are matched case-insensitively
        kernel_name = kernel_name.lower()

        destination = self._get_destination_dir(kernel_name, user=user)

        if replace and os.path.isdir(destination):
            shutil.rmtree(destination)

        shutil.copytree(source_dir, destination)

    def install_native_kernel_spec(self, user=False):
        """Install the native kernel spec to the filesystem

        This allows a Python 3 frontend to use a Python 2 kernel, or vice versa.
        The kernelspec will be written pointing to the Python executable on
        which this is run.

        If ``user`` is False, it will attempt to install into the systemwide
        kernel registry. If the process does not have appropriate permissions,
        an :exc:`OSError` will be raised.
        """
        path = self._get_destination_dir(NATIVE_KERNEL_NAME, user=user)
        os.makedirs(path, mode=0o755)
        with open(pjoin(path, 'kernel.json'), 'w') as f:
            json.dump(self._native_kernel_dict, f, indent=1)
        # copy the bundled resource files alongside kernel.json
        copy_from = self._native_kernel_resource_dir
        for file in os.listdir(copy_from):
            shutil.copy(pjoin(copy_from, file), path)
        return path
216
# Module-level convenience wrappers, each using a fresh default
# KernelSpecManager instance.

def find_kernel_specs():
    """Returns a dict mapping kernel names to resource directories."""
    return KernelSpecManager().find_kernel_specs()

def get_kernel_spec(kernel_name):
    """Returns a :class:`KernelSpec` instance for the given kernel_name.

    Raises KeyError if the given kernel name is not found.
    """
    return KernelSpecManager().get_kernel_spec(kernel_name)

def install_kernel_spec(source_dir, kernel_name=None, user=False, replace=False):
    return KernelSpecManager().install_kernel_spec(source_dir, kernel_name,
                                                   user, replace)

# borrow the docstring from the method this wraps
install_kernel_spec.__doc__ = KernelSpecManager.install_kernel_spec.__doc__

def install_native_kernel_spec(user=False):
    return KernelSpecManager().install_native_kernel_spec(user=user)

# borrow the docstring from the method this wraps
install_native_kernel_spec.__doc__ = KernelSpecManager.install_native_kernel_spec.__doc__
@@ -0,0 +1,142 b''
1
2 # Copyright (c) IPython Development Team.
3 # Distributed under the terms of the Modified BSD License.
4
5 import errno
6 import os.path
7
8 from IPython.config.application import Application
9 from IPython.core.application import (
10 BaseIPythonApplication, base_flags, base_aliases
11 )
12 from IPython.utils.traitlets import Instance, Dict, Unicode, Bool
13
14 from .kernelspec import KernelSpecManager, _pythonfirst
15
class ListKernelSpecs(BaseIPythonApplication):
    description = """List installed kernel specifications."""
    # manager used to locate the kernel specs
    kernel_spec_manager = Instance(KernelSpecManager)

    # Not all of the base aliases are meaningful (e.g. profile)
    aliases = {k: base_aliases[k] for k in ['ipython-dir', 'log-level']}
    flags = {'debug': base_flags['debug'],}

    def _kernel_spec_manager_default(self):
        return KernelSpecManager(parent=self, ipython_dir=self.ipython_dir)

    def start(self):
        """Print the names of all available kernels, python kernels first."""
        print("Available kernels:")
        for kernelname in sorted(self.kernel_spec_manager.find_kernel_specs(),
                                 key=_pythonfirst):
            print(" %s" % kernelname)
32
33
class InstallKernelSpec(BaseIPythonApplication):
    """App that copies a kernel spec directory into the kernel registry."""

    description = """Install a kernel specification directory."""
    kernel_spec_manager = Instance(KernelSpecManager)

    def _kernel_spec_manager_default(self):
        # pass parent=self (as ListKernelSpecs does) so config propagates
        # to the manager
        return KernelSpecManager(parent=self, ipython_dir=self.ipython_dir)

    # directory to copy the spec from; set from the positional argument
    sourcedir = Unicode()
    kernel_name = Unicode("", config=True,
        help="Install the kernel spec with this name"
    )
    def _kernel_name_default(self):
        return os.path.basename(self.sourcedir)

    user = Bool(False, config=True,
        help="""
        Try to install the kernel spec to the per-user directory instead of
        the system or environment directory.
        """
    )
    replace = Bool(False, config=True,
        help="Replace any existing kernel spec with this name."
    )

    aliases = {'name': 'InstallKernelSpec.kernel_name'}
    for k in ['ipython-dir', 'log-level']:
        aliases[k] = base_aliases[k]

    flags = {'user': ({'InstallKernelSpec': {'user': True}},
                "Install to the per-user kernel registry"),
             'replace': ({'InstallKernelSpec': {'replace': True}},
                "Replace any existing kernel spec with this name."),
             'debug': base_flags['debug'],
            }

    def parse_command_line(self, argv):
        super(InstallKernelSpec, self).parse_command_line(argv)
        # accept positional arg as the source directory
        if self.extra_args:
            self.sourcedir = self.extra_args[0]
        else:
            print("No source directory specified.")
            self.exit(1)

    def start(self):
        """Install the spec, translating common OSErrors into friendly exits."""
        try:
            self.kernel_spec_manager.install_kernel_spec(self.sourcedir,
                                                         kernel_name=self.kernel_name,
                                                         user=self.user,
                                                         replace=self.replace,
                                                        )
        except OSError as e:
            if e.errno == errno.EACCES:
                print("Permission denied")
                self.exit(1)
            elif e.errno == errno.EEXIST:
                print("A kernel spec is already present at %s" % e.filename)
                self.exit(1)
            # anything unexpected propagates
            raise
93
class InstallNativeKernelSpec(BaseIPythonApplication):
    """App that installs the kernel spec for this same Python runtime."""

    description = """Install the native kernel spec directory for this Python."""
    kernel_spec_manager = Instance(KernelSpecManager)

    def _kernel_spec_manager_default(self):
        # pass parent=self (as ListKernelSpecs does) so config propagates
        # to the manager
        return KernelSpecManager(parent=self, ipython_dir=self.ipython_dir)

    user = Bool(False, config=True,
        help="""
        Try to install the kernel spec to the per-user directory instead of
        the system or environment directory.
        """
    )

    # Not all of the base aliases are meaningful (e.g. profile)
    aliases = {k: base_aliases[k] for k in ['ipython-dir', 'log-level']}
    flags = {'user': ({'InstallNativeKernelSpec': {'user': True}},
                "Install to the per-user kernel registry"),
             'debug': base_flags['debug'],
            }

    def start(self):
        """Install the native spec; exit with the error on failure."""
        try:
            self.kernel_spec_manager.install_native_kernel_spec(user=self.user)
        except OSError as e:
            self.exit(e)
120
class KernelSpecApp(Application):
    name = "ipython kernelspec"
    description = """Manage IPython kernel specifications."""

    # subcommand name -> (App class, one-line help from its description)
    subcommands = Dict({
        'list': (ListKernelSpecs, ListKernelSpecs.description.splitlines()[0]),
        'install': (InstallKernelSpec, InstallKernelSpec.description.splitlines()[0]),
        'install-self': (InstallNativeKernelSpec, InstallNativeKernelSpec.description.splitlines()[0]),
    })

    # no flags/aliases of its own; everything is handled by the subcommands
    aliases = {}
    flags = {}

    def start(self):
        """Dispatch to the chosen subcommand, or print usage if none given."""
        if self.subapp is None:
            print("No subcommand specified. Must specify one of: %s"% list(self.subcommands))
            print()
            self.print_description()
            self.print_subcommands()
            self.exit(1)
        else:
            return self.subapp.start()
@@ -0,0 +1,226 b''
1 """Utilities for launching kernels
2 """
3
4 # Copyright (c) IPython Development Team.
5 # Distributed under the terms of the Modified BSD License.
6
7 import os
8 import sys
9 from subprocess import Popen, PIPE
10
11 from IPython.utils.encoding import getdefaultencoding
12 from IPython.utils.py3compat import cast_bytes_py2
13
14
def swallow_argv(argv, aliases=None, flags=None):
    """Drop frontend-specific aliases and flags from an argument list.

    Frontend apps use this to forward a subset of their command line to a
    subprocess: any argument matching one of *aliases* (together with its
    value, if given via a space) or *flags* is removed; everything else is
    kept in its original order.

    Parameters
    ----------
    argv : list(str)
        The starting argv, to be filtered.
    aliases : container of aliases (dict, list, set, etc.)
        The frontend-specific aliases to be removed.
    flags : container of flags (dict, list, set, etc.)
        The frontend-specific flags to be removed.

    Returns
    -------
    argv : list(str)
        The argv list, excluding flags and aliases that have been stripped.
    """
    aliases = set() if aliases is None else aliases
    flags = set() if flags is None else flags

    remaining = list(argv)  # start from a copy and remove matches

    eat_next = False       # previous arg was an alias expecting a value
    prev_was_flag = False  # ...but that alias also has a no-arg flag form
    for arg in argv:
        if arg == '--':
            # everything after '--' is passed through untouched
            break
        if eat_next:
            eat_next = False
            # the previous alias took its value via a space -- swallow it,
            # unless that alias doubles as a flag and this looks like
            # another flag rather than a value
            if not (prev_was_flag and arg.startswith('-')):
                remaining.remove(arg)
            continue
        if not arg.startswith('-'):
            continue
        name, sep, _value = arg.lstrip('-').partition('=')
        # startswith: argparse accepts any unique leading section of an
        # option name, so `--no-br` means `--no-browser` in the notebook
        if any(alias.startswith(name) for alias in aliases):
            remaining.remove(arg)
            if not sep:
                # alias given without '=': its value is the next argument
                eat_next = True
            # could have been a flag that matches an alias (e.g. `existing`),
            # in which case we might not swallow the next arg
            prev_was_flag = name in flags
        elif not sep and any(flag.startswith(name) for flag in flags):
            # strip the flag, but don't swallow next: flags take no args
            remaining.remove(arg)

    # return the shortened list
    return remaining
79
80
def make_ipkernel_cmd(mod='ipython_kernel', executable=None, extra_arguments=None, **kw):
    """Build Popen command list for launching an IPython kernel.

    Parameters
    ----------
    mod : str, optional (default 'ipython_kernel')
        A string of an IPython module whose __main__ starts an IPython kernel

    executable : str, optional (default sys.executable)
        The Python executable to use for the kernel process.

    extra_arguments : list, optional
        A list of extra arguments to pass when executing the launch code.

    **kw : optional
        Accepted for call-site compatibility but currently ignored.

    Returns
    -------

    A Popen command list
    """
    if executable is None:
        executable = sys.executable
    # Avoid a mutable default argument: treat None as "no extra arguments".
    if extra_arguments is None:
        extra_arguments = []
    arguments = [executable, '-m', mod, '-f', '{connection_file}']
    arguments.extend(extra_arguments)

    return arguments
106
107
def launch_kernel(cmd, stdin=None, stdout=None, stderr=None, env=None,
                  independent=False,
                  cwd=None,
                  **kw
                  ):
    """ Launches a localhost kernel, binding to the specified ports.

    Parameters
    ----------
    cmd : Popen list,
        An argv-style command list that starts a kernel process.

    stdin, stdout, stderr : optional (default None)
        Standards streams, as defined in subprocess.Popen.

    env : dict, optional
        Environment for the kernel process (default: a copy of os.environ).
        NOTE(review): when a dict is passed in, it is mutated below
        (JPY_PARENT_PID / interrupt-event entries are added) -- confirm
        callers expect that.

    independent : bool, optional (default False)
        If set, the kernel process is guaranteed to survive if this process
        dies. If not set, an effort is made to ensure that the kernel is killed
        when this process dies. Note that in this case it is still good practice
        to kill kernels manually before exiting.

    cwd : path, optional
        The working dir of the kernel process (default: cwd of this process).

    **kw : optional
        NOTE(review): extra keyword arguments are accepted but never
        forwarded to Popen -- they are silently ignored.

    Returns
    -------

    Popen instance for the kernel subprocess
    """

    # Popen will fail (sometimes with a deadlock) if stdin, stdout, and stderr
    # are invalid. Unfortunately, there is in general no way to detect whether
    # they are valid. The following two blocks redirect them to (temporary)
    # pipes in certain important cases.

    # If this process has been backgrounded, our stdin is invalid. Since there
    # is no compelling reason for the kernel to inherit our stdin anyway, we'll
    # play this one safe and always redirect.
    redirect_in = True
    _stdin = PIPE if stdin is None else stdin

    # If this process is running on pythonw, we know that stdin, stdout, and
    # stderr are all invalid.
    redirect_out = sys.executable.endswith('pythonw.exe')
    if redirect_out:
        blackhole = open(os.devnull, 'w')
        _stdout = blackhole if stdout is None else stdout
        _stderr = blackhole if stderr is None else stderr
    else:
        _stdout, _stderr = stdout, stderr

    # Never launch with the caller's live environment implicitly.
    env = env if (env is not None) else os.environ.copy()

    # Encoding used when coercing cmd/cwd to bytes on Windows/Python 2 below.
    encoding = getdefaultencoding(prefer_stream=False)
    kwargs = dict(
        stdin=_stdin,
        stdout=_stdout,
        stderr=_stderr,
        cwd=cwd,
        env=env,
    )

    # Spawn a kernel.
    if sys.platform == 'win32':
        # Popen on Python 2 on Windows cannot handle unicode args or cwd
        cmd = [ cast_bytes_py2(c, encoding) for c in cmd ]
        if cwd:
            cwd = cast_bytes_py2(cwd, sys.getfilesystemencoding() or 'ascii')
            kwargs['cwd'] = cwd

        from jupyter_client.parentpoller import ParentPollerWindows
        # Create a Win32 event for interrupting the kernel
        # and store it in an environment variable.
        interrupt_event = ParentPollerWindows.create_interrupt_event()
        env["JPY_INTERRUPT_EVENT"] = str(interrupt_event)
        # deprecated old env name:
        env["IPY_INTERRUPT_EVENT"] = env["JPY_INTERRUPT_EVENT"]

        # _winapi is the Python 3 home of these functions; _subprocess is
        # the Python 2 fallback.
        try:
            from _winapi import DuplicateHandle, GetCurrentProcess, \
                DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP
        except:
            from _subprocess import DuplicateHandle, GetCurrentProcess, \
                DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP
        # Launch the kernel process
        if independent:
            kwargs['creationflags'] = CREATE_NEW_PROCESS_GROUP
        else:
            # Hand the kernel an inheritable handle to us so it can poll
            # for our death (see JPY_PARENT_PID use by parent pollers).
            pid = GetCurrentProcess()
            handle = DuplicateHandle(pid, pid, pid, 0,
                                     True,  # Inheritable by new processes.
                                     DUPLICATE_SAME_ACCESS)
            env['JPY_PARENT_PID'] = str(int(handle))

        proc = Popen(cmd, **kwargs)

        # Attach the interrupt event to the Popen object so it can be used later.
        proc.win32_interrupt_event = interrupt_event

    else:
        if independent:
            # New session so the kernel is detached from our process group.
            kwargs['preexec_fn'] = lambda: os.setsid()
        else:
            env['JPY_PARENT_PID'] = str(os.getpid())

        proc = Popen(cmd, **kwargs)

    # Clean up pipes created to work around Popen bug.
    if redirect_in:
        if stdin is None:
            proc.stdin.close()

    return proc
221
# Public API of this module (controls ``from ... import *``).
__all__ = [
    'swallow_argv',
    'make_ipkernel_cmd',
    'launch_kernel',
]
@@ -0,0 +1,442 b''
1 """Base class to manage a running kernel"""
2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
6 from __future__ import absolute_import
7
8 from contextlib import contextmanager
9 import os
10 import re
11 import signal
12 import sys
13 import time
14 import warnings
15 try:
16 from queue import Empty # Py 3
17 except ImportError:
18 from Queue import Empty # Py 2
19
20 import zmq
21
22 from IPython.utils.importstring import import_item
23 from IPython.utils.localinterfaces import is_local_ip, local_ips
24 from IPython.utils.path import get_ipython_dir
25 from IPython.utils.traitlets import (
26 Any, Instance, Unicode, List, Bool, Type, DottedObjectName
27 )
28 from jupyter_client import (
29 launch_kernel,
30 kernelspec,
31 )
32 from .connect import ConnectionFileMixin
33 from .session import Session
34 from .managerabc import (
35 KernelManagerABC
36 )
37
38
class KernelManager(ConnectionFileMixin):
    """Manages a single kernel in a subprocess on this host.

    This version starts kernels with Popen.
    """

    # The PyZMQ Context to use for communication with the kernel.
    context = Instance(zmq.Context)
    def _context_default(self):
        # Share the process-global Context rather than creating a private one.
        return zmq.Context.instance()

    # the class to create with our `client` method
    client_class = DottedObjectName('jupyter_client.blocking.BlockingKernelClient')
    client_factory = Type(allow_none=True)
    def _client_class_changed(self, name, old, new):
        # Keep the imported factory in sync with the dotted name.
        self.client_factory = import_item(str(new))

    # The kernel process with which the KernelManager is communicating.
    # generally a Popen instance
    kernel = Any()

    # Used to resolve kernel_name -> kernel_spec (see defaults below).
    kernel_spec_manager = Instance(kernelspec.KernelSpecManager)

    def _kernel_spec_manager_default(self):
        return kernelspec.KernelSpecManager(ipython_dir=self.ipython_dir)

    kernel_name = Unicode(kernelspec.NATIVE_KERNEL_NAME)

    kernel_spec = Instance(kernelspec.KernelSpec)

    def _kernel_spec_default(self):
        return self.kernel_spec_manager.get_kernel_spec(self.kernel_name)

    def _kernel_name_changed(self, name, old, new):
        if new == 'python':
            # 'python' is an alias for the native kernel; reassigning the
            # trait re-triggers this handler with the resolved name.
            self.kernel_name = kernelspec.NATIVE_KERNEL_NAME
            # This triggered another run of this function, so we can exit now
            return
        self.kernel_spec = self.kernel_spec_manager.get_kernel_spec(new)
        self.ipython_kernel = new in {'python', 'python2', 'python3'}

    kernel_cmd = List(Unicode, config=True,
        help="""DEPRECATED: Use kernel_name instead.

        The Popen Command to launch the kernel.
        Override this if you have a custom kernel.
        If kernel_cmd is specified in a configuration file,
        IPython does not pass any arguments to the kernel,
        because it cannot make any assumptions about the
        arguments that the kernel understands. In particular,
        this means that the kernel does not receive the
        option --debug if it given on the IPython command line.
        """
    )

    def _kernel_cmd_changed(self, name, old, new):
        warnings.warn("Setting kernel_cmd is deprecated, use kernel_spec to "
            "start different kernels.")
        self.ipython_kernel = False

    # Whether the managed kernel is an IPython kernel; enables IPython-only
    # special-casing (e.g. extra_arguments in MultiKernelManager).
    ipython_kernel = Bool(True)

    ipython_dir = Unicode()
    def _ipython_dir_default(self):
        return get_ipython_dir()

    # Protected traits
    _launch_args = Any()      # kwargs of the last start_kernel, for restart
    _control_socket = Any()   # lazily-created socket for shutdown requests

    _restarter = Any()        # set by restarter-capable subclasses, else None

    autorestart = Bool(False, config=True,
        help="""Should we autorestart the kernel if it dies."""
    )

    def __del__(self):
        # Best-effort cleanup of the control socket and connection file.
        self._close_control_socket()
        self.cleanup_connection_file()

    #--------------------------------------------------------------------------
    # Kernel restarter
    #--------------------------------------------------------------------------

    def start_restarter(self):
        # No-op here; overridden by subclasses that support autorestart.
        pass

    def stop_restarter(self):
        # No-op here; overridden by subclasses that support autorestart.
        pass

    def add_restart_callback(self, callback, event='restart'):
        """register a callback to be called when a kernel is restarted"""
        if self._restarter is None:
            return
        self._restarter.add_callback(callback, event)

    def remove_restart_callback(self, callback, event='restart'):
        """unregister a callback to be called when a kernel is restarted"""
        if self._restarter is None:
            return
        self._restarter.remove_callback(callback, event)

    #--------------------------------------------------------------------------
    # create a Client connected to our Kernel
    #--------------------------------------------------------------------------

    def client(self, **kwargs):
        """Create a client configured to connect to our kernel"""
        if self.client_factory is None:
            self.client_factory = import_item(self.client_class)

        kw = {}
        kw.update(self.get_connection_info())
        kw.update(dict(
            connection_file=self.connection_file,
            session=self.session,
            parent=self,
        ))

        # add kwargs last, for manual overrides
        kw.update(kwargs)
        return self.client_factory(**kw)

    #--------------------------------------------------------------------------
    # Kernel management
    #--------------------------------------------------------------------------

    def format_kernel_cmd(self, extra_arguments=None):
        """replace templated args (e.g. {connection_file})"""
        extra_arguments = extra_arguments or []
        if self.kernel_cmd:
            cmd = self.kernel_cmd + extra_arguments
        else:
            cmd = self.kernel_spec.argv + extra_arguments

        # NOTE(review): assumes start_kernel() already ran -- _launch_args
        # is None before that, and dict.update(None) would raise.
        ns = dict(connection_file=self.connection_file)
        ns.update(self._launch_args)

        # Substitute {placeholders} found in ns; unknown names are left as-is.
        pat = re.compile(r'\{([A-Za-z0-9_]+)\}')
        def from_ns(match):
            """Get the key out of ns if it's there, otherwise no change."""
            return ns.get(match.group(1), match.group())

        return [ pat.sub(from_ns, arg) for arg in cmd ]

    def _launch_kernel(self, kernel_cmd, **kw):
        """actually launch the kernel

        override in a subclass to launch kernel subprocesses differently
        """
        return launch_kernel(kernel_cmd, **kw)

    # Control socket used for polite kernel shutdown

    def _connect_control_socket(self):
        if self._control_socket is None:
            self._control_socket = self.connect_control()
            # short linger so pending shutdown messages don't block close
            self._control_socket.linger = 100

    def _close_control_socket(self):
        if self._control_socket is None:
            return
        self._control_socket.close()
        self._control_socket = None

    def start_kernel(self, **kw):
        """Starts a kernel on this host in a separate process.

        If random ports (port=0) are being used, this method must be called
        before the channels are created.

        Parameters
        ----------
        **kw : optional
             keyword arguments that are passed down to build the kernel_cmd
             and launching the kernel (e.g. Popen kwargs).
        """
        if self.transport == 'tcp' and not is_local_ip(self.ip):
            raise RuntimeError("Can only launch a kernel on a local interface. "
                               "Make sure that the '*_address' attributes are "
                               "configured properly. "
                               "Currently valid addresses are: %s" % local_ips()
                               )

        # write connection file / get default ports
        self.write_connection_file()

        # save kwargs for use in restart
        self._launch_args = kw.copy()
        # build the Popen cmd
        extra_arguments = kw.pop('extra_arguments', [])
        kernel_cmd = self.format_kernel_cmd(extra_arguments=extra_arguments)
        if self.kernel_cmd:
            # If kernel_cmd has been set manually, don't refer to a kernel spec
            env = os.environ
        else:
            # Environment variables from kernel spec are added to os.environ
            env = os.environ.copy()
            env.update(self.kernel_spec.env or {})
        # launch the kernel subprocess
        self.kernel = self._launch_kernel(kernel_cmd, env=env,
                                          **kw)
        self.start_restarter()
        self._connect_control_socket()

    def request_shutdown(self, restart=False):
        """Send a shutdown request via control channel

        On Windows, this just kills kernels instead, because the shutdown
        messages don't work.

        NOTE(review): the Windows claim above is not reflected in this body --
        the message is sent unconditionally; confirm where the platform
        special-case actually lives.
        """
        content = dict(restart=restart)
        msg = self.session.msg("shutdown_request", content=content)
        self.session.send(self._control_socket, msg)

    def finish_shutdown(self, waittime=1, pollinterval=0.1):
        """Wait for kernel shutdown, then kill process if it doesn't shutdown.

        This does not send shutdown requests - use :meth:`request_shutdown`
        first.
        """
        # Poll up to waittime seconds, checking every pollinterval seconds.
        for i in range(int(waittime/pollinterval)):
            if self.is_alive():
                time.sleep(pollinterval)
            else:
                break
        else:
            # OK, we've waited long enough.
            if self.has_kernel:
                self._kill_kernel()

    def cleanup(self, connection_file=True):
        """Clean up resources when the kernel is shut down"""
        if connection_file:
            self.cleanup_connection_file()

        self.cleanup_ipc_files()
        self._close_control_socket()

    def shutdown_kernel(self, now=False, restart=False):
        """Attempts to stop the kernel process cleanly.

        This attempts to shutdown the kernels cleanly by:

        1. Sending it a shutdown message over the shell channel.
        2. If that fails, the kernel is shutdown forcibly by sending it
           a signal.

        Parameters
        ----------
        now : bool
            Should the kernel be forcibly killed *now*. This skips the
            first, nice shutdown attempt.
        restart: bool
            Will this kernel be restarted after it is shutdown. When this
            is True, connection files will not be cleaned up.
        """
        # Stop monitoring for restarting while we shutdown.
        self.stop_restarter()

        if now:
            self._kill_kernel()
        else:
            self.request_shutdown(restart=restart)
            # Don't send any additional kernel kill messages immediately, to give
            # the kernel a chance to properly execute shutdown actions. Wait for at
            # most 1s, checking every 0.1s.
            self.finish_shutdown()

        self.cleanup(connection_file=not restart)

    def restart_kernel(self, now=False, **kw):
        """Restarts a kernel with the arguments that were used to launch it.

        If the old kernel was launched with random ports, the same ports will be
        used for the new kernel. The same connection file is used again.

        Parameters
        ----------
        now : bool, optional
            If True, the kernel is forcefully restarted *immediately*, without
            having a chance to do any cleanup action. Otherwise the kernel is
            given 1s to clean up before a forceful restart is issued.

            In all cases the kernel is restarted, the only difference is whether
            it is given a chance to perform a clean shutdown or not.

        **kw : optional
            Any options specified here will overwrite those used to launch the
            kernel.
        """
        if self._launch_args is None:
            raise RuntimeError("Cannot restart the kernel. "
                               "No previous call to 'start_kernel'.")
        else:
            # Stop currently running kernel.
            self.shutdown_kernel(now=now, restart=True)

            # Start new kernel.
            self._launch_args.update(kw)
            self.start_kernel(**self._launch_args)

    @property
    def has_kernel(self):
        """Has a kernel been started that we are managing."""
        return self.kernel is not None

    def _kill_kernel(self):
        """Kill the running kernel.

        This is a private method, callers should use shutdown_kernel(now=True).
        """
        if self.has_kernel:

            # Signal the kernel to terminate (sends SIGKILL on Unix and calls
            # TerminateProcess() on Win32).
            try:
                self.kernel.kill()
            except OSError as e:
                # In Windows, we will get an Access Denied error if the process
                # has already terminated. Ignore it.
                if sys.platform == 'win32':
                    if e.winerror != 5:
                        raise
                # On Unix, we may get an ESRCH error if the process has already
                # terminated. Ignore it.
                else:
                    from errno import ESRCH
                    if e.errno != ESRCH:
                        raise

            # Block until the kernel terminates.
            self.kernel.wait()
            self.kernel = None
        else:
            raise RuntimeError("Cannot kill kernel. No kernel is running!")

    def interrupt_kernel(self):
        """Interrupts the kernel by sending it a signal.

        Unlike ``signal_kernel``, this operation is well supported on all
        platforms.
        """
        if self.has_kernel:
            if sys.platform == 'win32':
                # Windows has no SIGINT for subprocesses; use the shared
                # interrupt event created at launch time instead.
                from .parentpoller import ParentPollerWindows as Poller
                Poller.send_interrupt(self.kernel.win32_interrupt_event)
            else:
                self.kernel.send_signal(signal.SIGINT)
        else:
            raise RuntimeError("Cannot interrupt kernel. No kernel is running!")

    def signal_kernel(self, signum):
        """Sends a signal to the kernel.

        Note that since only SIGTERM is supported on Windows, this function is
        only useful on Unix systems.
        """
        if self.has_kernel:
            self.kernel.send_signal(signum)
        else:
            raise RuntimeError("Cannot signal kernel. No kernel is running!")

    def is_alive(self):
        """Is the kernel process still running?"""
        if self.has_kernel:
            # Popen.poll() returns None while the process is running.
            if self.kernel.poll() is None:
                return True
            else:
                return False
        else:
            # we don't have a kernel
            return False
412
413
# Register KernelManager as a virtual subclass of the KernelManagerABC
# interface (imported from .managerabc above).
KernelManagerABC.register(KernelManager)
415
416
def start_new_kernel(startup_timeout=60, kernel_name='python', **kwargs):
    """Start a new kernel, and return its Manager and Client.

    NOTE(review): ``startup_timeout`` is currently unused in this body --
    confirm whether it should be forwarded to ``wait_for_ready``.
    """
    manager = KernelManager(kernel_name=kernel_name)
    manager.start_kernel(**kwargs)
    client = manager.client()
    client.start_channels()
    client.wait_for_ready()
    return manager, client
426
@contextmanager
def run_kernel(**kwargs):
    """Context manager launching a kernel in a subprocess.

    The kernel is shut down when the context exits.

    Returns
    -------
    kernel_client: connected KernelClient instance
    """
    manager, client = start_new_kernel(**kwargs)
    try:
        yield client
    finally:
        # Tear down the channels first, then force-kill the process.
        client.stop_channels()
        manager.shutdown_kernel(now=True)
@@ -0,0 +1,53 b''
1 """Abstract base class for kernel managers."""
2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
6 import abc
7
8 from IPython.utils.py3compat import with_metaclass
9
10
class KernelManagerABC(with_metaclass(abc.ABCMeta, object)):
    """KernelManager ABC.

    The docstrings for this class can be found in the base implementation:

    `jupyter_client.kernelmanager.KernelManager`
    """

    # The kernel process object being managed (generally a Popen instance).
    @abc.abstractproperty
    def kernel(self):
        pass

    #--------------------------------------------------------------------------
    # Kernel management
    #--------------------------------------------------------------------------

    # Launch the kernel subprocess.
    @abc.abstractmethod
    def start_kernel(self, **kw):
        pass

    # Stop the kernel, politely or forcibly.
    @abc.abstractmethod
    def shutdown_kernel(self, now=False, restart=False):
        pass

    # Shut down and relaunch with the original arguments.
    @abc.abstractmethod
    def restart_kernel(self, now=False, **kw):
        pass

    # Whether a kernel process is currently being managed.
    @abc.abstractproperty
    def has_kernel(self):
        pass

    # Send an interrupt (SIGINT or platform equivalent) to the kernel.
    @abc.abstractmethod
    def interrupt_kernel(self):
        pass

    # Send an arbitrary signal to the kernel (Unix only in practice).
    @abc.abstractmethod
    def signal_kernel(self, signum):
        pass

    # Whether the kernel process is still running.
    @abc.abstractmethod
    def is_alive(self):
        pass
@@ -0,0 +1,319 b''
1 """A kernel manager for multiple kernels"""
2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
6 from __future__ import absolute_import
7
8 import os
9 import uuid
10
11 import zmq
12
13 from IPython.config.configurable import LoggingConfigurable
14 from IPython.utils.importstring import import_item
15 from IPython.utils.traitlets import (
16 Instance, Dict, List, Unicode, Any, DottedObjectName
17 )
18 from IPython.utils.py3compat import unicode_type
19
20 from .kernelspec import NATIVE_KERNEL_NAME
21
class DuplicateKernelError(Exception):
    """Raised by MultiKernelManager.start_kernel when the requested
    kernel_id is already in use."""
    pass
24
25
def kernel_method(f):
    """decorator for proxying MKM.method(kernel_id) to individual KMs by ID

    The decorated method looks up the KernelManager for ``kernel_id`` via
    ``self.get_kernel`` and calls the same-named method on it, returning
    that result.  The decorated body itself runs afterwards (e.g. for
    logging) and its own return value is discarded.
    """
    # Local import keeps this module's top-level import block unchanged.
    from functools import wraps

    # wraps preserves f.__name__/__doc__ on the proxy, so the proxied
    # methods keep their docstrings and introspect correctly.
    @wraps(f)
    def wrapped(self, kernel_id, *args, **kwargs):
        # get the kernel
        km = self.get_kernel(kernel_id)
        method = getattr(km, f.__name__)
        # call the kernel's method
        r = method(*args, **kwargs)
        # last thing, call anything defined in the actual class method
        # such as logging messages
        f(self, kernel_id, *args, **kwargs)
        # return the method result
        return r
    return wrapped
40
41
class MultiKernelManager(LoggingConfigurable):
    """A class for managing multiple kernels."""

    # Extra argv appended when launching IPython kernels specifically
    # (see the FIXME in start_kernel below).
    ipython_kernel_argv = List(Unicode)

    default_kernel_name = Unicode(NATIVE_KERNEL_NAME, config=True,
        help="The name of the default kernel to start"
    )

    kernel_manager_class = DottedObjectName(
        "jupyter_client.ioloop.IOLoopKernelManager", config=True,
        help="""The kernel manager class. This is configurable to allow
        subclassing of the KernelManager for customized behavior.
        """
    )
    def _kernel_manager_class_changed(self, name, old, new):
        # Keep the imported factory in sync with the dotted name.
        self.kernel_manager_factory = import_item(new)

    kernel_manager_factory = Any(help="this is kernel_manager_class after import")
    def _kernel_manager_factory_default(self):
        return import_item(self.kernel_manager_class)

    context = Instance('zmq.Context')
    def _context_default(self):
        return zmq.Context.instance()

    # Directory in which per-kernel connection files are written.
    connection_dir = Unicode('')

    # kernel_id (str) -> KernelManager instance
    _kernels = Dict()

    def list_kernel_ids(self):
        """Return a list of the kernel ids of the active kernels."""
        # Create a copy so we can iterate over kernels in operations
        # that delete keys.
        return list(self._kernels.keys())

    def __len__(self):
        """Return the number of running kernels."""
        return len(self.list_kernel_ids())

    def __contains__(self, kernel_id):
        return kernel_id in self._kernels

    def start_kernel(self, kernel_name=None, **kwargs):
        """Start a new kernel.

        The caller can pick a kernel_id by passing one in as a keyword arg,
        otherwise one will be picked using a uuid.

        To silence the kernel's stdout/stderr, call this using::

            km.start_kernel(stdout=PIPE, stderr=PIPE)

        """
        kernel_id = kwargs.pop('kernel_id', unicode_type(uuid.uuid4()))
        if kernel_id in self:
            raise DuplicateKernelError('Kernel already exists: %s' % kernel_id)

        if kernel_name is None:
            kernel_name = self.default_kernel_name
        # kernel_manager_factory is the constructor for the KernelManager
        # subclass we are using. It can be configured as any Configurable,
        # including things like its transport and ip.
        km = self.kernel_manager_factory(connection_file=os.path.join(
                    self.connection_dir, "kernel-%s.json" % kernel_id),
                    parent=self, autorestart=True, log=self.log, kernel_name=kernel_name,
        )
        # FIXME: remove special treatment of IPython kernels
        if km.ipython_kernel:
            kwargs.setdefault('extra_arguments', self.ipython_kernel_argv)
        km.start_kernel(**kwargs)
        self._kernels[kernel_id] = km
        return kernel_id

    @kernel_method
    def shutdown_kernel(self, kernel_id, now=False, restart=False):
        """Shutdown a kernel by its kernel uuid.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to shutdown.
        now : bool
            Should the kernel be shutdown forcibly using a signal.
        restart : bool
            Will the kernel be restarted?
        """
        # Runs after the proxied KernelManager.shutdown_kernel (see
        # kernel_method): log, then drop the manager from our mapping.
        self.log.info("Kernel shutdown: %s" % kernel_id)
        self.remove_kernel(kernel_id)

    @kernel_method
    def request_shutdown(self, kernel_id, restart=False):
        """Ask a kernel to shut down by its kernel uuid"""

    @kernel_method
    def finish_shutdown(self, kernel_id, waittime=1, pollinterval=0.1):
        """Wait for a kernel to finish shutting down, and kill it if it doesn't
        """
        self.log.info("Kernel shutdown: %s" % kernel_id)

    @kernel_method
    def cleanup(self, kernel_id, connection_file=True):
        """Clean up a kernel's resources"""

    def remove_kernel(self, kernel_id):
        """remove a kernel from our mapping.

        Mainly so that a kernel can be removed if it is already dead,
        without having to call shutdown_kernel.

        The kernel object is returned.
        """
        return self._kernels.pop(kernel_id)

    def shutdown_all(self, now=False):
        """Shutdown all kernels."""
        # NOTE(review): the ``now`` argument is currently ignored here --
        # shutdown is always requested politely first; confirm intent.
        kids = self.list_kernel_ids()
        # Request all shutdowns first so the kernels wind down in parallel,
        # then wait for each and clean up.
        for kid in kids:
            self.request_shutdown(kid)
        for kid in kids:
            self.finish_shutdown(kid)
            self.cleanup(kid)
            self.remove_kernel(kid)

    @kernel_method
    def interrupt_kernel(self, kernel_id):
        """Interrupt (SIGINT) the kernel by its uuid.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to interrupt.
        """
        self.log.info("Kernel interrupted: %s" % kernel_id)

    @kernel_method
    def signal_kernel(self, kernel_id, signum):
        """Sends a signal to the kernel by its uuid.

        Note that since only SIGTERM is supported on Windows, this function
        is only useful on Unix systems.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to signal.
        """
        self.log.info("Signaled Kernel %s with %s" % (kernel_id, signum))

    @kernel_method
    def restart_kernel(self, kernel_id, now=False):
        """Restart a kernel by its uuid, keeping the same ports.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to interrupt.
        """
        self.log.info("Kernel restarted: %s" % kernel_id)

    @kernel_method
    def is_alive(self, kernel_id):
        """Is the kernel alive.

        This calls KernelManager.is_alive() which calls Popen.poll on the
        actual kernel subprocess.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel.
        """

    def _check_kernel_id(self, kernel_id):
        """check that a kernel id is valid"""
        if kernel_id not in self:
            raise KeyError("Kernel with id not found: %s" % kernel_id)

    def get_kernel(self, kernel_id):
        """Get the single KernelManager object for a kernel by its uuid.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel.
        """
        self._check_kernel_id(kernel_id)
        return self._kernels[kernel_id]

    @kernel_method
    def add_restart_callback(self, kernel_id, callback, event='restart'):
        """add a callback for the KernelRestarter"""

    @kernel_method
    def remove_restart_callback(self, kernel_id, callback, event='restart'):
        """remove a callback for the KernelRestarter"""

    @kernel_method
    def get_connection_info(self, kernel_id):
        """Return a dictionary of connection data for a kernel.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel.

        Returns
        =======
        connection_dict : dict
            A dict of the information needed to connect to a kernel.
            This includes the ip address and the integer port
            numbers of the different channels (stdin_port, iopub_port,
            shell_port, hb_port).
        """

    @kernel_method
    def connect_iopub(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the iopub channel.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket

        Returns
        =======
        stream : zmq Socket or ZMQStream
        """

    @kernel_method
    def connect_shell(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the shell channel.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket

        Returns
        =======
        stream : zmq Socket or ZMQStream
        """

    @kernel_method
    def connect_stdin(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the stdin channel.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket

        Returns
        =======
        stream : zmq Socket or ZMQStream
        """

    @kernel_method
    def connect_hb(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the hb channel.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket

        Returns
        =======
        stream : zmq Socket or ZMQStream
        """
@@ -0,0 +1,111 b''
1 """A basic kernel monitor with autorestarting.
2
3 This watches a kernel's state using KernelManager.is_alive and auto
4 restarts the kernel if it dies.
5
6 It is an incomplete base class, and must be subclassed.
7 """
8
9 # Copyright (c) IPython Development Team.
10 # Distributed under the terms of the Modified BSD License.
11
12 from IPython.config.configurable import LoggingConfigurable
13 from IPython.utils.traitlets import (
14 Instance, Float, Dict, Bool, Integer,
15 )
16
17
class KernelRestarter(LoggingConfigurable):
    """Monitor and autorestart a kernel.

    Polls ``kernel_manager.is_alive()`` (the polling loop itself lives in a
    subclass via :meth:`start`/:meth:`stop`) and restarts the kernel when it
    dies, firing registered callbacks on 'restart' and 'dead' events.
    """

    kernel_manager = Instance('jupyter_client.KernelManager')

    debug = Bool(False, config=True,
        help="""Whether to include every poll event in debugging output.

        Has to be set explicitly, because there will be *a lot* of output.
        """
    )

    time_to_dead = Float(3.0, config=True,
        help="""Kernel heartbeat interval in seconds."""
    )

    restart_limit = Integer(5, config=True,
        help="""The number of consecutive autorestarts before the kernel is presumed dead."""
    )
    # True while a restart has been issued and not yet confirmed successful.
    _restarting = Bool(False)
    # Consecutive restarts attempted so far (reset when a poll finds the kernel alive).
    _restart_count = Integer(0)

    callbacks = Dict()
    def _callbacks_default(self):
        # one callback list per supported event
        return dict(restart=[], dead=[])

    def start(self):
        """Start the polling of the kernel."""
        raise NotImplementedError("Must be implemented in a subclass")

    def stop(self):
        """Stop the kernel polling."""
        raise NotImplementedError("Must be implemented in a subclass")

    def add_callback(self, f, event='restart'):
        """register a callback to fire on a particular event

        Possible values for event:

        'restart' (default): kernel has died, and will be restarted.
        'dead': restart has failed, kernel will be left dead.

        """
        self.callbacks[event].append(f)

    def remove_callback(self, f, event='restart'):
        """unregister a callback to fire on a particular event

        Possible values for event:

        'restart' (default): kernel has died, and will be restarted.
        'dead': restart has failed, kernel will be left dead.

        """
        try:
            self.callbacks[event].remove(f)
        except ValueError:
            # callback was never registered; removal is best-effort
            pass

    def _fire_callbacks(self, event):
        """fire our callbacks for a particular event"""
        for callback in self.callbacks[event]:
            try:
                callback()
            except Exception:
                # one failing callback must not prevent the others from firing
                self.log.error("KernelRestarter: %s callback %r failed", event, callback, exc_info=True)

    def poll(self):
        """Check the kernel once; restart it if dead, give up past restart_limit."""
        if self.debug:
            self.log.debug('Polling kernel...')
        if not self.kernel_manager.is_alive():
            if self._restarting:
                # previous restart evidently failed too
                self._restart_count += 1
            else:
                self._restart_count = 1

            if self._restart_count >= self.restart_limit:
                # too many consecutive failures: declare the kernel dead
                # use warning(): Logger.warn is a deprecated alias
                self.log.warning("KernelRestarter: restart failed")
                self._fire_callbacks('dead')
                self._restarting = False
                self._restart_count = 0
                self.stop()
            else:
                self.log.info('KernelRestarter: restarting kernel (%i/%i)',
                    self._restart_count,
                    self.restart_limit
                )
                self._fire_callbacks('restart')
                self.kernel_manager.restart_kernel(now=True)
                self._restarting = True
        else:
            if self._restarting:
                self.log.debug("KernelRestarter: restart apparently succeeded")
            self._restarting = False
@@ -0,0 +1,230 b''
1 """ Defines a KernelClient that provides thread-safe sockets with async callbacks on message replies.
2 """
3 from __future__ import absolute_import
4 import atexit
5 import errno
6 from threading import Thread
7 import time
8
9 import zmq
10 # import ZMQError in top-level namespace, to avoid ugly attribute-error messages
11 # during garbage collection of threads at exit:
12 from zmq import ZMQError
13 from zmq.eventloop import ioloop, zmqstream
14
15 # Local imports
16 from IPython.utils.traitlets import Type, Instance
17 from jupyter_client.channels import HBChannel
18 from jupyter_client import KernelClient
19 from jupyter_client.channels import HBChannel
20
class ThreadedZMQSocketChannel(object):
    """A ZMQ socket invoking a callback in the ioloop"""
    # All of these are set in __init__; declared here so instances always
    # have the attributes even before/after setup.
    session = None
    socket = None
    ioloop = None
    stream = None
    # optional hook run on every received message before call_handlers
    _inspect = None

    def __init__(self, socket, session, loop):
        """Create a channel.

        Parameters
        ----------
        socket : :class:`zmq.Socket`
            The ZMQ socket to use.
        session : :class:`session.Session`
            The session to use.
        loop
            A pyzmq ioloop to connect the socket to using a ZMQStream
        """
        super(ThreadedZMQSocketChannel, self).__init__()

        self.socket = socket
        self.session = session
        self.ioloop = loop

        # all recv/send traffic goes through the stream on the ioloop thread
        self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
        self.stream.on_recv(self._handle_recv)

    _is_alive = False
    def is_alive(self):
        """Return whether start() has been called without a matching stop()."""
        return self._is_alive

    def start(self):
        """Mark the channel as alive. No I/O is performed here."""
        self._is_alive = True

    def stop(self):
        """Mark the channel as stopped. The socket stays open until close()."""
        self._is_alive = False

    def close(self):
        """Close the underlying socket, ignoring errors (idempotent)."""
        if self.socket is not None:
            try:
                self.socket.close(linger=0)
            except Exception:
                pass
            self.socket = None

    def send(self, msg):
        """Queue a message to be sent from the IOLoop's thread.

        Parameters
        ----------
        msg : message to send

        This is threadsafe, as it uses IOLoop.add_callback to give the loop's
        thread control of the action.
        """
        def thread_send():
            self.session.send(self.stream, msg)
        self.ioloop.add_callback(thread_send)

    def _handle_recv(self, msg):
        """Callback for stream.on_recv.

        Unpacks message, and calls handlers with it.
        """
        ident,smsg = self.session.feed_identities(msg)
        msg = self.session.deserialize(smsg)
        # let client inspect messages
        if self._inspect:
            self._inspect(msg)
        self.call_handlers(msg)

    def call_handlers(self, msg):
        """This method is called in the ioloop thread when a message arrives.

        Subclasses should override this method to handle incoming messages.
        It is important to remember that this method is called in the thread
        so that some logic must be done to ensure that the application level
        handlers are called in the application thread.
        """
        pass

    def process_events(self):
        """Subclasses should override this with a method
        processing any pending GUI events.
        """
        pass


    def flush(self, timeout=1.0):
        """Immediately processes all pending messages on this channel.

        This is only used for the IOPub channel.

        Callers should use this method to ensure that :meth:`call_handlers`
        has been called for all messages that have been received on the
        0MQ SUB socket of this channel.

        This method is thread safe.

        Parameters
        ----------
        timeout : float, optional
            The maximum amount of time to spend flushing, in seconds. The
            default is one second.
        """
        # We do the IOLoop callback process twice to ensure that the IOLoop
        # gets to perform at least one full poll.
        stop_time = time.time() + timeout
        for i in range(2):
            # _flushed is the handshake flag set by _flush on the loop thread
            self._flushed = False
            self.ioloop.add_callback(self._flush)
            while not self._flushed and time.time() < stop_time:
                time.sleep(0.01)

    def _flush(self):
        """Callback for :method:`self.flush`."""
        # runs on the ioloop thread; drains the stream then signals flush()
        self.stream.flush()
        self._flushed = True
141
142
class IOLoopThread(Thread):
    """Run a pyzmq ioloop in a thread to send and receive messages
    """
    # Must exist as a class attribute: run()'s exception handler reads it, and
    # the atexit hook is the only code that assigns it on the instance.
    # (Sibling HBChannel declares the same flag the same way.)
    _exiting = False

    def __init__(self, loop):
        """Create the thread.

        Parameters
        ----------
        loop : pyzmq IOLoop or None
            The loop to run; a fresh IOLoop is created when falsy.
        """
        super(IOLoopThread, self).__init__()
        self.daemon = True
        atexit.register(self._notice_exit)
        self.ioloop = loop or ioloop.IOLoop()

    def _notice_exit(self):
        # called at interpreter exit so run() can distinguish shutdown noise
        self._exiting = True

    def run(self):
        """Run my loop, ignoring EINTR events in the poller"""
        while True:
            try:
                self.ioloop.start()
            except ZMQError as e:
                if e.errno == errno.EINTR:
                    continue
                else:
                    raise
            except Exception:
                # errors during interpreter teardown are expected; otherwise re-raise
                if self._exiting:
                    break
                else:
                    raise
            else:
                break

    def stop(self):
        """Stop the channel's event loop and join its thread.

        This calls :meth:`~threading.Thread.join` and returns when the thread
        terminates. :class:`RuntimeError` will be raised if
        :meth:`~threading.Thread.start` is called again.
        """
        if self.ioloop is not None:
            self.ioloop.stop()
        self.join()
        self.close()

    def close(self):
        """Close the loop and all of its file descriptors (best-effort)."""
        if self.ioloop is not None:
            try:
                self.ioloop.close(all_fds=True)
            except Exception:
                pass
192
class ThreadedKernelClient(KernelClient):
    """ A KernelClient that provides thread-safe sockets with async callbacks on message replies.
    """

    # Lazily-created pyzmq IOLoop shared by all of this client's channels.
    _ioloop = None
    @property
    def ioloop(self):
        if self._ioloop is None:
            self._ioloop = ioloop.IOLoop()
        return self._ioloop

    ioloop_thread = Instance(IOLoopThread, allow_none=True)

    def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
        """Start the selected channels, running the IOLoop in its own thread.

        Each boolean parameter selects whether that channel is started.
        """
        if shell:
            # intercept messages on the shell channel to catch kernel_info_reply
            self.shell_channel._inspect = self._check_kernel_info_reply

        self.ioloop_thread = IOLoopThread(self.ioloop)
        self.ioloop_thread.start()

        super(ThreadedKernelClient, self).start_channels(shell, iopub, stdin, hb)

    def _check_kernel_info_reply(self, msg):
        """This is run in the ioloop thread when the kernel info reply is received
        """
        if msg['msg_type'] == 'kernel_info_reply':
            self._handle_kernel_info_reply(msg)
            # one-shot hook: stop inspecting after the reply is seen
            self.shell_channel._inspect = None

    def stop_channels(self):
        """Stop all channels, then the IOLoop thread if still running."""
        super(ThreadedKernelClient, self).stop_channels()
        if self.ioloop_thread.is_alive():
            self.ioloop_thread.stop()

    # channel classes the KernelClient base uses to construct each channel
    iopub_channel_class = Type(ThreadedZMQSocketChannel)
    shell_channel_class = Type(ThreadedZMQSocketChannel)
    stdin_channel_class = Type(ThreadedZMQSocketChannel)
    hb_channel_class = Type(HBChannel)
@@ -25,7 +25,8 b' from IPython.utils.traitlets import ('
25 Dict, List, Unicode, CUnicode, CBool, Any
25 Dict, List, Unicode, CUnicode, CBool, Any
26 )
26 )
27 from IPython.kernel.zmq.session import Session
27 from IPython.kernel.zmq.session import Session
28 from IPython.kernel.connect import ConnectionFileMixin
28 from IPython.kernel import connect
29 ConnectionFileMixin = connect.ConnectionFileMixin
29
30
30 from IPython.utils.localinterfaces import localhost
31 from IPython.utils.localinterfaces import localhost
31
32
@@ -1,11 +1,31 b''
1 """IPython kernels and associated utilities"""
1 """
2 Shim to maintain backwards compatibility with old IPython.kernel imports.
3 """
4 # Copyright (c) IPython Development Team.
5 # Distributed under the terms of the Modified BSD License.
2
6
3 # just for friendlier zmq version check
7 import sys
4 from . import zmq
8 from warnings import warn
5
9
6 from .connect import *
10 warn("The `IPython.kernel` package has been deprecated. "
7 from .launcher import *
11 "You should import from ipython_kernel or jupyter_client instead.")
8 from .client import KernelClient
12
9 from .manager import KernelManager, run_kernel
13
10 from .blocking import BlockingKernelClient
14 from IPython.utils.shimmodule import ShimModule
11 from .multikernelmanager import MultiKernelManager
15
16 # zmq subdir is gone
17 sys.modules['IPython.kernel.zmq.session'] = ShimModule('session', mirror='jupyter_client.session')
18 sys.modules['IPython.kernel.zmq'] = ShimModule('zmq', mirror='ipython_kernel')
19
20 for pkg in ('comm', 'inprocess'):
21 sys.modules['IPython.kernel.%s' % pkg] = ShimModule(pkg, mirror='ipython_kernel.%s' % pkg)
22
23 for pkg in ('ioloop', 'blocking'):
24 sys.modules['IPython.kernel.%s' % pkg] = ShimModule(pkg, mirror='jupyter_client.%s' % pkg)
25
26 # required for `from IPython.kernel import PKG`
27 from ipython_kernel import comm, inprocess
28 from jupyter_client import ioloop, blocking
29 # public API
30 from ipython_kernel.connect import *
31 from jupyter_client import *
@@ -1,3 +1,3 b''
1 if __name__ == '__main__':
1 if __name__ == '__main__':
2 from IPython.kernel.zmq import kernelapp as app
2 from ipython_kernel.zmq import kernelapp as app
3 app.launch_new_instance()
3 app.launch_new_instance()
@@ -1,374 +1,1 b''
1 """Adapters for IPython msg spec versions."""
1 from jupyter_client.adapter import *
2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
6 import json
7
8 from IPython.core.release import kernel_protocol_version_info
9 from IPython.utils.tokenutil import token_at_cursor
10
11
def code_to_line(code, cursor_pos):
    """Turn a multiline code block and cursor position into a single line
    and new cursor position.

    For adapting ``complete_`` and ``object_info_request``.
    """
    if not code:
        return "", 0
    # walk the lines (keeping line endings) until the cursor falls inside one
    for current in code.splitlines(True):
        if cursor_pos <= len(current):
            break
        cursor_pos -= len(current)
    return current, cursor_pos
27
28
class Adapter(object):
    """Base class for adapting messages

    Override message_type(msg) methods to create adapters.
    """

    # msg_type renames applied by update_msg_type (old name -> new name)
    msg_type_map = {}

    def update_header(self, msg):
        """Hook for header fixes. Default: no-op."""
        return msg

    def update_metadata(self, msg):
        """Hook for metadata fixes. Default: no-op."""
        return msg

    def update_msg_type(self, msg):
        """Rename the message type in place, per msg_type_map."""
        header = msg['header']
        original_type = header['msg_type']
        renamed = self.msg_type_map.get(original_type)
        if renamed is not None:
            msg['msg_type'] = header['msg_type'] = renamed
        return msg

    def handle_reply_status_error(self, msg):
        """This will be called *instead of* the regular handler

        on any reply with status != ok
        """
        return msg

    def __call__(self, msg):
        msg = self.update_header(msg)
        msg = self.update_metadata(msg)
        msg = self.update_msg_type(msg)
        msg_type = msg['header']['msg_type']

        # dispatch to a method named after the (possibly renamed) msg_type
        handler = getattr(self, msg_type, None)
        if handler is None:
            return msg

        # handle status=error replies separately (no change, at present)
        if msg['content'].get('status', None) in {'error', 'aborted'}:
            return self.handle_reply_status_error(msg)
        return handler(msg)
71
72 def _version_str_to_list(version):
73 """convert a version string to a list of ints
74
75 non-int segments are excluded
76 """
77 v = []
78 for part in version.split('.'):
79 try:
80 v.append(int(part))
81 except ValueError:
82 pass
83 return v
84
class V5toV4(Adapter):
    """Adapt msg protocol v5 to v4"""

    version = '4.1'

    # v5 -> v4 message-type renames, applied by Adapter.update_msg_type
    msg_type_map = {
        'execute_result' : 'pyout',
        'execute_input' : 'pyin',
        'error' : 'pyerr',
        'inspect_request' : 'object_info_request',
        'inspect_reply' : 'object_info_reply',
    }

    def update_header(self, msg):
        # v4 headers did not carry a 'version' key
        msg['header'].pop('version', None)
        return msg

    # shell channel

    def kernel_info_reply(self, msg):
        """Rebuild kernel_info_reply content in the v4 layout."""
        v4c = {}
        content = msg['content']
        # v4 expressed versions as lists of ints rather than strings
        for key in ('language_version', 'protocol_version'):
            if key in content:
                v4c[key] = _version_str_to_list(content[key])
        if content.get('implementation', '') == 'ipython' \
            and 'implementation_version' in content:
            v4c['ipython_version'] = _version_str_to_list(content['implementation_version'])
        language_info = content.get('language_info', {})
        language = language_info.get('name', '')
        v4c.setdefault('language', language)
        if 'version' in language_info:
            v4c.setdefault('language_version', _version_str_to_list(language_info['version']))
        msg['content'] = v4c
        return msg

    def execute_request(self, msg):
        """v4 execute_request expected a user_variables list."""
        content = msg['content']
        content.setdefault('user_variables', [])
        return msg

    def execute_reply(self, msg):
        """v4 execute_reply carried a user_variables dict."""
        content = msg['content']
        content.setdefault('user_variables', {})
        # TODO: handle payloads
        return msg

    def complete_request(self, msg):
        """Convert v5 (code, cursor_pos) into the v4 line-based request."""
        content = msg['content']
        code = content['code']
        cursor_pos = content['cursor_pos']
        line, cursor_pos = code_to_line(code, cursor_pos)

        new_content = msg['content'] = {}
        new_content['text'] = ''
        new_content['line'] = line
        new_content['block'] = None
        new_content['cursor_pos'] = cursor_pos
        return msg

    def complete_reply(self, msg):
        """Derive v4 matched_text from the v5 cursor_start/cursor_end span."""
        content = msg['content']
        cursor_start = content.pop('cursor_start')
        cursor_end = content.pop('cursor_end')
        match_len = cursor_end - cursor_start
        # NOTE(review): assumes 'matches' is non-empty — confirm callers
        content['matched_text'] = content['matches'][0][:match_len]
        content.pop('metadata', None)
        return msg

    def object_info_request(self, msg):
        """Convert inspect_request into a v4 object_info_request on a token."""
        content = msg['content']
        code = content['code']
        cursor_pos = content['cursor_pos']
        line, _ = code_to_line(code, cursor_pos)

        new_content = msg['content'] = {}
        new_content['oname'] = token_at_cursor(code, cursor_pos)
        new_content['detail_level'] = content['detail_level']
        return msg

    def object_info_reply(self, msg):
        """inspect_reply can't be easily backward compatible"""
        msg['content'] = {'found' : False, 'oname' : 'unknown'}
        return msg

    # iopub channel

    def stream(self, msg):
        # v4 used 'data' where v5 uses 'text'
        content = msg['content']
        content['data'] = content.pop('text')
        return msg

    def display_data(self, msg):
        content = msg['content']
        content.setdefault("source", "display")
        data = content['data']
        # v4 carried JSON mime data as a serialized string
        if 'application/json' in data:
            try:
                data['application/json'] = json.dumps(data['application/json'])
            except Exception:
                # warn?
                pass
        return msg

    # stdin channel

    def input_request(self, msg):
        # v4 input_request had no password field
        msg['content'].pop('password', None)
        return msg
class V4toV5(Adapter):
    """Convert msg spec V4 to V5"""
    version = '5.0'

    # invert message renames above
    msg_type_map = {v:k for k,v in V5toV4.msg_type_map.items()}

    def update_header(self, msg):
        # v5 headers always declare the protocol version
        msg['header']['version'] = self.version
        return msg

    # shell channel

    def kernel_info_reply(self, msg):
        """Rebuild kernel_info_reply content in the v5 layout."""
        content = msg['content']
        # v4 expressed versions as int lists; v5 uses dotted strings
        for key in ('protocol_version', 'ipython_version'):
            if key in content:
                content[key] = '.'.join(map(str, content[key]))

        content.setdefault('protocol_version', '4.1')

        if content['language'].startswith('python') and 'ipython_version' in content:
            content['implementation'] = 'ipython'
            content['implementation_version'] = content.pop('ipython_version')

        # fold flat 'language'/'language_version' into the v5 language_info dict
        language = content.pop('language')
        language_info = content.setdefault('language_info', {})
        language_info.setdefault('name', language)
        if 'language_version' in content:
            language_version = '.'.join(map(str, content.pop('language_version')))
            language_info.setdefault('version', language_version)

        content['banner'] = ''
        return msg

    def execute_request(self, msg):
        """Fold the v4 user_variables list into v5 user_expressions."""
        content = msg['content']
        user_variables = content.pop('user_variables', [])
        user_expressions = content.setdefault('user_expressions', {})
        for v in user_variables:
            user_expressions[v] = v
        return msg

    def execute_reply(self, msg):
        """Merge v4 user_variables into user_expressions; upgrade payloads."""
        content = msg['content']
        user_expressions = content.setdefault('user_expressions', {})
        user_variables = content.pop('user_variables', {})
        if user_variables:
            user_expressions.update(user_variables)

        # Pager payloads became a mime bundle
        for payload in content.get('payload', []):
            if payload.get('source', None) == 'page' and ('text' in payload):
                if 'data' not in payload:
                    payload['data'] = {}
                payload['data']['text/plain'] = payload.pop('text')

        return msg

    def complete_request(self, msg):
        """Convert the v4 line-based request to v5 (code, cursor_pos)."""
        old_content = msg['content']

        new_content = msg['content'] = {}
        new_content['code'] = old_content['line']
        new_content['cursor_pos'] = old_content['cursor_pos']
        return msg

    def complete_reply(self, msg):
        # complete_reply needs more context than we have to get cursor_start and end.
        # use special end=null to indicate current cursor position and negative offset
        # for start relative to the cursor.
        # start=None indicates that start == end (accounts for no -0).
        content = msg['content']
        new_content = msg['content'] = {'status' : 'ok'}
        new_content['matches'] = content['matches']
        if content['matched_text']:
            new_content['cursor_start'] = -len(content['matched_text'])
        else:
            # no -0, use None to indicate that start == end
            new_content['cursor_start'] = None
        new_content['cursor_end'] = None
        new_content['metadata'] = {}
        return msg

    def inspect_request(self, msg):
        """Convert an object_info_request on a name into v5 inspect_request."""
        content = msg['content']
        name = content['oname']

        new_content = msg['content'] = {}
        new_content['code'] = name
        # place the cursor at the end of the name being inspected
        new_content['cursor_pos'] = len(name)
        new_content['detail_level'] = content['detail_level']
        return msg

    def inspect_reply(self, msg):
        """inspect_reply can't be easily backward compatible"""
        content = msg['content']
        new_content = msg['content'] = {'status' : 'ok'}
        found = new_content['found'] = content['found']
        new_content['name'] = content['oname']
        new_content['data'] = data = {}
        new_content['metadata'] = {}
        if found:
            # pick the most specific definition and docstring available
            lines = []
            for key in ('call_def', 'init_definition', 'definition'):
                if content.get(key, False):
                    lines.append(content[key])
                    break
            for key in ('call_docstring', 'init_docstring', 'docstring'):
                if content.get(key, False):
                    lines.append(content[key])
                    break
            if not lines:
                lines.append("<empty docstring>")
            data['text/plain'] = '\n'.join(lines)
        return msg

    # iopub channel

    def stream(self, msg):
        # v5 uses 'text' where v4 used 'data'
        content = msg['content']
        content['text'] = content.pop('data')
        return msg

    def display_data(self, msg):
        content = msg['content']
        content.pop("source", None)
        data = content['data']
        # v5 carries JSON mime data as parsed objects, not strings
        if 'application/json' in data:
            try:
                data['application/json'] = json.loads(data['application/json'])
            except Exception:
                # warn?
                pass
        return msg

    # stdin channel

    def input_request(self, msg):
        # v5 input_request declares whether input should be hidden
        msg['content'].setdefault('password', False)
        return msg
337
338
339
def adapt(msg, to_version=kernel_protocol_version_info[0]):
    """Adapt a single message to a target version

    Parameters
    ----------

    msg : dict
        An IPython message.
    to_version : int, optional
        The target major version.
        If unspecified, adapt to the current version for IPython.

    Returns
    -------

    msg : dict
        An IPython message appropriate in the new version.
    """
    header = msg['header']
    if 'version' not in header:
        # assume last version before adding the key to the header
        from_version = 4
    else:
        # major component of e.g. '5.0'
        from_version = int(header['version'].split('.')[0])
    adapter = adapters.get((from_version, to_version))
    return msg if adapter is None else adapter(msg)
368
369
# one adapter per major version from,to
# keys are (from_major, to_major) protocol version pairs
adapters = {
    (5,4) : V5toV4(),
    (4,5) : V4toV5(),
}
@@ -1,203 +1,1 b''
1 """Base classes to manage a Client's interaction with a running kernel"""
1 from jupyter_client.channels import *
2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
6 from __future__ import absolute_import
7
8 import atexit
9 import errno
10 from threading import Thread
11 import time
12
13 import zmq
14 # import ZMQError in top-level namespace, to avoid ugly attribute-error messages
15 # during garbage collection of threads at exit:
16 from zmq import ZMQError
17
18 from IPython.core.release import kernel_protocol_version_info
19
20 from .channelsabc import HBChannelABC
21
22 #-----------------------------------------------------------------------------
23 # Constants and exceptions
24 #-----------------------------------------------------------------------------
25
26 major_protocol_version = kernel_protocol_version_info[0]
27
28 class InvalidPortNumber(Exception):
29 pass
30
class HBChannel(Thread):
    """The heartbeat channel which monitors the kernel heartbeat.

    Note that the heartbeat channel is paused by default. As long as you start
    this channel, the kernel manager will ensure that it is paused and un-paused
    as appropriate.
    """
    # zmq.Context used to create the REQ socket
    context = None
    session = None
    socket = None
    # zmq url of the kernel's heartbeat port
    address = None
    # set by the atexit hook so run() can ignore errors during shutdown
    _exiting = False

    # seconds between pings; also the timeout for a reply
    time_to_dead = 1.
    poller = None
    _running = None
    _pause = None
    _beating = None

    def __init__(self, context=None, session=None, address=None):
        """Create the heartbeat monitor thread.

        Parameters
        ----------
        context : :class:`zmq.Context`
            The ZMQ context to use.
        session : :class:`session.Session`
            The session to use.
        address : zmq url
            Standard (ip, port) tuple that the kernel is listening on.
        """
        super(HBChannel, self).__init__()
        self.daemon = True

        self.context = context
        self.session = session
        if isinstance(address, tuple):
            if address[1] == 0:
                message = 'The port number for a channel cannot be 0.'
                raise InvalidPortNumber(message)
            address = "tcp://%s:%i" % address
        self.address = address
        atexit.register(self._notice_exit)

        # paused until the kernel manager unpauses us
        self._running = False
        self._pause = True
        self.poller = zmq.Poller()

    def _notice_exit(self):
        self._exiting = True

    def _create_socket(self):
        """(Re)create the REQ socket and register it with the poller."""
        if self.socket is not None:
            # close previous socket, before opening a new one
            self.poller.unregister(self.socket)
            self.socket.close()
        self.socket = self.context.socket(zmq.REQ)
        self.socket.linger = 1000
        self.socket.connect(self.address)

        self.poller.register(self.socket, zmq.POLLIN)

    def _poll(self, start_time):
        """poll for heartbeat replies until we reach self.time_to_dead.

        Ignores interrupts, and returns the result of poll(), which
        will be an empty list if no messages arrived before the timeout,
        or the event tuple if there is a message to receive.
        """

        until_dead = self.time_to_dead - (time.time() - start_time)
        # ensure poll at least once
        until_dead = max(until_dead, 1e-3)
        events = []
        while True:
            try:
                events = self.poller.poll(1000 * until_dead)
            except ZMQError as e:
                if e.errno == errno.EINTR:
                    # ignore interrupts during heartbeat
                    # this may never actually happen
                    until_dead = self.time_to_dead - (time.time() - start_time)
                    until_dead = max(until_dead, 1e-3)
                    pass
                else:
                    raise
            except Exception:
                if self._exiting:
                    break
                else:
                    raise
            else:
                break
        return events

    def run(self):
        """The thread's main activity.  Call start() instead."""
        self._create_socket()
        self._running = True
        self._beating = True

        while self._running:
            if self._pause:
                # just sleep, and skip the rest of the loop
                time.sleep(self.time_to_dead)
                continue

            since_last_heartbeat = 0.0
            # io.rprint('Ping from HB channel') # dbg
            # no need to catch EFSM here, because the previous event was
            # either a recv or connect, which cannot be followed by EFSM
            self.socket.send(b'ping')
            request_time = time.time()
            ready = self._poll(request_time)
            if ready:
                self._beating = True
                # the poll above guarantees we have something to recv
                self.socket.recv()
                # sleep the remainder of the cycle
                remainder = self.time_to_dead - (time.time() - request_time)
                if remainder > 0:
                    time.sleep(remainder)
                continue
            else:
                # nothing was received within the time limit, signal heart failure
                self._beating = False
                since_last_heartbeat = time.time() - request_time
                self.call_handlers(since_last_heartbeat)
                # and close/reopen the socket, because the REQ/REP cycle has been broken
                self._create_socket()
                continue

    def pause(self):
        """Pause the heartbeat."""
        self._pause = True

    def unpause(self):
        """Unpause the heartbeat."""
        self._pause = False

    def is_beating(self):
        """Is the heartbeat running and responsive (and not paused)."""
        if self.is_alive() and not self._pause and self._beating:
            return True
        else:
            return False

    def stop(self):
        """Stop the channel's event loop and join its thread."""
        self._running = False
        self.join()
        self.close()

    def close(self):
        """Close the socket, ignoring errors (idempotent)."""
        if self.socket is not None:
            try:
                self.socket.close(linger=0)
            except Exception:
                pass
            self.socket = None

    def call_handlers(self, since_last_heartbeat):
        """This method is called in the ioloop thread when a message arrives.

        Subclasses should override this method to handle incoming messages.
        It is important to remember that this method is called in the thread
        so that some logic must be done to ensure that the application level
        handlers are called in the application thread.
        """
        pass


# declare HBChannel as an implementation of the HBChannelABC interface
HBChannelABC.register(HBChannel)
@@ -1,49 +1,1 b''
1 """Abstract base classes for kernel client channels"""
1 from jupyter_client.channelsabc import *
2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
6 import abc
7
8 from IPython.utils.py3compat import with_metaclass
9
10
class ChannelABC(with_metaclass(abc.ABCMeta, object)):
    """A base class for all channel ABCs."""

    @abc.abstractmethod
    def start(self):
        """Start the channel."""
        pass

    @abc.abstractmethod
    def stop(self):
        """Stop the channel."""
        pass

    @abc.abstractmethod
    def is_alive(self):
        """Return whether the channel is currently running."""
        pass
25
26
class HBChannelABC(ChannelABC):
    """HBChannel ABC.

    The docstrings for this class can be found in the base implementation:

    `IPython.kernel.channels.HBChannel`
    """

    @abc.abstractproperty
    def time_to_dead(self):
        """Heartbeat interval in seconds."""
        pass

    @abc.abstractmethod
    def pause(self):
        """Pause the heartbeat."""
        pass

    @abc.abstractmethod
    def unpause(self):
        """Unpause the heartbeat."""
        pass

    @abc.abstractmethod
    def is_beating(self):
        """Return whether the heartbeat is running and responsive."""
        pass
@@ -1,390 +1,1 b''
1 """Base class to manage the interaction with a running kernel"""
1 from jupyter_client.client import *
2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
6 from __future__ import absolute_import
7 from IPython.kernel.channels import major_protocol_version
8 from IPython.utils.py3compat import string_types, iteritems
9
10 import zmq
11
12 from IPython.utils.traitlets import (
13 Any, Instance, Type,
14 )
15
16 from .channelsabc import (ChannelABC, HBChannelABC)
17 from .clientabc import KernelClientABC
18 from .connect import ConnectionFileMixin
19
20
21 # some utilities to validate message structure, these might get moved elsewhere
22 # if they prove to have more generic utility
23
24 def validate_string_dict(dct):
25 """Validate that the input is a dict with string keys and values.
26
27 Raises ValueError if not."""
28 for k,v in iteritems(dct):
29 if not isinstance(k, string_types):
30 raise ValueError('key %r in dict must be a string' % k)
31 if not isinstance(v, string_types):
32 raise ValueError('value %r in dict must be a string' % v)
33
34
35 class KernelClient(ConnectionFileMixin):
36 """Communicates with a single kernel on any host via zmq channels.
37
38 There are four channels associated with each kernel:
39
40 * shell: for request/reply calls to the kernel.
41 * iopub: for the kernel to publish results to frontends.
42 * hb: for monitoring the kernel's heartbeat.
43 * stdin: for frontends to reply to raw_input calls in the kernel.
44
45 The methods of the channels are exposed as methods of the client itself
46 (KernelClient.execute, complete, history, etc.).
47 See the channels themselves for documentation of these methods.
48
49 """
50
51 # The PyZMQ Context to use for communication with the kernel.
52 context = Instance(zmq.Context)
53 def _context_default(self):
54 return zmq.Context.instance()
55
56 # The classes to use for the various channels
57 shell_channel_class = Type(ChannelABC)
58 iopub_channel_class = Type(ChannelABC)
59 stdin_channel_class = Type(ChannelABC)
60 hb_channel_class = Type(HBChannelABC)
61
62 # Protected traits
63 _shell_channel = Any
64 _iopub_channel = Any
65 _stdin_channel = Any
66 _hb_channel = Any
67
68 # flag for whether execute requests should be allowed to call raw_input:
69 allow_stdin = True
70
71 #--------------------------------------------------------------------------
72 # Channel proxy methods
73 #--------------------------------------------------------------------------
74
75 def _get_msg(channel, *args, **kwargs):
76 return channel.get_msg(*args, **kwargs)
77
78 def get_shell_msg(self, *args, **kwargs):
79 """Get a message from the shell channel"""
80 return self.shell_channel.get_msg(*args, **kwargs)
81
82 def get_iopub_msg(self, *args, **kwargs):
83 """Get a message from the iopub channel"""
84 return self.iopub_channel.get_msg(*args, **kwargs)
85
86 def get_stdin_msg(self, *args, **kwargs):
87 """Get a message from the stdin channel"""
88 return self.stdin_channel.get_msg(*args, **kwargs)
89
90 #--------------------------------------------------------------------------
91 # Channel management methods
92 #--------------------------------------------------------------------------
93
94 def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
95 """Starts the channels for this kernel.
96
97 This will create the channels if they do not exist and then start
98 them (their activity runs in a thread). If port numbers of 0 are
99 being used (random ports) then you must first call
100 :meth:`start_kernel`. If the channels have been stopped and you
101 call this, :class:`RuntimeError` will be raised.
102 """
103 if shell:
104 self.shell_channel.start()
105 self.kernel_info()
106 if iopub:
107 self.iopub_channel.start()
108 if stdin:
109 self.stdin_channel.start()
110 self.allow_stdin = True
111 else:
112 self.allow_stdin = False
113 if hb:
114 self.hb_channel.start()
115
116 def stop_channels(self):
117 """Stops all the running channels for this kernel.
118
119 This stops their event loops and joins their threads.
120 """
121 if self.shell_channel.is_alive():
122 self.shell_channel.stop()
123 if self.iopub_channel.is_alive():
124 self.iopub_channel.stop()
125 if self.stdin_channel.is_alive():
126 self.stdin_channel.stop()
127 if self.hb_channel.is_alive():
128 self.hb_channel.stop()
129
130 @property
131 def channels_running(self):
132 """Are any of the channels created and running?"""
133 return (self.shell_channel.is_alive() or self.iopub_channel.is_alive() or
134 self.stdin_channel.is_alive() or self.hb_channel.is_alive())
135
136 ioloop = None # Overridden in subclasses that use pyzmq event loop
137
138 @property
139 def shell_channel(self):
140 """Get the shell channel object for this kernel."""
141 if self._shell_channel is None:
142 url = self._make_url('shell')
143 self.log.debug("connecting shell channel to %s", url)
144 socket = self.connect_shell(identity=self.session.bsession)
145 self._shell_channel = self.shell_channel_class(
146 socket, self.session, self.ioloop
147 )
148 return self._shell_channel
149
150 @property
151 def iopub_channel(self):
152 """Get the iopub channel object for this kernel."""
153 if self._iopub_channel is None:
154 url = self._make_url('iopub')
155 self.log.debug("connecting iopub channel to %s", url)
156 socket = self.connect_iopub()
157 self._iopub_channel = self.iopub_channel_class(
158 socket, self.session, self.ioloop
159 )
160 return self._iopub_channel
161
162 @property
163 def stdin_channel(self):
164 """Get the stdin channel object for this kernel."""
165 if self._stdin_channel is None:
166 url = self._make_url('stdin')
167 self.log.debug("connecting stdin channel to %s", url)
168 socket = self.connect_stdin(identity=self.session.bsession)
169 self._stdin_channel = self.stdin_channel_class(
170 socket, self.session, self.ioloop
171 )
172 return self._stdin_channel
173
174 @property
175 def hb_channel(self):
176 """Get the hb channel object for this kernel."""
177 if self._hb_channel is None:
178 url = self._make_url('hb')
179 self.log.debug("connecting heartbeat channel to %s", url)
180 self._hb_channel = self.hb_channel_class(
181 self.context, self.session, url
182 )
183 return self._hb_channel
184
185 def is_alive(self):
186 """Is the kernel process still running?"""
187 if self._hb_channel is not None:
188 # We didn't start the kernel with this KernelManager so we
189 # use the heartbeat.
190 return self._hb_channel.is_beating()
191 else:
192 # no heartbeat and not local, we can't tell if it's running,
193 # so naively return True
194 return True
195
196
197 # Methods to send specific messages on channels
198 def execute(self, code, silent=False, store_history=True,
199 user_expressions=None, allow_stdin=None, stop_on_error=True):
200 """Execute code in the kernel.
201
202 Parameters
203 ----------
204 code : str
205 A string of Python code.
206
207 silent : bool, optional (default False)
208 If set, the kernel will execute the code as quietly possible, and
209 will force store_history to be False.
210
211 store_history : bool, optional (default True)
212 If set, the kernel will store command history. This is forced
213 to be False if silent is True.
214
215 user_expressions : dict, optional
216 A dict mapping names to expressions to be evaluated in the user's
217 dict. The expression values are returned as strings formatted using
218 :func:`repr`.
219
220 allow_stdin : bool, optional (default self.allow_stdin)
221 Flag for whether the kernel can send stdin requests to frontends.
222
223 Some frontends (e.g. the Notebook) do not support stdin requests.
224 If raw_input is called from code executed from such a frontend, a
225 StdinNotImplementedError will be raised.
226
227 stop_on_error: bool, optional (default True)
228 Flag whether to abort the execution queue, if an exception is encountered.
229
230 Returns
231 -------
232 The msg_id of the message sent.
233 """
234 if user_expressions is None:
235 user_expressions = {}
236 if allow_stdin is None:
237 allow_stdin = self.allow_stdin
238
239
240 # Don't waste network traffic if inputs are invalid
241 if not isinstance(code, string_types):
242 raise ValueError('code %r must be a string' % code)
243 validate_string_dict(user_expressions)
244
245 # Create class for content/msg creation. Related to, but possibly
246 # not in Session.
247 content = dict(code=code, silent=silent, store_history=store_history,
248 user_expressions=user_expressions,
249 allow_stdin=allow_stdin, stop_on_error=stop_on_error
250 )
251 msg = self.session.msg('execute_request', content)
252 self.shell_channel.send(msg)
253 return msg['header']['msg_id']
254
255 def complete(self, code, cursor_pos=None):
256 """Tab complete text in the kernel's namespace.
257
258 Parameters
259 ----------
260 code : str
261 The context in which completion is requested.
262 Can be anything between a variable name and an entire cell.
263 cursor_pos : int, optional
264 The position of the cursor in the block of code where the completion was requested.
265 Default: ``len(code)``
266
267 Returns
268 -------
269 The msg_id of the message sent.
270 """
271 if cursor_pos is None:
272 cursor_pos = len(code)
273 content = dict(code=code, cursor_pos=cursor_pos)
274 msg = self.session.msg('complete_request', content)
275 self.shell_channel.send(msg)
276 return msg['header']['msg_id']
277
278 def inspect(self, code, cursor_pos=None, detail_level=0):
279 """Get metadata information about an object in the kernel's namespace.
280
281 It is up to the kernel to determine the appropriate object to inspect.
282
283 Parameters
284 ----------
285 code : str
286 The context in which info is requested.
287 Can be anything between a variable name and an entire cell.
288 cursor_pos : int, optional
289 The position of the cursor in the block of code where the info was requested.
290 Default: ``len(code)``
291 detail_level : int, optional
292 The level of detail for the introspection (0-2)
293
294 Returns
295 -------
296 The msg_id of the message sent.
297 """
298 if cursor_pos is None:
299 cursor_pos = len(code)
300 content = dict(code=code, cursor_pos=cursor_pos,
301 detail_level=detail_level,
302 )
303 msg = self.session.msg('inspect_request', content)
304 self.shell_channel.send(msg)
305 return msg['header']['msg_id']
306
307 def history(self, raw=True, output=False, hist_access_type='range', **kwargs):
308 """Get entries from the kernel's history list.
309
310 Parameters
311 ----------
312 raw : bool
313 If True, return the raw input.
314 output : bool
315 If True, then return the output as well.
316 hist_access_type : str
317 'range' (fill in session, start and stop params), 'tail' (fill in n)
318 or 'search' (fill in pattern param).
319
320 session : int
321 For a range request, the session from which to get lines. Session
322 numbers are positive integers; negative ones count back from the
323 current session.
324 start : int
325 The first line number of a history range.
326 stop : int
327 The final (excluded) line number of a history range.
328
329 n : int
330 The number of lines of history to get for a tail request.
331
332 pattern : str
333 The glob-syntax pattern for a search request.
334
335 Returns
336 -------
337 The msg_id of the message sent.
338 """
339 content = dict(raw=raw, output=output, hist_access_type=hist_access_type,
340 **kwargs)
341 msg = self.session.msg('history_request', content)
342 self.shell_channel.send(msg)
343 return msg['header']['msg_id']
344
345 def kernel_info(self):
346 """Request kernel info."""
347 msg = self.session.msg('kernel_info_request')
348 self.shell_channel.send(msg)
349 return msg['header']['msg_id']
350
351 def _handle_kernel_info_reply(self, msg):
352 """handle kernel info reply
353
354 sets protocol adaptation version. This might
355 be run from a separate thread.
356 """
357 adapt_version = int(msg['content']['protocol_version'].split('.')[0])
358 if adapt_version != major_protocol_version:
359 self.session.adapt_version = adapt_version
360
361 def shutdown(self, restart=False):
362 """Request an immediate kernel shutdown.
363
364 Upon receipt of the (empty) reply, client code can safely assume that
365 the kernel has shut down and it's safe to forcefully terminate it if
366 it's still alive.
367
368 The kernel will send the reply via a function registered with Python's
369 atexit module, ensuring it's truly done as the kernel is done with all
370 normal operation.
371 """
372 # Send quit message to kernel. Once we implement kernel-side setattr,
373 # this should probably be done that way, but for now this will do.
374 msg = self.session.msg('shutdown_request', {'restart':restart})
375 self.shell_channel.send(msg)
376 return msg['header']['msg_id']
377
378 def is_complete(self, code):
379 msg = self.session.msg('is_complete_request', {'code': code})
380 self.shell_channel.send(msg)
381 return msg['header']['msg_id']
382
383 def input(self, string):
384 """Send a string of raw input to the kernel."""
385 content = dict(value=string)
386 msg = self.session.msg('input_reply', content)
387 self.stdin_channel.send(msg)
388
389
390 KernelClientABC.register(KernelClient)
@@ -1,80 +1,1 b''
1 """Abstract base class for kernel clients"""
1 from jupyter_client.clientabc import *
2
3 #-----------------------------------------------------------------------------
4 # Copyright (C) 2013 The IPython Development Team
5 #
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
8 #-----------------------------------------------------------------------------
9
10 #-----------------------------------------------------------------------------
11 # Imports
12 #-----------------------------------------------------------------------------
13
14 import abc
15
16 from IPython.utils.py3compat import with_metaclass
17
18 #-----------------------------------------------------------------------------
19 # Main kernel client class
20 #-----------------------------------------------------------------------------
21
22 class KernelClientABC(with_metaclass(abc.ABCMeta, object)):
23 """KernelManager ABC.
24
25 The docstrings for this class can be found in the base implementation:
26
27 `IPython.kernel.client.KernelClient`
28 """
29
30 @abc.abstractproperty
31 def kernel(self):
32 pass
33
34 @abc.abstractproperty
35 def shell_channel_class(self):
36 pass
37
38 @abc.abstractproperty
39 def iopub_channel_class(self):
40 pass
41
42 @abc.abstractproperty
43 def hb_channel_class(self):
44 pass
45
46 @abc.abstractproperty
47 def stdin_channel_class(self):
48 pass
49
50 #--------------------------------------------------------------------------
51 # Channel management methods
52 #--------------------------------------------------------------------------
53
54 @abc.abstractmethod
55 def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
56 pass
57
58 @abc.abstractmethod
59 def stop_channels(self):
60 pass
61
62 @abc.abstractproperty
63 def channels_running(self):
64 pass
65
66 @abc.abstractproperty
67 def shell_channel(self):
68 pass
69
70 @abc.abstractproperty
71 def iopub_channel(self):
72 pass
73
74 @abc.abstractproperty
75 def stdin_channel(self):
76 pass
77
78 @abc.abstractproperty
79 def hb_channel(self):
80 pass
This diff has been collapsed as it changes many lines, (578 lines changed) Show them Hide them
@@ -1,576 +1,2 b''
1 """Utilities for connecting to kernels
1 from ipython_kernel.connect import *
2
2 from jupyter_client.connect import *
3 The :class:`ConnectionFileMixin` class in this module encapsulates the logic
4 related to writing and reading connections files.
5 """
6 # Copyright (c) IPython Development Team.
7 # Distributed under the terms of the Modified BSD License.
8
9 #-----------------------------------------------------------------------------
10 # Imports
11 #-----------------------------------------------------------------------------
12
13 from __future__ import absolute_import
14
15 import glob
16 import json
17 import os
18 import socket
19 import sys
20 from getpass import getpass
21 from subprocess import Popen, PIPE
22 import tempfile
23
24 import zmq
25
26 # IPython imports
27 from IPython.config import LoggingConfigurable
28 from IPython.core.profiledir import ProfileDir
29 from IPython.utils.localinterfaces import localhost
30 from IPython.utils.path import filefind, get_ipython_dir
31 from IPython.utils.py3compat import (str_to_bytes, bytes_to_str, cast_bytes_py2,
32 string_types)
33 from IPython.utils.traitlets import (
34 Bool, Integer, Unicode, CaselessStrEnum, Instance,
35 )
36
37
38 #-----------------------------------------------------------------------------
39 # Working with Connection Files
40 #-----------------------------------------------------------------------------
41
42 def write_connection_file(fname=None, shell_port=0, iopub_port=0, stdin_port=0, hb_port=0,
43 control_port=0, ip='', key=b'', transport='tcp',
44 signature_scheme='hmac-sha256',
45 ):
46 """Generates a JSON config file, including the selection of random ports.
47
48 Parameters
49 ----------
50
51 fname : unicode
52 The path to the file to write
53
54 shell_port : int, optional
55 The port to use for ROUTER (shell) channel.
56
57 iopub_port : int, optional
58 The port to use for the SUB channel.
59
60 stdin_port : int, optional
61 The port to use for the ROUTER (raw input) channel.
62
63 control_port : int, optional
64 The port to use for the ROUTER (control) channel.
65
66 hb_port : int, optional
67 The port to use for the heartbeat REP channel.
68
69 ip : str, optional
70 The ip address the kernel will bind to.
71
72 key : str, optional
73 The Session key used for message authentication.
74
75 signature_scheme : str, optional
76 The scheme used for message authentication.
77 This has the form 'digest-hash', where 'digest'
78 is the scheme used for digests, and 'hash' is the name of the hash function
79 used by the digest scheme.
80 Currently, 'hmac' is the only supported digest scheme,
81 and 'sha256' is the default hash function.
82
83 """
84 if not ip:
85 ip = localhost()
86 # default to temporary connector file
87 if not fname:
88 fd, fname = tempfile.mkstemp('.json')
89 os.close(fd)
90
91 # Find open ports as necessary.
92
93 ports = []
94 ports_needed = int(shell_port <= 0) + \
95 int(iopub_port <= 0) + \
96 int(stdin_port <= 0) + \
97 int(control_port <= 0) + \
98 int(hb_port <= 0)
99 if transport == 'tcp':
100 for i in range(ports_needed):
101 sock = socket.socket()
102 # struct.pack('ii', (0,0)) is 8 null bytes
103 sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, b'\0' * 8)
104 sock.bind(('', 0))
105 ports.append(sock)
106 for i, sock in enumerate(ports):
107 port = sock.getsockname()[1]
108 sock.close()
109 ports[i] = port
110 else:
111 N = 1
112 for i in range(ports_needed):
113 while os.path.exists("%s-%s" % (ip, str(N))):
114 N += 1
115 ports.append(N)
116 N += 1
117 if shell_port <= 0:
118 shell_port = ports.pop(0)
119 if iopub_port <= 0:
120 iopub_port = ports.pop(0)
121 if stdin_port <= 0:
122 stdin_port = ports.pop(0)
123 if control_port <= 0:
124 control_port = ports.pop(0)
125 if hb_port <= 0:
126 hb_port = ports.pop(0)
127
128 cfg = dict( shell_port=shell_port,
129 iopub_port=iopub_port,
130 stdin_port=stdin_port,
131 control_port=control_port,
132 hb_port=hb_port,
133 )
134 cfg['ip'] = ip
135 cfg['key'] = bytes_to_str(key)
136 cfg['transport'] = transport
137 cfg['signature_scheme'] = signature_scheme
138
139 with open(fname, 'w') as f:
140 f.write(json.dumps(cfg, indent=2))
141
142 return fname, cfg
143
144
145 def get_connection_file(app=None):
146 """Return the path to the connection file of an app
147
148 Parameters
149 ----------
150 app : IPKernelApp instance [optional]
151 If unspecified, the currently running app will be used
152 """
153 if app is None:
154 from IPython.kernel.zmq.kernelapp import IPKernelApp
155 if not IPKernelApp.initialized():
156 raise RuntimeError("app not specified, and not in a running Kernel")
157
158 app = IPKernelApp.instance()
159 return filefind(app.connection_file, ['.', app.profile_dir.security_dir])
160
161
162 def find_connection_file(filename='kernel-*.json', profile=None):
163 """find a connection file, and return its absolute path.
164
165 The current working directory and the profile's security
166 directory will be searched for the file if it is not given by
167 absolute path.
168
169 If profile is unspecified, then the current running application's
170 profile will be used, or 'default', if not run from IPython.
171
172 If the argument does not match an existing file, it will be interpreted as a
173 fileglob, and the matching file in the profile's security dir with
174 the latest access time will be used.
175
176 Parameters
177 ----------
178 filename : str
179 The connection file or fileglob to search for.
180 profile : str [optional]
181 The name of the profile to use when searching for the connection file,
182 if different from the current IPython session or 'default'.
183
184 Returns
185 -------
186 str : The absolute path of the connection file.
187 """
188 from IPython.core.application import BaseIPythonApplication as IPApp
189 try:
190 # quick check for absolute path, before going through logic
191 return filefind(filename)
192 except IOError:
193 pass
194
195 if profile is None:
196 # profile unspecified, check if running from an IPython app
197 if IPApp.initialized():
198 app = IPApp.instance()
199 profile_dir = app.profile_dir
200 else:
201 # not running in IPython, use default profile
202 profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), 'default')
203 else:
204 # find profiledir by profile name:
205 profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile)
206 security_dir = profile_dir.security_dir
207
208 try:
209 # first, try explicit name
210 return filefind(filename, ['.', security_dir])
211 except IOError:
212 pass
213
214 # not found by full name
215
216 if '*' in filename:
217 # given as a glob already
218 pat = filename
219 else:
220 # accept any substring match
221 pat = '*%s*' % filename
222 matches = glob.glob( os.path.join(security_dir, pat) )
223 if not matches:
224 raise IOError("Could not find %r in %r" % (filename, security_dir))
225 elif len(matches) == 1:
226 return matches[0]
227 else:
228 # get most recent match, by access time:
229 return sorted(matches, key=lambda f: os.stat(f).st_atime)[-1]
230
231
232 def get_connection_info(connection_file=None, unpack=False, profile=None):
233 """Return the connection information for the current Kernel.
234
235 Parameters
236 ----------
237 connection_file : str [optional]
238 The connection file to be used. Can be given by absolute path, or
239 IPython will search in the security directory of a given profile.
240 If run from IPython,
241
242 If unspecified, the connection file for the currently running
243 IPython Kernel will be used, which is only allowed from inside a kernel.
244 unpack : bool [default: False]
245 if True, return the unpacked dict, otherwise just the string contents
246 of the file.
247 profile : str [optional]
248 The name of the profile to use when searching for the connection file,
249 if different from the current IPython session or 'default'.
250
251
252 Returns
253 -------
254 The connection dictionary of the current kernel, as string or dict,
255 depending on `unpack`.
256 """
257 if connection_file is None:
258 # get connection file from current kernel
259 cf = get_connection_file()
260 else:
261 # connection file specified, allow shortnames:
262 cf = find_connection_file(connection_file, profile=profile)
263
264 with open(cf) as f:
265 info = f.read()
266
267 if unpack:
268 info = json.loads(info)
269 # ensure key is bytes:
270 info['key'] = str_to_bytes(info.get('key', ''))
271 return info
272
273
274 def connect_qtconsole(connection_file=None, argv=None, profile=None):
275 """Connect a qtconsole to the current kernel.
276
277 This is useful for connecting a second qtconsole to a kernel, or to a
278 local notebook.
279
280 Parameters
281 ----------
282 connection_file : str [optional]
283 The connection file to be used. Can be given by absolute path, or
284 IPython will search in the security directory of a given profile.
285 If run from IPython,
286
287 If unspecified, the connection file for the currently running
288 IPython Kernel will be used, which is only allowed from inside a kernel.
289 argv : list [optional]
290 Any extra args to be passed to the console.
291 profile : str [optional]
292 The name of the profile to use when searching for the connection file,
293 if different from the current IPython session or 'default'.
294
295
296 Returns
297 -------
298 :class:`subprocess.Popen` instance running the qtconsole frontend
299 """
300 argv = [] if argv is None else argv
301
302 if connection_file is None:
303 # get connection file from current kernel
304 cf = get_connection_file()
305 else:
306 cf = find_connection_file(connection_file, profile=profile)
307
308 cmd = ';'.join([
309 "from IPython.qt.console import qtconsoleapp",
310 "qtconsoleapp.main()"
311 ])
312
313 return Popen([sys.executable, '-c', cmd, '--existing', cf] + argv,
314 stdout=PIPE, stderr=PIPE, close_fds=(sys.platform != 'win32'),
315 )
316
317
318 def tunnel_to_kernel(connection_info, sshserver, sshkey=None):
319 """tunnel connections to a kernel via ssh
320
321 This will open four SSH tunnels from localhost on this machine to the
322 ports associated with the kernel. They can be either direct
323 localhost-localhost tunnels, or if an intermediate server is necessary,
324 the kernel must be listening on a public IP.
325
326 Parameters
327 ----------
328 connection_info : dict or str (path)
329 Either a connection dict, or the path to a JSON connection file
330 sshserver : str
331 The ssh sever to use to tunnel to the kernel. Can be a full
332 `user@server:port` string. ssh config aliases are respected.
333 sshkey : str [optional]
334 Path to file containing ssh key to use for authentication.
335 Only necessary if your ssh config does not already associate
336 a keyfile with the host.
337
338 Returns
339 -------
340
341 (shell, iopub, stdin, hb) : ints
342 The four ports on localhost that have been forwarded to the kernel.
343 """
344 from zmq.ssh import tunnel
345 if isinstance(connection_info, string_types):
346 # it's a path, unpack it
347 with open(connection_info) as f:
348 connection_info = json.loads(f.read())
349
350 cf = connection_info
351
352 lports = tunnel.select_random_ports(4)
353 rports = cf['shell_port'], cf['iopub_port'], cf['stdin_port'], cf['hb_port']
354
355 remote_ip = cf['ip']
356
357 if tunnel.try_passwordless_ssh(sshserver, sshkey):
358 password=False
359 else:
360 password = getpass("SSH Password for %s: " % cast_bytes_py2(sshserver))
361
362 for lp,rp in zip(lports, rports):
363 tunnel.ssh_tunnel(lp, rp, sshserver, remote_ip, sshkey, password)
364
365 return tuple(lports)
366
367
368 #-----------------------------------------------------------------------------
369 # Mixin for classes that work with connection files
370 #-----------------------------------------------------------------------------
371
372 channel_socket_types = {
373 'hb' : zmq.REQ,
374 'shell' : zmq.DEALER,
375 'iopub' : zmq.SUB,
376 'stdin' : zmq.DEALER,
377 'control': zmq.DEALER,
378 }
379
380 port_names = [ "%s_port" % channel for channel in ('shell', 'stdin', 'iopub', 'hb', 'control')]
381
382 class ConnectionFileMixin(LoggingConfigurable):
383 """Mixin for configurable classes that work with connection files"""
384
385 # The addresses for the communication channels
386 connection_file = Unicode('', config=True,
387 help="""JSON file in which to store connection info [default: kernel-<pid>.json]
388
389 This file will contain the IP, ports, and authentication key needed to connect
390 clients to this kernel. By default, this file will be created in the security dir
391 of the current profile, but can be specified by absolute path.
392 """)
393 _connection_file_written = Bool(False)
394
395 transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True)
396
397 ip = Unicode(config=True,
398 help="""Set the kernel\'s IP address [default localhost].
399 If the IP address is something other than localhost, then
400 Consoles on other machines will be able to connect
401 to the Kernel, so be careful!"""
402 )
403
404 def _ip_default(self):
405 if self.transport == 'ipc':
406 if self.connection_file:
407 return os.path.splitext(self.connection_file)[0] + '-ipc'
408 else:
409 return 'kernel-ipc'
410 else:
411 return localhost()
412
413 def _ip_changed(self, name, old, new):
414 if new == '*':
415 self.ip = '0.0.0.0'
416
417 # protected traits
418
419 hb_port = Integer(0, config=True,
420 help="set the heartbeat port [default: random]")
421 shell_port = Integer(0, config=True,
422 help="set the shell (ROUTER) port [default: random]")
423 iopub_port = Integer(0, config=True,
424 help="set the iopub (PUB) port [default: random]")
425 stdin_port = Integer(0, config=True,
426 help="set the stdin (ROUTER) port [default: random]")
427 control_port = Integer(0, config=True,
428 help="set the control (ROUTER) port [default: random]")
429
430 @property
431 def ports(self):
432 return [ getattr(self, name) for name in port_names ]
433
434 # The Session to use for communication with the kernel.
435 session = Instance('IPython.kernel.zmq.session.Session')
436 def _session_default(self):
437 from IPython.kernel.zmq.session import Session
438 return Session(parent=self)
439
440 #--------------------------------------------------------------------------
441 # Connection and ipc file management
442 #--------------------------------------------------------------------------
443
444 def get_connection_info(self):
445 """return the connection info as a dict"""
446 return dict(
447 transport=self.transport,
448 ip=self.ip,
449 shell_port=self.shell_port,
450 iopub_port=self.iopub_port,
451 stdin_port=self.stdin_port,
452 hb_port=self.hb_port,
453 control_port=self.control_port,
454 signature_scheme=self.session.signature_scheme,
455 key=self.session.key,
456 )
457
458 def cleanup_connection_file(self):
459 """Cleanup connection file *if we wrote it*
460
461 Will not raise if the connection file was already removed somehow.
462 """
463 if self._connection_file_written:
464 # cleanup connection files on full shutdown of kernel we started
465 self._connection_file_written = False
466 try:
467 os.remove(self.connection_file)
468 except (IOError, OSError, AttributeError):
469 pass
470
471 def cleanup_ipc_files(self):
472 """Cleanup ipc files if we wrote them."""
473 if self.transport != 'ipc':
474 return
475 for port in self.ports:
476 ipcfile = "%s-%i" % (self.ip, port)
477 try:
478 os.remove(ipcfile)
479 except (IOError, OSError):
480 pass
481
482 def write_connection_file(self):
483 """Write connection info to JSON dict in self.connection_file."""
484 if self._connection_file_written and os.path.exists(self.connection_file):
485 return
486
487 self.connection_file, cfg = write_connection_file(self.connection_file,
488 transport=self.transport, ip=self.ip, key=self.session.key,
489 stdin_port=self.stdin_port, iopub_port=self.iopub_port,
490 shell_port=self.shell_port, hb_port=self.hb_port,
491 control_port=self.control_port,
492 signature_scheme=self.session.signature_scheme,
493 )
494 # write_connection_file also sets default ports:
495 for name in port_names:
496 setattr(self, name, cfg[name])
497
498 self._connection_file_written = True
499
500 def load_connection_file(self):
501 """Load connection info from JSON dict in self.connection_file."""
502 self.log.debug(u"Loading connection file %s", self.connection_file)
503 with open(self.connection_file) as f:
504 cfg = json.load(f)
505 self.transport = cfg.get('transport', self.transport)
506 self.ip = cfg.get('ip', self._ip_default())
507
508 for name in port_names:
509 if getattr(self, name) == 0 and name in cfg:
510 # not overridden by config or cl_args
511 setattr(self, name, cfg[name])
512
513 if 'key' in cfg:
514 self.session.key = str_to_bytes(cfg['key'])
515 if 'signature_scheme' in cfg:
516 self.session.signature_scheme = cfg['signature_scheme']
517
518 #--------------------------------------------------------------------------
519 # Creating connected sockets
520 #--------------------------------------------------------------------------
521
522 def _make_url(self, channel):
523 """Make a ZeroMQ URL for a given channel."""
524 transport = self.transport
525 ip = self.ip
526 port = getattr(self, '%s_port' % channel)
527
528 if transport == 'tcp':
529 return "tcp://%s:%i" % (ip, port)
530 else:
531 return "%s://%s-%s" % (transport, ip, port)
532
533 def _create_connected_socket(self, channel, identity=None):
534 """Create a zmq Socket and connect it to the kernel."""
535 url = self._make_url(channel)
536 socket_type = channel_socket_types[channel]
537 self.log.debug("Connecting to: %s" % url)
538 sock = self.context.socket(socket_type)
539 # set linger to 1s to prevent hangs at exit
540 sock.linger = 1000
541 if identity:
542 sock.identity = identity
543 sock.connect(url)
544 return sock
545
546 def connect_iopub(self, identity=None):
547 """return zmq Socket connected to the IOPub channel"""
548 sock = self._create_connected_socket('iopub', identity=identity)
549 sock.setsockopt(zmq.SUBSCRIBE, b'')
550 return sock
551
552 def connect_shell(self, identity=None):
553 """return zmq Socket connected to the Shell channel"""
554 return self._create_connected_socket('shell', identity=identity)
555
556 def connect_stdin(self, identity=None):
557 """return zmq Socket connected to the StdIn channel"""
558 return self._create_connected_socket('stdin', identity=identity)
559
560 def connect_hb(self, identity=None):
561 """return zmq Socket connected to the Heartbeat channel"""
562 return self._create_connected_socket('hb', identity=identity)
563
564 def connect_control(self, identity=None):
565 """return zmq Socket connected to the Control channel"""
566 return self._create_connected_socket('control', identity=identity)
567
568
569 __all__ = [
570 'write_connection_file',
571 'get_connection_file',
572 'find_connection_file',
573 'get_connection_info',
574 'connect_qtconsole',
575 'tunnel_to_kernel',
576 ]
@@ -1,237 +1,1 b''
1 import io
1 from jupyter_client.kernelspec import *
2 import json
3 import os
4 import shutil
5 import sys
6
7 pjoin = os.path.join
8
9 from IPython.utils.path import get_ipython_dir
10 from IPython.utils.py3compat import PY3
11 from IPython.utils.traitlets import HasTraits, List, Unicode, Dict, Any, Set
12 from IPython.config import Configurable
13 from .launcher import make_ipkernel_cmd
14
# Platform-specific locations of system-wide kernel spec directories.
if os.name == 'nt':
    # PROGRAMDATA is not defined by default on XP.
    programdata = os.environ.get('PROGRAMDATA', None)
    if programdata:
        SYSTEM_KERNEL_DIRS = [pjoin(programdata, 'jupyter', 'kernels')]
    else:
        SYSTEM_KERNEL_DIRS = []
else:
    SYSTEM_KERNEL_DIRS = [
        "/usr/share/jupyter/kernels",
        "/usr/local/share/jupyter/kernels",
    ]

# Name of the kernel spec that runs in the same Python as this process.
NATIVE_KERNEL_NAME = 'python3' if PY3 else 'python2'
27
def _pythonfirst(s):
    "Sort key function that will put strings starting with 'python' first."
    # A leading space is not a valid character in kernel names, so prefixing
    # one makes these keys sort before every real kernel name.
    if s == NATIVE_KERNEL_NAME:
        return ' ' + s # the native kernel sorts first of all
    elif s.startswith('python'):
        # Space is not valid in kernel names, so this should sort first
        return ' ' + s
    return s
36
class KernelSpec(HasTraits):
    """In-memory representation of a kernel.json spec."""
    argv = List()
    display_name = Unicode()
    language = Unicode()
    env = Dict()
    resource_dir = Unicode()

    @classmethod
    def from_resource_dir(cls, resource_dir):
        """Create a KernelSpec object by reading kernel.json

        Pass the path to the *directory* containing kernel.json.
        """
        spec_file = pjoin(resource_dir, 'kernel.json')
        with io.open(spec_file, 'r', encoding='utf-8') as f:
            spec = json.load(f)
        return cls(resource_dir=resource_dir, **spec)

    def to_dict(self):
        """Return the serializable fields of this spec as a dict."""
        return dict(
            argv=self.argv,
            env=self.env,
            display_name=self.display_name,
            language=self.language,
        )

    def to_json(self):
        """Serialize this spec to a JSON string."""
        return json.dumps(self.to_dict())
66
def _is_kernel_dir(path):
    """Return True if ``path`` is a directory containing a kernel.json file."""
    if not os.path.isdir(path):
        return False
    return os.path.isfile(pjoin(path, 'kernel.json'))
70
def _list_kernels_in(dir):
    """Return a mapping of kernel names to resource directories from dir.

    If dir is None or does not exist, returns an empty dict.
    """
    if dir is None or not os.path.isdir(dir):
        return {}
    found = {}
    for entry in os.listdir(dir):
        candidate = pjoin(dir, entry)
        if _is_kernel_dir(candidate):
            # names are normalized to lowercase to match lookups
            found[entry.lower()] = candidate
    return found
80
class NoSuchKernel(KeyError):
    """Raised when a kernel spec with the requested name cannot be found."""

    def __init__(self, name):
        # Forward the name to KeyError so that ``e.args`` and ``str(e)``
        # identify the missing kernel instead of being empty.
        super(NoSuchKernel, self).__init__(name)
        # Keep the dedicated attribute for callers that read it directly.
        self.name = name
84
class KernelSpecManager(Configurable):
    """Discovers, loads, and installs kernel specs from a list of directories."""

    # Root IPython configuration directory; used to locate user kernels.
    ipython_dir = Unicode()
    def _ipython_dir_default(self):
        return get_ipython_dir()

    # Per-user kernel spec directory: <ipython_dir>/kernels
    user_kernel_dir = Unicode()
    def _user_kernel_dir_default(self):
        return pjoin(self.ipython_dir, 'kernels')

    @property
    def env_kernel_dir(self):
        # Kernels installed into the current Python environment (sys.prefix).
        return pjoin(sys.prefix, 'share', 'jupyter', 'kernels')

    whitelist = Set(config=True,
        help="""Whitelist of allowed kernel names.

        By default, all installed kernels are allowed.
        """
    )
    kernel_dirs = List(
        help="List of kernel directories to search. Later ones take priority over earlier."
    )
    def _kernel_dirs_default(self):
        # system dirs first, then the env dir (if distinct), then user dir —
        # so user kernels override env and system ones of the same name.
        dirs = SYSTEM_KERNEL_DIRS[:]
        if self.env_kernel_dir not in dirs:
            dirs.append(self.env_kernel_dir)
        dirs.append(self.user_kernel_dir)
        return dirs

    @property
    def _native_kernel_dict(self):
        """Makes a kernel directory for the native kernel.

        The native kernel is the kernel using the same Python runtime as this
        process. This will put its information in the user kernels directory.
        """
        return {
            'argv': make_ipkernel_cmd(),
            'display_name': 'Python %i' % (3 if PY3 else 2),
            'language': 'python',
        }

    @property
    def _native_kernel_resource_dir(self):
        # Static resources (logos etc.) shipped alongside this module.
        return pjoin(os.path.dirname(__file__), 'resources')

    def find_kernel_specs(self):
        """Returns a dict mapping kernel names to resource directories."""
        d = {}
        # later dirs overwrite earlier ones (see kernel_dirs ordering)
        for kernel_dir in self.kernel_dirs:
            d.update(_list_kernels_in(kernel_dir))

        # the native kernel is always available
        d[NATIVE_KERNEL_NAME] = self._native_kernel_resource_dir
        if self.whitelist:
            # filter if there's a whitelist
            d = {name:spec for name,spec in d.items() if name in self.whitelist}
        return d
        # TODO: Caching?

    def get_kernel_spec(self, kernel_name):
        """Returns a :class:`KernelSpec` instance for the given kernel_name.

        Raises :exc:`NoSuchKernel` if the given kernel name is not found.
        """
        # 'python' and the native name short-circuit to the built-in spec,
        # subject to the whitelist.
        if kernel_name in {'python', NATIVE_KERNEL_NAME} and \
            (not self.whitelist or kernel_name in self.whitelist):
            return KernelSpec(resource_dir=self._native_kernel_resource_dir,
                              **self._native_kernel_dict)

        d = self.find_kernel_specs()
        try:
            # lookups are case-insensitive: dirs are stored lowercased
            resource_dir = d[kernel_name.lower()]
        except KeyError:
            raise NoSuchKernel(kernel_name)
        return KernelSpec.from_resource_dir(resource_dir)

    def _get_destination_dir(self, kernel_name, user=False):
        # Choose where an installed spec should live: the user directory, or
        # the last (highest-priority) system directory.
        if user:
            return os.path.join(self.user_kernel_dir, kernel_name)
        else:
            if SYSTEM_KERNEL_DIRS:
                return os.path.join(SYSTEM_KERNEL_DIRS[-1], kernel_name)
            else:
                raise EnvironmentError("No system kernel directory is available")


    def install_kernel_spec(self, source_dir, kernel_name=None, user=False,
                            replace=False):
        """Install a kernel spec by copying its directory.

        If ``kernel_name`` is not given, the basename of ``source_dir`` will
        be used.

        If ``user`` is False, it will attempt to install into the systemwide
        kernel registry. If the process does not have appropriate permissions,
        an :exc:`OSError` will be raised.

        If ``replace`` is True, this will replace an existing kernel of the same
        name. Otherwise, if the destination already exists, an :exc:`OSError`
        will be raised.
        """
        if not kernel_name:
            kernel_name = os.path.basename(source_dir)
        # stored lowercased, matching the case-insensitive lookup
        kernel_name = kernel_name.lower()

        destination = self._get_destination_dir(kernel_name, user=user)

        if replace and os.path.isdir(destination):
            shutil.rmtree(destination)

        # copytree itself raises OSError if destination already exists
        shutil.copytree(source_dir, destination)

    def install_native_kernel_spec(self, user=False):
        """Install the native kernel spec to the filesystem

        This allows a Python 3 frontend to use a Python 2 kernel, or vice versa.
        The kernelspec will be written pointing to the Python executable on
        which this is run.

        If ``user`` is False, it will attempt to install into the systemwide
        kernel registry. If the process does not have appropriate permissions,
        an :exc:`OSError` will be raised.
        """
        path = self._get_destination_dir(NATIVE_KERNEL_NAME, user=user)
        os.makedirs(path, mode=0o755)
        with open(pjoin(path, 'kernel.json'), 'w') as f:
            json.dump(self._native_kernel_dict, f, indent=1)
        # copy static resources (logos etc.) next to the generated kernel.json
        copy_from = self._native_kernel_resource_dir
        for file in os.listdir(copy_from):
            shutil.copy(pjoin(copy_from, file), path)
        return path
216
def find_kernel_specs():
    """Returns a dict mapping kernel names to resource directories."""
    manager = KernelSpecManager()
    return manager.find_kernel_specs()
220
def get_kernel_spec(kernel_name):
    """Returns a :class:`KernelSpec` instance for the given kernel_name.

    Raises KeyError if the given kernel name is not found.
    """
    manager = KernelSpecManager()
    return manager.get_kernel_spec(kernel_name)
227
def install_kernel_spec(source_dir, kernel_name=None, user=False, replace=False):
    manager = KernelSpecManager()
    return manager.install_kernel_spec(source_dir, kernel_name, user, replace)

# module-level convenience wrapper shares the method's docstring
install_kernel_spec.__doc__ = KernelSpecManager.install_kernel_spec.__doc__
233
def install_native_kernel_spec(user=False):
    manager = KernelSpecManager()
    return manager.install_native_kernel_spec(user=user)

# module-level convenience wrapper shares the method's docstring
install_native_kernel_spec.__doc__ = KernelSpecManager.install_native_kernel_spec.__doc__
@@ -1,142 +1,1 b''
1
1 from jupyter_client.kernelspecapp import *
2 # Copyright (c) IPython Development Team.
3 # Distributed under the terms of the Modified BSD License.
4
5 import errno
6 import os.path
7
8 from IPython.config.application import Application
9 from IPython.core.application import (
10 BaseIPythonApplication, base_flags, base_aliases
11 )
12 from IPython.utils.traitlets import Instance, Dict, Unicode, Bool
13
14 from .kernelspec import KernelSpecManager, _pythonfirst
15
class ListKernelSpecs(BaseIPythonApplication):
    description = """List installed kernel specifications."""
    kernel_spec_manager = Instance(KernelSpecManager)

    # Not all of the base aliases are meaningful (e.g. profile)
    aliases = {k: base_aliases[k] for k in ['ipython-dir', 'log-level']}
    flags = {'debug': base_flags['debug'],}

    def _kernel_spec_manager_default(self):
        return KernelSpecManager(parent=self, ipython_dir=self.ipython_dir)

    def start(self):
        # Print one kernel name per line, python kernels sorted first.
        print("Available kernels:")
        names = self.kernel_spec_manager.find_kernel_specs()
        for name in sorted(names, key=_pythonfirst):
            print(" %s" % name)
32
33
class InstallKernelSpec(BaseIPythonApplication):
    description = """Install a kernel specification directory."""
    kernel_spec_manager = Instance(KernelSpecManager)

    def _kernel_spec_manager_default(self):
        return KernelSpecManager(ipython_dir=self.ipython_dir)

    # Path of the kernel spec directory to install; set from the first
    # positional argument in parse_command_line.
    sourcedir = Unicode()
    kernel_name = Unicode("", config=True,
        help="Install the kernel spec with this name"
    )
    def _kernel_name_default(self):
        return os.path.basename(self.sourcedir)

    user = Bool(False, config=True,
        help="""
        Try to install the kernel spec to the per-user directory instead of
        the system or environment directory.
        """
    )
    replace = Bool(False, config=True,
        help="Replace any existing kernel spec with this name."
    )

    aliases = {'name': 'InstallKernelSpec.kernel_name'}
    # Not all of the base aliases are meaningful (e.g. profile)
    for k in ['ipython-dir', 'log-level']:
        aliases[k] = base_aliases[k]

    flags = {'user': ({'InstallKernelSpec': {'user': True}},
                      "Install to the per-user kernel registry"),
             'replace': ({'InstallKernelSpec': {'replace': True}},
                         "Replace any existing kernel spec with this name."),
             'debug': base_flags['debug'],
             }

    def parse_command_line(self, argv):
        super(InstallKernelSpec, self).parse_command_line(argv)
        # accept positional arg as the kernel source directory
        if self.extra_args:
            self.sourcedir = self.extra_args[0]
        else:
            print("No source directory specified.")
            self.exit(1)

    def start(self):
        try:
            self.kernel_spec_manager.install_kernel_spec(self.sourcedir,
                                                         kernel_name=self.kernel_name,
                                                         user=self.user,
                                                         replace=self.replace,
                                                         )
        except OSError as e:
            # Friendly messages for the two common failure modes;
            # any other OSError propagates via the final raise.
            if e.errno == errno.EACCES:
                print("Permission denied")
                self.exit(1)
            elif e.errno == errno.EEXIST:
                print("A kernel spec is already present at %s" % e.filename)
                self.exit(1)
            raise
93
class InstallNativeKernelSpec(BaseIPythonApplication):
    description = """Install the native kernel spec directory for this Python."""
    kernel_spec_manager = Instance(KernelSpecManager)

    def _kernel_spec_manager_default(self):
        return KernelSpecManager(ipython_dir=self.ipython_dir)

    user = Bool(False, config=True,
        help="""
        Try to install the kernel spec to the per-user directory instead of
        the system or environment directory.
        """
    )

    # Not all of the base aliases are meaningful (e.g. profile)
    aliases = {k: base_aliases[k] for k in ['ipython-dir', 'log-level']}
    flags = {'user': ({'InstallNativeKernelSpec': {'user': True}},
                      "Install to the per-user kernel registry"),
             'debug': base_flags['debug'],
             }

    def start(self):
        try:
            self.kernel_spec_manager.install_native_kernel_spec(user=self.user)
        except OSError as e:
            # e.g. permission denied on the system kernel directory
            self.exit(e)
120
class KernelSpecApp(Application):
    name = "ipython kernelspec"
    description = """Manage IPython kernel specifications."""

    # Map subcommand names to (app class, one-line summary).
    subcommands = Dict({
        'list': (ListKernelSpecs, ListKernelSpecs.description.splitlines()[0]),
        'install': (InstallKernelSpec, InstallKernelSpec.description.splitlines()[0]),
        'install-self': (InstallNativeKernelSpec, InstallNativeKernelSpec.description.splitlines()[0]),
    })

    aliases = {}
    flags = {}

    def start(self):
        """Dispatch to the selected subcommand, or print usage and exit."""
        if self.subapp is not None:
            return self.subapp.start()
        print("No subcommand specified. Must specify one of: %s"% list(self.subcommands))
        print()
        self.print_description()
        self.print_subcommands()
        self.exit(1)
@@ -1,226 +1,1 b''
1 """Utilities for launching kernels
1 from jupyter_client.launcher import *
2 """
3
4 # Copyright (c) IPython Development Team.
5 # Distributed under the terms of the Modified BSD License.
6
7 import os
8 import sys
9 from subprocess import Popen, PIPE
10
11 from IPython.utils.encoding import getdefaultencoding
12 from IPython.utils.py3compat import cast_bytes_py2
13
14
def swallow_argv(argv, aliases=None, flags=None):
    """strip frontend-specific aliases and flags from an argument list

    For use primarily in frontend apps that want to pass a subset of command-line
    arguments through to a subprocess, where frontend-specific flags and aliases
    should be removed from the list.

    Parameters
    ----------

    argv : list(str)
        The starting argv, to be filtered
    aliases : container of aliases (dict, list, set, etc.)
        The frontend-specific aliases to be removed
    flags : container of flags (dict, list, set, etc.)
        The frontend-specific flags to be removed

    Returns
    -------

    argv : list(str)
        The argv list, excluding flags and aliases that have been stripped
    """
    aliases = set() if aliases is None else aliases
    flags = set() if flags is None else flags

    # Work on a copy; removal is by value (first occurrence), which keeps
    # the original list untouched.
    remaining = list(argv)

    eat_next = False
    prev_may_be_flag = False
    for arg in argv:
        if arg == '--':
            # everything after a bare '--' passes through untouched
            break
        if eat_next:
            eat_next = False
            # The previous token was an alias without '=': this token is its
            # value — *unless* the alias doubles as a flag and this token
            # itself looks like an option, in which case keep it.
            if not (prev_may_be_flag and arg.startswith('-')):
                remaining.remove(arg)
            continue
        if not arg.startswith('-'):
            continue
        name, sep, _ = arg.lstrip('-').partition('=')
        # startswith allows abbreviated options, matching argparse behavior
        # (e.g. `--no-br` means `--no-browser` in the notebook).
        if any(alias.startswith(name) for alias in aliases):
            remaining.remove(arg)
            if not sep:
                # alias value arrives as the next token
                eat_next = True
                # could have been a flag that matches an alias, e.g. `existing`,
                # in which case we might not swallow the next arg
                prev_may_be_flag = name in flags
        elif not sep and any(flag.startswith(name) for flag in flags):
            # bare flag: strip it, but flags take no argument
            remaining.remove(arg)

    return remaining
79
80
def make_ipkernel_cmd(mod='IPython.kernel', executable=None, extra_arguments=None, **kw):
    """Build Popen command list for launching an IPython kernel.

    Parameters
    ----------
    mod : str, optional (default 'IPython.kernel')
        A string of an IPython module whose __main__ starts an IPython kernel

    executable : str, optional (default sys.executable)
        The Python executable to use for the kernel process.

    extra_arguments : list, optional
        A list of extra arguments to pass when executing the launch code.

    Returns
    -------

    A Popen command list
    """
    # Default changed from a mutable `[]` to None to avoid the shared
    # mutable-default pitfall; callers passing a list explicitly see no change.
    if extra_arguments is None:
        extra_arguments = []
    if executable is None:
        executable = sys.executable
    # '-f {connection_file}' is a template placeholder; the caller substitutes
    # the real connection file path before launching (see format_kernel_cmd).
    arguments = [ executable, '-m', mod, '-f', '{connection_file}' ]
    arguments.extend(extra_arguments)

    return arguments
106
107
def launch_kernel(cmd, stdin=None, stdout=None, stderr=None, env=None,
                  independent=False,
                  cwd=None,
                  **kw
                  ):
    """ Launches a localhost kernel, binding to the specified ports.

    Parameters
    ----------
    cmd : Popen list,
        A string of Python code that imports and executes a kernel entry point.

    stdin, stdout, stderr : optional (default None)
        Standards streams, as defined in subprocess.Popen.

    env : dict, optional
        Environment for the kernel process (default: a copy of os.environ).

    independent : bool, optional (default False)
        If set, the kernel process is guaranteed to survive if this process
        dies. If not set, an effort is made to ensure that the kernel is killed
        when this process dies. Note that in this case it is still good practice
        to kill kernels manually before exiting.

    cwd : path, optional
        The working dir of the kernel process (default: cwd of this process).

    **kw : optional
        NOTE(review): additional keyword arguments are accepted but not used
        in this body — confirm whether they should be forwarded to Popen.

    Returns
    -------

    Popen instance for the kernel subprocess
    """

    # Popen will fail (sometimes with a deadlock) if stdin, stdout, and stderr
    # are invalid. Unfortunately, there is in general no way to detect whether
    # they are valid. The following two blocks redirect them to (temporary)
    # pipes in certain important cases.

    # If this process has been backgrounded, our stdin is invalid. Since there
    # is no compelling reason for the kernel to inherit our stdin anyway, we'll
    # play it safe and always redirect.
    redirect_in = True
    _stdin = PIPE if stdin is None else stdin

    # If this process in running on pythonw, we know that stdin, stdout, and
    # stderr are all invalid.
    redirect_out = sys.executable.endswith('pythonw.exe')
    if redirect_out:
        blackhole = open(os.devnull, 'w')
        _stdout = blackhole if stdout is None else stdout
        _stderr = blackhole if stderr is None else stderr
    else:
        _stdout, _stderr = stdout, stderr

    env = env if (env is not None) else os.environ.copy()

    encoding = getdefaultencoding(prefer_stream=False)
    kwargs = dict(
        stdin=_stdin,
        stdout=_stdout,
        stderr=_stderr,
        cwd=cwd,
        env=env,
    )

    # Spawn a kernel.
    if sys.platform == 'win32':
        # Popen on Python 2 on Windows cannot handle unicode args or cwd
        cmd = [ cast_bytes_py2(c, encoding) for c in cmd ]
        if cwd:
            cwd = cast_bytes_py2(cwd, sys.getfilesystemencoding() or 'ascii')
            kwargs['cwd'] = cwd

        from IPython.kernel.zmq.parentpoller import ParentPollerWindows
        # Create a Win32 event for interrupting the kernel
        # and store it in an environment variable.
        interrupt_event = ParentPollerWindows.create_interrupt_event()
        env["JPY_INTERRUPT_EVENT"] = str(interrupt_event)
        # deprecated old env name:
        env["IPY_INTERRUPT_EVENT"] = env["JPY_INTERRUPT_EVENT"]

        try:
            # Python 3 name first, then the Python 2 fallback
            from _winapi import DuplicateHandle, GetCurrentProcess, \
                DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP
        except:
            from _subprocess import DuplicateHandle, GetCurrentProcess, \
                DUPLICATE_SAME_ACCESS, CREATE_NEW_PROCESS_GROUP
        # Launch the kernel process
        if independent:
            kwargs['creationflags'] = CREATE_NEW_PROCESS_GROUP
        else:
            # Duplicate our own process handle so the child can watch for
            # our exit via the JPY_PARENT_PID environment variable.
            pid = GetCurrentProcess()
            handle = DuplicateHandle(pid, pid, pid, 0,
                                     True, # Inheritable by new processes.
                                     DUPLICATE_SAME_ACCESS)
            env['JPY_PARENT_PID'] = str(int(handle))

        proc = Popen(cmd, **kwargs)

        # Attach the interrupt event to the Popen object so it can be used later.
        proc.win32_interrupt_event = interrupt_event

    else:
        if independent:
            # New session detaches the kernel from our process group.
            kwargs['preexec_fn'] = lambda: os.setsid()
        else:
            env['JPY_PARENT_PID'] = str(os.getpid())

        proc = Popen(cmd, **kwargs)

    # Clean up pipes created to work around Popen bug.
    if redirect_in:
        if stdin is None:
            proc.stdin.close()

    return proc
221
# Public API of this module.
__all__ = [
    'swallow_argv',
    'make_ipkernel_cmd',
    'launch_kernel',
]
@@ -1,442 +1,1 b''
1 """Base class to manage a running kernel"""
1 from jupyter_client.manager import *
2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
6 from __future__ import absolute_import
7
8 from contextlib import contextmanager
9 import os
10 import re
11 import signal
12 import sys
13 import time
14 import warnings
15 try:
16 from queue import Empty # Py 3
17 except ImportError:
18 from Queue import Empty # Py 2
19
20 import zmq
21
22 from IPython.utils.importstring import import_item
23 from IPython.utils.localinterfaces import is_local_ip, local_ips
24 from IPython.utils.path import get_ipython_dir
25 from IPython.utils.traitlets import (
26 Any, Instance, Unicode, List, Bool, Type, DottedObjectName
27 )
28 from IPython.kernel import (
29 launch_kernel,
30 kernelspec,
31 )
32 from .connect import ConnectionFileMixin
33 from .zmq.session import Session
34 from .managerabc import (
35 KernelManagerABC
36 )
37
38
class KernelManager(ConnectionFileMixin):
    """Manages a single kernel in a subprocess on this host.

    This version starts kernels with Popen.
    """

    # The PyZMQ Context to use for communication with the kernel.
    context = Instance(zmq.Context)
    def _context_default(self):
        return zmq.Context.instance()

    # the class to create with our `client` method
    client_class = DottedObjectName('IPython.kernel.blocking.BlockingKernelClient')
    client_factory = Type(allow_none=True)
    def _client_class_changed(self, name, old, new):
        # keep client_factory in sync with the dotted class name
        self.client_factory = import_item(str(new))

    # The kernel process with which the KernelManager is communicating.
    # generally a Popen instance
    kernel = Any()

    kernel_spec_manager = Instance(kernelspec.KernelSpecManager)

    def _kernel_spec_manager_default(self):
        return kernelspec.KernelSpecManager(ipython_dir=self.ipython_dir)

    # Name of the kernel spec to launch; defaults to the native kernel.
    kernel_name = Unicode(kernelspec.NATIVE_KERNEL_NAME)

    kernel_spec = Instance(kernelspec.KernelSpec)

    def _kernel_spec_default(self):
        return self.kernel_spec_manager.get_kernel_spec(self.kernel_name)

    def _kernel_name_changed(self, name, old, new):
        # 'python' is an alias for the native kernel name
        if new == 'python':
            self.kernel_name = kernelspec.NATIVE_KERNEL_NAME
            # This triggered another run of this function, so we can exit now
            return
        self.kernel_spec = self.kernel_spec_manager.get_kernel_spec(new)
        self.ipython_kernel = new in {'python', 'python2', 'python3'}

    kernel_cmd = List(Unicode, config=True,
        help="""DEPRECATED: Use kernel_name instead.

        The Popen Command to launch the kernel.
        Override this if you have a custom kernel.
        If kernel_cmd is specified in a configuration file,
        IPython does not pass any arguments to the kernel,
        because it cannot make any assumptions about the
        arguments that the kernel understands. In particular,
        this means that the kernel does not receive the
        option --debug if it given on the IPython command line.
        """
    )

    def _kernel_cmd_changed(self, name, old, new):
        warnings.warn("Setting kernel_cmd is deprecated, use kernel_spec to "
                      "start different kernels.")
        self.ipython_kernel = False

    # True when the managed kernel is an IPython kernel.
    ipython_kernel = Bool(True)

    ipython_dir = Unicode()
    def _ipython_dir_default(self):
        return get_ipython_dir()

    # Protected traits
    _launch_args = Any()
    _control_socket = Any()

    _restarter = Any()

    autorestart = Bool(False, config=True,
        help="""Should we autorestart the kernel if it dies."""
    )

    def __del__(self):
        # best-effort cleanup of socket and connection file on GC
        self._close_control_socket()
        self.cleanup_connection_file()

    #--------------------------------------------------------------------------
    # Kernel restarter
    #--------------------------------------------------------------------------

    def start_restarter(self):
        # no-op here; subclasses with a restarter override this
        pass

    def stop_restarter(self):
        # no-op here; subclasses with a restarter override this
        pass

    def add_restart_callback(self, callback, event='restart'):
        """register a callback to be called when a kernel is restarted"""
        if self._restarter is None:
            return
        self._restarter.add_callback(callback, event)

    def remove_restart_callback(self, callback, event='restart'):
        """unregister a callback to be called when a kernel is restarted"""
        if self._restarter is None:
            return
        self._restarter.remove_callback(callback, event)

    #--------------------------------------------------------------------------
    # create a Client connected to our Kernel
    #--------------------------------------------------------------------------

    def client(self, **kwargs):
        """Create a client configured to connect to our kernel"""
        if self.client_factory is None:
            self.client_factory = import_item(self.client_class)

        kw = {}
        kw.update(self.get_connection_info())
        kw.update(dict(
            connection_file=self.connection_file,
            session=self.session,
            parent=self,
        ))

        # add kwargs last, for manual overrides
        kw.update(kwargs)
        return self.client_factory(**kw)

    #--------------------------------------------------------------------------
    # Kernel management
    #--------------------------------------------------------------------------

    def format_kernel_cmd(self, extra_arguments=None):
        """replace templated args (e.g. {connection_file})"""
        extra_arguments = extra_arguments or []
        # deprecated kernel_cmd takes precedence over the kernel spec
        if self.kernel_cmd:
            cmd = self.kernel_cmd + extra_arguments
        else:
            cmd = self.kernel_spec.argv + extra_arguments

        ns = dict(connection_file=self.connection_file)
        # NOTE: assumes start_kernel() has populated _launch_args
        ns.update(self._launch_args)

        pat = re.compile(r'\{([A-Za-z0-9_]+)\}')
        def from_ns(match):
            """Get the key out of ns if it's there, otherwise no change."""
            return ns.get(match.group(1), match.group())

        return [ pat.sub(from_ns, arg) for arg in cmd ]

    def _launch_kernel(self, kernel_cmd, **kw):
        """actually launch the kernel

        override in a subclass to launch kernel subprocesses differently
        """
        return launch_kernel(kernel_cmd, **kw)

    # Control socket used for polite kernel shutdown

    def _connect_control_socket(self):
        if self._control_socket is None:
            self._control_socket = self.connect_control()
            self._control_socket.linger = 100

    def _close_control_socket(self):
        if self._control_socket is None:
            return
        self._control_socket.close()
        self._control_socket = None

    def start_kernel(self, **kw):
        """Starts a kernel on this host in a separate process.

        If random ports (port=0) are being used, this method must be called
        before the channels are created.

        Parameters
        ----------
        **kw : optional
            keyword arguments that are passed down to build the kernel_cmd
            and launching the kernel (e.g. Popen kwargs).
        """
        if self.transport == 'tcp' and not is_local_ip(self.ip):
            raise RuntimeError("Can only launch a kernel on a local interface. "
                               "Make sure that the '*_address' attributes are "
                               "configured properly. "
                               "Currently valid addresses are: %s" % local_ips()
                               )

        # write connection file / get default ports
        self.write_connection_file()

        # save kwargs for use in restart
        self._launch_args = kw.copy()
        # build the Popen cmd
        extra_arguments = kw.pop('extra_arguments', [])
        kernel_cmd = self.format_kernel_cmd(extra_arguments=extra_arguments)
        if self.kernel_cmd:
            # If kernel_cmd has been set manually, don't refer to a kernel spec
            env = os.environ
        else:
            # Environment variables from kernel spec are added to os.environ
            env = os.environ.copy()
            env.update(self.kernel_spec.env or {})
        # launch the kernel subprocess
        self.kernel = self._launch_kernel(kernel_cmd, env=env,
                                          **kw)
        self.start_restarter()
        self._connect_control_socket()

    def request_shutdown(self, restart=False):
        """Send a shutdown request via control channel

        On Windows, this just kills kernels instead, because the shutdown
        messages don't work.
        """
        content = dict(restart=restart)
        msg = self.session.msg("shutdown_request", content=content)
        self.session.send(self._control_socket, msg)

    def finish_shutdown(self, waittime=1, pollinterval=0.1):
        """Wait for kernel shutdown, then kill process if it doesn't shutdown.

        This does not send shutdown requests - use :meth:`request_shutdown`
        first.
        """
        # poll until dead or timeout; the for/else fires only on timeout
        for i in range(int(waittime/pollinterval)):
            if self.is_alive():
                time.sleep(pollinterval)
            else:
                break
        else:
            # OK, we've waited long enough.
            if self.has_kernel:
                self._kill_kernel()

    def cleanup(self, connection_file=True):
        """Clean up resources when the kernel is shut down"""
        if connection_file:
            self.cleanup_connection_file()

        self.cleanup_ipc_files()
        self._close_control_socket()

    def shutdown_kernel(self, now=False, restart=False):
        """Attempts to the stop the kernel process cleanly.

        This attempts to shutdown the kernels cleanly by:

        1. Sending it a shutdown message over the shell channel.
        2. If that fails, the kernel is shutdown forcibly by sending it
           a signal.

        Parameters
        ----------
        now : bool
            Should the kernel be forcible killed *now*. This skips the
            first, nice shutdown attempt.
        restart: bool
            Will this kernel be restarted after it is shutdown. When this
            is True, connection files will not be cleaned up.
        """
        # Stop monitoring for restarting while we shutdown.
        self.stop_restarter()

        if now:
            self._kill_kernel()
        else:
            self.request_shutdown(restart=restart)
            # Don't send any additional kernel kill messages immediately, to give
            # the kernel a chance to properly execute shutdown actions. Wait for at
            # most 1s, checking every 0.1s.
            self.finish_shutdown()

        self.cleanup(connection_file=not restart)

    def restart_kernel(self, now=False, **kw):
        """Restarts a kernel with the arguments that were used to launch it.

        If the old kernel was launched with random ports, the same ports will be
        used for the new kernel. The same connection file is used again.

        Parameters
        ----------
        now : bool, optional
            If True, the kernel is forcefully restarted *immediately*, without
            having a chance to do any cleanup action.  Otherwise the kernel is
            given 1s to clean up before a forceful restart is issued.

            In all cases the kernel is restarted, the only difference is whether
            it is given a chance to perform a clean shutdown or not.

        **kw : optional
            Any options specified here will overwrite those used to launch the
            kernel.
        """
        if self._launch_args is None:
            raise RuntimeError("Cannot restart the kernel. "
                               "No previous call to 'start_kernel'.")
        else:
            # Stop currently running kernel.
            self.shutdown_kernel(now=now, restart=True)

            # Start new kernel.
            self._launch_args.update(kw)
            self.start_kernel(**self._launch_args)

    @property
    def has_kernel(self):
        """Has a kernel been started that we are managing."""
        return self.kernel is not None

    def _kill_kernel(self):
        """Kill the running kernel.

        This is a private method, callers should use shutdown_kernel(now=True).
        """
        if self.has_kernel:

            # Signal the kernel to terminate (sends SIGKILL on Unix and calls
            # TerminateProcess() on Win32).
            try:
                self.kernel.kill()
            except OSError as e:
                # In Windows, we will get an Access Denied error if the process
                # has already terminated. Ignore it.
                if sys.platform == 'win32':
                    if e.winerror != 5:
                        raise
                # On Unix, we may get an ESRCH error if the process has already
                # terminated. Ignore it.
                else:
                    from errno import ESRCH
                    if e.errno != ESRCH:
                        raise

            # Block until the kernel terminates.
            self.kernel.wait()
            self.kernel = None
        else:
            raise RuntimeError("Cannot kill kernel. No kernel is running!")

    def interrupt_kernel(self):
        """Interrupts the kernel by sending it a signal.

        Unlike ``signal_kernel``, this operation is well supported on all
        platforms.
        """
        if self.has_kernel:
            if sys.platform == 'win32':
                from .zmq.parentpoller import ParentPollerWindows as Poller
                Poller.send_interrupt(self.kernel.win32_interrupt_event)
            else:
                self.kernel.send_signal(signal.SIGINT)
        else:
            raise RuntimeError("Cannot interrupt kernel. No kernel is running!")

    def signal_kernel(self, signum):
        """Sends a signal to the kernel.

        Note that since only SIGTERM is supported on Windows, this function is
        only useful on Unix systems.
        """
        if self.has_kernel:
            self.kernel.send_signal(signum)
        else:
            raise RuntimeError("Cannot signal kernel. No kernel is running!")

    def is_alive(self):
        """Is the kernel process still running?"""
        if self.has_kernel:
            if self.kernel.poll() is None:
                return True
            else:
                return False
        else:
            # we don't have a kernel
            return False
412
413
414 KernelManagerABC.register(KernelManager)
415
416
def start_new_kernel(startup_timeout=60, kernel_name='python', **kwargs):
    """Start a new kernel, and return its Manager and Client"""
    # NOTE(review): startup_timeout is accepted but unused in this body —
    # confirm whether it should be forwarded to wait_for_ready().
    manager = KernelManager(kernel_name=kernel_name)
    manager.start_kernel(**kwargs)
    client = manager.client()
    client.start_channels()
    client.wait_for_ready()
    return manager, client
426
@contextmanager
def run_kernel(**kwargs):
    """Context manager to create a kernel in a subprocess.

    The kernel is shut down when the context exits.

    Returns
    -------
    kernel_client: connected KernelClient instance
    """
    manager, client = start_new_kernel(**kwargs)
    try:
        yield client
    finally:
        # always tear down channels and the subprocess, even on error
        client.stop_channels()
        manager.shutdown_kernel(now=True)
@@ -1,53 +1,1 b''
1 """Abstract base class for kernel managers."""
1 from jupyter_client.managerabc import *
2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
6 import abc
7
8 from IPython.utils.py3compat import with_metaclass
9
10
11 class KernelManagerABC(with_metaclass(abc.ABCMeta, object)):
12 """KernelManager ABC.
13
14 The docstrings for this class can be found in the base implementation:
15
16 `IPython.kernel.kernelmanager.KernelManager`
17 """
18
19 @abc.abstractproperty
20 def kernel(self):
21 pass
22
23 #--------------------------------------------------------------------------
24 # Kernel management
25 #--------------------------------------------------------------------------
26
27 @abc.abstractmethod
28 def start_kernel(self, **kw):
29 pass
30
31 @abc.abstractmethod
32 def shutdown_kernel(self, now=False, restart=False):
33 pass
34
35 @abc.abstractmethod
36 def restart_kernel(self, now=False, **kw):
37 pass
38
39 @abc.abstractproperty
40 def has_kernel(self):
41 pass
42
43 @abc.abstractmethod
44 def interrupt_kernel(self):
45 pass
46
47 @abc.abstractmethod
48 def signal_kernel(self, signum):
49 pass
50
51 @abc.abstractmethod
52 def is_alive(self):
53 pass
@@ -1,319 +1,1 b''
1 """A kernel manager for multiple kernels"""
1 from jupyter_client.multikernelmanager import *
2
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
6 from __future__ import absolute_import
7
8 import os
9 import uuid
10
11 import zmq
12
13 from IPython.config.configurable import LoggingConfigurable
14 from IPython.utils.importstring import import_item
15 from IPython.utils.traitlets import (
16 Instance, Dict, List, Unicode, Any, DottedObjectName
17 )
18 from IPython.utils.py3compat import unicode_type
19
20 from .kernelspec import NATIVE_KERNEL_NAME
21
22 class DuplicateKernelError(Exception):
23 pass
24
25
26 def kernel_method(f):
27 """decorator for proxying MKM.method(kernel_id) to individual KMs by ID"""
28 def wrapped(self, kernel_id, *args, **kwargs):
29 # get the kernel
30 km = self.get_kernel(kernel_id)
31 method = getattr(km, f.__name__)
32 # call the kernel's method
33 r = method(*args, **kwargs)
34 # last thing, call anything defined in the actual class method
35 # such as logging messages
36 f(self, kernel_id, *args, **kwargs)
37 # return the method result
38 return r
39 return wrapped
40
41
42 class MultiKernelManager(LoggingConfigurable):
43 """A class for managing multiple kernels."""
44
45 ipython_kernel_argv = List(Unicode)
46
47 default_kernel_name = Unicode(NATIVE_KERNEL_NAME, config=True,
48 help="The name of the default kernel to start"
49 )
50
51 kernel_manager_class = DottedObjectName(
52 "IPython.kernel.ioloop.IOLoopKernelManager", config=True,
53 help="""The kernel manager class. This is configurable to allow
54 subclassing of the KernelManager for customized behavior.
55 """
56 )
57 def _kernel_manager_class_changed(self, name, old, new):
58 self.kernel_manager_factory = import_item(new)
59
60 kernel_manager_factory = Any(help="this is kernel_manager_class after import")
61 def _kernel_manager_factory_default(self):
62 return import_item(self.kernel_manager_class)
63
64 context = Instance('zmq.Context')
65 def _context_default(self):
66 return zmq.Context.instance()
67
68 connection_dir = Unicode('')
69
70 _kernels = Dict()
71
72 def list_kernel_ids(self):
73 """Return a list of the kernel ids of the active kernels."""
74 # Create a copy so we can iterate over kernels in operations
75 # that delete keys.
76 return list(self._kernels.keys())
77
78 def __len__(self):
79 """Return the number of running kernels."""
80 return len(self.list_kernel_ids())
81
82 def __contains__(self, kernel_id):
83 return kernel_id in self._kernels
84
85 def start_kernel(self, kernel_name=None, **kwargs):
86 """Start a new kernel.
87
88 The caller can pick a kernel_id by passing one in as a keyword arg,
89 otherwise one will be picked using a uuid.
90
91 To silence the kernel's stdout/stderr, call this using::
92
93 km.start_kernel(stdout=PIPE, stderr=PIPE)
94
95 """
96 kernel_id = kwargs.pop('kernel_id', unicode_type(uuid.uuid4()))
97 if kernel_id in self:
98 raise DuplicateKernelError('Kernel already exists: %s' % kernel_id)
99
100 if kernel_name is None:
101 kernel_name = self.default_kernel_name
102 # kernel_manager_factory is the constructor for the KernelManager
103 # subclass we are using. It can be configured as any Configurable,
104 # including things like its transport and ip.
105 km = self.kernel_manager_factory(connection_file=os.path.join(
106 self.connection_dir, "kernel-%s.json" % kernel_id),
107 parent=self, autorestart=True, log=self.log, kernel_name=kernel_name,
108 )
109 # FIXME: remove special treatment of IPython kernels
110 if km.ipython_kernel:
111 kwargs.setdefault('extra_arguments', self.ipython_kernel_argv)
112 km.start_kernel(**kwargs)
113 self._kernels[kernel_id] = km
114 return kernel_id
115
116 @kernel_method
117 def shutdown_kernel(self, kernel_id, now=False, restart=False):
118 """Shutdown a kernel by its kernel uuid.
119
120 Parameters
121 ==========
122 kernel_id : uuid
123 The id of the kernel to shutdown.
124 now : bool
125 Should the kernel be shutdown forcibly using a signal.
126 restart : bool
127 Will the kernel be restarted?
128 """
129 self.log.info("Kernel shutdown: %s" % kernel_id)
130 self.remove_kernel(kernel_id)
131
132 @kernel_method
133 def request_shutdown(self, kernel_id, restart=False):
134 """Ask a kernel to shut down by its kernel uuid"""
135
136 @kernel_method
137 def finish_shutdown(self, kernel_id, waittime=1, pollinterval=0.1):
138 """Wait for a kernel to finish shutting down, and kill it if it doesn't
139 """
140 self.log.info("Kernel shutdown: %s" % kernel_id)
141
142 @kernel_method
143 def cleanup(self, kernel_id, connection_file=True):
144 """Clean up a kernel's resources"""
145
146 def remove_kernel(self, kernel_id):
147 """remove a kernel from our mapping.
148
149 Mainly so that a kernel can be removed if it is already dead,
150 without having to call shutdown_kernel.
151
152 The kernel object is returned.
153 """
154 return self._kernels.pop(kernel_id)
155
156 def shutdown_all(self, now=False):
157 """Shutdown all kernels."""
158 kids = self.list_kernel_ids()
159 for kid in kids:
160 self.request_shutdown(kid)
161 for kid in kids:
162 self.finish_shutdown(kid)
163 self.cleanup(kid)
164 self.remove_kernel(kid)
165
166 @kernel_method
167 def interrupt_kernel(self, kernel_id):
168 """Interrupt (SIGINT) the kernel by its uuid.
169
170 Parameters
171 ==========
172 kernel_id : uuid
173 The id of the kernel to interrupt.
174 """
175 self.log.info("Kernel interrupted: %s" % kernel_id)
176
177 @kernel_method
178 def signal_kernel(self, kernel_id, signum):
179 """Sends a signal to the kernel by its uuid.
180
181 Note that since only SIGTERM is supported on Windows, this function
182 is only useful on Unix systems.
183
184 Parameters
185 ==========
186 kernel_id : uuid
187 The id of the kernel to signal.
188 """
189 self.log.info("Signaled Kernel %s with %s" % (kernel_id, signum))
190
191 @kernel_method
192 def restart_kernel(self, kernel_id, now=False):
193 """Restart a kernel by its uuid, keeping the same ports.
194
195 Parameters
196 ==========
197 kernel_id : uuid
198 The id of the kernel to interrupt.
199 """
200 self.log.info("Kernel restarted: %s" % kernel_id)
201
202 @kernel_method
203 def is_alive(self, kernel_id):
204 """Is the kernel alive.
205
206 This calls KernelManager.is_alive() which calls Popen.poll on the
207 actual kernel subprocess.
208
209 Parameters
210 ==========
211 kernel_id : uuid
212 The id of the kernel.
213 """
214
215 def _check_kernel_id(self, kernel_id):
216 """check that a kernel id is valid"""
217 if kernel_id not in self:
218 raise KeyError("Kernel with id not found: %s" % kernel_id)
219
220 def get_kernel(self, kernel_id):
221 """Get the single KernelManager object for a kernel by its uuid.
222
223 Parameters
224 ==========
225 kernel_id : uuid
226 The id of the kernel.
227 """
228 self._check_kernel_id(kernel_id)
229 return self._kernels[kernel_id]
230
231 @kernel_method
232 def add_restart_callback(self, kernel_id, callback, event='restart'):
233 """add a callback for the KernelRestarter"""
234
235 @kernel_method
236 def remove_restart_callback(self, kernel_id, callback, event='restart'):
237 """remove a callback for the KernelRestarter"""
238
239 @kernel_method
240 def get_connection_info(self, kernel_id):
241 """Return a dictionary of connection data for a kernel.
242
243 Parameters
244 ==========
245 kernel_id : uuid
246 The id of the kernel.
247
248 Returns
249 =======
250 connection_dict : dict
251 A dict of the information needed to connect to a kernel.
252 This includes the ip address and the integer port
253 numbers of the different channels (stdin_port, iopub_port,
254 shell_port, hb_port).
255 """
256
257 @kernel_method
258 def connect_iopub(self, kernel_id, identity=None):
259 """Return a zmq Socket connected to the iopub channel.
260
261 Parameters
262 ==========
263 kernel_id : uuid
264 The id of the kernel
265 identity : bytes (optional)
266 The zmq identity of the socket
267
268 Returns
269 =======
270 stream : zmq Socket or ZMQStream
271 """
272
273 @kernel_method
274 def connect_shell(self, kernel_id, identity=None):
275 """Return a zmq Socket connected to the shell channel.
276
277 Parameters
278 ==========
279 kernel_id : uuid
280 The id of the kernel
281 identity : bytes (optional)
282 The zmq identity of the socket
283
284 Returns
285 =======
286 stream : zmq Socket or ZMQStream
287 """
288
289 @kernel_method
290 def connect_stdin(self, kernel_id, identity=None):
291 """Return a zmq Socket connected to the stdin channel.
292
293 Parameters
294 ==========
295 kernel_id : uuid
296 The id of the kernel
297 identity : bytes (optional)
298 The zmq identity of the socket
299
300 Returns
301 =======
302 stream : zmq Socket or ZMQStream
303 """
304
305 @kernel_method
306 def connect_hb(self, kernel_id, identity=None):
307 """Return a zmq Socket connected to the hb channel.
308
309 Parameters
310 ==========
311 kernel_id : uuid
312 The id of the kernel
313 identity : bytes (optional)
314 The zmq identity of the socket
315
316 Returns
317 =======
318 stream : zmq Socket or ZMQStream
319 """
@@ -1,111 +1,1 b''
1 """A basic kernel monitor with autorestarting.
1 from jupyter_client.restarter import *
2
3 This watches a kernel's state using KernelManager.is_alive and auto
4 restarts the kernel if it dies.
5
6 It is an incomplete base class, and must be subclassed.
7 """
8
9 # Copyright (c) IPython Development Team.
10 # Distributed under the terms of the Modified BSD License.
11
12 from IPython.config.configurable import LoggingConfigurable
13 from IPython.utils.traitlets import (
14 Instance, Float, Dict, Bool, Integer,
15 )
16
17
18 class KernelRestarter(LoggingConfigurable):
19 """Monitor and autorestart a kernel."""
20
21 kernel_manager = Instance('IPython.kernel.KernelManager')
22
23 debug = Bool(False, config=True,
24 help="""Whether to include every poll event in debugging output.
25
26 Has to be set explicitly, because there will be *a lot* of output.
27 """
28 )
29
30 time_to_dead = Float(3.0, config=True,
31 help="""Kernel heartbeat interval in seconds."""
32 )
33
34 restart_limit = Integer(5, config=True,
35 help="""The number of consecutive autorestarts before the kernel is presumed dead."""
36 )
37 _restarting = Bool(False)
38 _restart_count = Integer(0)
39
40 callbacks = Dict()
41 def _callbacks_default(self):
42 return dict(restart=[], dead=[])
43
44 def start(self):
45 """Start the polling of the kernel."""
46 raise NotImplementedError("Must be implemented in a subclass")
47
48 def stop(self):
49 """Stop the kernel polling."""
50 raise NotImplementedError("Must be implemented in a subclass")
51
52 def add_callback(self, f, event='restart'):
53 """register a callback to fire on a particular event
54
55 Possible values for event:
56
57 'restart' (default): kernel has died, and will be restarted.
58 'dead': restart has failed, kernel will be left dead.
59
60 """
61 self.callbacks[event].append(f)
62
63 def remove_callback(self, f, event='restart'):
64 """unregister a callback to fire on a particular event
65
66 Possible values for event:
67
68 'restart' (default): kernel has died, and will be restarted.
69 'dead': restart has failed, kernel will be left dead.
70
71 """
72 try:
73 self.callbacks[event].remove(f)
74 except ValueError:
75 pass
76
77 def _fire_callbacks(self, event):
78 """fire our callbacks for a particular event"""
79 for callback in self.callbacks[event]:
80 try:
81 callback()
82 except Exception as e:
83 self.log.error("KernelRestarter: %s callback %r failed", event, callback, exc_info=True)
84
85 def poll(self):
86 if self.debug:
87 self.log.debug('Polling kernel...')
88 if not self.kernel_manager.is_alive():
89 if self._restarting:
90 self._restart_count += 1
91 else:
92 self._restart_count = 1
93
94 if self._restart_count >= self.restart_limit:
95 self.log.warn("KernelRestarter: restart failed")
96 self._fire_callbacks('dead')
97 self._restarting = False
98 self._restart_count = 0
99 self.stop()
100 else:
101 self.log.info('KernelRestarter: restarting kernel (%i/%i)',
102 self._restart_count,
103 self.restart_limit
104 )
105 self._fire_callbacks('restart')
106 self.kernel_manager.restart_kernel(now=True)
107 self._restarting = True
108 else:
109 if self._restarting:
110 self.log.debug("KernelRestarter: restart apparently succeeded")
111 self._restarting = False
@@ -1,230 +1,1 b''
1 """ Defines a KernelClient that provides thread-safe sockets with async callbacks on message replies.
1 from jupyter_client.threaded import *
2 """
3 from __future__ import absolute_import
4 import atexit
5 import errno
6 from threading import Thread
7 import time
8
9 import zmq
10 # import ZMQError in top-level namespace, to avoid ugly attribute-error messages
11 # during garbage collection of threads at exit:
12 from zmq import ZMQError
13 from zmq.eventloop import ioloop, zmqstream
14
15 # Local imports
16 from IPython.utils.traitlets import Type, Instance
17 from IPython.kernel.channels import HBChannel
18 from IPython.kernel import KernelClient
19 from IPython.kernel.channels import HBChannel
20
21 class ThreadedZMQSocketChannel(object):
22 """A ZMQ socket invoking a callback in the ioloop"""
23 session = None
24 socket = None
25 ioloop = None
26 stream = None
27 _inspect = None
28
29 def __init__(self, socket, session, loop):
30 """Create a channel.
31
32 Parameters
33 ----------
34 socket : :class:`zmq.Socket`
35 The ZMQ socket to use.
36 session : :class:`session.Session`
37 The session to use.
38 loop
39 A pyzmq ioloop to connect the socket to using a ZMQStream
40 """
41 super(ThreadedZMQSocketChannel, self).__init__()
42
43 self.socket = socket
44 self.session = session
45 self.ioloop = loop
46
47 self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
48 self.stream.on_recv(self._handle_recv)
49
50 _is_alive = False
51 def is_alive(self):
52 return self._is_alive
53
54 def start(self):
55 self._is_alive = True
56
57 def stop(self):
58 self._is_alive = False
59
60 def close(self):
61 if self.socket is not None:
62 try:
63 self.socket.close(linger=0)
64 except Exception:
65 pass
66 self.socket = None
67
68 def send(self, msg):
69 """Queue a message to be sent from the IOLoop's thread.
70
71 Parameters
72 ----------
73 msg : message to send
74
75 This is threadsafe, as it uses IOLoop.add_callback to give the loop's
76 thread control of the action.
77 """
78 def thread_send():
79 self.session.send(self.stream, msg)
80 self.ioloop.add_callback(thread_send)
81
82 def _handle_recv(self, msg):
83 """Callback for stream.on_recv.
84
85 Unpacks message, and calls handlers with it.
86 """
87 ident,smsg = self.session.feed_identities(msg)
88 msg = self.session.deserialize(smsg)
89 # let client inspect messages
90 if self._inspect:
91 self._inspect(msg)
92 self.call_handlers(msg)
93
94 def call_handlers(self, msg):
95 """This method is called in the ioloop thread when a message arrives.
96
97 Subclasses should override this method to handle incoming messages.
98 It is important to remember that this method is called in the thread
99 so that some logic must be done to ensure that the application level
100 handlers are called in the application thread.
101 """
102 pass
103
104 def process_events(self):
105 """Subclasses should override this with a method
106 processing any pending GUI events.
107 """
108 pass
109
110
111 def flush(self, timeout=1.0):
112 """Immediately processes all pending messages on this channel.
113
114 This is only used for the IOPub channel.
115
116 Callers should use this method to ensure that :meth:`call_handlers`
117 has been called for all messages that have been received on the
118 0MQ SUB socket of this channel.
119
120 This method is thread safe.
121
122 Parameters
123 ----------
124 timeout : float, optional
125 The maximum amount of time to spend flushing, in seconds. The
126 default is one second.
127 """
128 # We do the IOLoop callback process twice to ensure that the IOLoop
129 # gets to perform at least one full poll.
130 stop_time = time.time() + timeout
131 for i in range(2):
132 self._flushed = False
133 self.ioloop.add_callback(self._flush)
134 while not self._flushed and time.time() < stop_time:
135 time.sleep(0.01)
136
137 def _flush(self):
138 """Callback for :method:`self.flush`."""
139 self.stream.flush()
140 self._flushed = True
141
142
143 class IOLoopThread(Thread):
144 """Run a pyzmq ioloop in a thread to send and receive messages
145 """
146 def __init__(self, loop):
147 super(IOLoopThread, self).__init__()
148 self.daemon = True
149 atexit.register(self._notice_exit)
150 self.ioloop = loop or ioloop.IOLoop()
151
152 def _notice_exit(self):
153 self._exiting = True
154
155 def run(self):
156 """Run my loop, ignoring EINTR events in the poller"""
157 while True:
158 try:
159 self.ioloop.start()
160 except ZMQError as e:
161 if e.errno == errno.EINTR:
162 continue
163 else:
164 raise
165 except Exception:
166 if self._exiting:
167 break
168 else:
169 raise
170 else:
171 break
172
173 def stop(self):
174 """Stop the channel's event loop and join its thread.
175
176 This calls :meth:`~threading.Thread.join` and returns when the thread
177 terminates. :class:`RuntimeError` will be raised if
178 :meth:`~threading.Thread.start` is called again.
179 """
180 if self.ioloop is not None:
181 self.ioloop.stop()
182 self.join()
183 self.close()
184
185 def close(self):
186 if self.ioloop is not None:
187 try:
188 self.ioloop.close(all_fds=True)
189 except Exception:
190 pass
191
192
193 class ThreadedKernelClient(KernelClient):
194 """ A KernelClient that provides thread-safe sockets with async callbacks on message replies.
195 """
196
197 _ioloop = None
198 @property
199 def ioloop(self):
200 if self._ioloop is None:
201 self._ioloop = ioloop.IOLoop()
202 return self._ioloop
203
204 ioloop_thread = Instance(IOLoopThread)
205
206 def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
207 if shell:
208 self.shell_channel._inspect = self._check_kernel_info_reply
209
210 self.ioloop_thread = IOLoopThread(self.ioloop)
211 self.ioloop_thread.start()
212
213 super(ThreadedKernelClient, self).start_channels(shell, iopub, stdin, hb)
214
215 def _check_kernel_info_reply(self, msg):
216 """This is run in the ioloop thread when the kernel info reply is recieved
217 """
218 if msg['msg_type'] == 'kernel_info_reply':
219 self._handle_kernel_info_reply(msg)
220 self.shell_channel._inspect = None
221
222 def stop_channels(self):
223 super(ThreadedKernelClient, self).stop_channels()
224 if self.ioloop_thread.is_alive():
225 self.ioloop_thread.stop()
226
227 iopub_channel_class = Type(ThreadedZMQSocketChannel)
228 shell_channel_class = Type(ThreadedZMQSocketChannel)
229 stdin_channel_class = Type(ThreadedZMQSocketChannel)
230 hb_channel_class = Type(HBChannel)
@@ -172,6 +172,8 b' class TestSection(object):'
172
172
173 shims = {
173 shims = {
174 'parallel': 'ipython_parallel',
174 'parallel': 'ipython_parallel',
175 'kernel': 'ipython_kernel',
176 'kernel.inprocess': 'ipython_kernel.inprocess',
175 }
177 }
176
178
177 # Name -> (include, exclude, dependencies_met)
179 # Name -> (include, exclude, dependencies_met)
@@ -230,10 +232,10 b" sec.requires('zmq')"
230 # The in-process kernel tests are done in a separate section
232 # The in-process kernel tests are done in a separate section
231 sec.exclude('inprocess')
233 sec.exclude('inprocess')
232 # importing gtk sets the default encoding, which we want to avoid
234 # importing gtk sets the default encoding, which we want to avoid
233 sec.exclude('zmq.gui.gtkembed')
235 sec.exclude('gui.gtkembed')
234 sec.exclude('zmq.gui.gtk3embed')
236 sec.exclude('gui.gtk3embed')
235 if not have['matplotlib']:
237 if not have['matplotlib']:
236 sec.exclude('zmq.pylab')
238 sec.exclude('pylab')
237
239
238 # kernel.inprocess:
240 # kernel.inprocess:
239 test_sections['kernel.inprocess'].requires('zmq')
241 test_sections['kernel.inprocess'].requires('zmq')
@@ -3,7 +3,6 b''
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import sys
7 import types
6 import types
8
7
9 class ShimModule(types.ModuleType):
8 class ShimModule(types.ModuleType):
@@ -11,8 +10,11 b' class ShimModule(types.ModuleType):'
11 def __init__(self, *args, **kwargs):
10 def __init__(self, *args, **kwargs):
12 self._mirror = kwargs.pop("mirror")
11 self._mirror = kwargs.pop("mirror")
13 super(ShimModule, self).__init__(*args, **kwargs)
12 super(ShimModule, self).__init__(*args, **kwargs)
14 if sys.version_info >= (3,4):
13
15 self.__spec__ = __import__(self._mirror).__spec__
14 @property
15 def __spec__(self):
16 """Don't produce __spec__ until requested"""
17 return __import__(self._mirror).__spec__
16
18
17 def __getattr__(self, key):
19 def __getattr__(self, key):
18 # Use the equivalent of import_item(name), see below
20 # Use the equivalent of import_item(name), see below
1 NO CONTENT: file renamed from IPython/kernel/comm/__init__.py to ipython_kernel/comm/__init__.py
NO CONTENT: file renamed from IPython/kernel/comm/__init__.py to ipython_kernel/comm/__init__.py
@@ -9,7 +9,7 b' import uuid'
9 from zmq.eventloop.ioloop import IOLoop
9 from zmq.eventloop.ioloop import IOLoop
10
10
11 from IPython.config import LoggingConfigurable
11 from IPython.config import LoggingConfigurable
12 from IPython.kernel.zmq.kernelbase import Kernel
12 from ipython_kernel.kernelbase import Kernel
13
13
14 from IPython.utils.jsonutil import json_clean
14 from IPython.utils.jsonutil import json_clean
15 from IPython.utils.traitlets import Instance, Unicode, Bytes, Bool, Dict, Any
15 from IPython.utils.traitlets import Instance, Unicode, Bytes, Bool, Dict, Any
@@ -20,40 +20,40 b' class Comm(LoggingConfigurable):'
20 # If this is instantiated by a non-IPython kernel, shell will be None
20 # If this is instantiated by a non-IPython kernel, shell will be None
21 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
21 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
22 allow_none=True)
22 allow_none=True)
23 kernel = Instance('IPython.kernel.zmq.kernelbase.Kernel')
23 kernel = Instance('ipython_kernel.kernelbase.Kernel')
24 def _kernel_default(self):
24 def _kernel_default(self):
25 if Kernel.initialized():
25 if Kernel.initialized():
26 return Kernel.instance()
26 return Kernel.instance()
27
27
28 iopub_socket = Any()
28 iopub_socket = Any()
29 def _iopub_socket_default(self):
29 def _iopub_socket_default(self):
30 return self.kernel.iopub_socket
30 return self.kernel.iopub_socket
31 session = Instance('IPython.kernel.zmq.session.Session')
31 session = Instance('ipython_kernel.session.Session')
32 def _session_default(self):
32 def _session_default(self):
33 if self.kernel is not None:
33 if self.kernel is not None:
34 return self.kernel.session
34 return self.kernel.session
35
35
36 target_name = Unicode('comm')
36 target_name = Unicode('comm')
37 target_module = Unicode(None, allow_none=True, help="""requirejs module from
37 target_module = Unicode(None, allow_none=True, help="""requirejs module from
38 which to load comm target.""")
38 which to load comm target.""")
39
39
40 topic = Bytes()
40 topic = Bytes()
41 def _topic_default(self):
41 def _topic_default(self):
42 return ('comm-%s' % self.comm_id).encode('ascii')
42 return ('comm-%s' % self.comm_id).encode('ascii')
43
43
44 _open_data = Dict(help="data dict, if any, to be included in comm_open")
44 _open_data = Dict(help="data dict, if any, to be included in comm_open")
45 _close_data = Dict(help="data dict, if any, to be included in comm_close")
45 _close_data = Dict(help="data dict, if any, to be included in comm_close")
46
46
47 _msg_callback = Any()
47 _msg_callback = Any()
48 _close_callback = Any()
48 _close_callback = Any()
49
49
50 _closed = Bool(True)
50 _closed = Bool(True)
51 comm_id = Unicode()
51 comm_id = Unicode()
52 def _comm_id_default(self):
52 def _comm_id_default(self):
53 return uuid.uuid4().hex
53 return uuid.uuid4().hex
54
54
55 primary = Bool(True, help="Am I the primary or secondary Comm?")
55 primary = Bool(True, help="Am I the primary or secondary Comm?")
56
56
57 def __init__(self, target_name='', data=None, **kwargs):
57 def __init__(self, target_name='', data=None, **kwargs):
58 if target_name:
58 if target_name:
59 kwargs['target_name'] = target_name
59 kwargs['target_name'] = target_name
@@ -63,7 +63,7 b' class Comm(LoggingConfigurable):'
63 self.open(data)
63 self.open(data)
64 else:
64 else:
65 self._closed = False
65 self._closed = False
66
66
67 def _publish_msg(self, msg_type, data=None, metadata=None, buffers=None, **keys):
67 def _publish_msg(self, msg_type, data=None, metadata=None, buffers=None, **keys):
68 """Helper for sending a comm message on IOPub"""
68 """Helper for sending a comm message on IOPub"""
69 if threading.current_thread().name != 'MainThread' and IOLoop.initialized():
69 if threading.current_thread().name != 'MainThread' and IOLoop.initialized():
@@ -80,13 +80,13 b' class Comm(LoggingConfigurable):'
80 ident=self.topic,
80 ident=self.topic,
81 buffers=buffers,
81 buffers=buffers,
82 )
82 )
83
83
84 def __del__(self):
84 def __del__(self):
85 """trigger close on gc"""
85 """trigger close on gc"""
86 self.close()
86 self.close()
87
87
88 # publishing messages
88 # publishing messages
89
89
90 def open(self, data=None, metadata=None, buffers=None):
90 def open(self, data=None, metadata=None, buffers=None):
91 """Open the frontend-side version of this comm"""
91 """Open the frontend-side version of this comm"""
92 if data is None:
92 if data is None:
@@ -107,7 +107,7 b' class Comm(LoggingConfigurable):'
107 except:
107 except:
108 comm_manager.unregister_comm(self)
108 comm_manager.unregister_comm(self)
109 raise
109 raise
110
110
111 def close(self, data=None, metadata=None, buffers=None):
111 def close(self, data=None, metadata=None, buffers=None):
112 """Close the frontend-side version of this comm"""
112 """Close the frontend-side version of this comm"""
113 if self._closed:
113 if self._closed:
@@ -120,41 +120,41 b' class Comm(LoggingConfigurable):'
120 data=data, metadata=metadata, buffers=buffers,
120 data=data, metadata=metadata, buffers=buffers,
121 )
121 )
122 self.kernel.comm_manager.unregister_comm(self)
122 self.kernel.comm_manager.unregister_comm(self)
123
123
124 def send(self, data=None, metadata=None, buffers=None):
124 def send(self, data=None, metadata=None, buffers=None):
125 """Send a message to the frontend-side version of this comm"""
125 """Send a message to the frontend-side version of this comm"""
126 self._publish_msg('comm_msg',
126 self._publish_msg('comm_msg',
127 data=data, metadata=metadata, buffers=buffers,
127 data=data, metadata=metadata, buffers=buffers,
128 )
128 )
129
129
130 # registering callbacks
130 # registering callbacks
131
131
132 def on_close(self, callback):
132 def on_close(self, callback):
133 """Register a callback for comm_close
133 """Register a callback for comm_close
134
134
135 Will be called with the `data` of the close message.
135 Will be called with the `data` of the close message.
136
136
137 Call `on_close(None)` to disable an existing callback.
137 Call `on_close(None)` to disable an existing callback.
138 """
138 """
139 self._close_callback = callback
139 self._close_callback = callback
140
140
141 def on_msg(self, callback):
141 def on_msg(self, callback):
142 """Register a callback for comm_msg
142 """Register a callback for comm_msg
143
143
144 Will be called with the `data` of any comm_msg messages.
144 Will be called with the `data` of any comm_msg messages.
145
145
146 Call `on_msg(None)` to disable an existing callback.
146 Call `on_msg(None)` to disable an existing callback.
147 """
147 """
148 self._msg_callback = callback
148 self._msg_callback = callback
149
149
150 # handling of incoming messages
150 # handling of incoming messages
151
151
152 def handle_close(self, msg):
152 def handle_close(self, msg):
153 """Handle a comm_close message"""
153 """Handle a comm_close message"""
154 self.log.debug("handle_close[%s](%s)", self.comm_id, msg)
154 self.log.debug("handle_close[%s](%s)", self.comm_id, msg)
155 if self._close_callback:
155 if self._close_callback:
156 self._close_callback(msg)
156 self._close_callback(msg)
157
157
158 def handle_msg(self, msg):
158 def handle_msg(self, msg):
159 """Handle a comm_msg message"""
159 """Handle a comm_msg message"""
160 self.log.debug("handle_msg[%s](%s)", self.comm_id, msg)
160 self.log.debug("handle_msg[%s](%s)", self.comm_id, msg)
@@ -18,7 +18,7 b' from .comm import Comm'
18
18
19 def lazy_keys(dikt):
19 def lazy_keys(dikt):
20 """Return lazy-evaluated string representation of a dictionary's keys
20 """Return lazy-evaluated string representation of a dictionary's keys
21
21
22 Key list is only constructed if it will actually be used.
22 Key list is only constructed if it will actually be used.
23 Used for debug-logging.
23 Used for debug-logging.
24 """
24 """
@@ -27,43 +27,43 b' def lazy_keys(dikt):'
27
27
28 class CommManager(LoggingConfigurable):
28 class CommManager(LoggingConfigurable):
29 """Manager for Comms in the Kernel"""
29 """Manager for Comms in the Kernel"""
30
30
31 # If this is instantiated by a non-IPython kernel, shell will be None
31 # If this is instantiated by a non-IPython kernel, shell will be None
32 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
32 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
33 allow_none=True)
33 allow_none=True)
34 kernel = Instance('IPython.kernel.zmq.kernelbase.Kernel')
34 kernel = Instance('ipython_kernel.kernelbase.Kernel')
35
35
36 iopub_socket = Any()
36 iopub_socket = Any()
37 def _iopub_socket_default(self):
37 def _iopub_socket_default(self):
38 return self.kernel.iopub_socket
38 return self.kernel.iopub_socket
39 session = Instance('IPython.kernel.zmq.session.Session')
39 session = Instance('ipython_kernel.session.Session')
40 def _session_default(self):
40 def _session_default(self):
41 return self.kernel.session
41 return self.kernel.session
42
42
43 comms = Dict()
43 comms = Dict()
44 targets = Dict()
44 targets = Dict()
45
45
46 # Public APIs
46 # Public APIs
47
47
48 def register_target(self, target_name, f):
48 def register_target(self, target_name, f):
49 """Register a callable f for a given target name
49 """Register a callable f for a given target name
50
50
51 f will be called with two arguments when a comm_open message is received with `target`:
51 f will be called with two arguments when a comm_open message is received with `target`:
52
52
53 - the Comm instance
53 - the Comm instance
54 - the `comm_open` message itself.
54 - the `comm_open` message itself.
55
55
56 f can be a Python callable or an import string for one.
56 f can be a Python callable or an import string for one.
57 """
57 """
58 if isinstance(f, string_types):
58 if isinstance(f, string_types):
59 f = import_item(f)
59 f = import_item(f)
60
60
61 self.targets[target_name] = f
61 self.targets[target_name] = f
62
62
63 def unregister_target(self, target_name, f):
63 def unregister_target(self, target_name, f):
64 """Unregister a callable registered with register_target"""
64 """Unregister a callable registered with register_target"""
65 return self.targets.pop(target_name);
65 return self.targets.pop(target_name);
66
66
67 def register_comm(self, comm):
67 def register_comm(self, comm):
68 """Register a new comm"""
68 """Register a new comm"""
69 comm_id = comm.comm_id
69 comm_id = comm.comm_id
@@ -72,17 +72,17 b' class CommManager(LoggingConfigurable):'
72 comm.iopub_socket = self.iopub_socket
72 comm.iopub_socket = self.iopub_socket
73 self.comms[comm_id] = comm
73 self.comms[comm_id] = comm
74 return comm_id
74 return comm_id
75
75
76 def unregister_comm(self, comm):
76 def unregister_comm(self, comm):
77 """Unregister a comm, and close its counterpart"""
77 """Unregister a comm, and close its counterpart"""
78 # unlike get_comm, this should raise a KeyError
78 # unlike get_comm, this should raise a KeyError
79 comm = self.comms.pop(comm.comm_id)
79 comm = self.comms.pop(comm.comm_id)
80
80
81 def get_comm(self, comm_id):
81 def get_comm(self, comm_id):
82 """Get a comm with a particular id
82 """Get a comm with a particular id
83
83
84 Returns the comm if found, otherwise None.
84 Returns the comm if found, otherwise None.
85
85
86 This will not raise an error,
86 This will not raise an error,
87 it will log messages if the comm cannot be found.
87 it will log messages if the comm cannot be found.
88 """
88 """
@@ -93,7 +93,7 b' class CommManager(LoggingConfigurable):'
93 # call, because we store weakrefs
93 # call, because we store weakrefs
94 comm = self.comms[comm_id]
94 comm = self.comms[comm_id]
95 return comm
95 return comm
96
96
97 # Message handlers
97 # Message handlers
98 def comm_open(self, stream, ident, msg):
98 def comm_open(self, stream, ident, msg):
99 """Handler for comm_open messages"""
99 """Handler for comm_open messages"""
@@ -116,14 +116,14 b' class CommManager(LoggingConfigurable):'
116 return
116 return
117 except Exception:
117 except Exception:
118 self.log.error("Exception opening comm with target: %s", target_name, exc_info=True)
118 self.log.error("Exception opening comm with target: %s", target_name, exc_info=True)
119
119
120 # Failure.
120 # Failure.
121 try:
121 try:
122 comm.close()
122 comm.close()
123 except:
123 except:
124 self.log.error("""Could not close comm during `comm_open` failure
124 self.log.error("""Could not close comm during `comm_open` failure
125 clean-up. The comm may not have been opened yet.""", exc_info=True)
125 clean-up. The comm may not have been opened yet.""", exc_info=True)
126
126
127 def comm_msg(self, stream, ident, msg):
127 def comm_msg(self, stream, ident, msg):
128 """Handler for comm_msg messages"""
128 """Handler for comm_msg messages"""
129 content = msg['content']
129 content = msg['content']
@@ -136,7 +136,7 b' class CommManager(LoggingConfigurable):'
136 comm.handle_msg(msg)
136 comm.handle_msg(msg)
137 except Exception:
137 except Exception:
138 self.log.error("Exception in comm_msg for %s", comm_id, exc_info=True)
138 self.log.error("Exception in comm_msg for %s", comm_id, exc_info=True)
139
139
140 def comm_close(self, stream, ident, msg):
140 def comm_close(self, stream, ident, msg):
141 """Handler for comm_close messages"""
141 """Handler for comm_close messages"""
142 content = msg['content']
142 content = msg['content']
@@ -147,7 +147,7 b' class CommManager(LoggingConfigurable):'
147 self.log.debug("No such comm to close: %s", comm_id)
147 self.log.debug("No such comm to close: %s", comm_id)
148 return
148 return
149 del self.comms[comm_id]
149 del self.comms[comm_id]
150
150
151 try:
151 try:
152 comm.handle_close(msg)
152 comm.handle_close(msg)
153 except Exception:
153 except Exception:
@@ -13,11 +13,11 b''
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15 from IPython.config import Configurable
15 from IPython.config import Configurable
16 from IPython.kernel.inprocess.socket import SocketABC
16 from ipython_kernel.inprocess.socket import SocketABC
17 from IPython.utils.jsonutil import json_clean
17 from IPython.utils.jsonutil import json_clean
18 from IPython.utils.traitlets import Instance, Dict, CBytes
18 from IPython.utils.traitlets import Instance, Dict, CBytes
19 from IPython.kernel.zmq.serialize import serialize_object
19 from ipython_kernel.serialize import serialize_object
20 from IPython.kernel.zmq.session import Session, extract_header
20 from ipython_kernel.session import Session, extract_header
21
21
22 #-----------------------------------------------------------------------------
22 #-----------------------------------------------------------------------------
23 # Code
23 # Code
@@ -34,13 +34,13 b' class ZMQDataPublisher(Configurable):'
34 def set_parent(self, parent):
34 def set_parent(self, parent):
35 """Set the parent for outbound messages."""
35 """Set the parent for outbound messages."""
36 self.parent_header = extract_header(parent)
36 self.parent_header = extract_header(parent)
37
37
38 def publish_data(self, data):
38 def publish_data(self, data):
39 """publish a data_message on the IOPub channel
39 """publish a data_message on the IOPub channel
40
40
41 Parameters
41 Parameters
42 ----------
42 ----------
43
43
44 data : dict
44 data : dict
45 The data to be published. Think of it as a namespace.
45 The data to be published. Think of it as a namespace.
46 """
46 """
@@ -59,12 +59,12 b' class ZMQDataPublisher(Configurable):'
59
59
60 def publish_data(data):
60 def publish_data(data):
61 """publish a data_message on the IOPub channel
61 """publish a data_message on the IOPub channel
62
62
63 Parameters
63 Parameters
64 ----------
64 ----------
65
65
66 data : dict
66 data : dict
67 The data to be published. Think of it as a namespace.
67 The data to be published. Think of it as a namespace.
68 """
68 """
69 from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell
69 from ipython_kernel.zmqshell import ZMQInteractiveShell
70 ZMQInteractiveShell.instance().data_pub.publish_data(data)
70 ZMQInteractiveShell.instance().data_pub.publish_data(data)
@@ -6,7 +6,7 b''
6 import sys
6 import sys
7
7
8 from IPython.core.displayhook import DisplayHook
8 from IPython.core.displayhook import DisplayHook
9 from IPython.kernel.inprocess.socket import SocketABC
9 from ipython_kernel.inprocess.socket import SocketABC
10 from IPython.utils.jsonutil import encode_images
10 from IPython.utils.jsonutil import encode_images
11 from IPython.utils.py3compat import builtin_mod
11 from IPython.utils.py3compat import builtin_mod
12 from IPython.utils.traitlets import Instance, Dict
12 from IPython.utils.traitlets import Instance, Dict
@@ -71,4 +71,3 b' class ZMQShellDisplayHook(DisplayHook):'
71 if self.msg['content']['data']:
71 if self.msg['content']['data']:
72 self.session.send(self.pub_socket, self.msg, ident=self.topic)
72 self.session.send(self.pub_socket, self.msg, ident=self.topic)
73 self.msg = None
73 self.msg = None
74
@@ -16,19 +16,19 b' from .kernelapp import IPKernelApp'
16
16
17 def embed_kernel(module=None, local_ns=None, **kwargs):
17 def embed_kernel(module=None, local_ns=None, **kwargs):
18 """Embed and start an IPython kernel in a given scope.
18 """Embed and start an IPython kernel in a given scope.
19
19
20 Parameters
20 Parameters
21 ----------
21 ----------
22 module : ModuleType, optional
22 module : ModuleType, optional
23 The module to load into IPython globals (default: caller)
23 The module to load into IPython globals (default: caller)
24 local_ns : dict, optional
24 local_ns : dict, optional
25 The namespace to load into IPython user namespace (default: caller)
25 The namespace to load into IPython user namespace (default: caller)
26
26
27 kwargs : various, optional
27 kwargs : various, optional
28 Further keyword args are relayed to the IPKernelApp constructor,
28 Further keyword args are relayed to the IPKernelApp constructor,
29 allowing configuration of the Kernel. Will only have an effect
29 allowing configuration of the Kernel. Will only have an effect
30 on the first embed_kernel call for a given process.
30 on the first embed_kernel call for a given process.
31
31
32 """
32 """
33 # get the app if it exists, or set it up if it doesn't
33 # get the app if it exists, or set it up if it doesn't
34 if IPKernelApp.initialized():
34 if IPKernelApp.initialized():
@@ -50,7 +50,7 b' def embed_kernel(module=None, local_ns=None, **kwargs):'
50 module = caller_module
50 module = caller_module
51 if local_ns is None:
51 if local_ns is None:
52 local_ns = caller_locals
52 local_ns = caller_locals
53
53
54 app.kernel.user_module = module
54 app.kernel.user_module = module
55 app.kernel.user_ns = local_ns
55 app.kernel.user_ns = local_ns
56 app.shell.set_completer_frame()
56 app.shell.set_completer_frame()
@@ -14,9 +14,9 b' from IPython.utils import io'
14 from IPython.lib.inputhook import _use_appnope
14 from IPython.lib.inputhook import _use_appnope
15
15
16 def _notify_stream_qt(kernel, stream):
16 def _notify_stream_qt(kernel, stream):
17
17
18 from IPython.external.qt_for_kernel import QtCore
18 from IPython.external.qt_for_kernel import QtCore
19
19
20 if _use_appnope() and kernel._darwin_app_nap:
20 if _use_appnope() and kernel._darwin_app_nap:
21 from appnope import nope_scope as context
21 from appnope import nope_scope as context
22 else:
22 else:
@@ -26,7 +26,7 b' def _notify_stream_qt(kernel, stream):'
26 while stream.getsockopt(zmq.EVENTS) & zmq.POLLIN:
26 while stream.getsockopt(zmq.EVENTS) & zmq.POLLIN:
27 with context():
27 with context():
28 kernel.do_one_iteration()
28 kernel.do_one_iteration()
29
29
30 fd = stream.getsockopt(zmq.FD)
30 fd = stream.getsockopt(zmq.FD)
31 notifier = QtCore.QSocketNotifier(fd, QtCore.QSocketNotifier.Read, kernel.app)
31 notifier = QtCore.QSocketNotifier(fd, QtCore.QSocketNotifier.Read, kernel.app)
32 notifier.activated.connect(process_stream_events)
32 notifier.activated.connect(process_stream_events)
@@ -41,22 +41,22 b' loop_map = {'
41
41
42 def register_integration(*toolkitnames):
42 def register_integration(*toolkitnames):
43 """Decorator to register an event loop to integrate with the IPython kernel
43 """Decorator to register an event loop to integrate with the IPython kernel
44
44
45 The decorator takes names to register the event loop as for the %gui magic.
45 The decorator takes names to register the event loop as for the %gui magic.
46 You can provide alternative names for the same toolkit.
46 You can provide alternative names for the same toolkit.
47
47
48 The decorated function should take a single argument, the IPython kernel
48 The decorated function should take a single argument, the IPython kernel
49 instance, arrange for the event loop to call ``kernel.do_one_iteration()``
49 instance, arrange for the event loop to call ``kernel.do_one_iteration()``
50 at least every ``kernel._poll_interval`` seconds, and start the event loop.
50 at least every ``kernel._poll_interval`` seconds, and start the event loop.
51
51
52 :mod:`IPython.kernel.zmq.eventloops` provides and registers such functions
52 :mod:`ipython_kernel.eventloops` provides and registers such functions
53 for a few common event loops.
53 for a few common event loops.
54 """
54 """
55 def decorator(func):
55 def decorator(func):
56 for name in toolkitnames:
56 for name in toolkitnames:
57 loop_map[name] = func
57 loop_map[name] = func
58 return func
58 return func
59
59
60 return decorator
60 return decorator
61
61
62
62
@@ -68,10 +68,10 b' def loop_qt4(kernel):'
68
68
69 kernel.app = get_app_qt4([" "])
69 kernel.app = get_app_qt4([" "])
70 kernel.app.setQuitOnLastWindowClosed(False)
70 kernel.app.setQuitOnLastWindowClosed(False)
71
71
72 for s in kernel.shell_streams:
72 for s in kernel.shell_streams:
73 _notify_stream_qt(kernel, s)
73 _notify_stream_qt(kernel, s)
74
74
75 start_event_loop_qt4(kernel.app)
75 start_event_loop_qt4(kernel.app)
76
76
77 @register_integration('qt5')
77 @register_integration('qt5')
@@ -87,7 +87,7 b' def loop_wx(kernel):'
87
87
88 import wx
88 import wx
89 from IPython.lib.guisupport import start_event_loop_wx
89 from IPython.lib.guisupport import start_event_loop_wx
90
90
91 if _use_appnope() and kernel._darwin_app_nap:
91 if _use_appnope() and kernel._darwin_app_nap:
92 # we don't hook up App Nap contexts for Wx,
92 # we don't hook up App Nap contexts for Wx,
93 # just disable it outright.
93 # just disable it outright.
@@ -197,7 +197,7 b' def loop_cocoa(kernel):'
197 "you must use matplotlib >= 1.1.0, or a native libtk."
197 "you must use matplotlib >= 1.1.0, or a native libtk."
198 )
198 )
199 return loop_tk(kernel)
199 return loop_tk(kernel)
200
200
201 from matplotlib.backends.backend_macosx import TimerMac, show
201 from matplotlib.backends.backend_macosx import TimerMac, show
202
202
203 # scale interval for sec->ms
203 # scale interval for sec->ms
1 NO CONTENT: file renamed from IPython/kernel/zmq/gui/__init__.py to ipython_kernel/gui/__init__.py
NO CONTENT: file renamed from IPython/kernel/zmq/gui/__init__.py to ipython_kernel/gui/__init__.py
@@ -38,7 +38,7 b' class GTKEmbed(object):'
38
38
39 def _wire_kernel(self):
39 def _wire_kernel(self):
40 """Initializes the kernel inside GTK.
40 """Initializes the kernel inside GTK.
41
41
42 This is meant to run only once at startup, so it does its job and
42 This is meant to run only once at startup, so it does its job and
43 returns False to ensure it doesn't get run again by GTK.
43 returns False to ensure it doesn't get run again by GTK.
44 """
44 """
@@ -46,7 +46,7 b' class GTKEmbed(object):'
46 GObject.timeout_add(int(1000*self.kernel._poll_interval),
46 GObject.timeout_add(int(1000*self.kernel._poll_interval),
47 self.iterate_kernel)
47 self.iterate_kernel)
48 return False
48 return False
49
49
50 def iterate_kernel(self):
50 def iterate_kernel(self):
51 """Run one iteration of the kernel and return True.
51 """Run one iteration of the kernel and return True.
52
52
@@ -39,7 +39,7 b' class GTKEmbed(object):'
39
39
40 def _wire_kernel(self):
40 def _wire_kernel(self):
41 """Initializes the kernel inside GTK.
41 """Initializes the kernel inside GTK.
42
42
43 This is meant to run only once at startup, so it does its job and
43 This is meant to run only once at startup, so it does its job and
44 returns False to ensure it doesn't get run again by GTK.
44 returns False to ensure it doesn't get run again by GTK.
45 """
45 """
@@ -47,7 +47,7 b' class GTKEmbed(object):'
47 gobject.timeout_add(int(1000*self.kernel._poll_interval),
47 gobject.timeout_add(int(1000*self.kernel._poll_interval),
48 self.iterate_kernel)
48 self.iterate_kernel)
49 return False
49 return False
50
50
51 def iterate_kernel(self):
51 def iterate_kernel(self):
52 """Run one iteration of the kernel and return True.
52 """Run one iteration of the kernel and return True.
53
53
1 NO CONTENT: file renamed from IPython/kernel/zmq/heartbeat.py to ipython_kernel/heartbeat.py
NO CONTENT: file renamed from IPython/kernel/zmq/heartbeat.py to ipython_kernel/heartbeat.py
1 NO CONTENT: file renamed from IPython/kernel/inprocess/__init__.py to ipython_kernel/inprocess/__init__.py
NO CONTENT: file renamed from IPython/kernel/inprocess/__init__.py to ipython_kernel/inprocess/__init__.py
@@ -17,7 +17,6 b' except ImportError:'
17 # IPython imports
17 # IPython imports
18 from IPython.utils.io import raw_print
18 from IPython.utils.io import raw_print
19 from IPython.utils.traitlets import Type
19 from IPython.utils.traitlets import Type
20 #from IPython.kernel.blocking.channels import BlockingChannelMixin
21
20
22 # Local imports
21 # Local imports
23 from .channels import (
22 from .channels import (
@@ -3,7 +3,7 b''
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from IPython.kernel.channelsabc import HBChannelABC
6 from jupyter_client.channelsabc import HBChannelABC
7
7
8 from .socket import DummySocket
8 from .socket import DummySocket
9
9
@@ -12,10 +12,10 b''
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13
13
14 # IPython imports
14 # IPython imports
15 from IPython.kernel.inprocess.socket import DummySocket
15 from ipython_kernel.inprocess.socket import DummySocket
16 from IPython.utils.traitlets import Type, Instance
16 from IPython.utils.traitlets import Type, Instance
17 from IPython.kernel.clientabc import KernelClientABC
17 from jupyter_client.clientabc import KernelClientABC
18 from IPython.kernel.client import KernelClient
18 from jupyter_client.client import KernelClient
19
19
20 # Local imports
20 # Local imports
21 from .channels import (
21 from .channels import (
@@ -32,10 +32,10 b' class InProcessKernelClient(KernelClient):'
32 """A client for an in-process kernel.
32 """A client for an in-process kernel.
33
33
34 This class implements the interface of
34 This class implements the interface of
35 `IPython.kernel.clientabc.KernelClientABC` and allows
35 `jupyter_client.clientabc.KernelClientABC` and allows
36 (asynchronous) frontends to be used seamlessly with an in-process kernel.
36 (asynchronous) frontends to be used seamlessly with an in-process kernel.
37
37
38 See `IPython.kernel.client.KernelClient` for docstrings.
38 See `jupyter_client.client.KernelClient` for docstrings.
39 """
39 """
40
40
41 # The classes to use for the various channels.
41 # The classes to use for the various channels.
@@ -44,7 +44,7 b' class InProcessKernelClient(KernelClient):'
44 stdin_channel_class = Type(InProcessChannel)
44 stdin_channel_class = Type(InProcessChannel)
45 hb_channel_class = Type(InProcessHBChannel)
45 hb_channel_class = Type(InProcessHBChannel)
46
46
47 kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel',
47 kernel = Instance('ipython_kernel.inprocess.ipkernel.InProcessKernel',
48 allow_none=True)
48 allow_none=True)
49
49
50 #--------------------------------------------------------------------------
50 #--------------------------------------------------------------------------
@@ -10,8 +10,8 b' import sys'
10 from IPython.core.interactiveshell import InteractiveShellABC
10 from IPython.core.interactiveshell import InteractiveShellABC
11 from IPython.utils.jsonutil import json_clean
11 from IPython.utils.jsonutil import json_clean
12 from IPython.utils.traitlets import Any, Enum, Instance, List, Type
12 from IPython.utils.traitlets import Any, Enum, Instance, List, Type
13 from IPython.kernel.zmq.ipkernel import IPythonKernel
13 from ipython_kernel.ipkernel import IPythonKernel
14 from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell
14 from ipython_kernel.zmqshell import ZMQInteractiveShell
15
15
16 from .socket import DummySocket
16 from .socket import DummySocket
17
17
@@ -27,7 +27,7 b' class InProcessKernel(IPythonKernel):'
27
27
28 # The frontends connected to this kernel.
28 # The frontends connected to this kernel.
29 frontends = List(
29 frontends = List(
30 Instance('IPython.kernel.inprocess.client.InProcessKernelClient',
30 Instance('ipython_kernel.inprocess.client.InProcessKernelClient',
31 allow_none=True)
31 allow_none=True)
32 )
32 )
33
33
@@ -114,25 +114,25 b' class InProcessKernel(IPythonKernel):'
114 ident, msg = self.session.recv(self.iopub_socket, copy=False)
114 ident, msg = self.session.recv(self.iopub_socket, copy=False)
115 for frontend in self.frontends:
115 for frontend in self.frontends:
116 frontend.iopub_channel.call_handlers(msg)
116 frontend.iopub_channel.call_handlers(msg)
117
117
118 #------ Trait initializers -----------------------------------------------
118 #------ Trait initializers -----------------------------------------------
119
119
120 def _log_default(self):
120 def _log_default(self):
121 return logging.getLogger(__name__)
121 return logging.getLogger(__name__)
122
122
123 def _session_default(self):
123 def _session_default(self):
124 from IPython.kernel.zmq.session import Session
124 from ipython_kernel.session import Session
125 return Session(parent=self, key=b'')
125 return Session(parent=self, key=b'')
126
126
127 def _shell_class_default(self):
127 def _shell_class_default(self):
128 return InProcessInteractiveShell
128 return InProcessInteractiveShell
129
129
130 def _stdout_default(self):
130 def _stdout_default(self):
131 from IPython.kernel.zmq.iostream import OutStream
131 from ipython_kernel.iostream import OutStream
132 return OutStream(self.session, self.iopub_socket, u'stdout', pipe=False)
132 return OutStream(self.session, self.iopub_socket, u'stdout', pipe=False)
133
133
134 def _stderr_default(self):
134 def _stderr_default(self):
135 from IPython.kernel.zmq.iostream import OutStream
135 from ipython_kernel.iostream import OutStream
136 return OutStream(self.session, self.iopub_socket, u'stderr', pipe=False)
136 return OutStream(self.session, self.iopub_socket, u'stderr', pipe=False)
137
137
138 #-----------------------------------------------------------------------------
138 #-----------------------------------------------------------------------------
@@ -141,7 +141,7 b' class InProcessKernel(IPythonKernel):'
141
141
142 class InProcessInteractiveShell(ZMQInteractiveShell):
142 class InProcessInteractiveShell(ZMQInteractiveShell):
143
143
144 kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel',
144 kernel = Instance('ipython_kernel.inprocess.ipkernel.InProcessKernel',
145 allow_none=True)
145 allow_none=True)
146
146
147 #-------------------------------------------------------------------------
147 #-------------------------------------------------------------------------
@@ -150,7 +150,7 b' class InProcessInteractiveShell(ZMQInteractiveShell):'
150
150
151 def enable_gui(self, gui=None):
151 def enable_gui(self, gui=None):
152 """Enable GUI integration for the kernel."""
152 """Enable GUI integration for the kernel."""
153 from IPython.kernel.zmq.eventloops import enable_gui
153 from ipython_kernel.eventloops import enable_gui
154 if not gui:
154 if not gui:
155 gui = self.kernel.gui
155 gui = self.kernel.gui
156 return enable_gui(gui, kernel=self.kernel)
156 return enable_gui(gui, kernel=self.kernel)
@@ -4,37 +4,37 b''
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from IPython.utils.traitlets import Instance, DottedObjectName
6 from IPython.utils.traitlets import Instance, DottedObjectName
7 from IPython.kernel.managerabc import KernelManagerABC
7 from jupyter_client.managerabc import KernelManagerABC
8 from IPython.kernel.manager import KernelManager
8 from jupyter_client.manager import KernelManager
9 from IPython.kernel.zmq.session import Session
9 from jupyter_client.session import Session
10
10
11
11
12 class InProcessKernelManager(KernelManager):
12 class InProcessKernelManager(KernelManager):
13 """A manager for an in-process kernel.
13 """A manager for an in-process kernel.
14
14
15 This class implements the interface of
15 This class implements the interface of
16 `IPython.kernel.kernelmanagerabc.KernelManagerABC` and allows
16 `jupyter_client.kernelmanagerabc.KernelManagerABC` and allows
17 (asynchronous) frontends to be used seamlessly with an in-process kernel.
17 (asynchronous) frontends to be used seamlessly with an in-process kernel.
18
18
19 See `IPython.kernel.kernelmanager.KernelManager` for docstrings.
19 See `jupyter_client.kernelmanager.KernelManager` for docstrings.
20 """
20 """
21
21
22 # The kernel process with which the KernelManager is communicating.
22 # The kernel process with which the KernelManager is communicating.
23 kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel',
23 kernel = Instance('ipython_kernel.inprocess.ipkernel.InProcessKernel',
24 allow_none=True)
24 allow_none=True)
25 # the client class for KM.client() shortcut
25 # the client class for KM.client() shortcut
26 client_class = DottedObjectName('IPython.kernel.inprocess.BlockingInProcessKernelClient')
26 client_class = DottedObjectName('ipython_kernel.inprocess.BlockingInProcessKernelClient')
27
27
28 def _session_default(self):
28 def _session_default(self):
29 # don't sign in-process messages
29 # don't sign in-process messages
30 return Session(key=b'', parent=self)
30 return Session(key=b'', parent=self)
31
31
32 #--------------------------------------------------------------------------
32 #--------------------------------------------------------------------------
33 # Kernel management methods
33 # Kernel management methods
34 #--------------------------------------------------------------------------
34 #--------------------------------------------------------------------------
35
35
36 def start_kernel(self, **kwds):
36 def start_kernel(self, **kwds):
37 from IPython.kernel.inprocess.ipkernel import InProcessKernel
37 from ipython_kernel.inprocess.ipkernel import InProcessKernel
38 self.kernel = InProcessKernel(parent=self, session=self.session)
38 self.kernel = InProcessKernel(parent=self, session=self.session)
39
39
40 def shutdown_kernel(self):
40 def shutdown_kernel(self):
@@ -46,7 +46,7 b' SocketABC.register(zmq.Socket)'
46
46
47 class DummySocket(HasTraits):
47 class DummySocket(HasTraits):
48 """ A dummy socket implementing (part of) the zmq.Socket interface. """
48 """ A dummy socket implementing (part of) the zmq.Socket interface. """
49
49
50 queue = Instance(Queue, ())
50 queue = Instance(Queue, ())
51 message_sent = Int(0) # Should be an Event
51 message_sent = Int(0) # Should be an Event
52
52
1 NO CONTENT: file renamed from IPython/kernel/inprocess/tests/__init__.py to ipython_kernel/inprocess/tests/__init__.py
NO CONTENT: file renamed from IPython/kernel/inprocess/tests/__init__.py to ipython_kernel/inprocess/tests/__init__.py
@@ -6,10 +6,10 b' from __future__ import print_function'
6 import sys
6 import sys
7 import unittest
7 import unittest
8
8
9 from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
9 from ipython_kernel.inprocess.blocking import BlockingInProcessKernelClient
10 from IPython.kernel.inprocess.manager import InProcessKernelManager
10 from ipython_kernel.inprocess.manager import InProcessKernelManager
11 from IPython.kernel.inprocess.ipkernel import InProcessKernel
11 from ipython_kernel.inprocess.ipkernel import InProcessKernel
12 from IPython.kernel.tests.utils import assemble_output
12 from ipython_kernel.tests.utils import assemble_output
13 from IPython.testing.decorators import skipif_not_matplotlib
13 from IPython.testing.decorators import skipif_not_matplotlib
14 from IPython.utils.io import capture_output
14 from IPython.utils.io import capture_output
15 from IPython.utils import py3compat
15 from IPython.utils import py3compat
@@ -66,4 +66,3 b' class InProcessKernelTestCase(unittest.TestCase):'
66 kc.execute('print("bar")')
66 kc.execute('print("bar")')
67 out, err = assemble_output(kc.iopub_channel)
67 out, err = assemble_output(kc.iopub_channel)
68 self.assertEqual(out, 'bar\n')
68 self.assertEqual(out, 'bar\n')
69
@@ -5,8 +5,8 b' from __future__ import print_function'
5
5
6 import unittest
6 import unittest
7
7
8 from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
8 from ipython_kernel.inprocess.blocking import BlockingInProcessKernelClient
9 from IPython.kernel.inprocess.manager import InProcessKernelManager
9 from ipython_kernel.inprocess.manager import InProcessKernelManager
10
10
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12 # Test case
12 # Test case
@@ -52,14 +52,14 b' class OutStream(object):'
52 self._pipe_flag = pipe
52 self._pipe_flag = pipe
53 if pipe:
53 if pipe:
54 self._setup_pipe_in()
54 self._setup_pipe_in()
55
55
56 def _setup_pipe_in(self):
56 def _setup_pipe_in(self):
57 """setup listening pipe for subprocesses"""
57 """setup listening pipe for subprocesses"""
58 ctx = self.pub_socket.context
58 ctx = self.pub_socket.context
59
59
60 # use UUID to authenticate pipe messages
60 # use UUID to authenticate pipe messages
61 self._pipe_uuid = uuid.uuid4().bytes
61 self._pipe_uuid = uuid.uuid4().bytes
62
62
63 self._pipe_in = ctx.socket(zmq.PULL)
63 self._pipe_in = ctx.socket(zmq.PULL)
64 self._pipe_in.linger = 0
64 self._pipe_in.linger = 0
65 try:
65 try:
@@ -81,7 +81,7 b' class OutStream(object):'
81 lambda s, event: self.flush(),
81 lambda s, event: self.flush(),
82 IOLoop.READ,
82 IOLoop.READ,
83 )
83 )
84
84
85 def _setup_pipe_out(self):
85 def _setup_pipe_out(self):
86 # must be new context after fork
86 # must be new context after fork
87 ctx = zmq.Context()
87 ctx = zmq.Context()
@@ -89,13 +89,13 b' class OutStream(object):'
89 self._pipe_out = ctx.socket(zmq.PUSH)
89 self._pipe_out = ctx.socket(zmq.PUSH)
90 self._pipe_out_lock = threading.Lock()
90 self._pipe_out_lock = threading.Lock()
91 self._pipe_out.connect("tcp://127.0.0.1:%i" % self._pipe_port)
91 self._pipe_out.connect("tcp://127.0.0.1:%i" % self._pipe_port)
92
92
93 def _is_master_process(self):
93 def _is_master_process(self):
94 return os.getpid() == self._master_pid
94 return os.getpid() == self._master_pid
95
95
96 def _is_master_thread(self):
96 def _is_master_thread(self):
97 return threading.current_thread().ident == self._master_thread
97 return threading.current_thread().ident == self._master_thread
98
98
99 def _have_pipe_out(self):
99 def _have_pipe_out(self):
100 return os.getpid() == self._pipe_pid
100 return os.getpid() == self._pipe_pid
101
101
@@ -136,10 +136,10 b' class OutStream(object):'
136 self._start = 0
136 self._start = 0
137 else:
137 else:
138 break
138 break
139
139
140 def _schedule_flush(self):
140 def _schedule_flush(self):
141 """schedule a flush in the main thread
141 """schedule a flush in the main thread
142
142
143 only works with a tornado/pyzmq eventloop running
143 only works with a tornado/pyzmq eventloop running
144 """
144 """
145 if IOLoop.initialized():
145 if IOLoop.initialized():
@@ -147,14 +147,14 b' class OutStream(object):'
147 else:
147 else:
148 # no async loop, at least force the timer
148 # no async loop, at least force the timer
149 self._start = 0
149 self._start = 0
150
150
151 def flush(self):
151 def flush(self):
152 """trigger actual zmq send"""
152 """trigger actual zmq send"""
153 if self.pub_socket is None:
153 if self.pub_socket is None:
154 raise ValueError(u'I/O operation on closed file')
154 raise ValueError(u'I/O operation on closed file')
155
155
156 mp_mode = self._check_mp_mode()
156 mp_mode = self._check_mp_mode()
157
157
158 if mp_mode != CHILD:
158 if mp_mode != CHILD:
159 # we are master
159 # we are master
160 if not self._is_master_thread():
160 if not self._is_master_thread():
@@ -162,15 +162,15 b' class OutStream(object):'
162 # but at least they can schedule an async flush, or force the timer.
162 # but at least they can schedule an async flush, or force the timer.
163 self._schedule_flush()
163 self._schedule_flush()
164 return
164 return
165
165
166 self._flush_from_subprocesses()
166 self._flush_from_subprocesses()
167 data = self._flush_buffer()
167 data = self._flush_buffer()
168
168
169 if data:
169 if data:
170 content = {u'name':self.name, u'text':data}
170 content = {u'name':self.name, u'text':data}
171 msg = self.session.send(self.pub_socket, u'stream', content=content,
171 msg = self.session.send(self.pub_socket, u'stream', content=content,
172 parent=self.parent_header, ident=self.topic)
172 parent=self.parent_header, ident=self.topic)
173
173
174 if hasattr(self.pub_socket, 'flush'):
174 if hasattr(self.pub_socket, 'flush'):
175 # socket itself has flush (presumably ZMQStream)
175 # socket itself has flush (presumably ZMQStream)
176 self.pub_socket.flush()
176 self.pub_socket.flush()
@@ -200,7 +200,7 b' class OutStream(object):'
200
200
201 def readline(self, size=-1):
201 def readline(self, size=-1):
202 raise IOError('Read not supported on a write only stream.')
202 raise IOError('Read not supported on a write only stream.')
203
203
204 def fileno(self):
204 def fileno(self):
205 raise UnsupportedOperation("IOStream has no fileno.")
205 raise UnsupportedOperation("IOStream has no fileno.")
206
206
@@ -211,7 +211,7 b' class OutStream(object):'
211 # Make sure that we're handling unicode
211 # Make sure that we're handling unicode
212 if not isinstance(string, unicode_type):
212 if not isinstance(string, unicode_type):
213 string = string.decode(self.encoding, 'replace')
213 string = string.decode(self.encoding, 'replace')
214
214
215 is_child = (self._check_mp_mode() == CHILD)
215 is_child = (self._check_mp_mode() == CHILD)
216 self._buffer.write(string)
216 self._buffer.write(string)
217 if is_child:
217 if is_child:
@@ -243,7 +243,7 b' class OutStream(object):'
243 self._buffer.close()
243 self._buffer.close()
244 self._new_buffer()
244 self._new_buffer()
245 return data
245 return data
246
246
247 def _new_buffer(self):
247 def _new_buffer(self):
248 self._buffer = StringIO()
248 self._buffer = StringIO()
249 self._start = -1
249 self._start = -1
@@ -10,7 +10,7 b' from IPython.utils.tokenutil import token_at_cursor, line_at_cursor'
10 from IPython.utils.traitlets import Instance, Type, Any, List
10 from IPython.utils.traitlets import Instance, Type, Any, List
11 from IPython.utils.decorators import undoc
11 from IPython.utils.decorators import undoc
12
12
13 from ..comm import CommManager
13 from .comm import CommManager
14 from .kernelbase import Kernel as KernelBase
14 from .kernelbase import Kernel as KernelBase
15 from .serialize import serialize_object, unpack_apply_message
15 from .serialize import serialize_object, unpack_apply_message
16 from .zmqshell import ZMQInteractiveShell
16 from .zmqshell import ZMQInteractiveShell
@@ -63,7 +63,7 b' class IPythonKernel(KernelBase):'
63 # TMP - hack while developing
63 # TMP - hack while developing
64 self.shell._reply_content = None
64 self.shell._reply_content = None
65
65
66 self.comm_manager = CommManager(shell=self.shell, parent=self,
66 self.comm_manager = CommManager(shell=self.shell, parent=self,
67 kernel=self)
67 kernel=self)
68 self.comm_manager.register_target('ipython.widget', lazy_import_handle_comm_opened)
68 self.comm_manager.register_target('ipython.widget', lazy_import_handle_comm_opened)
69
69
@@ -71,7 +71,7 b' class IPythonKernel(KernelBase):'
71 comm_msg_types = [ 'comm_open', 'comm_msg', 'comm_close' ]
71 comm_msg_types = [ 'comm_open', 'comm_msg', 'comm_close' ]
72 for msg_type in comm_msg_types:
72 for msg_type in comm_msg_types:
73 self.shell_handlers[msg_type] = getattr(self.comm_manager, msg_type)
73 self.shell_handlers[msg_type] = getattr(self.comm_manager, msg_type)
74
74
75 help_links = List([
75 help_links = List([
76 {
76 {
77 'text': "Python",
77 'text': "Python",
@@ -363,6 +363,6 b' class IPythonKernel(KernelBase):'
363 class Kernel(IPythonKernel):
363 class Kernel(IPythonKernel):
364 def __init__(self, *args, **kwargs):
364 def __init__(self, *args, **kwargs):
365 import warnings
365 import warnings
366 warnings.warn('Kernel is a deprecated alias of IPython.kernel.zmq.ipkernel.IPythonKernel',
366 warnings.warn('Kernel is a deprecated alias of ipython_kernel.ipkernel.IPythonKernel',
367 DeprecationWarning)
367 DeprecationWarning)
368 super(Kernel, self).__init__(*args, **kwargs)
368 super(Kernel, self).__init__(*args, **kwargs)
@@ -28,8 +28,8 b' from IPython.utils.traitlets import ('
28 Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type,
28 Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type,
29 )
29 )
30 from IPython.utils.importstring import import_item
30 from IPython.utils.importstring import import_item
31 from IPython.kernel import write_connection_file
31 from jupyter_client import write_connection_file
32 from IPython.kernel.connect import ConnectionFileMixin
32 from ipython_kernel.connect import ConnectionFileMixin
33
33
34 # local imports
34 # local imports
35 from .heartbeat import Heartbeat
35 from .heartbeat import Heartbeat
@@ -99,10 +99,10 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
99 flags = Dict(kernel_flags)
99 flags = Dict(kernel_flags)
100 classes = [IPythonKernel, ZMQInteractiveShell, ProfileDir, Session]
100 classes = [IPythonKernel, ZMQInteractiveShell, ProfileDir, Session]
101 # the kernel class, as an importstring
101 # the kernel class, as an importstring
102 kernel_class = Type('IPython.kernel.zmq.ipkernel.IPythonKernel', config=True,
102 kernel_class = Type('ipython_kernel.ipkernel.IPythonKernel', config=True,
103 klass='IPython.kernel.zmq.kernelbase.Kernel',
103 klass='ipython_kernel.kernelbase.Kernel',
104 help="""The Kernel subclass to be used.
104 help="""The Kernel subclass to be used.
105
105
106 This should allow easy re-use of the IPKernelApp entry point
106 This should allow easy re-use of the IPKernelApp entry point
107 to configure and launch kernels other than IPython's own.
107 to configure and launch kernels other than IPython's own.
108 """)
108 """)
@@ -110,23 +110,23 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
110 poller = Any() # don't restrict this even though current pollers are all Threads
110 poller = Any() # don't restrict this even though current pollers are all Threads
111 heartbeat = Instance(Heartbeat, allow_none=True)
111 heartbeat = Instance(Heartbeat, allow_none=True)
112 ports = Dict()
112 ports = Dict()
113
113
114 # connection info:
114 # connection info:
115
115
116 @property
116 @property
117 def abs_connection_file(self):
117 def abs_connection_file(self):
118 if os.path.basename(self.connection_file) == self.connection_file:
118 if os.path.basename(self.connection_file) == self.connection_file:
119 return os.path.join(self.profile_dir.security_dir, self.connection_file)
119 return os.path.join(self.profile_dir.security_dir, self.connection_file)
120 else:
120 else:
121 return self.connection_file
121 return self.connection_file
122
122
123
123
124 # streams, etc.
124 # streams, etc.
125 no_stdout = Bool(False, config=True, help="redirect stdout to the null device")
125 no_stdout = Bool(False, config=True, help="redirect stdout to the null device")
126 no_stderr = Bool(False, config=True, help="redirect stderr to the null device")
126 no_stderr = Bool(False, config=True, help="redirect stderr to the null device")
127 outstream_class = DottedObjectName('IPython.kernel.zmq.iostream.OutStream',
127 outstream_class = DottedObjectName('ipython_kernel.iostream.OutStream',
128 config=True, help="The importstring for the OutStream factory")
128 config=True, help="The importstring for the OutStream factory")
129 displayhook_class = DottedObjectName('IPython.kernel.zmq.displayhook.ZMQDisplayHook',
129 displayhook_class = DottedObjectName('ipython_kernel.displayhook.ZMQDisplayHook',
130 config=True, help="The importstring for the DisplayHook factory")
130 config=True, help="The importstring for the DisplayHook factory")
131
131
132 # polling
132 # polling
@@ -177,7 +177,7 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
177 write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
177 write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
178 shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
178 shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
179 iopub_port=self.iopub_port, control_port=self.control_port)
179 iopub_port=self.iopub_port, control_port=self.control_port)
180
180
181 def cleanup_connection_file(self):
181 def cleanup_connection_file(self):
182 cf = self.abs_connection_file
182 cf = self.abs_connection_file
183 self.log.debug("Cleaning up connection file: %s", cf)
183 self.log.debug("Cleaning up connection file: %s", cf)
@@ -185,9 +185,9 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
185 os.remove(cf)
185 os.remove(cf)
186 except (IOError, OSError):
186 except (IOError, OSError):
187 pass
187 pass
188
188
189 self.cleanup_ipc_files()
189 self.cleanup_ipc_files()
190
190
191 def init_connection_file(self):
191 def init_connection_file(self):
192 if not self.connection_file:
192 if not self.connection_file:
193 self.connection_file = "kernel-%s.json"%os.getpid()
193 self.connection_file = "kernel-%s.json"%os.getpid()
@@ -203,7 +203,7 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
203 except Exception:
203 except Exception:
204 self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
204 self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
205 self.exit(1)
205 self.exit(1)
206
206
207 def init_sockets(self):
207 def init_sockets(self):
208 # Create a context, a session, and the kernel sockets.
208 # Create a context, a session, and the kernel sockets.
209 self.log.info("Starting the kernel at pid: %i", os.getpid())
209 self.log.info("Starting the kernel at pid: %i", os.getpid())
@@ -230,7 +230,7 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
230 self.control_socket.linger = 1000
230 self.control_socket.linger = 1000
231 self.control_port = self._bind_socket(self.control_socket, self.control_port)
231 self.control_port = self._bind_socket(self.control_socket, self.control_port)
232 self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
232 self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
233
233
234 def init_heartbeat(self):
234 def init_heartbeat(self):
235 """start the heart beating"""
235 """start the heart beating"""
236 # heartbeat doesn't share context, because it mustn't be blocked
236 # heartbeat doesn't share context, because it mustn't be blocked
@@ -240,7 +240,7 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
240 self.hb_port = self.heartbeat.port
240 self.hb_port = self.heartbeat.port
241 self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
241 self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
242 self.heartbeat.start()
242 self.heartbeat.start()
243
243
244 def log_connection_info(self):
244 def log_connection_info(self):
245 """display connection info, and store ports"""
245 """display connection info, and store ports"""
246 basename = os.path.basename(self.connection_file)
246 basename = os.path.basename(self.connection_file)
@@ -280,7 +280,7 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
280 sys.stdout = sys.__stdout__ = blackhole
280 sys.stdout = sys.__stdout__ = blackhole
281 if self.no_stderr:
281 if self.no_stderr:
282 sys.stderr = sys.__stderr__ = blackhole
282 sys.stderr = sys.__stderr__ = blackhole
283
283
284 def init_io(self):
284 def init_io(self):
285 """Redirect input streams and set a display hook."""
285 """Redirect input streams and set a display hook."""
286 if self.outstream_class:
286 if self.outstream_class:
@@ -298,7 +298,7 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
298 """Create the Kernel object itself"""
298 """Create the Kernel object itself"""
299 shell_stream = ZMQStream(self.shell_socket)
299 shell_stream = ZMQStream(self.shell_socket)
300 control_stream = ZMQStream(self.control_socket)
300 control_stream = ZMQStream(self.control_socket)
301
301
302 kernel_factory = self.kernel_class.instance
302 kernel_factory = self.kernel_class.instance
303
303
304 kernel = kernel_factory(parent=self, session=self.session,
304 kernel = kernel_factory(parent=self, session=self.session,
1 NO CONTENT: file renamed from IPython/kernel/zmq/kernelbase.py to ipython_kernel/kernelbase.py
NO CONTENT: file renamed from IPython/kernel/zmq/kernelbase.py to ipython_kernel/kernelbase.py
@@ -5,11 +5,11 b' from zmq.log.handlers import PUBHandler'
5 class EnginePUBHandler(PUBHandler):
5 class EnginePUBHandler(PUBHandler):
6 """A simple PUBHandler subclass that sets root_topic"""
6 """A simple PUBHandler subclass that sets root_topic"""
7 engine=None
7 engine=None
8
8
9 def __init__(self, engine, *args, **kwargs):
9 def __init__(self, engine, *args, **kwargs):
10 PUBHandler.__init__(self,*args, **kwargs)
10 PUBHandler.__init__(self,*args, **kwargs)
11 self.engine = engine
11 self.engine = engine
12
12
13 @property
13 @property
14 def root_topic(self):
14 def root_topic(self):
15 """this is a property, in case the handler is created
15 """this is a property, in case the handler is created
@@ -18,4 +18,3 b' class EnginePUBHandler(PUBHandler):'
18 return "engine.%i"%self.engine.id
18 return "engine.%i"%self.engine.id
19 else:
19 else:
20 return "engine"
20 return "engine"
21
1 NO CONTENT: file renamed from IPython/kernel/zmq/parentpoller.py to ipython_kernel/parentpoller.py
NO CONTENT: file renamed from IPython/kernel/zmq/parentpoller.py to ipython_kernel/parentpoller.py
1 NO CONTENT: file renamed from IPython/kernel/tests/__init__.py to ipython_kernel/pylab/__init__.py
NO CONTENT: file renamed from IPython/kernel/tests/__init__.py to ipython_kernel/pylab/__init__.py
@@ -65,7 +65,7 b' def draw_if_interactive():'
65 # For further reference:
65 # For further reference:
66 # https://github.com/ipython/ipython/issues/1612
66 # https://github.com/ipython/ipython/issues/1612
67 # https://github.com/matplotlib/matplotlib/issues/835
67 # https://github.com/matplotlib/matplotlib/issues/835
68
68
69 if not hasattr(fig, 'show'):
69 if not hasattr(fig, 'show'):
70 # Queue up `fig` for display
70 # Queue up `fig` for display
71 fig.show = lambda *a: display(fig)
71 fig.show = lambda *a: display(fig)
@@ -94,7 +94,7 b' def flush_figures():'
94
94
95 This is meant to be called automatically and will call show() if, during
95 This is meant to be called automatically and will call show() if, during
96 prior code execution, there had been any calls to draw_if_interactive.
96 prior code execution, there had been any calls to draw_if_interactive.
97
97
98 This function is meant to be used as a post_execute callback in IPython,
98 This function is meant to be used as a post_execute callback in IPython,
99 so user-caused errors are handled with showtraceback() instead of being
99 so user-caused errors are handled with showtraceback() instead of being
100 allowed to raise. If this function is not called from within IPython,
100 allowed to raise. If this function is not called from within IPython,
@@ -102,7 +102,7 b' def flush_figures():'
102 """
102 """
103 if not show._draw_called:
103 if not show._draw_called:
104 return
104 return
105
105
106 if InlineBackend.instance().close_figures:
106 if InlineBackend.instance().close_figures:
107 # ignore the tracking, just draw and close all figures
107 # ignore the tracking, just draw and close all figures
108 try:
108 try:
@@ -139,4 +139,3 b' def flush_figures():'
139 # figurecanvas. This is set here to a Agg canvas
139 # figurecanvas. This is set here to a Agg canvas
140 # See https://github.com/matplotlib/matplotlib/pull/1125
140 # See https://github.com/matplotlib/matplotlib/pull/1125
141 FigureCanvas = FigureCanvasAgg
141 FigureCanvas = FigureCanvasAgg
142
@@ -68,14 +68,14 b' class InlineBackend(InlineBackendConfig):'
68 )
68 )
69
69
70 figure_formats = Set({'png'}, config=True,
70 figure_formats = Set({'png'}, config=True,
71 help="""A set of figure formats to enable: 'png',
71 help="""A set of figure formats to enable: 'png',
72 'retina', 'jpeg', 'svg', 'pdf'.""")
72 'retina', 'jpeg', 'svg', 'pdf'.""")
73
73
74 def _update_figure_formatters(self):
74 def _update_figure_formatters(self):
75 if self.shell is not None:
75 if self.shell is not None:
76 from IPython.core.pylabtools import select_figure_formats
76 from IPython.core.pylabtools import select_figure_formats
77 select_figure_formats(self.shell, self.figure_formats, **self.print_figure_kwargs)
77 select_figure_formats(self.shell, self.figure_formats, **self.print_figure_kwargs)
78
78
79 def _figure_formats_changed(self, name, old, new):
79 def _figure_formats_changed(self, name, old, new):
80 if 'jpg' in new or 'jpeg' in new:
80 if 'jpg' in new or 'jpeg' in new:
81 if not pil_available():
81 if not pil_available():
@@ -91,20 +91,20 b' class InlineBackend(InlineBackendConfig):'
91
91
92 print_figure_kwargs = Dict({'bbox_inches' : 'tight'}, config=True,
92 print_figure_kwargs = Dict({'bbox_inches' : 'tight'}, config=True,
93 help="""Extra kwargs to be passed to fig.canvas.print_figure.
93 help="""Extra kwargs to be passed to fig.canvas.print_figure.
94
94
95 Logical examples include: bbox_inches, quality (for jpeg figures), etc.
95 Logical examples include: bbox_inches, quality (for jpeg figures), etc.
96 """
96 """
97 )
97 )
98 _print_figure_kwargs_changed = _update_figure_formatters
98 _print_figure_kwargs_changed = _update_figure_formatters
99
99
100 close_figures = Bool(True, config=True,
100 close_figures = Bool(True, config=True,
101 help="""Close all figures at the end of each cell.
101 help="""Close all figures at the end of each cell.
102
102
103 When True, ensures that each cell starts with no active figures, but it
103 When True, ensures that each cell starts with no active figures, but it
104 also means that one must keep track of references in order to edit or
104 also means that one must keep track of references in order to edit or
105 redraw figures in subsequent cells. This mode is ideal for the notebook,
105 redraw figures in subsequent cells. This mode is ideal for the notebook,
106 where residual plots from other cells might be surprising.
106 where residual plots from other cells might be surprising.
107
107
108 When False, one must call figure() to create new figures. This means
108 When False, one must call figure() to create new figures. This means
109 that gcf() and getfigs() can reference figures created in other cells,
109 that gcf() and getfigs() can reference figures created in other cells,
110 and the active figure can continue to be edited with pylab/pyplot
110 and the active figure can continue to be edited with pylab/pyplot
@@ -117,4 +117,3 b' class InlineBackend(InlineBackendConfig):'
117 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
117 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
118 allow_none=True)
118 allow_none=True)
119
119
120
1 NO CONTENT: file renamed from IPython/kernel/resources/logo-32x32.png to ipython_kernel/resources/logo-32x32.png
NO CONTENT: file renamed from IPython/kernel/resources/logo-32x32.png to ipython_kernel/resources/logo-32x32.png
1 NO CONTENT: file renamed from IPython/kernel/resources/logo-64x64.png to ipython_kernel/resources/logo-64x64.png
NO CONTENT: file renamed from IPython/kernel/resources/logo-64x64.png to ipython_kernel/resources/logo-64x64.png
@@ -17,6 +17,8 b' from IPython.utils.pickleutil import ('
17 can, uncan, can_sequence, uncan_sequence, CannedObject,
17 can, uncan, can_sequence, uncan_sequence, CannedObject,
18 istype, sequence_types, PICKLE_PROTOCOL,
18 istype, sequence_types, PICKLE_PROTOCOL,
19 )
19 )
20 from jupyter_client.session import MAX_ITEMS, MAX_BYTES
21
20
22
21 if PY3:
23 if PY3:
22 buffer = memoryview
24 buffer = memoryview
@@ -25,9 +27,6 b' if PY3:'
25 # Serialization Functions
27 # Serialization Functions
26 #-----------------------------------------------------------------------------
28 #-----------------------------------------------------------------------------
27
29
28 # default values for the thresholds:
29 MAX_ITEMS = 64
30 MAX_BYTES = 1024
31
30
32 def _extract_buffers(obj, threshold=MAX_BYTES):
31 def _extract_buffers(obj, threshold=MAX_BYTES):
33 """extract buffers larger than a certain threshold"""
32 """extract buffers larger than a certain threshold"""
@@ -53,10 +52,10 b' def _restore_buffers(obj, buffers):'
53
52
54 def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
53 def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
55 """Serialize an object into a list of sendable buffers.
54 """Serialize an object into a list of sendable buffers.
56
55
57 Parameters
56 Parameters
58 ----------
57 ----------
59
58
60 obj : object
59 obj : object
61 The object to be serialized
60 The object to be serialized
62 buffer_threshold : int
61 buffer_threshold : int
@@ -66,7 +65,7 b' def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):'
66 The maximum number of items over which canning will iterate.
65 The maximum number of items over which canning will iterate.
67 Containers (lists, dicts) larger than this will be pickled without
66 Containers (lists, dicts) larger than this will be pickled without
68 introspection.
67 introspection.
69
68
70 Returns
69 Returns
71 -------
70 -------
72 [bufs] : list of buffers representing the serialized object.
71 [bufs] : list of buffers representing the serialized object.
@@ -91,17 +90,17 b' def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):'
91
90
92 def deserialize_object(buffers, g=None):
91 def deserialize_object(buffers, g=None):
93 """reconstruct an object serialized by serialize_object from data buffers.
92 """reconstruct an object serialized by serialize_object from data buffers.
94
93
95 Parameters
94 Parameters
96 ----------
95 ----------
97
96
98 bufs : list of buffers/bytes
97 bufs : list of buffers/bytes
99
98
100 g : globals to be used when uncanning
99 g : globals to be used when uncanning
101
100
102 Returns
101 Returns
103 -------
102 -------
104
103
105 (newobj, bufs) : unpacked object, and the list of remaining unused buffers.
104 (newobj, bufs) : unpacked object, and the list of remaining unused buffers.
106 """
105 """
107 bufs = list(buffers)
106 bufs = list(buffers)
@@ -120,37 +119,37 b' def deserialize_object(buffers, g=None):'
120 else:
119 else:
121 _restore_buffers(canned, bufs)
120 _restore_buffers(canned, bufs)
122 newobj = uncan(canned, g)
121 newobj = uncan(canned, g)
123
122
124 return newobj, bufs
123 return newobj, bufs
125
124
126 def pack_apply_message(f, args, kwargs, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
125 def pack_apply_message(f, args, kwargs, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
127 """pack up a function, args, and kwargs to be sent over the wire
126 """pack up a function, args, and kwargs to be sent over the wire
128
127
129 Each element of args/kwargs will be canned for special treatment,
128 Each element of args/kwargs will be canned for special treatment,
130 but inspection will not go any deeper than that.
129 but inspection will not go any deeper than that.
131
130
132 Any object whose data is larger than `threshold` will not have their data copied
131 Any object whose data is larger than `threshold` will not have their data copied
133 (only numpy arrays and bytes/buffers support zero-copy)
132 (only numpy arrays and bytes/buffers support zero-copy)
134
133
135 Message will be a list of bytes/buffers of the format:
134 Message will be a list of bytes/buffers of the format:
136
135
137 [ cf, pinfo, <arg_bufs>, <kwarg_bufs> ]
136 [ cf, pinfo, <arg_bufs>, <kwarg_bufs> ]
138
137
139 With length at least two + len(args) + len(kwargs)
138 With length at least two + len(args) + len(kwargs)
140 """
139 """
141
140
142 arg_bufs = flatten(serialize_object(arg, buffer_threshold, item_threshold) for arg in args)
141 arg_bufs = flatten(serialize_object(arg, buffer_threshold, item_threshold) for arg in args)
143
142
144 kw_keys = sorted(kwargs.keys())
143 kw_keys = sorted(kwargs.keys())
145 kwarg_bufs = flatten(serialize_object(kwargs[key], buffer_threshold, item_threshold) for key in kw_keys)
144 kwarg_bufs = flatten(serialize_object(kwargs[key], buffer_threshold, item_threshold) for key in kw_keys)
146
145
147 info = dict(nargs=len(args), narg_bufs=len(arg_bufs), kw_keys=kw_keys)
146 info = dict(nargs=len(args), narg_bufs=len(arg_bufs), kw_keys=kw_keys)
148
147
149 msg = [pickle.dumps(can(f), PICKLE_PROTOCOL)]
148 msg = [pickle.dumps(can(f), PICKLE_PROTOCOL)]
150 msg.append(pickle.dumps(info, PICKLE_PROTOCOL))
149 msg.append(pickle.dumps(info, PICKLE_PROTOCOL))
151 msg.extend(arg_bufs)
150 msg.extend(arg_bufs)
152 msg.extend(kwarg_bufs)
151 msg.extend(kwarg_bufs)
153
152
154 return msg
153 return msg
155
154
156 def unpack_apply_message(bufs, g=None, copy=True):
155 def unpack_apply_message(bufs, g=None, copy=True):
@@ -163,19 +162,18 b' def unpack_apply_message(bufs, g=None, copy=True):'
163 pinfo = buffer_to_bytes_py2(bufs.pop(0))
162 pinfo = buffer_to_bytes_py2(bufs.pop(0))
164 info = pickle.loads(pinfo)
163 info = pickle.loads(pinfo)
165 arg_bufs, kwarg_bufs = bufs[:info['narg_bufs']], bufs[info['narg_bufs']:]
164 arg_bufs, kwarg_bufs = bufs[:info['narg_bufs']], bufs[info['narg_bufs']:]
166
165
167 args = []
166 args = []
168 for i in range(info['nargs']):
167 for i in range(info['nargs']):
169 arg, arg_bufs = deserialize_object(arg_bufs, g)
168 arg, arg_bufs = deserialize_object(arg_bufs, g)
170 args.append(arg)
169 args.append(arg)
171 args = tuple(args)
170 args = tuple(args)
172 assert not arg_bufs, "Shouldn't be any arg bufs left over"
171 assert not arg_bufs, "Shouldn't be any arg bufs left over"
173
172
174 kwargs = {}
173 kwargs = {}
175 for key in info['kw_keys']:
174 for key in info['kw_keys']:
176 kwarg, kwarg_bufs = deserialize_object(kwarg_bufs, g)
175 kwarg, kwarg_bufs = deserialize_object(kwarg_bufs, g)
177 kwargs[key] = kwarg
176 kwargs[key] = kwarg
178 assert not kwarg_bufs, "Shouldn't be any kwarg bufs left over"
177 assert not kwarg_bufs, "Shouldn't be any kwarg bufs left over"
179
180 return f,args,kwargs
181
178
179 return f,args,kwargs
1 NO CONTENT: file renamed from IPython/kernel/zmq/pylab/__init__.py to ipython_kernel/tests/__init__.py
NO CONTENT: file renamed from IPython/kernel/zmq/pylab/__init__.py to ipython_kernel/tests/__init__.py
@@ -22,7 +22,7 b' from subprocess import Popen, PIPE'
22
22
23 import nose.tools as nt
23 import nose.tools as nt
24
24
25 from IPython.kernel import BlockingKernelClient
25 from jupyter_client import BlockingKernelClient
26 from IPython.utils import path, py3compat
26 from IPython.utils import path, py3compat
27 from IPython.utils.py3compat import unicode_type
27 from IPython.utils.py3compat import unicode_type
28
28
@@ -38,7 +38,7 b' def setup():'
38 global IPYTHONDIR
38 global IPYTHONDIR
39 global env
39 global env
40 global save_get_ipython_dir
40 global save_get_ipython_dir
41
41
42 IPYTHONDIR = tempfile.mkdtemp()
42 IPYTHONDIR = tempfile.mkdtemp()
43
43
44 env = os.environ.copy()
44 env = os.environ.copy()
@@ -50,7 +50,7 b' def setup():'
50
50
51 def teardown():
51 def teardown():
52 path.get_ipython_dir = save_get_ipython_dir
52 path.get_ipython_dir = save_get_ipython_dir
53
53
54 try:
54 try:
55 shutil.rmtree(IPYTHONDIR)
55 shutil.rmtree(IPYTHONDIR)
56 except (OSError, IOError):
56 except (OSError, IOError):
@@ -61,7 +61,7 b' def teardown():'
61 @contextmanager
61 @contextmanager
62 def setup_kernel(cmd):
62 def setup_kernel(cmd):
63 """start an embedded kernel in a subprocess, and wait for it to be ready
63 """start an embedded kernel in a subprocess, and wait for it to be ready
64
64
65 Returns
65 Returns
66 -------
66 -------
67 kernel_manager: connected KernelManager instance
67 kernel_manager: connected KernelManager instance
@@ -78,22 +78,22 b' def setup_kernel(cmd):'
78 and kernel.poll() is None \
78 and kernel.poll() is None \
79 and time.time() < tic + SETUP_TIMEOUT:
79 and time.time() < tic + SETUP_TIMEOUT:
80 time.sleep(0.1)
80 time.sleep(0.1)
81
81
82 if kernel.poll() is not None:
82 if kernel.poll() is not None:
83 o,e = kernel.communicate()
83 o,e = kernel.communicate()
84 e = py3compat.cast_unicode(e)
84 e = py3compat.cast_unicode(e)
85 raise IOError("Kernel failed to start:\n%s" % e)
85 raise IOError("Kernel failed to start:\n%s" % e)
86
86
87 if not os.path.exists(connection_file):
87 if not os.path.exists(connection_file):
88 if kernel.poll() is None:
88 if kernel.poll() is None:
89 kernel.terminate()
89 kernel.terminate()
90 raise IOError("Connection file %r never arrived" % connection_file)
90 raise IOError("Connection file %r never arrived" % connection_file)
91
91
92 client = BlockingKernelClient(connection_file=connection_file)
92 client = BlockingKernelClient(connection_file=connection_file)
93 client.load_connection_file()
93 client.load_connection_file()
94 client.start_channels()
94 client.start_channels()
95 client.wait_for_ready()
95 client.wait_for_ready()
96
96
97 try:
97 try:
98 yield client
98 yield client
99 finally:
99 finally:
@@ -111,14 +111,14 b' def test_embed_kernel_basic():'
111 'go()',
111 'go()',
112 '',
112 '',
113 ])
113 ])
114
114
115 with setup_kernel(cmd) as client:
115 with setup_kernel(cmd) as client:
116 # oinfo a (int)
116 # oinfo a (int)
117 msg_id = client.inspect('a')
117 msg_id = client.inspect('a')
118 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
118 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
119 content = msg['content']
119 content = msg['content']
120 nt.assert_true(content['found'])
120 nt.assert_true(content['found'])
121
121
122 msg_id = client.execute("c=a*2")
122 msg_id = client.execute("c=a*2")
123 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
123 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
124 content = msg['content']
124 content = msg['content']
@@ -143,7 +143,7 b' def test_embed_kernel_namespace():'
143 'go()',
143 'go()',
144 '',
144 '',
145 ])
145 ])
146
146
147 with setup_kernel(cmd) as client:
147 with setup_kernel(cmd) as client:
148 # oinfo a (int)
148 # oinfo a (int)
149 msg_id = client.inspect('a')
149 msg_id = client.inspect('a')
@@ -181,7 +181,7 b' def test_embed_kernel_reentrant():'
181 ' go()',
181 ' go()',
182 '',
182 '',
183 ])
183 ])
184
184
185 with setup_kernel(cmd) as client:
185 with setup_kernel(cmd) as client:
186 for i in range(5):
186 for i in range(5):
187 msg_id = client.inspect('count')
187 msg_id = client.inspect('count')
@@ -190,10 +190,8 b' def test_embed_kernel_reentrant():'
190 nt.assert_true(content['found'])
190 nt.assert_true(content['found'])
191 text = content['data']['text/plain']
191 text = content['data']['text/plain']
192 nt.assert_in(unicode_type(i), text)
192 nt.assert_in(unicode_type(i), text)
193
193
194 # exit from embed_kernel
194 # exit from embed_kernel
195 client.execute("get_ipython().exit_now = True")
195 client.execute("get_ipython().exit_now = True")
196 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
196 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
197 time.sleep(0.2)
197 time.sleep(0.2)
198
199
@@ -49,7 +49,7 b' def test_sys_path():'
49
49
50 def test_sys_path_profile_dir():
50 def test_sys_path_profile_dir():
51 """test that sys.path doesn't get messed up when `--profile-dir` is specified"""
51 """test that sys.path doesn't get messed up when `--profile-dir` is specified"""
52
52
53 with new_kernel(['--profile-dir', locate_profile('default')]) as kc:
53 with new_kernel(['--profile-dir', locate_profile('default')]) as kc:
54 msg_id, content = execute(kc=kc, code="import sys; print (repr(sys.path[0]))")
54 msg_id, content = execute(kc=kc, code="import sys; print (repr(sys.path[0]))")
55 stdout, stderr = assemble_output(kc.iopub_channel)
55 stdout, stderr = assemble_output(kc.iopub_channel)
@@ -60,7 +60,7 b' def test_subprocess_print():'
60 """printing from forked mp.Process"""
60 """printing from forked mp.Process"""
61 with new_kernel() as kc:
61 with new_kernel() as kc:
62 iopub = kc.iopub_channel
62 iopub = kc.iopub_channel
63
63
64 _check_mp_mode(kc, expected=False)
64 _check_mp_mode(kc, expected=False)
65 flush_channels(kc)
65 flush_channels(kc)
66 np = 5
66 np = 5
@@ -71,11 +71,11 b' def test_subprocess_print():'
71 "for p in pool: p.start()",
71 "for p in pool: p.start()",
72 "for p in pool: p.join()"
72 "for p in pool: p.join()"
73 ])
73 ])
74
74
75 expected = '\n'.join([
75 expected = '\n'.join([
76 "hello %s" % i for i in range(np)
76 "hello %s" % i for i in range(np)
77 ]) + '\n'
77 ]) + '\n'
78
78
79 msg_id, content = execute(kc=kc, code=code)
79 msg_id, content = execute(kc=kc, code=code)
80 stdout, stderr = assemble_output(iopub)
80 stdout, stderr = assemble_output(iopub)
81 nt.assert_equal(stdout.count("hello"), np, stdout)
81 nt.assert_equal(stdout.count("hello"), np, stdout)
@@ -90,7 +90,7 b' def test_subprocess_noprint():'
90 """mp.Process without print doesn't trigger iostream mp_mode"""
90 """mp.Process without print doesn't trigger iostream mp_mode"""
91 with kernel() as kc:
91 with kernel() as kc:
92 iopub = kc.iopub_channel
92 iopub = kc.iopub_channel
93
93
94 np = 5
94 np = 5
95 code = '\n'.join([
95 code = '\n'.join([
96 "import multiprocessing as mp",
96 "import multiprocessing as mp",
@@ -98,7 +98,7 b' def test_subprocess_noprint():'
98 "for p in pool: p.start()",
98 "for p in pool: p.start()",
99 "for p in pool: p.join()"
99 "for p in pool: p.join()"
100 ])
100 ])
101
101
102 msg_id, content = execute(kc=kc, code=code)
102 msg_id, content = execute(kc=kc, code=code)
103 stdout, stderr = assemble_output(iopub)
103 stdout, stderr = assemble_output(iopub)
104 nt.assert_equal(stdout, '')
104 nt.assert_equal(stdout, '')
@@ -113,14 +113,14 b' def test_subprocess_error():'
113 """error in mp.Process doesn't crash"""
113 """error in mp.Process doesn't crash"""
114 with new_kernel() as kc:
114 with new_kernel() as kc:
115 iopub = kc.iopub_channel
115 iopub = kc.iopub_channel
116
116
117 code = '\n'.join([
117 code = '\n'.join([
118 "import multiprocessing as mp",
118 "import multiprocessing as mp",
119 "p = mp.Process(target=int, args=('hi',))",
119 "p = mp.Process(target=int, args=('hi',))",
120 "p.start()",
120 "p.start()",
121 "p.join()",
121 "p.join()",
122 ])
122 ])
123
123
124 msg_id, content = execute(kc=kc, code=code)
124 msg_id, content = execute(kc=kc, code=code)
125 stdout, stderr = assemble_output(iopub)
125 stdout, stderr = assemble_output(iopub)
126 nt.assert_equal(stdout, '')
126 nt.assert_equal(stdout, '')
@@ -135,7 +135,7 b' def test_raw_input():'
135 """test [raw_]input"""
135 """test [raw_]input"""
136 with kernel() as kc:
136 with kernel() as kc:
137 iopub = kc.iopub_channel
137 iopub = kc.iopub_channel
138
138
139 input_f = "input" if py3compat.PY3 else "raw_input"
139 input_f = "input" if py3compat.PY3 else "raw_input"
140 theprompt = "prompt> "
140 theprompt = "prompt> "
141 code = 'print({input_f}("{theprompt}"))'.format(**locals())
141 code = 'print({input_f}("{theprompt}"))'.format(**locals())
@@ -157,7 +157,7 b' def test_eval_input():'
157 """test input() on Python 2"""
157 """test input() on Python 2"""
158 with kernel() as kc:
158 with kernel() as kc:
159 iopub = kc.iopub_channel
159 iopub = kc.iopub_channel
160
160
161 input_f = "input" if py3compat.PY3 else "raw_input"
161 input_f = "input" if py3compat.PY3 else "raw_input"
162 theprompt = "prompt> "
162 theprompt = "prompt> "
163 code = 'print(input("{theprompt}"))'.format(**locals())
163 code = 'print(input("{theprompt}"))'.format(**locals())
@@ -205,7 +205,7 b' def test_is_complete():'
205 kc.is_complete('raise = 2')
205 kc.is_complete('raise = 2')
206 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
206 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
207 assert reply['content']['status'] == 'invalid'
207 assert reply['content']['status'] == 'invalid'
208
208
209 kc.is_complete('a = [1,\n2,')
209 kc.is_complete('a = [1,\n2,')
210 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
210 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
211 assert reply['content']['status'] == 'incomplete'
211 assert reply['content']['status'] == 'incomplete'
@@ -64,7 +64,7 b' class Version(Unicode):'
64 self.max = kwargs.pop('max', None)
64 self.max = kwargs.pop('max', None)
65 kwargs['default_value'] = self.min
65 kwargs['default_value'] = self.min
66 super(Version, self).__init__(*args, **kwargs)
66 super(Version, self).__init__(*args, **kwargs)
67
67
68 def validate(self, obj, value):
68 def validate(self, obj, value):
69 if self.min and V(value) < V(self.min):
69 if self.min and V(value) < V(self.min):
70 raise TraitError("bad version: %s < %s" % (value, self.min))
70 raise TraitError("bad version: %s < %s" % (value, self.min))
@@ -78,7 +78,7 b' class RMessage(Reference):'
78 header = Dict()
78 header = Dict()
79 parent_header = Dict()
79 parent_header = Dict()
80 content = Dict()
80 content = Dict()
81
81
82 def check(self, d):
82 def check(self, d):
83 super(RMessage, self).check(d)
83 super(RMessage, self).check(d)
84 RHeader().check(self.header)
84 RHeader().check(self.header)
@@ -107,7 +107,7 b' class MimeBundle(Reference):'
107 class ExecuteReply(Reference):
107 class ExecuteReply(Reference):
108 execution_count = Integer()
108 execution_count = Integer()
109 status = Enum((u'ok', u'error'), default_value=u'ok')
109 status = Enum((u'ok', u'error'), default_value=u'ok')
110
110
111 def check(self, d):
111 def check(self, d):
112 Reference.check(self, d)
112 Reference.check(self, d)
113 if d['status'] == 'ok':
113 if d['status'] == 'ok':
@@ -158,7 +158,7 b' class KernelInfoReply(Reference):'
158 implementation_version = Version(min='2.1')
158 implementation_version = Version(min='2.1')
159 language_info = Dict()
159 language_info = Dict()
160 banner = Unicode()
160 banner = Unicode()
161
161
162 def check(self, d):
162 def check(self, d):
163 Reference.check(self, d)
163 Reference.check(self, d)
164 LanguageInfo().check(d['language_info'])
164 LanguageInfo().check(d['language_info'])
@@ -166,7 +166,7 b' class KernelInfoReply(Reference):'
166
166
167 class IsCompleteReply(Reference):
167 class IsCompleteReply(Reference):
168 status = Enum((u'complete', u'incomplete', u'invalid', u'unknown'), default_value=u'complete')
168 status = Enum((u'complete', u'incomplete', u'invalid', u'unknown'), default_value=u'complete')
169
169
170 def check(self, d):
170 def check(self, d):
171 Reference.check(self, d)
171 Reference.check(self, d)
172 if d['status'] == 'incomplete':
172 if d['status'] == 'incomplete':
@@ -224,10 +224,10 b' Specifications of `content` part of the reply messages.'
224
224
225 def validate_message(msg, msg_type=None, parent=None):
225 def validate_message(msg, msg_type=None, parent=None):
226 """validate a message
226 """validate a message
227
227
228 This is a generator, and must be iterated through to actually
228 This is a generator, and must be iterated through to actually
229 trigger each test.
229 trigger each test.
230
230
231 If msg_type and/or parent are given, the msg_type and/or parent msg_id
231 If msg_type and/or parent are given, the msg_type and/or parent msg_id
232 are compared with the given values.
232 are compared with the given values.
233 """
233 """
@@ -249,7 +249,7 b' def validate_message(msg, msg_type=None, parent=None):'
249
249
250 def test_execute():
250 def test_execute():
251 flush_channels()
251 flush_channels()
252
252
253 msg_id = KC.execute(code='x=1')
253 msg_id = KC.execute(code='x=1')
254 reply = KC.get_shell_msg(timeout=TIMEOUT)
254 reply = KC.get_shell_msg(timeout=TIMEOUT)
255 validate_message(reply, 'execute_reply', msg_id)
255 validate_message(reply, 'execute_reply', msg_id)
@@ -258,7 +258,7 b' def test_execute():'
258 def test_execute_silent():
258 def test_execute_silent():
259 flush_channels()
259 flush_channels()
260 msg_id, reply = execute(code='x=1', silent=True)
260 msg_id, reply = execute(code='x=1', silent=True)
261
261
262 # flush status=idle
262 # flush status=idle
263 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
263 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
264 validate_message(status, 'status', msg_id)
264 validate_message(status, 'status', msg_id)
@@ -266,14 +266,14 b' def test_execute_silent():'
266
266
267 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
267 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
268 count = reply['execution_count']
268 count = reply['execution_count']
269
269
270 msg_id, reply = execute(code='x=2', silent=True)
270 msg_id, reply = execute(code='x=2', silent=True)
271
271
272 # flush status=idle
272 # flush status=idle
273 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
273 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
274 validate_message(status, 'status', msg_id)
274 validate_message(status, 'status', msg_id)
275 nt.assert_equal(status['content']['execution_state'], 'idle')
275 nt.assert_equal(status['content']['execution_state'], 'idle')
276
276
277 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
277 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
278 count_2 = reply['execution_count']
278 count_2 = reply['execution_count']
279 nt.assert_equal(count_2, count)
279 nt.assert_equal(count_2, count)
@@ -281,11 +281,11 b' def test_execute_silent():'
281
281
282 def test_execute_error():
282 def test_execute_error():
283 flush_channels()
283 flush_channels()
284
284
285 msg_id, reply = execute(code='1/0')
285 msg_id, reply = execute(code='1/0')
286 nt.assert_equal(reply['status'], 'error')
286 nt.assert_equal(reply['status'], 'error')
287 nt.assert_equal(reply['ename'], 'ZeroDivisionError')
287 nt.assert_equal(reply['ename'], 'ZeroDivisionError')
288
288
289 error = KC.iopub_channel.get_msg(timeout=TIMEOUT)
289 error = KC.iopub_channel.get_msg(timeout=TIMEOUT)
290 validate_message(error, 'error', msg_id)
290 validate_message(error, 'error', msg_id)
291
291
@@ -296,9 +296,9 b' def test_execute_inc():'
296
296
297 msg_id, reply = execute(code='x=1')
297 msg_id, reply = execute(code='x=1')
298 count = reply['execution_count']
298 count = reply['execution_count']
299
299
300 flush_channels()
300 flush_channels()
301
301
302 msg_id, reply = execute(code='x=2')
302 msg_id, reply = execute(code='x=2')
303 count_2 = reply['execution_count']
303 count_2 = reply['execution_count']
304 nt.assert_equal(count_2, count+1)
304 nt.assert_equal(count_2, count+1)
@@ -306,7 +306,7 b' def test_execute_inc():'
306 def test_execute_stop_on_error():
306 def test_execute_stop_on_error():
307 """execute request should not abort execution queue with stop_on_error False"""
307 """execute request should not abort execution queue with stop_on_error False"""
308 flush_channels()
308 flush_channels()
309
309
310 fail = '\n'.join([
310 fail = '\n'.join([
311 # sleep to ensure subsequent message is waiting in the queue to be aborted
311 # sleep to ensure subsequent message is waiting in the queue to be aborted
312 'import time',
312 'import time',
@@ -362,7 +362,7 b' def test_oinfo_found():'
362 flush_channels()
362 flush_channels()
363
363
364 msg_id, reply = execute(code='a=5')
364 msg_id, reply = execute(code='a=5')
365
365
366 msg_id = KC.inspect('a')
366 msg_id = KC.inspect('a')
367 reply = KC.get_shell_msg(timeout=TIMEOUT)
367 reply = KC.get_shell_msg(timeout=TIMEOUT)
368 validate_message(reply, 'inspect_reply', msg_id)
368 validate_message(reply, 'inspect_reply', msg_id)
@@ -377,7 +377,7 b' def test_oinfo_detail():'
377 flush_channels()
377 flush_channels()
378
378
379 msg_id, reply = execute(code='ip=get_ipython()')
379 msg_id, reply = execute(code='ip=get_ipython()')
380
380
381 msg_id = KC.inspect('ip.object_inspect', cursor_pos=10, detail_level=1)
381 msg_id = KC.inspect('ip.object_inspect', cursor_pos=10, detail_level=1)
382 reply = KC.get_shell_msg(timeout=TIMEOUT)
382 reply = KC.get_shell_msg(timeout=TIMEOUT)
383 validate_message(reply, 'inspect_reply', msg_id)
383 validate_message(reply, 'inspect_reply', msg_id)
@@ -402,7 +402,7 b' def test_complete():'
402 flush_channels()
402 flush_channels()
403
403
404 msg_id, reply = execute(code="alpha = albert = 5")
404 msg_id, reply = execute(code="alpha = albert = 5")
405
405
406 msg_id = KC.complete('al', 2)
406 msg_id = KC.complete('al', 2)
407 reply = KC.get_shell_msg(timeout=TIMEOUT)
407 reply = KC.get_shell_msg(timeout=TIMEOUT)
408 validate_message(reply, 'complete_reply', msg_id)
408 validate_message(reply, 'complete_reply', msg_id)
@@ -436,10 +436,10 b' def test_is_complete():'
436
436
437 def test_history_range():
437 def test_history_range():
438 flush_channels()
438 flush_channels()
439
439
440 msg_id_exec = KC.execute(code='x=1', store_history = True)
440 msg_id_exec = KC.execute(code='x=1', store_history = True)
441 reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
441 reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
442
442
443 msg_id = KC.history(hist_access_type = 'range', raw = True, output = True, start = 1, stop = 2, session = 0)
443 msg_id = KC.history(hist_access_type = 'range', raw = True, output = True, start = 1, stop = 2, session = 0)
444 reply = KC.get_shell_msg(timeout=TIMEOUT)
444 reply = KC.get_shell_msg(timeout=TIMEOUT)
445 validate_message(reply, 'history_reply', msg_id)
445 validate_message(reply, 'history_reply', msg_id)
@@ -448,10 +448,10 b' def test_history_range():'
448
448
449 def test_history_tail():
449 def test_history_tail():
450 flush_channels()
450 flush_channels()
451
451
452 msg_id_exec = KC.execute(code='x=1', store_history = True)
452 msg_id_exec = KC.execute(code='x=1', store_history = True)
453 reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
453 reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
454
454
455 msg_id = KC.history(hist_access_type = 'tail', raw = True, output = True, n = 1, session = 0)
455 msg_id = KC.history(hist_access_type = 'tail', raw = True, output = True, n = 1, session = 0)
456 reply = KC.get_shell_msg(timeout=TIMEOUT)
456 reply = KC.get_shell_msg(timeout=TIMEOUT)
457 validate_message(reply, 'history_reply', msg_id)
457 validate_message(reply, 'history_reply', msg_id)
@@ -460,10 +460,10 b' def test_history_tail():'
460
460
461 def test_history_search():
461 def test_history_search():
462 flush_channels()
462 flush_channels()
463
463
464 msg_id_exec = KC.execute(code='x=1', store_history = True)
464 msg_id_exec = KC.execute(code='x=1', store_history = True)
465 reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
465 reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
466
466
467 msg_id = KC.history(hist_access_type = 'search', raw = True, output = True, n = 1, pattern = '*', session = 0)
467 msg_id = KC.history(hist_access_type = 'search', raw = True, output = True, n = 1, pattern = '*', session = 0)
468 reply = KC.get_shell_msg(timeout=TIMEOUT)
468 reply = KC.get_shell_msg(timeout=TIMEOUT)
469 validate_message(reply, 'history_reply', msg_id)
469 validate_message(reply, 'history_reply', msg_id)
@@ -488,9 +488,8 b' def test_display_data():'
488 flush_channels()
488 flush_channels()
489
489
490 msg_id, reply = execute("from IPython.core.display import display; display(1)")
490 msg_id, reply = execute("from IPython.core.display import display; display(1)")
491
491
492 display = KC.iopub_channel.get_msg(timeout=TIMEOUT)
492 display = KC.iopub_channel.get_msg(timeout=TIMEOUT)
493 validate_message(display, 'display_data', parent=msg_id)
493 validate_message(display, 'display_data', parent=msg_id)
494 data = display['content']['data']
494 data = display['content']['data']
495 nt.assert_equal(data['text/plain'], u'1')
495 nt.assert_equal(data['text/plain'], u'1')
496
@@ -9,7 +9,7 b' from collections import namedtuple'
9 import nose.tools as nt
9 import nose.tools as nt
10
10
11 # from unittest import TestCaes
11 # from unittest import TestCaes
12 from IPython.kernel.zmq.serialize import serialize_object, deserialize_object
12 from ipython_kernel.serialize import serialize_object, deserialize_object
13 from IPython.testing import decorators as dec
13 from IPython.testing import decorators as dec
14 from IPython.utils.pickleutil import CannedArray, CannedClass
14 from IPython.utils.pickleutil import CannedArray, CannedClass
15 from IPython.utils.py3compat import iteritems
15 from IPython.utils.py3compat import iteritems
@@ -28,7 +28,7 b' def roundtrip(obj):'
28
28
29 class C(object):
29 class C(object):
30 """dummy class for """
30 """dummy class for """
31
31
32 def __init__(self, **kwargs):
32 def __init__(self, **kwargs):
33 for key,value in iteritems(kwargs):
33 for key,value in iteritems(kwargs):
34 setattr(self, key, value)
34 setattr(self, key, value)
@@ -98,7 +98,7 b' def test_recarray():'
98 [('n', int), ('s', '|S1'), ('u', 'uint32')],
98 [('n', int), ('s', '|S1'), ('u', 'uint32')],
99 ]:
99 ]:
100 A = new_array(shape, dtype=dtype)
100 A = new_array(shape, dtype=dtype)
101
101
102 bufs = serialize_object(A)
102 bufs = serialize_object(A)
103 B, r = deserialize_object(bufs)
103 B, r = deserialize_object(bufs)
104 nt.assert_equal(r, [])
104 nt.assert_equal(r, [])
@@ -155,7 +155,7 b' def test_class_oldstyle():'
155 @interactive
155 @interactive
156 class C:
156 class C:
157 a=5
157 a=5
158
158
159 bufs = serialize_object(dict(C=C))
159 bufs = serialize_object(dict(C=C))
160 canned = pickle.loads(bufs[0])
160 canned = pickle.loads(bufs[0])
161 nt.assert_is_instance(canned['C'], CannedClass)
161 nt.assert_is_instance(canned['C'], CannedClass)
@@ -198,7 +198,7 b' def test_class_inheritance():'
198 @interactive
198 @interactive
199 class D(C):
199 class D(C):
200 b=10
200 b=10
201
201
202 bufs = serialize_object(dict(D=D))
202 bufs = serialize_object(dict(D=D))
203 canned = pickle.loads(bufs[0])
203 canned = pickle.loads(bufs[0])
204 nt.assert_is_instance(canned['D'], CannedClass)
204 nt.assert_is_instance(canned['D'], CannedClass)
1 NO CONTENT: file renamed from IPython/kernel/zmq/tests/test_start_kernel.py to ipython_kernel/tests/test_start_kernel.py
NO CONTENT: file renamed from IPython/kernel/zmq/tests/test_start_kernel.py to ipython_kernel/tests/test_start_kernel.py
@@ -4,6 +4,7 b''
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import atexit
6 import atexit
7 import os
7
8
8 from contextlib import contextmanager
9 from contextlib import contextmanager
9 from subprocess import PIPE, STDOUT
10 from subprocess import PIPE, STDOUT
@@ -15,7 +16,7 b' except ImportError:'
15 import nose
16 import nose
16 import nose.tools as nt
17 import nose.tools as nt
17
18
18 from IPython.kernel import manager
19 from jupyter_client import manager
19
20
20 #-------------------------------------------------------------------------------
21 #-------------------------------------------------------------------------------
21 # Globals
22 # Globals
@@ -32,7 +33,7 b' KC = None'
32 #-------------------------------------------------------------------------------
33 #-------------------------------------------------------------------------------
33 def start_new_kernel(**kwargs):
34 def start_new_kernel(**kwargs):
34 """start a new kernel, and return its Manager and Client
35 """start a new kernel, and return its Manager and Client
35
36
36 Integrates with our output capturing for tests.
37 Integrates with our output capturing for tests.
37 """
38 """
38 kwargs.update(dict(stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT))
39 kwargs.update(dict(stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT))
@@ -41,7 +42,7 b' def start_new_kernel(**kwargs):'
41 def flush_channels(kc=None):
42 def flush_channels(kc=None):
42 """flush any messages waiting on the queue"""
43 """flush any messages waiting on the queue"""
43 from .test_message_spec import validate_message
44 from .test_message_spec import validate_message
44
45
45 if kc is None:
46 if kc is None:
46 kc = KC
47 kc = KC
47 for channel in (kc.shell_channel, kc.iopub_channel):
48 for channel in (kc.shell_channel, kc.iopub_channel):
@@ -65,12 +66,12 b" def execute(code='', kc=None, **kwargs):"
65 busy = kc.get_iopub_msg(timeout=TIMEOUT)
66 busy = kc.get_iopub_msg(timeout=TIMEOUT)
66 validate_message(busy, 'status', msg_id)
67 validate_message(busy, 'status', msg_id)
67 nt.assert_equal(busy['content']['execution_state'], 'busy')
68 nt.assert_equal(busy['content']['execution_state'], 'busy')
68
69
69 if not kwargs.get('silent'):
70 if not kwargs.get('silent'):
70 execute_input = kc.get_iopub_msg(timeout=TIMEOUT)
71 execute_input = kc.get_iopub_msg(timeout=TIMEOUT)
71 validate_message(execute_input, 'execute_input', msg_id)
72 validate_message(execute_input, 'execute_input', msg_id)
72 nt.assert_equal(execute_input['content']['code'], code)
73 nt.assert_equal(execute_input['content']['code'], code)
73
74
74 return msg_id, reply['content']
75 return msg_id, reply['content']
75
76
76 def start_global_kernel():
77 def start_global_kernel():
@@ -86,9 +87,9 b' def start_global_kernel():'
86 @contextmanager
87 @contextmanager
87 def kernel():
88 def kernel():
88 """Context manager for the global kernel instance
89 """Context manager for the global kernel instance
89
90
90 Should be used for most kernel tests
91 Should be used for most kernel tests
91
92
92 Returns
93 Returns
93 -------
94 -------
94 kernel_client: connected KernelClient instance
95 kernel_client: connected KernelClient instance
@@ -116,14 +117,15 b' def stop_global_kernel():'
116
117
117 def new_kernel(argv=None):
118 def new_kernel(argv=None):
118 """Context manager for a new kernel in a subprocess
119 """Context manager for a new kernel in a subprocess
119
120
120 Should only be used for tests where the kernel must not be re-used.
121 Should only be used for tests where the kernel must not be re-used.
121
122
122 Returns
123 Returns
123 -------
124 -------
124 kernel_client: connected KernelClient instance
125 kernel_client: connected KernelClient instance
125 """
126 """
126 kwargs = dict(stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT,
127 kwargs = dict(
128 stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT,
127 startup_timeout=STARTUP_TIMEOUT)
129 startup_timeout=STARTUP_TIMEOUT)
128 if argv is not None:
130 if argv is not None:
129 kwargs['extra_arguments'] = argv
131 kwargs['extra_arguments'] = argv
@@ -34,8 +34,8 b' from IPython.core.magic import magics_class, line_magic, Magics'
34 from IPython.core import payloadpage
34 from IPython.core import payloadpage
35 from IPython.core.usage import default_gui_banner
35 from IPython.core.usage import default_gui_banner
36 from IPython.display import display, Javascript
36 from IPython.display import display, Javascript
37 from IPython.kernel.inprocess.socket import SocketABC
37 from ipython_kernel.inprocess.socket import SocketABC
38 from IPython.kernel import (
38 from ipython_kernel import (
39 get_connection_file, get_connection_info, connect_qtconsole
39 get_connection_file, get_connection_info, connect_qtconsole
40 )
40 )
41 from IPython.testing.skipdoctest import skip_doctest
41 from IPython.testing.skipdoctest import skip_doctest
@@ -46,9 +46,9 b' from IPython.utils import py3compat'
46 from IPython.utils.py3compat import unicode_type
46 from IPython.utils.py3compat import unicode_type
47 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes, Any
47 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes, Any
48 from IPython.utils.warn import error
48 from IPython.utils.warn import error
49 from IPython.kernel.zmq.displayhook import ZMQShellDisplayHook
49 from ipython_kernel.displayhook import ZMQShellDisplayHook
50 from IPython.kernel.zmq.datapub import ZMQDataPublisher
50 from ipython_kernel.datapub import ZMQDataPublisher
51 from IPython.kernel.zmq.session import extract_header
51 from ipython_kernel.session import extract_header
52 from .session import Session
52 from .session import Session
53
53
54 #-----------------------------------------------------------------------------
54 #-----------------------------------------------------------------------------
@@ -66,7 +66,7 b' class ZMQDisplayPublisher(DisplayPublisher):'
66 def set_parent(self, parent):
66 def set_parent(self, parent):
67 """Set the parent for outbound messages."""
67 """Set the parent for outbound messages."""
68 self.parent_header = extract_header(parent)
68 self.parent_header = extract_header(parent)
69
69
70 def _flush_streams(self):
70 def _flush_streams(self):
71 """flush IO Streams prior to display"""
71 """flush IO Streams prior to display"""
72 sys.stdout.flush()
72 sys.stdout.flush()
@@ -102,7 +102,7 b' class KernelMagics(Magics):'
102 # moved into a separate machinery as well. For now, at least isolate here
102 # moved into a separate machinery as well. For now, at least isolate here
103 # the magics which this class needs to implement differently from the base
103 # the magics which this class needs to implement differently from the base
104 # class, or that are unique to it.
104 # class, or that are unique to it.
105
105
106 _find_edit_target = CodeMagics._find_edit_target
106 _find_edit_target = CodeMagics._find_edit_target
107
107
108 @skip_doctest
108 @skip_doctest
@@ -248,19 +248,19 b' class KernelMagics(Magics):'
248 @line_magic
248 @line_magic
249 def connect_info(self, arg_s):
249 def connect_info(self, arg_s):
250 """Print information for connecting other clients to this kernel
250 """Print information for connecting other clients to this kernel
251
251
252 It will print the contents of this session's connection file, as well as
252 It will print the contents of this session's connection file, as well as
253 shortcuts for local clients.
253 shortcuts for local clients.
254
254
255 In the simplest case, when called from the most recently launched kernel,
255 In the simplest case, when called from the most recently launched kernel,
256 secondary clients can be connected, simply with:
256 secondary clients can be connected, simply with:
257
257
258 $> ipython <app> --existing
258 $> ipython <app> --existing
259
259
260 """
260 """
261
261
262 from IPython.core.application import BaseIPythonApplication as BaseIPApp
262 from IPython.core.application import BaseIPythonApplication as BaseIPApp
263
263
264 if BaseIPApp.initialized():
264 if BaseIPApp.initialized():
265 app = BaseIPApp.instance()
265 app = BaseIPApp.instance()
266 security_dir = app.profile_dir.security_dir
266 security_dir = app.profile_dir.security_dir
@@ -268,22 +268,22 b' class KernelMagics(Magics):'
268 else:
268 else:
269 profile = 'default'
269 profile = 'default'
270 security_dir = ''
270 security_dir = ''
271
271
272 try:
272 try:
273 connection_file = get_connection_file()
273 connection_file = get_connection_file()
274 info = get_connection_info(unpack=False)
274 info = get_connection_info(unpack=False)
275 except Exception as e:
275 except Exception as e:
276 error("Could not get connection info: %r" % e)
276 error("Could not get connection info: %r" % e)
277 return
277 return
278
278
279 # add profile flag for non-default profile
279 # add profile flag for non-default profile
280 profile_flag = "--profile %s" % profile if profile != 'default' else ""
280 profile_flag = "--profile %s" % profile if profile != 'default' else ""
281
281
282 # if it's in the security dir, truncate to basename
282 # if it's in the security dir, truncate to basename
283 if security_dir == os.path.dirname(connection_file):
283 if security_dir == os.path.dirname(connection_file):
284 connection_file = os.path.basename(connection_file)
284 connection_file = os.path.basename(connection_file)
285
285
286
286
287 print (info + '\n')
287 print (info + '\n')
288 print ("Paste the above JSON into a file, and connect with:\n"
288 print ("Paste the above JSON into a file, and connect with:\n"
289 " $> ipython <app> --existing <file>\n"
289 " $> ipython <app> --existing <file>\n"
@@ -299,11 +299,11 b' class KernelMagics(Magics):'
299 @line_magic
299 @line_magic
300 def qtconsole(self, arg_s):
300 def qtconsole(self, arg_s):
301 """Open a qtconsole connected to this kernel.
301 """Open a qtconsole connected to this kernel.
302
302
303 Useful for connecting a qtconsole to running notebooks, for better
303 Useful for connecting a qtconsole to running notebooks, for better
304 debugging.
304 debugging.
305 """
305 """
306
306
307 # %qtconsole should imply bind_kernel for engines:
307 # %qtconsole should imply bind_kernel for engines:
308 try:
308 try:
309 from IPython.parallel import bind_kernel
309 from IPython.parallel import bind_kernel
@@ -312,29 +312,29 b' class KernelMagics(Magics):'
312 pass
312 pass
313 else:
313 else:
314 bind_kernel()
314 bind_kernel()
315
315
316 try:
316 try:
317 p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix'))
317 p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix'))
318 except Exception as e:
318 except Exception as e:
319 error("Could not start qtconsole: %r" % e)
319 error("Could not start qtconsole: %r" % e)
320 return
320 return
321
321
322 @line_magic
322 @line_magic
323 def autosave(self, arg_s):
323 def autosave(self, arg_s):
324 """Set the autosave interval in the notebook (in seconds).
324 """Set the autosave interval in the notebook (in seconds).
325
325
326 The default value is 120, or two minutes.
326 The default value is 120, or two minutes.
327 ``%autosave 0`` will disable autosave.
327 ``%autosave 0`` will disable autosave.
328
328
329 This magic only has an effect when called from the notebook interface.
329 This magic only has an effect when called from the notebook interface.
330 It has no effect when called in a startup file.
330 It has no effect when called in a startup file.
331 """
331 """
332
332
333 try:
333 try:
334 interval = int(arg_s)
334 interval = int(arg_s)
335 except ValueError:
335 except ValueError:
336 raise UsageError("%%autosave requires an integer, got %r" % arg_s)
336 raise UsageError("%%autosave requires an integer, got %r" % arg_s)
337
337
338 # javascript wants milliseconds
338 # javascript wants milliseconds
339 milliseconds = 1000 * interval
339 milliseconds = 1000 * interval
340 display(Javascript("IPython.notebook.set_autosave_interval(%i)" % milliseconds),
340 display(Javascript("IPython.notebook.set_autosave_interval(%i)" % milliseconds),
@@ -354,7 +354,7 b' class ZMQInteractiveShell(InteractiveShell):'
354 data_pub_class = Type(ZMQDataPublisher)
354 data_pub_class = Type(ZMQDataPublisher)
355 kernel = Any()
355 kernel = Any()
356 parent_header = Any()
356 parent_header = Any()
357
357
358 def _banner1_default(self):
358 def _banner1_default(self):
359 return default_gui_banner
359 return default_gui_banner
360
360
@@ -370,7 +370,7 b' class ZMQInteractiveShell(InteractiveShell):'
370 exiter = Instance(ZMQExitAutocall)
370 exiter = Instance(ZMQExitAutocall)
371 def _exiter_default(self):
371 def _exiter_default(self):
372 return ZMQExitAutocall(self)
372 return ZMQExitAutocall(self)
373
373
374 def _exit_now_changed(self, name, old, new):
374 def _exit_now_changed(self, name, old, new):
375 """stop eventloop when exit_now fires"""
375 """stop eventloop when exit_now fires"""
376 if new:
376 if new:
@@ -400,11 +400,11 b' class ZMQInteractiveShell(InteractiveShell):'
400 # subprocesses as much as possible.
400 # subprocesses as much as possible.
401 env['PAGER'] = 'cat'
401 env['PAGER'] = 'cat'
402 env['GIT_PAGER'] = 'cat'
402 env['GIT_PAGER'] = 'cat'
403
403
404 def init_hooks(self):
404 def init_hooks(self):
405 super(ZMQInteractiveShell, self).init_hooks()
405 super(ZMQInteractiveShell, self).init_hooks()
406 self.set_hook('show_in_pager', page.as_hook(payloadpage.page), 99)
406 self.set_hook('show_in_pager', page.as_hook(payloadpage.page), 99)
407
407
408 def ask_exit(self):
408 def ask_exit(self):
409 """Engage the exit actions."""
409 """Engage the exit actions."""
410 self.exit_now = (not self.keepkernel_on_exit)
410 self.exit_now = (not self.keepkernel_on_exit)
@@ -431,7 +431,7 b' class ZMQInteractiveShell(InteractiveShell):'
431 topic = None
431 topic = None
432 if dh.topic:
432 if dh.topic:
433 topic = dh.topic.replace(b'execute_result', b'error')
433 topic = dh.topic.replace(b'execute_result', b'error')
434
434
435 exc_msg = dh.session.send(dh.pub_socket, u'error', json_clean(exc_content), dh.parent_header, ident=topic)
435 exc_msg = dh.session.send(dh.pub_socket, u'error', json_clean(exc_content), dh.parent_header, ident=topic)
436
436
437 # FIXME - Hack: store exception info in shell object. Right now, the
437 # FIXME - Hack: store exception info in shell object. Right now, the
@@ -454,7 +454,7 b' class ZMQInteractiveShell(InteractiveShell):'
454 replace=replace,
454 replace=replace,
455 )
455 )
456 self.payload_manager.write_payload(payload)
456 self.payload_manager.write_payload(payload)
457
457
458 def set_parent(self, parent):
458 def set_parent(self, parent):
459 """Set the parent header for associating output with its triggering input"""
459 """Set the parent header for associating output with its triggering input"""
460 self.parent_header = parent
460 self.parent_header = parent
@@ -469,10 +469,10 b' class ZMQInteractiveShell(InteractiveShell):'
469 sys.stderr.set_parent(parent)
469 sys.stderr.set_parent(parent)
470 except AttributeError:
470 except AttributeError:
471 pass
471 pass
472
472
473 def get_parent(self):
473 def get_parent(self):
474 return self.parent_header
474 return self.parent_header
475
475
476 #-------------------------------------------------------------------------
476 #-------------------------------------------------------------------------
477 # Things related to magics
477 # Things related to magics
478 #-------------------------------------------------------------------------
478 #-------------------------------------------------------------------------
1 NO CONTENT: file renamed from IPython/kernel/blocking/__init__.py to jupyter_client/blocking/__init__.py
NO CONTENT: file renamed from IPython/kernel/blocking/__init__.py to jupyter_client/blocking/__init__.py
@@ -89,4 +89,3 b' class ZMQSocketChannel(object):'
89
89
90 def start(self):
90 def start(self):
91 pass
91 pass
92
@@ -11,8 +11,8 b' except ImportError:'
11 from Queue import Empty # Python 2
11 from Queue import Empty # Python 2
12
12
13 from IPython.utils.traitlets import Type
13 from IPython.utils.traitlets import Type
14 from IPython.kernel.channels import HBChannel
14 from jupyter_client.channels import HBChannel
15 from IPython.kernel.client import KernelClient
15 from jupyter_client.client import KernelClient
16 from .channels import ZMQSocketChannel
16 from .channels import ZMQSocketChannel
17
17
18 class BlockingKernelClient(KernelClient):
18 class BlockingKernelClient(KernelClient):
1 NO CONTENT: file renamed from IPython/kernel/ioloop/__init__.py to jupyter_client/ioloop/__init__.py
NO CONTENT: file renamed from IPython/kernel/ioloop/__init__.py to jupyter_client/ioloop/__init__.py
@@ -20,7 +20,7 b' from IPython.utils.traitlets import ('
20 Instance
20 Instance
21 )
21 )
22
22
23 from IPython.kernel.manager import KernelManager
23 from jupyter_client.manager import KernelManager
24 from .restarter import IOLoopKernelRestarter
24 from .restarter import IOLoopKernelRestarter
25
25
26 #-----------------------------------------------------------------------------
26 #-----------------------------------------------------------------------------
@@ -40,7 +40,7 b' class IOLoopKernelManager(KernelManager):'
40 def _loop_default(self):
40 def _loop_default(self):
41 return ioloop.IOLoop.instance()
41 return ioloop.IOLoop.instance()
42
42
43 _restarter = Instance('IPython.kernel.ioloop.IOLoopKernelRestarter', allow_none=True)
43 _restarter = Instance('jupyter_client.ioloop.IOLoopKernelRestarter', allow_none=True)
44
44
45 def start_restarter(self):
45 def start_restarter(self):
46 if self.autorestart and self.has_kernel:
46 if self.autorestart and self.has_kernel:
@@ -20,7 +20,7 b' from __future__ import absolute_import'
20 from zmq.eventloop import ioloop
20 from zmq.eventloop import ioloop
21
21
22
22
23 from IPython.kernel.restarter import KernelRestarter
23 from jupyter_client.restarter import KernelRestarter
24 from IPython.utils.traitlets import (
24 from IPython.utils.traitlets import (
25 Instance,
25 Instance,
26 )
26 )
@@ -51,4 +51,3 b' class IOLoopKernelRestarter(KernelRestarter):'
51 if self._pcallback is not None:
51 if self._pcallback is not None:
52 self._pcallback.stop()
52 self._pcallback.stop()
53 self._pcallback = None
53 self._pcallback = None
54
@@ -53,8 +53,7 b' from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,'
53 TraitError,
53 TraitError,
54 )
54 )
55 from IPython.utils.pickleutil import PICKLE_PROTOCOL
55 from IPython.utils.pickleutil import PICKLE_PROTOCOL
56 from IPython.kernel.adapter import adapt
56 from jupyter_client.adapter import adapt
57 from IPython.kernel.zmq.serialize import MAX_ITEMS, MAX_BYTES
58
57
59 #-----------------------------------------------------------------------------
58 #-----------------------------------------------------------------------------
60 # utility functions
59 # utility functions
@@ -78,6 +77,10 b' def squash_unicode(obj):'
78 # globals and defaults
77 # globals and defaults
79 #-----------------------------------------------------------------------------
78 #-----------------------------------------------------------------------------
80
79
80 # default values for the thresholds:
81 MAX_ITEMS = 64
82 MAX_BYTES = 1024
83
81 # ISO8601-ify datetime objects
84 # ISO8601-ify datetime objects
82 # allow unicode
85 # allow unicode
83 # disallow nan, because it's not actually valid JSON
86 # disallow nan, because it's not actually valid JSON
@@ -118,7 +121,7 b' session_flags = {'
118
121
119 def default_secure(cfg):
122 def default_secure(cfg):
120 """Set the default behavior for a config environment to be secure.
123 """Set the default behavior for a config environment to be secure.
121
124
122 If Session.key/keyfile have not been set, set Session.key to
125 If Session.key/keyfile have not been set, set Session.key to
123 a new random UUID.
126 a new random UUID.
124 """
127 """
@@ -148,7 +151,7 b' class SessionFactory(LoggingConfigurable):'
148 def _context_default(self):
151 def _context_default(self):
149 return zmq.Context.instance()
152 return zmq.Context.instance()
150
153
151 session = Instance('IPython.kernel.zmq.session.Session',
154 session = Instance('jupyter_client.session.Session',
152 allow_none=True)
155 allow_none=True)
153
156
154 loop = Instance('zmq.eventloop.ioloop.IOLoop')
157 loop = Instance('zmq.eventloop.ioloop.IOLoop')
@@ -310,20 +313,20 b' class Session(Configurable):'
310
313
311 metadata = Dict({}, config=True,
314 metadata = Dict({}, config=True,
312 help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")
315 help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")
313
316
314 # if 0, no adapting to do.
317 # if 0, no adapting to do.
315 adapt_version = Integer(0)
318 adapt_version = Integer(0)
316
319
317 # message signature related traits:
320 # message signature related traits:
318
321
319 key = CBytes(config=True,
322 key = CBytes(config=True,
320 help="""execution key, for signing messages.""")
323 help="""execution key, for signing messages.""")
321 def _key_default(self):
324 def _key_default(self):
322 return str_to_bytes(str(uuid.uuid4()))
325 return str_to_bytes(str(uuid.uuid4()))
323
326
324 def _key_changed(self):
327 def _key_changed(self):
325 self._new_auth()
328 self._new_auth()
326
329
327 signature_scheme = Unicode('hmac-sha256', config=True,
330 signature_scheme = Unicode('hmac-sha256', config=True,
328 help="""The digest scheme used to construct the message signatures.
331 help="""The digest scheme used to construct the message signatures.
329 Must have the form 'hmac-HASH'.""")
332 Must have the form 'hmac-HASH'.""")
@@ -336,7 +339,7 b' class Session(Configurable):'
336 except AttributeError:
339 except AttributeError:
337 raise TraitError("hashlib has no such attribute: %s" % hash_name)
340 raise TraitError("hashlib has no such attribute: %s" % hash_name)
338 self._new_auth()
341 self._new_auth()
339
342
340 digest_mod = Any()
343 digest_mod = Any()
341 def _digest_mod_default(self):
344 def _digest_mod_default(self):
342 return hashlib.sha256
345 return hashlib.sha256
@@ -348,11 +351,11 b' class Session(Configurable):'
348 self.auth = hmac.HMAC(self.key, digestmod=self.digest_mod)
351 self.auth = hmac.HMAC(self.key, digestmod=self.digest_mod)
349 else:
352 else:
350 self.auth = None
353 self.auth = None
351
354
352 digest_history = Set()
355 digest_history = Set()
353 digest_history_size = Integer(2**16, config=True,
356 digest_history_size = Integer(2**16, config=True,
354 help="""The maximum number of digests to remember.
357 help="""The maximum number of digests to remember.
355
358
356 The digest history will be culled when it exceeds this value.
359 The digest history will be culled when it exceeds this value.
357 """
360 """
358 )
361 )
@@ -365,9 +368,9 b' class Session(Configurable):'
365
368
366 # for protecting against sends from forks
369 # for protecting against sends from forks
367 pid = Integer()
370 pid = Integer()
368
371
369 # serialization traits:
372 # serialization traits:
370
373
371 pack = Any(default_packer) # the actual packer function
374 pack = Any(default_packer) # the actual packer function
372 def _pack_changed(self, name, old, new):
375 def _pack_changed(self, name, old, new):
373 if not callable(new):
376 if not callable(new):
@@ -378,7 +381,7 b' class Session(Configurable):'
378 # unpacker is not checked - it is assumed to be
381 # unpacker is not checked - it is assumed to be
379 if not callable(new):
382 if not callable(new):
380 raise TypeError("unpacker must be callable, not %s"%type(new))
383 raise TypeError("unpacker must be callable, not %s"%type(new))
381
384
382 # thresholds:
385 # thresholds:
383 copy_threshold = Integer(2**16, config=True,
386 copy_threshold = Integer(2**16, config=True,
384 help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
387 help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
@@ -390,7 +393,7 b' class Session(Configurable):'
390 """
393 """
391 )
394 )
392
395
393
396
394 def __init__(self, **kwargs):
397 def __init__(self, **kwargs):
395 """create a Session object
398 """create a Session object
396
399
@@ -625,7 +628,7 b' class Session(Configurable):'
625 track : bool
628 track : bool
626 Whether to track. Only for use with Sockets, because ZMQStream
629 Whether to track. Only for use with Sockets, because ZMQStream
627 objects cannot track messages.
630 objects cannot track messages.
628
631
629
632
630 Returns
633 Returns
631 -------
634 -------
@@ -655,7 +658,7 b' class Session(Configurable):'
655 to_send.extend(buffers)
658 to_send.extend(buffers)
656 longest = max([ len(s) for s in to_send ])
659 longest = max([ len(s) for s in to_send ])
657 copy = (longest < self.copy_threshold)
660 copy = (longest < self.copy_threshold)
658
661
659 if buffers and track and not copy:
662 if buffers and track and not copy:
660 # only really track when we are doing zero-copy buffers
663 # only really track when we are doing zero-copy buffers
661 tracker = stream.send_multipart(to_send, copy=False, track=True)
664 tracker = stream.send_multipart(to_send, copy=False, track=True)
@@ -776,15 +779,15 b' class Session(Configurable):'
776 if self.digest_history_size == 0:
779 if self.digest_history_size == 0:
777 # no history, never add digests
780 # no history, never add digests
778 return
781 return
779
782
780 self.digest_history.add(signature)
783 self.digest_history.add(signature)
781 if len(self.digest_history) > self.digest_history_size:
784 if len(self.digest_history) > self.digest_history_size:
782 # threshold reached, cull 10%
785 # threshold reached, cull 10%
783 self._cull_digest_history()
786 self._cull_digest_history()
784
787
785 def _cull_digest_history(self):
788 def _cull_digest_history(self):
786 """cull the digest history
789 """cull the digest history
787
790
788 Removes a randomly selected 10% of the digest history
791 Removes a randomly selected 10% of the digest history
789 """
792 """
790 current = len(self.digest_history)
793 current = len(self.digest_history)
@@ -794,7 +797,7 b' class Session(Configurable):'
794 return
797 return
795 to_cull = random.sample(self.digest_history, n_to_cull)
798 to_cull = random.sample(self.digest_history, n_to_cull)
796 self.digest_history.difference_update(to_cull)
799 self.digest_history.difference_update(to_cull)
797
800
798 def deserialize(self, msg_list, content=True, copy=True):
801 def deserialize(self, msg_list, content=True, copy=True):
799 """Unserialize a msg_list to a nested message dict.
802 """Unserialize a msg_list to a nested message dict.
800
803
@@ -855,7 +858,7 b' class Session(Configurable):'
855 message['buffers'] = buffers
858 message['buffers'] = buffers
856 # adapt to the current version
859 # adapt to the current version
857 return adapt(message)
860 return adapt(message)
858
861
859 def unserialize(self, *args, **kwargs):
862 def unserialize(self, *args, **kwargs):
860 warnings.warn(
863 warnings.warn(
861 "Session.unserialize is deprecated. Use Session.deserialize.",
864 "Session.unserialize is deprecated. Use Session.deserialize.",
@@ -879,4 +882,3 b' def test_msg2obj():'
879 am2 = dict(ao)
882 am2 = dict(ao)
880 assert am['x'] == am2['x']
883 assert am['x'] == am2['x']
881 assert am['y']['z'] == am2['y']['z']
884 assert am['y']['z'] == am2['y']['z']
882
1 NO CONTENT: file renamed from IPython/kernel/zmq/tests/__init__.py to jupyter_client/tests/__init__.py
NO CONTENT: file renamed from IPython/kernel/zmq/tests/__init__.py to jupyter_client/tests/__init__.py
@@ -8,8 +8,8 b' import json'
8 from unittest import TestCase
8 from unittest import TestCase
9 import nose.tools as nt
9 import nose.tools as nt
10
10
11 from IPython.kernel.adapter import adapt, V4toV5, V5toV4, code_to_line
11 from jupyter_client.adapter import adapt, V4toV5, V5toV4, code_to_line
12 from IPython.kernel.zmq.session import Session
12 from jupyter_client.session import Session
13
13
14
14
15 def test_default_version():
15 def test_default_version():
@@ -26,15 +26,15 b' def test_code_to_line_no_code():'
26 nt.assert_equal(pos, 0)
26 nt.assert_equal(pos, 0)
27
27
28 class AdapterTest(TestCase):
28 class AdapterTest(TestCase):
29
29
30 def setUp(self):
30 def setUp(self):
31 self.session = Session()
31 self.session = Session()
32
32
33 def adapt(self, msg, version=None):
33 def adapt(self, msg, version=None):
34 original = copy.deepcopy(msg)
34 original = copy.deepcopy(msg)
35 adapted = adapt(msg, version or self.to_version)
35 adapted = adapt(msg, version or self.to_version)
36 return original, adapted
36 return original, adapted
37
37
38 def check_header(self, msg):
38 def check_header(self, msg):
39 pass
39 pass
40
40
@@ -42,28 +42,28 b' class AdapterTest(TestCase):'
42 class V4toV5TestCase(AdapterTest):
42 class V4toV5TestCase(AdapterTest):
43 from_version = 4
43 from_version = 4
44 to_version = 5
44 to_version = 5
45
45
46 def msg(self, msg_type, content):
46 def msg(self, msg_type, content):
47 """Create a v4 msg (same as v5, minus version header)"""
47 """Create a v4 msg (same as v5, minus version header)"""
48 msg = self.session.msg(msg_type, content)
48 msg = self.session.msg(msg_type, content)
49 msg['header'].pop('version')
49 msg['header'].pop('version')
50 return msg
50 return msg
51
51
52 def test_same_version(self):
52 def test_same_version(self):
53 msg = self.msg("execute_result",
53 msg = self.msg("execute_result",
54 content={'status' : 'ok'}
54 content={'status' : 'ok'}
55 )
55 )
56 original, adapted = self.adapt(msg, self.from_version)
56 original, adapted = self.adapt(msg, self.from_version)
57
57
58 self.assertEqual(original, adapted)
58 self.assertEqual(original, adapted)
59
59
60 def test_no_adapt(self):
60 def test_no_adapt(self):
61 msg = self.msg("input_reply", {'value' : 'some text'})
61 msg = self.msg("input_reply", {'value' : 'some text'})
62 v4, v5 = self.adapt(msg)
62 v4, v5 = self.adapt(msg)
63 self.assertEqual(v5['header']['version'], V4toV5.version)
63 self.assertEqual(v5['header']['version'], V4toV5.version)
64 v5['header'].pop('version')
64 v5['header'].pop('version')
65 self.assertEqual(v4, v5)
65 self.assertEqual(v4, v5)
66
66
67 def test_rename_type(self):
67 def test_rename_type(self):
68 for v5_type, v4_type in [
68 for v5_type, v4_type in [
69 ('execute_result', 'pyout'),
69 ('execute_result', 'pyout'),
@@ -75,7 +75,7 b' class V4toV5TestCase(AdapterTest):'
75 self.assertEqual(v5['header']['version'], V4toV5.version)
75 self.assertEqual(v5['header']['version'], V4toV5.version)
76 self.assertEqual(v5['header']['msg_type'], v5_type)
76 self.assertEqual(v5['header']['msg_type'], v5_type)
77 self.assertEqual(v4['content'], v5['content'])
77 self.assertEqual(v4['content'], v5['content'])
78
78
79 def test_execute_request(self):
79 def test_execute_request(self):
80 msg = self.msg("execute_request", {
80 msg = self.msg("execute_request", {
81 'code' : 'a=5',
81 'code' : 'a=5',
@@ -106,7 +106,7 b' class V4toV5TestCase(AdapterTest):'
106 self.assertEqual(v5c['payload'], [{'source': 'page',
106 self.assertEqual(v5c['payload'], [{'source': 'page',
107 'data': {'text/plain': 'blah'}}
107 'data': {'text/plain': 'blah'}}
108 ])
108 ])
109
109
110 def test_complete_request(self):
110 def test_complete_request(self):
111 msg = self.msg("complete_request", {
111 msg = self.msg("complete_request", {
112 'text' : 'a.is',
112 'text' : 'a.is',
@@ -121,7 +121,7 b' class V4toV5TestCase(AdapterTest):'
121 self.assertNotIn(key, v5c)
121 self.assertNotIn(key, v5c)
122 self.assertEqual(v5c['cursor_pos'], v4c['cursor_pos'])
122 self.assertEqual(v5c['cursor_pos'], v4c['cursor_pos'])
123 self.assertEqual(v5c['code'], v4c['line'])
123 self.assertEqual(v5c['code'], v4c['line'])
124
124
125 def test_complete_reply(self):
125 def test_complete_reply(self):
126 msg = self.msg("complete_reply", {
126 msg = self.msg("complete_reply", {
127 'matched_text' : 'a.is',
127 'matched_text' : 'a.is',
@@ -134,12 +134,12 b' class V4toV5TestCase(AdapterTest):'
134 v4, v5 = self.adapt(msg)
134 v4, v5 = self.adapt(msg)
135 v4c = v4['content']
135 v4c = v4['content']
136 v5c = v5['content']
136 v5c = v5['content']
137
137
138 self.assertEqual(v5c['matches'], v4c['matches'])
138 self.assertEqual(v5c['matches'], v4c['matches'])
139 self.assertEqual(v5c['metadata'], {})
139 self.assertEqual(v5c['metadata'], {})
140 self.assertEqual(v5c['cursor_start'], -4)
140 self.assertEqual(v5c['cursor_start'], -4)
141 self.assertEqual(v5c['cursor_end'], None)
141 self.assertEqual(v5c['cursor_end'], None)
142
142
143 def test_object_info_request(self):
143 def test_object_info_request(self):
144 msg = self.msg("object_info_request", {
144 msg = self.msg("object_info_request", {
145 'oname' : 'foo',
145 'oname' : 'foo',
@@ -152,7 +152,7 b' class V4toV5TestCase(AdapterTest):'
152 self.assertEqual(v5c['code'], v4c['oname'])
152 self.assertEqual(v5c['code'], v4c['oname'])
153 self.assertEqual(v5c['cursor_pos'], len(v4c['oname']))
153 self.assertEqual(v5c['cursor_pos'], len(v4c['oname']))
154 self.assertEqual(v5c['detail_level'], v4c['detail_level'])
154 self.assertEqual(v5c['detail_level'], v4c['detail_level'])
155
155
156 def test_object_info_reply(self):
156 def test_object_info_reply(self):
157 msg = self.msg("object_info_reply", {
157 msg = self.msg("object_info_reply", {
158 'oname' : 'foo',
158 'oname' : 'foo',
@@ -168,7 +168,7 b' class V4toV5TestCase(AdapterTest):'
168 self.assertEqual(sorted(v5c), [ 'data', 'found', 'metadata', 'name', 'status'])
168 self.assertEqual(sorted(v5c), [ 'data', 'found', 'metadata', 'name', 'status'])
169 text = v5c['data']['text/plain']
169 text = v5c['data']['text/plain']
170 self.assertEqual(text, '\n'.join([v4c['definition'], v4c['docstring']]))
170 self.assertEqual(text, '\n'.join([v4c['definition'], v4c['docstring']]))
171
171
172 def test_kernel_info_reply(self):
172 def test_kernel_info_reply(self):
173 msg = self.msg("kernel_info_reply", {
173 msg = self.msg("kernel_info_reply", {
174 'language': 'python',
174 'language': 'python',
@@ -188,9 +188,9 b' class V4toV5TestCase(AdapterTest):'
188 },
188 },
189 'banner' : '',
189 'banner' : '',
190 })
190 })
191
191
192 # iopub channel
192 # iopub channel
193
193
194 def test_display_data(self):
194 def test_display_data(self):
195 jsondata = dict(a=5)
195 jsondata = dict(a=5)
196 msg = self.msg("display_data", {
196 msg = self.msg("display_data", {
@@ -206,9 +206,9 b' class V4toV5TestCase(AdapterTest):'
206 self.assertEqual(v5c['metadata'], v4c['metadata'])
206 self.assertEqual(v5c['metadata'], v4c['metadata'])
207 self.assertEqual(v5c['data']['text/plain'], v4c['data']['text/plain'])
207 self.assertEqual(v5c['data']['text/plain'], v4c['data']['text/plain'])
208 self.assertEqual(v5c['data']['application/json'], jsondata)
208 self.assertEqual(v5c['data']['application/json'], jsondata)
209
209
210 # stdin channel
210 # stdin channel
211
211
212 def test_input_request(self):
212 def test_input_request(self):
213 msg = self.msg('input_request', {'prompt': "$>"})
213 msg = self.msg('input_request', {'prompt': "$>"})
214 v4, v5 = self.adapt(msg)
214 v4, v5 = self.adapt(msg)
@@ -219,25 +219,25 b' class V4toV5TestCase(AdapterTest):'
219 class V5toV4TestCase(AdapterTest):
219 class V5toV4TestCase(AdapterTest):
220 from_version = 5
220 from_version = 5
221 to_version = 4
221 to_version = 4
222
222
223 def msg(self, msg_type, content):
223 def msg(self, msg_type, content):
224 return self.session.msg(msg_type, content)
224 return self.session.msg(msg_type, content)
225
225
226 def test_same_version(self):
226 def test_same_version(self):
227 msg = self.msg("execute_result",
227 msg = self.msg("execute_result",
228 content={'status' : 'ok'}
228 content={'status' : 'ok'}
229 )
229 )
230 original, adapted = self.adapt(msg, self.from_version)
230 original, adapted = self.adapt(msg, self.from_version)
231
231
232 self.assertEqual(original, adapted)
232 self.assertEqual(original, adapted)
233
233
234 def test_no_adapt(self):
234 def test_no_adapt(self):
235 msg = self.msg("input_reply", {'value' : 'some text'})
235 msg = self.msg("input_reply", {'value' : 'some text'})
236 v5, v4 = self.adapt(msg)
236 v5, v4 = self.adapt(msg)
237 self.assertNotIn('version', v4['header'])
237 self.assertNotIn('version', v4['header'])
238 v5['header'].pop('version')
238 v5['header'].pop('version')
239 self.assertEqual(v4, v5)
239 self.assertEqual(v4, v5)
240
240
241 def test_rename_type(self):
241 def test_rename_type(self):
242 for v5_type, v4_type in [
242 for v5_type, v4_type in [
243 ('execute_result', 'pyout'),
243 ('execute_result', 'pyout'),
@@ -249,7 +249,7 b' class V5toV4TestCase(AdapterTest):'
249 self.assertEqual(v4['header']['msg_type'], v4_type)
249 self.assertEqual(v4['header']['msg_type'], v4_type)
250 nt.assert_not_in('version', v4['header'])
250 nt.assert_not_in('version', v4['header'])
251 self.assertEqual(v4['content'], v5['content'])
251 self.assertEqual(v4['content'], v5['content'])
252
252
253 def test_execute_request(self):
253 def test_execute_request(self):
254 msg = self.msg("execute_request", {
254 msg = self.msg("execute_request", {
255 'code' : 'a=5',
255 'code' : 'a=5',
@@ -262,7 +262,7 b' class V5toV4TestCase(AdapterTest):'
262 v5c = v5['content']
262 v5c = v5['content']
263 self.assertEqual(v4c['user_variables'], [])
263 self.assertEqual(v4c['user_variables'], [])
264 self.assertEqual(v5c['code'], v4c['code'])
264 self.assertEqual(v5c['code'], v4c['code'])
265
265
266 def test_complete_request(self):
266 def test_complete_request(self):
267 msg = self.msg("complete_request", {
267 msg = self.msg("complete_request", {
268 'code' : 'def foo():\n'
268 'code' : 'def foo():\n'
@@ -278,7 +278,7 b' class V5toV4TestCase(AdapterTest):'
278 self.assertEqual(v4c['cursor_pos'], 8)
278 self.assertEqual(v4c['cursor_pos'], 8)
279 self.assertEqual(v4c['text'], '')
279 self.assertEqual(v4c['text'], '')
280 self.assertEqual(v4c['block'], None)
280 self.assertEqual(v4c['block'], None)
281
281
282 def test_complete_reply(self):
282 def test_complete_reply(self):
283 msg = self.msg("complete_reply", {
283 msg = self.msg("complete_reply", {
284 'cursor_start' : 10,
284 'cursor_start' : 10,
@@ -295,7 +295,7 b' class V5toV4TestCase(AdapterTest):'
295 v5c = v5['content']
295 v5c = v5['content']
296 self.assertEqual(v4c['matched_text'], 'a.is')
296 self.assertEqual(v4c['matched_text'], 'a.is')
297 self.assertEqual(v4c['matches'], v5c['matches'])
297 self.assertEqual(v4c['matches'], v5c['matches'])
298
298
299 def test_inspect_request(self):
299 def test_inspect_request(self):
300 msg = self.msg("inspect_request", {
300 msg = self.msg("inspect_request", {
301 'code' : 'def foo():\n'
301 'code' : 'def foo():\n'
@@ -310,7 +310,7 b' class V5toV4TestCase(AdapterTest):'
310 v5c = v5['content']
310 v5c = v5['content']
311 self.assertEqual(v4c['oname'], 'apple')
311 self.assertEqual(v4c['oname'], 'apple')
312 self.assertEqual(v5c['detail_level'], v4c['detail_level'])
312 self.assertEqual(v5c['detail_level'], v4c['detail_level'])
313
313
314 def test_inspect_reply(self):
314 def test_inspect_reply(self):
315 msg = self.msg("inspect_reply", {
315 msg = self.msg("inspect_reply", {
316 'name' : 'foo',
316 'name' : 'foo',
@@ -324,7 +324,7 b' class V5toV4TestCase(AdapterTest):'
324 v5c = v5['content']
324 v5c = v5['content']
325 self.assertEqual(sorted(v4c), ['found', 'oname'])
325 self.assertEqual(sorted(v4c), ['found', 'oname'])
326 self.assertEqual(v4c['found'], False)
326 self.assertEqual(v4c['found'], False)
327
327
328 def test_kernel_info_reply(self):
328 def test_kernel_info_reply(self):
329 msg = self.msg("kernel_info_reply", {
329 msg = self.msg("kernel_info_reply", {
330 'protocol_version': '5.0',
330 'protocol_version': '5.0',
@@ -347,9 +347,9 b' class V5toV4TestCase(AdapterTest):'
347 'language_version': [2,8,0],
347 'language_version': [2,8,0],
348 'ipython_version': [1,2,3],
348 'ipython_version': [1,2,3],
349 })
349 })
350
350
351 # iopub channel
351 # iopub channel
352
352
353 def test_display_data(self):
353 def test_display_data(self):
354 jsondata = dict(a=5)
354 jsondata = dict(a=5)
355 msg = self.msg("display_data", {
355 msg = self.msg("display_data", {
@@ -365,13 +365,11 b' class V5toV4TestCase(AdapterTest):'
365 self.assertEqual(v5c['metadata'], v4c['metadata'])
365 self.assertEqual(v5c['metadata'], v4c['metadata'])
366 self.assertEqual(v5c['data']['text/plain'], v4c['data']['text/plain'])
366 self.assertEqual(v5c['data']['text/plain'], v4c['data']['text/plain'])
367 self.assertEqual(v4c['data']['application/json'], json.dumps(jsondata))
367 self.assertEqual(v4c['data']['application/json'], json.dumps(jsondata))
368
368
369 # stdin channel
369 # stdin channel
370
370
371 def test_input_request(self):
371 def test_input_request(self):
372 msg = self.msg('input_request', {'prompt': "$>", 'password' : True})
372 msg = self.msg('input_request', {'prompt': "$>", 'password' : True})
373 v5, v4 = self.adapt(msg)
373 v5, v4 = self.adapt(msg)
374 self.assertEqual(v5['content']['prompt'], v4['content']['prompt'])
374 self.assertEqual(v5['content']['prompt'], v4['content']['prompt'])
375 self.assertNotIn('password', v4['content'])
375 self.assertNotIn('password', v4['content'])
376
377
@@ -26,8 +26,8 b' from IPython.consoleapp import IPythonConsoleApp'
26 from IPython.core.application import BaseIPythonApplication
26 from IPython.core.application import BaseIPythonApplication
27 from IPython.utils.tempdir import TemporaryDirectory, TemporaryWorkingDirectory
27 from IPython.utils.tempdir import TemporaryDirectory, TemporaryWorkingDirectory
28 from IPython.utils.py3compat import str_to_bytes
28 from IPython.utils.py3compat import str_to_bytes
29 from IPython.kernel import connect
29 from jupyter_client import connect
30 from IPython.kernel.zmq.session import Session
30 from jupyter_client.session import Session
31
31
32 #-----------------------------------------------------------------------------
32 #-----------------------------------------------------------------------------
33 # Classes and functions
33 # Classes and functions
@@ -60,13 +60,13 b' def test_load_connection_file_session():'
60 app = DummyConsoleApp(session=Session())
60 app = DummyConsoleApp(session=Session())
61 app.initialize(argv=[])
61 app.initialize(argv=[])
62 session = app.session
62 session = app.session
63
63
64 with TemporaryDirectory() as d:
64 with TemporaryDirectory() as d:
65 cf = os.path.join(d, 'kernel.json')
65 cf = os.path.join(d, 'kernel.json')
66 connect.write_connection_file(cf, **sample_info)
66 connect.write_connection_file(cf, **sample_info)
67 app.connection_file = cf
67 app.connection_file = cf
68 app.load_connection_file()
68 app.load_connection_file()
69
69
70 nt.assert_equal(session.key, sample_info['key'])
70 nt.assert_equal(session.key, sample_info['key'])
71 nt.assert_equal(session.signature_scheme, sample_info['signature_scheme'])
71 nt.assert_equal(session.signature_scheme, sample_info['signature_scheme'])
72
72
@@ -78,7 +78,7 b' def test_app_load_connection_file():'
78 connect.write_connection_file(cf, **sample_info)
78 connect.write_connection_file(cf, **sample_info)
79 app = DummyConsoleApp(connection_file=cf)
79 app = DummyConsoleApp(connection_file=cf)
80 app.initialize(argv=[])
80 app.initialize(argv=[])
81
81
82 for attr, expected in sample_info.items():
82 for attr, expected in sample_info.items():
83 if attr in ('key', 'signature_scheme'):
83 if attr in ('key', 'signature_scheme'):
84 continue
84 continue
@@ -92,14 +92,14 b' def test_get_connection_file():'
92 cf = 'kernel.json'
92 cf = 'kernel.json'
93 app = DummyConsoleApp(config=cfg, connection_file=cf)
93 app = DummyConsoleApp(config=cfg, connection_file=cf)
94 app.initialize(argv=[])
94 app.initialize(argv=[])
95
95
96 profile_cf = os.path.join(app.profile_dir.location, 'security', cf)
96 profile_cf = os.path.join(app.profile_dir.location, 'security', cf)
97 nt.assert_equal(profile_cf, app.connection_file)
97 nt.assert_equal(profile_cf, app.connection_file)
98 with open(profile_cf, 'w') as f:
98 with open(profile_cf, 'w') as f:
99 f.write("{}")
99 f.write("{}")
100 nt.assert_true(os.path.exists(profile_cf))
100 nt.assert_true(os.path.exists(profile_cf))
101 nt.assert_equal(connect.get_connection_file(app), profile_cf)
101 nt.assert_equal(connect.get_connection_file(app), profile_cf)
102
102
103 app.connection_file = cf
103 app.connection_file = cf
104 nt.assert_equal(connect.get_connection_file(app), profile_cf)
104 nt.assert_equal(connect.get_connection_file(app), profile_cf)
105
105
@@ -111,11 +111,11 b' def test_find_connection_file():'
111 app = DummyConsoleApp(config=cfg, connection_file=cf)
111 app = DummyConsoleApp(config=cfg, connection_file=cf)
112 app.initialize(argv=[])
112 app.initialize(argv=[])
113 BaseIPythonApplication._instance = app
113 BaseIPythonApplication._instance = app
114
114
115 profile_cf = os.path.join(app.profile_dir.location, 'security', cf)
115 profile_cf = os.path.join(app.profile_dir.location, 'security', cf)
116 with open(profile_cf, 'w') as f:
116 with open(profile_cf, 'w') as f:
117 f.write("{}")
117 f.write("{}")
118
118
119 for query in (
119 for query in (
120 'kernel.json',
120 'kernel.json',
121 'kern*',
121 'kern*',
@@ -123,7 +123,7 b' def test_find_connection_file():'
123 'k*',
123 'k*',
124 ):
124 ):
125 nt.assert_equal(connect.find_connection_file(query), profile_cf)
125 nt.assert_equal(connect.find_connection_file(query), profile_cf)
126
126
127 BaseIPythonApplication._instance = None
127 BaseIPythonApplication._instance = None
128
128
129 def test_get_connection_info():
129 def test_get_connection_info():
@@ -132,12 +132,10 b' def test_get_connection_info():'
132 connect.write_connection_file(cf, **sample_info)
132 connect.write_connection_file(cf, **sample_info)
133 json_info = connect.get_connection_info(cf)
133 json_info = connect.get_connection_info(cf)
134 info = connect.get_connection_info(cf, unpack=True)
134 info = connect.get_connection_info(cf, unpack=True)
135
135
136 nt.assert_equal(type(json_info), type(""))
136 nt.assert_equal(type(json_info), type(""))
137 nt.assert_equal(info, sample_info)
137 nt.assert_equal(info, sample_info)
138
138
139 info2 = json.loads(json_info)
139 info2 = json.loads(json_info)
140 info2['key'] = str_to_bytes(info2['key'])
140 info2['key'] = str_to_bytes(info2['key'])
141 nt.assert_equal(info2, sample_info)
141 nt.assert_equal(info2, sample_info)
142
143
@@ -7,7 +7,7 b' from unittest import TestCase'
7 from IPython.testing import decorators as dec
7 from IPython.testing import decorators as dec
8
8
9 from IPython.config.loader import Config
9 from IPython.config.loader import Config
10 from IPython.kernel import KernelManager
10 from jupyter_client import KernelManager
11
11
12 class TestKernelManager(TestCase):
12 class TestKernelManager(TestCase):
13
13
@@ -40,7 +40,7 b' class TestKernelManager(TestCase):'
40 def test_ipc_lifecycle(self):
40 def test_ipc_lifecycle(self):
41 km = self._get_ipc_km()
41 km = self._get_ipc_km()
42 self._run_lifecycle(km)
42 self._run_lifecycle(km)
43
43
44 def test_get_connect_info(self):
44 def test_get_connect_info(self):
45 km = self._get_tcp_km()
45 km = self._get_tcp_km()
46 cinfo = km.get_connection_info()
46 cinfo = km.get_connection_info()
@@ -51,4 +51,3 b' class TestKernelManager(TestCase):'
51 'key', 'signature_scheme',
51 'key', 'signature_scheme',
52 ])
52 ])
53 self.assertEqual(keys, expected)
53 self.assertEqual(keys, expected)
54
@@ -5,7 +5,7 b' import unittest'
5
5
6 from IPython.testing.decorators import onlyif
6 from IPython.testing.decorators import onlyif
7 from IPython.utils.tempdir import TemporaryDirectory
7 from IPython.utils.tempdir import TemporaryDirectory
8 from IPython.kernel import kernelspec
8 from jupyter_client import kernelspec
9
9
10 sample_kernel_json = {'argv':['cat', '{connection_file}'],
10 sample_kernel_json = {'argv':['cat', '{connection_file}'],
11 'display_name':'Test kernel',
11 'display_name':'Test kernel',
@@ -22,7 +22,7 b' class KernelSpecTests(unittest.TestCase):'
22 json.dump(sample_kernel_json, f)
22 json.dump(sample_kernel_json, f)
23
23
24 self.ksm = kernelspec.KernelSpecManager(ipython_dir=td.name)
24 self.ksm = kernelspec.KernelSpecManager(ipython_dir=td.name)
25
25
26 td2 = TemporaryDirectory()
26 td2 = TemporaryDirectory()
27 self.addCleanup(td2.cleanup)
27 self.addCleanup(td2.cleanup)
28 self.installable_kernel = td2.name
28 self.installable_kernel = td2.name
@@ -39,18 +39,18 b' class KernelSpecTests(unittest.TestCase):'
39 self.assertEqual(ks.argv, sample_kernel_json['argv'])
39 self.assertEqual(ks.argv, sample_kernel_json['argv'])
40 self.assertEqual(ks.display_name, sample_kernel_json['display_name'])
40 self.assertEqual(ks.display_name, sample_kernel_json['display_name'])
41 self.assertEqual(ks.env, {})
41 self.assertEqual(ks.env, {})
42
42
43 def test_install_kernel_spec(self):
43 def test_install_kernel_spec(self):
44 self.ksm.install_kernel_spec(self.installable_kernel,
44 self.ksm.install_kernel_spec(self.installable_kernel,
45 kernel_name='tstinstalled',
45 kernel_name='tstinstalled',
46 user=True)
46 user=True)
47 self.assertIn('tstinstalled', self.ksm.find_kernel_specs())
47 self.assertIn('tstinstalled', self.ksm.find_kernel_specs())
48
48
49 with self.assertRaises(OSError):
49 with self.assertRaises(OSError):
50 self.ksm.install_kernel_spec(self.installable_kernel,
50 self.ksm.install_kernel_spec(self.installable_kernel,
51 kernel_name='tstinstalled',
51 kernel_name='tstinstalled',
52 user=True)
52 user=True)
53
53
54 # Smoketest that this succeeds
54 # Smoketest that this succeeds
55 self.ksm.install_kernel_spec(self.installable_kernel,
55 self.ksm.install_kernel_spec(self.installable_kernel,
56 kernel_name='tstinstalled',
56 kernel_name='tstinstalled',
@@ -20,7 +20,7 b' Authors'
20 import nose.tools as nt
20 import nose.tools as nt
21
21
22 # Our own imports
22 # Our own imports
23 from IPython.kernel.launcher import swallow_argv
23 from jupyter_client.launcher import swallow_argv
24
24
25 #-----------------------------------------------------------------------------
25 #-----------------------------------------------------------------------------
26 # Classes and functions
26 # Classes and functions
@@ -55,4 +55,3 b' def test_swallow_argv():'
55 "returned : %r" % stripped,
55 "returned : %r" % stripped,
56 ])
56 ])
57 nt.assert_equal(expected, stripped, message)
57 nt.assert_equal(expected, stripped, message)
58
@@ -8,8 +8,8 b' from IPython.testing import decorators as dec'
8
8
9 from IPython.config.loader import Config
9 from IPython.config.loader import Config
10 from IPython.utils.localinterfaces import localhost
10 from IPython.utils.localinterfaces import localhost
11 from IPython.kernel import KernelManager
11 from jupyter_client import KernelManager
12 from IPython.kernel.multikernelmanager import MultiKernelManager
12 from jupyter_client.multikernelmanager import MultiKernelManager
13
13
14 class TestKernelManager(TestCase):
14 class TestKernelManager(TestCase):
15
15
@@ -61,7 +61,7 b' class TestKernelManager(TestCase):'
61 def test_tcp_lifecycle(self):
61 def test_tcp_lifecycle(self):
62 km = self._get_tcp_km()
62 km = self._get_tcp_km()
63 self._run_lifecycle(km)
63 self._run_lifecycle(km)
64
64
65 def test_shutdown_all(self):
65 def test_shutdown_all(self):
66 km = self._get_tcp_km()
66 km = self._get_tcp_km()
67 kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
67 kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
@@ -70,7 +70,7 b' class TestKernelManager(TestCase):'
70 self.assertNotIn(kid, km)
70 self.assertNotIn(kid, km)
71 # shutdown again is okay, because we have no kernels
71 # shutdown again is okay, because we have no kernels
72 km.shutdown_all()
72 km.shutdown_all()
73
73
74 def test_tcp_cinfo(self):
74 def test_tcp_cinfo(self):
75 km = self._get_tcp_km()
75 km = self._get_tcp_km()
76 self._run_cinfo(km, 'tcp', localhost())
76 self._run_cinfo(km, 'tcp', localhost())
@@ -79,9 +79,8 b' class TestKernelManager(TestCase):'
79 def test_ipc_lifecycle(self):
79 def test_ipc_lifecycle(self):
80 km = self._get_ipc_km()
80 km = self._get_ipc_km()
81 self._run_lifecycle(km)
81 self._run_lifecycle(km)
82
82
83 @dec.skip_win32
83 @dec.skip_win32
84 def test_ipc_cinfo(self):
84 def test_ipc_cinfo(self):
85 km = self._get_ipc_km()
85 km = self._get_ipc_km()
86 self._run_cinfo(km, 'ipc', 'test')
86 self._run_cinfo(km, 'ipc', 'test')
87
@@ -1,4 +1,4 b''
1 """Test the IPython.kernel public API
1 """Test the jupyter_client public API
2
2
3 Authors
3 Authors
4 -------
4 -------
@@ -14,7 +14,7 b' Authors'
14
14
15 import nose.tools as nt
15 import nose.tools as nt
16
16
17 from IPython.kernel import launcher, connect
17 from jupyter_client import launcher, connect
18 from IPython import kernel
18 from IPython import kernel
19
19
20 #-----------------------------------------------------------------------------
20 #-----------------------------------------------------------------------------
@@ -38,4 +38,3 b' def test_launcher():'
38 def test_connect():
38 def test_connect():
39 for name in connect.__all__:
39 for name in connect.__all__:
40 nt.assert_in(name, dir(kernel))
40 nt.assert_in(name, dir(kernel))
41
@@ -13,7 +13,7 b' import zmq'
13 from zmq.tests import BaseZMQTestCase
13 from zmq.tests import BaseZMQTestCase
14 from zmq.eventloop.zmqstream import ZMQStream
14 from zmq.eventloop.zmqstream import ZMQStream
15
15
16 from IPython.kernel.zmq import session as ss
16 from jupyter_client import session as ss
17
17
18 from IPython.testing.decorators import skipif, module_not_available
18 from IPython.testing.decorators import skipif, module_not_available
19 from IPython.utils.py3compat import string_types
19 from IPython.utils.py3compat import string_types
@@ -77,7 +77,7 b' class TestSession(SessionTestCase):'
77
77
78 msg = self.session.msg('execute', content=dict(a=10))
78 msg = self.session.msg('execute', content=dict(a=10))
79 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
79 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
80
80
81 ident, msg_list = self.session.feed_identities(B.recv_multipart())
81 ident, msg_list = self.session.feed_identities(B.recv_multipart())
82 new_msg = self.session.deserialize(msg_list)
82 new_msg = self.session.deserialize(msg_list)
83 self.assertEqual(ident[0], b'foo')
83 self.assertEqual(ident[0], b'foo')
@@ -88,7 +88,7 b' class TestSession(SessionTestCase):'
88 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
88 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
89 self.assertEqual(new_msg['metadata'],msg['metadata'])
89 self.assertEqual(new_msg['metadata'],msg['metadata'])
90 self.assertEqual(new_msg['buffers'],[b'bar'])
90 self.assertEqual(new_msg['buffers'],[b'bar'])
91
91
92 content = msg['content']
92 content = msg['content']
93 header = msg['header']
93 header = msg['header']
94 header['msg_id'] = self.session.msg_id
94 header['msg_id'] = self.session.msg_id
@@ -107,9 +107,9 b' class TestSession(SessionTestCase):'
107 self.assertEqual(new_msg['metadata'],msg['metadata'])
107 self.assertEqual(new_msg['metadata'],msg['metadata'])
108 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
108 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
109 self.assertEqual(new_msg['buffers'],[b'bar'])
109 self.assertEqual(new_msg['buffers'],[b'bar'])
110
110
111 header['msg_id'] = self.session.msg_id
111 header['msg_id'] = self.session.msg_id
112
112
113 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
113 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
114 ident, new_msg = self.session.recv(B)
114 ident, new_msg = self.session.recv(B)
115 self.assertEqual(ident[0], b'foo')
115 self.assertEqual(ident[0], b'foo')
@@ -216,7 +216,7 b' class TestSession(SessionTestCase):'
216 self.assertTrue(len(session.digest_history) == 100)
216 self.assertTrue(len(session.digest_history) == 100)
217 session._add_digest(uuid.uuid4().bytes)
217 session._add_digest(uuid.uuid4().bytes)
218 self.assertTrue(len(session.digest_history) == 91)
218 self.assertTrue(len(session.digest_history) == 91)
219
219
220 def test_bad_pack(self):
220 def test_bad_pack(self):
221 try:
221 try:
222 session = ss.Session(pack=_bad_packer)
222 session = ss.Session(pack=_bad_packer)
@@ -225,7 +225,7 b' class TestSession(SessionTestCase):'
225 self.assertIn("don't work", str(e))
225 self.assertIn("don't work", str(e))
226 else:
226 else:
227 self.fail("Should have raised ValueError")
227 self.fail("Should have raised ValueError")
228
228
229 def test_bad_unpack(self):
229 def test_bad_unpack(self):
230 try:
230 try:
231 session = ss.Session(unpack=_bad_unpacker)
231 session = ss.Session(unpack=_bad_unpacker)
@@ -234,7 +234,7 b' class TestSession(SessionTestCase):'
234 self.assertIn("don't work either", str(e))
234 self.assertIn("don't work either", str(e))
235 else:
235 else:
236 self.fail("Should have raised ValueError")
236 self.fail("Should have raised ValueError")
237
237
238 def test_bad_packer(self):
238 def test_bad_packer(self):
239 try:
239 try:
240 session = ss.Session(packer=__name__ + '._bad_packer')
240 session = ss.Session(packer=__name__ + '._bad_packer')
@@ -243,7 +243,7 b' class TestSession(SessionTestCase):'
243 self.assertIn("don't work", str(e))
243 self.assertIn("don't work", str(e))
244 else:
244 else:
245 self.fail("Should have raised ValueError")
245 self.fail("Should have raised ValueError")
246
246
247 def test_bad_unpacker(self):
247 def test_bad_unpacker(self):
248 try:
248 try:
249 session = ss.Session(unpacker=__name__ + '._bad_unpacker')
249 session = ss.Session(unpacker=__name__ + '._bad_unpacker')
@@ -252,11 +252,11 b' class TestSession(SessionTestCase):'
252 self.assertIn("don't work either", str(e))
252 self.assertIn("don't work either", str(e))
253 else:
253 else:
254 self.fail("Should have raised ValueError")
254 self.fail("Should have raised ValueError")
255
255
256 def test_bad_roundtrip(self):
256 def test_bad_roundtrip(self):
257 with self.assertRaises(ValueError):
257 with self.assertRaises(ValueError):
258 session = ss.Session(unpack=lambda b: 5)
258 session = ss.Session(unpack=lambda b: 5)
259
259
260 def _datetime_test(self, session):
260 def _datetime_test(self, session):
261 content = dict(t=datetime.now())
261 content = dict(t=datetime.now())
262 metadata = dict(t=datetime.now())
262 metadata = dict(t=datetime.now())
@@ -274,24 +274,24 b' class TestSession(SessionTestCase):'
274 assert isinstance(msg2['metadata']['t'], string_types)
274 assert isinstance(msg2['metadata']['t'], string_types)
275 self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
275 self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
276 self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
276 self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
277
277
278 def test_datetimes(self):
278 def test_datetimes(self):
279 self._datetime_test(self.session)
279 self._datetime_test(self.session)
280
280
281 def test_datetimes_pickle(self):
281 def test_datetimes_pickle(self):
282 session = ss.Session(packer='pickle')
282 session = ss.Session(packer='pickle')
283 self._datetime_test(session)
283 self._datetime_test(session)
284
284
285 @skipif(module_not_available('msgpack'))
285 @skipif(module_not_available('msgpack'))
286 def test_datetimes_msgpack(self):
286 def test_datetimes_msgpack(self):
287 import msgpack
287 import msgpack
288
288
289 session = ss.Session(
289 session = ss.Session(
290 pack=msgpack.packb,
290 pack=msgpack.packb,
291 unpack=lambda buf: msgpack.unpackb(buf, encoding='utf8'),
291 unpack=lambda buf: msgpack.unpackb(buf, encoding='utf8'),
292 )
292 )
293 self._datetime_test(session)
293 self._datetime_test(session)
294
294
295 def test_send_raw(self):
295 def test_send_raw(self):
296 ctx = zmq.Context.instance()
296 ctx = zmq.Context.instance()
297 A = ctx.socket(zmq.PAIR)
297 A = ctx.socket(zmq.PAIR)
@@ -300,10 +300,10 b' class TestSession(SessionTestCase):'
300 B.connect("inproc://test")
300 B.connect("inproc://test")
301
301
302 msg = self.session.msg('execute', content=dict(a=10))
302 msg = self.session.msg('execute', content=dict(a=10))
303 msg_list = [self.session.pack(msg[part]) for part in
303 msg_list = [self.session.pack(msg[part]) for part in
304 ['header', 'parent_header', 'metadata', 'content']]
304 ['header', 'parent_header', 'metadata', 'content']]
305 self.session.send_raw(A, msg_list, ident=b'foo')
305 self.session.send_raw(A, msg_list, ident=b'foo')
306
306
307 ident, new_msg_list = self.session.feed_identities(B.recv_multipart())
307 ident, new_msg_list = self.session.feed_identities(B.recv_multipart())
308 new_msg = self.session.deserialize(new_msg_list)
308 new_msg = self.session.deserialize(new_msg_list)
309 self.assertEqual(ident[0], b'foo')
309 self.assertEqual(ident[0], b'foo')
@@ -204,7 +204,7 b' def find_package_data():'
204 # 'v3/nbformat.v3.schema.json',
204 # 'v3/nbformat.v3.schema.json',
205 # 'v4/nbformat.v4.schema.json',
205 # 'v4/nbformat.v4.schema.json',
206 # ],
206 # ],
207 'IPython.kernel': ['resources/*.*'],
207 # 'IPython.kernel': ['resources/*.*'],
208 }
208 }
209
209
210 return package_data
210 return package_data
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
General Comments 0
You need to be logged in to leave comments. Login now