bigsplit: ipython_kernel
Min RK
@@ -0,0 +1,1 b''
1 from jupyter_client.adapter import *
@@ -0,0 +1,1 b''
1 from jupyter_client.channels import *
@@ -0,0 +1,1 b''
1 from jupyter_client.channelsabc import *
@@ -0,0 +1,1 b''
1 from jupyter_client.client import *
@@ -0,0 +1,1 b''
1 from jupyter_client.clientabc import *
@@ -0,0 +1,2 b''
1 from ipython_kernel.connect import *
2 from jupyter_client.connect import *
@@ -0,0 +1,1 b''
1 from jupyter_client.kernelspec import *
@@ -0,0 +1,1 b''
1 from jupyter_client.kernelspecapp import *
@@ -0,0 +1,1 b''
1 from jupyter_client.launcher import *
@@ -0,0 +1,1 b''
1 from jupyter_client.manager import *
@@ -0,0 +1,1 b''
1 from jupyter_client.managerabc import *
@@ -0,0 +1,1 b''
1 from jupyter_client.multikernelmanager import *
@@ -0,0 +1,1 b''
1 from jupyter_client.restarter import *
@@ -0,0 +1,883 b''
1 """Session object for building, serializing, sending, and receiving messages in
2 IPython. The Session object supports serialization, HMAC signatures, and
3 metadata on messages.
4
5 Also defined here are utilities for working with Sessions:
6 * A SessionFactory to be used as a base class for configurables that work with
7 Sessions.
8 * A Message object for convenience that allows attribute-access to the msg dict.
9 """
10
11 # Copyright (c) IPython Development Team.
12 # Distributed under the terms of the Modified BSD License.
13
14 import hashlib
15 import hmac
16 import logging
17 import os
18 import pprint
19 import random
20 import uuid
21 import warnings
22 from datetime import datetime
23
24 try:
25 import cPickle
26 pickle = cPickle
27 except ImportError:
28 cPickle = None
29 import pickle
30
31 try:
32 # We are using compare_digest to limit the surface of timing attacks
33 from hmac import compare_digest
34 except ImportError:
35 # Python < 2.7.7: When digests don't match no feedback is provided,
36 # limiting the surface of attack
37 def compare_digest(a,b): return a == b
38
39 import zmq
40 from zmq.utils import jsonapi
41 from zmq.eventloop.ioloop import IOLoop
42 from zmq.eventloop.zmqstream import ZMQStream
43
44 from IPython.core.release import kernel_protocol_version
45 from IPython.config.configurable import Configurable, LoggingConfigurable
46 from IPython.utils import io
47 from IPython.utils.importstring import import_item
48 from IPython.utils.jsonutil import extract_dates, squash_dates, date_default
49 from IPython.utils.py3compat import (str_to_bytes, str_to_unicode, unicode_type,
50 iteritems)
51 from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,
52 DottedObjectName, CUnicode, Dict, Integer,
53 TraitError,
54 )
55 from IPython.utils.pickleutil import PICKLE_PROTOCOL
56 from jupyter_client.adapter import adapt
57
58 #-----------------------------------------------------------------------------
59 # utility functions
60 #-----------------------------------------------------------------------------
61
62 def squash_unicode(obj):
63 """coerce unicode back to bytestrings."""
64 if isinstance(obj,dict):
65 for key in obj.keys():
66 obj[key] = squash_unicode(obj[key])
67 if isinstance(key, unicode_type):
68 obj[squash_unicode(key)] = obj.pop(key)
69 elif isinstance(obj, list):
70 for i,v in enumerate(obj):
71 obj[i] = squash_unicode(v)
72 elif isinstance(obj, unicode_type):
73 obj = obj.encode('utf8')
74 return obj
75
76 #-----------------------------------------------------------------------------
77 # globals and defaults
78 #-----------------------------------------------------------------------------
79
80 # default values for the thresholds:
81 MAX_ITEMS = 64
82 MAX_BYTES = 1024
83
84 # ISO8601-ify datetime objects
85 # allow unicode
86 # disallow nan, because it's not actually valid JSON
87 json_packer = lambda obj: jsonapi.dumps(obj, default=date_default,
88 ensure_ascii=False, allow_nan=False,
89 )
90 json_unpacker = lambda s: jsonapi.loads(s)
91
92 pickle_packer = lambda o: pickle.dumps(squash_dates(o), PICKLE_PROTOCOL)
93 pickle_unpacker = pickle.loads
94
95 default_packer = json_packer
96 default_unpacker = json_unpacker
97
98 DELIM = b"<IDS|MSG>"
99 # singleton dummy tracker, which will always report as done
100 DONE = zmq.MessageTracker()
101
102 #-----------------------------------------------------------------------------
103 # Mixin tools for apps that use Sessions
104 #-----------------------------------------------------------------------------
105
106 session_aliases = dict(
107 ident = 'Session.session',
108 user = 'Session.username',
109 keyfile = 'Session.keyfile',
110 )
111
112 session_flags = {
113 'secure' : ({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())),
114 'keyfile' : '' }},
115 """Use HMAC digests for authentication of messages.
116 Setting this flag will generate a new UUID to use as the HMAC key.
117 """),
118 'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }},
119 """Don't authenticate messages."""),
120 }
121
122 def default_secure(cfg):
123 """Set the default behavior for a config environment to be secure.
124
125 If Session.key/keyfile have not been set, set Session.key to
126 a new random UUID.
127 """
128 warnings.warn("default_secure is deprecated", DeprecationWarning)
129 if 'Session' in cfg:
130 if 'key' in cfg.Session or 'keyfile' in cfg.Session:
131 return
132 # key/keyfile not specified, generate new UUID:
133 cfg.Session.key = str_to_bytes(str(uuid.uuid4()))
134
135
136 #-----------------------------------------------------------------------------
137 # Classes
138 #-----------------------------------------------------------------------------
139
140 class SessionFactory(LoggingConfigurable):
141 """The Base class for configurables that have a Session, Context, logger,
142 and IOLoop.
143 """
144
145 logname = Unicode('')
146 def _logname_changed(self, name, old, new):
147 self.log = logging.getLogger(new)
148
149 # not configurable:
150 context = Instance('zmq.Context')
151 def _context_default(self):
152 return zmq.Context.instance()
153
154 session = Instance('jupyter_client.session.Session')
155
156 loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
157 def _loop_default(self):
158 return IOLoop.instance()
159
160 def __init__(self, **kwargs):
161 super(SessionFactory, self).__init__(**kwargs)
162
163 if self.session is None:
164 # construct the session
165 self.session = Session(**kwargs)
166
167
168 class Message(object):
169 """A simple message object that maps dict keys to attributes.
170
171 A Message can be created from a dict and a dict from a Message instance
172 simply by calling dict(msg_obj)."""
173
174 def __init__(self, msg_dict):
175 dct = self.__dict__
176 for k, v in iteritems(dict(msg_dict)):
177 if isinstance(v, dict):
178 v = Message(v)
179 dct[k] = v
180
181 # Having this iterator lets dict(msg_obj) work out of the box.
182 def __iter__(self):
183 return iter(iteritems(self.__dict__))
184
185 def __repr__(self):
186 return repr(self.__dict__)
187
188 def __str__(self):
189 return pprint.pformat(self.__dict__)
190
191 def __contains__(self, k):
192 return k in self.__dict__
193
194 def __getitem__(self, k):
195 return self.__dict__[k]
196
197
198 def msg_header(msg_id, msg_type, username, session):
199 date = datetime.now()
200 version = kernel_protocol_version
201 return locals()
202
203 def extract_header(msg_or_header):
204 """Given a message or header, return the header."""
205 if not msg_or_header:
206 return {}
207 try:
208 # See if msg_or_header is the entire message.
209 h = msg_or_header['header']
210 except KeyError:
211 try:
212 # See if msg_or_header is just the header
213 h = msg_or_header['msg_id']
214 except KeyError:
215 raise
216 else:
217 h = msg_or_header
218 if not isinstance(h, dict):
219 h = dict(h)
220 return h
221
222 class Session(Configurable):
223 """Object for handling serialization and sending of messages.
224
225 The Session object handles building messages and sending them
226 with ZMQ sockets or ZMQStream objects. Objects can communicate with each
227 other over the network via Session objects, and only need to work with the
228 dict-based IPython message spec. The Session will handle
229 serialization/deserialization, security, and metadata.
230
231 Sessions support configurable serialization via packer/unpacker traits,
232 and signing with HMAC digests via the key/keyfile traits.
233
234 Parameters
235 ----------
236
237 debug : bool
238 whether to trigger extra debugging statements
239 packer/unpacker : str : 'json', 'pickle' or import_string
240 importstrings for methods to serialize message parts. If just
241 'json' or 'pickle', predefined JSON and pickle packers will be used.
242 Otherwise, the entire importstring must be used.
243
244 The functions must accept at least valid JSON input, and output *bytes*.
245
246 For example, to use msgpack:
247 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
248 pack/unpack : callables
249 You can also set the pack/unpack callables for serialization directly.
250 session : bytes
251 the ID of this Session object. The default is to generate a new UUID.
252 username : unicode
253 username added to message headers. The default is to ask the OS.
254 key : bytes
255 The key used to initialize an HMAC signature. If unset, messages
256 will not be signed or checked.
257 keyfile : filepath
258 The file containing a key. If this is set, `key` will be initialized
259 to the contents of the file.
260
261 """
262
263 debug=Bool(False, config=True, help="""Debug output in the Session""")
264
265 packer = DottedObjectName('json',config=True,
266 help="""The name of the packer for serializing messages.
267 Should be one of 'json', 'pickle', or an import name
268 for a custom callable serializer.""")
269 def _packer_changed(self, name, old, new):
270 if new.lower() == 'json':
271 self.pack = json_packer
272 self.unpack = json_unpacker
273 self.unpacker = new
274 elif new.lower() == 'pickle':
275 self.pack = pickle_packer
276 self.unpack = pickle_unpacker
277 self.unpacker = new
278 else:
279 self.pack = import_item(str(new))
280
281 unpacker = DottedObjectName('json', config=True,
282 help="""The name of the unpacker for deserializing messages.
283 Only used with custom functions for `packer`.""")
284 def _unpacker_changed(self, name, old, new):
285 if new.lower() == 'json':
286 self.pack = json_packer
287 self.unpack = json_unpacker
288 self.packer = new
289 elif new.lower() == 'pickle':
290 self.pack = pickle_packer
291 self.unpack = pickle_unpacker
292 self.packer = new
293 else:
294 self.unpack = import_item(str(new))
295
296 session = CUnicode(u'', config=True,
297 help="""The UUID identifying this session.""")
298 def _session_default(self):
299 u = unicode_type(uuid.uuid4())
300 self.bsession = u.encode('ascii')
301 return u
302
303 def _session_changed(self, name, old, new):
304 self.bsession = self.session.encode('ascii')
305
306 # bsession is the session as bytes
307 bsession = CBytes(b'')
308
309 username = Unicode(str_to_unicode(os.environ.get('USER', 'username')),
310 help="""Username for the Session. Default is your system username.""",
311 config=True)
312
313 metadata = Dict({}, config=True,
314 help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")
315
316 # if 0, no adapting to do.
317 adapt_version = Integer(0)
318
319 # message signature related traits:
320
321 key = CBytes(config=True,
322 help="""execution key, for signing messages.""")
323 def _key_default(self):
324 return str_to_bytes(str(uuid.uuid4()))
325
326 def _key_changed(self):
327 self._new_auth()
328
329 signature_scheme = Unicode('hmac-sha256', config=True,
330 help="""The digest scheme used to construct the message signatures.
331 Must have the form 'hmac-HASH'.""")
332 def _signature_scheme_changed(self, name, old, new):
333 if not new.startswith('hmac-'):
334 raise TraitError("signature_scheme must start with 'hmac-', got %r" % new)
335 hash_name = new.split('-', 1)[1]
336 try:
337 self.digest_mod = getattr(hashlib, hash_name)
338 except AttributeError:
339 raise TraitError("hashlib has no such attribute: %s" % hash_name)
340 self._new_auth()
341
342 digest_mod = Any()
343 def _digest_mod_default(self):
344 return hashlib.sha256
345
346 auth = Instance(hmac.HMAC)
347
348 def _new_auth(self):
349 if self.key:
350 self.auth = hmac.HMAC(self.key, digestmod=self.digest_mod)
351 else:
352 self.auth = None
353
354 digest_history = Set()
355 digest_history_size = Integer(2**16, config=True,
356 help="""The maximum number of digests to remember.
357
358 The digest history will be culled when it exceeds this value.
359 """
360 )
361
362 keyfile = Unicode('', config=True,
363 help="""path to file containing execution key.""")
364 def _keyfile_changed(self, name, old, new):
365 with open(new, 'rb') as f:
366 self.key = f.read().strip()
367
368 # for protecting against sends from forks
369 pid = Integer()
370
371 # serialization traits:
372
373 pack = Any(default_packer) # the actual packer function
374 def _pack_changed(self, name, old, new):
375 if not callable(new):
376 raise TypeError("packer must be callable, not %s"%type(new))
377
378 unpack = Any(default_unpacker) # the actual unpacker function
379 def _unpack_changed(self, name, old, new):
380 # only callability is checked here; _check_packers verifies that unpack inverts pack
381 if not callable(new):
382 raise TypeError("unpacker must be callable, not %s"%type(new))
383
384 # thresholds:
385 copy_threshold = Integer(2**16, config=True,
386 help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
387 buffer_threshold = Integer(MAX_BYTES, config=True,
388 help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.")
389 item_threshold = Integer(MAX_ITEMS, config=True,
390 help="""The maximum number of items for a container to be introspected for custom serialization.
391 Containers larger than this are pickled outright.
392 """
393 )
394
395
396 def __init__(self, **kwargs):
397 """create a Session object
398
399 Parameters
400 ----------
401
402 debug : bool
403 whether to trigger extra debugging statements
404 packer/unpacker : str : 'json', 'pickle' or import_string
405 importstrings for methods to serialize message parts. If just
406 'json' or 'pickle', predefined JSON and pickle packers will be used.
407 Otherwise, the entire importstring must be used.
408
409 The functions must accept at least valid JSON input, and output
410 *bytes*.
411
412 For example, to use msgpack:
413 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
414 pack/unpack : callables
415 You can also set the pack/unpack callables for serialization
416 directly.
417 session : unicode (must be ascii)
418 the ID of this Session object. The default is to generate a new
419 UUID.
420 bsession : bytes
421 The session as bytes
422 username : unicode
423 username added to message headers. The default is to ask the OS.
424 key : bytes
425 The key used to initialize an HMAC signature. If unset, messages
426 will not be signed or checked.
427 signature_scheme : str
428 The message digest scheme. Currently must be of the form 'hmac-HASH',
429 where 'HASH' is a hashing function available in Python's hashlib.
430 The default is 'hmac-sha256'.
431 This is ignored if 'key' is empty.
432 keyfile : filepath
433 The file containing a key. If this is set, `key` will be
434 initialized to the contents of the file.
435 """
436 super(Session, self).__init__(**kwargs)
437 self._check_packers()
438 self.none = self.pack({})
439 # ensure self._session_default() if necessary, so bsession is defined:
440 self.session
441 self.pid = os.getpid()
442 self._new_auth()
443
444 @property
445 def msg_id(self):
446 """always return new uuid"""
447 return str(uuid.uuid4())
448
449 def _check_packers(self):
450 """check packers for datetime support."""
451 pack = self.pack
452 unpack = self.unpack
453
454 # check simple serialization
455 msg = dict(a=[1,'hi'])
456 try:
457 packed = pack(msg)
458 except Exception as e:
459 msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}"
460 if self.packer == 'json':
461 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
462 else:
463 jsonmsg = ""
464 raise ValueError(
465 msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg)
466 )
467
468 # ensure packed message is bytes
469 if not isinstance(packed, bytes):
470 raise ValueError("message packed to %r, but bytes are required"%type(packed))
471
472 # check that unpack is pack's inverse
473 try:
474 unpacked = unpack(packed)
475 assert unpacked == msg
476 except Exception as e:
477 msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}"
478 if self.packer == 'json':
479 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
480 else:
481 jsonmsg = ""
482 raise ValueError(
483 msg.format(packer=self.packer, unpacker=self.unpacker, e=e, jsonmsg=jsonmsg)
484 )
485
486 # check datetime support
487 msg = dict(t=datetime.now())
488 try:
489 unpacked = unpack(pack(msg))
490 if isinstance(unpacked['t'], datetime):
491 raise ValueError("Shouldn't deserialize to datetime")
492 except Exception:
493 self.pack = lambda o: pack(squash_dates(o))
494 self.unpack = lambda s: unpack(s)
495
496 def msg_header(self, msg_type):
497 return msg_header(self.msg_id, msg_type, self.username, self.session)
498
499 def msg(self, msg_type, content=None, parent=None, header=None, metadata=None):
500 """Return the nested message dict.
501
502 This format is different from what is sent over the wire. The
503 serialize/deserialize methods convert this nested message dict to the wire
504 format, which is a list of message parts.
505 """
506 msg = {}
507 header = self.msg_header(msg_type) if header is None else header
508 msg['header'] = header
509 msg['msg_id'] = header['msg_id']
510 msg['msg_type'] = header['msg_type']
511 msg['parent_header'] = {} if parent is None else extract_header(parent)
512 msg['content'] = {} if content is None else content
513 msg['metadata'] = self.metadata.copy()
514 if metadata is not None:
515 msg['metadata'].update(metadata)
516 return msg
517
518 def sign(self, msg_list):
519 """Sign a message with HMAC digest. If no auth, return b''.
520
521 Parameters
522 ----------
523 msg_list : list
524 The [p_header,p_parent,p_content] part of the message list.
525 """
526 if self.auth is None:
527 return b''
528 h = self.auth.copy()
529 for m in msg_list:
530 h.update(m)
531 return str_to_bytes(h.hexdigest())
532
533 def serialize(self, msg, ident=None):
534 """Serialize the message components to bytes.
535
536 This is roughly the inverse of deserialize. The serialize/deserialize
537 methods work with full message lists, whereas pack/unpack work with
538 the individual message parts in the message list.
539
540 Parameters
541 ----------
542 msg : dict or Message
543 The next message dict as returned by the self.msg method.
544
545 Returns
546 -------
547 msg_list : list
548 The list of bytes objects to be sent with the format::
549
550 [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent,
551 p_metadata, p_content, buffer1, buffer2, ...]
552
553 In this list, the ``p_*`` entities are the packed or serialized
554 versions, so if JSON is used, these are utf8 encoded JSON strings.
555 """
556 content = msg.get('content', {})
557 if content is None:
558 content = self.none
559 elif isinstance(content, dict):
560 content = self.pack(content)
561 elif isinstance(content, bytes):
562 # content is already packed, as in a relayed message
563 pass
564 elif isinstance(content, unicode_type):
565 # should be bytes, but JSON often spits out unicode
566 content = content.encode('utf8')
567 else:
568 raise TypeError("Content incorrect type: %s"%type(content))
569
570 real_message = [self.pack(msg['header']),
571 self.pack(msg['parent_header']),
572 self.pack(msg['metadata']),
573 content,
574 ]
575
576 to_send = []
577
578 if isinstance(ident, list):
579 # accept list of idents
580 to_send.extend(ident)
581 elif ident is not None:
582 to_send.append(ident)
583 to_send.append(DELIM)
584
585 signature = self.sign(real_message)
586 to_send.append(signature)
587
588 to_send.extend(real_message)
589
590 return to_send
591
592 def send(self, stream, msg_or_type, content=None, parent=None, ident=None,
593 buffers=None, track=False, header=None, metadata=None):
594 """Build and send a message via stream or socket.
595
596 The message format used by this function internally is as follows:
597
598 [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_content,
599 buffer1,buffer2,...]
600
601 The serialize/deserialize methods convert the nested message dict into this
602 format.
603
604 Parameters
605 ----------
606
607 stream : zmq.Socket or ZMQStream
608 The socket-like object used to send the data.
609 msg_or_type : str or Message/dict
610 Normally, msg_or_type will be a msg_type unless a message is being
611 sent more than once. If a header is supplied, this can be set to
612 None and the msg_type will be pulled from the header.
613
614 content : dict or None
615 The content of the message (ignored if msg_or_type is a message).
616 header : dict or None
617 The header dict for the message (ignored if msg_or_type is a message).
618 parent : Message or dict or None
619 The parent or parent header describing the parent of this message
620 (ignored if msg_or_type is a message).
621 ident : bytes or list of bytes
622 The zmq.IDENTITY routing path.
623 metadata : dict or None
624 The metadata describing the message
625 buffers : list or None
626 The already-serialized buffers to be appended to the message.
627 track : bool
628 Whether to track. Only for use with Sockets, because ZMQStream
629 objects cannot track messages.
630
631
632 Returns
633 -------
634 msg : dict
635 The constructed message.
636 """
637 if not isinstance(stream, zmq.Socket):
638 # ZMQStreams and dummy sockets do not support tracking.
639 track = False
640
641 if isinstance(msg_or_type, (Message, dict)):
642 # We got a Message or message dict, not a msg_type so don't
643 # build a new Message.
644 msg = msg_or_type
645 buffers = buffers or msg.get('buffers', [])
646 else:
647 msg = self.msg(msg_or_type, content=content, parent=parent,
648 header=header, metadata=metadata)
649 if not os.getpid() == self.pid:
650 io.rprint("WARNING: attempted to send message from fork")
651 io.rprint(msg)
652 return
653 buffers = [] if buffers is None else buffers
654 if self.adapt_version:
655 msg = adapt(msg, self.adapt_version)
656 to_send = self.serialize(msg, ident)
657 to_send.extend(buffers)
658 longest = max([ len(s) for s in to_send ])
659 copy = (longest < self.copy_threshold)
660
661 if buffers and track and not copy:
662 # only really track when we are doing zero-copy buffers
663 tracker = stream.send_multipart(to_send, copy=False, track=True)
664 else:
665 # use dummy tracker, which will be done immediately
666 tracker = DONE
667 stream.send_multipart(to_send, copy=copy)
668
669 if self.debug:
670 pprint.pprint(msg)
671 pprint.pprint(to_send)
672 pprint.pprint(buffers)
673
674 msg['tracker'] = tracker
675
676 return msg
677
678 def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
679 """Send a raw message via ident path.
680
681 This method is used to send an already serialized message.
682
683 Parameters
684 ----------
685 stream : ZMQStream or Socket
686 The ZMQ stream or socket to use for sending the message.
687 msg_list : list
688 The serialized list of messages to send. This only includes the
689 [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of
690 the message.
691 ident : ident or list
692 A single ident or a list of idents to use in sending.
693 """
694 to_send = []
695 if isinstance(ident, bytes):
696 ident = [ident]
697 if ident is not None:
698 to_send.extend(ident)
699
700 to_send.append(DELIM)
701 to_send.append(self.sign(msg_list))
702 to_send.extend(msg_list)
703 stream.send_multipart(to_send, flags, copy=copy)
704
705 def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
706 """Receive and unpack a message.
707
708 Parameters
709 ----------
710 socket : ZMQStream or Socket
711 The socket or stream to use in receiving.
712
713 Returns
714 -------
715 [idents], msg
716 [idents] is a list of idents and msg is a nested message dict of
717 same format as self.msg returns.
718 """
719 if isinstance(socket, ZMQStream):
720 socket = socket.socket
721 try:
722 msg_list = socket.recv_multipart(mode, copy=copy)
723 except zmq.ZMQError as e:
724 if e.errno == zmq.EAGAIN:
725 # We can convert EAGAIN to None as we know in this case
726 # recv_multipart won't return None.
727 return None,None
728 else:
729 raise
730 # split multipart message into identity list and message dict
731 # invalid large messages can cause very expensive string comparisons
732 idents, msg_list = self.feed_identities(msg_list, copy)
733 try:
734 return idents, self.deserialize(msg_list, content=content, copy=copy)
735 except Exception as e:
736 # TODO: handle it
737 raise e
738
739 def feed_identities(self, msg_list, copy=True):
740 """Split the identities from the rest of the message.
741
742 Feed until DELIM is reached, then return the prefix as idents and
743 remainder as msg_list. This is easily broken by setting an IDENT to DELIM,
744 but that would be silly.
745
746 Parameters
747 ----------
748 msg_list : a list of Message or bytes objects
749 The message to be split.
750 copy : bool
751 flag determining whether the arguments are bytes or Messages
752
753 Returns
754 -------
755 (idents, msg_list) : two lists
756 idents will always be a list of bytes, each of which is a ZMQ
757 identity. msg_list will be a list of bytes or zmq.Messages of the
758 form [HMAC,p_header,p_parent,p_content,buffer1,buffer2,...] and
759 should be unpackable/unserializable via self.deserialize at this
760 point.
761 """
762 if copy:
763 idx = msg_list.index(DELIM)
764 return msg_list[:idx], msg_list[idx+1:]
765 else:
766 failed = True
767 for idx,m in enumerate(msg_list):
768 if m.bytes == DELIM:
769 failed = False
770 break
771 if failed:
772 raise ValueError("DELIM not in msg_list")
773 idents, msg_list = msg_list[:idx], msg_list[idx+1:]
774 return [m.bytes for m in idents], msg_list
775
776 def _add_digest(self, signature):
777 """add a digest to history to protect against replay attacks"""
778 if self.digest_history_size == 0:
779 # no history, never add digests
780 return
781
782 self.digest_history.add(signature)
783 if len(self.digest_history) > self.digest_history_size:
784 # threshold reached, cull 10%
785 self._cull_digest_history()
786
787 def _cull_digest_history(self):
788 """cull the digest history
789
790 Removes a randomly selected 10% of the digest history
791 """
792 current = len(self.digest_history)
793 n_to_cull = max(int(current // 10), current - self.digest_history_size)
794 if n_to_cull >= current:
795 self.digest_history = set()
796 return
797 to_cull = random.sample(self.digest_history, n_to_cull)
798 self.digest_history.difference_update(to_cull)
799
800 def deserialize(self, msg_list, content=True, copy=True):
801 """Unserialize a msg_list to a nested message dict.
802
803 This is roughly the inverse of serialize. The serialize/deserialize
804 methods work with full message lists, whereas pack/unpack work with
805 the individual message parts in the message list.
806
807 Parameters
808 ----------
809 msg_list : list of bytes or Message objects
810 The list of message parts of the form [HMAC,p_header,p_parent,
811 p_metadata,p_content,buffer1,buffer2,...].
812 content : bool (True)
813 Whether to unpack the content dict (True), or leave it packed
814 (False).
815 copy : bool (True)
816 Whether msg_list contains bytes (True) or the non-copying Message
817 objects in each place (False).
818
819 Returns
820 -------
821 msg : dict
822 The nested message dict with top-level keys [header, parent_header,
823 content, buffers]. The buffers are returned as memoryviews.
824 """
825 minlen = 5
826 message = {}
827 if not copy:
828 # pyzmq didn't copy the first parts of the message, so we'll do it
829 for i in range(minlen):
830 msg_list[i] = msg_list[i].bytes
831 if self.auth is not None:
832 signature = msg_list[0]
833 if not signature:
834 raise ValueError("Unsigned Message")
835 if signature in self.digest_history:
836 raise ValueError("Duplicate Signature: %r" % signature)
837 self._add_digest(signature)
838 check = self.sign(msg_list[1:5])
839 if not compare_digest(signature, check):
840 raise ValueError("Invalid Signature: %r" % signature)
841 if len(msg_list) < minlen:
842 raise TypeError("malformed message, must have at least %i elements"%minlen)
843 header = self.unpack(msg_list[1])
844 message['header'] = extract_dates(header)
845 message['msg_id'] = header['msg_id']
846 message['msg_type'] = header['msg_type']
847 message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
848 message['metadata'] = self.unpack(msg_list[3])
849 if content:
850 message['content'] = self.unpack(msg_list[4])
851 else:
852 message['content'] = msg_list[4]
853 buffers = [memoryview(b) for b in msg_list[5:]]
854 if buffers and buffers[0].shape is None:
855 # force copy to workaround pyzmq #646
856 buffers = [memoryview(b.bytes) for b in msg_list[5:]]
857 message['buffers'] = buffers
858 # adapt to the current version
859 return adapt(message)
860
861 def unserialize(self, *args, **kwargs):
862 warnings.warn(
863 "Session.unserialize is deprecated. Use Session.deserialize.",
864 DeprecationWarning,
865 )
866 return self.deserialize(*args, **kwargs)
867
868
869 def test_msg2obj():
870 am = dict(x=1)
871 ao = Message(am)
872 assert ao.x == am['x']
873
874 am['y'] = dict(z=1)
875 ao = Message(am)
876 assert ao.y.z == am['y']['z']
877
878 k1, k2 = 'y', 'z'
879 assert ao[k1][k2] == am[k1][k2]
880
881 am2 = dict(ao)
882 assert am['x'] == am2['x']
883 assert am['y']['z'] == am2['y']['z']
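Taken together, msg(), sign(), serialize(), feed_identities(), and deserialize() implement the signed wire protocol described in the docstrings above. A minimal round-trip sketch, assuming the jupyter_client.session module laid out in this commit is importable (the key value is illustrative):

from jupyter_client.session import Session

# two sessions sharing a key, as a client and a kernel would
key = b'illustrative-secret'
client = Session(key=key)
kernel = Session(key=key)

msg = client.msg('execute_request', content={'code': 'print(1)'})
# wire format: [DELIM, HMAC, p_header, p_parent, p_metadata, p_content]
wire = client.serialize(msg)

# the receiving side splits off routing idents, verifies the HMAC, and unpacks
idents, parts = kernel.feed_identities(wire)
received = kernel.deserialize(parts)
assert received['content'] == {'code': 'print(1)'}
assert received['header']['msg_type'] == 'execute_request'

With an empty key, sign() returns b'' and deserialize() skips verification entirely; when a key is set, the digest history additionally rejects replayed signatures.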
@@ -0,0 +1,1 b''
1 from jupyter_client.threaded import *
@@ -0,0 +1,1 b''
1 from .connect import *
\ No newline at end of file
@@ -0,0 +1,3 b''
1 if __name__ == '__main__':
2 from ipython_kernel.zmq import kernelapp as app
3 app.launch_new_instance()
@@ -0,0 +1,576 b''
1 """Utilities for connecting to kernels
2
3 The :class:`ConnectionFileMixin` class in this module encapsulates the logic
4 related to writing and reading connections files.
5 """
6 # Copyright (c) IPython Development Team.
7 # Distributed under the terms of the Modified BSD License.
8
9 #-----------------------------------------------------------------------------
10 # Imports
11 #-----------------------------------------------------------------------------
12
13 from __future__ import absolute_import
14
15 import glob
16 import json
17 import os
18 import socket
19 import sys
20 from getpass import getpass
21 from subprocess import Popen, PIPE
22 import tempfile
23
24 import zmq
25
26 # IPython imports
27 from IPython.config import LoggingConfigurable
28 from IPython.core.profiledir import ProfileDir
29 from IPython.utils.localinterfaces import localhost
30 from IPython.utils.path import filefind, get_ipython_dir
31 from IPython.utils.py3compat import (str_to_bytes, bytes_to_str, cast_bytes_py2,
32 string_types)
33 from IPython.utils.traitlets import (
34 Bool, Integer, Unicode, CaselessStrEnum, Instance,
35 )
36
37
38 #-----------------------------------------------------------------------------
39 # Working with Connection Files
40 #-----------------------------------------------------------------------------
41
42 def write_connection_file(fname=None, shell_port=0, iopub_port=0, stdin_port=0, hb_port=0,
43 control_port=0, ip='', key=b'', transport='tcp',
44 signature_scheme='hmac-sha256',
45 ):
46 """Generates a JSON config file, including the selection of random ports.
47
48 Parameters
49 ----------
50
51 fname : unicode
52 The path to the file to write
53
54 shell_port : int, optional
55 The port to use for ROUTER (shell) channel.
56
57 iopub_port : int, optional
58 The port to use for the SUB channel.
59
60 stdin_port : int, optional
61 The port to use for the ROUTER (raw input) channel.
62
63 control_port : int, optional
64 The port to use for the ROUTER (control) channel.
65
66 hb_port : int, optional
67 The port to use for the heartbeat REP channel.
68
69 ip : str, optional
70 The ip address the kernel will bind to.
71
72 key : str, optional
73 The Session key used for message authentication.
74
75 signature_scheme : str, optional
76 The scheme used for message authentication.
77 This has the form 'digest-hash', where 'digest'
78 is the scheme used for digests, and 'hash' is the name of the hash function
79 used by the digest scheme.
80 Currently, 'hmac' is the only supported digest scheme,
81 and 'sha256' is the default hash function.
82
83 """
84 if not ip:
85 ip = localhost()
86 # default to temporary connector file
87 if not fname:
88 fd, fname = tempfile.mkstemp('.json')
89 os.close(fd)
90
91 # Find open ports as necessary.
92
93 ports = []
94 ports_needed = int(shell_port <= 0) + \
95 int(iopub_port <= 0) + \
96 int(stdin_port <= 0) + \
97 int(control_port <= 0) + \
98 int(hb_port <= 0)
99 if transport == 'tcp':
100 for i in range(ports_needed):
101 sock = socket.socket()
102 # struct.pack('ii', (0,0)) is 8 null bytes
103 sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, b'\0' * 8)
104 sock.bind(('', 0))
105 ports.append(sock)
106 for i, sock in enumerate(ports):
107 port = sock.getsockname()[1]
108 sock.close()
109 ports[i] = port
110 else:
111 N = 1
112 for i in range(ports_needed):
113 while os.path.exists("%s-%s" % (ip, str(N))):
114 N += 1
115 ports.append(N)
116 N += 1
117 if shell_port <= 0:
118 shell_port = ports.pop(0)
119 if iopub_port <= 0:
120 iopub_port = ports.pop(0)
121 if stdin_port <= 0:
122 stdin_port = ports.pop(0)
123 if control_port <= 0:
124 control_port = ports.pop(0)
125 if hb_port <= 0:
126 hb_port = ports.pop(0)
127
128 cfg = dict( shell_port=shell_port,
129 iopub_port=iopub_port,
130 stdin_port=stdin_port,
131 control_port=control_port,
132 hb_port=hb_port,
133 )
134 cfg['ip'] = ip
135 cfg['key'] = bytes_to_str(key)
136 cfg['transport'] = transport
137 cfg['signature_scheme'] = signature_scheme
138
139 with open(fname, 'w') as f:
140 f.write(json.dumps(cfg, indent=2))
141
142 return fname, cfg
143
144
145 def get_connection_file(app=None):
146 """Return the path to the connection file of an app
147
148 Parameters
149 ----------
150 app : IPKernelApp instance [optional]
151 If unspecified, the currently running app will be used
152 """
153 if app is None:
154 from jupyter_client.kernelapp import IPKernelApp
155 if not IPKernelApp.initialized():
156 raise RuntimeError("app not specified, and not in a running Kernel")
157
158 app = IPKernelApp.instance()
159 return filefind(app.connection_file, ['.', app.profile_dir.security_dir])
160
161
162 def find_connection_file(filename='kernel-*.json', profile=None):
163 """find a connection file, and return its absolute path.
164
165 The current working directory and the profile's security
166 directory will be searched for the file if it is not given by
167 absolute path.
168
169 If profile is unspecified, then the current running application's
170 profile will be used, or 'default', if not run from IPython.
171
172 If the argument does not match an existing file, it will be interpreted as a
173 fileglob, and the matching file in the profile's security dir with
174 the latest access time will be used.
175
176 Parameters
177 ----------
178 filename : str
179 The connection file or fileglob to search for.
180 profile : str [optional]
181 The name of the profile to use when searching for the connection file,
182 if different from the current IPython session or 'default'.
183
184 Returns
185 -------
186 str : The absolute path of the connection file.
187 """
188 from IPython.core.application import BaseIPythonApplication as IPApp
189 try:
190 # quick check for absolute path, before going through logic
191 return filefind(filename)
192 except IOError:
193 pass
194
195 if profile is None:
196 # profile unspecified, check if running from an IPython app
197 if IPApp.initialized():
198 app = IPApp.instance()
199 profile_dir = app.profile_dir
200 else:
201 # not running in IPython, use default profile
202 profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), 'default')
203 else:
204 # find profiledir by profile name:
205 profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile)
206 security_dir = profile_dir.security_dir
207
208 try:
209 # first, try explicit name
210 return filefind(filename, ['.', security_dir])
211 except IOError:
212 pass
213
214 # not found by full name
215
216 if '*' in filename:
217 # given as a glob already
218 pat = filename
219 else:
220 # accept any substring match
221 pat = '*%s*' % filename
222 matches = glob.glob( os.path.join(security_dir, pat) )
223 if not matches:
224 raise IOError("Could not find %r in %r" % (filename, security_dir))
225 elif len(matches) == 1:
226 return matches[0]
227 else:
228 # get most recent match, by access time:
229 return sorted(matches, key=lambda f: os.stat(f).st_atime)[-1]
230
231
232 def get_connection_info(connection_file=None, unpack=False, profile=None):
233 """Return the connection information for the current Kernel.
234
235 Parameters
236 ----------
237 connection_file : str [optional]
238 The connection file to be used. Can be given by absolute path, or
239 IPython will search in the security directory of a given profile.
240 If run from IPython, the current profile's security directory is used.
241
242 If unspecified, the connection file for the currently running
243 IPython Kernel will be used, which is only allowed from inside a kernel.
244 unpack : bool [default: False]
245 if True, return the unpacked dict, otherwise just the string contents
246 of the file.
247 profile : str [optional]
248 The name of the profile to use when searching for the connection file,
249 if different from the current IPython session or 'default'.
250
251
252 Returns
253 -------
254 The connection dictionary of the current kernel, as string or dict,
255 depending on `unpack`.
256 """
257 if connection_file is None:
258 # get connection file from current kernel
259 cf = get_connection_file()
260 else:
261 # connection file specified, allow shortnames:
262 cf = find_connection_file(connection_file, profile=profile)
263
264 with open(cf) as f:
265 info = f.read()
266
267 if unpack:
268 info = json.loads(info)
269 # ensure key is bytes:
270 info['key'] = str_to_bytes(info.get('key', ''))
271 return info
272
273
274 def connect_qtconsole(connection_file=None, argv=None, profile=None):
275 """Connect a qtconsole to the current kernel.
276
277 This is useful for connecting a second qtconsole to a kernel, or to a
278 local notebook.
279
280 Parameters
281 ----------
282 connection_file : str [optional]
283 The connection file to be used. Can be given by absolute path, or
284 IPython will search in the security directory of a given profile.
285 If run from IPython, the current profile's security directory is used.
286
287 If unspecified, the connection file for the currently running
288 IPython Kernel will be used, which is only allowed from inside a kernel.
289 argv : list [optional]
290 Any extra args to be passed to the console.
291 profile : str [optional]
292 The name of the profile to use when searching for the connection file,
293 if different from the current IPython session or 'default'.
294
295
296 Returns
297 -------
298 :class:`subprocess.Popen` instance running the qtconsole frontend
299 """
300 argv = [] if argv is None else argv
301
302 if connection_file is None:
303 # get connection file from current kernel
304 cf = get_connection_file()
305 else:
306 cf = find_connection_file(connection_file, profile=profile)
307
308 cmd = ';'.join([
309 "from IPython.qt.console import qtconsoleapp",
310 "qtconsoleapp.main()"
311 ])
312
313 return Popen([sys.executable, '-c', cmd, '--existing', cf] + argv,
314 stdout=PIPE, stderr=PIPE, close_fds=(sys.platform != 'win32'),
315 )
316
317
318 def tunnel_to_kernel(connection_info, sshserver, sshkey=None):
319 """tunnel connections to a kernel via ssh
320
321 This will open four SSH tunnels from localhost on this machine to the
322 ports associated with the kernel. The tunnels can be direct
323 localhost-to-localhost tunnels; if an intermediate server is necessary,
324 the kernel must be listening on a public IP.
325
326 Parameters
327 ----------
328 connection_info : dict or str (path)
329 Either a connection dict, or the path to a JSON connection file
330 sshserver : str
331 The ssh server to use to tunnel to the kernel. Can be a full
332 `user@server:port` string. ssh config aliases are respected.
333 sshkey : str [optional]
334 Path to file containing ssh key to use for authentication.
335 Only necessary if your ssh config does not already associate
336 a keyfile with the host.
337
338 Returns
339 -------
340
341 (shell, iopub, stdin, hb) : ints
342 The four ports on localhost that have been forwarded to the kernel.
343 """
344 from zmq.ssh import tunnel
345 if isinstance(connection_info, string_types):
346 # it's a path, unpack it
347 with open(connection_info) as f:
348 connection_info = json.loads(f.read())
349
350 cf = connection_info
351
352 lports = tunnel.select_random_ports(4)
353 rports = cf['shell_port'], cf['iopub_port'], cf['stdin_port'], cf['hb_port']
354
355 remote_ip = cf['ip']
356
357 if tunnel.try_passwordless_ssh(sshserver, sshkey):
358 password = False
359 else:
360 password = getpass("SSH Password for %s: " % cast_bytes_py2(sshserver))
361
362 for lp,rp in zip(lports, rports):
363 tunnel.ssh_tunnel(lp, rp, sshserver, remote_ip, sshkey, password)
364
365 return tuple(lports)
366
367
368 #-----------------------------------------------------------------------------
369 # Mixin for classes that work with connection files
370 #-----------------------------------------------------------------------------
371
372 channel_socket_types = {
373 'hb' : zmq.REQ,
374 'shell' : zmq.DEALER,
375 'iopub' : zmq.SUB,
376 'stdin' : zmq.DEALER,
377 'control': zmq.DEALER,
378 }
379
380 port_names = [ "%s_port" % channel for channel in ('shell', 'stdin', 'iopub', 'hb', 'control')]
381
382 class ConnectionFileMixin(LoggingConfigurable):
383 """Mixin for configurable classes that work with connection files"""
384
385 # The addresses for the communication channels
386 connection_file = Unicode('', config=True,
387 help="""JSON file in which to store connection info [default: kernel-<pid>.json]
388
389 This file will contain the IP, ports, and authentication key needed to connect
390 clients to this kernel. By default, this file will be created in the security dir
391 of the current profile, but can be specified by absolute path.
392 """)
393 _connection_file_written = Bool(False)
394
395 transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True)
396
397 ip = Unicode(config=True,
398 help="""Set the kernel's IP address [default: localhost].
399 If the IP address is something other than localhost,
400 consoles on other machines will be able to connect
401 to the kernel, so be careful!"""
402 )
403
404 def _ip_default(self):
405 if self.transport == 'ipc':
406 if self.connection_file:
407 return os.path.splitext(self.connection_file)[0] + '-ipc'
408 else:
409 return 'kernel-ipc'
410 else:
411 return localhost()
412
413 def _ip_changed(self, name, old, new):
414 if new == '*':
415 self.ip = '0.0.0.0'
416
417 # protected traits
418
419 hb_port = Integer(0, config=True,
420 help="set the heartbeat port [default: random]")
421 shell_port = Integer(0, config=True,
422 help="set the shell (ROUTER) port [default: random]")
423 iopub_port = Integer(0, config=True,
424 help="set the iopub (PUB) port [default: random]")
425 stdin_port = Integer(0, config=True,
426 help="set the stdin (ROUTER) port [default: random]")
427 control_port = Integer(0, config=True,
428 help="set the control (ROUTER) port [default: random]")
429
430 @property
431 def ports(self):
432 return [ getattr(self, name) for name in port_names ]
433
434 # The Session to use for communication with the kernel.
435 session = Instance('jupyter_client.session.Session')
436 def _session_default(self):
437 from jupyter_client.session import Session
438 return Session(parent=self)
439
440 #--------------------------------------------------------------------------
441 # Connection and ipc file management
442 #--------------------------------------------------------------------------
443
444 def get_connection_info(self):
445 """return the connection info as a dict"""
446 return dict(
447 transport=self.transport,
448 ip=self.ip,
449 shell_port=self.shell_port,
450 iopub_port=self.iopub_port,
451 stdin_port=self.stdin_port,
452 hb_port=self.hb_port,
453 control_port=self.control_port,
454 signature_scheme=self.session.signature_scheme,
455 key=self.session.key,
456 )
457
458 def cleanup_connection_file(self):
459 """Cleanup connection file *if we wrote it*
460
461 Will not raise if the connection file was already removed somehow.
462 """
463 if self._connection_file_written:
464 # cleanup connection files on full shutdown of kernel we started
465 self._connection_file_written = False
466 try:
467 os.remove(self.connection_file)
468 except (IOError, OSError, AttributeError):
469 pass
470
471 def cleanup_ipc_files(self):
472 """Cleanup ipc files if we wrote them."""
473 if self.transport != 'ipc':
474 return
475 for port in self.ports:
476 ipcfile = "%s-%i" % (self.ip, port)
477 try:
478 os.remove(ipcfile)
479 except (IOError, OSError):
480 pass
481
482 def write_connection_file(self):
483 """Write connection info to JSON dict in self.connection_file."""
484 if self._connection_file_written and os.path.exists(self.connection_file):
485 return
486
487 self.connection_file, cfg = write_connection_file(self.connection_file,
488 transport=self.transport, ip=self.ip, key=self.session.key,
489 stdin_port=self.stdin_port, iopub_port=self.iopub_port,
490 shell_port=self.shell_port, hb_port=self.hb_port,
491 control_port=self.control_port,
492 signature_scheme=self.session.signature_scheme,
493 )
494 # write_connection_file also sets default ports:
495 for name in port_names:
496 setattr(self, name, cfg[name])
497
498 self._connection_file_written = True
499
500 def load_connection_file(self):
501 """Load connection info from JSON dict in self.connection_file."""
502 self.log.debug(u"Loading connection file %s", self.connection_file)
503 with open(self.connection_file) as f:
504 cfg = json.load(f)
505 self.transport = cfg.get('transport', self.transport)
506 self.ip = cfg.get('ip', self._ip_default())
507
508 for name in port_names:
509 if getattr(self, name) == 0 and name in cfg:
510 # not overridden by config or cl_args
511 setattr(self, name, cfg[name])
512
513 if 'key' in cfg:
514 self.session.key = str_to_bytes(cfg['key'])
515 if 'signature_scheme' in cfg:
516 self.session.signature_scheme = cfg['signature_scheme']
517
518 #--------------------------------------------------------------------------
519 # Creating connected sockets
520 #--------------------------------------------------------------------------
521
522 def _make_url(self, channel):
523 """Make a ZeroMQ URL for a given channel."""
524 transport = self.transport
525 ip = self.ip
526 port = getattr(self, '%s_port' % channel)
527
528 if transport == 'tcp':
529 return "tcp://%s:%i" % (ip, port)
530 else:
531 return "%s://%s-%s" % (transport, ip, port)
532
533 def _create_connected_socket(self, channel, identity=None):
534 """Create a zmq Socket and connect it to the kernel."""
535 url = self._make_url(channel)
536 socket_type = channel_socket_types[channel]
537 self.log.debug("Connecting to: %s" % url)
538 sock = self.context.socket(socket_type)
539 # set linger to 1s to prevent hangs at exit
540 sock.linger = 1000
541 if identity:
542 sock.identity = identity
543 sock.connect(url)
544 return sock
545
546 def connect_iopub(self, identity=None):
547 """return zmq Socket connected to the IOPub channel"""
548 sock = self._create_connected_socket('iopub', identity=identity)
549 sock.setsockopt(zmq.SUBSCRIBE, b'')
550 return sock
551
552 def connect_shell(self, identity=None):
553 """return zmq Socket connected to the Shell channel"""
554 return self._create_connected_socket('shell', identity=identity)
555
556 def connect_stdin(self, identity=None):
557 """return zmq Socket connected to the StdIn channel"""
558 return self._create_connected_socket('stdin', identity=identity)
559
560 def connect_hb(self, identity=None):
561 """return zmq Socket connected to the Heartbeat channel"""
562 return self._create_connected_socket('hb', identity=identity)
563
564 def connect_control(self, identity=None):
565 """return zmq Socket connected to the Control channel"""
566 return self._create_connected_socket('control', identity=identity)
567
568
569 __all__ = [
570 'write_connection_file',
571 'get_connection_file',
572 'find_connection_file',
573 'get_connection_info',
574 'connect_qtconsole',
575 'tunnel_to_kernel',
576 ]
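The connection-file helpers above can be exercised end to end: write_connection_file picks any unspecified ports, writes the JSON file, and returns both the path and the config dict. A short sketch, assuming the module layout introduced in this commit (ip and key are illustrative):

import json
from jupyter_client.connect import write_connection_file

# let the helper choose a temporary file and five free TCP ports
fname, cfg = write_connection_file(ip='127.0.0.1', key=b'illustrative-key')

with open(fname) as f:
    on_disk = json.load(f)

assert on_disk['transport'] == 'tcp'
assert on_disk['signature_scheme'] == 'hmac-sha256'
assert cfg['shell_port'] == on_disk['shell_port'] > 0

Note that free ports are discovered by binding and immediately closing sockets, so there is a small window in which another process could claim a port before the kernel binds it.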
@@ -0,0 +1,1 b''
1 from jupyter_client.session import *
@@ -1,3 +1,3 b''
1 1 if __name__ == '__main__':
2 from IPython.kernel.zmq import kernelapp as app
2 from ipython_kernel.zmq import kernelapp as app
3 3 app.launch_new_instance()
1 NO CONTENT: file renamed from IPython/kernel/comm/__init__.py to ipython_kernel/comm/__init__.py
@@ -9,7 +9,7 b' import uuid'
9 9 from zmq.eventloop.ioloop import IOLoop
10 10
11 11 from IPython.config import LoggingConfigurable
12 from IPython.kernel.zmq.kernelbase import Kernel
12 from ipython_kernel.zmq.kernelbase import Kernel
13 13
14 14 from IPython.utils.jsonutil import json_clean
15 15 from IPython.utils.traitlets import Instance, Unicode, Bytes, Bool, Dict, Any
@@ -20,40 +20,40 b' class Comm(LoggingConfigurable):'
20 20 # If this is instantiated by a non-IPython kernel, shell will be None
21 21 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
22 22 allow_none=True)
23 kernel = Instance('IPython.kernel.zmq.kernelbase.Kernel')
23 kernel = Instance('ipython_kernel.zmq.kernelbase.Kernel')
24 24 def _kernel_default(self):
25 25 if Kernel.initialized():
26 26 return Kernel.instance()
27
27
28 28 iopub_socket = Any()
29 29 def _iopub_socket_default(self):
30 30 return self.kernel.iopub_socket
31 session = Instance('IPython.kernel.zmq.session.Session')
31 session = Instance('ipython_kernel.zmq.session.Session')
32 32 def _session_default(self):
33 33 if self.kernel is not None:
34 34 return self.kernel.session
35
35
36 36 target_name = Unicode('comm')
37 37 target_module = Unicode(None, allow_none=True, help="""requirejs module from
38 38 which to load comm target.""")
39
39
40 40 topic = Bytes()
41 41 def _topic_default(self):
42 42 return ('comm-%s' % self.comm_id).encode('ascii')
43
43
44 44 _open_data = Dict(help="data dict, if any, to be included in comm_open")
45 45 _close_data = Dict(help="data dict, if any, to be included in comm_close")
46
46
47 47 _msg_callback = Any()
48 48 _close_callback = Any()
49
49
50 50 _closed = Bool(True)
51 51 comm_id = Unicode()
52 52 def _comm_id_default(self):
53 53 return uuid.uuid4().hex
54
54
55 55 primary = Bool(True, help="Am I the primary or secondary Comm?")
56
56
57 57 def __init__(self, target_name='', data=None, **kwargs):
58 58 if target_name:
59 59 kwargs['target_name'] = target_name
@@ -63,7 +63,7 b' class Comm(LoggingConfigurable):'
63 63 self.open(data)
64 64 else:
65 65 self._closed = False
66
66
67 67 def _publish_msg(self, msg_type, data=None, metadata=None, buffers=None, **keys):
68 68 """Helper for sending a comm message on IOPub"""
69 69 if threading.current_thread().name != 'MainThread' and IOLoop.initialized():
@@ -80,13 +80,13 b' class Comm(LoggingConfigurable):'
80 80 ident=self.topic,
81 81 buffers=buffers,
82 82 )
83
83
84 84 def __del__(self):
85 85 """trigger close on gc"""
86 86 self.close()
87
87
88 88 # publishing messages
89
89
90 90 def open(self, data=None, metadata=None, buffers=None):
91 91 """Open the frontend-side version of this comm"""
92 92 if data is None:
@@ -107,7 +107,7 b' class Comm(LoggingConfigurable):'
107 107 except:
108 108 comm_manager.unregister_comm(self)
109 109 raise
110
110
111 111 def close(self, data=None, metadata=None, buffers=None):
112 112 """Close the frontend-side version of this comm"""
113 113 if self._closed:
@@ -120,41 +120,41 b' class Comm(LoggingConfigurable):'
120 120 data=data, metadata=metadata, buffers=buffers,
121 121 )
122 122 self.kernel.comm_manager.unregister_comm(self)
123
123
124 124 def send(self, data=None, metadata=None, buffers=None):
125 125 """Send a message to the frontend-side version of this comm"""
126 126 self._publish_msg('comm_msg',
127 127 data=data, metadata=metadata, buffers=buffers,
128 128 )
129
129
130 130 # registering callbacks
131
131
132 132 def on_close(self, callback):
133 133 """Register a callback for comm_close
134
134
135 135 Will be called with the `data` of the close message.
136
136
137 137 Call `on_close(None)` to disable an existing callback.
138 138 """
139 139 self._close_callback = callback
140
140
141 141 def on_msg(self, callback):
142 142 """Register a callback for comm_msg
143
143
144 144 Will be called with the `data` of any comm_msg messages.
145
145
146 146 Call `on_msg(None)` to disable an existing callback.
147 147 """
148 148 self._msg_callback = callback
149
149
150 150 # handling of incoming messages
151
151
152 152 def handle_close(self, msg):
153 153 """Handle a comm_close message"""
154 154 self.log.debug("handle_close[%s](%s)", self.comm_id, msg)
155 155 if self._close_callback:
156 156 self._close_callback(msg)
157
157
158 158 def handle_msg(self, msg):
159 159 """Handle a comm_msg message"""
160 160 self.log.debug("handle_msg[%s](%s)", self.comm_id, msg)
@@ -18,7 +18,7 b' from .comm import Comm'
18 18
19 19 def lazy_keys(dikt):
20 20 """Return lazy-evaluated string representation of a dictionary's keys
21
21
22 22 Key list is only constructed if it will actually be used.
23 23 Used for debug-logging.
24 24 """
@@ -27,43 +27,43 b' def lazy_keys(dikt):'
27 27
28 28 class CommManager(LoggingConfigurable):
29 29 """Manager for Comms in the Kernel"""
30
30
31 31 # If this is instantiated by a non-IPython kernel, shell will be None
32 32 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
33 33 allow_none=True)
34 kernel = Instance('IPython.kernel.zmq.kernelbase.Kernel')
34 kernel = Instance('ipython_kernel.zmq.kernelbase.Kernel')
35 35
36 36 iopub_socket = Any()
37 37 def _iopub_socket_default(self):
38 38 return self.kernel.iopub_socket
39 session = Instance('IPython.kernel.zmq.session.Session')
39 session = Instance('ipython_kernel.zmq.session.Session')
40 40 def _session_default(self):
41 41 return self.kernel.session
42
42
43 43 comms = Dict()
44 44 targets = Dict()
45
45
46 46 # Public APIs
47
47
48 48 def register_target(self, target_name, f):
49 49 """Register a callable f for a given target name
50
50
51 51 f will be called with two arguments when a comm_open message is received with `target`:
52
52
53 53 - the Comm instance
54 54 - the `comm_open` message itself.
55
55
56 56 f can be a Python callable or an import string for one.
57 57 """
58 58 if isinstance(f, string_types):
59 59 f = import_item(f)
60
60
61 61 self.targets[target_name] = f
62
62
63 63 def unregister_target(self, target_name, f):
64 64 """Unregister a callable registered with register_target"""
65 65 return self.targets.pop(target_name)
66
66
67 67 def register_comm(self, comm):
68 68 """Register a new comm"""
69 69 comm_id = comm.comm_id
@@ -72,17 +72,17 b' class CommManager(LoggingConfigurable):'
72 72 comm.iopub_socket = self.iopub_socket
73 73 self.comms[comm_id] = comm
74 74 return comm_id
75
75
76 76 def unregister_comm(self, comm):
77 77 """Unregister a comm, and close its counterpart"""
78 78 # unlike get_comm, this should raise a KeyError
79 79 comm = self.comms.pop(comm.comm_id)
80
80
81 81 def get_comm(self, comm_id):
82 82 """Get a comm with a particular id
83
83
84 84 Returns the comm if found, otherwise None.
85
85
86 86 This will not raise an error,
87 87 it will log messages if the comm cannot be found.
88 88 """
@@ -93,7 +93,7 b' class CommManager(LoggingConfigurable):'
93 93 # call, because we store weakrefs
94 94 comm = self.comms[comm_id]
95 95 return comm
96
96
97 97 # Message handlers
98 98 def comm_open(self, stream, ident, msg):
99 99 """Handler for comm_open messages"""
@@ -116,14 +116,14 b' class CommManager(LoggingConfigurable):'
116 116 return
117 117 except Exception:
118 118 self.log.error("Exception opening comm with target: %s", target_name, exc_info=True)
119
119
120 120 # Failure.
121 121 try:
122 122 comm.close()
123 123 except:
124 self.log.error("""Could not close comm during `comm_open` failure
124 self.log.error("""Could not close comm during `comm_open` failure
125 125 clean-up. The comm may not have been opened yet.""", exc_info=True)
126
126
127 127 def comm_msg(self, stream, ident, msg):
128 128 """Handler for comm_msg messages"""
129 129 content = msg['content']
@@ -136,7 +136,7 b' class CommManager(LoggingConfigurable):'
136 136 comm.handle_msg(msg)
137 137 except Exception:
138 138 self.log.error("Exception in comm_msg for %s", comm_id, exc_info=True)
139
139
140 140 def comm_close(self, stream, ident, msg):
141 141 """Handler for comm_close messages"""
142 142 content = msg['content']
@@ -147,7 +147,7 b' class CommManager(LoggingConfigurable):'
147 147 self.log.debug("No such comm to close: %s", comm_id)
148 148 return
149 149 del self.comms[comm_id]
150
150
151 151 try:
152 152 comm.handle_close(msg)
153 153 except Exception:
1 NO CONTENT: file renamed from IPython/kernel/inprocess/__init__.py to ipython_kernel/inprocess/__init__.py
@@ -17,7 +17,6 b' except ImportError:'
17 17 # IPython imports
18 18 from IPython.utils.io import raw_print
19 19 from IPython.utils.traitlets import Type
20 #from IPython.kernel.blocking.channels import BlockingChannelMixin
21 20
22 21 # Local imports
23 22 from .channels import (
@@ -3,7 +3,7 b''
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 from IPython.kernel.channelsabc import HBChannelABC
6 from jupyter_client.channelsabc import HBChannelABC
7 7
8 8 from .socket import DummySocket
9 9
@@ -12,10 +12,10 b''
12 12 #-----------------------------------------------------------------------------
13 13
14 14 # IPython imports
15 from IPython.kernel.inprocess.socket import DummySocket
15 from ipython_kernel.inprocess.socket import DummySocket
16 16 from IPython.utils.traitlets import Type, Instance
17 from IPython.kernel.clientabc import KernelClientABC
18 from IPython.kernel.client import KernelClient
17 from jupyter_client.clientabc import KernelClientABC
18 from jupyter_client.client import KernelClient
19 19
20 20 # Local imports
21 21 from .channels import (
@@ -32,10 +32,10 b' class InProcessKernelClient(KernelClient):'
32 32 """A client for an in-process kernel.
33 33
34 34 This class implements the interface of
35 `IPython.kernel.clientabc.KernelClientABC` and allows
35 `jupyter_client.clientabc.KernelClientABC` and allows
36 36 (asynchronous) frontends to be used seamlessly with an in-process kernel.
37 37
38 See `IPython.kernel.client.KernelClient` for docstrings.
38 See `jupyter_client.client.KernelClient` for docstrings.
39 39 """
40 40
41 41 # The classes to use for the various channels.
@@ -44,7 +44,7 b' class InProcessKernelClient(KernelClient):'
44 44 stdin_channel_class = Type(InProcessChannel)
45 45 hb_channel_class = Type(InProcessHBChannel)
46 46
47 kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel',
47 kernel = Instance('ipython_kernel.inprocess.ipkernel.InProcessKernel',
48 48 allow_none=True)
49 49
50 50 #--------------------------------------------------------------------------
@@ -10,8 +10,8 b' import sys'
10 10 from IPython.core.interactiveshell import InteractiveShellABC
11 11 from IPython.utils.jsonutil import json_clean
12 12 from IPython.utils.traitlets import Any, Enum, Instance, List, Type
13 from IPython.kernel.zmq.ipkernel import IPythonKernel
14 from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell
13 from ipython_kernel.zmq.ipkernel import IPythonKernel
14 from ipython_kernel.zmq.zmqshell import ZMQInteractiveShell
15 15
16 16 from .socket import DummySocket
17 17
@@ -27,7 +27,7 b' class InProcessKernel(IPythonKernel):'
27 27
28 28 # The frontends connected to this kernel.
29 29 frontends = List(
30 Instance('IPython.kernel.inprocess.client.InProcessKernelClient',
30 Instance('ipython_kernel.inprocess.client.InProcessKernelClient',
31 31 allow_none=True)
32 32 )
33 33
@@ -114,25 +114,25 b' class InProcessKernel(IPythonKernel):'
114 114 ident, msg = self.session.recv(self.iopub_socket, copy=False)
115 115 for frontend in self.frontends:
116 116 frontend.iopub_channel.call_handlers(msg)
117
117
118 118 #------ Trait initializers -----------------------------------------------
119 119
120 120 def _log_default(self):
121 121 return logging.getLogger(__name__)
122 122
123 123 def _session_default(self):
124 from IPython.kernel.zmq.session import Session
124 from ipython_kernel.zmq.session import Session
125 125 return Session(parent=self, key=b'')
126 126
127 127 def _shell_class_default(self):
128 128 return InProcessInteractiveShell
129 129
130 130 def _stdout_default(self):
131 from IPython.kernel.zmq.iostream import OutStream
131 from ipython_kernel.zmq.iostream import OutStream
132 132 return OutStream(self.session, self.iopub_socket, u'stdout', pipe=False)
133 133
134 134 def _stderr_default(self):
135 from IPython.kernel.zmq.iostream import OutStream
135 from ipython_kernel.zmq.iostream import OutStream
136 136 return OutStream(self.session, self.iopub_socket, u'stderr', pipe=False)
137 137
138 138 #-----------------------------------------------------------------------------
@@ -141,7 +141,7 b' class InProcessKernel(IPythonKernel):'
141 141
142 142 class InProcessInteractiveShell(ZMQInteractiveShell):
143 143
144 kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel',
144 kernel = Instance('ipython_kernel.inprocess.ipkernel.InProcessKernel',
145 145 allow_none=True)
146 146
147 147 #-------------------------------------------------------------------------
@@ -150,7 +150,7 b' class InProcessInteractiveShell(ZMQInteractiveShell):'
150 150
151 151 def enable_gui(self, gui=None):
152 152 """Enable GUI integration for the kernel."""
153 from IPython.kernel.zmq.eventloops import enable_gui
153 from ipython_kernel.zmq.eventloops import enable_gui
154 154 if not gui:
155 155 gui = self.kernel.gui
156 156 return enable_gui(gui, kernel=self.kernel)
@@ -4,37 +4,37 b''
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 from IPython.utils.traitlets import Instance, DottedObjectName
7 from IPython.kernel.managerabc import KernelManagerABC
8 from IPython.kernel.manager import KernelManager
9 from IPython.kernel.zmq.session import Session
7 from jupyter_client.managerabc import KernelManagerABC
8 from jupyter_client.manager import KernelManager
9 from jupyter_client.session import Session
10 10
11 11
12 12 class InProcessKernelManager(KernelManager):
13 13 """A manager for an in-process kernel.
14 14
15 15 This class implements the interface of
16 `IPython.kernel.kernelmanagerabc.KernelManagerABC` and allows
16 `jupyter_client.kernelmanagerabc.KernelManagerABC` and allows
17 17 (asynchronous) frontends to be used seamlessly with an in-process kernel.
18 18
19 See `IPython.kernel.kernelmanager.KernelManager` for docstrings.
19 See `jupyter_client.kernelmanager.KernelManager` for docstrings.
20 20 """
21 21
22 22 # The kernel process with which the KernelManager is communicating.
23 kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel',
23 kernel = Instance('ipython_kernel.inprocess.ipkernel.InProcessKernel',
24 24 allow_none=True)
25 25 # the client class for KM.client() shortcut
26 client_class = DottedObjectName('IPython.kernel.inprocess.BlockingInProcessKernelClient')
27
26 client_class = DottedObjectName('ipython_kernel.inprocess.BlockingInProcessKernelClient')
27
28 28 def _session_default(self):
29 29 # don't sign in-process messages
30 30 return Session(key=b'', parent=self)
31
31
32 32 #--------------------------------------------------------------------------
33 33 # Kernel management methods
34 34 #--------------------------------------------------------------------------
35 35
36 36 def start_kernel(self, **kwds):
37 from IPython.kernel.inprocess.ipkernel import InProcessKernel
37 from ipython_kernel.inprocess.ipkernel import InProcessKernel
38 38 self.kernel = InProcessKernel(parent=self, session=self.session)
39 39
40 40 def shutdown_kernel(self):
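Editor's note: putting the manager and client classes above together, a minimal in-process session might look like this (a sketch mirroring the test imports later in this diff; km.client() resolves the client_class trait shown above):

    from ipython_kernel.inprocess.manager import InProcessKernelManager

    km = InProcessKernelManager()
    km.start_kernel()          # builds an InProcessKernel in this process
    kc = km.client()           # BlockingInProcessKernelClient, per client_class
    kc.start_channels()
    kc.execute('x = 1')        # runs in-process; no ZMQ sockets involved
    km.shutdown_kernel()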
@@ -46,7 +46,7 b' SocketABC.register(zmq.Socket)'
46 46
47 47 class DummySocket(HasTraits):
48 48 """ A dummy socket implementing (part of) the zmq.Socket interface. """
49
49
50 50 queue = Instance(Queue, ())
51 51 message_sent = Int(0) # Should be an Event
52 52
1 NO CONTENT: file renamed from IPython/kernel/inprocess/tests/__init__.py to ipython_kernel/inprocess/tests/__init__.py
@@ -6,10 +6,10 b' from __future__ import print_function'
6 6 import sys
7 7 import unittest
8 8
9 from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
10 from IPython.kernel.inprocess.manager import InProcessKernelManager
11 from IPython.kernel.inprocess.ipkernel import InProcessKernel
12 from IPython.kernel.tests.utils import assemble_output
9 from ipython_kernel.inprocess.blocking import BlockingInProcessKernelClient
10 from ipython_kernel.inprocess.manager import InProcessKernelManager
11 from ipython_kernel.inprocess.ipkernel import InProcessKernel
12 from ipython_kernel.tests.utils import assemble_output
13 13 from IPython.testing.decorators import skipif_not_matplotlib
14 14 from IPython.utils.io import capture_output
15 15 from IPython.utils import py3compat
@@ -66,4 +66,3 b' class InProcessKernelTestCase(unittest.TestCase):'
66 66 kc.execute('print("bar")')
67 67 out, err = assemble_output(kc.iopub_channel)
68 68 self.assertEqual(out, 'bar\n')
69
@@ -5,8 +5,8 b' from __future__ import print_function'
5 5
6 6 import unittest
7 7
8 from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
9 from IPython.kernel.inprocess.manager import InProcessKernelManager
8 from ipython_kernel.inprocess.blocking import BlockingInProcessKernelClient
9 from ipython_kernel.inprocess.manager import InProcessKernelManager
10 10
11 11 #-----------------------------------------------------------------------------
12 12 # Test case
1 NO CONTENT: file renamed from IPython/kernel/resources/logo-32x32.png to ipython_kernel/resources/logo-32x32.png
1 NO CONTENT: file renamed from IPython/kernel/resources/logo-64x64.png to ipython_kernel/resources/logo-64x64.png
1 NO CONTENT: file renamed from IPython/kernel/tests/__init__.py to ipython_kernel/tests/__init__.py
@@ -49,7 +49,7 b' def test_sys_path():'
49 49
50 50 def test_sys_path_profile_dir():
51 51 """test that sys.path doesn't get messed up when `--profile-dir` is specified"""
52
52
53 53 with new_kernel(['--profile-dir', locate_profile('default')]) as kc:
54 54 msg_id, content = execute(kc=kc, code="import sys; print (repr(sys.path[0]))")
55 55 stdout, stderr = assemble_output(kc.iopub_channel)
@@ -60,7 +60,7 b' def test_subprocess_print():'
60 60 """printing from forked mp.Process"""
61 61 with new_kernel() as kc:
62 62 iopub = kc.iopub_channel
63
63
64 64 _check_mp_mode(kc, expected=False)
65 65 flush_channels(kc)
66 66 np = 5
@@ -71,11 +71,11 b' def test_subprocess_print():'
71 71 "for p in pool: p.start()",
72 72 "for p in pool: p.join()"
73 73 ])
74
74
75 75 expected = '\n'.join([
76 76 "hello %s" % i for i in range(np)
77 77 ]) + '\n'
78
78
79 79 msg_id, content = execute(kc=kc, code=code)
80 80 stdout, stderr = assemble_output(iopub)
81 81 nt.assert_equal(stdout.count("hello"), np, stdout)
@@ -90,7 +90,7 b' def test_subprocess_noprint():'
90 90 """mp.Process without print doesn't trigger iostream mp_mode"""
91 91 with kernel() as kc:
92 92 iopub = kc.iopub_channel
93
93
94 94 np = 5
95 95 code = '\n'.join([
96 96 "import multiprocessing as mp",
@@ -98,7 +98,7 b' def test_subprocess_noprint():'
98 98 "for p in pool: p.start()",
99 99 "for p in pool: p.join()"
100 100 ])
101
101
102 102 msg_id, content = execute(kc=kc, code=code)
103 103 stdout, stderr = assemble_output(iopub)
104 104 nt.assert_equal(stdout, '')
@@ -113,14 +113,14 b' def test_subprocess_error():'
113 113 """error in mp.Process doesn't crash"""
114 114 with new_kernel() as kc:
115 115 iopub = kc.iopub_channel
116
116
117 117 code = '\n'.join([
118 118 "import multiprocessing as mp",
119 119 "p = mp.Process(target=int, args=('hi',))",
120 120 "p.start()",
121 121 "p.join()",
122 122 ])
123
123
124 124 msg_id, content = execute(kc=kc, code=code)
125 125 stdout, stderr = assemble_output(iopub)
126 126 nt.assert_equal(stdout, '')
@@ -135,7 +135,7 b' def test_raw_input():'
135 135 """test [raw_]input"""
136 136 with kernel() as kc:
137 137 iopub = kc.iopub_channel
138
138
139 139 input_f = "input" if py3compat.PY3 else "raw_input"
140 140 theprompt = "prompt> "
141 141 code = 'print({input_f}("{theprompt}"))'.format(**locals())
@@ -157,7 +157,7 b' def test_eval_input():'
157 157 """test input() on Python 2"""
158 158 with kernel() as kc:
159 159 iopub = kc.iopub_channel
160
160
161 161 input_f = "input" if py3compat.PY3 else "raw_input"
162 162 theprompt = "prompt> "
163 163 code = 'print(input("{theprompt}"))'.format(**locals())
@@ -205,7 +205,7 b' def test_is_complete():'
205 205 kc.is_complete('raise = 2')
206 206 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
207 207 assert reply['content']['status'] == 'invalid'
208
208
209 209 kc.is_complete('a = [1,\n2,')
210 210 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
211 211 assert reply['content']['status'] == 'incomplete'
@@ -64,7 +64,7 b' class Version(Unicode):'
64 64 self.max = kwargs.pop('max', None)
65 65 kwargs['default_value'] = self.min
66 66 super(Version, self).__init__(*args, **kwargs)
67
67
68 68 def validate(self, obj, value):
69 69 if self.min and V(value) < V(self.min):
70 70 raise TraitError("bad version: %s < %s" % (value, self.min))
@@ -78,7 +78,7 b' class RMessage(Reference):'
78 78 header = Dict()
79 79 parent_header = Dict()
80 80 content = Dict()
81
81
82 82 def check(self, d):
83 83 super(RMessage, self).check(d)
84 84 RHeader().check(self.header)
@@ -107,7 +107,7 b' class MimeBundle(Reference):'
107 107 class ExecuteReply(Reference):
108 108 execution_count = Integer()
109 109 status = Enum((u'ok', u'error'), default_value=u'ok')
110
110
111 111 def check(self, d):
112 112 Reference.check(self, d)
113 113 if d['status'] == 'ok':
@@ -158,7 +158,7 b' class KernelInfoReply(Reference):'
158 158 implementation_version = Version(min='2.1')
159 159 language_info = Dict()
160 160 banner = Unicode()
161
161
162 162 def check(self, d):
163 163 Reference.check(self, d)
164 164 LanguageInfo().check(d['language_info'])
@@ -166,7 +166,7 b' class KernelInfoReply(Reference):'
166 166
167 167 class IsCompleteReply(Reference):
168 168 status = Enum((u'complete', u'incomplete', u'invalid', u'unknown'), default_value=u'complete')
169
169
170 170 def check(self, d):
171 171 Reference.check(self, d)
172 172 if d['status'] == 'incomplete':
@@ -224,10 +224,10 b' Specifications of `content` part of the reply messages.'
224 224
225 225 def validate_message(msg, msg_type=None, parent=None):
226 226 """validate a message
227
227
228 228 This is a generator, and must be iterated through to actually
229 229 trigger each test.
230
230
231 231 If msg_type and/or parent are given, the msg_type and/or parent msg_id
232 232 are compared with the given values.
233 233 """
@@ -249,7 +249,7 b' def validate_message(msg, msg_type=None, parent=None):'
249 249
250 250 def test_execute():
251 251 flush_channels()
252
252
253 253 msg_id = KC.execute(code='x=1')
254 254 reply = KC.get_shell_msg(timeout=TIMEOUT)
255 255 validate_message(reply, 'execute_reply', msg_id)
@@ -258,7 +258,7 b' def test_execute():'
258 258 def test_execute_silent():
259 259 flush_channels()
260 260 msg_id, reply = execute(code='x=1', silent=True)
261
261
262 262 # flush status=idle
263 263 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
264 264 validate_message(status, 'status', msg_id)
@@ -266,14 +266,14 b' def test_execute_silent():'
266 266
267 267 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
268 268 count = reply['execution_count']
269
269
270 270 msg_id, reply = execute(code='x=2', silent=True)
271
271
272 272 # flush status=idle
273 273 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
274 274 validate_message(status, 'status', msg_id)
275 275 nt.assert_equal(status['content']['execution_state'], 'idle')
276
276
277 277 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
278 278 count_2 = reply['execution_count']
279 279 nt.assert_equal(count_2, count)
@@ -281,11 +281,11 b' def test_execute_silent():'
281 281
282 282 def test_execute_error():
283 283 flush_channels()
284
284
285 285 msg_id, reply = execute(code='1/0')
286 286 nt.assert_equal(reply['status'], 'error')
287 287 nt.assert_equal(reply['ename'], 'ZeroDivisionError')
288
288
289 289 error = KC.iopub_channel.get_msg(timeout=TIMEOUT)
290 290 validate_message(error, 'error', msg_id)
291 291
@@ -296,9 +296,9 b' def test_execute_inc():'
296 296
297 297 msg_id, reply = execute(code='x=1')
298 298 count = reply['execution_count']
299
299
300 300 flush_channels()
301
301
302 302 msg_id, reply = execute(code='x=2')
303 303 count_2 = reply['execution_count']
304 304 nt.assert_equal(count_2, count+1)
@@ -306,7 +306,7 b' def test_execute_inc():'
306 306 def test_execute_stop_on_error():
307 307 """execute request should not abort execution queue with stop_on_error False"""
308 308 flush_channels()
309
309
310 310 fail = '\n'.join([
311 311 # sleep to ensure subsequent message is waiting in the queue to be aborted
312 312 'import time',
@@ -362,7 +362,7 b' def test_oinfo_found():'
362 362 flush_channels()
363 363
364 364 msg_id, reply = execute(code='a=5')
365
365
366 366 msg_id = KC.inspect('a')
367 367 reply = KC.get_shell_msg(timeout=TIMEOUT)
368 368 validate_message(reply, 'inspect_reply', msg_id)
@@ -377,7 +377,7 b' def test_oinfo_detail():'
377 377 flush_channels()
378 378
379 379 msg_id, reply = execute(code='ip=get_ipython()')
380
380
381 381 msg_id = KC.inspect('ip.object_inspect', cursor_pos=10, detail_level=1)
382 382 reply = KC.get_shell_msg(timeout=TIMEOUT)
383 383 validate_message(reply, 'inspect_reply', msg_id)
@@ -402,7 +402,7 b' def test_complete():'
402 402 flush_channels()
403 403
404 404 msg_id, reply = execute(code="alpha = albert = 5")
405
405
406 406 msg_id = KC.complete('al', 2)
407 407 reply = KC.get_shell_msg(timeout=TIMEOUT)
408 408 validate_message(reply, 'complete_reply', msg_id)
@@ -436,10 +436,10 b' def test_is_complete():'
436 436
437 437 def test_history_range():
438 438 flush_channels()
439
439
440 440 msg_id_exec = KC.execute(code='x=1', store_history = True)
441 441 reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
442
442
443 443 msg_id = KC.history(hist_access_type = 'range', raw = True, output = True, start = 1, stop = 2, session = 0)
444 444 reply = KC.get_shell_msg(timeout=TIMEOUT)
445 445 validate_message(reply, 'history_reply', msg_id)
@@ -448,10 +448,10 b' def test_history_range():'
448 448
449 449 def test_history_tail():
450 450 flush_channels()
451
451
452 452 msg_id_exec = KC.execute(code='x=1', store_history = True)
453 453 reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
454
454
455 455 msg_id = KC.history(hist_access_type = 'tail', raw = True, output = True, n = 1, session = 0)
456 456 reply = KC.get_shell_msg(timeout=TIMEOUT)
457 457 validate_message(reply, 'history_reply', msg_id)
@@ -460,10 +460,10 b' def test_history_tail():'
460 460
461 461 def test_history_search():
462 462 flush_channels()
463
463
464 464 msg_id_exec = KC.execute(code='x=1', store_history = True)
465 465 reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
466
466
467 467 msg_id = KC.history(hist_access_type = 'search', raw = True, output = True, n = 1, pattern = '*', session = 0)
468 468 reply = KC.get_shell_msg(timeout=TIMEOUT)
469 469 validate_message(reply, 'history_reply', msg_id)
@@ -488,9 +488,8 b' def test_display_data():'
488 488 flush_channels()
489 489
490 490 msg_id, reply = execute("from IPython.core.display import display; display(1)")
491
491
492 492 display = KC.iopub_channel.get_msg(timeout=TIMEOUT)
493 493 validate_message(display, 'display_data', parent=msg_id)
494 494 data = display['content']['data']
495 495 nt.assert_equal(data['text/plain'], u'1')
496
@@ -4,6 +4,7 b''
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 import atexit
7 import os
7 8
8 9 from contextlib import contextmanager
9 10 from subprocess import PIPE, STDOUT
@@ -15,7 +16,7 b' except ImportError:'
15 16 import nose
16 17 import nose.tools as nt
17 18
18 from IPython.kernel import manager
19 from jupyter_client import manager
19 20
20 21 #-------------------------------------------------------------------------------
21 22 # Globals
@@ -32,7 +33,7 b' KC = None'
32 33 #-------------------------------------------------------------------------------
33 34 def start_new_kernel(**kwargs):
34 35 """start a new kernel, and return its Manager and Client
35
36
36 37 Integrates with our output capturing for tests.
37 38 """
38 39 kwargs.update(dict(stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT))
@@ -41,7 +42,7 b' def start_new_kernel(**kwargs):'
41 42 def flush_channels(kc=None):
42 43 """flush any messages waiting on the queue"""
43 44 from .test_message_spec import validate_message
44
45
45 46 if kc is None:
46 47 kc = KC
47 48 for channel in (kc.shell_channel, kc.iopub_channel):
@@ -65,12 +66,12 b" def execute(code='', kc=None, **kwargs):"
65 66 busy = kc.get_iopub_msg(timeout=TIMEOUT)
66 67 validate_message(busy, 'status', msg_id)
67 68 nt.assert_equal(busy['content']['execution_state'], 'busy')
68
69
69 70 if not kwargs.get('silent'):
70 71 execute_input = kc.get_iopub_msg(timeout=TIMEOUT)
71 72 validate_message(execute_input, 'execute_input', msg_id)
72 73 nt.assert_equal(execute_input['content']['code'], code)
73
74
74 75 return msg_id, reply['content']
75 76
76 77 def start_global_kernel():
@@ -86,9 +87,9 b' def start_global_kernel():'
86 87 @contextmanager
87 88 def kernel():
88 89 """Context manager for the global kernel instance
89
90
90 91 Should be used for most kernel tests
91
92
92 93 Returns
93 94 -------
94 95 kernel_client: connected KernelClient instance
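Editor's note: concretely, the fixture above composes with execute and assemble_output (both visible elsewhere in this diff) like so:

    from ipython_kernel.tests.utils import kernel, execute, assemble_output

    with kernel() as kc:
        msg_id, content = execute(kc=kc, code='print("hi")')
        stdout, stderr = assemble_output(kc.iopub_channel)
        assert stdout == 'hi\n'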
@@ -116,14 +117,15 b' def stop_global_kernel():'
116 117
117 118 def new_kernel(argv=None):
118 119 """Context manager for a new kernel in a subprocess
119
120
120 121 Should only be used for tests where the kernel must not be re-used.
121
122
122 123 Returns
123 124 -------
124 125 kernel_client: connected KernelClient instance
125 126 """
126 kwargs = dict(stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT,
127 kwargs = dict(
128 stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT,
127 129 startup_timeout=STARTUP_TIMEOUT)
128 130 if argv is not None:
129 131 kwargs['extra_arguments'] = argv
@@ -5,7 +5,7 b''
5 5
6 6 from IPython.utils.zmqrelated import check_for_zmq
7 7
8 check_for_zmq('13', 'IPython.kernel.zmq')
9
10 from .session import Session
8 check_for_zmq('13', 'ipython_kernel.zmq')
11 9
10 from jupyter_client import session
11 Session = session.Session
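Editor's note: the replacement above turns the local Session into a pure re-export, so legacy imports keep resolving to the jupyter_client class:

    from jupyter_client.session import Session as JCSession
    from ipython_kernel.zmq import Session

    assert Session is JCSession   # the old import path still works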
@@ -13,11 +13,11 b''
13 13 #-----------------------------------------------------------------------------
14 14
15 15 from IPython.config import Configurable
16 from IPython.kernel.inprocess.socket import SocketABC
16 from ipython_kernel.inprocess.socket import SocketABC
17 17 from IPython.utils.jsonutil import json_clean
18 18 from IPython.utils.traitlets import Instance, Dict, CBytes
19 from IPython.kernel.zmq.serialize import serialize_object
20 from IPython.kernel.zmq.session import Session, extract_header
19 from ipython_kernel.zmq.serialize import serialize_object
20 from ipython_kernel.zmq.session import Session, extract_header
21 21
22 22 #-----------------------------------------------------------------------------
23 23 # Code
@@ -34,13 +34,13 b' class ZMQDataPublisher(Configurable):'
34 34 def set_parent(self, parent):
35 35 """Set the parent for outbound messages."""
36 36 self.parent_header = extract_header(parent)
37
37
38 38 def publish_data(self, data):
39 39 """publish a data_message on the IOPub channel
40
40
41 41 Parameters
42 42 ----------
43
43
44 44 data : dict
45 45 The data to be published. Think of it as a namespace.
46 46 """
@@ -59,12 +59,12 b' class ZMQDataPublisher(Configurable):'
59 59
60 60 def publish_data(data):
61 61 """publish a data_message on the IOPub channel
62
62
63 63 Parameters
64 64 ----------
65
65
66 66 data : dict
67 67 The data to be published. Think of it as a namespace.
68 68 """
69 from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell
69 from ipython_kernel.zmq.zmqshell import ZMQInteractiveShell
70 70 ZMQInteractiveShell.instance().data_pub.publish_data(data)
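Editor's note: as the docstring above says, the payload is just a dict namespace; from code executing inside the kernel the module-level helper suffices (a sketch; the datapub module path is assumed from this split):

    from ipython_kernel.zmq.datapub import publish_data   # path assumed

    # inside user code running in the kernel, e.g. a parallel task:
    publish_data({'iteration': 5, 'loss': 0.12})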
@@ -6,7 +6,7 b''
6 6 import sys
7 7
8 8 from IPython.core.displayhook import DisplayHook
9 from IPython.kernel.inprocess.socket import SocketABC
9 from ipython_kernel.inprocess.socket import SocketABC
10 10 from IPython.utils.jsonutil import encode_images
11 11 from IPython.utils.py3compat import builtin_mod
12 12 from IPython.utils.traitlets import Instance, Dict
@@ -71,4 +71,3 b' class ZMQShellDisplayHook(DisplayHook):'
71 71 if self.msg['content']['data']:
72 72 self.session.send(self.pub_socket, self.msg, ident=self.topic)
73 73 self.msg = None
74
@@ -16,19 +16,19 b' from .kernelapp import IPKernelApp'
16 16
17 17 def embed_kernel(module=None, local_ns=None, **kwargs):
18 18 """Embed and start an IPython kernel in a given scope.
19
19
20 20 Parameters
21 21 ----------
22 22 module : ModuleType, optional
23 23 The module to load into IPython globals (default: caller)
24 24 local_ns : dict, optional
25 25 The namespace to load into IPython user namespace (default: caller)
26
26
27 27 kwargs : various, optional
28 28 Further keyword args are relayed to the IPKernelApp constructor,
29 29 allowing configuration of the Kernel. Will only have an effect
30 30 on the first embed_kernel call for a given process.
31
31
32 32 """
33 33 # get the app if it exists, or set it up if it doesn't
34 34 if IPKernelApp.initialized():
@@ -50,7 +50,7 b' def embed_kernel(module=None, local_ns=None, **kwargs):'
50 50 module = caller_module
51 51 if local_ns is None:
52 52 local_ns = caller_locals
53
53
54 54 app.kernel.user_module = module
55 55 app.kernel.user_ns = local_ns
56 56 app.shell.set_completer_frame()
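Editor's note: the scope-capturing behaviour described above makes the common use a one-liner inside the code under inspection (a sketch; the call blocks serving the kernel, and frontends connect via the written connection file):

    from ipython_kernel.zmq.embed import embed_kernel   # path assumed after the split

    def compute():
        intermediate = 42
        # blocks here; the kernel's user namespace is locals()
        embed_kernel(local_ns=locals())

    compute()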
@@ -14,9 +14,9 b' from IPython.utils import io'
14 14 from IPython.lib.inputhook import _use_appnope
15 15
16 16 def _notify_stream_qt(kernel, stream):
17
17
18 18 from IPython.external.qt_for_kernel import QtCore
19
19
20 20 if _use_appnope() and kernel._darwin_app_nap:
21 21 from appnope import nope_scope as context
22 22 else:
@@ -26,7 +26,7 b' def _notify_stream_qt(kernel, stream):'
26 26 while stream.getsockopt(zmq.EVENTS) & zmq.POLLIN:
27 27 with context():
28 28 kernel.do_one_iteration()
29
29
30 30 fd = stream.getsockopt(zmq.FD)
31 31 notifier = QtCore.QSocketNotifier(fd, QtCore.QSocketNotifier.Read, kernel.app)
32 32 notifier.activated.connect(process_stream_events)
@@ -41,22 +41,22 b' loop_map = {'
41 41
42 42 def register_integration(*toolkitnames):
43 43 """Decorator to register an event loop to integrate with the IPython kernel
44
44
45 45 The decorator takes the names under which to register the event loop for the %gui magic.
46 46 You can provide alternative names for the same toolkit.
47
47
48 48 The decorated function should take a single argument, the IPython kernel
49 49 instance, arrange for the event loop to call ``kernel.do_one_iteration()``
50 50 at least every ``kernel._poll_interval`` seconds, and start the event loop.
51
52 :mod:`IPython.kernel.zmq.eventloops` provides and registers such functions
51
52 :mod:`ipython_kernel.zmq.eventloops` provides and registers such functions
53 53 for a few common event loops.
54 54 """
55 55 def decorator(func):
56 56 for name in toolkitnames:
57 57 loop_map[name] = func
58 58 return func
59
59
60 60 return decorator
61 61
62 62
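Editor's note: per the contract above, a registered loop function receives the kernel and must keep do_one_iteration() running; a toolkit-free sketch, where the busy loop stands in for a real GUI event loop:

    from ipython_kernel.zmq.eventloops import register_integration

    @register_integration('busyloop')    # usable afterwards as %gui busyloop
    def loop_busy(kernel):
        import time
        while True:                      # a real toolkit's event loop goes here
            kernel.do_one_iteration()
            time.sleep(kernel._poll_interval)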
@@ -68,10 +68,10 b' def loop_qt4(kernel):'
68 68
69 69 kernel.app = get_app_qt4([" "])
70 70 kernel.app.setQuitOnLastWindowClosed(False)
71
71
72 72 for s in kernel.shell_streams:
73 73 _notify_stream_qt(kernel, s)
74
74
75 75 start_event_loop_qt4(kernel.app)
76 76
77 77 @register_integration('qt5')
@@ -87,7 +87,7 b' def loop_wx(kernel):'
87 87
88 88 import wx
89 89 from IPython.lib.guisupport import start_event_loop_wx
90
90
91 91 if _use_appnope() and kernel._darwin_app_nap:
92 92 # we don't hook up App Nap contexts for Wx,
93 93 # just disable it outright.
@@ -197,7 +197,7 b' def loop_cocoa(kernel):'
197 197 "you must use matplotlib >= 1.1.0, or a native libtk."
198 198 )
199 199 return loop_tk(kernel)
200
200
201 201 from matplotlib.backends.backend_macosx import TimerMac, show
202 202
203 203 # scale interval for sec->ms
1 NO CONTENT: file renamed from IPython/kernel/zmq/gui/__init__.py to ipython_kernel/zmq/gui/__init__.py
@@ -38,7 +38,7 b' class GTKEmbed(object):'
38 38
39 39 def _wire_kernel(self):
40 40 """Initializes the kernel inside GTK.
41
41
42 42 This is meant to run only once at startup, so it does its job and
43 43 returns False to ensure it doesn't get run again by GTK.
44 44 """
@@ -46,7 +46,7 b' class GTKEmbed(object):'
46 46 GObject.timeout_add(int(1000*self.kernel._poll_interval),
47 47 self.iterate_kernel)
48 48 return False
49
49
50 50 def iterate_kernel(self):
51 51 """Run one iteration of the kernel and return True.
52 52
@@ -39,7 +39,7 b' class GTKEmbed(object):'
39 39
40 40 def _wire_kernel(self):
41 41 """Initializes the kernel inside GTK.
42
42
43 43 This is meant to run only once at startup, so it does its job and
44 44 returns False to ensure it doesn't get run again by GTK.
45 45 """
@@ -47,7 +47,7 b' class GTKEmbed(object):'
47 47 gobject.timeout_add(int(1000*self.kernel._poll_interval),
48 48 self.iterate_kernel)
49 49 return False
50
50
51 51 def iterate_kernel(self):
52 52 """Run one iteration of the kernel and return True.
53 53
1 NO CONTENT: file renamed from IPython/kernel/zmq/heartbeat.py to ipython_kernel/zmq/heartbeat.py
@@ -52,14 +52,14 b' class OutStream(object):'
52 52 self._pipe_flag = pipe
53 53 if pipe:
54 54 self._setup_pipe_in()
55
55
56 56 def _setup_pipe_in(self):
57 57 """setup listening pipe for subprocesses"""
58 58 ctx = self.pub_socket.context
59
59
60 60 # use UUID to authenticate pipe messages
61 61 self._pipe_uuid = uuid.uuid4().bytes
62
62
63 63 self._pipe_in = ctx.socket(zmq.PULL)
64 64 self._pipe_in.linger = 0
65 65 try:
@@ -81,7 +81,7 b' class OutStream(object):'
81 81 lambda s, event: self.flush(),
82 82 IOLoop.READ,
83 83 )
84
84
85 85 def _setup_pipe_out(self):
86 86 # must be new context after fork
87 87 ctx = zmq.Context()
@@ -89,13 +89,13 b' class OutStream(object):'
89 89 self._pipe_out = ctx.socket(zmq.PUSH)
90 90 self._pipe_out_lock = threading.Lock()
91 91 self._pipe_out.connect("tcp://127.0.0.1:%i" % self._pipe_port)
92
92
93 93 def _is_master_process(self):
94 94 return os.getpid() == self._master_pid
95
95
96 96 def _is_master_thread(self):
97 97 return threading.current_thread().ident == self._master_thread
98
98
99 99 def _have_pipe_out(self):
100 100 return os.getpid() == self._pipe_pid
101 101
@@ -136,10 +136,10 b' class OutStream(object):'
136 136 self._start = 0
137 137 else:
138 138 break
139
139
140 140 def _schedule_flush(self):
141 141 """schedule a flush in the main thread
142
142
143 143 only works with a tornado/pyzmq eventloop running
144 144 """
145 145 if IOLoop.initialized():
@@ -147,14 +147,14 b' class OutStream(object):'
147 147 else:
148 148 # no async loop, at least force the timer
149 149 self._start = 0
150
150
151 151 def flush(self):
152 152 """trigger actual zmq send"""
153 153 if self.pub_socket is None:
154 154 raise ValueError(u'I/O operation on closed file')
155
155
156 156 mp_mode = self._check_mp_mode()
157
157
158 158 if mp_mode != CHILD:
159 159 # we are master
160 160 if not self._is_master_thread():
@@ -162,15 +162,15 b' class OutStream(object):'
162 162 # but at least they can schedule an async flush, or force the timer.
163 163 self._schedule_flush()
164 164 return
165
165
166 166 self._flush_from_subprocesses()
167 167 data = self._flush_buffer()
168
168
169 169 if data:
170 170 content = {u'name':self.name, u'text':data}
171 171 msg = self.session.send(self.pub_socket, u'stream', content=content,
172 172 parent=self.parent_header, ident=self.topic)
173
173
174 174 if hasattr(self.pub_socket, 'flush'):
175 175 # socket itself has flush (presumably ZMQStream)
176 176 self.pub_socket.flush()
@@ -200,7 +200,7 b' class OutStream(object):'
200 200
201 201 def readline(self, size=-1):
202 202 raise IOError('Read not supported on a write only stream.')
203
203
204 204 def fileno(self):
205 205 raise UnsupportedOperation("IOStream has no fileno.")
206 206
@@ -211,7 +211,7 b' class OutStream(object):'
211 211 # Make sure that we're handling unicode
212 212 if not isinstance(string, unicode_type):
213 213 string = string.decode(self.encoding, 'replace')
214
214
215 215 is_child = (self._check_mp_mode() == CHILD)
216 216 self._buffer.write(string)
217 217 if is_child:
@@ -243,7 +243,7 b' class OutStream(object):'
243 243 self._buffer.close()
244 244 self._new_buffer()
245 245 return data
246
246
247 247 def _new_buffer(self):
248 248 self._buffer = StringIO()
249 249 self._start = -1
@@ -63,7 +63,7 b' class IPythonKernel(KernelBase):'
63 63 # TMP - hack while developing
64 64 self.shell._reply_content = None
65 65
66 self.comm_manager = CommManager(shell=self.shell, parent=self,
66 self.comm_manager = CommManager(shell=self.shell, parent=self,
67 67 kernel=self)
68 68 self.comm_manager.register_target('ipython.widget', lazy_import_handle_comm_opened)
69 69
@@ -71,7 +71,7 b' class IPythonKernel(KernelBase):'
71 71 comm_msg_types = [ 'comm_open', 'comm_msg', 'comm_close' ]
72 72 for msg_type in comm_msg_types:
73 73 self.shell_handlers[msg_type] = getattr(self.comm_manager, msg_type)
74
74
75 75 help_links = List([
76 76 {
77 77 'text': "Python",
@@ -363,6 +363,6 b' class IPythonKernel(KernelBase):'
363 363 class Kernel(IPythonKernel):
364 364 def __init__(self, *args, **kwargs):
365 365 import warnings
366 warnings.warn('Kernel is a deprecated alias of IPython.kernel.zmq.ipkernel.IPythonKernel',
366 warnings.warn('Kernel is a deprecated alias of ipython_kernel.zmq.ipkernel.IPythonKernel',
367 367 DeprecationWarning)
368 368 super(Kernel, self).__init__(*args, **kwargs)
@@ -28,8 +28,8 b' from IPython.utils.traitlets import ('
28 28 Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type,
29 29 )
30 30 from IPython.utils.importstring import import_item
31 from IPython.kernel import write_connection_file
32 from IPython.kernel.connect import ConnectionFileMixin
31 from jupyter_client import write_connection_file
32 from ipython_kernel.connect import ConnectionFileMixin
33 33
34 34 # local imports
35 35 from .heartbeat import Heartbeat
@@ -99,10 +99,10 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
99 99 flags = Dict(kernel_flags)
100 100 classes = [IPythonKernel, ZMQInteractiveShell, ProfileDir, Session]
101 101 # the kernel class, as an importstring
102 kernel_class = Type('IPython.kernel.zmq.ipkernel.IPythonKernel', config=True,
103 klass='IPython.kernel.zmq.kernelbase.Kernel',
102 kernel_class = Type('ipython_kernel.zmq.ipkernel.IPythonKernel', config=True,
103 klass='ipython_kernel.zmq.kernelbase.Kernel',
104 104 help="""The Kernel subclass to be used.
105
105
106 106 This should allow easy re-use of the IPKernelApp entry point
107 107 to configure and launch kernels other than IPython's own.
108 108 """)
@@ -110,23 +110,23 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
110 110 poller = Any() # don't restrict this even though current pollers are all Threads
111 111 heartbeat = Instance(Heartbeat, allow_none=True)
112 112 ports = Dict()
113
113
114 114 # connection info:
115
115
116 116 @property
117 117 def abs_connection_file(self):
118 118 if os.path.basename(self.connection_file) == self.connection_file:
119 119 return os.path.join(self.profile_dir.security_dir, self.connection_file)
120 120 else:
121 121 return self.connection_file
122
122
123 123
124 124 # streams, etc.
125 125 no_stdout = Bool(False, config=True, help="redirect stdout to the null device")
126 126 no_stderr = Bool(False, config=True, help="redirect stderr to the null device")
127 outstream_class = DottedObjectName('IPython.kernel.zmq.iostream.OutStream',
127 outstream_class = DottedObjectName('ipython_kernel.zmq.iostream.OutStream',
128 128 config=True, help="The importstring for the OutStream factory")
129 displayhook_class = DottedObjectName('IPython.kernel.zmq.displayhook.ZMQDisplayHook',
129 displayhook_class = DottedObjectName('ipython_kernel.zmq.displayhook.ZMQDisplayHook',
130 130 config=True, help="The importstring for the DisplayHook factory")
131 131
132 132 # polling
@@ -177,7 +177,7 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
177 177 write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
178 178 shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
179 179 iopub_port=self.iopub_port, control_port=self.control_port)
180
180
181 181 def cleanup_connection_file(self):
182 182 cf = self.abs_connection_file
183 183 self.log.debug("Cleaning up connection file: %s", cf)
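Editor's note: write_connection_file, imported from jupyter_client after this split, fills in unspecified ports and returns the path plus the resulting config (a sketch; the filename is illustrative):

    from jupyter_client import write_connection_file

    # ports left unspecified are auto-assigned
    fname, cfg = write_connection_file('kernel-test.json', ip='127.0.0.1')
    print(fname, cfg['shell_port'])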
@@ -185,9 +185,9 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
185 185 os.remove(cf)
186 186 except (IOError, OSError):
187 187 pass
188
188
189 189 self.cleanup_ipc_files()
190
190
191 191 def init_connection_file(self):
192 192 if not self.connection_file:
193 193 self.connection_file = "kernel-%s.json"%os.getpid()
@@ -203,7 +203,7 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
203 203 except Exception:
204 204 self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
205 205 self.exit(1)
206
206
207 207 def init_sockets(self):
208 208 # Create a context, a session, and the kernel sockets.
209 209 self.log.info("Starting the kernel at pid: %i", os.getpid())
@@ -230,7 +230,7 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
230 230 self.control_socket.linger = 1000
231 231 self.control_port = self._bind_socket(self.control_socket, self.control_port)
232 232 self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
233
233
234 234 def init_heartbeat(self):
235 235 """start the heart beating"""
236 236 # heartbeat doesn't share context, because it mustn't be blocked
@@ -240,7 +240,7 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
240 240 self.hb_port = self.heartbeat.port
241 241 self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
242 242 self.heartbeat.start()
243
243
244 244 def log_connection_info(self):
245 245 """display connection info, and store ports"""
246 246 basename = os.path.basename(self.connection_file)
@@ -280,7 +280,7 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
280 280 sys.stdout = sys.__stdout__ = blackhole
281 281 if self.no_stderr:
282 282 sys.stderr = sys.__stderr__ = blackhole
283
283
284 284 def init_io(self):
285 285 """Redirect input streams and set a display hook."""
286 286 if self.outstream_class:
@@ -298,7 +298,7 b' class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,'
298 298 """Create the Kernel object itself"""
299 299 shell_stream = ZMQStream(self.shell_socket)
300 300 control_stream = ZMQStream(self.control_socket)
301
301
302 302 kernel_factory = self.kernel_class.instance
303 303
304 304 kernel = kernel_factory(parent=self, session=self.session,
@@ -63,15 +63,15 b' class Kernel(SingletonConfigurable):'
63 63 # This should be overridden by wrapper kernels that implement any real
64 64 # language.
65 65 language_info = {}
66
66
67 67 # any links that should go in the help menu
68 68 help_links = List()
69 69
70 70 # Private interface
71
71
72 72 _darwin_app_nap = Bool(True, config=True,
73 73 help="""Whether to use appnope for compatiblity with OS X App Nap.
74
74
75 75 Only affects OS X >= 10.9.
76 76 """
77 77 )
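Editor's note: being config=True, the trait above can be set from a profile config file; a sketch assuming the standard traitlets config mechanism and the usual ipython_kernel_config.py filename:

    # in ipython_kernel_config.py (filename assumed):
    c = get_config()
    c.IPythonKernel._darwin_app_nap = False   # opt out of App Nap handling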
@@ -126,13 +126,13 b' class Kernel(SingletonConfigurable):'
126 126 self.shell_handlers = {}
127 127 for msg_type in msg_types:
128 128 self.shell_handlers[msg_type] = getattr(self, msg_type)
129
129
130 130 control_msg_types = msg_types + [ 'clear_request', 'abort_request' ]
131 131 self.control_handlers = {}
132 132 for msg_type in control_msg_types:
133 133 self.control_handlers[msg_type] = getattr(self, msg_type)
134 134
135
135
136 136 def dispatch_control(self, msg):
137 137 """dispatch control requests"""
138 138 idents,msg = self.session.feed_identities(msg, copy=False)
@@ -143,11 +143,11 b' class Kernel(SingletonConfigurable):'
143 143 return
144 144
145 145 self.log.debug("Control received: %s", msg)
146
146
147 147 # Set the parent message for side effects.
148 148 self.set_parent(idents, msg)
149 149 self._publish_status(u'busy')
150
150
151 151 header = msg['header']
152 152 msg_type = header['msg_type']
153 153
@@ -159,17 +159,17 b' class Kernel(SingletonConfigurable):'
159 159 handler(self.control_stream, idents, msg)
160 160 except Exception:
161 161 self.log.error("Exception in control handler:", exc_info=True)
162
162
163 163 sys.stdout.flush()
164 164 sys.stderr.flush()
165 165 self._publish_status(u'idle')
166
166
167 167 def dispatch_shell(self, stream, msg):
168 168 """dispatch shell requests"""
169 169 # flush control requests first
170 170 if self.control_stream:
171 171 self.control_stream.flush()
172
172
173 173 idents,msg = self.session.feed_identities(msg, copy=False)
174 174 try:
175 175 msg = self.session.deserialize(msg, content=True, copy=False)
@@ -180,11 +180,11 b' class Kernel(SingletonConfigurable):'
180 180 # Set the parent message for side effects.
181 181 self.set_parent(idents, msg)
182 182 self._publish_status(u'busy')
183
183
184 184 header = msg['header']
185 185 msg_id = header['msg_id']
186 186 msg_type = msg['header']['msg_type']
187
187
188 188 # Print some info about this message and leave a '--->' marker, so it's
189 189 # easier to trace visually the message chain when debugging. Each
190 190 # handler prints its message at the end.
@@ -201,7 +201,7 b' class Kernel(SingletonConfigurable):'
201 201 self.session.send(stream, reply_type, metadata=md,
202 202 content=status, parent=msg, ident=idents)
203 203 return
204
204
205 205 handler = self.shell_handlers.get(msg_type, None)
206 206 if handler is None:
207 207 self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type)
@@ -215,11 +215,11 b' class Kernel(SingletonConfigurable):'
215 215 self.log.error("Exception in message handler:", exc_info=True)
216 216 finally:
217 217 signal(SIGINT, sig)
218
218
219 219 sys.stdout.flush()
220 220 sys.stderr.flush()
221 221 self._publish_status(u'idle')
222
222
223 223 def enter_eventloop(self):
224 224 """enter eventloop"""
225 225 self.log.info("entering eventloop %s", self.eventloop)
@@ -257,7 +257,7 b' class Kernel(SingletonConfigurable):'
257 257
258 258 # publish idle status
259 259 self._publish_status('starting')
260
260
261 261 def do_one_iteration(self):
262 262 """step eventloop just once"""
263 263 if self.control_stream:
@@ -279,7 +279,7 b' class Kernel(SingletonConfigurable):'
279 279 #---------------------------------------------------------------------------
280 280 # Kernel request handlers
281 281 #---------------------------------------------------------------------------
282
282
283 283 def _make_metadata(self, other=None):
284 284 """init metadata dict, for execute/apply_reply"""
285 285 new_md = {
@@ -290,7 +290,7 b' class Kernel(SingletonConfigurable):'
290 290 if other:
291 291 new_md.update(other)
292 292 return new_md
293
293
294 294 def _publish_execute_input(self, code, parent, execution_count):
295 295 """Publish the code request on the iopub stream."""
296 296
@@ -298,7 +298,7 b' class Kernel(SingletonConfigurable):'
298 298 {u'code':code, u'execution_count': execution_count},
299 299 parent=parent, ident=self._topic('execute_input')
300 300 )
301
301
302 302 def _publish_status(self, status, parent=None):
303 303 """send status (busy/idle) on IOPub"""
304 304 self.session.send(self.iopub_socket,
@@ -307,35 +307,35 b' class Kernel(SingletonConfigurable):'
307 307 parent=parent or self._parent_header,
308 308 ident=self._topic('status'),
309 309 )
310
310
311 311 def set_parent(self, ident, parent):
312 312 """Set the current parent_header
313
313
314 314 Side effects (IOPub messages) and replies are associated with
315 315 the request that caused them via the parent_header.
316
316
317 317 The parent identity is used to route input_request messages
318 318 on the stdin channel.
319 319 """
320 320 self._parent_ident = ident
321 321 self._parent_header = parent
322
322
323 323 def send_response(self, stream, msg_or_type, content=None, ident=None,
324 324 buffers=None, track=False, header=None, metadata=None):
325 325 """Send a response to the message we're currently processing.
326
327 This accepts all the parameters of :meth:`IPython.kernel.zmq.session.Session.send`
326
327 This accepts all the parameters of :meth:`ipython_kernel.zmq.session.Session.send`
328 328 except ``parent``.
329
329
330 330 This relies on :meth:`set_parent` having been called for the current
331 331 message.
332 332 """
333 333 return self.session.send(stream, msg_or_type, content, self._parent_header,
334 334 ident, buffers, track, header, metadata)
335
335
336 336 def execute_request(self, stream, ident, parent):
337 337 """handle an execute_request"""
338
338
339 339 try:
340 340 content = parent[u'content']
341 341 code = py3compat.cast_unicode_py2(content[u'code'])
@@ -347,17 +347,17 b' class Kernel(SingletonConfigurable):'
347 347 self.log.error("Got bad msg: ")
348 348 self.log.error("%s", parent)
349 349 return
350
350
351 351 stop_on_error = content.get('stop_on_error', True)
352 352
353 353 md = self._make_metadata(parent['metadata'])
354
354
355 355 # Re-broadcast our input for the benefit of listening clients, and
356 356 # start computing output
357 357 if not silent:
358 358 self.execution_count += 1
359 359 self._publish_execute_input(code, parent, self.execution_count)
360
360
361 361 reply_content = self.do_execute(code, silent, store_history,
362 362 user_expressions, allow_stdin)
363 363
@@ -372,7 +372,7 b' class Kernel(SingletonConfigurable):'
372 372
373 373 # Send the reply.
374 374 reply_content = json_clean(reply_content)
375
375
376 376 md['status'] = reply_content['status']
377 377 if reply_content['status'] == 'error' and \
378 378 reply_content['ename'] == 'UnmetDependency':
@@ -381,7 +381,7 b' class Kernel(SingletonConfigurable):'
381 381 reply_msg = self.session.send(stream, u'execute_reply',
382 382 reply_content, parent, metadata=md,
383 383 ident=ident)
384
384
385 385 self.log.debug("%s", reply_msg)
386 386
387 387 if not silent and reply_msg['content']['status'] == u'error' and stop_on_error:
@@ -397,7 +397,7 b' class Kernel(SingletonConfigurable):'
397 397 content = parent['content']
398 398 code = content['code']
399 399 cursor_pos = content['cursor_pos']
400
400
401 401 matches = self.do_complete(code, cursor_pos)
402 402 matches = json_clean(matches)
403 403 completion_msg = self.session.send(stream, 'complete_reply',
@@ -415,7 +415,7 b' class Kernel(SingletonConfigurable):'
415 415
416 416 def inspect_request(self, stream, ident, parent):
417 417 content = parent['content']
418
418
419 419 reply_content = self.do_inspect(content['code'], content['cursor_pos'],
420 420 content.get('detail_level', 0))
421 421 # Before we send this object over, we scrub it for JSON usage
@@ -488,11 +488,11 b' class Kernel(SingletonConfigurable):'
488 488 kernel.
489 489 """
490 490 return {'status': 'ok', 'restart': restart}
491
491
492 492 def is_complete_request(self, stream, ident, parent):
493 493 content = parent['content']
494 494 code = content['code']
495
495
496 496 reply_content = self.do_is_complete(code)
497 497 reply_content = json_clean(reply_content)
498 498 reply_msg = self.session.send(stream, 'is_complete_reply',
@@ -528,7 +528,7 b' class Kernel(SingletonConfigurable):'
528 528 # flush i/o
529 529 sys.stdout.flush()
530 530 sys.stderr.flush()
531
531
532 532 self.session.send(stream, u'apply_reply', reply_content,
533 533 parent=parent, ident=ident,buffers=result_buf, metadata=md)
534 534
@@ -564,7 +564,7 b' class Kernel(SingletonConfigurable):'
564 564
565 565 def do_clear(self):
566 566 """Override in subclasses to clear the namespace
567
567
568 568 This is only required for IPython.parallel.
569 569 """
570 570 raise NotImplementedError
@@ -579,9 +579,9 b' class Kernel(SingletonConfigurable):'
579 579 base = "engine.%i" % self.int_id
580 580 else:
581 581 base = "kernel.%s" % self.ident
582
582
583 583 return py3compat.cast_bytes("%s.%s" % (base, topic))
584
584
585 585 def _abort_queues(self):
586 586 for stream in self.shell_streams:
587 587 if stream:
@@ -615,11 +615,11 b' class Kernel(SingletonConfigurable):'
615 615 """Raise StdinNotImplentedError if active frontend doesn't support
616 616 stdin."""
617 617 raise StdinNotImplementedError("raw_input was called, but this "
618 "frontend does not support stdin.")
619
618 "frontend does not support stdin.")
619
620 620 def getpass(self, prompt=''):
621 621 """Forward getpass to frontends
622
622
623 623 Raises
624 624 ------
625 625 StdinNotImplementedError if active frontend doesn't support stdin.
@@ -633,10 +633,10 b' class Kernel(SingletonConfigurable):'
633 633 self._parent_header,
634 634 password=True,
635 635 )
636
636
637 637 def raw_input(self, prompt=''):
638 638 """Forward raw_input to frontends
639
639
640 640 Raises
641 641 ------
642 642 StdinNotImplementedError if active frontend doesn't support stdin.
@@ -650,7 +650,7 b' class Kernel(SingletonConfigurable):'
650 650 self._parent_header,
651 651 password=False,
652 652 )
653
653
654 654 def _input_request(self, prompt, ident, parent, password=False):
655 655 # Flush output before making the request.
656 656 sys.stderr.flush()
@@ -664,7 +664,7 b' class Kernel(SingletonConfigurable):'
664 664 break
665 665 else:
666 666 raise
667
667
668 668 # Send the input request.
669 669 content = json_clean(dict(prompt=prompt, password=password))
670 670 self.session.send(self.stdin_socket, u'input_request', content, parent,
@@ -5,11 +5,11 b' from zmq.log.handlers import PUBHandler'
5 5 class EnginePUBHandler(PUBHandler):
6 6 """A simple PUBHandler subclass that sets root_topic"""
7 7 engine=None
8
8
9 9 def __init__(self, engine, *args, **kwargs):
10 10 PUBHandler.__init__(self,*args, **kwargs)
11 11 self.engine = engine
12
12
13 13 @property
14 14 def root_topic(self):
15 15 """this is a property, in case the handler is created
@@ -18,4 +18,3 b' class EnginePUBHandler(PUBHandler):'
18 18 return "engine.%i"%self.engine.id
19 19 else:
20 20 return "engine"
21
1 NO CONTENT: file renamed from IPython/kernel/zmq/parentpoller.py to ipython_kernel/zmq/parentpoller.py
1 NO CONTENT: file renamed from IPython/kernel/zmq/pylab/__init__.py to ipython_kernel/zmq/pylab/__init__.py
@@ -65,7 +65,7 b' def draw_if_interactive():'
65 65 # For further reference:
66 66 # https://github.com/ipython/ipython/issues/1612
67 67 # https://github.com/matplotlib/matplotlib/issues/835
68
68
69 69 if not hasattr(fig, 'show'):
70 70 # Queue up `fig` for display
71 71 fig.show = lambda *a: display(fig)
@@ -94,7 +94,7 b' def flush_figures():'
94 94
95 95 This is meant to be called automatically and will call show() if, during
96 96 prior code execution, there had been any calls to draw_if_interactive.
97
97
98 98 This function is meant to be used as a post_execute callback in IPython,
99 99 so user-caused errors are handled with showtraceback() instead of being
100 100 allowed to raise. If this function is not called from within IPython,
@@ -102,7 +102,7 b' def flush_figures():'
102 102 """
103 103 if not show._draw_called:
104 104 return
105
105
106 106 if InlineBackend.instance().close_figures:
107 107 # ignore the tracking, just draw and close all figures
108 108 try:
@@ -139,4 +139,3 b' def flush_figures():'
139 139 # figurecanvas. This is set here to a Agg canvas
140 140 # See https://github.com/matplotlib/matplotlib/pull/1125
141 141 FigureCanvas = FigureCanvasAgg
142
@@ -68,14 +68,14 b' class InlineBackend(InlineBackendConfig):'
68 68 )
69 69
70 70 figure_formats = Set({'png'}, config=True,
71 help="""A set of figure formats to enable: 'png',
71 help="""A set of figure formats to enable: 'png',
72 72 'retina', 'jpeg', 'svg', 'pdf'.""")
73 73
74 74 def _update_figure_formatters(self):
75 75 if self.shell is not None:
76 76 from IPython.core.pylabtools import select_figure_formats
77 77 select_figure_formats(self.shell, self.figure_formats, **self.print_figure_kwargs)
78
78
79 79 def _figure_formats_changed(self, name, old, new):
80 80 if 'jpg' in new or 'jpeg' in new:
81 81 if not pil_available():
@@ -91,20 +91,20 b' class InlineBackend(InlineBackendConfig):'
91 91
92 92 print_figure_kwargs = Dict({'bbox_inches' : 'tight'}, config=True,
93 93 help="""Extra kwargs to be passed to fig.canvas.print_figure.
94
94
95 95 Logical examples include: bbox_inches, quality (for jpeg figures), etc.
96 96 """
97 97 )
98 98 _print_figure_kwargs_changed = _update_figure_formatters
99
99
100 100 close_figures = Bool(True, config=True,
101 101 help="""Close all figures at the end of each cell.
102
102
103 103 When True, ensures that each cell starts with no active figures, but it
104 104 also means that one must keep track of references in order to edit or
105 105 redraw figures in subsequent cells. This mode is ideal for the notebook,
106 106 where residual plots from other cells might be surprising.
107
107
108 108 When False, one must call figure() to create new figures. This means
109 109 that gcf() and getfigs() can reference figures created in other cells,
110 110 and the active figure can continue to be edited with pylab/pyplot
@@ -117,4 +117,3 b' class InlineBackend(InlineBackendConfig):'
117 117 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
118 118 allow_none=True)
119 119
120
@@ -17,7 +17,7 b' from IPython.utils.pickleutil import ('
17 17 can, uncan, can_sequence, uncan_sequence, CannedObject,
18 18 istype, sequence_types, PICKLE_PROTOCOL,
19 19 )
20 from .session import MAX_ITEMS, MAX_BYTES
20 from jupyter_client.session import MAX_ITEMS, MAX_BYTES
21 21
22 22
23 23 if PY3:
@@ -52,10 +52,10 b' def _restore_buffers(obj, buffers):'
52 52
53 53 def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
54 54 """Serialize an object into a list of sendable buffers.
55
55
56 56 Parameters
57 57 ----------
58
58
59 59 obj : object
60 60 The object to be serialized
61 61 buffer_threshold : int
@@ -65,7 +65,7 b' def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):'
65 65 The maximum number of items over which canning will iterate.
66 66 Containers (lists, dicts) larger than this will be pickled without
67 67 introspection.
68
68
69 69 Returns
70 70 -------
71 71 [bufs] : list of buffers representing the serialized object.
@@ -90,17 +90,17 b' def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):'
90 90
91 91 def deserialize_object(buffers, g=None):
92 92 """reconstruct an object serialized by serialize_object from data buffers.
93
93
94 94 Parameters
95 95 ----------
96
96
97 97 bufs : list of buffers/bytes
98
98
99 99 g : globals to be used when uncanning
100
100
101 101 Returns
102 102 -------
103
103
104 104 (newobj, bufs) : unpacked object, and the list of remaining unused buffers.
105 105 """
106 106 bufs = list(buffers)
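Editor's note: a round-trip sketch tying serialize_object to deserialize_object above (numpy arrays are the canonical zero-copy case mentioned later in this file):

    import numpy as np
    from ipython_kernel.zmq.serialize import serialize_object, deserialize_object

    arr = np.arange(1024)
    bufs = serialize_object(arr)             # [canned metadata, raw data buffer]
    obj, leftover = deserialize_object(bufs)
    assert (obj == arr).all() and leftover == []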
@@ -119,37 +119,37 b' def deserialize_object(buffers, g=None):'
119 119 else:
120 120 _restore_buffers(canned, bufs)
121 121 newobj = uncan(canned, g)
122
122
123 123 return newobj, bufs
124 124
125 125 def pack_apply_message(f, args, kwargs, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
126 126 """pack up a function, args, and kwargs to be sent over the wire
127
127
128 128 Each element of args/kwargs will be canned for special treatment,
129 129 but inspection will not go any deeper than that.
130
130
131 131 Any object whose data is larger than `threshold` will not have their data copied
132 132 (only numpy arrays and bytes/buffers support zero-copy)
133
133
134 134 Message will be a list of bytes/buffers of the format:
135
135
136 136 [ cf, pinfo, <arg_bufs>, <kwarg_bufs> ]
137
137
138 138 With length at least two + len(args) + len(kwargs)
139 139 """
140
140
141 141 arg_bufs = flatten(serialize_object(arg, buffer_threshold, item_threshold) for arg in args)
142
142
143 143 kw_keys = sorted(kwargs.keys())
144 144 kwarg_bufs = flatten(serialize_object(kwargs[key], buffer_threshold, item_threshold) for key in kw_keys)
145
145
146 146 info = dict(nargs=len(args), narg_bufs=len(arg_bufs), kw_keys=kw_keys)
147
147
148 148 msg = [pickle.dumps(can(f), PICKLE_PROTOCOL)]
149 149 msg.append(pickle.dumps(info, PICKLE_PROTOCOL))
150 150 msg.extend(arg_bufs)
151 151 msg.extend(kwarg_bufs)
152
152
153 153 return msg
154 154
155 155 def unpack_apply_message(bufs, g=None, copy=True):
@@ -162,19 +162,18 b' def unpack_apply_message(bufs, g=None, copy=True):'
162 162 pinfo = buffer_to_bytes_py2(bufs.pop(0))
163 163 info = pickle.loads(pinfo)
164 164 arg_bufs, kwarg_bufs = bufs[:info['narg_bufs']], bufs[info['narg_bufs']:]
165
165
166 166 args = []
167 167 for i in range(info['nargs']):
168 168 arg, arg_bufs = deserialize_object(arg_bufs, g)
169 169 args.append(arg)
170 170 args = tuple(args)
171 171 assert not arg_bufs, "Shouldn't be any arg bufs left over"
172
172
173 173 kwargs = {}
174 174 for key in info['kw_keys']:
175 175 kwarg, kwarg_bufs = deserialize_object(kwarg_bufs, g)
176 176 kwargs[key] = kwarg
177 177 assert not kwarg_bufs, "Shouldn't be any kwarg bufs left over"
178
179 return f,args,kwargs
180 178
179 return f,args,kwargs
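A sketch tying pack_apply_message and unpack_apply_message together (assumed module path as above; a simple closure-free function like this one should survive canning):

    from ipython_kernel.zmq.serialize import pack_apply_message, unpack_apply_message

    def add(a, b=1):
        return a + b

    # msg is [cf, pinfo, <arg_bufs>, <kwarg_bufs>] per the docstring above
    msg = pack_apply_message(add, (5,), {'b': 2})
    f, args, kwargs = unpack_apply_message(msg)
    assert f(*args, **kwargs) == 7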
1 NO CONTENT: file renamed from IPython/kernel/zmq/tests/__init__.py to ipython_kernel/zmq/tests/__init__.py
@@ -22,7 +22,7 b' from subprocess import Popen, PIPE'
22 22
23 23 import nose.tools as nt
24 24
25 from IPython.kernel import BlockingKernelClient
25 from jupyter_client import BlockingKernelClient
26 26 from IPython.utils import path, py3compat
27 27 from IPython.utils.py3compat import unicode_type
28 28
@@ -38,7 +38,7 b' def setup():'
38 38 global IPYTHONDIR
39 39 global env
40 40 global save_get_ipython_dir
41
41
42 42 IPYTHONDIR = tempfile.mkdtemp()
43 43
44 44 env = os.environ.copy()
@@ -50,7 +50,7 b' def setup():'
50 50
51 51 def teardown():
52 52 path.get_ipython_dir = save_get_ipython_dir
53
53
54 54 try:
55 55 shutil.rmtree(IPYTHONDIR)
56 56 except (OSError, IOError):
@@ -61,7 +61,7 b' def teardown():'
61 61 @contextmanager
62 62 def setup_kernel(cmd):
63 63 """start an embedded kernel in a subprocess, and wait for it to be ready
64
64
65 65 Returns
66 66 -------
67 67 kernel_manager: connected KernelManager instance
@@ -78,22 +78,22 b' def setup_kernel(cmd):'
78 78 and kernel.poll() is None \
79 79 and time.time() < tic + SETUP_TIMEOUT:
80 80 time.sleep(0.1)
81
81
82 82 if kernel.poll() is not None:
83 83 o,e = kernel.communicate()
84 84 e = py3compat.cast_unicode(e)
85 85 raise IOError("Kernel failed to start:\n%s" % e)
86
86
87 87 if not os.path.exists(connection_file):
88 88 if kernel.poll() is None:
89 89 kernel.terminate()
90 90 raise IOError("Connection file %r never arrived" % connection_file)
91
91
92 92 client = BlockingKernelClient(connection_file=connection_file)
93 93 client.load_connection_file()
94 94 client.start_channels()
95 95 client.wait_for_ready()
96
96
97 97 try:
98 98 yield client
99 99 finally:
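For reference, a minimal use of this helper (a sketch; the real tests below build richer cmd strings, and embed_kernel is assumed importable from IPython as in those tests):

    cmd = '\n'.join([
        'from IPython import embed_kernel',
        'embed_kernel()',
    ])
    with setup_kernel(cmd) as client:
        client.execute('x = 1 + 1')
        reply = client.get_shell_msg(block=True, timeout=TIMEOUT)
        assert reply['content']['status'] == 'ok'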
@@ -111,14 +111,14 b' def test_embed_kernel_basic():'
111 111 'go()',
112 112 '',
113 113 ])
114
114
115 115 with setup_kernel(cmd) as client:
116 116 # oinfo a (int)
117 117 msg_id = client.inspect('a')
118 118 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
119 119 content = msg['content']
120 120 nt.assert_true(content['found'])
121
121
122 122 msg_id = client.execute("c=a*2")
123 123 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
124 124 content = msg['content']
@@ -143,7 +143,7 b' def test_embed_kernel_namespace():'
143 143 'go()',
144 144 '',
145 145 ])
146
146
147 147 with setup_kernel(cmd) as client:
148 148 # oinfo a (int)
149 149 msg_id = client.inspect('a')
@@ -181,7 +181,7 b' def test_embed_kernel_reentrant():'
181 181 ' go()',
182 182 '',
183 183 ])
184
184
185 185 with setup_kernel(cmd) as client:
186 186 for i in range(5):
187 187 msg_id = client.inspect('count')
@@ -190,10 +190,8 b' def test_embed_kernel_reentrant():'
190 190 nt.assert_true(content['found'])
191 191 text = content['data']['text/plain']
192 192 nt.assert_in(unicode_type(i), text)
193
193
194 194 # exit from embed_kernel
195 195 client.execute("get_ipython().exit_now = True")
196 196 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
197 197 time.sleep(0.2)
198
199
@@ -9,7 +9,7 b' from collections import namedtuple'
9 9 import nose.tools as nt
10 10
11 11 # from unittest import TestCase
12 from IPython.kernel.zmq.serialize import serialize_object, deserialize_object
12 from ipython_kernel.zmq.serialize import serialize_object, deserialize_object
13 13 from IPython.testing import decorators as dec
14 14 from IPython.utils.pickleutil import CannedArray, CannedClass
15 15 from IPython.utils.py3compat import iteritems
@@ -28,7 +28,7 b' def roundtrip(obj):'
28 28
29 29 class C(object):
30 30 """dummy class for """
31
31
32 32 def __init__(self, **kwargs):
33 33 for key,value in iteritems(kwargs):
34 34 setattr(self, key, value)
@@ -98,7 +98,7 b' def test_recarray():'
98 98 [('n', int), ('s', '|S1'), ('u', 'uint32')],
99 99 ]:
100 100 A = new_array(shape, dtype=dtype)
101
101
102 102 bufs = serialize_object(A)
103 103 B, r = deserialize_object(bufs)
104 104 nt.assert_equal(r, [])
@@ -155,7 +155,7 b' def test_class_oldstyle():'
155 155 @interactive
156 156 class C:
157 157 a=5
158
158
159 159 bufs = serialize_object(dict(C=C))
160 160 canned = pickle.loads(bufs[0])
161 161 nt.assert_is_instance(canned['C'], CannedClass)
@@ -198,7 +198,7 b' def test_class_inheritance():'
198 198 @interactive
199 199 class D(C):
200 200 b=10
201
201
202 202 bufs = serialize_object(dict(D=D))
203 203 canned = pickle.loads(bufs[0])
204 204 nt.assert_is_instance(canned['D'], CannedClass)
1 NO CONTENT: file renamed from IPython/kernel/zmq/tests/test_start_kernel.py to ipython_kernel/zmq/tests/test_start_kernel.py
@@ -34,8 +34,8 b' from IPython.core.magic import magics_class, line_magic, Magics'
34 34 from IPython.core import payloadpage
35 35 from IPython.core.usage import default_gui_banner
36 36 from IPython.display import display, Javascript
37 from IPython.kernel.inprocess.socket import SocketABC
38 from IPython.kernel import (
37 from ipython_kernel.inprocess.socket import SocketABC
38 from ipython_kernel import (
39 39 get_connection_file, get_connection_info, connect_qtconsole
40 40 )
41 41 from IPython.testing.skipdoctest import skip_doctest
@@ -46,9 +46,9 b' from IPython.utils import py3compat'
46 46 from IPython.utils.py3compat import unicode_type
47 47 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes, Any
48 48 from IPython.utils.warn import error
49 from IPython.kernel.zmq.displayhook import ZMQShellDisplayHook
50 from IPython.kernel.zmq.datapub import ZMQDataPublisher
51 from IPython.kernel.zmq.session import extract_header
49 from ipython_kernel.zmq.displayhook import ZMQShellDisplayHook
50 from ipython_kernel.zmq.datapub import ZMQDataPublisher
51 from ipython_kernel.zmq.session import extract_header
52 52 from .session import Session
53 53
54 54 #-----------------------------------------------------------------------------
@@ -66,7 +66,7 b' class ZMQDisplayPublisher(DisplayPublisher):'
66 66 def set_parent(self, parent):
67 67 """Set the parent for outbound messages."""
68 68 self.parent_header = extract_header(parent)
69
69
70 70 def _flush_streams(self):
71 71 """flush IO Streams prior to display"""
72 72 sys.stdout.flush()
@@ -102,7 +102,7 b' class KernelMagics(Magics):'
102 102 # moved into a separate machinery as well. For now, at least isolate here
103 103 # the magics which this class needs to implement differently from the base
104 104 # class, or that are unique to it.
105
105
106 106 _find_edit_target = CodeMagics._find_edit_target
107 107
108 108 @skip_doctest
@@ -248,19 +248,19 b' class KernelMagics(Magics):'
248 248 @line_magic
249 249 def connect_info(self, arg_s):
250 250 """Print information for connecting other clients to this kernel
251
251
252 252 It will print the contents of this session's connection file, as well as
253 253 shortcuts for local clients.
254
254
255 255 In the simplest case, when called from the most recently launched kernel,
256 256 secondary clients can be connected, simply with:
257
257
258 258 $> ipython <app> --existing
259
259
260 260 """
261
261
262 262 from IPython.core.application import BaseIPythonApplication as BaseIPApp
263
263
264 264 if BaseIPApp.initialized():
265 265 app = BaseIPApp.instance()
266 266 security_dir = app.profile_dir.security_dir
@@ -268,22 +268,22 b' class KernelMagics(Magics):'
268 268 else:
269 269 profile = 'default'
270 270 security_dir = ''
271
271
272 272 try:
273 273 connection_file = get_connection_file()
274 274 info = get_connection_info(unpack=False)
275 275 except Exception as e:
276 276 error("Could not get connection info: %r" % e)
277 277 return
278
278
279 279 # add profile flag for non-default profile
280 280 profile_flag = "--profile %s" % profile if profile != 'default' else ""
281
281
282 282 # if it's in the security dir, truncate to basename
283 283 if security_dir == os.path.dirname(connection_file):
284 284 connection_file = os.path.basename(connection_file)
285
286
285
286
287 287 print (info + '\n')
288 288 print ("Paste the above JSON into a file, and connect with:\n"
289 289 " $> ipython <app> --existing <file>\n"
@@ -299,11 +299,11 b' class KernelMagics(Magics):'
299 299 @line_magic
300 300 def qtconsole(self, arg_s):
301 301 """Open a qtconsole connected to this kernel.
302
302
303 303 Useful for connecting a qtconsole to running notebooks, for better
304 304 debugging.
305 305 """
306
306
307 307 # %qtconsole should imply bind_kernel for engines:
308 308 try:
309 309 from IPython.parallel import bind_kernel
@@ -312,29 +312,29 b' class KernelMagics(Magics):'
312 312 pass
313 313 else:
314 314 bind_kernel()
315
315
316 316 try:
317 317 p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix'))
318 318 except Exception as e:
319 319 error("Could not start qtconsole: %r" % e)
320 320 return
321
321
322 322 @line_magic
323 323 def autosave(self, arg_s):
324 324 """Set the autosave interval in the notebook (in seconds).
325
325
326 326 The default value is 120, or two minutes.
327 327 ``%autosave 0`` will disable autosave.
328
328
329 329 This magic only has an effect when called from the notebook interface.
330 330 It has no effect when called in a startup file.
331 331 """
332
332
333 333 try:
334 334 interval = int(arg_s)
335 335 except ValueError:
336 336 raise UsageError("%%autosave requires an integer, got %r" % arg_s)
337
337
338 338 # javascript wants milliseconds
339 339 milliseconds = 1000 * interval
340 340 display(Javascript("IPython.notebook.set_autosave_interval(%i)" % milliseconds),
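As the docstring above notes, this only has an effect in the notebook interface, e.g.:

    %autosave 300    # autosave every five minutes
    %autosave 0      # disable autosave entirely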
@@ -354,7 +354,7 b' class ZMQInteractiveShell(InteractiveShell):'
354 354 data_pub_class = Type(ZMQDataPublisher)
355 355 kernel = Any()
356 356 parent_header = Any()
357
357
358 358 def _banner1_default(self):
359 359 return default_gui_banner
360 360
@@ -370,7 +370,7 b' class ZMQInteractiveShell(InteractiveShell):'
370 370 exiter = Instance(ZMQExitAutocall)
371 371 def _exiter_default(self):
372 372 return ZMQExitAutocall(self)
373
373
374 374 def _exit_now_changed(self, name, old, new):
375 375 """stop eventloop when exit_now fires"""
376 376 if new:
@@ -400,11 +400,11 b' class ZMQInteractiveShell(InteractiveShell):'
400 400 # subprocesses as much as possible.
401 401 env['PAGER'] = 'cat'
402 402 env['GIT_PAGER'] = 'cat'
403
403
404 404 def init_hooks(self):
405 405 super(ZMQInteractiveShell, self).init_hooks()
406 406 self.set_hook('show_in_pager', page.as_hook(payloadpage.page), 99)
407
407
408 408 def ask_exit(self):
409 409 """Engage the exit actions."""
410 410 self.exit_now = (not self.keepkernel_on_exit)
@@ -431,7 +431,7 b' class ZMQInteractiveShell(InteractiveShell):'
431 431 topic = None
432 432 if dh.topic:
433 433 topic = dh.topic.replace(b'execute_result', b'error')
434
434
435 435 exc_msg = dh.session.send(dh.pub_socket, u'error', json_clean(exc_content), dh.parent_header, ident=topic)
436 436
437 437 # FIXME - Hack: store exception info in shell object. Right now, the
@@ -454,7 +454,7 b' class ZMQInteractiveShell(InteractiveShell):'
454 454 replace=replace,
455 455 )
456 456 self.payload_manager.write_payload(payload)
457
457
458 458 def set_parent(self, parent):
459 459 """Set the parent header for associating output with its triggering input"""
460 460 self.parent_header = parent
@@ -469,10 +469,10 b' class ZMQInteractiveShell(InteractiveShell):'
469 469 sys.stderr.set_parent(parent)
470 470 except AttributeError:
471 471 pass
472
472
473 473 def get_parent(self):
474 474 return self.parent_header
475
475
476 476 #-------------------------------------------------------------------------
477 477 # Things related to magics
478 478 #-------------------------------------------------------------------------