From c71dcbcdea3b8612d374da76db4d7b62d766c51f 2015-03-28 00:46:25 From: Min RK Date: 2015-03-28 00:46:25 Subject: [PATCH] bigsplit: ipython_kernel --- diff --git a/IPython/kernel/__main__.py b/IPython/kernel/__main__.py index aba3866..93d8472 100644 --- a/IPython/kernel/__main__.py +++ b/IPython/kernel/__main__.py @@ -1,3 +1,3 @@ if __name__ == '__main__': - from IPython.kernel.zmq import kernelapp as app + from ipython_kernel.zmq import kernelapp as app app.launch_new_instance() diff --git a/IPython/kernel/adapter.py b/IPython/kernel/adapter.py new file mode 100644 index 0000000..3b8c046 --- /dev/null +++ b/IPython/kernel/adapter.py @@ -0,0 +1 @@ +from jupyter_client.adapter import * diff --git a/IPython/kernel/channels.py b/IPython/kernel/channels.py new file mode 100644 index 0000000..8c7fe2a --- /dev/null +++ b/IPython/kernel/channels.py @@ -0,0 +1 @@ +from jupyter_client.channels import * diff --git a/IPython/kernel/channelsabc.py b/IPython/kernel/channelsabc.py new file mode 100644 index 0000000..8894401 --- /dev/null +++ b/IPython/kernel/channelsabc.py @@ -0,0 +1 @@ +from jupyter_client.channelsabc import * diff --git a/IPython/kernel/client.py b/IPython/kernel/client.py new file mode 100644 index 0000000..a98690b --- /dev/null +++ b/IPython/kernel/client.py @@ -0,0 +1 @@ +from jupyter_client.client import * diff --git a/IPython/kernel/clientabc.py b/IPython/kernel/clientabc.py new file mode 100644 index 0000000..e0cf06c --- /dev/null +++ b/IPython/kernel/clientabc.py @@ -0,0 +1 @@ +from jupyter_client.clientabc import * diff --git a/IPython/kernel/connect.py b/IPython/kernel/connect.py new file mode 100644 index 0000000..a1686ba --- /dev/null +++ b/IPython/kernel/connect.py @@ -0,0 +1,2 @@ +from ipython_kernel.connect import * +from jupyter_client.connect import * diff --git a/IPython/kernel/kernelspec.py b/IPython/kernel/kernelspec.py new file mode 100644 index 0000000..123419b --- /dev/null +++ b/IPython/kernel/kernelspec.py @@ -0,0 +1 @@ +from jupyter_client.kernelspec import * diff --git a/IPython/kernel/kernelspecapp.py b/IPython/kernel/kernelspecapp.py new file mode 100644 index 0000000..28cd33a --- /dev/null +++ b/IPython/kernel/kernelspecapp.py @@ -0,0 +1 @@ +from jupyter_client.kernelspecapp import * diff --git a/IPython/kernel/launcher.py b/IPython/kernel/launcher.py new file mode 100644 index 0000000..1953bc4 --- /dev/null +++ b/IPython/kernel/launcher.py @@ -0,0 +1 @@ +from jupyter_client.launcher import * diff --git a/IPython/kernel/manager.py b/IPython/kernel/manager.py new file mode 100644 index 0000000..c88097c --- /dev/null +++ b/IPython/kernel/manager.py @@ -0,0 +1 @@ +from jupyter_client.manager import * diff --git a/IPython/kernel/managerabc.py b/IPython/kernel/managerabc.py new file mode 100644 index 0000000..6b40827 --- /dev/null +++ b/IPython/kernel/managerabc.py @@ -0,0 +1 @@ +from jupyter_client.managerabc import * diff --git a/IPython/kernel/multikernelmanager.py b/IPython/kernel/multikernelmanager.py new file mode 100644 index 0000000..ce576e2 --- /dev/null +++ b/IPython/kernel/multikernelmanager.py @@ -0,0 +1 @@ +from jupyter_client.multikernelmanager import * diff --git a/IPython/kernel/restarter.py b/IPython/kernel/restarter.py new file mode 100644 index 0000000..dc24117 --- /dev/null +++ b/IPython/kernel/restarter.py @@ -0,0 +1 @@ +from jupyter_client.restarter import * diff --git a/IPython/kernel/session.py b/IPython/kernel/session.py new file mode 100644 index 0000000..4eeffe6 --- /dev/null +++ b/IPython/kernel/session.py @@ -0,0 +1,883 @@ 
+"""Session object for building, serializing, sending, and receiving messages in +IPython. The Session object supports serialization, HMAC signatures, and +metadata on messages. + +Also defined here are utilities for working with Sessions: +* A SessionFactory to be used as a base class for configurables that work with +Sessions. +* A Message object for convenience that allows attribute-access to the msg dict. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import hashlib +import hmac +import logging +import os +import pprint +import random +import uuid +import warnings +from datetime import datetime + +try: + import cPickle + pickle = cPickle +except: + cPickle = None + import pickle + +try: + # We are using compare_digest to limit the surface of timing attacks + from hmac import compare_digest +except ImportError: + # Python < 2.7.7: When digests don't match no feedback is provided, + # limiting the surface of attack + def compare_digest(a,b): return a == b + +import zmq +from zmq.utils import jsonapi +from zmq.eventloop.ioloop import IOLoop +from zmq.eventloop.zmqstream import ZMQStream + +from IPython.core.release import kernel_protocol_version +from IPython.config.configurable import Configurable, LoggingConfigurable +from IPython.utils import io +from IPython.utils.importstring import import_item +from IPython.utils.jsonutil import extract_dates, squash_dates, date_default +from IPython.utils.py3compat import (str_to_bytes, str_to_unicode, unicode_type, + iteritems) +from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set, + DottedObjectName, CUnicode, Dict, Integer, + TraitError, +) +from IPython.utils.pickleutil import PICKLE_PROTOCOL +from jupyter_client.adapter import adapt + +#----------------------------------------------------------------------------- +# utility functions +#----------------------------------------------------------------------------- + +def squash_unicode(obj): + """coerce unicode back to bytestrings.""" + if isinstance(obj,dict): + for key in obj.keys(): + obj[key] = squash_unicode(obj[key]) + if isinstance(key, unicode_type): + obj[squash_unicode(key)] = obj.pop(key) + elif isinstance(obj, list): + for i,v in enumerate(obj): + obj[i] = squash_unicode(v) + elif isinstance(obj, unicode_type): + obj = obj.encode('utf8') + return obj + +#----------------------------------------------------------------------------- +# globals and defaults +#----------------------------------------------------------------------------- + +# default values for the thresholds: +MAX_ITEMS = 64 +MAX_BYTES = 1024 + +# ISO8601-ify datetime objects +# allow unicode +# disallow nan, because it's not actually valid JSON +json_packer = lambda obj: jsonapi.dumps(obj, default=date_default, + ensure_ascii=False, allow_nan=False, +) +json_unpacker = lambda s: jsonapi.loads(s) + +pickle_packer = lambda o: pickle.dumps(squash_dates(o), PICKLE_PROTOCOL) +pickle_unpacker = pickle.loads + +default_packer = json_packer +default_unpacker = json_unpacker + +DELIM = b"" +# singleton dummy tracker, which will always report as done +DONE = zmq.MessageTracker() + +#----------------------------------------------------------------------------- +# Mixin tools for apps that use Sessions +#----------------------------------------------------------------------------- + +session_aliases = dict( + ident = 'Session.session', + user = 'Session.username', + keyfile = 'Session.keyfile', +) + +session_flags = { + 'secure' : 
({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())), + 'keyfile' : '' }}, + """Use HMAC digests for authentication of messages. + Setting this flag will generate a new UUID to use as the HMAC key. + """), + 'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }}, + """Don't authenticate messages."""), +} + +def default_secure(cfg): + """Set the default behavior for a config environment to be secure. + + If Session.key/keyfile have not been set, set Session.key to + a new random UUID. + """ + warnings.warn("default_secure is deprecated", DeprecationWarning) + if 'Session' in cfg: + if 'key' in cfg.Session or 'keyfile' in cfg.Session: + return + # key/keyfile not specified, generate new UUID: + cfg.Session.key = str_to_bytes(str(uuid.uuid4())) + + +#----------------------------------------------------------------------------- +# Classes +#----------------------------------------------------------------------------- + +class SessionFactory(LoggingConfigurable): + """The Base class for configurables that have a Session, Context, logger, + and IOLoop. + """ + + logname = Unicode('') + def _logname_changed(self, name, old, new): + self.log = logging.getLogger(new) + + # not configurable: + context = Instance('zmq.Context') + def _context_default(self): + return zmq.Context.instance() + + session = Instance('jupyter_client.session.Session') + + loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False) + def _loop_default(self): + return IOLoop.instance() + + def __init__(self, **kwargs): + super(SessionFactory, self).__init__(**kwargs) + + if self.session is None: + # construct the session + self.session = Session(**kwargs) + + +class Message(object): + """A simple message object that maps dict keys to attributes. + + A Message can be created from a dict and a dict from a Message instance + simply by calling dict(msg_obj).""" + + def __init__(self, msg_dict): + dct = self.__dict__ + for k, v in iteritems(dict(msg_dict)): + if isinstance(v, dict): + v = Message(v) + dct[k] = v + + # Having this iterator lets dict(msg_obj) work out of the box. + def __iter__(self): + return iter(iteritems(self.__dict__)) + + def __repr__(self): + return repr(self.__dict__) + + def __str__(self): + return pprint.pformat(self.__dict__) + + def __contains__(self, k): + return k in self.__dict__ + + def __getitem__(self, k): + return self.__dict__[k] + + +def msg_header(msg_id, msg_type, username, session): + date = datetime.now() + version = kernel_protocol_version + return locals() + +def extract_header(msg_or_header): + """Given a message or header, return the header.""" + if not msg_or_header: + return {} + try: + # See if msg_or_header is the entire message. + h = msg_or_header['header'] + except KeyError: + try: + # See if msg_or_header is just the header + h = msg_or_header['msg_id'] + except KeyError: + raise + else: + h = msg_or_header + if not isinstance(h, dict): + h = dict(h) + return h + +class Session(Configurable): + """Object for handling serialization and sending of messages. + + The Session object handles building messages and sending them + with ZMQ sockets or ZMQStream objects. Objects can communicate with each + other over the network via Session objects, and only need to work with the + dict-based IPython message spec. The Session will handle + serialization/deserialization, security, and metadata. + + Sessions support configurable serialization via packer/unpacker traits, + and signing with HMAC digests via the key/keyfile traits. 
+ + Parameters + ---------- + + debug : bool + whether to trigger extra debugging statements + packer/unpacker : str : 'json', 'pickle' or import_string + importstrings for methods to serialize message parts. If just + 'json' or 'pickle', predefined JSON and pickle packers will be used. + Otherwise, the entire importstring must be used. + + The functions must accept at least valid JSON input, and output *bytes*. + + For example, to use msgpack: + packer = 'msgpack.packb', unpacker='msgpack.unpackb' + pack/unpack : callables + You can also set the pack/unpack callables for serialization directly. + session : bytes + the ID of this Session object. The default is to generate a new UUID. + username : unicode + username added to message headers. The default is to ask the OS. + key : bytes + The key used to initialize an HMAC signature. If unset, messages + will not be signed or checked. + keyfile : filepath + The file containing a key. If this is set, `key` will be initialized + to the contents of the file. + + """ + + debug = Bool(False, config=True, help="""Debug output in the Session""") + + packer = DottedObjectName('json', config=True, + help="""The name of the packer for serializing messages. + Should be one of 'json', 'pickle', or an import name + for a custom callable serializer.""") + def _packer_changed(self, name, old, new): + if new.lower() == 'json': + self.pack = json_packer + self.unpack = json_unpacker + self.unpacker = new + elif new.lower() == 'pickle': + self.pack = pickle_packer + self.unpack = pickle_unpacker + self.unpacker = new + else: + self.pack = import_item(str(new)) + + unpacker = DottedObjectName('json', config=True, + help="""The name of the unpacker for deserializing messages. + Only used with custom functions for `packer`.""") + def _unpacker_changed(self, name, old, new): + if new.lower() == 'json': + self.pack = json_packer + self.unpack = json_unpacker + self.packer = new + elif new.lower() == 'pickle': + self.pack = pickle_packer + self.unpack = pickle_unpacker + self.packer = new + else: + self.unpack = import_item(str(new)) + + session = CUnicode(u'', config=True, + help="""The UUID identifying this session.""") + def _session_default(self): + u = unicode_type(uuid.uuid4()) + self.bsession = u.encode('ascii') + return u + + def _session_changed(self, name, old, new): + self.bsession = self.session.encode('ascii') + + # bsession is the session as bytes + bsession = CBytes(b'') + + username = Unicode(str_to_unicode(os.environ.get('USER', 'username')), + help="""Username for the Session. Default is your system username.""", + config=True) + + metadata = Dict({}, config=True, + help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""") + + # if 0, no adapting to do. + adapt_version = Integer(0) + + # message signature related traits: + + key = CBytes(config=True, + help="""execution key, for signing messages.""") + def _key_default(self): + return str_to_bytes(str(uuid.uuid4())) + + def _key_changed(self): + self._new_auth() + + signature_scheme = Unicode('hmac-sha256', config=True, + help="""The digest scheme used to construct the message signatures. 
+ Must have the form 'hmac-HASH'.""") + def _signature_scheme_changed(self, name, old, new): + if not new.startswith('hmac-'): + raise TraitError("signature_scheme must start with 'hmac-', got %r" % new) + hash_name = new.split('-', 1)[1] + try: + self.digest_mod = getattr(hashlib, hash_name) + except AttributeError: + raise TraitError("hashlib has no such attribute: %s" % hash_name) + self._new_auth() + + digest_mod = Any() + def _digest_mod_default(self): + return hashlib.sha256 + + auth = Instance(hmac.HMAC) + + def _new_auth(self): + if self.key: + self.auth = hmac.HMAC(self.key, digestmod=self.digest_mod) + else: + self.auth = None + + digest_history = Set() + digest_history_size = Integer(2**16, config=True, + help="""The maximum number of digests to remember. + + The digest history will be culled when it exceeds this value. + """ + ) + + keyfile = Unicode('', config=True, + help="""path to file containing execution key.""") + def _keyfile_changed(self, name, old, new): + with open(new, 'rb') as f: + self.key = f.read().strip() + + # for protecting against sends from forks + pid = Integer() + + # serialization traits: + + pack = Any(default_packer) # the actual packer function + def _pack_changed(self, name, old, new): + if not callable(new): + raise TypeError("packer must be callable, not %s"%type(new)) + + unpack = Any(default_unpacker) # the actual unpacker function + def _unpack_changed(self, name, old, new): + # unpacker is not checked - it is assumed to be the inverse of pack + if not callable(new): + raise TypeError("unpacker must be callable, not %s"%type(new)) + + # thresholds: + copy_threshold = Integer(2**16, config=True, + help="Threshold (in bytes) beyond which a buffer should be sent without copying.") + buffer_threshold = Integer(MAX_BYTES, config=True, + help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.") + item_threshold = Integer(MAX_ITEMS, config=True, + help="""The maximum number of items for a container to be introspected for custom serialization. + Containers larger than this are pickled outright. + """ + ) + + + def __init__(self, **kwargs): + """create a Session object + + Parameters + ---------- + + debug : bool + whether to trigger extra debugging statements + packer/unpacker : str : 'json', 'pickle' or import_string + importstrings for methods to serialize message parts. If just + 'json' or 'pickle', predefined JSON and pickle packers will be used. + Otherwise, the entire importstring must be used. + + The functions must accept at least valid JSON input, and output + *bytes*. + + For example, to use msgpack: + packer = 'msgpack.packb', unpacker='msgpack.unpackb' + pack/unpack : callables + You can also set the pack/unpack callables for serialization + directly. + session : unicode (must be ascii) + the ID of this Session object. The default is to generate a new + UUID. + bsession : bytes + The session as bytes + username : unicode + username added to message headers. The default is to ask the OS. + key : bytes + The key used to initialize an HMAC signature. If unset, messages + will not be signed or checked. + signature_scheme : str + The message digest scheme. Currently must be of the form 'hmac-HASH', + where 'HASH' is a hashing function available in Python's hashlib. + The default is 'hmac-sha256'. + This is ignored if 'key' is empty. + keyfile : filepath + The file containing a key. If this is set, `key` will be + initialized to the contents of the file. 
+ """ + super(Session, self).__init__(**kwargs) + self._check_packers() + self.none = self.pack({}) + # ensure self._session_default() if necessary, so bsession is defined: + self.session + self.pid = os.getpid() + self._new_auth() + + @property + def msg_id(self): + """always return new uuid""" + return str(uuid.uuid4()) + + def _check_packers(self): + """check packers for datetime support.""" + pack = self.pack + unpack = self.unpack + + # check simple serialization + msg = dict(a=[1,'hi']) + try: + packed = pack(msg) + except Exception as e: + msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}" + if self.packer == 'json': + jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod + else: + jsonmsg = "" + raise ValueError( + msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg) + ) + + # ensure packed message is bytes + if not isinstance(packed, bytes): + raise ValueError("message packed to %r, but bytes are required"%type(packed)) + + # check that unpack is pack's inverse + try: + unpacked = unpack(packed) + assert unpacked == msg + except Exception as e: + msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}" + if self.packer == 'json': + jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod + else: + jsonmsg = "" + raise ValueError( + msg.format(packer=self.packer, unpacker=self.unpacker, e=e, jsonmsg=jsonmsg) + ) + + # check datetime support + msg = dict(t=datetime.now()) + try: + unpacked = unpack(pack(msg)) + if isinstance(unpacked['t'], datetime): + raise ValueError("Shouldn't deserialize to datetime") + except Exception: + self.pack = lambda o: pack(squash_dates(o)) + self.unpack = lambda s: unpack(s) + + def msg_header(self, msg_type): + return msg_header(self.msg_id, msg_type, self.username, self.session) + + def msg(self, msg_type, content=None, parent=None, header=None, metadata=None): + """Return the nested message dict. + + This format is different from what is sent over the wire. The + serialize/deserialize methods converts this nested message dict to the wire + format, which is a list of message parts. + """ + msg = {} + header = self.msg_header(msg_type) if header is None else header + msg['header'] = header + msg['msg_id'] = header['msg_id'] + msg['msg_type'] = header['msg_type'] + msg['parent_header'] = {} if parent is None else extract_header(parent) + msg['content'] = {} if content is None else content + msg['metadata'] = self.metadata.copy() + if metadata is not None: + msg['metadata'].update(metadata) + return msg + + def sign(self, msg_list): + """Sign a message with HMAC digest. If no auth, return b''. + + Parameters + ---------- + msg_list : list + The [p_header,p_parent,p_content] part of the message list. + """ + if self.auth is None: + return b'' + h = self.auth.copy() + for m in msg_list: + h.update(m) + return str_to_bytes(h.hexdigest()) + + def serialize(self, msg, ident=None): + """Serialize the message components to bytes. + + This is roughly the inverse of deserialize. The serialize/deserialize + methods work with full message lists, whereas pack/unpack work with + the individual message parts in the message list. + + Parameters + ---------- + msg : dict or Message + The next message dict as returned by the self.msg method. + + Returns + ------- + msg_list : list + The list of bytes objects to be sent with the format:: + + [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent, + p_metadata, p_content, buffer1, buffer2, ...] 
+ + In this list, the ``p_*`` entities are the packed or serialized + versions, so if JSON is used, these are utf8 encoded JSON strings. + """ + content = msg.get('content', {}) + if content is None: + content = self.none + elif isinstance(content, dict): + content = self.pack(content) + elif isinstance(content, bytes): + # content is already packed, as in a relayed message + pass + elif isinstance(content, unicode_type): + # should be bytes, but JSON often spits out unicode + content = content.encode('utf8') + else: + raise TypeError("Content incorrect type: %s"%type(content)) + + real_message = [self.pack(msg['header']), + self.pack(msg['parent_header']), + self.pack(msg['metadata']), + content, + ] + + to_send = [] + + if isinstance(ident, list): + # accept list of idents + to_send.extend(ident) + elif ident is not None: + to_send.append(ident) + to_send.append(DELIM) + + signature = self.sign(real_message) + to_send.append(signature) + + to_send.extend(real_message) + + return to_send + + def send(self, stream, msg_or_type, content=None, parent=None, ident=None, + buffers=None, track=False, header=None, metadata=None): + """Build and send a message via stream or socket. + + The message format used by this function internally is as follows: + + [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_content, + buffer1,buffer2,...] + + The serialize/deserialize methods convert the nested message dict into this + format. + + Parameters + ---------- + + stream : zmq.Socket or ZMQStream + The socket-like object used to send the data. + msg_or_type : str or Message/dict + Normally, msg_or_type will be a msg_type unless a message is being + sent more than once. If a header is supplied, this can be set to + None and the msg_type will be pulled from the header. + + content : dict or None + The content of the message (ignored if msg_or_type is a message). + header : dict or None + The header dict for the message (ignored if msg_or_type is a message). + parent : Message or dict or None + The parent or parent header describing the parent of this message + (ignored if msg_or_type is a message). + ident : bytes or list of bytes + The zmq.IDENTITY routing path. + metadata : dict or None + The metadata describing the message + buffers : list or None + The already-serialized buffers to be appended to the message. + track : bool + Whether to track. Only for use with Sockets, because ZMQStream + objects cannot track messages. + + + Returns + ------- + msg : dict + The constructed message. + """ + if not isinstance(stream, zmq.Socket): + # ZMQStreams and dummy sockets do not support tracking. + track = False + + if isinstance(msg_or_type, (Message, dict)): + # We got a Message or message dict, not a msg_type so don't + # build a new Message. 
+ msg = msg_or_type + buffers = buffers or msg.get('buffers', []) + else: + msg = self.msg(msg_or_type, content=content, parent=parent, + header=header, metadata=metadata) + if not os.getpid() == self.pid: + io.rprint("WARNING: attempted to send message from fork") + io.rprint(msg) + return + buffers = [] if buffers is None else buffers + if self.adapt_version: + msg = adapt(msg, self.adapt_version) + to_send = self.serialize(msg, ident) + to_send.extend(buffers) + longest = max([ len(s) for s in to_send ]) + copy = (longest < self.copy_threshold) + + if buffers and track and not copy: + # only really track when we are doing zero-copy buffers + tracker = stream.send_multipart(to_send, copy=False, track=True) + else: + # use dummy tracker, which will be done immediately + tracker = DONE + stream.send_multipart(to_send, copy=copy) + + if self.debug: + pprint.pprint(msg) + pprint.pprint(to_send) + pprint.pprint(buffers) + + msg['tracker'] = tracker + + return msg + + def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None): + """Send a raw message via ident path. + + This method is used to send an already serialized message. + + Parameters + ---------- + stream : ZMQStream or Socket + The ZMQ stream or socket to use for sending the message. + msg_list : list + The serialized list of messages to send. This only includes the + [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of + the message. + ident : ident or list + A single ident or a list of idents to use in sending. + """ + to_send = [] + if isinstance(ident, bytes): + ident = [ident] + if ident is not None: + to_send.extend(ident) + + to_send.append(DELIM) + to_send.append(self.sign(msg_list)) + to_send.extend(msg_list) + stream.send_multipart(to_send, flags, copy=copy) + + def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True): + """Receive and unpack a message. + + Parameters + ---------- + socket : ZMQStream or Socket + The socket or stream to use in receiving. + + Returns + ------- + [idents], msg + [idents] is a list of idents and msg is a nested message dict of + same format as self.msg returns. + """ + if isinstance(socket, ZMQStream): + socket = socket.socket + try: + msg_list = socket.recv_multipart(mode, copy=copy) + except zmq.ZMQError as e: + if e.errno == zmq.EAGAIN: + # We can convert EAGAIN to None as we know in this case + # recv_multipart won't return None. + return None,None + else: + raise + # split multipart message into identity list and message dict + # invalid large messages can cause very expensive string comparisons + idents, msg_list = self.feed_identities(msg_list, copy) + try: + return idents, self.deserialize(msg_list, content=content, copy=copy) + except Exception as e: + # TODO: handle it + raise e + + def feed_identities(self, msg_list, copy=True): + """Split the identities from the rest of the message. + + Feed until DELIM is reached, then return the prefix as idents and + remainder as msg_list. This is easily broken by setting an IDENT to DELIM, + but that would be silly. + + Parameters + ---------- + msg_list : a list of Message or bytes objects + The message to be split. + copy : bool + flag determining whether the arguments are bytes or Messages + + Returns + ------- + (idents, msg_list) : two lists + idents will always be a list of bytes, each of which is a ZMQ + identity. msg_list will be a list of bytes or zmq.Messages of the + form [HMAC,p_header,p_parent,p_content,buffer1,buffer2,...] 
and + should be unpackable/unserializable via self.deserialize at this + point. + """ + if copy: + idx = msg_list.index(DELIM) + return msg_list[:idx], msg_list[idx+1:] + else: + failed = True + for idx,m in enumerate(msg_list): + if m.bytes == DELIM: + failed = False + break + if failed: + raise ValueError("DELIM not in msg_list") + idents, msg_list = msg_list[:idx], msg_list[idx+1:] + return [m.bytes for m in idents], msg_list + + def _add_digest(self, signature): + """add a digest to history to protect against replay attacks""" + if self.digest_history_size == 0: + # no history, never add digests + return + + self.digest_history.add(signature) + if len(self.digest_history) > self.digest_history_size: + # threshold reached, cull 10% + self._cull_digest_history() + + def _cull_digest_history(self): + """cull the digest history + + Removes a randomly selected 10% of the digest history + """ + current = len(self.digest_history) + n_to_cull = max(int(current // 10), current - self.digest_history_size) + if n_to_cull >= current: + self.digest_history = set() + return + to_cull = random.sample(self.digest_history, n_to_cull) + self.digest_history.difference_update(to_cull) + + def deserialize(self, msg_list, content=True, copy=True): + """Deserialize a msg_list to a nested message dict. + + This is roughly the inverse of serialize. The serialize/deserialize + methods work with full message lists, whereas pack/unpack work with + the individual message parts in the message list. + + Parameters + ---------- + msg_list : list of bytes or Message objects + The list of message parts of the form [HMAC,p_header,p_parent, + p_metadata,p_content,buffer1,buffer2,...]. + content : bool (True) + Whether to unpack the content dict (True), or leave it packed + (False). + copy : bool (True) + Whether msg_list contains bytes (True) or the non-copying Message + objects in each place (False). + + Returns + ------- + msg : dict + The nested message dict with top-level keys [header, parent_header, + content, buffers]. The buffers are returned as memoryviews. + """ + minlen = 5 + message = {} + if not copy: + # pyzmq didn't copy the first parts of the message, so we'll do it + for i in range(minlen): + msg_list[i] = msg_list[i].bytes + if self.auth is not None: + signature = msg_list[0] + if not signature: + raise ValueError("Unsigned Message") + if signature in self.digest_history: + raise ValueError("Duplicate Signature: %r" % signature) + self._add_digest(signature) + check = self.sign(msg_list[1:5]) + if not compare_digest(signature, check): + raise ValueError("Invalid Signature: %r" % signature) + if not len(msg_list) >= minlen: + raise TypeError("malformed message, must have at least %i elements"%minlen) + header = self.unpack(msg_list[1]) + message['header'] = extract_dates(header) + message['msg_id'] = header['msg_id'] + message['msg_type'] = header['msg_type'] + message['parent_header'] = extract_dates(self.unpack(msg_list[2])) + message['metadata'] = self.unpack(msg_list[3]) + if content: + message['content'] = self.unpack(msg_list[4]) + else: + message['content'] = msg_list[4] + buffers = [memoryview(b) for b in msg_list[5:]] + if buffers and buffers[0].shape is None: + # force copy to workaround pyzmq #646 + buffers = [memoryview(b.bytes) for b in msg_list[5:]] + message['buffers'] = buffers + # adapt to the current version + return adapt(message) + + def unserialize(self, *args, **kwargs): + warnings.warn( + "Session.unserialize is deprecated. 
Use Session.deserialize.", + DeprecationWarning, + ) + return self.deserialize(*args, **kwargs) + + +def test_msg2obj(): + am = dict(x=1) + ao = Message(am) + assert ao.x == am['x'] + + am['y'] = dict(z=1) + ao = Message(am) + assert ao.y.z == am['y']['z'] + + k1, k2 = 'y', 'z' + assert ao[k1][k2] == am[k1][k2] + + am2 = dict(ao) + assert am['x'] == am2['x'] + assert am['y']['z'] == am2['y']['z'] diff --git a/IPython/kernel/threaded.py b/IPython/kernel/threaded.py new file mode 100644 index 0000000..4a1072f --- /dev/null +++ b/IPython/kernel/threaded.py @@ -0,0 +1 @@ +from jupyter_client.threaded import * diff --git a/ipython_kernel/__init__.py b/ipython_kernel/__init__.py new file mode 100644 index 0000000..14c5df6 --- /dev/null +++ b/ipython_kernel/__init__.py @@ -0,0 +1 @@ +from .connect import * \ No newline at end of file diff --git a/ipython_kernel/__main__.py b/ipython_kernel/__main__.py new file mode 100644 index 0000000..93d8472 --- /dev/null +++ b/ipython_kernel/__main__.py @@ -0,0 +1,3 @@ +if __name__ == '__main__': + from ipython_kernel.zmq import kernelapp as app + app.launch_new_instance() diff --git a/IPython/kernel/comm/__init__.py b/ipython_kernel/comm/__init__.py similarity index 100% rename from IPython/kernel/comm/__init__.py rename to ipython_kernel/comm/__init__.py diff --git a/IPython/kernel/comm/comm.py b/ipython_kernel/comm/comm.py similarity index 97% rename from IPython/kernel/comm/comm.py rename to ipython_kernel/comm/comm.py index dc51b50..6619bd6 100644 --- a/IPython/kernel/comm/comm.py +++ b/ipython_kernel/comm/comm.py @@ -9,7 +9,7 @@ import uuid from zmq.eventloop.ioloop import IOLoop from IPython.config import LoggingConfigurable -from IPython.kernel.zmq.kernelbase import Kernel +from ipython_kernel.zmq.kernelbase import Kernel from IPython.utils.jsonutil import json_clean from IPython.utils.traitlets import Instance, Unicode, Bytes, Bool, Dict, Any @@ -20,40 +20,40 @@ class Comm(LoggingConfigurable): # If this is instantiated by a non-IPython kernel, shell will be None shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) - kernel = Instance('IPython.kernel.zmq.kernelbase.Kernel') + kernel = Instance('ipython_kernel.zmq.kernelbase.Kernel') def _kernel_default(self): if Kernel.initialized(): return Kernel.instance() - + iopub_socket = Any() def _iopub_socket_default(self): return self.kernel.iopub_socket - session = Instance('IPython.kernel.zmq.session.Session') + session = Instance('ipython_kernel.zmq.session.Session') def _session_default(self): if self.kernel is not None: return self.kernel.session - + target_name = Unicode('comm') target_module = Unicode(None, allow_none=True, help="""requirejs module from which to load comm target.""") - + topic = Bytes() def _topic_default(self): return ('comm-%s' % self.comm_id).encode('ascii') - + _open_data = Dict(help="data dict, if any, to be included in comm_open") _close_data = Dict(help="data dict, if any, to be included in comm_close") - + _msg_callback = Any() _close_callback = Any() - + _closed = Bool(True) comm_id = Unicode() def _comm_id_default(self): return uuid.uuid4().hex - + primary = Bool(True, help="Am I the primary or secondary Comm?") - + def __init__(self, target_name='', data=None, **kwargs): if target_name: kwargs['target_name'] = target_name @@ -63,7 +63,7 @@ class Comm(LoggingConfigurable): self.open(data) else: self._closed = False - + def _publish_msg(self, msg_type, data=None, metadata=None, buffers=None, **keys): """Helper for sending a comm message on 
IOPub""" if threading.current_thread().name != 'MainThread' and IOLoop.initialized(): @@ -80,13 +80,13 @@ class Comm(LoggingConfigurable): ident=self.topic, buffers=buffers, ) - + def __del__(self): """trigger close on gc""" self.close() - + # publishing messages - + def open(self, data=None, metadata=None, buffers=None): """Open the frontend-side version of this comm""" if data is None: @@ -107,7 +107,7 @@ class Comm(LoggingConfigurable): except: comm_manager.unregister_comm(self) raise - + def close(self, data=None, metadata=None, buffers=None): """Close the frontend-side version of this comm""" if self._closed: @@ -120,41 +120,41 @@ class Comm(LoggingConfigurable): data=data, metadata=metadata, buffers=buffers, ) self.kernel.comm_manager.unregister_comm(self) - + def send(self, data=None, metadata=None, buffers=None): """Send a message to the frontend-side version of this comm""" self._publish_msg('comm_msg', data=data, metadata=metadata, buffers=buffers, ) - + # registering callbacks - + def on_close(self, callback): """Register a callback for comm_close - + Will be called with the `data` of the close message. - + Call `on_close(None)` to disable an existing callback. """ self._close_callback = callback - + def on_msg(self, callback): """Register a callback for comm_msg - + Will be called with the `data` of any comm_msg messages. - + Call `on_msg(None)` to disable an existing callback. """ self._msg_callback = callback - + # handling of incoming messages - + def handle_close(self, msg): """Handle a comm_close message""" self.log.debug("handle_close[%s](%s)", self.comm_id, msg) if self._close_callback: self._close_callback(msg) - + def handle_msg(self, msg): """Handle a comm_msg message""" self.log.debug("handle_msg[%s](%s)", self.comm_id, msg) diff --git a/IPython/kernel/comm/manager.py b/ipython_kernel/comm/manager.py similarity index 97% rename from IPython/kernel/comm/manager.py rename to ipython_kernel/comm/manager.py index 8bfb2e2..066416c 100644 --- a/IPython/kernel/comm/manager.py +++ b/ipython_kernel/comm/manager.py @@ -18,7 +18,7 @@ from .comm import Comm def lazy_keys(dikt): """Return lazy-evaluated string representation of a dictionary's keys - + Key list is only constructed if it will actually be used. Used for debug-logging. """ @@ -27,43 +27,43 @@ def lazy_keys(dikt): class CommManager(LoggingConfigurable): """Manager for Comms in the Kernel""" - + # If this is instantiated by a non-IPython kernel, shell will be None shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) - kernel = Instance('IPython.kernel.zmq.kernelbase.Kernel') + kernel = Instance('ipython_kernel.zmq.kernelbase.Kernel') iopub_socket = Any() def _iopub_socket_default(self): return self.kernel.iopub_socket - session = Instance('IPython.kernel.zmq.session.Session') + session = Instance('ipython_kernel.zmq.session.Session') def _session_default(self): return self.kernel.session - + comms = Dict() targets = Dict() - + # Public APIs - + def register_target(self, target_name, f): """Register a callable f for a given target name - + f will be called with two arguments when a comm_open message is received with `target`: - + - the Comm instance - the `comm_open` message itself. - + f can be a Python callable or an import string for one. 
""" if isinstance(f, string_types): f = import_item(f) - + self.targets[target_name] = f - + def unregister_target(self, target_name, f): """Unregister a callable registered with register_target""" return self.targets.pop(target_name); - + def register_comm(self, comm): """Register a new comm""" comm_id = comm.comm_id @@ -72,17 +72,17 @@ class CommManager(LoggingConfigurable): comm.iopub_socket = self.iopub_socket self.comms[comm_id] = comm return comm_id - + def unregister_comm(self, comm): """Unregister a comm, and close its counterpart""" # unlike get_comm, this should raise a KeyError comm = self.comms.pop(comm.comm_id) - + def get_comm(self, comm_id): """Get a comm with a particular id - + Returns the comm if found, otherwise None. - + This will not raise an error, it will log messages if the comm cannot be found. """ @@ -93,7 +93,7 @@ class CommManager(LoggingConfigurable): # call, because we store weakrefs comm = self.comms[comm_id] return comm - + # Message handlers def comm_open(self, stream, ident, msg): """Handler for comm_open messages""" @@ -116,14 +116,14 @@ class CommManager(LoggingConfigurable): return except Exception: self.log.error("Exception opening comm with target: %s", target_name, exc_info=True) - + # Failure. try: comm.close() except: - self.log.error("""Could not close comm during `comm_open` failure + self.log.error("""Could not close comm during `comm_open` failure clean-up. The comm may not have been opened yet.""", exc_info=True) - + def comm_msg(self, stream, ident, msg): """Handler for comm_msg messages""" content = msg['content'] @@ -136,7 +136,7 @@ class CommManager(LoggingConfigurable): comm.handle_msg(msg) except Exception: self.log.error("Exception in comm_msg for %s", comm_id, exc_info=True) - + def comm_close(self, stream, ident, msg): """Handler for comm_close messages""" content = msg['content'] @@ -147,7 +147,7 @@ class CommManager(LoggingConfigurable): self.log.debug("No such comm to close: %s", comm_id) return del self.comms[comm_id] - + try: comm.handle_close(msg) except Exception: diff --git a/ipython_kernel/connect.py b/ipython_kernel/connect.py new file mode 100644 index 0000000..ef5ca0e --- /dev/null +++ b/ipython_kernel/connect.py @@ -0,0 +1,576 @@ +"""Utilities for connecting to kernels + +The :class:`ConnectionFileMixin` class in this module encapsulates the logic +related to writing and reading connections files. +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
+ +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +from __future__ import absolute_import + +import glob +import json +import os +import socket +import sys +from getpass import getpass +from subprocess import Popen, PIPE +import tempfile + +import zmq + +# IPython imports +from IPython.config import LoggingConfigurable +from IPython.core.profiledir import ProfileDir +from IPython.utils.localinterfaces import localhost +from IPython.utils.path import filefind, get_ipython_dir +from IPython.utils.py3compat import (str_to_bytes, bytes_to_str, cast_bytes_py2, + string_types) +from IPython.utils.traitlets import ( + Bool, Integer, Unicode, CaselessStrEnum, Instance, +) + + +#----------------------------------------------------------------------------- +# Working with Connection Files +#----------------------------------------------------------------------------- + +def write_connection_file(fname=None, shell_port=0, iopub_port=0, stdin_port=0, hb_port=0, + control_port=0, ip='', key=b'', transport='tcp', + signature_scheme='hmac-sha256', + ): + """Generates a JSON config file, including the selection of random ports. + + Parameters + ---------- + + fname : unicode + The path to the file to write + + shell_port : int, optional + The port to use for the ROUTER (shell) channel. + + iopub_port : int, optional + The port to use for the SUB channel. + + stdin_port : int, optional + The port to use for the ROUTER (raw input) channel. + + control_port : int, optional + The port to use for the ROUTER (control) channel. + + hb_port : int, optional + The port to use for the heartbeat REP channel. + + ip : str, optional + The IP address the kernel will bind to. + + key : str, optional + The Session key used for message authentication. + + signature_scheme : str, optional + The scheme used for message authentication. + This has the form 'digest-hash', where 'digest' + is the scheme used for digests, and 'hash' is the name of the hash function + used by the digest scheme. + Currently, 'hmac' is the only supported digest scheme, + and 'sha256' is the default hash function. + + """ + if not ip: + ip = localhost() + # default to a temporary connection file + if not fname: + fd, fname = tempfile.mkstemp('.json') + os.close(fd) + + # Find open ports as necessary. 
+ + ports = [] + ports_needed = int(shell_port <= 0) + \ + int(iopub_port <= 0) + \ + int(stdin_port <= 0) + \ + int(control_port <= 0) + \ + int(hb_port <= 0) + if transport == 'tcp': + for i in range(ports_needed): + sock = socket.socket() + # struct.pack('ii', (0,0)) is 8 null bytes + sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, b'\0' * 8) + sock.bind(('', 0)) + ports.append(sock) + for i, sock in enumerate(ports): + port = sock.getsockname()[1] + sock.close() + ports[i] = port + else: + N = 1 + for i in range(ports_needed): + while os.path.exists("%s-%s" % (ip, str(N))): + N += 1 + ports.append(N) + N += 1 + if shell_port <= 0: + shell_port = ports.pop(0) + if iopub_port <= 0: + iopub_port = ports.pop(0) + if stdin_port <= 0: + stdin_port = ports.pop(0) + if control_port <= 0: + control_port = ports.pop(0) + if hb_port <= 0: + hb_port = ports.pop(0) + + cfg = dict( shell_port=shell_port, + iopub_port=iopub_port, + stdin_port=stdin_port, + control_port=control_port, + hb_port=hb_port, + ) + cfg['ip'] = ip + cfg['key'] = bytes_to_str(key) + cfg['transport'] = transport + cfg['signature_scheme'] = signature_scheme + + with open(fname, 'w') as f: + f.write(json.dumps(cfg, indent=2)) + + return fname, cfg + + +def get_connection_file(app=None): + """Return the path to the connection file of an app + + Parameters + ---------- + app : IPKernelApp instance [optional] + If unspecified, the currently running app will be used + """ + if app is None: + from jupyter_client.kernelapp import IPKernelApp + if not IPKernelApp.initialized(): + raise RuntimeError("app not specified, and not in a running Kernel") + + app = IPKernelApp.instance() + return filefind(app.connection_file, ['.', app.profile_dir.security_dir]) + + +def find_connection_file(filename='kernel-*.json', profile=None): + """find a connection file, and return its absolute path. + + The current working directory and the profile's security + directory will be searched for the file if it is not given by + absolute path. + + If profile is unspecified, then the current running application's + profile will be used, or 'default', if not run from IPython. + + If the argument does not match an existing file, it will be interpreted as a + fileglob, and the matching file in the profile's security dir with + the latest access time will be used. + + Parameters + ---------- + filename : str + The connection file or fileglob to search for. + profile : str [optional] + The name of the profile to use when searching for the connection file, + if different from the current IPython session or 'default'. + + Returns + ------- + str : The absolute path of the connection file. 
+ """ + from IPython.core.application import BaseIPythonApplication as IPApp + try: + # quick check for absolute path, before going through logic + return filefind(filename) + except IOError: + pass + + if profile is None: + # profile unspecified, check if running from an IPython app + if IPApp.initialized(): + app = IPApp.instance() + profile_dir = app.profile_dir + else: + # not running in IPython, use default profile + profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), 'default') + else: + # find profiledir by profile name: + profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile) + security_dir = profile_dir.security_dir + + try: + # first, try explicit name + return filefind(filename, ['.', security_dir]) + except IOError: + pass + + # not found by full name + + if '*' in filename: + # given as a glob already + pat = filename + else: + # accept any substring match + pat = '*%s*' % filename + matches = glob.glob( os.path.join(security_dir, pat) ) + if not matches: + raise IOError("Could not find %r in %r" % (filename, security_dir)) + elif len(matches) == 1: + return matches[0] + else: + # get most recent match, by access time: + return sorted(matches, key=lambda f: os.stat(f).st_atime)[-1] + + +def get_connection_info(connection_file=None, unpack=False, profile=None): + """Return the connection information for the current Kernel. + + Parameters + ---------- + connection_file : str [optional] + The connection file to be used. Can be given by absolute path, or + IPython will search in the security directory of a given profile. + If run from IPython, + + If unspecified, the connection file for the currently running + IPython Kernel will be used, which is only allowed from inside a kernel. + unpack : bool [default: False] + if True, return the unpacked dict, otherwise just the string contents + of the file. + profile : str [optional] + The name of the profile to use when searching for the connection file, + if different from the current IPython session or 'default'. + + + Returns + ------- + The connection dictionary of the current kernel, as string or dict, + depending on `unpack`. + """ + if connection_file is None: + # get connection file from current kernel + cf = get_connection_file() + else: + # connection file specified, allow shortnames: + cf = find_connection_file(connection_file, profile=profile) + + with open(cf) as f: + info = f.read() + + if unpack: + info = json.loads(info) + # ensure key is bytes: + info['key'] = str_to_bytes(info.get('key', '')) + return info + + +def connect_qtconsole(connection_file=None, argv=None, profile=None): + """Connect a qtconsole to the current kernel. + + This is useful for connecting a second qtconsole to a kernel, or to a + local notebook. + + Parameters + ---------- + connection_file : str [optional] + The connection file to be used. Can be given by absolute path, or + IPython will search in the security directory of a given profile. + If run from IPython, + + If unspecified, the connection file for the currently running + IPython Kernel will be used, which is only allowed from inside a kernel. + argv : list [optional] + Any extra args to be passed to the console. + profile : str [optional] + The name of the profile to use when searching for the connection file, + if different from the current IPython session or 'default'. 
+ + + Returns + ------- + :class:`subprocess.Popen` instance running the qtconsole frontend + """ + argv = [] if argv is None else argv + + if connection_file is None: + # get connection file from current kernel + cf = get_connection_file() + else: + cf = find_connection_file(connection_file, profile=profile) + + cmd = ';'.join([ + "from IPython.qt.console import qtconsoleapp", + "qtconsoleapp.main()" + ]) + + return Popen([sys.executable, '-c', cmd, '--existing', cf] + argv, + stdout=PIPE, stderr=PIPE, close_fds=(sys.platform != 'win32'), + ) + + +def tunnel_to_kernel(connection_info, sshserver, sshkey=None): + """tunnel connections to a kernel via ssh + + This will open four SSH tunnels from localhost on this machine to the + ports associated with the kernel. They can be either direct + localhost-localhost tunnels, or if an intermediate server is necessary, + the kernel must be listening on a public IP. + + Parameters + ---------- + connection_info : dict or str (path) + Either a connection dict, or the path to a JSON connection file + sshserver : str + The ssh server to use to tunnel to the kernel. Can be a full + `user@server:port` string. ssh config aliases are respected. + sshkey : str [optional] + Path to file containing ssh key to use for authentication. + Only necessary if your ssh config does not already associate + a keyfile with the host. + + Returns + ------- + + (shell, iopub, stdin, hb) : ints + The four ports on localhost that have been forwarded to the kernel. + """ + from zmq.ssh import tunnel + if isinstance(connection_info, string_types): + # it's a path, unpack it + with open(connection_info) as f: + connection_info = json.loads(f.read()) + + cf = connection_info + + lports = tunnel.select_random_ports(4) + rports = cf['shell_port'], cf['iopub_port'], cf['stdin_port'], cf['hb_port'] + + remote_ip = cf['ip'] + + if tunnel.try_passwordless_ssh(sshserver, sshkey): + password = False + else: + password = getpass("SSH Password for %s: " % cast_bytes_py2(sshserver)) + + for lp,rp in zip(lports, rports): + tunnel.ssh_tunnel(lp, rp, sshserver, remote_ip, sshkey, password) + + return tuple(lports) + + +#----------------------------------------------------------------------------- +# Mixin for classes that work with connection files +#----------------------------------------------------------------------------- + +channel_socket_types = { + 'hb' : zmq.REQ, + 'shell' : zmq.DEALER, + 'iopub' : zmq.SUB, + 'stdin' : zmq.DEALER, + 'control': zmq.DEALER, +} + +port_names = [ "%s_port" % channel for channel in ('shell', 'stdin', 'iopub', 'hb', 'control')] + +class ConnectionFileMixin(LoggingConfigurable): + """Mixin for configurable classes that work with connection files""" + + # The addresses for the communication channels + connection_file = Unicode('', config=True, + help="""JSON file in which to store connection info [default: kernel-<pid>.json] + + This file will contain the IP, ports, and authentication key needed to connect + clients to this kernel. By default, this file will be created in the security dir + of the current profile, but can be specified by absolute path. + """) + _connection_file_written = Bool(False) + + transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True) + + ip = Unicode(config=True, + help="""Set the kernel\'s IP address [default localhost]. 
+ If the IP address is something other than localhost, then + Consoles on other machines will be able to connect + to the Kernel, so be careful!""" + ) + + def _ip_default(self): + if self.transport == 'ipc': + if self.connection_file: + return os.path.splitext(self.connection_file)[0] + '-ipc' + else: + return 'kernel-ipc' + else: + return localhost() + + def _ip_changed(self, name, old, new): + if new == '*': + self.ip = '0.0.0.0' + + # protected traits + + hb_port = Integer(0, config=True, + help="set the heartbeat port [default: random]") + shell_port = Integer(0, config=True, + help="set the shell (ROUTER) port [default: random]") + iopub_port = Integer(0, config=True, + help="set the iopub (PUB) port [default: random]") + stdin_port = Integer(0, config=True, + help="set the stdin (ROUTER) port [default: random]") + control_port = Integer(0, config=True, + help="set the control (ROUTER) port [default: random]") + + @property + def ports(self): + return [ getattr(self, name) for name in port_names ] + + # The Session to use for communication with the kernel. + session = Instance('jupyter_client.session.Session') + def _session_default(self): + from jupyter_client.session import Session + return Session(parent=self) + + #-------------------------------------------------------------------------- + # Connection and ipc file management + #-------------------------------------------------------------------------- + + def get_connection_info(self): + """return the connection info as a dict""" + return dict( + transport=self.transport, + ip=self.ip, + shell_port=self.shell_port, + iopub_port=self.iopub_port, + stdin_port=self.stdin_port, + hb_port=self.hb_port, + control_port=self.control_port, + signature_scheme=self.session.signature_scheme, + key=self.session.key, + ) + + def cleanup_connection_file(self): + """Cleanup connection file *if we wrote it* + + Will not raise if the connection file was already removed somehow. 
+ """ + if self._connection_file_written: + # cleanup connection files on full shutdown of kernel we started + self._connection_file_written = False + try: + os.remove(self.connection_file) + except (IOError, OSError, AttributeError): + pass + + def cleanup_ipc_files(self): + """Cleanup ipc files if we wrote them.""" + if self.transport != 'ipc': + return + for port in self.ports: + ipcfile = "%s-%i" % (self.ip, port) + try: + os.remove(ipcfile) + except (IOError, OSError): + pass + + def write_connection_file(self): + """Write connection info to JSON dict in self.connection_file.""" + if self._connection_file_written and os.path.exists(self.connection_file): + return + + self.connection_file, cfg = write_connection_file(self.connection_file, + transport=self.transport, ip=self.ip, key=self.session.key, + stdin_port=self.stdin_port, iopub_port=self.iopub_port, + shell_port=self.shell_port, hb_port=self.hb_port, + control_port=self.control_port, + signature_scheme=self.session.signature_scheme, + ) + # write_connection_file also sets default ports: + for name in port_names: + setattr(self, name, cfg[name]) + + self._connection_file_written = True + + def load_connection_file(self): + """Load connection info from JSON dict in self.connection_file.""" + self.log.debug(u"Loading connection file %s", self.connection_file) + with open(self.connection_file) as f: + cfg = json.load(f) + self.transport = cfg.get('transport', self.transport) + self.ip = cfg.get('ip', self._ip_default()) + + for name in port_names: + if getattr(self, name) == 0 and name in cfg: + # not overridden by config or cl_args + setattr(self, name, cfg[name]) + + if 'key' in cfg: + self.session.key = str_to_bytes(cfg['key']) + if 'signature_scheme' in cfg: + self.session.signature_scheme = cfg['signature_scheme'] + + #-------------------------------------------------------------------------- + # Creating connected sockets + #-------------------------------------------------------------------------- + + def _make_url(self, channel): + """Make a ZeroMQ URL for a given channel.""" + transport = self.transport + ip = self.ip + port = getattr(self, '%s_port' % channel) + + if transport == 'tcp': + return "tcp://%s:%i" % (ip, port) + else: + return "%s://%s-%s" % (transport, ip, port) + + def _create_connected_socket(self, channel, identity=None): + """Create a zmq Socket and connect it to the kernel.""" + url = self._make_url(channel) + socket_type = channel_socket_types[channel] + self.log.debug("Connecting to: %s" % url) + sock = self.context.socket(socket_type) + # set linger to 1s to prevent hangs at exit + sock.linger = 1000 + if identity: + sock.identity = identity + sock.connect(url) + return sock + + def connect_iopub(self, identity=None): + """return zmq Socket connected to the IOPub channel""" + sock = self._create_connected_socket('iopub', identity=identity) + sock.setsockopt(zmq.SUBSCRIBE, b'') + return sock + + def connect_shell(self, identity=None): + """return zmq Socket connected to the Shell channel""" + return self._create_connected_socket('shell', identity=identity) + + def connect_stdin(self, identity=None): + """return zmq Socket connected to the StdIn channel""" + return self._create_connected_socket('stdin', identity=identity) + + def connect_hb(self, identity=None): + """return zmq Socket connected to the Heartbeat channel""" + return self._create_connected_socket('hb', identity=identity) + + def connect_control(self, identity=None): + """return zmq Socket connected to the Control channel""" + return 
self._create_connected_socket('control', identity=identity) + + +__all__ = [ + 'write_connection_file', + 'get_connection_file', + 'find_connection_file', + 'get_connection_info', + 'connect_qtconsole', + 'tunnel_to_kernel', +] diff --git a/IPython/kernel/inprocess/__init__.py b/ipython_kernel/inprocess/__init__.py similarity index 100% rename from IPython/kernel/inprocess/__init__.py rename to ipython_kernel/inprocess/__init__.py diff --git a/IPython/kernel/inprocess/blocking.py b/ipython_kernel/inprocess/blocking.py similarity index 99% rename from IPython/kernel/inprocess/blocking.py rename to ipython_kernel/inprocess/blocking.py index 1d3a010..a90b9ce 100644 --- a/IPython/kernel/inprocess/blocking.py +++ b/ipython_kernel/inprocess/blocking.py @@ -17,7 +17,6 @@ except ImportError: # IPython imports from IPython.utils.io import raw_print from IPython.utils.traitlets import Type -#from IPython.kernel.blocking.channels import BlockingChannelMixin # Local imports from .channels import ( diff --git a/IPython/kernel/inprocess/channels.py b/ipython_kernel/inprocess/channels.py similarity index 98% rename from IPython/kernel/inprocess/channels.py rename to ipython_kernel/inprocess/channels.py index 40b1011..0b78d99 100644 --- a/IPython/kernel/inprocess/channels.py +++ b/ipython_kernel/inprocess/channels.py @@ -3,7 +3,7 @@ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. -from IPython.kernel.channelsabc import HBChannelABC +from jupyter_client.channelsabc import HBChannelABC from .socket import DummySocket diff --git a/IPython/kernel/inprocess/client.py b/ipython_kernel/inprocess/client.py similarity index 95% rename from IPython/kernel/inprocess/client.py rename to ipython_kernel/inprocess/client.py index abb1ea3..3e2b8e8 100644 --- a/IPython/kernel/inprocess/client.py +++ b/ipython_kernel/inprocess/client.py @@ -12,10 +12,10 @@ #----------------------------------------------------------------------------- # IPython imports -from IPython.kernel.inprocess.socket import DummySocket +from ipython_kernel.inprocess.socket import DummySocket from IPython.utils.traitlets import Type, Instance -from IPython.kernel.clientabc import KernelClientABC -from IPython.kernel.client import KernelClient +from jupyter_client.clientabc import KernelClientABC +from jupyter_client.client import KernelClient # Local imports from .channels import ( @@ -32,10 +32,10 @@ class InProcessKernelClient(KernelClient): """A client for an in-process kernel. This class implements the interface of - `IPython.kernel.clientabc.KernelClientABC` and allows + `jupyter_client.clientabc.KernelClientABC` and allows (asynchronous) frontends to be used seamlessly with an in-process kernel. - See `IPython.kernel.client.KernelClient` for docstrings. + See `jupyter_client.client.KernelClient` for docstrings. """ # The classes to use for the various channels. 
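Throughout this patch, old import locations are kept alive as thin re-exports of the new homes (ipython_kernel/zmq/session.py later in this patch is a bare star-import from jupyter_client.session). A sketch of the equivalence downstream code can rely on, assuming both packages are importable:

    # Old path, kept as a re-export shim by this patch:
    from ipython_kernel.zmq.session import Session as OldSession
    # New canonical home after the split:
    from jupyter_client.session import Session as NewSession

    # A star-import re-exports the same class object, so isinstance
    # checks and subclassing keep working across the rename.
    assert OldSession is NewSession
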
@@ -44,7 +44,7 @@ class InProcessKernelClient(KernelClient): stdin_channel_class = Type(InProcessChannel) hb_channel_class = Type(InProcessHBChannel) - kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel', + kernel = Instance('ipython_kernel.inprocess.ipkernel.InProcessKernel', allow_none=True) #-------------------------------------------------------------------------- diff --git a/IPython/kernel/inprocess/ipkernel.py b/ipython_kernel/inprocess/ipkernel.py similarity index 93% rename from IPython/kernel/inprocess/ipkernel.py rename to ipython_kernel/inprocess/ipkernel.py index 7851c0c..d43e2d4 100644 --- a/IPython/kernel/inprocess/ipkernel.py +++ b/ipython_kernel/inprocess/ipkernel.py @@ -10,8 +10,8 @@ import sys from IPython.core.interactiveshell import InteractiveShellABC from IPython.utils.jsonutil import json_clean from IPython.utils.traitlets import Any, Enum, Instance, List, Type -from IPython.kernel.zmq.ipkernel import IPythonKernel -from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell +from ipython_kernel.zmq.ipkernel import IPythonKernel +from ipython_kernel.zmq.zmqshell import ZMQInteractiveShell from .socket import DummySocket @@ -27,7 +27,7 @@ class InProcessKernel(IPythonKernel): # The frontends connected to this kernel. frontends = List( - Instance('IPython.kernel.inprocess.client.InProcessKernelClient', + Instance('ipython_kernel.inprocess.client.InProcessKernelClient', allow_none=True) ) @@ -114,25 +114,25 @@ class InProcessKernel(IPythonKernel): ident, msg = self.session.recv(self.iopub_socket, copy=False) for frontend in self.frontends: frontend.iopub_channel.call_handlers(msg) - + #------ Trait initializers ----------------------------------------------- def _log_default(self): return logging.getLogger(__name__) def _session_default(self): - from IPython.kernel.zmq.session import Session + from ipython_kernel.zmq.session import Session return Session(parent=self, key=b'') def _shell_class_default(self): return InProcessInteractiveShell def _stdout_default(self): - from IPython.kernel.zmq.iostream import OutStream + from ipython_kernel.zmq.iostream import OutStream return OutStream(self.session, self.iopub_socket, u'stdout', pipe=False) def _stderr_default(self): - from IPython.kernel.zmq.iostream import OutStream + from ipython_kernel.zmq.iostream import OutStream return OutStream(self.session, self.iopub_socket, u'stderr', pipe=False) #----------------------------------------------------------------------------- @@ -141,7 +141,7 @@ class InProcessKernel(IPythonKernel): class InProcessInteractiveShell(ZMQInteractiveShell): - kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel', + kernel = Instance('ipython_kernel.inprocess.ipkernel.InProcessKernel', allow_none=True) #------------------------------------------------------------------------- @@ -150,7 +150,7 @@ class InProcessInteractiveShell(ZMQInteractiveShell): def enable_gui(self, gui=None): """Enable GUI integration for the kernel.""" - from IPython.kernel.zmq.eventloops import enable_gui + from ipython_kernel.zmq.eventloops import enable_gui if not gui: gui = self.kernel.gui return enable_gui(gui, kernel=self.kernel) diff --git a/IPython/kernel/inprocess/manager.py b/ipython_kernel/inprocess/manager.py similarity index 84% rename from IPython/kernel/inprocess/manager.py rename to ipython_kernel/inprocess/manager.py index b76bf75..1dc7eb5 100644 --- a/IPython/kernel/inprocess/manager.py +++ b/ipython_kernel/inprocess/manager.py @@ -4,37 +4,37 @@ # Distributed under the terms of 
the Modified BSD License. from IPython.utils.traitlets import Instance, DottedObjectName -from IPython.kernel.managerabc import KernelManagerABC -from IPython.kernel.manager import KernelManager -from IPython.kernel.zmq.session import Session +from jupyter_client.managerabc import KernelManagerABC +from jupyter_client.manager import KernelManager +from jupyter_client.session import Session class InProcessKernelManager(KernelManager): """A manager for an in-process kernel. This class implements the interface of - `IPython.kernel.kernelmanagerabc.KernelManagerABC` and allows + `jupyter_client.kernelmanagerabc.KernelManagerABC` and allows (asynchronous) frontends to be used seamlessly with an in-process kernel. - See `IPython.kernel.kernelmanager.KernelManager` for docstrings. + See `jupyter_client.kernelmanager.KernelManager` for docstrings. """ # The kernel process with which the KernelManager is communicating. - kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel', + kernel = Instance('ipython_kernel.inprocess.ipkernel.InProcessKernel', allow_none=True) # the client class for KM.client() shortcut - client_class = DottedObjectName('IPython.kernel.inprocess.BlockingInProcessKernelClient') - + client_class = DottedObjectName('ipython_kernel.inprocess.BlockingInProcessKernelClient') + def _session_default(self): # don't sign in-process messages return Session(key=b'', parent=self) - + #-------------------------------------------------------------------------- # Kernel management methods #-------------------------------------------------------------------------- def start_kernel(self, **kwds): - from IPython.kernel.inprocess.ipkernel import InProcessKernel + from ipython_kernel.inprocess.ipkernel import InProcessKernel self.kernel = InProcessKernel(parent=self, session=self.session) def shutdown_kernel(self): diff --git a/IPython/kernel/inprocess/socket.py b/ipython_kernel/inprocess/socket.py similarity index 100% rename from IPython/kernel/inprocess/socket.py rename to ipython_kernel/inprocess/socket.py index 7e0664e..d68f3fc 100644 --- a/IPython/kernel/inprocess/socket.py +++ b/ipython_kernel/inprocess/socket.py @@ -46,7 +46,7 @@ SocketABC.register(zmq.Socket) class DummySocket(HasTraits): """ A dummy socket implementing (part of) the zmq.Socket interface. 
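Putting the renamed in-process pieces together, roughly how the manager above is driven (the same pattern the in-process tests below use; client() resolves to the BlockingInProcessKernelClient named in client_class):

    from ipython_kernel.inprocess.manager import InProcessKernelManager

    km = InProcessKernelManager()
    km.start_kernel()            # instantiates InProcessKernel in this process
    kc = km.client()             # the KM.client() shortcut noted in the diff above
    kc.start_channels()
    kc.execute('x = 2 + 2')      # executes synchronously, no ZeroMQ sockets
    km.shutdown_kernel()
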
""" - + queue = Instance(Queue, ()) message_sent = Int(0) # Should be an Event diff --git a/IPython/kernel/inprocess/tests/__init__.py b/ipython_kernel/inprocess/tests/__init__.py similarity index 100% rename from IPython/kernel/inprocess/tests/__init__.py rename to ipython_kernel/inprocess/tests/__init__.py diff --git a/IPython/kernel/inprocess/tests/test_kernel.py b/ipython_kernel/inprocess/tests/test_kernel.py similarity index 92% rename from IPython/kernel/inprocess/tests/test_kernel.py rename to ipython_kernel/inprocess/tests/test_kernel.py index 3b9191d..c5b84e0 100644 --- a/IPython/kernel/inprocess/tests/test_kernel.py +++ b/ipython_kernel/inprocess/tests/test_kernel.py @@ -6,10 +6,10 @@ from __future__ import print_function import sys import unittest -from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient -from IPython.kernel.inprocess.manager import InProcessKernelManager -from IPython.kernel.inprocess.ipkernel import InProcessKernel -from IPython.kernel.tests.utils import assemble_output +from ipython_kernel.inprocess.blocking import BlockingInProcessKernelClient +from ipython_kernel.inprocess.manager import InProcessKernelManager +from ipython_kernel.inprocess.ipkernel import InProcessKernel +from ipython_kernel.tests.utils import assemble_output from IPython.testing.decorators import skipif_not_matplotlib from IPython.utils.io import capture_output from IPython.utils import py3compat @@ -66,4 +66,3 @@ class InProcessKernelTestCase(unittest.TestCase): kc.execute('print("bar")') out, err = assemble_output(kc.iopub_channel) self.assertEqual(out, 'bar\n') - diff --git a/IPython/kernel/inprocess/tests/test_kernelmanager.py b/ipython_kernel/inprocess/tests/test_kernelmanager.py similarity index 97% rename from IPython/kernel/inprocess/tests/test_kernelmanager.py rename to ipython_kernel/inprocess/tests/test_kernelmanager.py index 3ea1c86..a26181d 100644 --- a/IPython/kernel/inprocess/tests/test_kernelmanager.py +++ b/ipython_kernel/inprocess/tests/test_kernelmanager.py @@ -5,8 +5,8 @@ from __future__ import print_function import unittest -from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient -from IPython.kernel.inprocess.manager import InProcessKernelManager +from ipython_kernel.inprocess.blocking import BlockingInProcessKernelClient +from ipython_kernel.inprocess.manager import InProcessKernelManager #----------------------------------------------------------------------------- # Test case diff --git a/IPython/kernel/resources/logo-32x32.png b/ipython_kernel/resources/logo-32x32.png similarity index 100% rename from IPython/kernel/resources/logo-32x32.png rename to ipython_kernel/resources/logo-32x32.png Binary files a/IPython/kernel/resources/logo-32x32.png and b/ipython_kernel/resources/logo-32x32.png differ diff --git a/IPython/kernel/resources/logo-64x64.png b/ipython_kernel/resources/logo-64x64.png similarity index 100% rename from IPython/kernel/resources/logo-64x64.png rename to ipython_kernel/resources/logo-64x64.png Binary files a/IPython/kernel/resources/logo-64x64.png and b/ipython_kernel/resources/logo-64x64.png differ diff --git a/IPython/kernel/tests/__init__.py b/ipython_kernel/tests/__init__.py similarity index 100% rename from IPython/kernel/tests/__init__.py rename to ipython_kernel/tests/__init__.py diff --git a/IPython/kernel/tests/test_kernel.py b/ipython_kernel/tests/test_kernel.py similarity index 95% rename from IPython/kernel/tests/test_kernel.py rename to ipython_kernel/tests/test_kernel.py index 2849da5..131dc70 
100644 --- a/IPython/kernel/tests/test_kernel.py +++ b/ipython_kernel/tests/test_kernel.py @@ -49,7 +49,7 @@ def test_sys_path(): def test_sys_path_profile_dir(): """test that sys.path doesn't get messed up when `--profile-dir` is specified""" - + with new_kernel(['--profile-dir', locate_profile('default')]) as kc: msg_id, content = execute(kc=kc, code="import sys; print (repr(sys.path[0]))") stdout, stderr = assemble_output(kc.iopub_channel) @@ -60,7 +60,7 @@ def test_subprocess_print(): """printing from forked mp.Process""" with new_kernel() as kc: iopub = kc.iopub_channel - + _check_mp_mode(kc, expected=False) flush_channels(kc) np = 5 @@ -71,11 +71,11 @@ def test_subprocess_print(): "for p in pool: p.start()", "for p in pool: p.join()" ]) - + expected = '\n'.join([ "hello %s" % i for i in range(np) ]) + '\n' - + msg_id, content = execute(kc=kc, code=code) stdout, stderr = assemble_output(iopub) nt.assert_equal(stdout.count("hello"), np, stdout) @@ -90,7 +90,7 @@ def test_subprocess_noprint(): """mp.Process without print doesn't trigger iostream mp_mode""" with kernel() as kc: iopub = kc.iopub_channel - + np = 5 code = '\n'.join([ "import multiprocessing as mp", @@ -98,7 +98,7 @@ def test_subprocess_noprint(): "for p in pool: p.start()", "for p in pool: p.join()" ]) - + msg_id, content = execute(kc=kc, code=code) stdout, stderr = assemble_output(iopub) nt.assert_equal(stdout, '') @@ -113,14 +113,14 @@ def test_subprocess_error(): """error in mp.Process doesn't crash""" with new_kernel() as kc: iopub = kc.iopub_channel - + code = '\n'.join([ "import multiprocessing as mp", "p = mp.Process(target=int, args=('hi',))", "p.start()", "p.join()", ]) - + msg_id, content = execute(kc=kc, code=code) stdout, stderr = assemble_output(iopub) nt.assert_equal(stdout, '') @@ -135,7 +135,7 @@ def test_raw_input(): """test [raw_]input""" with kernel() as kc: iopub = kc.iopub_channel - + input_f = "input" if py3compat.PY3 else "raw_input" theprompt = "prompt> " code = 'print({input_f}("{theprompt}"))'.format(**locals()) @@ -157,7 +157,7 @@ def test_eval_input(): """test input() on Python 2""" with kernel() as kc: iopub = kc.iopub_channel - + input_f = "input" if py3compat.PY3 else "raw_input" theprompt = "prompt> " code = 'print(input("{theprompt}"))'.format(**locals()) @@ -205,7 +205,7 @@ def test_is_complete(): kc.is_complete('raise = 2') reply = kc.get_shell_msg(block=True, timeout=TIMEOUT) assert reply['content']['status'] == 'invalid' - + kc.is_complete('a = [1,\n2,') reply = kc.get_shell_msg(block=True, timeout=TIMEOUT) assert reply['content']['status'] == 'incomplete' diff --git a/IPython/kernel/tests/test_message_spec.py b/ipython_kernel/tests/test_message_spec.py similarity index 100% rename from IPython/kernel/tests/test_message_spec.py rename to ipython_kernel/tests/test_message_spec.py index 9e8f21b..f474def 100644 --- a/IPython/kernel/tests/test_message_spec.py +++ b/ipython_kernel/tests/test_message_spec.py @@ -64,7 +64,7 @@ class Version(Unicode): self.max = kwargs.pop('max', None) kwargs['default_value'] = self.min super(Version, self).__init__(*args, **kwargs) - + def validate(self, obj, value): if self.min and V(value) < V(self.min): raise TraitError("bad version: %s < %s" % (value, self.min)) @@ -78,7 +78,7 @@ class RMessage(Reference): header = Dict() parent_header = Dict() content = Dict() - + def check(self, d): super(RMessage, self).check(d) RHeader().check(self.header) @@ -107,7 +107,7 @@ class MimeBundle(Reference): class ExecuteReply(Reference): execution_count = Integer() status 
= Enum((u'ok', u'error'), default_value=u'ok') - + def check(self, d): Reference.check(self, d) if d['status'] == 'ok': @@ -158,7 +158,7 @@ class KernelInfoReply(Reference): implementation_version = Version(min='2.1') language_info = Dict() banner = Unicode() - + def check(self, d): Reference.check(self, d) LanguageInfo().check(d['language_info']) @@ -166,7 +166,7 @@ class KernelInfoReply(Reference): class IsCompleteReply(Reference): status = Enum((u'complete', u'incomplete', u'invalid', u'unknown'), default_value=u'complete') - + def check(self, d): Reference.check(self, d) if d['status'] == 'incomplete': @@ -224,10 +224,10 @@ Specifications of `content` part of the reply messages. def validate_message(msg, msg_type=None, parent=None): """validate a message - + This is a generator, and must be iterated through to actually trigger each test. - + If msg_type and/or parent are given, the msg_type and/or parent msg_id are compared with the given values. """ @@ -249,7 +249,7 @@ def validate_message(msg, msg_type=None, parent=None): def test_execute(): flush_channels() - + msg_id = KC.execute(code='x=1') reply = KC.get_shell_msg(timeout=TIMEOUT) validate_message(reply, 'execute_reply', msg_id) @@ -258,7 +258,7 @@ def test_execute(): def test_execute_silent(): flush_channels() msg_id, reply = execute(code='x=1', silent=True) - + # flush status=idle status = KC.iopub_channel.get_msg(timeout=TIMEOUT) validate_message(status, 'status', msg_id) @@ -266,14 +266,14 @@ def test_execute_silent(): nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1) count = reply['execution_count'] - + msg_id, reply = execute(code='x=2', silent=True) - + # flush status=idle status = KC.iopub_channel.get_msg(timeout=TIMEOUT) validate_message(status, 'status', msg_id) nt.assert_equal(status['content']['execution_state'], 'idle') - + nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1) count_2 = reply['execution_count'] nt.assert_equal(count_2, count) @@ -281,11 +281,11 @@ def test_execute_silent(): def test_execute_error(): flush_channels() - + msg_id, reply = execute(code='1/0') nt.assert_equal(reply['status'], 'error') nt.assert_equal(reply['ename'], 'ZeroDivisionError') - + error = KC.iopub_channel.get_msg(timeout=TIMEOUT) validate_message(error, 'error', msg_id) @@ -296,9 +296,9 @@ def test_execute_inc(): msg_id, reply = execute(code='x=1') count = reply['execution_count'] - + flush_channels() - + msg_id, reply = execute(code='x=2') count_2 = reply['execution_count'] nt.assert_equal(count_2, count+1) @@ -306,7 +306,7 @@ def test_execute_inc(): def test_execute_stop_on_error(): """execute request should not abort execution queue with stop_on_error False""" flush_channels() - + fail = '\n'.join([ # sleep to ensure subsequent message is waiting in the queue to be aborted 'import time', @@ -362,7 +362,7 @@ def test_oinfo_found(): flush_channels() msg_id, reply = execute(code='a=5') - + msg_id = KC.inspect('a') reply = KC.get_shell_msg(timeout=TIMEOUT) validate_message(reply, 'inspect_reply', msg_id) @@ -377,7 +377,7 @@ def test_oinfo_detail(): flush_channels() msg_id, reply = execute(code='ip=get_ipython()') - + msg_id = KC.inspect('ip.object_inspect', cursor_pos=10, detail_level=1) reply = KC.get_shell_msg(timeout=TIMEOUT) validate_message(reply, 'inspect_reply', msg_id) @@ -402,7 +402,7 @@ def test_complete(): flush_channels() msg_id, reply = execute(code="alpha = albert = 5") - + msg_id = KC.complete('al', 2) reply = KC.get_shell_msg(timeout=TIMEOUT) validate_message(reply, 'complete_reply', msg_id) 
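Each of these tests performs the same shell-channel round trip. A standalone sketch of that dance; it assumes jupyter_client.manager exposes start_new_kernel, which this patch's test utilities wrap:

    from jupyter_client.manager import start_new_kernel

    km, kc = start_new_kernel()
    msg_id = kc.execute('x = 1')
    reply = kc.get_shell_msg(timeout=10)
    # Replies are correlated with requests through the parent header.
    assert reply['parent_header']['msg_id'] == msg_id
    assert reply['msg_type'] == 'execute_reply'
    assert reply['content']['status'] == 'ok'
    kc.stop_channels()
    km.shutdown_kernel()
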
@@ -436,10 +436,10 @@ def test_is_complete(): def test_history_range(): flush_channels() - + msg_id_exec = KC.execute(code='x=1', store_history = True) reply_exec = KC.get_shell_msg(timeout=TIMEOUT) - + msg_id = KC.history(hist_access_type = 'range', raw = True, output = True, start = 1, stop = 2, session = 0) reply = KC.get_shell_msg(timeout=TIMEOUT) validate_message(reply, 'history_reply', msg_id) @@ -448,10 +448,10 @@ def test_history_range(): def test_history_tail(): flush_channels() - + msg_id_exec = KC.execute(code='x=1', store_history = True) reply_exec = KC.get_shell_msg(timeout=TIMEOUT) - + msg_id = KC.history(hist_access_type = 'tail', raw = True, output = True, n = 1, session = 0) reply = KC.get_shell_msg(timeout=TIMEOUT) validate_message(reply, 'history_reply', msg_id) @@ -460,10 +460,10 @@ def test_history_tail(): def test_history_search(): flush_channels() - + msg_id_exec = KC.execute(code='x=1', store_history = True) reply_exec = KC.get_shell_msg(timeout=TIMEOUT) - + msg_id = KC.history(hist_access_type = 'search', raw = True, output = True, n = 1, pattern = '*', session = 0) reply = KC.get_shell_msg(timeout=TIMEOUT) validate_message(reply, 'history_reply', msg_id) @@ -488,9 +488,8 @@ def test_display_data(): flush_channels() msg_id, reply = execute("from IPython.core.display import display; display(1)") - + display = KC.iopub_channel.get_msg(timeout=TIMEOUT) validate_message(display, 'display_data', parent=msg_id) data = display['content']['data'] nt.assert_equal(data['text/plain'], u'1') - diff --git a/IPython/kernel/tests/utils.py b/ipython_kernel/tests/utils.py similarity index 97% rename from IPython/kernel/tests/utils.py rename to ipython_kernel/tests/utils.py index 399f13e..cfa443c 100644 --- a/IPython/kernel/tests/utils.py +++ b/ipython_kernel/tests/utils.py @@ -4,6 +4,7 @@ # Distributed under the terms of the Modified BSD License. import atexit +import os from contextlib import contextmanager from subprocess import PIPE, STDOUT @@ -15,7 +16,7 @@ except ImportError: import nose import nose.tools as nt -from IPython.kernel import manager +from jupyter_client import manager #------------------------------------------------------------------------------- # Globals @@ -32,7 +33,7 @@ KC = None #------------------------------------------------------------------------------- def start_new_kernel(**kwargs): """start a new kernel, and return its Manager and Client - + Integrates with our output capturing for tests. 
""" kwargs.update(dict(stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT)) @@ -41,7 +42,7 @@ def start_new_kernel(**kwargs): def flush_channels(kc=None): """flush any messages waiting on the queue""" from .test_message_spec import validate_message - + if kc is None: kc = KC for channel in (kc.shell_channel, kc.iopub_channel): @@ -65,12 +66,12 @@ def execute(code='', kc=None, **kwargs): busy = kc.get_iopub_msg(timeout=TIMEOUT) validate_message(busy, 'status', msg_id) nt.assert_equal(busy['content']['execution_state'], 'busy') - + if not kwargs.get('silent'): execute_input = kc.get_iopub_msg(timeout=TIMEOUT) validate_message(execute_input, 'execute_input', msg_id) nt.assert_equal(execute_input['content']['code'], code) - + return msg_id, reply['content'] def start_global_kernel(): @@ -86,9 +87,9 @@ def start_global_kernel(): @contextmanager def kernel(): """Context manager for the global kernel instance - + Should be used for most kernel tests - + Returns ------- kernel_client: connected KernelClient instance @@ -116,14 +117,15 @@ def stop_global_kernel(): def new_kernel(argv=None): """Context manager for a new kernel in a subprocess - + Should only be used for tests where the kernel must not be re-used. - + Returns ------- kernel_client: connected KernelClient instance """ - kwargs = dict(stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT, + kwargs = dict( + stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT, startup_timeout=STARTUP_TIMEOUT) if argv is not None: kwargs['extra_arguments'] = argv diff --git a/IPython/kernel/zmq/__init__.py b/ipython_kernel/zmq/__init__.py similarity index 61% rename from IPython/kernel/zmq/__init__.py rename to ipython_kernel/zmq/__init__.py index f044d24..bb58d44 100644 --- a/IPython/kernel/zmq/__init__.py +++ b/ipython_kernel/zmq/__init__.py @@ -5,7 +5,7 @@ from IPython.utils.zmqrelated import check_for_zmq -check_for_zmq('13', 'IPython.kernel.zmq') - -from .session import Session +check_for_zmq('13', 'ipython_kernel.zmq') +from jupyter_client import session +Session = session.Session diff --git a/IPython/kernel/zmq/datapub.py b/ipython_kernel/zmq/datapub.py similarity index 92% rename from IPython/kernel/zmq/datapub.py rename to ipython_kernel/zmq/datapub.py index 0c4a175..40d76a1 100644 --- a/IPython/kernel/zmq/datapub.py +++ b/ipython_kernel/zmq/datapub.py @@ -13,11 +13,11 @@ #----------------------------------------------------------------------------- from IPython.config import Configurable -from IPython.kernel.inprocess.socket import SocketABC +from ipython_kernel.inprocess.socket import SocketABC from IPython.utils.jsonutil import json_clean from IPython.utils.traitlets import Instance, Dict, CBytes -from IPython.kernel.zmq.serialize import serialize_object -from IPython.kernel.zmq.session import Session, extract_header +from ipython_kernel.zmq.serialize import serialize_object +from ipython_kernel.zmq.session import Session, extract_header #----------------------------------------------------------------------------- # Code @@ -34,13 +34,13 @@ class ZMQDataPublisher(Configurable): def set_parent(self, parent): """Set the parent for outbound messages.""" self.parent_header = extract_header(parent) - + def publish_data(self, data): """publish a data_message on the IOPub channel - + Parameters ---------- - + data : dict The data to be published. Think of it as a namespace. 
""" @@ -59,12 +59,12 @@ class ZMQDataPublisher(Configurable): def publish_data(data): """publish a data_message on the IOPub channel - + Parameters ---------- - + data : dict The data to be published. Think of it as a namespace. """ - from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell + from ipython_kernel.zmq.zmqshell import ZMQInteractiveShell ZMQInteractiveShell.instance().data_pub.publish_data(data) diff --git a/IPython/kernel/zmq/displayhook.py b/ipython_kernel/zmq/displayhook.py similarity index 98% rename from IPython/kernel/zmq/displayhook.py rename to ipython_kernel/zmq/displayhook.py index 81bdec4..4013227 100644 --- a/IPython/kernel/zmq/displayhook.py +++ b/ipython_kernel/zmq/displayhook.py @@ -6,7 +6,7 @@ import sys from IPython.core.displayhook import DisplayHook -from IPython.kernel.inprocess.socket import SocketABC +from ipython_kernel.inprocess.socket import SocketABC from IPython.utils.jsonutil import encode_images from IPython.utils.py3compat import builtin_mod from IPython.utils.traitlets import Instance, Dict @@ -71,4 +71,3 @@ class ZMQShellDisplayHook(DisplayHook): if self.msg['content']['data']: self.session.send(self.pub_socket, self.msg, ident=self.topic) self.msg = None - diff --git a/IPython/kernel/zmq/embed.py b/ipython_kernel/zmq/embed.py similarity index 100% rename from IPython/kernel/zmq/embed.py rename to ipython_kernel/zmq/embed.py index cd721f9..6641cd6 100644 --- a/IPython/kernel/zmq/embed.py +++ b/ipython_kernel/zmq/embed.py @@ -16,19 +16,19 @@ from .kernelapp import IPKernelApp def embed_kernel(module=None, local_ns=None, **kwargs): """Embed and start an IPython kernel in a given scope. - + Parameters ---------- module : ModuleType, optional The module to load into IPython globals (default: caller) local_ns : dict, optional The namespace to load into IPython user namespace (default: caller) - + kwargs : various, optional Further keyword args are relayed to the IPKernelApp constructor, allowing configuration of the Kernel. Will only have an effect on the first embed_kernel call for a given process. - + """ # get the app if it exists, or set it up if it doesn't if IPKernelApp.initialized(): @@ -50,7 +50,7 @@ def embed_kernel(module=None, local_ns=None, **kwargs): module = caller_module if local_ns is None: local_ns = caller_locals - + app.kernel.user_module = module app.kernel.user_ns = local_ns app.shell.set_completer_frame() diff --git a/IPython/kernel/zmq/eventloops.py b/ipython_kernel/zmq/eventloops.py similarity index 99% rename from IPython/kernel/zmq/eventloops.py rename to ipython_kernel/zmq/eventloops.py index 309413e..11b186d 100644 --- a/IPython/kernel/zmq/eventloops.py +++ b/ipython_kernel/zmq/eventloops.py @@ -14,9 +14,9 @@ from IPython.utils import io from IPython.lib.inputhook import _use_appnope def _notify_stream_qt(kernel, stream): - + from IPython.external.qt_for_kernel import QtCore - + if _use_appnope() and kernel._darwin_app_nap: from appnope import nope_scope as context else: @@ -26,7 +26,7 @@ def _notify_stream_qt(kernel, stream): while stream.getsockopt(zmq.EVENTS) & zmq.POLLIN: with context(): kernel.do_one_iteration() - + fd = stream.getsockopt(zmq.FD) notifier = QtCore.QSocketNotifier(fd, QtCore.QSocketNotifier.Read, kernel.app) notifier.activated.connect(process_stream_events) @@ -41,22 +41,22 @@ loop_map = { def register_integration(*toolkitnames): """Decorator to register an event loop to integrate with the IPython kernel - + The decorator takes names to register the event loop as for the %gui magic. 
You can provide alternative names for the same toolkit. - + The decorated function should take a single argument, the IPython kernel instance, arrange for the event loop to call ``kernel.do_one_iteration()`` at least every ``kernel._poll_interval`` seconds, and start the event loop. - - :mod:`IPython.kernel.zmq.eventloops` provides and registers such functions + + :mod:`ipython_kernel.zmq.eventloops` provides and registers such functions for a few common event loops. """ def decorator(func): for name in toolkitnames: loop_map[name] = func return func - + return decorator @@ -68,10 +68,10 @@ def loop_qt4(kernel): kernel.app = get_app_qt4([" "]) kernel.app.setQuitOnLastWindowClosed(False) - + for s in kernel.shell_streams: _notify_stream_qt(kernel, s) - + start_event_loop_qt4(kernel.app) @register_integration('qt5') @@ -87,7 +87,7 @@ def loop_wx(kernel): import wx from IPython.lib.guisupport import start_event_loop_wx - + if _use_appnope() and kernel._darwin_app_nap: # we don't hook up App Nap contexts for Wx, # just disable it outright. @@ -197,7 +197,7 @@ def loop_cocoa(kernel): "you must use matplotlib >= 1.1.0, or a native libtk." ) return loop_tk(kernel) - + from matplotlib.backends.backend_macosx import TimerMac, show # scale interval for sec->ms diff --git a/IPython/kernel/zmq/gui/__init__.py b/ipython_kernel/zmq/gui/__init__.py similarity index 100% rename from IPython/kernel/zmq/gui/__init__.py rename to ipython_kernel/zmq/gui/__init__.py diff --git a/IPython/kernel/zmq/gui/gtk3embed.py b/ipython_kernel/zmq/gui/gtk3embed.py similarity index 100% rename from IPython/kernel/zmq/gui/gtk3embed.py rename to ipython_kernel/zmq/gui/gtk3embed.py index 8488550..f70a6f4 100644 --- a/IPython/kernel/zmq/gui/gtk3embed.py +++ b/ipython_kernel/zmq/gui/gtk3embed.py @@ -38,7 +38,7 @@ class GTKEmbed(object): def _wire_kernel(self): """Initializes the kernel inside GTK. - + This is meant to run only once at startup, so it does its job and returns False to ensure it doesn't get run again by GTK. """ @@ -46,7 +46,7 @@ class GTKEmbed(object): GObject.timeout_add(int(1000*self.kernel._poll_interval), self.iterate_kernel) return False - + def iterate_kernel(self): """Run one iteration of the kernel and return True. diff --git a/IPython/kernel/zmq/gui/gtkembed.py b/ipython_kernel/zmq/gui/gtkembed.py similarity index 100% rename from IPython/kernel/zmq/gui/gtkembed.py rename to ipython_kernel/zmq/gui/gtkembed.py index cb11301..d9dc7e6 100644 --- a/IPython/kernel/zmq/gui/gtkembed.py +++ b/ipython_kernel/zmq/gui/gtkembed.py @@ -39,7 +39,7 @@ class GTKEmbed(object): def _wire_kernel(self): """Initializes the kernel inside GTK. - + This is meant to run only once at startup, so it does its job and returns False to ensure it doesn't get run again by GTK. """ @@ -47,7 +47,7 @@ class GTKEmbed(object): gobject.timeout_add(int(1000*self.kernel._poll_interval), self.iterate_kernel) return False - + def iterate_kernel(self): """Run one iteration of the kernel and return True. 
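The register_integration contract described above (arrange for kernel.do_one_iteration() to run at least every kernel._poll_interval seconds, then start the loop) can be met by something as small as this hypothetical "toyloop" integration:

    import time

    from ipython_kernel.zmq.eventloops import register_integration

    @register_integration('toyloop')     # hypothetical toolkit name
    def loop_toy(kernel):
        """Busy loop that polls the kernel at its requested interval."""
        while True:
            kernel.do_one_iteration()
            time.sleep(kernel._poll_interval)
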
diff --git a/IPython/kernel/zmq/heartbeat.py b/ipython_kernel/zmq/heartbeat.py similarity index 100% rename from IPython/kernel/zmq/heartbeat.py rename to ipython_kernel/zmq/heartbeat.py diff --git a/IPython/kernel/zmq/iostream.py b/ipython_kernel/zmq/iostream.py similarity index 100% rename from IPython/kernel/zmq/iostream.py rename to ipython_kernel/zmq/iostream.py index fcd7f5e..d03fd59 100644 --- a/IPython/kernel/zmq/iostream.py +++ b/ipython_kernel/zmq/iostream.py @@ -52,14 +52,14 @@ class OutStream(object): self._pipe_flag = pipe if pipe: self._setup_pipe_in() - + def _setup_pipe_in(self): """setup listening pipe for subprocesses""" ctx = self.pub_socket.context - + # use UUID to authenticate pipe messages self._pipe_uuid = uuid.uuid4().bytes - + self._pipe_in = ctx.socket(zmq.PULL) self._pipe_in.linger = 0 try: @@ -81,7 +81,7 @@ class OutStream(object): lambda s, event: self.flush(), IOLoop.READ, ) - + def _setup_pipe_out(self): # must be new context after fork ctx = zmq.Context() @@ -89,13 +89,13 @@ class OutStream(object): self._pipe_out = ctx.socket(zmq.PUSH) self._pipe_out_lock = threading.Lock() self._pipe_out.connect("tcp://127.0.0.1:%i" % self._pipe_port) - + def _is_master_process(self): return os.getpid() == self._master_pid - + def _is_master_thread(self): return threading.current_thread().ident == self._master_thread - + def _have_pipe_out(self): return os.getpid() == self._pipe_pid @@ -136,10 +136,10 @@ class OutStream(object): self._start = 0 else: break - + def _schedule_flush(self): """schedule a flush in the main thread - + only works with a tornado/pyzmq eventloop running """ if IOLoop.initialized(): @@ -147,14 +147,14 @@ class OutStream(object): else: # no async loop, at least force the timer self._start = 0 - + def flush(self): """trigger actual zmq send""" if self.pub_socket is None: raise ValueError(u'I/O operation on closed file') - + mp_mode = self._check_mp_mode() - + if mp_mode != CHILD: # we are master if not self._is_master_thread(): @@ -162,15 +162,15 @@ class OutStream(object): # but at least they can schedule an async flush, or force the timer. 
self._schedule_flush() return - + self._flush_from_subprocesses() data = self._flush_buffer() - + if data: content = {u'name':self.name, u'text':data} msg = self.session.send(self.pub_socket, u'stream', content=content, parent=self.parent_header, ident=self.topic) - + if hasattr(self.pub_socket, 'flush'): # socket itself has flush (presumably ZMQStream) self.pub_socket.flush() @@ -200,7 +200,7 @@ class OutStream(object): def readline(self, size=-1): raise IOError('Read not supported on a write only stream.') - + def fileno(self): raise UnsupportedOperation("IOStream has no fileno.") @@ -211,7 +211,7 @@ class OutStream(object): # Make sure that we're handling unicode if not isinstance(string, unicode_type): string = string.decode(self.encoding, 'replace') - + is_child = (self._check_mp_mode() == CHILD) self._buffer.write(string) if is_child: @@ -243,7 +243,7 @@ class OutStream(object): self._buffer.close() self._new_buffer() return data - + def _new_buffer(self): self._buffer = StringIO() self._start = -1 diff --git a/IPython/kernel/zmq/ipkernel.py b/ipython_kernel/zmq/ipkernel.py similarity index 99% rename from IPython/kernel/zmq/ipkernel.py rename to ipython_kernel/zmq/ipkernel.py index c88744f..39cf40e 100644 --- a/IPython/kernel/zmq/ipkernel.py +++ b/ipython_kernel/zmq/ipkernel.py @@ -63,7 +63,7 @@ class IPythonKernel(KernelBase): # TMP - hack while developing self.shell._reply_content = None - self.comm_manager = CommManager(shell=self.shell, parent=self, + self.comm_manager = CommManager(shell=self.shell, parent=self, kernel=self) self.comm_manager.register_target('ipython.widget', lazy_import_handle_comm_opened) @@ -71,7 +71,7 @@ class IPythonKernel(KernelBase): comm_msg_types = [ 'comm_open', 'comm_msg', 'comm_close' ] for msg_type in comm_msg_types: self.shell_handlers[msg_type] = getattr(self.comm_manager, msg_type) - + help_links = List([ { 'text': "Python", @@ -363,6 +363,6 @@ class IPythonKernel(KernelBase): class Kernel(IPythonKernel): def __init__(self, *args, **kwargs): import warnings - warnings.warn('Kernel is a deprecated alias of IPython.kernel.zmq.ipkernel.IPythonKernel', + warnings.warn('Kernel is a deprecated alias of ipython_kernel.zmq.ipkernel.IPythonKernel', DeprecationWarning) super(Kernel, self).__init__(*args, **kwargs) diff --git a/IPython/kernel/zmq/kernelapp.py b/ipython_kernel/zmq/kernelapp.py similarity index 96% rename from IPython/kernel/zmq/kernelapp.py rename to ipython_kernel/zmq/kernelapp.py index 329e5cb..f88590d 100644 --- a/IPython/kernel/zmq/kernelapp.py +++ b/ipython_kernel/zmq/kernelapp.py @@ -28,8 +28,8 @@ from IPython.utils.traitlets import ( Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type, ) from IPython.utils.importstring import import_item -from IPython.kernel import write_connection_file -from IPython.kernel.connect import ConnectionFileMixin +from jupyter_client import write_connection_file +from ipython_kernel.connect import ConnectionFileMixin # local imports from .heartbeat import Heartbeat @@ -99,10 +99,10 @@ class IPKernelApp(BaseIPythonApplication, InteractiveShellApp, flags = Dict(kernel_flags) classes = [IPythonKernel, ZMQInteractiveShell, ProfileDir, Session] # the kernel class, as an importstring - kernel_class = Type('IPython.kernel.zmq.ipkernel.IPythonKernel', config=True, - klass='IPython.kernel.zmq.kernelbase.Kernel', + kernel_class = Type('ipython_kernel.zmq.ipkernel.IPythonKernel', config=True, + klass='ipython_kernel.zmq.kernelbase.Kernel', help="""The Kernel subclass to be used. 
- + This should allow easy re-use of the IPKernelApp entry point to configure and launch kernels other than IPython's own. """) @@ -110,23 +110,23 @@ class IPKernelApp(BaseIPythonApplication, InteractiveShellApp, poller = Any() # don't restrict this even though current pollers are all Threads heartbeat = Instance(Heartbeat, allow_none=True) ports = Dict() - + # connection info: - + @property def abs_connection_file(self): if os.path.basename(self.connection_file) == self.connection_file: return os.path.join(self.profile_dir.security_dir, self.connection_file) else: return self.connection_file - + # streams, etc. no_stdout = Bool(False, config=True, help="redirect stdout to the null device") no_stderr = Bool(False, config=True, help="redirect stderr to the null device") - outstream_class = DottedObjectName('IPython.kernel.zmq.iostream.OutStream', + outstream_class = DottedObjectName('ipython_kernel.zmq.iostream.OutStream', config=True, help="The importstring for the OutStream factory") - displayhook_class = DottedObjectName('IPython.kernel.zmq.displayhook.ZMQDisplayHook', + displayhook_class = DottedObjectName('ipython_kernel.zmq.displayhook.ZMQDisplayHook', config=True, help="The importstring for the DisplayHook factory") # polling @@ -177,7 +177,7 @@ class IPKernelApp(BaseIPythonApplication, InteractiveShellApp, write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport, shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port, iopub_port=self.iopub_port, control_port=self.control_port) - + def cleanup_connection_file(self): cf = self.abs_connection_file self.log.debug("Cleaning up connection file: %s", cf) @@ -185,9 +185,9 @@ class IPKernelApp(BaseIPythonApplication, InteractiveShellApp, os.remove(cf) except (IOError, OSError): pass - + self.cleanup_ipc_files() - + def init_connection_file(self): if not self.connection_file: self.connection_file = "kernel-%s.json"%os.getpid() @@ -203,7 +203,7 @@ class IPKernelApp(BaseIPythonApplication, InteractiveShellApp, except Exception: self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True) self.exit(1) - + def init_sockets(self): # Create a context, a session, and the kernel sockets. 
self.log.info("Starting the kernel at pid: %i", os.getpid()) @@ -230,7 +230,7 @@ class IPKernelApp(BaseIPythonApplication, InteractiveShellApp, self.control_socket.linger = 1000 self.control_port = self._bind_socket(self.control_socket, self.control_port) self.log.debug("control ROUTER Channel on port: %i" % self.control_port) - + def init_heartbeat(self): """start the heart beating""" # heartbeat doesn't share context, because it mustn't be blocked @@ -240,7 +240,7 @@ class IPKernelApp(BaseIPythonApplication, InteractiveShellApp, self.hb_port = self.heartbeat.port self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port) self.heartbeat.start() - + def log_connection_info(self): """display connection info, and store ports""" basename = os.path.basename(self.connection_file) @@ -280,7 +280,7 @@ class IPKernelApp(BaseIPythonApplication, InteractiveShellApp, sys.stdout = sys.__stdout__ = blackhole if self.no_stderr: sys.stderr = sys.__stderr__ = blackhole - + def init_io(self): """Redirect input streams and set a display hook.""" if self.outstream_class: @@ -298,7 +298,7 @@ class IPKernelApp(BaseIPythonApplication, InteractiveShellApp, """Create the Kernel object itself""" shell_stream = ZMQStream(self.shell_socket) control_stream = ZMQStream(self.control_socket) - + kernel_factory = self.kernel_class.instance kernel = kernel_factory(parent=self, session=self.session, diff --git a/IPython/kernel/zmq/kernelbase.py b/ipython_kernel/zmq/kernelbase.py similarity index 100% rename from IPython/kernel/zmq/kernelbase.py rename to ipython_kernel/zmq/kernelbase.py index 6e7c598..6e5d346 100755 --- a/IPython/kernel/zmq/kernelbase.py +++ b/ipython_kernel/zmq/kernelbase.py @@ -63,15 +63,15 @@ class Kernel(SingletonConfigurable): # This should be overridden by wrapper kernels that implement any real # language. language_info = {} - + # any links that should go in the help menu help_links = List() # Private interface - + _darwin_app_nap = Bool(True, config=True, help="""Whether to use appnope for compatiblity with OS X App Nap. - + Only affects OS X >= 10.9. """ ) @@ -126,13 +126,13 @@ class Kernel(SingletonConfigurable): self.shell_handlers = {} for msg_type in msg_types: self.shell_handlers[msg_type] = getattr(self, msg_type) - + control_msg_types = msg_types + [ 'clear_request', 'abort_request' ] self.control_handlers = {} for msg_type in control_msg_types: self.control_handlers[msg_type] = getattr(self, msg_type) - + def dispatch_control(self, msg): """dispatch control requests""" idents,msg = self.session.feed_identities(msg, copy=False) @@ -143,11 +143,11 @@ class Kernel(SingletonConfigurable): return self.log.debug("Control received: %s", msg) - + # Set the parent message for side effects. self.set_parent(idents, msg) self._publish_status(u'busy') - + header = msg['header'] msg_type = header['msg_type'] @@ -159,17 +159,17 @@ class Kernel(SingletonConfigurable): handler(self.control_stream, idents, msg) except Exception: self.log.error("Exception in control handler:", exc_info=True) - + sys.stdout.flush() sys.stderr.flush() self._publish_status(u'idle') - + def dispatch_shell(self, stream, msg): """dispatch shell requests""" # flush control requests first if self.control_stream: self.control_stream.flush() - + idents,msg = self.session.feed_identities(msg, copy=False) try: msg = self.session.deserialize(msg, content=True, copy=False) @@ -180,11 +180,11 @@ class Kernel(SingletonConfigurable): # Set the parent message for side effects. 
self.set_parent(idents, msg) self._publish_status(u'busy') - + header = msg['header'] msg_id = header['msg_id'] msg_type = msg['header']['msg_type'] - + # Print some info about this message and leave a '--->' marker, so it's # easier to trace visually the message chain when debugging. Each # handler prints its message at the end. @@ -201,7 +201,7 @@ class Kernel(SingletonConfigurable): self.session.send(stream, reply_type, metadata=md, content=status, parent=msg, ident=idents) return - + handler = self.shell_handlers.get(msg_type, None) if handler is None: self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type) @@ -215,11 +215,11 @@ class Kernel(SingletonConfigurable): self.log.error("Exception in message handler:", exc_info=True) finally: signal(SIGINT, sig) - + sys.stdout.flush() sys.stderr.flush() self._publish_status(u'idle') - + def enter_eventloop(self): """enter eventloop""" self.log.info("entering eventloop %s", self.eventloop) @@ -257,7 +257,7 @@ class Kernel(SingletonConfigurable): # publish idle status self._publish_status('starting') - + def do_one_iteration(self): """step eventloop just once""" if self.control_stream: @@ -279,7 +279,7 @@ class Kernel(SingletonConfigurable): #--------------------------------------------------------------------------- # Kernel request handlers #--------------------------------------------------------------------------- - + def _make_metadata(self, other=None): """init metadata dict, for execute/apply_reply""" new_md = { @@ -290,7 +290,7 @@ class Kernel(SingletonConfigurable): if other: new_md.update(other) return new_md - + def _publish_execute_input(self, code, parent, execution_count): """Publish the code request on the iopub stream.""" @@ -298,7 +298,7 @@ class Kernel(SingletonConfigurable): {u'code':code, u'execution_count': execution_count}, parent=parent, ident=self._topic('execute_input') ) - + def _publish_status(self, status, parent=None): """send status (busy/idle) on IOPub""" self.session.send(self.iopub_socket, @@ -307,35 +307,35 @@ class Kernel(SingletonConfigurable): parent=parent or self._parent_header, ident=self._topic('status'), ) - + def set_parent(self, ident, parent): """Set the current parent_header - + Side effects (IOPub messages) and replies are associated with the request that caused them via the parent_header. - + The parent identity is used to route input_request messages on the stdin channel. """ self._parent_ident = ident self._parent_header = parent - + def send_response(self, stream, msg_or_type, content=None, ident=None, buffers=None, track=False, header=None, metadata=None): """Send a response to the message we're currently processing. - - This accepts all the parameters of :meth:`IPython.kernel.zmq.session.Session.send` + + This accepts all the parameters of :meth:`ipython_kernel.zmq.session.Session.send` except ``parent``. - + This relies on :meth:`set_parent` having been called for the current message. 
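send_response is the main tool for wrapper kernels built on this base class. A sketch of a do_execute in an assumed minimal subclass (not code from this patch) that echoes its input on the IOPub stream:

    # Inside a hypothetical subclass of the Kernel base class above:
    def do_execute(self, code, silent, store_history=True,
                   user_expressions=None, allow_stdin=False):
        if not silent:
            # The parent header was set by dispatch_shell() via set_parent(),
            # so this stream output is routed to the requesting frontend.
            self.send_response(self.iopub_socket, 'stream',
                               {'name': 'stdout', 'text': 'echo: %s\n' % code})
        return {'status': 'ok', 'execution_count': self.execution_count,
                'payload': [], 'user_expressions': {}}
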
""" return self.session.send(stream, msg_or_type, content, self._parent_header, ident, buffers, track, header, metadata) - + def execute_request(self, stream, ident, parent): """handle an execute_request""" - + try: content = parent[u'content'] code = py3compat.cast_unicode_py2(content[u'code']) @@ -347,17 +347,17 @@ class Kernel(SingletonConfigurable): self.log.error("Got bad msg: ") self.log.error("%s", parent) return - + stop_on_error = content.get('stop_on_error', True) md = self._make_metadata(parent['metadata']) - + # Re-broadcast our input for the benefit of listening clients, and # start computing output if not silent: self.execution_count += 1 self._publish_execute_input(code, parent, self.execution_count) - + reply_content = self.do_execute(code, silent, store_history, user_expressions, allow_stdin) @@ -372,7 +372,7 @@ class Kernel(SingletonConfigurable): # Send the reply. reply_content = json_clean(reply_content) - + md['status'] = reply_content['status'] if reply_content['status'] == 'error' and \ reply_content['ename'] == 'UnmetDependency': @@ -381,7 +381,7 @@ class Kernel(SingletonConfigurable): reply_msg = self.session.send(stream, u'execute_reply', reply_content, parent, metadata=md, ident=ident) - + self.log.debug("%s", reply_msg) if not silent and reply_msg['content']['status'] == u'error' and stop_on_error: @@ -397,7 +397,7 @@ class Kernel(SingletonConfigurable): content = parent['content'] code = content['code'] cursor_pos = content['cursor_pos'] - + matches = self.do_complete(code, cursor_pos) matches = json_clean(matches) completion_msg = self.session.send(stream, 'complete_reply', @@ -415,7 +415,7 @@ class Kernel(SingletonConfigurable): def inspect_request(self, stream, ident, parent): content = parent['content'] - + reply_content = self.do_inspect(content['code'], content['cursor_pos'], content.get('detail_level', 0)) # Before we send this object over, we scrub it for JSON usage @@ -488,11 +488,11 @@ class Kernel(SingletonConfigurable): kernel. """ return {'status': 'ok', 'restart': restart} - + def is_complete_request(self, stream, ident, parent): content = parent['content'] code = content['code'] - + reply_content = self.do_is_complete(code) reply_content = json_clean(reply_content) reply_msg = self.session.send(stream, 'is_complete_reply', @@ -528,7 +528,7 @@ class Kernel(SingletonConfigurable): # flush i/o sys.stdout.flush() sys.stderr.flush() - + self.session.send(stream, u'apply_reply', reply_content, parent=parent, ident=ident,buffers=result_buf, metadata=md) @@ -564,7 +564,7 @@ class Kernel(SingletonConfigurable): def do_clear(self): """Override in subclasses to clear the namespace - + This is only required for IPython.parallel. """ raise NotImplementedError @@ -579,9 +579,9 @@ class Kernel(SingletonConfigurable): base = "engine.%i" % self.int_id else: base = "kernel.%s" % self.ident - + return py3compat.cast_bytes("%s.%s" % (base, topic)) - + def _abort_queues(self): for stream in self.shell_streams: if stream: @@ -615,11 +615,11 @@ class Kernel(SingletonConfigurable): """Raise StdinNotImplentedError if active frontend doesn't support stdin.""" raise StdinNotImplementedError("raw_input was called, but this " - "frontend does not support stdin.") - + "frontend does not support stdin.") + def getpass(self, prompt=''): """Forward getpass to frontends - + Raises ------ StdinNotImplentedError if active frontend doesn't support stdin. 
@@ -633,10 +633,10 @@ class Kernel(SingletonConfigurable): self._parent_header, password=True, ) - + def raw_input(self, prompt=''): """Forward raw_input to frontends - + Raises ------ StdinNotImplentedError if active frontend doesn't support stdin. @@ -650,7 +650,7 @@ class Kernel(SingletonConfigurable): self._parent_header, password=False, ) - + def _input_request(self, prompt, ident, parent, password=False): # Flush output before making the request. sys.stderr.flush() @@ -664,7 +664,7 @@ class Kernel(SingletonConfigurable): break else: raise - + # Send the input request. content = json_clean(dict(prompt=prompt, password=password)) self.session.send(self.stdin_socket, u'input_request', content, parent, diff --git a/IPython/kernel/zmq/log.py b/ipython_kernel/zmq/log.py similarity index 100% rename from IPython/kernel/zmq/log.py rename to ipython_kernel/zmq/log.py index 0c72e2c..b84dde2 100644 --- a/IPython/kernel/zmq/log.py +++ b/ipython_kernel/zmq/log.py @@ -5,11 +5,11 @@ from zmq.log.handlers import PUBHandler class EnginePUBHandler(PUBHandler): """A simple PUBHandler subclass that sets root_topic""" engine=None - + def __init__(self, engine, *args, **kwargs): PUBHandler.__init__(self,*args, **kwargs) self.engine = engine - + @property def root_topic(self): """this is a property, in case the handler is created @@ -18,4 +18,3 @@ class EnginePUBHandler(PUBHandler): return "engine.%i"%self.engine.id else: return "engine" - diff --git a/IPython/kernel/zmq/parentpoller.py b/ipython_kernel/zmq/parentpoller.py similarity index 100% rename from IPython/kernel/zmq/parentpoller.py rename to ipython_kernel/zmq/parentpoller.py diff --git a/IPython/kernel/zmq/pylab/__init__.py b/ipython_kernel/zmq/pylab/__init__.py similarity index 100% rename from IPython/kernel/zmq/pylab/__init__.py rename to ipython_kernel/zmq/pylab/__init__.py diff --git a/IPython/kernel/zmq/pylab/backend_inline.py b/ipython_kernel/zmq/pylab/backend_inline.py similarity index 100% rename from IPython/kernel/zmq/pylab/backend_inline.py rename to ipython_kernel/zmq/pylab/backend_inline.py index b45af5f..fc31daf 100644 --- a/IPython/kernel/zmq/pylab/backend_inline.py +++ b/ipython_kernel/zmq/pylab/backend_inline.py @@ -65,7 +65,7 @@ def draw_if_interactive(): # For further reference: # https://github.com/ipython/ipython/issues/1612 # https://github.com/matplotlib/matplotlib/issues/835 - + if not hasattr(fig, 'show'): # Queue up `fig` for display fig.show = lambda *a: display(fig) @@ -94,7 +94,7 @@ def flush_figures(): This is meant to be called automatically and will call show() if, during prior code execution, there had been any calls to draw_if_interactive. - + This function is meant to be used as a post_execute callback in IPython, so user-caused errors are handled with showtraceback() instead of being allowed to raise. If this function is not called from within IPython, @@ -102,7 +102,7 @@ def flush_figures(): """ if not show._draw_called: return - + if InlineBackend.instance().close_figures: # ignore the tracking, just draw and close all figures try: @@ -139,4 +139,3 @@ def flush_figures(): # figurecanvas. 
This is set here to a Agg canvas # See https://github.com/matplotlib/matplotlib/pull/1125 FigureCanvas = FigureCanvasAgg - diff --git a/IPython/kernel/zmq/pylab/config.py b/ipython_kernel/zmq/pylab/config.py similarity index 99% rename from IPython/kernel/zmq/pylab/config.py rename to ipython_kernel/zmq/pylab/config.py index 455ac3e..1eaa08d 100644 --- a/IPython/kernel/zmq/pylab/config.py +++ b/ipython_kernel/zmq/pylab/config.py @@ -68,14 +68,14 @@ class InlineBackend(InlineBackendConfig): ) figure_formats = Set({'png'}, config=True, - help="""A set of figure formats to enable: 'png', + help="""A set of figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'.""") def _update_figure_formatters(self): if self.shell is not None: from IPython.core.pylabtools import select_figure_formats select_figure_formats(self.shell, self.figure_formats, **self.print_figure_kwargs) - + def _figure_formats_changed(self, name, old, new): if 'jpg' in new or 'jpeg' in new: if not pil_available(): @@ -91,20 +91,20 @@ class InlineBackend(InlineBackendConfig): print_figure_kwargs = Dict({'bbox_inches' : 'tight'}, config=True, help="""Extra kwargs to be passed to fig.canvas.print_figure. - + Logical examples include: bbox_inches, quality (for jpeg figures), etc. """ ) _print_figure_kwargs_changed = _update_figure_formatters - + close_figures = Bool(True, config=True, help="""Close all figures at the end of each cell. - + When True, ensures that each cell starts with no active figures, but it also means that one must keep track of references in order to edit or redraw figures in subsequent cells. This mode is ideal for the notebook, where residual plots from other cells might be surprising. - + When False, one must call figure() to create new figures. This means that gcf() and getfigs() can reference figures created in other cells, and the active figure can continue to be edited with pylab/pyplot @@ -117,4 +117,3 @@ class InlineBackend(InlineBackendConfig): shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) - diff --git a/IPython/kernel/zmq/serialize.py b/ipython_kernel/zmq/serialize.py similarity index 99% rename from IPython/kernel/zmq/serialize.py rename to ipython_kernel/zmq/serialize.py index 678e230..1180352 100644 --- a/IPython/kernel/zmq/serialize.py +++ b/ipython_kernel/zmq/serialize.py @@ -17,7 +17,7 @@ from IPython.utils.pickleutil import ( can, uncan, can_sequence, uncan_sequence, CannedObject, istype, sequence_types, PICKLE_PROTOCOL, ) -from .session import MAX_ITEMS, MAX_BYTES +from jupyter_client.session import MAX_ITEMS, MAX_BYTES if PY3: @@ -52,10 +52,10 @@ def _restore_buffers(obj, buffers): def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS): """Serialize an object into a list of sendable buffers. - + Parameters ---------- - + obj : object The object to be serialized buffer_threshold : int @@ -65,7 +65,7 @@ def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS): The maximum number of items over which canning will iterate. Containers (lists, dicts) larger than this will be pickled without introspection. - + Returns ------- [bufs] : list of buffers representing the serialized object. @@ -90,17 +90,17 @@ def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS): def deserialize_object(buffers, g=None): """reconstruct an object serialized by serialize_object from data buffers. 
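A round-trip sketch of the two helpers in this module, mirroring what the test suite's roundtrip() helper does further down in this patch:

    from ipython_kernel.zmq.serialize import serialize_object, deserialize_object

    obj = {'a': 1, 'b': list(range(10))}
    bufs = serialize_object(obj)              # list of sendable buffers
    copy, leftover = deserialize_object(bufs)
    assert copy == obj
    assert leftover == []                     # every buffer was consumed
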
- + Parameters ---------- - + bufs : list of buffers/bytes - + g : globals to be used when uncanning - + Returns ------- - + (newobj, bufs) : unpacked object, and the list of remaining unused buffers. """ bufs = list(buffers) @@ -119,37 +119,37 @@ def deserialize_object(buffers, g=None): else: _restore_buffers(canned, bufs) newobj = uncan(canned, g) - + return newobj, bufs def pack_apply_message(f, args, kwargs, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS): """pack up a function, args, and kwargs to be sent over the wire - + Each element of args/kwargs will be canned for special treatment, but inspection will not go any deeper than that. - + Any object whose data is larger than `threshold` will not have their data copied (only numpy arrays and bytes/buffers support zero-copy) - + Message will be a list of bytes/buffers of the format: - + [ cf, pinfo, , ] - + With length at least two + len(args) + len(kwargs) """ - + arg_bufs = flatten(serialize_object(arg, buffer_threshold, item_threshold) for arg in args) - + kw_keys = sorted(kwargs.keys()) kwarg_bufs = flatten(serialize_object(kwargs[key], buffer_threshold, item_threshold) for key in kw_keys) - + info = dict(nargs=len(args), narg_bufs=len(arg_bufs), kw_keys=kw_keys) - + msg = [pickle.dumps(can(f), PICKLE_PROTOCOL)] msg.append(pickle.dumps(info, PICKLE_PROTOCOL)) msg.extend(arg_bufs) msg.extend(kwarg_bufs) - + return msg def unpack_apply_message(bufs, g=None, copy=True): @@ -162,19 +162,18 @@ def unpack_apply_message(bufs, g=None, copy=True): pinfo = buffer_to_bytes_py2(bufs.pop(0)) info = pickle.loads(pinfo) arg_bufs, kwarg_bufs = bufs[:info['narg_bufs']], bufs[info['narg_bufs']:] - + args = [] for i in range(info['nargs']): arg, arg_bufs = deserialize_object(arg_bufs, g) args.append(arg) args = tuple(args) assert not arg_bufs, "Shouldn't be any arg bufs left over" - + kwargs = {} for key in info['kw_keys']: kwarg, kwarg_bufs = deserialize_object(kwarg_bufs, g) kwargs[key] = kwarg assert not kwarg_bufs, "Shouldn't be any kwarg bufs left over" - - return f,args,kwargs + return f,args,kwargs diff --git a/ipython_kernel/zmq/session.py b/ipython_kernel/zmq/session.py new file mode 100644 index 0000000..6c9936d --- /dev/null +++ b/ipython_kernel/zmq/session.py @@ -0,0 +1 @@ +from jupyter_client.session import * diff --git a/IPython/kernel/zmq/tests/__init__.py b/ipython_kernel/zmq/tests/__init__.py similarity index 100% rename from IPython/kernel/zmq/tests/__init__.py rename to ipython_kernel/zmq/tests/__init__.py diff --git a/IPython/kernel/zmq/tests/test_embed_kernel.py b/ipython_kernel/zmq/tests/test_embed_kernel.py similarity index 99% rename from IPython/kernel/zmq/tests/test_embed_kernel.py rename to ipython_kernel/zmq/tests/test_embed_kernel.py index 977c22c..ead1bc8 100644 --- a/IPython/kernel/zmq/tests/test_embed_kernel.py +++ b/ipython_kernel/zmq/tests/test_embed_kernel.py @@ -22,7 +22,7 @@ from subprocess import Popen, PIPE import nose.tools as nt -from IPython.kernel import BlockingKernelClient +from jupyter_client import BlockingKernelClient from IPython.utils import path, py3compat from IPython.utils.py3compat import unicode_type @@ -38,7 +38,7 @@ def setup(): global IPYTHONDIR global env global save_get_ipython_dir - + IPYTHONDIR = tempfile.mkdtemp() env = os.environ.copy() @@ -50,7 +50,7 @@ def setup(): def teardown(): path.get_ipython_dir = save_get_ipython_dir - + try: shutil.rmtree(IPYTHONDIR) except (OSError, IOError): @@ -61,7 +61,7 @@ def teardown(): @contextmanager def setup_kernel(cmd): """start an embedded 
diff --git a/ipython_kernel/zmq/session.py b/ipython_kernel/zmq/session.py
new file mode 100644
index 0000000..6c9936d
--- /dev/null
+++ b/ipython_kernel/zmq/session.py
@@ -0,0 +1 @@
+from jupyter_client.session import *
diff --git a/IPython/kernel/zmq/tests/__init__.py b/ipython_kernel/zmq/tests/__init__.py
similarity index 100%
rename from IPython/kernel/zmq/tests/__init__.py
rename to ipython_kernel/zmq/tests/__init__.py
diff --git a/IPython/kernel/zmq/tests/test_embed_kernel.py b/ipython_kernel/zmq/tests/test_embed_kernel.py
similarity index 99%
rename from IPython/kernel/zmq/tests/test_embed_kernel.py
rename to ipython_kernel/zmq/tests/test_embed_kernel.py
index 977c22c..ead1bc8 100644
--- a/IPython/kernel/zmq/tests/test_embed_kernel.py
+++ b/ipython_kernel/zmq/tests/test_embed_kernel.py
@@ -22,7 +22,7 @@ from subprocess import Popen, PIPE
 
 import nose.tools as nt
 
-from IPython.kernel import BlockingKernelClient
+from jupyter_client import BlockingKernelClient
 from IPython.utils import path, py3compat
 from IPython.utils.py3compat import unicode_type
 
@@ -38,7 +38,7 @@ def setup():
     global IPYTHONDIR
     global env
     global save_get_ipython_dir
-    
+
     IPYTHONDIR = tempfile.mkdtemp()
 
     env = os.environ.copy()
@@ -50,7 +50,7 @@ def setup():
 
 def teardown():
     path.get_ipython_dir = save_get_ipython_dir
-    
+
     try:
         shutil.rmtree(IPYTHONDIR)
     except (OSError, IOError):
@@ -61,7 +61,7 @@ def teardown():
 @contextmanager
 def setup_kernel(cmd):
     """start an embedded kernel in a subprocess, and wait for it to be ready
-    
+
     Returns
     -------
    kernel_manager: connected KernelManager instance
@@ -78,22 +78,22 @@ def setup_kernel(cmd):
            and kernel.poll() is None \
            and time.time() < tic + SETUP_TIMEOUT:
         time.sleep(0.1)
-    
+
     if kernel.poll() is not None:
         o,e = kernel.communicate()
         e = py3compat.cast_unicode(e)
         raise IOError("Kernel failed to start:\n%s" % e)
-    
+
     if not os.path.exists(connection_file):
         if kernel.poll() is None:
             kernel.terminate()
         raise IOError("Connection file %r never arrived" % connection_file)
-    
+
     client = BlockingKernelClient(connection_file=connection_file)
     client.load_connection_file()
     client.start_channels()
     client.wait_for_ready()
-    
+
     try:
         yield client
     finally:
@@ -111,14 +111,14 @@ def test_embed_kernel_basic():
         'go()',
         '',
     ])
-    
+
     with setup_kernel(cmd) as client:
         # oinfo a (int)
         msg_id = client.inspect('a')
         msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
         content = msg['content']
         nt.assert_true(content['found'])
-    
+
         msg_id = client.execute("c=a*2")
         msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
         content = msg['content']
@@ -143,7 +143,7 @@ def test_embed_kernel_namespace():
         'go()',
         '',
     ])
-    
+
     with setup_kernel(cmd) as client:
         # oinfo a (int)
         msg_id = client.inspect('a')
@@ -181,7 +181,7 @@ def test_embed_kernel_reentrant():
         '    go()',
         '',
     ])
-    
+
     with setup_kernel(cmd) as client:
         for i in range(5):
             msg_id = client.inspect('count')
@@ -190,10 +190,8 @@ def test_embed_kernel_reentrant():
             nt.assert_true(content['found'])
             text = content['data']['text/plain']
             nt.assert_in(unicode_type(i), text)
-    
+
             # exit from embed_kernel
             client.execute("get_ipython().exit_now = True")
             msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
             time.sleep(0.2)
-
-
diff --git a/IPython/kernel/zmq/tests/test_serialize.py b/ipython_kernel/zmq/tests/test_serialize.py
similarity index 99%
rename from IPython/kernel/zmq/tests/test_serialize.py
rename to ipython_kernel/zmq/tests/test_serialize.py
index 2ecb302..616b24b 100644
--- a/IPython/kernel/zmq/tests/test_serialize.py
+++ b/ipython_kernel/zmq/tests/test_serialize.py
@@ -9,7 +9,7 @@ from collections import namedtuple
 
 import nose.tools as nt
 # from unittest import TestCaes
-from IPython.kernel.zmq.serialize import serialize_object, deserialize_object
+from ipython_kernel.zmq.serialize import serialize_object, deserialize_object
 from IPython.testing import decorators as dec
 from IPython.utils.pickleutil import CannedArray, CannedClass
 from IPython.utils.py3compat import iteritems
@@ -28,7 +28,7 @@ def roundtrip(obj):
 
 class C(object):
     """dummy class for """
-    
+
     def __init__(self, **kwargs):
         for key,value in iteritems(kwargs):
             setattr(self, key, value)
@@ -98,7 +98,7 @@ def test_recarray():
                   [('n', int), ('s', '|S1'), ('u', 'uint32')],
         ]:
         A = new_array(shape, dtype=dtype)
-        
+
         bufs = serialize_object(A)
         B, r = deserialize_object(bufs)
         nt.assert_equal(r, [])
@@ -155,7 +155,7 @@ def test_class_oldstyle():
     @interactive
     class C:
         a=5
-    
+
     bufs = serialize_object(dict(C=C))
     canned = pickle.loads(bufs[0])
     nt.assert_is_instance(canned['C'], CannedClass)
@@ -198,7 +198,7 @@ def test_class_inheritance():
     @interactive
     class D(C):
         b=10
-    
+
     bufs = serialize_object(dict(D=D))
     canned = pickle.loads(bufs[0])
     nt.assert_is_instance(canned['D'], CannedClass)
diff --git a/IPython/kernel/zmq/tests/test_start_kernel.py b/ipython_kernel/zmq/tests/test_start_kernel.py
similarity index 100%
rename from IPython/kernel/zmq/tests/test_start_kernel.py
rename to ipython_kernel/zmq/tests/test_start_kernel.py
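The tests above now exercise the split packages end to end: jupyter_client supplies BlockingKernelClient while the kernel side lives in ipython_kernel. The handshake setup_kernel() relies on, condensed into a sketch (the connection-file path is hypothetical):

    # Hedged sketch of the client-side handshake, assuming a kernel is already
    # running and has written the given connection file.
    from jupyter_client import BlockingKernelClient

    client = BlockingKernelClient(connection_file='kernel-1234.json')  # hypothetical path
    client.load_connection_file()
    client.start_channels()
    client.wait_for_ready()          # blocks until the kernel answers on the shell channel
    client.execute("c = a * 2")
    reply = client.get_shell_msg(block=True, timeout=15)
    assert reply['content']['status'] == 'ok'
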
diff --git a/IPython/kernel/zmq/zmqshell.py b/ipython_kernel/zmq/zmqshell.py
similarity index 96%
rename from IPython/kernel/zmq/zmqshell.py
rename to ipython_kernel/zmq/zmqshell.py
index c933f07..04b5b44 100644
--- a/IPython/kernel/zmq/zmqshell.py
+++ b/ipython_kernel/zmq/zmqshell.py
@@ -34,8 +34,8 @@ from IPython.core.magic import magics_class, line_magic, Magics
 from IPython.core import payloadpage
 from IPython.core.usage import default_gui_banner
 from IPython.display import display, Javascript
-from IPython.kernel.inprocess.socket import SocketABC
-from IPython.kernel import (
+from ipython_kernel.inprocess.socket import SocketABC
+from ipython_kernel import (
     get_connection_file, get_connection_info, connect_qtconsole
 )
 from IPython.testing.skipdoctest import skip_doctest
@@ -46,9 +46,9 @@ from IPython.utils import py3compat
 from IPython.utils.py3compat import unicode_type
 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes, Any
 from IPython.utils.warn import error
-from IPython.kernel.zmq.displayhook import ZMQShellDisplayHook
-from IPython.kernel.zmq.datapub import ZMQDataPublisher
-from IPython.kernel.zmq.session import extract_header
+from ipython_kernel.zmq.displayhook import ZMQShellDisplayHook
+from ipython_kernel.zmq.datapub import ZMQDataPublisher
+from ipython_kernel.zmq.session import extract_header
 from .session import Session
 
 #-----------------------------------------------------------------------------
@@ -66,7 +66,7 @@ class ZMQDisplayPublisher(DisplayPublisher):
     def set_parent(self, parent):
         """Set the parent for outbound messages."""
         self.parent_header = extract_header(parent)
-        
+
     def _flush_streams(self):
         """flush IO Streams prior to display"""
         sys.stdout.flush()
@@ -102,7 +102,7 @@ class KernelMagics(Magics):
     # moved into a separate machinery as well.  For now, at least isolate here
     # the magics which this class needs to implement differently from the base
     # class, or that are unique to it.
-    
+
     _find_edit_target = CodeMagics._find_edit_target
 
     @skip_doctest
@@ -248,19 +248,19 @@ class KernelMagics(Magics):
     @line_magic
     def connect_info(self, arg_s):
         """Print information for connecting other clients to this kernel
-        
+
         It will print the contents of this session's connection file, as well
         as shortcuts for local clients.
-        
+
         In the simplest case, when called from the most recently launched
         kernel, secondary clients can be connected, simply with:
-        
+
         $> ipython <app> --existing
-        
+
         """
-        
+
         from IPython.core.application import BaseIPythonApplication as BaseIPApp
-        
+
         if BaseIPApp.initialized():
             app = BaseIPApp.instance()
             security_dir = app.profile_dir.security_dir
@@ -268,22 +268,22 @@ class KernelMagics(Magics):
         else:
             profile = 'default'
             security_dir = ''
-        
+
         try:
             connection_file = get_connection_file()
             info = get_connection_info(unpack=False)
         except Exception as e:
             error("Could not get connection info: %r" % e)
             return
-        
+
         # add profile flag for non-default profile
         profile_flag = "--profile %s" % profile if profile != 'default' else ""
-        
+
         # if it's in the security dir, truncate to basename
         if security_dir == os.path.dirname(connection_file):
             connection_file = os.path.basename(connection_file)
-        
-        
+
+
         print (info + '\n')
         print ("Paste the above JSON into a file, and connect with:\n"
             "    $> ipython <app> --existing <file>\n"
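%connect_info above reduces to two helpers that this patch now imports from ipython_kernel; they can also be called directly, though only inside a running kernel (otherwise get_connection_file raises). A sketch:

    # Hedged sketch of what %connect_info prints, using the helpers imported above.
    from ipython_kernel import get_connection_file, get_connection_info

    print(get_connection_file())               # path to this kernel's connection file
    print(get_connection_info(unpack=False))   # the JSON blob clients paste into a file
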
""" - + # %qtconsole should imply bind_kernel for engines: try: from IPython.parallel import bind_kernel @@ -312,29 +312,29 @@ class KernelMagics(Magics): pass else: bind_kernel() - + try: p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix')) except Exception as e: error("Could not start qtconsole: %r" % e) return - + @line_magic def autosave(self, arg_s): """Set the autosave interval in the notebook (in seconds). - + The default value is 120, or two minutes. ``%autosave 0`` will disable autosave. - + This magic only has an effect when called from the notebook interface. It has no effect when called in a startup file. """ - + try: interval = int(arg_s) except ValueError: raise UsageError("%%autosave requires an integer, got %r" % arg_s) - + # javascript wants milliseconds milliseconds = 1000 * interval display(Javascript("IPython.notebook.set_autosave_interval(%i)" % milliseconds), @@ -354,7 +354,7 @@ class ZMQInteractiveShell(InteractiveShell): data_pub_class = Type(ZMQDataPublisher) kernel = Any() parent_header = Any() - + def _banner1_default(self): return default_gui_banner @@ -370,7 +370,7 @@ class ZMQInteractiveShell(InteractiveShell): exiter = Instance(ZMQExitAutocall) def _exiter_default(self): return ZMQExitAutocall(self) - + def _exit_now_changed(self, name, old, new): """stop eventloop when exit_now fires""" if new: @@ -400,11 +400,11 @@ class ZMQInteractiveShell(InteractiveShell): # subprocesses as much as possible. env['PAGER'] = 'cat' env['GIT_PAGER'] = 'cat' - + def init_hooks(self): super(ZMQInteractiveShell, self).init_hooks() self.set_hook('show_in_pager', page.as_hook(payloadpage.page), 99) - + def ask_exit(self): """Engage the exit actions.""" self.exit_now = (not self.keepkernel_on_exit) @@ -431,7 +431,7 @@ class ZMQInteractiveShell(InteractiveShell): topic = None if dh.topic: topic = dh.topic.replace(b'execute_result', b'error') - + exc_msg = dh.session.send(dh.pub_socket, u'error', json_clean(exc_content), dh.parent_header, ident=topic) # FIXME - Hack: store exception info in shell object. Right now, the @@ -454,7 +454,7 @@ class ZMQInteractiveShell(InteractiveShell): replace=replace, ) self.payload_manager.write_payload(payload) - + def set_parent(self, parent): """Set the parent header for associating output with its triggering input""" self.parent_header = parent @@ -469,10 +469,10 @@ class ZMQInteractiveShell(InteractiveShell): sys.stderr.set_parent(parent) except AttributeError: pass - + def get_parent(self): return self.parent_header - + #------------------------------------------------------------------------- # Things related to magics #-------------------------------------------------------------------------