##// END OF EJS Templates
bigsplit: ipython_kernel
Min RK -
Show More
@@ -0,0 +1,1 b''
1 from jupyter_client.adapter import *
@@ -0,0 +1,1 b''
1 from jupyter_client.channels import *
@@ -0,0 +1,1 b''
1 from jupyter_client.channelsabc import *
@@ -0,0 +1,1 b''
1 from jupyter_client.client import *
@@ -0,0 +1,1 b''
1 from jupyter_client.clientabc import *
@@ -0,0 +1,2 b''
1 from ipython_kernel.connect import *
2 from jupyter_client.connect import *
@@ -0,0 +1,1 b''
1 from jupyter_client.kernelspec import *
@@ -0,0 +1,1 b''
1 from jupyter_client.kernelspecapp import *
@@ -0,0 +1,1 b''
1 from jupyter_client.launcher import *
@@ -0,0 +1,1 b''
1 from jupyter_client.manager import *
@@ -0,0 +1,1 b''
1 from jupyter_client.managerabc import *
@@ -0,0 +1,1 b''
1 from jupyter_client.multikernelmanager import *
@@ -0,0 +1,1 b''
1 from jupyter_client.restarter import *
This diff has been collapsed as it changes many lines, (883 lines changed) Show them Hide them
@@ -0,0 +1,883 b''
1 """Session object for building, serializing, sending, and receiving messages in
2 IPython. The Session object supports serialization, HMAC signatures, and
3 metadata on messages.
4
5 Also defined here are utilities for working with Sessions:
6 * A SessionFactory to be used as a base class for configurables that work with
7 Sessions.
8 * A Message object for convenience that allows attribute-access to the msg dict.
9 """
10
11 # Copyright (c) IPython Development Team.
12 # Distributed under the terms of the Modified BSD License.
13
14 import hashlib
15 import hmac
16 import logging
17 import os
18 import pprint
19 import random
20 import uuid
21 import warnings
22 from datetime import datetime
23
24 try:
25 import cPickle
26 pickle = cPickle
27 except:
28 cPickle = None
29 import pickle
30
31 try:
32 # We are using compare_digest to limit the surface of timing attacks
33 from hmac import compare_digest
34 except ImportError:
35 # Python < 2.7.7: When digests don't match no feedback is provided,
36 # limiting the surface of attack
37 def compare_digest(a,b): return a == b
38
39 import zmq
40 from zmq.utils import jsonapi
41 from zmq.eventloop.ioloop import IOLoop
42 from zmq.eventloop.zmqstream import ZMQStream
43
44 from IPython.core.release import kernel_protocol_version
45 from IPython.config.configurable import Configurable, LoggingConfigurable
46 from IPython.utils import io
47 from IPython.utils.importstring import import_item
48 from IPython.utils.jsonutil import extract_dates, squash_dates, date_default
49 from IPython.utils.py3compat import (str_to_bytes, str_to_unicode, unicode_type,
50 iteritems)
51 from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,
52 DottedObjectName, CUnicode, Dict, Integer,
53 TraitError,
54 )
55 from IPython.utils.pickleutil import PICKLE_PROTOCOL
56 from jupyter_client.adapter import adapt
57
58 #-----------------------------------------------------------------------------
59 # utility functions
60 #-----------------------------------------------------------------------------
61
def squash_unicode(obj):
    """Coerce unicode back to bytestrings.

    Recursively walks dicts and lists in place: unicode dict keys,
    dict values, and list items are replaced with their utf8-encoded
    bytes equivalents.  Returns the (possibly replaced) object.
    """
    if isinstance(obj, dict):
        # Iterate over a snapshot of the keys: the loop body may pop a
        # unicode key and insert its bytes form, and mutating a dict
        # while iterating its live keys() view raises RuntimeError on
        # Python 3.
        for key in list(obj.keys()):
            obj[key] = squash_unicode(obj[key])
            if isinstance(key, unicode_type):
                obj[squash_unicode(key)] = obj.pop(key)
    elif isinstance(obj, list):
        for i, v in enumerate(obj):
            obj[i] = squash_unicode(v)
    elif isinstance(obj, unicode_type):
        obj = obj.encode('utf8')
    return obj
75
76 #-----------------------------------------------------------------------------
77 # globals and defaults
78 #-----------------------------------------------------------------------------
79
80 # default values for the thresholds:
81 MAX_ITEMS = 64
82 MAX_BYTES = 1024
83
84 # ISO8601-ify datetime objects
85 # allow unicode
86 # disallow nan, because it's not actually valid JSON
87 json_packer = lambda obj: jsonapi.dumps(obj, default=date_default,
88 ensure_ascii=False, allow_nan=False,
89 )
90 json_unpacker = lambda s: jsonapi.loads(s)
91
92 pickle_packer = lambda o: pickle.dumps(squash_dates(o), PICKLE_PROTOCOL)
93 pickle_unpacker = pickle.loads
94
95 default_packer = json_packer
96 default_unpacker = json_unpacker
97
98 DELIM = b"<IDS|MSG>"
99 # singleton dummy tracker, which will always report as done
100 DONE = zmq.MessageTracker()
101
102 #-----------------------------------------------------------------------------
103 # Mixin tools for apps that use Sessions
104 #-----------------------------------------------------------------------------
105
106 session_aliases = dict(
107 ident = 'Session.session',
108 user = 'Session.username',
109 keyfile = 'Session.keyfile',
110 )
111
112 session_flags = {
113 'secure' : ({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())),
114 'keyfile' : '' }},
115 """Use HMAC digests for authentication of messages.
116 Setting this flag will generate a new UUID to use as the HMAC key.
117 """),
118 'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }},
119 """Don't authenticate messages."""),
120 }
121
def default_secure(cfg):
    """Set the default behavior for a config environment to be secure.

    Deprecated.  If neither Session.key nor Session.keyfile has been
    configured, assign a fresh random UUID as Session.key.
    """
    warnings.warn("default_secure is deprecated", DeprecationWarning)
    if 'Session' in cfg:
        session_cfg = cfg.Session
        if 'key' in session_cfg or 'keyfile' in session_cfg:
            # an explicit key/keyfile was configured; leave it untouched
            return
    # no key configured: generate a new random one
    cfg.Session.key = str_to_bytes(str(uuid.uuid4()))
134
135
136 #-----------------------------------------------------------------------------
137 # Classes
138 #-----------------------------------------------------------------------------
139
class SessionFactory(LoggingConfigurable):
    """The Base class for configurables that have a Session, Context, logger,
    and IOLoop.
    """

    # name of the logger this factory binds to
    logname = Unicode('')
    def _logname_changed(self, name, old, new):
        # re-bind self.log whenever the configured logger name changes
        self.log = logging.getLogger(new)

    # not configurable:
    context = Instance('zmq.Context')
    def _context_default(self):
        # share the process-global zmq Context by default
        return zmq.Context.instance()

    session = Instance('jupyter_client.session.Session')

    loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
    def _loop_default(self):
        # default to the singleton IOLoop instance
        return IOLoop.instance()

    def __init__(self, **kwargs):
        super(SessionFactory, self).__init__(**kwargs)

        if self.session is None:
            # construct the session
            self.session = Session(**kwargs)
166
167
class Message(object):
    """A simple message object that maps dict keys to attributes.

    A Message can be created from a dict and a dict from a Message instance
    simply by calling dict(msg_obj)."""

    def __init__(self, msg_dict):
        attrs = self.__dict__
        for key, value in dict(msg_dict).items():
            # nested dicts become nested Messages, so attribute access
            # works all the way down
            if isinstance(value, dict):
                value = Message(value)
            attrs[key] = value

    # Having this iterator lets dict(msg_obj) work out of the box.
    def __iter__(self):
        return iter(self.__dict__.items())

    def __repr__(self):
        return repr(self.__dict__)

    def __str__(self):
        return pprint.pformat(self.__dict__)

    def __contains__(self, k):
        return k in self.__dict__

    def __getitem__(self, k):
        return self.__dict__[k]
196
197
def msg_header(msg_id, msg_type, username, session):
    """Build a new message header dict, stamped with the current time
    and the kernel protocol version."""
    return dict(
        msg_id=msg_id,
        msg_type=msg_type,
        username=username,
        session=session,
        date=datetime.now(),
        version=kernel_protocol_version,
    )
202
def extract_header(msg_or_header):
    """Given a message or header, return the header."""
    if not msg_or_header:
        return {}
    try:
        # a full message carries its header under the 'header' key
        header = msg_or_header['header']
    except KeyError:
        # otherwise it must itself be a header, identified by 'msg_id';
        # this lookup re-raises KeyError when it is neither
        msg_or_header['msg_id']
        header = msg_or_header
    return header if isinstance(header, dict) else dict(header)
221
class Session(Configurable):
    """Object for handling serialization and sending of messages.

    The Session object handles building messages and sending them
    with ZMQ sockets or ZMQStream objects.  Objects can communicate with each
    other over the network via Session objects, and only need to work with the
    dict-based IPython message spec.  The Session will handle
    serialization/deserialization, security, and metadata.

    Sessions support configurable serialization via packer/unpacker traits,
    and signing with HMAC digests via the key/keyfile traits.

    Parameters
    ----------

    debug : bool
        whether to trigger extra debugging statements
    packer/unpacker : str : 'json', 'pickle' or import_string
        importstrings for methods to serialize message parts.  If just
        'json' or 'pickle', predefined JSON and pickle packers will be used.
        Otherwise, the entire importstring must be used.

        The functions must accept at least valid JSON input, and output *bytes*.

        For example, to use msgpack:
        packer = 'msgpack.packb', unpacker='msgpack.unpackb'
    pack/unpack : callables
        You can also set the pack/unpack callables for serialization directly.
    session : bytes
        the ID of this Session object.  The default is to generate a new UUID.
    username : unicode
        username added to message headers.  The default is to ask the OS.
    key : bytes
        The key used to initialize an HMAC signature.  If unset, messages
        will not be signed or checked.
    keyfile : filepath
        The file containing a key.  If this is set, `key` will be initialized
        to the contents of the file.

    """

    debug=Bool(False, config=True, help="""Debug output in the Session""")

    packer = DottedObjectName('json',config=True,
            help="""The name of the packer for serializing messages.
            Should be one of 'json', 'pickle', or an import name
            for a custom callable serializer.""")
    def _packer_changed(self, name, old, new):
        # keep pack/unpack and the unpacker name in sync for the two
        # predefined schemes; custom packers only set self.pack
        if new.lower() == 'json':
            self.pack = json_packer
            self.unpack = json_unpacker
            self.unpacker = new
        elif new.lower() == 'pickle':
            self.pack = pickle_packer
            self.unpack = pickle_unpacker
            self.unpacker = new
        else:
            self.pack = import_item(str(new))

    unpacker = DottedObjectName('json', config=True,
        help="""The name of the unpacker for unserializing messages.
        Only used with custom functions for `packer`.""")
    def _unpacker_changed(self, name, old, new):
        # mirror of _packer_changed: predefined schemes set both directions
        if new.lower() == 'json':
            self.pack = json_packer
            self.unpack = json_unpacker
            self.packer = new
        elif new.lower() == 'pickle':
            self.pack = pickle_packer
            self.unpack = pickle_unpacker
            self.packer = new
        else:
            self.unpack = import_item(str(new))

    session = CUnicode(u'', config=True,
        help="""The UUID identifying this session.""")
    def _session_default(self):
        u = unicode_type(uuid.uuid4())
        # also initialize the bytes form used on the wire
        self.bsession = u.encode('ascii')
        return u

    def _session_changed(self, name, old, new):
        # keep the bytes form in sync with the unicode session id
        self.bsession = self.session.encode('ascii')

    # bsession is the session as bytes
    bsession = CBytes(b'')

    username = Unicode(str_to_unicode(os.environ.get('USER', 'username')),
        help="""Username for the Session. Default is your system username.""",
        config=True)

    metadata = Dict({}, config=True,
        help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")

    # if 0, no adapting to do.
    adapt_version = Integer(0)

    # message signature related traits:

    key = CBytes(config=True,
        help="""execution key, for signing messages.""")
    def _key_default(self):
        # random key per Session unless explicitly configured
        return str_to_bytes(str(uuid.uuid4()))

    def _key_changed(self):
        self._new_auth()

    signature_scheme = Unicode('hmac-sha256', config=True,
        help="""The digest scheme used to construct the message signatures.
        Must have the form 'hmac-HASH'.""")
    def _signature_scheme_changed(self, name, old, new):
        # validate the 'hmac-HASH' form and resolve HASH against hashlib
        if not new.startswith('hmac-'):
            raise TraitError("signature_scheme must start with 'hmac-', got %r" % new)
        hash_name = new.split('-', 1)[1]
        try:
            self.digest_mod = getattr(hashlib, hash_name)
        except AttributeError:
            raise TraitError("hashlib has no such attribute: %s" % hash_name)
        self._new_auth()

    digest_mod = Any()
    def _digest_mod_default(self):
        return hashlib.sha256

    auth = Instance(hmac.HMAC)

    def _new_auth(self):
        # (re)build the HMAC template; an empty key disables signing
        if self.key:
            self.auth = hmac.HMAC(self.key, digestmod=self.digest_mod)
        else:
            self.auth = None

    # set of signatures already seen, for replay-attack protection
    digest_history = Set()
    digest_history_size = Integer(2**16, config=True,
        help="""The maximum number of digests to remember.

        The digest history will be culled when it exceeds this value.
        """
    )

    keyfile = Unicode('', config=True,
        help="""path to file containing execution key.""")
    def _keyfile_changed(self, name, old, new):
        # load the key from the file whenever keyfile is (re)assigned
        with open(new, 'rb') as f:
            self.key = f.read().strip()

    # for protecting against sends from forks
    pid = Integer()

    # serialization traits:

    pack = Any(default_packer) # the actual packer function
    def _pack_changed(self, name, old, new):
        if not callable(new):
            raise TypeError("packer must be callable, not %s"%type(new))

    unpack = Any(default_unpacker) # the actual packer function
    def _unpack_changed(self, name, old, new):
        # unpacker is not checked - it is assumed to be
        if not callable(new):
            raise TypeError("unpacker must be callable, not %s"%type(new))

    # thresholds:
    copy_threshold = Integer(2**16, config=True,
        help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
    buffer_threshold = Integer(MAX_BYTES, config=True,
        help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.")
    item_threshold = Integer(MAX_ITEMS, config=True,
        help="""The maximum number of items for a container to be introspected for custom serialization.
        Containers larger than this are pickled outright.
        """
    )


    def __init__(self, **kwargs):
        """create a Session object

        Parameters
        ----------

        debug : bool
            whether to trigger extra debugging statements
        packer/unpacker : str : 'json', 'pickle' or import_string
            importstrings for methods to serialize message parts.  If just
            'json' or 'pickle', predefined JSON and pickle packers will be used.
            Otherwise, the entire importstring must be used.

            The functions must accept at least valid JSON input, and output
            *bytes*.

            For example, to use msgpack:
            packer = 'msgpack.packb', unpacker='msgpack.unpackb'
        pack/unpack : callables
            You can also set the pack/unpack callables for serialization
            directly.
        session : unicode (must be ascii)
            the ID of this Session object.  The default is to generate a new
            UUID.
        bsession : bytes
            The session as bytes
        username : unicode
            username added to message headers.  The default is to ask the OS.
        key : bytes
            The key used to initialize an HMAC signature.  If unset, messages
            will not be signed or checked.
        signature_scheme : str
            The message digest scheme. Currently must be of the form 'hmac-HASH',
            where 'HASH' is a hashing function available in Python's hashlib.
            The default is 'hmac-sha256'.
            This is ignored if 'key' is empty.
        keyfile : filepath
            The file containing a key.  If this is set, `key` will be
            initialized to the contents of the file.
        """
        super(Session, self).__init__(**kwargs)
        self._check_packers()
        # cache the packed form of an empty dict, used when content is None
        self.none = self.pack({})
        # ensure self._session_default() if necessary, so bsession is defined:
        self.session
        # remember the creating pid to detect sends from forked children
        self.pid = os.getpid()
        self._new_auth()

    @property
    def msg_id(self):
        """always return new uuid"""
        return str(uuid.uuid4())

    def _check_packers(self):
        """check packers for datetime support."""
        pack = self.pack
        unpack = self.unpack

        # check simple serialization
        msg = dict(a=[1,'hi'])
        try:
            packed = pack(msg)
        except Exception as e:
            msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}"
            if self.packer == 'json':
                jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
            else:
                jsonmsg = ""
            raise ValueError(
                msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg)
            )

        # ensure packed message is bytes
        if not isinstance(packed, bytes):
            raise ValueError("message packed to %r, but bytes are required"%type(packed))

        # check that unpack is pack's inverse
        try:
            unpacked = unpack(packed)
            assert unpacked == msg
        except Exception as e:
            msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}"
            if self.packer == 'json':
                jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
            else:
                jsonmsg = ""
            raise ValueError(
                msg.format(packer=self.packer, unpacker=self.unpacker, e=e, jsonmsg=jsonmsg)
            )

        # check datetime support
        msg = dict(t=datetime.now())
        try:
            unpacked = unpack(pack(msg))
            if isinstance(unpacked['t'], datetime):
                raise ValueError("Shouldn't deserialize to datetime")
        except Exception:
            # packer can't handle datetimes directly: wrap pack/unpack so
            # dates are squashed to ISO8601 strings before packing
            self.pack = lambda o: pack(squash_dates(o))
            self.unpack = lambda s: unpack(s)

    def msg_header(self, msg_type):
        # build a header stamped with this session's identity
        return msg_header(self.msg_id, msg_type, self.username, self.session)

    def msg(self, msg_type, content=None, parent=None, header=None, metadata=None):
        """Return the nested message dict.

        This format is different from what is sent over the wire. The
        serialize/deserialize methods converts this nested message dict to the wire
        format, which is a list of message parts.
        """
        msg = {}
        header = self.msg_header(msg_type) if header is None else header
        msg['header'] = header
        msg['msg_id'] = header['msg_id']
        msg['msg_type'] = header['msg_type']
        msg['parent_header'] = {} if parent is None else extract_header(parent)
        msg['content'] = {} if content is None else content
        msg['metadata'] = self.metadata.copy()
        if metadata is not None:
            msg['metadata'].update(metadata)
        return msg

    def sign(self, msg_list):
        """Sign a message with HMAC digest. If no auth, return b''.

        Parameters
        ----------
        msg_list : list
            The [p_header,p_parent,p_content] part of the message list.
        """
        if self.auth is None:
            return b''
        # copy the HMAC template so the base key state is never mutated
        h = self.auth.copy()
        for m in msg_list:
            h.update(m)
        return str_to_bytes(h.hexdigest())

    def serialize(self, msg, ident=None):
        """Serialize the message components to bytes.

        This is roughly the inverse of deserialize. The serialize/deserialize
        methods work with full message lists, whereas pack/unpack work with
        the individual message parts in the message list.

        Parameters
        ----------
        msg : dict or Message
            The next message dict as returned by the self.msg method.

        Returns
        -------
        msg_list : list
            The list of bytes objects to be sent with the format::

                [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent,
                 p_metadata, p_content, buffer1, buffer2, ...]

            In this list, the ``p_*`` entities are the packed or serialized
            versions, so if JSON is used, these are utf8 encoded JSON strings.
        """
        content = msg.get('content', {})
        if content is None:
            # use the pre-packed empty dict
            content = self.none
        elif isinstance(content, dict):
            content = self.pack(content)
        elif isinstance(content, bytes):
            # content is already packed, as in a relayed message
            pass
        elif isinstance(content, unicode_type):
            # should be bytes, but JSON often spits out unicode
            content = content.encode('utf8')
        else:
            raise TypeError("Content incorrect type: %s"%type(content))

        real_message = [self.pack(msg['header']),
                        self.pack(msg['parent_header']),
                        self.pack(msg['metadata']),
                        content,
        ]

        to_send = []

        if isinstance(ident, list):
            # accept list of idents
            to_send.extend(ident)
        elif ident is not None:
            to_send.append(ident)
        to_send.append(DELIM)

        # signature covers only the real message parts, not idents/DELIM
        signature = self.sign(real_message)
        to_send.append(signature)

        to_send.extend(real_message)

        return to_send

    def send(self, stream, msg_or_type, content=None, parent=None, ident=None,
             buffers=None, track=False, header=None, metadata=None):
        """Build and send a message via stream or socket.

        The message format used by this function internally is as follows:

        [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_content,
         buffer1,buffer2,...]

        The serialize/deserialize methods convert the nested message dict into this
        format.

        Parameters
        ----------

        stream : zmq.Socket or ZMQStream
            The socket-like object used to send the data.
        msg_or_type : str or Message/dict
            Normally, msg_or_type will be a msg_type unless a message is being
            sent more than once. If a header is supplied, this can be set to
            None and the msg_type will be pulled from the header.

        content : dict or None
            The content of the message (ignored if msg_or_type is a message).
        header : dict or None
            The header dict for the message (ignored if msg_to_type is a message).
        parent : Message or dict or None
            The parent or parent header describing the parent of this message
            (ignored if msg_or_type is a message).
        ident : bytes or list of bytes
            The zmq.IDENTITY routing path.
        metadata : dict or None
            The metadata describing the message
        buffers : list or None
            The already-serialized buffers to be appended to the message.
        track : bool
            Whether to track.  Only for use with Sockets, because ZMQStream
            objects cannot track messages.


        Returns
        -------
        msg : dict
            The constructed message.
        """
        if not isinstance(stream, zmq.Socket):
            # ZMQStreams and dummy sockets do not support tracking.
            track = False

        if isinstance(msg_or_type, (Message, dict)):
            # We got a Message or message dict, not a msg_type so don't
            # build a new Message.
            msg = msg_or_type
            buffers = buffers or msg.get('buffers', [])
        else:
            msg = self.msg(msg_or_type, content=content, parent=parent,
                           header=header, metadata=metadata)
        if not os.getpid() == self.pid:
            # refuse to send from a forked child: the zmq context/socket
            # belongs to the parent process
            io.rprint("WARNING: attempted to send message from fork")
            io.rprint(msg)
            return
        buffers = [] if buffers is None else buffers
        if self.adapt_version:
            msg = adapt(msg, self.adapt_version)
        to_send = self.serialize(msg, ident)
        to_send.extend(buffers)
        longest = max([ len(s) for s in to_send ])
        # small messages are cheaper to copy than to track
        copy = (longest < self.copy_threshold)

        if buffers and track and not copy:
            # only really track when we are doing zero-copy buffers
            tracker = stream.send_multipart(to_send, copy=False, track=True)
        else:
            # use dummy tracker, which will be done immediately
            tracker = DONE
            stream.send_multipart(to_send, copy=copy)

        if self.debug:
            pprint.pprint(msg)
            pprint.pprint(to_send)
            pprint.pprint(buffers)

        msg['tracker'] = tracker

        return msg

    def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
        """Send a raw message via ident path.

        This method is used to send a already serialized message.

        Parameters
        ----------
        stream : ZMQStream or Socket
            The ZMQ stream or socket to use for sending the message.
        msg_list : list
            The serialized list of messages to send. This only includes the
            [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of
            the message.
        ident : ident or list
            A single ident or a list of idents to use in sending.
        """
        to_send = []
        if isinstance(ident, bytes):
            ident = [ident]
        if ident is not None:
            to_send.extend(ident)

        to_send.append(DELIM)
        # re-sign the serialized parts for this session's key
        to_send.append(self.sign(msg_list))
        to_send.extend(msg_list)
        stream.send_multipart(to_send, flags, copy=copy)

    def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
        """Receive and unpack a message.

        Parameters
        ----------
        socket : ZMQStream or Socket
            The socket or stream to use in receiving.

        Returns
        -------
        [idents], msg
            [idents] is a list of idents and msg is a nested message dict of
            same format as self.msg returns.
        """
        if isinstance(socket, ZMQStream):
            socket = socket.socket
        try:
            msg_list = socket.recv_multipart(mode, copy=copy)
        except zmq.ZMQError as e:
            if e.errno == zmq.EAGAIN:
                # We can convert EAGAIN to None as we know in this case
                # recv_multipart won't return None.
                return None,None
            else:
                raise
        # split multipart message into identity list and message dict
        # invalid large messages can cause very expensive string comparisons
        idents, msg_list = self.feed_identities(msg_list, copy)
        try:
            return idents, self.deserialize(msg_list, content=content, copy=copy)
        except Exception as e:
            # TODO: handle it
            raise e

    def feed_identities(self, msg_list, copy=True):
        """Split the identities from the rest of the message.

        Feed until DELIM is reached, then return the prefix as idents and
        remainder as msg_list. This is easily broken by setting an IDENT to DELIM,
        but that would be silly.

        Parameters
        ----------
        msg_list : a list of Message or bytes objects
            The message to be split.
        copy : bool
            flag determining whether the arguments are bytes or Messages

        Returns
        -------
        (idents, msg_list) : two lists
            idents will always be a list of bytes, each of which is a ZMQ
            identity. msg_list will be a list of bytes or zmq.Messages of the
            form [HMAC,p_header,p_parent,p_content,buffer1,buffer2,...] and
            should be unpackable/unserializable via self.deserialize at this
            point.
        """
        if copy:
            # bytes: list.index finds the delimiter directly
            idx = msg_list.index(DELIM)
            return msg_list[:idx], msg_list[idx+1:]
        else:
            # zmq.Message objects: compare the .bytes of each frame
            failed = True
            for idx,m in enumerate(msg_list):
                if m.bytes == DELIM:
                    failed = False
                    break
            if failed:
                raise ValueError("DELIM not in msg_list")
            idents, msg_list = msg_list[:idx], msg_list[idx+1:]
            return [m.bytes for m in idents], msg_list

    def _add_digest(self, signature):
        """add a digest to history to protect against replay attacks"""
        if self.digest_history_size == 0:
            # no history, never add digests
            return

        self.digest_history.add(signature)
        if len(self.digest_history) > self.digest_history_size:
            # threshold reached, cull 10%
            self._cull_digest_history()

    def _cull_digest_history(self):
        """cull the digest history

        Removes a randomly selected 10% of the digest history
        """
        current = len(self.digest_history)
        n_to_cull = max(int(current // 10), current - self.digest_history_size)
        if n_to_cull >= current:
            self.digest_history = set()
            return
        # NOTE(review): random.sample on a set is deprecated in Python 3.9+
        # and removed in 3.11 — would need list(self.digest_history) there.
        to_cull = random.sample(self.digest_history, n_to_cull)
        self.digest_history.difference_update(to_cull)

    def deserialize(self, msg_list, content=True, copy=True):
        """Unserialize a msg_list to a nested message dict.

        This is roughly the inverse of serialize. The serialize/deserialize
        methods work with full message lists, whereas pack/unpack work with
        the individual message parts in the message list.

        Parameters
        ----------
        msg_list : list of bytes or Message objects
            The list of message parts of the form [HMAC,p_header,p_parent,
            p_metadata,p_content,buffer1,buffer2,...].
        content : bool (True)
            Whether to unpack the content dict (True), or leave it packed
            (False).
        copy : bool (True)
            Whether msg_list contains bytes (True) or the non-copying Message
            objects in each place (False).

        Returns
        -------
        msg : dict
            The nested message dict with top-level keys [header, parent_header,
            content, buffers].  The buffers are returned as memoryviews.
        """
        minlen = 5
        message = {}
        if not copy:
            # pyzmq didn't copy the first parts of the message, so we'll do it
            for i in range(minlen):
                msg_list[i] = msg_list[i].bytes
        if self.auth is not None:
            signature = msg_list[0]
            if not signature:
                raise ValueError("Unsigned Message")
            if signature in self.digest_history:
                # same signature seen before: replay attack or duplicate
                raise ValueError("Duplicate Signature: %r" % signature)
            self._add_digest(signature)
            check = self.sign(msg_list[1:5])
            # constant-time comparison to limit timing attacks
            if not compare_digest(signature, check):
                raise ValueError("Invalid Signature: %r" % signature)
        if not len(msg_list) >= minlen:
            raise TypeError("malformed message, must have at least %i elements"%minlen)
        header = self.unpack(msg_list[1])
        message['header'] = extract_dates(header)
        message['msg_id'] = header['msg_id']
        message['msg_type'] = header['msg_type']
        message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
        message['metadata'] = self.unpack(msg_list[3])
        if content:
            message['content'] = self.unpack(msg_list[4])
        else:
            message['content'] = msg_list[4]
        buffers = [memoryview(b) for b in msg_list[5:]]
        if buffers and buffers[0].shape is None:
            # force copy to workaround pyzmq #646
            buffers = [memoryview(b.bytes) for b in msg_list[5:]]
        message['buffers'] = buffers
        # adapt to the current version
        return adapt(message)

    def unserialize(self, *args, **kwargs):
        # deprecated alias kept for backward compatibility
        warnings.warn(
            "Session.unserialize is deprecated. Use Session.deserialize.",
            DeprecationWarning,
        )
        return self.deserialize(*args, **kwargs)
867
868
def test_msg2obj():
    """Smoke test: Message attribute access mirrors the source dict."""
    source = dict(x=1)
    msg = Message(source)
    assert msg.x == source['x']

    # nested dicts become nested Messages
    source['y'] = dict(z=1)
    msg = Message(source)
    assert msg.y.z == source['y']['z']

    outer, inner = 'y', 'z'
    assert msg[outer][inner] == source[outer][inner]

    # round-trip back to a plain dict
    roundtrip = dict(msg)
    assert source['x'] == roundtrip['x']
    assert source['y']['z'] == roundtrip['y']['z']
@@ -0,0 +1,1 b''
1 from jupyter_client.threaded import *
@@ -0,0 +1,1 b''
1 from .connect import * No newline at end of file
@@ -0,0 +1,3 b''
# Entry-point shim: delegate to the kernel application in ipython_kernel.zmq
if __name__ == '__main__':
    from ipython_kernel.zmq import kernelapp as app
    app.launch_new_instance()
This diff has been collapsed as it changes many lines, (576 lines changed) Show them Hide them
@@ -0,0 +1,576 b''
1 """Utilities for connecting to kernels
2
3 The :class:`ConnectionFileMixin` class in this module encapsulates the logic
4 related to writing and reading connections files.
5 """
6 # Copyright (c) IPython Development Team.
7 # Distributed under the terms of the Modified BSD License.
8
9 #-----------------------------------------------------------------------------
10 # Imports
11 #-----------------------------------------------------------------------------
12
13 from __future__ import absolute_import
14
15 import glob
16 import json
17 import os
18 import socket
19 import sys
20 from getpass import getpass
21 from subprocess import Popen, PIPE
22 import tempfile
23
24 import zmq
25
26 # IPython imports
27 from IPython.config import LoggingConfigurable
28 from IPython.core.profiledir import ProfileDir
29 from IPython.utils.localinterfaces import localhost
30 from IPython.utils.path import filefind, get_ipython_dir
31 from IPython.utils.py3compat import (str_to_bytes, bytes_to_str, cast_bytes_py2,
32 string_types)
33 from IPython.utils.traitlets import (
34 Bool, Integer, Unicode, CaselessStrEnum, Instance,
35 )
36
37
38 #-----------------------------------------------------------------------------
39 # Working with Connection Files
40 #-----------------------------------------------------------------------------
41
42 def write_connection_file(fname=None, shell_port=0, iopub_port=0, stdin_port=0, hb_port=0,
43 control_port=0, ip='', key=b'', transport='tcp',
44 signature_scheme='hmac-sha256',
45 ):
46 """Generates a JSON config file, including the selection of random ports.
47
48 Parameters
49 ----------
50
51 fname : unicode
52 The path to the file to write
53
54 shell_port : int, optional
55 The port to use for ROUTER (shell) channel.
56
57 iopub_port : int, optional
58 The port to use for the SUB channel.
59
60 stdin_port : int, optional
61 The port to use for the ROUTER (raw input) channel.
62
63 control_port : int, optional
64 The port to use for the ROUTER (control) channel.
65
66 hb_port : int, optional
67 The port to use for the heartbeat REP channel.
68
69 ip : str, optional
70 The ip address the kernel will bind to.
71
72 key : str, optional
73 The Session key used for message authentication.
74
75 signature_scheme : str, optional
76 The scheme used for message authentication.
77 This has the form 'digest-hash', where 'digest'
78 is the scheme used for digests, and 'hash' is the name of the hash function
79 used by the digest scheme.
80 Currently, 'hmac' is the only supported digest scheme,
81 and 'sha256' is the default hash function.
82
83 """
84 if not ip:
85 ip = localhost()
86 # default to temporary connector file
87 if not fname:
88 fd, fname = tempfile.mkstemp('.json')
89 os.close(fd)
90
91 # Find open ports as necessary.
92
93 ports = []
94 ports_needed = int(shell_port <= 0) + \
95 int(iopub_port <= 0) + \
96 int(stdin_port <= 0) + \
97 int(control_port <= 0) + \
98 int(hb_port <= 0)
99 if transport == 'tcp':
100 for i in range(ports_needed):
101 sock = socket.socket()
102 # struct.pack('ii', (0,0)) is 8 null bytes
103 sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, b'\0' * 8)
104 sock.bind(('', 0))
105 ports.append(sock)
106 for i, sock in enumerate(ports):
107 port = sock.getsockname()[1]
108 sock.close()
109 ports[i] = port
110 else:
111 N = 1
112 for i in range(ports_needed):
113 while os.path.exists("%s-%s" % (ip, str(N))):
114 N += 1
115 ports.append(N)
116 N += 1
117 if shell_port <= 0:
118 shell_port = ports.pop(0)
119 if iopub_port <= 0:
120 iopub_port = ports.pop(0)
121 if stdin_port <= 0:
122 stdin_port = ports.pop(0)
123 if control_port <= 0:
124 control_port = ports.pop(0)
125 if hb_port <= 0:
126 hb_port = ports.pop(0)
127
128 cfg = dict( shell_port=shell_port,
129 iopub_port=iopub_port,
130 stdin_port=stdin_port,
131 control_port=control_port,
132 hb_port=hb_port,
133 )
134 cfg['ip'] = ip
135 cfg['key'] = bytes_to_str(key)
136 cfg['transport'] = transport
137 cfg['signature_scheme'] = signature_scheme
138
139 with open(fname, 'w') as f:
140 f.write(json.dumps(cfg, indent=2))
141
142 return fname, cfg
143
144
145 def get_connection_file(app=None):
146 """Return the path to the connection file of an app
147
148 Parameters
149 ----------
150 app : IPKernelApp instance [optional]
151 If unspecified, the currently running app will be used
152 """
153 if app is None:
154 from jupyter_client.kernelapp import IPKernelApp
155 if not IPKernelApp.initialized():
156 raise RuntimeError("app not specified, and not in a running Kernel")
157
158 app = IPKernelApp.instance()
159 return filefind(app.connection_file, ['.', app.profile_dir.security_dir])
160
161
162 def find_connection_file(filename='kernel-*.json', profile=None):
163 """find a connection file, and return its absolute path.
164
165 The current working directory and the profile's security
166 directory will be searched for the file if it is not given by
167 absolute path.
168
169 If profile is unspecified, then the current running application's
170 profile will be used, or 'default', if not run from IPython.
171
172 If the argument does not match an existing file, it will be interpreted as a
173 fileglob, and the matching file in the profile's security dir with
174 the latest access time will be used.
175
176 Parameters
177 ----------
178 filename : str
179 The connection file or fileglob to search for.
180 profile : str [optional]
181 The name of the profile to use when searching for the connection file,
182 if different from the current IPython session or 'default'.
183
184 Returns
185 -------
186 str : The absolute path of the connection file.
187 """
188 from IPython.core.application import BaseIPythonApplication as IPApp
189 try:
190 # quick check for absolute path, before going through logic
191 return filefind(filename)
192 except IOError:
193 pass
194
195 if profile is None:
196 # profile unspecified, check if running from an IPython app
197 if IPApp.initialized():
198 app = IPApp.instance()
199 profile_dir = app.profile_dir
200 else:
201 # not running in IPython, use default profile
202 profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), 'default')
203 else:
204 # find profiledir by profile name:
205 profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile)
206 security_dir = profile_dir.security_dir
207
208 try:
209 # first, try explicit name
210 return filefind(filename, ['.', security_dir])
211 except IOError:
212 pass
213
214 # not found by full name
215
216 if '*' in filename:
217 # given as a glob already
218 pat = filename
219 else:
220 # accept any substring match
221 pat = '*%s*' % filename
222 matches = glob.glob( os.path.join(security_dir, pat) )
223 if not matches:
224 raise IOError("Could not find %r in %r" % (filename, security_dir))
225 elif len(matches) == 1:
226 return matches[0]
227 else:
228 # get most recent match, by access time:
229 return sorted(matches, key=lambda f: os.stat(f).st_atime)[-1]
230
231
232 def get_connection_info(connection_file=None, unpack=False, profile=None):
233 """Return the connection information for the current Kernel.
234
235 Parameters
236 ----------
237 connection_file : str [optional]
238 The connection file to be used. Can be given by absolute path, or
239 IPython will search in the security directory of a given profile.
240 If run from IPython,
241
242 If unspecified, the connection file for the currently running
243 IPython Kernel will be used, which is only allowed from inside a kernel.
244 unpack : bool [default: False]
245 if True, return the unpacked dict, otherwise just the string contents
246 of the file.
247 profile : str [optional]
248 The name of the profile to use when searching for the connection file,
249 if different from the current IPython session or 'default'.
250
251
252 Returns
253 -------
254 The connection dictionary of the current kernel, as string or dict,
255 depending on `unpack`.
256 """
257 if connection_file is None:
258 # get connection file from current kernel
259 cf = get_connection_file()
260 else:
261 # connection file specified, allow shortnames:
262 cf = find_connection_file(connection_file, profile=profile)
263
264 with open(cf) as f:
265 info = f.read()
266
267 if unpack:
268 info = json.loads(info)
269 # ensure key is bytes:
270 info['key'] = str_to_bytes(info.get('key', ''))
271 return info
272
273
274 def connect_qtconsole(connection_file=None, argv=None, profile=None):
275 """Connect a qtconsole to the current kernel.
276
277 This is useful for connecting a second qtconsole to a kernel, or to a
278 local notebook.
279
280 Parameters
281 ----------
282 connection_file : str [optional]
283 The connection file to be used. Can be given by absolute path, or
284 IPython will search in the security directory of a given profile.
285 If run from IPython,
286
287 If unspecified, the connection file for the currently running
288 IPython Kernel will be used, which is only allowed from inside a kernel.
289 argv : list [optional]
290 Any extra args to be passed to the console.
291 profile : str [optional]
292 The name of the profile to use when searching for the connection file,
293 if different from the current IPython session or 'default'.
294
295
296 Returns
297 -------
298 :class:`subprocess.Popen` instance running the qtconsole frontend
299 """
300 argv = [] if argv is None else argv
301
302 if connection_file is None:
303 # get connection file from current kernel
304 cf = get_connection_file()
305 else:
306 cf = find_connection_file(connection_file, profile=profile)
307
308 cmd = ';'.join([
309 "from IPython.qt.console import qtconsoleapp",
310 "qtconsoleapp.main()"
311 ])
312
313 return Popen([sys.executable, '-c', cmd, '--existing', cf] + argv,
314 stdout=PIPE, stderr=PIPE, close_fds=(sys.platform != 'win32'),
315 )
316
317
318 def tunnel_to_kernel(connection_info, sshserver, sshkey=None):
319 """tunnel connections to a kernel via ssh
320
321 This will open four SSH tunnels from localhost on this machine to the
322 ports associated with the kernel. They can be either direct
323 localhost-localhost tunnels, or if an intermediate server is necessary,
324 the kernel must be listening on a public IP.
325
326 Parameters
327 ----------
328 connection_info : dict or str (path)
329 Either a connection dict, or the path to a JSON connection file
330 sshserver : str
331 The ssh sever to use to tunnel to the kernel. Can be a full
332 `user@server:port` string. ssh config aliases are respected.
333 sshkey : str [optional]
334 Path to file containing ssh key to use for authentication.
335 Only necessary if your ssh config does not already associate
336 a keyfile with the host.
337
338 Returns
339 -------
340
341 (shell, iopub, stdin, hb) : ints
342 The four ports on localhost that have been forwarded to the kernel.
343 """
344 from zmq.ssh import tunnel
345 if isinstance(connection_info, string_types):
346 # it's a path, unpack it
347 with open(connection_info) as f:
348 connection_info = json.loads(f.read())
349
350 cf = connection_info
351
352 lports = tunnel.select_random_ports(4)
353 rports = cf['shell_port'], cf['iopub_port'], cf['stdin_port'], cf['hb_port']
354
355 remote_ip = cf['ip']
356
357 if tunnel.try_passwordless_ssh(sshserver, sshkey):
358 password=False
359 else:
360 password = getpass("SSH Password for %s: " % cast_bytes_py2(sshserver))
361
362 for lp,rp in zip(lports, rports):
363 tunnel.ssh_tunnel(lp, rp, sshserver, remote_ip, sshkey, password)
364
365 return tuple(lports)
366
367
368 #-----------------------------------------------------------------------------
369 # Mixin for classes that work with connection files
370 #-----------------------------------------------------------------------------
371
372 channel_socket_types = {
373 'hb' : zmq.REQ,
374 'shell' : zmq.DEALER,
375 'iopub' : zmq.SUB,
376 'stdin' : zmq.DEALER,
377 'control': zmq.DEALER,
378 }
379
380 port_names = [ "%s_port" % channel for channel in ('shell', 'stdin', 'iopub', 'hb', 'control')]
381
382 class ConnectionFileMixin(LoggingConfigurable):
383 """Mixin for configurable classes that work with connection files"""
384
385 # The addresses for the communication channels
386 connection_file = Unicode('', config=True,
387 help="""JSON file in which to store connection info [default: kernel-<pid>.json]
388
389 This file will contain the IP, ports, and authentication key needed to connect
390 clients to this kernel. By default, this file will be created in the security dir
391 of the current profile, but can be specified by absolute path.
392 """)
393 _connection_file_written = Bool(False)
394
395 transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True)
396
397 ip = Unicode(config=True,
398 help="""Set the kernel\'s IP address [default localhost].
399 If the IP address is something other than localhost, then
400 Consoles on other machines will be able to connect
401 to the Kernel, so be careful!"""
402 )
403
404 def _ip_default(self):
405 if self.transport == 'ipc':
406 if self.connection_file:
407 return os.path.splitext(self.connection_file)[0] + '-ipc'
408 else:
409 return 'kernel-ipc'
410 else:
411 return localhost()
412
413 def _ip_changed(self, name, old, new):
414 if new == '*':
415 self.ip = '0.0.0.0'
416
417 # protected traits
418
419 hb_port = Integer(0, config=True,
420 help="set the heartbeat port [default: random]")
421 shell_port = Integer(0, config=True,
422 help="set the shell (ROUTER) port [default: random]")
423 iopub_port = Integer(0, config=True,
424 help="set the iopub (PUB) port [default: random]")
425 stdin_port = Integer(0, config=True,
426 help="set the stdin (ROUTER) port [default: random]")
427 control_port = Integer(0, config=True,
428 help="set the control (ROUTER) port [default: random]")
429
430 @property
431 def ports(self):
432 return [ getattr(self, name) for name in port_names ]
433
434 # The Session to use for communication with the kernel.
435 session = Instance('jupyter_client.session.Session')
436 def _session_default(self):
437 from jupyter_client.session import Session
438 return Session(parent=self)
439
440 #--------------------------------------------------------------------------
441 # Connection and ipc file management
442 #--------------------------------------------------------------------------
443
444 def get_connection_info(self):
445 """return the connection info as a dict"""
446 return dict(
447 transport=self.transport,
448 ip=self.ip,
449 shell_port=self.shell_port,
450 iopub_port=self.iopub_port,
451 stdin_port=self.stdin_port,
452 hb_port=self.hb_port,
453 control_port=self.control_port,
454 signature_scheme=self.session.signature_scheme,
455 key=self.session.key,
456 )
457
458 def cleanup_connection_file(self):
459 """Cleanup connection file *if we wrote it*
460
461 Will not raise if the connection file was already removed somehow.
462 """
463 if self._connection_file_written:
464 # cleanup connection files on full shutdown of kernel we started
465 self._connection_file_written = False
466 try:
467 os.remove(self.connection_file)
468 except (IOError, OSError, AttributeError):
469 pass
470
471 def cleanup_ipc_files(self):
472 """Cleanup ipc files if we wrote them."""
473 if self.transport != 'ipc':
474 return
475 for port in self.ports:
476 ipcfile = "%s-%i" % (self.ip, port)
477 try:
478 os.remove(ipcfile)
479 except (IOError, OSError):
480 pass
481
482 def write_connection_file(self):
483 """Write connection info to JSON dict in self.connection_file."""
484 if self._connection_file_written and os.path.exists(self.connection_file):
485 return
486
487 self.connection_file, cfg = write_connection_file(self.connection_file,
488 transport=self.transport, ip=self.ip, key=self.session.key,
489 stdin_port=self.stdin_port, iopub_port=self.iopub_port,
490 shell_port=self.shell_port, hb_port=self.hb_port,
491 control_port=self.control_port,
492 signature_scheme=self.session.signature_scheme,
493 )
494 # write_connection_file also sets default ports:
495 for name in port_names:
496 setattr(self, name, cfg[name])
497
498 self._connection_file_written = True
499
500 def load_connection_file(self):
501 """Load connection info from JSON dict in self.connection_file."""
502 self.log.debug(u"Loading connection file %s", self.connection_file)
503 with open(self.connection_file) as f:
504 cfg = json.load(f)
505 self.transport = cfg.get('transport', self.transport)
506 self.ip = cfg.get('ip', self._ip_default())
507
508 for name in port_names:
509 if getattr(self, name) == 0 and name in cfg:
510 # not overridden by config or cl_args
511 setattr(self, name, cfg[name])
512
513 if 'key' in cfg:
514 self.session.key = str_to_bytes(cfg['key'])
515 if 'signature_scheme' in cfg:
516 self.session.signature_scheme = cfg['signature_scheme']
517
518 #--------------------------------------------------------------------------
519 # Creating connected sockets
520 #--------------------------------------------------------------------------
521
522 def _make_url(self, channel):
523 """Make a ZeroMQ URL for a given channel."""
524 transport = self.transport
525 ip = self.ip
526 port = getattr(self, '%s_port' % channel)
527
528 if transport == 'tcp':
529 return "tcp://%s:%i" % (ip, port)
530 else:
531 return "%s://%s-%s" % (transport, ip, port)
532
533 def _create_connected_socket(self, channel, identity=None):
534 """Create a zmq Socket and connect it to the kernel."""
535 url = self._make_url(channel)
536 socket_type = channel_socket_types[channel]
537 self.log.debug("Connecting to: %s" % url)
538 sock = self.context.socket(socket_type)
539 # set linger to 1s to prevent hangs at exit
540 sock.linger = 1000
541 if identity:
542 sock.identity = identity
543 sock.connect(url)
544 return sock
545
546 def connect_iopub(self, identity=None):
547 """return zmq Socket connected to the IOPub channel"""
548 sock = self._create_connected_socket('iopub', identity=identity)
549 sock.setsockopt(zmq.SUBSCRIBE, b'')
550 return sock
551
552 def connect_shell(self, identity=None):
553 """return zmq Socket connected to the Shell channel"""
554 return self._create_connected_socket('shell', identity=identity)
555
556 def connect_stdin(self, identity=None):
557 """return zmq Socket connected to the StdIn channel"""
558 return self._create_connected_socket('stdin', identity=identity)
559
560 def connect_hb(self, identity=None):
561 """return zmq Socket connected to the Heartbeat channel"""
562 return self._create_connected_socket('hb', identity=identity)
563
564 def connect_control(self, identity=None):
565 """return zmq Socket connected to the Control channel"""
566 return self._create_connected_socket('control', identity=identity)
567
568
569 __all__ = [
570 'write_connection_file',
571 'get_connection_file',
572 'find_connection_file',
573 'get_connection_info',
574 'connect_qtconsole',
575 'tunnel_to_kernel',
576 ]
@@ -0,0 +1,1 b''
1 from jupyter_client.session import *
@@ -1,3 +1,3 b''
1 if __name__ == '__main__':
1 if __name__ == '__main__':
2 from IPython.kernel.zmq import kernelapp as app
2 from ipython_kernel.zmq import kernelapp as app
3 app.launch_new_instance()
3 app.launch_new_instance()
1 NO CONTENT: file renamed from IPython/kernel/comm/__init__.py to ipython_kernel/comm/__init__.py
NO CONTENT: file renamed from IPython/kernel/comm/__init__.py to ipython_kernel/comm/__init__.py
@@ -1,169 +1,169 b''
1 """Base class for a Comm"""
1 """Base class for a Comm"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import threading
6 import threading
7 import uuid
7 import uuid
8
8
9 from zmq.eventloop.ioloop import IOLoop
9 from zmq.eventloop.ioloop import IOLoop
10
10
11 from IPython.config import LoggingConfigurable
11 from IPython.config import LoggingConfigurable
12 from IPython.kernel.zmq.kernelbase import Kernel
12 from ipython_kernel.zmq.kernelbase import Kernel
13
13
14 from IPython.utils.jsonutil import json_clean
14 from IPython.utils.jsonutil import json_clean
15 from IPython.utils.traitlets import Instance, Unicode, Bytes, Bool, Dict, Any
15 from IPython.utils.traitlets import Instance, Unicode, Bytes, Bool, Dict, Any
16
16
17
17
18 class Comm(LoggingConfigurable):
18 class Comm(LoggingConfigurable):
19 """Class for communicating between a Frontend and a Kernel"""
19 """Class for communicating between a Frontend and a Kernel"""
20 # If this is instantiated by a non-IPython kernel, shell will be None
20 # If this is instantiated by a non-IPython kernel, shell will be None
21 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
21 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
22 allow_none=True)
22 allow_none=True)
23 kernel = Instance('IPython.kernel.zmq.kernelbase.Kernel')
23 kernel = Instance('ipython_kernel.zmq.kernelbase.Kernel')
24 def _kernel_default(self):
24 def _kernel_default(self):
25 if Kernel.initialized():
25 if Kernel.initialized():
26 return Kernel.instance()
26 return Kernel.instance()
27
27
28 iopub_socket = Any()
28 iopub_socket = Any()
29 def _iopub_socket_default(self):
29 def _iopub_socket_default(self):
30 return self.kernel.iopub_socket
30 return self.kernel.iopub_socket
31 session = Instance('IPython.kernel.zmq.session.Session')
31 session = Instance('ipython_kernel.zmq.session.Session')
32 def _session_default(self):
32 def _session_default(self):
33 if self.kernel is not None:
33 if self.kernel is not None:
34 return self.kernel.session
34 return self.kernel.session
35
35
36 target_name = Unicode('comm')
36 target_name = Unicode('comm')
37 target_module = Unicode(None, allow_none=True, help="""requirejs module from
37 target_module = Unicode(None, allow_none=True, help="""requirejs module from
38 which to load comm target.""")
38 which to load comm target.""")
39
39
40 topic = Bytes()
40 topic = Bytes()
41 def _topic_default(self):
41 def _topic_default(self):
42 return ('comm-%s' % self.comm_id).encode('ascii')
42 return ('comm-%s' % self.comm_id).encode('ascii')
43
43
44 _open_data = Dict(help="data dict, if any, to be included in comm_open")
44 _open_data = Dict(help="data dict, if any, to be included in comm_open")
45 _close_data = Dict(help="data dict, if any, to be included in comm_close")
45 _close_data = Dict(help="data dict, if any, to be included in comm_close")
46
46
47 _msg_callback = Any()
47 _msg_callback = Any()
48 _close_callback = Any()
48 _close_callback = Any()
49
49
50 _closed = Bool(True)
50 _closed = Bool(True)
51 comm_id = Unicode()
51 comm_id = Unicode()
52 def _comm_id_default(self):
52 def _comm_id_default(self):
53 return uuid.uuid4().hex
53 return uuid.uuid4().hex
54
54
55 primary = Bool(True, help="Am I the primary or secondary Comm?")
55 primary = Bool(True, help="Am I the primary or secondary Comm?")
56
56
57 def __init__(self, target_name='', data=None, **kwargs):
57 def __init__(self, target_name='', data=None, **kwargs):
58 if target_name:
58 if target_name:
59 kwargs['target_name'] = target_name
59 kwargs['target_name'] = target_name
60 super(Comm, self).__init__(**kwargs)
60 super(Comm, self).__init__(**kwargs)
61 if self.primary:
61 if self.primary:
62 # I am primary, open my peer.
62 # I am primary, open my peer.
63 self.open(data)
63 self.open(data)
64 else:
64 else:
65 self._closed = False
65 self._closed = False
66
66
67 def _publish_msg(self, msg_type, data=None, metadata=None, buffers=None, **keys):
67 def _publish_msg(self, msg_type, data=None, metadata=None, buffers=None, **keys):
68 """Helper for sending a comm message on IOPub"""
68 """Helper for sending a comm message on IOPub"""
69 if threading.current_thread().name != 'MainThread' and IOLoop.initialized():
69 if threading.current_thread().name != 'MainThread' and IOLoop.initialized():
70 # make sure we never send on a zmq socket outside the main IOLoop thread
70 # make sure we never send on a zmq socket outside the main IOLoop thread
71 IOLoop.instance().add_callback(lambda : self._publish_msg(msg_type, data, metadata, buffers, **keys))
71 IOLoop.instance().add_callback(lambda : self._publish_msg(msg_type, data, metadata, buffers, **keys))
72 return
72 return
73 data = {} if data is None else data
73 data = {} if data is None else data
74 metadata = {} if metadata is None else metadata
74 metadata = {} if metadata is None else metadata
75 content = json_clean(dict(data=data, comm_id=self.comm_id, **keys))
75 content = json_clean(dict(data=data, comm_id=self.comm_id, **keys))
76 self.session.send(self.iopub_socket, msg_type,
76 self.session.send(self.iopub_socket, msg_type,
77 content,
77 content,
78 metadata=json_clean(metadata),
78 metadata=json_clean(metadata),
79 parent=self.kernel._parent_header,
79 parent=self.kernel._parent_header,
80 ident=self.topic,
80 ident=self.topic,
81 buffers=buffers,
81 buffers=buffers,
82 )
82 )
83
83
84 def __del__(self):
84 def __del__(self):
85 """trigger close on gc"""
85 """trigger close on gc"""
86 self.close()
86 self.close()
87
87
88 # publishing messages
88 # publishing messages
89
89
90 def open(self, data=None, metadata=None, buffers=None):
90 def open(self, data=None, metadata=None, buffers=None):
91 """Open the frontend-side version of this comm"""
91 """Open the frontend-side version of this comm"""
92 if data is None:
92 if data is None:
93 data = self._open_data
93 data = self._open_data
94 comm_manager = getattr(self.kernel, 'comm_manager', None)
94 comm_manager = getattr(self.kernel, 'comm_manager', None)
95 if comm_manager is None:
95 if comm_manager is None:
96 raise RuntimeError("Comms cannot be opened without a kernel "
96 raise RuntimeError("Comms cannot be opened without a kernel "
97 "and a comm_manager attached to that kernel.")
97 "and a comm_manager attached to that kernel.")
98
98
99 comm_manager.register_comm(self)
99 comm_manager.register_comm(self)
100 try:
100 try:
101 self._publish_msg('comm_open',
101 self._publish_msg('comm_open',
102 data=data, metadata=metadata, buffers=buffers,
102 data=data, metadata=metadata, buffers=buffers,
103 target_name=self.target_name,
103 target_name=self.target_name,
104 target_module=self.target_module,
104 target_module=self.target_module,
105 )
105 )
106 self._closed = False
106 self._closed = False
107 except:
107 except:
108 comm_manager.unregister_comm(self)
108 comm_manager.unregister_comm(self)
109 raise
109 raise
110
110
111 def close(self, data=None, metadata=None, buffers=None):
111 def close(self, data=None, metadata=None, buffers=None):
112 """Close the frontend-side version of this comm"""
112 """Close the frontend-side version of this comm"""
113 if self._closed:
113 if self._closed:
114 # only close once
114 # only close once
115 return
115 return
116 self._closed = True
116 self._closed = True
117 if data is None:
117 if data is None:
118 data = self._close_data
118 data = self._close_data
119 self._publish_msg('comm_close',
119 self._publish_msg('comm_close',
120 data=data, metadata=metadata, buffers=buffers,
120 data=data, metadata=metadata, buffers=buffers,
121 )
121 )
122 self.kernel.comm_manager.unregister_comm(self)
122 self.kernel.comm_manager.unregister_comm(self)
123
123
124 def send(self, data=None, metadata=None, buffers=None):
124 def send(self, data=None, metadata=None, buffers=None):
125 """Send a message to the frontend-side version of this comm"""
125 """Send a message to the frontend-side version of this comm"""
126 self._publish_msg('comm_msg',
126 self._publish_msg('comm_msg',
127 data=data, metadata=metadata, buffers=buffers,
127 data=data, metadata=metadata, buffers=buffers,
128 )
128 )
129
129
130 # registering callbacks
130 # registering callbacks
131
131
132 def on_close(self, callback):
132 def on_close(self, callback):
133 """Register a callback for comm_close
133 """Register a callback for comm_close
134
134
135 Will be called with the `data` of the close message.
135 Will be called with the `data` of the close message.
136
136
137 Call `on_close(None)` to disable an existing callback.
137 Call `on_close(None)` to disable an existing callback.
138 """
138 """
139 self._close_callback = callback
139 self._close_callback = callback
140
140
141 def on_msg(self, callback):
141 def on_msg(self, callback):
142 """Register a callback for comm_msg
142 """Register a callback for comm_msg
143
143
144 Will be called with the `data` of any comm_msg messages.
144 Will be called with the `data` of any comm_msg messages.
145
145
146 Call `on_msg(None)` to disable an existing callback.
146 Call `on_msg(None)` to disable an existing callback.
147 """
147 """
148 self._msg_callback = callback
148 self._msg_callback = callback
149
149
150 # handling of incoming messages
150 # handling of incoming messages
151
151
152 def handle_close(self, msg):
152 def handle_close(self, msg):
153 """Handle a comm_close message"""
153 """Handle a comm_close message"""
154 self.log.debug("handle_close[%s](%s)", self.comm_id, msg)
154 self.log.debug("handle_close[%s](%s)", self.comm_id, msg)
155 if self._close_callback:
155 if self._close_callback:
156 self._close_callback(msg)
156 self._close_callback(msg)
157
157
158 def handle_msg(self, msg):
158 def handle_msg(self, msg):
159 """Handle a comm_msg message"""
159 """Handle a comm_msg message"""
160 self.log.debug("handle_msg[%s](%s)", self.comm_id, msg)
160 self.log.debug("handle_msg[%s](%s)", self.comm_id, msg)
161 if self._msg_callback:
161 if self._msg_callback:
162 if self.shell:
162 if self.shell:
163 self.shell.events.trigger('pre_execute')
163 self.shell.events.trigger('pre_execute')
164 self._msg_callback(msg)
164 self._msg_callback(msg)
165 if self.shell:
165 if self.shell:
166 self.shell.events.trigger('post_execute')
166 self.shell.events.trigger('post_execute')
167
167
168
168
169 __all__ = ['Comm']
169 __all__ = ['Comm']
@@ -1,157 +1,157 b''
1 """Base class to manage comms"""
1 """Base class to manage comms"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import sys
6 import sys
7
7
8 from IPython.config import LoggingConfigurable
8 from IPython.config import LoggingConfigurable
9 from IPython.core.prompts import LazyEvaluate
9 from IPython.core.prompts import LazyEvaluate
10 from IPython.core.getipython import get_ipython
10 from IPython.core.getipython import get_ipython
11
11
12 from IPython.utils.importstring import import_item
12 from IPython.utils.importstring import import_item
13 from IPython.utils.py3compat import string_types
13 from IPython.utils.py3compat import string_types
14 from IPython.utils.traitlets import Instance, Unicode, Dict, Any
14 from IPython.utils.traitlets import Instance, Unicode, Dict, Any
15
15
16 from .comm import Comm
16 from .comm import Comm
17
17
18
18
19 def lazy_keys(dikt):
19 def lazy_keys(dikt):
20 """Return lazy-evaluated string representation of a dictionary's keys
20 """Return lazy-evaluated string representation of a dictionary's keys
21
21
22 Key list is only constructed if it will actually be used.
22 Key list is only constructed if it will actually be used.
23 Used for debug-logging.
23 Used for debug-logging.
24 """
24 """
25 return LazyEvaluate(lambda d: list(d.keys()))
25 return LazyEvaluate(lambda d: list(d.keys()))
26
26
27
27
28 class CommManager(LoggingConfigurable):
28 class CommManager(LoggingConfigurable):
29 """Manager for Comms in the Kernel"""
29 """Manager for Comms in the Kernel"""
30
30
31 # If this is instantiated by a non-IPython kernel, shell will be None
31 # If this is instantiated by a non-IPython kernel, shell will be None
32 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
32 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
33 allow_none=True)
33 allow_none=True)
34 kernel = Instance('IPython.kernel.zmq.kernelbase.Kernel')
34 kernel = Instance('ipython_kernel.zmq.kernelbase.Kernel')
35
35
36 iopub_socket = Any()
36 iopub_socket = Any()
37 def _iopub_socket_default(self):
37 def _iopub_socket_default(self):
38 return self.kernel.iopub_socket
38 return self.kernel.iopub_socket
39 session = Instance('IPython.kernel.zmq.session.Session')
39 session = Instance('ipython_kernel.zmq.session.Session')
40 def _session_default(self):
40 def _session_default(self):
41 return self.kernel.session
41 return self.kernel.session
42
42
43 comms = Dict()
43 comms = Dict()
44 targets = Dict()
44 targets = Dict()
45
45
46 # Public APIs
46 # Public APIs
47
47
48 def register_target(self, target_name, f):
48 def register_target(self, target_name, f):
49 """Register a callable f for a given target name
49 """Register a callable f for a given target name
50
50
51 f will be called with two arguments when a comm_open message is received with `target`:
51 f will be called with two arguments when a comm_open message is received with `target`:
52
52
53 - the Comm instance
53 - the Comm instance
54 - the `comm_open` message itself.
54 - the `comm_open` message itself.
55
55
56 f can be a Python callable or an import string for one.
56 f can be a Python callable or an import string for one.
57 """
57 """
58 if isinstance(f, string_types):
58 if isinstance(f, string_types):
59 f = import_item(f)
59 f = import_item(f)
60
60
61 self.targets[target_name] = f
61 self.targets[target_name] = f
62
62
63 def unregister_target(self, target_name, f):
63 def unregister_target(self, target_name, f):
64 """Unregister a callable registered with register_target"""
64 """Unregister a callable registered with register_target"""
65 return self.targets.pop(target_name);
65 return self.targets.pop(target_name);
66
66
67 def register_comm(self, comm):
67 def register_comm(self, comm):
68 """Register a new comm"""
68 """Register a new comm"""
69 comm_id = comm.comm_id
69 comm_id = comm.comm_id
70 comm.shell = self.shell
70 comm.shell = self.shell
71 comm.kernel = self.kernel
71 comm.kernel = self.kernel
72 comm.iopub_socket = self.iopub_socket
72 comm.iopub_socket = self.iopub_socket
73 self.comms[comm_id] = comm
73 self.comms[comm_id] = comm
74 return comm_id
74 return comm_id
75
75
76 def unregister_comm(self, comm):
76 def unregister_comm(self, comm):
77 """Unregister a comm, and close its counterpart"""
77 """Unregister a comm, and close its counterpart"""
78 # unlike get_comm, this should raise a KeyError
78 # unlike get_comm, this should raise a KeyError
79 comm = self.comms.pop(comm.comm_id)
79 comm = self.comms.pop(comm.comm_id)
80
80
81 def get_comm(self, comm_id):
81 def get_comm(self, comm_id):
82 """Get a comm with a particular id
82 """Get a comm with a particular id
83
83
84 Returns the comm if found, otherwise None.
84 Returns the comm if found, otherwise None.
85
85
86 This will not raise an error,
86 This will not raise an error,
87 it will log messages if the comm cannot be found.
87 it will log messages if the comm cannot be found.
88 """
88 """
89 if comm_id not in self.comms:
89 if comm_id not in self.comms:
90 self.log.error("No such comm: %s", comm_id)
90 self.log.error("No such comm: %s", comm_id)
91 self.log.debug("Current comms: %s", lazy_keys(self.comms))
91 self.log.debug("Current comms: %s", lazy_keys(self.comms))
92 return
92 return
93 # call, because we store weakrefs
93 # call, because we store weakrefs
94 comm = self.comms[comm_id]
94 comm = self.comms[comm_id]
95 return comm
95 return comm
96
96
97 # Message handlers
97 # Message handlers
98 def comm_open(self, stream, ident, msg):
98 def comm_open(self, stream, ident, msg):
99 """Handler for comm_open messages"""
99 """Handler for comm_open messages"""
100 content = msg['content']
100 content = msg['content']
101 comm_id = content['comm_id']
101 comm_id = content['comm_id']
102 target_name = content['target_name']
102 target_name = content['target_name']
103 f = self.targets.get(target_name, None)
103 f = self.targets.get(target_name, None)
104 comm = Comm(comm_id=comm_id,
104 comm = Comm(comm_id=comm_id,
105 shell=self.shell,
105 shell=self.shell,
106 kernel=self.kernel,
106 kernel=self.kernel,
107 iopub_socket=self.iopub_socket,
107 iopub_socket=self.iopub_socket,
108 primary=False,
108 primary=False,
109 )
109 )
110 self.register_comm(comm)
110 self.register_comm(comm)
111 if f is None:
111 if f is None:
112 self.log.error("No such comm target registered: %s", target_name)
112 self.log.error("No such comm target registered: %s", target_name)
113 else:
113 else:
114 try:
114 try:
115 f(comm, msg)
115 f(comm, msg)
116 return
116 return
117 except Exception:
117 except Exception:
118 self.log.error("Exception opening comm with target: %s", target_name, exc_info=True)
118 self.log.error("Exception opening comm with target: %s", target_name, exc_info=True)
119
119
120 # Failure.
120 # Failure.
121 try:
121 try:
122 comm.close()
122 comm.close()
123 except:
123 except:
124 self.log.error("""Could not close comm during `comm_open` failure
124 self.log.error("""Could not close comm during `comm_open` failure
125 clean-up. The comm may not have been opened yet.""", exc_info=True)
125 clean-up. The comm may not have been opened yet.""", exc_info=True)
126
126
127 def comm_msg(self, stream, ident, msg):
127 def comm_msg(self, stream, ident, msg):
128 """Handler for comm_msg messages"""
128 """Handler for comm_msg messages"""
129 content = msg['content']
129 content = msg['content']
130 comm_id = content['comm_id']
130 comm_id = content['comm_id']
131 comm = self.get_comm(comm_id)
131 comm = self.get_comm(comm_id)
132 if comm is None:
132 if comm is None:
133 # no such comm
133 # no such comm
134 return
134 return
135 try:
135 try:
136 comm.handle_msg(msg)
136 comm.handle_msg(msg)
137 except Exception:
137 except Exception:
138 self.log.error("Exception in comm_msg for %s", comm_id, exc_info=True)
138 self.log.error("Exception in comm_msg for %s", comm_id, exc_info=True)
139
139
140 def comm_close(self, stream, ident, msg):
140 def comm_close(self, stream, ident, msg):
141 """Handler for comm_close messages"""
141 """Handler for comm_close messages"""
142 content = msg['content']
142 content = msg['content']
143 comm_id = content['comm_id']
143 comm_id = content['comm_id']
144 comm = self.get_comm(comm_id)
144 comm = self.get_comm(comm_id)
145 if comm is None:
145 if comm is None:
146 # no such comm
146 # no such comm
147 self.log.debug("No such comm to close: %s", comm_id)
147 self.log.debug("No such comm to close: %s", comm_id)
148 return
148 return
149 del self.comms[comm_id]
149 del self.comms[comm_id]
150
150
151 try:
151 try:
152 comm.handle_close(msg)
152 comm.handle_close(msg)
153 except Exception:
153 except Exception:
154 self.log.error("Exception handling comm_close for %s", comm_id, exc_info=True)
154 self.log.error("Exception handling comm_close for %s", comm_id, exc_info=True)
155
155
156
156
157 __all__ = ['CommManager']
157 __all__ = ['CommManager']
1 NO CONTENT: file renamed from IPython/kernel/inprocess/__init__.py to ipython_kernel/inprocess/__init__.py
NO CONTENT: file renamed from IPython/kernel/inprocess/__init__.py to ipython_kernel/inprocess/__init__.py
@@ -1,94 +1,93 b''
1 """ Implements a fully blocking kernel client.
1 """ Implements a fully blocking kernel client.
2
2
3 Useful for test suites and blocking terminal interfaces.
3 Useful for test suites and blocking terminal interfaces.
4 """
4 """
5 #-----------------------------------------------------------------------------
5 #-----------------------------------------------------------------------------
6 # Copyright (C) 2012 The IPython Development Team
6 # Copyright (C) 2012 The IPython Development Team
7 #
7 #
8 # Distributed under the terms of the BSD License. The full license is in
8 # Distributed under the terms of the BSD License. The full license is in
9 # the file COPYING.txt, distributed as part of this software.
9 # the file COPYING.txt, distributed as part of this software.
10 #-----------------------------------------------------------------------------
10 #-----------------------------------------------------------------------------
11
11
12 try:
12 try:
13 from queue import Queue, Empty # Py 3
13 from queue import Queue, Empty # Py 3
14 except ImportError:
14 except ImportError:
15 from Queue import Queue, Empty # Py 2
15 from Queue import Queue, Empty # Py 2
16
16
17 # IPython imports
17 # IPython imports
18 from IPython.utils.io import raw_print
18 from IPython.utils.io import raw_print
19 from IPython.utils.traitlets import Type
19 from IPython.utils.traitlets import Type
20 #from IPython.kernel.blocking.channels import BlockingChannelMixin
21
20
22 # Local imports
21 # Local imports
23 from .channels import (
22 from .channels import (
24 InProcessChannel,
23 InProcessChannel,
25 )
24 )
26 from .client import InProcessKernelClient
25 from .client import InProcessKernelClient
27
26
28 class BlockingInProcessChannel(InProcessChannel):
27 class BlockingInProcessChannel(InProcessChannel):
29
28
30 def __init__(self, *args, **kwds):
29 def __init__(self, *args, **kwds):
31 super(BlockingInProcessChannel, self).__init__(*args, **kwds)
30 super(BlockingInProcessChannel, self).__init__(*args, **kwds)
32 self._in_queue = Queue()
31 self._in_queue = Queue()
33
32
34 def call_handlers(self, msg):
33 def call_handlers(self, msg):
35 self._in_queue.put(msg)
34 self._in_queue.put(msg)
36
35
37 def get_msg(self, block=True, timeout=None):
36 def get_msg(self, block=True, timeout=None):
38 """ Gets a message if there is one that is ready. """
37 """ Gets a message if there is one that is ready. """
39 if timeout is None:
38 if timeout is None:
40 # Queue.get(timeout=None) has stupid uninteruptible
39 # Queue.get(timeout=None) has stupid uninteruptible
41 # behavior, so wait for a week instead
40 # behavior, so wait for a week instead
42 timeout = 604800
41 timeout = 604800
43 return self._in_queue.get(block, timeout)
42 return self._in_queue.get(block, timeout)
44
43
45 def get_msgs(self):
44 def get_msgs(self):
46 """ Get all messages that are currently ready. """
45 """ Get all messages that are currently ready. """
47 msgs = []
46 msgs = []
48 while True:
47 while True:
49 try:
48 try:
50 msgs.append(self.get_msg(block=False))
49 msgs.append(self.get_msg(block=False))
51 except Empty:
50 except Empty:
52 break
51 break
53 return msgs
52 return msgs
54
53
55 def msg_ready(self):
54 def msg_ready(self):
56 """ Is there a message that has been received? """
55 """ Is there a message that has been received? """
57 return not self._in_queue.empty()
56 return not self._in_queue.empty()
58
57
59
58
60 class BlockingInProcessStdInChannel(BlockingInProcessChannel):
59 class BlockingInProcessStdInChannel(BlockingInProcessChannel):
61 def call_handlers(self, msg):
60 def call_handlers(self, msg):
62 """ Overridden for the in-process channel.
61 """ Overridden for the in-process channel.
63
62
64 This methods simply calls raw_input directly.
63 This methods simply calls raw_input directly.
65 """
64 """
66 msg_type = msg['header']['msg_type']
65 msg_type = msg['header']['msg_type']
67 if msg_type == 'input_request':
66 if msg_type == 'input_request':
68 _raw_input = self.client.kernel._sys_raw_input
67 _raw_input = self.client.kernel._sys_raw_input
69 prompt = msg['content']['prompt']
68 prompt = msg['content']['prompt']
70 raw_print(prompt, end='')
69 raw_print(prompt, end='')
71 self.client.input(_raw_input())
70 self.client.input(_raw_input())
72
71
73 class BlockingInProcessKernelClient(InProcessKernelClient):
72 class BlockingInProcessKernelClient(InProcessKernelClient):
74
73
75 # The classes to use for the various channels.
74 # The classes to use for the various channels.
76 shell_channel_class = Type(BlockingInProcessChannel)
75 shell_channel_class = Type(BlockingInProcessChannel)
77 iopub_channel_class = Type(BlockingInProcessChannel)
76 iopub_channel_class = Type(BlockingInProcessChannel)
78 stdin_channel_class = Type(BlockingInProcessStdInChannel)
77 stdin_channel_class = Type(BlockingInProcessStdInChannel)
79
78
80 def wait_for_ready(self):
79 def wait_for_ready(self):
81 # Wait for kernel info reply on shell channel
80 # Wait for kernel info reply on shell channel
82 while True:
81 while True:
83 msg = self.shell_channel.get_msg(block=True)
82 msg = self.shell_channel.get_msg(block=True)
84 if msg['msg_type'] == 'kernel_info_reply':
83 if msg['msg_type'] == 'kernel_info_reply':
85 self._handle_kernel_info_reply(msg)
84 self._handle_kernel_info_reply(msg)
86 break
85 break
87
86
88 # Flush IOPub channel
87 # Flush IOPub channel
89 while True:
88 while True:
90 try:
89 try:
91 msg = self.iopub_channel.get_msg(block=True, timeout=0.2)
90 msg = self.iopub_channel.get_msg(block=True, timeout=0.2)
92 print(msg['msg_type'])
91 print(msg['msg_type'])
93 except Empty:
92 except Empty:
94 break
93 break
@@ -1,97 +1,97 b''
1 """A kernel client for in-process kernels."""
1 """A kernel client for in-process kernels."""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from IPython.kernel.channelsabc import HBChannelABC
6 from jupyter_client.channelsabc import HBChannelABC
7
7
8 from .socket import DummySocket
8 from .socket import DummySocket
9
9
10 #-----------------------------------------------------------------------------
10 #-----------------------------------------------------------------------------
11 # Channel classes
11 # Channel classes
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13
13
14 class InProcessChannel(object):
14 class InProcessChannel(object):
15 """Base class for in-process channels."""
15 """Base class for in-process channels."""
16 proxy_methods = []
16 proxy_methods = []
17
17
18 def __init__(self, client=None):
18 def __init__(self, client=None):
19 super(InProcessChannel, self).__init__()
19 super(InProcessChannel, self).__init__()
20 self.client = client
20 self.client = client
21 self._is_alive = False
21 self._is_alive = False
22
22
23 def is_alive(self):
23 def is_alive(self):
24 return self._is_alive
24 return self._is_alive
25
25
26 def start(self):
26 def start(self):
27 self._is_alive = True
27 self._is_alive = True
28
28
29 def stop(self):
29 def stop(self):
30 self._is_alive = False
30 self._is_alive = False
31
31
32 def call_handlers(self, msg):
32 def call_handlers(self, msg):
33 """ This method is called in the main thread when a message arrives.
33 """ This method is called in the main thread when a message arrives.
34
34
35 Subclasses should override this method to handle incoming messages.
35 Subclasses should override this method to handle incoming messages.
36 """
36 """
37 raise NotImplementedError('call_handlers must be defined in a subclass.')
37 raise NotImplementedError('call_handlers must be defined in a subclass.')
38
38
39 def flush(self, timeout=1.0):
39 def flush(self, timeout=1.0):
40 pass
40 pass
41
41
42
42
43 def call_handlers_later(self, *args, **kwds):
43 def call_handlers_later(self, *args, **kwds):
44 """ Call the message handlers later.
44 """ Call the message handlers later.
45
45
46 The default implementation just calls the handlers immediately, but this
46 The default implementation just calls the handlers immediately, but this
47 method exists so that GUI toolkits can defer calling the handlers until
47 method exists so that GUI toolkits can defer calling the handlers until
48 after the event loop has run, as expected by GUI frontends.
48 after the event loop has run, as expected by GUI frontends.
49 """
49 """
50 self.call_handlers(*args, **kwds)
50 self.call_handlers(*args, **kwds)
51
51
52 def process_events(self):
52 def process_events(self):
53 """ Process any pending GUI events.
53 """ Process any pending GUI events.
54
54
55 This method will be never be called from a frontend without an event
55 This method will be never be called from a frontend without an event
56 loop (e.g., a terminal frontend).
56 loop (e.g., a terminal frontend).
57 """
57 """
58 raise NotImplementedError
58 raise NotImplementedError
59
59
60
60
61
61
62 class InProcessHBChannel(object):
62 class InProcessHBChannel(object):
63 """A dummy heartbeat channel interface for in-process kernels.
63 """A dummy heartbeat channel interface for in-process kernels.
64
64
65 Normally we use the heartbeat to check that the kernel process is alive.
65 Normally we use the heartbeat to check that the kernel process is alive.
66 When the kernel is in-process, that doesn't make sense, but clients still
66 When the kernel is in-process, that doesn't make sense, but clients still
67 expect this interface.
67 expect this interface.
68 """
68 """
69
69
70 time_to_dead = 3.0
70 time_to_dead = 3.0
71
71
72 def __init__(self, client=None):
72 def __init__(self, client=None):
73 super(InProcessHBChannel, self).__init__()
73 super(InProcessHBChannel, self).__init__()
74 self.client = client
74 self.client = client
75 self._is_alive = False
75 self._is_alive = False
76 self._pause = True
76 self._pause = True
77
77
78 def is_alive(self):
78 def is_alive(self):
79 return self._is_alive
79 return self._is_alive
80
80
81 def start(self):
81 def start(self):
82 self._is_alive = True
82 self._is_alive = True
83
83
84 def stop(self):
84 def stop(self):
85 self._is_alive = False
85 self._is_alive = False
86
86
87 def pause(self):
87 def pause(self):
88 self._pause = True
88 self._pause = True
89
89
90 def unpause(self):
90 def unpause(self):
91 self._pause = False
91 self._pause = False
92
92
93 def is_beating(self):
93 def is_beating(self):
94 return not self._pause
94 return not self._pause
95
95
96
96
97 HBChannelABC.register(InProcessHBChannel)
97 HBChannelABC.register(InProcessHBChannel)
@@ -1,157 +1,157 b''
1 """A client for in-process kernels."""
1 """A client for in-process kernels."""
2
2
3 #-----------------------------------------------------------------------------
3 #-----------------------------------------------------------------------------
4 # Copyright (C) 2012 The IPython Development Team
4 # Copyright (C) 2012 The IPython Development Team
5 #
5 #
6 # Distributed under the terms of the BSD License. The full license is in
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
7 # the file COPYING, distributed as part of this software.
8 #-----------------------------------------------------------------------------
8 #-----------------------------------------------------------------------------
9
9
10 #-----------------------------------------------------------------------------
10 #-----------------------------------------------------------------------------
11 # Imports
11 # Imports
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13
13
14 # IPython imports
14 # IPython imports
15 from IPython.kernel.inprocess.socket import DummySocket
15 from ipython_kernel.inprocess.socket import DummySocket
16 from IPython.utils.traitlets import Type, Instance
16 from IPython.utils.traitlets import Type, Instance
17 from IPython.kernel.clientabc import KernelClientABC
17 from jupyter_client.clientabc import KernelClientABC
18 from IPython.kernel.client import KernelClient
18 from jupyter_client.client import KernelClient
19
19
20 # Local imports
20 # Local imports
21 from .channels import (
21 from .channels import (
22 InProcessChannel,
22 InProcessChannel,
23 InProcessHBChannel,
23 InProcessHBChannel,
24
24
25 )
25 )
26
26
27 #-----------------------------------------------------------------------------
27 #-----------------------------------------------------------------------------
28 # Main kernel Client class
28 # Main kernel Client class
29 #-----------------------------------------------------------------------------
29 #-----------------------------------------------------------------------------
30
30
31 class InProcessKernelClient(KernelClient):
31 class InProcessKernelClient(KernelClient):
32 """A client for an in-process kernel.
32 """A client for an in-process kernel.
33
33
34 This class implements the interface of
34 This class implements the interface of
35 `IPython.kernel.clientabc.KernelClientABC` and allows
35 `jupyter_client.clientabc.KernelClientABC` and allows
36 (asynchronous) frontends to be used seamlessly with an in-process kernel.
36 (asynchronous) frontends to be used seamlessly with an in-process kernel.
37
37
38 See `IPython.kernel.client.KernelClient` for docstrings.
38 See `jupyter_client.client.KernelClient` for docstrings.
39 """
39 """
40
40
41 # The classes to use for the various channels.
41 # The classes to use for the various channels.
42 shell_channel_class = Type(InProcessChannel)
42 shell_channel_class = Type(InProcessChannel)
43 iopub_channel_class = Type(InProcessChannel)
43 iopub_channel_class = Type(InProcessChannel)
44 stdin_channel_class = Type(InProcessChannel)
44 stdin_channel_class = Type(InProcessChannel)
45 hb_channel_class = Type(InProcessHBChannel)
45 hb_channel_class = Type(InProcessHBChannel)
46
46
47 kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel',
47 kernel = Instance('ipython_kernel.inprocess.ipkernel.InProcessKernel',
48 allow_none=True)
48 allow_none=True)
49
49
50 #--------------------------------------------------------------------------
50 #--------------------------------------------------------------------------
51 # Channel management methods
51 # Channel management methods
52 #--------------------------------------------------------------------------
52 #--------------------------------------------------------------------------
53
53
54 def start_channels(self, *args, **kwargs):
54 def start_channels(self, *args, **kwargs):
55 super(InProcessKernelClient, self).start_channels(self)
55 super(InProcessKernelClient, self).start_channels(self)
56 self.kernel.frontends.append(self)
56 self.kernel.frontends.append(self)
57
57
58 @property
58 @property
59 def shell_channel(self):
59 def shell_channel(self):
60 if self._shell_channel is None:
60 if self._shell_channel is None:
61 self._shell_channel = self.shell_channel_class(self)
61 self._shell_channel = self.shell_channel_class(self)
62 return self._shell_channel
62 return self._shell_channel
63
63
64 @property
64 @property
65 def iopub_channel(self):
65 def iopub_channel(self):
66 if self._iopub_channel is None:
66 if self._iopub_channel is None:
67 self._iopub_channel = self.iopub_channel_class(self)
67 self._iopub_channel = self.iopub_channel_class(self)
68 return self._iopub_channel
68 return self._iopub_channel
69
69
70 @property
70 @property
71 def stdin_channel(self):
71 def stdin_channel(self):
72 if self._stdin_channel is None:
72 if self._stdin_channel is None:
73 self._stdin_channel = self.stdin_channel_class(self)
73 self._stdin_channel = self.stdin_channel_class(self)
74 return self._stdin_channel
74 return self._stdin_channel
75
75
76 @property
76 @property
77 def hb_channel(self):
77 def hb_channel(self):
78 if self._hb_channel is None:
78 if self._hb_channel is None:
79 self._hb_channel = self.hb_channel_class(self)
79 self._hb_channel = self.hb_channel_class(self)
80 return self._hb_channel
80 return self._hb_channel
81
81
82 # Methods for sending specific messages
82 # Methods for sending specific messages
83 # -------------------------------------
83 # -------------------------------------
84
84
85 def execute(self, code, silent=False, store_history=True,
85 def execute(self, code, silent=False, store_history=True,
86 user_expressions={}, allow_stdin=None):
86 user_expressions={}, allow_stdin=None):
87 if allow_stdin is None:
87 if allow_stdin is None:
88 allow_stdin = self.allow_stdin
88 allow_stdin = self.allow_stdin
89 content = dict(code=code, silent=silent, store_history=store_history,
89 content = dict(code=code, silent=silent, store_history=store_history,
90 user_expressions=user_expressions,
90 user_expressions=user_expressions,
91 allow_stdin=allow_stdin)
91 allow_stdin=allow_stdin)
92 msg = self.session.msg('execute_request', content)
92 msg = self.session.msg('execute_request', content)
93 self._dispatch_to_kernel(msg)
93 self._dispatch_to_kernel(msg)
94 return msg['header']['msg_id']
94 return msg['header']['msg_id']
95
95
96 def complete(self, code, cursor_pos=None):
96 def complete(self, code, cursor_pos=None):
97 if cursor_pos is None:
97 if cursor_pos is None:
98 cursor_pos = len(code)
98 cursor_pos = len(code)
99 content = dict(code=code, cursor_pos=cursor_pos)
99 content = dict(code=code, cursor_pos=cursor_pos)
100 msg = self.session.msg('complete_request', content)
100 msg = self.session.msg('complete_request', content)
101 self._dispatch_to_kernel(msg)
101 self._dispatch_to_kernel(msg)
102 return msg['header']['msg_id']
102 return msg['header']['msg_id']
103
103
104 def inspect(self, code, cursor_pos=None, detail_level=0):
104 def inspect(self, code, cursor_pos=None, detail_level=0):
105 if cursor_pos is None:
105 if cursor_pos is None:
106 cursor_pos = len(code)
106 cursor_pos = len(code)
107 content = dict(code=code, cursor_pos=cursor_pos,
107 content = dict(code=code, cursor_pos=cursor_pos,
108 detail_level=detail_level,
108 detail_level=detail_level,
109 )
109 )
110 msg = self.session.msg('inspect_request', content)
110 msg = self.session.msg('inspect_request', content)
111 self._dispatch_to_kernel(msg)
111 self._dispatch_to_kernel(msg)
112 return msg['header']['msg_id']
112 return msg['header']['msg_id']
113
113
114 def history(self, raw=True, output=False, hist_access_type='range', **kwds):
114 def history(self, raw=True, output=False, hist_access_type='range', **kwds):
115 content = dict(raw=raw, output=output,
115 content = dict(raw=raw, output=output,
116 hist_access_type=hist_access_type, **kwds)
116 hist_access_type=hist_access_type, **kwds)
117 msg = self.session.msg('history_request', content)
117 msg = self.session.msg('history_request', content)
118 self._dispatch_to_kernel(msg)
118 self._dispatch_to_kernel(msg)
119 return msg['header']['msg_id']
119 return msg['header']['msg_id']
120
120
121 def shutdown(self, restart=False):
121 def shutdown(self, restart=False):
122 # FIXME: What to do here?
122 # FIXME: What to do here?
123 raise NotImplementedError('Cannot shutdown in-process kernel')
123 raise NotImplementedError('Cannot shutdown in-process kernel')
124
124
125 def kernel_info(self):
125 def kernel_info(self):
126 """Request kernel info."""
126 """Request kernel info."""
127 msg = self.session.msg('kernel_info_request')
127 msg = self.session.msg('kernel_info_request')
128 self._dispatch_to_kernel(msg)
128 self._dispatch_to_kernel(msg)
129 return msg['header']['msg_id']
129 return msg['header']['msg_id']
130
130
131 def input(self, string):
131 def input(self, string):
132 if self.kernel is None:
132 if self.kernel is None:
133 raise RuntimeError('Cannot send input reply. No kernel exists.')
133 raise RuntimeError('Cannot send input reply. No kernel exists.')
134 self.kernel.raw_input_str = string
134 self.kernel.raw_input_str = string
135
135
136
136
137 def _dispatch_to_kernel(self, msg):
137 def _dispatch_to_kernel(self, msg):
138 """ Send a message to the kernel and handle a reply.
138 """ Send a message to the kernel and handle a reply.
139 """
139 """
140 kernel = self.kernel
140 kernel = self.kernel
141 if kernel is None:
141 if kernel is None:
142 raise RuntimeError('Cannot send request. No kernel exists.')
142 raise RuntimeError('Cannot send request. No kernel exists.')
143
143
144 stream = DummySocket()
144 stream = DummySocket()
145 self.session.send(stream, msg)
145 self.session.send(stream, msg)
146 msg_parts = stream.recv_multipart()
146 msg_parts = stream.recv_multipart()
147 kernel.dispatch_shell(stream, msg_parts)
147 kernel.dispatch_shell(stream, msg_parts)
148
148
149 idents, reply_msg = self.session.recv(stream, copy=False)
149 idents, reply_msg = self.session.recv(stream, copy=False)
150 self.shell_channel.call_handlers_later(reply_msg)
150 self.shell_channel.call_handlers_later(reply_msg)
151
151
152
152
153 #-----------------------------------------------------------------------------
153 #-----------------------------------------------------------------------------
154 # ABC Registration
154 # ABC Registration
155 #-----------------------------------------------------------------------------
155 #-----------------------------------------------------------------------------
156
156
157 KernelClientABC.register(InProcessKernelClient)
157 KernelClientABC.register(InProcessKernelClient)
@@ -1,171 +1,171 b''
1 """An in-process kernel"""
1 """An in-process kernel"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from contextlib import contextmanager
6 from contextlib import contextmanager
7 import logging
7 import logging
8 import sys
8 import sys
9
9
10 from IPython.core.interactiveshell import InteractiveShellABC
10 from IPython.core.interactiveshell import InteractiveShellABC
11 from IPython.utils.jsonutil import json_clean
11 from IPython.utils.jsonutil import json_clean
12 from IPython.utils.traitlets import Any, Enum, Instance, List, Type
12 from IPython.utils.traitlets import Any, Enum, Instance, List, Type
13 from IPython.kernel.zmq.ipkernel import IPythonKernel
13 from ipython_kernel.zmq.ipkernel import IPythonKernel
14 from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell
14 from ipython_kernel.zmq.zmqshell import ZMQInteractiveShell
15
15
16 from .socket import DummySocket
16 from .socket import DummySocket
17
17
18 #-----------------------------------------------------------------------------
18 #-----------------------------------------------------------------------------
19 # Main kernel class
19 # Main kernel class
20 #-----------------------------------------------------------------------------
20 #-----------------------------------------------------------------------------
21
21
22 class InProcessKernel(IPythonKernel):
22 class InProcessKernel(IPythonKernel):
23
23
24 #-------------------------------------------------------------------------
24 #-------------------------------------------------------------------------
25 # InProcessKernel interface
25 # InProcessKernel interface
26 #-------------------------------------------------------------------------
26 #-------------------------------------------------------------------------
27
27
28 # The frontends connected to this kernel.
28 # The frontends connected to this kernel.
29 frontends = List(
29 frontends = List(
30 Instance('IPython.kernel.inprocess.client.InProcessKernelClient',
30 Instance('ipython_kernel.inprocess.client.InProcessKernelClient',
31 allow_none=True)
31 allow_none=True)
32 )
32 )
33
33
34 # The GUI environment that the kernel is running under. This need not be
34 # The GUI environment that the kernel is running under. This need not be
35 # specified for the normal operation for the kernel, but is required for
35 # specified for the normal operation for the kernel, but is required for
36 # IPython's GUI support (including pylab). The default is 'inline' because
36 # IPython's GUI support (including pylab). The default is 'inline' because
37 # it is safe under all GUI toolkits.
37 # it is safe under all GUI toolkits.
38 gui = Enum(('tk', 'gtk', 'wx', 'qt', 'qt4', 'inline'),
38 gui = Enum(('tk', 'gtk', 'wx', 'qt', 'qt4', 'inline'),
39 default_value='inline')
39 default_value='inline')
40
40
41 raw_input_str = Any()
41 raw_input_str = Any()
42 stdout = Any()
42 stdout = Any()
43 stderr = Any()
43 stderr = Any()
44
44
45 #-------------------------------------------------------------------------
45 #-------------------------------------------------------------------------
46 # Kernel interface
46 # Kernel interface
47 #-------------------------------------------------------------------------
47 #-------------------------------------------------------------------------
48
48
49 shell_class = Type(allow_none=True)
49 shell_class = Type(allow_none=True)
50 shell_streams = List()
50 shell_streams = List()
51 control_stream = Any()
51 control_stream = Any()
52 iopub_socket = Instance(DummySocket, ())
52 iopub_socket = Instance(DummySocket, ())
53 stdin_socket = Instance(DummySocket, ())
53 stdin_socket = Instance(DummySocket, ())
54
54
55 def __init__(self, **traits):
55 def __init__(self, **traits):
56 super(InProcessKernel, self).__init__(**traits)
56 super(InProcessKernel, self).__init__(**traits)
57
57
58 self.iopub_socket.on_trait_change(self._io_dispatch, 'message_sent')
58 self.iopub_socket.on_trait_change(self._io_dispatch, 'message_sent')
59 self.shell.kernel = self
59 self.shell.kernel = self
60
60
61 def execute_request(self, stream, ident, parent):
61 def execute_request(self, stream, ident, parent):
62 """ Override for temporary IO redirection. """
62 """ Override for temporary IO redirection. """
63 with self._redirected_io():
63 with self._redirected_io():
64 super(InProcessKernel, self).execute_request(stream, ident, parent)
64 super(InProcessKernel, self).execute_request(stream, ident, parent)
65
65
66 def start(self):
66 def start(self):
67 """ Override registration of dispatchers for streams. """
67 """ Override registration of dispatchers for streams. """
68 self.shell.exit_now = False
68 self.shell.exit_now = False
69
69
70 def _abort_queue(self, stream):
70 def _abort_queue(self, stream):
71 """ The in-process kernel doesn't abort requests. """
71 """ The in-process kernel doesn't abort requests. """
72 pass
72 pass
73
73
74 def _input_request(self, prompt, ident, parent, password=False):
74 def _input_request(self, prompt, ident, parent, password=False):
75 # Flush output before making the request.
75 # Flush output before making the request.
76 self.raw_input_str = None
76 self.raw_input_str = None
77 sys.stderr.flush()
77 sys.stderr.flush()
78 sys.stdout.flush()
78 sys.stdout.flush()
79
79
80 # Send the input request.
80 # Send the input request.
81 content = json_clean(dict(prompt=prompt, password=password))
81 content = json_clean(dict(prompt=prompt, password=password))
82 msg = self.session.msg(u'input_request', content, parent)
82 msg = self.session.msg(u'input_request', content, parent)
83 for frontend in self.frontends:
83 for frontend in self.frontends:
84 if frontend.session.session == parent['header']['session']:
84 if frontend.session.session == parent['header']['session']:
85 frontend.stdin_channel.call_handlers(msg)
85 frontend.stdin_channel.call_handlers(msg)
86 break
86 break
87 else:
87 else:
88 logging.error('No frontend found for raw_input request')
88 logging.error('No frontend found for raw_input request')
89 return str()
89 return str()
90
90
91 # Await a response.
91 # Await a response.
92 while self.raw_input_str is None:
92 while self.raw_input_str is None:
93 frontend.stdin_channel.process_events()
93 frontend.stdin_channel.process_events()
94 return self.raw_input_str
94 return self.raw_input_str
95
95
96 #-------------------------------------------------------------------------
96 #-------------------------------------------------------------------------
97 # Protected interface
97 # Protected interface
98 #-------------------------------------------------------------------------
98 #-------------------------------------------------------------------------
99
99
100 @contextmanager
100 @contextmanager
101 def _redirected_io(self):
101 def _redirected_io(self):
102 """ Temporarily redirect IO to the kernel.
102 """ Temporarily redirect IO to the kernel.
103 """
103 """
104 sys_stdout, sys_stderr = sys.stdout, sys.stderr
104 sys_stdout, sys_stderr = sys.stdout, sys.stderr
105 sys.stdout, sys.stderr = self.stdout, self.stderr
105 sys.stdout, sys.stderr = self.stdout, self.stderr
106 yield
106 yield
107 sys.stdout, sys.stderr = sys_stdout, sys_stderr
107 sys.stdout, sys.stderr = sys_stdout, sys_stderr
108
108
109 #------ Trait change handlers --------------------------------------------
109 #------ Trait change handlers --------------------------------------------
110
110
111 def _io_dispatch(self):
111 def _io_dispatch(self):
112 """ Called when a message is sent to the IO socket.
112 """ Called when a message is sent to the IO socket.
113 """
113 """
114 ident, msg = self.session.recv(self.iopub_socket, copy=False)
114 ident, msg = self.session.recv(self.iopub_socket, copy=False)
115 for frontend in self.frontends:
115 for frontend in self.frontends:
116 frontend.iopub_channel.call_handlers(msg)
116 frontend.iopub_channel.call_handlers(msg)
117
117
118 #------ Trait initializers -----------------------------------------------
118 #------ Trait initializers -----------------------------------------------
119
119
120 def _log_default(self):
120 def _log_default(self):
121 return logging.getLogger(__name__)
121 return logging.getLogger(__name__)
122
122
123 def _session_default(self):
123 def _session_default(self):
124 from IPython.kernel.zmq.session import Session
124 from ipython_kernel.zmq.session import Session
125 return Session(parent=self, key=b'')
125 return Session(parent=self, key=b'')
126
126
127 def _shell_class_default(self):
127 def _shell_class_default(self):
128 return InProcessInteractiveShell
128 return InProcessInteractiveShell
129
129
130 def _stdout_default(self):
130 def _stdout_default(self):
131 from IPython.kernel.zmq.iostream import OutStream
131 from ipython_kernel.zmq.iostream import OutStream
132 return OutStream(self.session, self.iopub_socket, u'stdout', pipe=False)
132 return OutStream(self.session, self.iopub_socket, u'stdout', pipe=False)
133
133
134 def _stderr_default(self):
134 def _stderr_default(self):
135 from IPython.kernel.zmq.iostream import OutStream
135 from ipython_kernel.zmq.iostream import OutStream
136 return OutStream(self.session, self.iopub_socket, u'stderr', pipe=False)
136 return OutStream(self.session, self.iopub_socket, u'stderr', pipe=False)
137
137
138 #-----------------------------------------------------------------------------
138 #-----------------------------------------------------------------------------
139 # Interactive shell subclass
139 # Interactive shell subclass
140 #-----------------------------------------------------------------------------
140 #-----------------------------------------------------------------------------
141
141
142 class InProcessInteractiveShell(ZMQInteractiveShell):
142 class InProcessInteractiveShell(ZMQInteractiveShell):
143
143
144 kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel',
144 kernel = Instance('ipython_kernel.inprocess.ipkernel.InProcessKernel',
145 allow_none=True)
145 allow_none=True)
146
146
147 #-------------------------------------------------------------------------
147 #-------------------------------------------------------------------------
148 # InteractiveShell interface
148 # InteractiveShell interface
149 #-------------------------------------------------------------------------
149 #-------------------------------------------------------------------------
150
150
151 def enable_gui(self, gui=None):
151 def enable_gui(self, gui=None):
152 """Enable GUI integration for the kernel."""
152 """Enable GUI integration for the kernel."""
153 from IPython.kernel.zmq.eventloops import enable_gui
153 from ipython_kernel.zmq.eventloops import enable_gui
154 if not gui:
154 if not gui:
155 gui = self.kernel.gui
155 gui = self.kernel.gui
156 return enable_gui(gui, kernel=self.kernel)
156 return enable_gui(gui, kernel=self.kernel)
157
157
158 def enable_matplotlib(self, gui=None):
158 def enable_matplotlib(self, gui=None):
159 """Enable matplotlib integration for the kernel."""
159 """Enable matplotlib integration for the kernel."""
160 if not gui:
160 if not gui:
161 gui = self.kernel.gui
161 gui = self.kernel.gui
162 return super(InProcessInteractiveShell, self).enable_matplotlib(gui)
162 return super(InProcessInteractiveShell, self).enable_matplotlib(gui)
163
163
164 def enable_pylab(self, gui=None, import_all=True, welcome_message=False):
164 def enable_pylab(self, gui=None, import_all=True, welcome_message=False):
165 """Activate pylab support at runtime."""
165 """Activate pylab support at runtime."""
166 if not gui:
166 if not gui:
167 gui = self.kernel.gui
167 gui = self.kernel.gui
168 return super(InProcessInteractiveShell, self).enable_pylab(gui, import_all,
168 return super(InProcessInteractiveShell, self).enable_pylab(gui, import_all,
169 welcome_message)
169 welcome_message)
170
170
171 InteractiveShellABC.register(InProcessInteractiveShell)
171 InteractiveShellABC.register(InProcessInteractiveShell)
@@ -1,72 +1,72 b''
1 """A kernel manager for in-process kernels."""
1 """A kernel manager for in-process kernels."""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from IPython.utils.traitlets import Instance, DottedObjectName
6 from IPython.utils.traitlets import Instance, DottedObjectName
7 from IPython.kernel.managerabc import KernelManagerABC
7 from jupyter_client.managerabc import KernelManagerABC
8 from IPython.kernel.manager import KernelManager
8 from jupyter_client.manager import KernelManager
9 from IPython.kernel.zmq.session import Session
9 from jupyter_client.session import Session
10
10
11
11
12 class InProcessKernelManager(KernelManager):
12 class InProcessKernelManager(KernelManager):
13 """A manager for an in-process kernel.
13 """A manager for an in-process kernel.
14
14
15 This class implements the interface of
15 This class implements the interface of
16 `IPython.kernel.kernelmanagerabc.KernelManagerABC` and allows
16 `jupyter_client.kernelmanagerabc.KernelManagerABC` and allows
17 (asynchronous) frontends to be used seamlessly with an in-process kernel.
17 (asynchronous) frontends to be used seamlessly with an in-process kernel.
18
18
19 See `IPython.kernel.kernelmanager.KernelManager` for docstrings.
19 See `jupyter_client.kernelmanager.KernelManager` for docstrings.
20 """
20 """
21
21
22 # The kernel process with which the KernelManager is communicating.
22 # The kernel process with which the KernelManager is communicating.
23 kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel',
23 kernel = Instance('ipython_kernel.inprocess.ipkernel.InProcessKernel',
24 allow_none=True)
24 allow_none=True)
25 # the client class for KM.client() shortcut
25 # the client class for KM.client() shortcut
26 client_class = DottedObjectName('IPython.kernel.inprocess.BlockingInProcessKernelClient')
26 client_class = DottedObjectName('ipython_kernel.inprocess.BlockingInProcessKernelClient')
27
27
28 def _session_default(self):
28 def _session_default(self):
29 # don't sign in-process messages
29 # don't sign in-process messages
30 return Session(key=b'', parent=self)
30 return Session(key=b'', parent=self)
31
31
32 #--------------------------------------------------------------------------
32 #--------------------------------------------------------------------------
33 # Kernel management methods
33 # Kernel management methods
34 #--------------------------------------------------------------------------
34 #--------------------------------------------------------------------------
35
35
36 def start_kernel(self, **kwds):
36 def start_kernel(self, **kwds):
37 from IPython.kernel.inprocess.ipkernel import InProcessKernel
37 from ipython_kernel.inprocess.ipkernel import InProcessKernel
38 self.kernel = InProcessKernel(parent=self, session=self.session)
38 self.kernel = InProcessKernel(parent=self, session=self.session)
39
39
40 def shutdown_kernel(self):
40 def shutdown_kernel(self):
41 self._kill_kernel()
41 self._kill_kernel()
42
42
43 def restart_kernel(self, now=False, **kwds):
43 def restart_kernel(self, now=False, **kwds):
44 self.shutdown_kernel()
44 self.shutdown_kernel()
45 self.start_kernel(**kwds)
45 self.start_kernel(**kwds)
46
46
47 @property
47 @property
48 def has_kernel(self):
48 def has_kernel(self):
49 return self.kernel is not None
49 return self.kernel is not None
50
50
51 def _kill_kernel(self):
51 def _kill_kernel(self):
52 self.kernel = None
52 self.kernel = None
53
53
54 def interrupt_kernel(self):
54 def interrupt_kernel(self):
55 raise NotImplementedError("Cannot interrupt in-process kernel.")
55 raise NotImplementedError("Cannot interrupt in-process kernel.")
56
56
57 def signal_kernel(self, signum):
57 def signal_kernel(self, signum):
58 raise NotImplementedError("Cannot signal in-process kernel.")
58 raise NotImplementedError("Cannot signal in-process kernel.")
59
59
60 def is_alive(self):
60 def is_alive(self):
61 return self.kernel is not None
61 return self.kernel is not None
62
62
63 def client(self, **kwargs):
63 def client(self, **kwargs):
64 kwargs['kernel'] = self.kernel
64 kwargs['kernel'] = self.kernel
65 return super(InProcessKernelManager, self).client(**kwargs)
65 return super(InProcessKernelManager, self).client(**kwargs)
66
66
67
67
68 #-----------------------------------------------------------------------------
68 #-----------------------------------------------------------------------------
69 # ABC Registration
69 # ABC Registration
70 #-----------------------------------------------------------------------------
70 #-----------------------------------------------------------------------------
71
71
72 KernelManagerABC.register(InProcessKernelManager)
72 KernelManagerABC.register(InProcessKernelManager)
@@ -1,65 +1,65 b''
1 """ Defines a dummy socket implementing (part of) the zmq.Socket interface. """
1 """ Defines a dummy socket implementing (part of) the zmq.Socket interface. """
2
2
3 #-----------------------------------------------------------------------------
3 #-----------------------------------------------------------------------------
4 # Copyright (C) 2012 The IPython Development Team
4 # Copyright (C) 2012 The IPython Development Team
5 #
5 #
6 # Distributed under the terms of the BSD License. The full license is in
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
7 # the file COPYING, distributed as part of this software.
8 #-----------------------------------------------------------------------------
8 #-----------------------------------------------------------------------------
9
9
10 #-----------------------------------------------------------------------------
10 #-----------------------------------------------------------------------------
11 # Imports
11 # Imports
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13
13
14 # Standard library imports.
14 # Standard library imports.
15 import abc
15 import abc
16 try:
16 try:
17 from queue import Queue # Py 3
17 from queue import Queue # Py 3
18 except ImportError:
18 except ImportError:
19 from Queue import Queue # Py 2
19 from Queue import Queue # Py 2
20
20
21 # System library imports.
21 # System library imports.
22 import zmq
22 import zmq
23
23
24 # Local imports.
24 # Local imports.
25 from IPython.utils.traitlets import HasTraits, Instance, Int
25 from IPython.utils.traitlets import HasTraits, Instance, Int
26 from IPython.utils.py3compat import with_metaclass
26 from IPython.utils.py3compat import with_metaclass
27
27
28 #-----------------------------------------------------------------------------
28 #-----------------------------------------------------------------------------
29 # Generic socket interface
29 # Generic socket interface
30 #-----------------------------------------------------------------------------
30 #-----------------------------------------------------------------------------
31
31
32 class SocketABC(with_metaclass(abc.ABCMeta, object)):
32 class SocketABC(with_metaclass(abc.ABCMeta, object)):
33 @abc.abstractmethod
33 @abc.abstractmethod
34 def recv_multipart(self, flags=0, copy=True, track=False):
34 def recv_multipart(self, flags=0, copy=True, track=False):
35 raise NotImplementedError
35 raise NotImplementedError
36
36
37 @abc.abstractmethod
37 @abc.abstractmethod
38 def send_multipart(self, msg_parts, flags=0, copy=True, track=False):
38 def send_multipart(self, msg_parts, flags=0, copy=True, track=False):
39 raise NotImplementedError
39 raise NotImplementedError
40
40
41 SocketABC.register(zmq.Socket)
41 SocketABC.register(zmq.Socket)
42
42
43 #-----------------------------------------------------------------------------
43 #-----------------------------------------------------------------------------
44 # Dummy socket class
44 # Dummy socket class
45 #-----------------------------------------------------------------------------
45 #-----------------------------------------------------------------------------
46
46
47 class DummySocket(HasTraits):
47 class DummySocket(HasTraits):
48 """ A dummy socket implementing (part of) the zmq.Socket interface. """
48 """ A dummy socket implementing (part of) the zmq.Socket interface. """
49
49
50 queue = Instance(Queue, ())
50 queue = Instance(Queue, ())
51 message_sent = Int(0) # Should be an Event
51 message_sent = Int(0) # Should be an Event
52
52
53 #-------------------------------------------------------------------------
53 #-------------------------------------------------------------------------
54 # Socket interface
54 # Socket interface
55 #-------------------------------------------------------------------------
55 #-------------------------------------------------------------------------
56
56
57 def recv_multipart(self, flags=0, copy=True, track=False):
57 def recv_multipart(self, flags=0, copy=True, track=False):
58 return self.queue.get_nowait()
58 return self.queue.get_nowait()
59
59
60 def send_multipart(self, msg_parts, flags=0, copy=True, track=False):
60 def send_multipart(self, msg_parts, flags=0, copy=True, track=False):
61 msg_parts = list(map(zmq.Message, msg_parts))
61 msg_parts = list(map(zmq.Message, msg_parts))
62 self.queue.put_nowait(msg_parts)
62 self.queue.put_nowait(msg_parts)
63 self.message_sent += 1
63 self.message_sent += 1
64
64
65 SocketABC.register(DummySocket)
65 SocketABC.register(DummySocket)
1 NO CONTENT: file renamed from IPython/kernel/inprocess/tests/__init__.py to ipython_kernel/inprocess/tests/__init__.py
NO CONTENT: file renamed from IPython/kernel/inprocess/tests/__init__.py to ipython_kernel/inprocess/tests/__init__.py
@@ -1,69 +1,68 b''
1 # Copyright (c) IPython Development Team.
1 # Copyright (c) IPython Development Team.
2 # Distributed under the terms of the Modified BSD License.
2 # Distributed under the terms of the Modified BSD License.
3
3
4 from __future__ import print_function
4 from __future__ import print_function
5
5
6 import sys
6 import sys
7 import unittest
7 import unittest
8
8
9 from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
9 from ipython_kernel.inprocess.blocking import BlockingInProcessKernelClient
10 from IPython.kernel.inprocess.manager import InProcessKernelManager
10 from ipython_kernel.inprocess.manager import InProcessKernelManager
11 from IPython.kernel.inprocess.ipkernel import InProcessKernel
11 from ipython_kernel.inprocess.ipkernel import InProcessKernel
12 from IPython.kernel.tests.utils import assemble_output
12 from ipython_kernel.tests.utils import assemble_output
13 from IPython.testing.decorators import skipif_not_matplotlib
13 from IPython.testing.decorators import skipif_not_matplotlib
14 from IPython.utils.io import capture_output
14 from IPython.utils.io import capture_output
15 from IPython.utils import py3compat
15 from IPython.utils import py3compat
16
16
17 if py3compat.PY3:
17 if py3compat.PY3:
18 from io import StringIO
18 from io import StringIO
19 else:
19 else:
20 from StringIO import StringIO
20 from StringIO import StringIO
21
21
22
22
23 class InProcessKernelTestCase(unittest.TestCase):
23 class InProcessKernelTestCase(unittest.TestCase):
24
24
25 def setUp(self):
25 def setUp(self):
26 self.km = InProcessKernelManager()
26 self.km = InProcessKernelManager()
27 self.km.start_kernel()
27 self.km.start_kernel()
28 self.kc = self.km.client()
28 self.kc = self.km.client()
29 self.kc.start_channels()
29 self.kc.start_channels()
30 self.kc.wait_for_ready()
30 self.kc.wait_for_ready()
31
31
32 @skipif_not_matplotlib
32 @skipif_not_matplotlib
33 def test_pylab(self):
33 def test_pylab(self):
34 """Does %pylab work in the in-process kernel?"""
34 """Does %pylab work in the in-process kernel?"""
35 kc = self.kc
35 kc = self.kc
36 kc.execute('%pylab')
36 kc.execute('%pylab')
37 out, err = assemble_output(kc.iopub_channel)
37 out, err = assemble_output(kc.iopub_channel)
38 self.assertIn('matplotlib', out)
38 self.assertIn('matplotlib', out)
39
39
40 def test_raw_input(self):
40 def test_raw_input(self):
41 """ Does the in-process kernel handle raw_input correctly?
41 """ Does the in-process kernel handle raw_input correctly?
42 """
42 """
43 io = StringIO('foobar\n')
43 io = StringIO('foobar\n')
44 sys_stdin = sys.stdin
44 sys_stdin = sys.stdin
45 sys.stdin = io
45 sys.stdin = io
46 try:
46 try:
47 if py3compat.PY3:
47 if py3compat.PY3:
48 self.kc.execute('x = input()')
48 self.kc.execute('x = input()')
49 else:
49 else:
50 self.kc.execute('x = raw_input()')
50 self.kc.execute('x = raw_input()')
51 finally:
51 finally:
52 sys.stdin = sys_stdin
52 sys.stdin = sys_stdin
53 self.assertEqual(self.km.kernel.shell.user_ns.get('x'), 'foobar')
53 self.assertEqual(self.km.kernel.shell.user_ns.get('x'), 'foobar')
54
54
55 def test_stdout(self):
55 def test_stdout(self):
56 """ Does the in-process kernel correctly capture IO?
56 """ Does the in-process kernel correctly capture IO?
57 """
57 """
58 kernel = InProcessKernel()
58 kernel = InProcessKernel()
59
59
60 with capture_output() as io:
60 with capture_output() as io:
61 kernel.shell.run_cell('print("foo")')
61 kernel.shell.run_cell('print("foo")')
62 self.assertEqual(io.stdout, 'foo\n')
62 self.assertEqual(io.stdout, 'foo\n')
63
63
64 kc = BlockingInProcessKernelClient(kernel=kernel, session=kernel.session)
64 kc = BlockingInProcessKernelClient(kernel=kernel, session=kernel.session)
65 kernel.frontends.append(kc)
65 kernel.frontends.append(kc)
66 kc.execute('print("bar")')
66 kc.execute('print("bar")')
67 out, err = assemble_output(kc.iopub_channel)
67 out, err = assemble_output(kc.iopub_channel)
68 self.assertEqual(out, 'bar\n')
68 self.assertEqual(out, 'bar\n')
69
@@ -1,108 +1,108 b''
1 # Copyright (c) IPython Development Team.
1 # Copyright (c) IPython Development Team.
2 # Distributed under the terms of the Modified BSD License.
2 # Distributed under the terms of the Modified BSD License.
3
3
4 from __future__ import print_function
4 from __future__ import print_function
5
5
6 import unittest
6 import unittest
7
7
8 from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
8 from ipython_kernel.inprocess.blocking import BlockingInProcessKernelClient
9 from IPython.kernel.inprocess.manager import InProcessKernelManager
9 from ipython_kernel.inprocess.manager import InProcessKernelManager
10
10
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12 # Test case
12 # Test case
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15 class InProcessKernelManagerTestCase(unittest.TestCase):
15 class InProcessKernelManagerTestCase(unittest.TestCase):
16
16
17 def test_interface(self):
17 def test_interface(self):
18 """ Does the in-process kernel manager implement the basic KM interface?
18 """ Does the in-process kernel manager implement the basic KM interface?
19 """
19 """
20 km = InProcessKernelManager()
20 km = InProcessKernelManager()
21 self.assert_(not km.has_kernel)
21 self.assert_(not km.has_kernel)
22
22
23 km.start_kernel()
23 km.start_kernel()
24 self.assert_(km.has_kernel)
24 self.assert_(km.has_kernel)
25 self.assert_(km.kernel is not None)
25 self.assert_(km.kernel is not None)
26
26
27 kc = km.client()
27 kc = km.client()
28 self.assert_(not kc.channels_running)
28 self.assert_(not kc.channels_running)
29
29
30 kc.start_channels()
30 kc.start_channels()
31 self.assert_(kc.channels_running)
31 self.assert_(kc.channels_running)
32
32
33 old_kernel = km.kernel
33 old_kernel = km.kernel
34 km.restart_kernel()
34 km.restart_kernel()
35 self.assertIsNotNone(km.kernel)
35 self.assertIsNotNone(km.kernel)
36 self.assertNotEquals(km.kernel, old_kernel)
36 self.assertNotEquals(km.kernel, old_kernel)
37
37
38 km.shutdown_kernel()
38 km.shutdown_kernel()
39 self.assert_(not km.has_kernel)
39 self.assert_(not km.has_kernel)
40
40
41 self.assertRaises(NotImplementedError, km.interrupt_kernel)
41 self.assertRaises(NotImplementedError, km.interrupt_kernel)
42 self.assertRaises(NotImplementedError, km.signal_kernel, 9)
42 self.assertRaises(NotImplementedError, km.signal_kernel, 9)
43
43
44 kc.stop_channels()
44 kc.stop_channels()
45 self.assert_(not kc.channels_running)
45 self.assert_(not kc.channels_running)
46
46
47 def test_execute(self):
47 def test_execute(self):
48 """ Does executing code in an in-process kernel work?
48 """ Does executing code in an in-process kernel work?
49 """
49 """
50 km = InProcessKernelManager()
50 km = InProcessKernelManager()
51 km.start_kernel()
51 km.start_kernel()
52 kc = km.client()
52 kc = km.client()
53 kc.start_channels()
53 kc.start_channels()
54 kc.wait_for_ready()
54 kc.wait_for_ready()
55 kc.execute('foo = 1')
55 kc.execute('foo = 1')
56 self.assertEquals(km.kernel.shell.user_ns['foo'], 1)
56 self.assertEquals(km.kernel.shell.user_ns['foo'], 1)
57
57
58 def test_complete(self):
58 def test_complete(self):
59 """ Does requesting completion from an in-process kernel work?
59 """ Does requesting completion from an in-process kernel work?
60 """
60 """
61 km = InProcessKernelManager()
61 km = InProcessKernelManager()
62 km.start_kernel()
62 km.start_kernel()
63 kc = km.client()
63 kc = km.client()
64 kc.start_channels()
64 kc.start_channels()
65 kc.wait_for_ready()
65 kc.wait_for_ready()
66 km.kernel.shell.push({'my_bar': 0, 'my_baz': 1})
66 km.kernel.shell.push({'my_bar': 0, 'my_baz': 1})
67 kc.complete('my_ba', 5)
67 kc.complete('my_ba', 5)
68 msg = kc.get_shell_msg()
68 msg = kc.get_shell_msg()
69 self.assertEqual(msg['header']['msg_type'], 'complete_reply')
69 self.assertEqual(msg['header']['msg_type'], 'complete_reply')
70 self.assertEqual(sorted(msg['content']['matches']),
70 self.assertEqual(sorted(msg['content']['matches']),
71 ['my_bar', 'my_baz'])
71 ['my_bar', 'my_baz'])
72
72
73 def test_inspect(self):
73 def test_inspect(self):
74 """ Does requesting object information from an in-process kernel work?
74 """ Does requesting object information from an in-process kernel work?
75 """
75 """
76 km = InProcessKernelManager()
76 km = InProcessKernelManager()
77 km.start_kernel()
77 km.start_kernel()
78 kc = km.client()
78 kc = km.client()
79 kc.start_channels()
79 kc.start_channels()
80 kc.wait_for_ready()
80 kc.wait_for_ready()
81 km.kernel.shell.user_ns['foo'] = 1
81 km.kernel.shell.user_ns['foo'] = 1
82 kc.inspect('foo')
82 kc.inspect('foo')
83 msg = kc.get_shell_msg()
83 msg = kc.get_shell_msg()
84 self.assertEqual(msg['header']['msg_type'], 'inspect_reply')
84 self.assertEqual(msg['header']['msg_type'], 'inspect_reply')
85 content = msg['content']
85 content = msg['content']
86 assert content['found']
86 assert content['found']
87 text = content['data']['text/plain']
87 text = content['data']['text/plain']
88 self.assertIn('int', text)
88 self.assertIn('int', text)
89
89
90 def test_history(self):
90 def test_history(self):
91 """ Does requesting history from an in-process kernel work?
91 """ Does requesting history from an in-process kernel work?
92 """
92 """
93 km = InProcessKernelManager()
93 km = InProcessKernelManager()
94 km.start_kernel()
94 km.start_kernel()
95 kc = km.client()
95 kc = km.client()
96 kc.start_channels()
96 kc.start_channels()
97 kc.wait_for_ready()
97 kc.wait_for_ready()
98 kc.execute('%who')
98 kc.execute('%who')
99 kc.history(hist_access_type='tail', n=1)
99 kc.history(hist_access_type='tail', n=1)
100 msg = kc.shell_channel.get_msgs()[-1]
100 msg = kc.shell_channel.get_msgs()[-1]
101 self.assertEquals(msg['header']['msg_type'], 'history_reply')
101 self.assertEquals(msg['header']['msg_type'], 'history_reply')
102 history = msg['content']['history']
102 history = msg['content']['history']
103 self.assertEquals(len(history), 1)
103 self.assertEquals(len(history), 1)
104 self.assertEquals(history[0][2], '%who')
104 self.assertEquals(history[0][2], '%who')
105
105
106
106
107 if __name__ == '__main__':
107 if __name__ == '__main__':
108 unittest.main()
108 unittest.main()
1 NO CONTENT: file renamed from IPython/kernel/resources/logo-32x32.png to ipython_kernel/resources/logo-32x32.png
NO CONTENT: file renamed from IPython/kernel/resources/logo-32x32.png to ipython_kernel/resources/logo-32x32.png
1 NO CONTENT: file renamed from IPython/kernel/resources/logo-64x64.png to ipython_kernel/resources/logo-64x64.png
NO CONTENT: file renamed from IPython/kernel/resources/logo-64x64.png to ipython_kernel/resources/logo-64x64.png
1 NO CONTENT: file renamed from IPython/kernel/tests/__init__.py to ipython_kernel/tests/__init__.py
NO CONTENT: file renamed from IPython/kernel/tests/__init__.py to ipython_kernel/tests/__init__.py
@@ -1,228 +1,228 b''
1 # coding: utf-8
1 # coding: utf-8
2 """test the IPython Kernel"""
2 """test the IPython Kernel"""
3
3
4 # Copyright (c) IPython Development Team.
4 # Copyright (c) IPython Development Team.
5 # Distributed under the terms of the Modified BSD License.
5 # Distributed under the terms of the Modified BSD License.
6
6
7 import io
7 import io
8 import os.path
8 import os.path
9 import sys
9 import sys
10
10
11 import nose.tools as nt
11 import nose.tools as nt
12
12
13 from IPython.testing import decorators as dec, tools as tt
13 from IPython.testing import decorators as dec, tools as tt
14 from IPython.utils import py3compat
14 from IPython.utils import py3compat
15 from IPython.utils.path import locate_profile
15 from IPython.utils.path import locate_profile
16 from IPython.utils.tempdir import TemporaryDirectory
16 from IPython.utils.tempdir import TemporaryDirectory
17
17
18 from .utils import (new_kernel, kernel, TIMEOUT, assemble_output, execute,
18 from .utils import (new_kernel, kernel, TIMEOUT, assemble_output, execute,
19 flush_channels, wait_for_idle)
19 flush_channels, wait_for_idle)
20
20
21
21
22 def _check_mp_mode(kc, expected=False, stream="stdout"):
22 def _check_mp_mode(kc, expected=False, stream="stdout"):
23 execute(kc=kc, code="import sys")
23 execute(kc=kc, code="import sys")
24 flush_channels(kc)
24 flush_channels(kc)
25 msg_id, content = execute(kc=kc, code="print (sys.%s._check_mp_mode())" % stream)
25 msg_id, content = execute(kc=kc, code="print (sys.%s._check_mp_mode())" % stream)
26 stdout, stderr = assemble_output(kc.iopub_channel)
26 stdout, stderr = assemble_output(kc.iopub_channel)
27 nt.assert_equal(eval(stdout.strip()), expected)
27 nt.assert_equal(eval(stdout.strip()), expected)
28
28
29
29
30 # printing tests
30 # printing tests
31
31
32 def test_simple_print():
32 def test_simple_print():
33 """simple print statement in kernel"""
33 """simple print statement in kernel"""
34 with kernel() as kc:
34 with kernel() as kc:
35 iopub = kc.iopub_channel
35 iopub = kc.iopub_channel
36 msg_id, content = execute(kc=kc, code="print ('hi')")
36 msg_id, content = execute(kc=kc, code="print ('hi')")
37 stdout, stderr = assemble_output(iopub)
37 stdout, stderr = assemble_output(iopub)
38 nt.assert_equal(stdout, 'hi\n')
38 nt.assert_equal(stdout, 'hi\n')
39 nt.assert_equal(stderr, '')
39 nt.assert_equal(stderr, '')
40 _check_mp_mode(kc, expected=False)
40 _check_mp_mode(kc, expected=False)
41
41
42
42
43 def test_sys_path():
43 def test_sys_path():
44 """test that sys.path doesn't get messed up by default"""
44 """test that sys.path doesn't get messed up by default"""
45 with kernel() as kc:
45 with kernel() as kc:
46 msg_id, content = execute(kc=kc, code="import sys; print (repr(sys.path[0]))")
46 msg_id, content = execute(kc=kc, code="import sys; print (repr(sys.path[0]))")
47 stdout, stderr = assemble_output(kc.iopub_channel)
47 stdout, stderr = assemble_output(kc.iopub_channel)
48 nt.assert_equal(stdout, "''\n")
48 nt.assert_equal(stdout, "''\n")
49
49
50 def test_sys_path_profile_dir():
50 def test_sys_path_profile_dir():
51 """test that sys.path doesn't get messed up when `--profile-dir` is specified"""
51 """test that sys.path doesn't get messed up when `--profile-dir` is specified"""
52
52
53 with new_kernel(['--profile-dir', locate_profile('default')]) as kc:
53 with new_kernel(['--profile-dir', locate_profile('default')]) as kc:
54 msg_id, content = execute(kc=kc, code="import sys; print (repr(sys.path[0]))")
54 msg_id, content = execute(kc=kc, code="import sys; print (repr(sys.path[0]))")
55 stdout, stderr = assemble_output(kc.iopub_channel)
55 stdout, stderr = assemble_output(kc.iopub_channel)
56 nt.assert_equal(stdout, "''\n")
56 nt.assert_equal(stdout, "''\n")
57
57
58 @dec.knownfailureif(sys.platform == 'win32', "subprocess prints fail on Windows")
58 @dec.knownfailureif(sys.platform == 'win32', "subprocess prints fail on Windows")
59 def test_subprocess_print():
59 def test_subprocess_print():
60 """printing from forked mp.Process"""
60 """printing from forked mp.Process"""
61 with new_kernel() as kc:
61 with new_kernel() as kc:
62 iopub = kc.iopub_channel
62 iopub = kc.iopub_channel
63
63
64 _check_mp_mode(kc, expected=False)
64 _check_mp_mode(kc, expected=False)
65 flush_channels(kc)
65 flush_channels(kc)
66 np = 5
66 np = 5
67 code = '\n'.join([
67 code = '\n'.join([
68 "from __future__ import print_function",
68 "from __future__ import print_function",
69 "import multiprocessing as mp",
69 "import multiprocessing as mp",
70 "pool = [mp.Process(target=print, args=('hello', i,)) for i in range(%i)]" % np,
70 "pool = [mp.Process(target=print, args=('hello', i,)) for i in range(%i)]" % np,
71 "for p in pool: p.start()",
71 "for p in pool: p.start()",
72 "for p in pool: p.join()"
72 "for p in pool: p.join()"
73 ])
73 ])
74
74
75 expected = '\n'.join([
75 expected = '\n'.join([
76 "hello %s" % i for i in range(np)
76 "hello %s" % i for i in range(np)
77 ]) + '\n'
77 ]) + '\n'
78
78
79 msg_id, content = execute(kc=kc, code=code)
79 msg_id, content = execute(kc=kc, code=code)
80 stdout, stderr = assemble_output(iopub)
80 stdout, stderr = assemble_output(iopub)
81 nt.assert_equal(stdout.count("hello"), np, stdout)
81 nt.assert_equal(stdout.count("hello"), np, stdout)
82 for n in range(np):
82 for n in range(np):
83 nt.assert_equal(stdout.count(str(n)), 1, stdout)
83 nt.assert_equal(stdout.count(str(n)), 1, stdout)
84 nt.assert_equal(stderr, '')
84 nt.assert_equal(stderr, '')
85 _check_mp_mode(kc, expected=False)
85 _check_mp_mode(kc, expected=False)
86 _check_mp_mode(kc, expected=False, stream="stderr")
86 _check_mp_mode(kc, expected=False, stream="stderr")
87
87
88
88
89 def test_subprocess_noprint():
89 def test_subprocess_noprint():
90 """mp.Process without print doesn't trigger iostream mp_mode"""
90 """mp.Process without print doesn't trigger iostream mp_mode"""
91 with kernel() as kc:
91 with kernel() as kc:
92 iopub = kc.iopub_channel
92 iopub = kc.iopub_channel
93
93
94 np = 5
94 np = 5
95 code = '\n'.join([
95 code = '\n'.join([
96 "import multiprocessing as mp",
96 "import multiprocessing as mp",
97 "pool = [mp.Process(target=range, args=(i,)) for i in range(%i)]" % np,
97 "pool = [mp.Process(target=range, args=(i,)) for i in range(%i)]" % np,
98 "for p in pool: p.start()",
98 "for p in pool: p.start()",
99 "for p in pool: p.join()"
99 "for p in pool: p.join()"
100 ])
100 ])
101
101
102 msg_id, content = execute(kc=kc, code=code)
102 msg_id, content = execute(kc=kc, code=code)
103 stdout, stderr = assemble_output(iopub)
103 stdout, stderr = assemble_output(iopub)
104 nt.assert_equal(stdout, '')
104 nt.assert_equal(stdout, '')
105 nt.assert_equal(stderr, '')
105 nt.assert_equal(stderr, '')
106
106
107 _check_mp_mode(kc, expected=False)
107 _check_mp_mode(kc, expected=False)
108 _check_mp_mode(kc, expected=False, stream="stderr")
108 _check_mp_mode(kc, expected=False, stream="stderr")
109
109
110
110
111 @dec.knownfailureif(sys.platform == 'win32', "subprocess prints fail on Windows")
111 @dec.knownfailureif(sys.platform == 'win32', "subprocess prints fail on Windows")
112 def test_subprocess_error():
112 def test_subprocess_error():
113 """error in mp.Process doesn't crash"""
113 """error in mp.Process doesn't crash"""
114 with new_kernel() as kc:
114 with new_kernel() as kc:
115 iopub = kc.iopub_channel
115 iopub = kc.iopub_channel
116
116
117 code = '\n'.join([
117 code = '\n'.join([
118 "import multiprocessing as mp",
118 "import multiprocessing as mp",
119 "p = mp.Process(target=int, args=('hi',))",
119 "p = mp.Process(target=int, args=('hi',))",
120 "p.start()",
120 "p.start()",
121 "p.join()",
121 "p.join()",
122 ])
122 ])
123
123
124 msg_id, content = execute(kc=kc, code=code)
124 msg_id, content = execute(kc=kc, code=code)
125 stdout, stderr = assemble_output(iopub)
125 stdout, stderr = assemble_output(iopub)
126 nt.assert_equal(stdout, '')
126 nt.assert_equal(stdout, '')
127 nt.assert_true("ValueError" in stderr, stderr)
127 nt.assert_true("ValueError" in stderr, stderr)
128
128
129 _check_mp_mode(kc, expected=False)
129 _check_mp_mode(kc, expected=False)
130 _check_mp_mode(kc, expected=False, stream="stderr")
130 _check_mp_mode(kc, expected=False, stream="stderr")
131
131
132 # raw_input tests
132 # raw_input tests
133
133
134 def test_raw_input():
134 def test_raw_input():
135 """test [raw_]input"""
135 """test [raw_]input"""
136 with kernel() as kc:
136 with kernel() as kc:
137 iopub = kc.iopub_channel
137 iopub = kc.iopub_channel
138
138
139 input_f = "input" if py3compat.PY3 else "raw_input"
139 input_f = "input" if py3compat.PY3 else "raw_input"
140 theprompt = "prompt> "
140 theprompt = "prompt> "
141 code = 'print({input_f}("{theprompt}"))'.format(**locals())
141 code = 'print({input_f}("{theprompt}"))'.format(**locals())
142 msg_id = kc.execute(code, allow_stdin=True)
142 msg_id = kc.execute(code, allow_stdin=True)
143 msg = kc.get_stdin_msg(block=True, timeout=TIMEOUT)
143 msg = kc.get_stdin_msg(block=True, timeout=TIMEOUT)
144 nt.assert_equal(msg['header']['msg_type'], u'input_request')
144 nt.assert_equal(msg['header']['msg_type'], u'input_request')
145 content = msg['content']
145 content = msg['content']
146 nt.assert_equal(content['prompt'], theprompt)
146 nt.assert_equal(content['prompt'], theprompt)
147 text = "some text"
147 text = "some text"
148 kc.input(text)
148 kc.input(text)
149 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
149 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
150 nt.assert_equal(reply['content']['status'], 'ok')
150 nt.assert_equal(reply['content']['status'], 'ok')
151 stdout, stderr = assemble_output(iopub)
151 stdout, stderr = assemble_output(iopub)
152 nt.assert_equal(stdout, text + "\n")
152 nt.assert_equal(stdout, text + "\n")
153
153
154
154
155 @dec.skipif(py3compat.PY3)
155 @dec.skipif(py3compat.PY3)
156 def test_eval_input():
156 def test_eval_input():
157 """test input() on Python 2"""
157 """test input() on Python 2"""
158 with kernel() as kc:
158 with kernel() as kc:
159 iopub = kc.iopub_channel
159 iopub = kc.iopub_channel
160
160
161 input_f = "input" if py3compat.PY3 else "raw_input"
161 input_f = "input" if py3compat.PY3 else "raw_input"
162 theprompt = "prompt> "
162 theprompt = "prompt> "
163 code = 'print(input("{theprompt}"))'.format(**locals())
163 code = 'print(input("{theprompt}"))'.format(**locals())
164 msg_id = kc.execute(code, allow_stdin=True)
164 msg_id = kc.execute(code, allow_stdin=True)
165 msg = kc.get_stdin_msg(block=True, timeout=TIMEOUT)
165 msg = kc.get_stdin_msg(block=True, timeout=TIMEOUT)
166 nt.assert_equal(msg['header']['msg_type'], u'input_request')
166 nt.assert_equal(msg['header']['msg_type'], u'input_request')
167 content = msg['content']
167 content = msg['content']
168 nt.assert_equal(content['prompt'], theprompt)
168 nt.assert_equal(content['prompt'], theprompt)
169 kc.input("1+1")
169 kc.input("1+1")
170 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
170 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
171 nt.assert_equal(reply['content']['status'], 'ok')
171 nt.assert_equal(reply['content']['status'], 'ok')
172 stdout, stderr = assemble_output(iopub)
172 stdout, stderr = assemble_output(iopub)
173 nt.assert_equal(stdout, "2\n")
173 nt.assert_equal(stdout, "2\n")
174
174
175
175
176 def test_save_history():
176 def test_save_history():
177 # Saving history from the kernel with %hist -f was failing because of
177 # Saving history from the kernel with %hist -f was failing because of
178 # unicode problems on Python 2.
178 # unicode problems on Python 2.
179 with kernel() as kc, TemporaryDirectory() as td:
179 with kernel() as kc, TemporaryDirectory() as td:
180 file = os.path.join(td, 'hist.out')
180 file = os.path.join(td, 'hist.out')
181 execute(u'a=1', kc=kc)
181 execute(u'a=1', kc=kc)
182 wait_for_idle(kc)
182 wait_for_idle(kc)
183 execute(u'b=u"abcþ"', kc=kc)
183 execute(u'b=u"abcþ"', kc=kc)
184 wait_for_idle(kc)
184 wait_for_idle(kc)
185 _, reply = execute("%hist -f " + file, kc=kc)
185 _, reply = execute("%hist -f " + file, kc=kc)
186 nt.assert_equal(reply['status'], 'ok')
186 nt.assert_equal(reply['status'], 'ok')
187 with io.open(file, encoding='utf-8') as f:
187 with io.open(file, encoding='utf-8') as f:
188 content = f.read()
188 content = f.read()
189 nt.assert_in(u'a=1', content)
189 nt.assert_in(u'a=1', content)
190 nt.assert_in(u'b=u"abcþ"', content)
190 nt.assert_in(u'b=u"abcþ"', content)
191
191
192 def test_help_output():
192 def test_help_output():
193 """ipython kernel --help-all works"""
193 """ipython kernel --help-all works"""
194 tt.help_all_output_test('kernel')
194 tt.help_all_output_test('kernel')
195
195
196 def test_is_complete():
196 def test_is_complete():
197 with kernel() as kc:
197 with kernel() as kc:
198 # There are more test cases for this in core - here we just check
198 # There are more test cases for this in core - here we just check
199 # that the kernel exposes the interface correctly.
199 # that the kernel exposes the interface correctly.
200 kc.is_complete('2+2')
200 kc.is_complete('2+2')
201 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
201 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
202 assert reply['content']['status'] == 'complete'
202 assert reply['content']['status'] == 'complete'
203
203
204 # SyntaxError should mean it's complete
204 # SyntaxError should mean it's complete
205 kc.is_complete('raise = 2')
205 kc.is_complete('raise = 2')
206 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
206 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
207 assert reply['content']['status'] == 'invalid'
207 assert reply['content']['status'] == 'invalid'
208
208
209 kc.is_complete('a = [1,\n2,')
209 kc.is_complete('a = [1,\n2,')
210 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
210 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
211 assert reply['content']['status'] == 'incomplete'
211 assert reply['content']['status'] == 'incomplete'
212 assert reply['content']['indent'] == ''
212 assert reply['content']['indent'] == ''
213
213
214 def test_complete():
214 def test_complete():
215 with kernel() as kc:
215 with kernel() as kc:
216 execute(u'a = 1', kc=kc)
216 execute(u'a = 1', kc=kc)
217 wait_for_idle(kc)
217 wait_for_idle(kc)
218 cell = 'import IPython\nb = a.'
218 cell = 'import IPython\nb = a.'
219 kc.complete(cell)
219 kc.complete(cell)
220 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
220 reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
221 c = reply['content']
221 c = reply['content']
222 nt.assert_equal(c['status'], 'ok')
222 nt.assert_equal(c['status'], 'ok')
223 nt.assert_equal(c['cursor_start'], cell.find('a.'))
223 nt.assert_equal(c['cursor_start'], cell.find('a.'))
224 nt.assert_equal(c['cursor_end'], cell.find('a.') + 2)
224 nt.assert_equal(c['cursor_end'], cell.find('a.') + 2)
225 matches = c['matches']
225 matches = c['matches']
226 nt.assert_greater(len(matches), 0)
226 nt.assert_greater(len(matches), 0)
227 for match in matches:
227 for match in matches:
228 nt.assert_equal(match[:2], 'a.')
228 nt.assert_equal(match[:2], 'a.')
@@ -1,496 +1,495 b''
1 """Test suite for our zeromq-based message specification."""
1 """Test suite for our zeromq-based message specification."""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import re
6 import re
7 import sys
7 import sys
8 from distutils.version import LooseVersion as V
8 from distutils.version import LooseVersion as V
9 try:
9 try:
10 from queue import Empty # Py 3
10 from queue import Empty # Py 3
11 except ImportError:
11 except ImportError:
12 from Queue import Empty # Py 2
12 from Queue import Empty # Py 2
13
13
14 import nose.tools as nt
14 import nose.tools as nt
15
15
16 from IPython.utils.traitlets import (
16 from IPython.utils.traitlets import (
17 HasTraits, TraitError, Bool, Unicode, Dict, Integer, List, Enum,
17 HasTraits, TraitError, Bool, Unicode, Dict, Integer, List, Enum,
18 )
18 )
19 from IPython.utils.py3compat import string_types, iteritems
19 from IPython.utils.py3compat import string_types, iteritems
20
20
21 from .utils import TIMEOUT, start_global_kernel, flush_channels, execute
21 from .utils import TIMEOUT, start_global_kernel, flush_channels, execute
22
22
23 #-----------------------------------------------------------------------------
23 #-----------------------------------------------------------------------------
24 # Globals
24 # Globals
25 #-----------------------------------------------------------------------------
25 #-----------------------------------------------------------------------------
26 KC = None
26 KC = None
27
27
28 def setup():
28 def setup():
29 global KC
29 global KC
30 KC = start_global_kernel()
30 KC = start_global_kernel()
31
31
32 #-----------------------------------------------------------------------------
32 #-----------------------------------------------------------------------------
33 # Message Spec References
33 # Message Spec References
34 #-----------------------------------------------------------------------------
34 #-----------------------------------------------------------------------------
35
35
36 class Reference(HasTraits):
36 class Reference(HasTraits):
37
37
38 """
38 """
39 Base class for message spec specification testing.
39 Base class for message spec specification testing.
40
40
41 This class is the core of the message specification test. The
41 This class is the core of the message specification test. The
42 idea is that child classes implement trait attributes for each
42 idea is that child classes implement trait attributes for each
43 message keys, so that message keys can be tested against these
43 message keys, so that message keys can be tested against these
44 traits using :meth:`check` method.
44 traits using :meth:`check` method.
45
45
46 """
46 """
47
47
48 def check(self, d):
48 def check(self, d):
49 """validate a dict against our traits"""
49 """validate a dict against our traits"""
50 for key in self.trait_names():
50 for key in self.trait_names():
51 nt.assert_in(key, d)
51 nt.assert_in(key, d)
52 # FIXME: always allow None, probably not a good idea
52 # FIXME: always allow None, probably not a good idea
53 if d[key] is None:
53 if d[key] is None:
54 continue
54 continue
55 try:
55 try:
56 setattr(self, key, d[key])
56 setattr(self, key, d[key])
57 except TraitError as e:
57 except TraitError as e:
58 assert False, str(e)
58 assert False, str(e)
59
59
60
60
61 class Version(Unicode):
61 class Version(Unicode):
62 def __init__(self, *args, **kwargs):
62 def __init__(self, *args, **kwargs):
63 self.min = kwargs.pop('min', None)
63 self.min = kwargs.pop('min', None)
64 self.max = kwargs.pop('max', None)
64 self.max = kwargs.pop('max', None)
65 kwargs['default_value'] = self.min
65 kwargs['default_value'] = self.min
66 super(Version, self).__init__(*args, **kwargs)
66 super(Version, self).__init__(*args, **kwargs)
67
67
68 def validate(self, obj, value):
68 def validate(self, obj, value):
69 if self.min and V(value) < V(self.min):
69 if self.min and V(value) < V(self.min):
70 raise TraitError("bad version: %s < %s" % (value, self.min))
70 raise TraitError("bad version: %s < %s" % (value, self.min))
71 if self.max and (V(value) > V(self.max)):
71 if self.max and (V(value) > V(self.max)):
72 raise TraitError("bad version: %s > %s" % (value, self.max))
72 raise TraitError("bad version: %s > %s" % (value, self.max))
73
73
74
74
75 class RMessage(Reference):
75 class RMessage(Reference):
76 msg_id = Unicode()
76 msg_id = Unicode()
77 msg_type = Unicode()
77 msg_type = Unicode()
78 header = Dict()
78 header = Dict()
79 parent_header = Dict()
79 parent_header = Dict()
80 content = Dict()
80 content = Dict()
81
81
82 def check(self, d):
82 def check(self, d):
83 super(RMessage, self).check(d)
83 super(RMessage, self).check(d)
84 RHeader().check(self.header)
84 RHeader().check(self.header)
85 if self.parent_header:
85 if self.parent_header:
86 RHeader().check(self.parent_header)
86 RHeader().check(self.parent_header)
87
87
88 class RHeader(Reference):
88 class RHeader(Reference):
89 msg_id = Unicode()
89 msg_id = Unicode()
90 msg_type = Unicode()
90 msg_type = Unicode()
91 session = Unicode()
91 session = Unicode()
92 username = Unicode()
92 username = Unicode()
93 version = Version(min='5.0')
93 version = Version(min='5.0')
94
94
95 mime_pat = re.compile(r'^[\w\-\+\.]+/[\w\-\+\.]+$')
95 mime_pat = re.compile(r'^[\w\-\+\.]+/[\w\-\+\.]+$')
96
96
97 class MimeBundle(Reference):
97 class MimeBundle(Reference):
98 metadata = Dict()
98 metadata = Dict()
99 data = Dict()
99 data = Dict()
100 def _data_changed(self, name, old, new):
100 def _data_changed(self, name, old, new):
101 for k,v in iteritems(new):
101 for k,v in iteritems(new):
102 assert mime_pat.match(k)
102 assert mime_pat.match(k)
103 nt.assert_is_instance(v, string_types)
103 nt.assert_is_instance(v, string_types)
104
104
105 # shell replies
105 # shell replies
106
106
107 class ExecuteReply(Reference):
107 class ExecuteReply(Reference):
108 execution_count = Integer()
108 execution_count = Integer()
109 status = Enum((u'ok', u'error'), default_value=u'ok')
109 status = Enum((u'ok', u'error'), default_value=u'ok')
110
110
111 def check(self, d):
111 def check(self, d):
112 Reference.check(self, d)
112 Reference.check(self, d)
113 if d['status'] == 'ok':
113 if d['status'] == 'ok':
114 ExecuteReplyOkay().check(d)
114 ExecuteReplyOkay().check(d)
115 elif d['status'] == 'error':
115 elif d['status'] == 'error':
116 ExecuteReplyError().check(d)
116 ExecuteReplyError().check(d)
117
117
118
118
119 class ExecuteReplyOkay(Reference):
119 class ExecuteReplyOkay(Reference):
120 payload = List(Dict)
120 payload = List(Dict)
121 user_expressions = Dict()
121 user_expressions = Dict()
122
122
123
123
124 class ExecuteReplyError(Reference):
124 class ExecuteReplyError(Reference):
125 ename = Unicode()
125 ename = Unicode()
126 evalue = Unicode()
126 evalue = Unicode()
127 traceback = List(Unicode)
127 traceback = List(Unicode)
128
128
129
129
130 class InspectReply(MimeBundle):
130 class InspectReply(MimeBundle):
131 found = Bool()
131 found = Bool()
132
132
133
133
134 class ArgSpec(Reference):
134 class ArgSpec(Reference):
135 args = List(Unicode)
135 args = List(Unicode)
136 varargs = Unicode()
136 varargs = Unicode()
137 varkw = Unicode()
137 varkw = Unicode()
138 defaults = List()
138 defaults = List()
139
139
140
140
141 class Status(Reference):
141 class Status(Reference):
142 execution_state = Enum((u'busy', u'idle', u'starting'), default_value=u'busy')
142 execution_state = Enum((u'busy', u'idle', u'starting'), default_value=u'busy')
143
143
144
144
145 class CompleteReply(Reference):
145 class CompleteReply(Reference):
146 matches = List(Unicode)
146 matches = List(Unicode)
147 cursor_start = Integer()
147 cursor_start = Integer()
148 cursor_end = Integer()
148 cursor_end = Integer()
149 status = Unicode()
149 status = Unicode()
150
150
151 class LanguageInfo(Reference):
151 class LanguageInfo(Reference):
152 name = Unicode('python')
152 name = Unicode('python')
153 version = Unicode(sys.version.split()[0])
153 version = Unicode(sys.version.split()[0])
154
154
155 class KernelInfoReply(Reference):
155 class KernelInfoReply(Reference):
156 protocol_version = Version(min='5.0')
156 protocol_version = Version(min='5.0')
157 implementation = Unicode('ipython')
157 implementation = Unicode('ipython')
158 implementation_version = Version(min='2.1')
158 implementation_version = Version(min='2.1')
159 language_info = Dict()
159 language_info = Dict()
160 banner = Unicode()
160 banner = Unicode()
161
161
162 def check(self, d):
162 def check(self, d):
163 Reference.check(self, d)
163 Reference.check(self, d)
164 LanguageInfo().check(d['language_info'])
164 LanguageInfo().check(d['language_info'])
165
165
166
166
167 class IsCompleteReply(Reference):
167 class IsCompleteReply(Reference):
168 status = Enum((u'complete', u'incomplete', u'invalid', u'unknown'), default_value=u'complete')
168 status = Enum((u'complete', u'incomplete', u'invalid', u'unknown'), default_value=u'complete')
169
169
170 def check(self, d):
170 def check(self, d):
171 Reference.check(self, d)
171 Reference.check(self, d)
172 if d['status'] == 'incomplete':
172 if d['status'] == 'incomplete':
173 IsCompleteReplyIncomplete().check(d)
173 IsCompleteReplyIncomplete().check(d)
174
174
175 class IsCompleteReplyIncomplete(Reference):
175 class IsCompleteReplyIncomplete(Reference):
176 indent = Unicode()
176 indent = Unicode()
177
177
178
178
179 # IOPub messages
179 # IOPub messages
180
180
181 class ExecuteInput(Reference):
181 class ExecuteInput(Reference):
182 code = Unicode()
182 code = Unicode()
183 execution_count = Integer()
183 execution_count = Integer()
184
184
185
185
186 Error = ExecuteReplyError
186 Error = ExecuteReplyError
187
187
188
188
189 class Stream(Reference):
189 class Stream(Reference):
190 name = Enum((u'stdout', u'stderr'), default_value=u'stdout')
190 name = Enum((u'stdout', u'stderr'), default_value=u'stdout')
191 text = Unicode()
191 text = Unicode()
192
192
193
193
194 class DisplayData(MimeBundle):
194 class DisplayData(MimeBundle):
195 pass
195 pass
196
196
197
197
198 class ExecuteResult(MimeBundle):
198 class ExecuteResult(MimeBundle):
199 execution_count = Integer()
199 execution_count = Integer()
200
200
201 class HistoryReply(Reference):
201 class HistoryReply(Reference):
202 history = List(List())
202 history = List(List())
203
203
204
204
205 references = {
205 references = {
206 'execute_reply' : ExecuteReply(),
206 'execute_reply' : ExecuteReply(),
207 'inspect_reply' : InspectReply(),
207 'inspect_reply' : InspectReply(),
208 'status' : Status(),
208 'status' : Status(),
209 'complete_reply' : CompleteReply(),
209 'complete_reply' : CompleteReply(),
210 'kernel_info_reply': KernelInfoReply(),
210 'kernel_info_reply': KernelInfoReply(),
211 'is_complete_reply': IsCompleteReply(),
211 'is_complete_reply': IsCompleteReply(),
212 'execute_input' : ExecuteInput(),
212 'execute_input' : ExecuteInput(),
213 'execute_result' : ExecuteResult(),
213 'execute_result' : ExecuteResult(),
214 'history_reply' : HistoryReply(),
214 'history_reply' : HistoryReply(),
215 'error' : Error(),
215 'error' : Error(),
216 'stream' : Stream(),
216 'stream' : Stream(),
217 'display_data' : DisplayData(),
217 'display_data' : DisplayData(),
218 'header' : RHeader(),
218 'header' : RHeader(),
219 }
219 }
220 """
220 """
221 Specifications of `content` part of the reply messages.
221 Specifications of `content` part of the reply messages.
222 """
222 """
223
223
224
224
225 def validate_message(msg, msg_type=None, parent=None):
225 def validate_message(msg, msg_type=None, parent=None):
226 """validate a message
226 """validate a message
227
227
228 This is a generator, and must be iterated through to actually
228 This is a generator, and must be iterated through to actually
229 trigger each test.
229 trigger each test.
230
230
231 If msg_type and/or parent are given, the msg_type and/or parent msg_id
231 If msg_type and/or parent are given, the msg_type and/or parent msg_id
232 are compared with the given values.
232 are compared with the given values.
233 """
233 """
234 RMessage().check(msg)
234 RMessage().check(msg)
235 if msg_type:
235 if msg_type:
236 nt.assert_equal(msg['msg_type'], msg_type)
236 nt.assert_equal(msg['msg_type'], msg_type)
237 if parent:
237 if parent:
238 nt.assert_equal(msg['parent_header']['msg_id'], parent)
238 nt.assert_equal(msg['parent_header']['msg_id'], parent)
239 content = msg['content']
239 content = msg['content']
240 ref = references[msg['msg_type']]
240 ref = references[msg['msg_type']]
241 ref.check(content)
241 ref.check(content)
242
242
243
243
244 #-----------------------------------------------------------------------------
244 #-----------------------------------------------------------------------------
245 # Tests
245 # Tests
246 #-----------------------------------------------------------------------------
246 #-----------------------------------------------------------------------------
247
247
248 # Shell channel
248 # Shell channel
249
249
250 def test_execute():
250 def test_execute():
251 flush_channels()
251 flush_channels()
252
252
253 msg_id = KC.execute(code='x=1')
253 msg_id = KC.execute(code='x=1')
254 reply = KC.get_shell_msg(timeout=TIMEOUT)
254 reply = KC.get_shell_msg(timeout=TIMEOUT)
255 validate_message(reply, 'execute_reply', msg_id)
255 validate_message(reply, 'execute_reply', msg_id)
256
256
257
257
258 def test_execute_silent():
258 def test_execute_silent():
259 flush_channels()
259 flush_channels()
260 msg_id, reply = execute(code='x=1', silent=True)
260 msg_id, reply = execute(code='x=1', silent=True)
261
261
262 # flush status=idle
262 # flush status=idle
263 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
263 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
264 validate_message(status, 'status', msg_id)
264 validate_message(status, 'status', msg_id)
265 nt.assert_equal(status['content']['execution_state'], 'idle')
265 nt.assert_equal(status['content']['execution_state'], 'idle')
266
266
267 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
267 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
268 count = reply['execution_count']
268 count = reply['execution_count']
269
269
270 msg_id, reply = execute(code='x=2', silent=True)
270 msg_id, reply = execute(code='x=2', silent=True)
271
271
272 # flush status=idle
272 # flush status=idle
273 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
273 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
274 validate_message(status, 'status', msg_id)
274 validate_message(status, 'status', msg_id)
275 nt.assert_equal(status['content']['execution_state'], 'idle')
275 nt.assert_equal(status['content']['execution_state'], 'idle')
276
276
277 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
277 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
278 count_2 = reply['execution_count']
278 count_2 = reply['execution_count']
279 nt.assert_equal(count_2, count)
279 nt.assert_equal(count_2, count)
280
280
281
281
282 def test_execute_error():
282 def test_execute_error():
283 flush_channels()
283 flush_channels()
284
284
285 msg_id, reply = execute(code='1/0')
285 msg_id, reply = execute(code='1/0')
286 nt.assert_equal(reply['status'], 'error')
286 nt.assert_equal(reply['status'], 'error')
287 nt.assert_equal(reply['ename'], 'ZeroDivisionError')
287 nt.assert_equal(reply['ename'], 'ZeroDivisionError')
288
288
289 error = KC.iopub_channel.get_msg(timeout=TIMEOUT)
289 error = KC.iopub_channel.get_msg(timeout=TIMEOUT)
290 validate_message(error, 'error', msg_id)
290 validate_message(error, 'error', msg_id)
291
291
292
292
293 def test_execute_inc():
293 def test_execute_inc():
294 """execute request should increment execution_count"""
294 """execute request should increment execution_count"""
295 flush_channels()
295 flush_channels()
296
296
297 msg_id, reply = execute(code='x=1')
297 msg_id, reply = execute(code='x=1')
298 count = reply['execution_count']
298 count = reply['execution_count']
299
299
300 flush_channels()
300 flush_channels()
301
301
302 msg_id, reply = execute(code='x=2')
302 msg_id, reply = execute(code='x=2')
303 count_2 = reply['execution_count']
303 count_2 = reply['execution_count']
304 nt.assert_equal(count_2, count+1)
304 nt.assert_equal(count_2, count+1)
305
305
306 def test_execute_stop_on_error():
306 def test_execute_stop_on_error():
307 """execute request should not abort execution queue with stop_on_error False"""
307 """execute request should not abort execution queue with stop_on_error False"""
308 flush_channels()
308 flush_channels()
309
309
310 fail = '\n'.join([
310 fail = '\n'.join([
311 # sleep to ensure subsequent message is waiting in the queue to be aborted
311 # sleep to ensure subsequent message is waiting in the queue to be aborted
312 'import time',
312 'import time',
313 'time.sleep(0.5)',
313 'time.sleep(0.5)',
314 'raise ValueError',
314 'raise ValueError',
315 ])
315 ])
316 KC.execute(code=fail)
316 KC.execute(code=fail)
317 msg_id = KC.execute(code='print("Hello")')
317 msg_id = KC.execute(code='print("Hello")')
318 KC.get_shell_msg(timeout=TIMEOUT)
318 KC.get_shell_msg(timeout=TIMEOUT)
319 reply = KC.get_shell_msg(timeout=TIMEOUT)
319 reply = KC.get_shell_msg(timeout=TIMEOUT)
320 nt.assert_equal(reply['content']['status'], 'aborted')
320 nt.assert_equal(reply['content']['status'], 'aborted')
321
321
322 flush_channels()
322 flush_channels()
323
323
324 KC.execute(code=fail, stop_on_error=False)
324 KC.execute(code=fail, stop_on_error=False)
325 msg_id = KC.execute(code='print("Hello")')
325 msg_id = KC.execute(code='print("Hello")')
326 KC.get_shell_msg(timeout=TIMEOUT)
326 KC.get_shell_msg(timeout=TIMEOUT)
327 reply = KC.get_shell_msg(timeout=TIMEOUT)
327 reply = KC.get_shell_msg(timeout=TIMEOUT)
328 nt.assert_equal(reply['content']['status'], 'ok')
328 nt.assert_equal(reply['content']['status'], 'ok')
329
329
330
330
331 def test_user_expressions():
331 def test_user_expressions():
332 flush_channels()
332 flush_channels()
333
333
334 msg_id, reply = execute(code='x=1', user_expressions=dict(foo='x+1'))
334 msg_id, reply = execute(code='x=1', user_expressions=dict(foo='x+1'))
335 user_expressions = reply['user_expressions']
335 user_expressions = reply['user_expressions']
336 nt.assert_equal(user_expressions, {u'foo': {
336 nt.assert_equal(user_expressions, {u'foo': {
337 u'status': u'ok',
337 u'status': u'ok',
338 u'data': {u'text/plain': u'2'},
338 u'data': {u'text/plain': u'2'},
339 u'metadata': {},
339 u'metadata': {},
340 }})
340 }})
341
341
342
342
343 def test_user_expressions_fail():
343 def test_user_expressions_fail():
344 flush_channels()
344 flush_channels()
345
345
346 msg_id, reply = execute(code='x=0', user_expressions=dict(foo='nosuchname'))
346 msg_id, reply = execute(code='x=0', user_expressions=dict(foo='nosuchname'))
347 user_expressions = reply['user_expressions']
347 user_expressions = reply['user_expressions']
348 foo = user_expressions['foo']
348 foo = user_expressions['foo']
349 nt.assert_equal(foo['status'], 'error')
349 nt.assert_equal(foo['status'], 'error')
350 nt.assert_equal(foo['ename'], 'NameError')
350 nt.assert_equal(foo['ename'], 'NameError')
351
351
352
352
353 def test_oinfo():
353 def test_oinfo():
354 flush_channels()
354 flush_channels()
355
355
356 msg_id = KC.inspect('a')
356 msg_id = KC.inspect('a')
357 reply = KC.get_shell_msg(timeout=TIMEOUT)
357 reply = KC.get_shell_msg(timeout=TIMEOUT)
358 validate_message(reply, 'inspect_reply', msg_id)
358 validate_message(reply, 'inspect_reply', msg_id)
359
359
360
360
361 def test_oinfo_found():
361 def test_oinfo_found():
362 flush_channels()
362 flush_channels()
363
363
364 msg_id, reply = execute(code='a=5')
364 msg_id, reply = execute(code='a=5')
365
365
366 msg_id = KC.inspect('a')
366 msg_id = KC.inspect('a')
367 reply = KC.get_shell_msg(timeout=TIMEOUT)
367 reply = KC.get_shell_msg(timeout=TIMEOUT)
368 validate_message(reply, 'inspect_reply', msg_id)
368 validate_message(reply, 'inspect_reply', msg_id)
369 content = reply['content']
369 content = reply['content']
370 assert content['found']
370 assert content['found']
371 text = content['data']['text/plain']
371 text = content['data']['text/plain']
372 nt.assert_in('Type:', text)
372 nt.assert_in('Type:', text)
373 nt.assert_in('Docstring:', text)
373 nt.assert_in('Docstring:', text)
374
374
375
375
376 def test_oinfo_detail():
376 def test_oinfo_detail():
377 flush_channels()
377 flush_channels()
378
378
379 msg_id, reply = execute(code='ip=get_ipython()')
379 msg_id, reply = execute(code='ip=get_ipython()')
380
380
381 msg_id = KC.inspect('ip.object_inspect', cursor_pos=10, detail_level=1)
381 msg_id = KC.inspect('ip.object_inspect', cursor_pos=10, detail_level=1)
382 reply = KC.get_shell_msg(timeout=TIMEOUT)
382 reply = KC.get_shell_msg(timeout=TIMEOUT)
383 validate_message(reply, 'inspect_reply', msg_id)
383 validate_message(reply, 'inspect_reply', msg_id)
384 content = reply['content']
384 content = reply['content']
385 assert content['found']
385 assert content['found']
386 text = content['data']['text/plain']
386 text = content['data']['text/plain']
387 nt.assert_in('Signature:', text)
387 nt.assert_in('Signature:', text)
388 nt.assert_in('Source:', text)
388 nt.assert_in('Source:', text)
389
389
390
390
391 def test_oinfo_not_found():
391 def test_oinfo_not_found():
392 flush_channels()
392 flush_channels()
393
393
394 msg_id = KC.inspect('dne')
394 msg_id = KC.inspect('dne')
395 reply = KC.get_shell_msg(timeout=TIMEOUT)
395 reply = KC.get_shell_msg(timeout=TIMEOUT)
396 validate_message(reply, 'inspect_reply', msg_id)
396 validate_message(reply, 'inspect_reply', msg_id)
397 content = reply['content']
397 content = reply['content']
398 nt.assert_false(content['found'])
398 nt.assert_false(content['found'])
399
399
400
400
401 def test_complete():
401 def test_complete():
402 flush_channels()
402 flush_channels()
403
403
404 msg_id, reply = execute(code="alpha = albert = 5")
404 msg_id, reply = execute(code="alpha = albert = 5")
405
405
406 msg_id = KC.complete('al', 2)
406 msg_id = KC.complete('al', 2)
407 reply = KC.get_shell_msg(timeout=TIMEOUT)
407 reply = KC.get_shell_msg(timeout=TIMEOUT)
408 validate_message(reply, 'complete_reply', msg_id)
408 validate_message(reply, 'complete_reply', msg_id)
409 matches = reply['content']['matches']
409 matches = reply['content']['matches']
410 for name in ('alpha', 'albert'):
410 for name in ('alpha', 'albert'):
411 nt.assert_in(name, matches)
411 nt.assert_in(name, matches)
412
412
413
413
414 def test_kernel_info_request():
414 def test_kernel_info_request():
415 flush_channels()
415 flush_channels()
416
416
417 msg_id = KC.kernel_info()
417 msg_id = KC.kernel_info()
418 reply = KC.get_shell_msg(timeout=TIMEOUT)
418 reply = KC.get_shell_msg(timeout=TIMEOUT)
419 validate_message(reply, 'kernel_info_reply', msg_id)
419 validate_message(reply, 'kernel_info_reply', msg_id)
420
420
421
421
422 def test_single_payload():
422 def test_single_payload():
423 flush_channels()
423 flush_channels()
424 msg_id, reply = execute(code="for i in range(3):\n"+
424 msg_id, reply = execute(code="for i in range(3):\n"+
425 " x=range?\n")
425 " x=range?\n")
426 payload = reply['payload']
426 payload = reply['payload']
427 next_input_pls = [pl for pl in payload if pl["source"] == "set_next_input"]
427 next_input_pls = [pl for pl in payload if pl["source"] == "set_next_input"]
428 nt.assert_equal(len(next_input_pls), 1)
428 nt.assert_equal(len(next_input_pls), 1)
429
429
430 def test_is_complete():
430 def test_is_complete():
431 flush_channels()
431 flush_channels()
432
432
433 msg_id = KC.is_complete("a = 1")
433 msg_id = KC.is_complete("a = 1")
434 reply = KC.get_shell_msg(timeout=TIMEOUT)
434 reply = KC.get_shell_msg(timeout=TIMEOUT)
435 validate_message(reply, 'is_complete_reply', msg_id)
435 validate_message(reply, 'is_complete_reply', msg_id)
436
436
437 def test_history_range():
437 def test_history_range():
438 flush_channels()
438 flush_channels()
439
439
440 msg_id_exec = KC.execute(code='x=1', store_history = True)
440 msg_id_exec = KC.execute(code='x=1', store_history = True)
441 reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
441 reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
442
442
443 msg_id = KC.history(hist_access_type = 'range', raw = True, output = True, start = 1, stop = 2, session = 0)
443 msg_id = KC.history(hist_access_type = 'range', raw = True, output = True, start = 1, stop = 2, session = 0)
444 reply = KC.get_shell_msg(timeout=TIMEOUT)
444 reply = KC.get_shell_msg(timeout=TIMEOUT)
445 validate_message(reply, 'history_reply', msg_id)
445 validate_message(reply, 'history_reply', msg_id)
446 content = reply['content']
446 content = reply['content']
447 nt.assert_equal(len(content['history']), 1)
447 nt.assert_equal(len(content['history']), 1)
448
448
449 def test_history_tail():
449 def test_history_tail():
450 flush_channels()
450 flush_channels()
451
451
452 msg_id_exec = KC.execute(code='x=1', store_history = True)
452 msg_id_exec = KC.execute(code='x=1', store_history = True)
453 reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
453 reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
454
454
455 msg_id = KC.history(hist_access_type = 'tail', raw = True, output = True, n = 1, session = 0)
455 msg_id = KC.history(hist_access_type = 'tail', raw = True, output = True, n = 1, session = 0)
456 reply = KC.get_shell_msg(timeout=TIMEOUT)
456 reply = KC.get_shell_msg(timeout=TIMEOUT)
457 validate_message(reply, 'history_reply', msg_id)
457 validate_message(reply, 'history_reply', msg_id)
458 content = reply['content']
458 content = reply['content']
459 nt.assert_equal(len(content['history']), 1)
459 nt.assert_equal(len(content['history']), 1)
460
460
461 def test_history_search():
461 def test_history_search():
462 flush_channels()
462 flush_channels()
463
463
464 msg_id_exec = KC.execute(code='x=1', store_history = True)
464 msg_id_exec = KC.execute(code='x=1', store_history = True)
465 reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
465 reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
466
466
467 msg_id = KC.history(hist_access_type = 'search', raw = True, output = True, n = 1, pattern = '*', session = 0)
467 msg_id = KC.history(hist_access_type = 'search', raw = True, output = True, n = 1, pattern = '*', session = 0)
468 reply = KC.get_shell_msg(timeout=TIMEOUT)
468 reply = KC.get_shell_msg(timeout=TIMEOUT)
469 validate_message(reply, 'history_reply', msg_id)
469 validate_message(reply, 'history_reply', msg_id)
470 content = reply['content']
470 content = reply['content']
471 nt.assert_equal(len(content['history']), 1)
471 nt.assert_equal(len(content['history']), 1)
472
472
473 # IOPub channel
473 # IOPub channel
474
474
475
475
476 def test_stream():
476 def test_stream():
477 flush_channels()
477 flush_channels()
478
478
479 msg_id, reply = execute("print('hi')")
479 msg_id, reply = execute("print('hi')")
480
480
481 stdout = KC.iopub_channel.get_msg(timeout=TIMEOUT)
481 stdout = KC.iopub_channel.get_msg(timeout=TIMEOUT)
482 validate_message(stdout, 'stream', msg_id)
482 validate_message(stdout, 'stream', msg_id)
483 content = stdout['content']
483 content = stdout['content']
484 nt.assert_equal(content['text'], u'hi\n')
484 nt.assert_equal(content['text'], u'hi\n')
485
485
486
486
487 def test_display_data():
487 def test_display_data():
488 flush_channels()
488 flush_channels()
489
489
490 msg_id, reply = execute("from IPython.core.display import display; display(1)")
490 msg_id, reply = execute("from IPython.core.display import display; display(1)")
491
491
492 display = KC.iopub_channel.get_msg(timeout=TIMEOUT)
492 display = KC.iopub_channel.get_msg(timeout=TIMEOUT)
493 validate_message(display, 'display_data', parent=msg_id)
493 validate_message(display, 'display_data', parent=msg_id)
494 data = display['content']['data']
494 data = display['content']['data']
495 nt.assert_equal(data['text/plain'], u'1')
495 nt.assert_equal(data['text/plain'], u'1')
496
@@ -1,161 +1,163 b''
1 """utilities for testing IPython kernels"""
1 """utilities for testing IPython kernels"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import atexit
6 import atexit
7 import os
7
8
8 from contextlib import contextmanager
9 from contextlib import contextmanager
9 from subprocess import PIPE, STDOUT
10 from subprocess import PIPE, STDOUT
10 try:
11 try:
11 from queue import Empty # Py 3
12 from queue import Empty # Py 3
12 except ImportError:
13 except ImportError:
13 from Queue import Empty # Py 2
14 from Queue import Empty # Py 2
14
15
15 import nose
16 import nose
16 import nose.tools as nt
17 import nose.tools as nt
17
18
18 from IPython.kernel import manager
19 from jupyter_client import manager
19
20
20 #-------------------------------------------------------------------------------
21 #-------------------------------------------------------------------------------
21 # Globals
22 # Globals
22 #-------------------------------------------------------------------------------
23 #-------------------------------------------------------------------------------
23
24
24 STARTUP_TIMEOUT = 60
25 STARTUP_TIMEOUT = 60
25 TIMEOUT = 15
26 TIMEOUT = 15
26
27
27 KM = None
28 KM = None
28 KC = None
29 KC = None
29
30
30 #-------------------------------------------------------------------------------
31 #-------------------------------------------------------------------------------
31 # code
32 # code
32 #-------------------------------------------------------------------------------
33 #-------------------------------------------------------------------------------
33 def start_new_kernel(**kwargs):
34 def start_new_kernel(**kwargs):
34 """start a new kernel, and return its Manager and Client
35 """start a new kernel, and return its Manager and Client
35
36
36 Integrates with our output capturing for tests.
37 Integrates with our output capturing for tests.
37 """
38 """
38 kwargs.update(dict(stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT))
39 kwargs.update(dict(stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT))
39 return manager.start_new_kernel(startup_timeout=STARTUP_TIMEOUT, **kwargs)
40 return manager.start_new_kernel(startup_timeout=STARTUP_TIMEOUT, **kwargs)
40
41
41 def flush_channels(kc=None):
42 def flush_channels(kc=None):
42 """flush any messages waiting on the queue"""
43 """flush any messages waiting on the queue"""
43 from .test_message_spec import validate_message
44 from .test_message_spec import validate_message
44
45
45 if kc is None:
46 if kc is None:
46 kc = KC
47 kc = KC
47 for channel in (kc.shell_channel, kc.iopub_channel):
48 for channel in (kc.shell_channel, kc.iopub_channel):
48 while True:
49 while True:
49 try:
50 try:
50 msg = channel.get_msg(block=True, timeout=0.1)
51 msg = channel.get_msg(block=True, timeout=0.1)
51 except Empty:
52 except Empty:
52 break
53 break
53 else:
54 else:
54 validate_message(msg)
55 validate_message(msg)
55
56
56
57
57 def execute(code='', kc=None, **kwargs):
58 def execute(code='', kc=None, **kwargs):
58 """wrapper for doing common steps for validating an execution request"""
59 """wrapper for doing common steps for validating an execution request"""
59 from .test_message_spec import validate_message
60 from .test_message_spec import validate_message
60 if kc is None:
61 if kc is None:
61 kc = KC
62 kc = KC
62 msg_id = kc.execute(code=code, **kwargs)
63 msg_id = kc.execute(code=code, **kwargs)
63 reply = kc.get_shell_msg(timeout=TIMEOUT)
64 reply = kc.get_shell_msg(timeout=TIMEOUT)
64 validate_message(reply, 'execute_reply', msg_id)
65 validate_message(reply, 'execute_reply', msg_id)
65 busy = kc.get_iopub_msg(timeout=TIMEOUT)
66 busy = kc.get_iopub_msg(timeout=TIMEOUT)
66 validate_message(busy, 'status', msg_id)
67 validate_message(busy, 'status', msg_id)
67 nt.assert_equal(busy['content']['execution_state'], 'busy')
68 nt.assert_equal(busy['content']['execution_state'], 'busy')
68
69
69 if not kwargs.get('silent'):
70 if not kwargs.get('silent'):
70 execute_input = kc.get_iopub_msg(timeout=TIMEOUT)
71 execute_input = kc.get_iopub_msg(timeout=TIMEOUT)
71 validate_message(execute_input, 'execute_input', msg_id)
72 validate_message(execute_input, 'execute_input', msg_id)
72 nt.assert_equal(execute_input['content']['code'], code)
73 nt.assert_equal(execute_input['content']['code'], code)
73
74
74 return msg_id, reply['content']
75 return msg_id, reply['content']
75
76
76 def start_global_kernel():
77 def start_global_kernel():
77 """start the global kernel (if it isn't running) and return its client"""
78 """start the global kernel (if it isn't running) and return its client"""
78 global KM, KC
79 global KM, KC
79 if KM is None:
80 if KM is None:
80 KM, KC = start_new_kernel()
81 KM, KC = start_new_kernel()
81 atexit.register(stop_global_kernel)
82 atexit.register(stop_global_kernel)
82 else:
83 else:
83 flush_channels(KC)
84 flush_channels(KC)
84 return KC
85 return KC
85
86
86 @contextmanager
87 @contextmanager
87 def kernel():
88 def kernel():
88 """Context manager for the global kernel instance
89 """Context manager for the global kernel instance
89
90
90 Should be used for most kernel tests
91 Should be used for most kernel tests
91
92
92 Returns
93 Returns
93 -------
94 -------
94 kernel_client: connected KernelClient instance
95 kernel_client: connected KernelClient instance
95 """
96 """
96 yield start_global_kernel()
97 yield start_global_kernel()
97
98
98 def uses_kernel(test_f):
99 def uses_kernel(test_f):
99 """Decorator for tests that use the global kernel"""
100 """Decorator for tests that use the global kernel"""
100 def wrapped_test():
101 def wrapped_test():
101 with kernel() as kc:
102 with kernel() as kc:
102 test_f(kc)
103 test_f(kc)
103 wrapped_test.__doc__ = test_f.__doc__
104 wrapped_test.__doc__ = test_f.__doc__
104 wrapped_test.__name__ = test_f.__name__
105 wrapped_test.__name__ = test_f.__name__
105 return wrapped_test
106 return wrapped_test
106
107
107 def stop_global_kernel():
108 def stop_global_kernel():
108 """Stop the global shared kernel instance, if it exists"""
109 """Stop the global shared kernel instance, if it exists"""
109 global KM, KC
110 global KM, KC
110 KC.stop_channels()
111 KC.stop_channels()
111 KC = None
112 KC = None
112 if KM is None:
113 if KM is None:
113 return
114 return
114 KM.shutdown_kernel(now=True)
115 KM.shutdown_kernel(now=True)
115 KM = None
116 KM = None
116
117
117 def new_kernel(argv=None):
118 def new_kernel(argv=None):
118 """Context manager for a new kernel in a subprocess
119 """Context manager for a new kernel in a subprocess
119
120
120 Should only be used for tests where the kernel must not be re-used.
121 Should only be used for tests where the kernel must not be re-used.
121
122
122 Returns
123 Returns
123 -------
124 -------
124 kernel_client: connected KernelClient instance
125 kernel_client: connected KernelClient instance
125 """
126 """
126 kwargs = dict(stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT,
127 kwargs = dict(
128 stdout=nose.iptest_stdstreams_fileno(), stderr=STDOUT,
127 startup_timeout=STARTUP_TIMEOUT)
129 startup_timeout=STARTUP_TIMEOUT)
128 if argv is not None:
130 if argv is not None:
129 kwargs['extra_arguments'] = argv
131 kwargs['extra_arguments'] = argv
130 return manager.run_kernel(**kwargs)
132 return manager.run_kernel(**kwargs)
131
133
132 def assemble_output(iopub):
134 def assemble_output(iopub):
133 """assemble stdout/err from an execution"""
135 """assemble stdout/err from an execution"""
134 stdout = ''
136 stdout = ''
135 stderr = ''
137 stderr = ''
136 while True:
138 while True:
137 msg = iopub.get_msg(block=True, timeout=1)
139 msg = iopub.get_msg(block=True, timeout=1)
138 msg_type = msg['msg_type']
140 msg_type = msg['msg_type']
139 content = msg['content']
141 content = msg['content']
140 if msg_type == 'status' and content['execution_state'] == 'idle':
142 if msg_type == 'status' and content['execution_state'] == 'idle':
141 # idle message signals end of output
143 # idle message signals end of output
142 break
144 break
143 elif msg['msg_type'] == 'stream':
145 elif msg['msg_type'] == 'stream':
144 if content['name'] == 'stdout':
146 if content['name'] == 'stdout':
145 stdout += content['text']
147 stdout += content['text']
146 elif content['name'] == 'stderr':
148 elif content['name'] == 'stderr':
147 stderr += content['text']
149 stderr += content['text']
148 else:
150 else:
149 raise KeyError("bad stream: %r" % content['name'])
151 raise KeyError("bad stream: %r" % content['name'])
150 else:
152 else:
151 # other output, ignored
153 # other output, ignored
152 pass
154 pass
153 return stdout, stderr
155 return stdout, stderr
154
156
155 def wait_for_idle(kc):
157 def wait_for_idle(kc):
156 while True:
158 while True:
157 msg = kc.iopub_channel.get_msg(block=True, timeout=1)
159 msg = kc.iopub_channel.get_msg(block=True, timeout=1)
158 msg_type = msg['msg_type']
160 msg_type = msg['msg_type']
159 content = msg['content']
161 content = msg['content']
160 if msg_type == 'status' and content['execution_state'] == 'idle':
162 if msg_type == 'status' and content['execution_state'] == 'idle':
161 break
163 break
@@ -1,11 +1,11 b''
1 # Copyright (c) IPython Development Team.
1 # Copyright (c) IPython Development Team.
2 # Distributed under the terms of the Modified BSD License.
2 # Distributed under the terms of the Modified BSD License.
3
3
4 # Verify zmq version dependency
4 # Verify zmq version dependency
5
5
6 from IPython.utils.zmqrelated import check_for_zmq
6 from IPython.utils.zmqrelated import check_for_zmq
7
7
8 check_for_zmq('13', 'IPython.kernel.zmq')
8 check_for_zmq('13', 'ipython_kernel.zmq')
9
10 from .session import Session
11
9
10 from jupyter_client import session
11 Session = session.Session
@@ -1,70 +1,70 b''
1 """Publishing native (typically pickled) objects.
1 """Publishing native (typically pickled) objects.
2 """
2 """
3
3
4 #-----------------------------------------------------------------------------
4 #-----------------------------------------------------------------------------
5 # Copyright (C) 2012 The IPython Development Team
5 # Copyright (C) 2012 The IPython Development Team
6 #
6 #
7 # Distributed under the terms of the BSD License. The full license is in
7 # Distributed under the terms of the BSD License. The full license is in
8 # the file COPYING, distributed as part of this software.
8 # the file COPYING, distributed as part of this software.
9 #-----------------------------------------------------------------------------
9 #-----------------------------------------------------------------------------
10
10
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12 # Imports
12 # Imports
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15 from IPython.config import Configurable
15 from IPython.config import Configurable
16 from IPython.kernel.inprocess.socket import SocketABC
16 from ipython_kernel.inprocess.socket import SocketABC
17 from IPython.utils.jsonutil import json_clean
17 from IPython.utils.jsonutil import json_clean
18 from IPython.utils.traitlets import Instance, Dict, CBytes
18 from IPython.utils.traitlets import Instance, Dict, CBytes
19 from IPython.kernel.zmq.serialize import serialize_object
19 from ipython_kernel.zmq.serialize import serialize_object
20 from IPython.kernel.zmq.session import Session, extract_header
20 from ipython_kernel.zmq.session import Session, extract_header
21
21
22 #-----------------------------------------------------------------------------
22 #-----------------------------------------------------------------------------
23 # Code
23 # Code
24 #-----------------------------------------------------------------------------
24 #-----------------------------------------------------------------------------
25
25
26
26
27 class ZMQDataPublisher(Configurable):
27 class ZMQDataPublisher(Configurable):
28
28
29 topic = topic = CBytes(b'datapub')
29 topic = topic = CBytes(b'datapub')
30 session = Instance(Session, allow_none=True)
30 session = Instance(Session, allow_none=True)
31 pub_socket = Instance(SocketABC, allow_none=True)
31 pub_socket = Instance(SocketABC, allow_none=True)
32 parent_header = Dict({})
32 parent_header = Dict({})
33
33
34 def set_parent(self, parent):
34 def set_parent(self, parent):
35 """Set the parent for outbound messages."""
35 """Set the parent for outbound messages."""
36 self.parent_header = extract_header(parent)
36 self.parent_header = extract_header(parent)
37
37
38 def publish_data(self, data):
38 def publish_data(self, data):
39 """publish a data_message on the IOPub channel
39 """publish a data_message on the IOPub channel
40
40
41 Parameters
41 Parameters
42 ----------
42 ----------
43
43
44 data : dict
44 data : dict
45 The data to be published. Think of it as a namespace.
45 The data to be published. Think of it as a namespace.
46 """
46 """
47 session = self.session
47 session = self.session
48 buffers = serialize_object(data,
48 buffers = serialize_object(data,
49 buffer_threshold=session.buffer_threshold,
49 buffer_threshold=session.buffer_threshold,
50 item_threshold=session.item_threshold,
50 item_threshold=session.item_threshold,
51 )
51 )
52 content = json_clean(dict(keys=data.keys()))
52 content = json_clean(dict(keys=data.keys()))
53 session.send(self.pub_socket, 'data_message', content=content,
53 session.send(self.pub_socket, 'data_message', content=content,
54 parent=self.parent_header,
54 parent=self.parent_header,
55 buffers=buffers,
55 buffers=buffers,
56 ident=self.topic,
56 ident=self.topic,
57 )
57 )
58
58
59
59
60 def publish_data(data):
60 def publish_data(data):
61 """publish a data_message on the IOPub channel
61 """publish a data_message on the IOPub channel
62
62
63 Parameters
63 Parameters
64 ----------
64 ----------
65
65
66 data : dict
66 data : dict
67 The data to be published. Think of it as a namespace.
67 The data to be published. Think of it as a namespace.
68 """
68 """
69 from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell
69 from ipython_kernel.zmq.zmqshell import ZMQInteractiveShell
70 ZMQInteractiveShell.instance().data_pub.publish_data(data)
70 ZMQInteractiveShell.instance().data_pub.publish_data(data)
@@ -1,74 +1,73 b''
1 """Replacements for sys.displayhook that publish over ZMQ."""
1 """Replacements for sys.displayhook that publish over ZMQ."""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import sys
6 import sys
7
7
8 from IPython.core.displayhook import DisplayHook
8 from IPython.core.displayhook import DisplayHook
9 from IPython.kernel.inprocess.socket import SocketABC
9 from ipython_kernel.inprocess.socket import SocketABC
10 from IPython.utils.jsonutil import encode_images
10 from IPython.utils.jsonutil import encode_images
11 from IPython.utils.py3compat import builtin_mod
11 from IPython.utils.py3compat import builtin_mod
12 from IPython.utils.traitlets import Instance, Dict
12 from IPython.utils.traitlets import Instance, Dict
13 from .session import extract_header, Session
13 from .session import extract_header, Session
14
14
15 class ZMQDisplayHook(object):
15 class ZMQDisplayHook(object):
16 """A simple displayhook that publishes the object's repr over a ZeroMQ
16 """A simple displayhook that publishes the object's repr over a ZeroMQ
17 socket."""
17 socket."""
18 topic=b'execute_result'
18 topic=b'execute_result'
19
19
20 def __init__(self, session, pub_socket):
20 def __init__(self, session, pub_socket):
21 self.session = session
21 self.session = session
22 self.pub_socket = pub_socket
22 self.pub_socket = pub_socket
23 self.parent_header = {}
23 self.parent_header = {}
24
24
25 def __call__(self, obj):
25 def __call__(self, obj):
26 if obj is None:
26 if obj is None:
27 return
27 return
28
28
29 builtin_mod._ = obj
29 builtin_mod._ = obj
30 sys.stdout.flush()
30 sys.stdout.flush()
31 sys.stderr.flush()
31 sys.stderr.flush()
32 msg = self.session.send(self.pub_socket, u'execute_result', {u'data':repr(obj)},
32 msg = self.session.send(self.pub_socket, u'execute_result', {u'data':repr(obj)},
33 parent=self.parent_header, ident=self.topic)
33 parent=self.parent_header, ident=self.topic)
34
34
35 def set_parent(self, parent):
35 def set_parent(self, parent):
36 self.parent_header = extract_header(parent)
36 self.parent_header = extract_header(parent)
37
37
38
38
39 class ZMQShellDisplayHook(DisplayHook):
39 class ZMQShellDisplayHook(DisplayHook):
40 """A displayhook subclass that publishes data using ZeroMQ. This is intended
40 """A displayhook subclass that publishes data using ZeroMQ. This is intended
41 to work with an InteractiveShell instance. It sends a dict of different
41 to work with an InteractiveShell instance. It sends a dict of different
42 representations of the object."""
42 representations of the object."""
43 topic=None
43 topic=None
44
44
45 session = Instance(Session, allow_none=True)
45 session = Instance(Session, allow_none=True)
46 pub_socket = Instance(SocketABC, allow_none=True)
46 pub_socket = Instance(SocketABC, allow_none=True)
47 parent_header = Dict({})
47 parent_header = Dict({})
48
48
49 def set_parent(self, parent):
49 def set_parent(self, parent):
50 """Set the parent for outbound messages."""
50 """Set the parent for outbound messages."""
51 self.parent_header = extract_header(parent)
51 self.parent_header = extract_header(parent)
52
52
53 def start_displayhook(self):
53 def start_displayhook(self):
54 self.msg = self.session.msg(u'execute_result', {
54 self.msg = self.session.msg(u'execute_result', {
55 'data': {},
55 'data': {},
56 'metadata': {},
56 'metadata': {},
57 }, parent=self.parent_header)
57 }, parent=self.parent_header)
58
58
59 def write_output_prompt(self):
59 def write_output_prompt(self):
60 """Write the output prompt."""
60 """Write the output prompt."""
61 self.msg['content']['execution_count'] = self.prompt_count
61 self.msg['content']['execution_count'] = self.prompt_count
62
62
63 def write_format_data(self, format_dict, md_dict=None):
63 def write_format_data(self, format_dict, md_dict=None):
64 self.msg['content']['data'] = encode_images(format_dict)
64 self.msg['content']['data'] = encode_images(format_dict)
65 self.msg['content']['metadata'] = md_dict
65 self.msg['content']['metadata'] = md_dict
66
66
67 def finish_displayhook(self):
67 def finish_displayhook(self):
68 """Finish up all displayhook activities."""
68 """Finish up all displayhook activities."""
69 sys.stdout.flush()
69 sys.stdout.flush()
70 sys.stderr.flush()
70 sys.stderr.flush()
71 if self.msg['content']['data']:
71 if self.msg['content']['data']:
72 self.session.send(self.pub_socket, self.msg, ident=self.topic)
72 self.session.send(self.pub_socket, self.msg, ident=self.topic)
73 self.msg = None
73 self.msg = None
74
@@ -1,57 +1,57 b''
1 """Simple function for embedding an IPython kernel
1 """Simple function for embedding an IPython kernel
2 """
2 """
3 #-----------------------------------------------------------------------------
3 #-----------------------------------------------------------------------------
4 # Imports
4 # Imports
5 #-----------------------------------------------------------------------------
5 #-----------------------------------------------------------------------------
6
6
7 import sys
7 import sys
8
8
9 from IPython.utils.frame import extract_module_locals
9 from IPython.utils.frame import extract_module_locals
10
10
11 from .kernelapp import IPKernelApp
11 from .kernelapp import IPKernelApp
12
12
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14 # Code
14 # Code
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16
16
17 def embed_kernel(module=None, local_ns=None, **kwargs):
17 def embed_kernel(module=None, local_ns=None, **kwargs):
18 """Embed and start an IPython kernel in a given scope.
18 """Embed and start an IPython kernel in a given scope.
19
19
20 Parameters
20 Parameters
21 ----------
21 ----------
22 module : ModuleType, optional
22 module : ModuleType, optional
23 The module to load into IPython globals (default: caller)
23 The module to load into IPython globals (default: caller)
24 local_ns : dict, optional
24 local_ns : dict, optional
25 The namespace to load into IPython user namespace (default: caller)
25 The namespace to load into IPython user namespace (default: caller)
26
26
27 kwargs : various, optional
27 kwargs : various, optional
28 Further keyword args are relayed to the IPKernelApp constructor,
28 Further keyword args are relayed to the IPKernelApp constructor,
29 allowing configuration of the Kernel. Will only have an effect
29 allowing configuration of the Kernel. Will only have an effect
30 on the first embed_kernel call for a given process.
30 on the first embed_kernel call for a given process.
31
31
32 """
32 """
33 # get the app if it exists, or set it up if it doesn't
33 # get the app if it exists, or set it up if it doesn't
34 if IPKernelApp.initialized():
34 if IPKernelApp.initialized():
35 app = IPKernelApp.instance()
35 app = IPKernelApp.instance()
36 else:
36 else:
37 app = IPKernelApp.instance(**kwargs)
37 app = IPKernelApp.instance(**kwargs)
38 app.initialize([])
38 app.initialize([])
39 # Undo unnecessary sys module mangling from init_sys_modules.
39 # Undo unnecessary sys module mangling from init_sys_modules.
40 # This would not be necessary if we could prevent it
40 # This would not be necessary if we could prevent it
41 # in the first place by using a different InteractiveShell
41 # in the first place by using a different InteractiveShell
42 # subclass, as in the regular embed case.
42 # subclass, as in the regular embed case.
43 main = app.kernel.shell._orig_sys_modules_main_mod
43 main = app.kernel.shell._orig_sys_modules_main_mod
44 if main is not None:
44 if main is not None:
45 sys.modules[app.kernel.shell._orig_sys_modules_main_name] = main
45 sys.modules[app.kernel.shell._orig_sys_modules_main_name] = main
46
46
47 # load the calling scope if not given
47 # load the calling scope if not given
48 (caller_module, caller_locals) = extract_module_locals(1)
48 (caller_module, caller_locals) = extract_module_locals(1)
49 if module is None:
49 if module is None:
50 module = caller_module
50 module = caller_module
51 if local_ns is None:
51 if local_ns is None:
52 local_ns = caller_locals
52 local_ns = caller_locals
53
53
54 app.kernel.user_module = module
54 app.kernel.user_module = module
55 app.kernel.user_ns = local_ns
55 app.kernel.user_ns = local_ns
56 app.shell.set_completer_frame()
56 app.shell.set_completer_frame()
57 app.start()
57 app.start()
@@ -1,273 +1,273 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 """Event loop integration for the ZeroMQ-based kernels."""
2 """Event loop integration for the ZeroMQ-based kernels."""
3
3
4 # Copyright (c) IPython Development Team.
4 # Copyright (c) IPython Development Team.
5 # Distributed under the terms of the Modified BSD License.
5 # Distributed under the terms of the Modified BSD License.
6
6
7 import os
7 import os
8 import sys
8 import sys
9
9
10 import zmq
10 import zmq
11
11
12 from IPython.config.application import Application
12 from IPython.config.application import Application
13 from IPython.utils import io
13 from IPython.utils import io
14 from IPython.lib.inputhook import _use_appnope
14 from IPython.lib.inputhook import _use_appnope
15
15
16 def _notify_stream_qt(kernel, stream):
16 def _notify_stream_qt(kernel, stream):
17
17
18 from IPython.external.qt_for_kernel import QtCore
18 from IPython.external.qt_for_kernel import QtCore
19
19
20 if _use_appnope() and kernel._darwin_app_nap:
20 if _use_appnope() and kernel._darwin_app_nap:
21 from appnope import nope_scope as context
21 from appnope import nope_scope as context
22 else:
22 else:
23 from IPython.core.interactiveshell import NoOpContext as context
23 from IPython.core.interactiveshell import NoOpContext as context
24
24
25 def process_stream_events():
25 def process_stream_events():
26 while stream.getsockopt(zmq.EVENTS) & zmq.POLLIN:
26 while stream.getsockopt(zmq.EVENTS) & zmq.POLLIN:
27 with context():
27 with context():
28 kernel.do_one_iteration()
28 kernel.do_one_iteration()
29
29
30 fd = stream.getsockopt(zmq.FD)
30 fd = stream.getsockopt(zmq.FD)
31 notifier = QtCore.QSocketNotifier(fd, QtCore.QSocketNotifier.Read, kernel.app)
31 notifier = QtCore.QSocketNotifier(fd, QtCore.QSocketNotifier.Read, kernel.app)
32 notifier.activated.connect(process_stream_events)
32 notifier.activated.connect(process_stream_events)
33
33
34 # mapping of keys to loop functions
34 # mapping of keys to loop functions
35 loop_map = {
35 loop_map = {
36 'inline': None,
36 'inline': None,
37 'nbagg': None,
37 'nbagg': None,
38 'notebook': None,
38 'notebook': None,
39 None : None,
39 None : None,
40 }
40 }
41
41
42 def register_integration(*toolkitnames):
42 def register_integration(*toolkitnames):
43 """Decorator to register an event loop to integrate with the IPython kernel
43 """Decorator to register an event loop to integrate with the IPython kernel
44
44
45 The decorator takes names to register the event loop as for the %gui magic.
45 The decorator takes names to register the event loop as for the %gui magic.
46 You can provide alternative names for the same toolkit.
46 You can provide alternative names for the same toolkit.
47
47
48 The decorated function should take a single argument, the IPython kernel
48 The decorated function should take a single argument, the IPython kernel
49 instance, arrange for the event loop to call ``kernel.do_one_iteration()``
49 instance, arrange for the event loop to call ``kernel.do_one_iteration()``
50 at least every ``kernel._poll_interval`` seconds, and start the event loop.
50 at least every ``kernel._poll_interval`` seconds, and start the event loop.
51
51
52 :mod:`IPython.kernel.zmq.eventloops` provides and registers such functions
52 :mod:`ipython_kernel.zmq.eventloops` provides and registers such functions
53 for a few common event loops.
53 for a few common event loops.
54 """
54 """
55 def decorator(func):
55 def decorator(func):
56 for name in toolkitnames:
56 for name in toolkitnames:
57 loop_map[name] = func
57 loop_map[name] = func
58 return func
58 return func
59
59
60 return decorator
60 return decorator
61
61
62
62
63 @register_integration('qt', 'qt4')
63 @register_integration('qt', 'qt4')
64 def loop_qt4(kernel):
64 def loop_qt4(kernel):
65 """Start a kernel with PyQt4 event loop integration."""
65 """Start a kernel with PyQt4 event loop integration."""
66
66
67 from IPython.lib.guisupport import get_app_qt4, start_event_loop_qt4
67 from IPython.lib.guisupport import get_app_qt4, start_event_loop_qt4
68
68
69 kernel.app = get_app_qt4([" "])
69 kernel.app = get_app_qt4([" "])
70 kernel.app.setQuitOnLastWindowClosed(False)
70 kernel.app.setQuitOnLastWindowClosed(False)
71
71
72 for s in kernel.shell_streams:
72 for s in kernel.shell_streams:
73 _notify_stream_qt(kernel, s)
73 _notify_stream_qt(kernel, s)
74
74
75 start_event_loop_qt4(kernel.app)
75 start_event_loop_qt4(kernel.app)
76
76
77 @register_integration('qt5')
77 @register_integration('qt5')
78 def loop_qt5(kernel):
78 def loop_qt5(kernel):
79 """Start a kernel with PyQt5 event loop integration."""
79 """Start a kernel with PyQt5 event loop integration."""
80 os.environ['QT_API'] = 'pyqt5'
80 os.environ['QT_API'] = 'pyqt5'
81 return loop_qt4(kernel)
81 return loop_qt4(kernel)
82
82
83
83
84 @register_integration('wx')
84 @register_integration('wx')
85 def loop_wx(kernel):
85 def loop_wx(kernel):
86 """Start a kernel with wx event loop support."""
86 """Start a kernel with wx event loop support."""
87
87
88 import wx
88 import wx
89 from IPython.lib.guisupport import start_event_loop_wx
89 from IPython.lib.guisupport import start_event_loop_wx
90
90
91 if _use_appnope() and kernel._darwin_app_nap:
91 if _use_appnope() and kernel._darwin_app_nap:
92 # we don't hook up App Nap contexts for Wx,
92 # we don't hook up App Nap contexts for Wx,
93 # just disable it outright.
93 # just disable it outright.
94 from appnope import nope
94 from appnope import nope
95 nope()
95 nope()
96
96
97 doi = kernel.do_one_iteration
97 doi = kernel.do_one_iteration
98 # Wx uses milliseconds
98 # Wx uses milliseconds
99 poll_interval = int(1000*kernel._poll_interval)
99 poll_interval = int(1000*kernel._poll_interval)
100
100
101 # We have to put the wx.Timer in a wx.Frame for it to fire properly.
101 # We have to put the wx.Timer in a wx.Frame for it to fire properly.
102 # We make the Frame hidden when we create it in the main app below.
102 # We make the Frame hidden when we create it in the main app below.
103 class TimerFrame(wx.Frame):
103 class TimerFrame(wx.Frame):
104 def __init__(self, func):
104 def __init__(self, func):
105 wx.Frame.__init__(self, None, -1)
105 wx.Frame.__init__(self, None, -1)
106 self.timer = wx.Timer(self)
106 self.timer = wx.Timer(self)
107 # Units for the timer are in milliseconds
107 # Units for the timer are in milliseconds
108 self.timer.Start(poll_interval)
108 self.timer.Start(poll_interval)
109 self.Bind(wx.EVT_TIMER, self.on_timer)
109 self.Bind(wx.EVT_TIMER, self.on_timer)
110 self.func = func
110 self.func = func
111
111
112 def on_timer(self, event):
112 def on_timer(self, event):
113 self.func()
113 self.func()
114
114
115 # We need a custom wx.App to create our Frame subclass that has the
115 # We need a custom wx.App to create our Frame subclass that has the
116 # wx.Timer to drive the ZMQ event loop.
116 # wx.Timer to drive the ZMQ event loop.
117 class IPWxApp(wx.App):
117 class IPWxApp(wx.App):
118 def OnInit(self):
118 def OnInit(self):
119 self.frame = TimerFrame(doi)
119 self.frame = TimerFrame(doi)
120 self.frame.Show(False)
120 self.frame.Show(False)
121 return True
121 return True
122
122
123 # The redirect=False here makes sure that wx doesn't replace
123 # The redirect=False here makes sure that wx doesn't replace
124 # sys.stdout/stderr with its own classes.
124 # sys.stdout/stderr with its own classes.
125 kernel.app = IPWxApp(redirect=False)
125 kernel.app = IPWxApp(redirect=False)
126
126
127 # The import of wx on Linux sets the handler for signal.SIGINT
127 # The import of wx on Linux sets the handler for signal.SIGINT
128 # to 0. This is a bug in wx or gtk. We fix by just setting it
128 # to 0. This is a bug in wx or gtk. We fix by just setting it
129 # back to the Python default.
129 # back to the Python default.
130 import signal
130 import signal
131 if not callable(signal.getsignal(signal.SIGINT)):
131 if not callable(signal.getsignal(signal.SIGINT)):
132 signal.signal(signal.SIGINT, signal.default_int_handler)
132 signal.signal(signal.SIGINT, signal.default_int_handler)
133
133
134 start_event_loop_wx(kernel.app)
134 start_event_loop_wx(kernel.app)
135
135
136
136
137 @register_integration('tk')
137 @register_integration('tk')
138 def loop_tk(kernel):
138 def loop_tk(kernel):
139 """Start a kernel with the Tk event loop."""
139 """Start a kernel with the Tk event loop."""
140
140
141 try:
141 try:
142 from tkinter import Tk # Py 3
142 from tkinter import Tk # Py 3
143 except ImportError:
143 except ImportError:
144 from Tkinter import Tk # Py 2
144 from Tkinter import Tk # Py 2
145 doi = kernel.do_one_iteration
145 doi = kernel.do_one_iteration
146 # Tk uses milliseconds
146 # Tk uses milliseconds
147 poll_interval = int(1000*kernel._poll_interval)
147 poll_interval = int(1000*kernel._poll_interval)
148 # For Tkinter, we create a Tk object and call its withdraw method.
148 # For Tkinter, we create a Tk object and call its withdraw method.
149 class Timer(object):
149 class Timer(object):
150 def __init__(self, func):
150 def __init__(self, func):
151 self.app = Tk()
151 self.app = Tk()
152 self.app.withdraw()
152 self.app.withdraw()
153 self.func = func
153 self.func = func
154
154
155 def on_timer(self):
155 def on_timer(self):
156 self.func()
156 self.func()
157 self.app.after(poll_interval, self.on_timer)
157 self.app.after(poll_interval, self.on_timer)
158
158
159 def start(self):
159 def start(self):
160 self.on_timer() # Call it once to get things going.
160 self.on_timer() # Call it once to get things going.
161 self.app.mainloop()
161 self.app.mainloop()
162
162
163 kernel.timer = Timer(doi)
163 kernel.timer = Timer(doi)
164 kernel.timer.start()
164 kernel.timer.start()
165
165
166
166
167 @register_integration('gtk')
167 @register_integration('gtk')
168 def loop_gtk(kernel):
168 def loop_gtk(kernel):
169 """Start the kernel, coordinating with the GTK event loop"""
169 """Start the kernel, coordinating with the GTK event loop"""
170 from .gui.gtkembed import GTKEmbed
170 from .gui.gtkembed import GTKEmbed
171
171
172 gtk_kernel = GTKEmbed(kernel)
172 gtk_kernel = GTKEmbed(kernel)
173 gtk_kernel.start()
173 gtk_kernel.start()
174
174
175
175
176 @register_integration('gtk3')
176 @register_integration('gtk3')
177 def loop_gtk3(kernel):
177 def loop_gtk3(kernel):
178 """Start the kernel, coordinating with the GTK event loop"""
178 """Start the kernel, coordinating with the GTK event loop"""
179 from .gui.gtk3embed import GTKEmbed
179 from .gui.gtk3embed import GTKEmbed
180
180
181 gtk_kernel = GTKEmbed(kernel)
181 gtk_kernel = GTKEmbed(kernel)
182 gtk_kernel.start()
182 gtk_kernel.start()
183
183
184
184
185 @register_integration('osx')
185 @register_integration('osx')
186 def loop_cocoa(kernel):
186 def loop_cocoa(kernel):
187 """Start the kernel, coordinating with the Cocoa CFRunLoop event loop
187 """Start the kernel, coordinating with the Cocoa CFRunLoop event loop
188 via the matplotlib MacOSX backend.
188 via the matplotlib MacOSX backend.
189 """
189 """
190 import matplotlib
190 import matplotlib
191 if matplotlib.__version__ < '1.1.0':
191 if matplotlib.__version__ < '1.1.0':
192 kernel.log.warn(
192 kernel.log.warn(
193 "MacOSX backend in matplotlib %s doesn't have a Timer, "
193 "MacOSX backend in matplotlib %s doesn't have a Timer, "
194 "falling back on Tk for CFRunLoop integration. Note that "
194 "falling back on Tk for CFRunLoop integration. Note that "
195 "even this won't work if Tk is linked against X11 instead of "
195 "even this won't work if Tk is linked against X11 instead of "
196 "Cocoa (e.g. EPD). To use the MacOSX backend in the kernel, "
196 "Cocoa (e.g. EPD). To use the MacOSX backend in the kernel, "
197 "you must use matplotlib >= 1.1.0, or a native libtk."
197 "you must use matplotlib >= 1.1.0, or a native libtk."
198 )
198 )
199 return loop_tk(kernel)
199 return loop_tk(kernel)
200
200
201 from matplotlib.backends.backend_macosx import TimerMac, show
201 from matplotlib.backends.backend_macosx import TimerMac, show
202
202
203 # scale interval for sec->ms
203 # scale interval for sec->ms
204 poll_interval = int(1000*kernel._poll_interval)
204 poll_interval = int(1000*kernel._poll_interval)
205
205
206 real_excepthook = sys.excepthook
206 real_excepthook = sys.excepthook
207 def handle_int(etype, value, tb):
207 def handle_int(etype, value, tb):
208 """don't let KeyboardInterrupts look like crashes"""
208 """don't let KeyboardInterrupts look like crashes"""
209 if etype is KeyboardInterrupt:
209 if etype is KeyboardInterrupt:
210 io.raw_print("KeyboardInterrupt caught in CFRunLoop")
210 io.raw_print("KeyboardInterrupt caught in CFRunLoop")
211 else:
211 else:
212 real_excepthook(etype, value, tb)
212 real_excepthook(etype, value, tb)
213
213
214 # add doi() as a Timer to the CFRunLoop
214 # add doi() as a Timer to the CFRunLoop
215 def doi():
215 def doi():
216 # restore excepthook during IPython code
216 # restore excepthook during IPython code
217 sys.excepthook = real_excepthook
217 sys.excepthook = real_excepthook
218 kernel.do_one_iteration()
218 kernel.do_one_iteration()
219 # and back:
219 # and back:
220 sys.excepthook = handle_int
220 sys.excepthook = handle_int
221
221
222 t = TimerMac(poll_interval)
222 t = TimerMac(poll_interval)
223 t.add_callback(doi)
223 t.add_callback(doi)
224 t.start()
224 t.start()
225
225
226 # but still need a Poller for when there are no active windows,
226 # but still need a Poller for when there are no active windows,
227 # during which time mainloop() returns immediately
227 # during which time mainloop() returns immediately
228 poller = zmq.Poller()
228 poller = zmq.Poller()
229 if kernel.control_stream:
229 if kernel.control_stream:
230 poller.register(kernel.control_stream.socket, zmq.POLLIN)
230 poller.register(kernel.control_stream.socket, zmq.POLLIN)
231 for stream in kernel.shell_streams:
231 for stream in kernel.shell_streams:
232 poller.register(stream.socket, zmq.POLLIN)
232 poller.register(stream.socket, zmq.POLLIN)
233
233
234 while True:
234 while True:
235 try:
235 try:
236 # double nested try/except, to properly catch KeyboardInterrupt
236 # double nested try/except, to properly catch KeyboardInterrupt
237 # due to pyzmq Issue #130
237 # due to pyzmq Issue #130
238 try:
238 try:
239 # don't let interrupts during mainloop invoke crash_handler:
239 # don't let interrupts during mainloop invoke crash_handler:
240 sys.excepthook = handle_int
240 sys.excepthook = handle_int
241 show.mainloop()
241 show.mainloop()
242 sys.excepthook = real_excepthook
242 sys.excepthook = real_excepthook
243 # use poller if mainloop returned (no windows)
243 # use poller if mainloop returned (no windows)
244 # scale by extra factor of 10, since it's a real poll
244 # scale by extra factor of 10, since it's a real poll
245 poller.poll(10*poll_interval)
245 poller.poll(10*poll_interval)
246 kernel.do_one_iteration()
246 kernel.do_one_iteration()
247 except:
247 except:
248 raise
248 raise
249 except KeyboardInterrupt:
249 except KeyboardInterrupt:
250 # Ctrl-C shouldn't crash the kernel
250 # Ctrl-C shouldn't crash the kernel
251 io.raw_print("KeyboardInterrupt caught in kernel")
251 io.raw_print("KeyboardInterrupt caught in kernel")
252 finally:
252 finally:
253 # ensure excepthook is restored
253 # ensure excepthook is restored
254 sys.excepthook = real_excepthook
254 sys.excepthook = real_excepthook
255
255
256
256
257
257
258 def enable_gui(gui, kernel=None):
258 def enable_gui(gui, kernel=None):
259 """Enable integration with a given GUI"""
259 """Enable integration with a given GUI"""
260 if gui not in loop_map:
260 if gui not in loop_map:
261 e = "Invalid GUI request %r, valid ones are:%s" % (gui, loop_map.keys())
261 e = "Invalid GUI request %r, valid ones are:%s" % (gui, loop_map.keys())
262 raise ValueError(e)
262 raise ValueError(e)
263 if kernel is None:
263 if kernel is None:
264 if Application.initialized():
264 if Application.initialized():
265 kernel = getattr(Application.instance(), 'kernel', None)
265 kernel = getattr(Application.instance(), 'kernel', None)
266 if kernel is None:
266 if kernel is None:
267 raise RuntimeError("You didn't specify a kernel,"
267 raise RuntimeError("You didn't specify a kernel,"
268 " and no IPython Application with a kernel appears to be running."
268 " and no IPython Application with a kernel appears to be running."
269 )
269 )
270 loop = loop_map[gui]
270 loop = loop_map[gui]
271 if loop and kernel.eventloop is not None and kernel.eventloop is not loop:
271 if loop and kernel.eventloop is not None and kernel.eventloop is not loop:
272 raise RuntimeError("Cannot activate multiple GUI eventloops")
272 raise RuntimeError("Cannot activate multiple GUI eventloops")
273 kernel.eventloop = loop
273 kernel.eventloop = loop
1 NO CONTENT: file renamed from IPython/kernel/zmq/gui/__init__.py to ipython_kernel/zmq/gui/__init__.py
NO CONTENT: file renamed from IPython/kernel/zmq/gui/__init__.py to ipython_kernel/zmq/gui/__init__.py
@@ -1,85 +1,85 b''
1 """GUI support for the IPython ZeroMQ kernel - GTK toolkit support.
1 """GUI support for the IPython ZeroMQ kernel - GTK toolkit support.
2 """
2 """
3 #-----------------------------------------------------------------------------
3 #-----------------------------------------------------------------------------
4 # Copyright (C) 2010-2011 The IPython Development Team
4 # Copyright (C) 2010-2011 The IPython Development Team
5 #
5 #
6 # Distributed under the terms of the BSD License. The full license is in
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING.txt, distributed as part of this software.
7 # the file COPYING.txt, distributed as part of this software.
8 #-----------------------------------------------------------------------------
8 #-----------------------------------------------------------------------------
9
9
10 #-----------------------------------------------------------------------------
10 #-----------------------------------------------------------------------------
11 # Imports
11 # Imports
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13 # stdlib
13 # stdlib
14 import sys
14 import sys
15
15
16 # Third-party
16 # Third-party
17 from gi.repository import GObject, Gtk
17 from gi.repository import GObject, Gtk
18
18
19 #-----------------------------------------------------------------------------
19 #-----------------------------------------------------------------------------
20 # Classes and functions
20 # Classes and functions
21 #-----------------------------------------------------------------------------
21 #-----------------------------------------------------------------------------
22
22
23 class GTKEmbed(object):
23 class GTKEmbed(object):
24 """A class to embed a kernel into the GTK main event loop.
24 """A class to embed a kernel into the GTK main event loop.
25 """
25 """
26 def __init__(self, kernel):
26 def __init__(self, kernel):
27 self.kernel = kernel
27 self.kernel = kernel
28 # These two will later store the real gtk functions when we hijack them
28 # These two will later store the real gtk functions when we hijack them
29 self.gtk_main = None
29 self.gtk_main = None
30 self.gtk_main_quit = None
30 self.gtk_main_quit = None
31
31
32 def start(self):
32 def start(self):
33 """Starts the GTK main event loop and sets our kernel startup routine.
33 """Starts the GTK main event loop and sets our kernel startup routine.
34 """
34 """
35 # Register our function to initiate the kernel and start gtk
35 # Register our function to initiate the kernel and start gtk
36 GObject.idle_add(self._wire_kernel)
36 GObject.idle_add(self._wire_kernel)
37 Gtk.main()
37 Gtk.main()
38
38
39 def _wire_kernel(self):
39 def _wire_kernel(self):
40 """Initializes the kernel inside GTK.
40 """Initializes the kernel inside GTK.
41
41
42 This is meant to run only once at startup, so it does its job and
42 This is meant to run only once at startup, so it does its job and
43 returns False to ensure it doesn't get run again by GTK.
43 returns False to ensure it doesn't get run again by GTK.
44 """
44 """
45 self.gtk_main, self.gtk_main_quit = self._hijack_gtk()
45 self.gtk_main, self.gtk_main_quit = self._hijack_gtk()
46 GObject.timeout_add(int(1000*self.kernel._poll_interval),
46 GObject.timeout_add(int(1000*self.kernel._poll_interval),
47 self.iterate_kernel)
47 self.iterate_kernel)
48 return False
48 return False
49
49
50 def iterate_kernel(self):
50 def iterate_kernel(self):
51 """Run one iteration of the kernel and return True.
51 """Run one iteration of the kernel and return True.
52
52
53 GTK timer functions must return True to be called again, so we make the
53 GTK timer functions must return True to be called again, so we make the
54 call to :meth:`do_one_iteration` and then return True for GTK.
54 call to :meth:`do_one_iteration` and then return True for GTK.
55 """
55 """
56 self.kernel.do_one_iteration()
56 self.kernel.do_one_iteration()
57 return True
57 return True
58
58
59 def stop(self):
59 def stop(self):
60 # FIXME: this one isn't getting called because we have no reliable
60 # FIXME: this one isn't getting called because we have no reliable
61 # kernel shutdown. We need to fix that: once the kernel has a
61 # kernel shutdown. We need to fix that: once the kernel has a
62 # shutdown mechanism, it can call this.
62 # shutdown mechanism, it can call this.
63 self.gtk_main_quit()
63 self.gtk_main_quit()
64 sys.exit()
64 sys.exit()
65
65
66 def _hijack_gtk(self):
66 def _hijack_gtk(self):
67 """Hijack a few key functions in GTK for IPython integration.
67 """Hijack a few key functions in GTK for IPython integration.
68
68
69 Modifies pyGTK's main and main_quit with a dummy so user code does not
69 Modifies pyGTK's main and main_quit with a dummy so user code does not
70 block IPython. This allows us to use %run to run arbitrary pygtk
70 block IPython. This allows us to use %run to run arbitrary pygtk
71 scripts from a long-lived IPython session, and when they attempt to
71 scripts from a long-lived IPython session, and when they attempt to
72 start or stop
72 start or stop
73
73
74 Returns
74 Returns
75 -------
75 -------
76 The original functions that have been hijacked:
76 The original functions that have been hijacked:
77 - Gtk.main
77 - Gtk.main
78 - Gtk.main_quit
78 - Gtk.main_quit
79 """
79 """
80 def dummy(*args, **kw):
80 def dummy(*args, **kw):
81 pass
81 pass
82 # save and trap main and main_quit from gtk
82 # save and trap main and main_quit from gtk
83 orig_main, Gtk.main = Gtk.main, dummy
83 orig_main, Gtk.main = Gtk.main, dummy
84 orig_main_quit, Gtk.main_quit = Gtk.main_quit, dummy
84 orig_main_quit, Gtk.main_quit = Gtk.main_quit, dummy
85 return orig_main, orig_main_quit
85 return orig_main, orig_main_quit
@@ -1,86 +1,86 b''
1 """GUI support for the IPython ZeroMQ kernel - GTK toolkit support.
1 """GUI support for the IPython ZeroMQ kernel - GTK toolkit support.
2 """
2 """
3 #-----------------------------------------------------------------------------
3 #-----------------------------------------------------------------------------
4 # Copyright (C) 2010-2011 The IPython Development Team
4 # Copyright (C) 2010-2011 The IPython Development Team
5 #
5 #
6 # Distributed under the terms of the BSD License. The full license is in
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING.txt, distributed as part of this software.
7 # the file COPYING.txt, distributed as part of this software.
8 #-----------------------------------------------------------------------------
8 #-----------------------------------------------------------------------------
9
9
10 #-----------------------------------------------------------------------------
10 #-----------------------------------------------------------------------------
11 # Imports
11 # Imports
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13 # stdlib
13 # stdlib
14 import sys
14 import sys
15
15
16 # Third-party
16 # Third-party
17 import gobject
17 import gobject
18 import gtk
18 import gtk
19
19
20 #-----------------------------------------------------------------------------
20 #-----------------------------------------------------------------------------
21 # Classes and functions
21 # Classes and functions
22 #-----------------------------------------------------------------------------
22 #-----------------------------------------------------------------------------
23
23
24 class GTKEmbed(object):
24 class GTKEmbed(object):
25 """A class to embed a kernel into the GTK main event loop.
25 """A class to embed a kernel into the GTK main event loop.
26 """
26 """
27 def __init__(self, kernel):
27 def __init__(self, kernel):
28 self.kernel = kernel
28 self.kernel = kernel
29 # These two will later store the real gtk functions when we hijack them
29 # These two will later store the real gtk functions when we hijack them
30 self.gtk_main = None
30 self.gtk_main = None
31 self.gtk_main_quit = None
31 self.gtk_main_quit = None
32
32
33 def start(self):
33 def start(self):
34 """Starts the GTK main event loop and sets our kernel startup routine.
34 """Starts the GTK main event loop and sets our kernel startup routine.
35 """
35 """
36 # Register our function to initiate the kernel and start gtk
36 # Register our function to initiate the kernel and start gtk
37 gobject.idle_add(self._wire_kernel)
37 gobject.idle_add(self._wire_kernel)
38 gtk.main()
38 gtk.main()
39
39
40 def _wire_kernel(self):
40 def _wire_kernel(self):
41 """Initializes the kernel inside GTK.
41 """Initializes the kernel inside GTK.
42
42
43 This is meant to run only once at startup, so it does its job and
43 This is meant to run only once at startup, so it does its job and
44 returns False to ensure it doesn't get run again by GTK.
44 returns False to ensure it doesn't get run again by GTK.
45 """
45 """
46 self.gtk_main, self.gtk_main_quit = self._hijack_gtk()
46 self.gtk_main, self.gtk_main_quit = self._hijack_gtk()
47 gobject.timeout_add(int(1000*self.kernel._poll_interval),
47 gobject.timeout_add(int(1000*self.kernel._poll_interval),
48 self.iterate_kernel)
48 self.iterate_kernel)
49 return False
49 return False
50
50
51 def iterate_kernel(self):
51 def iterate_kernel(self):
52 """Run one iteration of the kernel and return True.
52 """Run one iteration of the kernel and return True.
53
53
54 GTK timer functions must return True to be called again, so we make the
54 GTK timer functions must return True to be called again, so we make the
55 call to :meth:`do_one_iteration` and then return True for GTK.
55 call to :meth:`do_one_iteration` and then return True for GTK.
56 """
56 """
57 self.kernel.do_one_iteration()
57 self.kernel.do_one_iteration()
58 return True
58 return True
59
59
60 def stop(self):
60 def stop(self):
61 # FIXME: this one isn't getting called because we have no reliable
61 # FIXME: this one isn't getting called because we have no reliable
62 # kernel shutdown. We need to fix that: once the kernel has a
62 # kernel shutdown. We need to fix that: once the kernel has a
63 # shutdown mechanism, it can call this.
63 # shutdown mechanism, it can call this.
64 self.gtk_main_quit()
64 self.gtk_main_quit()
65 sys.exit()
65 sys.exit()
66
66
67 def _hijack_gtk(self):
67 def _hijack_gtk(self):
68 """Hijack a few key functions in GTK for IPython integration.
68 """Hijack a few key functions in GTK for IPython integration.
69
69
70 Modifies pyGTK's main and main_quit with a dummy so user code does not
70 Modifies pyGTK's main and main_quit with a dummy so user code does not
71 block IPython. This allows us to use %run to run arbitrary pygtk
71 block IPython. This allows us to use %run to run arbitrary pygtk
72 scripts from a long-lived IPython session, and when they attempt to
72 scripts from a long-lived IPython session, and when they attempt to
73 start or stop
73 start or stop
74
74
75 Returns
75 Returns
76 -------
76 -------
77 The original functions that have been hijacked:
77 The original functions that have been hijacked:
78 - gtk.main
78 - gtk.main
79 - gtk.main_quit
79 - gtk.main_quit
80 """
80 """
81 def dummy(*args, **kw):
81 def dummy(*args, **kw):
82 pass
82 pass
83 # save and trap main and main_quit from gtk
83 # save and trap main and main_quit from gtk
84 orig_main, gtk.main = gtk.main, dummy
84 orig_main, gtk.main = gtk.main, dummy
85 orig_main_quit, gtk.main_quit = gtk.main_quit, dummy
85 orig_main_quit, gtk.main_quit = gtk.main_quit, dummy
86 return orig_main, orig_main_quit
86 return orig_main, orig_main_quit
1 NO CONTENT: file renamed from IPython/kernel/zmq/heartbeat.py to ipython_kernel/zmq/heartbeat.py
NO CONTENT: file renamed from IPython/kernel/zmq/heartbeat.py to ipython_kernel/zmq/heartbeat.py
@@ -1,249 +1,249 b''
1 """Wrappers for forwarding stdout/stderr over zmq"""
1 """Wrappers for forwarding stdout/stderr over zmq"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import os
6 import os
7 import threading
7 import threading
8 import time
8 import time
9 import uuid
9 import uuid
10 from io import StringIO, UnsupportedOperation
10 from io import StringIO, UnsupportedOperation
11
11
12 import zmq
12 import zmq
13 from zmq.eventloop.ioloop import IOLoop
13 from zmq.eventloop.ioloop import IOLoop
14
14
15 from .session import extract_header
15 from .session import extract_header
16
16
17 from IPython.utils import py3compat
17 from IPython.utils import py3compat
18 from IPython.utils.py3compat import unicode_type
18 from IPython.utils.py3compat import unicode_type
19 from IPython.utils.warn import warn
19 from IPython.utils.warn import warn
20
20
21 #-----------------------------------------------------------------------------
21 #-----------------------------------------------------------------------------
22 # Globals
22 # Globals
23 #-----------------------------------------------------------------------------
23 #-----------------------------------------------------------------------------
24
24
# Return values of OutStream._check_mp_mode: MASTER means we are running in
# the process that created the stream; CHILD means a forked subprocess that
# must forward its output over the zmq pipe.
MASTER = 0
CHILD = 1
27
27
28 #-----------------------------------------------------------------------------
28 #-----------------------------------------------------------------------------
29 # Stream classes
29 # Stream classes
30 #-----------------------------------------------------------------------------
30 #-----------------------------------------------------------------------------
31
31
class OutStream(object):
    """A file like object that publishes the stream to a 0MQ PUB socket.

    Writes are buffered locally and pushed out as 'stream' messages on
    flush().  Flushes happen explicitly, when the flush timer interval is
    exceeded during write(), or -- in forked subprocesses -- whenever a
    newline is written.  Subprocess output travels to the master process
    over a private PUSH/PULL zmq pipe authenticated by a per-stream UUID.
    """

    # Max number of subprocess pipe messages drained per flush
    # (bounds the work done in _flush_from_subprocesses).
    _subprocess_flush_limit = 256
    # The time interval between automatic flushes, in seconds.
    flush_interval = 0.05
    # IOPub topic (bytes); overridden per-instance in __init__.
    topic = None

    def __init__(self, session, pub_socket, name, pipe=True):
        """Create a stream publishing on ``pub_socket`` as stream ``name``.

        Parameters
        ----------
        session :
            used to build and send the 'stream' messages.
        pub_socket :
            zmq PUB socket (or ZMQStream) to publish on.
        name : str
            stream name (e.g. 'stdout'); also forms the message topic.
        pipe : bool
            if True, set up the zmq pipe that collects output written by
            forked subprocesses.
        """
        self.encoding = 'UTF-8'
        self.session = session
        self.pub_socket = pub_socket
        self.name = name
        self.topic = b'stream.' + py3compat.cast_bytes(name)
        self.parent_header = {}
        self._new_buffer()
        self._buffer_lock = threading.Lock()
        # remember owning pid/thread so forks and sub-threads can be detected
        self._master_pid = os.getpid()
        self._master_thread = threading.current_thread().ident
        self._pipe_pid = os.getpid()
        self._pipe_flag = pipe
        if pipe:
            self._setup_pipe_in()

    def _setup_pipe_in(self):
        """setup listening pipe for subprocesses"""
        ctx = self.pub_socket.context

        # use UUID to authenticate pipe messages
        self._pipe_uuid = uuid.uuid4().bytes

        self._pipe_in = ctx.socket(zmq.PULL)
        self._pipe_in.linger = 0
        try:
            self._pipe_port = self._pipe_in.bind_to_random_port("tcp://127.0.0.1")
        except zmq.ZMQError as e:
            warn("Couldn't bind IOStream to 127.0.0.1: %s" % e +
                "\nsubprocess output will be unavailable."
            )
            # disable the pipe entirely: subprocess output is simply lost
            self._pipe_flag = False
            self._pipe_in.close()
            del self._pipe_in
            return
        self._pipe_poller = zmq.Poller()
        self._pipe_poller.register(self._pipe_in, zmq.POLLIN)
        if IOLoop.initialized():
            # subprocess flush should trigger flush
            # if kernel is idle
            IOLoop.instance().add_handler(self._pipe_in,
                lambda s, event: self.flush(),
                IOLoop.READ,
            )

    def _setup_pipe_out(self):
        # must be new context after fork
        ctx = zmq.Context()
        self._pipe_pid = os.getpid()
        self._pipe_out = ctx.socket(zmq.PUSH)
        self._pipe_out_lock = threading.Lock()
        self._pipe_out.connect("tcp://127.0.0.1:%i" % self._pipe_port)

    def _is_master_process(self):
        # True iff we are in the process that created this stream
        return os.getpid() == self._master_pid

    def _is_master_thread(self):
        # True iff we are in the thread that created this stream
        return threading.current_thread().ident == self._master_thread

    def _have_pipe_out(self):
        # True iff the subprocess-side PUSH socket belongs to this process
        return os.getpid() == self._pipe_pid

    def _check_mp_mode(self):
        """check for forks, and switch to zmq pipeline if necessary"""
        if not self._pipe_flag or self._is_master_process():
            return MASTER
        else:
            if not self._have_pipe_out():
                # first write after a fork: drop inherited buffer contents
                self._flush_buffer()
                # setup a new out pipe
                self._setup_pipe_out()
            return CHILD

    def set_parent(self, parent):
        # adopt the parent message's header for subsequent stream messages
        self.parent_header = extract_header(parent)

    def close(self):
        self.pub_socket = None

    @property
    def closed(self):
        return self.pub_socket is None

    def _flush_from_subprocesses(self):
        """flush possible pub data from subprocesses into my buffer"""
        if not self._pipe_flag or not self._is_master_process():
            return
        for i in range(self._subprocess_flush_limit):
            if self._pipe_poller.poll(0):
                msg = self._pipe_in.recv_multipart()
                if msg[0] != self._pipe_uuid:
                    # wrong auth token: not from one of our subprocesses
                    continue
                else:
                    self._buffer.write(msg[1].decode(self.encoding, 'replace'))
                    # this always means a flush,
                    # so reset our timer
                    self._start = 0
            else:
                break

    def _schedule_flush(self):
        """schedule a flush in the main thread

        only works with a tornado/pyzmq eventloop running
        """
        if IOLoop.initialized():
            IOLoop.instance().add_callback(self.flush)
        else:
            # no async loop, at least force the timer
            self._start = 0

    def flush(self):
        """trigger actual zmq send"""
        if self.pub_socket is None:
            raise ValueError(u'I/O operation on closed file')

        mp_mode = self._check_mp_mode()

        if mp_mode != CHILD:
            # we are master
            if not self._is_master_thread():
                # sub-threads must not trigger flush directly,
                # but at least they can schedule an async flush, or force the timer.
                self._schedule_flush()
                return

            self._flush_from_subprocesses()
            data = self._flush_buffer()

            if data:
                content = {u'name':self.name, u'text':data}
                msg = self.session.send(self.pub_socket, u'stream', content=content,
                    parent=self.parent_header, ident=self.topic)

            if hasattr(self.pub_socket, 'flush'):
                # socket itself has flush (presumably ZMQStream)
                self.pub_socket.flush()
        else:
            # forked child: push the buffered text to the master over the pipe
            with self._pipe_out_lock:
                string = self._flush_buffer()
                tracker = self._pipe_out.send_multipart([
                    self._pipe_uuid,
                    string.encode(self.encoding, 'replace'),
                ], copy=False, track=True)
                try:
                    # wait up to 1s for the send to complete, then give up
                    tracker.wait(1)
                except:
                    pass

    def isatty(self):
        return False

    def __next__(self):
        raise IOError('Read not supported on a write only stream.')

    if not py3compat.PY3:
        # Python 2 iterator protocol alias
        next = __next__

    def read(self, size=-1):
        raise IOError('Read not supported on a write only stream.')

    def readline(self, size=-1):
        raise IOError('Read not supported on a write only stream.')

    def fileno(self):
        raise UnsupportedOperation("IOStream has no fileno.")

    def write(self, string):
        """Buffer ``string``, flushing when appropriate (see class docstring)."""
        if self.pub_socket is None:
            raise ValueError('I/O operation on closed file')
        else:
            # Make sure that we're handling unicode
            if not isinstance(string, unicode_type):
                string = string.decode(self.encoding, 'replace')

            is_child = (self._check_mp_mode() == CHILD)
            self._buffer.write(string)
            if is_child:
                # newlines imply flush in subprocesses
                # mp.Pool cannot be trusted to flush promptly (or ever),
                # and this helps.
                if '\n' in string:
                    self.flush()
            # do we want to check subprocess flushes on write?
            # self._flush_from_subprocesses()
            current_time = time.time()
            if self._start < 0:
                # first write since last flush: start the flush timer
                self._start = current_time
            elif current_time - self._start > self.flush_interval:
                self.flush()

    def writelines(self, sequence):
        if self.pub_socket is None:
            raise ValueError('I/O operation on closed file')
        else:
            for string in sequence:
                self.write(string)

    def _flush_buffer(self):
        """clear the current buffer and return the current buffer data"""
        data = u''
        if self._buffer is not None:
            data = self._buffer.getvalue()
            self._buffer.close()
        self._new_buffer()
        return data

    def _new_buffer(self):
        # fresh buffer; _start < 0 means the flush timer is not running
        self._buffer = StringIO()
        self._start = -1
@@ -1,368 +1,368 b''
1 """The IPython kernel implementation"""
1 """The IPython kernel implementation"""
2
2
3 import getpass
3 import getpass
4 import sys
4 import sys
5 import traceback
5 import traceback
6
6
7 from IPython.core import release
7 from IPython.core import release
8 from IPython.utils.py3compat import builtin_mod, PY3
8 from IPython.utils.py3compat import builtin_mod, PY3
9 from IPython.utils.tokenutil import token_at_cursor, line_at_cursor
9 from IPython.utils.tokenutil import token_at_cursor, line_at_cursor
10 from IPython.utils.traitlets import Instance, Type, Any, List
10 from IPython.utils.traitlets import Instance, Type, Any, List
11 from IPython.utils.decorators import undoc
11 from IPython.utils.decorators import undoc
12
12
13 from ..comm import CommManager
13 from ..comm import CommManager
14 from .kernelbase import Kernel as KernelBase
14 from .kernelbase import Kernel as KernelBase
15 from .serialize import serialize_object, unpack_apply_message
15 from .serialize import serialize_object, unpack_apply_message
16 from .zmqshell import ZMQInteractiveShell
16 from .zmqshell import ZMQInteractiveShell
17
17
18
18
def lazy_import_handle_comm_opened(*args, **kwargs):
    """Proxy for ``Widget.handle_comm_opened`` that defers the widgets import.

    The import happens only when a comm is actually opened, so merely
    registering this handler does not import IPython.html.widgets.
    """
    from IPython.html.widgets import Widget
    Widget.handle_comm_opened(*args, **kwargs)
22
22
23
23
class IPythonKernel(KernelBase):
    """Kernel implementation backed by a ZMQInteractiveShell."""

    # The InteractiveShell this kernel drives; created in __init__.
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
                     allow_none=True)
    shell_class = Type(ZMQInteractiveShell)

    # user namespace module/dict; forwarded to the shell on change
    user_module = Any()
    def _user_module_changed(self, name, old, new):
        if self.shell is not None:
            self.shell.user_module = new

    user_ns = Instance(dict, args=None, allow_none=True)
    def _user_ns_changed(self, name, old, new):
        if self.shell is not None:
            self.shell.user_ns = new
            self.shell.init_user_ns()

    # A reference to the Python builtin 'raw_input' function.
    # (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3)
    _sys_raw_input = Any()
    _sys_eval_input = Any()
44
44
    def __init__(self, **kwargs):
        """Create the kernel, its shell, and wire up comm handling."""
        super(IPythonKernel, self).__init__(**kwargs)

        # Initialize the InteractiveShell subclass
        self.shell = self.shell_class.instance(parent=self,
            profile_dir = self.profile_dir,
            user_module = self.user_module,
            user_ns = self.user_ns,
            kernel = self,
        )
        # point the shell's publishers at this kernel's session/IOPub socket
        self.shell.displayhook.session = self.session
        self.shell.displayhook.pub_socket = self.iopub_socket
        self.shell.displayhook.topic = self._topic('execute_result')
        self.shell.display_pub.session = self.session
        self.shell.display_pub.pub_socket = self.iopub_socket
        self.shell.data_pub.session = self.session
        self.shell.data_pub.pub_socket = self.iopub_socket

        # TMP - hack while developing
        self.shell._reply_content = None

        self.comm_manager = CommManager(shell=self.shell, parent=self,
                                        kernel=self)
        # widgets import is deferred until a widget comm actually opens
        self.comm_manager.register_target('ipython.widget', lazy_import_handle_comm_opened)

        self.shell.configurables.append(self.comm_manager)
        # route comm messages on the shell channel to the comm manager
        comm_msg_types = [ 'comm_open', 'comm_msg', 'comm_close' ]
        for msg_type in comm_msg_types:
            self.shell_handlers[msg_type] = getattr(self.comm_manager, msg_type)
73 self.shell_handlers[msg_type] = getattr(self.comm_manager, msg_type)
73 self.shell_handlers[msg_type] = getattr(self.comm_manager, msg_type)
74
74
    # Default 'help_links' advertised in kernel_info_reply:
    # documentation shortcuts that frontends may display.
    help_links = List([
        {
            'text': "Python",
            'url': "http://docs.python.org/%i.%i" % sys.version_info[:2],
        },
        {
            'text': "IPython",
            'url': "http://ipython.org/documentation.html",
        },
        {
            'text': "NumPy",
            'url': "http://docs.scipy.org/doc/numpy/reference/",
        },
        {
            'text': "SciPy",
            'url': "http://docs.scipy.org/doc/scipy/reference/",
        },
        {
            'text': "Matplotlib",
            'url': "http://matplotlib.org/contents.html",
        },
        {
            'text': "SymPy",
            'url': "http://docs.sympy.org/latest/index.html",
        },
        {
            'text': "pandas",
            'url': "http://pandas.pydata.org/pandas-docs/stable/",
        },
    ])
105
105
    # Kernel info fields (reported in kernel_info_reply)
    implementation = 'ipython'
    implementation_version = release.version
    # Describes the Python interpreter the kernel is running in.
    language_info = {
        'name': 'python',
        'version': sys.version.split()[0],
        'mimetype': 'text/x-python',
        'codemirror_mode': {'name': 'ipython',
                            'version': sys.version_info[0]},
        'pygments_lexer': 'ipython%d' % (3 if PY3 else 2),
        'nbconvert_exporter': 'python',
        'file_extension': '.py'
    }
    @property
    def banner(self):
        # delegate the kernel banner to the shell
        return self.shell.banner
122
122
    def start(self):
        """Start the kernel, clearing any pending shell exit first."""
        self.shell.exit_now = False
        super(IPythonKernel, self).start()
126
126
    def set_parent(self, ident, parent):
        """Overridden from parent to tell the display hook and output streams
        about the parent message.
        """
        super(IPythonKernel, self).set_parent(ident, parent)
        self.shell.set_parent(parent)
133
133
    def _forward_input(self, allow_stdin=False):
        """Forward raw_input and getpass to the current frontend.

        via input_request

        Saves the builtin input functions so _restore_input can put
        them back after execution.
        """
        self._allow_stdin = allow_stdin

        if PY3:
            self._sys_raw_input = builtin_mod.input
            builtin_mod.input = self.raw_input
        else:
            self._sys_raw_input = builtin_mod.raw_input
            self._sys_eval_input = builtin_mod.input
            builtin_mod.raw_input = self.raw_input
            # Python 2 input() == eval(raw_input())
            builtin_mod.input = lambda prompt='': eval(self.raw_input(prompt))
        self._save_getpass = getpass.getpass
        getpass.getpass = self.getpass
151
151
    def _restore_input(self):
        """Restore raw_input, getpass"""
        if PY3:
            builtin_mod.input = self._sys_raw_input
        else:
            builtin_mod.raw_input = self._sys_raw_input
            builtin_mod.input = self._sys_eval_input

        getpass.getpass = self._save_getpass
161
161
    @property
    def execution_count(self):
        # the shell's counter is the single source of truth
        return self.shell.execution_count

    @execution_count.setter
    def execution_count(self, value):
        # Ignore the incrementing done by KernelBase, in favour of our shell's
        # execution counter.
        pass
171
171
    def do_execute(self, code, silent, store_history=True,
                   user_expressions=None, allow_stdin=False):
        """Run ``code`` in the shell and build the execute_reply content.

        Returns a dict with status, execution_count, user_expressions,
        payload, and (on error) traceback/engine_info fields.
        """
        shell = self.shell # we'll need this a lot here

        self._forward_input(allow_stdin)

        reply_content = {}
        # FIXME: the shell calls the exception handler itself.
        shell._reply_content = None
        try:
            shell.run_cell(code, store_history=store_history, silent=silent)
        except:
            status = u'error'
            # FIXME: this code right now isn't being used yet by default,
            # because the run_cell() call above directly fires off exception
            # reporting. This code, therefore, is only active in the scenario
            # where runlines itself has an unhandled exception. We need to
            # uniformize this, for all exception construction to come from a
            # single location in the codebase.
            etype, evalue, tb = sys.exc_info()
            tb_list = traceback.format_exception(etype, evalue, tb)
            reply_content.update(shell._showtraceback(etype, evalue, tb_list))
        else:
            status = u'ok'
        finally:
            # always undo the raw_input/getpass hijack, even on error
            self._restore_input()

        reply_content[u'status'] = status

        # Return the execution counter so clients can display prompts
        reply_content['execution_count'] = shell.execution_count - 1

        # FIXME - fish exception info out of shell, possibly left there by
        # runlines. We'll need to clean up this logic later.
        if shell._reply_content is not None:
            reply_content.update(shell._reply_content)
            e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='execute')
            reply_content['engine_info'] = e_info
            # reset after use
            shell._reply_content = None

        if 'traceback' in reply_content:
            self.log.info("Exception in execute request:\n%s", '\n'.join(reply_content['traceback']))


        # At this point, we can tell whether the main code execution succeeded
        # or not. If it did, we proceed to evaluate user_expressions
        if reply_content['status'] == 'ok':
            reply_content[u'user_expressions'] = \
                shell.user_expressions(user_expressions or {})
        else:
            # If there was an error, don't even try to compute expressions
            reply_content[u'user_expressions'] = {}

        # Payloads should be retrieved regardless of outcome, so we can both
        # recover partial output (that could have been generated early in a
        # block, before an error) and clear the payload system always.
        reply_content[u'payload'] = shell.payload_manager.read_payload()
        # Be agressive about clearing the payload because we don't want
        # it to sit in memory until the next execute_request comes in.
        shell.payload_manager.clear_payload()

        return reply_content
235
235
236 def do_complete(self, code, cursor_pos):
236 def do_complete(self, code, cursor_pos):
237 # FIXME: IPython completers currently assume single line,
237 # FIXME: IPython completers currently assume single line,
238 # but completion messages give multi-line context
238 # but completion messages give multi-line context
239 # For now, extract line from cell, based on cursor_pos:
239 # For now, extract line from cell, based on cursor_pos:
240 if cursor_pos is None:
240 if cursor_pos is None:
241 cursor_pos = len(code)
241 cursor_pos = len(code)
242 line, offset = line_at_cursor(code, cursor_pos)
242 line, offset = line_at_cursor(code, cursor_pos)
243 line_cursor = cursor_pos - offset
243 line_cursor = cursor_pos - offset
244
244
245 txt, matches = self.shell.complete('', line, line_cursor)
245 txt, matches = self.shell.complete('', line, line_cursor)
246 return {'matches' : matches,
246 return {'matches' : matches,
247 'cursor_end' : cursor_pos,
247 'cursor_end' : cursor_pos,
248 'cursor_start' : cursor_pos - len(txt),
248 'cursor_start' : cursor_pos - len(txt),
249 'metadata' : {},
249 'metadata' : {},
250 'status' : 'ok'}
250 'status' : 'ok'}
251
251
    def do_inspect(self, code, cursor_pos, detail_level=0):
        """Build the inspect_reply content for the token at ``cursor_pos``."""
        name = token_at_cursor(code, cursor_pos)
        info = self.shell.object_inspect(name)

        reply_content = {'status' : 'ok'}
        reply_content['data'] = data = {}
        reply_content['metadata'] = {}
        reply_content['found'] = info['found']
        if info['found']:
            # only a plain-text rendering is provided
            info_text = self.shell.object_inspect_text(
                name,
                detail_level=detail_level,
            )
            data['text/plain'] = info_text

        return reply_content
268
268
269 def do_history(self, hist_access_type, output, raw, session=None, start=None,
269 def do_history(self, hist_access_type, output, raw, session=None, start=None,
270 stop=None, n=None, pattern=None, unique=False):
270 stop=None, n=None, pattern=None, unique=False):
271 if hist_access_type == 'tail':
271 if hist_access_type == 'tail':
272 hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
272 hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
273 include_latest=True)
273 include_latest=True)
274
274
275 elif hist_access_type == 'range':
275 elif hist_access_type == 'range':
276 hist = self.shell.history_manager.get_range(session, start, stop,
276 hist = self.shell.history_manager.get_range(session, start, stop,
277 raw=raw, output=output)
277 raw=raw, output=output)
278
278
279 elif hist_access_type == 'search':
279 elif hist_access_type == 'search':
280 hist = self.shell.history_manager.search(
280 hist = self.shell.history_manager.search(
281 pattern, raw=raw, output=output, n=n, unique=unique)
281 pattern, raw=raw, output=output, n=n, unique=unique)
282 else:
282 else:
283 hist = []
283 hist = []
284
284
285 return {'history' : list(hist)}
285 return {'history' : list(hist)}
286
286
287 def do_shutdown(self, restart):
287 def do_shutdown(self, restart):
288 self.shell.exit_now = True
288 self.shell.exit_now = True
289 return dict(status='ok', restart=restart)
289 return dict(status='ok', restart=restart)
290
290
291 def do_is_complete(self, code):
291 def do_is_complete(self, code):
292 status, indent_spaces = self.shell.input_transformer_manager.check_complete(code)
292 status, indent_spaces = self.shell.input_transformer_manager.check_complete(code)
293 r = {'status': status}
293 r = {'status': status}
294 if status == 'incomplete':
294 if status == 'incomplete':
295 r['indent'] = ' ' * indent_spaces
295 r['indent'] = ' ' * indent_spaces
296 return r
296 return r
297
297
298 def do_apply(self, content, bufs, msg_id, reply_metadata):
298 def do_apply(self, content, bufs, msg_id, reply_metadata):
299 shell = self.shell
299 shell = self.shell
300 try:
300 try:
301 working = shell.user_ns
301 working = shell.user_ns
302
302
303 prefix = "_"+str(msg_id).replace("-","")+"_"
303 prefix = "_"+str(msg_id).replace("-","")+"_"
304
304
305 f,args,kwargs = unpack_apply_message(bufs, working, copy=False)
305 f,args,kwargs = unpack_apply_message(bufs, working, copy=False)
306
306
307 fname = getattr(f, '__name__', 'f')
307 fname = getattr(f, '__name__', 'f')
308
308
309 fname = prefix+"f"
309 fname = prefix+"f"
310 argname = prefix+"args"
310 argname = prefix+"args"
311 kwargname = prefix+"kwargs"
311 kwargname = prefix+"kwargs"
312 resultname = prefix+"result"
312 resultname = prefix+"result"
313
313
314 ns = { fname : f, argname : args, kwargname : kwargs , resultname : None }
314 ns = { fname : f, argname : args, kwargname : kwargs , resultname : None }
315 # print ns
315 # print ns
316 working.update(ns)
316 working.update(ns)
317 code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname)
317 code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname)
318 try:
318 try:
319 exec(code, shell.user_global_ns, shell.user_ns)
319 exec(code, shell.user_global_ns, shell.user_ns)
320 result = working.get(resultname)
320 result = working.get(resultname)
321 finally:
321 finally:
322 for key in ns:
322 for key in ns:
323 working.pop(key)
323 working.pop(key)
324
324
325 result_buf = serialize_object(result,
325 result_buf = serialize_object(result,
326 buffer_threshold=self.session.buffer_threshold,
326 buffer_threshold=self.session.buffer_threshold,
327 item_threshold=self.session.item_threshold,
327 item_threshold=self.session.item_threshold,
328 )
328 )
329
329
330 except:
330 except:
331 # invoke IPython traceback formatting
331 # invoke IPython traceback formatting
332 shell.showtraceback()
332 shell.showtraceback()
333 # FIXME - fish exception info out of shell, possibly left there by
333 # FIXME - fish exception info out of shell, possibly left there by
334 # run_code. We'll need to clean up this logic later.
334 # run_code. We'll need to clean up this logic later.
335 reply_content = {}
335 reply_content = {}
336 if shell._reply_content is not None:
336 if shell._reply_content is not None:
337 reply_content.update(shell._reply_content)
337 reply_content.update(shell._reply_content)
338 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply')
338 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply')
339 reply_content['engine_info'] = e_info
339 reply_content['engine_info'] = e_info
340 # reset after use
340 # reset after use
341 shell._reply_content = None
341 shell._reply_content = None
342
342
343 self.send_response(self.iopub_socket, u'error', reply_content,
343 self.send_response(self.iopub_socket, u'error', reply_content,
344 ident=self._topic('error'))
344 ident=self._topic('error'))
345 self.log.info("Exception in apply request:\n%s", '\n'.join(reply_content['traceback']))
345 self.log.info("Exception in apply request:\n%s", '\n'.join(reply_content['traceback']))
346 result_buf = []
346 result_buf = []
347
347
348 if reply_content['ename'] == 'UnmetDependency':
348 if reply_content['ename'] == 'UnmetDependency':
349 reply_metadata['dependencies_met'] = False
349 reply_metadata['dependencies_met'] = False
350 else:
350 else:
351 reply_content = {'status' : 'ok'}
351 reply_content = {'status' : 'ok'}
352
352
353 return reply_content, result_buf
353 return reply_content, result_buf
354
354
355 def do_clear(self):
355 def do_clear(self):
356 self.shell.reset(False)
356 self.shell.reset(False)
357 return dict(status='ok')
357 return dict(status='ok')
358
358
359
359
360 # This exists only for backwards compatibility - use IPythonKernel instead
360 # This exists only for backwards compatibility - use IPythonKernel instead
361
361
362 @undoc
362 @undoc
363 class Kernel(IPythonKernel):
363 class Kernel(IPythonKernel):
364 def __init__(self, *args, **kwargs):
364 def __init__(self, *args, **kwargs):
365 import warnings
365 import warnings
366 warnings.warn('Kernel is a deprecated alias of IPython.kernel.zmq.ipkernel.IPythonKernel',
366 warnings.warn('Kernel is a deprecated alias of ipython_kernel.zmq.ipkernel.IPythonKernel',
367 DeprecationWarning)
367 DeprecationWarning)
368 super(Kernel, self).__init__(*args, **kwargs)
368 super(Kernel, self).__init__(*args, **kwargs)
@@ -1,387 +1,387 b''
1 """An Application for launching a kernel"""
1 """An Application for launching a kernel"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from __future__ import print_function
6 from __future__ import print_function
7
7
8 import atexit
8 import atexit
9 import os
9 import os
10 import sys
10 import sys
11 import signal
11 import signal
12
12
13 import zmq
13 import zmq
14 from zmq.eventloop import ioloop
14 from zmq.eventloop import ioloop
15 from zmq.eventloop.zmqstream import ZMQStream
15 from zmq.eventloop.zmqstream import ZMQStream
16
16
17 from IPython.core.ultratb import FormattedTB
17 from IPython.core.ultratb import FormattedTB
18 from IPython.core.application import (
18 from IPython.core.application import (
19 BaseIPythonApplication, base_flags, base_aliases, catch_config_error
19 BaseIPythonApplication, base_flags, base_aliases, catch_config_error
20 )
20 )
21 from IPython.core.profiledir import ProfileDir
21 from IPython.core.profiledir import ProfileDir
22 from IPython.core.shellapp import (
22 from IPython.core.shellapp import (
23 InteractiveShellApp, shell_flags, shell_aliases
23 InteractiveShellApp, shell_flags, shell_aliases
24 )
24 )
25 from IPython.utils import io
25 from IPython.utils import io
26 from IPython.utils.path import filefind
26 from IPython.utils.path import filefind
27 from IPython.utils.traitlets import (
27 from IPython.utils.traitlets import (
28 Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type,
28 Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type,
29 )
29 )
30 from IPython.utils.importstring import import_item
30 from IPython.utils.importstring import import_item
31 from IPython.kernel import write_connection_file
31 from jupyter_client import write_connection_file
32 from IPython.kernel.connect import ConnectionFileMixin
32 from ipython_kernel.connect import ConnectionFileMixin
33
33
34 # local imports
34 # local imports
35 from .heartbeat import Heartbeat
35 from .heartbeat import Heartbeat
36 from .ipkernel import IPythonKernel
36 from .ipkernel import IPythonKernel
37 from .parentpoller import ParentPollerUnix, ParentPollerWindows
37 from .parentpoller import ParentPollerUnix, ParentPollerWindows
38 from .session import (
38 from .session import (
39 Session, session_flags, session_aliases,
39 Session, session_flags, session_aliases,
40 )
40 )
41 from .zmqshell import ZMQInteractiveShell
41 from .zmqshell import ZMQInteractiveShell
42
42
43 #-----------------------------------------------------------------------------
43 #-----------------------------------------------------------------------------
44 # Flags and Aliases
44 # Flags and Aliases
45 #-----------------------------------------------------------------------------
45 #-----------------------------------------------------------------------------
46
46
47 kernel_aliases = dict(base_aliases)
47 kernel_aliases = dict(base_aliases)
48 kernel_aliases.update({
48 kernel_aliases.update({
49 'ip' : 'IPKernelApp.ip',
49 'ip' : 'IPKernelApp.ip',
50 'hb' : 'IPKernelApp.hb_port',
50 'hb' : 'IPKernelApp.hb_port',
51 'shell' : 'IPKernelApp.shell_port',
51 'shell' : 'IPKernelApp.shell_port',
52 'iopub' : 'IPKernelApp.iopub_port',
52 'iopub' : 'IPKernelApp.iopub_port',
53 'stdin' : 'IPKernelApp.stdin_port',
53 'stdin' : 'IPKernelApp.stdin_port',
54 'control' : 'IPKernelApp.control_port',
54 'control' : 'IPKernelApp.control_port',
55 'f' : 'IPKernelApp.connection_file',
55 'f' : 'IPKernelApp.connection_file',
56 'transport': 'IPKernelApp.transport',
56 'transport': 'IPKernelApp.transport',
57 })
57 })
58
58
59 kernel_flags = dict(base_flags)
59 kernel_flags = dict(base_flags)
60 kernel_flags.update({
60 kernel_flags.update({
61 'no-stdout' : (
61 'no-stdout' : (
62 {'IPKernelApp' : {'no_stdout' : True}},
62 {'IPKernelApp' : {'no_stdout' : True}},
63 "redirect stdout to the null device"),
63 "redirect stdout to the null device"),
64 'no-stderr' : (
64 'no-stderr' : (
65 {'IPKernelApp' : {'no_stderr' : True}},
65 {'IPKernelApp' : {'no_stderr' : True}},
66 "redirect stderr to the null device"),
66 "redirect stderr to the null device"),
67 'pylab' : (
67 'pylab' : (
68 {'IPKernelApp' : {'pylab' : 'auto'}},
68 {'IPKernelApp' : {'pylab' : 'auto'}},
69 """Pre-load matplotlib and numpy for interactive use with
69 """Pre-load matplotlib and numpy for interactive use with
70 the default matplotlib backend."""),
70 the default matplotlib backend."""),
71 })
71 })
72
72
73 # inherit flags&aliases for any IPython shell apps
73 # inherit flags&aliases for any IPython shell apps
74 kernel_aliases.update(shell_aliases)
74 kernel_aliases.update(shell_aliases)
75 kernel_flags.update(shell_flags)
75 kernel_flags.update(shell_flags)
76
76
77 # inherit flags&aliases for Sessions
77 # inherit flags&aliases for Sessions
78 kernel_aliases.update(session_aliases)
78 kernel_aliases.update(session_aliases)
79 kernel_flags.update(session_flags)
79 kernel_flags.update(session_flags)
80
80
81 _ctrl_c_message = """\
81 _ctrl_c_message = """\
82 NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work.
82 NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work.
83
83
84 To exit, you will have to explicitly quit this process, by either sending
84 To exit, you will have to explicitly quit this process, by either sending
85 "quit" from a client, or using Ctrl-\\ in UNIX-like environments.
85 "quit" from a client, or using Ctrl-\\ in UNIX-like environments.
86
86
87 To read more about this, see https://github.com/ipython/ipython/issues/2049
87 To read more about this, see https://github.com/ipython/ipython/issues/2049
88
88
89 """
89 """
90
90
91 #-----------------------------------------------------------------------------
91 #-----------------------------------------------------------------------------
92 # Application class for starting an IPython Kernel
92 # Application class for starting an IPython Kernel
93 #-----------------------------------------------------------------------------
93 #-----------------------------------------------------------------------------
94
94
95 class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,
95 class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,
96 ConnectionFileMixin):
96 ConnectionFileMixin):
97 name='ipython-kernel'
97 name='ipython-kernel'
98 aliases = Dict(kernel_aliases)
98 aliases = Dict(kernel_aliases)
99 flags = Dict(kernel_flags)
99 flags = Dict(kernel_flags)
100 classes = [IPythonKernel, ZMQInteractiveShell, ProfileDir, Session]
100 classes = [IPythonKernel, ZMQInteractiveShell, ProfileDir, Session]
101 # the kernel class, as an importstring
101 # the kernel class, as an importstring
102 kernel_class = Type('IPython.kernel.zmq.ipkernel.IPythonKernel', config=True,
102 kernel_class = Type('ipython_kernel.zmq.ipkernel.IPythonKernel', config=True,
103 klass='IPython.kernel.zmq.kernelbase.Kernel',
103 klass='ipython_kernel.zmq.kernelbase.Kernel',
104 help="""The Kernel subclass to be used.
104 help="""The Kernel subclass to be used.
105
105
106 This should allow easy re-use of the IPKernelApp entry point
106 This should allow easy re-use of the IPKernelApp entry point
107 to configure and launch kernels other than IPython's own.
107 to configure and launch kernels other than IPython's own.
108 """)
108 """)
109 kernel = Any()
109 kernel = Any()
110 poller = Any() # don't restrict this even though current pollers are all Threads
110 poller = Any() # don't restrict this even though current pollers are all Threads
111 heartbeat = Instance(Heartbeat, allow_none=True)
111 heartbeat = Instance(Heartbeat, allow_none=True)
112 ports = Dict()
112 ports = Dict()
113
113
114 # connection info:
114 # connection info:
115
115
116 @property
116 @property
117 def abs_connection_file(self):
117 def abs_connection_file(self):
118 if os.path.basename(self.connection_file) == self.connection_file:
118 if os.path.basename(self.connection_file) == self.connection_file:
119 return os.path.join(self.profile_dir.security_dir, self.connection_file)
119 return os.path.join(self.profile_dir.security_dir, self.connection_file)
120 else:
120 else:
121 return self.connection_file
121 return self.connection_file
122
122
123
123
124 # streams, etc.
124 # streams, etc.
125 no_stdout = Bool(False, config=True, help="redirect stdout to the null device")
125 no_stdout = Bool(False, config=True, help="redirect stdout to the null device")
126 no_stderr = Bool(False, config=True, help="redirect stderr to the null device")
126 no_stderr = Bool(False, config=True, help="redirect stderr to the null device")
127 outstream_class = DottedObjectName('IPython.kernel.zmq.iostream.OutStream',
127 outstream_class = DottedObjectName('ipython_kernel.zmq.iostream.OutStream',
128 config=True, help="The importstring for the OutStream factory")
128 config=True, help="The importstring for the OutStream factory")
129 displayhook_class = DottedObjectName('IPython.kernel.zmq.displayhook.ZMQDisplayHook',
129 displayhook_class = DottedObjectName('ipython_kernel.zmq.displayhook.ZMQDisplayHook',
130 config=True, help="The importstring for the DisplayHook factory")
130 config=True, help="The importstring for the DisplayHook factory")
131
131
132 # polling
132 # polling
133 parent_handle = Integer(int(os.environ.get('JPY_PARENT_PID') or 0), config=True,
133 parent_handle = Integer(int(os.environ.get('JPY_PARENT_PID') or 0), config=True,
134 help="""kill this process if its parent dies. On Windows, the argument
134 help="""kill this process if its parent dies. On Windows, the argument
135 specifies the HANDLE of the parent process, otherwise it is simply boolean.
135 specifies the HANDLE of the parent process, otherwise it is simply boolean.
136 """)
136 """)
137 interrupt = Integer(int(os.environ.get('JPY_INTERRUPT_EVENT') or 0), config=True,
137 interrupt = Integer(int(os.environ.get('JPY_INTERRUPT_EVENT') or 0), config=True,
138 help="""ONLY USED ON WINDOWS
138 help="""ONLY USED ON WINDOWS
139 Interrupt this process when the parent is signaled.
139 Interrupt this process when the parent is signaled.
140 """)
140 """)
141
141
142 def init_crash_handler(self):
142 def init_crash_handler(self):
143 # Install minimal exception handling
143 # Install minimal exception handling
144 sys.excepthook = FormattedTB(mode='Verbose', color_scheme='NoColor',
144 sys.excepthook = FormattedTB(mode='Verbose', color_scheme='NoColor',
145 ostream=sys.__stdout__)
145 ostream=sys.__stdout__)
146
146
147 def init_poller(self):
147 def init_poller(self):
148 if sys.platform == 'win32':
148 if sys.platform == 'win32':
149 if self.interrupt or self.parent_handle:
149 if self.interrupt or self.parent_handle:
150 self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
150 self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
151 elif self.parent_handle:
151 elif self.parent_handle:
152 self.poller = ParentPollerUnix()
152 self.poller = ParentPollerUnix()
153
153
154 def _bind_socket(self, s, port):
154 def _bind_socket(self, s, port):
155 iface = '%s://%s' % (self.transport, self.ip)
155 iface = '%s://%s' % (self.transport, self.ip)
156 if self.transport == 'tcp':
156 if self.transport == 'tcp':
157 if port <= 0:
157 if port <= 0:
158 port = s.bind_to_random_port(iface)
158 port = s.bind_to_random_port(iface)
159 else:
159 else:
160 s.bind("tcp://%s:%i" % (self.ip, port))
160 s.bind("tcp://%s:%i" % (self.ip, port))
161 elif self.transport == 'ipc':
161 elif self.transport == 'ipc':
162 if port <= 0:
162 if port <= 0:
163 port = 1
163 port = 1
164 path = "%s-%i" % (self.ip, port)
164 path = "%s-%i" % (self.ip, port)
165 while os.path.exists(path):
165 while os.path.exists(path):
166 port = port + 1
166 port = port + 1
167 path = "%s-%i" % (self.ip, port)
167 path = "%s-%i" % (self.ip, port)
168 else:
168 else:
169 path = "%s-%i" % (self.ip, port)
169 path = "%s-%i" % (self.ip, port)
170 s.bind("ipc://%s" % path)
170 s.bind("ipc://%s" % path)
171 return port
171 return port
172
172
173 def write_connection_file(self):
173 def write_connection_file(self):
174 """write connection info to JSON file"""
174 """write connection info to JSON file"""
175 cf = self.abs_connection_file
175 cf = self.abs_connection_file
176 self.log.debug("Writing connection file: %s", cf)
176 self.log.debug("Writing connection file: %s", cf)
177 write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
177 write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
178 shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
178 shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
179 iopub_port=self.iopub_port, control_port=self.control_port)
179 iopub_port=self.iopub_port, control_port=self.control_port)
180
180
181 def cleanup_connection_file(self):
181 def cleanup_connection_file(self):
182 cf = self.abs_connection_file
182 cf = self.abs_connection_file
183 self.log.debug("Cleaning up connection file: %s", cf)
183 self.log.debug("Cleaning up connection file: %s", cf)
184 try:
184 try:
185 os.remove(cf)
185 os.remove(cf)
186 except (IOError, OSError):
186 except (IOError, OSError):
187 pass
187 pass
188
188
189 self.cleanup_ipc_files()
189 self.cleanup_ipc_files()
190
190
191 def init_connection_file(self):
191 def init_connection_file(self):
192 if not self.connection_file:
192 if not self.connection_file:
193 self.connection_file = "kernel-%s.json"%os.getpid()
193 self.connection_file = "kernel-%s.json"%os.getpid()
194 try:
194 try:
195 self.connection_file = filefind(self.connection_file, ['.', self.profile_dir.security_dir])
195 self.connection_file = filefind(self.connection_file, ['.', self.profile_dir.security_dir])
196 except IOError:
196 except IOError:
197 self.log.debug("Connection file not found: %s", self.connection_file)
197 self.log.debug("Connection file not found: %s", self.connection_file)
198 # This means I own it, so I will clean it up:
198 # This means I own it, so I will clean it up:
199 atexit.register(self.cleanup_connection_file)
199 atexit.register(self.cleanup_connection_file)
200 return
200 return
201 try:
201 try:
202 self.load_connection_file()
202 self.load_connection_file()
203 except Exception:
203 except Exception:
204 self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
204 self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
205 self.exit(1)
205 self.exit(1)
206
206
207 def init_sockets(self):
207 def init_sockets(self):
208 # Create a context, a session, and the kernel sockets.
208 # Create a context, a session, and the kernel sockets.
209 self.log.info("Starting the kernel at pid: %i", os.getpid())
209 self.log.info("Starting the kernel at pid: %i", os.getpid())
210 context = zmq.Context.instance()
210 context = zmq.Context.instance()
211 # Uncomment this to try closing the context.
211 # Uncomment this to try closing the context.
212 # atexit.register(context.term)
212 # atexit.register(context.term)
213
213
214 self.shell_socket = context.socket(zmq.ROUTER)
214 self.shell_socket = context.socket(zmq.ROUTER)
215 self.shell_socket.linger = 1000
215 self.shell_socket.linger = 1000
216 self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
216 self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
217 self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)
217 self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)
218
218
219 self.iopub_socket = context.socket(zmq.PUB)
219 self.iopub_socket = context.socket(zmq.PUB)
220 self.iopub_socket.linger = 1000
220 self.iopub_socket.linger = 1000
221 self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
221 self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
222 self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
222 self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
223
223
224 self.stdin_socket = context.socket(zmq.ROUTER)
224 self.stdin_socket = context.socket(zmq.ROUTER)
225 self.stdin_socket.linger = 1000
225 self.stdin_socket.linger = 1000
226 self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
226 self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
227 self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)
227 self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)
228
228
229 self.control_socket = context.socket(zmq.ROUTER)
229 self.control_socket = context.socket(zmq.ROUTER)
230 self.control_socket.linger = 1000
230 self.control_socket.linger = 1000
231 self.control_port = self._bind_socket(self.control_socket, self.control_port)
231 self.control_port = self._bind_socket(self.control_socket, self.control_port)
232 self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
232 self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
233
233
234 def init_heartbeat(self):
234 def init_heartbeat(self):
235 """start the heart beating"""
235 """start the heart beating"""
236 # heartbeat doesn't share context, because it mustn't be blocked
236 # heartbeat doesn't share context, because it mustn't be blocked
237 # by the GIL, which is accessed by libzmq when freeing zero-copy messages
237 # by the GIL, which is accessed by libzmq when freeing zero-copy messages
238 hb_ctx = zmq.Context()
238 hb_ctx = zmq.Context()
239 self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
239 self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
240 self.hb_port = self.heartbeat.port
240 self.hb_port = self.heartbeat.port
241 self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
241 self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
242 self.heartbeat.start()
242 self.heartbeat.start()
243
243
244 def log_connection_info(self):
244 def log_connection_info(self):
245 """display connection info, and store ports"""
245 """display connection info, and store ports"""
246 basename = os.path.basename(self.connection_file)
246 basename = os.path.basename(self.connection_file)
247 if basename == self.connection_file or \
247 if basename == self.connection_file or \
248 os.path.dirname(self.connection_file) == self.profile_dir.security_dir:
248 os.path.dirname(self.connection_file) == self.profile_dir.security_dir:
249 # use shortname
249 # use shortname
250 tail = basename
250 tail = basename
251 if self.profile != 'default':
251 if self.profile != 'default':
252 tail += " --profile %s" % self.profile
252 tail += " --profile %s" % self.profile
253 else:
253 else:
254 tail = self.connection_file
254 tail = self.connection_file
255 lines = [
255 lines = [
256 "To connect another client to this kernel, use:",
256 "To connect another client to this kernel, use:",
257 " --existing %s" % tail,
257 " --existing %s" % tail,
258 ]
258 ]
259 # log connection info
259 # log connection info
260 # info-level, so often not shown.
260 # info-level, so often not shown.
261 # frontends should use the %connect_info magic
261 # frontends should use the %connect_info magic
262 # to see the connection info
262 # to see the connection info
263 for line in lines:
263 for line in lines:
264 self.log.info(line)
264 self.log.info(line)
265 # also raw print to the terminal if no parent_handle (`ipython kernel`)
265 # also raw print to the terminal if no parent_handle (`ipython kernel`)
266 if not self.parent_handle:
266 if not self.parent_handle:
267 io.rprint(_ctrl_c_message)
267 io.rprint(_ctrl_c_message)
268 for line in lines:
268 for line in lines:
269 io.rprint(line)
269 io.rprint(line)
270
270
271 self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
271 self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
272 stdin=self.stdin_port, hb=self.hb_port,
272 stdin=self.stdin_port, hb=self.hb_port,
273 control=self.control_port)
273 control=self.control_port)
274
274
275 def init_blackhole(self):
275 def init_blackhole(self):
276 """redirects stdout/stderr to devnull if necessary"""
276 """redirects stdout/stderr to devnull if necessary"""
277 if self.no_stdout or self.no_stderr:
277 if self.no_stdout or self.no_stderr:
278 blackhole = open(os.devnull, 'w')
278 blackhole = open(os.devnull, 'w')
279 if self.no_stdout:
279 if self.no_stdout:
280 sys.stdout = sys.__stdout__ = blackhole
280 sys.stdout = sys.__stdout__ = blackhole
281 if self.no_stderr:
281 if self.no_stderr:
282 sys.stderr = sys.__stderr__ = blackhole
282 sys.stderr = sys.__stderr__ = blackhole
283
283
284 def init_io(self):
284 def init_io(self):
285 """Redirect input streams and set a display hook."""
285 """Redirect input streams and set a display hook."""
286 if self.outstream_class:
286 if self.outstream_class:
287 outstream_factory = import_item(str(self.outstream_class))
287 outstream_factory = import_item(str(self.outstream_class))
288 sys.stdout = outstream_factory(self.session, self.iopub_socket, u'stdout')
288 sys.stdout = outstream_factory(self.session, self.iopub_socket, u'stdout')
289 sys.stderr = outstream_factory(self.session, self.iopub_socket, u'stderr')
289 sys.stderr = outstream_factory(self.session, self.iopub_socket, u'stderr')
290 if self.displayhook_class:
290 if self.displayhook_class:
291 displayhook_factory = import_item(str(self.displayhook_class))
291 displayhook_factory = import_item(str(self.displayhook_class))
292 sys.displayhook = displayhook_factory(self.session, self.iopub_socket)
292 sys.displayhook = displayhook_factory(self.session, self.iopub_socket)
293
293
294 def init_signal(self):
294 def init_signal(self):
295 signal.signal(signal.SIGINT, signal.SIG_IGN)
295 signal.signal(signal.SIGINT, signal.SIG_IGN)
296
296
297 def init_kernel(self):
297 def init_kernel(self):
298 """Create the Kernel object itself"""
298 """Create the Kernel object itself"""
299 shell_stream = ZMQStream(self.shell_socket)
299 shell_stream = ZMQStream(self.shell_socket)
300 control_stream = ZMQStream(self.control_socket)
300 control_stream = ZMQStream(self.control_socket)
301
301
302 kernel_factory = self.kernel_class.instance
302 kernel_factory = self.kernel_class.instance
303
303
304 kernel = kernel_factory(parent=self, session=self.session,
304 kernel = kernel_factory(parent=self, session=self.session,
305 shell_streams=[shell_stream, control_stream],
305 shell_streams=[shell_stream, control_stream],
306 iopub_socket=self.iopub_socket,
306 iopub_socket=self.iopub_socket,
307 stdin_socket=self.stdin_socket,
307 stdin_socket=self.stdin_socket,
308 log=self.log,
308 log=self.log,
309 profile_dir=self.profile_dir,
309 profile_dir=self.profile_dir,
310 user_ns=self.user_ns,
310 user_ns=self.user_ns,
311 )
311 )
312 kernel.record_ports(self.ports)
312 kernel.record_ports(self.ports)
313 self.kernel = kernel
313 self.kernel = kernel
314
314
315 def init_gui_pylab(self):
315 def init_gui_pylab(self):
316 """Enable GUI event loop integration, taking pylab into account."""
316 """Enable GUI event loop integration, taking pylab into account."""
317
317
318 # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
318 # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
319 # to ensure that any exception is printed straight to stderr.
319 # to ensure that any exception is printed straight to stderr.
320 # Normally _showtraceback associates the reply with an execution,
320 # Normally _showtraceback associates the reply with an execution,
321 # which means frontends will never draw it, as this exception
321 # which means frontends will never draw it, as this exception
322 # is not associated with any execute request.
322 # is not associated with any execute request.
323
323
324 shell = self.shell
324 shell = self.shell
325 _showtraceback = shell._showtraceback
325 _showtraceback = shell._showtraceback
326 try:
326 try:
327 # replace error-sending traceback with stderr
327 # replace error-sending traceback with stderr
328 def print_tb(etype, evalue, stb):
328 def print_tb(etype, evalue, stb):
329 print ("GUI event loop or pylab initialization failed",
329 print ("GUI event loop or pylab initialization failed",
330 file=io.stderr)
330 file=io.stderr)
331 print (shell.InteractiveTB.stb2text(stb), file=io.stderr)
331 print (shell.InteractiveTB.stb2text(stb), file=io.stderr)
332 shell._showtraceback = print_tb
332 shell._showtraceback = print_tb
333 InteractiveShellApp.init_gui_pylab(self)
333 InteractiveShellApp.init_gui_pylab(self)
334 finally:
334 finally:
335 shell._showtraceback = _showtraceback
335 shell._showtraceback = _showtraceback
336
336
337 def init_shell(self):
337 def init_shell(self):
338 self.shell = getattr(self.kernel, 'shell', None)
338 self.shell = getattr(self.kernel, 'shell', None)
339 if self.shell:
339 if self.shell:
340 self.shell.configurables.append(self)
340 self.shell.configurables.append(self)
341
341
342 @catch_config_error
342 @catch_config_error
343 def initialize(self, argv=None):
343 def initialize(self, argv=None):
344 super(IPKernelApp, self).initialize(argv)
344 super(IPKernelApp, self).initialize(argv)
345 self.init_blackhole()
345 self.init_blackhole()
346 self.init_connection_file()
346 self.init_connection_file()
347 self.init_poller()
347 self.init_poller()
348 self.init_sockets()
348 self.init_sockets()
349 self.init_heartbeat()
349 self.init_heartbeat()
350 # writing/displaying connection info must be *after* init_sockets/heartbeat
350 # writing/displaying connection info must be *after* init_sockets/heartbeat
351 self.log_connection_info()
351 self.log_connection_info()
352 self.write_connection_file()
352 self.write_connection_file()
353 self.init_io()
353 self.init_io()
354 self.init_signal()
354 self.init_signal()
355 self.init_kernel()
355 self.init_kernel()
356 # shell init steps
356 # shell init steps
357 self.init_path()
357 self.init_path()
358 self.init_shell()
358 self.init_shell()
359 if self.shell:
359 if self.shell:
360 self.init_gui_pylab()
360 self.init_gui_pylab()
361 self.init_extensions()
361 self.init_extensions()
362 self.init_code()
362 self.init_code()
363 # flush stdout/stderr, so that anything written to these streams during
363 # flush stdout/stderr, so that anything written to these streams during
364 # initialization do not get associated with the first execution request
364 # initialization do not get associated with the first execution request
365 sys.stdout.flush()
365 sys.stdout.flush()
366 sys.stderr.flush()
366 sys.stderr.flush()
367
367
368 def start(self):
368 def start(self):
369 if self.poller is not None:
369 if self.poller is not None:
370 self.poller.start()
370 self.poller.start()
371 self.kernel.start()
371 self.kernel.start()
372 try:
372 try:
373 ioloop.IOLoop.instance().start()
373 ioloop.IOLoop.instance().start()
374 except KeyboardInterrupt:
374 except KeyboardInterrupt:
375 pass
375 pass
376
376
377 launch_new_instance = IPKernelApp.launch_instance
377 launch_new_instance = IPKernelApp.launch_instance
378
378
379 def main():
379 def main():
380 """Run an IPKernel as an application"""
380 """Run an IPKernel as an application"""
381 app = IPKernelApp.instance()
381 app = IPKernelApp.instance()
382 app.initialize()
382 app.initialize()
383 app.start()
383 app.start()
384
384
385
385
386 if __name__ == '__main__':
386 if __name__ == '__main__':
387 main()
387 main()
@@ -1,701 +1,701 b''
1 """Base class for a kernel that talks to frontends over 0MQ."""
1 """Base class for a kernel that talks to frontends over 0MQ."""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from __future__ import print_function
6 from __future__ import print_function
7
7
8 import sys
8 import sys
9 import time
9 import time
10 import logging
10 import logging
11 import uuid
11 import uuid
12
12
13 from datetime import datetime
13 from datetime import datetime
14 from signal import (
14 from signal import (
15 signal, default_int_handler, SIGINT
15 signal, default_int_handler, SIGINT
16 )
16 )
17
17
18 import zmq
18 import zmq
19 from zmq.eventloop import ioloop
19 from zmq.eventloop import ioloop
20 from zmq.eventloop.zmqstream import ZMQStream
20 from zmq.eventloop.zmqstream import ZMQStream
21
21
22 from IPython.config.configurable import SingletonConfigurable
22 from IPython.config.configurable import SingletonConfigurable
23 from IPython.core.error import StdinNotImplementedError
23 from IPython.core.error import StdinNotImplementedError
24 from IPython.core import release
24 from IPython.core import release
25 from IPython.utils import py3compat
25 from IPython.utils import py3compat
26 from IPython.utils.py3compat import unicode_type, string_types
26 from IPython.utils.py3compat import unicode_type, string_types
27 from IPython.utils.jsonutil import json_clean
27 from IPython.utils.jsonutil import json_clean
28 from IPython.utils.traitlets import (
28 from IPython.utils.traitlets import (
29 Any, Instance, Float, Dict, List, Set, Integer, Unicode, Bool,
29 Any, Instance, Float, Dict, List, Set, Integer, Unicode, Bool,
30 )
30 )
31
31
32 from .session import Session
32 from .session import Session
33
33
34
34
class Kernel(SingletonConfigurable):

    #---------------------------------------------------------------------------
    # Kernel interface
    #---------------------------------------------------------------------------

    # attribute to override with a GUI
    eventloop = Any(None)

    def _eventloop_changed(self, name, old, new):
        """schedule call to eventloop from IOLoop"""
        loop = ioloop.IOLoop.instance()
        loop.add_callback(self.enter_eventloop)

    session = Instance(Session, allow_none=True)
    profile_dir = Instance('IPython.core.profiledir.ProfileDir', allow_none=True)
    shell_streams = List()
    control_stream = Instance(ZMQStream, allow_none=True)
    iopub_socket = Instance(zmq.Socket, allow_none=True)
    stdin_socket = Instance(zmq.Socket, allow_none=True)
    log = Instance(logging.Logger, allow_none=True)

    # identities:
    int_id = Integer(-1)
    ident = Unicode()

    def _ident_default(self):
        # Fresh random identity per kernel instance.
        return unicode_type(uuid.uuid4())

    # This should be overridden by wrapper kernels that implement any real
    # language.
    language_info = {}

    # any links that should go in the help menu
    help_links = List()

    # Private interface

    _darwin_app_nap = Bool(True, config=True,
        help="""Whether to use appnope for compatiblity with OS X App Nap.

        Only affects OS X >= 10.9.
        """
    )

    # track associations with current request
    _allow_stdin = Bool(False)
    _parent_header = Dict()
    _parent_ident = Any(b'')
    # Time to sleep after flushing the stdout/err buffers in each execute
    # cycle.  While this introduces a hard limit on the minimal latency of the
    # execute cycle, it helps prevent output synchronization problems for
    # clients.
    # Units are in seconds.  The minimum zmq latency on local host is probably
    # ~150 microseconds, set this to 500us for now.  We may need to increase it
    # a little if it's not enough after more interactive testing.
    _execute_sleep = Float(0.0005, config=True)

    # Frequency of the kernel's event loop.
    # Units are in seconds, kernel subclasses for GUI toolkits may need to
    # adapt to milliseconds.
    _poll_interval = Float(0.05, config=True)

    # If the shutdown was requested over the network, we leave here the
    # necessary reply message so it can be sent by our registered atexit
    # handler.  This ensures that the reply is only sent to clients truly at
    # the end of our shutdown process (which happens after the underlying
    # IPython shell's own shutdown).
    _shutdown_message = None

    # This is a dict of port number that the kernel is listening on.  It is set
    # by record_ports and used by connect_request.
    _recorded_ports = Dict()

    # set of aborted msg_ids
    aborted = Set()

    # Track execution count here.  For IPython, we override this to use the
    # execution count we store in the shell.
    execution_count = 0
114
114
115
115
116 def __init__(self, **kwargs):
116 def __init__(self, **kwargs):
117 super(Kernel, self).__init__(**kwargs)
117 super(Kernel, self).__init__(**kwargs)
118
118
119 # Build dict of handlers for message types
119 # Build dict of handlers for message types
120 msg_types = [ 'execute_request', 'complete_request',
120 msg_types = [ 'execute_request', 'complete_request',
121 'inspect_request', 'history_request',
121 'inspect_request', 'history_request',
122 'kernel_info_request',
122 'kernel_info_request',
123 'connect_request', 'shutdown_request',
123 'connect_request', 'shutdown_request',
124 'apply_request', 'is_complete_request',
124 'apply_request', 'is_complete_request',
125 ]
125 ]
126 self.shell_handlers = {}
126 self.shell_handlers = {}
127 for msg_type in msg_types:
127 for msg_type in msg_types:
128 self.shell_handlers[msg_type] = getattr(self, msg_type)
128 self.shell_handlers[msg_type] = getattr(self, msg_type)
129
129
130 control_msg_types = msg_types + [ 'clear_request', 'abort_request' ]
130 control_msg_types = msg_types + [ 'clear_request', 'abort_request' ]
131 self.control_handlers = {}
131 self.control_handlers = {}
132 for msg_type in control_msg_types:
132 for msg_type in control_msg_types:
133 self.control_handlers[msg_type] = getattr(self, msg_type)
133 self.control_handlers[msg_type] = getattr(self, msg_type)
134
134
135
135
136 def dispatch_control(self, msg):
136 def dispatch_control(self, msg):
137 """dispatch control requests"""
137 """dispatch control requests"""
138 idents,msg = self.session.feed_identities(msg, copy=False)
138 idents,msg = self.session.feed_identities(msg, copy=False)
139 try:
139 try:
140 msg = self.session.deserialize(msg, content=True, copy=False)
140 msg = self.session.deserialize(msg, content=True, copy=False)
141 except:
141 except:
142 self.log.error("Invalid Control Message", exc_info=True)
142 self.log.error("Invalid Control Message", exc_info=True)
143 return
143 return
144
144
145 self.log.debug("Control received: %s", msg)
145 self.log.debug("Control received: %s", msg)
146
146
147 # Set the parent message for side effects.
147 # Set the parent message for side effects.
148 self.set_parent(idents, msg)
148 self.set_parent(idents, msg)
149 self._publish_status(u'busy')
149 self._publish_status(u'busy')
150
150
151 header = msg['header']
151 header = msg['header']
152 msg_type = header['msg_type']
152 msg_type = header['msg_type']
153
153
154 handler = self.control_handlers.get(msg_type, None)
154 handler = self.control_handlers.get(msg_type, None)
155 if handler is None:
155 if handler is None:
156 self.log.error("UNKNOWN CONTROL MESSAGE TYPE: %r", msg_type)
156 self.log.error("UNKNOWN CONTROL MESSAGE TYPE: %r", msg_type)
157 else:
157 else:
158 try:
158 try:
159 handler(self.control_stream, idents, msg)
159 handler(self.control_stream, idents, msg)
160 except Exception:
160 except Exception:
161 self.log.error("Exception in control handler:", exc_info=True)
161 self.log.error("Exception in control handler:", exc_info=True)
162
162
163 sys.stdout.flush()
163 sys.stdout.flush()
164 sys.stderr.flush()
164 sys.stderr.flush()
165 self._publish_status(u'idle')
165 self._publish_status(u'idle')
166
166
167 def dispatch_shell(self, stream, msg):
167 def dispatch_shell(self, stream, msg):
168 """dispatch shell requests"""
168 """dispatch shell requests"""
169 # flush control requests first
169 # flush control requests first
170 if self.control_stream:
170 if self.control_stream:
171 self.control_stream.flush()
171 self.control_stream.flush()
172
172
173 idents,msg = self.session.feed_identities(msg, copy=False)
173 idents,msg = self.session.feed_identities(msg, copy=False)
174 try:
174 try:
175 msg = self.session.deserialize(msg, content=True, copy=False)
175 msg = self.session.deserialize(msg, content=True, copy=False)
176 except:
176 except:
177 self.log.error("Invalid Message", exc_info=True)
177 self.log.error("Invalid Message", exc_info=True)
178 return
178 return
179
179
180 # Set the parent message for side effects.
180 # Set the parent message for side effects.
181 self.set_parent(idents, msg)
181 self.set_parent(idents, msg)
182 self._publish_status(u'busy')
182 self._publish_status(u'busy')
183
183
184 header = msg['header']
184 header = msg['header']
185 msg_id = header['msg_id']
185 msg_id = header['msg_id']
186 msg_type = msg['header']['msg_type']
186 msg_type = msg['header']['msg_type']
187
187
188 # Print some info about this message and leave a '--->' marker, so it's
188 # Print some info about this message and leave a '--->' marker, so it's
189 # easier to trace visually the message chain when debugging. Each
189 # easier to trace visually the message chain when debugging. Each
190 # handler prints its message at the end.
190 # handler prints its message at the end.
191 self.log.debug('\n*** MESSAGE TYPE:%s***', msg_type)
191 self.log.debug('\n*** MESSAGE TYPE:%s***', msg_type)
192 self.log.debug(' Content: %s\n --->\n ', msg['content'])
192 self.log.debug(' Content: %s\n --->\n ', msg['content'])
193
193
194 if msg_id in self.aborted:
194 if msg_id in self.aborted:
195 self.aborted.remove(msg_id)
195 self.aborted.remove(msg_id)
196 # is it safe to assume a msg_id will not be resubmitted?
196 # is it safe to assume a msg_id will not be resubmitted?
197 reply_type = msg_type.split('_')[0] + '_reply'
197 reply_type = msg_type.split('_')[0] + '_reply'
198 status = {'status' : 'aborted'}
198 status = {'status' : 'aborted'}
199 md = {'engine' : self.ident}
199 md = {'engine' : self.ident}
200 md.update(status)
200 md.update(status)
201 self.session.send(stream, reply_type, metadata=md,
201 self.session.send(stream, reply_type, metadata=md,
202 content=status, parent=msg, ident=idents)
202 content=status, parent=msg, ident=idents)
203 return
203 return
204
204
205 handler = self.shell_handlers.get(msg_type, None)
205 handler = self.shell_handlers.get(msg_type, None)
206 if handler is None:
206 if handler is None:
207 self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type)
207 self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type)
208 else:
208 else:
209 # ensure default_int_handler during handler call
209 # ensure default_int_handler during handler call
210 sig = signal(SIGINT, default_int_handler)
210 sig = signal(SIGINT, default_int_handler)
211 self.log.debug("%s: %s", msg_type, msg)
211 self.log.debug("%s: %s", msg_type, msg)
212 try:
212 try:
213 handler(stream, idents, msg)
213 handler(stream, idents, msg)
214 except Exception:
214 except Exception:
215 self.log.error("Exception in message handler:", exc_info=True)
215 self.log.error("Exception in message handler:", exc_info=True)
216 finally:
216 finally:
217 signal(SIGINT, sig)
217 signal(SIGINT, sig)
218
218
219 sys.stdout.flush()
219 sys.stdout.flush()
220 sys.stderr.flush()
220 sys.stderr.flush()
221 self._publish_status(u'idle')
221 self._publish_status(u'idle')
222
222
223 def enter_eventloop(self):
223 def enter_eventloop(self):
224 """enter eventloop"""
224 """enter eventloop"""
225 self.log.info("entering eventloop %s", self.eventloop)
225 self.log.info("entering eventloop %s", self.eventloop)
226 for stream in self.shell_streams:
226 for stream in self.shell_streams:
227 # flush any pending replies,
227 # flush any pending replies,
228 # which may be skipped by entering the eventloop
228 # which may be skipped by entering the eventloop
229 stream.flush(zmq.POLLOUT)
229 stream.flush(zmq.POLLOUT)
230 # restore default_int_handler
230 # restore default_int_handler
231 signal(SIGINT, default_int_handler)
231 signal(SIGINT, default_int_handler)
232 while self.eventloop is not None:
232 while self.eventloop is not None:
233 try:
233 try:
234 self.eventloop(self)
234 self.eventloop(self)
235 except KeyboardInterrupt:
235 except KeyboardInterrupt:
236 # Ctrl-C shouldn't crash the kernel
236 # Ctrl-C shouldn't crash the kernel
237 self.log.error("KeyboardInterrupt caught in kernel")
237 self.log.error("KeyboardInterrupt caught in kernel")
238 continue
238 continue
239 else:
239 else:
240 # eventloop exited cleanly, this means we should stop (right?)
240 # eventloop exited cleanly, this means we should stop (right?)
241 self.eventloop = None
241 self.eventloop = None
242 break
242 break
243 self.log.info("exiting eventloop")
243 self.log.info("exiting eventloop")
244
244
245 def start(self):
245 def start(self):
246 """register dispatchers for streams"""
246 """register dispatchers for streams"""
247 if self.control_stream:
247 if self.control_stream:
248 self.control_stream.on_recv(self.dispatch_control, copy=False)
248 self.control_stream.on_recv(self.dispatch_control, copy=False)
249
249
250 def make_dispatcher(stream):
250 def make_dispatcher(stream):
251 def dispatcher(msg):
251 def dispatcher(msg):
252 return self.dispatch_shell(stream, msg)
252 return self.dispatch_shell(stream, msg)
253 return dispatcher
253 return dispatcher
254
254
255 for s in self.shell_streams:
255 for s in self.shell_streams:
256 s.on_recv(make_dispatcher(s), copy=False)
256 s.on_recv(make_dispatcher(s), copy=False)
257
257
258 # publish idle status
258 # publish idle status
259 self._publish_status('starting')
259 self._publish_status('starting')
260
260
261 def do_one_iteration(self):
261 def do_one_iteration(self):
262 """step eventloop just once"""
262 """step eventloop just once"""
263 if self.control_stream:
263 if self.control_stream:
264 self.control_stream.flush()
264 self.control_stream.flush()
265 for stream in self.shell_streams:
265 for stream in self.shell_streams:
266 # handle at most one request per iteration
266 # handle at most one request per iteration
267 stream.flush(zmq.POLLIN, 1)
267 stream.flush(zmq.POLLIN, 1)
268 stream.flush(zmq.POLLOUT)
268 stream.flush(zmq.POLLOUT)
269
269
270
270
271 def record_ports(self, ports):
271 def record_ports(self, ports):
272 """Record the ports that this kernel is using.
272 """Record the ports that this kernel is using.
273
273
274 The creator of the Kernel instance must call this methods if they
274 The creator of the Kernel instance must call this methods if they
275 want the :meth:`connect_request` method to return the port numbers.
275 want the :meth:`connect_request` method to return the port numbers.
276 """
276 """
277 self._recorded_ports = ports
277 self._recorded_ports = ports
278
278
279 #---------------------------------------------------------------------------
279 #---------------------------------------------------------------------------
280 # Kernel request handlers
280 # Kernel request handlers
281 #---------------------------------------------------------------------------
281 #---------------------------------------------------------------------------
282
282
283 def _make_metadata(self, other=None):
283 def _make_metadata(self, other=None):
284 """init metadata dict, for execute/apply_reply"""
284 """init metadata dict, for execute/apply_reply"""
285 new_md = {
285 new_md = {
286 'dependencies_met' : True,
286 'dependencies_met' : True,
287 'engine' : self.ident,
287 'engine' : self.ident,
288 'started': datetime.now(),
288 'started': datetime.now(),
289 }
289 }
290 if other:
290 if other:
291 new_md.update(other)
291 new_md.update(other)
292 return new_md
292 return new_md
293
293
294 def _publish_execute_input(self, code, parent, execution_count):
294 def _publish_execute_input(self, code, parent, execution_count):
295 """Publish the code request on the iopub stream."""
295 """Publish the code request on the iopub stream."""
296
296
297 self.session.send(self.iopub_socket, u'execute_input',
297 self.session.send(self.iopub_socket, u'execute_input',
298 {u'code':code, u'execution_count': execution_count},
298 {u'code':code, u'execution_count': execution_count},
299 parent=parent, ident=self._topic('execute_input')
299 parent=parent, ident=self._topic('execute_input')
300 )
300 )
301
301
302 def _publish_status(self, status, parent=None):
302 def _publish_status(self, status, parent=None):
303 """send status (busy/idle) on IOPub"""
303 """send status (busy/idle) on IOPub"""
304 self.session.send(self.iopub_socket,
304 self.session.send(self.iopub_socket,
305 u'status',
305 u'status',
306 {u'execution_state': status},
306 {u'execution_state': status},
307 parent=parent or self._parent_header,
307 parent=parent or self._parent_header,
308 ident=self._topic('status'),
308 ident=self._topic('status'),
309 )
309 )
310
310
311 def set_parent(self, ident, parent):
311 def set_parent(self, ident, parent):
312 """Set the current parent_header
312 """Set the current parent_header
313
313
314 Side effects (IOPub messages) and replies are associated with
314 Side effects (IOPub messages) and replies are associated with
315 the request that caused them via the parent_header.
315 the request that caused them via the parent_header.
316
316
317 The parent identity is used to route input_request messages
317 The parent identity is used to route input_request messages
318 on the stdin channel.
318 on the stdin channel.
319 """
319 """
320 self._parent_ident = ident
320 self._parent_ident = ident
321 self._parent_header = parent
321 self._parent_header = parent
322
322
323 def send_response(self, stream, msg_or_type, content=None, ident=None,
323 def send_response(self, stream, msg_or_type, content=None, ident=None,
324 buffers=None, track=False, header=None, metadata=None):
324 buffers=None, track=False, header=None, metadata=None):
325 """Send a response to the message we're currently processing.
325 """Send a response to the message we're currently processing.
326
326
327 This accepts all the parameters of :meth:`IPython.kernel.zmq.session.Session.send`
327 This accepts all the parameters of :meth:`ipython_kernel.zmq.session.Session.send`
328 except ``parent``.
328 except ``parent``.
329
329
330 This relies on :meth:`set_parent` having been called for the current
330 This relies on :meth:`set_parent` having been called for the current
331 message.
331 message.
332 """
332 """
333 return self.session.send(stream, msg_or_type, content, self._parent_header,
333 return self.session.send(stream, msg_or_type, content, self._parent_header,
334 ident, buffers, track, header, metadata)
334 ident, buffers, track, header, metadata)
335
335
336 def execute_request(self, stream, ident, parent):
336 def execute_request(self, stream, ident, parent):
337 """handle an execute_request"""
337 """handle an execute_request"""
338
338
339 try:
339 try:
340 content = parent[u'content']
340 content = parent[u'content']
341 code = py3compat.cast_unicode_py2(content[u'code'])
341 code = py3compat.cast_unicode_py2(content[u'code'])
342 silent = content[u'silent']
342 silent = content[u'silent']
343 store_history = content.get(u'store_history', not silent)
343 store_history = content.get(u'store_history', not silent)
344 user_expressions = content.get('user_expressions', {})
344 user_expressions = content.get('user_expressions', {})
345 allow_stdin = content.get('allow_stdin', False)
345 allow_stdin = content.get('allow_stdin', False)
346 except:
346 except:
347 self.log.error("Got bad msg: ")
347 self.log.error("Got bad msg: ")
348 self.log.error("%s", parent)
348 self.log.error("%s", parent)
349 return
349 return
350
350
351 stop_on_error = content.get('stop_on_error', True)
351 stop_on_error = content.get('stop_on_error', True)
352
352
353 md = self._make_metadata(parent['metadata'])
353 md = self._make_metadata(parent['metadata'])
354
354
355 # Re-broadcast our input for the benefit of listening clients, and
355 # Re-broadcast our input for the benefit of listening clients, and
356 # start computing output
356 # start computing output
357 if not silent:
357 if not silent:
358 self.execution_count += 1
358 self.execution_count += 1
359 self._publish_execute_input(code, parent, self.execution_count)
359 self._publish_execute_input(code, parent, self.execution_count)
360
360
361 reply_content = self.do_execute(code, silent, store_history,
361 reply_content = self.do_execute(code, silent, store_history,
362 user_expressions, allow_stdin)
362 user_expressions, allow_stdin)
363
363
364 # Flush output before sending the reply.
364 # Flush output before sending the reply.
365 sys.stdout.flush()
365 sys.stdout.flush()
366 sys.stderr.flush()
366 sys.stderr.flush()
367 # FIXME: on rare occasions, the flush doesn't seem to make it to the
367 # FIXME: on rare occasions, the flush doesn't seem to make it to the
368 # clients... This seems to mitigate the problem, but we definitely need
368 # clients... This seems to mitigate the problem, but we definitely need
369 # to better understand what's going on.
369 # to better understand what's going on.
370 if self._execute_sleep:
370 if self._execute_sleep:
371 time.sleep(self._execute_sleep)
371 time.sleep(self._execute_sleep)
372
372
373 # Send the reply.
373 # Send the reply.
374 reply_content = json_clean(reply_content)
374 reply_content = json_clean(reply_content)
375
375
376 md['status'] = reply_content['status']
376 md['status'] = reply_content['status']
377 if reply_content['status'] == 'error' and \
377 if reply_content['status'] == 'error' and \
378 reply_content['ename'] == 'UnmetDependency':
378 reply_content['ename'] == 'UnmetDependency':
379 md['dependencies_met'] = False
379 md['dependencies_met'] = False
380
380
381 reply_msg = self.session.send(stream, u'execute_reply',
381 reply_msg = self.session.send(stream, u'execute_reply',
382 reply_content, parent, metadata=md,
382 reply_content, parent, metadata=md,
383 ident=ident)
383 ident=ident)
384
384
385 self.log.debug("%s", reply_msg)
385 self.log.debug("%s", reply_msg)
386
386
387 if not silent and reply_msg['content']['status'] == u'error' and stop_on_error:
387 if not silent and reply_msg['content']['status'] == u'error' and stop_on_error:
388 self._abort_queues()
388 self._abort_queues()
389
389
390 def do_execute(self, code, silent, store_history=True,
390 def do_execute(self, code, silent, store_history=True,
391 user_expressions=None, allow_stdin=False):
391 user_expressions=None, allow_stdin=False):
392 """Execute user code. Must be overridden by subclasses.
392 """Execute user code. Must be overridden by subclasses.
393 """
393 """
394 raise NotImplementedError
394 raise NotImplementedError
395
395
396 def complete_request(self, stream, ident, parent):
396 def complete_request(self, stream, ident, parent):
397 content = parent['content']
397 content = parent['content']
398 code = content['code']
398 code = content['code']
399 cursor_pos = content['cursor_pos']
399 cursor_pos = content['cursor_pos']
400
400
401 matches = self.do_complete(code, cursor_pos)
401 matches = self.do_complete(code, cursor_pos)
402 matches = json_clean(matches)
402 matches = json_clean(matches)
403 completion_msg = self.session.send(stream, 'complete_reply',
403 completion_msg = self.session.send(stream, 'complete_reply',
404 matches, parent, ident)
404 matches, parent, ident)
405 self.log.debug("%s", completion_msg)
405 self.log.debug("%s", completion_msg)
406
406
407 def do_complete(self, code, cursor_pos):
407 def do_complete(self, code, cursor_pos):
408 """Override in subclasses to find completions.
408 """Override in subclasses to find completions.
409 """
409 """
410 return {'matches' : [],
410 return {'matches' : [],
411 'cursor_end' : cursor_pos,
411 'cursor_end' : cursor_pos,
412 'cursor_start' : cursor_pos,
412 'cursor_start' : cursor_pos,
413 'metadata' : {},
413 'metadata' : {},
414 'status' : 'ok'}
414 'status' : 'ok'}
415
415
416 def inspect_request(self, stream, ident, parent):
416 def inspect_request(self, stream, ident, parent):
417 content = parent['content']
417 content = parent['content']
418
418
419 reply_content = self.do_inspect(content['code'], content['cursor_pos'],
419 reply_content = self.do_inspect(content['code'], content['cursor_pos'],
420 content.get('detail_level', 0))
420 content.get('detail_level', 0))
421 # Before we send this object over, we scrub it for JSON usage
421 # Before we send this object over, we scrub it for JSON usage
422 reply_content = json_clean(reply_content)
422 reply_content = json_clean(reply_content)
423 msg = self.session.send(stream, 'inspect_reply',
423 msg = self.session.send(stream, 'inspect_reply',
424 reply_content, parent, ident)
424 reply_content, parent, ident)
425 self.log.debug("%s", msg)
425 self.log.debug("%s", msg)
426
426
427 def do_inspect(self, code, cursor_pos, detail_level=0):
427 def do_inspect(self, code, cursor_pos, detail_level=0):
428 """Override in subclasses to allow introspection.
428 """Override in subclasses to allow introspection.
429 """
429 """
430 return {'status': 'ok', 'data':{}, 'metadata':{}, 'found':False}
430 return {'status': 'ok', 'data':{}, 'metadata':{}, 'found':False}
431
431
432 def history_request(self, stream, ident, parent):
432 def history_request(self, stream, ident, parent):
433 content = parent['content']
433 content = parent['content']
434
434
435 reply_content = self.do_history(**content)
435 reply_content = self.do_history(**content)
436
436
437 reply_content = json_clean(reply_content)
437 reply_content = json_clean(reply_content)
438 msg = self.session.send(stream, 'history_reply',
438 msg = self.session.send(stream, 'history_reply',
439 reply_content, parent, ident)
439 reply_content, parent, ident)
440 self.log.debug("%s", msg)
440 self.log.debug("%s", msg)
441
441
442 def do_history(self, hist_access_type, output, raw, session=None, start=None,
442 def do_history(self, hist_access_type, output, raw, session=None, start=None,
443 stop=None, n=None, pattern=None, unique=False):
443 stop=None, n=None, pattern=None, unique=False):
444 """Override in subclasses to access history.
444 """Override in subclasses to access history.
445 """
445 """
446 return {'history': []}
446 return {'history': []}
447
447
448 def connect_request(self, stream, ident, parent):
448 def connect_request(self, stream, ident, parent):
449 if self._recorded_ports is not None:
449 if self._recorded_ports is not None:
450 content = self._recorded_ports.copy()
450 content = self._recorded_ports.copy()
451 else:
451 else:
452 content = {}
452 content = {}
453 msg = self.session.send(stream, 'connect_reply',
453 msg = self.session.send(stream, 'connect_reply',
454 content, parent, ident)
454 content, parent, ident)
455 self.log.debug("%s", msg)
455 self.log.debug("%s", msg)
456
456
457 @property
457 @property
458 def kernel_info(self):
458 def kernel_info(self):
459 return {
459 return {
460 'protocol_version': release.kernel_protocol_version,
460 'protocol_version': release.kernel_protocol_version,
461 'implementation': self.implementation,
461 'implementation': self.implementation,
462 'implementation_version': self.implementation_version,
462 'implementation_version': self.implementation_version,
463 'language_info': self.language_info,
463 'language_info': self.language_info,
464 'banner': self.banner,
464 'banner': self.banner,
465 'help_links': self.help_links,
465 'help_links': self.help_links,
466 }
466 }
467
467
468 def kernel_info_request(self, stream, ident, parent):
468 def kernel_info_request(self, stream, ident, parent):
469 msg = self.session.send(stream, 'kernel_info_reply',
469 msg = self.session.send(stream, 'kernel_info_reply',
470 self.kernel_info, parent, ident)
470 self.kernel_info, parent, ident)
471 self.log.debug("%s", msg)
471 self.log.debug("%s", msg)
472
472
473 def shutdown_request(self, stream, ident, parent):
473 def shutdown_request(self, stream, ident, parent):
474 content = self.do_shutdown(parent['content']['restart'])
474 content = self.do_shutdown(parent['content']['restart'])
475 self.session.send(stream, u'shutdown_reply', content, parent, ident=ident)
475 self.session.send(stream, u'shutdown_reply', content, parent, ident=ident)
476 # same content, but different msg_id for broadcasting on IOPub
476 # same content, but different msg_id for broadcasting on IOPub
477 self._shutdown_message = self.session.msg(u'shutdown_reply',
477 self._shutdown_message = self.session.msg(u'shutdown_reply',
478 content, parent
478 content, parent
479 )
479 )
480
480
481 self._at_shutdown()
481 self._at_shutdown()
482 # call sys.exit after a short delay
482 # call sys.exit after a short delay
483 loop = ioloop.IOLoop.instance()
483 loop = ioloop.IOLoop.instance()
484 loop.add_timeout(time.time()+0.1, loop.stop)
484 loop.add_timeout(time.time()+0.1, loop.stop)
485
485
486 def do_shutdown(self, restart):
486 def do_shutdown(self, restart):
487 """Override in subclasses to do things when the frontend shuts down the
487 """Override in subclasses to do things when the frontend shuts down the
488 kernel.
488 kernel.
489 """
489 """
490 return {'status': 'ok', 'restart': restart}
490 return {'status': 'ok', 'restart': restart}
491
491
492 def is_complete_request(self, stream, ident, parent):
492 def is_complete_request(self, stream, ident, parent):
493 content = parent['content']
493 content = parent['content']
494 code = content['code']
494 code = content['code']
495
495
496 reply_content = self.do_is_complete(code)
496 reply_content = self.do_is_complete(code)
497 reply_content = json_clean(reply_content)
497 reply_content = json_clean(reply_content)
498 reply_msg = self.session.send(stream, 'is_complete_reply',
498 reply_msg = self.session.send(stream, 'is_complete_reply',
499 reply_content, parent, ident)
499 reply_content, parent, ident)
500 self.log.debug("%s", reply_msg)
500 self.log.debug("%s", reply_msg)
501
501
502 def do_is_complete(self, code):
502 def do_is_complete(self, code):
503 """Override in subclasses to find completions.
503 """Override in subclasses to find completions.
504 """
504 """
505 return {'status' : 'unknown',
505 return {'status' : 'unknown',
506 }
506 }
507
507
508 #---------------------------------------------------------------------------
508 #---------------------------------------------------------------------------
509 # Engine methods
509 # Engine methods
510 #---------------------------------------------------------------------------
510 #---------------------------------------------------------------------------
511
511
512 def apply_request(self, stream, ident, parent):
512 def apply_request(self, stream, ident, parent):
513 try:
513 try:
514 content = parent[u'content']
514 content = parent[u'content']
515 bufs = parent[u'buffers']
515 bufs = parent[u'buffers']
516 msg_id = parent['header']['msg_id']
516 msg_id = parent['header']['msg_id']
517 except:
517 except:
518 self.log.error("Got bad msg: %s", parent, exc_info=True)
518 self.log.error("Got bad msg: %s", parent, exc_info=True)
519 return
519 return
520
520
521 md = self._make_metadata(parent['metadata'])
521 md = self._make_metadata(parent['metadata'])
522
522
523 reply_content, result_buf = self.do_apply(content, bufs, msg_id, md)
523 reply_content, result_buf = self.do_apply(content, bufs, msg_id, md)
524
524
525 # put 'ok'/'error' status in header, for scheduler introspection:
525 # put 'ok'/'error' status in header, for scheduler introspection:
526 md['status'] = reply_content['status']
526 md['status'] = reply_content['status']
527
527
528 # flush i/o
528 # flush i/o
529 sys.stdout.flush()
529 sys.stdout.flush()
530 sys.stderr.flush()
530 sys.stderr.flush()
531
531
532 self.session.send(stream, u'apply_reply', reply_content,
532 self.session.send(stream, u'apply_reply', reply_content,
533 parent=parent, ident=ident,buffers=result_buf, metadata=md)
533 parent=parent, ident=ident,buffers=result_buf, metadata=md)
534
534
535 def do_apply(self, content, bufs, msg_id, reply_metadata):
535 def do_apply(self, content, bufs, msg_id, reply_metadata):
536 """Override in subclasses to support the IPython parallel framework.
536 """Override in subclasses to support the IPython parallel framework.
537 """
537 """
538 raise NotImplementedError
538 raise NotImplementedError
539
539
540 #---------------------------------------------------------------------------
540 #---------------------------------------------------------------------------
541 # Control messages
541 # Control messages
542 #---------------------------------------------------------------------------
542 #---------------------------------------------------------------------------
543
543
544 def abort_request(self, stream, ident, parent):
544 def abort_request(self, stream, ident, parent):
545 """abort a specific msg by id"""
545 """abort a specific msg by id"""
546 msg_ids = parent['content'].get('msg_ids', None)
546 msg_ids = parent['content'].get('msg_ids', None)
547 if isinstance(msg_ids, string_types):
547 if isinstance(msg_ids, string_types):
548 msg_ids = [msg_ids]
548 msg_ids = [msg_ids]
549 if not msg_ids:
549 if not msg_ids:
550 self._abort_queues()
550 self._abort_queues()
551 for mid in msg_ids:
551 for mid in msg_ids:
552 self.aborted.add(str(mid))
552 self.aborted.add(str(mid))
553
553
554 content = dict(status='ok')
554 content = dict(status='ok')
555 reply_msg = self.session.send(stream, 'abort_reply', content=content,
555 reply_msg = self.session.send(stream, 'abort_reply', content=content,
556 parent=parent, ident=ident)
556 parent=parent, ident=ident)
557 self.log.debug("%s", reply_msg)
557 self.log.debug("%s", reply_msg)
558
558
559 def clear_request(self, stream, idents, parent):
559 def clear_request(self, stream, idents, parent):
560 """Clear our namespace."""
560 """Clear our namespace."""
561 content = self.do_clear()
561 content = self.do_clear()
562 self.session.send(stream, 'clear_reply', ident=idents, parent=parent,
562 self.session.send(stream, 'clear_reply', ident=idents, parent=parent,
563 content = content)
563 content = content)
564
564
565 def do_clear(self):
565 def do_clear(self):
566 """Override in subclasses to clear the namespace
566 """Override in subclasses to clear the namespace
567
567
568 This is only required for IPython.parallel.
568 This is only required for IPython.parallel.
569 """
569 """
570 raise NotImplementedError
570 raise NotImplementedError
571
571
572 #---------------------------------------------------------------------------
572 #---------------------------------------------------------------------------
573 # Protected interface
573 # Protected interface
574 #---------------------------------------------------------------------------
574 #---------------------------------------------------------------------------
575
575
576 def _topic(self, topic):
576 def _topic(self, topic):
577 """prefixed topic for IOPub messages"""
577 """prefixed topic for IOPub messages"""
578 if self.int_id >= 0:
578 if self.int_id >= 0:
579 base = "engine.%i" % self.int_id
579 base = "engine.%i" % self.int_id
580 else:
580 else:
581 base = "kernel.%s" % self.ident
581 base = "kernel.%s" % self.ident
582
582
583 return py3compat.cast_bytes("%s.%s" % (base, topic))
583 return py3compat.cast_bytes("%s.%s" % (base, topic))
584
584
585 def _abort_queues(self):
585 def _abort_queues(self):
586 for stream in self.shell_streams:
586 for stream in self.shell_streams:
587 if stream:
587 if stream:
588 self._abort_queue(stream)
588 self._abort_queue(stream)
589
589
590 def _abort_queue(self, stream):
590 def _abort_queue(self, stream):
591 poller = zmq.Poller()
591 poller = zmq.Poller()
592 poller.register(stream.socket, zmq.POLLIN)
592 poller.register(stream.socket, zmq.POLLIN)
593 while True:
593 while True:
594 idents,msg = self.session.recv(stream, zmq.NOBLOCK, content=True)
594 idents,msg = self.session.recv(stream, zmq.NOBLOCK, content=True)
595 if msg is None:
595 if msg is None:
596 return
596 return
597
597
598 self.log.info("Aborting:")
598 self.log.info("Aborting:")
599 self.log.info("%s", msg)
599 self.log.info("%s", msg)
600 msg_type = msg['header']['msg_type']
600 msg_type = msg['header']['msg_type']
601 reply_type = msg_type.split('_')[0] + '_reply'
601 reply_type = msg_type.split('_')[0] + '_reply'
602
602
603 status = {'status' : 'aborted'}
603 status = {'status' : 'aborted'}
604 md = {'engine' : self.ident}
604 md = {'engine' : self.ident}
605 md.update(status)
605 md.update(status)
606 reply_msg = self.session.send(stream, reply_type, metadata=md,
606 reply_msg = self.session.send(stream, reply_type, metadata=md,
607 content=status, parent=msg, ident=idents)
607 content=status, parent=msg, ident=idents)
608 self.log.debug("%s", reply_msg)
608 self.log.debug("%s", reply_msg)
609 # We need to wait a bit for requests to come in. This can probably
609 # We need to wait a bit for requests to come in. This can probably
610 # be set shorter for true asynchronous clients.
610 # be set shorter for true asynchronous clients.
611 poller.poll(50)
611 poller.poll(50)
612
612
613
613
614 def _no_raw_input(self):
614 def _no_raw_input(self):
615 """Raise StdinNotImplentedError if active frontend doesn't support
615 """Raise StdinNotImplentedError if active frontend doesn't support
616 stdin."""
616 stdin."""
617 raise StdinNotImplementedError("raw_input was called, but this "
617 raise StdinNotImplementedError("raw_input was called, but this "
618 "frontend does not support stdin.")
618 "frontend does not support stdin.")
619
619
620 def getpass(self, prompt=''):
620 def getpass(self, prompt=''):
621 """Forward getpass to frontends
621 """Forward getpass to frontends
622
622
623 Raises
623 Raises
624 ------
624 ------
625 StdinNotImplentedError if active frontend doesn't support stdin.
625 StdinNotImplentedError if active frontend doesn't support stdin.
626 """
626 """
627 if not self._allow_stdin:
627 if not self._allow_stdin:
628 raise StdinNotImplementedError(
628 raise StdinNotImplementedError(
629 "getpass was called, but this frontend does not support input requests."
629 "getpass was called, but this frontend does not support input requests."
630 )
630 )
631 return self._input_request(prompt,
631 return self._input_request(prompt,
632 self._parent_ident,
632 self._parent_ident,
633 self._parent_header,
633 self._parent_header,
634 password=True,
634 password=True,
635 )
635 )
636
636
637 def raw_input(self, prompt=''):
637 def raw_input(self, prompt=''):
638 """Forward raw_input to frontends
638 """Forward raw_input to frontends
639
639
640 Raises
640 Raises
641 ------
641 ------
642 StdinNotImplentedError if active frontend doesn't support stdin.
642 StdinNotImplentedError if active frontend doesn't support stdin.
643 """
643 """
644 if not self._allow_stdin:
644 if not self._allow_stdin:
645 raise StdinNotImplementedError(
645 raise StdinNotImplementedError(
646 "raw_input was called, but this frontend does not support input requests."
646 "raw_input was called, but this frontend does not support input requests."
647 )
647 )
648 return self._input_request(prompt,
648 return self._input_request(prompt,
649 self._parent_ident,
649 self._parent_ident,
650 self._parent_header,
650 self._parent_header,
651 password=False,
651 password=False,
652 )
652 )
653
653
654 def _input_request(self, prompt, ident, parent, password=False):
654 def _input_request(self, prompt, ident, parent, password=False):
655 # Flush output before making the request.
655 # Flush output before making the request.
656 sys.stderr.flush()
656 sys.stderr.flush()
657 sys.stdout.flush()
657 sys.stdout.flush()
658 # flush the stdin socket, to purge stale replies
658 # flush the stdin socket, to purge stale replies
659 while True:
659 while True:
660 try:
660 try:
661 self.stdin_socket.recv_multipart(zmq.NOBLOCK)
661 self.stdin_socket.recv_multipart(zmq.NOBLOCK)
662 except zmq.ZMQError as e:
662 except zmq.ZMQError as e:
663 if e.errno == zmq.EAGAIN:
663 if e.errno == zmq.EAGAIN:
664 break
664 break
665 else:
665 else:
666 raise
666 raise
667
667
668 # Send the input request.
668 # Send the input request.
669 content = json_clean(dict(prompt=prompt, password=password))
669 content = json_clean(dict(prompt=prompt, password=password))
670 self.session.send(self.stdin_socket, u'input_request', content, parent,
670 self.session.send(self.stdin_socket, u'input_request', content, parent,
671 ident=ident)
671 ident=ident)
672
672
673 # Await a response.
673 # Await a response.
674 while True:
674 while True:
675 try:
675 try:
676 ident, reply = self.session.recv(self.stdin_socket, 0)
676 ident, reply = self.session.recv(self.stdin_socket, 0)
677 except Exception:
677 except Exception:
678 self.log.warn("Invalid Message:", exc_info=True)
678 self.log.warn("Invalid Message:", exc_info=True)
679 except KeyboardInterrupt:
679 except KeyboardInterrupt:
680 # re-raise KeyboardInterrupt, to truncate traceback
680 # re-raise KeyboardInterrupt, to truncate traceback
681 raise KeyboardInterrupt
681 raise KeyboardInterrupt
682 else:
682 else:
683 break
683 break
684 try:
684 try:
685 value = py3compat.unicode_to_str(reply['content']['value'])
685 value = py3compat.unicode_to_str(reply['content']['value'])
686 except:
686 except:
687 self.log.error("Bad input_reply: %s", parent)
687 self.log.error("Bad input_reply: %s", parent)
688 value = ''
688 value = ''
689 if value == '\x04':
689 if value == '\x04':
690 # EOF
690 # EOF
691 raise EOFError
691 raise EOFError
692 return value
692 return value
693
693
694 def _at_shutdown(self):
694 def _at_shutdown(self):
695 """Actions taken at shutdown by the kernel, called by python's atexit.
695 """Actions taken at shutdown by the kernel, called by python's atexit.
696 """
696 """
697 # io.rprint("Kernel at_shutdown") # dbg
697 # io.rprint("Kernel at_shutdown") # dbg
698 if self._shutdown_message is not None:
698 if self._shutdown_message is not None:
699 self.session.send(self.iopub_socket, self._shutdown_message, ident=self._topic('shutdown'))
699 self.session.send(self.iopub_socket, self._shutdown_message, ident=self._topic('shutdown'))
700 self.log.debug("%s", self._shutdown_message)
700 self.log.debug("%s", self._shutdown_message)
701 [ s.flush(zmq.POLLOUT) for s in self.shell_streams ]
701 [ s.flush(zmq.POLLOUT) for s in self.shell_streams ]
@@ -1,21 +1,20 b''
1 from logging import INFO, DEBUG, WARN, ERROR, FATAL
1 from logging import INFO, DEBUG, WARN, ERROR, FATAL
2
2
3 from zmq.log.handlers import PUBHandler
3 from zmq.log.handlers import PUBHandler
4
4
5 class EnginePUBHandler(PUBHandler):
5 class EnginePUBHandler(PUBHandler):
6 """A simple PUBHandler subclass that sets root_topic"""
6 """A simple PUBHandler subclass that sets root_topic"""
7 engine=None
7 engine=None
8
8
9 def __init__(self, engine, *args, **kwargs):
9 def __init__(self, engine, *args, **kwargs):
10 PUBHandler.__init__(self,*args, **kwargs)
10 PUBHandler.__init__(self,*args, **kwargs)
11 self.engine = engine
11 self.engine = engine
12
12
13 @property
13 @property
14 def root_topic(self):
14 def root_topic(self):
15 """this is a property, in case the handler is created
15 """this is a property, in case the handler is created
16 before the engine gets registered with an id"""
16 before the engine gets registered with an id"""
17 if isinstance(getattr(self.engine, 'id', None), int):
17 if isinstance(getattr(self.engine, 'id', None), int):
18 return "engine.%i"%self.engine.id
18 return "engine.%i"%self.engine.id
19 else:
19 else:
20 return "engine"
20 return "engine"
21
1 NO CONTENT: file renamed from IPython/kernel/zmq/parentpoller.py to ipython_kernel/zmq/parentpoller.py
NO CONTENT: file renamed from IPython/kernel/zmq/parentpoller.py to ipython_kernel/zmq/parentpoller.py
1 NO CONTENT: file renamed from IPython/kernel/zmq/pylab/__init__.py to ipython_kernel/zmq/pylab/__init__.py
NO CONTENT: file renamed from IPython/kernel/zmq/pylab/__init__.py to ipython_kernel/zmq/pylab/__init__.py
@@ -1,142 +1,141 b''
1 """A matplotlib backend for publishing figures via display_data"""
1 """A matplotlib backend for publishing figures via display_data"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from __future__ import print_function
6 from __future__ import print_function
7
7
8 import matplotlib
8 import matplotlib
9 from matplotlib.backends.backend_agg import new_figure_manager, FigureCanvasAgg # analysis: ignore
9 from matplotlib.backends.backend_agg import new_figure_manager, FigureCanvasAgg # analysis: ignore
10 from matplotlib._pylab_helpers import Gcf
10 from matplotlib._pylab_helpers import Gcf
11
11
12 from IPython.core.getipython import get_ipython
12 from IPython.core.getipython import get_ipython
13 from IPython.core.display import display
13 from IPython.core.display import display
14
14
15 from .config import InlineBackend
15 from .config import InlineBackend
16
16
17
17
18 def show(close=None):
18 def show(close=None):
19 """Show all figures as SVG/PNG payloads sent to the IPython clients.
19 """Show all figures as SVG/PNG payloads sent to the IPython clients.
20
20
21 Parameters
21 Parameters
22 ----------
22 ----------
23 close : bool, optional
23 close : bool, optional
24 If true, a ``plt.close('all')`` call is automatically issued after
24 If true, a ``plt.close('all')`` call is automatically issued after
25 sending all the figures. If this is set, the figures will entirely
25 sending all the figures. If this is set, the figures will entirely
26 removed from the internal list of figures.
26 removed from the internal list of figures.
27 """
27 """
28 if close is None:
28 if close is None:
29 close = InlineBackend.instance().close_figures
29 close = InlineBackend.instance().close_figures
30 try:
30 try:
31 for figure_manager in Gcf.get_all_fig_managers():
31 for figure_manager in Gcf.get_all_fig_managers():
32 display(figure_manager.canvas.figure)
32 display(figure_manager.canvas.figure)
33 finally:
33 finally:
34 show._to_draw = []
34 show._to_draw = []
35 # only call close('all') if any to close
35 # only call close('all') if any to close
36 # close triggers gc.collect, which can be slow
36 # close triggers gc.collect, which can be slow
37 if close and Gcf.get_all_fig_managers():
37 if close and Gcf.get_all_fig_managers():
38 matplotlib.pyplot.close('all')
38 matplotlib.pyplot.close('all')
39
39
40
40
41 # This flag will be reset by draw_if_interactive when called
41 # This flag will be reset by draw_if_interactive when called
42 show._draw_called = False
42 show._draw_called = False
43 # list of figures to draw when flush_figures is called
43 # list of figures to draw when flush_figures is called
44 show._to_draw = []
44 show._to_draw = []
45
45
46
46
47 def draw_if_interactive():
47 def draw_if_interactive():
48 """
48 """
49 Is called after every pylab drawing command
49 Is called after every pylab drawing command
50 """
50 """
51 # signal that the current active figure should be sent at the end of
51 # signal that the current active figure should be sent at the end of
52 # execution. Also sets the _draw_called flag, signaling that there will be
52 # execution. Also sets the _draw_called flag, signaling that there will be
53 # something to send. At the end of the code execution, a separate call to
53 # something to send. At the end of the code execution, a separate call to
54 # flush_figures() will act upon these values
54 # flush_figures() will act upon these values
55 manager = Gcf.get_active()
55 manager = Gcf.get_active()
56 if manager is None:
56 if manager is None:
57 return
57 return
58 fig = manager.canvas.figure
58 fig = manager.canvas.figure
59
59
60 # Hack: matplotlib FigureManager objects in interacive backends (at least
60 # Hack: matplotlib FigureManager objects in interacive backends (at least
61 # in some of them) monkeypatch the figure object and add a .show() method
61 # in some of them) monkeypatch the figure object and add a .show() method
62 # to it. This applies the same monkeypatch in order to support user code
62 # to it. This applies the same monkeypatch in order to support user code
63 # that might expect `.show()` to be part of the official API of figure
63 # that might expect `.show()` to be part of the official API of figure
64 # objects.
64 # objects.
65 # For further reference:
65 # For further reference:
66 # https://github.com/ipython/ipython/issues/1612
66 # https://github.com/ipython/ipython/issues/1612
67 # https://github.com/matplotlib/matplotlib/issues/835
67 # https://github.com/matplotlib/matplotlib/issues/835
68
68
69 if not hasattr(fig, 'show'):
69 if not hasattr(fig, 'show'):
70 # Queue up `fig` for display
70 # Queue up `fig` for display
71 fig.show = lambda *a: display(fig)
71 fig.show = lambda *a: display(fig)
72
72
73 # If matplotlib was manually set to non-interactive mode, this function
73 # If matplotlib was manually set to non-interactive mode, this function
74 # should be a no-op (otherwise we'll generate duplicate plots, since a user
74 # should be a no-op (otherwise we'll generate duplicate plots, since a user
75 # who set ioff() manually expects to make separate draw/show calls).
75 # who set ioff() manually expects to make separate draw/show calls).
76 if not matplotlib.is_interactive():
76 if not matplotlib.is_interactive():
77 return
77 return
78
78
79 # ensure current figure will be drawn, and each subsequent call
79 # ensure current figure will be drawn, and each subsequent call
80 # of draw_if_interactive() moves the active figure to ensure it is
80 # of draw_if_interactive() moves the active figure to ensure it is
81 # drawn last
81 # drawn last
82 try:
82 try:
83 show._to_draw.remove(fig)
83 show._to_draw.remove(fig)
84 except ValueError:
84 except ValueError:
85 # ensure it only appears in the draw list once
85 # ensure it only appears in the draw list once
86 pass
86 pass
87 # Queue up the figure for drawing in next show() call
87 # Queue up the figure for drawing in next show() call
88 show._to_draw.append(fig)
88 show._to_draw.append(fig)
89 show._draw_called = True
89 show._draw_called = True
90
90
91
91
92 def flush_figures():
92 def flush_figures():
93 """Send all figures that changed
93 """Send all figures that changed
94
94
95 This is meant to be called automatically and will call show() if, during
95 This is meant to be called automatically and will call show() if, during
96 prior code execution, there had been any calls to draw_if_interactive.
96 prior code execution, there had been any calls to draw_if_interactive.
97
97
98 This function is meant to be used as a post_execute callback in IPython,
98 This function is meant to be used as a post_execute callback in IPython,
99 so user-caused errors are handled with showtraceback() instead of being
99 so user-caused errors are handled with showtraceback() instead of being
100 allowed to raise. If this function is not called from within IPython,
100 allowed to raise. If this function is not called from within IPython,
101 then these exceptions will raise.
101 then these exceptions will raise.
102 """
102 """
103 if not show._draw_called:
103 if not show._draw_called:
104 return
104 return
105
105
106 if InlineBackend.instance().close_figures:
106 if InlineBackend.instance().close_figures:
107 # ignore the tracking, just draw and close all figures
107 # ignore the tracking, just draw and close all figures
108 try:
108 try:
109 return show(True)
109 return show(True)
110 except Exception as e:
110 except Exception as e:
111 # safely show traceback if in IPython, else raise
111 # safely show traceback if in IPython, else raise
112 ip = get_ipython()
112 ip = get_ipython()
113 if ip is None:
113 if ip is None:
114 raise e
114 raise e
115 else:
115 else:
116 ip.showtraceback()
116 ip.showtraceback()
117 return
117 return
118 try:
118 try:
119 # exclude any figures that were closed:
119 # exclude any figures that were closed:
120 active = set([fm.canvas.figure for fm in Gcf.get_all_fig_managers()])
120 active = set([fm.canvas.figure for fm in Gcf.get_all_fig_managers()])
121 for fig in [ fig for fig in show._to_draw if fig in active ]:
121 for fig in [ fig for fig in show._to_draw if fig in active ]:
122 try:
122 try:
123 display(fig)
123 display(fig)
124 except Exception as e:
124 except Exception as e:
125 # safely show traceback if in IPython, else raise
125 # safely show traceback if in IPython, else raise
126 ip = get_ipython()
126 ip = get_ipython()
127 if ip is None:
127 if ip is None:
128 raise e
128 raise e
129 else:
129 else:
130 ip.showtraceback()
130 ip.showtraceback()
131 return
131 return
132 finally:
132 finally:
133 # clear flags for next round
133 # clear flags for next round
134 show._to_draw = []
134 show._to_draw = []
135 show._draw_called = False
135 show._draw_called = False
136
136
137
137
138 # Changes to matplotlib in version 1.2 requires a mpl backend to supply a default
138 # Changes to matplotlib in version 1.2 requires a mpl backend to supply a default
139 # figurecanvas. This is set here to a Agg canvas
139 # figurecanvas. This is set here to a Agg canvas
140 # See https://github.com/matplotlib/matplotlib/pull/1125
140 # See https://github.com/matplotlib/matplotlib/pull/1125
141 FigureCanvas = FigureCanvasAgg
141 FigureCanvas = FigureCanvasAgg
142
@@ -1,120 +1,119 b''
1 """Configurable for configuring the IPython inline backend
1 """Configurable for configuring the IPython inline backend
2
2
3 This module does not import anything from matplotlib.
3 This module does not import anything from matplotlib.
4 """
4 """
5 #-----------------------------------------------------------------------------
5 #-----------------------------------------------------------------------------
6 # Copyright (C) 2011 The IPython Development Team
6 # Copyright (C) 2011 The IPython Development Team
7 #
7 #
8 # Distributed under the terms of the BSD License. The full license is in
8 # Distributed under the terms of the BSD License. The full license is in
9 # the file COPYING, distributed as part of this software.
9 # the file COPYING, distributed as part of this software.
10 #-----------------------------------------------------------------------------
10 #-----------------------------------------------------------------------------
11
11
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13 # Imports
13 # Imports
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15
15
16 from IPython.config import Config
16 from IPython.config import Config
17 from IPython.config.configurable import SingletonConfigurable
17 from IPython.config.configurable import SingletonConfigurable
18 from IPython.utils.traitlets import (
18 from IPython.utils.traitlets import (
19 Dict, Instance, CaselessStrEnum, Set, Bool, Int, TraitError, Unicode
19 Dict, Instance, CaselessStrEnum, Set, Bool, Int, TraitError, Unicode
20 )
20 )
21 from IPython.utils.warn import warn
21 from IPython.utils.warn import warn
22
22
23 #-----------------------------------------------------------------------------
23 #-----------------------------------------------------------------------------
24 # Configurable for inline backend options
24 # Configurable for inline backend options
25 #-----------------------------------------------------------------------------
25 #-----------------------------------------------------------------------------
26
26
27 def pil_available():
27 def pil_available():
28 """Test if PIL/Pillow is available"""
28 """Test if PIL/Pillow is available"""
29 out = False
29 out = False
30 try:
30 try:
31 from PIL import Image
31 from PIL import Image
32 out = True
32 out = True
33 except:
33 except:
34 pass
34 pass
35 return out
35 return out
36
36
37 # inherit from InlineBackendConfig for deprecation purposes
37 # inherit from InlineBackendConfig for deprecation purposes
38 class InlineBackendConfig(SingletonConfigurable):
38 class InlineBackendConfig(SingletonConfigurable):
39 pass
39 pass
40
40
41 class InlineBackend(InlineBackendConfig):
41 class InlineBackend(InlineBackendConfig):
42 """An object to store configuration of the inline backend."""
42 """An object to store configuration of the inline backend."""
43
43
44 def _config_changed(self, name, old, new):
44 def _config_changed(self, name, old, new):
45 # warn on change of renamed config section
45 # warn on change of renamed config section
46 if new.InlineBackendConfig != getattr(old, 'InlineBackendConfig', Config()):
46 if new.InlineBackendConfig != getattr(old, 'InlineBackendConfig', Config()):
47 warn("InlineBackendConfig has been renamed to InlineBackend")
47 warn("InlineBackendConfig has been renamed to InlineBackend")
48 super(InlineBackend, self)._config_changed(name, old, new)
48 super(InlineBackend, self)._config_changed(name, old, new)
49
49
50 # The typical default figure size is too large for inline use,
50 # The typical default figure size is too large for inline use,
51 # so we shrink the figure size to 6x4, and tweak fonts to
51 # so we shrink the figure size to 6x4, and tweak fonts to
52 # make that fit.
52 # make that fit.
53 rc = Dict({'figure.figsize': (6.0,4.0),
53 rc = Dict({'figure.figsize': (6.0,4.0),
54 # play nicely with white background in the Qt and notebook frontend
54 # play nicely with white background in the Qt and notebook frontend
55 'figure.facecolor': (1,1,1,0),
55 'figure.facecolor': (1,1,1,0),
56 'figure.edgecolor': (1,1,1,0),
56 'figure.edgecolor': (1,1,1,0),
57 'axes.facecolor': (1,1,1,0),
57 'axes.facecolor': (1,1,1,0),
58 # 12pt labels get cutoff on 6x4 logplots, so use 10pt.
58 # 12pt labels get cutoff on 6x4 logplots, so use 10pt.
59 'font.size': 10,
59 'font.size': 10,
60 # 72 dpi matches SVG/qtconsole
60 # 72 dpi matches SVG/qtconsole
61 # this only affects PNG export, as SVG has no dpi setting
61 # this only affects PNG export, as SVG has no dpi setting
62 'savefig.dpi': 72,
62 'savefig.dpi': 72,
63 # 10pt still needs a little more room on the xlabel:
63 # 10pt still needs a little more room on the xlabel:
64 'figure.subplot.bottom' : .125
64 'figure.subplot.bottom' : .125
65 }, config=True,
65 }, config=True,
66 help="""Subset of matplotlib rcParams that should be different for the
66 help="""Subset of matplotlib rcParams that should be different for the
67 inline backend."""
67 inline backend."""
68 )
68 )
69
69
70 figure_formats = Set({'png'}, config=True,
70 figure_formats = Set({'png'}, config=True,
71 help="""A set of figure formats to enable: 'png',
71 help="""A set of figure formats to enable: 'png',
72 'retina', 'jpeg', 'svg', 'pdf'.""")
72 'retina', 'jpeg', 'svg', 'pdf'.""")
73
73
74 def _update_figure_formatters(self):
74 def _update_figure_formatters(self):
75 if self.shell is not None:
75 if self.shell is not None:
76 from IPython.core.pylabtools import select_figure_formats
76 from IPython.core.pylabtools import select_figure_formats
77 select_figure_formats(self.shell, self.figure_formats, **self.print_figure_kwargs)
77 select_figure_formats(self.shell, self.figure_formats, **self.print_figure_kwargs)
78
78
79 def _figure_formats_changed(self, name, old, new):
79 def _figure_formats_changed(self, name, old, new):
80 if 'jpg' in new or 'jpeg' in new:
80 if 'jpg' in new or 'jpeg' in new:
81 if not pil_available():
81 if not pil_available():
82 raise TraitError("Requires PIL/Pillow for JPG figures")
82 raise TraitError("Requires PIL/Pillow for JPG figures")
83 self._update_figure_formatters()
83 self._update_figure_formatters()
84
84
85 figure_format = Unicode(config=True, help="""The figure format to enable (deprecated
85 figure_format = Unicode(config=True, help="""The figure format to enable (deprecated
86 use `figure_formats` instead)""")
86 use `figure_formats` instead)""")
87
87
88 def _figure_format_changed(self, name, old, new):
88 def _figure_format_changed(self, name, old, new):
89 if new:
89 if new:
90 self.figure_formats = {new}
90 self.figure_formats = {new}
91
91
92 print_figure_kwargs = Dict({'bbox_inches' : 'tight'}, config=True,
92 print_figure_kwargs = Dict({'bbox_inches' : 'tight'}, config=True,
93 help="""Extra kwargs to be passed to fig.canvas.print_figure.
93 help="""Extra kwargs to be passed to fig.canvas.print_figure.
94
94
95 Logical examples include: bbox_inches, quality (for jpeg figures), etc.
95 Logical examples include: bbox_inches, quality (for jpeg figures), etc.
96 """
96 """
97 )
97 )
98 _print_figure_kwargs_changed = _update_figure_formatters
98 _print_figure_kwargs_changed = _update_figure_formatters
99
99
100 close_figures = Bool(True, config=True,
100 close_figures = Bool(True, config=True,
101 help="""Close all figures at the end of each cell.
101 help="""Close all figures at the end of each cell.
102
102
103 When True, ensures that each cell starts with no active figures, but it
103 When True, ensures that each cell starts with no active figures, but it
104 also means that one must keep track of references in order to edit or
104 also means that one must keep track of references in order to edit or
105 redraw figures in subsequent cells. This mode is ideal for the notebook,
105 redraw figures in subsequent cells. This mode is ideal for the notebook,
106 where residual plots from other cells might be surprising.
106 where residual plots from other cells might be surprising.
107
107
108 When False, one must call figure() to create new figures. This means
108 When False, one must call figure() to create new figures. This means
109 that gcf() and getfigs() can reference figures created in other cells,
109 that gcf() and getfigs() can reference figures created in other cells,
110 and the active figure can continue to be edited with pylab/pyplot
110 and the active figure can continue to be edited with pylab/pyplot
111 methods that reference the current active figure. This mode facilitates
111 methods that reference the current active figure. This mode facilitates
112 iterative editing of figures, and behaves most consistently with
112 iterative editing of figures, and behaves most consistently with
113 other matplotlib backends, but figure barriers between cells must
113 other matplotlib backends, but figure barriers between cells must
114 be explicit.
114 be explicit.
115 """)
115 """)
116
116
117 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
117 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
118 allow_none=True)
118 allow_none=True)
119
119
120
@@ -1,180 +1,179 b''
1 """serialization utilities for apply messages"""
1 """serialization utilities for apply messages"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 try:
6 try:
7 import cPickle
7 import cPickle
8 pickle = cPickle
8 pickle = cPickle
9 except:
9 except:
10 cPickle = None
10 cPickle = None
11 import pickle
11 import pickle
12
12
13 # IPython imports
13 # IPython imports
14 from IPython.utils.py3compat import PY3, buffer_to_bytes_py2
14 from IPython.utils.py3compat import PY3, buffer_to_bytes_py2
15 from IPython.utils.data import flatten
15 from IPython.utils.data import flatten
16 from IPython.utils.pickleutil import (
16 from IPython.utils.pickleutil import (
17 can, uncan, can_sequence, uncan_sequence, CannedObject,
17 can, uncan, can_sequence, uncan_sequence, CannedObject,
18 istype, sequence_types, PICKLE_PROTOCOL,
18 istype, sequence_types, PICKLE_PROTOCOL,
19 )
19 )
20 from .session import MAX_ITEMS, MAX_BYTES
20 from jupyter_client.session import MAX_ITEMS, MAX_BYTES
21
21
22
22
23 if PY3:
23 if PY3:
24 buffer = memoryview
24 buffer = memoryview
25
25
26 #-----------------------------------------------------------------------------
26 #-----------------------------------------------------------------------------
27 # Serialization Functions
27 # Serialization Functions
28 #-----------------------------------------------------------------------------
28 #-----------------------------------------------------------------------------
29
29
30
30
31 def _extract_buffers(obj, threshold=MAX_BYTES):
31 def _extract_buffers(obj, threshold=MAX_BYTES):
32 """extract buffers larger than a certain threshold"""
32 """extract buffers larger than a certain threshold"""
33 buffers = []
33 buffers = []
34 if isinstance(obj, CannedObject) and obj.buffers:
34 if isinstance(obj, CannedObject) and obj.buffers:
35 for i,buf in enumerate(obj.buffers):
35 for i,buf in enumerate(obj.buffers):
36 if len(buf) > threshold:
36 if len(buf) > threshold:
37 # buffer larger than threshold, prevent pickling
37 # buffer larger than threshold, prevent pickling
38 obj.buffers[i] = None
38 obj.buffers[i] = None
39 buffers.append(buf)
39 buffers.append(buf)
40 elif isinstance(buf, buffer):
40 elif isinstance(buf, buffer):
41 # buffer too small for separate send, coerce to bytes
41 # buffer too small for separate send, coerce to bytes
42 # because pickling buffer objects just results in broken pointers
42 # because pickling buffer objects just results in broken pointers
43 obj.buffers[i] = bytes(buf)
43 obj.buffers[i] = bytes(buf)
44 return buffers
44 return buffers
45
45
46 def _restore_buffers(obj, buffers):
46 def _restore_buffers(obj, buffers):
47 """restore buffers extracted by """
47 """restore buffers extracted by """
48 if isinstance(obj, CannedObject) and obj.buffers:
48 if isinstance(obj, CannedObject) and obj.buffers:
49 for i,buf in enumerate(obj.buffers):
49 for i,buf in enumerate(obj.buffers):
50 if buf is None:
50 if buf is None:
51 obj.buffers[i] = buffers.pop(0)
51 obj.buffers[i] = buffers.pop(0)
52
52
53 def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
53 def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
54 """Serialize an object into a list of sendable buffers.
54 """Serialize an object into a list of sendable buffers.
55
55
56 Parameters
56 Parameters
57 ----------
57 ----------
58
58
59 obj : object
59 obj : object
60 The object to be serialized
60 The object to be serialized
61 buffer_threshold : int
61 buffer_threshold : int
62 The threshold (in bytes) for pulling out data buffers
62 The threshold (in bytes) for pulling out data buffers
63 to avoid pickling them.
63 to avoid pickling them.
64 item_threshold : int
64 item_threshold : int
65 The maximum number of items over which canning will iterate.
65 The maximum number of items over which canning will iterate.
66 Containers (lists, dicts) larger than this will be pickled without
66 Containers (lists, dicts) larger than this will be pickled without
67 introspection.
67 introspection.
68
68
69 Returns
69 Returns
70 -------
70 -------
71 [bufs] : list of buffers representing the serialized object.
71 [bufs] : list of buffers representing the serialized object.
72 """
72 """
73 buffers = []
73 buffers = []
74 if istype(obj, sequence_types) and len(obj) < item_threshold:
74 if istype(obj, sequence_types) and len(obj) < item_threshold:
75 cobj = can_sequence(obj)
75 cobj = can_sequence(obj)
76 for c in cobj:
76 for c in cobj:
77 buffers.extend(_extract_buffers(c, buffer_threshold))
77 buffers.extend(_extract_buffers(c, buffer_threshold))
78 elif istype(obj, dict) and len(obj) < item_threshold:
78 elif istype(obj, dict) and len(obj) < item_threshold:
79 cobj = {}
79 cobj = {}
80 for k in sorted(obj):
80 for k in sorted(obj):
81 c = can(obj[k])
81 c = can(obj[k])
82 buffers.extend(_extract_buffers(c, buffer_threshold))
82 buffers.extend(_extract_buffers(c, buffer_threshold))
83 cobj[k] = c
83 cobj[k] = c
84 else:
84 else:
85 cobj = can(obj)
85 cobj = can(obj)
86 buffers.extend(_extract_buffers(cobj, buffer_threshold))
86 buffers.extend(_extract_buffers(cobj, buffer_threshold))
87
87
88 buffers.insert(0, pickle.dumps(cobj, PICKLE_PROTOCOL))
88 buffers.insert(0, pickle.dumps(cobj, PICKLE_PROTOCOL))
89 return buffers
89 return buffers
90
90
91 def deserialize_object(buffers, g=None):
91 def deserialize_object(buffers, g=None):
92 """reconstruct an object serialized by serialize_object from data buffers.
92 """reconstruct an object serialized by serialize_object from data buffers.
93
93
94 Parameters
94 Parameters
95 ----------
95 ----------
96
96
97 bufs : list of buffers/bytes
97 bufs : list of buffers/bytes
98
98
99 g : globals to be used when uncanning
99 g : globals to be used when uncanning
100
100
101 Returns
101 Returns
102 -------
102 -------
103
103
104 (newobj, bufs) : unpacked object, and the list of remaining unused buffers.
104 (newobj, bufs) : unpacked object, and the list of remaining unused buffers.
105 """
105 """
106 bufs = list(buffers)
106 bufs = list(buffers)
107 pobj = buffer_to_bytes_py2(bufs.pop(0))
107 pobj = buffer_to_bytes_py2(bufs.pop(0))
108 canned = pickle.loads(pobj)
108 canned = pickle.loads(pobj)
109 if istype(canned, sequence_types) and len(canned) < MAX_ITEMS:
109 if istype(canned, sequence_types) and len(canned) < MAX_ITEMS:
110 for c in canned:
110 for c in canned:
111 _restore_buffers(c, bufs)
111 _restore_buffers(c, bufs)
112 newobj = uncan_sequence(canned, g)
112 newobj = uncan_sequence(canned, g)
113 elif istype(canned, dict) and len(canned) < MAX_ITEMS:
113 elif istype(canned, dict) and len(canned) < MAX_ITEMS:
114 newobj = {}
114 newobj = {}
115 for k in sorted(canned):
115 for k in sorted(canned):
116 c = canned[k]
116 c = canned[k]
117 _restore_buffers(c, bufs)
117 _restore_buffers(c, bufs)
118 newobj[k] = uncan(c, g)
118 newobj[k] = uncan(c, g)
119 else:
119 else:
120 _restore_buffers(canned, bufs)
120 _restore_buffers(canned, bufs)
121 newobj = uncan(canned, g)
121 newobj = uncan(canned, g)
122
122
123 return newobj, bufs
123 return newobj, bufs
124
124
125 def pack_apply_message(f, args, kwargs, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
125 def pack_apply_message(f, args, kwargs, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS):
126 """pack up a function, args, and kwargs to be sent over the wire
126 """pack up a function, args, and kwargs to be sent over the wire
127
127
128 Each element of args/kwargs will be canned for special treatment,
128 Each element of args/kwargs will be canned for special treatment,
129 but inspection will not go any deeper than that.
129 but inspection will not go any deeper than that.
130
130
131 Any object whose data is larger than `threshold` will not have their data copied
131 Any object whose data is larger than `threshold` will not have their data copied
132 (only numpy arrays and bytes/buffers support zero-copy)
132 (only numpy arrays and bytes/buffers support zero-copy)
133
133
134 Message will be a list of bytes/buffers of the format:
134 Message will be a list of bytes/buffers of the format:
135
135
136 [ cf, pinfo, <arg_bufs>, <kwarg_bufs> ]
136 [ cf, pinfo, <arg_bufs>, <kwarg_bufs> ]
137
137
138 With length at least two + len(args) + len(kwargs)
138 With length at least two + len(args) + len(kwargs)
139 """
139 """
140
140
141 arg_bufs = flatten(serialize_object(arg, buffer_threshold, item_threshold) for arg in args)
141 arg_bufs = flatten(serialize_object(arg, buffer_threshold, item_threshold) for arg in args)
142
142
143 kw_keys = sorted(kwargs.keys())
143 kw_keys = sorted(kwargs.keys())
144 kwarg_bufs = flatten(serialize_object(kwargs[key], buffer_threshold, item_threshold) for key in kw_keys)
144 kwarg_bufs = flatten(serialize_object(kwargs[key], buffer_threshold, item_threshold) for key in kw_keys)
145
145
146 info = dict(nargs=len(args), narg_bufs=len(arg_bufs), kw_keys=kw_keys)
146 info = dict(nargs=len(args), narg_bufs=len(arg_bufs), kw_keys=kw_keys)
147
147
148 msg = [pickle.dumps(can(f), PICKLE_PROTOCOL)]
148 msg = [pickle.dumps(can(f), PICKLE_PROTOCOL)]
149 msg.append(pickle.dumps(info, PICKLE_PROTOCOL))
149 msg.append(pickle.dumps(info, PICKLE_PROTOCOL))
150 msg.extend(arg_bufs)
150 msg.extend(arg_bufs)
151 msg.extend(kwarg_bufs)
151 msg.extend(kwarg_bufs)
152
152
153 return msg
153 return msg
154
154
155 def unpack_apply_message(bufs, g=None, copy=True):
155 def unpack_apply_message(bufs, g=None, copy=True):
156 """unpack f,args,kwargs from buffers packed by pack_apply_message()
156 """unpack f,args,kwargs from buffers packed by pack_apply_message()
157 Returns: original f,args,kwargs"""
157 Returns: original f,args,kwargs"""
158 bufs = list(bufs) # allow us to pop
158 bufs = list(bufs) # allow us to pop
159 assert len(bufs) >= 2, "not enough buffers!"
159 assert len(bufs) >= 2, "not enough buffers!"
160 pf = buffer_to_bytes_py2(bufs.pop(0))
160 pf = buffer_to_bytes_py2(bufs.pop(0))
161 f = uncan(pickle.loads(pf), g)
161 f = uncan(pickle.loads(pf), g)
162 pinfo = buffer_to_bytes_py2(bufs.pop(0))
162 pinfo = buffer_to_bytes_py2(bufs.pop(0))
163 info = pickle.loads(pinfo)
163 info = pickle.loads(pinfo)
164 arg_bufs, kwarg_bufs = bufs[:info['narg_bufs']], bufs[info['narg_bufs']:]
164 arg_bufs, kwarg_bufs = bufs[:info['narg_bufs']], bufs[info['narg_bufs']:]
165
165
166 args = []
166 args = []
167 for i in range(info['nargs']):
167 for i in range(info['nargs']):
168 arg, arg_bufs = deserialize_object(arg_bufs, g)
168 arg, arg_bufs = deserialize_object(arg_bufs, g)
169 args.append(arg)
169 args.append(arg)
170 args = tuple(args)
170 args = tuple(args)
171 assert not arg_bufs, "Shouldn't be any arg bufs left over"
171 assert not arg_bufs, "Shouldn't be any arg bufs left over"
172
172
173 kwargs = {}
173 kwargs = {}
174 for key in info['kw_keys']:
174 for key in info['kw_keys']:
175 kwarg, kwarg_bufs = deserialize_object(kwarg_bufs, g)
175 kwarg, kwarg_bufs = deserialize_object(kwarg_bufs, g)
176 kwargs[key] = kwarg
176 kwargs[key] = kwarg
177 assert not kwarg_bufs, "Shouldn't be any kwarg bufs left over"
177 assert not kwarg_bufs, "Shouldn't be any kwarg bufs left over"
178
179 return f,args,kwargs
180
178
179 return f,args,kwargs
1 NO CONTENT: file renamed from IPython/kernel/zmq/tests/__init__.py to ipython_kernel/zmq/tests/__init__.py
NO CONTENT: file renamed from IPython/kernel/zmq/tests/__init__.py to ipython_kernel/zmq/tests/__init__.py
@@ -1,199 +1,197 b''
1 """test IPython.embed_kernel()"""
1 """test IPython.embed_kernel()"""
2
2
3 #-------------------------------------------------------------------------------
3 #-------------------------------------------------------------------------------
4 # Copyright (C) 2012 The IPython Development Team
4 # Copyright (C) 2012 The IPython Development Team
5 #
5 #
6 # Distributed under the terms of the BSD License. The full license is in
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
7 # the file COPYING, distributed as part of this software.
8 #-------------------------------------------------------------------------------
8 #-------------------------------------------------------------------------------
9
9
10 #-------------------------------------------------------------------------------
10 #-------------------------------------------------------------------------------
11 # Imports
11 # Imports
12 #-------------------------------------------------------------------------------
12 #-------------------------------------------------------------------------------
13
13
14 import os
14 import os
15 import shutil
15 import shutil
16 import sys
16 import sys
17 import tempfile
17 import tempfile
18 import time
18 import time
19
19
20 from contextlib import contextmanager
20 from contextlib import contextmanager
21 from subprocess import Popen, PIPE
21 from subprocess import Popen, PIPE
22
22
23 import nose.tools as nt
23 import nose.tools as nt
24
24
25 from IPython.kernel import BlockingKernelClient
25 from jupyter_client import BlockingKernelClient
26 from IPython.utils import path, py3compat
26 from IPython.utils import path, py3compat
27 from IPython.utils.py3compat import unicode_type
27 from IPython.utils.py3compat import unicode_type
28
28
29 #-------------------------------------------------------------------------------
29 #-------------------------------------------------------------------------------
30 # Tests
30 # Tests
31 #-------------------------------------------------------------------------------
31 #-------------------------------------------------------------------------------
32
32
33 SETUP_TIMEOUT = 60
33 SETUP_TIMEOUT = 60
34 TIMEOUT = 15
34 TIMEOUT = 15
35
35
36 def setup():
36 def setup():
37 """setup temporary IPYTHONDIR for tests"""
37 """setup temporary IPYTHONDIR for tests"""
38 global IPYTHONDIR
38 global IPYTHONDIR
39 global env
39 global env
40 global save_get_ipython_dir
40 global save_get_ipython_dir
41
41
42 IPYTHONDIR = tempfile.mkdtemp()
42 IPYTHONDIR = tempfile.mkdtemp()
43
43
44 env = os.environ.copy()
44 env = os.environ.copy()
45 env["IPYTHONDIR"] = IPYTHONDIR
45 env["IPYTHONDIR"] = IPYTHONDIR
46
46
47 save_get_ipython_dir = path.get_ipython_dir
47 save_get_ipython_dir = path.get_ipython_dir
48 path.get_ipython_dir = lambda : IPYTHONDIR
48 path.get_ipython_dir = lambda : IPYTHONDIR
49
49
50
50
51 def teardown():
51 def teardown():
52 path.get_ipython_dir = save_get_ipython_dir
52 path.get_ipython_dir = save_get_ipython_dir
53
53
54 try:
54 try:
55 shutil.rmtree(IPYTHONDIR)
55 shutil.rmtree(IPYTHONDIR)
56 except (OSError, IOError):
56 except (OSError, IOError):
57 # no such file
57 # no such file
58 pass
58 pass
59
59
60
60
61 @contextmanager
61 @contextmanager
62 def setup_kernel(cmd):
62 def setup_kernel(cmd):
63 """start an embedded kernel in a subprocess, and wait for it to be ready
63 """start an embedded kernel in a subprocess, and wait for it to be ready
64
64
65 Returns
65 Returns
66 -------
66 -------
67 kernel_manager: connected KernelManager instance
67 kernel_manager: connected KernelManager instance
68 """
68 """
69 kernel = Popen([sys.executable, '-c', cmd], stdout=PIPE, stderr=PIPE, env=env)
69 kernel = Popen([sys.executable, '-c', cmd], stdout=PIPE, stderr=PIPE, env=env)
70 connection_file = os.path.join(IPYTHONDIR,
70 connection_file = os.path.join(IPYTHONDIR,
71 'profile_default',
71 'profile_default',
72 'security',
72 'security',
73 'kernel-%i.json' % kernel.pid
73 'kernel-%i.json' % kernel.pid
74 )
74 )
75 # wait for connection file to exist, timeout after 5s
75 # wait for connection file to exist, timeout after 5s
76 tic = time.time()
76 tic = time.time()
77 while not os.path.exists(connection_file) \
77 while not os.path.exists(connection_file) \
78 and kernel.poll() is None \
78 and kernel.poll() is None \
79 and time.time() < tic + SETUP_TIMEOUT:
79 and time.time() < tic + SETUP_TIMEOUT:
80 time.sleep(0.1)
80 time.sleep(0.1)
81
81
82 if kernel.poll() is not None:
82 if kernel.poll() is not None:
83 o,e = kernel.communicate()
83 o,e = kernel.communicate()
84 e = py3compat.cast_unicode(e)
84 e = py3compat.cast_unicode(e)
85 raise IOError("Kernel failed to start:\n%s" % e)
85 raise IOError("Kernel failed to start:\n%s" % e)
86
86
87 if not os.path.exists(connection_file):
87 if not os.path.exists(connection_file):
88 if kernel.poll() is None:
88 if kernel.poll() is None:
89 kernel.terminate()
89 kernel.terminate()
90 raise IOError("Connection file %r never arrived" % connection_file)
90 raise IOError("Connection file %r never arrived" % connection_file)
91
91
92 client = BlockingKernelClient(connection_file=connection_file)
92 client = BlockingKernelClient(connection_file=connection_file)
93 client.load_connection_file()
93 client.load_connection_file()
94 client.start_channels()
94 client.start_channels()
95 client.wait_for_ready()
95 client.wait_for_ready()
96
96
97 try:
97 try:
98 yield client
98 yield client
99 finally:
99 finally:
100 client.stop_channels()
100 client.stop_channels()
101 kernel.terminate()
101 kernel.terminate()
102
102
103 def test_embed_kernel_basic():
103 def test_embed_kernel_basic():
104 """IPython.embed_kernel() is basically functional"""
104 """IPython.embed_kernel() is basically functional"""
105 cmd = '\n'.join([
105 cmd = '\n'.join([
106 'from IPython import embed_kernel',
106 'from IPython import embed_kernel',
107 'def go():',
107 'def go():',
108 ' a=5',
108 ' a=5',
109 ' b="hi there"',
109 ' b="hi there"',
110 ' embed_kernel()',
110 ' embed_kernel()',
111 'go()',
111 'go()',
112 '',
112 '',
113 ])
113 ])
114
114
115 with setup_kernel(cmd) as client:
115 with setup_kernel(cmd) as client:
116 # oinfo a (int)
116 # oinfo a (int)
117 msg_id = client.inspect('a')
117 msg_id = client.inspect('a')
118 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
118 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
119 content = msg['content']
119 content = msg['content']
120 nt.assert_true(content['found'])
120 nt.assert_true(content['found'])
121
121
122 msg_id = client.execute("c=a*2")
122 msg_id = client.execute("c=a*2")
123 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
123 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
124 content = msg['content']
124 content = msg['content']
125 nt.assert_equal(content['status'], u'ok')
125 nt.assert_equal(content['status'], u'ok')
126
126
127 # oinfo c (should be 10)
127 # oinfo c (should be 10)
128 msg_id = client.inspect('c')
128 msg_id = client.inspect('c')
129 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
129 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
130 content = msg['content']
130 content = msg['content']
131 nt.assert_true(content['found'])
131 nt.assert_true(content['found'])
132 text = content['data']['text/plain']
132 text = content['data']['text/plain']
133 nt.assert_in('10', text)
133 nt.assert_in('10', text)
134
134
135 def test_embed_kernel_namespace():
135 def test_embed_kernel_namespace():
136 """IPython.embed_kernel() inherits calling namespace"""
136 """IPython.embed_kernel() inherits calling namespace"""
137 cmd = '\n'.join([
137 cmd = '\n'.join([
138 'from IPython import embed_kernel',
138 'from IPython import embed_kernel',
139 'def go():',
139 'def go():',
140 ' a=5',
140 ' a=5',
141 ' b="hi there"',
141 ' b="hi there"',
142 ' embed_kernel()',
142 ' embed_kernel()',
143 'go()',
143 'go()',
144 '',
144 '',
145 ])
145 ])
146
146
147 with setup_kernel(cmd) as client:
147 with setup_kernel(cmd) as client:
148 # oinfo a (int)
148 # oinfo a (int)
149 msg_id = client.inspect('a')
149 msg_id = client.inspect('a')
150 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
150 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
151 content = msg['content']
151 content = msg['content']
152 nt.assert_true(content['found'])
152 nt.assert_true(content['found'])
153 text = content['data']['text/plain']
153 text = content['data']['text/plain']
154 nt.assert_in(u'5', text)
154 nt.assert_in(u'5', text)
155
155
156 # oinfo b (str)
156 # oinfo b (str)
157 msg_id = client.inspect('b')
157 msg_id = client.inspect('b')
158 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
158 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
159 content = msg['content']
159 content = msg['content']
160 nt.assert_true(content['found'])
160 nt.assert_true(content['found'])
161 text = content['data']['text/plain']
161 text = content['data']['text/plain']
162 nt.assert_in(u'hi there', text)
162 nt.assert_in(u'hi there', text)
163
163
164 # oinfo c (undefined)
164 # oinfo c (undefined)
165 msg_id = client.inspect('c')
165 msg_id = client.inspect('c')
166 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
166 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
167 content = msg['content']
167 content = msg['content']
168 nt.assert_false(content['found'])
168 nt.assert_false(content['found'])
169
169
170 def test_embed_kernel_reentrant():
170 def test_embed_kernel_reentrant():
171 """IPython.embed_kernel() can be called multiple times"""
171 """IPython.embed_kernel() can be called multiple times"""
172 cmd = '\n'.join([
172 cmd = '\n'.join([
173 'from IPython import embed_kernel',
173 'from IPython import embed_kernel',
174 'count = 0',
174 'count = 0',
175 'def go():',
175 'def go():',
176 ' global count',
176 ' global count',
177 ' embed_kernel()',
177 ' embed_kernel()',
178 ' count = count + 1',
178 ' count = count + 1',
179 '',
179 '',
180 'while True:'
180 'while True:'
181 ' go()',
181 ' go()',
182 '',
182 '',
183 ])
183 ])
184
184
185 with setup_kernel(cmd) as client:
185 with setup_kernel(cmd) as client:
186 for i in range(5):
186 for i in range(5):
187 msg_id = client.inspect('count')
187 msg_id = client.inspect('count')
188 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
188 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
189 content = msg['content']
189 content = msg['content']
190 nt.assert_true(content['found'])
190 nt.assert_true(content['found'])
191 text = content['data']['text/plain']
191 text = content['data']['text/plain']
192 nt.assert_in(unicode_type(i), text)
192 nt.assert_in(unicode_type(i), text)
193
193
194 # exit from embed_kernel
194 # exit from embed_kernel
195 client.execute("get_ipython().exit_now = True")
195 client.execute("get_ipython().exit_now = True")
196 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
196 msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
197 time.sleep(0.2)
197 time.sleep(0.2)
198
199
@@ -1,208 +1,208 b''
1 """test serialization tools"""
1 """test serialization tools"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import pickle
6 import pickle
7 from collections import namedtuple
7 from collections import namedtuple
8
8
9 import nose.tools as nt
9 import nose.tools as nt
10
10
11 # from unittest import TestCaes
11 # from unittest import TestCaes
12 from IPython.kernel.zmq.serialize import serialize_object, deserialize_object
12 from ipython_kernel.zmq.serialize import serialize_object, deserialize_object
13 from IPython.testing import decorators as dec
13 from IPython.testing import decorators as dec
14 from IPython.utils.pickleutil import CannedArray, CannedClass
14 from IPython.utils.pickleutil import CannedArray, CannedClass
15 from IPython.utils.py3compat import iteritems
15 from IPython.utils.py3compat import iteritems
16 from IPython.parallel import interactive
16 from IPython.parallel import interactive
17
17
18 #-------------------------------------------------------------------------------
18 #-------------------------------------------------------------------------------
19 # Globals and Utilities
19 # Globals and Utilities
20 #-------------------------------------------------------------------------------
20 #-------------------------------------------------------------------------------
21
21
22 def roundtrip(obj):
22 def roundtrip(obj):
23 """roundtrip an object through serialization"""
23 """roundtrip an object through serialization"""
24 bufs = serialize_object(obj)
24 bufs = serialize_object(obj)
25 obj2, remainder = deserialize_object(bufs)
25 obj2, remainder = deserialize_object(bufs)
26 nt.assert_equals(remainder, [])
26 nt.assert_equals(remainder, [])
27 return obj2
27 return obj2
28
28
29 class C(object):
29 class C(object):
30 """dummy class for """
30 """dummy class for """
31
31
32 def __init__(self, **kwargs):
32 def __init__(self, **kwargs):
33 for key,value in iteritems(kwargs):
33 for key,value in iteritems(kwargs):
34 setattr(self, key, value)
34 setattr(self, key, value)
35
35
36 SHAPES = ((100,), (1024,10), (10,8,6,5), (), (0,))
36 SHAPES = ((100,), (1024,10), (10,8,6,5), (), (0,))
37 DTYPES = ('uint8', 'float64', 'int32', [('g', 'float32')], '|S10')
37 DTYPES = ('uint8', 'float64', 'int32', [('g', 'float32')], '|S10')
38
38
39 #-------------------------------------------------------------------------------
39 #-------------------------------------------------------------------------------
40 # Tests
40 # Tests
41 #-------------------------------------------------------------------------------
41 #-------------------------------------------------------------------------------
42
42
43 def new_array(shape, dtype):
43 def new_array(shape, dtype):
44 import numpy
44 import numpy
45 return numpy.random.random(shape).astype(dtype)
45 return numpy.random.random(shape).astype(dtype)
46
46
47 def test_roundtrip_simple():
47 def test_roundtrip_simple():
48 for obj in [
48 for obj in [
49 'hello',
49 'hello',
50 dict(a='b', b=10),
50 dict(a='b', b=10),
51 [1,2,'hi'],
51 [1,2,'hi'],
52 (b'123', 'hello'),
52 (b'123', 'hello'),
53 ]:
53 ]:
54 obj2 = roundtrip(obj)
54 obj2 = roundtrip(obj)
55 nt.assert_equal(obj, obj2)
55 nt.assert_equal(obj, obj2)
56
56
57 def test_roundtrip_nested():
57 def test_roundtrip_nested():
58 for obj in [
58 for obj in [
59 dict(a=range(5), b={1:b'hello'}),
59 dict(a=range(5), b={1:b'hello'}),
60 [range(5),[range(3),(1,[b'whoda'])]],
60 [range(5),[range(3),(1,[b'whoda'])]],
61 ]:
61 ]:
62 obj2 = roundtrip(obj)
62 obj2 = roundtrip(obj)
63 nt.assert_equal(obj, obj2)
63 nt.assert_equal(obj, obj2)
64
64
65 def test_roundtrip_buffered():
65 def test_roundtrip_buffered():
66 for obj in [
66 for obj in [
67 dict(a=b"x"*1025),
67 dict(a=b"x"*1025),
68 b"hello"*500,
68 b"hello"*500,
69 [b"hello"*501, 1,2,3]
69 [b"hello"*501, 1,2,3]
70 ]:
70 ]:
71 bufs = serialize_object(obj)
71 bufs = serialize_object(obj)
72 nt.assert_equal(len(bufs), 2)
72 nt.assert_equal(len(bufs), 2)
73 obj2, remainder = deserialize_object(bufs)
73 obj2, remainder = deserialize_object(bufs)
74 nt.assert_equal(remainder, [])
74 nt.assert_equal(remainder, [])
75 nt.assert_equal(obj, obj2)
75 nt.assert_equal(obj, obj2)
76
76
77 @dec.skip_without('numpy')
77 @dec.skip_without('numpy')
78 def test_numpy():
78 def test_numpy():
79 import numpy
79 import numpy
80 from numpy.testing.utils import assert_array_equal
80 from numpy.testing.utils import assert_array_equal
81 for shape in SHAPES:
81 for shape in SHAPES:
82 for dtype in DTYPES:
82 for dtype in DTYPES:
83 A = new_array(shape, dtype=dtype)
83 A = new_array(shape, dtype=dtype)
84 bufs = serialize_object(A)
84 bufs = serialize_object(A)
85 B, r = deserialize_object(bufs)
85 B, r = deserialize_object(bufs)
86 nt.assert_equal(r, [])
86 nt.assert_equal(r, [])
87 nt.assert_equal(A.shape, B.shape)
87 nt.assert_equal(A.shape, B.shape)
88 nt.assert_equal(A.dtype, B.dtype)
88 nt.assert_equal(A.dtype, B.dtype)
89 assert_array_equal(A,B)
89 assert_array_equal(A,B)
90
90
91 @dec.skip_without('numpy')
91 @dec.skip_without('numpy')
92 def test_recarray():
92 def test_recarray():
93 import numpy
93 import numpy
94 from numpy.testing.utils import assert_array_equal
94 from numpy.testing.utils import assert_array_equal
95 for shape in SHAPES:
95 for shape in SHAPES:
96 for dtype in [
96 for dtype in [
97 [('f', float), ('s', '|S10')],
97 [('f', float), ('s', '|S10')],
98 [('n', int), ('s', '|S1'), ('u', 'uint32')],
98 [('n', int), ('s', '|S1'), ('u', 'uint32')],
99 ]:
99 ]:
100 A = new_array(shape, dtype=dtype)
100 A = new_array(shape, dtype=dtype)
101
101
102 bufs = serialize_object(A)
102 bufs = serialize_object(A)
103 B, r = deserialize_object(bufs)
103 B, r = deserialize_object(bufs)
104 nt.assert_equal(r, [])
104 nt.assert_equal(r, [])
105 nt.assert_equal(A.shape, B.shape)
105 nt.assert_equal(A.shape, B.shape)
106 nt.assert_equal(A.dtype, B.dtype)
106 nt.assert_equal(A.dtype, B.dtype)
107 assert_array_equal(A,B)
107 assert_array_equal(A,B)
108
108
109 @dec.skip_without('numpy')
109 @dec.skip_without('numpy')
110 def test_numpy_in_seq():
110 def test_numpy_in_seq():
111 import numpy
111 import numpy
112 from numpy.testing.utils import assert_array_equal
112 from numpy.testing.utils import assert_array_equal
113 for shape in SHAPES:
113 for shape in SHAPES:
114 for dtype in DTYPES:
114 for dtype in DTYPES:
115 A = new_array(shape, dtype=dtype)
115 A = new_array(shape, dtype=dtype)
116 bufs = serialize_object((A,1,2,b'hello'))
116 bufs = serialize_object((A,1,2,b'hello'))
117 canned = pickle.loads(bufs[0])
117 canned = pickle.loads(bufs[0])
118 nt.assert_is_instance(canned[0], CannedArray)
118 nt.assert_is_instance(canned[0], CannedArray)
119 tup, r = deserialize_object(bufs)
119 tup, r = deserialize_object(bufs)
120 B = tup[0]
120 B = tup[0]
121 nt.assert_equal(r, [])
121 nt.assert_equal(r, [])
122 nt.assert_equal(A.shape, B.shape)
122 nt.assert_equal(A.shape, B.shape)
123 nt.assert_equal(A.dtype, B.dtype)
123 nt.assert_equal(A.dtype, B.dtype)
124 assert_array_equal(A,B)
124 assert_array_equal(A,B)
125
125
126 @dec.skip_without('numpy')
126 @dec.skip_without('numpy')
127 def test_numpy_in_dict():
127 def test_numpy_in_dict():
128 import numpy
128 import numpy
129 from numpy.testing.utils import assert_array_equal
129 from numpy.testing.utils import assert_array_equal
130 for shape in SHAPES:
130 for shape in SHAPES:
131 for dtype in DTYPES:
131 for dtype in DTYPES:
132 A = new_array(shape, dtype=dtype)
132 A = new_array(shape, dtype=dtype)
133 bufs = serialize_object(dict(a=A,b=1,c=range(20)))
133 bufs = serialize_object(dict(a=A,b=1,c=range(20)))
134 canned = pickle.loads(bufs[0])
134 canned = pickle.loads(bufs[0])
135 nt.assert_is_instance(canned['a'], CannedArray)
135 nt.assert_is_instance(canned['a'], CannedArray)
136 d, r = deserialize_object(bufs)
136 d, r = deserialize_object(bufs)
137 B = d['a']
137 B = d['a']
138 nt.assert_equal(r, [])
138 nt.assert_equal(r, [])
139 nt.assert_equal(A.shape, B.shape)
139 nt.assert_equal(A.shape, B.shape)
140 nt.assert_equal(A.dtype, B.dtype)
140 nt.assert_equal(A.dtype, B.dtype)
141 assert_array_equal(A,B)
141 assert_array_equal(A,B)
142
142
143 def test_class():
143 def test_class():
144 @interactive
144 @interactive
145 class C(object):
145 class C(object):
146 a=5
146 a=5
147 bufs = serialize_object(dict(C=C))
147 bufs = serialize_object(dict(C=C))
148 canned = pickle.loads(bufs[0])
148 canned = pickle.loads(bufs[0])
149 nt.assert_is_instance(canned['C'], CannedClass)
149 nt.assert_is_instance(canned['C'], CannedClass)
150 d, r = deserialize_object(bufs)
150 d, r = deserialize_object(bufs)
151 C2 = d['C']
151 C2 = d['C']
152 nt.assert_equal(C2.a, C.a)
152 nt.assert_equal(C2.a, C.a)
153
153
154 def test_class_oldstyle():
154 def test_class_oldstyle():
155 @interactive
155 @interactive
156 class C:
156 class C:
157 a=5
157 a=5
158
158
159 bufs = serialize_object(dict(C=C))
159 bufs = serialize_object(dict(C=C))
160 canned = pickle.loads(bufs[0])
160 canned = pickle.loads(bufs[0])
161 nt.assert_is_instance(canned['C'], CannedClass)
161 nt.assert_is_instance(canned['C'], CannedClass)
162 d, r = deserialize_object(bufs)
162 d, r = deserialize_object(bufs)
163 C2 = d['C']
163 C2 = d['C']
164 nt.assert_equal(C2.a, C.a)
164 nt.assert_equal(C2.a, C.a)
165
165
166 def test_tuple():
166 def test_tuple():
167 tup = (lambda x:x, 1)
167 tup = (lambda x:x, 1)
168 bufs = serialize_object(tup)
168 bufs = serialize_object(tup)
169 canned = pickle.loads(bufs[0])
169 canned = pickle.loads(bufs[0])
170 nt.assert_is_instance(canned, tuple)
170 nt.assert_is_instance(canned, tuple)
171 t2, r = deserialize_object(bufs)
171 t2, r = deserialize_object(bufs)
172 nt.assert_equal(t2[0](t2[1]), tup[0](tup[1]))
172 nt.assert_equal(t2[0](t2[1]), tup[0](tup[1]))
173
173
174 point = namedtuple('point', 'x y')
174 point = namedtuple('point', 'x y')
175
175
176 def test_namedtuple():
176 def test_namedtuple():
177 p = point(1,2)
177 p = point(1,2)
178 bufs = serialize_object(p)
178 bufs = serialize_object(p)
179 canned = pickle.loads(bufs[0])
179 canned = pickle.loads(bufs[0])
180 nt.assert_is_instance(canned, point)
180 nt.assert_is_instance(canned, point)
181 p2, r = deserialize_object(bufs, globals())
181 p2, r = deserialize_object(bufs, globals())
182 nt.assert_equal(p2.x, p.x)
182 nt.assert_equal(p2.x, p.x)
183 nt.assert_equal(p2.y, p.y)
183 nt.assert_equal(p2.y, p.y)
184
184
185 def test_list():
185 def test_list():
186 lis = [lambda x:x, 1]
186 lis = [lambda x:x, 1]
187 bufs = serialize_object(lis)
187 bufs = serialize_object(lis)
188 canned = pickle.loads(bufs[0])
188 canned = pickle.loads(bufs[0])
189 nt.assert_is_instance(canned, list)
189 nt.assert_is_instance(canned, list)
190 l2, r = deserialize_object(bufs)
190 l2, r = deserialize_object(bufs)
191 nt.assert_equal(l2[0](l2[1]), lis[0](lis[1]))
191 nt.assert_equal(l2[0](l2[1]), lis[0](lis[1]))
192
192
193 def test_class_inheritance():
193 def test_class_inheritance():
194 @interactive
194 @interactive
195 class C(object):
195 class C(object):
196 a=5
196 a=5
197
197
198 @interactive
198 @interactive
199 class D(C):
199 class D(C):
200 b=10
200 b=10
201
201
202 bufs = serialize_object(dict(D=D))
202 bufs = serialize_object(dict(D=D))
203 canned = pickle.loads(bufs[0])
203 canned = pickle.loads(bufs[0])
204 nt.assert_is_instance(canned['D'], CannedClass)
204 nt.assert_is_instance(canned['D'], CannedClass)
205 d, r = deserialize_object(bufs)
205 d, r = deserialize_object(bufs)
206 D2 = d['D']
206 D2 = d['D']
207 nt.assert_equal(D2.a, D.a)
207 nt.assert_equal(D2.a, D.a)
208 nt.assert_equal(D2.b, D.b)
208 nt.assert_equal(D2.b, D.b)
1 NO CONTENT: file renamed from IPython/kernel/zmq/tests/test_start_kernel.py to ipython_kernel/zmq/tests/test_start_kernel.py
NO CONTENT: file renamed from IPython/kernel/zmq/tests/test_start_kernel.py to ipython_kernel/zmq/tests/test_start_kernel.py
@@ -1,486 +1,486 b''
1 """A ZMQ-based subclass of InteractiveShell.
1 """A ZMQ-based subclass of InteractiveShell.
2
2
3 This code is meant to ease the refactoring of the base InteractiveShell into
3 This code is meant to ease the refactoring of the base InteractiveShell into
4 something with a cleaner architecture for 2-process use, without actually
4 something with a cleaner architecture for 2-process use, without actually
5 breaking InteractiveShell itself. So we're doing something a bit ugly, where
5 breaking InteractiveShell itself. So we're doing something a bit ugly, where
6 we subclass and override what we want to fix. Once this is working well, we
6 we subclass and override what we want to fix. Once this is working well, we
7 can go back to the base class and refactor the code for a cleaner inheritance
7 can go back to the base class and refactor the code for a cleaner inheritance
8 implementation that doesn't rely on so much monkeypatching.
8 implementation that doesn't rely on so much monkeypatching.
9
9
10 But this lets us maintain a fully working IPython as we develop the new
10 But this lets us maintain a fully working IPython as we develop the new
11 machinery. This should thus be thought of as scaffolding.
11 machinery. This should thus be thought of as scaffolding.
12 """
12 """
13
13
14 # Copyright (c) IPython Development Team.
14 # Copyright (c) IPython Development Team.
15 # Distributed under the terms of the Modified BSD License.
15 # Distributed under the terms of the Modified BSD License.
16
16
17 from __future__ import print_function
17 from __future__ import print_function
18
18
19 import os
19 import os
20 import sys
20 import sys
21 import time
21 import time
22
22
23 from zmq.eventloop import ioloop
23 from zmq.eventloop import ioloop
24
24
25 from IPython.core.interactiveshell import (
25 from IPython.core.interactiveshell import (
26 InteractiveShell, InteractiveShellABC
26 InteractiveShell, InteractiveShellABC
27 )
27 )
28 from IPython.core import page
28 from IPython.core import page
29 from IPython.core.autocall import ZMQExitAutocall
29 from IPython.core.autocall import ZMQExitAutocall
30 from IPython.core.displaypub import DisplayPublisher
30 from IPython.core.displaypub import DisplayPublisher
31 from IPython.core.error import UsageError
31 from IPython.core.error import UsageError
32 from IPython.core.magics import MacroToEdit, CodeMagics
32 from IPython.core.magics import MacroToEdit, CodeMagics
33 from IPython.core.magic import magics_class, line_magic, Magics
33 from IPython.core.magic import magics_class, line_magic, Magics
34 from IPython.core import payloadpage
34 from IPython.core import payloadpage
35 from IPython.core.usage import default_gui_banner
35 from IPython.core.usage import default_gui_banner
36 from IPython.display import display, Javascript
36 from IPython.display import display, Javascript
37 from IPython.kernel.inprocess.socket import SocketABC
37 from ipython_kernel.inprocess.socket import SocketABC
38 from IPython.kernel import (
38 from ipython_kernel import (
39 get_connection_file, get_connection_info, connect_qtconsole
39 get_connection_file, get_connection_info, connect_qtconsole
40 )
40 )
41 from IPython.testing.skipdoctest import skip_doctest
41 from IPython.testing.skipdoctest import skip_doctest
42 from IPython.utils import openpy
42 from IPython.utils import openpy
43 from IPython.utils.jsonutil import json_clean, encode_images
43 from IPython.utils.jsonutil import json_clean, encode_images
44 from IPython.utils.process import arg_split
44 from IPython.utils.process import arg_split
45 from IPython.utils import py3compat
45 from IPython.utils import py3compat
46 from IPython.utils.py3compat import unicode_type
46 from IPython.utils.py3compat import unicode_type
47 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes, Any
47 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes, Any
48 from IPython.utils.warn import error
48 from IPython.utils.warn import error
49 from IPython.kernel.zmq.displayhook import ZMQShellDisplayHook
49 from ipython_kernel.zmq.displayhook import ZMQShellDisplayHook
50 from IPython.kernel.zmq.datapub import ZMQDataPublisher
50 from ipython_kernel.zmq.datapub import ZMQDataPublisher
51 from IPython.kernel.zmq.session import extract_header
51 from ipython_kernel.zmq.session import extract_header
52 from .session import Session
52 from .session import Session
53
53
54 #-----------------------------------------------------------------------------
54 #-----------------------------------------------------------------------------
55 # Functions and classes
55 # Functions and classes
56 #-----------------------------------------------------------------------------
56 #-----------------------------------------------------------------------------
57
57
58 class ZMQDisplayPublisher(DisplayPublisher):
58 class ZMQDisplayPublisher(DisplayPublisher):
59 """A display publisher that publishes data using a ZeroMQ PUB socket."""
59 """A display publisher that publishes data using a ZeroMQ PUB socket."""
60
60
61 session = Instance(Session, allow_none=True)
61 session = Instance(Session, allow_none=True)
62 pub_socket = Instance(SocketABC, allow_none=True)
62 pub_socket = Instance(SocketABC, allow_none=True)
63 parent_header = Dict({})
63 parent_header = Dict({})
64 topic = CBytes(b'display_data')
64 topic = CBytes(b'display_data')
65
65
66 def set_parent(self, parent):
66 def set_parent(self, parent):
67 """Set the parent for outbound messages."""
67 """Set the parent for outbound messages."""
68 self.parent_header = extract_header(parent)
68 self.parent_header = extract_header(parent)
69
69
70 def _flush_streams(self):
70 def _flush_streams(self):
71 """flush IO Streams prior to display"""
71 """flush IO Streams prior to display"""
72 sys.stdout.flush()
72 sys.stdout.flush()
73 sys.stderr.flush()
73 sys.stderr.flush()
74
74
75 def publish(self, data, metadata=None, source=None):
75 def publish(self, data, metadata=None, source=None):
76 self._flush_streams()
76 self._flush_streams()
77 if metadata is None:
77 if metadata is None:
78 metadata = {}
78 metadata = {}
79 self._validate_data(data, metadata)
79 self._validate_data(data, metadata)
80 content = {}
80 content = {}
81 content['data'] = encode_images(data)
81 content['data'] = encode_images(data)
82 content['metadata'] = metadata
82 content['metadata'] = metadata
83 self.session.send(
83 self.session.send(
84 self.pub_socket, u'display_data', json_clean(content),
84 self.pub_socket, u'display_data', json_clean(content),
85 parent=self.parent_header, ident=self.topic,
85 parent=self.parent_header, ident=self.topic,
86 )
86 )
87
87
88 def clear_output(self, wait=False):
88 def clear_output(self, wait=False):
89 content = dict(wait=wait)
89 content = dict(wait=wait)
90 self._flush_streams()
90 self._flush_streams()
91 self.session.send(
91 self.session.send(
92 self.pub_socket, u'clear_output', content,
92 self.pub_socket, u'clear_output', content,
93 parent=self.parent_header, ident=self.topic,
93 parent=self.parent_header, ident=self.topic,
94 )
94 )
95
95
96 @magics_class
96 @magics_class
97 class KernelMagics(Magics):
97 class KernelMagics(Magics):
98 #------------------------------------------------------------------------
98 #------------------------------------------------------------------------
99 # Magic overrides
99 # Magic overrides
100 #------------------------------------------------------------------------
100 #------------------------------------------------------------------------
101 # Once the base class stops inheriting from magic, this code needs to be
101 # Once the base class stops inheriting from magic, this code needs to be
102 # moved into a separate machinery as well. For now, at least isolate here
102 # moved into a separate machinery as well. For now, at least isolate here
103 # the magics which this class needs to implement differently from the base
103 # the magics which this class needs to implement differently from the base
104 # class, or that are unique to it.
104 # class, or that are unique to it.
105
105
106 _find_edit_target = CodeMagics._find_edit_target
106 _find_edit_target = CodeMagics._find_edit_target
107
107
108 @skip_doctest
108 @skip_doctest
109 @line_magic
109 @line_magic
110 def edit(self, parameter_s='', last_call=['','']):
110 def edit(self, parameter_s='', last_call=['','']):
111 """Bring up an editor and execute the resulting code.
111 """Bring up an editor and execute the resulting code.
112
112
113 Usage:
113 Usage:
114 %edit [options] [args]
114 %edit [options] [args]
115
115
116 %edit runs an external text editor. You will need to set the command for
116 %edit runs an external text editor. You will need to set the command for
117 this editor via the ``TerminalInteractiveShell.editor`` option in your
117 this editor via the ``TerminalInteractiveShell.editor`` option in your
118 configuration file before it will work.
118 configuration file before it will work.
119
119
120 This command allows you to conveniently edit multi-line code right in
120 This command allows you to conveniently edit multi-line code right in
121 your IPython session.
121 your IPython session.
122
122
123 If called without arguments, %edit opens up an empty editor with a
123 If called without arguments, %edit opens up an empty editor with a
124 temporary file and will execute the contents of this file when you
124 temporary file and will execute the contents of this file when you
125 close it (don't forget to save it!).
125 close it (don't forget to save it!).
126
126
127 Options:
127 Options:
128
128
129 -n <number>
129 -n <number>
130 Open the editor at a specified line number. By default, the IPython
130 Open the editor at a specified line number. By default, the IPython
131 editor hook uses the unix syntax 'editor +N filename', but you can
131 editor hook uses the unix syntax 'editor +N filename', but you can
132 configure this by providing your own modified hook if your favorite
132 configure this by providing your own modified hook if your favorite
133 editor supports line-number specifications with a different syntax.
133 editor supports line-number specifications with a different syntax.
134
134
135 -p
135 -p
136 Call the editor with the same data as the previous time it was used,
136 Call the editor with the same data as the previous time it was used,
137 regardless of how long ago (in your current session) it was.
137 regardless of how long ago (in your current session) it was.
138
138
139 -r
139 -r
140 Use 'raw' input. This option only applies to input taken from the
140 Use 'raw' input. This option only applies to input taken from the
141 user's history. By default, the 'processed' history is used, so that
141 user's history. By default, the 'processed' history is used, so that
142 magics are loaded in their transformed version to valid Python. If
142 magics are loaded in their transformed version to valid Python. If
143 this option is given, the raw input as typed as the command line is
143 this option is given, the raw input as typed as the command line is
144 used instead. When you exit the editor, it will be executed by
144 used instead. When you exit the editor, it will be executed by
145 IPython's own processor.
145 IPython's own processor.
146
146
147 Arguments:
147 Arguments:
148
148
149 If arguments are given, the following possibilites exist:
149 If arguments are given, the following possibilites exist:
150
150
151 - The arguments are numbers or pairs of colon-separated numbers (like
151 - The arguments are numbers or pairs of colon-separated numbers (like
152 1 4:8 9). These are interpreted as lines of previous input to be
152 1 4:8 9). These are interpreted as lines of previous input to be
153 loaded into the editor. The syntax is the same of the %macro command.
153 loaded into the editor. The syntax is the same of the %macro command.
154
154
155 - If the argument doesn't start with a number, it is evaluated as a
155 - If the argument doesn't start with a number, it is evaluated as a
156 variable and its contents loaded into the editor. You can thus edit
156 variable and its contents loaded into the editor. You can thus edit
157 any string which contains python code (including the result of
157 any string which contains python code (including the result of
158 previous edits).
158 previous edits).
159
159
160 - If the argument is the name of an object (other than a string),
160 - If the argument is the name of an object (other than a string),
161 IPython will try to locate the file where it was defined and open the
161 IPython will try to locate the file where it was defined and open the
162 editor at the point where it is defined. You can use ``%edit function``
162 editor at the point where it is defined. You can use ``%edit function``
163 to load an editor exactly at the point where 'function' is defined,
163 to load an editor exactly at the point where 'function' is defined,
164 edit it and have the file be executed automatically.
164 edit it and have the file be executed automatically.
165
165
166 If the object is a macro (see %macro for details), this opens up your
166 If the object is a macro (see %macro for details), this opens up your
167 specified editor with a temporary file containing the macro's data.
167 specified editor with a temporary file containing the macro's data.
168 Upon exit, the macro is reloaded with the contents of the file.
168 Upon exit, the macro is reloaded with the contents of the file.
169
169
170 Note: opening at an exact line is only supported under Unix, and some
170 Note: opening at an exact line is only supported under Unix, and some
171 editors (like kedit and gedit up to Gnome 2.8) do not understand the
171 editors (like kedit and gedit up to Gnome 2.8) do not understand the
172 '+NUMBER' parameter necessary for this feature. Good editors like
172 '+NUMBER' parameter necessary for this feature. Good editors like
173 (X)Emacs, vi, jed, pico and joe all do.
173 (X)Emacs, vi, jed, pico and joe all do.
174
174
175 - If the argument is not found as a variable, IPython will look for a
175 - If the argument is not found as a variable, IPython will look for a
176 file with that name (adding .py if necessary) and load it into the
176 file with that name (adding .py if necessary) and load it into the
177 editor. It will execute its contents with execfile() when you exit,
177 editor. It will execute its contents with execfile() when you exit,
178 loading any code in the file into your interactive namespace.
178 loading any code in the file into your interactive namespace.
179
179
180 Unlike in the terminal, this is designed to use a GUI editor, and we do
180 Unlike in the terminal, this is designed to use a GUI editor, and we do
181 not know when it has closed. So the file you edit will not be
181 not know when it has closed. So the file you edit will not be
182 automatically executed or printed.
182 automatically executed or printed.
183
183
184 Note that %edit is also available through the alias %ed.
184 Note that %edit is also available through the alias %ed.
185 """
185 """
186
186
187 opts,args = self.parse_options(parameter_s,'prn:')
187 opts,args = self.parse_options(parameter_s,'prn:')
188
188
189 try:
189 try:
190 filename, lineno, _ = CodeMagics._find_edit_target(self.shell, args, opts, last_call)
190 filename, lineno, _ = CodeMagics._find_edit_target(self.shell, args, opts, last_call)
191 except MacroToEdit as e:
191 except MacroToEdit as e:
192 # TODO: Implement macro editing over 2 processes.
192 # TODO: Implement macro editing over 2 processes.
193 print("Macro editing not yet implemented in 2-process model.")
193 print("Macro editing not yet implemented in 2-process model.")
194 return
194 return
195
195
196 # Make sure we send to the client an absolute path, in case the working
196 # Make sure we send to the client an absolute path, in case the working
197 # directory of client and kernel don't match
197 # directory of client and kernel don't match
198 filename = os.path.abspath(filename)
198 filename = os.path.abspath(filename)
199
199
200 payload = {
200 payload = {
201 'source' : 'edit_magic',
201 'source' : 'edit_magic',
202 'filename' : filename,
202 'filename' : filename,
203 'line_number' : lineno
203 'line_number' : lineno
204 }
204 }
205 self.shell.payload_manager.write_payload(payload)
205 self.shell.payload_manager.write_payload(payload)
206
206
207 # A few magics that are adapted to the specifics of using pexpect and a
207 # A few magics that are adapted to the specifics of using pexpect and a
208 # remote terminal
208 # remote terminal
209
209
210 @line_magic
210 @line_magic
211 def clear(self, arg_s):
211 def clear(self, arg_s):
212 """Clear the terminal."""
212 """Clear the terminal."""
213 if os.name == 'posix':
213 if os.name == 'posix':
214 self.shell.system("clear")
214 self.shell.system("clear")
215 else:
215 else:
216 self.shell.system("cls")
216 self.shell.system("cls")
217
217
218 if os.name == 'nt':
218 if os.name == 'nt':
219 # This is the usual name in windows
219 # This is the usual name in windows
220 cls = line_magic('cls')(clear)
220 cls = line_magic('cls')(clear)
221
221
222 # Terminal pagers won't work over pexpect, but we do have our own pager
222 # Terminal pagers won't work over pexpect, but we do have our own pager
223
223
224 @line_magic
224 @line_magic
225 def less(self, arg_s):
225 def less(self, arg_s):
226 """Show a file through the pager.
226 """Show a file through the pager.
227
227
228 Files ending in .py are syntax-highlighted."""
228 Files ending in .py are syntax-highlighted."""
229 if not arg_s:
229 if not arg_s:
230 raise UsageError('Missing filename.')
230 raise UsageError('Missing filename.')
231
231
232 if arg_s.endswith('.py'):
232 if arg_s.endswith('.py'):
233 cont = self.shell.pycolorize(openpy.read_py_file(arg_s, skip_encoding_cookie=False))
233 cont = self.shell.pycolorize(openpy.read_py_file(arg_s, skip_encoding_cookie=False))
234 else:
234 else:
235 cont = open(arg_s).read()
235 cont = open(arg_s).read()
236 page.page(cont)
236 page.page(cont)
237
237
238 more = line_magic('more')(less)
238 more = line_magic('more')(less)
239
239
240 # Man calls a pager, so we also need to redefine it
240 # Man calls a pager, so we also need to redefine it
241 if os.name == 'posix':
241 if os.name == 'posix':
242 @line_magic
242 @line_magic
243 def man(self, arg_s):
243 def man(self, arg_s):
244 """Find the man page for the given command and display in pager."""
244 """Find the man page for the given command and display in pager."""
245 page.page(self.shell.getoutput('man %s | col -b' % arg_s,
245 page.page(self.shell.getoutput('man %s | col -b' % arg_s,
246 split=False))
246 split=False))
247
247
248 @line_magic
248 @line_magic
249 def connect_info(self, arg_s):
249 def connect_info(self, arg_s):
250 """Print information for connecting other clients to this kernel
250 """Print information for connecting other clients to this kernel
251
251
252 It will print the contents of this session's connection file, as well as
252 It will print the contents of this session's connection file, as well as
253 shortcuts for local clients.
253 shortcuts for local clients.
254
254
255 In the simplest case, when called from the most recently launched kernel,
255 In the simplest case, when called from the most recently launched kernel,
256 secondary clients can be connected, simply with:
256 secondary clients can be connected, simply with:
257
257
258 $> ipython <app> --existing
258 $> ipython <app> --existing
259
259
260 """
260 """
261
261
262 from IPython.core.application import BaseIPythonApplication as BaseIPApp
262 from IPython.core.application import BaseIPythonApplication as BaseIPApp
263
263
264 if BaseIPApp.initialized():
264 if BaseIPApp.initialized():
265 app = BaseIPApp.instance()
265 app = BaseIPApp.instance()
266 security_dir = app.profile_dir.security_dir
266 security_dir = app.profile_dir.security_dir
267 profile = app.profile
267 profile = app.profile
268 else:
268 else:
269 profile = 'default'
269 profile = 'default'
270 security_dir = ''
270 security_dir = ''
271
271
272 try:
272 try:
273 connection_file = get_connection_file()
273 connection_file = get_connection_file()
274 info = get_connection_info(unpack=False)
274 info = get_connection_info(unpack=False)
275 except Exception as e:
275 except Exception as e:
276 error("Could not get connection info: %r" % e)
276 error("Could not get connection info: %r" % e)
277 return
277 return
278
278
279 # add profile flag for non-default profile
279 # add profile flag for non-default profile
280 profile_flag = "--profile %s" % profile if profile != 'default' else ""
280 profile_flag = "--profile %s" % profile if profile != 'default' else ""
281
281
282 # if it's in the security dir, truncate to basename
282 # if it's in the security dir, truncate to basename
283 if security_dir == os.path.dirname(connection_file):
283 if security_dir == os.path.dirname(connection_file):
284 connection_file = os.path.basename(connection_file)
284 connection_file = os.path.basename(connection_file)
285
285
286
286
287 print (info + '\n')
287 print (info + '\n')
288 print ("Paste the above JSON into a file, and connect with:\n"
288 print ("Paste the above JSON into a file, and connect with:\n"
289 " $> ipython <app> --existing <file>\n"
289 " $> ipython <app> --existing <file>\n"
290 "or, if you are local, you can connect with just:\n"
290 "or, if you are local, you can connect with just:\n"
291 " $> ipython <app> --existing {0} {1}\n"
291 " $> ipython <app> --existing {0} {1}\n"
292 "or even just:\n"
292 "or even just:\n"
293 " $> ipython <app> --existing {1}\n"
293 " $> ipython <app> --existing {1}\n"
294 "if this is the most recent IPython session you have started.".format(
294 "if this is the most recent IPython session you have started.".format(
295 connection_file, profile_flag
295 connection_file, profile_flag
296 )
296 )
297 )
297 )
298
298
299 @line_magic
299 @line_magic
300 def qtconsole(self, arg_s):
300 def qtconsole(self, arg_s):
301 """Open a qtconsole connected to this kernel.
301 """Open a qtconsole connected to this kernel.
302
302
303 Useful for connecting a qtconsole to running notebooks, for better
303 Useful for connecting a qtconsole to running notebooks, for better
304 debugging.
304 debugging.
305 """
305 """
306
306
307 # %qtconsole should imply bind_kernel for engines:
307 # %qtconsole should imply bind_kernel for engines:
308 try:
308 try:
309 from IPython.parallel import bind_kernel
309 from IPython.parallel import bind_kernel
310 except ImportError:
310 except ImportError:
311 # technically possible, because parallel has higher pyzmq min-version
311 # technically possible, because parallel has higher pyzmq min-version
312 pass
312 pass
313 else:
313 else:
314 bind_kernel()
314 bind_kernel()
315
315
316 try:
316 try:
317 p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix'))
317 p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix'))
318 except Exception as e:
318 except Exception as e:
319 error("Could not start qtconsole: %r" % e)
319 error("Could not start qtconsole: %r" % e)
320 return
320 return
321
321
322 @line_magic
322 @line_magic
323 def autosave(self, arg_s):
323 def autosave(self, arg_s):
324 """Set the autosave interval in the notebook (in seconds).
324 """Set the autosave interval in the notebook (in seconds).
325
325
326 The default value is 120, or two minutes.
326 The default value is 120, or two minutes.
327 ``%autosave 0`` will disable autosave.
327 ``%autosave 0`` will disable autosave.
328
328
329 This magic only has an effect when called from the notebook interface.
329 This magic only has an effect when called from the notebook interface.
330 It has no effect when called in a startup file.
330 It has no effect when called in a startup file.
331 """
331 """
332
332
333 try:
333 try:
334 interval = int(arg_s)
334 interval = int(arg_s)
335 except ValueError:
335 except ValueError:
336 raise UsageError("%%autosave requires an integer, got %r" % arg_s)
336 raise UsageError("%%autosave requires an integer, got %r" % arg_s)
337
337
338 # javascript wants milliseconds
338 # javascript wants milliseconds
339 milliseconds = 1000 * interval
339 milliseconds = 1000 * interval
340 display(Javascript("IPython.notebook.set_autosave_interval(%i)" % milliseconds),
340 display(Javascript("IPython.notebook.set_autosave_interval(%i)" % milliseconds),
341 include=['application/javascript']
341 include=['application/javascript']
342 )
342 )
343 if interval:
343 if interval:
344 print("Autosaving every %i seconds" % interval)
344 print("Autosaving every %i seconds" % interval)
345 else:
345 else:
346 print("Autosave disabled")
346 print("Autosave disabled")
347
347
348
348
349 class ZMQInteractiveShell(InteractiveShell):
349 class ZMQInteractiveShell(InteractiveShell):
350 """A subclass of InteractiveShell for ZMQ."""
350 """A subclass of InteractiveShell for ZMQ."""
351
351
352 displayhook_class = Type(ZMQShellDisplayHook)
352 displayhook_class = Type(ZMQShellDisplayHook)
353 display_pub_class = Type(ZMQDisplayPublisher)
353 display_pub_class = Type(ZMQDisplayPublisher)
354 data_pub_class = Type(ZMQDataPublisher)
354 data_pub_class = Type(ZMQDataPublisher)
355 kernel = Any()
355 kernel = Any()
356 parent_header = Any()
356 parent_header = Any()
357
357
358 def _banner1_default(self):
358 def _banner1_default(self):
359 return default_gui_banner
359 return default_gui_banner
360
360
361 # Override the traitlet in the parent class, because there's no point using
361 # Override the traitlet in the parent class, because there's no point using
362 # readline for the kernel. Can be removed when the readline code is moved
362 # readline for the kernel. Can be removed when the readline code is moved
363 # to the terminal frontend.
363 # to the terminal frontend.
364 colors_force = CBool(True)
364 colors_force = CBool(True)
365 readline_use = CBool(False)
365 readline_use = CBool(False)
366 # autoindent has no meaning in a zmqshell, and attempting to enable it
366 # autoindent has no meaning in a zmqshell, and attempting to enable it
367 # will print a warning in the absence of readline.
367 # will print a warning in the absence of readline.
368 autoindent = CBool(False)
368 autoindent = CBool(False)
369
369
370 exiter = Instance(ZMQExitAutocall)
370 exiter = Instance(ZMQExitAutocall)
371 def _exiter_default(self):
371 def _exiter_default(self):
372 return ZMQExitAutocall(self)
372 return ZMQExitAutocall(self)
373
373
374 def _exit_now_changed(self, name, old, new):
374 def _exit_now_changed(self, name, old, new):
375 """stop eventloop when exit_now fires"""
375 """stop eventloop when exit_now fires"""
376 if new:
376 if new:
377 loop = ioloop.IOLoop.instance()
377 loop = ioloop.IOLoop.instance()
378 loop.add_timeout(time.time()+0.1, loop.stop)
378 loop.add_timeout(time.time()+0.1, loop.stop)
379
379
380 keepkernel_on_exit = None
380 keepkernel_on_exit = None
381
381
382 # Over ZeroMQ, GUI control isn't done with PyOS_InputHook as there is no
382 # Over ZeroMQ, GUI control isn't done with PyOS_InputHook as there is no
383 # interactive input being read; we provide event loop support in ipkernel
383 # interactive input being read; we provide event loop support in ipkernel
384 @staticmethod
384 @staticmethod
385 def enable_gui(gui):
385 def enable_gui(gui):
386 from .eventloops import enable_gui as real_enable_gui
386 from .eventloops import enable_gui as real_enable_gui
387 try:
387 try:
388 real_enable_gui(gui)
388 real_enable_gui(gui)
389 except ValueError as e:
389 except ValueError as e:
390 raise UsageError("%s" % e)
390 raise UsageError("%s" % e)
391
391
392 def init_environment(self):
392 def init_environment(self):
393 """Configure the user's environment."""
393 """Configure the user's environment."""
394 env = os.environ
394 env = os.environ
395 # These two ensure 'ls' produces nice coloring on BSD-derived systems
395 # These two ensure 'ls' produces nice coloring on BSD-derived systems
396 env['TERM'] = 'xterm-color'
396 env['TERM'] = 'xterm-color'
397 env['CLICOLOR'] = '1'
397 env['CLICOLOR'] = '1'
398 # Since normal pagers don't work at all (over pexpect we don't have
398 # Since normal pagers don't work at all (over pexpect we don't have
399 # single-key control of the subprocess), try to disable paging in
399 # single-key control of the subprocess), try to disable paging in
400 # subprocesses as much as possible.
400 # subprocesses as much as possible.
401 env['PAGER'] = 'cat'
401 env['PAGER'] = 'cat'
402 env['GIT_PAGER'] = 'cat'
402 env['GIT_PAGER'] = 'cat'
403
403
404 def init_hooks(self):
404 def init_hooks(self):
405 super(ZMQInteractiveShell, self).init_hooks()
405 super(ZMQInteractiveShell, self).init_hooks()
406 self.set_hook('show_in_pager', page.as_hook(payloadpage.page), 99)
406 self.set_hook('show_in_pager', page.as_hook(payloadpage.page), 99)
407
407
408 def ask_exit(self):
408 def ask_exit(self):
409 """Engage the exit actions."""
409 """Engage the exit actions."""
410 self.exit_now = (not self.keepkernel_on_exit)
410 self.exit_now = (not self.keepkernel_on_exit)
411 payload = dict(
411 payload = dict(
412 source='ask_exit',
412 source='ask_exit',
413 keepkernel=self.keepkernel_on_exit,
413 keepkernel=self.keepkernel_on_exit,
414 )
414 )
415 self.payload_manager.write_payload(payload)
415 self.payload_manager.write_payload(payload)
416
416
417 def _showtraceback(self, etype, evalue, stb):
417 def _showtraceback(self, etype, evalue, stb):
418 # try to preserve ordering of tracebacks and print statements
418 # try to preserve ordering of tracebacks and print statements
419 sys.stdout.flush()
419 sys.stdout.flush()
420 sys.stderr.flush()
420 sys.stderr.flush()
421
421
422 exc_content = {
422 exc_content = {
423 u'traceback' : stb,
423 u'traceback' : stb,
424 u'ename' : unicode_type(etype.__name__),
424 u'ename' : unicode_type(etype.__name__),
425 u'evalue' : py3compat.safe_unicode(evalue),
425 u'evalue' : py3compat.safe_unicode(evalue),
426 }
426 }
427
427
428 dh = self.displayhook
428 dh = self.displayhook
429 # Send exception info over pub socket for other clients than the caller
429 # Send exception info over pub socket for other clients than the caller
430 # to pick up
430 # to pick up
431 topic = None
431 topic = None
432 if dh.topic:
432 if dh.topic:
433 topic = dh.topic.replace(b'execute_result', b'error')
433 topic = dh.topic.replace(b'execute_result', b'error')
434
434
435 exc_msg = dh.session.send(dh.pub_socket, u'error', json_clean(exc_content), dh.parent_header, ident=topic)
435 exc_msg = dh.session.send(dh.pub_socket, u'error', json_clean(exc_content), dh.parent_header, ident=topic)
436
436
437 # FIXME - Hack: store exception info in shell object. Right now, the
437 # FIXME - Hack: store exception info in shell object. Right now, the
438 # caller is reading this info after the fact, we need to fix this logic
438 # caller is reading this info after the fact, we need to fix this logic
439 # to remove this hack. Even uglier, we need to store the error status
439 # to remove this hack. Even uglier, we need to store the error status
440 # here, because in the main loop, the logic that sets it is being
440 # here, because in the main loop, the logic that sets it is being
441 # skipped because runlines swallows the exceptions.
441 # skipped because runlines swallows the exceptions.
442 exc_content[u'status'] = u'error'
442 exc_content[u'status'] = u'error'
443 self._reply_content = exc_content
443 self._reply_content = exc_content
444 # /FIXME
444 # /FIXME
445
445
446 return exc_content
446 return exc_content
447
447
448 def set_next_input(self, text, replace=False):
448 def set_next_input(self, text, replace=False):
449 """Send the specified text to the frontend to be presented at the next
449 """Send the specified text to the frontend to be presented at the next
450 input cell."""
450 input cell."""
451 payload = dict(
451 payload = dict(
452 source='set_next_input',
452 source='set_next_input',
453 text=text,
453 text=text,
454 replace=replace,
454 replace=replace,
455 )
455 )
456 self.payload_manager.write_payload(payload)
456 self.payload_manager.write_payload(payload)
457
457
458 def set_parent(self, parent):
458 def set_parent(self, parent):
459 """Set the parent header for associating output with its triggering input"""
459 """Set the parent header for associating output with its triggering input"""
460 self.parent_header = parent
460 self.parent_header = parent
461 self.displayhook.set_parent(parent)
461 self.displayhook.set_parent(parent)
462 self.display_pub.set_parent(parent)
462 self.display_pub.set_parent(parent)
463 self.data_pub.set_parent(parent)
463 self.data_pub.set_parent(parent)
464 try:
464 try:
465 sys.stdout.set_parent(parent)
465 sys.stdout.set_parent(parent)
466 except AttributeError:
466 except AttributeError:
467 pass
467 pass
468 try:
468 try:
469 sys.stderr.set_parent(parent)
469 sys.stderr.set_parent(parent)
470 except AttributeError:
470 except AttributeError:
471 pass
471 pass
472
472
473 def get_parent(self):
473 def get_parent(self):
474 return self.parent_header
474 return self.parent_header
475
475
    #-------------------------------------------------------------------------
    # Things related to magics
    #-------------------------------------------------------------------------
479
479
    def init_magics(self):
        """Initialise the magic system, then add kernel-only magics.

        The base-class initialisation must run first so that
        ``magics_manager`` exists; afterwards the kernel-specific magics in
        ``KernelMagics`` are registered and ``%ed`` is aliased to ``%edit``.
        """
        super(ZMQInteractiveShell, self).init_magics()
        self.register_magics(KernelMagics)
        self.magics_manager.register_alias('ed', 'edit')
484
484
485
485
# Register ZMQInteractiveShell as a virtual subclass of InteractiveShellABC so
# that isinstance/issubclass checks against the ABC accept kernel shells.
InteractiveShellABC.register(ZMQInteractiveShell)
General Comments 0
You need to be logged in to leave comments. Login now