##// END OF EJS Templates
More fixes to doc formatting
Thomas Kluyver -
Show More
@@ -1,206 +1,206 b''
1 """Base class to manage the interaction with a running kernel
1 """Base class to manage the interaction with a running kernel
2 """
2 """
3
3
4 #-----------------------------------------------------------------------------
4 #-----------------------------------------------------------------------------
5 # Copyright (C) 2013 The IPython Development Team
5 # Copyright (C) 2013 The IPython Development Team
6 #
6 #
7 # Distributed under the terms of the BSD License. The full license is in
7 # Distributed under the terms of the BSD License. The full license is in
8 # the file COPYING, distributed as part of this software.
8 # the file COPYING, distributed as part of this software.
9 #-----------------------------------------------------------------------------
9 #-----------------------------------------------------------------------------
10
10
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12 # Imports
12 # Imports
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15 from __future__ import absolute_import
15 from __future__ import absolute_import
16
16
17 import zmq
17 import zmq
18
18
19 # Local imports
19 # Local imports
20 from IPython.config.configurable import LoggingConfigurable
20 from IPython.config.configurable import LoggingConfigurable
21 from IPython.utils.traitlets import (
21 from IPython.utils.traitlets import (
22 Any, Instance, Type,
22 Any, Instance, Type,
23 )
23 )
24
24
25 from .zmq.session import Session
25 from .zmq.session import Session
26 from .channels import (
26 from .channels import (
27 ShellChannel, IOPubChannel,
27 ShellChannel, IOPubChannel,
28 HBChannel, StdInChannel,
28 HBChannel, StdInChannel,
29 )
29 )
30 from .clientabc import KernelClientABC
30 from .clientabc import KernelClientABC
31 from .connect import ConnectionFileMixin
31 from .connect import ConnectionFileMixin
32
32
33
33
34 #-----------------------------------------------------------------------------
34 #-----------------------------------------------------------------------------
35 # Main kernel client class
35 # Main kernel client class
36 #-----------------------------------------------------------------------------
36 #-----------------------------------------------------------------------------
37
37
38 class KernelClient(LoggingConfigurable, ConnectionFileMixin):
38 class KernelClient(LoggingConfigurable, ConnectionFileMixin):
39 """Communicates with a single kernel on any host via zmq channels.
39 """Communicates with a single kernel on any host via zmq channels.
40
40
41 There are four channels associated with each kernel:
41 There are four channels associated with each kernel:
42
42
43 * shell: for request/reply calls to the kernel.
43 * shell: for request/reply calls to the kernel.
44 * iopub: for the kernel to publish results to frontends.
44 * iopub: for the kernel to publish results to frontends.
45 * hb: for monitoring the kernel's heartbeat.
45 * hb: for monitoring the kernel's heartbeat.
46 * stdin: for frontends to reply to raw_input calls in the kernel.
46 * stdin: for frontends to reply to raw_input calls in the kernel.
47
47
48 The methods of the channels are exposed as methods of the client itself
48 The methods of the channels are exposed as methods of the client itself
49 (KernelClient.execute, complete, history, etc.).
49 (KernelClient.execute, complete, history, etc.).
50 See the channels themselves for documentation of these methods.
50 See the channels themselves for documentation of these methods.
51
51
52 """
52 """
53
53
54 # The PyZMQ Context to use for communication with the kernel.
54 # The PyZMQ Context to use for communication with the kernel.
55 context = Instance(zmq.Context)
55 context = Instance(zmq.Context)
56 def _context_default(self):
56 def _context_default(self):
57 return zmq.Context.instance()
57 return zmq.Context.instance()
58
58
59 # The Session to use for communication with the kernel.
59 # The Session to use for communication with the kernel.
60 session = Instance(Session)
60 session = Instance(Session)
61 def _session_default(self):
61 def _session_default(self):
62 return Session(parent=self)
62 return Session(parent=self)
63
63
64 # The classes to use for the various channels
64 # The classes to use for the various channels
65 shell_channel_class = Type(ShellChannel)
65 shell_channel_class = Type(ShellChannel)
66 iopub_channel_class = Type(IOPubChannel)
66 iopub_channel_class = Type(IOPubChannel)
67 stdin_channel_class = Type(StdInChannel)
67 stdin_channel_class = Type(StdInChannel)
68 hb_channel_class = Type(HBChannel)
68 hb_channel_class = Type(HBChannel)
69
69
70 # Protected traits
70 # Protected traits
71 _shell_channel = Any
71 _shell_channel = Any
72 _iopub_channel = Any
72 _iopub_channel = Any
73 _stdin_channel = Any
73 _stdin_channel = Any
74 _hb_channel = Any
74 _hb_channel = Any
75
75
76 #--------------------------------------------------------------------------
76 #--------------------------------------------------------------------------
77 # Channel proxy methods
77 # Channel proxy methods
78 #--------------------------------------------------------------------------
78 #--------------------------------------------------------------------------
79
79
80 def _get_msg(channel, *args, **kwargs):
80 def _get_msg(channel, *args, **kwargs):
81 return channel.get_msg(*args, **kwargs)
81 return channel.get_msg(*args, **kwargs)
82
82
83 def get_shell_msg(self, *args, **kwargs):
83 def get_shell_msg(self, *args, **kwargs):
84 """Get a message from the shell channel"""
84 """Get a message from the shell channel"""
85 return self.shell_channel.get_msg(*args, **kwargs)
85 return self.shell_channel.get_msg(*args, **kwargs)
86
86
87 def get_iopub_msg(self, *args, **kwargs):
87 def get_iopub_msg(self, *args, **kwargs):
88 """Get a message from the iopub channel"""
88 """Get a message from the iopub channel"""
89 return self.iopub_channel.get_msg(*args, **kwargs)
89 return self.iopub_channel.get_msg(*args, **kwargs)
90
90
91 def get_stdin_msg(self, *args, **kwargs):
91 def get_stdin_msg(self, *args, **kwargs):
92 """Get a message from the stdin channel"""
92 """Get a message from the stdin channel"""
93 return self.stdin_channel.get_msg(*args, **kwargs)
93 return self.stdin_channel.get_msg(*args, **kwargs)
94
94
95 #--------------------------------------------------------------------------
95 #--------------------------------------------------------------------------
96 # Channel management methods
96 # Channel management methods
97 #--------------------------------------------------------------------------
97 #--------------------------------------------------------------------------
98
98
99 def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
99 def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
100 """Starts the channels for this kernel.
100 """Starts the channels for this kernel.
101
101
102 This will create the channels if they do not exist and then start
102 This will create the channels if they do not exist and then start
103 them (their activity runs in a thread). If port numbers of 0 are
103 them (their activity runs in a thread). If port numbers of 0 are
104 being used (random ports) then you must first call
104 being used (random ports) then you must first call
105 :method:`start_kernel`. If the channels have been stopped and you
105 :meth:`start_kernel`. If the channels have been stopped and you
106 call this, :class:`RuntimeError` will be raised.
106 call this, :class:`RuntimeError` will be raised.
107 """
107 """
108 if shell:
108 if shell:
109 self.shell_channel.start()
109 self.shell_channel.start()
110 for method in self.shell_channel.proxy_methods:
110 for method in self.shell_channel.proxy_methods:
111 setattr(self, method, getattr(self.shell_channel, method))
111 setattr(self, method, getattr(self.shell_channel, method))
112 if iopub:
112 if iopub:
113 self.iopub_channel.start()
113 self.iopub_channel.start()
114 for method in self.iopub_channel.proxy_methods:
114 for method in self.iopub_channel.proxy_methods:
115 setattr(self, method, getattr(self.iopub_channel, method))
115 setattr(self, method, getattr(self.iopub_channel, method))
116 if stdin:
116 if stdin:
117 self.stdin_channel.start()
117 self.stdin_channel.start()
118 for method in self.stdin_channel.proxy_methods:
118 for method in self.stdin_channel.proxy_methods:
119 setattr(self, method, getattr(self.stdin_channel, method))
119 setattr(self, method, getattr(self.stdin_channel, method))
120 self.shell_channel.allow_stdin = True
120 self.shell_channel.allow_stdin = True
121 else:
121 else:
122 self.shell_channel.allow_stdin = False
122 self.shell_channel.allow_stdin = False
123 if hb:
123 if hb:
124 self.hb_channel.start()
124 self.hb_channel.start()
125
125
126 def stop_channels(self):
126 def stop_channels(self):
127 """Stops all the running channels for this kernel.
127 """Stops all the running channels for this kernel.
128
128
129 This stops their event loops and joins their threads.
129 This stops their event loops and joins their threads.
130 """
130 """
131 if self.shell_channel.is_alive():
131 if self.shell_channel.is_alive():
132 self.shell_channel.stop()
132 self.shell_channel.stop()
133 if self.iopub_channel.is_alive():
133 if self.iopub_channel.is_alive():
134 self.iopub_channel.stop()
134 self.iopub_channel.stop()
135 if self.stdin_channel.is_alive():
135 if self.stdin_channel.is_alive():
136 self.stdin_channel.stop()
136 self.stdin_channel.stop()
137 if self.hb_channel.is_alive():
137 if self.hb_channel.is_alive():
138 self.hb_channel.stop()
138 self.hb_channel.stop()
139
139
140 @property
140 @property
141 def channels_running(self):
141 def channels_running(self):
142 """Are any of the channels created and running?"""
142 """Are any of the channels created and running?"""
143 return (self.shell_channel.is_alive() or self.iopub_channel.is_alive() or
143 return (self.shell_channel.is_alive() or self.iopub_channel.is_alive() or
144 self.stdin_channel.is_alive() or self.hb_channel.is_alive())
144 self.stdin_channel.is_alive() or self.hb_channel.is_alive())
145
145
146 @property
146 @property
147 def shell_channel(self):
147 def shell_channel(self):
148 """Get the shell channel object for this kernel."""
148 """Get the shell channel object for this kernel."""
149 if self._shell_channel is None:
149 if self._shell_channel is None:
150 url = self._make_url('shell')
150 url = self._make_url('shell')
151 self.log.debug("connecting shell channel to %s", url)
151 self.log.debug("connecting shell channel to %s", url)
152 self._shell_channel = self.shell_channel_class(
152 self._shell_channel = self.shell_channel_class(
153 self.context, self.session, url
153 self.context, self.session, url
154 )
154 )
155 return self._shell_channel
155 return self._shell_channel
156
156
157 @property
157 @property
158 def iopub_channel(self):
158 def iopub_channel(self):
159 """Get the iopub channel object for this kernel."""
159 """Get the iopub channel object for this kernel."""
160 if self._iopub_channel is None:
160 if self._iopub_channel is None:
161 url = self._make_url('iopub')
161 url = self._make_url('iopub')
162 self.log.debug("connecting iopub channel to %s", url)
162 self.log.debug("connecting iopub channel to %s", url)
163 self._iopub_channel = self.iopub_channel_class(
163 self._iopub_channel = self.iopub_channel_class(
164 self.context, self.session, url
164 self.context, self.session, url
165 )
165 )
166 return self._iopub_channel
166 return self._iopub_channel
167
167
168 @property
168 @property
169 def stdin_channel(self):
169 def stdin_channel(self):
170 """Get the stdin channel object for this kernel."""
170 """Get the stdin channel object for this kernel."""
171 if self._stdin_channel is None:
171 if self._stdin_channel is None:
172 url = self._make_url('stdin')
172 url = self._make_url('stdin')
173 self.log.debug("connecting stdin channel to %s", url)
173 self.log.debug("connecting stdin channel to %s", url)
174 self._stdin_channel = self.stdin_channel_class(
174 self._stdin_channel = self.stdin_channel_class(
175 self.context, self.session, url
175 self.context, self.session, url
176 )
176 )
177 return self._stdin_channel
177 return self._stdin_channel
178
178
179 @property
179 @property
180 def hb_channel(self):
180 def hb_channel(self):
181 """Get the hb channel object for this kernel."""
181 """Get the hb channel object for this kernel."""
182 if self._hb_channel is None:
182 if self._hb_channel is None:
183 url = self._make_url('hb')
183 url = self._make_url('hb')
184 self.log.debug("connecting heartbeat channel to %s", url)
184 self.log.debug("connecting heartbeat channel to %s", url)
185 self._hb_channel = self.hb_channel_class(
185 self._hb_channel = self.hb_channel_class(
186 self.context, self.session, url
186 self.context, self.session, url
187 )
187 )
188 return self._hb_channel
188 return self._hb_channel
189
189
190 def is_alive(self):
190 def is_alive(self):
191 """Is the kernel process still running?"""
191 """Is the kernel process still running?"""
192 if self._hb_channel is not None:
192 if self._hb_channel is not None:
193 # We didn't start the kernel with this KernelManager so we
193 # We didn't start the kernel with this KernelManager so we
194 # use the heartbeat.
194 # use the heartbeat.
195 return self._hb_channel.is_beating()
195 return self._hb_channel.is_beating()
196 else:
196 else:
197 # no heartbeat and not local, we can't tell if it's running,
197 # no heartbeat and not local, we can't tell if it's running,
198 # so naively return True
198 # so naively return True
199 return True
199 return True
200
200
201
201
202 #-----------------------------------------------------------------------------
202 #-----------------------------------------------------------------------------
203 # ABC Registration
203 # ABC Registration
204 #-----------------------------------------------------------------------------
204 #-----------------------------------------------------------------------------
205
205
206 KernelClientABC.register(KernelClient)
206 KernelClientABC.register(KernelClient)
@@ -1,848 +1,850 b''
1 """Session object for building, serializing, sending, and receiving messages in
1 """Session object for building, serializing, sending, and receiving messages in
2 IPython. The Session object supports serialization, HMAC signatures, and
2 IPython. The Session object supports serialization, HMAC signatures, and
3 metadata on messages.
3 metadata on messages.
4
4
5 Also defined here are utilities for working with Sessions:
5 Also defined here are utilities for working with Sessions:
6 * A SessionFactory to be used as a base class for configurables that work with
6 * A SessionFactory to be used as a base class for configurables that work with
7 Sessions.
7 Sessions.
8 * A Message object for convenience that allows attribute-access to the msg dict.
8 * A Message object for convenience that allows attribute-access to the msg dict.
9
9
10 Authors:
10 Authors:
11
11
12 * Min RK
12 * Min RK
13 * Brian Granger
13 * Brian Granger
14 * Fernando Perez
14 * Fernando Perez
15 """
15 """
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17 # Copyright (C) 2010-2011 The IPython Development Team
17 # Copyright (C) 2010-2011 The IPython Development Team
18 #
18 #
19 # Distributed under the terms of the BSD License. The full license is in
19 # Distributed under the terms of the BSD License. The full license is in
20 # the file COPYING, distributed as part of this software.
20 # the file COPYING, distributed as part of this software.
21 #-----------------------------------------------------------------------------
21 #-----------------------------------------------------------------------------
22
22
23 #-----------------------------------------------------------------------------
23 #-----------------------------------------------------------------------------
24 # Imports
24 # Imports
25 #-----------------------------------------------------------------------------
25 #-----------------------------------------------------------------------------
26
26
27 import hashlib
27 import hashlib
28 import hmac
28 import hmac
29 import logging
29 import logging
30 import os
30 import os
31 import pprint
31 import pprint
32 import random
32 import random
33 import uuid
33 import uuid
34 from datetime import datetime
34 from datetime import datetime
35
35
36 try:
36 try:
37 import cPickle
37 import cPickle
38 pickle = cPickle
38 pickle = cPickle
39 except:
39 except:
40 cPickle = None
40 cPickle = None
41 import pickle
41 import pickle
42
42
43 import zmq
43 import zmq
44 from zmq.utils import jsonapi
44 from zmq.utils import jsonapi
45 from zmq.eventloop.ioloop import IOLoop
45 from zmq.eventloop.ioloop import IOLoop
46 from zmq.eventloop.zmqstream import ZMQStream
46 from zmq.eventloop.zmqstream import ZMQStream
47
47
48 from IPython.config.configurable import Configurable, LoggingConfigurable
48 from IPython.config.configurable import Configurable, LoggingConfigurable
49 from IPython.utils import io
49 from IPython.utils import io
50 from IPython.utils.importstring import import_item
50 from IPython.utils.importstring import import_item
51 from IPython.utils.jsonutil import extract_dates, squash_dates, date_default
51 from IPython.utils.jsonutil import extract_dates, squash_dates, date_default
52 from IPython.utils.py3compat import (str_to_bytes, str_to_unicode, unicode_type,
52 from IPython.utils.py3compat import (str_to_bytes, str_to_unicode, unicode_type,
53 iteritems)
53 iteritems)
54 from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,
54 from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,
55 DottedObjectName, CUnicode, Dict, Integer,
55 DottedObjectName, CUnicode, Dict, Integer,
56 TraitError,
56 TraitError,
57 )
57 )
58 from IPython.kernel.zmq.serialize import MAX_ITEMS, MAX_BYTES
58 from IPython.kernel.zmq.serialize import MAX_ITEMS, MAX_BYTES
59
59
60 #-----------------------------------------------------------------------------
60 #-----------------------------------------------------------------------------
61 # utility functions
61 # utility functions
62 #-----------------------------------------------------------------------------
62 #-----------------------------------------------------------------------------
63
63
64 def squash_unicode(obj):
64 def squash_unicode(obj):
65 """coerce unicode back to bytestrings."""
65 """coerce unicode back to bytestrings."""
66 if isinstance(obj,dict):
66 if isinstance(obj,dict):
67 for key in obj.keys():
67 for key in obj.keys():
68 obj[key] = squash_unicode(obj[key])
68 obj[key] = squash_unicode(obj[key])
69 if isinstance(key, unicode_type):
69 if isinstance(key, unicode_type):
70 obj[squash_unicode(key)] = obj.pop(key)
70 obj[squash_unicode(key)] = obj.pop(key)
71 elif isinstance(obj, list):
71 elif isinstance(obj, list):
72 for i,v in enumerate(obj):
72 for i,v in enumerate(obj):
73 obj[i] = squash_unicode(v)
73 obj[i] = squash_unicode(v)
74 elif isinstance(obj, unicode_type):
74 elif isinstance(obj, unicode_type):
75 obj = obj.encode('utf8')
75 obj = obj.encode('utf8')
76 return obj
76 return obj
77
77
78 #-----------------------------------------------------------------------------
78 #-----------------------------------------------------------------------------
79 # globals and defaults
79 # globals and defaults
80 #-----------------------------------------------------------------------------
80 #-----------------------------------------------------------------------------
81
81
82 # ISO8601-ify datetime objects
82 # ISO8601-ify datetime objects
83 json_packer = lambda obj: jsonapi.dumps(obj, default=date_default)
83 json_packer = lambda obj: jsonapi.dumps(obj, default=date_default)
84 json_unpacker = lambda s: jsonapi.loads(s)
84 json_unpacker = lambda s: jsonapi.loads(s)
85
85
86 pickle_packer = lambda o: pickle.dumps(squash_dates(o),-1)
86 pickle_packer = lambda o: pickle.dumps(squash_dates(o),-1)
87 pickle_unpacker = pickle.loads
87 pickle_unpacker = pickle.loads
88
88
89 default_packer = json_packer
89 default_packer = json_packer
90 default_unpacker = json_unpacker
90 default_unpacker = json_unpacker
91
91
92 DELIM = b"<IDS|MSG>"
92 DELIM = b"<IDS|MSG>"
93 # singleton dummy tracker, which will always report as done
93 # singleton dummy tracker, which will always report as done
94 DONE = zmq.MessageTracker()
94 DONE = zmq.MessageTracker()
95
95
96 #-----------------------------------------------------------------------------
96 #-----------------------------------------------------------------------------
97 # Mixin tools for apps that use Sessions
97 # Mixin tools for apps that use Sessions
98 #-----------------------------------------------------------------------------
98 #-----------------------------------------------------------------------------
99
99
100 session_aliases = dict(
100 session_aliases = dict(
101 ident = 'Session.session',
101 ident = 'Session.session',
102 user = 'Session.username',
102 user = 'Session.username',
103 keyfile = 'Session.keyfile',
103 keyfile = 'Session.keyfile',
104 )
104 )
105
105
106 session_flags = {
106 session_flags = {
107 'secure' : ({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())),
107 'secure' : ({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())),
108 'keyfile' : '' }},
108 'keyfile' : '' }},
109 """Use HMAC digests for authentication of messages.
109 """Use HMAC digests for authentication of messages.
110 Setting this flag will generate a new UUID to use as the HMAC key.
110 Setting this flag will generate a new UUID to use as the HMAC key.
111 """),
111 """),
112 'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }},
112 'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }},
113 """Don't authenticate messages."""),
113 """Don't authenticate messages."""),
114 }
114 }
115
115
116 def default_secure(cfg):
116 def default_secure(cfg):
117 """Set the default behavior for a config environment to be secure.
117 """Set the default behavior for a config environment to be secure.
118
118
119 If Session.key/keyfile have not been set, set Session.key to
119 If Session.key/keyfile have not been set, set Session.key to
120 a new random UUID.
120 a new random UUID.
121 """
121 """
122
122
123 if 'Session' in cfg:
123 if 'Session' in cfg:
124 if 'key' in cfg.Session or 'keyfile' in cfg.Session:
124 if 'key' in cfg.Session or 'keyfile' in cfg.Session:
125 return
125 return
126 # key/keyfile not specified, generate new UUID:
126 # key/keyfile not specified, generate new UUID:
127 cfg.Session.key = str_to_bytes(str(uuid.uuid4()))
127 cfg.Session.key = str_to_bytes(str(uuid.uuid4()))
128
128
129
129
130 #-----------------------------------------------------------------------------
130 #-----------------------------------------------------------------------------
131 # Classes
131 # Classes
132 #-----------------------------------------------------------------------------
132 #-----------------------------------------------------------------------------
133
133
134 class SessionFactory(LoggingConfigurable):
134 class SessionFactory(LoggingConfigurable):
135 """The Base class for configurables that have a Session, Context, logger,
135 """The Base class for configurables that have a Session, Context, logger,
136 and IOLoop.
136 and IOLoop.
137 """
137 """
138
138
139 logname = Unicode('')
139 logname = Unicode('')
140 def _logname_changed(self, name, old, new):
140 def _logname_changed(self, name, old, new):
141 self.log = logging.getLogger(new)
141 self.log = logging.getLogger(new)
142
142
143 # not configurable:
143 # not configurable:
144 context = Instance('zmq.Context')
144 context = Instance('zmq.Context')
145 def _context_default(self):
145 def _context_default(self):
146 return zmq.Context.instance()
146 return zmq.Context.instance()
147
147
148 session = Instance('IPython.kernel.zmq.session.Session')
148 session = Instance('IPython.kernel.zmq.session.Session')
149
149
150 loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
150 loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
151 def _loop_default(self):
151 def _loop_default(self):
152 return IOLoop.instance()
152 return IOLoop.instance()
153
153
154 def __init__(self, **kwargs):
154 def __init__(self, **kwargs):
155 super(SessionFactory, self).__init__(**kwargs)
155 super(SessionFactory, self).__init__(**kwargs)
156
156
157 if self.session is None:
157 if self.session is None:
158 # construct the session
158 # construct the session
159 self.session = Session(**kwargs)
159 self.session = Session(**kwargs)
160
160
161
161
162 class Message(object):
162 class Message(object):
163 """A simple message object that maps dict keys to attributes.
163 """A simple message object that maps dict keys to attributes.
164
164
165 A Message can be created from a dict and a dict from a Message instance
165 A Message can be created from a dict and a dict from a Message instance
166 simply by calling dict(msg_obj)."""
166 simply by calling dict(msg_obj)."""
167
167
168 def __init__(self, msg_dict):
168 def __init__(self, msg_dict):
169 dct = self.__dict__
169 dct = self.__dict__
170 for k, v in iteritems(dict(msg_dict)):
170 for k, v in iteritems(dict(msg_dict)):
171 if isinstance(v, dict):
171 if isinstance(v, dict):
172 v = Message(v)
172 v = Message(v)
173 dct[k] = v
173 dct[k] = v
174
174
175 # Having this iterator lets dict(msg_obj) work out of the box.
175 # Having this iterator lets dict(msg_obj) work out of the box.
176 def __iter__(self):
176 def __iter__(self):
177 return iter(iteritems(self.__dict__))
177 return iter(iteritems(self.__dict__))
178
178
179 def __repr__(self):
179 def __repr__(self):
180 return repr(self.__dict__)
180 return repr(self.__dict__)
181
181
182 def __str__(self):
182 def __str__(self):
183 return pprint.pformat(self.__dict__)
183 return pprint.pformat(self.__dict__)
184
184
185 def __contains__(self, k):
185 def __contains__(self, k):
186 return k in self.__dict__
186 return k in self.__dict__
187
187
188 def __getitem__(self, k):
188 def __getitem__(self, k):
189 return self.__dict__[k]
189 return self.__dict__[k]
190
190
191
191
192 def msg_header(msg_id, msg_type, username, session):
192 def msg_header(msg_id, msg_type, username, session):
193 date = datetime.now()
193 date = datetime.now()
194 return locals()
194 return locals()
195
195
196 def extract_header(msg_or_header):
196 def extract_header(msg_or_header):
197 """Given a message or header, return the header."""
197 """Given a message or header, return the header."""
198 if not msg_or_header:
198 if not msg_or_header:
199 return {}
199 return {}
200 try:
200 try:
201 # See if msg_or_header is the entire message.
201 # See if msg_or_header is the entire message.
202 h = msg_or_header['header']
202 h = msg_or_header['header']
203 except KeyError:
203 except KeyError:
204 try:
204 try:
205 # See if msg_or_header is just the header
205 # See if msg_or_header is just the header
206 h = msg_or_header['msg_id']
206 h = msg_or_header['msg_id']
207 except KeyError:
207 except KeyError:
208 raise
208 raise
209 else:
209 else:
210 h = msg_or_header
210 h = msg_or_header
211 if not isinstance(h, dict):
211 if not isinstance(h, dict):
212 h = dict(h)
212 h = dict(h)
213 return h
213 return h
214
214
215 class Session(Configurable):
215 class Session(Configurable):
216 """Object for handling serialization and sending of messages.
216 """Object for handling serialization and sending of messages.
217
217
218 The Session object handles building messages and sending them
218 The Session object handles building messages and sending them
219 with ZMQ sockets or ZMQStream objects. Objects can communicate with each
219 with ZMQ sockets or ZMQStream objects. Objects can communicate with each
220 other over the network via Session objects, and only need to work with the
220 other over the network via Session objects, and only need to work with the
221 dict-based IPython message spec. The Session will handle
221 dict-based IPython message spec. The Session will handle
222 serialization/deserialization, security, and metadata.
222 serialization/deserialization, security, and metadata.
223
223
224 Sessions support configurable serialiization via packer/unpacker traits,
224 Sessions support configurable serialiization via packer/unpacker traits,
225 and signing with HMAC digests via the key/keyfile traits.
225 and signing with HMAC digests via the key/keyfile traits.
226
226
227 Parameters
227 Parameters
228 ----------
228 ----------
229
229
230 debug : bool
230 debug : bool
231 whether to trigger extra debugging statements
231 whether to trigger extra debugging statements
232 packer/unpacker : str : 'json', 'pickle' or import_string
232 packer/unpacker : str : 'json', 'pickle' or import_string
233 importstrings for methods to serialize message parts. If just
233 importstrings for methods to serialize message parts. If just
234 'json' or 'pickle', predefined JSON and pickle packers will be used.
234 'json' or 'pickle', predefined JSON and pickle packers will be used.
235 Otherwise, the entire importstring must be used.
235 Otherwise, the entire importstring must be used.
236
236
237 The functions must accept at least valid JSON input, and output *bytes*.
237 The functions must accept at least valid JSON input, and output *bytes*.
238
238
239 For example, to use msgpack:
239 For example, to use msgpack:
240 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
240 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
241 pack/unpack : callables
241 pack/unpack : callables
242 You can also set the pack/unpack callables for serialization directly.
242 You can also set the pack/unpack callables for serialization directly.
243 session : bytes
243 session : bytes
244 the ID of this Session object. The default is to generate a new UUID.
244 the ID of this Session object. The default is to generate a new UUID.
245 username : unicode
245 username : unicode
246 username added to message headers. The default is to ask the OS.
246 username added to message headers. The default is to ask the OS.
247 key : bytes
247 key : bytes
248 The key used to initialize an HMAC signature. If unset, messages
248 The key used to initialize an HMAC signature. If unset, messages
249 will not be signed or checked.
249 will not be signed or checked.
250 keyfile : filepath
250 keyfile : filepath
251 The file containing a key. If this is set, `key` will be initialized
251 The file containing a key. If this is set, `key` will be initialized
252 to the contents of the file.
252 to the contents of the file.
253
253
254 """
254 """
255
255
256 debug=Bool(False, config=True, help="""Debug output in the Session""")
256 debug=Bool(False, config=True, help="""Debug output in the Session""")
257
257
258 packer = DottedObjectName('json',config=True,
258 packer = DottedObjectName('json',config=True,
259 help="""The name of the packer for serializing messages.
259 help="""The name of the packer for serializing messages.
260 Should be one of 'json', 'pickle', or an import name
260 Should be one of 'json', 'pickle', or an import name
261 for a custom callable serializer.""")
261 for a custom callable serializer.""")
def _packer_changed(self, name, old, new):
    """Mirror a change of the `packer` name onto the actual callables.

    For the built-in 'json'/'pickle' names the matching unpacker is
    selected as well; any other value is treated as an import string
    for a custom pack callable.
    """
    known = {'json': (json_packer, json_unpacker),
             'pickle': (pickle_packer, pickle_unpacker)}
    choice = new.lower()
    if choice in known:
        self.pack, self.unpack = known[choice]
        self.unpacker = new
    else:
        self.pack = import_item(str(new))
273
273
# Import name of the deserializer; only consulted when `packer` is a
# custom import string ('json'/'pickle' set both directions at once).
unpacker = DottedObjectName('json', config=True,
    help="""The name of the unpacker for unserializing messages.
    Only used with custom functions for `packer`.""")
def _unpacker_changed(self, name, old, new):
    """Mirror a change of the `unpacker` name onto the actual callables.

    Symmetric to _packer_changed: built-in names set both directions,
    anything else is imported as a custom unpack callable.
    """
    known = {'json': (json_packer, json_unpacker),
             'pickle': (pickle_packer, pickle_unpacker)}
    choice = new.lower()
    if choice in known:
        self.pack, self.unpack = known[choice]
        self.packer = new
    else:
        self.unpack = import_item(str(new))
288
288
# UUID identifying this session; a bytes copy is cached in `bsession`.
session = CUnicode(u'', config=True,
    help="""The UUID identifying this session.""")
def _session_default(self):
    """Generate a fresh UUID and prime the bytes cache `bsession`."""
    u = unicode_type(uuid.uuid4())
    self.bsession = u.encode('ascii')
    return u
295
295
296 def _session_changed(self, name, old, new):
296 def _session_changed(self, name, old, new):
297 self.bsession = self.session.encode('ascii')
297 self.bsession = self.session.encode('ascii')
298
298
# bsession is the session as bytes
bsession = CBytes(b'')

# Sender identity stamped into every outgoing message header.
username = Unicode(str_to_unicode(os.environ.get('USER', 'username')),
    help="""Username for the Session. Default is your system username.""",
    config=True)
305
305
# Defaults merged into every outgoing message's metadata dict (see msg()).
metadata = Dict({}, config=True,
    help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")

# message signature related traits:

# HMAC key; an empty key disables signing (see _key_changed).
key = CBytes(b'', config=True,
    help="""execution key, for extra authentication.""")
313 def _key_changed(self, name, old, new):
313 def _key_changed(self, name, old, new):
314 if new:
314 if new:
315 self.auth = hmac.HMAC(new, digestmod=self.digest_mod)
315 self.auth = hmac.HMAC(new, digestmod=self.digest_mod)
316 else:
316 else:
317 self.auth = None
317 self.auth = None
318
318
# 'hmac-<hashname>'; resolved to a hashlib constructor by the handler below.
signature_scheme = Unicode('hmac-sha256', config=True,
    help="""The digest scheme used to construct the message signatures.
    Must have the form 'hmac-HASH'.""")
322 def _signature_scheme_changed(self, name, old, new):
322 def _signature_scheme_changed(self, name, old, new):
323 if not new.startswith('hmac-'):
323 if not new.startswith('hmac-'):
324 raise TraitError("signature_scheme must start with 'hmac-', got %r" % new)
324 raise TraitError("signature_scheme must start with 'hmac-', got %r" % new)
325 hash_name = new.split('-', 1)[1]
325 hash_name = new.split('-', 1)[1]
326 try:
326 try:
327 self.digest_mod = getattr(hashlib, hash_name)
327 self.digest_mod = getattr(hashlib, hash_name)
328 except AttributeError:
328 except AttributeError:
329 raise TraitError("hashlib has no such attribute: %s" % hash_name)
329 raise TraitError("hashlib has no such attribute: %s" % hash_name)
330
330
# hashlib constructor used for HMAC signing (set via signature_scheme).
digest_mod = Any()
def _digest_mod_default(self):
    """Default digest is SHA-256, matching the default signature_scheme."""
    return hashlib.sha256
334
334
# Prototype HMAC object, copied per signature; None means signing disabled.
auth = Instance(hmac.HMAC)

# Signatures already seen, kept for replay protection (see _add_digest).
digest_history = Set()
digest_history_size = Integer(2**16, config=True,
    help="""The maximum number of digests to remember.

    The digest history will be culled when it exceeds this value.
    """
)
344
344
# Path to a file whose contents become `key` (see _keyfile_changed).
keyfile = Unicode('', config=True,
    help="""path to file containing execution key.""")
347 def _keyfile_changed(self, name, old, new):
347 def _keyfile_changed(self, name, old, new):
348 with open(new, 'rb') as f:
348 with open(new, 'rb') as f:
349 self.key = f.read().strip()
349 self.key = f.read().strip()
350
350
# PID recorded at __init__; send() refuses to run from a forked child.
# for protecting against sends from forks
pid = Integer()
353
353
# serialization traits:

# The actual packer callable (derived from `packer`; validated below).
pack = Any(default_packer) # the actual packer function
357 def _pack_changed(self, name, old, new):
357 def _pack_changed(self, name, old, new):
358 if not callable(new):
358 if not callable(new):
359 raise TypeError("packer must be callable, not %s"%type(new))
359 raise TypeError("packer must be callable, not %s"%type(new))
360
360
# The actual unpacker callable (derived from `unpacker`; validated below).
unpack = Any(default_unpacker) # the actual unpacker function
362 def _unpack_changed(self, name, old, new):
362 def _unpack_changed(self, name, old, new):
363 # unpacker is not checked - it is assumed to be
363 # unpacker is not checked - it is assumed to be
364 if not callable(new):
364 if not callable(new):
365 raise TypeError("unpacker must be callable, not %s"%type(new))
365 raise TypeError("unpacker must be callable, not %s"%type(new))
366
366
# thresholds:
# Buffers shorter than this are copied on send; longer ones go zero-copy.
copy_threshold = Integer(2**16, config=True,
    help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
# Objects with buffers larger than this are not pickled whole.
buffer_threshold = Integer(MAX_BYTES, config=True,
    help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.")
item_threshold = Integer(MAX_ITEMS, config=True,
    help="""The maximum number of items for a container to be introspected for custom serialization.
    Containers larger than this are pickled outright.
    """
)
377
377
378
378
def __init__(self, **kwargs):
    """Create a Session object.

    Parameters
    ----------
    debug : bool
        whether to trigger extra debugging statements
    packer/unpacker : str : 'json', 'pickle' or import_string
        importstrings for methods to serialize message parts.  If just
        'json' or 'pickle', predefined JSON and pickle packers will be
        used.  Otherwise, the entire importstring must be used.  The
        functions must accept at least valid JSON input, and output
        *bytes*.  For example, to use msgpack:
        packer = 'msgpack.packb', unpacker='msgpack.unpackb'
    pack/unpack : callables
        You can also set the pack/unpack callables for serialization
        directly.
    session : unicode (must be ascii)
        the ID of this Session object.  The default is to generate a
        new UUID.
    bsession : bytes
        The session as bytes
    username : unicode
        username added to message headers.  The default is to ask the OS.
    key : bytes
        The key used to initialize an HMAC signature.  If unset,
        messages will not be signed or checked.
    signature_scheme : str
        The message digest scheme.  Currently must be of the form
        'hmac-HASH', where 'HASH' is a hashing function available in
        Python's hashlib.  The default is 'hmac-sha256'.  This is
        ignored if 'key' is empty.
    keyfile : filepath
        The file containing a key.  If this is set, `key` will be
        initialized to the contents of the file.
    """
    super(Session, self).__init__(**kwargs)
    self._check_packers()
    # pre-packed empty dict, reused whenever content is None
    self.none = self.pack({})
    # touch self.session to trigger _session_default, so bsession is set
    self.session
    # remember our PID so send() can detect calls from a forked child
    self.pid = os.getpid()
425
425
@property
def msg_id(self):
    """Return a freshly generated UUID string on every access."""
    new_id = uuid.uuid4()
    return str(new_id)
430
430
431 def _check_packers(self):
431 def _check_packers(self):
432 """check packers for datetime support."""
432 """check packers for datetime support."""
433 pack = self.pack
433 pack = self.pack
434 unpack = self.unpack
434 unpack = self.unpack
435
435
436 # check simple serialization
436 # check simple serialization
437 msg = dict(a=[1,'hi'])
437 msg = dict(a=[1,'hi'])
438 try:
438 try:
439 packed = pack(msg)
439 packed = pack(msg)
440 except Exception as e:
440 except Exception as e:
441 msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}"
441 msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}"
442 if self.packer == 'json':
442 if self.packer == 'json':
443 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
443 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
444 else:
444 else:
445 jsonmsg = ""
445 jsonmsg = ""
446 raise ValueError(
446 raise ValueError(
447 msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg)
447 msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg)
448 )
448 )
449
449
450 # ensure packed message is bytes
450 # ensure packed message is bytes
451 if not isinstance(packed, bytes):
451 if not isinstance(packed, bytes):
452 raise ValueError("message packed to %r, but bytes are required"%type(packed))
452 raise ValueError("message packed to %r, but bytes are required"%type(packed))
453
453
454 # check that unpack is pack's inverse
454 # check that unpack is pack's inverse
455 try:
455 try:
456 unpacked = unpack(packed)
456 unpacked = unpack(packed)
457 assert unpacked == msg
457 assert unpacked == msg
458 except Exception as e:
458 except Exception as e:
459 msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}"
459 msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}"
460 if self.packer == 'json':
460 if self.packer == 'json':
461 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
461 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
462 else:
462 else:
463 jsonmsg = ""
463 jsonmsg = ""
464 raise ValueError(
464 raise ValueError(
465 msg.format(packer=self.packer, unpacker=self.unpacker, e=e, jsonmsg=jsonmsg)
465 msg.format(packer=self.packer, unpacker=self.unpacker, e=e, jsonmsg=jsonmsg)
466 )
466 )
467
467
468 # check datetime support
468 # check datetime support
469 msg = dict(t=datetime.now())
469 msg = dict(t=datetime.now())
470 try:
470 try:
471 unpacked = unpack(pack(msg))
471 unpacked = unpack(pack(msg))
472 if isinstance(unpacked['t'], datetime):
472 if isinstance(unpacked['t'], datetime):
473 raise ValueError("Shouldn't deserialize to datetime")
473 raise ValueError("Shouldn't deserialize to datetime")
474 except Exception:
474 except Exception:
475 self.pack = lambda o: pack(squash_dates(o))
475 self.pack = lambda o: pack(squash_dates(o))
476 self.unpack = lambda s: unpack(s)
476 self.unpack = lambda s: unpack(s)
477
477
def msg_header(self, msg_type):
    """Build a header dict for a message of type `msg_type`.

    Delegates to the module-level msg_header helper, supplying a fresh
    msg_id plus this session's username and session id.
    """
    return msg_header(self.msg_id, msg_type, self.username, self.session)
480
480
def msg(self, msg_type, content=None, parent=None, header=None, metadata=None):
    """Return the nested message dict.

    This format is different from what is sent over the wire. The
    serialize/unserialize methods converts this nested message dict to the wire
    format, which is a list of message parts.
    """
    if header is None:
        header = self.msg_header(msg_type)
    # start from the session-wide defaults, then layer per-message metadata
    meta = self.metadata.copy()
    if metadata is not None:
        meta.update(metadata)
    return {
        'header': header,
        'msg_id': header['msg_id'],
        'msg_type': header['msg_type'],
        'parent_header': extract_header(parent) if parent is not None else {},
        'content': content if content is not None else {},
        'metadata': meta,
    }
499
499
def sign(self, msg_list):
    """Sign a message with HMAC digest. If no auth, return b''.

    Parameters
    ----------
    msg_list : list
        The [p_header,p_parent,p_content] part of the message list.
    """
    if self.auth is None:
        return b''
    # work on a copy so the prototype HMAC is never mutated
    mac = self.auth.copy()
    for part in msg_list:
        mac.update(part)
    return str_to_bytes(mac.hexdigest())
514
514
def serialize(self, msg, ident=None):
    """Serialize the message components to bytes.

    This is roughly the inverse of unserialize. The serialize/unserialize
    methods work with full message lists, whereas pack/unpack work with
    the individual message parts in the message list.

    Parameters
    ----------
    msg : dict or Message
        The nested message dict as returned by the self.msg method.

    Returns
    -------
    msg_list : list
        The list of bytes objects to be sent with the format::

            [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent,
             p_metadata, p_content, buffer1, buffer2, ...]

        In this list, the ``p_*`` entities are the packed or serialized
        versions, so if JSON is used, these are utf8 encoded JSON strings.
    """
    content = msg.get('content', {})
    if content is None:
        content = self.none
    elif isinstance(content, dict):
        content = self.pack(content)
    elif isinstance(content, bytes):
        pass  # already packed, as in a relayed message
    elif isinstance(content, unicode_type):
        # should be bytes, but JSON often spits out unicode
        content = content.encode('utf8')
    else:
        raise TypeError("Content incorrect type: %s"%type(content))

    payload = [
        self.pack(msg['header']),
        self.pack(msg['parent_header']),
        self.pack(msg['metadata']),
        content,
    ]

    to_send = []
    if isinstance(ident, list):
        # accept list of idents
        to_send.extend(ident)
    elif ident is not None:
        to_send.append(ident)
    to_send.append(DELIM)

    # signature covers only the packed message parts, not the idents
    to_send.append(self.sign(payload))
    to_send.extend(payload)

    return to_send
571
573
def send(self, stream, msg_or_type, content=None, parent=None, ident=None,
         buffers=None, track=False, header=None, metadata=None):
    """Build and send a message via stream or socket.

    The message format used by this function internally is::

        [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_content,
         buffer1,buffer2,...]

    The serialize/unserialize methods convert the nested message dict
    into this format.

    Parameters
    ----------
    stream : zmq.Socket or ZMQStream
        The socket-like object used to send the data.
    msg_or_type : str or Message/dict
        Normally, msg_or_type will be a msg_type unless a message is
        being sent more than once. If a header is supplied, this can be
        set to None and the msg_type will be pulled from the header.
    content : dict or None
        The content of the message (ignored if msg_or_type is a message).
    header : dict or None
        The header dict for the message (ignored if msg_to_type is a message).
    parent : Message or dict or None
        The parent or parent header describing the parent of this message
        (ignored if msg_or_type is a message).
    ident : bytes or list of bytes
        The zmq.IDENTITY routing path.
    metadata : dict or None
        The metadata describing the message
    buffers : list or None
        The already-serialized buffers to be appended to the message.
    track : bool
        Whether to track.  Only for use with Sockets, because ZMQStream
        objects cannot track messages.

    Returns
    -------
    msg : dict
        The constructed message.
    """
    if not isinstance(stream, zmq.Socket):
        # ZMQStreams and dummy sockets do not support tracking.
        track = False

    if isinstance(msg_or_type, (Message, dict)):
        # already a constructed Message/dict; don't build a new one
        msg = msg_or_type
    else:
        msg = self.msg(msg_or_type, content=content, parent=parent,
                       header=header, metadata=metadata)
    if os.getpid() != self.pid:
        # refuse to reuse this Session from a forked child process
        io.rprint("WARNING: attempted to send message from fork")
        io.rprint(msg)
        return
    buffers = [] if buffers is None else buffers
    to_send = self.serialize(msg, ident)
    to_send.extend(buffers)
    longest = max(len(s) for s in to_send)
    copy = (longest < self.copy_threshold)

    if buffers and track and not copy:
        # only really track when we are doing zero-copy buffers
        tracker = stream.send_multipart(to_send, copy=False, track=True)
    else:
        # use dummy tracker, which will be done immediately
        tracker = DONE
        stream.send_multipart(to_send, copy=copy)

    if self.debug:
        pprint.pprint(msg)
        pprint.pprint(to_send)
        pprint.pprint(buffers)

    msg['tracker'] = tracker

    return msg
654
656
def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
    """Send a raw message via ident path.

    This method is used to send an already serialized message.

    Parameters
    ----------
    stream : ZMQStream or Socket
        The ZMQ stream or socket to use for sending the message.
    msg_list : list
        The serialized list of messages to send. This only includes the
        [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of
        the message.
    ident : ident or list
        A single ident or a list of idents to use in sending.
    """
    to_send = []
    if isinstance(ident, bytes):
        ident = [ident]
    if ident is not None:
        to_send.extend(ident)

    to_send.append(DELIM)
    # sign only the serialized message parts, not the idents/DELIM
    to_send.append(self.sign(msg_list))
    to_send.extend(msg_list)
    # BUG FIX: this previously sent `msg_list` instead of `to_send`,
    # silently dropping the routing idents, delimiter, and HMAC signature.
    stream.send_multipart(to_send, flags, copy=copy)
681
683
def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
    """Receive and unpack a message.

    Parameters
    ----------
    socket : ZMQStream or Socket
        The socket or stream to use in receiving.

    Returns
    -------
    [idents], msg
        [idents] is a list of idents and msg is a nested message dict of
        same format as self.msg returns.
    """
    if isinstance(socket, ZMQStream):
        socket = socket.socket
    try:
        msg_list = socket.recv_multipart(mode, copy=copy)
    except zmq.ZMQError as e:
        if e.errno != zmq.EAGAIN:
            raise
        # no message waiting; recv_multipart itself never returns None,
        # so (None, None) unambiguously means "nothing received"
        return None, None
    # split multipart message into identity list and message dict
    # invalid large messages can cause very expensive string comparisons
    idents, msg_list = self.feed_identities(msg_list, copy)
    try:
        return idents, self.unserialize(msg_list, content=content, copy=copy)
    except Exception as e:
        # TODO: handle it
        raise e
715
717
def feed_identities(self, msg_list, copy=True):
    """Split the identities from the rest of the message.

    Feed until DELIM is reached, then return the prefix as idents and
    remainder as msg_list. This is easily broken by setting an IDENT to DELIM,
    but that would be silly.

    Parameters
    ----------
    msg_list : a list of Message or bytes objects
        The message to be split.
    copy : bool
        flag determining whether the arguments are bytes or Messages

    Returns
    -------
    (idents, msg_list) : two lists
        idents will always be a list of bytes, each of which is a ZMQ
        identity. msg_list will be a list of bytes or zmq.Messages of the
        form [HMAC,p_header,p_parent,p_content,buffer1,buffer2,...] and
        should be unpackable/unserializable via self.unserialize at this
        point.
    """
    if copy:
        # frames are plain bytes, so list.index works directly
        split_at = msg_list.index(DELIM)
        return msg_list[:split_at], msg_list[split_at+1:]
    # zero-copy: frames are zmq.Message objects; compare their bytes
    for split_at, frame in enumerate(msg_list):
        if frame.bytes == DELIM:
            break
    else:
        raise ValueError("DELIM not in msg_list")
    idents, msg_list = msg_list[:split_at], msg_list[split_at+1:]
    return [m.bytes for m in idents], msg_list
752
754
753 def _add_digest(self, signature):
755 def _add_digest(self, signature):
754 """add a digest to history to protect against replay attacks"""
756 """add a digest to history to protect against replay attacks"""
755 if self.digest_history_size == 0:
757 if self.digest_history_size == 0:
756 # no history, never add digests
758 # no history, never add digests
757 return
759 return
758
760
759 self.digest_history.add(signature)
761 self.digest_history.add(signature)
760 if len(self.digest_history) > self.digest_history_size:
762 if len(self.digest_history) > self.digest_history_size:
761 # threshold reached, cull 10%
763 # threshold reached, cull 10%
762 self._cull_digest_history()
764 self._cull_digest_history()
763
765
764 def _cull_digest_history(self):
766 def _cull_digest_history(self):
765 """cull the digest history
767 """cull the digest history
766
768
767 Removes a randomly selected 10% of the digest history
769 Removes a randomly selected 10% of the digest history
768 """
770 """
769 current = len(self.digest_history)
771 current = len(self.digest_history)
770 n_to_cull = max(int(current // 10), current - self.digest_history_size)
772 n_to_cull = max(int(current // 10), current - self.digest_history_size)
771 if n_to_cull >= current:
773 if n_to_cull >= current:
772 self.digest_history = set()
774 self.digest_history = set()
773 return
775 return
774 to_cull = random.sample(self.digest_history, n_to_cull)
776 to_cull = random.sample(self.digest_history, n_to_cull)
775 self.digest_history.difference_update(to_cull)
777 self.digest_history.difference_update(to_cull)
776
778
777 def unserialize(self, msg_list, content=True, copy=True):
779 def unserialize(self, msg_list, content=True, copy=True):
778 """Unserialize a msg_list to a nested message dict.
780 """Unserialize a msg_list to a nested message dict.
779
781
780 This is roughly the inverse of serialize. The serialize/unserialize
782 This is roughly the inverse of serialize. The serialize/unserialize
781 methods work with full message lists, whereas pack/unpack work with
783 methods work with full message lists, whereas pack/unpack work with
782 the individual message parts in the message list.
784 the individual message parts in the message list.
783
785
784 Parameters
786 Parameters
785 ----------
787 ----------
786 msg_list : list of bytes or Message objects
788 msg_list : list of bytes or Message objects
787 The list of message parts of the form [HMAC,p_header,p_parent,
789 The list of message parts of the form [HMAC,p_header,p_parent,
788 p_metadata,p_content,buffer1,buffer2,...].
790 p_metadata,p_content,buffer1,buffer2,...].
789 content : bool (True)
791 content : bool (True)
790 Whether to unpack the content dict (True), or leave it packed
792 Whether to unpack the content dict (True), or leave it packed
791 (False).
793 (False).
792 copy : bool (True)
794 copy : bool (True)
793 Whether to return the bytes (True), or the non-copying Message
795 Whether to return the bytes (True), or the non-copying Message
794 object in each place (False).
796 object in each place (False).
795
797
796 Returns
798 Returns
797 -------
799 -------
798 msg : dict
800 msg : dict
799 The nested message dict with top-level keys [header, parent_header,
801 The nested message dict with top-level keys [header, parent_header,
800 content, buffers].
802 content, buffers].
801 """
803 """
802 minlen = 5
804 minlen = 5
803 message = {}
805 message = {}
804 if not copy:
806 if not copy:
805 for i in range(minlen):
807 for i in range(minlen):
806 msg_list[i] = msg_list[i].bytes
808 msg_list[i] = msg_list[i].bytes
807 if self.auth is not None:
809 if self.auth is not None:
808 signature = msg_list[0]
810 signature = msg_list[0]
809 if not signature:
811 if not signature:
810 raise ValueError("Unsigned Message")
812 raise ValueError("Unsigned Message")
811 if signature in self.digest_history:
813 if signature in self.digest_history:
812 raise ValueError("Duplicate Signature: %r" % signature)
814 raise ValueError("Duplicate Signature: %r" % signature)
813 self._add_digest(signature)
815 self._add_digest(signature)
814 check = self.sign(msg_list[1:5])
816 check = self.sign(msg_list[1:5])
815 if not signature == check:
817 if not signature == check:
816 raise ValueError("Invalid Signature: %r" % signature)
818 raise ValueError("Invalid Signature: %r" % signature)
817 if not len(msg_list) >= minlen:
819 if not len(msg_list) >= minlen:
818 raise TypeError("malformed message, must have at least %i elements"%minlen)
820 raise TypeError("malformed message, must have at least %i elements"%minlen)
819 header = self.unpack(msg_list[1])
821 header = self.unpack(msg_list[1])
820 message['header'] = extract_dates(header)
822 message['header'] = extract_dates(header)
821 message['msg_id'] = header['msg_id']
823 message['msg_id'] = header['msg_id']
822 message['msg_type'] = header['msg_type']
824 message['msg_type'] = header['msg_type']
823 message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
825 message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
824 message['metadata'] = self.unpack(msg_list[3])
826 message['metadata'] = self.unpack(msg_list[3])
825 if content:
827 if content:
826 message['content'] = self.unpack(msg_list[4])
828 message['content'] = self.unpack(msg_list[4])
827 else:
829 else:
828 message['content'] = msg_list[4]
830 message['content'] = msg_list[4]
829
831
830 message['buffers'] = msg_list[5:]
832 message['buffers'] = msg_list[5:]
831 return message
833 return message
832
834
833 def test_msg2obj():
835 def test_msg2obj():
834 am = dict(x=1)
836 am = dict(x=1)
835 ao = Message(am)
837 ao = Message(am)
836 assert ao.x == am['x']
838 assert ao.x == am['x']
837
839
838 am['y'] = dict(z=1)
840 am['y'] = dict(z=1)
839 ao = Message(am)
841 ao = Message(am)
840 assert ao.y.z == am['y']['z']
842 assert ao.y.z == am['y']['z']
841
843
842 k1, k2 = 'y', 'z'
844 k1, k2 = 'y', 'z'
843 assert ao[k1][k2] == am[k1][k2]
845 assert ao[k1][k2] == am[k1][k2]
844
846
845 am2 = dict(ao)
847 am2 = dict(ao)
846 assert am['x'] == am2['x']
848 assert am['x'] == am2['x']
847 assert am['y']['z'] == am2['y']['z']
849 assert am['y']['z'] == am2['y']['z']
848
850
@@ -1,624 +1,624 b''
1 """A ZMQ-based subclass of InteractiveShell.
1 """A ZMQ-based subclass of InteractiveShell.
2
2
3 This code is meant to ease the refactoring of the base InteractiveShell into
3 This code is meant to ease the refactoring of the base InteractiveShell into
4 something with a cleaner architecture for 2-process use, without actually
4 something with a cleaner architecture for 2-process use, without actually
5 breaking InteractiveShell itself. So we're doing something a bit ugly, where
5 breaking InteractiveShell itself. So we're doing something a bit ugly, where
6 we subclass and override what we want to fix. Once this is working well, we
6 we subclass and override what we want to fix. Once this is working well, we
7 can go back to the base class and refactor the code for a cleaner inheritance
7 can go back to the base class and refactor the code for a cleaner inheritance
8 implementation that doesn't rely on so much monkeypatching.
8 implementation that doesn't rely on so much monkeypatching.
9
9
10 But this lets us maintain a fully working IPython as we develop the new
10 But this lets us maintain a fully working IPython as we develop the new
11 machinery. This should thus be thought of as scaffolding.
11 machinery. This should thus be thought of as scaffolding.
12 """
12 """
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14 # Imports
14 # Imports
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16 from __future__ import print_function
16 from __future__ import print_function
17
17
18 # Stdlib
18 # Stdlib
19 import os
19 import os
20 import sys
20 import sys
21 import time
21 import time
22
22
23 # System library imports
23 # System library imports
24 from zmq.eventloop import ioloop
24 from zmq.eventloop import ioloop
25
25
26 # Our own
26 # Our own
27 from IPython.core.interactiveshell import (
27 from IPython.core.interactiveshell import (
28 InteractiveShell, InteractiveShellABC
28 InteractiveShell, InteractiveShellABC
29 )
29 )
30 from IPython.core import page
30 from IPython.core import page
31 from IPython.core.autocall import ZMQExitAutocall
31 from IPython.core.autocall import ZMQExitAutocall
32 from IPython.core.displaypub import DisplayPublisher
32 from IPython.core.displaypub import DisplayPublisher
33 from IPython.core.error import UsageError
33 from IPython.core.error import UsageError
34 from IPython.core.magics import MacroToEdit, CodeMagics
34 from IPython.core.magics import MacroToEdit, CodeMagics
35 from IPython.core.magic import magics_class, line_magic, Magics
35 from IPython.core.magic import magics_class, line_magic, Magics
36 from IPython.core.payloadpage import install_payload_page
36 from IPython.core.payloadpage import install_payload_page
37 from IPython.display import display, Javascript
37 from IPython.display import display, Javascript
38 from IPython.kernel.inprocess.socket import SocketABC
38 from IPython.kernel.inprocess.socket import SocketABC
39 from IPython.kernel import (
39 from IPython.kernel import (
40 get_connection_file, get_connection_info, connect_qtconsole
40 get_connection_file, get_connection_info, connect_qtconsole
41 )
41 )
42 from IPython.testing.skipdoctest import skip_doctest
42 from IPython.testing.skipdoctest import skip_doctest
43 from IPython.utils import openpy
43 from IPython.utils import openpy
44 from IPython.utils.jsonutil import json_clean, encode_images
44 from IPython.utils.jsonutil import json_clean, encode_images
45 from IPython.utils.process import arg_split
45 from IPython.utils.process import arg_split
46 from IPython.utils import py3compat
46 from IPython.utils import py3compat
47 from IPython.utils.py3compat import unicode_type
47 from IPython.utils.py3compat import unicode_type
48 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes, Any
48 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes, Any
49 from IPython.utils.warn import error
49 from IPython.utils.warn import error
50 from IPython.kernel.zmq.displayhook import ZMQShellDisplayHook
50 from IPython.kernel.zmq.displayhook import ZMQShellDisplayHook
51 from IPython.kernel.zmq.datapub import ZMQDataPublisher
51 from IPython.kernel.zmq.datapub import ZMQDataPublisher
52 from IPython.kernel.zmq.session import extract_header
52 from IPython.kernel.zmq.session import extract_header
53 from IPython.kernel.comm import CommManager
53 from IPython.kernel.comm import CommManager
54 from .session import Session
54 from .session import Session
55
55
56 #-----------------------------------------------------------------------------
56 #-----------------------------------------------------------------------------
57 # Functions and classes
57 # Functions and classes
58 #-----------------------------------------------------------------------------
58 #-----------------------------------------------------------------------------
59
59
60 class ZMQDisplayPublisher(DisplayPublisher):
60 class ZMQDisplayPublisher(DisplayPublisher):
61 """A display publisher that publishes data using a ZeroMQ PUB socket."""
61 """A display publisher that publishes data using a ZeroMQ PUB socket."""
62
62
63 session = Instance(Session)
63 session = Instance(Session)
64 pub_socket = Instance(SocketABC)
64 pub_socket = Instance(SocketABC)
65 parent_header = Dict({})
65 parent_header = Dict({})
66 topic = CBytes(b'display_data')
66 topic = CBytes(b'display_data')
67
67
68 def set_parent(self, parent):
68 def set_parent(self, parent):
69 """Set the parent for outbound messages."""
69 """Set the parent for outbound messages."""
70 self.parent_header = extract_header(parent)
70 self.parent_header = extract_header(parent)
71
71
72 def _flush_streams(self):
72 def _flush_streams(self):
73 """flush IO Streams prior to display"""
73 """flush IO Streams prior to display"""
74 sys.stdout.flush()
74 sys.stdout.flush()
75 sys.stderr.flush()
75 sys.stderr.flush()
76
76
77 def publish(self, source, data, metadata=None):
77 def publish(self, source, data, metadata=None):
78 self._flush_streams()
78 self._flush_streams()
79 if metadata is None:
79 if metadata is None:
80 metadata = {}
80 metadata = {}
81 self._validate_data(source, data, metadata)
81 self._validate_data(source, data, metadata)
82 content = {}
82 content = {}
83 content['source'] = source
83 content['source'] = source
84 content['data'] = encode_images(data)
84 content['data'] = encode_images(data)
85 content['metadata'] = metadata
85 content['metadata'] = metadata
86 self.session.send(
86 self.session.send(
87 self.pub_socket, u'display_data', json_clean(content),
87 self.pub_socket, u'display_data', json_clean(content),
88 parent=self.parent_header, ident=self.topic,
88 parent=self.parent_header, ident=self.topic,
89 )
89 )
90
90
91 def clear_output(self, wait=False):
91 def clear_output(self, wait=False):
92 content = dict(wait=wait)
92 content = dict(wait=wait)
93
93
94 print('\r', file=sys.stdout, end='')
94 print('\r', file=sys.stdout, end='')
95 print('\r', file=sys.stderr, end='')
95 print('\r', file=sys.stderr, end='')
96 self._flush_streams()
96 self._flush_streams()
97
97
98 self.session.send(
98 self.session.send(
99 self.pub_socket, u'clear_output', content,
99 self.pub_socket, u'clear_output', content,
100 parent=self.parent_header, ident=self.topic,
100 parent=self.parent_header, ident=self.topic,
101 )
101 )
102
102
103 @magics_class
103 @magics_class
104 class KernelMagics(Magics):
104 class KernelMagics(Magics):
105 #------------------------------------------------------------------------
105 #------------------------------------------------------------------------
106 # Magic overrides
106 # Magic overrides
107 #------------------------------------------------------------------------
107 #------------------------------------------------------------------------
108 # Once the base class stops inheriting from magic, this code needs to be
108 # Once the base class stops inheriting from magic, this code needs to be
109 # moved into a separate machinery as well. For now, at least isolate here
109 # moved into a separate machinery as well. For now, at least isolate here
110 # the magics which this class needs to implement differently from the base
110 # the magics which this class needs to implement differently from the base
111 # class, or that are unique to it.
111 # class, or that are unique to it.
112
112
113 @line_magic
113 @line_magic
114 def doctest_mode(self, parameter_s=''):
114 def doctest_mode(self, parameter_s=''):
115 """Toggle doctest mode on and off.
115 """Toggle doctest mode on and off.
116
116
117 This mode is intended to make IPython behave as much as possible like a
117 This mode is intended to make IPython behave as much as possible like a
118 plain Python shell, from the perspective of how its prompts, exceptions
118 plain Python shell, from the perspective of how its prompts, exceptions
119 and output look. This makes it easy to copy and paste parts of a
119 and output look. This makes it easy to copy and paste parts of a
120 session into doctests. It does so by:
120 session into doctests. It does so by:
121
121
122 - Changing the prompts to the classic ``>>>`` ones.
122 - Changing the prompts to the classic ``>>>`` ones.
123 - Changing the exception reporting mode to 'Plain'.
123 - Changing the exception reporting mode to 'Plain'.
124 - Disabling pretty-printing of output.
124 - Disabling pretty-printing of output.
125
125
126 Note that IPython also supports the pasting of code snippets that have
126 Note that IPython also supports the pasting of code snippets that have
127 leading '>>>' and '...' prompts in them. This means that you can paste
127 leading '>>>' and '...' prompts in them. This means that you can paste
128 doctests from files or docstrings (even if they have leading
128 doctests from files or docstrings (even if they have leading
129 whitespace), and the code will execute correctly. You can then use
129 whitespace), and the code will execute correctly. You can then use
130 '%history -t' to see the translated history; this will give you the
130 '%history -t' to see the translated history; this will give you the
131 input after removal of all the leading prompts and whitespace, which
131 input after removal of all the leading prompts and whitespace, which
132 can be pasted back into an editor.
132 can be pasted back into an editor.
133
133
134 With these features, you can switch into this mode easily whenever you
134 With these features, you can switch into this mode easily whenever you
135 need to do testing and changes to doctests, without having to leave
135 need to do testing and changes to doctests, without having to leave
136 your existing IPython session.
136 your existing IPython session.
137 """
137 """
138
138
139 from IPython.utils.ipstruct import Struct
139 from IPython.utils.ipstruct import Struct
140
140
141 # Shorthands
141 # Shorthands
142 shell = self.shell
142 shell = self.shell
143 disp_formatter = self.shell.display_formatter
143 disp_formatter = self.shell.display_formatter
144 ptformatter = disp_formatter.formatters['text/plain']
144 ptformatter = disp_formatter.formatters['text/plain']
145 # dstore is a data store kept in the instance metadata bag to track any
145 # dstore is a data store kept in the instance metadata bag to track any
146 # changes we make, so we can undo them later.
146 # changes we make, so we can undo them later.
147 dstore = shell.meta.setdefault('doctest_mode', Struct())
147 dstore = shell.meta.setdefault('doctest_mode', Struct())
148 save_dstore = dstore.setdefault
148 save_dstore = dstore.setdefault
149
149
150 # save a few values we'll need to recover later
150 # save a few values we'll need to recover later
151 mode = save_dstore('mode', False)
151 mode = save_dstore('mode', False)
152 save_dstore('rc_pprint', ptformatter.pprint)
152 save_dstore('rc_pprint', ptformatter.pprint)
153 save_dstore('rc_active_types',disp_formatter.active_types)
153 save_dstore('rc_active_types',disp_formatter.active_types)
154 save_dstore('xmode', shell.InteractiveTB.mode)
154 save_dstore('xmode', shell.InteractiveTB.mode)
155
155
156 if mode == False:
156 if mode == False:
157 # turn on
157 # turn on
158 ptformatter.pprint = False
158 ptformatter.pprint = False
159 disp_formatter.active_types = ['text/plain']
159 disp_formatter.active_types = ['text/plain']
160 shell.magic('xmode Plain')
160 shell.magic('xmode Plain')
161 else:
161 else:
162 # turn off
162 # turn off
163 ptformatter.pprint = dstore.rc_pprint
163 ptformatter.pprint = dstore.rc_pprint
164 disp_formatter.active_types = dstore.rc_active_types
164 disp_formatter.active_types = dstore.rc_active_types
165 shell.magic("xmode " + dstore.xmode)
165 shell.magic("xmode " + dstore.xmode)
166
166
167 # Store new mode and inform on console
167 # Store new mode and inform on console
168 dstore.mode = bool(1-int(mode))
168 dstore.mode = bool(1-int(mode))
169 mode_label = ['OFF','ON'][dstore.mode]
169 mode_label = ['OFF','ON'][dstore.mode]
170 print('Doctest mode is:', mode_label)
170 print('Doctest mode is:', mode_label)
171
171
172 # Send the payload back so that clients can modify their prompt display
172 # Send the payload back so that clients can modify their prompt display
173 payload = dict(
173 payload = dict(
174 source='doctest_mode',
174 source='doctest_mode',
175 mode=dstore.mode)
175 mode=dstore.mode)
176 shell.payload_manager.write_payload(payload)
176 shell.payload_manager.write_payload(payload)
177
177
178
178
179 _find_edit_target = CodeMagics._find_edit_target
179 _find_edit_target = CodeMagics._find_edit_target
180
180
181 @skip_doctest
181 @skip_doctest
182 @line_magic
182 @line_magic
183 def edit(self, parameter_s='', last_call=['','']):
183 def edit(self, parameter_s='', last_call=['','']):
184 """Bring up an editor and execute the resulting code.
184 """Bring up an editor and execute the resulting code.
185
185
186 Usage:
186 Usage:
187 %edit [options] [args]
187 %edit [options] [args]
188
188
189 %edit runs an external text editor. You will need to set the command for
189 %edit runs an external text editor. You will need to set the command for
190 this editor via the ``TerminalInteractiveShell.editor`` option in your
190 this editor via the ``TerminalInteractiveShell.editor`` option in your
191 configuration file before it will work.
191 configuration file before it will work.
192
192
193 This command allows you to conveniently edit multi-line code right in
193 This command allows you to conveniently edit multi-line code right in
194 your IPython session.
194 your IPython session.
195
195
196 If called without arguments, %edit opens up an empty editor with a
196 If called without arguments, %edit opens up an empty editor with a
197 temporary file and will execute the contents of this file when you
197 temporary file and will execute the contents of this file when you
198 close it (don't forget to save it!).
198 close it (don't forget to save it!).
199
199
200
201 Options:
200 Options:
202
201
203 -n <number>: open the editor at a specified line number. By default,
202 -n <number>
204 the IPython editor hook uses the unix syntax 'editor +N filename', but
203 Open the editor at a specified line number. By default, the IPython
205 you can configure this by providing your own modified hook if your
204 editor hook uses the unix syntax 'editor +N filename', but you can
206 favorite editor supports line-number specifications with a different
205 configure this by providing your own modified hook if your favorite
207 syntax.
206 editor supports line-number specifications with a different syntax.
208
207
209 -p: this will call the editor with the same data as the previous time
208 -p
210 it was used, regardless of how long ago (in your current session) it
209 Call the editor with the same data as the previous time it was used,
211 was.
210 regardless of how long ago (in your current session) it was.
212
211
213 -r: use 'raw' input. This option only applies to input taken from the
212 -r
214 user's history. By default, the 'processed' history is used, so that
213 Use 'raw' input. This option only applies to input taken from the
215 magics are loaded in their transformed version to valid Python. If
214 user's history. By default, the 'processed' history is used, so that
216 this option is given, the raw input as typed as the command line is
215 magics are loaded in their transformed version to valid Python. If
217 used instead. When you exit the editor, it will be executed by
216 this option is given, the raw input as typed as the command line is
218 IPython's own processor.
217 used instead. When you exit the editor, it will be executed by
219
218 IPython's own processor.
220 -x: do not execute the edited code immediately upon exit. This is
219
221 mainly useful if you are editing programs which need to be called with
220 -x
222 command line arguments, which you can then do using %run.
221 Do not execute the edited code immediately upon exit. This is mainly
223
222 useful if you are editing programs which need to be called with
223 command line arguments, which you can then do using %run.
224
224
225 Arguments:
225 Arguments:
226
226
227 If arguments are given, the following possibilites exist:
227 If arguments are given, the following possibilites exist:
228
228
229 - The arguments are numbers or pairs of colon-separated numbers (like
229 - The arguments are numbers or pairs of colon-separated numbers (like
230 1 4:8 9). These are interpreted as lines of previous input to be
230 1 4:8 9). These are interpreted as lines of previous input to be
231 loaded into the editor. The syntax is the same of the %macro command.
231 loaded into the editor. The syntax is the same of the %macro command.
232
232
233 - If the argument doesn't start with a number, it is evaluated as a
233 - If the argument doesn't start with a number, it is evaluated as a
234 variable and its contents loaded into the editor. You can thus edit
234 variable and its contents loaded into the editor. You can thus edit
235 any string which contains python code (including the result of
235 any string which contains python code (including the result of
236 previous edits).
236 previous edits).
237
237
238 - If the argument is the name of an object (other than a string),
238 - If the argument is the name of an object (other than a string),
239 IPython will try to locate the file where it was defined and open the
239 IPython will try to locate the file where it was defined and open the
240 editor at the point where it is defined. You can use `%edit function`
240 editor at the point where it is defined. You can use ``%edit function``
241 to load an editor exactly at the point where 'function' is defined,
241 to load an editor exactly at the point where 'function' is defined,
242 edit it and have the file be executed automatically.
242 edit it and have the file be executed automatically.
243
243
244 If the object is a macro (see %macro for details), this opens up your
244 If the object is a macro (see %macro for details), this opens up your
245 specified editor with a temporary file containing the macro's data.
245 specified editor with a temporary file containing the macro's data.
246 Upon exit, the macro is reloaded with the contents of the file.
246 Upon exit, the macro is reloaded with the contents of the file.
247
247
248 Note: opening at an exact line is only supported under Unix, and some
248 Note: opening at an exact line is only supported under Unix, and some
249 editors (like kedit and gedit up to Gnome 2.8) do not understand the
249 editors (like kedit and gedit up to Gnome 2.8) do not understand the
250 '+NUMBER' parameter necessary for this feature. Good editors like
250 '+NUMBER' parameter necessary for this feature. Good editors like
251 (X)Emacs, vi, jed, pico and joe all do.
251 (X)Emacs, vi, jed, pico and joe all do.
252
252
253 - If the argument is not found as a variable, IPython will look for a
253 - If the argument is not found as a variable, IPython will look for a
254 file with that name (adding .py if necessary) and load it into the
254 file with that name (adding .py if necessary) and load it into the
255 editor. It will execute its contents with execfile() when you exit,
255 editor. It will execute its contents with execfile() when you exit,
256 loading any code in the file into your interactive namespace.
256 loading any code in the file into your interactive namespace.
257
257
258 After executing your code, %edit will return as output the code you
258 After executing your code, %edit will return as output the code you
259 typed in the editor (except when it was an existing file). This way
259 typed in the editor (except when it was an existing file). This way
260 you can reload the code in further invocations of %edit as a variable,
260 you can reload the code in further invocations of %edit as a variable,
261 via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of
261 via ``_<NUMBER>` or ``Out[<NUMBER>]``, where <NUMBER> is the prompt number of
262 the output.
262 the output.
263
263
264 Note that %edit is also available through the alias %ed.
264 Note that %edit is also available through the alias %ed.
265
265
266 This is an example of creating a simple function inside the editor and
266 This is an example of creating a simple function inside the editor and
267 then modifying it. First, start up the editor:
267 then modifying it. First, start up the editor::
268
268
269 In [1]: ed
269 In [1]: ed
270 Editing... done. Executing edited code...
270 Editing... done. Executing edited code...
271 Out[1]: 'def foo():n print "foo() was defined in an editing session"n'
271 Out[1]: 'def foo():n print "foo() was defined in an editing session"n'
272
272
273 We can then call the function foo():
273 We can then call the function foo()::
274
274
275 In [2]: foo()
275 In [2]: foo()
276 foo() was defined in an editing session
276 foo() was defined in an editing session
277
277
278 Now we edit foo. IPython automatically loads the editor with the
278 Now we edit foo. IPython automatically loads the editor with the
279 (temporary) file where foo() was previously defined:
279 (temporary) file where foo() was previously defined::
280
280
281 In [3]: ed foo
281 In [3]: ed foo
282 Editing... done. Executing edited code...
282 Editing... done. Executing edited code...
283
283
284 And if we call foo() again we get the modified version:
284 And if we call foo() again we get the modified version::
285
285
286 In [4]: foo()
286 In [4]: foo()
287 foo() has now been changed!
287 foo() has now been changed!
288
288
289 Here is an example of how to edit a code snippet successive
289 Here is an example of how to edit a code snippet successive
290 times. First we call the editor:
290 times. First we call the editor::
291
291
292 In [5]: ed
292 In [5]: ed
293 Editing... done. Executing edited code...
293 Editing... done. Executing edited code...
294 hello
294 hello
295 Out[5]: "print 'hello'n"
295 Out[5]: "print 'hello'n"
296
296
297 Now we call it again with the previous output (stored in _):
297 Now we call it again with the previous output (stored in _)::
298
298
299 In [6]: ed _
299 In [6]: ed _
300 Editing... done. Executing edited code...
300 Editing... done. Executing edited code...
301 hello world
301 hello world
302 Out[6]: "print 'hello world'n"
302 Out[6]: "print 'hello world'n"
303
303
304 Now we call it with the output #8 (stored in _8, also as Out[8]):
304 Now we call it with the output #8 (stored in ``_8``, also as Out[8])::
305
305
306 In [7]: ed _8
306 In [7]: ed _8
307 Editing... done. Executing edited code...
307 Editing... done. Executing edited code...
308 hello again
308 hello again
309 Out[7]: "print 'hello again'n"
309 Out[7]: "print 'hello again'n"
310 """
310 """
311
311
312 opts,args = self.parse_options(parameter_s,'prn:')
312 opts,args = self.parse_options(parameter_s,'prn:')
313
313
314 try:
314 try:
315 filename, lineno, _ = CodeMagics._find_edit_target(self.shell, args, opts, last_call)
315 filename, lineno, _ = CodeMagics._find_edit_target(self.shell, args, opts, last_call)
316 except MacroToEdit as e:
316 except MacroToEdit as e:
317 # TODO: Implement macro editing over 2 processes.
317 # TODO: Implement macro editing over 2 processes.
318 print("Macro editing not yet implemented in 2-process model.")
318 print("Macro editing not yet implemented in 2-process model.")
319 return
319 return
320
320
321 # Make sure we send to the client an absolute path, in case the working
321 # Make sure we send to the client an absolute path, in case the working
322 # directory of client and kernel don't match
322 # directory of client and kernel don't match
323 filename = os.path.abspath(filename)
323 filename = os.path.abspath(filename)
324
324
325 payload = {
325 payload = {
326 'source' : 'edit_magic',
326 'source' : 'edit_magic',
327 'filename' : filename,
327 'filename' : filename,
328 'line_number' : lineno
328 'line_number' : lineno
329 }
329 }
330 self.shell.payload_manager.write_payload(payload)
330 self.shell.payload_manager.write_payload(payload)
331
331
# Some magics need adapting to the specifics of pexpect and a remote terminal.

@line_magic
def clear(self, arg_s):
    """Clear the terminal."""
    # Pick the platform's native clear-screen command.
    command = "clear" if os.name == 'posix' else "cls"
    self.shell.system(command)

if os.name == 'nt':
    # Windows users usually know this command as 'cls'.
    cls = line_magic('cls')(clear)
346
346
# Terminal pagers won't work over pexpect, but we do have our own pager.

@line_magic
def less(self, arg_s):
    """Show a file through the pager.

    Files ending in .py are syntax-highlighted.

    Parameters
    ----------
    arg_s : str
        Name of the file to display.

    Raises
    ------
    UsageError
        If no filename is given.
    """
    if not arg_s:
        raise UsageError('Missing filename.')

    if arg_s.endswith('.py'):
        # read_py_file opens and decodes the file itself (honouring any
        # encoding cookie), so no separate open() is needed here.
        cont = self.shell.pycolorize(openpy.read_py_file(arg_s, skip_encoding_cookie=False))
    else:
        # Use a context manager so the handle is always closed.  The
        # original code opened the file twice (the first read was discarded
        # for .py files) and never closed either handle.
        with open(arg_s) as f:
            cont = f.read()
    page.page(cont)

more = line_magic('more')(less)
365
365
# Man calls a pager, so we also need to redefine it.
if os.name == 'posix':
    @line_magic
    def man(self, arg_s):
        """Find the man page for the given command and display in pager."""
        # `col -b` strips the backspace-overstrike formatting man emits.
        output = self.shell.getoutput('man %s | col -b' % arg_s, split=False)
        page.page(output)
373
373
@line_magic
def connect_info(self, arg_s):
    """Print information for connecting other clients to this kernel

    It will print the contents of this session's connection file, as well as
    shortcuts for local clients.

    In the simplest case, when called from the most recently launched kernel,
    secondary clients can be connected, simply with::

        $> ipython <app> --existing

    """
    from IPython.core.application import BaseIPythonApplication as BaseIPApp

    # Prefer profile information from the running application, falling back
    # to defaults when no app has been initialized.
    if BaseIPApp.initialized():
        app = BaseIPApp.instance()
        security_dir = app.profile_dir.security_dir
        profile = app.profile
    else:
        profile = 'default'
        security_dir = ''

    try:
        connection_file = get_connection_file()
        info = get_connection_info(unpack=False)
    except Exception as err:
        error("Could not get connection info: %r" % err)
        return

    # Only non-default profiles need an explicit --profile flag.
    profile_flag = "--profile %s" % profile if profile != 'default' else ""

    # Files inside the security dir can be referenced by basename alone.
    if os.path.dirname(connection_file) == security_dir:
        connection_file = os.path.basename(connection_file)

    print(info + '\n')
    print("Paste the above JSON into a file, and connect with:\n"
          " $> ipython <app> --existing <file>\n"
          "or, if you are local, you can connect with just:\n"
          " $> ipython <app> --existing {0} {1}\n"
          "or even just:\n"
          " $> ipython <app> --existing {1}\n"
          "if this is the most recent IPython session you have started.".format(
              connection_file, profile_flag))
424
424
@line_magic
def qtconsole(self, arg_s):
    """Open a qtconsole connected to this kernel.

    Useful for connecting a qtconsole to running notebooks, for better
    debugging.

    Parameters
    ----------
    arg_s : str
        Extra command-line arguments forwarded to the qtconsole process.
    """
    # %qtconsole should imply bind_kernel for engines:
    try:
        from IPython.parallel import bind_kernel
    except ImportError:
        # technically possible, because parallel has higher pyzmq min-version
        pass
    else:
        bind_kernel()

    try:
        # The Popen handle is intentionally discarded (fixes the unused
        # local `p`): the qtconsole runs as an independent process.
        connect_qtconsole(argv=arg_split(arg_s, os.name == 'posix'))
    except Exception as e:
        error("Could not start qtconsole: %r" % e)
        return
447
447
@line_magic
def autosave(self, arg_s):
    """Set the autosave interval in the notebook (in seconds).

    The default value is 120, or two minutes.
    ``%autosave 0`` will disable autosave.

    This magic only has an effect when called from the notebook interface.
    It has no effect when called in a startup file.
    """
    try:
        interval = int(arg_s)
    except ValueError:
        raise UsageError("%%autosave requires an integer, got %r" % arg_s)

    # The notebook's javascript API expects milliseconds, not seconds.
    display(Javascript("IPython.notebook.set_autosave_interval(%i)" % (1000 * interval)),
            include=['application/javascript'])
    if interval:
        print("Autosaving every %i seconds" % interval)
    else:
        print("Autosave disabled")
473
473
474
474
class ZMQInteractiveShell(InteractiveShell):
    """A subclass of InteractiveShell for ZMQ."""

    displayhook_class = Type(ZMQShellDisplayHook)
    display_pub_class = Type(ZMQDisplayPublisher)
    data_pub_class = Type(ZMQDataPublisher)
    kernel = Any()
    parent_header = Any()

    # Readline is pointless in a kernel, so override the parent traitlets.
    # These can be removed once the readline code moves to the terminal
    # frontend.
    colors_force = CBool(True)
    readline_use = CBool(False)
    # Autoindent has no meaning over ZMQ, and enabling it would print a
    # warning in the absence of readline.
    autoindent = CBool(False)

    exiter = Instance(ZMQExitAutocall)
    def _exiter_default(self):
        return ZMQExitAutocall(self)

    def _exit_now_changed(self, name, old, new):
        """stop eventloop when exit_now fires"""
        if new:
            loop = ioloop.IOLoop.instance()
            loop.add_timeout(time.time() + 0.1, loop.stop)

    keepkernel_on_exit = None

    # Over ZeroMQ, GUI control isn't done with PyOS_InputHook, as there is
    # no interactive input being read; event loop support lives in ipkernel.
    @staticmethod
    def enable_gui(gui):
        from .eventloops import enable_gui as real_enable_gui
        try:
            real_enable_gui(gui)
        except ValueError as e:
            raise UsageError("%s" % e)

    def init_environment(self):
        """Configure the user's environment.

        """
        env = os.environ
        # These two ensure 'ls' produces nice coloring on BSD-derived systems
        env['TERM'] = 'xterm-color'
        env['CLICOLOR'] = '1'
        # Normal pagers are unusable (over pexpect we have no single-key
        # control of the subprocess), so steer subprocesses away from paging
        # as much as possible.
        env['PAGER'] = 'cat'
        env['GIT_PAGER'] = 'cat'

        # And install the payload version of page.
        install_payload_page()

    def auto_rewrite_input(self, cmd):
        """Called to show the auto-rewritten input for autocall and friends.

        FIXME: this payload is currently not correctly processed by the
        frontend.
        """
        rewritten = self.prompt_manager.render('rewrite') + cmd
        self.payload_manager.write_payload(dict(
            source='auto_rewrite_input',
            transformed_input=rewritten,
        ))

    def ask_exit(self):
        """Engage the exit actions."""
        self.exit_now = True
        self.payload_manager.write_payload(dict(
            source='ask_exit',
            exit=True,
            keepkernel=self.keepkernel_on_exit,
        ))

    def _showtraceback(self, etype, evalue, stb):
        # Build the content dict for a 'pyerr' message.
        exc_content = {
            u'traceback' : stb,
            u'ename' : unicode_type(etype.__name__),
            u'evalue' : py3compat.safe_unicode(evalue),
        }

        dh = self.displayhook
        # Publish exception info on the pub socket so clients other than the
        # caller can pick it up.
        topic = None
        if dh.topic:
            topic = dh.topic.replace(b'pyout', b'pyerr')

        dh.session.send(dh.pub_socket, u'pyerr', json_clean(exc_content), dh.parent_header, ident=topic)

        # FIXME - Hack: store exception info in shell object.  Right now, the
        # caller is reading this info after the fact; we need to fix this
        # logic to remove the hack.  Even uglier, the error status must be
        # stored here because the main-loop logic that sets it is being
        # skipped (runlines swallows the exceptions).
        exc_content[u'status'] = u'error'
        self._reply_content = exc_content
        # /FIXME

        return exc_content

    def set_next_input(self, text):
        """Send the specified text to the frontend to be presented at the next
        input cell."""
        self.payload_manager.write_payload(dict(
            source='set_next_input',
            text=text,
        ))

    def set_parent(self, parent):
        """Set the parent header for associating output with its triggering input"""
        self.parent_header = parent
        self.displayhook.set_parent(parent)
        self.display_pub.set_parent(parent)
        self.data_pub.set_parent(parent)
        # The kernel's replacement stdout/stderr understand set_parent;
        # plain streams don't, so failures are ignored.
        for stream in (sys.stdout, sys.stderr):
            try:
                stream.set_parent(parent)
            except AttributeError:
                pass

    def get_parent(self):
        return self.parent_header

    #-------------------------------------------------------------------------
    # Things related to magics
    #-------------------------------------------------------------------------

    def init_magics(self):
        super(ZMQInteractiveShell, self).init_magics()
        self.register_magics(KernelMagics)
        self.magics_manager.register_alias('ed', 'edit')

    def init_comms(self):
        self.comm_manager = CommManager(shell=self, parent=self)
        self.configurables.append(self.comm_manager)


InteractiveShellABC.register(ZMQInteractiveShell)
@@ -1,486 +1,491 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """Manage background (threaded) jobs conveniently from an interactive shell.
2 """Manage background (threaded) jobs conveniently from an interactive shell.
3
3
4 This module provides a BackgroundJobManager class. This is the main class
4 This module provides a BackgroundJobManager class. This is the main class
5 meant for public usage, it implements an object which can create and manage
5 meant for public usage, it implements an object which can create and manage
6 new background jobs.
6 new background jobs.
7
7
8 It also provides the actual job classes managed by these BackgroundJobManager
8 It also provides the actual job classes managed by these BackgroundJobManager
9 objects, see their docstrings below.
9 objects, see their docstrings below.
10
10
11
11
12 This system was inspired by discussions with B. Granger and the
12 This system was inspired by discussions with B. Granger and the
13 BackgroundCommand class described in the book Python Scripting for
13 BackgroundCommand class described in the book Python Scripting for
14 Computational Science, by H. P. Langtangen:
14 Computational Science, by H. P. Langtangen:
15
15
16 http://folk.uio.no/hpl/scripting
16 http://folk.uio.no/hpl/scripting
17
17
18 (although ultimately no code from this text was used, as IPython's system is a
18 (although ultimately no code from this text was used, as IPython's system is a
19 separate implementation).
19 separate implementation).
20
20
21 An example notebook is provided in our documentation illustrating interactive
21 An example notebook is provided in our documentation illustrating interactive
22 use of the system.
22 use of the system.
23 """
23 """
24 from __future__ import print_function
24 from __future__ import print_function
25
25
26 #*****************************************************************************
26 #*****************************************************************************
27 # Copyright (C) 2005-2006 Fernando Perez <fperez@colorado.edu>
27 # Copyright (C) 2005-2006 Fernando Perez <fperez@colorado.edu>
28 #
28 #
29 # Distributed under the terms of the BSD License. The full license is in
29 # Distributed under the terms of the BSD License. The full license is in
30 # the file COPYING, distributed as part of this software.
30 # the file COPYING, distributed as part of this software.
31 #*****************************************************************************
31 #*****************************************************************************
32
32
33 # Code begins
33 # Code begins
34 import sys
34 import sys
35 import threading
35 import threading
36
36
37 from IPython import get_ipython
37 from IPython import get_ipython
38 from IPython.core.ultratb import AutoFormattedTB
38 from IPython.core.ultratb import AutoFormattedTB
39 from IPython.utils.warn import error
39 from IPython.utils.warn import error
40 from IPython.utils.py3compat import string_types
40 from IPython.utils.py3compat import string_types
41
41
42
42
43 class BackgroundJobManager(object):
43 class BackgroundJobManager(object):
44 """Class to manage a pool of backgrounded threaded jobs.
44 """Class to manage a pool of backgrounded threaded jobs.
45
45
46 Below, we assume that 'jobs' is a BackgroundJobManager instance.
46 Below, we assume that 'jobs' is a BackgroundJobManager instance.
47
47
48 Usage summary (see the method docstrings for details):
48 Usage summary (see the method docstrings for details):
49
49
50 jobs.new(...) -> start a new job
50 jobs.new(...) -> start a new job
51
51
52 jobs() or jobs.status() -> print status summary of all jobs
52 jobs() or jobs.status() -> print status summary of all jobs
53
53
54 jobs[N] -> returns job number N.
54 jobs[N] -> returns job number N.
55
55
56 foo = jobs[N].result -> assign to variable foo the result of job N
56 foo = jobs[N].result -> assign to variable foo the result of job N
57
57
58 jobs[N].traceback() -> print the traceback of dead job N
58 jobs[N].traceback() -> print the traceback of dead job N
59
59
60 jobs.remove(N) -> remove (finished) job N
60 jobs.remove(N) -> remove (finished) job N
61
61
62 jobs.flush() -> remove all finished jobs
62 jobs.flush() -> remove all finished jobs
63
63
64 As a convenience feature, BackgroundJobManager instances provide the
64 As a convenience feature, BackgroundJobManager instances provide the
65 utility result and traceback methods which retrieve the corresponding
65 utility result and traceback methods which retrieve the corresponding
66 information from the jobs list:
66 information from the jobs list:
67
67
68 jobs.result(N) <--> jobs[N].result
68 jobs.result(N) <--> jobs[N].result
69 jobs.traceback(N) <--> jobs[N].traceback()
69 jobs.traceback(N) <--> jobs[N].traceback()
70
70
71 While this appears minor, it allows you to use tab completion
71 While this appears minor, it allows you to use tab completion
72 interactively on the job manager instance.
72 interactively on the job manager instance.
73 """
73 """
74
74
def __init__(self):
    # The public job lists are exposed through properties so that they are
    # refreshed on every access; these are the private backing lists.
    self._running = []
    self._completed = []
    self._dead = []
    # Every job ever created, keyed by job number, for direct user access.
    self.all = {}
    # Jobs finished since the last status report (cleared after reporting).
    self._comp_report = []
    self._dead_report = []
    # Cache status codes locally for fast lookups.
    self._s_created = BackgroundJobBase.stat_created_c
    self._s_running = BackgroundJobBase.stat_running_c
    self._s_completed = BackgroundJobBase.stat_completed_c
    self._s_dead = BackgroundJobBase.stat_dead_c
91
91
@property
def running(self):
    """Jobs currently running, with status refreshed on access."""
    self._update_status()
    return self._running
96
96
@property
def dead(self):
    """Jobs that died with an error, with status refreshed on access."""
    self._update_status()
    return self._dead
101
101
@property
def completed(self):
    """Jobs that finished successfully, with status refreshed on access."""
    self._update_status()
    return self._completed
106
106
def new(self, func_or_exp, *args, **kwargs):
    """Add a new background job and start it in a separate thread.

    There are two types of jobs which can be created:

    1. Jobs based on expressions which can be passed to an eval() call.
       The expression must be given as a string.  For example::

         job_manager.new('myfunc(x,y,z=1)'[,glob[,loc]])

       The given expression is passed to eval(), along with the optional
       global/local dicts provided.  If no dicts are given, they are
       extracted automatically from the caller's frame.

       A Python statement is NOT a valid eval() expression.  Basically, you
       can only use as an eval() argument something which can go on the
       right of an '=' sign and be assigned to a variable.

       For example, "print 'hello'" is not valid, but '2+3' is.

    2. Jobs given a function object, optionally passing additional
       positional arguments::

         job_manager.new(myfunc, x, y)

       The function is called with the given arguments.

       If you need to pass keyword arguments to your function, you must
       supply them as a dict named kw::

         job_manager.new(myfunc, x, y, kw=dict(z=1))

       The reason for this asymmetry is that the new() method needs to
       maintain access to its own keywords, and this prevents name
       collisions between arguments to new() and arguments to your own
       functions.

    In both cases, the result is stored in the job.result field of the
    background job object.

    You can set the `daemon` attribute of the thread by giving the keyword
    argument `daemon`.

    Notes and caveats:

    1. All threads running share the same standard output.  Thus, if your
       background jobs generate output, it will come out on top of whatever
       you are currently writing.  For this reason, background jobs are
       best used with silent functions which simply return their output.

    2. Threads also all work within the same global namespace, and this
       system does not lock interactive variables.  So if you send a job to
       the background which operates on a mutable object for a long time,
       and start modifying that same mutable object interactively (or in
       another backgrounded job), all sorts of bizarre behaviour will occur.

    3. If a background job is spending a lot of time inside a C extension
       module which does not release the Python Global Interpreter Lock
       (GIL), this will block the IPython prompt.  This is simply because
       the Python interpreter can only switch between threads at Python
       bytecodes.  While the execution is inside C code, the interpreter
       must simply wait unless the extension module releases the GIL.

    4. There is no way, due to limitations in the Python threads library,
       to kill a thread once it has started.
    """
    if callable(func_or_exp):
        # Function job: user keyword arguments arrive bundled in 'kw'.
        job = BackgroundJobFunc(func_or_exp, *args, **kwargs.get('kw', {}))
    elif isinstance(func_or_exp, string_types):
        # Expression job: work out which namespaces to evaluate in.
        if not args:
            caller = sys._getframe(1)
            glob, loc = caller.f_globals, caller.f_locals
        elif len(args) == 1:
            glob = loc = args[0]
        elif len(args) == 2:
            glob, loc = args
        else:
            raise ValueError(
                'Expression jobs take at most 2 args (globals,locals)')
        job = BackgroundJobExpr(func_or_exp, glob, loc)
    else:
        raise TypeError('invalid args for new job')

    if kwargs.get('daemon', False):
        job.daemon = True
    # NOTE(review): this numbering skips 1 (first job is 0, the second is
    # len+1 == 2) — preserved as-is since users may refer to existing
    # job numbers.
    job.num = len(self.all) + 1 if self.all else 0
    self.running.append(job)
    self.all[job.num] = job
    print('Starting job # %s in a separate thread.' % job.num)
    job.start()
    return job
198
198
199 def __getitem__(self, job_key):
199 def __getitem__(self, job_key):
200 num = job_key if isinstance(job_key, int) else job_key.num
200 num = job_key if isinstance(job_key, int) else job_key.num
201 return self.all[num]
201 return self.all[num]
202
202
203 def __call__(self):
203 def __call__(self):
204 """An alias to self.status(),
204 """An alias to self.status(),
205
205
206 This allows you to simply call a job manager instance much like the
206 This allows you to simply call a job manager instance much like the
207 Unix `jobs` shell command."""
207 Unix `jobs` shell command."""
208
208
209 return self.status()
209 return self.status()
210
210
211 def _update_status(self):
211 def _update_status(self):
212 """Update the status of the job lists.
212 """Update the status of the job lists.
213
213
214 This method moves finished jobs to one of two lists:
214 This method moves finished jobs to one of two lists:
215 - self.completed: jobs which completed successfully
215 - self.completed: jobs which completed successfully
216 - self.dead: jobs which finished but died.
216 - self.dead: jobs which finished but died.
217
217
218 It also copies those jobs to corresponding _report lists. These lists
218 It also copies those jobs to corresponding _report lists. These lists
219 are used to report jobs completed/dead since the last update, and are
219 are used to report jobs completed/dead since the last update, and are
220 then cleared by the reporting function after each call."""
220 then cleared by the reporting function after each call."""
221
221
222 # Status codes
222 # Status codes
223 srun, scomp, sdead = self._s_running, self._s_completed, self._s_dead
223 srun, scomp, sdead = self._s_running, self._s_completed, self._s_dead
224 # State lists, use the actual lists b/c the public names are properties
224 # State lists, use the actual lists b/c the public names are properties
225 # that call this very function on access
225 # that call this very function on access
226 running, completed, dead = self._running, self._completed, self._dead
226 running, completed, dead = self._running, self._completed, self._dead
227
227
228 # Now, update all state lists
228 # Now, update all state lists
229 for num, job in enumerate(running):
229 for num, job in enumerate(running):
230 stat = job.stat_code
230 stat = job.stat_code
231 if stat == srun:
231 if stat == srun:
232 continue
232 continue
233 elif stat == scomp:
233 elif stat == scomp:
234 completed.append(job)
234 completed.append(job)
235 self._comp_report.append(job)
235 self._comp_report.append(job)
236 running[num] = False
236 running[num] = False
237 elif stat == sdead:
237 elif stat == sdead:
238 dead.append(job)
238 dead.append(job)
239 self._dead_report.append(job)
239 self._dead_report.append(job)
240 running[num] = False
240 running[num] = False
241 # Remove dead/completed jobs from running list
241 # Remove dead/completed jobs from running list
242 running[:] = filter(None, running)
242 running[:] = filter(None, running)
243
243
244 def _group_report(self,group,name):
244 def _group_report(self,group,name):
245 """Report summary for a given job group.
245 """Report summary for a given job group.
246
246
247 Return True if the group had any elements."""
247 Return True if the group had any elements."""
248
248
249 if group:
249 if group:
250 print('%s jobs:' % name)
250 print('%s jobs:' % name)
251 for job in group:
251 for job in group:
252 print('%s : %s' % (job.num,job))
252 print('%s : %s' % (job.num,job))
253 print()
253 print()
254 return True
254 return True
255
255
256 def _group_flush(self,group,name):
256 def _group_flush(self,group,name):
257 """Flush a given job group
257 """Flush a given job group
258
258
259 Return True if the group had any elements."""
259 Return True if the group had any elements."""
260
260
261 njobs = len(group)
261 njobs = len(group)
262 if njobs:
262 if njobs:
263 plural = {1:''}.setdefault(njobs,'s')
263 plural = {1:''}.setdefault(njobs,'s')
264 print('Flushing %s %s job%s.' % (njobs,name,plural))
264 print('Flushing %s %s job%s.' % (njobs,name,plural))
265 group[:] = []
265 group[:] = []
266 return True
266 return True
267
267
268 def _status_new(self):
268 def _status_new(self):
269 """Print the status of newly finished jobs.
269 """Print the status of newly finished jobs.
270
270
271 Return True if any new jobs are reported.
271 Return True if any new jobs are reported.
272
272
273 This call resets its own state every time, so it only reports jobs
273 This call resets its own state every time, so it only reports jobs
274 which have finished since the last time it was called."""
274 which have finished since the last time it was called."""
275
275
276 self._update_status()
276 self._update_status()
277 new_comp = self._group_report(self._comp_report, 'Completed')
277 new_comp = self._group_report(self._comp_report, 'Completed')
278 new_dead = self._group_report(self._dead_report,
278 new_dead = self._group_report(self._dead_report,
279 'Dead, call jobs.traceback() for details')
279 'Dead, call jobs.traceback() for details')
280 self._comp_report[:] = []
280 self._comp_report[:] = []
281 self._dead_report[:] = []
281 self._dead_report[:] = []
282 return new_comp or new_dead
282 return new_comp or new_dead
283
283
284 def status(self,verbose=0):
284 def status(self,verbose=0):
285 """Print a status of all jobs currently being managed."""
285 """Print a status of all jobs currently being managed."""
286
286
287 self._update_status()
287 self._update_status()
288 self._group_report(self.running,'Running')
288 self._group_report(self.running,'Running')
289 self._group_report(self.completed,'Completed')
289 self._group_report(self.completed,'Completed')
290 self._group_report(self.dead,'Dead')
290 self._group_report(self.dead,'Dead')
291 # Also flush the report queues
291 # Also flush the report queues
292 self._comp_report[:] = []
292 self._comp_report[:] = []
293 self._dead_report[:] = []
293 self._dead_report[:] = []
294
294
295 def remove(self,num):
295 def remove(self,num):
296 """Remove a finished (completed or dead) job."""
296 """Remove a finished (completed or dead) job."""
297
297
298 try:
298 try:
299 job = self.all[num]
299 job = self.all[num]
300 except KeyError:
300 except KeyError:
301 error('Job #%s not found' % num)
301 error('Job #%s not found' % num)
302 else:
302 else:
303 stat_code = job.stat_code
303 stat_code = job.stat_code
304 if stat_code == self._s_running:
304 if stat_code == self._s_running:
305 error('Job #%s is still running, it can not be removed.' % num)
305 error('Job #%s is still running, it can not be removed.' % num)
306 return
306 return
307 elif stat_code == self._s_completed:
307 elif stat_code == self._s_completed:
308 self.completed.remove(job)
308 self.completed.remove(job)
309 elif stat_code == self._s_dead:
309 elif stat_code == self._s_dead:
310 self.dead.remove(job)
310 self.dead.remove(job)
311
311
312 def flush(self):
312 def flush(self):
313 """Flush all finished jobs (completed and dead) from lists.
313 """Flush all finished jobs (completed and dead) from lists.
314
314
315 Running jobs are never flushed.
315 Running jobs are never flushed.
316
316
317 It first calls _status_new(), to update info. If any jobs have
317 It first calls _status_new(), to update info. If any jobs have
318 completed since the last _status_new() call, the flush operation
318 completed since the last _status_new() call, the flush operation
319 aborts."""
319 aborts."""
320
320
321 # Remove the finished jobs from the master dict
321 # Remove the finished jobs from the master dict
322 alljobs = self.all
322 alljobs = self.all
323 for job in self.completed+self.dead:
323 for job in self.completed+self.dead:
324 del(alljobs[job.num])
324 del(alljobs[job.num])
325
325
326 # Now flush these lists completely
326 # Now flush these lists completely
327 fl_comp = self._group_flush(self.completed, 'Completed')
327 fl_comp = self._group_flush(self.completed, 'Completed')
328 fl_dead = self._group_flush(self.dead, 'Dead')
328 fl_dead = self._group_flush(self.dead, 'Dead')
329 if not (fl_comp or fl_dead):
329 if not (fl_comp or fl_dead):
330 print('No jobs to flush.')
330 print('No jobs to flush.')
331
331
332 def result(self,num):
332 def result(self,num):
333 """result(N) -> return the result of job N."""
333 """result(N) -> return the result of job N."""
334 try:
334 try:
335 return self.all[num].result
335 return self.all[num].result
336 except KeyError:
336 except KeyError:
337 error('Job #%s not found' % num)
337 error('Job #%s not found' % num)
338
338
339 def _traceback(self, job):
339 def _traceback(self, job):
340 num = job if isinstance(job, int) else job.num
340 num = job if isinstance(job, int) else job.num
341 try:
341 try:
342 self.all[num].traceback()
342 self.all[num].traceback()
343 except KeyError:
343 except KeyError:
344 error('Job #%s not found' % num)
344 error('Job #%s not found' % num)
345
345
346 def traceback(self, job=None):
346 def traceback(self, job=None):
347 if job is None:
347 if job is None:
348 self._update_status()
348 self._update_status()
349 for deadjob in self.dead:
349 for deadjob in self.dead:
350 print("Traceback for: %r" % deadjob)
350 print("Traceback for: %r" % deadjob)
351 self._traceback(deadjob)
351 self._traceback(deadjob)
352 print()
352 print()
353 else:
353 else:
354 self._traceback(job)
354 self._traceback(job)
355
355
356
356
class BackgroundJobBase(threading.Thread):
    """Base class to build BackgroundJob classes.

    The derived classes must implement:

    - Their own __init__, since the one here raises NotImplementedError. The
      derived constructor must call self._init() at the end, to provide common
      initialization.

    - A strform attribute used in calls to __str__.

    - A call() method, which will make the actual execution call and must
      return a value to be held in the 'result' field of the job object.
    """

    # Status constants, both as strings and as numeric codes: job-list
    # updates happen at every user prompt, so comparisons must be cheap
    # (ints, not strings).
    stat_created = 'Created'
    stat_created_c = 0
    stat_running = 'Running'
    stat_running_c = 1
    stat_completed = 'Completed'
    stat_completed_c = 2
    stat_dead = 'Dead (Exception), call jobs.traceback() for details'
    stat_dead_c = -1

    def __init__(self):
        """Must be implemented in subclasses.

        Subclasses must call :meth:`_init` for standard initialisation.
        """
        raise NotImplementedError("This class can not be instantiated directly.")

    def _init(self):
        """Common initialization for all BackgroundJob objects"""
        # Subclass contract: both attributes must exist before _init() runs.
        for attr in ('call', 'strform'):
            assert hasattr(self, attr), "Missing attribute <%s>" % attr

        # The num tag can be set by an external job manager
        self.num = None

        self.status = BackgroundJobBase.stat_created
        self.stat_code = BackgroundJobBase.stat_created_c
        self.finished = False
        self.result = '<BackgroundJob has not completed>'

        # Reuse the IPython traceback handler when one is reachable,
        # otherwise build a standalone formatter.
        try:
            make_tb = get_ipython().InteractiveTB.text
        except:
            make_tb = AutoFormattedTB(mode='Context',
                                      color_scheme='NoColor',
                                      tb_offset=1).text
        # text() requires three positional args, so wrap it in a lambda.
        self._make_tb = lambda: make_tb(None, None, None)

        # Holds a formatted traceback once one is generated.
        self._tb = None

        threading.Thread.__init__(self)

    def __str__(self):
        return self.strform

    def __repr__(self):
        return '<BackgroundJob #%d: %s>' % (self.num, self.strform)

    def traceback(self):
        print(self._tb)

    def run(self):
        try:
            self.status = BackgroundJobBase.stat_running
            self.stat_code = BackgroundJobBase.stat_running_c
            self.result = self.call()
        except:
            # Any exception marks the job dead; keep the formatted traceback
            # for later inspection via traceback().
            self.status = BackgroundJobBase.stat_dead
            self.stat_code = BackgroundJobBase.stat_dead_c
            self.finished = None
            self.result = ('<BackgroundJob died, call jobs.traceback() for details>')
            self._tb = self._make_tb()
        else:
            self.status = BackgroundJobBase.stat_completed
            self.stat_code = BackgroundJobBase.stat_completed_c
            self.finished = True
438
443
439
444
class BackgroundJobExpr(BackgroundJobBase):
    """Evaluate an expression as a background job (uses a separate thread)."""

    def __init__(self, expression, glob=None, loc=None):
        """Create a new job from a string which can be fed to eval().

        global/locals dicts can be provided, which will be passed to the eval
        call."""
        # Compile eagerly so an invalid expression fails right here,
        # not later inside the worker thread.
        self.code = compile(expression, '<BackgroundJob compilation>', 'eval')

        self.expression = self.strform = expression
        self.glob = {} if glob is None else glob
        self.loc = {} if loc is None else loc
        self._init()

    def call(self):
        return eval(self.code, self.glob, self.loc)
461
466
462
467
class BackgroundJobFunc(BackgroundJobBase):
    """Run a function call as a background job (uses a separate thread)."""

    def __init__(self, func, *args, **kwargs):
        """Create a new job from a callable object.

        Any positional arguments and keyword args given to this constructor
        after the initial callable are passed directly to it."""
        if not callable(func):
            raise TypeError(
                'first argument to BackgroundJobFunc must be callable')

        self.func = func
        self.args = args
        self.kwargs = kwargs
        # Only the function itself appears in the string form: rendering the
        # arguments could be _very_ expensive (e.g. with large arrays).
        self.strform = str(func)
        self._init()

    def call(self):
        return self.func(*self.args, **self.kwargs)
@@ -1,270 +1,273 b''
1 """This module defines Exporter, a highly configurable converter
1 """This module defines Exporter, a highly configurable converter
2 that uses Jinja2 to export notebook files into different formats.
2 that uses Jinja2 to export notebook files into different formats.
3 """
3 """
4
4
5 #-----------------------------------------------------------------------------
5 #-----------------------------------------------------------------------------
6 # Copyright (c) 2013, the IPython Development Team.
6 # Copyright (c) 2013, the IPython Development Team.
7 #
7 #
8 # Distributed under the terms of the Modified BSD License.
8 # Distributed under the terms of the Modified BSD License.
9 #
9 #
10 # The full license is in the file COPYING.txt, distributed with this software.
10 # The full license is in the file COPYING.txt, distributed with this software.
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12
12
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14 # Imports
14 # Imports
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16
16
17 from __future__ import print_function, absolute_import
17 from __future__ import print_function, absolute_import
18
18
19 # Stdlib imports
19 # Stdlib imports
20 import io
20 import io
21 import os
21 import os
22 import copy
22 import copy
23 import collections
23 import collections
24 import datetime
24 import datetime
25
25
26
26
27 # IPython imports
27 # IPython imports
28 from IPython.config.configurable import LoggingConfigurable
28 from IPython.config.configurable import LoggingConfigurable
29 from IPython.config import Config
29 from IPython.config import Config
30 from IPython.nbformat import current as nbformat
30 from IPython.nbformat import current as nbformat
31 from IPython.utils.traitlets import MetaHasTraits, Unicode, List
31 from IPython.utils.traitlets import MetaHasTraits, Unicode, List
32 from IPython.utils.importstring import import_item
32 from IPython.utils.importstring import import_item
33 from IPython.utils import text, py3compat
33 from IPython.utils import text, py3compat
34
34
35 from IPython.nbconvert import preprocessors as nbpreprocessors
35 from IPython.nbconvert import preprocessors as nbpreprocessors
36
36
37
37
38 #-----------------------------------------------------------------------------
38 #-----------------------------------------------------------------------------
39 # Class
39 # Class
40 #-----------------------------------------------------------------------------
40 #-----------------------------------------------------------------------------
41
41
class ResourcesDict(collections.defaultdict):
    """defaultdict variant that yields '' for missing keys.

    Unlike a plain defaultdict with a factory, the missing key is NOT
    inserted into the dict — lookup of an absent key just returns ''.
    """
    def __missing__(self, key):
        return ''
45
45
46
46
47 class Exporter(LoggingConfigurable):
47 class Exporter(LoggingConfigurable):
48 """
48 """
49 Class containing methods that sequentially run a list of preprocessors on a
49 Class containing methods that sequentially run a list of preprocessors on a
50 NotebookNode object and then return the modified NotebookNode object and
50 NotebookNode object and then return the modified NotebookNode object and
51 accompanying resources dict.
51 accompanying resources dict.
52 """
52 """
53
53
54 file_extension = Unicode(
54 file_extension = Unicode(
55 'txt', config=True,
55 'txt', config=True,
56 help="Extension of the file that should be written to disk"
56 help="Extension of the file that should be written to disk"
57 )
57 )
58
58
59 #Configurability, allows the user to easily add filters and preprocessors.
59 #Configurability, allows the user to easily add filters and preprocessors.
60 preprocessors = List(config=True,
60 preprocessors = List(config=True,
61 help="""List of preprocessors, by name or namespace, to enable.""")
61 help="""List of preprocessors, by name or namespace, to enable.""")
62
62
63 _preprocessors = None
63 _preprocessors = None
64
64
65 default_preprocessors = List(['IPython.nbconvert.preprocessors.coalesce_streams',
65 default_preprocessors = List(['IPython.nbconvert.preprocessors.coalesce_streams',
66 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor',
66 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor',
67 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor',
67 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor',
68 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor',
68 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor',
69 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor',
69 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor',
70 'IPython.nbconvert.preprocessors.LatexPreprocessor',
70 'IPython.nbconvert.preprocessors.LatexPreprocessor',
71 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor'],
71 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor'],
72 config=True,
72 config=True,
73 help="""List of preprocessors available by default, by name, namespace,
73 help="""List of preprocessors available by default, by name, namespace,
74 instance, or type.""")
74 instance, or type.""")
75
75
76
76
77 def __init__(self, config=None, **kw):
77 def __init__(self, config=None, **kw):
78 """
78 """
79 Public constructor
79 Public constructor
80
80
81 Parameters
81 Parameters
82 ----------
82 ----------
83 config : config
83 config : config
84 User configuration instance.
84 User configuration instance.
85 """
85 """
86 with_default_config = self.default_config
86 with_default_config = self.default_config
87 if config:
87 if config:
88 with_default_config.merge(config)
88 with_default_config.merge(config)
89
89
90 super(Exporter, self).__init__(config=with_default_config, **kw)
90 super(Exporter, self).__init__(config=with_default_config, **kw)
91
91
92 self._init_preprocessors()
92 self._init_preprocessors()
93
93
94
94
95 @property
95 @property
96 def default_config(self):
96 def default_config(self):
97 return Config()
97 return Config()
98
98
99
99
100 def from_notebook_node(self, nb, resources=None, **kw):
100 def from_notebook_node(self, nb, resources=None, **kw):
101 """
101 """
102 Convert a notebook from a notebook node instance.
102 Convert a notebook from a notebook node instance.
103
103
104 Parameters
104 Parameters
105 ----------
105 ----------
106 nb : Notebook node
106 nb : :class:`~IPython.nbformat.v3.nbbase.NotebookNode`
107 resources : dict (**kw)
107 Notebook node
108 of additional resources that can be accessed read/write by
108 resources : dict
109 preprocessors.
109 Additional resources that can be accessed read/write by
110 preprocessors and filters.
111 **kw
112 Ignored (?)
110 """
113 """
111 nb_copy = copy.deepcopy(nb)
114 nb_copy = copy.deepcopy(nb)
112 resources = self._init_resources(resources)
115 resources = self._init_resources(resources)
113
116
114 # Preprocess
117 # Preprocess
115 nb_copy, resources = self._preprocess(nb_copy, resources)
118 nb_copy, resources = self._preprocess(nb_copy, resources)
116
119
117 return nb_copy, resources
120 return nb_copy, resources
118
121
119
122
120 def from_filename(self, filename, resources=None, **kw):
123 def from_filename(self, filename, resources=None, **kw):
121 """
124 """
122 Convert a notebook from a notebook file.
125 Convert a notebook from a notebook file.
123
126
124 Parameters
127 Parameters
125 ----------
128 ----------
126 filename : str
129 filename : str
127 Full filename of the notebook file to open and convert.
130 Full filename of the notebook file to open and convert.
128 """
131 """
129
132
130 # Pull the metadata from the filesystem.
133 # Pull the metadata from the filesystem.
131 if resources is None:
134 if resources is None:
132 resources = ResourcesDict()
135 resources = ResourcesDict()
133 if not 'metadata' in resources or resources['metadata'] == '':
136 if not 'metadata' in resources or resources['metadata'] == '':
134 resources['metadata'] = ResourcesDict()
137 resources['metadata'] = ResourcesDict()
135 basename = os.path.basename(filename)
138 basename = os.path.basename(filename)
136 notebook_name = basename[:basename.rfind('.')]
139 notebook_name = basename[:basename.rfind('.')]
137 resources['metadata']['name'] = notebook_name
140 resources['metadata']['name'] = notebook_name
138
141
139 modified_date = datetime.datetime.fromtimestamp(os.path.getmtime(filename))
142 modified_date = datetime.datetime.fromtimestamp(os.path.getmtime(filename))
140 resources['metadata']['modified_date'] = modified_date.strftime(text.date_format)
143 resources['metadata']['modified_date'] = modified_date.strftime(text.date_format)
141
144
142 with io.open(filename) as f:
145 with io.open(filename) as f:
143 return self.from_notebook_node(nbformat.read(f, 'json'), resources=resources, **kw)
146 return self.from_notebook_node(nbformat.read(f, 'json'), resources=resources, **kw)
144
147
145
148
146 def from_file(self, file_stream, resources=None, **kw):
149 def from_file(self, file_stream, resources=None, **kw):
147 """
150 """
148 Convert a notebook from a notebook file.
151 Convert a notebook from a notebook file.
149
152
150 Parameters
153 Parameters
151 ----------
154 ----------
152 file_stream : file-like object
155 file_stream : file-like object
153 Notebook file-like object to convert.
156 Notebook file-like object to convert.
154 """
157 """
155 return self.from_notebook_node(nbformat.read(file_stream, 'json'), resources=resources, **kw)
158 return self.from_notebook_node(nbformat.read(file_stream, 'json'), resources=resources, **kw)
156
159
157
160
158 def register_preprocessor(self, preprocessor, enabled=False):
161 def register_preprocessor(self, preprocessor, enabled=False):
159 """
162 """
160 Register a preprocessor.
163 Register a preprocessor.
161 Preprocessors are classes that act upon the notebook before it is
164 Preprocessors are classes that act upon the notebook before it is
162 passed into the Jinja templating engine. preprocessors are also
165 passed into the Jinja templating engine. preprocessors are also
163 capable of passing additional information to the Jinja
166 capable of passing additional information to the Jinja
164 templating engine.
167 templating engine.
165
168
166 Parameters
169 Parameters
167 ----------
170 ----------
168 preprocessor : preprocessor
171 preprocessor : preprocessor
169 """
172 """
170 if preprocessor is None:
173 if preprocessor is None:
171 raise TypeError('preprocessor')
174 raise TypeError('preprocessor')
172 isclass = isinstance(preprocessor, type)
175 isclass = isinstance(preprocessor, type)
173 constructed = not isclass
176 constructed = not isclass
174
177
175 # Handle preprocessor's registration based on it's type
178 # Handle preprocessor's registration based on it's type
176 if constructed and isinstance(preprocessor, py3compat.string_types):
179 if constructed and isinstance(preprocessor, py3compat.string_types):
177 # Preprocessor is a string, import the namespace and recursively call
180 # Preprocessor is a string, import the namespace and recursively call
178 # this register_preprocessor method
181 # this register_preprocessor method
179 preprocessor_cls = import_item(preprocessor)
182 preprocessor_cls = import_item(preprocessor)
180 return self.register_preprocessor(preprocessor_cls, enabled)
183 return self.register_preprocessor(preprocessor_cls, enabled)
181
184
182 if constructed and hasattr(preprocessor, '__call__'):
185 if constructed and hasattr(preprocessor, '__call__'):
183 # Preprocessor is a function, no need to construct it.
186 # Preprocessor is a function, no need to construct it.
184 # Register and return the preprocessor.
187 # Register and return the preprocessor.
185 if enabled:
188 if enabled:
186 preprocessor.enabled = True
189 preprocessor.enabled = True
187 self._preprocessors.append(preprocessor)
190 self._preprocessors.append(preprocessor)
188 return preprocessor
191 return preprocessor
189
192
190 elif isclass and isinstance(preprocessor, MetaHasTraits):
193 elif isclass and isinstance(preprocessor, MetaHasTraits):
191 # Preprocessor is configurable. Make sure to pass in new default for
194 # Preprocessor is configurable. Make sure to pass in new default for
192 # the enabled flag if one was specified.
195 # the enabled flag if one was specified.
193 self.register_preprocessor(preprocessor(parent=self), enabled)
196 self.register_preprocessor(preprocessor(parent=self), enabled)
194
197
195 elif isclass:
198 elif isclass:
196 # Preprocessor is not configurable, construct it
199 # Preprocessor is not configurable, construct it
197 self.register_preprocessor(preprocessor(), enabled)
200 self.register_preprocessor(preprocessor(), enabled)
198
201
199 else:
202 else:
200 # Preprocessor is an instance of something without a __call__
203 # Preprocessor is an instance of something without a __call__
201 # attribute.
204 # attribute.
202 raise TypeError('preprocessor')
205 raise TypeError('preprocessor')
203
206
204
207
205 def _init_preprocessors(self):
208 def _init_preprocessors(self):
206 """
209 """
207 Register all of the preprocessors needed for this exporter, disabled
210 Register all of the preprocessors needed for this exporter, disabled
208 unless specified explicitly.
211 unless specified explicitly.
209 """
212 """
210 if self._preprocessors is None:
213 if self._preprocessors is None:
211 self._preprocessors = []
214 self._preprocessors = []
212
215
213 #Load default preprocessors (not necessarly enabled by default).
216 #Load default preprocessors (not necessarly enabled by default).
214 if self.default_preprocessors:
217 if self.default_preprocessors:
215 for preprocessor in self.default_preprocessors:
218 for preprocessor in self.default_preprocessors:
216 self.register_preprocessor(preprocessor)
219 self.register_preprocessor(preprocessor)
217
220
218 #Load user preprocessors. Enable by default.
221 #Load user preprocessors. Enable by default.
219 if self.preprocessors:
222 if self.preprocessors:
220 for preprocessor in self.preprocessors:
223 for preprocessor in self.preprocessors:
221 self.register_preprocessor(preprocessor, enabled=True)
224 self.register_preprocessor(preprocessor, enabled=True)
222
225
223
226
224 def _init_resources(self, resources):
227 def _init_resources(self, resources):
225
228
226 #Make sure the resources dict is of ResourcesDict type.
229 #Make sure the resources dict is of ResourcesDict type.
227 if resources is None:
230 if resources is None:
228 resources = ResourcesDict()
231 resources = ResourcesDict()
229 if not isinstance(resources, ResourcesDict):
232 if not isinstance(resources, ResourcesDict):
230 new_resources = ResourcesDict()
233 new_resources = ResourcesDict()
231 new_resources.update(resources)
234 new_resources.update(resources)
232 resources = new_resources
235 resources = new_resources
233
236
234 #Make sure the metadata extension exists in resources
237 #Make sure the metadata extension exists in resources
235 if 'metadata' in resources:
238 if 'metadata' in resources:
236 if not isinstance(resources['metadata'], ResourcesDict):
239 if not isinstance(resources['metadata'], ResourcesDict):
237 resources['metadata'] = ResourcesDict(resources['metadata'])
240 resources['metadata'] = ResourcesDict(resources['metadata'])
238 else:
241 else:
239 resources['metadata'] = ResourcesDict()
242 resources['metadata'] = ResourcesDict()
240 if not resources['metadata']['name']:
243 if not resources['metadata']['name']:
241 resources['metadata']['name'] = 'Notebook'
244 resources['metadata']['name'] = 'Notebook'
242
245
243 #Set the output extension
246 #Set the output extension
244 resources['output_extension'] = self.file_extension
247 resources['output_extension'] = self.file_extension
245 return resources
248 return resources
246
249
247
250
248 def _preprocess(self, nb, resources):
251 def _preprocess(self, nb, resources):
249 """
252 """
250 Preprocess the notebook before passing it into the Jinja engine.
253 Preprocess the notebook before passing it into the Jinja engine.
251 To preprocess the notebook is to apply all of the
254 To preprocess the notebook is to apply all of the
252
255
253 Parameters
256 Parameters
254 ----------
257 ----------
255 nb : notebook node
258 nb : notebook node
256 notebook that is being exported.
259 notebook that is being exported.
257 resources : a dict of additional resources that
260 resources : a dict of additional resources that
258 can be accessed read/write by preprocessors
261 can be accessed read/write by preprocessors
259 """
262 """
260
263
261 # Do a copy.deepcopy first,
264 # Do a copy.deepcopy first,
262 # we are never safe enough with what the preprocessors could do.
265 # we are never safe enough with what the preprocessors could do.
263 nbc = copy.deepcopy(nb)
266 nbc = copy.deepcopy(nb)
264 resc = copy.deepcopy(resources)
267 resc = copy.deepcopy(resources)
265
268
266 #Run each preprocessor on the notebook. Carry the output along
269 #Run each preprocessor on the notebook. Carry the output along
267 #to each preprocessor
270 #to each preprocessor
268 for preprocessor in self._preprocessors:
271 for preprocessor in self._preprocessors:
269 nbc, resc = preprocessor(nbc, resc)
272 nbc, resc = preprocessor(nbc, resc)
270 return nbc, resc
273 return nbc, resc
@@ -1,312 +1,313 b''
1 """This module defines Exporter, a highly configurable converter
1 """This module defines Exporter, a highly configurable converter
2 that uses Jinja2 to export notebook files into different formats.
2 that uses Jinja2 to export notebook files into different formats.
3 """
3 """
4
4
5 #-----------------------------------------------------------------------------
5 #-----------------------------------------------------------------------------
6 # Copyright (c) 2013, the IPython Development Team.
6 # Copyright (c) 2013, the IPython Development Team.
7 #
7 #
8 # Distributed under the terms of the Modified BSD License.
8 # Distributed under the terms of the Modified BSD License.
9 #
9 #
10 # The full license is in the file COPYING.txt, distributed with this software.
10 # The full license is in the file COPYING.txt, distributed with this software.
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12
12
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14 # Imports
14 # Imports
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16
16
17 from __future__ import print_function, absolute_import
17 from __future__ import print_function, absolute_import
18
18
19 # Stdlib imports
19 # Stdlib imports
20 import os
20 import os
21
21
22 # other libs/dependencies
22 # other libs/dependencies
23 from jinja2 import Environment, FileSystemLoader, ChoiceLoader, TemplateNotFound
23 from jinja2 import Environment, FileSystemLoader, ChoiceLoader, TemplateNotFound
24
24
25 # IPython imports
25 # IPython imports
26 from IPython.utils.traitlets import MetaHasTraits, Unicode, List, Dict, Any
26 from IPython.utils.traitlets import MetaHasTraits, Unicode, List, Dict, Any
27 from IPython.utils.importstring import import_item
27 from IPython.utils.importstring import import_item
28 from IPython.utils import py3compat, text
28 from IPython.utils import py3compat, text
29
29
30 from IPython.nbconvert import filters
30 from IPython.nbconvert import filters
31 from .exporter import Exporter
31 from .exporter import Exporter
32
32
33 #-----------------------------------------------------------------------------
33 #-----------------------------------------------------------------------------
34 # Globals and constants
34 # Globals and constants
35 #-----------------------------------------------------------------------------
35 #-----------------------------------------------------------------------------
36
36
37 #Jinja2 extensions to load.
37 #Jinja2 extensions to load.
38 JINJA_EXTENSIONS = ['jinja2.ext.loopcontrols']
38 JINJA_EXTENSIONS = ['jinja2.ext.loopcontrols']
39
39
40 default_filters = {
40 default_filters = {
41 'indent': text.indent,
41 'indent': text.indent,
42 'markdown2html': filters.markdown2html,
42 'markdown2html': filters.markdown2html,
43 'ansi2html': filters.ansi2html,
43 'ansi2html': filters.ansi2html,
44 'filter_data_type': filters.DataTypeFilter,
44 'filter_data_type': filters.DataTypeFilter,
45 'get_lines': filters.get_lines,
45 'get_lines': filters.get_lines,
46 'highlight2html': filters.Highlight2Html,
46 'highlight2html': filters.Highlight2Html,
47 'highlight2latex': filters.Highlight2Latex,
47 'highlight2latex': filters.Highlight2Latex,
48 'ipython2python': filters.ipython2python,
48 'ipython2python': filters.ipython2python,
49 'posix_path': filters.posix_path,
49 'posix_path': filters.posix_path,
50 'markdown2latex': filters.markdown2latex,
50 'markdown2latex': filters.markdown2latex,
51 'markdown2rst': filters.markdown2rst,
51 'markdown2rst': filters.markdown2rst,
52 'comment_lines': filters.comment_lines,
52 'comment_lines': filters.comment_lines,
53 'strip_ansi': filters.strip_ansi,
53 'strip_ansi': filters.strip_ansi,
54 'strip_dollars': filters.strip_dollars,
54 'strip_dollars': filters.strip_dollars,
55 'strip_files_prefix': filters.strip_files_prefix,
55 'strip_files_prefix': filters.strip_files_prefix,
56 'html2text' : filters.html2text,
56 'html2text' : filters.html2text,
57 'add_anchor': filters.add_anchor,
57 'add_anchor': filters.add_anchor,
58 'ansi2latex': filters.ansi2latex,
58 'ansi2latex': filters.ansi2latex,
59 'wrap_text': filters.wrap_text,
59 'wrap_text': filters.wrap_text,
60 'escape_latex': filters.escape_latex,
60 'escape_latex': filters.escape_latex,
61 'citation2latex': filters.citation2latex,
61 'citation2latex': filters.citation2latex,
62 'path2url': filters.path2url,
62 'path2url': filters.path2url,
63 'add_prompts': filters.add_prompts,
63 'add_prompts': filters.add_prompts,
64 }
64 }
65
65
66 #-----------------------------------------------------------------------------
66 #-----------------------------------------------------------------------------
67 # Class
67 # Class
68 #-----------------------------------------------------------------------------
68 #-----------------------------------------------------------------------------
69
69
70 class TemplateExporter(Exporter):
70 class TemplateExporter(Exporter):
71 """
71 """
72 Exports notebooks into other file formats. Uses Jinja 2 templating engine
72 Exports notebooks into other file formats. Uses Jinja 2 templating engine
73 to output new formats. Inherit from this class if you are creating a new
73 to output new formats. Inherit from this class if you are creating a new
74 template type along with new filters/preprocessors. If the filters/
74 template type along with new filters/preprocessors. If the filters/
75 preprocessors provided by default suffice, there is no need to inherit from
75 preprocessors provided by default suffice, there is no need to inherit from
76 this class. Instead, override the template_file and file_extension
76 this class. Instead, override the template_file and file_extension
77 traits via a config file.
77 traits via a config file.
78
78
79 {filters}
79 {filters}
80 """
80 """
81
81
82 # finish the docstring
82 # finish the docstring
83 __doc__ = __doc__.format(filters = '- '+'\n - '.join(default_filters.keys()))
83 __doc__ = __doc__.format(filters = '- '+'\n - '.join(default_filters.keys()))
84
84
85
85
86 template_file = Unicode(u'default',
86 template_file = Unicode(u'default',
87 config=True,
87 config=True,
88 help="Name of the template file to use")
88 help="Name of the template file to use")
89 def _template_file_changed(self, name, old, new):
89 def _template_file_changed(self, name, old, new):
90 if new == 'default':
90 if new == 'default':
91 self.template_file = self.default_template
91 self.template_file = self.default_template
92 else:
92 else:
93 self.template_file = new
93 self.template_file = new
94 self.template = None
94 self.template = None
95 self._load_template()
95 self._load_template()
96
96
97 default_template = Unicode(u'')
97 default_template = Unicode(u'')
98 template = Any()
98 template = Any()
99 environment = Any()
99 environment = Any()
100
100
101 template_path = List(['.'], config=True)
101 template_path = List(['.'], config=True)
102 def _template_path_changed(self, name, old, new):
102 def _template_path_changed(self, name, old, new):
103 self._load_template()
103 self._load_template()
104
104
105 default_template_path = Unicode(
105 default_template_path = Unicode(
106 os.path.join("..", "templates"),
106 os.path.join("..", "templates"),
107 help="Path where the template files are located.")
107 help="Path where the template files are located.")
108
108
109 template_skeleton_path = Unicode(
109 template_skeleton_path = Unicode(
110 os.path.join("..", "templates", "skeleton"),
110 os.path.join("..", "templates", "skeleton"),
111 help="Path where the template skeleton files are located.")
111 help="Path where the template skeleton files are located.")
112
112
113 #Jinja block definitions
113 #Jinja block definitions
114 jinja_comment_block_start = Unicode("", config=True)
114 jinja_comment_block_start = Unicode("", config=True)
115 jinja_comment_block_end = Unicode("", config=True)
115 jinja_comment_block_end = Unicode("", config=True)
116 jinja_variable_block_start = Unicode("", config=True)
116 jinja_variable_block_start = Unicode("", config=True)
117 jinja_variable_block_end = Unicode("", config=True)
117 jinja_variable_block_end = Unicode("", config=True)
118 jinja_logic_block_start = Unicode("", config=True)
118 jinja_logic_block_start = Unicode("", config=True)
119 jinja_logic_block_end = Unicode("", config=True)
119 jinja_logic_block_end = Unicode("", config=True)
120
120
121 #Extension that the template files use.
121 #Extension that the template files use.
122 template_extension = Unicode(".tpl", config=True)
122 template_extension = Unicode(".tpl", config=True)
123
123
124 filters = Dict(config=True,
124 filters = Dict(config=True,
125 help="""Dictionary of filters, by name and namespace, to add to the Jinja
125 help="""Dictionary of filters, by name and namespace, to add to the Jinja
126 environment.""")
126 environment.""")
127
127
128
128
129 def __init__(self, config=None, extra_loaders=None, **kw):
129 def __init__(self, config=None, extra_loaders=None, **kw):
130 """
130 """
131 Public constructor
131 Public constructor
132
132
133 Parameters
133 Parameters
134 ----------
134 ----------
135 config : config
135 config : config
136 User configuration instance.
136 User configuration instance.
137 extra_loaders : list[of Jinja Loaders]
137 extra_loaders : list[of Jinja Loaders]
138 ordered list of Jinja loader to find templates. Will be tried in order
138 ordered list of Jinja loader to find templates. Will be tried in order
139 before the default FileSystem ones.
139 before the default FileSystem ones.
140 template : str (optional, kw arg)
140 template : str (optional, kw arg)
141 Template to use when exporting.
141 Template to use when exporting.
142 """
142 """
143 super(TemplateExporter, self).__init__(config=config, **kw)
143 super(TemplateExporter, self).__init__(config=config, **kw)
144
144
145 #Init
145 #Init
146 self._init_template()
146 self._init_template()
147 self._init_environment(extra_loaders=extra_loaders)
147 self._init_environment(extra_loaders=extra_loaders)
148 self._init_preprocessors()
148 self._init_preprocessors()
149 self._init_filters()
149 self._init_filters()
150
150
151
151
152 def _load_template(self):
152 def _load_template(self):
153 """Load the Jinja template object from the template file
153 """Load the Jinja template object from the template file
154
154
155 This is a no-op if the template attribute is already defined,
155 This is a no-op if the template attribute is already defined,
156 or the Jinja environment is not setup yet.
156 or the Jinja environment is not setup yet.
157
157
158 This is triggered by various trait changes that would change the template.
158 This is triggered by various trait changes that would change the template.
159 """
159 """
160 if self.template is not None:
160 if self.template is not None:
161 return
161 return
162 # called too early, do nothing
162 # called too early, do nothing
163 if self.environment is None:
163 if self.environment is None:
164 return
164 return
165 # Try different template names during conversion. First try to load the
165 # Try different template names during conversion. First try to load the
166 # template by name with extension added, then try loading the template
166 # template by name with extension added, then try loading the template
167 # as if the name is explicitly specified, then try the name as a
167 # as if the name is explicitly specified, then try the name as a
168 # 'flavor', and lastly just try to load the template by module name.
168 # 'flavor', and lastly just try to load the template by module name.
169 module_name = self.__module__.rsplit('.', 1)[-1]
169 module_name = self.__module__.rsplit('.', 1)[-1]
170 try_names = []
170 try_names = []
171 if self.template_file:
171 if self.template_file:
172 try_names.extend([
172 try_names.extend([
173 self.template_file + self.template_extension,
173 self.template_file + self.template_extension,
174 self.template_file,
174 self.template_file,
175 module_name + '_' + self.template_file + self.template_extension,
175 module_name + '_' + self.template_file + self.template_extension,
176 ])
176 ])
177 try_names.append(module_name + self.template_extension)
177 try_names.append(module_name + self.template_extension)
178 for try_name in try_names:
178 for try_name in try_names:
179 self.log.debug("Attempting to load template %s", try_name)
179 self.log.debug("Attempting to load template %s", try_name)
180 try:
180 try:
181 self.template = self.environment.get_template(try_name)
181 self.template = self.environment.get_template(try_name)
182 except (TemplateNotFound, IOError):
182 except (TemplateNotFound, IOError):
183 pass
183 pass
184 except Exception as e:
184 except Exception as e:
185 self.log.warn("Unexpected exception loading template: %s", try_name, exc_info=True)
185 self.log.warn("Unexpected exception loading template: %s", try_name, exc_info=True)
186 else:
186 else:
187 self.log.info("Loaded template %s", try_name)
187 self.log.info("Loaded template %s", try_name)
188 break
188 break
189
189
190 def from_notebook_node(self, nb, resources=None, **kw):
190 def from_notebook_node(self, nb, resources=None, **kw):
191 """
191 """
192 Convert a notebook from a notebook node instance.
192 Convert a notebook from a notebook node instance.
193
193
194 Parameters
194 Parameters
195 ----------
195 ----------
196 nb : Notebook node
196 nb : :class:`~IPython.nbformat.v3.nbbase.NotebookNode`
197 resources : dict (**kw)
197 Notebook node
198 of additional resources that can be accessed read/write by
198 resources : dict
199 preprocessors and filters.
199 Additional resources that can be accessed read/write by
200 preprocessors and filters.
200 """
201 """
201 nb_copy, resources = super(TemplateExporter, self).from_notebook_node(nb, resources, **kw)
202 nb_copy, resources = super(TemplateExporter, self).from_notebook_node(nb, resources, **kw)
202
203
203 self._load_template()
204 self._load_template()
204
205
205 if self.template is not None:
206 if self.template is not None:
206 output = self.template.render(nb=nb_copy, resources=resources)
207 output = self.template.render(nb=nb_copy, resources=resources)
207 else:
208 else:
208 raise IOError('template file "%s" could not be found' % self.template_file)
209 raise IOError('template file "%s" could not be found' % self.template_file)
209 return output, resources
210 return output, resources
210
211
211
212
212 def register_filter(self, name, jinja_filter):
213 def register_filter(self, name, jinja_filter):
213 """
214 """
214 Register a filter.
215 Register a filter.
215 A filter is a function that accepts and acts on one string.
216 A filter is a function that accepts and acts on one string.
216 The filters are accesible within the Jinja templating engine.
217 The filters are accesible within the Jinja templating engine.
217
218
218 Parameters
219 Parameters
219 ----------
220 ----------
220 name : str
221 name : str
221 name to give the filter in the Jinja engine
222 name to give the filter in the Jinja engine
222 filter : filter
223 filter : filter
223 """
224 """
224 if jinja_filter is None:
225 if jinja_filter is None:
225 raise TypeError('filter')
226 raise TypeError('filter')
226 isclass = isinstance(jinja_filter, type)
227 isclass = isinstance(jinja_filter, type)
227 constructed = not isclass
228 constructed = not isclass
228
229
229 #Handle filter's registration based on it's type
230 #Handle filter's registration based on it's type
230 if constructed and isinstance(jinja_filter, py3compat.string_types):
231 if constructed and isinstance(jinja_filter, py3compat.string_types):
231 #filter is a string, import the namespace and recursively call
232 #filter is a string, import the namespace and recursively call
232 #this register_filter method
233 #this register_filter method
233 filter_cls = import_item(jinja_filter)
234 filter_cls = import_item(jinja_filter)
234 return self.register_filter(name, filter_cls)
235 return self.register_filter(name, filter_cls)
235
236
236 if constructed and hasattr(jinja_filter, '__call__'):
237 if constructed and hasattr(jinja_filter, '__call__'):
237 #filter is a function, no need to construct it.
238 #filter is a function, no need to construct it.
238 self.environment.filters[name] = jinja_filter
239 self.environment.filters[name] = jinja_filter
239 return jinja_filter
240 return jinja_filter
240
241
241 elif isclass and isinstance(jinja_filter, MetaHasTraits):
242 elif isclass and isinstance(jinja_filter, MetaHasTraits):
242 #filter is configurable. Make sure to pass in new default for
243 #filter is configurable. Make sure to pass in new default for
243 #the enabled flag if one was specified.
244 #the enabled flag if one was specified.
244 filter_instance = jinja_filter(parent=self)
245 filter_instance = jinja_filter(parent=self)
245 self.register_filter(name, filter_instance )
246 self.register_filter(name, filter_instance )
246
247
247 elif isclass:
248 elif isclass:
248 #filter is not configurable, construct it
249 #filter is not configurable, construct it
249 filter_instance = jinja_filter()
250 filter_instance = jinja_filter()
250 self.register_filter(name, filter_instance)
251 self.register_filter(name, filter_instance)
251
252
252 else:
253 else:
253 #filter is an instance of something without a __call__
254 #filter is an instance of something without a __call__
254 #attribute.
255 #attribute.
255 raise TypeError('filter')
256 raise TypeError('filter')
256
257
257
258
258 def _init_template(self):
259 def _init_template(self):
259 """
260 """
260 Make sure a template name is specified. If one isn't specified, try to
261 Make sure a template name is specified. If one isn't specified, try to
261 build one from the information we know.
262 build one from the information we know.
262 """
263 """
263 self._template_file_changed('template_file', self.template_file, self.template_file)
264 self._template_file_changed('template_file', self.template_file, self.template_file)
264
265
265
266
266 def _init_environment(self, extra_loaders=None):
267 def _init_environment(self, extra_loaders=None):
267 """
268 """
268 Create the Jinja templating environment.
269 Create the Jinja templating environment.
269 """
270 """
270 here = os.path.dirname(os.path.realpath(__file__))
271 here = os.path.dirname(os.path.realpath(__file__))
271 loaders = []
272 loaders = []
272 if extra_loaders:
273 if extra_loaders:
273 loaders.extend(extra_loaders)
274 loaders.extend(extra_loaders)
274
275
275 paths = self.template_path
276 paths = self.template_path
276 paths.extend([os.path.join(here, self.default_template_path),
277 paths.extend([os.path.join(here, self.default_template_path),
277 os.path.join(here, self.template_skeleton_path)])
278 os.path.join(here, self.template_skeleton_path)])
278 loaders.append(FileSystemLoader(paths))
279 loaders.append(FileSystemLoader(paths))
279
280
280 self.environment = Environment(
281 self.environment = Environment(
281 loader= ChoiceLoader(loaders),
282 loader= ChoiceLoader(loaders),
282 extensions=JINJA_EXTENSIONS
283 extensions=JINJA_EXTENSIONS
283 )
284 )
284
285
285 #Set special Jinja2 syntax that will not conflict with latex.
286 #Set special Jinja2 syntax that will not conflict with latex.
286 if self.jinja_logic_block_start:
287 if self.jinja_logic_block_start:
287 self.environment.block_start_string = self.jinja_logic_block_start
288 self.environment.block_start_string = self.jinja_logic_block_start
288 if self.jinja_logic_block_end:
289 if self.jinja_logic_block_end:
289 self.environment.block_end_string = self.jinja_logic_block_end
290 self.environment.block_end_string = self.jinja_logic_block_end
290 if self.jinja_variable_block_start:
291 if self.jinja_variable_block_start:
291 self.environment.variable_start_string = self.jinja_variable_block_start
292 self.environment.variable_start_string = self.jinja_variable_block_start
292 if self.jinja_variable_block_end:
293 if self.jinja_variable_block_end:
293 self.environment.variable_end_string = self.jinja_variable_block_end
294 self.environment.variable_end_string = self.jinja_variable_block_end
294 if self.jinja_comment_block_start:
295 if self.jinja_comment_block_start:
295 self.environment.comment_start_string = self.jinja_comment_block_start
296 self.environment.comment_start_string = self.jinja_comment_block_start
296 if self.jinja_comment_block_end:
297 if self.jinja_comment_block_end:
297 self.environment.comment_end_string = self.jinja_comment_block_end
298 self.environment.comment_end_string = self.jinja_comment_block_end
298
299
299
300
300 def _init_filters(self):
301 def _init_filters(self):
301 """
302 """
302 Register all of the filters required for the exporter.
303 Register all of the filters required for the exporter.
303 """
304 """
304
305
305 #Add default filters to the Jinja2 environment
306 #Add default filters to the Jinja2 environment
306 for key, value in default_filters.items():
307 for key, value in default_filters.items():
307 self.register_filter(key, value)
308 self.register_filter(key, value)
308
309
309 #Load user filters. Overwrite existing filters if need be.
310 #Load user filters. Overwrite existing filters if need be.
310 if self.filters:
311 if self.filters:
311 for key, user_filter in self.filters.items():
312 for key, user_filter in self.filters.items():
312 self.register_filter(key, user_filter)
313 self.register_filter(key, user_filter)
@@ -1,1450 +1,1451 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 """
2 """
3 Facilities for launching IPython processes asynchronously.
3 Facilities for launching IPython processes asynchronously.
4
4
5 Authors:
5 Authors:
6
6
7 * Brian Granger
7 * Brian Granger
8 * MinRK
8 * MinRK
9 """
9 """
10
10
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12 # Copyright (C) 2008-2011 The IPython Development Team
12 # Copyright (C) 2008-2011 The IPython Development Team
13 #
13 #
14 # Distributed under the terms of the BSD License. The full license is in
14 # Distributed under the terms of the BSD License. The full license is in
15 # the file COPYING, distributed as part of this software.
15 # the file COPYING, distributed as part of this software.
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17
17
18 #-----------------------------------------------------------------------------
18 #-----------------------------------------------------------------------------
19 # Imports
19 # Imports
20 #-----------------------------------------------------------------------------
20 #-----------------------------------------------------------------------------
21
21
22 import copy
22 import copy
23 import logging
23 import logging
24 import os
24 import os
25 import pipes
25 import pipes
26 import stat
26 import stat
27 import sys
27 import sys
28 import time
28 import time
29
29
30 # signal imports, handling various platforms, versions
30 # signal imports, handling various platforms, versions
31
31
32 from signal import SIGINT, SIGTERM
32 from signal import SIGINT, SIGTERM
33 try:
33 try:
34 from signal import SIGKILL
34 from signal import SIGKILL
35 except ImportError:
35 except ImportError:
36 # Windows
36 # Windows
37 SIGKILL=SIGTERM
37 SIGKILL=SIGTERM
38
38
39 try:
39 try:
40 # Windows >= 2.7, 3.2
40 # Windows >= 2.7, 3.2
41 from signal import CTRL_C_EVENT as SIGINT
41 from signal import CTRL_C_EVENT as SIGINT
42 except ImportError:
42 except ImportError:
43 pass
43 pass
44
44
45 from subprocess import Popen, PIPE, STDOUT
45 from subprocess import Popen, PIPE, STDOUT
46 try:
46 try:
47 from subprocess import check_output
47 from subprocess import check_output
48 except ImportError:
48 except ImportError:
49 # pre-2.7, define check_output with Popen
49 # pre-2.7, define check_output with Popen
50 def check_output(*args, **kwargs):
50 def check_output(*args, **kwargs):
51 kwargs.update(dict(stdout=PIPE))
51 kwargs.update(dict(stdout=PIPE))
52 p = Popen(*args, **kwargs)
52 p = Popen(*args, **kwargs)
53 out,err = p.communicate()
53 out,err = p.communicate()
54 return out
54 return out
55
55
56 from zmq.eventloop import ioloop
56 from zmq.eventloop import ioloop
57
57
58 from IPython.config.application import Application
58 from IPython.config.application import Application
59 from IPython.config.configurable import LoggingConfigurable
59 from IPython.config.configurable import LoggingConfigurable
60 from IPython.utils.text import EvalFormatter
60 from IPython.utils.text import EvalFormatter
61 from IPython.utils.traitlets import (
61 from IPython.utils.traitlets import (
62 Any, Integer, CFloat, List, Unicode, Dict, Instance, HasTraits, CRegExp
62 Any, Integer, CFloat, List, Unicode, Dict, Instance, HasTraits, CRegExp
63 )
63 )
64 from IPython.utils.encoding import DEFAULT_ENCODING
64 from IPython.utils.encoding import DEFAULT_ENCODING
65 from IPython.utils.path import get_home_dir
65 from IPython.utils.path import get_home_dir
66 from IPython.utils.process import find_cmd, FindCmdError
66 from IPython.utils.process import find_cmd, FindCmdError
67 from IPython.utils.py3compat import iteritems, itervalues
67 from IPython.utils.py3compat import iteritems, itervalues
68
68
69 from .win32support import forward_read_events
69 from .win32support import forward_read_events
70
70
71 from .winhpcjob import IPControllerTask, IPEngineTask, IPControllerJob, IPEngineSetJob
71 from .winhpcjob import IPControllerTask, IPEngineTask, IPControllerJob, IPEngineSetJob
72
72
73 WINDOWS = os.name == 'nt'
73 WINDOWS = os.name == 'nt'
74
74
75 #-----------------------------------------------------------------------------
75 #-----------------------------------------------------------------------------
76 # Paths to the kernel apps
76 # Paths to the kernel apps
77 #-----------------------------------------------------------------------------
77 #-----------------------------------------------------------------------------
78
78
79 cmd = "from IPython.parallel.apps.%s import launch_new_instance; launch_new_instance()"
79 cmd = "from IPython.parallel.apps.%s import launch_new_instance; launch_new_instance()"
80
80
81 ipcluster_cmd_argv = [sys.executable, "-c", cmd % "ipclusterapp"]
81 ipcluster_cmd_argv = [sys.executable, "-c", cmd % "ipclusterapp"]
82
82
83 ipengine_cmd_argv = [sys.executable, "-c", cmd % "ipengineapp"]
83 ipengine_cmd_argv = [sys.executable, "-c", cmd % "ipengineapp"]
84
84
85 ipcontroller_cmd_argv = [sys.executable, "-c", cmd % "ipcontrollerapp"]
85 ipcontroller_cmd_argv = [sys.executable, "-c", cmd % "ipcontrollerapp"]
86
86
87 #-----------------------------------------------------------------------------
87 #-----------------------------------------------------------------------------
88 # Base launchers and errors
88 # Base launchers and errors
89 #-----------------------------------------------------------------------------
89 #-----------------------------------------------------------------------------
90
90
91 class LauncherError(Exception):
91 class LauncherError(Exception):
92 pass
92 pass
93
93
94
94
95 class ProcessStateError(LauncherError):
95 class ProcessStateError(LauncherError):
96 pass
96 pass
97
97
98
98
class UnknownStatus(LauncherError):
    """Raised when a launcher reports a status that cannot be interpreted."""
    pass
101
101
102
102
class BaseLauncher(LoggingConfigurable):
    """An abstraction for starting, stopping and signaling a process."""

    # In all of the launchers, the work_dir is where child processes will be
    # run. This will usually be the profile_dir, but may not be. any work_dir
    # passed into the __init__ method will override the config value.
    # This should not be used to set the work_dir for the actual engine
    # and controller. Instead, use their own config files or the
    # controller_args, engine_args attributes of the launchers to add
    # the work_dir option.
    work_dir = Unicode(u'.')
    # zmq/tornado event loop used for polling and delayed callbacks.
    loop = Instance('zmq.eventloop.ioloop.IOLoop')

    # Opaque data recorded by notify_start / notify_stop.
    start_data = Any()
    stop_data = Any()

    def _loop_default(self):
        """Default to the process-wide singleton IOLoop."""
        return ioloop.IOLoop.instance()

    def __init__(self, work_dir=u'.', config=None, **kwargs):
        """Initialize launcher state; ``state`` cycles before -> running -> after."""
        super(BaseLauncher, self).__init__(work_dir=work_dir, config=config, **kwargs)
        self.state = 'before' # can be before, running, after
        self.stop_callbacks = []
        self.start_data = None
        self.stop_data = None

    @property
    def args(self):
        """A list of cmd and args that will be used to start the process.

        This is what is passed to :func:`spawnProcess` and the first element
        will be the process name.
        """
        return self.find_args()

    def find_args(self):
        """The ``.args`` property calls this to find the args list.

        Subclasses should implement this to construct the cmd and args.
        """
        raise NotImplementedError('find_args must be implemented in a subclass')

    @property
    def arg_str(self):
        """The string form of the program arguments."""
        return ' '.join(self.args)

    @property
    def running(self):
        """Am I running."""
        if self.state == 'running':
            return True
        else:
            return False

    def start(self):
        """Start the process."""
        raise NotImplementedError('start must be implemented in a subclass')

    def stop(self):
        """Stop the process and notify observers of stopping.

        This method will return None immediately.
        To observe the actual process stopping, see :meth:`on_stop`.
        """
        raise NotImplementedError('stop must be implemented in a subclass')

    def on_stop(self, f):
        """Register a callback to be called with this Launcher's stop_data
        when the process actually finishes.

        If the process has already stopped, ``f`` is invoked immediately.
        """
        if self.state=='after':
            return f(self.stop_data)
        else:
            self.stop_callbacks.append(f)

    def notify_start(self, data):
        """Call this to trigger startup actions.

        This logs the process startup and sets the state to 'running'. It is
        a pass-through so it can be used as a callback.
        """

        self.log.debug('Process %r started: %r', self.args[0], data)
        self.start_data = data
        self.state = 'running'
        return data

    def notify_stop(self, data):
        """Call this to trigger process stop actions.

        This logs the process stopping and sets the state to 'after'. Call
        this to trigger callbacks registered via :meth:`on_stop`."""

        self.log.debug('Process %r stopped: %r', self.args[0], data)
        self.stop_data = data
        self.state = 'after'
        # Drain the callback list by popping, so each callback fires once
        # and the list is empty afterwards.
        for i in range(len(self.stop_callbacks)):
            d = self.stop_callbacks.pop()
            d(data)
        return data

    def signal(self, sig):
        """Signal the process.

        Parameters
        ----------
        sig : str or int
            'KILL', 'INT', etc., or any signal number
        """
        raise NotImplementedError('signal must be implemented in a subclass')
214
214
class ClusterAppMixin(HasTraits):
    """MixIn for cluster args as traits"""
    profile_dir=Unicode('')
    cluster_id=Unicode('')

    @property
    def cluster_args(self):
        """Command-line arguments identifying the profile dir and cluster id."""
        return ['--profile-dir', self.profile_dir, '--cluster-id', self.cluster_id]
223
223
class ControllerMixin(ClusterAppMixin):
    """Mixin providing the ipcontroller command and args as config traits."""
    controller_cmd = List(ipcontroller_cmd_argv, config=True,
        help="""Popen command to launch ipcontroller.""")
    # Command line arguments to ipcontroller.
    controller_args = List(['--log-to-file','--log-level=%i' % logging.INFO], config=True,
        help="""command-line args to pass to ipcontroller""")
230
230
class EngineMixin(ClusterAppMixin):
    """Mixin providing the ipengine command and args as config traits."""
    engine_cmd = List(ipengine_cmd_argv, config=True,
        help="""command to launch the Engine.""")
    # Command line arguments for ipengine.
    engine_args = List(['--log-to-file','--log-level=%i' % logging.INFO], config=True,
        help="command-line arguments to pass to ipengine"
    )
238
238
239
239
240 #-----------------------------------------------------------------------------
240 #-----------------------------------------------------------------------------
241 # Local process launchers
241 # Local process launchers
242 #-----------------------------------------------------------------------------
242 #-----------------------------------------------------------------------------
243
243
244
244
class LocalProcessLauncher(BaseLauncher):
    """Start and stop an external process in an asynchronous manner.

    This will launch the external process with a working directory of
    ``self.work_dir``.
    """

    # This is used to construct self.args, which is passed to
    # spawnProcess.
    cmd_and_args = List([])
    poll_frequency = Integer(100) # in ms

    def __init__(self, work_dir=u'.', config=None, **kwargs):
        """Initialize with no subprocess or poller yet created."""
        super(LocalProcessLauncher, self).__init__(
            work_dir=work_dir, config=config, **kwargs
        )
        self.process = None
        self.poller = None

    def find_args(self):
        """Return the configured command as the argv list."""
        return self.cmd_and_args

    def start(self):
        """Spawn the subprocess, wire its pipes into the IOLoop, and begin polling.

        Raises :class:`ProcessStateError` if already started.
        """
        self.log.debug("Starting %s: %r", self.__class__.__name__, self.args)
        if self.state == 'before':
            self.process = Popen(self.args,
                stdout=PIPE,stderr=PIPE,stdin=PIPE,
                env=os.environ,
                cwd=self.work_dir
            )
            if WINDOWS:
                # Windows pipes are not selectable; forward reads via a helper.
                self.stdout = forward_read_events(self.process.stdout)
                self.stderr = forward_read_events(self.process.stderr)
            else:
                self.stdout = self.process.stdout.fileno()
                self.stderr = self.process.stderr.fileno()
            self.loop.add_handler(self.stdout, self.handle_stdout, self.loop.READ)
            self.loop.add_handler(self.stderr, self.handle_stderr, self.loop.READ)
            # Periodically poll for process exit, since pipe readability alone
            # does not signal termination.
            self.poller = ioloop.PeriodicCallback(self.poll, self.poll_frequency, self.loop)
            self.poller.start()
            self.notify_start(self.process.pid)
        else:
            s = 'The process was already started and has state: %r' % self.state
            raise ProcessStateError(s)

    def stop(self):
        """Stop by interrupting, then killing after a delay."""
        return self.interrupt_then_kill()

    def signal(self, sig):
        """Send ``sig`` to the running process (no-op if not running)."""
        if self.state == 'running':
            if WINDOWS and sig != SIGINT:
                # use Windows tree-kill for better child cleanup
                check_output(['taskkill', '-pid', str(self.process.pid), '-t', '-f'])
            else:
                self.process.send_signal(sig)

    def interrupt_then_kill(self, delay=2.0):
        """Send INT, wait a delay and then send KILL."""
        try:
            self.signal(SIGINT)
        except Exception:
            self.log.debug("interrupt failed")
            pass
        # Schedule the KILL; keep a reference so the callback is not GC'd.
        self.killer = ioloop.DelayedCallback(lambda : self.signal(SIGKILL), delay*1000, self.loop)
        self.killer.start()

    # callbacks, etc:

    def handle_stdout(self, fd, events):
        """IOLoop read handler: forward one stdout line to the debug log."""
        if WINDOWS:
            line = self.stdout.recv()
        else:
            line = self.process.stdout.readline()
        # a stopped process will be readable but return empty strings
        if line:
            self.log.debug(line[:-1])
        else:
            self.poll()

    def handle_stderr(self, fd, events):
        """IOLoop read handler: forward one stderr line to the debug log."""
        if WINDOWS:
            line = self.stderr.recv()
        else:
            line = self.process.stderr.readline()
        # a stopped process will be readable but return empty strings
        if line:
            self.log.debug(line[:-1])
        else:
            self.poll()

    def poll(self):
        """Check for process exit; on exit, tear down handlers and notify.

        Returns the exit code, or None while still running.
        """
        status = self.process.poll()
        if status is not None:
            self.poller.stop()
            self.loop.remove_handler(self.stdout)
            self.loop.remove_handler(self.stderr)
            self.notify_stop(dict(exit_code=status, pid=self.process.pid))
        return status
343
343
class LocalControllerLauncher(LocalProcessLauncher, ControllerMixin):
    """Launch a controller as a regular external process."""

    def find_args(self):
        """Assemble the full ipcontroller argv: command, cluster args, extra args."""
        argv = list(self.controller_cmd)
        argv.extend(self.cluster_args)
        argv.extend(self.controller_args)
        return argv

    def start(self):
        """Start the controller by profile_dir."""
        return super(LocalControllerLauncher, self).start()
353
353
354
354
class LocalEngineLauncher(LocalProcessLauncher, EngineMixin):
    """Launch a single engine as a regular external process."""

    def find_args(self):
        """Assemble the full ipengine argv: command, cluster args, extra args."""
        return self.engine_cmd + self.cluster_args + self.engine_args
360
360
361
361
class LocalEngineSetLauncher(LocalEngineLauncher):
    """Launch a set of engines as regular external processes."""

    delay = CFloat(0.1, config=True,
        help="""delay (in seconds) between starting each engine after the first.
        This can help force the engines to get their ids in order, or limit
        process flood when starting many engines."""
    )

    # launcher class
    launcher_class = LocalEngineLauncher

    # Map of engine index -> child launcher, and index -> stop data.
    launchers = Dict()
    stop_data = Dict()

    def __init__(self, work_dir=u'.', config=None, **kwargs):
        """Initialize with an empty per-engine stop_data mapping."""
        super(LocalEngineSetLauncher, self).__init__(
            work_dir=work_dir, config=config, **kwargs
        )
        self.stop_data = {}

    def start(self, n):
        """Start n engines by profile or profile_dir."""
        dlist = []
        for i in range(n):
            if i > 0:
                # Stagger launches to keep engine ids ordered and avoid flood.
                time.sleep(self.delay)
            el = self.launcher_class(work_dir=self.work_dir, parent=self, log=self.log,
                profile_dir=self.profile_dir, cluster_id=self.cluster_id,
            )

            # Copy the engine args over to each engine launcher.
            el.engine_cmd = copy.deepcopy(self.engine_cmd)
            el.engine_args = copy.deepcopy(self.engine_args)
            el.on_stop(self._notice_engine_stopped)
            d = el.start()
            self.launchers[i] = el
            dlist.append(d)
        self.notify_start(dlist)
        return dlist

    def find_args(self):
        """Placeholder argv; a set has no single command line of its own."""
        return ['engine set']

    def signal(self, sig):
        """Forward ``sig`` to every child launcher."""
        dlist = []
        for el in itervalues(self.launchers):
            d = el.signal(sig)
            dlist.append(d)
        return dlist

    def interrupt_then_kill(self, delay=1.0):
        """INT then KILL every child launcher after ``delay`` seconds."""
        dlist = []
        for el in itervalues(self.launchers):
            d = el.interrupt_then_kill(delay)
            dlist.append(d)
        return dlist

    def stop(self):
        """Stop all engines via interrupt_then_kill."""
        return self.interrupt_then_kill()

    def _notice_engine_stopped(self, data):
        """on_stop callback for one engine: record its stop data by index and
        fire notify_stop once the last engine has exited."""
        pid = data['pid']
        # Locate the launcher whose subprocess matches the stopped pid.
        for idx,el in iteritems(self.launchers):
            if el.process.pid == pid:
                break
        self.launchers.pop(idx)
        self.stop_data[idx] = data
        if not self.launchers:
            self.notify_stop(self.stop_data)
432
432
433
433
434 #-----------------------------------------------------------------------------
434 #-----------------------------------------------------------------------------
435 # MPI launchers
435 # MPI launchers
436 #-----------------------------------------------------------------------------
436 #-----------------------------------------------------------------------------
437
437
438
438
class MPILauncher(LocalProcessLauncher):
    """Launch an external process using mpiexec."""

    mpi_cmd = List(['mpiexec'], config=True,
        help="The mpiexec command to use in starting the process."
    )
    mpi_args = List([], config=True,
        help="The command line arguments to pass to mpiexec."
    )
    program = List(['date'],
        help="The program to start via mpiexec.")
    program_args = List([],
        help="The command line argument to the program."
    )
    # Number of MPI processes; set by start(n).
    n = Integer(1)

    def __init__(self, *args, **kwargs):
        """Initialize, migrating any deprecated ``MPIExec*`` config sections
        into the corresponding ``MPI*`` sections with a warning."""
        # deprecation for old MPIExec names:
        config = kwargs.get('config', {})
        for oldname in ('MPIExecLauncher', 'MPIExecControllerLauncher', 'MPIExecEngineSetLauncher'):
            deprecated = config.get(oldname)
            if deprecated:
                newname = oldname.replace('MPIExec', 'MPI')
                config[newname].update(deprecated)
                self.log.warn("WARNING: %s name has been deprecated, use %s", oldname, newname)

        super(MPILauncher, self).__init__(*args, **kwargs)

    def find_args(self):
        """Build self.args using all the fields."""
        return self.mpi_cmd + ['-n', str(self.n)] + self.mpi_args + \
               self.program + self.program_args

    def start(self, n):
        """Start n instances of the program using mpiexec."""
        self.n = n
        return super(MPILauncher, self).start()
476
476
477
477
class MPIControllerLauncher(MPILauncher, ControllerMixin):
    """Launch a controller using mpiexec."""

    # alias back to *non-configurable* program[_args] for use in find_args()
    # this way all Controller/EngineSetLaunchers have the same form, rather
    # than *some* having `program_args` and others `controller_args`
    @property
    def program(self):
        """The controller command, exposed under the generic MPI name."""
        return self.controller_cmd

    @property
    def program_args(self):
        """Cluster-identifying args followed by controller-specific args."""
        argv = []
        argv.extend(self.cluster_args)
        argv.extend(self.controller_args)
        return argv

    def start(self):
        """Start the controller by profile_dir."""
        return super(MPIControllerLauncher, self).start(1)
495
495
496
496
class MPIEngineSetLauncher(MPILauncher, EngineMixin):
    """Launch engines using mpiexec"""

    # alias back to *non-configurable* program[_args] for use in find_args()
    # this way all Controller/EngineSetLaunchers have the same form, rather
    # than *some* having `program_args` and others `controller_args`
    @property
    def program(self):
        """The engine command, exposed under the generic MPI name."""
        return self.engine_cmd

    @property
    def program_args(self):
        """Cluster-identifying args followed by engine-specific args."""
        return self.cluster_args + self.engine_args

    def start(self, n):
        """Start n engines by profile or profile_dir."""
        self.n = n
        return super(MPIEngineSetLauncher, self).start(n)
515
515
516 # deprecated MPIExec names
516 # deprecated MPIExec names
class DeprecatedMPILauncher(object):
    """Mixin for the legacy ``MPIExec*`` classes: logs a warning that the
    class name is deprecated in favor of the ``MPI*`` spelling."""

    def warn(self):
        """Log a deprecation warning pointing at the renamed class."""
        old = type(self).__name__
        new = old.replace('MPIExec', 'MPI')
        self.log.warn("WARNING: %s name is deprecated, use %s", old, new)
522
522
class MPIExecLauncher(MPILauncher, DeprecatedMPILauncher):
    """Deprecated, use MPILauncher"""
    def __init__(self, *args, **kwargs):
        # Behave exactly like MPILauncher, but warn about the old name.
        super(MPIExecLauncher, self).__init__(*args, **kwargs)
        self.warn()
528
528
class MPIExecControllerLauncher(MPIControllerLauncher, DeprecatedMPILauncher):
    """Deprecated, use MPIControllerLauncher"""
    def __init__(self, *args, **kwargs):
        # Behave exactly like MPIControllerLauncher, but warn about the old name.
        super(MPIExecControllerLauncher, self).__init__(*args, **kwargs)
        self.warn()
534
534
class MPIExecEngineSetLauncher(MPIEngineSetLauncher, DeprecatedMPILauncher):
    """Deprecated, use MPIEngineSetLauncher"""
    def __init__(self, *args, **kwargs):
        # Behave exactly like MPIEngineSetLauncher, but warn about the old name.
        super(MPIExecEngineSetLauncher, self).__init__(*args, **kwargs)
        self.warn()
540
540
541
541
542 #-----------------------------------------------------------------------------
542 #-----------------------------------------------------------------------------
543 # SSH launchers
543 # SSH launchers
544 #-----------------------------------------------------------------------------
544 #-----------------------------------------------------------------------------
545
545
546 # TODO: Get SSH Launcher back to level of sshx in 0.10.2
546 # TODO: Get SSH Launcher back to level of sshx in 0.10.2
547
547
548 class SSHLauncher(LocalProcessLauncher):
548 class SSHLauncher(LocalProcessLauncher):
549 """A minimal launcher for ssh.
549 """A minimal launcher for ssh.
550
550
551 To be useful this will probably have to be extended to use the ``sshx``
551 To be useful this will probably have to be extended to use the ``sshx``
552 idea for environment variables. There could be other things this needs
552 idea for environment variables. There could be other things this needs
553 as well.
553 as well.
554 """
554 """
555
555
556 ssh_cmd = List(['ssh'], config=True,
556 ssh_cmd = List(['ssh'], config=True,
557 help="command for starting ssh")
557 help="command for starting ssh")
558 ssh_args = List(['-tt'], config=True,
558 ssh_args = List(['-tt'], config=True,
559 help="args to pass to ssh")
559 help="args to pass to ssh")
560 scp_cmd = List(['scp'], config=True,
560 scp_cmd = List(['scp'], config=True,
561 help="command for sending files")
561 help="command for sending files")
562 program = List(['date'],
562 program = List(['date'],
563 help="Program to launch via ssh")
563 help="Program to launch via ssh")
564 program_args = List([],
564 program_args = List([],
565 help="args to pass to remote program")
565 help="args to pass to remote program")
566 hostname = Unicode('', config=True,
566 hostname = Unicode('', config=True,
567 help="hostname on which to launch the program")
567 help="hostname on which to launch the program")
568 user = Unicode('', config=True,
568 user = Unicode('', config=True,
569 help="username for ssh")
569 help="username for ssh")
570 location = Unicode('', config=True,
570 location = Unicode('', config=True,
571 help="user@hostname location for ssh in one setting")
571 help="user@hostname location for ssh in one setting")
572 to_fetch = List([], config=True,
572 to_fetch = List([], config=True,
573 help="List of (remote, local) files to fetch after starting")
573 help="List of (remote, local) files to fetch after starting")
574 to_send = List([], config=True,
574 to_send = List([], config=True,
575 help="List of (local, remote) files to send before starting")
575 help="List of (local, remote) files to send before starting")
576
576
577 def _hostname_changed(self, name, old, new):
577 def _hostname_changed(self, name, old, new):
578 if self.user:
578 if self.user:
579 self.location = u'%s@%s' % (self.user, new)
579 self.location = u'%s@%s' % (self.user, new)
580 else:
580 else:
581 self.location = new
581 self.location = new
582
582
583 def _user_changed(self, name, old, new):
583 def _user_changed(self, name, old, new):
584 self.location = u'%s@%s' % (new, self.hostname)
584 self.location = u'%s@%s' % (new, self.hostname)
585
585
586 def find_args(self):
586 def find_args(self):
587 return self.ssh_cmd + self.ssh_args + [self.location] + \
587 return self.ssh_cmd + self.ssh_args + [self.location] + \
588 list(map(pipes.quote, self.program + self.program_args))
588 list(map(pipes.quote, self.program + self.program_args))
589
589
def _send_file(self, local, remote):
    """Copy one local file to the remote host with scp.

    Polls for up to ~10s for the local file to appear before copying,
    in case it is still being written.
    """
    dest = "%s:%s" % (self.location, remote)
    for _ in range(10):
        if os.path.exists(local):
            break
        self.log.debug("waiting for %s" % local)
        time.sleep(1)
    # proceed even if the file never appeared; scp will fail loudly
    self.log.info("sending %s to %s", local, dest)
    check_output(self.scp_cmd + [local, dest])
601
601
def send_files(self):
    """Send our files (called before start)."""
    # iterating an empty list is a no-op, so no explicit guard is needed
    for local_file, remote_file in self.to_send:
        self._send_file(local_file, remote_file)
608
608
def _fetch_file(self, remote, local):
    """Fetch a single file from the remote host with scp.

    Polls the remote side (``ssh ... test -e``) for up to ~10s for the
    file to appear before copying it back.
    """
    full_remote = "%s:%s" % (self.location, remote)
    self.log.info("fetching %s from %s", local, full_remote)
    for i in range(10):
        # wait up to 10s for remote file to exist
        check = check_output(self.ssh_cmd + self.ssh_args + \
            [self.location, 'test -e', remote, "&& echo 'yes' || echo 'no'"])
        check = check.decode(DEFAULT_ENCODING, 'replace').strip()
        if check == u'no':
            time.sleep(1)
        elif check == u'yes':
            break
    # NOTE(review): if the file never appears, the scp below is attempted
    # anyway and will fail loudly — presumably intentional; confirm.
    check_output(self.scp_cmd + [full_remote, local])
623
623
def fetch_files(self):
    """Fetch remote files (called after start)."""
    # an empty to_fetch simply yields zero iterations
    for remote_file, local_file in self.to_fetch:
        self._fetch_file(remote_file, local_file)
630
630
def start(self, hostname=None, user=None):
    """Start the remote program.

    Parameters
    ----------
    hostname : str, optional
        Overrides the configured ``hostname`` trait if given.
    user : str, optional
        Overrides the configured ``user`` trait if given.
    """
    # assigning the traits fires _hostname_changed/_user_changed,
    # keeping ``location`` up to date before launching
    if hostname is not None:
        self.hostname = hostname
    if user is not None:
        self.user = user

    # files must be sent before the remote process starts; fetch_files
    # then polls for files the remote process writes after startup
    self.send_files()
    super(SSHLauncher, self).start()
    self.fetch_files()
640
640
def signal(self, sig):
    """'Signal' the remote process by closing the ssh connection.

    NOTE(review): ``sig`` is ignored — whatever signal is requested,
    this writes the ssh escape sequence '~.' which drops the connection.
    """
    if self.state == 'running':
        # send escaped ssh connection-closer
        self.process.stdin.write('~.')
        self.process.stdin.flush()
646
646
class SSHClusterLauncher(SSHLauncher, ClusterAppMixin):
    """SSH launcher that knows about a (possibly remote) cluster profile dir."""

    remote_profile_dir = Unicode('', config=True,
        help="""The remote profile_dir to use.

        If not specified, use calling profile, stripping out possible leading homedir.
        """)

    def _profile_dir_changed(self, name, old, new):
        if not self.remote_profile_dir:
            # trigger remote_profile_dir_default logic again,
            # in case it was already triggered before profile_dir was set
            self.remote_profile_dir = self._strip_home(new)

    @staticmethod
    def _strip_home(path):
        """turns /home/you/.ipython/profile_foo into .ipython/profile_foo"""
        home = get_home_dir()
        # ensure a trailing slash so only a full path component matches
        if not home.endswith('/'):
            home = home+'/'

        if path.startswith(home):
            return path[len(home):]
        else:
            return path

    def _remote_profile_dir_default(self):
        # default: the local profile path, made relative to the remote home
        return self._strip_home(self.profile_dir)

    def _cluster_id_changed(self, name, old, new):
        # a non-default cluster id cannot be forwarded over ssh
        if new:
            raise ValueError("cluster id not supported by SSH launchers")

    @property
    def cluster_args(self):
        """Command-line args selecting the remote profile dir."""
        return ['--profile-dir', self.remote_profile_dir]
683
683
class SSHControllerLauncher(SSHClusterLauncher, ControllerMixin):
    """Launch ipcontroller on a remote host over ssh."""

    # Alias back to *non-configurable* program[_args] for use in find_args(),
    # so every Controller/EngineSet launcher has the same shape instead of
    # some exposing `program_args` and others `controller_args`.

    def _controller_cmd_default(self):
        return ['ipcontroller']

    @property
    def program(self):
        """The remote command: the (possibly configured) controller cmd."""
        return self.controller_cmd

    @property
    def program_args(self):
        """Remote args: cluster (profile-dir) args plus controller args."""
        return self.cluster_args + self.controller_args

    def _to_fetch_default(self):
        """Fetch the controller's connection files back after startup."""
        pairs = []
        for cf in ('ipcontroller-client.json', 'ipcontroller-engine.json'):
            remote_path = os.path.join(self.remote_profile_dir, 'security', cf)
            local_path = os.path.join(self.profile_dir, 'security', cf)
            pairs.append((remote_path, local_path))
        return pairs
707
707
class SSHEngineLauncher(SSHClusterLauncher, EngineMixin):
    """Launch a single ipengine on a remote host over ssh."""

    # Alias back to *non-configurable* program[_args] for use in find_args(),
    # so every Controller/EngineSet launcher has the same shape instead of
    # some exposing `program_args` and others `engine_args`.

    def _engine_cmd_default(self):
        return ['ipengine']

    @property
    def program(self):
        """The remote command: the (possibly configured) engine cmd."""
        return self.engine_cmd

    @property
    def program_args(self):
        """Remote args: cluster (profile-dir) args plus engine args."""
        return self.cluster_args + self.engine_args

    def _to_send_default(self):
        """Ship the connection files to the remote profile before starting."""
        pairs = []
        for cf in ('ipcontroller-client.json', 'ipcontroller-engine.json'):
            local_path = os.path.join(self.profile_dir, 'security', cf)
            remote_path = os.path.join(self.remote_profile_dir, 'security', cf)
            pairs.append((local_path, remote_path))
        return pairs
731
731
732
732
class SSHEngineSetLauncher(LocalEngineSetLauncher):
    """Launch sets of engines on multiple hosts over ssh.

    Hosts and per-host counts come from the `engines` dict; the `n`
    argument to start() is ignored.
    """
    launcher_class = SSHEngineLauncher
    engines = Dict(config=True,
        help="""dict of engines to launch. This is a dict by hostname of ints,
        corresponding to the number of engines to start on that host.""")

    def _engine_cmd_default(self):
        return ['ipengine']

    @property
    def engine_count(self):
        """determine engine count from `engines` dict"""
        count = 0
        for n in itervalues(self.engines):
            # a value may be (count, [args]) instead of a plain int
            if isinstance(n, (tuple,list)):
                n,args = n
            count += n
        return count

    def start(self, n):
        """Start engines by profile or profile_dir.
        `n` is ignored, and the `engines` config property is used instead.
        """

        dlist = []
        for host, n in iteritems(self.engines):
            # each value is either an int (engine count) or (count, [args])
            if isinstance(n, (tuple, list)):
                n, args = n
            else:
                args = copy.deepcopy(self.engine_args)

            # a 'user@host' key overrides the ssh user for that host
            if '@' in host:
                user,host = host.split('@',1)
            else:
                user=None
            for i in range(n):
                if i > 0:
                    # stagger engine starts
                    time.sleep(self.delay)
                el = self.launcher_class(work_dir=self.work_dir, parent=self, log=self.log,
                                        profile_dir=self.profile_dir, cluster_id=self.cluster_id,
                )
                if i > 0:
                    # only send files for the first engine on each host
                    el.to_send = []

                # Copy the engine args over to each engine launcher.
                el.engine_cmd = self.engine_cmd
                el.engine_args = args
                el.on_stop(self._notice_engine_stopped)
                d = el.start(user=user, hostname=host)
                self.launchers[ "%s/%i" % (host,i) ] = el
                dlist.append(d)
        self.notify_start(dlist)
        return dlist
787
787
788
788
class SSHProxyEngineSetLauncher(SSHClusterLauncher):
    """Start engines by running ``ipcluster engines`` on a remote host.

    Requires that the remote profile is already configured.
    """

    # number of engines requested in start(); forwarded as `-n`
    n = Integer()
    ipcluster_cmd = List(['ipcluster'], config=True)

    @property
    def program(self):
        """The remote command: ipcluster's `engines` subcommand."""
        return self.ipcluster_cmd + ['engines']

    @property
    def program_args(self):
        """Remote args: engine count plus the remote profile dir."""
        return ['-n', str(self.n), '--profile-dir', self.remote_profile_dir]

    def _to_send_default(self):
        """Ship the connection files to the remote profile before starting."""
        files = []
        for cf in ('ipcontroller-client.json', 'ipcontroller-engine.json'):
            files.append((
                os.path.join(self.profile_dir, 'security', cf),
                os.path.join(self.remote_profile_dir, 'security', cf),
            ))
        return files

    def start(self, n):
        """Record the engine count, then launch the remote ipcluster."""
        self.n = n
        super(SSHProxyEngineSetLauncher, self).start()
817
817
818
818
819 #-----------------------------------------------------------------------------
819 #-----------------------------------------------------------------------------
820 # Windows HPC Server 2008 scheduler launchers
820 # Windows HPC Server 2008 scheduler launchers
821 #-----------------------------------------------------------------------------
821 #-----------------------------------------------------------------------------
822
822
823
823
# This is only used on Windows.
def find_job_cmd():
    """Locate the Windows HPC ``job`` command, falling back to 'job'."""
    if not WINDOWS:
        return 'job'
    try:
        return find_cmd('job')
    except (FindCmdError, ImportError):
        # ImportError is raised when win32api is not installed
        return 'job'
834
834
835
835
class WindowsHPCLauncher(BaseLauncher):
    """Base class for launchers using the Windows HPC Server 2008 scheduler."""

    job_id_regexp = CRegExp(r'\d+', config=True,
        help="""A regular expression used to get the job id from the output of the
        submit_command. """
    )
    job_file_name = Unicode(u'ipython_job.xml', config=True,
        help="The filename of the instantiated job script.")
    # The full path to the instantiated job script. This gets made dynamically
    # by combining the work_dir with the job_file_name.
    # NOTE(review): this trait is shadowed by the `job_file` property below,
    # which is what callers actually see.
    job_file = Unicode(u'')
    scheduler = Unicode('', config=True,
        help="The hostname of the scheduler to submit the job to.")
    job_cmd = Unicode(find_job_cmd(), config=True,
        help="The command for submitting jobs.")

    def __init__(self, work_dir=u'.', config=None, **kwargs):
        # no extra state; just forward to BaseLauncher
        super(WindowsHPCLauncher, self).__init__(
            work_dir=work_dir, config=config, **kwargs
        )

    @property
    def job_file(self):
        # full path of the job XML, under the working directory
        return os.path.join(self.work_dir, self.job_file_name)

    def write_job_file(self, n):
        """Write the XML job description for `n` tasks (subclass hook)."""
        raise NotImplementedError("Implement write_job_file in a subclass.")

    def find_args(self):
        # BaseLauncher hook; the real submit command line is built in start()
        return [u'job.exe']

    def parse_job_id(self, output):
        """Take the output of the submit command and return the job id."""
        m = self.job_id_regexp.search(output)
        if m is not None:
            job_id = m.group()
        else:
            raise LauncherError("Job id couldn't be determined: %s" % output)
        self.job_id = job_id
        self.log.info('Job started with id: %r', job_id)
        return job_id

    def start(self, n):
        """Start n copies of the process using the Win HPC job scheduler."""
        self.write_job_file(n)
        args = [
            'submit',
            '/jobfile:%s' % self.job_file,
            '/scheduler:%s' % self.scheduler
        ]
        self.log.debug("Starting Win HPC Job: %s" % (self.job_cmd + ' ' + ' '.join(args),))

        output = check_output([self.job_cmd]+args,
            env=os.environ,
            cwd=self.work_dir,
            stderr=STDOUT
        )
        output = output.decode(DEFAULT_ENCODING, 'replace')
        job_id = self.parse_job_id(output)
        self.notify_start(job_id)
        return job_id

    def stop(self):
        """Cancel the submitted job; best-effort if it is already stopped."""
        args = [
            'cancel',
            self.job_id,
            '/scheduler:%s' % self.scheduler
        ]
        self.log.info("Stopping Win HPC Job: %s" % (self.job_cmd + ' ' + ' '.join(args),))
        try:
            output = check_output([self.job_cmd]+args,
                env=os.environ,
                cwd=self.work_dir,
                stderr=STDOUT
            )
            output = output.decode(DEFAULT_ENCODING, 'replace')
        # NOTE(review): bare except deliberately swallows any cancel failure
        # and reports the job as already stopped
        except:
            output = u'The job already appears to be stopped: %r' % self.job_id
        self.notify_stop(dict(job_id=self.job_id, output=output)) # Pass the output of the kill cmd
        return output
916
916
917
917
class WindowsHPCControllerLauncher(WindowsHPCLauncher, ClusterAppMixin):
    """Launch an ipcontroller job via the Windows HPC scheduler."""

    job_file_name = Unicode(u'ipcontroller_job.xml', config=True,
        help="WinHPC xml job file.")
    controller_args = List([], config=False,
        help="extra args to pass to ipcontroller")

    def write_job_file(self, n):
        """Write the WinHPC XML job description with one controller task."""
        job = IPControllerJob(parent=self)

        t = IPControllerTask(parent=self)
        # The tasks work directory is *not* the actual work directory of
        # the controller. It is used as the base path for the stdout/stderr
        # files that the scheduler redirects to.
        t.work_directory = self.profile_dir
        # Add the profile-dir/cluster args plus the configured extras.
        t.controller_args.extend(self.cluster_args)
        t.controller_args.extend(self.controller_args)
        job.add_task(t)

        self.log.debug("Writing job description file: %s", self.job_file)
        job.write(self.job_file)

    @property
    def job_file(self):
        # overrides the base class: the job file lives in the profile dir,
        # not the work dir
        return os.path.join(self.profile_dir, self.job_file_name)

    def start(self):
        """Start the controller by profile_dir."""
        return super(WindowsHPCControllerLauncher, self).start(1)
948
948
949
949
class WindowsHPCEngineSetLauncher(WindowsHPCLauncher, ClusterAppMixin):
    """Launch a set of ipengines via the Windows HPC scheduler."""

    job_file_name = Unicode(u'ipengineset_job.xml', config=True,
        help="jobfile for ipengines job")
    # fixed typo in help text: "pas" -> "pass"
    engine_args = List([], config=False,
        help="extra args to pass to ipengine")

    def write_job_file(self, n):
        """Write the WinHPC XML job description with one task per engine."""
        job = IPEngineSetJob(parent=self)

        for i in range(n):
            t = IPEngineTask(parent=self)
            # The tasks work directory is *not* the actual work directory of
            # the engine. It is used as the base path for the stdout/stderr
            # files that the scheduler redirects to.
            t.work_directory = self.profile_dir
            # Add the profile-dir/cluster args plus the configured extras.
            t.engine_args.extend(self.cluster_args)
            t.engine_args.extend(self.engine_args)
            job.add_task(t)

        self.log.debug("Writing job description file: %s", self.job_file)
        job.write(self.job_file)

    @property
    def job_file(self):
        # the job file lives in the profile dir, not the work dir
        return os.path.join(self.profile_dir, self.job_file_name)

    def start(self, n):
        """Start n engines by profile_dir."""
        return super(WindowsHPCEngineSetLauncher, self).start(n)
981
981
982
982
983 #-----------------------------------------------------------------------------
983 #-----------------------------------------------------------------------------
984 # Batch (PBS) system launchers
984 # Batch (PBS) system launchers
985 #-----------------------------------------------------------------------------
985 #-----------------------------------------------------------------------------
986
986
class BatchClusterAppMixin(ClusterAppMixin):
    """ClusterApp mixin that updates the self.context dict, rather than cl-args."""
    def _profile_dir_changed(self, name, old, new):
        # mirror the trait change into the batch-template context
        self.context[name] = new
    # cluster_id changes are mirrored into the context the same way
    _cluster_id_changed = _profile_dir_changed

    def _profile_dir_default(self):
        # pre-seed the context so the template always has the key
        self.context['profile_dir'] = ''
        return ''
    def _cluster_id_default(self):
        self.context['cluster_id'] = ''
        return ''
999
999
1000
1000
class BatchSystemLauncher(BaseLauncher):
    """Launch an external process using a batch system.

    This class is designed to work with UNIX batch systems like PBS, LSF,
    GridEngine, etc. The overall model is that there are different commands
    like qsub, qdel, etc. that handle the starting and stopping of the process.

    This class also has the notion of a batch script. The ``batch_template``
    attribute can be set to a string that is a template for the batch script.
    This template is instantiated using string formatting. Thus the template can
    use {n} for the number of instances. Subclasses can add additional variables
    to the template dict.
    """

    # Subclasses must fill these in. See PBSEngineSet
    submit_command = List([''], config=True,
        help="The name of the command line program used to submit jobs.")
    delete_command = List([''], config=True,
        help="The name of the command line program used to delete jobs.")
    job_id_regexp = CRegExp('', config=True,
        help="""A regular expression used to get the job id from the output of the
        submit_command.""")
    job_id_regexp_group = Integer(0, config=True,
        help="""The group we wish to match in job_id_regexp (0 to match all)""")
    batch_template = Unicode('', config=True,
        help="The string that is the batch script template itself.")
    batch_template_file = Unicode(u'', config=True,
        help="The file that contains the batch template.")
    batch_file_name = Unicode(u'batch_script', config=True,
        help="The filename of the instantiated batch script.")
    queue = Unicode(u'', config=True,
        help="The PBS Queue.")
1033
1033
1034 def _queue_changed(self, name, old, new):
1034 def _queue_changed(self, name, old, new):
1035 self.context[name] = new
1035 self.context[name] = new
1036
1036
    # number of instances, substituted as {n} in the template
    n = Integer(1)
    # n changes are mirrored into the context like queue changes
    _n_changed = _queue_changed

    # not configurable, override in subclasses
    # PBS Job Array regex
    job_array_regexp = CRegExp('')
    job_array_template = Unicode('')
    # PBS Queue regex
    queue_regexp = CRegExp('')
    queue_template = Unicode('')
    # The default batch template, override in subclasses
    default_template = Unicode('')
    # The full path to the instantiated batch script.
    batch_file = Unicode(u'')
    # the format dict used with batch_template:
    context = Dict()
1053
1053
1054 def _context_default(self):
1054 def _context_default(self):
1055 """load the default context with the default values for the basic keys
1055 """load the default context with the default values for the basic keys
1056
1056
1057 because the _trait_changed methods only load the context if they
1057 because the _trait_changed methods only load the context if they
1058 are set to something other than the default value.
1058 are set to something other than the default value.
1059 """
1059 """
1060 return dict(n=1, queue=u'', profile_dir=u'', cluster_id=u'')
1060 return dict(n=1, queue=u'', profile_dir=u'', cluster_id=u'')
1061
1061
# the Formatter instance for rendering the templates:
formatter = Instance(EvalFormatter, (), {})

def find_args(self):
    """The submit command plus the instantiated batch script path."""
    return self.submit_command + [self.batch_file]
1067
1067
def __init__(self, work_dir=u'.', config=None, **kwargs):
    """Initialize the launcher and compute the batch file path."""
    super(BatchSystemLauncher, self).__init__(
        work_dir=work_dir, config=config, **kwargs
    )
    # the instantiated batch script lives in the working directory
    self.batch_file = os.path.join(self.work_dir, self.batch_file_name)
1073
1073
1074 def parse_job_id(self, output):
1074 def parse_job_id(self, output):
1075 """Take the output of the submit command and return the job id."""
1075 """Take the output of the submit command and return the job id."""
1076 m = self.job_id_regexp.search(output)
1076 m = self.job_id_regexp.search(output)
1077 if m is not None:
1077 if m is not None:
1078 job_id = m.group(self.job_id_regexp_group)
1078 job_id = m.group(self.job_id_regexp_group)
1079 else:
1079 else:
1080 raise LauncherError("Job id couldn't be determined: %s" % output)
1080 raise LauncherError("Job id couldn't be determined: %s" % output)
1081 self.job_id = job_id
1081 self.job_id = job_id
1082 self.log.info('Job submitted with job id: %r', job_id)
1082 self.log.info('Job submitted with job id: %r', job_id)
1083 return job_id
1083 return job_id
1084
1084
1085 def write_batch_script(self, n):
1085 def write_batch_script(self, n):
1086 """Instantiate and write the batch script to the work_dir."""
1086 """Instantiate and write the batch script to the work_dir."""
1087 self.n = n
1087 self.n = n
1088 # first priority is batch_template if set
1088 # first priority is batch_template if set
1089 if self.batch_template_file and not self.batch_template:
1089 if self.batch_template_file and not self.batch_template:
1090 # second priority is batch_template_file
1090 # second priority is batch_template_file
1091 with open(self.batch_template_file) as f:
1091 with open(self.batch_template_file) as f:
1092 self.batch_template = f.read()
1092 self.batch_template = f.read()
1093 if not self.batch_template:
1093 if not self.batch_template:
1094 # third (last) priority is default_template
1094 # third (last) priority is default_template
1095 self.batch_template = self.default_template
1095 self.batch_template = self.default_template
1096 # add jobarray or queue lines to user-specified template
1096 # add jobarray or queue lines to user-specified template
1097 # note that this is *only* when user did not specify a template.
1097 # note that this is *only* when user did not specify a template.
1098 self._insert_queue_in_script()
1098 self._insert_queue_in_script()
1099 self._insert_job_array_in_script()
1099 self._insert_job_array_in_script()
1100 script_as_string = self.formatter.format(self.batch_template, **self.context)
1100 script_as_string = self.formatter.format(self.batch_template, **self.context)
1101 self.log.debug('Writing batch script: %s', self.batch_file)
1101 self.log.debug('Writing batch script: %s', self.batch_file)
1102 with open(self.batch_file, 'w') as f:
1102 with open(self.batch_file, 'w') as f:
1103 f.write(script_as_string)
1103 f.write(script_as_string)
1104 os.chmod(self.batch_file, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
1104 os.chmod(self.batch_file, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
1105
1105
1106 def _insert_queue_in_script(self):
1106 def _insert_queue_in_script(self):
1107 """Inserts a queue if required into the batch script.
1107 """Inserts a queue if required into the batch script.
1108 """
1108 """
1109 if self.queue and not self.queue_regexp.search(self.batch_template):
1109 if self.queue and not self.queue_regexp.search(self.batch_template):
1110 self.log.debug("adding PBS queue settings to batch script")
1110 self.log.debug("adding PBS queue settings to batch script")
1111 firstline, rest = self.batch_template.split('\n',1)
1111 firstline, rest = self.batch_template.split('\n',1)
1112 self.batch_template = u'\n'.join([firstline, self.queue_template, rest])
1112 self.batch_template = u'\n'.join([firstline, self.queue_template, rest])
1113
1113
1114 def _insert_job_array_in_script(self):
1114 def _insert_job_array_in_script(self):
1115 """Inserts a job array if required into the batch script.
1115 """Inserts a job array if required into the batch script.
1116 """
1116 """
1117 if not self.job_array_regexp.search(self.batch_template):
1117 if not self.job_array_regexp.search(self.batch_template):
1118 self.log.debug("adding job array settings to batch script")
1118 self.log.debug("adding job array settings to batch script")
1119 firstline, rest = self.batch_template.split('\n',1)
1119 firstline, rest = self.batch_template.split('\n',1)
1120 self.batch_template = u'\n'.join([firstline, self.job_array_template, rest])
1120 self.batch_template = u'\n'.join([firstline, self.job_array_template, rest])
1121
1121
1122 def start(self, n):
1122 def start(self, n):
1123 """Start n copies of the process using a batch system."""
1123 """Start n copies of the process using a batch system."""
1124 self.log.debug("Starting %s: %r", self.__class__.__name__, self.args)
1124 self.log.debug("Starting %s: %r", self.__class__.__name__, self.args)
1125 # Here we save profile_dir in the context so they
1125 # Here we save profile_dir in the context so they
1126 # can be used in the batch script template as {profile_dir}
1126 # can be used in the batch script template as {profile_dir}
1127 self.write_batch_script(n)
1127 self.write_batch_script(n)
1128 output = check_output(self.args, env=os.environ)
1128 output = check_output(self.args, env=os.environ)
1129 output = output.decode(DEFAULT_ENCODING, 'replace')
1129 output = output.decode(DEFAULT_ENCODING, 'replace')
1130
1130
1131 job_id = self.parse_job_id(output)
1131 job_id = self.parse_job_id(output)
1132 self.notify_start(job_id)
1132 self.notify_start(job_id)
1133 return job_id
1133 return job_id
1134
1134
1135 def stop(self):
1135 def stop(self):
1136 try:
1136 try:
1137 p = Popen(self.delete_command+[self.job_id], env=os.environ,
1137 p = Popen(self.delete_command+[self.job_id], env=os.environ,
1138 stdout=PIPE, stderr=PIPE)
1138 stdout=PIPE, stderr=PIPE)
1139 out, err = p.communicate()
1139 out, err = p.communicate()
1140 output = out + err
1140 output = out + err
1141 except:
1141 except:
1142 self.log.exception("Problem stopping cluster with command: %s" %
1142 self.log.exception("Problem stopping cluster with command: %s" %
1143 (self.delete_command + [self.job_id]))
1143 (self.delete_command + [self.job_id]))
1144 output = ""
1144 output = ""
1145 output = output.decode(DEFAULT_ENCODING, 'replace')
1145 output = output.decode(DEFAULT_ENCODING, 'replace')
1146 self.notify_stop(dict(job_id=self.job_id, output=output)) # Pass the output of the kill cmd
1146 self.notify_stop(dict(job_id=self.job_id, output=output)) # Pass the output of the kill cmd
1147 return output
1147 return output
1148
1148
1149
1149
1150 class PBSLauncher(BatchSystemLauncher):
1150 class PBSLauncher(BatchSystemLauncher):
1151 """A BatchSystemLauncher subclass for PBS."""
1151 """A BatchSystemLauncher subclass for PBS."""
1152
1152
1153 submit_command = List(['qsub'], config=True,
1153 submit_command = List(['qsub'], config=True,
1154 help="The PBS submit command ['qsub']")
1154 help="The PBS submit command ['qsub']")
1155 delete_command = List(['qdel'], config=True,
1155 delete_command = List(['qdel'], config=True,
1156 help="The PBS delete command ['qsub']")
1156 help="The PBS delete command ['qsub']")
1157 job_id_regexp = CRegExp(r'\d+', config=True,
1157 job_id_regexp = CRegExp(r'\d+', config=True,
1158 help="Regular expresion for identifying the job ID [r'\d+']")
1158 help="Regular expresion for identifying the job ID [r'\d+']")
1159
1159
1160 batch_file = Unicode(u'')
1160 batch_file = Unicode(u'')
1161 job_array_regexp = CRegExp('#PBS\W+-t\W+[\w\d\-\$]+')
1161 job_array_regexp = CRegExp('#PBS\W+-t\W+[\w\d\-\$]+')
1162 job_array_template = Unicode('#PBS -t 1-{n}')
1162 job_array_template = Unicode('#PBS -t 1-{n}')
1163 queue_regexp = CRegExp('#PBS\W+-q\W+\$?\w+')
1163 queue_regexp = CRegExp('#PBS\W+-q\W+\$?\w+')
1164 queue_template = Unicode('#PBS -q {queue}')
1164 queue_template = Unicode('#PBS -q {queue}')
1165
1165
1166
1166
1167 class PBSControllerLauncher(PBSLauncher, BatchClusterAppMixin):
1167 class PBSControllerLauncher(PBSLauncher, BatchClusterAppMixin):
1168 """Launch a controller using PBS."""
1168 """Launch a controller using PBS."""
1169
1169
1170 batch_file_name = Unicode(u'pbs_controller', config=True,
1170 batch_file_name = Unicode(u'pbs_controller', config=True,
1171 help="batch file name for the controller job.")
1171 help="batch file name for the controller job.")
1172 default_template= Unicode("""#!/bin/sh
1172 default_template= Unicode("""#!/bin/sh
1173 #PBS -V
1173 #PBS -V
1174 #PBS -N ipcontroller
1174 #PBS -N ipcontroller
1175 %s --log-to-file --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1175 %s --log-to-file --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1176 """%(' '.join(map(pipes.quote, ipcontroller_cmd_argv))))
1176 """%(' '.join(map(pipes.quote, ipcontroller_cmd_argv))))
1177
1177
1178 def start(self):
1178 def start(self):
1179 """Start the controller by profile or profile_dir."""
1179 """Start the controller by profile or profile_dir."""
1180 return super(PBSControllerLauncher, self).start(1)
1180 return super(PBSControllerLauncher, self).start(1)
1181
1181
1182
1182
1183 class PBSEngineSetLauncher(PBSLauncher, BatchClusterAppMixin):
1183 class PBSEngineSetLauncher(PBSLauncher, BatchClusterAppMixin):
1184 """Launch Engines using PBS"""
1184 """Launch Engines using PBS"""
1185 batch_file_name = Unicode(u'pbs_engines', config=True,
1185 batch_file_name = Unicode(u'pbs_engines', config=True,
1186 help="batch file name for the engine(s) job.")
1186 help="batch file name for the engine(s) job.")
1187 default_template= Unicode(u"""#!/bin/sh
1187 default_template= Unicode(u"""#!/bin/sh
1188 #PBS -V
1188 #PBS -V
1189 #PBS -N ipengine
1189 #PBS -N ipengine
1190 %s --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1190 %s --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1191 """%(' '.join(map(pipes.quote,ipengine_cmd_argv))))
1191 """%(' '.join(map(pipes.quote,ipengine_cmd_argv))))
1192
1192
1193
1193
1194 #SGE is very similar to PBS
1194 #SGE is very similar to PBS
1195
1195
1196 class SGELauncher(PBSLauncher):
1196 class SGELauncher(PBSLauncher):
1197 """Sun GridEngine is a PBS clone with slightly different syntax"""
1197 """Sun GridEngine is a PBS clone with slightly different syntax"""
1198 job_array_regexp = CRegExp('#\$\W+\-t')
1198 job_array_regexp = CRegExp('#\$\W+\-t')
1199 job_array_template = Unicode('#$ -t 1-{n}')
1199 job_array_template = Unicode('#$ -t 1-{n}')
1200 queue_regexp = CRegExp('#\$\W+-q\W+\$?\w+')
1200 queue_regexp = CRegExp('#\$\W+-q\W+\$?\w+')
1201 queue_template = Unicode('#$ -q {queue}')
1201 queue_template = Unicode('#$ -q {queue}')
1202
1202
1203
1203
1204 class SGEControllerLauncher(SGELauncher, BatchClusterAppMixin):
1204 class SGEControllerLauncher(SGELauncher, BatchClusterAppMixin):
1205 """Launch a controller using SGE."""
1205 """Launch a controller using SGE."""
1206
1206
1207 batch_file_name = Unicode(u'sge_controller', config=True,
1207 batch_file_name = Unicode(u'sge_controller', config=True,
1208 help="batch file name for the ipontroller job.")
1208 help="batch file name for the ipontroller job.")
1209 default_template= Unicode(u"""#$ -V
1209 default_template= Unicode(u"""#$ -V
1210 #$ -S /bin/sh
1210 #$ -S /bin/sh
1211 #$ -N ipcontroller
1211 #$ -N ipcontroller
1212 %s --log-to-file --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1212 %s --log-to-file --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1213 """%(' '.join(map(pipes.quote, ipcontroller_cmd_argv))))
1213 """%(' '.join(map(pipes.quote, ipcontroller_cmd_argv))))
1214
1214
1215 def start(self):
1215 def start(self):
1216 """Start the controller by profile or profile_dir."""
1216 """Start the controller by profile or profile_dir."""
1217 return super(SGEControllerLauncher, self).start(1)
1217 return super(SGEControllerLauncher, self).start(1)
1218
1218
1219
1219
1220 class SGEEngineSetLauncher(SGELauncher, BatchClusterAppMixin):
1220 class SGEEngineSetLauncher(SGELauncher, BatchClusterAppMixin):
1221 """Launch Engines with SGE"""
1221 """Launch Engines with SGE"""
1222 batch_file_name = Unicode(u'sge_engines', config=True,
1222 batch_file_name = Unicode(u'sge_engines', config=True,
1223 help="batch file name for the engine(s) job.")
1223 help="batch file name for the engine(s) job.")
1224 default_template = Unicode("""#$ -V
1224 default_template = Unicode("""#$ -V
1225 #$ -S /bin/sh
1225 #$ -S /bin/sh
1226 #$ -N ipengine
1226 #$ -N ipengine
1227 %s --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1227 %s --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1228 """%(' '.join(map(pipes.quote, ipengine_cmd_argv))))
1228 """%(' '.join(map(pipes.quote, ipengine_cmd_argv))))
1229
1229
1230
1230
1231 # LSF launchers
1231 # LSF launchers
1232
1232
1233 class LSFLauncher(BatchSystemLauncher):
1233 class LSFLauncher(BatchSystemLauncher):
1234 """A BatchSystemLauncher subclass for LSF."""
1234 """A BatchSystemLauncher subclass for LSF."""
1235
1235
1236 submit_command = List(['bsub'], config=True,
1236 submit_command = List(['bsub'], config=True,
1237 help="The PBS submit command ['bsub']")
1237 help="The PBS submit command ['bsub']")
1238 delete_command = List(['bkill'], config=True,
1238 delete_command = List(['bkill'], config=True,
1239 help="The PBS delete command ['bkill']")
1239 help="The PBS delete command ['bkill']")
1240 job_id_regexp = CRegExp(r'\d+', config=True,
1240 job_id_regexp = CRegExp(r'\d+', config=True,
1241 help="Regular expresion for identifying the job ID [r'\d+']")
1241 help="Regular expresion for identifying the job ID [r'\d+']")
1242
1242
1243 batch_file = Unicode(u'')
1243 batch_file = Unicode(u'')
1244 job_array_regexp = CRegExp('#BSUB[ \t]-J+\w+\[\d+-\d+\]')
1244 job_array_regexp = CRegExp('#BSUB[ \t]-J+\w+\[\d+-\d+\]')
1245 job_array_template = Unicode('#BSUB -J ipengine[1-{n}]')
1245 job_array_template = Unicode('#BSUB -J ipengine[1-{n}]')
1246 queue_regexp = CRegExp('#BSUB[ \t]+-q[ \t]+\w+')
1246 queue_regexp = CRegExp('#BSUB[ \t]+-q[ \t]+\w+')
1247 queue_template = Unicode('#BSUB -q {queue}')
1247 queue_template = Unicode('#BSUB -q {queue}')
1248
1248
1249 def start(self, n):
1249 def start(self, n):
1250 """Start n copies of the process using LSF batch system.
1250 """Start n copies of the process using LSF batch system.
1251 This cant inherit from the base class because bsub expects
1251 This cant inherit from the base class because bsub expects
1252 to be piped a shell script in order to honor the #BSUB directives :
1252 to be piped a shell script in order to honor the #BSUB directives :
1253 bsub < script
1253 bsub < script
1254 """
1254 """
1255 # Here we save profile_dir in the context so they
1255 # Here we save profile_dir in the context so they
1256 # can be used in the batch script template as {profile_dir}
1256 # can be used in the batch script template as {profile_dir}
1257 self.write_batch_script(n)
1257 self.write_batch_script(n)
1258 piped_cmd = self.args[0]+'<\"'+self.args[1]+'\"'
1258 piped_cmd = self.args[0]+'<\"'+self.args[1]+'\"'
1259 self.log.debug("Starting %s: %s", self.__class__.__name__, piped_cmd)
1259 self.log.debug("Starting %s: %s", self.__class__.__name__, piped_cmd)
1260 p = Popen(piped_cmd, shell=True,env=os.environ,stdout=PIPE)
1260 p = Popen(piped_cmd, shell=True,env=os.environ,stdout=PIPE)
1261 output,err = p.communicate()
1261 output,err = p.communicate()
1262 output = output.decode(DEFAULT_ENCODING, 'replace')
1262 output = output.decode(DEFAULT_ENCODING, 'replace')
1263 job_id = self.parse_job_id(output)
1263 job_id = self.parse_job_id(output)
1264 self.notify_start(job_id)
1264 self.notify_start(job_id)
1265 return job_id
1265 return job_id
1266
1266
1267
1267
1268 class LSFControllerLauncher(LSFLauncher, BatchClusterAppMixin):
1268 class LSFControllerLauncher(LSFLauncher, BatchClusterAppMixin):
1269 """Launch a controller using LSF."""
1269 """Launch a controller using LSF."""
1270
1270
1271 batch_file_name = Unicode(u'lsf_controller', config=True,
1271 batch_file_name = Unicode(u'lsf_controller', config=True,
1272 help="batch file name for the controller job.")
1272 help="batch file name for the controller job.")
1273 default_template= Unicode("""#!/bin/sh
1273 default_template= Unicode("""#!/bin/sh
1274 #BSUB -J ipcontroller
1274 #BSUB -J ipcontroller
1275 #BSUB -oo ipcontroller.o.%%J
1275 #BSUB -oo ipcontroller.o.%%J
1276 #BSUB -eo ipcontroller.e.%%J
1276 #BSUB -eo ipcontroller.e.%%J
1277 %s --log-to-file --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1277 %s --log-to-file --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1278 """%(' '.join(map(pipes.quote,ipcontroller_cmd_argv))))
1278 """%(' '.join(map(pipes.quote,ipcontroller_cmd_argv))))
1279
1279
1280 def start(self):
1280 def start(self):
1281 """Start the controller by profile or profile_dir."""
1281 """Start the controller by profile or profile_dir."""
1282 return super(LSFControllerLauncher, self).start(1)
1282 return super(LSFControllerLauncher, self).start(1)
1283
1283
1284
1284
1285 class LSFEngineSetLauncher(LSFLauncher, BatchClusterAppMixin):
1285 class LSFEngineSetLauncher(LSFLauncher, BatchClusterAppMixin):
1286 """Launch Engines using LSF"""
1286 """Launch Engines using LSF"""
1287 batch_file_name = Unicode(u'lsf_engines', config=True,
1287 batch_file_name = Unicode(u'lsf_engines', config=True,
1288 help="batch file name for the engine(s) job.")
1288 help="batch file name for the engine(s) job.")
1289 default_template= Unicode(u"""#!/bin/sh
1289 default_template= Unicode(u"""#!/bin/sh
1290 #BSUB -oo ipengine.o.%%J
1290 #BSUB -oo ipengine.o.%%J
1291 #BSUB -eo ipengine.e.%%J
1291 #BSUB -eo ipengine.e.%%J
1292 %s --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1292 %s --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1293 """%(' '.join(map(pipes.quote, ipengine_cmd_argv))))
1293 """%(' '.join(map(pipes.quote, ipengine_cmd_argv))))
1294
1294
1295
1295
1296
1296
1297 class HTCondorLauncher(BatchSystemLauncher):
1297 class HTCondorLauncher(BatchSystemLauncher):
1298 """A BatchSystemLauncher subclass for HTCondor.
1298 """A BatchSystemLauncher subclass for HTCondor.
1299
1299
1300 HTCondor requires that we launch the ipengine/ipcontroller scripts rather
1300 HTCondor requires that we launch the ipengine/ipcontroller scripts rather
1301 that the python instance but otherwise is very similar to PBS. This is because
1301 that the python instance but otherwise is very similar to PBS. This is because
1302 HTCondor destroys sys.executable when launching remote processes - a launched
1302 HTCondor destroys sys.executable when launching remote processes - a launched
1303 python process depends on sys.executable to effectively evaluate its
1303 python process depends on sys.executable to effectively evaluate its
1304 module search paths. Without it, regardless of which python interpreter you launch
1304 module search paths. Without it, regardless of which python interpreter you launch
1305 you will get the to built in module search paths.
1305 you will get the to built in module search paths.
1306
1306
1307 We use the ip{cluster, engine, controller} scripts as our executable to circumvent
1307 We use the ip{cluster, engine, controller} scripts as our executable to circumvent
1308 this - the mechanism of shebanged scripts means that the python binary will be
1308 this - the mechanism of shebanged scripts means that the python binary will be
1309 launched with argv[0] set to the *location of the ip{cluster, engine, controller}
1309 launched with argv[0] set to the *location of the ip{cluster, engine, controller}
1310 scripts on the remote node*. This means you need to take care that:
1310 scripts on the remote node*. This means you need to take care that:
1311 a. Your remote nodes have their paths configured correctly, with the ipengine and ipcontroller
1311
1312 of the python environment you wish to execute code in having top precedence.
1312 a. Your remote nodes have their paths configured correctly, with the ipengine and ipcontroller
1313 b. This functionality is untested on Windows.
1313 of the python environment you wish to execute code in having top precedence.
1314 b. This functionality is untested on Windows.
1314
1315
1315 If you need different behavior, consider making you own template.
1316 If you need different behavior, consider making you own template.
1316 """
1317 """
1317
1318
1318 submit_command = List(['condor_submit'], config=True,
1319 submit_command = List(['condor_submit'], config=True,
1319 help="The HTCondor submit command ['condor_submit']")
1320 help="The HTCondor submit command ['condor_submit']")
1320 delete_command = List(['condor_rm'], config=True,
1321 delete_command = List(['condor_rm'], config=True,
1321 help="The HTCondor delete command ['condor_rm']")
1322 help="The HTCondor delete command ['condor_rm']")
1322 job_id_regexp = CRegExp(r'(\d+)\.$', config=True,
1323 job_id_regexp = CRegExp(r'(\d+)\.$', config=True,
1323 help="Regular expression for identifying the job ID [r'(\d+)\.$']")
1324 help="Regular expression for identifying the job ID [r'(\d+)\.$']")
1324 job_id_regexp_group = Integer(1, config=True,
1325 job_id_regexp_group = Integer(1, config=True,
1325 help="""The group we wish to match in job_id_regexp [1]""")
1326 help="""The group we wish to match in job_id_regexp [1]""")
1326
1327
1327 job_array_regexp = CRegExp('queue\W+\$')
1328 job_array_regexp = CRegExp('queue\W+\$')
1328 job_array_template = Unicode('queue {n}')
1329 job_array_template = Unicode('queue {n}')
1329
1330
1330
1331
1331 def _insert_job_array_in_script(self):
1332 def _insert_job_array_in_script(self):
1332 """Inserts a job array if required into the batch script.
1333 """Inserts a job array if required into the batch script.
1333 """
1334 """
1334 if not self.job_array_regexp.search(self.batch_template):
1335 if not self.job_array_regexp.search(self.batch_template):
1335 self.log.debug("adding job array settings to batch script")
1336 self.log.debug("adding job array settings to batch script")
1336 #HTCondor requires that the job array goes at the bottom of the script
1337 #HTCondor requires that the job array goes at the bottom of the script
1337 self.batch_template = '\n'.join([self.batch_template,
1338 self.batch_template = '\n'.join([self.batch_template,
1338 self.job_array_template])
1339 self.job_array_template])
1339
1340
1340 def _insert_queue_in_script(self):
1341 def _insert_queue_in_script(self):
1341 """AFAIK, HTCondor doesn't have a concept of multiple queues that can be
1342 """AFAIK, HTCondor doesn't have a concept of multiple queues that can be
1342 specified in the script.
1343 specified in the script.
1343 """
1344 """
1344 pass
1345 pass
1345
1346
1346
1347
1347 class HTCondorControllerLauncher(HTCondorLauncher, BatchClusterAppMixin):
1348 class HTCondorControllerLauncher(HTCondorLauncher, BatchClusterAppMixin):
1348 """Launch a controller using HTCondor."""
1349 """Launch a controller using HTCondor."""
1349
1350
1350 batch_file_name = Unicode(u'htcondor_controller', config=True,
1351 batch_file_name = Unicode(u'htcondor_controller', config=True,
1351 help="batch file name for the controller job.")
1352 help="batch file name for the controller job.")
1352 default_template = Unicode(r"""
1353 default_template = Unicode(r"""
1353 universe = vanilla
1354 universe = vanilla
1354 executable = ipcontroller
1355 executable = ipcontroller
1355 # by default we expect a shared file system
1356 # by default we expect a shared file system
1356 transfer_executable = False
1357 transfer_executable = False
1357 arguments = --log-to-file '--profile-dir={profile_dir}' --cluster-id='{cluster_id}'
1358 arguments = --log-to-file '--profile-dir={profile_dir}' --cluster-id='{cluster_id}'
1358 """)
1359 """)
1359
1360
1360 def start(self):
1361 def start(self):
1361 """Start the controller by profile or profile_dir."""
1362 """Start the controller by profile or profile_dir."""
1362 return super(HTCondorControllerLauncher, self).start(1)
1363 return super(HTCondorControllerLauncher, self).start(1)
1363
1364
1364
1365
1365 class HTCondorEngineSetLauncher(HTCondorLauncher, BatchClusterAppMixin):
1366 class HTCondorEngineSetLauncher(HTCondorLauncher, BatchClusterAppMixin):
1366 """Launch Engines using HTCondor"""
1367 """Launch Engines using HTCondor"""
1367 batch_file_name = Unicode(u'htcondor_engines', config=True,
1368 batch_file_name = Unicode(u'htcondor_engines', config=True,
1368 help="batch file name for the engine(s) job.")
1369 help="batch file name for the engine(s) job.")
1369 default_template = Unicode("""
1370 default_template = Unicode("""
1370 universe = vanilla
1371 universe = vanilla
1371 executable = ipengine
1372 executable = ipengine
1372 # by default we expect a shared file system
1373 # by default we expect a shared file system
1373 transfer_executable = False
1374 transfer_executable = False
1374 arguments = "--log-to-file '--profile-dir={profile_dir}' '--cluster-id={cluster_id}'"
1375 arguments = "--log-to-file '--profile-dir={profile_dir}' '--cluster-id={cluster_id}'"
1375 """)
1376 """)
1376
1377
1377
1378
1378 #-----------------------------------------------------------------------------
1379 #-----------------------------------------------------------------------------
1379 # A launcher for ipcluster itself!
1380 # A launcher for ipcluster itself!
1380 #-----------------------------------------------------------------------------
1381 #-----------------------------------------------------------------------------
1381
1382
1382
1383
1383 class IPClusterLauncher(LocalProcessLauncher):
1384 class IPClusterLauncher(LocalProcessLauncher):
1384 """Launch the ipcluster program in an external process."""
1385 """Launch the ipcluster program in an external process."""
1385
1386
1386 ipcluster_cmd = List(ipcluster_cmd_argv, config=True,
1387 ipcluster_cmd = List(ipcluster_cmd_argv, config=True,
1387 help="Popen command for ipcluster")
1388 help="Popen command for ipcluster")
1388 ipcluster_args = List(
1389 ipcluster_args = List(
1389 ['--clean-logs=True', '--log-to-file', '--log-level=%i'%logging.INFO], config=True,
1390 ['--clean-logs=True', '--log-to-file', '--log-level=%i'%logging.INFO], config=True,
1390 help="Command line arguments to pass to ipcluster.")
1391 help="Command line arguments to pass to ipcluster.")
1391 ipcluster_subcommand = Unicode('start')
1392 ipcluster_subcommand = Unicode('start')
1392 profile = Unicode('default')
1393 profile = Unicode('default')
1393 n = Integer(2)
1394 n = Integer(2)
1394
1395
1395 def find_args(self):
1396 def find_args(self):
1396 return self.ipcluster_cmd + [self.ipcluster_subcommand] + \
1397 return self.ipcluster_cmd + [self.ipcluster_subcommand] + \
1397 ['--n=%i'%self.n, '--profile=%s'%self.profile] + \
1398 ['--n=%i'%self.n, '--profile=%s'%self.profile] + \
1398 self.ipcluster_args
1399 self.ipcluster_args
1399
1400
1400 def start(self):
1401 def start(self):
1401 return super(IPClusterLauncher, self).start()
1402 return super(IPClusterLauncher, self).start()
1402
1403
1403 #-----------------------------------------------------------------------------
1404 #-----------------------------------------------------------------------------
1404 # Collections of launchers
1405 # Collections of launchers
1405 #-----------------------------------------------------------------------------
1406 #-----------------------------------------------------------------------------
1406
1407
1407 local_launchers = [
1408 local_launchers = [
1408 LocalControllerLauncher,
1409 LocalControllerLauncher,
1409 LocalEngineLauncher,
1410 LocalEngineLauncher,
1410 LocalEngineSetLauncher,
1411 LocalEngineSetLauncher,
1411 ]
1412 ]
1412 mpi_launchers = [
1413 mpi_launchers = [
1413 MPILauncher,
1414 MPILauncher,
1414 MPIControllerLauncher,
1415 MPIControllerLauncher,
1415 MPIEngineSetLauncher,
1416 MPIEngineSetLauncher,
1416 ]
1417 ]
1417 ssh_launchers = [
1418 ssh_launchers = [
1418 SSHLauncher,
1419 SSHLauncher,
1419 SSHControllerLauncher,
1420 SSHControllerLauncher,
1420 SSHEngineLauncher,
1421 SSHEngineLauncher,
1421 SSHEngineSetLauncher,
1422 SSHEngineSetLauncher,
1422 SSHProxyEngineSetLauncher,
1423 SSHProxyEngineSetLauncher,
1423 ]
1424 ]
1424 winhpc_launchers = [
1425 winhpc_launchers = [
1425 WindowsHPCLauncher,
1426 WindowsHPCLauncher,
1426 WindowsHPCControllerLauncher,
1427 WindowsHPCControllerLauncher,
1427 WindowsHPCEngineSetLauncher,
1428 WindowsHPCEngineSetLauncher,
1428 ]
1429 ]
1429 pbs_launchers = [
1430 pbs_launchers = [
1430 PBSLauncher,
1431 PBSLauncher,
1431 PBSControllerLauncher,
1432 PBSControllerLauncher,
1432 PBSEngineSetLauncher,
1433 PBSEngineSetLauncher,
1433 ]
1434 ]
1434 sge_launchers = [
1435 sge_launchers = [
1435 SGELauncher,
1436 SGELauncher,
1436 SGEControllerLauncher,
1437 SGEControllerLauncher,
1437 SGEEngineSetLauncher,
1438 SGEEngineSetLauncher,
1438 ]
1439 ]
1439 lsf_launchers = [
1440 lsf_launchers = [
1440 LSFLauncher,
1441 LSFLauncher,
1441 LSFControllerLauncher,
1442 LSFControllerLauncher,
1442 LSFEngineSetLauncher,
1443 LSFEngineSetLauncher,
1443 ]
1444 ]
1444 htcondor_launchers = [
1445 htcondor_launchers = [
1445 HTCondorLauncher,
1446 HTCondorLauncher,
1446 HTCondorControllerLauncher,
1447 HTCondorControllerLauncher,
1447 HTCondorEngineSetLauncher,
1448 HTCondorEngineSetLauncher,
1448 ]
1449 ]
1449 all_launchers = local_launchers + mpi_launchers + ssh_launchers + winhpc_launchers\
1450 all_launchers = local_launchers + mpi_launchers + ssh_launchers + winhpc_launchers\
1450 + pbs_launchers + sge_launchers + lsf_launchers + htcondor_launchers
1451 + pbs_launchers + sge_launchers + lsf_launchers + htcondor_launchers
@@ -1,835 +1,834 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """Sphinx directive to support embedded IPython code.
2 """Sphinx directive to support embedded IPython code.
3
3
4 This directive allows pasting of entire interactive IPython sessions, prompts
4 This directive allows pasting of entire interactive IPython sessions, prompts
5 and all, and their code will actually get re-executed at doc build time, with
5 and all, and their code will actually get re-executed at doc build time, with
6 all prompts renumbered sequentially. It also allows you to input code as a pure
6 all prompts renumbered sequentially. It also allows you to input code as a pure
7 python input by giving the argument python to the directive. The output looks
7 python input by giving the argument python to the directive. The output looks
8 like an interactive ipython section.
8 like an interactive ipython section.
9
9
10 To enable this directive, simply list it in your Sphinx ``conf.py`` file
10 To enable this directive, simply list it in your Sphinx ``conf.py`` file
11 (making sure the directory where you placed it is visible to sphinx, as is
11 (making sure the directory where you placed it is visible to sphinx, as is
12 needed for all Sphinx directives).
12 needed for all Sphinx directives).
13
13
14 By default this directive assumes that your prompts are unchanged IPython ones,
14 By default this directive assumes that your prompts are unchanged IPython ones,
15 but this can be customized. The configurable options that can be placed in
15 but this can be customized. The configurable options that can be placed in
16 conf.py are
16 conf.py are
17
17
18 ipython_savefig_dir:
18 ipython_savefig_dir:
19 The directory in which to save the figures. This is relative to the
19 The directory in which to save the figures. This is relative to the
20 Sphinx source directory. The default is `html_static_path`.
20 Sphinx source directory. The default is `html_static_path`.
21 ipython_rgxin:
21 ipython_rgxin:
22 The compiled regular expression to denote the start of IPython input
22 The compiled regular expression to denote the start of IPython input
23 lines. The default is re.compile('In \[(\d+)\]:\s?(.*)\s*'). You
23 lines. The default is re.compile('In \[(\d+)\]:\s?(.*)\s*'). You
24 shouldn't need to change this.
24 shouldn't need to change this.
25 ipython_rgxout:
25 ipython_rgxout:
26 The compiled regular expression to denote the start of IPython output
26 The compiled regular expression to denote the start of IPython output
27 lines. The default is re.compile('Out\[(\d+)\]:\s?(.*)\s*'). You
27 lines. The default is re.compile('Out\[(\d+)\]:\s?(.*)\s*'). You
28 shouldn't need to change this.
28 shouldn't need to change this.
29 ipython_promptin:
29 ipython_promptin:
30 The string to represent the IPython input prompt in the generated ReST.
30 The string to represent the IPython input prompt in the generated ReST.
31 The default is 'In [%d]:'. This expects that the line numbers are used
31 The default is 'In [%d]:'. This expects that the line numbers are used
32 in the prompt.
32 in the prompt.
33 ipython_promptout:
33 ipython_promptout:
34
35 The string to represent the IPython prompt in the generated ReST. The
34 The string to represent the IPython prompt in the generated ReST. The
36 default is 'Out [%d]:'. This expects that the line numbers are used
35 default is 'Out [%d]:'. This expects that the line numbers are used
37 in the prompt.
36 in the prompt.
38
37
39 ToDo
38 ToDo
40 ----
39 ----
41
40
42 - Turn the ad-hoc test() function into a real test suite.
41 - Turn the ad-hoc test() function into a real test suite.
43 - Break up ipython-specific functionality from matplotlib stuff into better
42 - Break up ipython-specific functionality from matplotlib stuff into better
44 separated code.
43 separated code.
45
44
46 Authors
45 Authors
47 -------
46 -------
48
47
49 - John D Hunter: orignal author.
48 - John D Hunter: orignal author.
50 - Fernando Perez: refactoring, documentation, cleanups, port to 0.11.
49 - Fernando Perez: refactoring, documentation, cleanups, port to 0.11.
51 - VΓ‘clavΕ milauer <eudoxos-AT-arcig.cz>: Prompt generalizations.
50 - VΓ‘clavΕ milauer <eudoxos-AT-arcig.cz>: Prompt generalizations.
52 - Skipper Seabold, refactoring, cleanups, pure python addition
51 - Skipper Seabold, refactoring, cleanups, pure python addition
53 """
52 """
54 from __future__ import print_function
53 from __future__ import print_function
55
54
56 #-----------------------------------------------------------------------------
55 #-----------------------------------------------------------------------------
57 # Imports
56 # Imports
58 #-----------------------------------------------------------------------------
57 #-----------------------------------------------------------------------------
59
58
60 # Stdlib
59 # Stdlib
61 import os
60 import os
62 import re
61 import re
63 import sys
62 import sys
64 import tempfile
63 import tempfile
65 import ast
64 import ast
66
65
67 # To keep compatibility with various python versions
66 # To keep compatibility with various python versions
68 try:
67 try:
69 from hashlib import md5
68 from hashlib import md5
70 except ImportError:
69 except ImportError:
71 from md5 import md5
70 from md5 import md5
72
71
73 # Third-party
72 # Third-party
74 import matplotlib
73 import matplotlib
75 import sphinx
74 import sphinx
76 from docutils.parsers.rst import directives
75 from docutils.parsers.rst import directives
77 from docutils import nodes
76 from docutils import nodes
78 from sphinx.util.compat import Directive
77 from sphinx.util.compat import Directive
79
78
80 matplotlib.use('Agg')
79 matplotlib.use('Agg')
81
80
82 # Our own
81 # Our own
83 from IPython import Config, InteractiveShell
82 from IPython import Config, InteractiveShell
84 from IPython.core.profiledir import ProfileDir
83 from IPython.core.profiledir import ProfileDir
85 from IPython.utils import io
84 from IPython.utils import io
86 from IPython.utils.py3compat import PY3
85 from IPython.utils.py3compat import PY3
87
86
88 if PY3:
87 if PY3:
89 from io import StringIO
88 from io import StringIO
90 else:
89 else:
91 from StringIO import StringIO
90 from StringIO import StringIO
92
91
93 #-----------------------------------------------------------------------------
92 #-----------------------------------------------------------------------------
94 # Globals
93 # Globals
95 #-----------------------------------------------------------------------------
94 #-----------------------------------------------------------------------------
96 # for tokenizing blocks
95 # for tokenizing blocks
97 COMMENT, INPUT, OUTPUT = range(3)
96 COMMENT, INPUT, OUTPUT = range(3)
98
97
99 #-----------------------------------------------------------------------------
98 #-----------------------------------------------------------------------------
100 # Functions and class declarations
99 # Functions and class declarations
101 #-----------------------------------------------------------------------------
100 #-----------------------------------------------------------------------------
102 def block_parser(part, rgxin, rgxout, fmtin, fmtout):
101 def block_parser(part, rgxin, rgxout, fmtin, fmtout):
103 """
102 """
104 part is a string of ipython text, comprised of at most one
103 part is a string of ipython text, comprised of at most one
105 input, one ouput, comments, and blank lines. The block parser
104 input, one ouput, comments, and blank lines. The block parser
106 parses the text into a list of::
105 parses the text into a list of::
107
106
108 blocks = [ (TOKEN0, data0), (TOKEN1, data1), ...]
107 blocks = [ (TOKEN0, data0), (TOKEN1, data1), ...]
109
108
110 where TOKEN is one of [COMMENT | INPUT | OUTPUT ] and
109 where TOKEN is one of [COMMENT | INPUT | OUTPUT ] and
111 data is, depending on the type of token::
110 data is, depending on the type of token::
112
111
113 COMMENT : the comment string
112 COMMENT : the comment string
114
113
115 INPUT: the (DECORATOR, INPUT_LINE, REST) where
114 INPUT: the (DECORATOR, INPUT_LINE, REST) where
116 DECORATOR: the input decorator (or None)
115 DECORATOR: the input decorator (or None)
117 INPUT_LINE: the input as string (possibly multi-line)
116 INPUT_LINE: the input as string (possibly multi-line)
118 REST : any stdout generated by the input line (not OUTPUT)
117 REST : any stdout generated by the input line (not OUTPUT)
119
118
120
119
121 OUTPUT: the output string, possibly multi-line
120 OUTPUT: the output string, possibly multi-line
122 """
121 """
123
122
124 block = []
123 block = []
125 lines = part.split('\n')
124 lines = part.split('\n')
126 N = len(lines)
125 N = len(lines)
127 i = 0
126 i = 0
128 decorator = None
127 decorator = None
129 while 1:
128 while 1:
130
129
131 if i==N:
130 if i==N:
132 # nothing left to parse -- the last line
131 # nothing left to parse -- the last line
133 break
132 break
134
133
135 line = lines[i]
134 line = lines[i]
136 i += 1
135 i += 1
137 line_stripped = line.strip()
136 line_stripped = line.strip()
138 if line_stripped.startswith('#'):
137 if line_stripped.startswith('#'):
139 block.append((COMMENT, line))
138 block.append((COMMENT, line))
140 continue
139 continue
141
140
142 if line_stripped.startswith('@'):
141 if line_stripped.startswith('@'):
143 # we're assuming at most one decorator -- may need to
142 # we're assuming at most one decorator -- may need to
144 # rethink
143 # rethink
145 decorator = line_stripped
144 decorator = line_stripped
146 continue
145 continue
147
146
148 # does this look like an input line?
147 # does this look like an input line?
149 matchin = rgxin.match(line)
148 matchin = rgxin.match(line)
150 if matchin:
149 if matchin:
151 lineno, inputline = int(matchin.group(1)), matchin.group(2)
150 lineno, inputline = int(matchin.group(1)), matchin.group(2)
152
151
153 # the ....: continuation string
152 # the ....: continuation string
154 continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2))
153 continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2))
155 Nc = len(continuation)
154 Nc = len(continuation)
156 # input lines can continue on for more than one line, if
155 # input lines can continue on for more than one line, if
157 # we have a '\' line continuation char or a function call
156 # we have a '\' line continuation char or a function call
158 # echo line 'print'. The input line can only be
157 # echo line 'print'. The input line can only be
159 # terminated by the end of the block or an output line, so
158 # terminated by the end of the block or an output line, so
160 # we parse out the rest of the input line if it is
159 # we parse out the rest of the input line if it is
161 # multiline as well as any echo text
160 # multiline as well as any echo text
162
161
163 rest = []
162 rest = []
164 while i<N:
163 while i<N:
165
164
166 # look ahead; if the next line is blank, or a comment, or
165 # look ahead; if the next line is blank, or a comment, or
167 # an output line, we're done
166 # an output line, we're done
168
167
169 nextline = lines[i]
168 nextline = lines[i]
170 matchout = rgxout.match(nextline)
169 matchout = rgxout.match(nextline)
171 #print "nextline=%s, continuation=%s, starts=%s"%(nextline, continuation, nextline.startswith(continuation))
170 #print "nextline=%s, continuation=%s, starts=%s"%(nextline, continuation, nextline.startswith(continuation))
172 if matchout or nextline.startswith('#'):
171 if matchout or nextline.startswith('#'):
173 break
172 break
174 elif nextline.startswith(continuation):
173 elif nextline.startswith(continuation):
175 inputline += '\n' + nextline[Nc:]
174 inputline += '\n' + nextline[Nc:]
176 else:
175 else:
177 rest.append(nextline)
176 rest.append(nextline)
178 i+= 1
177 i+= 1
179
178
180 block.append((INPUT, (decorator, inputline, '\n'.join(rest))))
179 block.append((INPUT, (decorator, inputline, '\n'.join(rest))))
181 continue
180 continue
182
181
183 # if it looks like an output line grab all the text to the end
182 # if it looks like an output line grab all the text to the end
184 # of the block
183 # of the block
185 matchout = rgxout.match(line)
184 matchout = rgxout.match(line)
186 if matchout:
185 if matchout:
187 lineno, output = int(matchout.group(1)), matchout.group(2)
186 lineno, output = int(matchout.group(1)), matchout.group(2)
188 if i<N-1:
187 if i<N-1:
189 output = '\n'.join([output] + lines[i:])
188 output = '\n'.join([output] + lines[i:])
190
189
191 block.append((OUTPUT, output))
190 block.append((OUTPUT, output))
192 break
191 break
193
192
194 return block
193 return block
195
194
196 class EmbeddedSphinxShell(object):
195 class EmbeddedSphinxShell(object):
197 """An embedded IPython instance to run inside Sphinx"""
196 """An embedded IPython instance to run inside Sphinx"""
198
197
199 def __init__(self):
198 def __init__(self):
200
199
201 self.cout = StringIO()
200 self.cout = StringIO()
202
201
203
202
204 # Create config object for IPython
203 # Create config object for IPython
205 config = Config()
204 config = Config()
206 config.Global.display_banner = False
205 config.Global.display_banner = False
207 config.Global.exec_lines = ['import numpy as np',
206 config.Global.exec_lines = ['import numpy as np',
208 'from pylab import *'
207 'from pylab import *'
209 ]
208 ]
210 config.InteractiveShell.autocall = False
209 config.InteractiveShell.autocall = False
211 config.InteractiveShell.autoindent = False
210 config.InteractiveShell.autoindent = False
212 config.InteractiveShell.colors = 'NoColor'
211 config.InteractiveShell.colors = 'NoColor'
213
212
214 # create a profile so instance history isn't saved
213 # create a profile so instance history isn't saved
215 tmp_profile_dir = tempfile.mkdtemp(prefix='profile_')
214 tmp_profile_dir = tempfile.mkdtemp(prefix='profile_')
216 profname = 'auto_profile_sphinx_build'
215 profname = 'auto_profile_sphinx_build'
217 pdir = os.path.join(tmp_profile_dir,profname)
216 pdir = os.path.join(tmp_profile_dir,profname)
218 profile = ProfileDir.create_profile_dir(pdir)
217 profile = ProfileDir.create_profile_dir(pdir)
219
218
220 # Create and initialize ipython, but don't start its mainloop
219 # Create and initialize ipython, but don't start its mainloop
221 IP = InteractiveShell.instance(config=config, profile_dir=profile)
220 IP = InteractiveShell.instance(config=config, profile_dir=profile)
222 # io.stdout redirect must be done *after* instantiating InteractiveShell
221 # io.stdout redirect must be done *after* instantiating InteractiveShell
223 io.stdout = self.cout
222 io.stdout = self.cout
224 io.stderr = self.cout
223 io.stderr = self.cout
225
224
226 # For debugging, so we can see normal output, use this:
225 # For debugging, so we can see normal output, use this:
227 #from IPython.utils.io import Tee
226 #from IPython.utils.io import Tee
228 #io.stdout = Tee(self.cout, channel='stdout') # dbg
227 #io.stdout = Tee(self.cout, channel='stdout') # dbg
229 #io.stderr = Tee(self.cout, channel='stderr') # dbg
228 #io.stderr = Tee(self.cout, channel='stderr') # dbg
230
229
231 # Store a few parts of IPython we'll need.
230 # Store a few parts of IPython we'll need.
232 self.IP = IP
231 self.IP = IP
233 self.user_ns = self.IP.user_ns
232 self.user_ns = self.IP.user_ns
234 self.user_global_ns = self.IP.user_global_ns
233 self.user_global_ns = self.IP.user_global_ns
235
234
236 self.input = ''
235 self.input = ''
237 self.output = ''
236 self.output = ''
238
237
239 self.is_verbatim = False
238 self.is_verbatim = False
240 self.is_doctest = False
239 self.is_doctest = False
241 self.is_suppress = False
240 self.is_suppress = False
242
241
243 # on the first call to the savefig decorator, we'll import
242 # on the first call to the savefig decorator, we'll import
244 # pyplot as plt so we can make a call to the plt.gcf().savefig
243 # pyplot as plt so we can make a call to the plt.gcf().savefig
245 self._pyplot_imported = False
244 self._pyplot_imported = False
246
245
247 def clear_cout(self):
246 def clear_cout(self):
248 self.cout.seek(0)
247 self.cout.seek(0)
249 self.cout.truncate(0)
248 self.cout.truncate(0)
250
249
251 def process_input_line(self, line, store_history=True):
250 def process_input_line(self, line, store_history=True):
252 """process the input, capturing stdout"""
251 """process the input, capturing stdout"""
253 #print "input='%s'"%self.input
252 #print "input='%s'"%self.input
254 stdout = sys.stdout
253 stdout = sys.stdout
255 splitter = self.IP.input_splitter
254 splitter = self.IP.input_splitter
256 try:
255 try:
257 sys.stdout = self.cout
256 sys.stdout = self.cout
258 splitter.push(line)
257 splitter.push(line)
259 more = splitter.push_accepts_more()
258 more = splitter.push_accepts_more()
260 if not more:
259 if not more:
261 source_raw = splitter.source_raw_reset()[1]
260 source_raw = splitter.source_raw_reset()[1]
262 self.IP.run_cell(source_raw, store_history=store_history)
261 self.IP.run_cell(source_raw, store_history=store_history)
263 finally:
262 finally:
264 sys.stdout = stdout
263 sys.stdout = stdout
265
264
266 def process_image(self, decorator):
265 def process_image(self, decorator):
267 """
266 """
268 # build out an image directive like
267 # build out an image directive like
269 # .. image:: somefile.png
268 # .. image:: somefile.png
270 # :width 4in
269 # :width 4in
271 #
270 #
272 # from an input like
271 # from an input like
273 # savefig somefile.png width=4in
272 # savefig somefile.png width=4in
274 """
273 """
275 savefig_dir = self.savefig_dir
274 savefig_dir = self.savefig_dir
276 source_dir = self.source_dir
275 source_dir = self.source_dir
277 saveargs = decorator.split(' ')
276 saveargs = decorator.split(' ')
278 filename = saveargs[1]
277 filename = saveargs[1]
279 # insert relative path to image file in source
278 # insert relative path to image file in source
280 outfile = os.path.relpath(os.path.join(savefig_dir,filename),
279 outfile = os.path.relpath(os.path.join(savefig_dir,filename),
281 source_dir)
280 source_dir)
282
281
283 imagerows = ['.. image:: %s'%outfile]
282 imagerows = ['.. image:: %s'%outfile]
284
283
285 for kwarg in saveargs[2:]:
284 for kwarg in saveargs[2:]:
286 arg, val = kwarg.split('=')
285 arg, val = kwarg.split('=')
287 arg = arg.strip()
286 arg = arg.strip()
288 val = val.strip()
287 val = val.strip()
289 imagerows.append(' :%s: %s'%(arg, val))
288 imagerows.append(' :%s: %s'%(arg, val))
290
289
291 image_file = os.path.basename(outfile) # only return file name
290 image_file = os.path.basename(outfile) # only return file name
292 image_directive = '\n'.join(imagerows)
291 image_directive = '\n'.join(imagerows)
293 return image_file, image_directive
292 return image_file, image_directive
294
293
295
294
296 # Callbacks for each type of token
295 # Callbacks for each type of token
297 def process_input(self, data, input_prompt, lineno):
296 def process_input(self, data, input_prompt, lineno):
298 """Process data block for INPUT token."""
297 """Process data block for INPUT token."""
299 decorator, input, rest = data
298 decorator, input, rest = data
300 image_file = None
299 image_file = None
301 image_directive = None
300 image_directive = None
302 #print 'INPUT:', data # dbg
301 #print 'INPUT:', data # dbg
303 is_verbatim = decorator=='@verbatim' or self.is_verbatim
302 is_verbatim = decorator=='@verbatim' or self.is_verbatim
304 is_doctest = decorator=='@doctest' or self.is_doctest
303 is_doctest = decorator=='@doctest' or self.is_doctest
305 is_suppress = decorator=='@suppress' or self.is_suppress
304 is_suppress = decorator=='@suppress' or self.is_suppress
306 is_savefig = decorator is not None and \
305 is_savefig = decorator is not None and \
307 decorator.startswith('@savefig')
306 decorator.startswith('@savefig')
308
307
309 input_lines = input.split('\n')
308 input_lines = input.split('\n')
310 if len(input_lines) > 1:
309 if len(input_lines) > 1:
311 if input_lines[-1] != "":
310 if input_lines[-1] != "":
312 input_lines.append('') # make sure there's a blank line
311 input_lines.append('') # make sure there's a blank line
313 # so splitter buffer gets reset
312 # so splitter buffer gets reset
314
313
315 continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2))
314 continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2))
316 Nc = len(continuation)
315 Nc = len(continuation)
317
316
318 if is_savefig:
317 if is_savefig:
319 image_file, image_directive = self.process_image(decorator)
318 image_file, image_directive = self.process_image(decorator)
320
319
321 ret = []
320 ret = []
322 is_semicolon = False
321 is_semicolon = False
323
322
324 for i, line in enumerate(input_lines):
323 for i, line in enumerate(input_lines):
325 if line.endswith(';'):
324 if line.endswith(';'):
326 is_semicolon = True
325 is_semicolon = True
327
326
328 if i==0:
327 if i==0:
329 # process the first input line
328 # process the first input line
330 if is_verbatim:
329 if is_verbatim:
331 self.process_input_line('')
330 self.process_input_line('')
332 self.IP.execution_count += 1 # increment it anyway
331 self.IP.execution_count += 1 # increment it anyway
333 else:
332 else:
334 # only submit the line in non-verbatim mode
333 # only submit the line in non-verbatim mode
335 self.process_input_line(line, store_history=True)
334 self.process_input_line(line, store_history=True)
336 formatted_line = '%s %s'%(input_prompt, line)
335 formatted_line = '%s %s'%(input_prompt, line)
337 else:
336 else:
338 # process a continuation line
337 # process a continuation line
339 if not is_verbatim:
338 if not is_verbatim:
340 self.process_input_line(line, store_history=True)
339 self.process_input_line(line, store_history=True)
341
340
342 formatted_line = '%s %s'%(continuation, line)
341 formatted_line = '%s %s'%(continuation, line)
343
342
344 if not is_suppress:
343 if not is_suppress:
345 ret.append(formatted_line)
344 ret.append(formatted_line)
346
345
347 if not is_suppress and len(rest.strip()) and is_verbatim:
346 if not is_suppress and len(rest.strip()) and is_verbatim:
348 # the "rest" is the standard output of the
347 # the "rest" is the standard output of the
349 # input, which needs to be added in
348 # input, which needs to be added in
350 # verbatim mode
349 # verbatim mode
351 ret.append(rest)
350 ret.append(rest)
352
351
353 self.cout.seek(0)
352 self.cout.seek(0)
354 output = self.cout.read()
353 output = self.cout.read()
355 if not is_suppress and not is_semicolon:
354 if not is_suppress and not is_semicolon:
356 ret.append(output)
355 ret.append(output)
357 elif is_semicolon: # get spacing right
356 elif is_semicolon: # get spacing right
358 ret.append('')
357 ret.append('')
359
358
360 self.cout.truncate(0)
359 self.cout.truncate(0)
361 return (ret, input_lines, output, is_doctest, image_file,
360 return (ret, input_lines, output, is_doctest, image_file,
362 image_directive)
361 image_directive)
363 #print 'OUTPUT', output # dbg
362 #print 'OUTPUT', output # dbg
364
363
365 def process_output(self, data, output_prompt,
364 def process_output(self, data, output_prompt,
366 input_lines, output, is_doctest, image_file):
365 input_lines, output, is_doctest, image_file):
367 """Process data block for OUTPUT token."""
366 """Process data block for OUTPUT token."""
368 if is_doctest:
367 if is_doctest:
369 submitted = data.strip()
368 submitted = data.strip()
370 found = output
369 found = output
371 if found is not None:
370 if found is not None:
372 found = found.strip()
371 found = found.strip()
373
372
374 # XXX - fperez: in 0.11, 'output' never comes with the prompt
373 # XXX - fperez: in 0.11, 'output' never comes with the prompt
375 # in it, just the actual output text. So I think all this code
374 # in it, just the actual output text. So I think all this code
376 # can be nuked...
375 # can be nuked...
377
376
378 # the above comment does not appear to be accurate... (minrk)
377 # the above comment does not appear to be accurate... (minrk)
379
378
380 ind = found.find(output_prompt)
379 ind = found.find(output_prompt)
381 if ind<0:
380 if ind<0:
382 e='output prompt="%s" does not match out line=%s' % \
381 e='output prompt="%s" does not match out line=%s' % \
383 (output_prompt, found)
382 (output_prompt, found)
384 raise RuntimeError(e)
383 raise RuntimeError(e)
385 found = found[len(output_prompt):].strip()
384 found = found[len(output_prompt):].strip()
386
385
387 if found!=submitted:
386 if found!=submitted:
388 e = ('doctest failure for input_lines="%s" with '
387 e = ('doctest failure for input_lines="%s" with '
389 'found_output="%s" and submitted output="%s"' %
388 'found_output="%s" and submitted output="%s"' %
390 (input_lines, found, submitted) )
389 (input_lines, found, submitted) )
391 raise RuntimeError(e)
390 raise RuntimeError(e)
392 #print 'doctest PASSED for input_lines="%s" with found_output="%s" and submitted output="%s"'%(input_lines, found, submitted)
391 #print 'doctest PASSED for input_lines="%s" with found_output="%s" and submitted output="%s"'%(input_lines, found, submitted)
393
392
394 def process_comment(self, data):
393 def process_comment(self, data):
395 """Process data fPblock for COMMENT token."""
394 """Process data fPblock for COMMENT token."""
396 if not self.is_suppress:
395 if not self.is_suppress:
397 return [data]
396 return [data]
398
397
399 def save_image(self, image_file):
398 def save_image(self, image_file):
400 """
399 """
401 Saves the image file to disk.
400 Saves the image file to disk.
402 """
401 """
403 self.ensure_pyplot()
402 self.ensure_pyplot()
404 command = 'plt.gcf().savefig("%s")'%image_file
403 command = 'plt.gcf().savefig("%s")'%image_file
405 #print 'SAVEFIG', command # dbg
404 #print 'SAVEFIG', command # dbg
406 self.process_input_line('bookmark ipy_thisdir', store_history=False)
405 self.process_input_line('bookmark ipy_thisdir', store_history=False)
407 self.process_input_line('cd -b ipy_savedir', store_history=False)
406 self.process_input_line('cd -b ipy_savedir', store_history=False)
408 self.process_input_line(command, store_history=False)
407 self.process_input_line(command, store_history=False)
409 self.process_input_line('cd -b ipy_thisdir', store_history=False)
408 self.process_input_line('cd -b ipy_thisdir', store_history=False)
410 self.process_input_line('bookmark -d ipy_thisdir', store_history=False)
409 self.process_input_line('bookmark -d ipy_thisdir', store_history=False)
411 self.clear_cout()
410 self.clear_cout()
412
411
413
412
414 def process_block(self, block):
413 def process_block(self, block):
415 """
414 """
416 process block from the block_parser and return a list of processed lines
415 process block from the block_parser and return a list of processed lines
417 """
416 """
418 ret = []
417 ret = []
419 output = None
418 output = None
420 input_lines = None
419 input_lines = None
421 lineno = self.IP.execution_count
420 lineno = self.IP.execution_count
422
421
423 input_prompt = self.promptin%lineno
422 input_prompt = self.promptin%lineno
424 output_prompt = self.promptout%lineno
423 output_prompt = self.promptout%lineno
425 image_file = None
424 image_file = None
426 image_directive = None
425 image_directive = None
427
426
428 for token, data in block:
427 for token, data in block:
429 if token==COMMENT:
428 if token==COMMENT:
430 out_data = self.process_comment(data)
429 out_data = self.process_comment(data)
431 elif token==INPUT:
430 elif token==INPUT:
432 (out_data, input_lines, output, is_doctest, image_file,
431 (out_data, input_lines, output, is_doctest, image_file,
433 image_directive) = \
432 image_directive) = \
434 self.process_input(data, input_prompt, lineno)
433 self.process_input(data, input_prompt, lineno)
435 elif token==OUTPUT:
434 elif token==OUTPUT:
436 out_data = \
435 out_data = \
437 self.process_output(data, output_prompt,
436 self.process_output(data, output_prompt,
438 input_lines, output, is_doctest,
437 input_lines, output, is_doctest,
439 image_file)
438 image_file)
440 if out_data:
439 if out_data:
441 ret.extend(out_data)
440 ret.extend(out_data)
442
441
443 # save the image files
442 # save the image files
444 if image_file is not None:
443 if image_file is not None:
445 self.save_image(image_file)
444 self.save_image(image_file)
446
445
447 return ret, image_directive
446 return ret, image_directive
448
447
449 def ensure_pyplot(self):
448 def ensure_pyplot(self):
450 if self._pyplot_imported:
449 if self._pyplot_imported:
451 return
450 return
452 self.process_input_line('import matplotlib.pyplot as plt',
451 self.process_input_line('import matplotlib.pyplot as plt',
453 store_history=False)
452 store_history=False)
454
453
455 def process_pure_python(self, content):
454 def process_pure_python(self, content):
456 """
455 """
457 content is a list of strings. it is unedited directive conent
456 content is a list of strings. it is unedited directive conent
458
457
459 This runs it line by line in the InteractiveShell, prepends
458 This runs it line by line in the InteractiveShell, prepends
460 prompts as needed capturing stderr and stdout, then returns
459 prompts as needed capturing stderr and stdout, then returns
461 the content as a list as if it were ipython code
460 the content as a list as if it were ipython code
462 """
461 """
463 output = []
462 output = []
464 savefig = False # keep up with this to clear figure
463 savefig = False # keep up with this to clear figure
465 multiline = False # to handle line continuation
464 multiline = False # to handle line continuation
466 multiline_start = None
465 multiline_start = None
467 fmtin = self.promptin
466 fmtin = self.promptin
468
467
469 ct = 0
468 ct = 0
470
469
471 for lineno, line in enumerate(content):
470 for lineno, line in enumerate(content):
472
471
473 line_stripped = line.strip()
472 line_stripped = line.strip()
474 if not len(line):
473 if not len(line):
475 output.append(line)
474 output.append(line)
476 continue
475 continue
477
476
478 # handle decorators
477 # handle decorators
479 if line_stripped.startswith('@'):
478 if line_stripped.startswith('@'):
480 output.extend([line])
479 output.extend([line])
481 if 'savefig' in line:
480 if 'savefig' in line:
482 savefig = True # and need to clear figure
481 savefig = True # and need to clear figure
483 continue
482 continue
484
483
485 # handle comments
484 # handle comments
486 if line_stripped.startswith('#'):
485 if line_stripped.startswith('#'):
487 output.extend([line])
486 output.extend([line])
488 continue
487 continue
489
488
490 # deal with lines checking for multiline
489 # deal with lines checking for multiline
491 continuation = u' %s:'% ''.join(['.']*(len(str(ct))+2))
490 continuation = u' %s:'% ''.join(['.']*(len(str(ct))+2))
492 if not multiline:
491 if not multiline:
493 modified = u"%s %s" % (fmtin % ct, line_stripped)
492 modified = u"%s %s" % (fmtin % ct, line_stripped)
494 output.append(modified)
493 output.append(modified)
495 ct += 1
494 ct += 1
496 try:
495 try:
497 ast.parse(line_stripped)
496 ast.parse(line_stripped)
498 output.append(u'')
497 output.append(u'')
499 except Exception: # on a multiline
498 except Exception: # on a multiline
500 multiline = True
499 multiline = True
501 multiline_start = lineno
500 multiline_start = lineno
502 else: # still on a multiline
501 else: # still on a multiline
503 modified = u'%s %s' % (continuation, line)
502 modified = u'%s %s' % (continuation, line)
504 output.append(modified)
503 output.append(modified)
505
504
506 # if the next line is indented, it should be part of multiline
505 # if the next line is indented, it should be part of multiline
507 if len(content) > lineno + 1:
506 if len(content) > lineno + 1:
508 nextline = content[lineno + 1]
507 nextline = content[lineno + 1]
509 if len(nextline) - len(nextline.lstrip()) > 3:
508 if len(nextline) - len(nextline.lstrip()) > 3:
510 continue
509 continue
511 try:
510 try:
512 mod = ast.parse(
511 mod = ast.parse(
513 '\n'.join(content[multiline_start:lineno+1]))
512 '\n'.join(content[multiline_start:lineno+1]))
514 if isinstance(mod.body[0], ast.FunctionDef):
513 if isinstance(mod.body[0], ast.FunctionDef):
515 # check to see if we have the whole function
514 # check to see if we have the whole function
516 for element in mod.body[0].body:
515 for element in mod.body[0].body:
517 if isinstance(element, ast.Return):
516 if isinstance(element, ast.Return):
518 multiline = False
517 multiline = False
519 else:
518 else:
520 output.append(u'')
519 output.append(u'')
521 multiline = False
520 multiline = False
522 except Exception:
521 except Exception:
523 pass
522 pass
524
523
525 if savefig: # clear figure if plotted
524 if savefig: # clear figure if plotted
526 self.ensure_pyplot()
525 self.ensure_pyplot()
527 self.process_input_line('plt.clf()', store_history=False)
526 self.process_input_line('plt.clf()', store_history=False)
528 self.clear_cout()
527 self.clear_cout()
529 savefig = False
528 savefig = False
530
529
531 return output
530 return output
532
531
class IPythonDirective(Directive):
    """Sphinx directive that executes its content in an embedded IPython
    shell and splices the resulting transcript (and any saved figures)
    back into the document as a highlighted ``ipython`` code block.
    """

    has_content = True
    required_arguments = 0
    optional_arguments = 4 # python, suppress, verbatim, doctest
    # FIX: was misspelled 'final_argumuent_whitespace', which docutils
    # silently ignored, so the intended flag never took effect.
    final_argument_whitespace = True
    option_spec = { 'python': directives.unchanged,
                    'suppress' : directives.flag,
                    'verbatim' : directives.flag,
                    'doctest' : directives.flag,
                  }

    # Class-level, shared by all directive instances in one build:
    # a single embedded shell keeps execution counts consistent.
    shell = None

    # Source documents already processed in this build; used to decide
    # when to reset the shell's history/execution counter.
    seen_docs = set()

    def get_config_options(self):
        """Read the ipython_* settings from the Sphinx configuration.

        Returns
        -------
        tuple
            (savefig_dir, source_dir, rgxin, rgxout, promptin, promptout)
        """
        # contains sphinx configuration variables
        config = self.state.document.settings.env.config

        # get config variables to set figure output directory
        confdir = self.state.document.settings.env.app.confdir
        savefig_dir = config.ipython_savefig_dir
        source_dir = os.path.dirname(self.state.document.current_source)
        if savefig_dir is None:
            # fall back to the html static path when no explicit dir is set
            savefig_dir = config.html_static_path
        if isinstance(savefig_dir, list):
            savefig_dir = savefig_dir[0] # safe to assume only one path?
        savefig_dir = os.path.join(confdir, savefig_dir)

        # get regex and prompt stuff
        rgxin = config.ipython_rgxin
        rgxout = config.ipython_rgxout
        promptin = config.ipython_promptin
        promptout = config.ipython_promptout

        return savefig_dir, source_dir, rgxin, rgxout, promptin, promptout

    def setup(self):
        """Prepare the shared shell for this directive invocation.

        Lazily creates the embedded shell, resets history on the first
        block of each document, and attaches the per-run configuration to
        the shell.  Returns (rgxin, rgxout, promptin, promptout).
        """
        if self.shell is None:
            self.shell = EmbeddedSphinxShell()
        # reset the execution count if we haven't processed this doc
        #NOTE: this may be borked if there are multiple seen_doc tmp files
        #check time stamp?

        if not self.state.document.current_source in self.seen_docs:
            self.shell.IP.history_manager.reset()
            self.shell.IP.execution_count = 1
            self.seen_docs.add(self.state.document.current_source)

        # get config values
        (savefig_dir, source_dir, rgxin,
                rgxout, promptin, promptout) = self.get_config_options()

        # and attach to shell so we don't have to pass them around
        self.shell.rgxin = rgxin
        self.shell.rgxout = rgxout
        self.shell.promptin = promptin
        self.shell.promptout = promptout
        self.shell.savefig_dir = savefig_dir
        self.shell.source_dir = source_dir

        # setup bookmark for saving figures directory
        self.shell.process_input_line('bookmark ipy_savedir %s'%savefig_dir,
                                      store_history=False)
        self.shell.clear_cout()

        return rgxin, rgxout, promptin, promptout

    def teardown(self):
        """Undo setup(): remove the figure-directory bookmark."""
        # delete last bookmark
        self.shell.process_input_line('bookmark -d ipy_savedir',
                                      store_history=False)
        self.shell.clear_cout()

    def run(self):
        """Execute the directive body and insert the rendered transcript.

        Returns an empty node list; the generated reST is fed back through
        ``state_machine.insert_input`` instead of returned as nodes.
        """
        debug = False

        #TODO, any reason block_parser can't be a method of embeddable shell
        # then we wouldn't have to carry these around
        rgxin, rgxout, promptin, promptout = self.setup()

        options = self.options
        self.shell.is_suppress = 'suppress' in options
        self.shell.is_doctest = 'doctest' in options
        self.shell.is_verbatim = 'verbatim' in options

        # handle pure python code
        if 'python' in self.arguments:
            content = self.content
            self.content = self.shell.process_pure_python(content)

        parts = '\n'.join(self.content).split('\n\n')

        lines = ['.. code-block:: ipython','']
        figures = []

        for part in parts:

            block = block_parser(part, rgxin, rgxout, promptin, promptout)

            if len(block):
                rows, figure = self.shell.process_block(block)
                for row in rows:
                    # indent under the code-block directive
                    lines.extend(['   %s'%line for line in row.split('\n')])

                if figure is not None:
                    figures.append(figure)

        #text = '\n'.join(lines)
        #figs = '\n'.join(figures)

        for figure in figures:
            lines.append('')
            lines.extend(figure.split('\n'))
            lines.append('')

        #print lines
        if len(lines)>2:
            if debug:
                print('\n'.join(lines))
            else: #NOTE: this raises some errors, what's it for?
                #print 'INSERTING %d lines'%len(lines)
                self.state_machine.insert_input(
                    lines, self.state_machine.input_lines.source(0))

        text = '\n'.join(lines)
        txtnode = nodes.literal_block(text, text)
        txtnode['language'] = 'ipython'
        #imgnode = nodes.image(figs)

        # cleanup
        self.teardown()

        return []#, imgnode]
673
672
# Enable as a proper Sphinx directive
def setup(app):
    """Register the ``ipython`` directive and its config values with Sphinx.

    Parameters
    ----------
    app : sphinx.application.Sphinx
        The Sphinx application object passed in by the extension machinery.
    """
    setup.app = app

    app.add_directive('ipython', IPythonDirective)
    app.add_config_value('ipython_savefig_dir', None, True)
    # FIX: use raw strings for the regexes — '\[' and '\d' in a plain
    # string literal are invalid escape sequences (DeprecationWarning,
    # and a SyntaxError in future Pythons).
    app.add_config_value('ipython_rgxin',
                         re.compile(r'In \[(\d+)\]:\s?(.*)\s*'), True)
    app.add_config_value('ipython_rgxout',
                         re.compile(r'Out\[(\d+)\]:\s?(.*)\s*'), True)
    app.add_config_value('ipython_promptin', 'In [%d]:', True)
    app.add_config_value('ipython_promptout', 'Out[%d]:', True)
686
685
687
686
# Simple smoke test, needs to be converted to a proper automatic test.
def test():
    """Smoke-test the directive against a set of canned IPython sessions.

    Each example transcript below is fed to IPythonDirective directly;
    this only verifies that processing does not raise.  Saved figures end
    up in _static/ for manual inspection (see the __main__ block).
    """

    # Each entry is one raw-string transcript in the format the directive
    # parses: 'In [n]:' / 'Out[n]:' prompts plus @savefig/@doctest/
    # @verbatim/@suppress decorator lines.
    examples = [
        r"""
In [9]: pwd
Out[9]: '/home/jdhunter/py4science/book'

In [10]: cd bookdata/
/home/jdhunter/py4science/book/bookdata

In [2]: from pylab import *

In [2]: ion()

In [3]: im = imread('stinkbug.png')

@savefig mystinkbug.png width=4in
In [4]: imshow(im)
Out[4]: <matplotlib.image.AxesImage object at 0x39ea850>

""",
        r"""

In [1]: x = 'hello world'

# string methods can be
# used to alter the string
@doctest
In [2]: x.upper()
Out[2]: 'HELLO WORLD'

@verbatim
In [3]: x.st<TAB>
x.startswith  x.strip
""",
        r"""

In [130]: url = 'http://ichart.finance.yahoo.com/table.csv?s=CROX\
   .....: &d=9&e=22&f=2009&g=d&a=1&br=8&c=2006&ignore=.csv'

In [131]: print url.split('&')
['http://ichart.finance.yahoo.com/table.csv?s=CROX', 'd=9', 'e=22', 'f=2009', 'g=d', 'a=1', 'b=8', 'c=2006', 'ignore=.csv']

In [60]: import urllib

""",
        r"""\

In [133]: import numpy.random

@suppress
In [134]: numpy.random.seed(2358)

@doctest
In [135]: numpy.random.rand(10,2)
Out[135]:
array([[ 0.64524308,  0.59943846],
       [ 0.47102322,  0.8715456 ],
       [ 0.29370834,  0.74776844],
       [ 0.99539577,  0.1313423 ],
       [ 0.16250302,  0.21103583],
       [ 0.81626524,  0.1312433 ],
       [ 0.67338089,  0.72302393],
       [ 0.7566368 ,  0.07033696],
       [ 0.22591016,  0.77731835],
       [ 0.0072729 ,  0.34273127]])

""",

        r"""
In [106]: print x
jdh

In [109]: for i in range(10):
   .....:     print i
   .....:
   .....:
0
1
2
3
4
5
6
7
8
9
""",

        r"""

In [144]: from pylab import *

In [145]: ion()

# use a semicolon to suppress the output
@savefig test_hist.png width=4in
In [151]: hist(np.random.randn(10000), 100);


@savefig test_plot.png width=4in
In [151]: plot(np.random.randn(10000), 'o');
""",

        r"""
# use a semicolon to suppress the output
In [151]: plt.clf()

@savefig plot_simple.png width=4in
In [151]: plot([1,2,3])

@savefig hist_simple.png width=4in
In [151]: hist(np.random.randn(10000), 100);

""",
        r"""
# update the current fig
In [151]: ylabel('number')

In [152]: title('normal distribution')


@savefig hist_with_text.png
In [153]: grid(True)

""",
        ]
    # skip local-file depending first example:
    examples = examples[1:]

    #ipython_directive.DEBUG = True # dbg
    #options = dict(suppress=True) # dbg
    options = dict()
    for example in examples:
        content = example.split('\n')
        # Instantiating the directive runs the transcript through the
        # embedded shell; no return value is checked here.
        IPythonDirective('debug', arguments=None, options=options,
                         content=content, lineno=0,
                         content_offset=None, block_text=None,
                         state=None, state_machine=None,
                         )
829
828
# Run test suite as a script
if __name__=='__main__':
    # Figures produced by @savefig land under _static/, so make sure the
    # directory exists before running the smoke test.
    if not os.path.isdir('_static'):
        os.mkdir('_static')
    test()
    print('All OK? Check figures in _static/')
@@ -1,167 +1,168 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 """
2 """
3 Older utilities that are not being used.
3 Older utilities that are not being used.
4
4
5 WARNING: IF YOU NEED TO USE ONE OF THESE FUNCTIONS, PLEASE FIRST MOVE IT
5 WARNING: IF YOU NEED TO USE ONE OF THESE FUNCTIONS, PLEASE FIRST MOVE IT
6 TO ANOTHER APPROPRIATE MODULE IN IPython.utils.
6 TO ANOTHER APPROPRIATE MODULE IN IPython.utils.
7 """
7 """
8
8
9 #-----------------------------------------------------------------------------
9 #-----------------------------------------------------------------------------
10 # Copyright (C) 2008-2011 The IPython Development Team
10 # Copyright (C) 2008-2011 The IPython Development Team
11 #
11 #
12 # Distributed under the terms of the BSD License. The full license is in
12 # Distributed under the terms of the BSD License. The full license is in
13 # the file COPYING, distributed as part of this software.
13 # the file COPYING, distributed as part of this software.
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15
15
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17 # Imports
17 # Imports
18 #-----------------------------------------------------------------------------
18 #-----------------------------------------------------------------------------
19
19
20 import sys
20 import sys
21 import warnings
21 import warnings
22
22
23 from IPython.utils.warn import warn
23 from IPython.utils.warn import warn
24
24
25 #-----------------------------------------------------------------------------
25 #-----------------------------------------------------------------------------
26 # Code
26 # Code
27 #-----------------------------------------------------------------------------
27 #-----------------------------------------------------------------------------
28
28
29
29
def mutex_opts(dict,ex_op):
    """Verify that no mutually exclusive pair of keys appears in a dict.

    Call form: ``mutex_opts(dict, [[op1a,op1b], [op2a,op2b], ...])``.
    Raises ValueError as soon as both members of any pair are present;
    returns None otherwise.
    """
    for first, second in ex_op:
        both_present = first in dict and second in dict
        if both_present:
            message = ('\n*** ERROR in Arguments *** '
                       'Options %s and %s are mutually exclusive.'
                       % (first, second))
            raise ValueError(message)
38
38
39
39
class EvalDict:
    """
    Emulate a dict which evaluates its contents in the caller's frame.

    Usage:
    >>> number = 19

    >>> text = "python"

    >>> print("%(text.capitalize())s %(number/9.0).1f rules!" % EvalDict())
    Python 2.1 rules!
    """

    # This version is due to sismex01@hebmex.com on c.l.py, and is basically a
    # modified (shorter) version of:
    # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66018 by
    # Skip Montanaro (skip@pobox.com).

    def __getitem__(self, name):
        # The '%' formatting operator calls __getitem__ directly, so one
        # frame up (_getframe(1)) is the caller whose locals/globals the
        # expression should be evaluated against.
        # NOTE(review): eval() of arbitrary format-key text — never use
        # this with untrusted format strings.
        frame = sys._getframe(1)
        return eval(name, frame.f_globals, frame.f_locals)

# Old name kept as an alias so legacy callers keep working.
EvalString = EvalDict # for backwards compatibility
63
63
64
64
def all_belong(candidates,checklist):
    """Report whether every item of candidates occurs in checklist.

    Returns the historical int-style boolean: 1 when all candidates are
    present (vacuously true for an empty candidates list), 0 otherwise."""

    for item in candidates:
        if item not in checklist:
            return 0
    return 1
71
71
72
72
def belong(candidates,checklist):
    """Map each candidate to its membership in checklist.

    Returns one boolean flag per candidate, in order (the historical
    docstring described these as 1s and 0s; bools compare equal)."""

    flags = []
    for item in candidates:
        flags.append(item in checklist)
    return flags
79
79
80
80
def with_obj(object, **args):
    """Assign several attributes on *object* in one call (Pascal's ``with``).

    Example::

        with_obj(jim,
                 born = 1960,
                 haircolour = 'Brown',
                 eyecolour = 'Green')

    Credit: Greg Ewing, in
    http://mail.python.org/pipermail/python-list/2001-May/040703.html.

    NOTE: up until IPython 0.7.2, this was called simply 'with', but 'with'
    has become a keyword for Python 2.5, so we had to rename it."""

    # Write straight into the instance dict, exactly as the historical
    # implementation did (no __setattr__ hooks are triggered).
    instance_dict = object.__dict__
    instance_dict.update(args)
97
98
98
99
def map_method(method,object_list,*argseq,**kw):
    """map_method(method,object_list,*args,**kw) -> list

    Return a list of the results of applying the methods to the items of the
    argument sequence(s). If more than one sequence is given, the method is
    called with an argument list consisting of the corresponding item of each
    sequence. All sequences must be of the same length.

    Keyword arguments are passed verbatim to all objects called.

    Objects that do not provide *method* contribute None to the result list.

    This is Python code, so it's not nearly as fast as the builtin map()."""

    out_list = []
    for idx, obj in enumerate(object_list):
        try:
            handler = getattr(obj, method)
        except AttributeError:
            out_list.append(None)
        else:
            if argseq:
                # FIX: build a real list of per-call arguments.  The old
                # ``args = map(lambda lst: lst[idx], argseq)`` produced a
                # list on Python 2 but a one-shot lazy iterator on Python 3,
                # silently changing what the handler received.
                args = [seq[idx] for seq in argseq]
                #print 'ob',obj,'hand',handler,'ar',args # dbg
                out_list.append(handler(args,**kw))
            else:
                out_list.append(handler(**kw))
    return out_list
127
128
128
129
def import_fail_info(mod_name,fns=None):
    """Inform load failure for a module.

    Parameters
    ----------
    mod_name : str
        Name of the module whose import failed.
    fns : optional
        Specific names that failed to load from the module; when None the
        whole-module message is emitted instead.
    """

    # FIX: identity comparison with None ('is None'), not equality —
    # '== None' can be hijacked by a custom __eq__ and is unidiomatic.
    if fns is None:
        warn("Loading of %s failed." % (mod_name,))
    else:
        warn("Loading of %s from %s failed." % (fns,mod_name))
136
137
137
138
class NotGiven:
    # Sentinel type: marks "no default supplied" for popkey() so that
    # None remains a legal default value.
    pass

def popkey(dct,key,default=NotGiven):
    """Remove *key* from *dct* and return its value.

    When *key* is missing, return *default* if one was supplied; otherwise
    propagate the KeyError, exactly as ``dct[key]`` would."""

    if default is NotGiven:
        # No fallback requested: a missing key must raise KeyError.
        return dct.pop(key)
    return dct.pop(key, default)
156
157
157
158
def wrap_deprecated(func, suggest = '<nothing>'):
    """Return a wrapper that emits a DeprecationWarning before calling *func*.

    Parameters
    ----------
    func : callable
        The deprecated function to wrap.
    suggest : str
        Name of the replacement to mention in the warning message.

    Returns
    -------
    callable
        A wrapper with the same behavior as *func*.
    """
    # Local import keeps this file's top-level imports untouched.
    from functools import wraps

    @wraps(func)  # FIX: preserve __name__/__doc__ for introspection/pickling
    def newFunc(*args, **kwargs):
        warnings.warn("Call to deprecated function %s, use %s instead" %
                      ( func.__name__, suggest),
                      category=DeprecationWarning,
                      stacklevel = 2)
        return func(*args, **kwargs)
    return newFunc
166
167
167
168
General Comments 0
You need to be logged in to leave comments. Login now