@@ -1,206 +1,206 @@
|
1 | 1 | """Base class to manage the interaction with a running kernel |
|
2 | 2 | """ |
|
3 | 3 | |
|
4 | 4 | #----------------------------------------------------------------------------- |
|
5 | 5 | # Copyright (C) 2013 The IPython Development Team |
|
6 | 6 | # |
|
7 | 7 | # Distributed under the terms of the BSD License. The full license is in |
|
8 | 8 | # the file COPYING, distributed as part of this software. |
|
9 | 9 | #----------------------------------------------------------------------------- |
|
10 | 10 | |
|
11 | 11 | #----------------------------------------------------------------------------- |
|
12 | 12 | # Imports |
|
13 | 13 | #----------------------------------------------------------------------------- |
|
14 | 14 | |
|
15 | 15 | from __future__ import absolute_import |
|
16 | 16 | |
|
17 | 17 | import zmq |
|
18 | 18 | |
|
19 | 19 | # Local imports |
|
20 | 20 | from IPython.config.configurable import LoggingConfigurable |
|
21 | 21 | from IPython.utils.traitlets import ( |
|
22 | 22 | Any, Instance, Type, |
|
23 | 23 | ) |
|
24 | 24 | |
|
25 | 25 | from .zmq.session import Session |
|
26 | 26 | from .channels import ( |
|
27 | 27 | ShellChannel, IOPubChannel, |
|
28 | 28 | HBChannel, StdInChannel, |
|
29 | 29 | ) |
|
30 | 30 | from .clientabc import KernelClientABC |
|
31 | 31 | from .connect import ConnectionFileMixin |
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | #----------------------------------------------------------------------------- |
|
35 | 35 | # Main kernel client class |
|
36 | 36 | #----------------------------------------------------------------------------- |
|
37 | 37 | |
|
38 | 38 | class KernelClient(LoggingConfigurable, ConnectionFileMixin): |
|
39 | 39 | """Communicates with a single kernel on any host via zmq channels. |
|
40 | 40 | |
|
41 | 41 | There are four channels associated with each kernel: |
|
42 | 42 | |
|
43 | 43 | * shell: for request/reply calls to the kernel. |
|
44 | 44 | * iopub: for the kernel to publish results to frontends. |
|
45 | 45 | * hb: for monitoring the kernel's heartbeat. |
|
46 | 46 | * stdin: for frontends to reply to raw_input calls in the kernel. |
|
47 | 47 | |
|
48 | 48 | The methods of the channels are exposed as methods of the client itself |
|
49 | 49 | (KernelClient.execute, complete, history, etc.). |
|
50 | 50 | See the channels themselves for documentation of these methods. |
|
51 | 51 | |
|
52 | 52 | """ |
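
A minimal usage sketch (editorial addition, not part of the patch): the blocking-client import, connection-file path, and timeout below are assumptions based on the IPython 1.x-era layout, not confirmed by this diff.

    # Hedged sketch: drive a running kernel through the proxied channel methods.
    from IPython.kernel.blocking import BlockingKernelClient  # assumed concrete subclass

    client = BlockingKernelClient()
    client.load_connection_file('kernel-1234.json')  # illustrative path
    client.start_channels()
    try:
        # 'execute' is bound onto the client from the shell channel's
        # proxy_methods once start_channels() has run.
        msg_id = client.execute('1 + 1')
        reply = client.get_shell_msg(timeout=5)
        assert reply['parent_header']['msg_id'] == msg_id
    finally:
        client.stop_channels()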
|
53 | 53 | |
|
54 | 54 | # The PyZMQ Context to use for communication with the kernel. |
|
55 | 55 | context = Instance(zmq.Context) |
|
56 | 56 | def _context_default(self): |
|
57 | 57 | return zmq.Context.instance() |
|
58 | 58 | |
|
59 | 59 | # The Session to use for communication with the kernel. |
|
60 | 60 | session = Instance(Session) |
|
61 | 61 | def _session_default(self): |
|
62 | 62 | return Session(parent=self) |
|
63 | 63 | |
|
64 | 64 | # The classes to use for the various channels |
|
65 | 65 | shell_channel_class = Type(ShellChannel) |
|
66 | 66 | iopub_channel_class = Type(IOPubChannel) |
|
67 | 67 | stdin_channel_class = Type(StdInChannel) |
|
68 | 68 | hb_channel_class = Type(HBChannel) |
|
69 | 69 | |
|
70 | 70 | # Protected traits |
|
71 | 71 | _shell_channel = Any |
|
72 | 72 | _iopub_channel = Any |
|
73 | 73 | _stdin_channel = Any |
|
74 | 74 | _hb_channel = Any |
|
75 | 75 | |
|
76 | 76 | #-------------------------------------------------------------------------- |
|
77 | 77 | # Channel proxy methods |
|
78 | 78 | #-------------------------------------------------------------------------- |
|
79 | 79 | |
|
80 | 80 | def _get_msg(channel, *args, **kwargs): |
|
81 | 81 | return channel.get_msg(*args, **kwargs) |
|
82 | 82 | |
|
83 | 83 | def get_shell_msg(self, *args, **kwargs): |
|
84 | 84 | """Get a message from the shell channel""" |
|
85 | 85 | return self.shell_channel.get_msg(*args, **kwargs) |
|
86 | 86 | |
|
87 | 87 | def get_iopub_msg(self, *args, **kwargs): |
|
88 | 88 | """Get a message from the iopub channel""" |
|
89 | 89 | return self.iopub_channel.get_msg(*args, **kwargs) |
|
90 | 90 | |
|
91 | 91 | def get_stdin_msg(self, *args, **kwargs): |
|
92 | 92 | """Get a message from the stdin channel""" |
|
93 | 93 | return self.stdin_channel.get_msg(*args, **kwargs) |
|
94 | 94 | |
|
95 | 95 | #-------------------------------------------------------------------------- |
|
96 | 96 | # Channel management methods |
|
97 | 97 | #-------------------------------------------------------------------------- |
|
98 | 98 | |
|
99 | 99 | def start_channels(self, shell=True, iopub=True, stdin=True, hb=True): |
|
100 | 100 | """Starts the channels for this kernel. |
|
101 | 101 | |
|
102 | 102 | This will create the channels if they do not exist and then start |
|
103 | 103 | them (their activity runs in a thread). If port numbers of 0 are |
|
104 | 104 | being used (random ports) then you must first call |
|
105 | 105 | :meth:`start_kernel`. If the channels have been stopped and you |
|
106 | 106 | call this, :class:`RuntimeError` will be raised. |
|
107 | 107 | """ |
|
108 | 108 | if shell: |
|
109 | 109 | self.shell_channel.start() |
|
110 | 110 | for method in self.shell_channel.proxy_methods: |
|
111 | 111 | setattr(self, method, getattr(self.shell_channel, method)) |
|
112 | 112 | if iopub: |
|
113 | 113 | self.iopub_channel.start() |
|
114 | 114 | for method in self.iopub_channel.proxy_methods: |
|
115 | 115 | setattr(self, method, getattr(self.iopub_channel, method)) |
|
116 | 116 | if stdin: |
|
117 | 117 | self.stdin_channel.start() |
|
118 | 118 | for method in self.stdin_channel.proxy_methods: |
|
119 | 119 | setattr(self, method, getattr(self.stdin_channel, method)) |
|
120 | 120 | self.shell_channel.allow_stdin = True |
|
121 | 121 | else: |
|
122 | 122 | self.shell_channel.allow_stdin = False |
|
123 | 123 | if hb: |
|
124 | 124 | self.hb_channel.start() |
|
125 | 125 | |
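
The loops above re-bind each channel's proxy_methods onto the client instance. A standalone sketch of that pattern (all names here are illustrative):

    class Channel(object):
        proxy_methods = ['execute']          # names to expose on the client

        def execute(self, code):
            return 'sent: %s' % code

    class Client(object):
        def start(self, channel):
            # Same setattr/getattr idiom as start_channels(): afterwards,
            # client.execute(...) forwards straight to channel.execute(...).
            for name in channel.proxy_methods:
                setattr(self, name, getattr(channel, name))

    c = Client()
    c.start(Channel())
    assert c.execute('1 + 1') == 'sent: 1 + 1'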
|
126 | 126 | def stop_channels(self): |
|
127 | 127 | """Stops all the running channels for this kernel. |
|
128 | 128 | |
|
129 | 129 | This stops their event loops and joins their threads. |
|
130 | 130 | """ |
|
131 | 131 | if self.shell_channel.is_alive(): |
|
132 | 132 | self.shell_channel.stop() |
|
133 | 133 | if self.iopub_channel.is_alive(): |
|
134 | 134 | self.iopub_channel.stop() |
|
135 | 135 | if self.stdin_channel.is_alive(): |
|
136 | 136 | self.stdin_channel.stop() |
|
137 | 137 | if self.hb_channel.is_alive(): |
|
138 | 138 | self.hb_channel.stop() |
|
139 | 139 | |
|
140 | 140 | @property |
|
141 | 141 | def channels_running(self): |
|
142 | 142 | """Are any of the channels created and running?""" |
|
143 | 143 | return (self.shell_channel.is_alive() or self.iopub_channel.is_alive() or |
|
144 | 144 | self.stdin_channel.is_alive() or self.hb_channel.is_alive()) |
|
145 | 145 | |
|
146 | 146 | @property |
|
147 | 147 | def shell_channel(self): |
|
148 | 148 | """Get the shell channel object for this kernel.""" |
|
149 | 149 | if self._shell_channel is None: |
|
150 | 150 | url = self._make_url('shell') |
|
151 | 151 | self.log.debug("connecting shell channel to %s", url) |
|
152 | 152 | self._shell_channel = self.shell_channel_class( |
|
153 | 153 | self.context, self.session, url |
|
154 | 154 | ) |
|
155 | 155 | return self._shell_channel |
|
156 | 156 | |
|
157 | 157 | @property |
|
158 | 158 | def iopub_channel(self): |
|
159 | 159 | """Get the iopub channel object for this kernel.""" |
|
160 | 160 | if self._iopub_channel is None: |
|
161 | 161 | url = self._make_url('iopub') |
|
162 | 162 | self.log.debug("connecting iopub channel to %s", url) |
|
163 | 163 | self._iopub_channel = self.iopub_channel_class( |
|
164 | 164 | self.context, self.session, url |
|
165 | 165 | ) |
|
166 | 166 | return self._iopub_channel |
|
167 | 167 | |
|
168 | 168 | @property |
|
169 | 169 | def stdin_channel(self): |
|
170 | 170 | """Get the stdin channel object for this kernel.""" |
|
171 | 171 | if self._stdin_channel is None: |
|
172 | 172 | url = self._make_url('stdin') |
|
173 | 173 | self.log.debug("connecting stdin channel to %s", url) |
|
174 | 174 | self._stdin_channel = self.stdin_channel_class( |
|
175 | 175 | self.context, self.session, url |
|
176 | 176 | ) |
|
177 | 177 | return self._stdin_channel |
|
178 | 178 | |
|
179 | 179 | @property |
|
180 | 180 | def hb_channel(self): |
|
181 | 181 | """Get the hb channel object for this kernel.""" |
|
182 | 182 | if self._hb_channel is None: |
|
183 | 183 | url = self._make_url('hb') |
|
184 | 184 | self.log.debug("connecting heartbeat channel to %s", url) |
|
185 | 185 | self._hb_channel = self.hb_channel_class( |
|
186 | 186 | self.context, self.session, url |
|
187 | 187 | ) |
|
188 | 188 | return self._hb_channel |
|
189 | 189 | |
|
190 | 190 | def is_alive(self): |
|
191 | 191 | """Is the kernel process still running?""" |
|
192 | 192 | if self._hb_channel is not None: |
|
193 | 193 | # We did not start the kernel process ourselves, so |

194 | 194 | # fall back on the heartbeat to check liveness. |
|
195 | 195 | return self._hb_channel.is_beating() |
|
196 | 196 | else: |
|
197 | 197 | # no heartbeat channel and the kernel is not local, so we |

198 | 198 | # can't tell whether it's running; naively return True |
|
199 | 199 | return True |
|
200 | 200 | |
|
201 | 201 | |
|
202 | 202 | #----------------------------------------------------------------------------- |
|
203 | 203 | # ABC Registration |
|
204 | 204 | #----------------------------------------------------------------------------- |
|
205 | 205 | |
|
206 | 206 | KernelClientABC.register(KernelClient) |
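
The final line uses abc's virtual-subclass mechanism: KernelClient passes isinstance/issubclass checks against KernelClientABC without inheriting from it. A self-contained sketch of the same pattern:

    import abc

    # Build an ABC in a way that works on both Python 2 and 3.
    ClientABC = abc.ABCMeta('ClientABC', (object,), {})

    class Client(object):
        pass

    ClientABC.register(Client)            # register as a *virtual* subclass
    assert issubclass(Client, ClientABC)
    assert isinstance(Client(), ClientABC)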
@@ -1,848 +1,850 @@
|
1 | 1 | """Session object for building, serializing, sending, and receiving messages in |
|
2 | 2 | IPython. The Session object supports serialization, HMAC signatures, and |
|
3 | 3 | metadata on messages. |
|
4 | 4 | |
|
5 | 5 | Also defined here are utilities for working with Sessions: |
|
6 | 6 | * A SessionFactory to be used as a base class for configurables that work with |
|
7 | 7 | Sessions. |
|
8 | 8 | * A Message object for convenience that allows attribute-access to the msg dict. |
|
9 | 9 | |
|
10 | 10 | Authors: |
|
11 | 11 | |
|
12 | 12 | * Min RK |
|
13 | 13 | * Brian Granger |
|
14 | 14 | * Fernando Perez |
|
15 | 15 | """ |
|
16 | 16 | #----------------------------------------------------------------------------- |
|
17 | 17 | # Copyright (C) 2010-2011 The IPython Development Team |
|
18 | 18 | # |
|
19 | 19 | # Distributed under the terms of the BSD License. The full license is in |
|
20 | 20 | # the file COPYING, distributed as part of this software. |
|
21 | 21 | #----------------------------------------------------------------------------- |
|
22 | 22 | |
|
23 | 23 | #----------------------------------------------------------------------------- |
|
24 | 24 | # Imports |
|
25 | 25 | #----------------------------------------------------------------------------- |
|
26 | 26 | |
|
27 | 27 | import hashlib |
|
28 | 28 | import hmac |
|
29 | 29 | import logging |
|
30 | 30 | import os |
|
31 | 31 | import pprint |
|
32 | 32 | import random |
|
33 | 33 | import uuid |
|
34 | 34 | from datetime import datetime |
|
35 | 35 | |
|
36 | 36 | try: |
|
37 | 37 | import cPickle |
|
38 | 38 | pickle = cPickle |
|
39 | 39 | except ImportError: |
|
40 | 40 | cPickle = None |
|
41 | 41 | import pickle |
|
42 | 42 | |
|
43 | 43 | import zmq |
|
44 | 44 | from zmq.utils import jsonapi |
|
45 | 45 | from zmq.eventloop.ioloop import IOLoop |
|
46 | 46 | from zmq.eventloop.zmqstream import ZMQStream |
|
47 | 47 | |
|
48 | 48 | from IPython.config.configurable import Configurable, LoggingConfigurable |
|
49 | 49 | from IPython.utils import io |
|
50 | 50 | from IPython.utils.importstring import import_item |
|
51 | 51 | from IPython.utils.jsonutil import extract_dates, squash_dates, date_default |
|
52 | 52 | from IPython.utils.py3compat import (str_to_bytes, str_to_unicode, unicode_type, |
|
53 | 53 | iteritems) |
|
54 | 54 | from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set, |
|
55 | 55 | DottedObjectName, CUnicode, Dict, Integer, |
|
56 | 56 | TraitError, |
|
57 | 57 | ) |
|
58 | 58 | from IPython.kernel.zmq.serialize import MAX_ITEMS, MAX_BYTES |
|
59 | 59 | |
|
60 | 60 | #----------------------------------------------------------------------------- |
|
61 | 61 | # utility functions |
|
62 | 62 | #----------------------------------------------------------------------------- |
|
63 | 63 | |
|
64 | 64 | def squash_unicode(obj): |
|
65 | 65 | """coerce unicode back to bytestrings.""" |
|
66 | 66 | if isinstance(obj,dict): |
|
67 | 67 | for key in obj.keys(): |
|
68 | 68 | obj[key] = squash_unicode(obj[key]) |
|
69 | 69 | if isinstance(key, unicode_type): |
|
70 | 70 | obj[squash_unicode(key)] = obj.pop(key) |
|
71 | 71 | elif isinstance(obj, list): |
|
72 | 72 | for i,v in enumerate(obj): |
|
73 | 73 | obj[i] = squash_unicode(v) |
|
74 | 74 | elif isinstance(obj, unicode_type): |
|
75 | 75 | obj = obj.encode('utf8') |
|
76 | 76 | return obj |
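
A quick illustration of the coercion (editorial; the mutate-while-iterating over obj.keys() assumes Python 2 dict semantics, which is the era of this code):

    d = squash_unicode({u'a': [u'hi', 1], u'b': {u'c': u'x'}})
    assert d == {b'a': [b'hi', 1], b'b': {b'c': b'x'}}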
|
77 | 77 | |
|
78 | 78 | #----------------------------------------------------------------------------- |
|
79 | 79 | # globals and defaults |
|
80 | 80 | #----------------------------------------------------------------------------- |
|
81 | 81 | |
|
82 | 82 | # ISO8601-ify datetime objects |
|
83 | 83 | json_packer = lambda obj: jsonapi.dumps(obj, default=date_default) |
|
84 | 84 | json_unpacker = lambda s: jsonapi.loads(s) |
|
85 | 85 | |
|
86 | 86 | pickle_packer = lambda o: pickle.dumps(squash_dates(o),-1) |
|
87 | 87 | pickle_unpacker = pickle.loads |
|
88 | 88 | |
|
89 | 89 | default_packer = json_packer |
|
90 | 90 | default_unpacker = json_unpacker |
|
91 | 91 | |
|
92 | 92 | DELIM = b"<IDS|MSG>" |
|
93 | 93 | # singleton dummy tracker, which will always report as done |
|
94 | 94 | DONE = zmq.MessageTracker() |
|
95 | 95 | |
|
96 | 96 | #----------------------------------------------------------------------------- |
|
97 | 97 | # Mixin tools for apps that use Sessions |
|
98 | 98 | #----------------------------------------------------------------------------- |
|
99 | 99 | |
|
100 | 100 | session_aliases = dict( |
|
101 | 101 | ident = 'Session.session', |
|
102 | 102 | user = 'Session.username', |
|
103 | 103 | keyfile = 'Session.keyfile', |
|
104 | 104 | ) |
|
105 | 105 | |
|
106 | 106 | session_flags = { |
|
107 | 107 | 'secure' : ({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())), |
|
108 | 108 | 'keyfile' : '' }}, |
|
109 | 109 | """Use HMAC digests for authentication of messages. |
|
110 | 110 | Setting this flag will generate a new UUID to use as the HMAC key. |
|
111 | 111 | """), |
|
112 | 112 | 'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }}, |
|
113 | 113 | """Don't authenticate messages."""), |
|
114 | 114 | } |
|
115 | 115 | |
|
116 | 116 | def default_secure(cfg): |
|
117 | 117 | """Set the default behavior for a config environment to be secure. |
|
118 | 118 | |
|
119 | 119 | If Session.key/keyfile have not been set, set Session.key to |
|
120 | 120 | a new random UUID. |
|
121 | 121 | """ |
|
122 | 122 | |
|
123 | 123 | if 'Session' in cfg: |
|
124 | 124 | if 'key' in cfg.Session or 'keyfile' in cfg.Session: |
|
125 | 125 | return |
|
126 | 126 | # key/keyfile not specified, generate new UUID: |
|
127 | 127 | cfg.Session.key = str_to_bytes(str(uuid.uuid4())) |
|
128 | 128 | |
|
129 | 129 | |
|
130 | 130 | #----------------------------------------------------------------------------- |
|
131 | 131 | # Classes |
|
132 | 132 | #----------------------------------------------------------------------------- |
|
133 | 133 | |
|
134 | 134 | class SessionFactory(LoggingConfigurable): |
|
135 | 135 | """The Base class for configurables that have a Session, Context, logger, |
|
136 | 136 | and IOLoop. |
|
137 | 137 | """ |
|
138 | 138 | |
|
139 | 139 | logname = Unicode('') |
|
140 | 140 | def _logname_changed(self, name, old, new): |
|
141 | 141 | self.log = logging.getLogger(new) |
|
142 | 142 | |
|
143 | 143 | # not configurable: |
|
144 | 144 | context = Instance('zmq.Context') |
|
145 | 145 | def _context_default(self): |
|
146 | 146 | return zmq.Context.instance() |
|
147 | 147 | |
|
148 | 148 | session = Instance('IPython.kernel.zmq.session.Session') |
|
149 | 149 | |
|
150 | 150 | loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False) |
|
151 | 151 | def _loop_default(self): |
|
152 | 152 | return IOLoop.instance() |
|
153 | 153 | |
|
154 | 154 | def __init__(self, **kwargs): |
|
155 | 155 | super(SessionFactory, self).__init__(**kwargs) |
|
156 | 156 | |
|
157 | 157 | if self.session is None: |
|
158 | 158 | # construct the session |
|
159 | 159 | self.session = Session(**kwargs) |
|
160 | 160 | |
|
161 | 161 | |
|
162 | 162 | class Message(object): |
|
163 | 163 | """A simple message object that maps dict keys to attributes. |
|
164 | 164 | |
|
165 | 165 | A Message can be created from a dict and a dict from a Message instance |
|
166 | 166 | simply by calling dict(msg_obj).""" |
|
167 | 167 | |
|
168 | 168 | def __init__(self, msg_dict): |
|
169 | 169 | dct = self.__dict__ |
|
170 | 170 | for k, v in iteritems(dict(msg_dict)): |
|
171 | 171 | if isinstance(v, dict): |
|
172 | 172 | v = Message(v) |
|
173 | 173 | dct[k] = v |
|
174 | 174 | |
|
175 | 175 | # Having this iterator lets dict(msg_obj) work out of the box. |
|
176 | 176 | def __iter__(self): |
|
177 | 177 | return iter(iteritems(self.__dict__)) |
|
178 | 178 | |
|
179 | 179 | def __repr__(self): |
|
180 | 180 | return repr(self.__dict__) |
|
181 | 181 | |
|
182 | 182 | def __str__(self): |
|
183 | 183 | return pprint.pformat(self.__dict__) |
|
184 | 184 | |
|
185 | 185 | def __contains__(self, k): |
|
186 | 186 | return k in self.__dict__ |
|
187 | 187 | |
|
188 | 188 | def __getitem__(self, k): |
|
189 | 189 | return self.__dict__[k] |
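
A quick round trip (this mirrors the test_msg2obj helper at the bottom of the file):

    m = Message({'header': {'msg_id': 'abc'}, 'content': {}})
    assert m.header.msg_id == 'abc'            # nested dicts become Messages
    assert 'content' in m                      # __contains__ checks __dict__
    assert dict(m)['header'].msg_id == 'abc'   # __iter__ makes dict(m) work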
|
190 | 190 | |
|
191 | 191 | |
|
192 | 192 | def msg_header(msg_id, msg_type, username, session): |
|
193 | 193 | date = datetime.now() |
|
194 | 194 | return locals() |
|
195 | 195 | |
|
196 | 196 | def extract_header(msg_or_header): |
|
197 | 197 | """Given a message or header, return the header.""" |
|
198 | 198 | if not msg_or_header: |
|
199 | 199 | return {} |
|
200 | 200 | try: |
|
201 | 201 | # See if msg_or_header is the entire message. |
|
202 | 202 | h = msg_or_header['header'] |
|
203 | 203 | except KeyError: |
|
204 | 204 | try: |
|
205 | 205 | # See if msg_or_header is just the header |
|
206 | 206 | h = msg_or_header['msg_id'] |
|
207 | 207 | except KeyError: |
|
208 | 208 | raise |
|
209 | 209 | else: |
|
210 | 210 | h = msg_or_header |
|
211 | 211 | if not isinstance(h, dict): |
|
212 | 212 | h = dict(h) |
|
213 | 213 | return h |
|
214 | 214 | |
|
215 | 215 | class Session(Configurable): |
|
216 | 216 | """Object for handling serialization and sending of messages. |
|
217 | 217 | |
|
218 | 218 | The Session object handles building messages and sending them |
|
219 | 219 | with ZMQ sockets or ZMQStream objects. Objects can communicate with each |
|
220 | 220 | other over the network via Session objects, and only need to work with the |
|
221 | 221 | dict-based IPython message spec. The Session will handle |
|
222 | 222 | serialization/deserialization, security, and metadata. |
|
223 | 223 | |
|
224 | 224 | Sessions support configurable serialization via packer/unpacker traits, |
|
225 | 225 | and signing with HMAC digests via the key/keyfile traits. |
|
226 | 226 | |
|
227 | 227 | Parameters |
|
228 | 228 | ---------- |
|
229 | 229 | |
|
230 | 230 | debug : bool |
|
231 | 231 | whether to trigger extra debugging statements |
|
232 | 232 | packer/unpacker : str : 'json', 'pickle' or import_string |
|
233 | 233 | importstrings for methods to serialize message parts. If just |
|
234 | 234 | 'json' or 'pickle', predefined JSON and pickle packers will be used. |
|
235 | 235 | Otherwise, the entire importstring must be used. |
|
236 | 236 | |
|
237 | 237 | The functions must accept at least valid JSON input, and output *bytes*. |
|
238 | 238 | |
|
239 | 239 | For example, to use msgpack: |
|
240 | 240 | packer = 'msgpack.packb', unpacker='msgpack.unpackb' |
|
241 | 241 | pack/unpack : callables |
|
242 | 242 | You can also set the pack/unpack callables for serialization directly. |
|
243 | 243 | session : bytes |
|
244 | 244 | the ID of this Session object. The default is to generate a new UUID. |
|
245 | 245 | username : unicode |
|
246 | 246 | username added to message headers. The default is to ask the OS. |
|
247 | 247 | key : bytes |
|
248 | 248 | The key used to initialize an HMAC signature. If unset, messages |
|
249 | 249 | will not be signed or checked. |
|
250 | 250 | keyfile : filepath |
|
251 | 251 | The file containing a key. If this is set, `key` will be initialized |
|
252 | 252 | to the contents of the file. |
|
253 | 253 | |
|
254 | 254 | """ |
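
For instance, the msgpack wiring mentioned above would look like this (a sketch: it assumes the third-party msgpack package is installed, and the exact round trip depends on that version's str/bytes handling; _check_packers wraps the callables with squash_dates if they cannot serialize datetimes):

    s = Session(packer='msgpack.packb', unpacker='msgpack.unpackb',
                key=b'secret')
    # Messages are now packed with msgpack instead of JSON.
    parts = s.serialize(s.msg('kernel_info_request'))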
|
255 | 255 | |
|
256 | 256 | debug=Bool(False, config=True, help="""Debug output in the Session""") |
|
257 | 257 | |
|
258 | 258 | packer = DottedObjectName('json',config=True, |
|
259 | 259 | help="""The name of the packer for serializing messages. |
|
260 | 260 | Should be one of 'json', 'pickle', or an import name |
|
261 | 261 | for a custom callable serializer.""") |
|
262 | 262 | def _packer_changed(self, name, old, new): |
|
263 | 263 | if new.lower() == 'json': |
|
264 | 264 | self.pack = json_packer |
|
265 | 265 | self.unpack = json_unpacker |
|
266 | 266 | self.unpacker = new |
|
267 | 267 | elif new.lower() == 'pickle': |
|
268 | 268 | self.pack = pickle_packer |
|
269 | 269 | self.unpack = pickle_unpacker |
|
270 | 270 | self.unpacker = new |
|
271 | 271 | else: |
|
272 | 272 | self.pack = import_item(str(new)) |
|
273 | 273 | |
|
274 | 274 | unpacker = DottedObjectName('json', config=True, |
|
275 | 275 | help="""The name of the unpacker for unserializing messages. |
|
276 | 276 | Only used with custom functions for `packer`.""") |
|
277 | 277 | def _unpacker_changed(self, name, old, new): |
|
278 | 278 | if new.lower() == 'json': |
|
279 | 279 | self.pack = json_packer |
|
280 | 280 | self.unpack = json_unpacker |
|
281 | 281 | self.packer = new |
|
282 | 282 | elif new.lower() == 'pickle': |
|
283 | 283 | self.pack = pickle_packer |
|
284 | 284 | self.unpack = pickle_unpacker |
|
285 | 285 | self.packer = new |
|
286 | 286 | else: |
|
287 | 287 | self.unpack = import_item(str(new)) |
|
288 | 288 | |
|
289 | 289 | session = CUnicode(u'', config=True, |
|
290 | 290 | help="""The UUID identifying this session.""") |
|
291 | 291 | def _session_default(self): |
|
292 | 292 | u = unicode_type(uuid.uuid4()) |
|
293 | 293 | self.bsession = u.encode('ascii') |
|
294 | 294 | return u |
|
295 | 295 | |
|
296 | 296 | def _session_changed(self, name, old, new): |
|
297 | 297 | self.bsession = self.session.encode('ascii') |
|
298 | 298 | |
|
299 | 299 | # bsession is the session as bytes |
|
300 | 300 | bsession = CBytes(b'') |
|
301 | 301 | |
|
302 | 302 | username = Unicode(str_to_unicode(os.environ.get('USER', 'username')), |
|
303 | 303 | help="""Username for the Session. Default is your system username.""", |
|
304 | 304 | config=True) |
|
305 | 305 | |
|
306 | 306 | metadata = Dict({}, config=True, |
|
307 | 307 | help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""") |
|
308 | 308 | |
|
309 | 309 | # message signature related traits: |
|
310 | 310 | |
|
311 | 311 | key = CBytes(b'', config=True, |
|
312 | 312 | help="""execution key, for extra authentication.""") |
|
313 | 313 | def _key_changed(self, name, old, new): |
|
314 | 314 | if new: |
|
315 | 315 | self.auth = hmac.HMAC(new, digestmod=self.digest_mod) |
|
316 | 316 | else: |
|
317 | 317 | self.auth = None |
|
318 | 318 | |
|
319 | 319 | signature_scheme = Unicode('hmac-sha256', config=True, |
|
320 | 320 | help="""The digest scheme used to construct the message signatures. |
|
321 | 321 | Must have the form 'hmac-HASH'.""") |
|
322 | 322 | def _signature_scheme_changed(self, name, old, new): |
|
323 | 323 | if not new.startswith('hmac-'): |
|
324 | 324 | raise TraitError("signature_scheme must start with 'hmac-', got %r" % new) |
|
325 | 325 | hash_name = new.split('-', 1)[1] |
|
326 | 326 | try: |
|
327 | 327 | self.digest_mod = getattr(hashlib, hash_name) |
|
328 | 328 | except AttributeError: |
|
329 | 329 | raise TraitError("hashlib has no such attribute: %s" % hash_name) |
|
330 | 330 | |
|
331 | 331 | digest_mod = Any() |
|
332 | 332 | def _digest_mod_default(self): |
|
333 | 333 | return hashlib.sha256 |
|
334 | 334 | |
|
335 | 335 | auth = Instance(hmac.HMAC) |
|
336 | 336 | |
|
337 | 337 | digest_history = Set() |
|
338 | 338 | digest_history_size = Integer(2**16, config=True, |
|
339 | 339 | help="""The maximum number of digests to remember. |
|
340 | 340 | |
|
341 | 341 | The digest history will be culled when it exceeds this value. |
|
342 | 342 | """ |
|
343 | 343 | ) |
|
344 | 344 | |
|
345 | 345 | keyfile = Unicode('', config=True, |
|
346 | 346 | help="""path to file containing execution key.""") |
|
347 | 347 | def _keyfile_changed(self, name, old, new): |
|
348 | 348 | with open(new, 'rb') as f: |
|
349 | 349 | self.key = f.read().strip() |
|
350 | 350 | |
|
351 | 351 | # for protecting against sends from forks |
|
352 | 352 | pid = Integer() |
|
353 | 353 | |
|
354 | 354 | # serialization traits: |
|
355 | 355 | |
|
356 | 356 | pack = Any(default_packer) # the actual packer function |
|
357 | 357 | def _pack_changed(self, name, old, new): |
|
358 | 358 | if not callable(new): |
|
359 | 359 | raise TypeError("packer must be callable, not %s"%type(new)) |
|
360 | 360 | |
|
361 | 361 | unpack = Any(default_unpacker) # the actual unpacker function |
|
362 | 362 | def _unpack_changed(self, name, old, new): |
|
363 | 363 | # the unpacker is only checked for callability; it is assumed to invert pack |
|
364 | 364 | if not callable(new): |
|
365 | 365 | raise TypeError("unpacker must be callable, not %s"%type(new)) |
|
366 | 366 | |
|
367 | 367 | # thresholds: |
|
368 | 368 | copy_threshold = Integer(2**16, config=True, |
|
369 | 369 | help="Threshold (in bytes) beyond which a buffer should be sent without copying.") |
|
370 | 370 | buffer_threshold = Integer(MAX_BYTES, config=True, |
|
371 | 371 | help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.") |
|
372 | 372 | item_threshold = Integer(MAX_ITEMS, config=True, |
|
373 | 373 | help="""The maximum number of items for a container to be introspected for custom serialization. |
|
374 | 374 | Containers larger than this are pickled outright. |
|
375 | 375 | """ |
|
376 | 376 | ) |
|
377 | 377 | |
|
378 | 378 | |
|
379 | 379 | def __init__(self, **kwargs): |
|
380 | 380 | """create a Session object |
|
381 | 381 | |
|
382 | 382 | Parameters |
|
383 | 383 | ---------- |
|
384 | 384 | |
|
385 | 385 | debug : bool |
|
386 | 386 | whether to trigger extra debugging statements |
|
387 | 387 | packer/unpacker : str : 'json', 'pickle' or import_string |
|
388 | 388 | importstrings for methods to serialize message parts. If just |
|
389 | 389 | 'json' or 'pickle', predefined JSON and pickle packers will be used. |
|
390 | 390 | Otherwise, the entire importstring must be used. |
|
391 | 391 | |
|
392 | 392 | The functions must accept at least valid JSON input, and output |
|
393 | 393 | *bytes*. |
|
394 | 394 | |
|
395 | 395 | For example, to use msgpack: |
|
396 | 396 | packer = 'msgpack.packb', unpacker='msgpack.unpackb' |
|
397 | 397 | pack/unpack : callables |
|
398 | 398 | You can also set the pack/unpack callables for serialization |
|
399 | 399 | directly. |
|
400 | 400 | session : unicode (must be ascii) |
|
401 | 401 | the ID of this Session object. The default is to generate a new |
|
402 | 402 | UUID. |
|
403 | 403 | bsession : bytes |
|
404 | 404 | The session as bytes |
|
405 | 405 | username : unicode |
|
406 | 406 | username added to message headers. The default is to ask the OS. |
|
407 | 407 | key : bytes |
|
408 | 408 | The key used to initialize an HMAC signature. If unset, messages |
|
409 | 409 | will not be signed or checked. |
|
410 | 410 | signature_scheme : str |
|
411 | 411 | The message digest scheme. Currently must be of the form 'hmac-HASH', |
|
412 | 412 | where 'HASH' is a hashing function available in Python's hashlib. |
|
413 | 413 | The default is 'hmac-sha256'. |
|
414 | 414 | This is ignored if 'key' is empty. |
|
415 | 415 | keyfile : filepath |
|
416 | 416 | The file containing a key. If this is set, `key` will be |
|
417 | 417 | initialized to the contents of the file. |
|
418 | 418 | """ |
|
419 | 419 | super(Session, self).__init__(**kwargs) |
|
420 | 420 | self._check_packers() |
|
421 | 421 | self.none = self.pack({}) |
|
422 | 422 | # ensure self._session_default() if necessary, so bsession is defined: |
|
423 | 423 | self.session |
|
424 | 424 | self.pid = os.getpid() |
|
425 | 425 | |
|
426 | 426 | @property |
|
427 | 427 | def msg_id(self): |
|
428 | 428 | """always return new uuid""" |
|
429 | 429 | return str(uuid.uuid4()) |
|
430 | 430 | |
|
431 | 431 | def _check_packers(self): |
|
432 | 432 | """check packers for datetime support.""" |
|
433 | 433 | pack = self.pack |
|
434 | 434 | unpack = self.unpack |
|
435 | 435 | |
|
436 | 436 | # check simple serialization |
|
437 | 437 | msg = dict(a=[1,'hi']) |
|
438 | 438 | try: |
|
439 | 439 | packed = pack(msg) |
|
440 | 440 | except Exception as e: |
|
441 | 441 | msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}" |
|
442 | 442 | if self.packer == 'json': |
|
443 | 443 | jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod |
|
444 | 444 | else: |
|
445 | 445 | jsonmsg = "" |
|
446 | 446 | raise ValueError( |
|
447 | 447 | msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg) |
|
448 | 448 | ) |
|
449 | 449 | |
|
450 | 450 | # ensure packed message is bytes |
|
451 | 451 | if not isinstance(packed, bytes): |
|
452 | 452 | raise ValueError("message packed to %r, but bytes are required"%type(packed)) |
|
453 | 453 | |
|
454 | 454 | # check that unpack is pack's inverse |
|
455 | 455 | try: |
|
456 | 456 | unpacked = unpack(packed) |
|
457 | 457 | assert unpacked == msg |
|
458 | 458 | except Exception as e: |
|
459 | 459 | msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}" |
|
460 | 460 | if self.packer == 'json': |
|
461 | 461 | jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod |
|
462 | 462 | else: |
|
463 | 463 | jsonmsg = "" |
|
464 | 464 | raise ValueError( |
|
465 | 465 | msg.format(packer=self.packer, unpacker=self.unpacker, e=e, jsonmsg=jsonmsg) |
|
466 | 466 | ) |
|
467 | 467 | |
|
468 | 468 | # check datetime support |
|
469 | 469 | msg = dict(t=datetime.now()) |
|
470 | 470 | try: |
|
471 | 471 | unpacked = unpack(pack(msg)) |
|
472 | 472 | if isinstance(unpacked['t'], datetime): |
|
473 | 473 | raise ValueError("Shouldn't deserialize to datetime") |
|
474 | 474 | except Exception: |
|
475 | 475 | self.pack = lambda o: pack(squash_dates(o)) |
|
476 | 476 | self.unpack = lambda s: unpack(s) |
|
477 | 477 | |
|
478 | 478 | def msg_header(self, msg_type): |
|
479 | 479 | return msg_header(self.msg_id, msg_type, self.username, self.session) |
|
480 | 480 | |
|
481 | 481 | def msg(self, msg_type, content=None, parent=None, header=None, metadata=None): |
|
482 | 482 | """Return the nested message dict. |
|
483 | 483 | |
|
484 | 484 | This format is different from what is sent over the wire. The |
|
485 | 485 | serialize/unserialize methods convert this nested message dict to the wire |
|
486 | 486 | format, which is a list of message parts. |
|
487 | 487 | """ |
|
488 | 488 | msg = {} |
|
489 | 489 | header = self.msg_header(msg_type) if header is None else header |
|
490 | 490 | msg['header'] = header |
|
491 | 491 | msg['msg_id'] = header['msg_id'] |
|
492 | 492 | msg['msg_type'] = header['msg_type'] |
|
493 | 493 | msg['parent_header'] = {} if parent is None else extract_header(parent) |
|
494 | 494 | msg['content'] = {} if content is None else content |
|
495 | 495 | msg['metadata'] = self.metadata.copy() |
|
496 | 496 | if metadata is not None: |
|
497 | 497 | msg['metadata'].update(metadata) |
|
498 | 498 | return msg |
|
499 | 499 | |
|
500 | 500 | def sign(self, msg_list): |
|
501 | 501 | """Sign a message with HMAC digest. If no auth, return b''. |
|
502 | 502 | |
|
503 | 503 | Parameters |
|
504 | 504 | ---------- |
|
505 | 505 | msg_list : list |
|
506 | 506 | The [p_header,p_parent,p_metadata,p_content] part of the message list. |
|
507 | 507 | """ |
|
508 | 508 | if self.auth is None: |
|
509 | 509 | return b'' |
|
510 | 510 | h = self.auth.copy() |
|
511 | 511 | for m in msg_list: |
|
512 | 512 | h.update(m) |
|
513 | 513 | return str_to_bytes(h.hexdigest()) |
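
A receiver can recompute the signature by hand; the sketch below mirrors what sign() does (key and frames are made up). Keeping one keyed HMAC on the Session and copying it per message avoids re-keying on every send.

    import hashlib
    import hmac

    key = b'secret'
    # The signable frames: p_header, p_parent, p_metadata, p_content.
    parts = [b'{"msg_id": "1"}', b'{}', b'{}', b'{}']
    h = hmac.HMAC(key, digestmod=hashlib.sha256)
    for part in parts:
        h.update(part)
    signature = h.hexdigest().encode('ascii')   # bytes, as sign() returns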
|
514 | 514 | |
|
515 | 515 | def serialize(self, msg, ident=None): |
|
516 | 516 | """Serialize the message components to bytes. |
|
517 | 517 | |
|
518 | 518 | This is roughly the inverse of unserialize. The serialize/unserialize |
|
519 | 519 | methods work with full message lists, whereas pack/unpack work with |
|
520 | 520 | the individual message parts in the message list. |
|
521 | 521 | |
|
522 | 522 | Parameters |
|
523 | 523 | ---------- |
|
524 | 524 | msg : dict or Message |
|
525 | 525 | The nested message dict as returned by the self.msg method. |
|
526 | 526 | |
|
527 | 527 | Returns |
|
528 | 528 | ------- |
|
529 | 529 | msg_list : list |
|
530 | The list of bytes objects to be sent with the format: | |
|
531 | [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_metadata,p_content, | |
|
532 | buffer1,buffer2,...]. In this list, the p_* entities are | |
|
533 | the packed or serialized versions, so if JSON is used, these | |
|
534 | are utf8 encoded JSON strings. | |
|
530 | The list of bytes objects to be sent with the format:: | |
|
531 | ||
|
532 | [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent, | |
|
533 | p_metadata, p_content, buffer1, buffer2, ...] | |
|
534 | ||
|
535 | In this list, the ``p_*`` entities are the packed or serialized | |
|
536 | versions, so if JSON is used, these are utf8 encoded JSON strings. | |
|
535 | 537 | """ |
|
536 | 538 | content = msg.get('content', {}) |
|
537 | 539 | if content is None: |
|
538 | 540 | content = self.none |
|
539 | 541 | elif isinstance(content, dict): |
|
540 | 542 | content = self.pack(content) |
|
541 | 543 | elif isinstance(content, bytes): |
|
542 | 544 | # content is already packed, as in a relayed message |
|
543 | 545 | pass |
|
544 | 546 | elif isinstance(content, unicode_type): |
|
545 | 547 | # should be bytes, but JSON often spits out unicode |
|
546 | 548 | content = content.encode('utf8') |
|
547 | 549 | else: |
|
548 | 550 | raise TypeError("Content incorrect type: %s"%type(content)) |
|
549 | 551 | |
|
550 | 552 | real_message = [self.pack(msg['header']), |
|
551 | 553 | self.pack(msg['parent_header']), |
|
552 | 554 | self.pack(msg['metadata']), |
|
553 | 555 | content, |
|
554 | 556 | ] |
|
555 | 557 | |
|
556 | 558 | to_send = [] |
|
557 | 559 | |
|
558 | 560 | if isinstance(ident, list): |
|
559 | 561 | # accept list of idents |
|
560 | 562 | to_send.extend(ident) |
|
561 | 563 | elif ident is not None: |
|
562 | 564 | to_send.append(ident) |
|
563 | 565 | to_send.append(DELIM) |
|
564 | 566 | |
|
565 | 567 | signature = self.sign(real_message) |
|
566 | 568 | to_send.append(signature) |
|
567 | 569 | |
|
568 | 570 | to_send.extend(real_message) |
|
569 | 571 | |
|
570 | 572 | return to_send |
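
Putting the pieces together, a sketch of the frames produced for a signed message (identity and key are illustrative):

    s = Session(key=b'secret')
    wire = s.serialize(s.msg('execute_request', content={'code': '1+1'}),
                       ident=b'client-1')
    # wire == [b'client-1', DELIM, signature,
    #          p_header, p_parent_header, p_metadata, p_content]
    assert wire[1] == DELIM and len(wire) == 7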
|
571 | 573 | |
|
572 | 574 | def send(self, stream, msg_or_type, content=None, parent=None, ident=None, |
|
573 | 575 | buffers=None, track=False, header=None, metadata=None): |
|
574 | 576 | """Build and send a message via stream or socket. |
|
575 | 577 | |
|
576 | 578 | The message format used by this function internally is as follows: |
|
577 | 579 | |
|
578 | 580 | [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_metadata,p_content, |
|
579 | 581 | buffer1,buffer2,...] |
|
580 | 582 | |
|
581 | 583 | The serialize/unserialize methods convert the nested message dict into this |
|
582 | 584 | format. |
|
583 | 585 | |
|
584 | 586 | Parameters |
|
585 | 587 | ---------- |
|
586 | 588 | |
|
587 | 589 | stream : zmq.Socket or ZMQStream |
|
588 | 590 | The socket-like object used to send the data. |
|
589 | 591 | msg_or_type : str or Message/dict |
|
590 | 592 | Normally, msg_or_type will be a msg_type unless a message is being |
|
591 | 593 | sent more than once. If a header is supplied, this can be set to |
|
592 | 594 | None and the msg_type will be pulled from the header. |
|
593 | 595 | |
|
594 | 596 | content : dict or None |
|
595 | 597 | The content of the message (ignored if msg_or_type is a message). |
|
596 | 598 | header : dict or None |
|
597 | 599 | The header dict for the message (ignored if msg_or_type is a message). |
|
598 | 600 | parent : Message or dict or None |
|
599 | 601 | The parent or parent header describing the parent of this message |
|
600 | 602 | (ignored if msg_or_type is a message). |
|
601 | 603 | ident : bytes or list of bytes |
|
602 | 604 | The zmq.IDENTITY routing path. |
|
603 | 605 | metadata : dict or None |
|
604 | 606 | The metadata describing the message |
|
605 | 607 | buffers : list or None |
|
606 | 608 | The already-serialized buffers to be appended to the message. |
|
607 | 609 | track : bool |
|
608 | 610 | Whether to track. Only for use with Sockets, because ZMQStream |
|
609 | 611 | objects cannot track messages. |
|
610 | 612 | |
|
611 | 613 | |
|
612 | 614 | Returns |
|
613 | 615 | ------- |
|
614 | 616 | msg : dict |
|
615 | 617 | The constructed message. |
|
616 | 618 | """ |
|
617 | 619 | if not isinstance(stream, zmq.Socket): |
|
618 | 620 | # ZMQStreams and dummy sockets do not support tracking. |
|
619 | 621 | track = False |
|
620 | 622 | |
|
621 | 623 | if isinstance(msg_or_type, (Message, dict)): |
|
622 | 624 | # We got a Message or message dict, not a msg_type so don't |
|
623 | 625 | # build a new Message. |
|
624 | 626 | msg = msg_or_type |
|
625 | 627 | else: |
|
626 | 628 | msg = self.msg(msg_or_type, content=content, parent=parent, |
|
627 | 629 | header=header, metadata=metadata) |
|
628 | 630 | if not os.getpid() == self.pid: |
|
629 | 631 | io.rprint("WARNING: attempted to send message from fork") |
|
630 | 632 | io.rprint(msg) |
|
631 | 633 | return |
|
632 | 634 | buffers = [] if buffers is None else buffers |
|
633 | 635 | to_send = self.serialize(msg, ident) |
|
634 | 636 | to_send.extend(buffers) |
|
635 | 637 | longest = max([ len(s) for s in to_send ]) |
|
636 | 638 | copy = (longest < self.copy_threshold) |
|
637 | 639 | |
|
638 | 640 | if buffers and track and not copy: |
|
639 | 641 | # only really track when we are doing zero-copy buffers |
|
640 | 642 | tracker = stream.send_multipart(to_send, copy=False, track=True) |
|
641 | 643 | else: |
|
642 | 644 | # use dummy tracker, which will be done immediately |
|
643 | 645 | tracker = DONE |
|
644 | 646 | stream.send_multipart(to_send, copy=copy) |
|
645 | 647 | |
|
646 | 648 | if self.debug: |
|
647 | 649 | pprint.pprint(msg) |
|
648 | 650 | pprint.pprint(to_send) |
|
649 | 651 | pprint.pprint(buffers) |
|
650 | 652 | |
|
651 | 653 | msg['tracker'] = tracker |
|
652 | 654 | |
|
653 | 655 | return msg |
|
654 | 656 | |
|
655 | 657 | def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None): |
|
656 | 658 | """Send a raw message via ident path. |
|
657 | 659 | |
|
658 | 660 | This method is used to send an already serialized message. |
|
659 | 661 | |
|
660 | 662 | Parameters |
|
661 | 663 | ---------- |
|
662 | 664 | stream : ZMQStream or Socket |
|
663 | 665 | The ZMQ stream or socket to use for sending the message. |
|
664 | 666 | msg_list : list |
|
665 | 667 | The serialized list of messages to send. This only includes the |
|
666 | 668 | [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of |
|
667 | 669 | the message. |
|
668 | 670 | ident : ident or list |
|
669 | 671 | A single ident or a list of idents to use in sending. |
|
670 | 672 | """ |
|
671 | 673 | to_send = [] |
|
672 | 674 | if isinstance(ident, bytes): |
|
673 | 675 | ident = [ident] |
|
674 | 676 | if ident is not None: |
|
675 | 677 | to_send.extend(ident) |
|
676 | 678 | |
|
677 | 679 | to_send.append(DELIM) |
|
678 | 680 | to_send.append(self.sign(msg_list)) |
|
679 | 681 | to_send.extend(msg_list) |
|
680 | 682 | stream.send_multipart(to_send, flags, copy=copy) |
|
681 | 683 | |
|
682 | 684 | def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True): |
|
683 | 685 | """Receive and unpack a message. |
|
684 | 686 | |
|
685 | 687 | Parameters |
|
686 | 688 | ---------- |
|
687 | 689 | socket : ZMQStream or Socket |
|
688 | 690 | The socket or stream to use in receiving. |
|
689 | 691 | |
|
690 | 692 | Returns |
|
691 | 693 | ------- |
|
692 | 694 | [idents], msg |
|
693 | 695 | [idents] is a list of idents and msg is a nested message dict of |
|
694 | 696 | same format as self.msg returns. |
|
695 | 697 | """ |
|
696 | 698 | if isinstance(socket, ZMQStream): |
|
697 | 699 | socket = socket.socket |
|
698 | 700 | try: |
|
699 | 701 | msg_list = socket.recv_multipart(mode, copy=copy) |
|
700 | 702 | except zmq.ZMQError as e: |
|
701 | 703 | if e.errno == zmq.EAGAIN: |
|
702 | 704 | # We can convert EAGAIN to None as we know in this case |
|
703 | 705 | # recv_multipart won't return None. |
|
704 | 706 | return None,None |
|
705 | 707 | else: |
|
706 | 708 | raise |
|
707 | 709 | # split multipart message into identity list and message dict |
|
708 | 710 | # invalid large messages can cause very expensive string comparisons |
|
709 | 711 | idents, msg_list = self.feed_identities(msg_list, copy) |
|
710 | 712 | try: |
|
711 | 713 | return idents, self.unserialize(msg_list, content=content, copy=copy) |
|
712 | 714 | except Exception as e: |
|
713 | 715 | # TODO: handle it |
|
714 | 716 | raise e |
|
715 | 717 | |
|
716 | 718 | def feed_identities(self, msg_list, copy=True): |
|
717 | 719 | """Split the identities from the rest of the message. |
|
718 | 720 | |
|
719 | 721 | Feed until DELIM is reached, then return the prefix as idents and |
|
720 | 722 | remainder as msg_list. This is easily broken by setting an IDENT to DELIM, |
|
721 | 723 | but that would be silly. |
|
722 | 724 | |
|
723 | 725 | Parameters |
|
724 | 726 | ---------- |
|
725 | 727 | msg_list : a list of Message or bytes objects |
|
726 | 728 | The message to be split. |
|
727 | 729 | copy : bool |
|
728 | 730 | flag determining whether the arguments are bytes or Messages |
|
729 | 731 | |
|
730 | 732 | Returns |
|
731 | 733 | ------- |
|
732 | 734 | (idents, msg_list) : two lists |
|
733 | 735 | idents will always be a list of bytes, each of which is a ZMQ |
|
734 | 736 | identity. msg_list will be a list of bytes or zmq.Messages of the |
|
735 | 737 | form [HMAC,p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] and |
|
736 | 738 | should be unpackable/unserializable via self.unserialize at this |
|
737 | 739 | point. |
|
738 | 740 | """ |
|
739 | 741 | if copy: |
|
740 | 742 | idx = msg_list.index(DELIM) |
|
741 | 743 | return msg_list[:idx], msg_list[idx+1:] |
|
742 | 744 | else: |
|
743 | 745 | failed = True |
|
744 | 746 | for idx,m in enumerate(msg_list): |
|
745 | 747 | if m.bytes == DELIM: |
|
746 | 748 | failed = False |
|
747 | 749 | break |
|
748 | 750 | if failed: |
|
749 | 751 | raise ValueError("DELIM not in msg_list") |
|
750 | 752 | idents, msg_list = msg_list[:idx], msg_list[idx+1:] |
|
751 | 753 | return [m.bytes for m in idents], msg_list |
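
For example, with copied (bytes) frames, reusing the Session s from the earlier sketch:

    frames = [b'client-1', DELIM, b'sig', b'h', b'p', b'm', b'c']
    idents, rest = s.feed_identities(frames)
    assert idents == [b'client-1']
    assert rest == [b'sig', b'h', b'p', b'm', b'c']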
|
752 | 754 | |
|
753 | 755 | def _add_digest(self, signature): |
|
754 | 756 | """add a digest to history to protect against replay attacks""" |
|
755 | 757 | if self.digest_history_size == 0: |
|
756 | 758 | # no history, never add digests |
|
757 | 759 | return |
|
758 | 760 | |
|
759 | 761 | self.digest_history.add(signature) |
|
760 | 762 | if len(self.digest_history) > self.digest_history_size: |
|
761 | 763 | # threshold reached, cull 10% |
|
762 | 764 | self._cull_digest_history() |
|
763 | 765 | |
|
764 | 766 | def _cull_digest_history(self): |
|
765 | 767 | """cull the digest history |
|
766 | 768 | |
|
767 | 769 | Removes a randomly selected 10% of the digest history |
|
768 | 770 | """ |
|
769 | 771 | current = len(self.digest_history) |
|
770 | 772 | n_to_cull = max(int(current // 10), current - self.digest_history_size) |
|
771 | 773 | if n_to_cull >= current: |
|
772 | 774 | self.digest_history = set() |
|
773 | 775 | return |
|
774 | 776 | to_cull = random.sample(self.digest_history, n_to_cull) |
|
775 | 777 | self.digest_history.difference_update(to_cull) |
|
776 | 778 | |
|
777 | 779 | def unserialize(self, msg_list, content=True, copy=True): |
|
778 | 780 | """Unserialize a msg_list to a nested message dict. |
|
779 | 781 | |
|
780 | 782 | This is roughly the inverse of serialize. The serialize/unserialize |
|
781 | 783 | methods work with full message lists, whereas pack/unpack work with |
|
782 | 784 | the individual message parts in the message list. |
|
783 | 785 | |
|
784 | 786 | Parameters |
|
785 | 787 | ---------- |
|
786 | 788 | msg_list : list of bytes or Message objects |
|
787 | 789 | The list of message parts of the form [HMAC,p_header,p_parent, |
|
788 | 790 | p_metadata,p_content,buffer1,buffer2,...]. |
|
789 | 791 | content : bool (True) |
|
790 | 792 | Whether to unpack the content dict (True), or leave it packed |
|
791 | 793 | (False). |
|
792 | 794 | copy : bool (True) |
|
793 | 795 | Whether to return the bytes (True), or the non-copying Message |
|
794 | 796 | object in each place (False). |
|
795 | 797 | |
|
796 | 798 | Returns |
|
797 | 799 | ------- |
|
798 | 800 | msg : dict |
|
799 | 801 | The nested message dict with top-level keys [header, parent_header, |

800 | 802 | metadata, content, buffers]. |
|
801 | 803 | """ |
|
802 | 804 | minlen = 5 |
|
803 | 805 | message = {} |
|
804 | 806 | if not copy: |
|
805 | 807 | for i in range(minlen): |
|
806 | 808 | msg_list[i] = msg_list[i].bytes |
|
807 | 809 | if self.auth is not None: |
|
808 | 810 | signature = msg_list[0] |
|
809 | 811 | if not signature: |
|
810 | 812 | raise ValueError("Unsigned Message") |
|
811 | 813 | if signature in self.digest_history: |
|
812 | 814 | raise ValueError("Duplicate Signature: %r" % signature) |
|
813 | 815 | self._add_digest(signature) |
|
814 | 816 | check = self.sign(msg_list[1:5]) |
|
815 | 817 | if not signature == check: |
|
816 | 818 | raise ValueError("Invalid Signature: %r" % signature) |
|
817 | 819 | if not len(msg_list) >= minlen: |
|
818 | 820 | raise TypeError("malformed message, must have at least %i elements"%minlen) |
|
819 | 821 | header = self.unpack(msg_list[1]) |
|
820 | 822 | message['header'] = extract_dates(header) |
|
821 | 823 | message['msg_id'] = header['msg_id'] |
|
822 | 824 | message['msg_type'] = header['msg_type'] |
|
823 | 825 | message['parent_header'] = extract_dates(self.unpack(msg_list[2])) |
|
824 | 826 | message['metadata'] = self.unpack(msg_list[3]) |
|
825 | 827 | if content: |
|
826 | 828 | message['content'] = self.unpack(msg_list[4]) |
|
827 | 829 | else: |
|
828 | 830 | message['content'] = msg_list[4] |
|
829 | 831 | |
|
830 | 832 | message['buffers'] = msg_list[5:] |
|
831 | 833 | return message |
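
A full round trip through serialize / feed_identities / unserialize (a sketch; both ends must share the key, and the replay check means a given signature is accepted only once):

    s = Session(key=b'secret')
    wire = s.serialize(s.msg('execute_request', content={'code': '1+1'}))
    idents, frames = s.feed_identities(wire)
    msg = s.unserialize(frames)
    assert msg['msg_type'] == 'execute_request'
    assert msg['content'] == {'code': '1+1'}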
|
832 | 834 | |
|
833 | 835 | def test_msg2obj(): |
|
834 | 836 | am = dict(x=1) |
|
835 | 837 | ao = Message(am) |
|
836 | 838 | assert ao.x == am['x'] |
|
837 | 839 | |
|
838 | 840 | am['y'] = dict(z=1) |
|
839 | 841 | ao = Message(am) |
|
840 | 842 | assert ao.y.z == am['y']['z'] |
|
841 | 843 | |
|
842 | 844 | k1, k2 = 'y', 'z' |
|
843 | 845 | assert ao[k1][k2] == am[k1][k2] |
|
844 | 846 | |
|
845 | 847 | am2 = dict(ao) |
|
846 | 848 | assert am['x'] == am2['x'] |
|
847 | 849 | assert am['y']['z'] == am2['y']['z'] |
|
848 | 850 |
@@ -1,624 +1,624 @@
|
1 | 1 | """A ZMQ-based subclass of InteractiveShell. |
|
2 | 2 | |
|
3 | 3 | This code is meant to ease the refactoring of the base InteractiveShell into |
|
4 | 4 | something with a cleaner architecture for 2-process use, without actually |
|
5 | 5 | breaking InteractiveShell itself. So we're doing something a bit ugly, where |
|
6 | 6 | we subclass and override what we want to fix. Once this is working well, we |
|
7 | 7 | can go back to the base class and refactor the code for a cleaner inheritance |
|
8 | 8 | implementation that doesn't rely on so much monkeypatching. |
|
9 | 9 | |
|
10 | 10 | But this lets us maintain a fully working IPython as we develop the new |
|
11 | 11 | machinery. This should thus be thought of as scaffolding. |
|
12 | 12 | """ |
|
13 | 13 | #----------------------------------------------------------------------------- |
|
14 | 14 | # Imports |
|
15 | 15 | #----------------------------------------------------------------------------- |
|
16 | 16 | from __future__ import print_function |
|
17 | 17 | |
|
18 | 18 | # Stdlib |
|
19 | 19 | import os |
|
20 | 20 | import sys |
|
21 | 21 | import time |
|
22 | 22 | |
|
23 | 23 | # System library imports |
|
24 | 24 | from zmq.eventloop import ioloop |
|
25 | 25 | |
|
26 | 26 | # Our own |
|
27 | 27 | from IPython.core.interactiveshell import ( |
|
28 | 28 | InteractiveShell, InteractiveShellABC |
|
29 | 29 | ) |
|
30 | 30 | from IPython.core import page |
|
31 | 31 | from IPython.core.autocall import ZMQExitAutocall |
|
32 | 32 | from IPython.core.displaypub import DisplayPublisher |
|
33 | 33 | from IPython.core.error import UsageError |
|
34 | 34 | from IPython.core.magics import MacroToEdit, CodeMagics |
|
35 | 35 | from IPython.core.magic import magics_class, line_magic, Magics |
|
36 | 36 | from IPython.core.payloadpage import install_payload_page |
|
37 | 37 | from IPython.display import display, Javascript |
|
38 | 38 | from IPython.kernel.inprocess.socket import SocketABC |
|
39 | 39 | from IPython.kernel import ( |
|
40 | 40 | get_connection_file, get_connection_info, connect_qtconsole |
|
41 | 41 | ) |
|
42 | 42 | from IPython.testing.skipdoctest import skip_doctest |
|
43 | 43 | from IPython.utils import openpy |
|
44 | 44 | from IPython.utils.jsonutil import json_clean, encode_images |
|
45 | 45 | from IPython.utils.process import arg_split |
|
46 | 46 | from IPython.utils import py3compat |
|
47 | 47 | from IPython.utils.py3compat import unicode_type |
|
48 | 48 | from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes, Any |
|
49 | 49 | from IPython.utils.warn import error |
|
50 | 50 | from IPython.kernel.zmq.displayhook import ZMQShellDisplayHook |
|
51 | 51 | from IPython.kernel.zmq.datapub import ZMQDataPublisher |
|
52 | 52 | from IPython.kernel.zmq.session import extract_header |
|
53 | 53 | from IPython.kernel.comm import CommManager |
|
54 | 54 | from .session import Session |
|
55 | 55 | |
|
56 | 56 | #----------------------------------------------------------------------------- |
|
57 | 57 | # Functions and classes |
|
58 | 58 | #----------------------------------------------------------------------------- |
|
59 | 59 | |
|
60 | 60 | class ZMQDisplayPublisher(DisplayPublisher): |
|
61 | 61 | """A display publisher that publishes data using a ZeroMQ PUB socket.""" |
|
62 | 62 | |
|
63 | 63 | session = Instance(Session) |
|
64 | 64 | pub_socket = Instance(SocketABC) |
|
65 | 65 | parent_header = Dict({}) |
|
66 | 66 | topic = CBytes(b'display_data') |
|
67 | 67 | |
|
68 | 68 | def set_parent(self, parent): |
|
69 | 69 | """Set the parent for outbound messages.""" |
|
70 | 70 | self.parent_header = extract_header(parent) |
|
71 | 71 | |
|
72 | 72 | def _flush_streams(self): |
|
73 | 73 | """flush IO Streams prior to display""" |
|
74 | 74 | sys.stdout.flush() |
|
75 | 75 | sys.stderr.flush() |
|
76 | 76 | |
|
77 | 77 | def publish(self, source, data, metadata=None): |
|
78 | 78 | self._flush_streams() |
|
79 | 79 | if metadata is None: |
|
80 | 80 | metadata = {} |
|
81 | 81 | self._validate_data(source, data, metadata) |
|
82 | 82 | content = {} |
|
83 | 83 | content['source'] = source |
|
84 | 84 | content['data'] = encode_images(data) |
|
85 | 85 | content['metadata'] = metadata |
|
86 | 86 | self.session.send( |
|
87 | 87 | self.pub_socket, u'display_data', json_clean(content), |
|
88 | 88 | parent=self.parent_header, ident=self.topic, |
|
89 | 89 | ) |
|
90 | 90 | |
|
91 | 91 | def clear_output(self, wait=False): |
|
92 | 92 | content = dict(wait=wait) |
|
93 | 93 | |
|
94 | 94 | print('\r', file=sys.stdout, end='') |
|
95 | 95 | print('\r', file=sys.stderr, end='') |
|
96 | 96 | self._flush_streams() |
|
97 | 97 | |
|
98 | 98 | self.session.send( |
|
99 | 99 | self.pub_socket, u'clear_output', content, |
|
100 | 100 | parent=self.parent_header, ident=self.topic, |
|
101 | 101 | ) |
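
From user code this path is typically reached through the display machinery; a sketch (assuming the IPython.display helpers of this era forward wait through to the publisher):

    import time
    from IPython.display import display, clear_output

    for i in range(3):
        clear_output(wait=True)   # frontends clear just before the next output
        display(i)
        time.sleep(0.5)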
|
102 | 102 | |
|
103 | 103 | @magics_class |
|
104 | 104 | class KernelMagics(Magics): |
|
105 | 105 | #------------------------------------------------------------------------ |
|
106 | 106 | # Magic overrides |
|
107 | 107 | #------------------------------------------------------------------------ |
|
108 | 108 | # Once the base class stops inheriting from magic, this code needs to be |
|
109 | 109 | # moved into a separate machinery as well. For now, at least isolate here |
|
110 | 110 | # the magics which this class needs to implement differently from the base |
|
111 | 111 | # class, or that are unique to it. |
|
112 | 112 | |
|
113 | 113 | @line_magic |
|
114 | 114 | def doctest_mode(self, parameter_s=''): |
|
115 | 115 | """Toggle doctest mode on and off. |
|
116 | 116 | |
|
117 | 117 | This mode is intended to make IPython behave as much as possible like a |
|
118 | 118 | plain Python shell, from the perspective of how its prompts, exceptions |
|
119 | 119 | and output look. This makes it easy to copy and paste parts of a |
|
120 | 120 | session into doctests. It does so by: |
|
121 | 121 | |
|
122 | 122 | - Changing the prompts to the classic ``>>>`` ones. |
|
123 | 123 | - Changing the exception reporting mode to 'Plain'. |
|
124 | 124 | - Disabling pretty-printing of output. |
|
125 | 125 | |
|
126 | 126 | Note that IPython also supports the pasting of code snippets that have |
|
127 | 127 | leading '>>>' and '...' prompts in them. This means that you can paste |
|
128 | 128 | doctests from files or docstrings (even if they have leading |
|
129 | 129 | whitespace), and the code will execute correctly. You can then use |
|
130 | 130 | '%history -t' to see the translated history; this will give you the |
|
131 | 131 | input after removal of all the leading prompts and whitespace, which |
|
132 | 132 | can be pasted back into an editor. |
|
133 | 133 | |
|
134 | 134 | With these features, you can switch into this mode easily whenever you |
|
135 | 135 | need to do testing and changes to doctests, without having to leave |
|
136 | 136 | your existing IPython session. |
|
137 | 137 | """ |
|
138 | 138 | |
|
139 | 139 | from IPython.utils.ipstruct import Struct |
|
140 | 140 | |
|
141 | 141 | # Shorthands |
|
142 | 142 | shell = self.shell |
|
143 | 143 | disp_formatter = self.shell.display_formatter |
|
144 | 144 | ptformatter = disp_formatter.formatters['text/plain'] |
|
145 | 145 | # dstore is a data store kept in the instance metadata bag to track any |
|
146 | 146 | # changes we make, so we can undo them later. |
|
147 | 147 | dstore = shell.meta.setdefault('doctest_mode', Struct()) |
|
148 | 148 | save_dstore = dstore.setdefault |
|
149 | 149 | |
|
150 | 150 | # save a few values we'll need to recover later |
|
151 | 151 | mode = save_dstore('mode', False) |
|
152 | 152 | save_dstore('rc_pprint', ptformatter.pprint) |
|
153 | 153 | save_dstore('rc_active_types',disp_formatter.active_types) |
|
154 | 154 | save_dstore('xmode', shell.InteractiveTB.mode) |
|
155 | 155 | |
|
156 | 156 | if mode == False: |
|
157 | 157 | # turn on |
|
158 | 158 | ptformatter.pprint = False |
|
159 | 159 | disp_formatter.active_types = ['text/plain'] |
|
160 | 160 | shell.magic('xmode Plain') |
|
161 | 161 | else: |
|
162 | 162 | # turn off |
|
163 | 163 | ptformatter.pprint = dstore.rc_pprint |
|
164 | 164 | disp_formatter.active_types = dstore.rc_active_types |
|
165 | 165 | shell.magic("xmode " + dstore.xmode) |
|
166 | 166 | |
|
167 | 167 | # Store new mode and inform on console |
|
168 | 168 | dstore.mode = bool(1-int(mode)) |
|
169 | 169 | mode_label = ['OFF','ON'][dstore.mode] |
|
170 | 170 | print('Doctest mode is:', mode_label) |
|
171 | 171 | |
|
172 | 172 | # Send the payload back so that clients can modify their prompt display |
|
173 | 173 | payload = dict( |
|
174 | 174 | source='doctest_mode', |
|
175 | 175 | mode=dstore.mode) |
|
176 | 176 | shell.payload_manager.write_payload(payload) |
|
177 | 177 | |
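The payload written above is the generic mechanism this class uses to talk back to its frontends: payloads ride along on the execute reply, and each frontend dispatches on the ``source`` field. A minimal, illustrative sketch of the frontend side (the handler below is ours, not part of IPython's API)::

    def handle_payloads(reply_msg):
        # 'payload' is a list of dicts; 'source' names the originating magic
        for payload in reply_msg['content'].get('payload', []):
            if payload['source'] == 'doctest_mode':
                # a real frontend would switch its prompt style here
                print('doctest mode is now', 'ON' if payload['mode'] else 'OFF')

    # %doctest_mode produces a payload shaped like this:
    handle_payloads({'content': {'payload': [{'source': 'doctest_mode',
                                              'mode': True}]}})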
|
178 | 178 | |
|
179 | 179 | _find_edit_target = CodeMagics._find_edit_target |
|
180 | 180 | |
|
181 | 181 | @skip_doctest |
|
182 | 182 | @line_magic |
|
183 | 183 | def edit(self, parameter_s='', last_call=['','']): |
|
184 | 184 | """Bring up an editor and execute the resulting code. |
|
185 | 185 | |
|
186 | 186 | Usage: |
|
187 | 187 | %edit [options] [args] |
|
188 | 188 | |
|
189 | 189 | %edit runs an external text editor. You will need to set the command for |
|
190 | 190 | this editor via the ``TerminalInteractiveShell.editor`` option in your |
|
191 | 191 | configuration file before it will work. |
|
192 | 192 | |
|
193 | 193 | This command allows you to conveniently edit multi-line code right in |
|
194 | 194 | your IPython session. |
|
195 | 195 | |
|
196 | 196 | If called without arguments, %edit opens up an empty editor with a |
|
197 | 197 | temporary file and will execute the contents of this file when you |
|
198 | 198 | close it (don't forget to save it!). |
|
199 | 199 | |
|
200 | ||
|
201 | 200 | Options: |
|
202 | 201 | |
|
203 | -n <number>: open the editor at a specified line number. By default, | |
|
204 | the IPython editor hook uses the unix syntax 'editor +N filename', but | |
|
205 | you can configure this by providing your own modified hook if your | |
|
206 | favorite editor supports line-number specifications with a different | |
|
207 | syntax. | |
|
208 | ||
|
209 | -p: this will call the editor with the same data as the previous time | |
|
210 | it was used, regardless of how long ago (in your current session) it | |
|
211 | was. | |
|
212 | ||
|
213 | -r: use 'raw' input. This option only applies to input taken from the | |
|
214 | user's history. By default, the 'processed' history is used, so that | |
|
215 | magics are loaded in their transformed (valid Python) version. If | 
|
216 | this option is given, the raw input as typed at the command line is | 
|
217 | used instead. When you exit the editor, it will be executed by | |
|
218 | IPython's own processor. | |
|
219 | ||
|
220 | -x: do not execute the edited code immediately upon exit. This is | |
|
221 | mainly useful if you are editing programs which need to be called with | |
|
222 | command line arguments, which you can then do using %run. | |
|
223 | ||
|
202 | -n <number> | |
|
203 | Open the editor at a specified line number. By default, the IPython | |
|
204 | editor hook uses the unix syntax 'editor +N filename', but you can | |
|
205 | configure this by providing your own modified hook if your favorite | |
|
206 | editor supports line-number specifications with a different syntax. | |
|
207 | ||
|
208 | -p | |
|
209 | Call the editor with the same data as the previous time it was used, | |
|
210 | regardless of how long ago (in your current session) it was. | |
|
211 | ||
|
212 | -r | |
|
213 | Use 'raw' input. This option only applies to input taken from the | |
|
214 | user's history. By default, the 'processed' history is used, so that | |
|
215 | magics are loaded in their transformed (valid Python) version. If | 
|
216 | this option is given, the raw input as typed at the command line is | 
|
217 | used instead. When you exit the editor, it will be executed by | |
|
218 | IPython's own processor. | |
|
219 | ||
|
220 | -x | |
|
221 | Do not execute the edited code immediately upon exit. This is mainly | |
|
222 | useful if you are editing programs which need to be called with | |
|
223 | command line arguments, which you can then do using %run. | |
|
224 | 224 | |
|
225 | 225 | Arguments: |
|
226 | 226 | |
|
227 | 227 | If arguments are given, the following possibilities exist:
|
228 | 228 | |
|
229 | 229 | - The arguments are numbers or pairs of colon-separated numbers (like |
|
230 | 1 4:8 9). These are interpreted as lines of previous input to be | |
|
231 | loaded into the editor. The syntax is the same as for the %macro command. | 
|
230 | 1 4:8 9). These are interpreted as lines of previous input to be | |
|
231 | loaded into the editor. The syntax is the same as for the %macro command. | 
|
232 | 232 | |
|
233 | 233 | - If the argument doesn't start with a number, it is evaluated as a |
|
234 | variable and its contents loaded into the editor. You can thus edit | |
|
235 | any string which contains python code (including the result of | |
|
236 | previous edits). | |
|
234 | variable and its contents loaded into the editor. You can thus edit | |
|
235 | any string which contains python code (including the result of | |
|
236 | previous edits). | |
|
237 | 237 | |
|
238 | 238 | - If the argument is the name of an object (other than a string), |
|
239 | IPython will try to locate the file where it was defined and open the | |
|
240 | editor at the point where it is defined. You can use `%edit function` | |
|
241 | to load an editor exactly at the point where 'function' is defined, | |
|
242 | edit it and have the file be executed automatically. | |
|
239 | IPython will try to locate the file where it was defined and open the | |
|
240 | editor at the point where it is defined. You can use ``%edit function`` | |
|
241 | to load an editor exactly at the point where 'function' is defined, | |
|
242 | edit it and have the file be executed automatically. | |
|
243 | 243 | |
|
244 | If the object is a macro (see %macro for details), this opens up your | |
|
245 | specified editor with a temporary file containing the macro's data. | |
|
246 | Upon exit, the macro is reloaded with the contents of the file. | |
|
244 | If the object is a macro (see %macro for details), this opens up your | |
|
245 | specified editor with a temporary file containing the macro's data. | |
|
246 | Upon exit, the macro is reloaded with the contents of the file. | |
|
247 | 247 | |
|
248 | Note: opening at an exact line is only supported under Unix, and some | |
|
249 | editors (like kedit and gedit up to Gnome 2.8) do not understand the | |
|
250 | '+NUMBER' parameter necessary for this feature. Good editors like | |
|
251 | (X)Emacs, vi, jed, pico and joe all do. | |
|
248 | Note: opening at an exact line is only supported under Unix, and some | |
|
249 | editors (like kedit and gedit up to Gnome 2.8) do not understand the | |
|
250 | '+NUMBER' parameter necessary for this feature. Good editors like | |
|
251 | (X)Emacs, vi, jed, pico and joe all do. | |
|
252 | 252 | |
|
253 | 253 | - If the argument is not found as a variable, IPython will look for a |
|
254 | file with that name (adding .py if necessary) and load it into the | |
|
255 | editor. It will execute its contents with execfile() when you exit, | |
|
256 | loading any code in the file into your interactive namespace. | |
|
254 | file with that name (adding .py if necessary) and load it into the | |
|
255 | editor. It will execute its contents with execfile() when you exit, | |
|
256 | loading any code in the file into your interactive namespace. | |
|
257 | 257 | |
|
258 | 258 | After executing your code, %edit will return as output the code you |
|
259 | 259 | typed in the editor (except when it was an existing file). This way |
|
260 | 260 | you can reload the code in further invocations of %edit as a variable, |
|
261 | via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of | |
|
261 | via ``_<NUMBER>`` or ``Out[<NUMBER>]``, where <NUMBER> is the prompt number of | 
|
262 | 262 | the output. |
|
263 | 263 | |
|
264 | 264 | Note that %edit is also available through the alias %ed. |
|
265 | 265 | |
|
266 | 266 | This is an example of creating a simple function inside the editor and |
|
267 | then modifying it. First, start up the editor: | |
|
267 | then modifying it. First, start up the editor:: | |
|
268 | 268 | |
|
269 | In [1]: ed | |
|
270 | Editing... done. Executing edited code... | |
|
271 | Out[1]: 'def foo():\n print "foo() was defined in an editing session"\n' | 
|
269 | In [1]: ed | |
|
270 | Editing... done. Executing edited code... | |
|
271 | Out[1]: 'def foo():\n print "foo() was defined in an editing session"\n' | 
|
272 | 272 | |
|
273 | We can then call the function foo(): | |
|
273 | We can then call the function foo():: | |
|
274 | 274 | |
|
275 | In [2]: foo() | |
|
276 | foo() was defined in an editing session | |
|
275 | In [2]: foo() | |
|
276 | foo() was defined in an editing session | |
|
277 | 277 | |
|
278 | Now we edit foo. IPython automatically loads the editor with the | 

279 | (temporary) file where foo() was previously defined: | |
|
278 | Now we edit foo. IPython automatically loads the editor with the | |
|
279 | (temporary) file where foo() was previously defined:: | |
|
280 | 280 | |
|
281 | In [3]: ed foo | |
|
282 | Editing... done. Executing edited code... | |
|
281 | In [3]: ed foo | |
|
282 | Editing... done. Executing edited code... | |
|
283 | 283 | |
|
284 | And if we call foo() again we get the modified version: | |
|
284 | And if we call foo() again we get the modified version:: | |
|
285 | 285 | |
|
286 | In [4]: foo() | |
|
287 | foo() has now been changed! | |
|
286 | In [4]: foo() | |
|
287 | foo() has now been changed! | |
|
288 | 288 | |
|
289 | 289 | Here is an example of how to edit a code snippet successive |
|
290 | times. First we call the editor: | |
|
290 | times. First we call the editor:: | |
|
291 | 291 | |
|
292 | In [5]: ed | |
|
293 | Editing... done. Executing edited code... | |
|
294 | hello | |
|
295 | Out[5]: "print 'hello'\n" | 
|
292 | In [5]: ed | |
|
293 | Editing... done. Executing edited code... | |
|
294 | hello | |
|
295 | Out[5]: "print 'hello'\n" | 
|
296 | 296 | |
|
297 | Now we call it again with the previous output (stored in _): | |
|
297 | Now we call it again with the previous output (stored in _):: | |
|
298 | 298 | |
|
299 | In [6]: ed _ | |
|
300 | Editing... done. Executing edited code... | |
|
301 | hello world | |
|
302 | Out[6]: "print 'hello world'\n" | 
|
299 | In [6]: ed _ | |
|
300 | Editing... done. Executing edited code... | |
|
301 | hello world | |
|
302 | Out[6]: "print 'hello world'\n" | 
|
303 | 303 | |
|
304 | Now we call it with the output #8 (stored in _8, also as Out[8]): | |
|
304 | Now we call it with the output #8 (stored in ``_8``, also as Out[8]):: | |
|
305 | 305 | |
|
306 | In [7]: ed _8 | |
|
307 | Editing... done. Executing edited code... | |
|
308 | hello again | |
|
309 | Out[7]: "print 'hello again'\n" | 
|
306 | In [7]: ed _8 | |
|
307 | Editing... done. Executing edited code... | |
|
308 | hello again | |
|
309 | Out[7]: "print 'hello again'\n" | 
|
310 | 310 | """ |
|
311 | 311 | |
|
312 | 312 | opts,args = self.parse_options(parameter_s,'prn:') |
|
313 | 313 | |
|
314 | 314 | try: |
|
315 | 315 | filename, lineno, _ = CodeMagics._find_edit_target(self.shell, args, opts, last_call) |
|
316 | 316 | except MacroToEdit:
|
317 | 317 | # TODO: Implement macro editing over 2 processes. |
|
318 | 318 | print("Macro editing not yet implemented in 2-process model.") |
|
319 | 319 | return |
|
320 | 320 | |
|
321 | 321 | # Make sure we send to the client an absolute path, in case the working |
|
322 | 322 | # directory of client and kernel don't match |
|
323 | 323 | filename = os.path.abspath(filename) |
|
324 | 324 | |
|
325 | 325 | payload = { |
|
326 | 326 | 'source' : 'edit_magic', |
|
327 | 327 | 'filename' : filename, |
|
328 | 328 | 'line_number' : lineno |
|
329 | 329 | } |
|
330 | 330 | self.shell.payload_manager.write_payload(payload) |
|
331 | 331 | |
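The option spec ``'prn:'`` handed to parse_options above is getopt-style: ``p`` and ``r`` are bare flags, while the trailing colon makes ``n`` expect a value. A rough standalone equivalent built on the stdlib (``parse_edit_options`` is a hypothetical stand-in for IPython's wrapper)::

    import getopt

    def parse_edit_options(parameter_s):
        # '-p' and '-r' are boolean flags; '-n' takes an argument
        optlist, args = getopt.getopt(parameter_s.split(), 'prn:')
        opts = dict((name.lstrip('-'), value) for name, value in optlist)
        return opts, ' '.join(args)

    print(parse_edit_options('-n 42 myscript.py'))
    # -> ({'n': '42'}, 'myscript.py')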
|
332 | 332 | # A few magics that are adapted to the specifics of using pexpect and a |
|
333 | 333 | # remote terminal |
|
334 | 334 | |
|
335 | 335 | @line_magic |
|
336 | 336 | def clear(self, arg_s): |
|
337 | 337 | """Clear the terminal.""" |
|
338 | 338 | if os.name == 'posix': |
|
339 | 339 | self.shell.system("clear") |
|
340 | 340 | else: |
|
341 | 341 | self.shell.system("cls") |
|
342 | 342 | |
|
343 | 343 | if os.name == 'nt': |
|
344 | 344 | # This is the usual name on Windows
|
345 | 345 | cls = line_magic('cls')(clear) |
|
346 | 346 | |
|
347 | 347 | # Terminal pagers won't work over pexpect, but we do have our own pager |
|
348 | 348 | |
|
349 | 349 | @line_magic |
|
350 | 350 | def less(self, arg_s): |
|
351 | 351 | """Show a file through the pager. |
|
352 | 352 | |
|
353 | 353 | Files ending in .py are syntax-highlighted.""" |
|
354 | 354 | if not arg_s: |
|
355 | 355 | raise UsageError('Missing filename.') |
|
356 | 356 | |
|
357 | 357 | # Syntax-highlight Python source files; show anything else verbatim.
|
358 | 358 | if arg_s.endswith('.py'): |
|
359 | 359 | cont = self.shell.pycolorize(openpy.read_py_file(arg_s, skip_encoding_cookie=False)) |
|
360 | 360 | else: |
|
361 | 361 | cont = open(arg_s).read() |
|
362 | 362 | page.page(cont) |
|
363 | 363 | |
|
364 | 364 | more = line_magic('more')(less) |
|
365 | 365 | |
|
366 | 366 | # Man calls a pager, so we also need to redefine it |
|
367 | 367 | if os.name == 'posix': |
|
368 | 368 | @line_magic |
|
369 | 369 | def man(self, arg_s): |
|
370 | 370 | """Find the man page for the given command and display in pager.""" |
|
371 | 371 | page.page(self.shell.getoutput('man %s | col -b' % arg_s, |
|
372 | 372 | split=False)) |
|
373 | 373 | |
|
374 | 374 | @line_magic |
|
375 | 375 | def connect_info(self, arg_s): |
|
376 | 376 | """Print information for connecting other clients to this kernel |
|
377 | 377 | |
|
378 | 378 | It will print the contents of this session's connection file, as well as |
|
379 | 379 | shortcuts for local clients. |
|
380 | 380 | |
|
381 | 381 | In the simplest case, when called from the most recently launched kernel, |
|
382 | 382 | secondary clients can be connected simply with:
|
383 | 383 | |
|
384 | 384 | $> ipython <app> --existing |
|
385 | 385 | |
|
386 | 386 | """ |
|
387 | 387 | |
|
388 | 388 | from IPython.core.application import BaseIPythonApplication as BaseIPApp |
|
389 | 389 | |
|
390 | 390 | if BaseIPApp.initialized(): |
|
391 | 391 | app = BaseIPApp.instance() |
|
392 | 392 | security_dir = app.profile_dir.security_dir |
|
393 | 393 | profile = app.profile |
|
394 | 394 | else: |
|
395 | 395 | profile = 'default' |
|
396 | 396 | security_dir = '' |
|
397 | 397 | |
|
398 | 398 | try: |
|
399 | 399 | connection_file = get_connection_file() |
|
400 | 400 | info = get_connection_info(unpack=False) |
|
401 | 401 | except Exception as e: |
|
402 | 402 | error("Could not get connection info: %r" % e) |
|
403 | 403 | return |
|
404 | 404 | |
|
405 | 405 | # add profile flag for non-default profile |
|
406 | 406 | profile_flag = "--profile %s" % profile if profile != 'default' else "" |
|
407 | 407 | |
|
408 | 408 | # if it's in the security dir, truncate to basename |
|
409 | 409 | if security_dir == os.path.dirname(connection_file): |
|
410 | 410 | connection_file = os.path.basename(connection_file) |
|
411 | 411 | |
|
412 | 412 | |
|
413 | 413 | print(info + '\n')

414 | 414 | print("Paste the above JSON into a file, and connect with:\n"
|
415 | 415 | " $> ipython <app> --existing <file>\n" |
|
416 | 416 | "or, if you are local, you can connect with just:\n" |
|
417 | 417 | " $> ipython <app> --existing {0} {1}\n" |
|
418 | 418 | "or even just:\n" |
|
419 | 419 | " $> ipython <app> --existing {1}\n" |
|
420 | 420 | "if this is the most recent IPython session you have started.".format( |
|
421 | 421 | connection_file, profile_flag |
|
422 | 422 | ) |
|
423 | 423 | ) |
|
424 | 424 | |
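For reference, the JSON printed here is the kernel's connection file. Loaded into Python it looks roughly like the dict below (the ports and key are per-session values, invented for illustration; the exact field set varies a little across IPython versions)::

    connection_info = {
        'shell_port': 50160,   # request/reply
        'iopub_port': 50161,   # published output
        'stdin_port': 50162,   # raw_input requests
        'hb_port':    50163,   # heartbeat
        'ip': '127.0.0.1',
        'transport': 'tcp',
        'key': 'a0436f6c-1916-498b-8eb9-e81ab9368e84',  # HMAC signing key
    }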
|
425 | 425 | @line_magic |
|
426 | 426 | def qtconsole(self, arg_s): |
|
427 | 427 | """Open a qtconsole connected to this kernel. |
|
428 | 428 | |
|
429 | 429 | Useful for connecting a qtconsole to running notebooks, for better |
|
430 | 430 | debugging. |
|
431 | 431 | """ |
|
432 | 432 | |
|
433 | 433 | # %qtconsole should imply bind_kernel for engines: |
|
434 | 434 | try: |
|
435 | 435 | from IPython.parallel import bind_kernel |
|
436 | 436 | except ImportError: |
|
437 | 437 | # technically possible, because parallel has higher pyzmq min-version |
|
438 | 438 | pass |
|
439 | 439 | else: |
|
440 | 440 | bind_kernel() |
|
441 | 441 | |
|
442 | 442 | try: |
|
443 | 443 | p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix')) |
|
444 | 444 | except Exception as e: |
|
445 | 445 | error("Could not start qtconsole: %r" % e) |
|
446 | 446 | return |
|
447 | 447 | |
|
448 | 448 | @line_magic |
|
449 | 449 | def autosave(self, arg_s): |
|
450 | 450 | """Set the autosave interval in the notebook (in seconds). |
|
451 | 451 | |
|
452 | 452 | The default value is 120, or two minutes. |
|
453 | 453 | ``%autosave 0`` will disable autosave. |
|
454 | 454 | |
|
455 | 455 | This magic only has an effect when called from the notebook interface. |
|
456 | 456 | It has no effect when called in a startup file. |
|
457 | 457 | """ |
|
458 | 458 | |
|
459 | 459 | try: |
|
460 | 460 | interval = int(arg_s) |
|
461 | 461 | except ValueError: |
|
462 | 462 | raise UsageError("%%autosave requires an integer, got %r" % arg_s) |
|
463 | 463 | |
|
464 | 464 | # javascript wants milliseconds |
|
465 | 465 | milliseconds = 1000 * interval |
|
466 | 466 | display(Javascript("IPython.notebook.set_autosave_interval(%i)" % milliseconds), |
|
467 | 467 | include=['application/javascript'] |
|
468 | 468 | ) |
|
469 | 469 | if interval: |
|
470 | 470 | print("Autosaving every %i seconds" % interval) |
|
471 | 471 | else: |
|
472 | 472 | print("Autosave disabled") |
|
473 | 473 | |
|
474 | 474 | |
|
475 | 475 | class ZMQInteractiveShell(InteractiveShell): |
|
476 | 476 | """A subclass of InteractiveShell for ZMQ.""" |
|
477 | 477 | |
|
478 | 478 | displayhook_class = Type(ZMQShellDisplayHook) |
|
479 | 479 | display_pub_class = Type(ZMQDisplayPublisher) |
|
480 | 480 | data_pub_class = Type(ZMQDataPublisher) |
|
481 | 481 | kernel = Any() |
|
482 | 482 | parent_header = Any() |
|
483 | 483 | |
|
484 | 484 | # Override the traitlet in the parent class, because there's no point using |
|
485 | 485 | # readline for the kernel. Can be removed when the readline code is moved |
|
486 | 486 | # to the terminal frontend. |
|
487 | 487 | colors_force = CBool(True) |
|
488 | 488 | readline_use = CBool(False) |
|
489 | 489 | # autoindent has no meaning in a zmqshell, and attempting to enable it |
|
490 | 490 | # will print a warning in the absence of readline. |
|
491 | 491 | autoindent = CBool(False) |
|
492 | 492 | |
|
493 | 493 | exiter = Instance(ZMQExitAutocall) |
|
494 | 494 | def _exiter_default(self): |
|
495 | 495 | return ZMQExitAutocall(self) |
|
496 | 496 | |
|
497 | 497 | def _exit_now_changed(self, name, old, new): |
|
498 | 498 | """stop eventloop when exit_now fires""" |
|
499 | 499 | if new: |
|
500 | 500 | loop = ioloop.IOLoop.instance() |
|
501 | 501 | loop.add_timeout(time.time()+0.1, loop.stop) |
|
502 | 502 | |
|
503 | 503 | keepkernel_on_exit = None |
|
504 | 504 | |
|
505 | 505 | # Over ZeroMQ, GUI control isn't done with PyOS_InputHook as there is no |
|
506 | 506 | # interactive input being read; we provide event loop support in ipkernel |
|
507 | 507 | @staticmethod |
|
508 | 508 | def enable_gui(gui): |
|
509 | 509 | from .eventloops import enable_gui as real_enable_gui |
|
510 | 510 | try: |
|
511 | 511 | real_enable_gui(gui) |
|
512 | 512 | except ValueError as e: |
|
513 | 513 | raise UsageError("%s" % e) |
|
514 | 514 | |
|
515 | 515 | def init_environment(self): |
|
516 | 516 | """Configure the user's environment. |
|
517 | 517 | |
|
518 | 518 | """ |
|
519 | 519 | env = os.environ |
|
520 | 520 | # These two ensure 'ls' produces nice coloring on BSD-derived systems |
|
521 | 521 | env['TERM'] = 'xterm-color' |
|
522 | 522 | env['CLICOLOR'] = '1' |
|
523 | 523 | # Since normal pagers don't work at all (over pexpect we don't have |
|
524 | 524 | # single-key control of the subprocess), try to disable paging in |
|
525 | 525 | # subprocesses as much as possible. |
|
526 | 526 | env['PAGER'] = 'cat' |
|
527 | 527 | env['GIT_PAGER'] = 'cat' |
|
528 | 528 | |
|
529 | 529 | # And install the payload version of page. |
|
530 | 530 | install_payload_page() |
|
531 | 531 | |
|
532 | 532 | def auto_rewrite_input(self, cmd): |
|
533 | 533 | """Called to show the auto-rewritten input for autocall and friends. |
|
534 | 534 | |
|
535 | 535 | FIXME: this payload is currently not correctly processed by the |
|
536 | 536 | frontend. |
|
537 | 537 | """ |
|
538 | 538 | new = self.prompt_manager.render('rewrite') + cmd |
|
539 | 539 | payload = dict( |
|
540 | 540 | source='auto_rewrite_input', |
|
541 | 541 | transformed_input=new, |
|
542 | 542 | ) |
|
543 | 543 | self.payload_manager.write_payload(payload) |
|
544 | 544 | |
|
545 | 545 | def ask_exit(self): |
|
546 | 546 | """Engage the exit actions.""" |
|
547 | 547 | self.exit_now = True |
|
548 | 548 | payload = dict( |
|
549 | 549 | source='ask_exit', |
|
550 | 550 | exit=True, |
|
551 | 551 | keepkernel=self.keepkernel_on_exit, |
|
552 | 552 | ) |
|
553 | 553 | self.payload_manager.write_payload(payload) |
|
554 | 554 | |
|
555 | 555 | def _showtraceback(self, etype, evalue, stb): |
|
556 | 556 | |
|
557 | 557 | exc_content = { |
|
558 | 558 | u'traceback' : stb, |
|
559 | 559 | u'ename' : unicode_type(etype.__name__), |
|
560 | 560 | u'evalue' : py3compat.safe_unicode(evalue), |
|
561 | 561 | } |
|
562 | 562 | |
|
563 | 563 | dh = self.displayhook |
|
564 | 564 | # Send exception info over pub socket for other clients than the caller |
|
565 | 565 | # to pick up |
|
566 | 566 | topic = None |
|
567 | 567 | if dh.topic: |
|
568 | 568 | topic = dh.topic.replace(b'pyout', b'pyerr') |
|
569 | 569 | |
|
570 | 570 | exc_msg = dh.session.send(dh.pub_socket, u'pyerr', json_clean(exc_content), dh.parent_header, ident=topic) |
|
571 | 571 | |
|
572 | 572 | # FIXME - Hack: store exception info in shell object. Right now, the |
|
573 | 573 | # caller is reading this info after the fact, we need to fix this logic |
|
574 | 574 | # to remove this hack. Even uglier, we need to store the error status |
|
575 | 575 | # here, because in the main loop, the logic that sets it is being |
|
576 | 576 | # skipped because runlines swallows the exceptions. |
|
577 | 577 | exc_content[u'status'] = u'error' |
|
578 | 578 | self._reply_content = exc_content |
|
579 | 579 | # /FIXME |
|
580 | 580 | |
|
581 | 581 | return exc_content |
|
582 | 582 | |
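A client watching the IOPub socket therefore receives a ``pyerr`` message whose content carries the three keys assembled above (values invented for illustration); note that ``status`` is added only afterwards, for the stashed reply content, and is not part of the published message::

    pyerr_content = {
        'ename': 'ZeroDivisionError',
        'evalue': 'integer division or modulo by zero',
        'traceback': ['<list of formatted, ANSI-colored traceback lines>'],
    }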
|
583 | 583 | def set_next_input(self, text): |
|
584 | 584 | """Send the specified text to the frontend to be presented at the next |
|
585 | 585 | input cell.""" |
|
586 | 586 | payload = dict( |
|
587 | 587 | source='set_next_input', |
|
588 | 588 | text=text |
|
589 | 589 | ) |
|
590 | 590 | self.payload_manager.write_payload(payload) |
|
591 | 591 | |
|
592 | 592 | def set_parent(self, parent): |
|
593 | 593 | """Set the parent header for associating output with its triggering input""" |
|
594 | 594 | self.parent_header = parent |
|
595 | 595 | self.displayhook.set_parent(parent) |
|
596 | 596 | self.display_pub.set_parent(parent) |
|
597 | 597 | self.data_pub.set_parent(parent) |
|
598 | 598 | try: |
|
599 | 599 | sys.stdout.set_parent(parent) |
|
600 | 600 | except AttributeError: |
|
601 | 601 | pass |
|
602 | 602 | try: |
|
603 | 603 | sys.stderr.set_parent(parent) |
|
604 | 604 | except AttributeError: |
|
605 | 605 | pass |
|
606 | 606 | |
|
607 | 607 | def get_parent(self): |
|
608 | 608 | return self.parent_header |
|
609 | 609 | |
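The parent header handled here is the header of the request message that triggered the current code; stamping it onto the displayhook, the publishers, and the replaced stdout/stderr is what lets a frontend route output back to the cell that caused it. Illustratively (field values invented, loosely following the message spec of this era), given a ZMQInteractiveShell instance ``shell``::

    parent = {
        'msg_id': 'c7f6e9a2-0a1b-4c3d-9e8f-123456789abc',
        'msg_type': 'execute_request',
        'session': '8f2ab1c4-...',
        'username': 'kernel-user',
    }
    shell.set_parent(parent)  # subsequent display/stdout messages now
                              # carry this dict as their parent_header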
|
610 | 610 | #------------------------------------------------------------------------- |
|
611 | 611 | # Things related to magics |
|
612 | 612 | #------------------------------------------------------------------------- |
|
613 | 613 | |
|
614 | 614 | def init_magics(self): |
|
615 | 615 | super(ZMQInteractiveShell, self).init_magics() |
|
616 | 616 | self.register_magics(KernelMagics) |
|
617 | 617 | self.magics_manager.register_alias('ed', 'edit') |
|
618 | 618 | |
|
619 | 619 | def init_comms(self): |
|
620 | 620 | self.comm_manager = CommManager(shell=self, parent=self) |
|
621 | 621 | self.configurables.append(self.comm_manager) |
|
622 | 622 | |
|
623 | 623 | |
|
624 | 624 | InteractiveShellABC.register(ZMQInteractiveShell) |
@@ -1,486 +1,491 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Manage background (threaded) jobs conveniently from an interactive shell. |
|
3 | 3 | |
|
4 | 4 | This module provides a BackgroundJobManager class. This is the main class |
|
5 | 5 | meant for public usage, it implements an object which can create and manage |
|
6 | 6 | new background jobs. |
|
7 | 7 | |
|
8 | 8 | It also provides the actual job classes managed by these BackgroundJobManager |
|
9 | 9 | objects, see their docstrings below. |
|
10 | 10 | |
|
11 | 11 | |
|
12 | 12 | This system was inspired by discussions with B. Granger and the |
|
13 | 13 | BackgroundCommand class described in the book Python Scripting for |
|
14 | 14 | Computational Science, by H. P. Langtangen: |
|
15 | 15 | |
|
16 | 16 | http://folk.uio.no/hpl/scripting |
|
17 | 17 | |
|
18 | 18 | (although ultimately no code from this text was used, as IPython's system is a |
|
19 | 19 | separate implementation). |
|
20 | 20 | |
|
21 | 21 | An example notebook is provided in our documentation illustrating interactive |
|
22 | 22 | use of the system. |
|
23 | 23 | """ |
|
24 | 24 | from __future__ import print_function |
|
25 | 25 | |
|
26 | 26 | #***************************************************************************** |
|
27 | 27 | # Copyright (C) 2005-2006 Fernando Perez <fperez@colorado.edu> |
|
28 | 28 | # |
|
29 | 29 | # Distributed under the terms of the BSD License. The full license is in |
|
30 | 30 | # the file COPYING, distributed as part of this software. |
|
31 | 31 | #***************************************************************************** |
|
32 | 32 | |
|
33 | 33 | # Code begins |
|
34 | 34 | import sys |
|
35 | 35 | import threading |
|
36 | 36 | |
|
37 | 37 | from IPython import get_ipython |
|
38 | 38 | from IPython.core.ultratb import AutoFormattedTB |
|
39 | 39 | from IPython.utils.warn import error |
|
40 | 40 | from IPython.utils.py3compat import string_types |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | class BackgroundJobManager(object): |
|
44 | 44 | """Class to manage a pool of backgrounded threaded jobs. |
|
45 | 45 | |
|
46 | 46 | Below, we assume that 'jobs' is a BackgroundJobManager instance. |
|
47 | 47 | |
|
48 | 48 | Usage summary (see the method docstrings for details): |
|
49 | 49 | |
|
50 | 50 | jobs.new(...) -> start a new job |
|
51 | 51 | |
|
52 | 52 | jobs() or jobs.status() -> print status summary of all jobs |
|
53 | 53 | |
|
54 | 54 | jobs[N] -> returns job number N. |
|
55 | 55 | |
|
56 | 56 | foo = jobs[N].result -> assign to variable foo the result of job N |
|
57 | 57 | |
|
58 | 58 | jobs[N].traceback() -> print the traceback of dead job N |
|
59 | 59 | |
|
60 | 60 | jobs.remove(N) -> remove (finished) job N |
|
61 | 61 | |
|
62 | 62 | jobs.flush() -> remove all finished jobs |
|
63 | 63 | |
|
64 | 64 | As a convenience feature, BackgroundJobManager instances provide the |
|
65 | 65 | utility result and traceback methods which retrieve the corresponding |
|
66 | 66 | information from the jobs list: |
|
67 | 67 | |
|
68 | 68 | jobs.result(N) <--> jobs[N].result |
|
69 | 69 | jobs.traceback(N) <--> jobs[N].traceback() |
|
70 | 70 | |
|
71 | 71 | While this appears minor, it allows you to use tab completion |
|
72 | 72 | interactively on the job manager instance. |
|
73 | 73 | """ |
|
74 | 74 | |
|
75 | 75 | def __init__(self): |
|
76 | 76 | # Lists for job management, accessed via a property to ensure they're |
|
77 | 77 | # up to date.
|
78 | 78 | self._running = [] |
|
79 | 79 | self._completed = [] |
|
80 | 80 | self._dead = [] |
|
81 | 81 | # A dict of all jobs, so users can easily access any of them |
|
82 | 82 | self.all = {} |
|
83 | 83 | # For reporting |
|
84 | 84 | self._comp_report = [] |
|
85 | 85 | self._dead_report = [] |
|
86 | 86 | # Store status codes locally for fast lookups |
|
87 | 87 | self._s_created = BackgroundJobBase.stat_created_c |
|
88 | 88 | self._s_running = BackgroundJobBase.stat_running_c |
|
89 | 89 | self._s_completed = BackgroundJobBase.stat_completed_c |
|
90 | 90 | self._s_dead = BackgroundJobBase.stat_dead_c |
|
91 | 91 | |
|
92 | 92 | @property |
|
93 | 93 | def running(self): |
|
94 | 94 | self._update_status() |
|
95 | 95 | return self._running |
|
96 | 96 | |
|
97 | 97 | @property |
|
98 | 98 | def dead(self): |
|
99 | 99 | self._update_status() |
|
100 | 100 | return self._dead |
|
101 | 101 | |
|
102 | 102 | @property |
|
103 | 103 | def completed(self): |
|
104 | 104 | self._update_status() |
|
105 | 105 | return self._completed |
|
106 | 106 | |
|
107 | 107 | def new(self, func_or_exp, *args, **kwargs): |
|
108 | 108 | """Add a new background job and start it in a separate thread. |
|
109 | 109 | |
|
110 | 110 | There are two types of jobs which can be created: |
|
111 | 111 | |
|
112 | 112 | 1. Jobs based on expressions which can be passed to an eval() call. |
|
113 | 113 | The expression must be given as a string. For example: |
|
114 | 114 | |
|
115 | 115 | job_manager.new('myfunc(x,y,z=1)'[,glob[,loc]]) |
|
116 | 116 | |
|
117 | 117 | The given expression is passed to eval(), along with the optional |
|
118 | 118 | global/local dicts provided. If no dicts are given, they are |
|
119 | 119 | extracted automatically from the caller's frame. |
|
120 | 120 | |
|
121 | 121 | A Python statement is NOT a valid eval() expression. Basically, you |
|
122 | 122 | can only use as an eval() argument something which can go on the right |
|
123 | 123 | of an '=' sign and be assigned to a variable. |
|
124 | 124 | |
|
125 | 125 | For example, "print 'hello'" is not valid, but '2+3' is.
|
126 | 126 | |
|
127 | 127 | 2. Jobs given a function object, optionally passing additional |
|
128 | 128 | positional arguments: |
|
129 | 129 | |
|
130 | 130 | job_manager.new(myfunc, x, y) |
|
131 | 131 | |
|
132 | 132 | The function is called with the given arguments. |
|
133 | 133 | |
|
134 | 134 | If you need to pass keyword arguments to your function, you must |
|
135 | 135 | supply them as a dict named kw: |
|
136 | 136 | |
|
137 | 137 | job_manager.new(myfunc, x, y, kw=dict(z=1)) |
|
138 | 138 | |
|
139 | 139 | The reason for this asymmetry is that the new() method needs to
|
140 | 140 | maintain access to its own keywords, and this prevents name collisions |
|
141 | 141 | between arguments to new() and arguments to your own functions. |
|
142 | 142 | |
|
143 | 143 | In both cases, the result is stored in the job.result field of the |
|
144 | 144 | background job object. |
|
145 | 145 | |
|
146 | 146 | You can set `daemon` attribute of the thread by giving the keyword |
|
147 | 147 | argument `daemon`. |
|
148 | 148 | |
|
149 | 149 | Notes and caveats: |
|
150 | 150 | |
|
151 | 151 | 1. All threads running share the same standard output. Thus, if your |
|
152 | 152 | background jobs generate output, it will come out on top of whatever |
|
153 | 153 | you are currently writing. For this reason, background jobs are best |
|
154 | 154 | used with silent functions which simply return their output. |
|
155 | 155 | |
|
156 | 156 | 2. Threads also all work within the same global namespace, and this |
|
157 | 157 | system does not lock interactive variables. So if you send a job to the
|
158 | 158 | background which operates on a mutable object for a long time, and |
|
159 | 159 | start modifying that same mutable object interactively (or in another |
|
160 | 160 | backgrounded job), all sorts of bizarre behaviour will occur. |
|
161 | 161 | |
|
162 | 162 | 3. If a background job is spending a lot of time inside a C extension |
|
163 | 163 | module which does not release the Python Global Interpreter Lock |
|
164 | 164 | (GIL), this will block the IPython prompt. This is simply because the |
|
165 | 165 | Python interpreter can only switch between threads at Python |
|
166 | 166 | bytecodes. While the execution is inside C code, the interpreter must |
|
167 | 167 | simply wait unless the extension module releases the GIL. |
|
168 | 168 | |
|
169 | 169 | 4. There is no way, due to limitations in the Python threads library, |
|
170 | 170 | to kill a thread once it has started.""" |
|
171 | 171 | |
|
172 | 172 | if callable(func_or_exp): |
|
173 | 173 | kw = kwargs.get('kw',{}) |
|
174 | 174 | job = BackgroundJobFunc(func_or_exp,*args,**kw) |
|
175 | 175 | elif isinstance(func_or_exp, string_types): |
|
176 | 176 | if not args: |
|
177 | 177 | frame = sys._getframe(1) |
|
178 | 178 | glob, loc = frame.f_globals, frame.f_locals |
|
179 | 179 | elif len(args)==1: |
|
180 | 180 | glob = loc = args[0] |
|
181 | 181 | elif len(args)==2: |
|
182 | 182 | glob,loc = args |
|
183 | 183 | else: |
|
184 | 184 | raise ValueError( |
|
185 | 185 | 'Expression jobs take at most 2 args (globals,locals)') |
|
186 | 186 | job = BackgroundJobExpr(func_or_exp, glob, loc) |
|
187 | 187 | else: |
|
188 | 188 | raise TypeError('invalid args for new job') |
|
189 | 189 | |
|
190 | 190 | if kwargs.get('daemon', False): |
|
191 | 191 | job.daemon = True |
|
192 | 192 | job.num = len(self.all) # number jobs sequentially from 0
|
193 | 193 | self.running.append(job) |
|
194 | 194 | self.all[job.num] = job |
|
195 | 195 | print('Starting job # %s in a separate thread.' % job.num) |
|
196 | 196 | job.start() |
|
197 | 197 | return job |
|
198 | 198 | |
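A short interactive sketch of both job flavors (``time.sleep`` stands in for real work; the import assumes the module's usual location under IPython.lib)::

    import time
    from IPython.lib.backgroundjobs import BackgroundJobManager

    jobs = BackgroundJobManager()
    jobs.new('sum(range(10 ** 6))')       # expression job, eval()'d in a thread
    jobs.new(time.sleep, 2)               # function job with a positional arg
    jobs.new(time.sleep, 2, daemon=True)  # run in a daemon thread

    jobs.status()    # summary of running/completed/dead jobs
    jobs[0].result   # result of the first job, once it finishes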
|
199 | 199 | def __getitem__(self, job_key): |
|
200 | 200 | num = job_key if isinstance(job_key, int) else job_key.num |
|
201 | 201 | return self.all[num] |
|
202 | 202 | |
|
203 | 203 | def __call__(self): |
|
204 | 204 | """An alias to self.status(), |
|
205 | 205 | |
|
206 | 206 | This allows you to simply call a job manager instance much like the |
|
207 | 207 | Unix `jobs` shell command.""" |
|
208 | 208 | |
|
209 | 209 | return self.status() |
|
210 | 210 | |
|
211 | 211 | def _update_status(self): |
|
212 | 212 | """Update the status of the job lists. |
|
213 | 213 | |
|
214 | 214 | This method moves finished jobs to one of two lists: |
|
215 | 215 | - self.completed: jobs which completed successfully |
|
216 | 216 | - self.dead: jobs which finished but died. |
|
217 | 217 | |
|
218 | 218 | It also copies those jobs to corresponding _report lists. These lists |
|
219 | 219 | are used to report jobs completed/dead since the last update, and are |
|
220 | 220 | then cleared by the reporting function after each call.""" |
|
221 | 221 | |
|
222 | 222 | # Status codes |
|
223 | 223 | srun, scomp, sdead = self._s_running, self._s_completed, self._s_dead |
|
224 | 224 | # State lists, use the actual lists b/c the public names are properties |
|
225 | 225 | # that call this very function on access |
|
226 | 226 | running, completed, dead = self._running, self._completed, self._dead |
|
227 | 227 | |
|
228 | 228 | # Now, update all state lists |
|
229 | 229 | for num, job in enumerate(running): |
|
230 | 230 | stat = job.stat_code |
|
231 | 231 | if stat == srun: |
|
232 | 232 | continue |
|
233 | 233 | elif stat == scomp: |
|
234 | 234 | completed.append(job) |
|
235 | 235 | self._comp_report.append(job) |
|
236 | 236 | running[num] = False |
|
237 | 237 | elif stat == sdead: |
|
238 | 238 | dead.append(job) |
|
239 | 239 | self._dead_report.append(job) |
|
240 | 240 | running[num] = False |
|
241 | 241 | # Remove dead/completed jobs from running list |
|
242 | 242 | running[:] = filter(None, running) |
|
243 | 243 | |
|
244 | 244 | def _group_report(self,group,name): |
|
245 | 245 | """Report summary for a given job group. |
|
246 | 246 | |
|
247 | 247 | Return True if the group had any elements.""" |
|
248 | 248 | |
|
249 | 249 | if group: |
|
250 | 250 | print('%s jobs:' % name) |
|
251 | 251 | for job in group: |
|
252 | 252 | print('%s : %s' % (job.num,job)) |
|
253 | 253 | print() |
|
254 | 254 | return True |
|
255 | 255 | |
|
256 | 256 | def _group_flush(self,group,name): |
|
257 | 257 | """Flush a given job group |
|
258 | 258 | |
|
259 | 259 | Return True if the group had any elements.""" |
|
260 | 260 | |
|
261 | 261 | njobs = len(group) |
|
262 | 262 | if njobs: |
|
263 | 263 | plural = '' if njobs == 1 else 's'
|
264 | 264 | print('Flushing %s %s job%s.' % (njobs,name,plural)) |
|
265 | 265 | group[:] = [] |
|
266 | 266 | return True |
|
267 | 267 | |
|
268 | 268 | def _status_new(self): |
|
269 | 269 | """Print the status of newly finished jobs. |
|
270 | 270 | |
|
271 | 271 | Return True if any new jobs are reported. |
|
272 | 272 | |
|
273 | 273 | This call resets its own state every time, so it only reports jobs |
|
274 | 274 | which have finished since the last time it was called.""" |
|
275 | 275 | |
|
276 | 276 | self._update_status() |
|
277 | 277 | new_comp = self._group_report(self._comp_report, 'Completed') |
|
278 | 278 | new_dead = self._group_report(self._dead_report, |
|
279 | 279 | 'Dead, call jobs.traceback() for details') |
|
280 | 280 | self._comp_report[:] = [] |
|
281 | 281 | self._dead_report[:] = [] |
|
282 | 282 | return new_comp or new_dead |
|
283 | 283 | |
|
284 | 284 | def status(self,verbose=0): |
|
285 | 285 | """Print a status of all jobs currently being managed.""" |
|
286 | 286 | |
|
287 | 287 | self._update_status() |
|
288 | 288 | self._group_report(self.running,'Running') |
|
289 | 289 | self._group_report(self.completed,'Completed') |
|
290 | 290 | self._group_report(self.dead,'Dead') |
|
291 | 291 | # Also flush the report queues |
|
292 | 292 | self._comp_report[:] = [] |
|
293 | 293 | self._dead_report[:] = [] |
|
294 | 294 | |
|
295 | 295 | def remove(self,num): |
|
296 | 296 | """Remove a finished (completed or dead) job.""" |
|
297 | 297 | |
|
298 | 298 | try: |
|
299 | 299 | job = self.all[num] |
|
300 | 300 | except KeyError: |
|
301 | 301 | error('Job #%s not found' % num) |
|
302 | 302 | else: |
|
303 | 303 | stat_code = job.stat_code |
|
304 | 304 | if stat_code == self._s_running: |
|
305 | 305 | error('Job #%s is still running; it cannot be removed.' % num)
|
306 | 306 | return |
|
307 | 307 | elif stat_code == self._s_completed: |
|
308 | 308 | self.completed.remove(job) |
|
309 | 309 | elif stat_code == self._s_dead: |
|
310 | 310 | self.dead.remove(job) |
|
311 | 311 | |
|
312 | 312 | def flush(self): |
|
313 | 313 | """Flush all finished jobs (completed and dead) from lists. |
|
314 | 314 | |
|
315 | 315 | Running jobs are never flushed. |
|
316 | 316 | |
|
317 | 317 | It first calls _status_new(), to update info. If any jobs have |
|
318 | 318 | completed since the last _status_new() call, the flush operation |
|
319 | 319 | aborts.""" |
|
320 | 320 | |
|
321 | 321 | # Remove the finished jobs from the master dict |
|
322 | 322 | alljobs = self.all |
|
323 | 323 | for job in self.completed+self.dead: |
|
324 | 324 | del alljobs[job.num]
|
325 | 325 | |
|
326 | 326 | # Now flush these lists completely |
|
327 | 327 | fl_comp = self._group_flush(self.completed, 'Completed') |
|
328 | 328 | fl_dead = self._group_flush(self.dead, 'Dead') |
|
329 | 329 | if not (fl_comp or fl_dead): |
|
330 | 330 | print('No jobs to flush.') |
|
331 | 331 | |
|
332 | 332 | def result(self,num): |
|
333 | 333 | """result(N) -> return the result of job N.""" |
|
334 | 334 | try: |
|
335 | 335 | return self.all[num].result |
|
336 | 336 | except KeyError: |
|
337 | 337 | error('Job #%s not found' % num) |
|
338 | 338 | |
|
339 | 339 | def _traceback(self, job): |
|
340 | 340 | num = job if isinstance(job, int) else job.num |
|
341 | 341 | try: |
|
342 | 342 | self.all[num].traceback() |
|
343 | 343 | except KeyError: |
|
344 | 344 | error('Job #%s not found' % num) |
|
345 | 345 | |
|
346 | 346 | def traceback(self, job=None): |
|
347 | 347 | if job is None: |
|
348 | 348 | self._update_status() |
|
349 | 349 | for deadjob in self.dead: |
|
350 | 350 | print("Traceback for: %r" % deadjob) |
|
351 | 351 | self._traceback(deadjob) |
|
352 | 352 | print() |
|
353 | 353 | else: |
|
354 | 354 | self._traceback(job) |
|
355 | 355 | |
|
356 | 356 | |
|
357 | 357 | class BackgroundJobBase(threading.Thread): |
|
358 | 358 | """Base class to build BackgroundJob classes. |
|
359 | 359 | |
|
360 | 360 | The derived classes must implement: |
|
361 | 361 | |
|
362 | 362 | - Their own __init__, since the one here raises NotImplementedError. The |
|
363 | derived constructor must call self._init() at the end, to provide common | |
|
364 | initialization. | |
|
363 | derived constructor must call self._init() at the end, to provide common | |
|
364 | initialization. | |
|
365 | 365 | |
|
366 | 366 | - A strform attribute used in calls to __str__. |
|
367 | 367 | |
|
368 | 368 | - A call() method, which will make the actual execution call and must |
|
369 | return a value to be held in the 'result' field of the job object. | 

369 | return a value to be held in the 'result' field of the job object. | |
|
370 | """ | |
|
370 | 371 | |
|
371 | 372 | # Class constants for status, in string and as numerical codes (when |
|
372 | 373 | # updating jobs lists, we don't want to do string comparisons). This will |
|
373 | 374 | # be done at every user prompt, so it has to be as fast as possible |
|
374 | 375 | stat_created = 'Created'; stat_created_c = 0 |
|
375 | 376 | stat_running = 'Running'; stat_running_c = 1 |
|
376 | 377 | stat_completed = 'Completed'; stat_completed_c = 2 |
|
377 | 378 | stat_dead = 'Dead (Exception), call jobs.traceback() for details' |
|
378 | 379 | stat_dead_c = -1 |
|
379 | 380 | |
|
380 | 381 | def __init__(self): |
|
382 | """Must be implemented in subclasses. | |
|
383 | ||
|
384 | Subclasses must call :meth:`_init` for standard initialisation. | |
|
385 | """ | |
|
381 | 386 | raise NotImplementedError("This class can not be instantiated directly.") |
|
382 | 387 | |
|
383 | 388 | def _init(self): |
|
384 | 389 | """Common initialization for all BackgroundJob objects""" |
|
385 | 390 | |
|
386 | 391 | for attr in ['call','strform']: |
|
387 | 392 | assert hasattr(self,attr), "Missing attribute <%s>" % attr |
|
388 | 393 | |
|
389 | 394 | # The num tag can be set by an external job manager |
|
390 | 395 | self.num = None |
|
391 | 396 | |
|
392 | 397 | self.status = BackgroundJobBase.stat_created |
|
393 | 398 | self.stat_code = BackgroundJobBase.stat_created_c |
|
394 | 399 | self.finished = False |
|
395 | 400 | self.result = '<BackgroundJob has not completed>' |
|
396 | 401 | |
|
397 | 402 | # reuse the ipython traceback handler if we can get to it, otherwise |
|
398 | 403 | # make a new one |
|
399 | 404 | try: |
|
400 | 405 | make_tb = get_ipython().InteractiveTB.text |
|
401 | 406 | except Exception:
|
402 | 407 | make_tb = AutoFormattedTB(mode = 'Context', |
|
403 | 408 | color_scheme='NoColor', |
|
404 | 409 | tb_offset = 1).text |
|
405 | 410 | # Note that the actual API for text() requires the three args to be |
|
406 | 411 | # passed in, so we wrap it in a simple lambda. |
|
407 | 412 | self._make_tb = lambda : make_tb(None, None, None) |
|
408 | 413 | |
|
409 | 414 | # Hold a formatted traceback if one is generated. |
|
410 | 415 | self._tb = None |
|
411 | 416 | |
|
412 | 417 | threading.Thread.__init__(self) |
|
413 | 418 | |
|
414 | 419 | def __str__(self): |
|
415 | 420 | return self.strform |
|
416 | 421 | |
|
417 | 422 | def __repr__(self): |
|
418 | 423 | return '<BackgroundJob #%d: %s>' % (self.num, self.strform) |
|
419 | 424 | |
|
420 | 425 | def traceback(self): |
|
421 | 426 | print(self._tb) |
|
422 | 427 | |
|
423 | 428 | def run(self): |
|
424 | 429 | try: |
|
425 | 430 | self.status = BackgroundJobBase.stat_running |
|
426 | 431 | self.stat_code = BackgroundJobBase.stat_running_c |
|
427 | 432 | self.result = self.call() |
|
428 | 433 | except: |
|
429 | 434 | self.status = BackgroundJobBase.stat_dead |
|
430 | 435 | self.stat_code = BackgroundJobBase.stat_dead_c |
|
431 | 436 | self.finished = None |
|
432 | 437 | self.result = ('<BackgroundJob died, call jobs.traceback() for details>') |
|
433 | 438 | self._tb = self._make_tb() |
|
434 | 439 | else: |
|
435 | 440 | self.status = BackgroundJobBase.stat_completed |
|
436 | 441 | self.stat_code = BackgroundJobBase.stat_completed_c |
|
437 | 442 | self.finished = True |
|
438 | 443 | |
|
439 | 444 | |
|
440 | 445 | class BackgroundJobExpr(BackgroundJobBase): |
|
441 | 446 | """Evaluate an expression as a background job (uses a separate thread).""" |
|
442 | 447 | |
|
443 | 448 | def __init__(self, expression, glob=None, loc=None): |
|
444 | 449 | """Create a new job from a string which can be fed to eval(). |
|
445 | 450 | |
|
446 | 451 | global/locals dicts can be provided, which will be passed to the eval |
|
447 | 452 | call.""" |
|
448 | 453 | |
|
449 | 454 | # fail immediately if the given expression can't be compiled |
|
450 | 455 | self.code = compile(expression,'<BackgroundJob compilation>','eval') |
|
451 | 456 | |
|
452 | 457 | glob = {} if glob is None else glob |
|
453 | 458 | loc = {} if loc is None else loc |
|
454 | 459 | self.expression = self.strform = expression |
|
455 | 460 | self.glob = glob |
|
456 | 461 | self.loc = loc |
|
457 | 462 | self._init() |
|
458 | 463 | |
|
459 | 464 | def call(self): |
|
460 | 465 | return eval(self.code,self.glob,self.loc) |
|
461 | 466 | |
|
462 | 467 | |
|
463 | 468 | class BackgroundJobFunc(BackgroundJobBase): |
|
464 | 469 | """Run a function call as a background job (uses a separate thread).""" |
|
465 | 470 | |
|
466 | 471 | def __init__(self, func, *args, **kwargs): |
|
467 | 472 | """Create a new job from a callable object. |
|
468 | 473 | |
|
469 | 474 | Any positional arguments and keyword args given to this constructor |
|
470 | 475 | after the initial callable are passed directly to it.""" |
|
471 | 476 | |
|
472 | 477 | if not callable(func): |
|
473 | 478 | raise TypeError( |
|
474 | 479 | 'first argument to BackgroundJobFunc must be callable') |
|
475 | 480 | |
|
476 | 481 | self.func = func |
|
477 | 482 | self.args = args |
|
478 | 483 | self.kwargs = kwargs |
|
479 | 484 | # The string form will only include the function passed, because |
|
480 | 485 | # generating string representations of the arguments is a potentially |
|
481 | 486 | # _very_ expensive operation (e.g. with large arrays). |
|
482 | 487 | self.strform = str(func) |
|
483 | 488 | self._init() |
|
484 | 489 | |
|
485 | 490 | def call(self): |
|
486 | 491 | return self.func(*self.args, **self.kwargs) |
@@ -1,270 +1,273 b'' | |||
|
1 | 1 | """This module defines Exporter, a highly configurable converter |
|
2 | 2 | that uses Jinja2 to export notebook files into different formats. |
|
3 | 3 | """ |
|
4 | 4 | |
|
5 | 5 | #----------------------------------------------------------------------------- |
|
6 | 6 | # Copyright (c) 2013, the IPython Development Team. |
|
7 | 7 | # |
|
8 | 8 | # Distributed under the terms of the Modified BSD License. |
|
9 | 9 | # |
|
10 | 10 | # The full license is in the file COPYING.txt, distributed with this software. |
|
11 | 11 | #----------------------------------------------------------------------------- |
|
12 | 12 | |
|
13 | 13 | #----------------------------------------------------------------------------- |
|
14 | 14 | # Imports |
|
15 | 15 | #----------------------------------------------------------------------------- |
|
16 | 16 | |
|
17 | 17 | from __future__ import print_function, absolute_import |
|
18 | 18 | |
|
19 | 19 | # Stdlib imports |
|
20 | 20 | import io |
|
21 | 21 | import os |
|
22 | 22 | import copy |
|
23 | 23 | import collections |
|
24 | 24 | import datetime |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | # IPython imports |
|
28 | 28 | from IPython.config.configurable import LoggingConfigurable |
|
29 | 29 | from IPython.config import Config |
|
30 | 30 | from IPython.nbformat import current as nbformat |
|
31 | 31 | from IPython.utils.traitlets import MetaHasTraits, Unicode, List |
|
32 | 32 | from IPython.utils.importstring import import_item |
|
33 | 33 | from IPython.utils import text, py3compat |
|
34 | 34 | |
|
35 | 35 | from IPython.nbconvert import preprocessors as nbpreprocessors |
|
36 | 36 | |
|
37 | 37 | |
|
38 | 38 | #----------------------------------------------------------------------------- |
|
39 | 39 | # Class |
|
40 | 40 | #----------------------------------------------------------------------------- |
|
41 | 41 | |
|
42 | 42 | class ResourcesDict(collections.defaultdict): |
|
43 | 43 | def __missing__(self, key): |
|
44 | 44 | return '' |
|
45 | 45 | |
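Because ``__missing__`` returns an empty string (without inserting it), lookups of absent resources are simply falsy rather than raising, so preprocessors and templates can probe optional entries freely::

    r = ResourcesDict()
    r['does-not-exist']   # -> '' (no KeyError, and nothing is stored)
    if not r['metadata']:
        r['metadata'] = {'name': 'Notebook'}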
|
46 | 46 | |
|
47 | 47 | class Exporter(LoggingConfigurable): |
|
48 | 48 | """ |
|
49 | 49 | Class containing methods that sequentially run a list of preprocessors on a |
|
50 | 50 | NotebookNode object and then return the modified NotebookNode object and |
|
51 | 51 | accompanying resources dict. |
|
52 | 52 | """ |
|
53 | 53 | |
|
54 | 54 | file_extension = Unicode( |
|
55 | 55 | 'txt', config=True, |
|
56 | 56 | help="Extension of the file that should be written to disk" |
|
57 | 57 | ) |
|
58 | 58 | |
|
59 | 59 | #Configurability, allows the user to easily add filters and preprocessors. |
|
60 | 60 | preprocessors = List(config=True, |
|
61 | 61 | help="""List of preprocessors, by name or namespace, to enable.""") |
|
62 | 62 | |
|
63 | 63 | _preprocessors = None |
|
64 | 64 | |
|
65 | 65 | default_preprocessors = List(['IPython.nbconvert.preprocessors.coalesce_streams', |
|
66 | 66 | 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', |
|
67 | 67 | 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', |
|
68 | 68 | 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', |
|
69 | 69 | 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', |
|
70 | 70 | 'IPython.nbconvert.preprocessors.LatexPreprocessor', |
|
71 | 71 | 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor'], |
|
72 | 72 | config=True, |
|
73 | 73 | help="""List of preprocessors available by default, by name, namespace, |
|
74 | 74 | instance, or type.""") |
|
75 | 75 | |
|
76 | 76 | |
|
77 | 77 | def __init__(self, config=None, **kw): |
|
78 | 78 | """ |
|
79 | 79 | Public constructor |
|
80 | 80 | |
|
81 | 81 | Parameters |
|
82 | 82 | ---------- |
|
83 | 83 | config : config |
|
84 | 84 | User configuration instance. |
|
85 | 85 | """ |
|
86 | 86 | with_default_config = self.default_config |
|
87 | 87 | if config: |
|
88 | 88 | with_default_config.merge(config) |
|
89 | 89 | |
|
90 | 90 | super(Exporter, self).__init__(config=with_default_config, **kw) |
|
91 | 91 | |
|
92 | 92 | self._init_preprocessors() |
|
93 | 93 | |
|
94 | 94 | |
|
95 | 95 | @property |
|
96 | 96 | def default_config(self): |
|
97 | 97 | return Config() |
|
98 | 98 | |
|
99 | 99 | |
|
100 | 100 | def from_notebook_node(self, nb, resources=None, **kw): |
|
101 | 101 | """ |
|
102 | 102 | Convert a notebook from a notebook node instance. |
|
103 | 103 | |
|
104 | 104 | Parameters |
|
105 | 105 | ---------- |
|
106 | nb : Notebook node | |
|
107 | resources : dict (**kw) | |
|
108 | of additional resources that can be accessed read/write by | |
|
109 | preprocessors. | |
|
106 | nb : :class:`~IPython.nbformat.v3.nbbase.NotebookNode` | |
|
107 | Notebook node | |
|
108 | resources : dict | |
|
109 | Additional resources that can be accessed read/write by | |
|
110 | preprocessors and filters. | |
|
111 | **kw | |
|
112 | Ignored by the base Exporter. | 
|
110 | 113 | """ |
|
111 | 114 | nb_copy = copy.deepcopy(nb) |
|
112 | 115 | resources = self._init_resources(resources) |
|
113 | 116 | |
|
114 | 117 | # Preprocess |
|
115 | 118 | nb_copy, resources = self._preprocess(nb_copy, resources) |
|
116 | 119 | |
|
117 | 120 | return nb_copy, resources |
|
118 | 121 | |
|
119 | 122 | |
|
120 | 123 | def from_filename(self, filename, resources=None, **kw): |
|
121 | 124 | """ |
|
122 | 125 | Convert a notebook from a notebook file. |
|
123 | 126 | |
|
124 | 127 | Parameters |
|
125 | 128 | ---------- |
|
126 | 129 | filename : str |
|
127 | 130 | Full filename of the notebook file to open and convert. |
|
128 | 131 | """ |
|
129 | 132 | |
|
130 | 133 | # Pull the metadata from the filesystem. |
|
131 | 134 | if resources is None: |
|
132 | 135 | resources = ResourcesDict() |
|
133 | 136 | if 'metadata' not in resources or resources['metadata'] == '':
|
134 | 137 | resources['metadata'] = ResourcesDict() |
|
135 | 138 | basename = os.path.basename(filename) |
|
136 | 139 | notebook_name = basename[:basename.rfind('.')] |
|
137 | 140 | resources['metadata']['name'] = notebook_name |
|
138 | 141 | |
|
139 | 142 | modified_date = datetime.datetime.fromtimestamp(os.path.getmtime(filename)) |
|
140 | 143 | resources['metadata']['modified_date'] = modified_date.strftime(text.date_format) |
|
141 | 144 | |
|
142 | 145 | with io.open(filename) as f: |
|
143 | 146 | return self.from_notebook_node(nbformat.read(f, 'json'), resources=resources, **kw) |
|
144 | 147 | |
|
145 | 148 | |
|
146 | 149 | def from_file(self, file_stream, resources=None, **kw): |
|
147 | 150 | """ |
|
148 | 151 | Convert a notebook from a notebook file. |
|
149 | 152 | |
|
150 | 153 | Parameters |
|
151 | 154 | ---------- |
|
152 | 155 | file_stream : file-like object |
|
153 | 156 | Notebook file-like object to convert. |
|
154 | 157 | """ |
|
155 | 158 | return self.from_notebook_node(nbformat.read(file_stream, 'json'), resources=resources, **kw) |
|
156 | 159 | |
|
157 | 160 | |
|
158 | 161 | def register_preprocessor(self, preprocessor, enabled=False): |
|
159 | 162 | """ |
|
160 | 163 | Register a preprocessor. |
|
161 | 164 | Preprocessors are classes that act upon the notebook before it is |
|
162 | 165 | passed into the Jinja templating engine. Preprocessors are also
|
163 | 166 | capable of passing additional information to the Jinja |
|
164 | 167 | templating engine. |
|
165 | 168 | |
|
166 | 169 | Parameters |
|
167 | 170 | ---------- |
|
168 | 171 | preprocessor : preprocessor class, instance, function, or dotted import string
|
169 | 172 | """ |
|
170 | 173 | if preprocessor is None: |
|
171 | 174 | raise TypeError('preprocessor') |
|
172 | 175 | isclass = isinstance(preprocessor, type) |
|
173 | 176 | constructed = not isclass |
|
174 | 177 | |
|
175 | 178 | # Handle the preprocessor's registration based on its type
|
176 | 179 | if constructed and isinstance(preprocessor, py3compat.string_types): |
|
177 | 180 | # Preprocessor is a string, import the namespace and recursively call |
|
178 | 181 | # this register_preprocessor method |
|
179 | 182 | preprocessor_cls = import_item(preprocessor) |
|
180 | 183 | return self.register_preprocessor(preprocessor_cls, enabled) |
|
181 | 184 | |
|
182 | 185 | if constructed and hasattr(preprocessor, '__call__'): |
|
183 | 186 | # Preprocessor is a function, no need to construct it. |
|
184 | 187 | # Register and return the preprocessor. |
|
185 | 188 | if enabled: |
|
186 | 189 | preprocessor.enabled = True |
|
187 | 190 | self._preprocessors.append(preprocessor) |
|
188 | 191 | return preprocessor |
|
189 | 192 | |
|
190 | 193 | elif isclass and isinstance(preprocessor, MetaHasTraits): |
|
191 | 194 | # Preprocessor is configurable. Make sure to pass in new default for |
|
192 | 195 | # the enabled flag if one was specified. |
|
193 | 196 | self.register_preprocessor(preprocessor(parent=self), enabled) |
|
194 | 197 | |
|
195 | 198 | elif isclass: |
|
196 | 199 | # Preprocessor is not configurable, construct it |
|
197 | 200 | self.register_preprocessor(preprocessor(), enabled) |
|
198 | 201 | |
|
199 | 202 | else: |
|
200 | 203 | # Preprocessor is an instance of something without a __call__ |
|
201 | 204 | # attribute. |
|
202 | 205 | raise TypeError('preprocessor') |
|
203 | 206 | |
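Per the dispatch above, anything matching the preprocessor calling convention ``(nb, resources) -> (nb, resources)`` can be registered: a dotted import string, a configurable class, or a plain callable. A sketch using a callable of our own (not part of nbconvert; it assumes the v3 notebook layout with worksheets)::

    def count_cells(nb, resources):
        # illustrative preprocessor: record the cell count for later stages
        resources['cell_count'] = sum(len(ws.cells) for ws in nb.worksheets)
        return nb, resources

    exporter = Exporter()
    exporter.register_preprocessor(count_cells, enabled=True)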
|
204 | 207 | |
|
205 | 208 | def _init_preprocessors(self): |
|
206 | 209 | """ |
|
207 | 210 | Register all of the preprocessors needed for this exporter, disabled |
|
208 | 211 | unless specified explicitly. |
|
209 | 212 | """ |
|
210 | 213 | if self._preprocessors is None: |
|
211 | 214 | self._preprocessors = [] |
|
212 | 215 | |
|
213 | 216 | #Load default preprocessors (not necessarily enabled by default).
|
214 | 217 | if self.default_preprocessors: |
|
215 | 218 | for preprocessor in self.default_preprocessors: |
|
216 | 219 | self.register_preprocessor(preprocessor) |
|
217 | 220 | |
|
218 | 221 | #Load user preprocessors. Enable by default. |
|
219 | 222 | if self.preprocessors: |
|
220 | 223 | for preprocessor in self.preprocessors: |
|
221 | 224 | self.register_preprocessor(preprocessor, enabled=True) |
|
222 | 225 | |
|
223 | 226 | |
|
224 | 227 | def _init_resources(self, resources): |
|
225 | 228 | |
|
226 | 229 | #Make sure the resources dict is of ResourcesDict type. |
|
227 | 230 | if resources is None: |
|
228 | 231 | resources = ResourcesDict() |
|
229 | 232 | if not isinstance(resources, ResourcesDict): |
|
230 | 233 | new_resources = ResourcesDict() |
|
231 | 234 | new_resources.update(resources) |
|
232 | 235 | resources = new_resources |
|
233 | 236 | |
|
234 | 237 | #Make sure the metadata extension exists in resources |
|
235 | 238 | if 'metadata' in resources: |
|
236 | 239 | if not isinstance(resources['metadata'], ResourcesDict): |
|
237 | 240 | resources['metadata'] = ResourcesDict(resources['metadata']) |
|
238 | 241 | else: |
|
239 | 242 | resources['metadata'] = ResourcesDict() |
|
240 | 243 | if not resources['metadata']['name']: |
|
241 | 244 | resources['metadata']['name'] = 'Notebook' |
|
242 | 245 | |
|
243 | 246 | #Set the output extension |
|
244 | 247 | resources['output_extension'] = self.file_extension |
|
245 | 248 | return resources |
|
246 | 249 | |
|
247 | 250 | |
|
248 | 251 | def _preprocess(self, nb, resources): |
|
249 | 252 | """ |
|
250 | 253 | Preprocess the notebook before passing it into the Jinja engine. |
|
251 | 254 | To preprocess the notebook is to apply all of the registered preprocessors, in order.

252 | 255 | 
|
253 | 256 | Parameters |
|
254 | 257 | ---------- |
|
255 | 258 | nb : notebook node

256 | 259 | Notebook that is being exported.

257 | 260 | resources : dict

258 | 261 | Additional resources that can be accessed read/write by preprocessors.
|
259 | 262 | """ |
|
260 | 263 | |
|
261 | 264 | # Do a copy.deepcopy first: preprocessors may mutate the notebook

262 | 265 | # and resources, and the caller's objects must stay intact.
|
263 | 266 | nbc = copy.deepcopy(nb) |
|
264 | 267 | resc = copy.deepcopy(resources) |
|
265 | 268 | |
|
266 | 269 | #Run each preprocessor on the notebook. Carry the output along |
|
267 | 270 | #to each preprocessor |
|
268 | 271 | for preprocessor in self._preprocessors: |
|
269 | 272 | nbc, resc = preprocessor(nbc, resc) |
|
270 | 273 | return nbc, resc |
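
A hedged sketch (not from the source) of the callable contract _preprocess relies on: every registered preprocessor is invoked as preprocessor(nb, resources) and must hand both objects back. The worksheets/cells layout assumes the nbformat v3 structure this codebase targets:

    def drop_empty_cells(nb, resources):  # hypothetical preprocessor function
        # Remove cells with neither input nor source so they never reach the template.
        for ws in nb.worksheets:
            ws.cells = [c for c in ws.cells if c.get('input') or c.get('source')]
        return nb, resources

    exporter.register_preprocessor(drop_empty_cells, enabled=True)
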
@@ -1,312 +1,313 b'' | |||
|
1 | 1 | """This module defines Exporter, a highly configurable converter |
|
2 | 2 | that uses Jinja2 to export notebook files into different formats. |
|
3 | 3 | """ |
|
4 | 4 | |
|
5 | 5 | #----------------------------------------------------------------------------- |
|
6 | 6 | # Copyright (c) 2013, the IPython Development Team. |
|
7 | 7 | # |
|
8 | 8 | # Distributed under the terms of the Modified BSD License. |
|
9 | 9 | # |
|
10 | 10 | # The full license is in the file COPYING.txt, distributed with this software. |
|
11 | 11 | #----------------------------------------------------------------------------- |
|
12 | 12 | |
|
13 | 13 | #----------------------------------------------------------------------------- |
|
14 | 14 | # Imports |
|
15 | 15 | #----------------------------------------------------------------------------- |
|
16 | 16 | |
|
17 | 17 | from __future__ import print_function, absolute_import |
|
18 | 18 | |
|
19 | 19 | # Stdlib imports |
|
20 | 20 | import os |
|
21 | 21 | |
|
22 | 22 | # other libs/dependencies |
|
23 | 23 | from jinja2 import Environment, FileSystemLoader, ChoiceLoader, TemplateNotFound |
|
24 | 24 | |
|
25 | 25 | # IPython imports |
|
26 | 26 | from IPython.utils.traitlets import MetaHasTraits, Unicode, List, Dict, Any |
|
27 | 27 | from IPython.utils.importstring import import_item |
|
28 | 28 | from IPython.utils import py3compat, text |
|
29 | 29 | |
|
30 | 30 | from IPython.nbconvert import filters |
|
31 | 31 | from .exporter import Exporter |
|
32 | 32 | |
|
33 | 33 | #----------------------------------------------------------------------------- |
|
34 | 34 | # Globals and constants |
|
35 | 35 | #----------------------------------------------------------------------------- |
|
36 | 36 | |
|
37 | 37 | #Jinja2 extensions to load. |
|
38 | 38 | JINJA_EXTENSIONS = ['jinja2.ext.loopcontrols'] |
|
39 | 39 | |
|
40 | 40 | default_filters = { |
|
41 | 41 | 'indent': text.indent, |
|
42 | 42 | 'markdown2html': filters.markdown2html, |
|
43 | 43 | 'ansi2html': filters.ansi2html, |
|
44 | 44 | 'filter_data_type': filters.DataTypeFilter, |
|
45 | 45 | 'get_lines': filters.get_lines, |
|
46 | 46 | 'highlight2html': filters.Highlight2Html, |
|
47 | 47 | 'highlight2latex': filters.Highlight2Latex, |
|
48 | 48 | 'ipython2python': filters.ipython2python, |
|
49 | 49 | 'posix_path': filters.posix_path, |
|
50 | 50 | 'markdown2latex': filters.markdown2latex, |
|
51 | 51 | 'markdown2rst': filters.markdown2rst, |
|
52 | 52 | 'comment_lines': filters.comment_lines, |
|
53 | 53 | 'strip_ansi': filters.strip_ansi, |
|
54 | 54 | 'strip_dollars': filters.strip_dollars, |
|
55 | 55 | 'strip_files_prefix': filters.strip_files_prefix, |
|
56 | 56 | 'html2text' : filters.html2text, |
|
57 | 57 | 'add_anchor': filters.add_anchor, |
|
58 | 58 | 'ansi2latex': filters.ansi2latex, |
|
59 | 59 | 'wrap_text': filters.wrap_text, |
|
60 | 60 | 'escape_latex': filters.escape_latex, |
|
61 | 61 | 'citation2latex': filters.citation2latex, |
|
62 | 62 | 'path2url': filters.path2url, |
|
63 | 63 | 'add_prompts': filters.add_prompts, |
|
64 | 64 | } |
|
65 | 65 | |
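
Illustration (not in the source) of how these names are consumed: each key becomes a filter usable inside templates, and the same callables are reachable from Python via the environment:

    # In a template: {{ cell.input | highlight2html }}
    # From Python, once the exporter is constructed:
    html = exporter.environment.filters['ansi2html'](u'\x1b[31mred\x1b[0m')
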
|
66 | 66 | #----------------------------------------------------------------------------- |
|
67 | 67 | # Class |
|
68 | 68 | #----------------------------------------------------------------------------- |
|
69 | 69 | |
|
70 | 70 | class TemplateExporter(Exporter): |
|
71 | 71 | """ |
|
72 | 72 | Exports notebooks into other file formats. Uses Jinja 2 templating engine |
|
73 | 73 | to output new formats. Inherit from this class if you are creating a new |
|
74 | 74 | template type along with new filters/preprocessors. If the filters/ |
|
75 | 75 | preprocessors provided by default suffice, there is no need to inherit from |
|
76 | 76 | this class. Instead, override the template_file and file_extension |
|
77 | 77 | traits via a config file. |
|
78 | 78 | |
|
79 | 79 | {filters} |
|
80 | 80 | """ |
|
81 | 81 | |
|
82 | 82 | # finish the docstring |
|
83 | 83 | __doc__ = __doc__.format(filters = '- '+'\n - '.join(default_filters.keys())) |
|
84 | 84 | |
|
85 | 85 | |
|
86 | 86 | template_file = Unicode(u'default', |
|
87 | 87 | config=True, |
|
88 | 88 | help="Name of the template file to use") |
|
89 | 89 | def _template_file_changed(self, name, old, new): |
|
90 | 90 | if new == 'default': |
|
91 | 91 | self.template_file = self.default_template |
|
92 | 92 | else: |
|
93 | 93 | self.template_file = new |
|
94 | 94 | self.template = None |
|
95 | 95 | self._load_template() |
|
96 | 96 | |
|
97 | 97 | default_template = Unicode(u'') |
|
98 | 98 | template = Any() |
|
99 | 99 | environment = Any() |
|
100 | 100 | |
|
101 | 101 | template_path = List(['.'], config=True) |
|
102 | 102 | def _template_path_changed(self, name, old, new): |
|
103 | 103 | self._load_template() |
|
104 | 104 | |
|
105 | 105 | default_template_path = Unicode( |
|
106 | 106 | os.path.join("..", "templates"), |
|
107 | 107 | help="Path where the template files are located.") |
|
108 | 108 | |
|
109 | 109 | template_skeleton_path = Unicode( |
|
110 | 110 | os.path.join("..", "templates", "skeleton"), |
|
111 | 111 | help="Path where the template skeleton files are located.") |
|
112 | 112 | |
|
113 | 113 | #Jinja block definitions |
|
114 | 114 | jinja_comment_block_start = Unicode("", config=True) |
|
115 | 115 | jinja_comment_block_end = Unicode("", config=True) |
|
116 | 116 | jinja_variable_block_start = Unicode("", config=True) |
|
117 | 117 | jinja_variable_block_end = Unicode("", config=True) |
|
118 | 118 | jinja_logic_block_start = Unicode("", config=True) |
|
119 | 119 | jinja_logic_block_end = Unicode("", config=True) |
|
120 | 120 | |
|
121 | 121 | #Extension that the template files use. |
|
122 | 122 | template_extension = Unicode(".tpl", config=True) |
|
123 | 123 | |
|
124 | 124 | filters = Dict(config=True, |
|
125 | 125 | help="""Dictionary of filters, by name and namespace, to add to the Jinja |
|
126 | 126 | environment.""") |
|
127 | 127 | |
|
128 | 128 | |
|
129 | 129 | def __init__(self, config=None, extra_loaders=None, **kw): |
|
130 | 130 | """ |
|
131 | 131 | Public constructor |
|
132 | 132 | |
|
133 | 133 | Parameters |
|
134 | 134 | ---------- |
|
135 | 135 | config : config |
|
136 | 136 | User configuration instance. |
|
137 | 137 | extra_loaders : list[of Jinja Loaders] |
|
138 | 138 | ordered list of Jinja loader to find templates. Will be tried in order |
|
139 | 139 | before the default FileSystem ones. |
|
140 | 140 | template : str (optional, kw arg) |
|
141 | 141 | Template to use when exporting. |
|
142 | 142 | """ |
|
143 | 143 | super(TemplateExporter, self).__init__(config=config, **kw) |
|
144 | 144 | |
|
145 | 145 | #Init |
|
146 | 146 | self._init_template() |
|
147 | 147 | self._init_environment(extra_loaders=extra_loaders) |
|
148 | 148 | self._init_preprocessors() |
|
149 | 149 | self._init_filters() |
|
150 | 150 | |
|
151 | 151 | |
|
152 | 152 | def _load_template(self): |
|
153 | 153 | """Load the Jinja template object from the template file |
|
154 | 154 | |
|
155 | 155 | This is a no-op if the template attribute is already defined, |
|
156 | 156 | or the Jinja environment is not setup yet. |
|
157 | 157 | |
|
158 | 158 | This is triggered by various trait changes that would change the template. |
|
159 | 159 | """ |
|
160 | 160 | if self.template is not None: |
|
161 | 161 | return |
|
162 | 162 | # called too early, do nothing |
|
163 | 163 | if self.environment is None: |
|
164 | 164 | return |
|
165 | 165 | # Try different template names during conversion. First try to load the |
|
166 | 166 | # template by name with extension added, then try loading the template |
|
167 | 167 | # as if the name is explicitly specified, then try the name as a |
|
168 | 168 | # 'flavor', and lastly just try to load the template by module name. |
|
169 | 169 | module_name = self.__module__.rsplit('.', 1)[-1] |
|
170 | 170 | try_names = [] |
|
171 | 171 | if self.template_file: |
|
172 | 172 | try_names.extend([ |
|
173 | 173 | self.template_file + self.template_extension, |
|
174 | 174 | self.template_file, |
|
175 | 175 | module_name + '_' + self.template_file + self.template_extension, |
|
176 | 176 | ]) |
|
177 | 177 | try_names.append(module_name + self.template_extension) |
|
178 | 178 | for try_name in try_names: |
|
179 | 179 | self.log.debug("Attempting to load template %s", try_name) |
|
180 | 180 | try: |
|
181 | 181 | self.template = self.environment.get_template(try_name) |
|
182 | 182 | except (TemplateNotFound, IOError): |
|
183 | 183 | pass |
|
184 | 184 | except Exception:
|
185 | 185 | self.log.warn("Unexpected exception loading template: %s", try_name, exc_info=True) |
|
186 | 186 | else: |
|
187 | 187 | self.log.info("Loaded template %s", try_name) |
|
188 | 188 | break |
|
189 | 189 | |
|
190 | 190 | def from_notebook_node(self, nb, resources=None, **kw): |
|
191 | 191 | """ |
|
192 | 192 | Convert a notebook from a notebook node instance. |
|
193 | 193 | |
|
194 | 194 | Parameters |
|
195 | 195 | ---------- |
|
196 | nb : Notebook node | |
|
197 | resources : dict (**kw) | |
|
198 | of additional resources that can be accessed read/write by | |
|
199 | preprocessors and filters. | |
|
196 | nb : :class:`~IPython.nbformat.v3.nbbase.NotebookNode` | |
|
197 | Notebook node | |
|
198 | resources : dict | |
|
199 | Additional resources that can be accessed read/write by | |
|
200 | preprocessors and filters. | |
|
200 | 201 | """ |
|
201 | 202 | nb_copy, resources = super(TemplateExporter, self).from_notebook_node(nb, resources, **kw) |
|
202 | 203 | |
|
203 | 204 | self._load_template() |
|
204 | 205 | |
|
205 | 206 | if self.template is not None: |
|
206 | 207 | output = self.template.render(nb=nb_copy, resources=resources) |
|
207 | 208 | else: |
|
208 | 209 | raise IOError('template file "%s" could not be found' % self.template_file) |
|
209 | 210 | return output, resources |
|
210 | 211 | |
|
211 | 212 | |
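
A minimal end-to-end sketch of driving from_notebook_node; the file name is hypothetical, and the reader assumed here is the era's IPython.nbformat.current API:

    from IPython.nbformat import current as nbformat

    with open('example.ipynb') as f:   # hypothetical notebook file
        nb = nbformat.read(f, 'json')
    output, resources = exporter.from_notebook_node(nb)
    # 'output' holds the rendered template; 'resources' carries side products.
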
|
212 | 213 | def register_filter(self, name, jinja_filter): |
|
213 | 214 | """ |
|
214 | 215 | Register a filter. |
|
215 | 216 | A filter is a function that accepts and acts on one string. |
|
216 | 217 | The filters are accesible within the Jinja templating engine. |
|
217 | 218 | |
|
218 | 219 | Parameters |
|
219 | 220 | ---------- |
|
220 | 221 | name : str |
|
221 | 222 | name to give the filter in the Jinja engine |
|
222 | 223 | filter : filter |
|
223 | 224 | """ |
|
224 | 225 | if jinja_filter is None: |
|
225 | 226 | raise TypeError('filter') |
|
226 | 227 | isclass = isinstance(jinja_filter, type) |
|
227 | 228 | constructed = not isclass |
|
228 | 229 | |
|
229 | 230 | #Handle filter's registration based on it's type |
|
230 | 231 | if constructed and isinstance(jinja_filter, py3compat.string_types): |
|
231 | 232 | #filter is a string, import the namespace and recursively call |
|
232 | 233 | #this register_filter method |
|
233 | 234 | filter_cls = import_item(jinja_filter) |
|
234 | 235 | return self.register_filter(name, filter_cls) |
|
235 | 236 | |
|
236 | 237 | if constructed and hasattr(jinja_filter, '__call__'): |
|
237 | 238 | #filter is a function, no need to construct it. |
|
238 | 239 | self.environment.filters[name] = jinja_filter |
|
239 | 240 | return jinja_filter |
|
240 | 241 | |
|
241 | 242 | elif isclass and isinstance(jinja_filter, MetaHasTraits): |
|
242 | 243 | #filter is configurable. Make sure to pass in new default for |
|
243 | 244 | #the enabled flag if one was specified. |
|
244 | 245 | filter_instance = jinja_filter(parent=self) |
|
245 | 246 | return self.register_filter(name, filter_instance)
|
246 | 247 | |
|
247 | 248 | elif isclass: |
|
248 | 249 | #filter is not configurable, construct it |
|
249 | 250 | filter_instance = jinja_filter() |
|
250 | 251 | return self.register_filter(name, filter_instance)
|
251 | 252 | |
|
252 | 253 | else: |
|
253 | 254 | #filter is an instance of something without a __call__ |
|
254 | 255 | #attribute. |
|
255 | 256 | raise TypeError('filter') |
|
256 | 257 | |
|
257 | 258 | |
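
A small sketch (name hypothetical) of registering a plain function as a filter, after which templates can call it by the registered name:

    exporter.register_filter('shout', lambda s: s.upper())
    # in a template: {{ "hello" | shout }}  ->  HELLO
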
|
258 | 259 | def _init_template(self): |
|
259 | 260 | """ |
|
260 | 261 | Make sure a template name is specified. If one isn't specified, try to |
|
261 | 262 | build one from the information we know. |
|
262 | 263 | """ |
|
263 | 264 | self._template_file_changed('template_file', self.template_file, self.template_file) |
|
264 | 265 | |
|
265 | 266 | |
|
266 | 267 | def _init_environment(self, extra_loaders=None): |
|
267 | 268 | """ |
|
268 | 269 | Create the Jinja templating environment. |
|
269 | 270 | """ |
|
270 | 271 | here = os.path.dirname(os.path.realpath(__file__)) |
|
271 | 272 | loaders = [] |
|
272 | 273 | if extra_loaders: |
|
273 | 274 | loaders.extend(extra_loaders) |
|
274 | 275 | |
|
275 | 276 | paths = self.template_path |
|
276 | 277 | paths.extend([os.path.join(here, self.default_template_path), |
|
277 | 278 | os.path.join(here, self.template_skeleton_path)]) |
|
278 | 279 | loaders.append(FileSystemLoader(paths)) |
|
279 | 280 | |
|
280 | 281 | self.environment = Environment( |
|
281 | 282 | loader= ChoiceLoader(loaders), |
|
282 | 283 | extensions=JINJA_EXTENSIONS |
|
283 | 284 | ) |
|
284 | 285 | |
|
285 | 286 | #Set special Jinja2 syntax that will not conflict with latex. |
|
286 | 287 | if self.jinja_logic_block_start: |
|
287 | 288 | self.environment.block_start_string = self.jinja_logic_block_start |
|
288 | 289 | if self.jinja_logic_block_end: |
|
289 | 290 | self.environment.block_end_string = self.jinja_logic_block_end |
|
290 | 291 | if self.jinja_variable_block_start: |
|
291 | 292 | self.environment.variable_start_string = self.jinja_variable_block_start |
|
292 | 293 | if self.jinja_variable_block_end: |
|
293 | 294 | self.environment.variable_end_string = self.jinja_variable_block_end |
|
294 | 295 | if self.jinja_comment_block_start: |
|
295 | 296 | self.environment.comment_start_string = self.jinja_comment_block_start |
|
296 | 297 | if self.jinja_comment_block_end: |
|
297 | 298 | self.environment.comment_end_string = self.jinja_comment_block_end |
|
298 | 299 | |
|
299 | 300 | |
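
A hedged config sketch of how a LaTeX-oriented exporter could remap the delimiters handled above so templates do not collide with TeX braces; the delimiter strings are illustrative, not guaranteed defaults:

    c = get_config()  # inside an IPython/nbconvert config file
    c.TemplateExporter.jinja_logic_block_start    = '((*'
    c.TemplateExporter.jinja_logic_block_end      = '*))'
    c.TemplateExporter.jinja_variable_block_start = '((('
    c.TemplateExporter.jinja_variable_block_end   = ')))'
    c.TemplateExporter.jinja_comment_block_start  = '((='
    c.TemplateExporter.jinja_comment_block_end    = '=))'
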
|
300 | 301 | def _init_filters(self): |
|
301 | 302 | """ |
|
302 | 303 | Register all of the filters required for the exporter. |
|
303 | 304 | """ |
|
304 | 305 | |
|
305 | 306 | #Add default filters to the Jinja2 environment |
|
306 | 307 | for key, value in default_filters.items(): |
|
307 | 308 | self.register_filter(key, value) |
|
308 | 309 | |
|
309 | 310 | #Load user filters. Overwrite existing filters if need be. |
|
310 | 311 | if self.filters: |
|
311 | 312 | for key, user_filter in self.filters.items(): |
|
312 | 313 | self.register_filter(key, user_filter) |
@@ -1,1450 +1,1451 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """ |
|
3 | 3 | Facilities for launching IPython processes asynchronously. |
|
4 | 4 | |
|
5 | 5 | Authors: |
|
6 | 6 | |
|
7 | 7 | * Brian Granger |
|
8 | 8 | * MinRK |
|
9 | 9 | """ |
|
10 | 10 | |
|
11 | 11 | #----------------------------------------------------------------------------- |
|
12 | 12 | # Copyright (C) 2008-2011 The IPython Development Team |
|
13 | 13 | # |
|
14 | 14 | # Distributed under the terms of the BSD License. The full license is in |
|
15 | 15 | # the file COPYING, distributed as part of this software. |
|
16 | 16 | #----------------------------------------------------------------------------- |
|
17 | 17 | |
|
18 | 18 | #----------------------------------------------------------------------------- |
|
19 | 19 | # Imports |
|
20 | 20 | #----------------------------------------------------------------------------- |
|
21 | 21 | |
|
22 | 22 | import copy |
|
23 | 23 | import logging |
|
24 | 24 | import os |
|
25 | 25 | import pipes |
|
26 | 26 | import stat |
|
27 | 27 | import sys |
|
28 | 28 | import time |
|
29 | 29 | |
|
30 | 30 | # signal imports, handling various platforms, versions |
|
31 | 31 | |
|
32 | 32 | from signal import SIGINT, SIGTERM |
|
33 | 33 | try: |
|
34 | 34 | from signal import SIGKILL |
|
35 | 35 | except ImportError: |
|
36 | 36 | # Windows |
|
37 | 37 | SIGKILL=SIGTERM |
|
38 | 38 | |
|
39 | 39 | try: |
|
40 | 40 | # Python >= 2.7, 3.2 on Windows
|
41 | 41 | from signal import CTRL_C_EVENT as SIGINT |
|
42 | 42 | except ImportError: |
|
43 | 43 | pass |
|
44 | 44 | |
|
45 | 45 | from subprocess import Popen, PIPE, STDOUT |
|
46 | 46 | try: |
|
47 | 47 | from subprocess import check_output |
|
48 | 48 | except ImportError: |
|
49 | 49 | # pre-2.7, define check_output with Popen |
|
50 | 50 | def check_output(*args, **kwargs): |
|
51 | 51 | kwargs.update(dict(stdout=PIPE)) |
|
52 | 52 | p = Popen(*args, **kwargs) |
|
53 | 53 | out,err = p.communicate() |
|
54 | 54 | return out |
|
55 | 55 | |
|
56 | 56 | from zmq.eventloop import ioloop |
|
57 | 57 | |
|
58 | 58 | from IPython.config.application import Application |
|
59 | 59 | from IPython.config.configurable import LoggingConfigurable |
|
60 | 60 | from IPython.utils.text import EvalFormatter |
|
61 | 61 | from IPython.utils.traitlets import ( |
|
62 | 62 | Any, Integer, CFloat, List, Unicode, Dict, Instance, HasTraits, CRegExp |
|
63 | 63 | ) |
|
64 | 64 | from IPython.utils.encoding import DEFAULT_ENCODING |
|
65 | 65 | from IPython.utils.path import get_home_dir |
|
66 | 66 | from IPython.utils.process import find_cmd, FindCmdError |
|
67 | 67 | from IPython.utils.py3compat import iteritems, itervalues |
|
68 | 68 | |
|
69 | 69 | from .win32support import forward_read_events |
|
70 | 70 | |
|
71 | 71 | from .winhpcjob import IPControllerTask, IPEngineTask, IPControllerJob, IPEngineSetJob |
|
72 | 72 | |
|
73 | 73 | WINDOWS = os.name == 'nt' |
|
74 | 74 | |
|
75 | 75 | #----------------------------------------------------------------------------- |
|
76 | 76 | # Paths to the kernel apps |
|
77 | 77 | #----------------------------------------------------------------------------- |
|
78 | 78 | |
|
79 | 79 | cmd = "from IPython.parallel.apps.%s import launch_new_instance; launch_new_instance()" |
|
80 | 80 | |
|
81 | 81 | ipcluster_cmd_argv = [sys.executable, "-c", cmd % "ipclusterapp"] |
|
82 | 82 | |
|
83 | 83 | ipengine_cmd_argv = [sys.executable, "-c", cmd % "ipengineapp"] |
|
84 | 84 | |
|
85 | 85 | ipcontroller_cmd_argv = [sys.executable, "-c", cmd % "ipcontrollerapp"] |
|
86 | 86 | |
|
87 | 87 | #----------------------------------------------------------------------------- |
|
88 | 88 | # Base launchers and errors |
|
89 | 89 | #----------------------------------------------------------------------------- |
|
90 | 90 | |
|
91 | 91 | class LauncherError(Exception): |
|
92 | 92 | pass |
|
93 | 93 | |
|
94 | 94 | |
|
95 | 95 | class ProcessStateError(LauncherError): |
|
96 | 96 | pass |
|
97 | 97 | |
|
98 | 98 | |
|
99 | 99 | class UnknownStatus(LauncherError): |
|
100 | 100 | pass |
|
101 | 101 | |
|
102 | 102 | |
|
103 | 103 | class BaseLauncher(LoggingConfigurable): |
|
104 | 104 | """An asbtraction for starting, stopping and signaling a process.""" |
|
105 | 105 | |
|
106 | 106 | # In all of the launchers, the work_dir is where child processes will be |
|
107 | 107 | # run. This will usually be the profile_dir, but may not be. Any work_dir
|
108 | 108 | # passed into the __init__ method will override the config value. |
|
109 | 109 | # This should not be used to set the work_dir for the actual engine |
|
110 | 110 | # and controller. Instead, use their own config files or the |
|
111 | 111 | # controller_args, engine_args attributes of the launchers to add |
|
112 | 112 | # the work_dir option. |
|
113 | 113 | work_dir = Unicode(u'.') |
|
114 | 114 | loop = Instance('zmq.eventloop.ioloop.IOLoop') |
|
115 | 115 | |
|
116 | 116 | start_data = Any() |
|
117 | 117 | stop_data = Any() |
|
118 | 118 | |
|
119 | 119 | def _loop_default(self): |
|
120 | 120 | return ioloop.IOLoop.instance() |
|
121 | 121 | |
|
122 | 122 | def __init__(self, work_dir=u'.', config=None, **kwargs): |
|
123 | 123 | super(BaseLauncher, self).__init__(work_dir=work_dir, config=config, **kwargs) |
|
124 | 124 | self.state = 'before' # can be before, running, after |
|
125 | 125 | self.stop_callbacks = [] |
|
126 | 126 | self.start_data = None |
|
127 | 127 | self.stop_data = None |
|
128 | 128 | |
|
129 | 129 | @property |
|
130 | 130 | def args(self): |
|
131 | 131 | """A list of cmd and args that will be used to start the process. |
|
132 | 132 | |
|
133 | 133 | This is what is passed to :func:`spawnProcess` and the first element |
|
134 | 134 | will be the process name. |
|
135 | 135 | """ |
|
136 | 136 | return self.find_args() |
|
137 | 137 | |
|
138 | 138 | def find_args(self): |
|
139 | 139 | """The ``.args`` property calls this to find the args list. |
|
140 | 140 | |
|
141 | 141 | Subclasses should implement this to construct the cmd and args.
|
142 | 142 | """ |
|
143 | 143 | raise NotImplementedError('find_args must be implemented in a subclass') |
|
144 | 144 | |
|
145 | 145 | @property |
|
146 | 146 | def arg_str(self): |
|
147 | 147 | """The string form of the program arguments.""" |
|
148 | 148 | return ' '.join(self.args) |
|
149 | 149 | |
|
150 | 150 | @property |
|
151 | 151 | def running(self): |
|
152 | 152 | """Am I running.""" |
|
153 | 153 | if self.state == 'running': |
|
154 | 154 | return True |
|
155 | 155 | else: |
|
156 | 156 | return False |
|
157 | 157 | |
|
158 | 158 | def start(self): |
|
159 | 159 | """Start the process.""" |
|
160 | 160 | raise NotImplementedError('start must be implemented in a subclass') |
|
161 | 161 | |
|
162 | 162 | def stop(self): |
|
163 | 163 | """Stop the process and notify observers of stopping. |
|
164 | 164 | |
|
165 | 165 | This method will return None immediately. |
|
166 | 166 | To observe the actual process stopping, see :meth:`on_stop`. |
|
167 | 167 | """ |
|
168 | 168 | raise NotImplementedError('stop must be implemented in a subclass') |
|
169 | 169 | |
|
170 | 170 | def on_stop(self, f): |
|
171 | 171 | """Register a callback to be called with this Launcher's stop_data |
|
172 | 172 | when the process actually finishes. |
|
173 | 173 | """ |
|
174 | 174 | if self.state=='after': |
|
175 | 175 | return f(self.stop_data) |
|
176 | 176 | else: |
|
177 | 177 | self.stop_callbacks.append(f) |
|
178 | 178 | |
|
179 | 179 | def notify_start(self, data): |
|
180 | 180 | """Call this to trigger startup actions. |
|
181 | 181 | |
|
182 | 182 | This logs the process startup and sets the state to 'running'. It is |
|
183 | 183 | a pass-through so it can be used as a callback. |
|
184 | 184 | """ |
|
185 | 185 | |
|
186 | 186 | self.log.debug('Process %r started: %r', self.args[0], data) |
|
187 | 187 | self.start_data = data |
|
188 | 188 | self.state = 'running' |
|
189 | 189 | return data |
|
190 | 190 | |
|
191 | 191 | def notify_stop(self, data): |
|
192 | 192 | """Call this to trigger process stop actions. |
|
193 | 193 | |
|
194 | 194 | This logs the process stopping and sets the state to 'after'. Call |
|
195 | 195 | this to trigger callbacks registered via :meth:`on_stop`.""" |
|
196 | 196 | |
|
197 | 197 | self.log.debug('Process %r stopped: %r', self.args[0], data) |
|
198 | 198 | self.stop_data = data |
|
199 | 199 | self.state = 'after' |
|
200 | 200 | for i in range(len(self.stop_callbacks)): |
|
201 | 201 | d = self.stop_callbacks.pop() |
|
202 | 202 | d(data) |
|
203 | 203 | return data |
|
204 | 204 | |
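
A sketch (launcher instance and callback hypothetical) of the stop-callback lifecycle defined above: on_stop fires immediately when the state is already 'after', otherwise it queues the callback for notify_stop to drain exactly once:

    def report(stop_data):
        print('launcher exited:', stop_data)

    launcher.on_stop(report)  # invoked with stop_data once the process ends
    # later the poller sees the exit and calls notify_stop(data), which pops
    # and runs every queued callback.
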
|
205 | 205 | def signal(self, sig): |
|
206 | 206 | """Signal the process. |
|
207 | 207 | |
|
208 | 208 | Parameters |
|
209 | 209 | ---------- |
|
210 | 210 | sig : str or int |
|
211 | 211 | 'KILL', 'INT', etc., or any signal number |
|
212 | 212 | """ |
|
213 | 213 | raise NotImplementedError('signal must be implemented in a subclass') |
|
214 | 214 | |
|
215 | 215 | class ClusterAppMixin(HasTraits): |
|
216 | 216 | """MixIn for cluster args as traits""" |
|
217 | 217 | profile_dir=Unicode('') |
|
218 | 218 | cluster_id=Unicode('') |
|
219 | 219 | |
|
220 | 220 | @property |
|
221 | 221 | def cluster_args(self): |
|
222 | 222 | return ['--profile-dir', self.profile_dir, '--cluster-id', self.cluster_id] |
|
223 | 223 | |
|
224 | 224 | class ControllerMixin(ClusterAppMixin): |
|
225 | 225 | controller_cmd = List(ipcontroller_cmd_argv, config=True, |
|
226 | 226 | help="""Popen command to launch ipcontroller.""") |
|
227 | 227 | # Command line arguments to ipcontroller. |
|
228 | 228 | controller_args = List(['--log-to-file','--log-level=%i' % logging.INFO], config=True, |
|
229 | 229 | help="""command-line args to pass to ipcontroller""") |
|
230 | 230 | |
|
231 | 231 | class EngineMixin(ClusterAppMixin): |
|
232 | 232 | engine_cmd = List(ipengine_cmd_argv, config=True, |
|
233 | 233 | help="""command to launch the Engine.""") |
|
234 | 234 | # Command line arguments for ipengine. |
|
235 | 235 | engine_args = List(['--log-to-file','--log-level=%i' % logging.INFO], config=True, |
|
236 | 236 | help="command-line arguments to pass to ipengine" |
|
237 | 237 | ) |
|
238 | 238 | |
|
239 | 239 | |
|
240 | 240 | #----------------------------------------------------------------------------- |
|
241 | 241 | # Local process launchers |
|
242 | 242 | #----------------------------------------------------------------------------- |
|
243 | 243 | |
|
244 | 244 | |
|
245 | 245 | class LocalProcessLauncher(BaseLauncher): |
|
246 | 246 | """Start and stop an external process in an asynchronous manner. |
|
247 | 247 | |
|
248 | 248 | This will launch the external process with a working directory of |
|
249 | 249 | ``self.work_dir``. |
|
250 | 250 | """ |
|
251 | 251 | |
|
252 | 252 | # This is used to construct self.args, which is passed to
|
253 | 253 | # spawnProcess. |
|
254 | 254 | cmd_and_args = List([]) |
|
255 | 255 | poll_frequency = Integer(100) # in ms |
|
256 | 256 | |
|
257 | 257 | def __init__(self, work_dir=u'.', config=None, **kwargs): |
|
258 | 258 | super(LocalProcessLauncher, self).__init__( |
|
259 | 259 | work_dir=work_dir, config=config, **kwargs |
|
260 | 260 | ) |
|
261 | 261 | self.process = None |
|
262 | 262 | self.poller = None |
|
263 | 263 | |
|
264 | 264 | def find_args(self): |
|
265 | 265 | return self.cmd_and_args |
|
266 | 266 | |
|
267 | 267 | def start(self): |
|
268 | 268 | self.log.debug("Starting %s: %r", self.__class__.__name__, self.args) |
|
269 | 269 | if self.state == 'before': |
|
270 | 270 | self.process = Popen(self.args, |
|
271 | 271 | stdout=PIPE,stderr=PIPE,stdin=PIPE, |
|
272 | 272 | env=os.environ, |
|
273 | 273 | cwd=self.work_dir |
|
274 | 274 | ) |
|
275 | 275 | if WINDOWS: |
|
276 | 276 | self.stdout = forward_read_events(self.process.stdout) |
|
277 | 277 | self.stderr = forward_read_events(self.process.stderr) |
|
278 | 278 | else: |
|
279 | 279 | self.stdout = self.process.stdout.fileno() |
|
280 | 280 | self.stderr = self.process.stderr.fileno() |
|
281 | 281 | self.loop.add_handler(self.stdout, self.handle_stdout, self.loop.READ) |
|
282 | 282 | self.loop.add_handler(self.stderr, self.handle_stderr, self.loop.READ) |
|
283 | 283 | self.poller = ioloop.PeriodicCallback(self.poll, self.poll_frequency, self.loop) |
|
284 | 284 | self.poller.start() |
|
285 | 285 | self.notify_start(self.process.pid) |
|
286 | 286 | else: |
|
287 | 287 | s = 'The process was already started and has state: %r' % self.state |
|
288 | 288 | raise ProcessStateError(s) |
|
289 | 289 | |
|
290 | 290 | def stop(self): |
|
291 | 291 | return self.interrupt_then_kill() |
|
292 | 292 | |
|
293 | 293 | def signal(self, sig): |
|
294 | 294 | if self.state == 'running': |
|
295 | 295 | if WINDOWS and sig != SIGINT: |
|
296 | 296 | # use Windows tree-kill for better child cleanup |
|
297 | 297 | check_output(['taskkill', '-pid', str(self.process.pid), '-t', '-f']) |
|
298 | 298 | else: |
|
299 | 299 | self.process.send_signal(sig) |
|
300 | 300 | |
|
301 | 301 | def interrupt_then_kill(self, delay=2.0): |
|
302 | 302 | """Send INT, wait a delay and then send KILL.""" |
|
303 | 303 | try: |
|
304 | 304 | self.signal(SIGINT) |
|
305 | 305 | except Exception: |
|
306 | 306 | self.log.debug("interrupt failed") |
|
307 | 307 | pass |
|
308 | 308 | self.killer = ioloop.DelayedCallback(lambda : self.signal(SIGKILL), delay*1000, self.loop) |
|
309 | 309 | self.killer.start() |
|
310 | 310 | |
|
311 | 311 | # callbacks, etc: |
|
312 | 312 | |
|
313 | 313 | def handle_stdout(self, fd, events): |
|
314 | 314 | if WINDOWS: |
|
315 | 315 | line = self.stdout.recv() |
|
316 | 316 | else: |
|
317 | 317 | line = self.process.stdout.readline() |
|
318 | 318 | # a stopped process will be readable but return empty strings |
|
319 | 319 | if line: |
|
320 | 320 | self.log.debug(line[:-1]) |
|
321 | 321 | else: |
|
322 | 322 | self.poll() |
|
323 | 323 | |
|
324 | 324 | def handle_stderr(self, fd, events): |
|
325 | 325 | if WINDOWS: |
|
326 | 326 | line = self.stderr.recv() |
|
327 | 327 | else: |
|
328 | 328 | line = self.process.stderr.readline() |
|
329 | 329 | # a stopped process will be readable but return empty strings |
|
330 | 330 | if line: |
|
331 | 331 | self.log.debug(line[:-1]) |
|
332 | 332 | else: |
|
333 | 333 | self.poll() |
|
334 | 334 | |
|
335 | 335 | def poll(self): |
|
336 | 336 | status = self.process.poll() |
|
337 | 337 | if status is not None: |
|
338 | 338 | self.poller.stop() |
|
339 | 339 | self.loop.remove_handler(self.stdout) |
|
340 | 340 | self.loop.remove_handler(self.stderr) |
|
341 | 341 | self.notify_stop(dict(exit_code=status, pid=self.process.pid)) |
|
342 | 342 | return status |
|
343 | 343 | |
|
344 | 344 | class LocalControllerLauncher(LocalProcessLauncher, ControllerMixin): |
|
345 | 345 | """Launch a controller as a regular external process.""" |
|
346 | 346 | |
|
347 | 347 | def find_args(self): |
|
348 | 348 | return self.controller_cmd + self.cluster_args + self.controller_args |
|
349 | 349 | |
|
350 | 350 | def start(self): |
|
351 | 351 | """Start the controller by profile_dir.""" |
|
352 | 352 | return super(LocalControllerLauncher, self).start() |
|
353 | 353 | |
|
354 | 354 | |
|
355 | 355 | class LocalEngineLauncher(LocalProcessLauncher, EngineMixin): |
|
356 | 356 | """Launch a single engine as a regular externall process.""" |
|
357 | 357 | |
|
358 | 358 | def find_args(self): |
|
359 | 359 | return self.engine_cmd + self.cluster_args + self.engine_args |
|
360 | 360 | |
|
361 | 361 | |
|
362 | 362 | class LocalEngineSetLauncher(LocalEngineLauncher): |
|
363 | 363 | """Launch a set of engines as regular external processes.""" |
|
364 | 364 | |
|
365 | 365 | delay = CFloat(0.1, config=True, |
|
366 | 366 | help="""delay (in seconds) between starting each engine after the first. |
|
367 | 367 | This can help force the engines to get their ids in order, or limit |
|
368 | 368 | process flood when starting many engines.""" |
|
369 | 369 | ) |
|
370 | 370 | |
|
371 | 371 | # launcher class |
|
372 | 372 | launcher_class = LocalEngineLauncher |
|
373 | 373 | |
|
374 | 374 | launchers = Dict() |
|
375 | 375 | stop_data = Dict() |
|
376 | 376 | |
|
377 | 377 | def __init__(self, work_dir=u'.', config=None, **kwargs): |
|
378 | 378 | super(LocalEngineSetLauncher, self).__init__( |
|
379 | 379 | work_dir=work_dir, config=config, **kwargs |
|
380 | 380 | ) |
|
381 | 381 | self.stop_data = {} |
|
382 | 382 | |
|
383 | 383 | def start(self, n): |
|
384 | 384 | """Start n engines by profile or profile_dir.""" |
|
385 | 385 | dlist = [] |
|
386 | 386 | for i in range(n): |
|
387 | 387 | if i > 0: |
|
388 | 388 | time.sleep(self.delay) |
|
389 | 389 | el = self.launcher_class(work_dir=self.work_dir, parent=self, log=self.log, |
|
390 | 390 | profile_dir=self.profile_dir, cluster_id=self.cluster_id, |
|
391 | 391 | ) |
|
392 | 392 | |
|
393 | 393 | # Copy the engine args over to each engine launcher. |
|
394 | 394 | el.engine_cmd = copy.deepcopy(self.engine_cmd) |
|
395 | 395 | el.engine_args = copy.deepcopy(self.engine_args) |
|
396 | 396 | el.on_stop(self._notice_engine_stopped) |
|
397 | 397 | d = el.start() |
|
398 | 398 | self.launchers[i] = el |
|
399 | 399 | dlist.append(d) |
|
400 | 400 | self.notify_start(dlist) |
|
401 | 401 | return dlist |
|
402 | 402 | |
|
403 | 403 | def find_args(self): |
|
404 | 404 | return ['engine set'] |
|
405 | 405 | |
|
406 | 406 | def signal(self, sig): |
|
407 | 407 | dlist = [] |
|
408 | 408 | for el in itervalues(self.launchers): |
|
409 | 409 | d = el.signal(sig) |
|
410 | 410 | dlist.append(d) |
|
411 | 411 | return dlist |
|
412 | 412 | |
|
413 | 413 | def interrupt_then_kill(self, delay=1.0): |
|
414 | 414 | dlist = [] |
|
415 | 415 | for el in itervalues(self.launchers): |
|
416 | 416 | d = el.interrupt_then_kill(delay) |
|
417 | 417 | dlist.append(d) |
|
418 | 418 | return dlist |
|
419 | 419 | |
|
420 | 420 | def stop(self): |
|
421 | 421 | return self.interrupt_then_kill() |
|
422 | 422 | |
|
423 | 423 | def _notice_engine_stopped(self, data): |
|
424 | 424 | pid = data['pid'] |
|
425 | 425 | for idx,el in iteritems(self.launchers): |
|
426 | 426 | if el.process.pid == pid: |
|
427 | 427 | break |
|
428 | 428 | self.launchers.pop(idx) |
|
429 | 429 | self.stop_data[idx] = data |
|
430 | 430 | if not self.launchers: |
|
431 | 431 | self.notify_stop(self.stop_data) |
|
432 | 432 | |
|
433 | 433 | |
|
434 | 434 | #----------------------------------------------------------------------------- |
|
435 | 435 | # MPI launchers |
|
436 | 436 | #----------------------------------------------------------------------------- |
|
437 | 437 | |
|
438 | 438 | |
|
439 | 439 | class MPILauncher(LocalProcessLauncher): |
|
440 | 440 | """Launch an external process using mpiexec.""" |
|
441 | 441 | |
|
442 | 442 | mpi_cmd = List(['mpiexec'], config=True, |
|
443 | 443 | help="The mpiexec command to use in starting the process." |
|
444 | 444 | ) |
|
445 | 445 | mpi_args = List([], config=True, |
|
446 | 446 | help="The command line arguments to pass to mpiexec." |
|
447 | 447 | ) |
|
448 | 448 | program = List(['date'], |
|
449 | 449 | help="The program to start via mpiexec.") |
|
450 | 450 | program_args = List([], |
|
451 | 451 | help="The command line arguments to pass to the program."
|
452 | 452 | ) |
|
453 | 453 | n = Integer(1) |
|
454 | 454 | |
|
455 | 455 | def __init__(self, *args, **kwargs): |
|
456 | 456 | # deprecation for old MPIExec names: |
|
457 | 457 | config = kwargs.get('config', {}) |
|
458 | 458 | for oldname in ('MPIExecLauncher', 'MPIExecControllerLauncher', 'MPIExecEngineSetLauncher'): |
|
459 | 459 | deprecated = config.get(oldname) |
|
460 | 460 | if deprecated: |
|
461 | 461 | newname = oldname.replace('MPIExec', 'MPI') |
|
462 | 462 | config[newname].update(deprecated) |
|
463 | 463 | self.log.warn("WARNING: %s name has been deprecated, use %s", oldname, newname) |
|
464 | 464 | |
|
465 | 465 | super(MPILauncher, self).__init__(*args, **kwargs) |
|
466 | 466 | |
|
467 | 467 | def find_args(self): |
|
468 | 468 | """Build self.args using all the fields.""" |
|
469 | 469 | return self.mpi_cmd + ['-n', str(self.n)] + self.mpi_args + \ |
|
470 | 470 | self.program + self.program_args |
|
471 | 471 | |
|
472 | 472 | def start(self, n): |
|
473 | 473 | """Start n instances of the program using mpiexec.""" |
|
474 | 474 | self.n = n |
|
475 | 475 | return super(MPILauncher, self).start() |
|
476 | 476 | |
|
477 | 477 | |
|
478 | 478 | class MPIControllerLauncher(MPILauncher, ControllerMixin): |
|
479 | 479 | """Launch a controller using mpiexec.""" |
|
480 | 480 | |
|
481 | 481 | # alias back to *non-configurable* program[_args] for use in find_args() |
|
482 | 482 | # this way all Controller/EngineSetLaunchers have the same form, rather |
|
483 | 483 | # than *some* having `program_args` and others `controller_args` |
|
484 | 484 | @property |
|
485 | 485 | def program(self): |
|
486 | 486 | return self.controller_cmd |
|
487 | 487 | |
|
488 | 488 | @property |
|
489 | 489 | def program_args(self): |
|
490 | 490 | return self.cluster_args + self.controller_args |
|
491 | 491 | |
|
492 | 492 | def start(self): |
|
493 | 493 | """Start the controller by profile_dir.""" |
|
494 | 494 | return super(MPIControllerLauncher, self).start(1) |
|
495 | 495 | |
|
496 | 496 | |
|
497 | 497 | class MPIEngineSetLauncher(MPILauncher, EngineMixin): |
|
498 | 498 | """Launch engines using mpiexec""" |
|
499 | 499 | |
|
500 | 500 | # alias back to *non-configurable* program[_args] for use in find_args() |
|
501 | 501 | # this way all Controller/EngineSetLaunchers have the same form, rather |
|
502 | 502 | # than *some* having `program_args` and others `controller_args` |
|
503 | 503 | @property |
|
504 | 504 | def program(self): |
|
505 | 505 | return self.engine_cmd |
|
506 | 506 | |
|
507 | 507 | @property |
|
508 | 508 | def program_args(self): |
|
509 | 509 | return self.cluster_args + self.engine_args |
|
510 | 510 | |
|
511 | 511 | def start(self, n): |
|
512 | 512 | """Start n engines by profile or profile_dir.""" |
|
513 | 513 | self.n = n |
|
514 | 514 | return super(MPIEngineSetLauncher, self).start(n) |
|
515 | 515 | |
|
516 | 516 | # deprecated MPIExec names |
|
517 | 517 | class DeprecatedMPILauncher(object): |
|
518 | 518 | def warn(self): |
|
519 | 519 | oldname = self.__class__.__name__ |
|
520 | 520 | newname = oldname.replace('MPIExec', 'MPI') |
|
521 | 521 | self.log.warn("WARNING: %s name is deprecated, use %s", oldname, newname) |
|
522 | 522 | |
|
523 | 523 | class MPIExecLauncher(MPILauncher, DeprecatedMPILauncher): |
|
524 | 524 | """Deprecated, use MPILauncher""" |
|
525 | 525 | def __init__(self, *args, **kwargs): |
|
526 | 526 | super(MPIExecLauncher, self).__init__(*args, **kwargs) |
|
527 | 527 | self.warn() |
|
528 | 528 | |
|
529 | 529 | class MPIExecControllerLauncher(MPIControllerLauncher, DeprecatedMPILauncher): |
|
530 | 530 | """Deprecated, use MPIControllerLauncher""" |
|
531 | 531 | def __init__(self, *args, **kwargs): |
|
532 | 532 | super(MPIExecControllerLauncher, self).__init__(*args, **kwargs) |
|
533 | 533 | self.warn() |
|
534 | 534 | |
|
535 | 535 | class MPIExecEngineSetLauncher(MPIEngineSetLauncher, DeprecatedMPILauncher): |
|
536 | 536 | """Deprecated, use MPIEngineSetLauncher""" |
|
537 | 537 | def __init__(self, *args, **kwargs): |
|
538 | 538 | super(MPIExecEngineSetLauncher, self).__init__(*args, **kwargs) |
|
539 | 539 | self.warn() |
|
540 | 540 | |
|
541 | 541 | |
|
542 | 542 | #----------------------------------------------------------------------------- |
|
543 | 543 | # SSH launchers |
|
544 | 544 | #----------------------------------------------------------------------------- |
|
545 | 545 | |
|
546 | 546 | # TODO: Get SSH Launcher back to level of sshx in 0.10.2 |
|
547 | 547 | |
|
548 | 548 | class SSHLauncher(LocalProcessLauncher): |
|
549 | 549 | """A minimal launcher for ssh. |
|
550 | 550 | |
|
551 | 551 | To be useful this will probably have to be extended to use the ``sshx`` |
|
552 | 552 | idea for environment variables. There could be other things this needs |
|
553 | 553 | as well. |
|
554 | 554 | """ |
|
555 | 555 | |
|
556 | 556 | ssh_cmd = List(['ssh'], config=True, |
|
557 | 557 | help="command for starting ssh") |
|
558 | 558 | ssh_args = List(['-tt'], config=True, |
|
559 | 559 | help="args to pass to ssh") |
|
560 | 560 | scp_cmd = List(['scp'], config=True, |
|
561 | 561 | help="command for sending files") |
|
562 | 562 | program = List(['date'], |
|
563 | 563 | help="Program to launch via ssh") |
|
564 | 564 | program_args = List([], |
|
565 | 565 | help="args to pass to remote program") |
|
566 | 566 | hostname = Unicode('', config=True, |
|
567 | 567 | help="hostname on which to launch the program") |
|
568 | 568 | user = Unicode('', config=True, |
|
569 | 569 | help="username for ssh") |
|
570 | 570 | location = Unicode('', config=True, |
|
571 | 571 | help="user@hostname location for ssh in one setting") |
|
572 | 572 | to_fetch = List([], config=True, |
|
573 | 573 | help="List of (remote, local) files to fetch after starting") |
|
574 | 574 | to_send = List([], config=True, |
|
575 | 575 | help="List of (local, remote) files to send before starting") |
|
576 | 576 | |
|
577 | 577 | def _hostname_changed(self, name, old, new): |
|
578 | 578 | if self.user: |
|
579 | 579 | self.location = u'%s@%s' % (self.user, new) |
|
580 | 580 | else: |
|
581 | 581 | self.location = new |
|
582 | 582 | |
|
583 | 583 | def _user_changed(self, name, old, new): |
|
584 | 584 | self.location = u'%s@%s' % (new, self.hostname) |
|
585 | 585 | |
|
586 | 586 | def find_args(self): |
|
587 | 587 | return self.ssh_cmd + self.ssh_args + [self.location] + \ |
|
588 | 588 | list(map(pipes.quote, self.program + self.program_args)) |
|
589 | 589 | |
|
590 | 590 | def _send_file(self, local, remote): |
|
591 | 591 | """send a single file""" |
|
592 | 592 | remote = "%s:%s" % (self.location, remote) |
|
593 | 593 | for i in range(10): |
|
594 | 594 | if not os.path.exists(local): |
|
595 | 595 | self.log.debug("waiting for %s" % local) |
|
596 | 596 | time.sleep(1) |
|
597 | 597 | else: |
|
598 | 598 | break |
|
599 | 599 | self.log.info("sending %s to %s", local, remote) |
|
600 | 600 | check_output(self.scp_cmd + [local, remote]) |
|
601 | 601 | |
|
602 | 602 | def send_files(self): |
|
603 | 603 | """send our files (called before start)""" |
|
604 | 604 | if not self.to_send: |
|
605 | 605 | return |
|
606 | 606 | for local_file, remote_file in self.to_send: |
|
607 | 607 | self._send_file(local_file, remote_file) |
|
608 | 608 | |
|
609 | 609 | def _fetch_file(self, remote, local): |
|
610 | 610 | """fetch a single file""" |
|
611 | 611 | full_remote = "%s:%s" % (self.location, remote) |
|
612 | 612 | self.log.info("fetching %s from %s", local, full_remote) |
|
613 | 613 | for i in range(10): |
|
614 | 614 | # wait up to 10s for remote file to exist |
|
615 | 615 | check = check_output(self.ssh_cmd + self.ssh_args + \ |
|
616 | 616 | [self.location, 'test -e', remote, "&& echo 'yes' || echo 'no'"]) |
|
617 | 617 | check = check.decode(DEFAULT_ENCODING, 'replace').strip() |
|
618 | 618 | if check == u'no': |
|
619 | 619 | time.sleep(1) |
|
620 | 620 | elif check == u'yes': |
|
621 | 621 | break |
|
622 | 622 | check_output(self.scp_cmd + [full_remote, local]) |
|
623 | 623 | |
|
624 | 624 | def fetch_files(self): |
|
625 | 625 | """fetch remote files (called after start)""" |
|
626 | 626 | if not self.to_fetch: |
|
627 | 627 | return |
|
628 | 628 | for remote_file, local_file in self.to_fetch: |
|
629 | 629 | self._fetch_file(remote_file, local_file) |
|
630 | 630 | |
|
631 | 631 | def start(self, hostname=None, user=None): |
|
632 | 632 | if hostname is not None: |
|
633 | 633 | self.hostname = hostname |
|
634 | 634 | if user is not None: |
|
635 | 635 | self.user = user |
|
636 | 636 | |
|
637 | 637 | self.send_files() |
|
638 | 638 | super(SSHLauncher, self).start() |
|
639 | 639 | self.fetch_files() |
|
640 | 640 | |
|
641 | 641 | def signal(self, sig): |
|
642 | 642 | if self.state == 'running': |
|
643 | 643 | # send escaped ssh connection-closer |
|
644 | 644 | self.process.stdin.write('~.') |
|
645 | 645 | self.process.stdin.flush() |
|
646 | 646 | |
|
647 | 647 | class SSHClusterLauncher(SSHLauncher, ClusterAppMixin): |
|
648 | 648 | |
|
649 | 649 | remote_profile_dir = Unicode('', config=True, |
|
650 | 650 | help="""The remote profile_dir to use. |
|
651 | 651 | |
|
652 | 652 | If not specified, use calling profile, stripping out possible leading homedir. |
|
653 | 653 | """) |
|
654 | 654 | |
|
655 | 655 | def _profile_dir_changed(self, name, old, new): |
|
656 | 656 | if not self.remote_profile_dir: |
|
657 | 657 | # trigger remote_profile_dir_default logic again, |
|
658 | 658 | # in case it was already triggered before profile_dir was set |
|
659 | 659 | self.remote_profile_dir = self._strip_home(new) |
|
660 | 660 | |
|
661 | 661 | @staticmethod |
|
662 | 662 | def _strip_home(path): |
|
663 | 663 | """turns /home/you/.ipython/profile_foo into .ipython/profile_foo""" |
|
664 | 664 | home = get_home_dir() |
|
665 | 665 | if not home.endswith('/'): |
|
666 | 666 | home = home+'/' |
|
667 | 667 | |
|
668 | 668 | if path.startswith(home): |
|
669 | 669 | return path[len(home):] |
|
670 | 670 | else: |
|
671 | 671 | return path |
|
672 | 672 | |
|
673 | 673 | def _remote_profile_dir_default(self): |
|
674 | 674 | return self._strip_home(self.profile_dir) |
|
675 | 675 | |
|
676 | 676 | def _cluster_id_changed(self, name, old, new): |
|
677 | 677 | if new: |
|
678 | 678 | raise ValueError("cluster id not supported by SSH launchers") |
|
679 | 679 | |
|
680 | 680 | @property |
|
681 | 681 | def cluster_args(self): |
|
682 | 682 | return ['--profile-dir', self.remote_profile_dir] |
|
683 | 683 | |
|
684 | 684 | class SSHControllerLauncher(SSHClusterLauncher, ControllerMixin): |
|
685 | 685 | |
|
686 | 686 | # alias back to *non-configurable* program[_args] for use in find_args() |
|
687 | 687 | # this way all Controller/EngineSetLaunchers have the same form, rather |
|
688 | 688 | # than *some* having `program_args` and others `controller_args` |
|
689 | 689 | |
|
690 | 690 | def _controller_cmd_default(self): |
|
691 | 691 | return ['ipcontroller'] |
|
692 | 692 | |
|
693 | 693 | @property |
|
694 | 694 | def program(self): |
|
695 | 695 | return self.controller_cmd |
|
696 | 696 | |
|
697 | 697 | @property |
|
698 | 698 | def program_args(self): |
|
699 | 699 | return self.cluster_args + self.controller_args |
|
700 | 700 | |
|
701 | 701 | def _to_fetch_default(self): |
|
702 | 702 | return [ |
|
703 | 703 | (os.path.join(self.remote_profile_dir, 'security', cf), |
|
704 | 704 | os.path.join(self.profile_dir, 'security', cf),) |
|
705 | 705 | for cf in ('ipcontroller-client.json', 'ipcontroller-engine.json') |
|
706 | 706 | ] |
|
707 | 707 | |
|
708 | 708 | class SSHEngineLauncher(SSHClusterLauncher, EngineMixin): |
|
709 | 709 | |
|
710 | 710 | # alias back to *non-configurable* program[_args] for use in find_args() |
|
711 | 711 | # this way all Controller/EngineSetLaunchers have the same form, rather |
|
712 | 712 | # than *some* having `program_args` and others `controller_args` |
|
713 | 713 | |
|
714 | 714 | def _engine_cmd_default(self): |
|
715 | 715 | return ['ipengine'] |
|
716 | 716 | |
|
717 | 717 | @property |
|
718 | 718 | def program(self): |
|
719 | 719 | return self.engine_cmd |
|
720 | 720 | |
|
721 | 721 | @property |
|
722 | 722 | def program_args(self): |
|
723 | 723 | return self.cluster_args + self.engine_args |
|
724 | 724 | |
|
725 | 725 | def _to_send_default(self): |
|
726 | 726 | return [ |
|
727 | 727 | (os.path.join(self.profile_dir, 'security', cf), |
|
728 | 728 | os.path.join(self.remote_profile_dir, 'security', cf)) |
|
729 | 729 | for cf in ('ipcontroller-client.json', 'ipcontroller-engine.json') |
|
730 | 730 | ] |
|
731 | 731 | |
|
732 | 732 | |
|
733 | 733 | class SSHEngineSetLauncher(LocalEngineSetLauncher): |
|
734 | 734 | launcher_class = SSHEngineLauncher |
|
735 | 735 | engines = Dict(config=True, |
|
736 | 736 | help="""dict of engines to launch. This is a dict by hostname of ints, |
|
737 | 737 | corresponding to the number of engines to start on that host.""") |
|
738 | 738 | |
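
A config sketch (hosts hypothetical) of the `engines` dict: plain ints, or (n, args) tuples when a host needs its own engine arguments, matching what engine_count and start() unpack below:

    c.SSHEngineSetLauncher.engines = {
        'host1.example.com': 2,
        'user@host2.example.com': (4, ['--log-level=20']),
    }
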
|
739 | 739 | def _engine_cmd_default(self): |
|
740 | 740 | return ['ipengine'] |
|
741 | 741 | |
|
742 | 742 | @property |
|
743 | 743 | def engine_count(self): |
|
744 | 744 | """determine engine count from `engines` dict""" |
|
745 | 745 | count = 0 |
|
746 | 746 | for n in itervalues(self.engines): |
|
747 | 747 | if isinstance(n, (tuple,list)): |
|
748 | 748 | n,args = n |
|
749 | 749 | count += n |
|
750 | 750 | return count |
|
751 | 751 | |
|
752 | 752 | def start(self, n): |
|
753 | 753 | """Start engines by profile or profile_dir. |
|
754 | 754 | `n` is ignored, and the `engines` config property is used instead. |
|
755 | 755 | """ |
|
756 | 756 | |
|
757 | 757 | dlist = [] |
|
758 | 758 | for host, n in iteritems(self.engines): |
|
759 | 759 | if isinstance(n, (tuple, list)): |
|
760 | 760 | n, args = n |
|
761 | 761 | else: |
|
762 | 762 | args = copy.deepcopy(self.engine_args) |
|
763 | 763 | |
|
764 | 764 | if '@' in host: |
|
765 | 765 | user,host = host.split('@',1) |
|
766 | 766 | else: |
|
767 | 767 | user=None |
|
768 | 768 | for i in range(n): |
|
769 | 769 | if i > 0: |
|
770 | 770 | time.sleep(self.delay) |
|
771 | 771 | el = self.launcher_class(work_dir=self.work_dir, parent=self, log=self.log, |
|
772 | 772 | profile_dir=self.profile_dir, cluster_id=self.cluster_id, |
|
773 | 773 | ) |
|
774 | 774 | if i > 0: |
|
775 | 775 | # only send files for the first engine on each host |
|
776 | 776 | el.to_send = [] |
|
777 | 777 | |
|
778 | 778 | # Copy the engine args over to each engine launcher. |
|
779 | 779 | el.engine_cmd = self.engine_cmd |
|
780 | 780 | el.engine_args = args |
|
781 | 781 | el.on_stop(self._notice_engine_stopped) |
|
782 | 782 | d = el.start(user=user, hostname=host) |
|
783 | 783 | self.launchers[ "%s/%i" % (host,i) ] = el |
|
784 | 784 | dlist.append(d) |
|
785 | 785 | self.notify_start(dlist) |
|
786 | 786 | return dlist |
|
787 | 787 | |
|
788 | 788 | |
|
789 | 789 | class SSHProxyEngineSetLauncher(SSHClusterLauncher): |
|
790 | 790 | """Launcher for calling |
|
791 | 791 | `ipcluster engines` on a remote machine. |
|
792 | 792 | |
|
793 | 793 | Requires that remote profile is already configured. |
|
794 | 794 | """ |
|
795 | 795 | |
|
796 | 796 | n = Integer() |
|
797 | 797 | ipcluster_cmd = List(['ipcluster'], config=True) |
|
798 | 798 | |
|
799 | 799 | @property |
|
800 | 800 | def program(self): |
|
801 | 801 | return self.ipcluster_cmd + ['engines'] |
|
802 | 802 | |
|
803 | 803 | @property |
|
804 | 804 | def program_args(self): |
|
805 | 805 | return ['-n', str(self.n), '--profile-dir', self.remote_profile_dir] |
|
806 | 806 | |
|
807 | 807 | def _to_send_default(self): |
|
808 | 808 | return [ |
|
809 | 809 | (os.path.join(self.profile_dir, 'security', cf), |
|
810 | 810 | os.path.join(self.remote_profile_dir, 'security', cf)) |
|
811 | 811 | for cf in ('ipcontroller-client.json', 'ipcontroller-engine.json') |
|
812 | 812 | ] |
|
813 | 813 | |
|
814 | 814 | def start(self, n): |
|
815 | 815 | self.n = n |
|
816 | 816 | super(SSHProxyEngineSetLauncher, self).start() |
|
817 | 817 | |
|
818 | 818 | |
|
819 | 819 | #----------------------------------------------------------------------------- |
|
820 | 820 | # Windows HPC Server 2008 scheduler launchers |
|
821 | 821 | #----------------------------------------------------------------------------- |
|
822 | 822 | |
|
823 | 823 | |
|
824 | 824 | # This is only used on Windows. |
|
825 | 825 | def find_job_cmd(): |
|
826 | 826 | if WINDOWS: |
|
827 | 827 | try: |
|
828 | 828 | return find_cmd('job') |
|
829 | 829 | except (FindCmdError, ImportError): |
|
830 | 830 | # ImportError will be raised if win32api is not installed |
|
831 | 831 | return 'job' |
|
832 | 832 | else: |
|
833 | 833 | return 'job' |
|
834 | 834 | |
|
835 | 835 | |
|
836 | 836 | class WindowsHPCLauncher(BaseLauncher): |
|
837 | 837 | |
|
838 | 838 | job_id_regexp = CRegExp(r'\d+', config=True, |
|
839 | 839 | help="""A regular expression used to get the job id from the output of the |
|
840 | 840 | submit_command. """ |
|
841 | 841 | ) |
|
842 | 842 | job_file_name = Unicode(u'ipython_job.xml', config=True, |
|
843 | 843 | help="The filename of the instantiated job script.") |
|
844 | 844 | # The full path to the instantiated job script. This gets made dynamically |
|
845 | 845 | # by combining the work_dir with the job_file_name. |
|
846 | 846 | job_file = Unicode(u'') |
|
847 | 847 | scheduler = Unicode('', config=True, |
|
848 | 848 | help="The hostname of the scheduler to submit the job to.") |
|
849 | 849 | job_cmd = Unicode(find_job_cmd(), config=True, |
|
850 | 850 | help="The command for submitting jobs.") |
|
851 | 851 | |
|
852 | 852 | def __init__(self, work_dir=u'.', config=None, **kwargs): |
|
853 | 853 | super(WindowsHPCLauncher, self).__init__( |
|
854 | 854 | work_dir=work_dir, config=config, **kwargs |
|
855 | 855 | ) |
|
856 | 856 | |
|
857 | 857 | @property |
|
858 | 858 | def job_file(self): |
|
859 | 859 | return os.path.join(self.work_dir, self.job_file_name) |
|
860 | 860 | |
|
861 | 861 | def write_job_file(self, n): |
|
862 | 862 | raise NotImplementedError("Implement write_job_file in a subclass.") |
|
863 | 863 | |
|
864 | 864 | def find_args(self): |
|
865 | 865 | return [u'job.exe'] |
|
866 | 866 | |
|
867 | 867 | def parse_job_id(self, output): |
|
868 | 868 | """Take the output of the submit command and return the job id.""" |
|
869 | 869 | m = self.job_id_regexp.search(output) |
|
870 | 870 | if m is not None: |
|
871 | 871 | job_id = m.group() |
|
872 | 872 | else: |
|
873 | 873 | raise LauncherError("Job id couldn't be determined: %s" % output) |
|
874 | 874 | self.job_id = job_id |
|
875 | 875 | self.log.info('Job started with id: %r', job_id) |
|
876 | 876 | return job_id |
|
877 | 877 | |
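
A tiny sketch (scheduler output invented) of what parse_job_id extracts with the default job_id_regexp of r'\d+':

    import re
    m = re.search(r'\d+', 'Job has been submitted. ID: 4242')
    assert m is not None and m.group() == '4242'
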
|
878 | 878 | def start(self, n): |
|
879 | 879 | """Start n copies of the process using the Win HPC job scheduler.""" |
|
880 | 880 | self.write_job_file(n) |
|
881 | 881 | args = [ |
|
882 | 882 | 'submit', |
|
883 | 883 | '/jobfile:%s' % self.job_file, |
|
884 | 884 | '/scheduler:%s' % self.scheduler |
|
885 | 885 | ] |
|
886 | 886 | self.log.debug("Starting Win HPC Job: %s" % (self.job_cmd + ' ' + ' '.join(args),)) |
|
887 | 887 | |
|
888 | 888 | output = check_output([self.job_cmd]+args, |
|
889 | 889 | env=os.environ, |
|
890 | 890 | cwd=self.work_dir, |
|
891 | 891 | stderr=STDOUT |
|
892 | 892 | ) |
|
893 | 893 | output = output.decode(DEFAULT_ENCODING, 'replace') |
|
894 | 894 | job_id = self.parse_job_id(output) |
|
895 | 895 | self.notify_start(job_id) |
|
896 | 896 | return job_id |
|
897 | 897 | |
|
898 | 898 | def stop(self): |
|
899 | 899 | args = [ |
|
900 | 900 | 'cancel', |
|
901 | 901 | self.job_id, |
|
902 | 902 | '/scheduler:%s' % self.scheduler |
|
903 | 903 | ] |
|
904 | 904 | self.log.info("Stopping Win HPC Job: %s" % (self.job_cmd + ' ' + ' '.join(args),)) |
|
905 | 905 | try: |
|
906 | 906 | output = check_output([self.job_cmd]+args, |
|
907 | 907 | env=os.environ, |
|
908 | 908 | cwd=self.work_dir, |
|
909 | 909 | stderr=STDOUT |
|
910 | 910 | ) |
|
911 | 911 | output = output.decode(DEFAULT_ENCODING, 'replace') |
|
912 | 912 | except: |
|
913 | 913 | output = u'The job already appears to be stopped: %r' % self.job_id |
|
914 | 914 | self.notify_stop(dict(job_id=self.job_id, output=output)) # Pass the output of the kill cmd |
|
915 | 915 | return output |
|
916 | 916 | |
|
917 | 917 | |
|
918 | 918 | class WindowsHPCControllerLauncher(WindowsHPCLauncher, ClusterAppMixin): |
|
919 | 919 | |
|
920 | 920 | job_file_name = Unicode(u'ipcontroller_job.xml', config=True, |
|
921 | 921 | help="WinHPC xml job file.") |
|
922 | 922 | controller_args = List([], config=False, |
|
923 | 923 | help="extra args to pass to ipcontroller") |
|
924 | 924 | |
|
925 | 925 | def write_job_file(self, n): |
|
926 | 926 | job = IPControllerJob(parent=self) |
|
927 | 927 | |
|
928 | 928 | t = IPControllerTask(parent=self) |
|
929 | 929 | # The task's work directory is *not* the actual work directory of
|
930 | 930 | # the controller. It is used as the base path for the stdout/stderr |
|
931 | 931 | # files that the scheduler redirects to. |
|
932 | 932 | t.work_directory = self.profile_dir |
|
933 | 933 | # Add the profile_dir and cluster_id arguments (self.cluster_args).
|
934 | 934 | t.controller_args.extend(self.cluster_args) |
|
935 | 935 | t.controller_args.extend(self.controller_args) |
|
936 | 936 | job.add_task(t) |
|
937 | 937 | |
|
938 | 938 | self.log.debug("Writing job description file: %s", self.job_file) |
|
939 | 939 | job.write(self.job_file) |
|
940 | 940 | |
|
941 | 941 | @property |
|
942 | 942 | def job_file(self): |
|
943 | 943 | return os.path.join(self.profile_dir, self.job_file_name) |
|
944 | 944 | |
|
945 | 945 | def start(self): |
|
946 | 946 | """Start the controller by profile_dir.""" |
|
947 | 947 | return super(WindowsHPCControllerLauncher, self).start(1) |
|
948 | 948 | |
|
949 | 949 | |
|
950 | 950 | class WindowsHPCEngineSetLauncher(WindowsHPCLauncher, ClusterAppMixin): |
|
951 | 951 | |
|
952 | 952 | job_file_name = Unicode(u'ipengineset_job.xml', config=True, |
|
953 | 953 | help="jobfile for ipengines job") |
|
954 | 954 | engine_args = List([], config=False, |
|
955 | 955 | help="extra args to pass to ipengine")
|
956 | 956 | |
|
957 | 957 | def write_job_file(self, n): |
|
958 | 958 | job = IPEngineSetJob(parent=self) |
|
959 | 959 | |
|
960 | 960 | for i in range(n): |
|
961 | 961 | t = IPEngineTask(parent=self) |
|
962 | 962 | # The task's work directory is *not* the actual work directory of
|
963 | 963 | # the engine. It is used as the base path for the stdout/stderr |
|
964 | 964 | # files that the scheduler redirects to. |
|
965 | 965 | t.work_directory = self.profile_dir |
|
966 | 966 | # Add the profile_dir and cluster_id arguments (self.cluster_args).
|
967 | 967 | t.engine_args.extend(self.cluster_args) |
|
968 | 968 | t.engine_args.extend(self.engine_args) |
|
969 | 969 | job.add_task(t) |
|
970 | 970 | |
|
971 | 971 | self.log.debug("Writing job description file: %s", self.job_file) |
|
972 | 972 | job.write(self.job_file) |
|
973 | 973 | |
|
974 | 974 | @property |
|
975 | 975 | def job_file(self): |
|
976 | 976 | return os.path.join(self.profile_dir, self.job_file_name) |
|
977 | 977 | |
|
978 | 978 | def start(self, n): |
|
979 | 979 | """Start n engines by profile_dir."""
|
980 | 980 | return super(WindowsHPCEngineSetLauncher, self).start(n) |
|
981 | 981 | |
|
982 | 982 | |
|
983 | 983 | #----------------------------------------------------------------------------- |
|
984 | 984 | # Batch (PBS) system launchers |
|
985 | 985 | #----------------------------------------------------------------------------- |
|
986 | 986 | |
|
987 | 987 | class BatchClusterAppMixin(ClusterAppMixin): |
|
988 | 988 | """ClusterApp mixin that updates the self.context dict, rather than cl-args.""" |
|
989 | 989 | def _profile_dir_changed(self, name, old, new): |
|
990 | 990 | self.context[name] = new |
|
991 | 991 | _cluster_id_changed = _profile_dir_changed |
|
992 | 992 | |
|
993 | 993 | def _profile_dir_default(self): |
|
994 | 994 | self.context['profile_dir'] = '' |
|
995 | 995 | return '' |
|
996 | 996 | def _cluster_id_default(self): |
|
997 | 997 | self.context['cluster_id'] = '' |
|
998 | 998 | return '' |
|
999 | 999 | |
|
1000 | 1000 | |
|
1001 | 1001 | class BatchSystemLauncher(BaseLauncher): |
|
1002 | 1002 | """Launch an external process using a batch system. |
|
1003 | 1003 | |
|
1004 | 1004 | This class is designed to work with UNIX batch systems like PBS, LSF, |
|
1005 | 1005 | GridEngine, etc. The overall model is that there are different commands |
|
1006 | 1006 | like qsub, qdel, etc. that handle the starting and stopping of the process. |
|
1007 | 1007 | |
|
1008 | 1008 | This class also has the notion of a batch script. The ``batch_template`` |
|
1009 | 1009 | attribute can be set to a string that is a template for the batch script. |
|
1010 | 1010 | This template is instantiated using string formatting. Thus the template can |
|
1011 | 1011 | use {n} for the number of instances. Subclasses can add additional variables
|
1012 | 1012 | to the template dict. |
|
1013 | 1013 | """ |
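As an illustrative sketch of the template mechanism described above, a user
could supply a template in ipcluster_config.py (the queue name and script
body are hypothetical); {n}, {queue}, {profile_dir} and {cluster_id} are
filled in from the launcher's context dict:

    c.PBSEngineSetLauncher.batch_template = """#!/bin/sh
    #PBS -q {queue}
    #PBS -t 1-{n}
    ipengine --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
    """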
|
1014 | 1014 | |
|
1015 | 1015 | # Subclasses must fill these in. See PBSEngineSet |
|
1016 | 1016 | submit_command = List([''], config=True, |
|
1017 | 1017 | help="The name of the command line program used to submit jobs.") |
|
1018 | 1018 | delete_command = List([''], config=True, |
|
1019 | 1019 | help="The name of the command line program used to delete jobs.") |
|
1020 | 1020 | job_id_regexp = CRegExp('', config=True, |
|
1021 | 1021 | help="""A regular expression used to get the job id from the output of the |
|
1022 | 1022 | submit_command.""") |
|
1023 | 1023 | job_id_regexp_group = Integer(0, config=True, |
|
1024 | 1024 | help="""The group we wish to match in job_id_regexp (0 to match all)""") |
|
1025 | 1025 | batch_template = Unicode('', config=True, |
|
1026 | 1026 | help="The string that is the batch script template itself.") |
|
1027 | 1027 | batch_template_file = Unicode(u'', config=True, |
|
1028 | 1028 | help="The file that contains the batch template.") |
|
1029 | 1029 | batch_file_name = Unicode(u'batch_script', config=True, |
|
1030 | 1030 | help="The filename of the instantiated batch script.") |
|
1031 | 1031 | queue = Unicode(u'', config=True, |
|
1032 | 1032 | help="The PBS Queue.") |
|
1033 | 1033 | |
|
1034 | 1034 | def _queue_changed(self, name, old, new): |
|
1035 | 1035 | self.context[name] = new |
|
1036 | 1036 | |
|
1037 | 1037 | n = Integer(1) |
|
1038 | 1038 | _n_changed = _queue_changed |
|
1039 | 1039 | |
|
1040 | 1040 | # not configurable, override in subclasses |
|
1041 | 1041 | # PBS Job Array regex |
|
1042 | 1042 | job_array_regexp = CRegExp('') |
|
1043 | 1043 | job_array_template = Unicode('') |
|
1044 | 1044 | # PBS Queue regex |
|
1045 | 1045 | queue_regexp = CRegExp('') |
|
1046 | 1046 | queue_template = Unicode('') |
|
1047 | 1047 | # The default batch template, override in subclasses |
|
1048 | 1048 | default_template = Unicode('') |
|
1049 | 1049 | # The full path to the instantiated batch script. |
|
1050 | 1050 | batch_file = Unicode(u'') |
|
1051 | 1051 | # the format dict used with batch_template: |
|
1052 | 1052 | context = Dict() |
|
1053 | 1053 | |
|
1054 | 1054 | def _context_default(self): |
|
1055 | 1055 | """load the default context with the default values for the basic keys |
|
1056 | 1056 | |
|
1057 | 1057 | because the _trait_changed methods only load the context if they |
|
1058 | 1058 | are set to something other than the default value. |
|
1059 | 1059 | """ |
|
1060 | 1060 | return dict(n=1, queue=u'', profile_dir=u'', cluster_id=u'') |
|
1061 | 1061 | |
|
1062 | 1062 | # the Formatter instance for rendering the templates: |
|
1063 | 1063 | formatter = Instance(EvalFormatter, (), {}) |
|
1064 | 1064 | |
|
1065 | 1065 | def find_args(self): |
|
1066 | 1066 | return self.submit_command + [self.batch_file] |
|
1067 | 1067 | |
|
1068 | 1068 | def __init__(self, work_dir=u'.', config=None, **kwargs): |
|
1069 | 1069 | super(BatchSystemLauncher, self).__init__( |
|
1070 | 1070 | work_dir=work_dir, config=config, **kwargs |
|
1071 | 1071 | ) |
|
1072 | 1072 | self.batch_file = os.path.join(self.work_dir, self.batch_file_name) |
|
1073 | 1073 | |
|
1074 | 1074 | def parse_job_id(self, output): |
|
1075 | 1075 | """Take the output of the submit command and return the job id.""" |
|
1076 | 1076 | m = self.job_id_regexp.search(output) |
|
1077 | 1077 | if m is not None: |
|
1078 | 1078 | job_id = m.group(self.job_id_regexp_group) |
|
1079 | 1079 | else: |
|
1080 | 1080 | raise LauncherError("Job id couldn't be determined: %s" % output) |
|
1081 | 1081 | self.job_id = job_id |
|
1082 | 1082 | self.log.info('Job submitted with job id: %r', job_id) |
|
1083 | 1083 | return job_id |
|
1084 | 1084 | |
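An illustrative sketch of the extraction performed by parse_job_id above,
using PBSLauncher's default job_id_regexp (the sample submit output is
hypothetical):

    import re
    output = '12345.pbsserver.example.com'        # e.g. what qsub prints
    job_id = re.search(r'\d+', output).group(0)   # group 0 = whole match
    assert job_id == '12345'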
|
1085 | 1085 | def write_batch_script(self, n): |
|
1086 | 1086 | """Instantiate and write the batch script to the work_dir.""" |
|
1087 | 1087 | self.n = n |
|
1088 | 1088 | # first priority is batch_template if set |
|
1089 | 1089 | if self.batch_template_file and not self.batch_template: |
|
1090 | 1090 | # second priority is batch_template_file |
|
1091 | 1091 | with open(self.batch_template_file) as f: |
|
1092 | 1092 | self.batch_template = f.read() |
|
1093 | 1093 | if not self.batch_template: |
|
1094 | 1094 | # third (last) priority is default_template |
|
1095 | 1095 | self.batch_template = self.default_template |
|
1096 | 1096 | # add jobarray or queue lines to user-specified template |
|
1097 | 1097 | # note that this is *only* when user did not specify a template. |
|
1098 | 1098 | self._insert_queue_in_script() |
|
1099 | 1099 | self._insert_job_array_in_script() |
|
1100 | 1100 | script_as_string = self.formatter.format(self.batch_template, **self.context) |
|
1101 | 1101 | self.log.debug('Writing batch script: %s', self.batch_file) |
|
1102 | 1102 | with open(self.batch_file, 'w') as f: |
|
1103 | 1103 | f.write(script_as_string) |
|
1104 | 1104 | os.chmod(self.batch_file, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) |
|
1105 | 1105 | |
|
1106 | 1106 | def _insert_queue_in_script(self): |
|
1107 | 1107 | """Inserts a queue if required into the batch script. |
|
1108 | 1108 | """ |
|
1109 | 1109 | if self.queue and not self.queue_regexp.search(self.batch_template): |
|
1110 | 1110 | self.log.debug("adding PBS queue settings to batch script") |
|
1111 | 1111 | firstline, rest = self.batch_template.split('\n',1) |
|
1112 | 1112 | self.batch_template = u'\n'.join([firstline, self.queue_template, rest]) |
|
1113 | 1113 | |
|
1114 | 1114 | def _insert_job_array_in_script(self): |
|
1115 | 1115 | """Inserts a job array if required into the batch script. |
|
1116 | 1116 | """ |
|
1117 | 1117 | if not self.job_array_regexp.search(self.batch_template): |
|
1118 | 1118 | self.log.debug("adding job array settings to batch script") |
|
1119 | 1119 | firstline, rest = self.batch_template.split('\n',1) |
|
1120 | 1120 | self.batch_template = u'\n'.join([firstline, self.job_array_template, rest]) |
|
1121 | 1121 | |
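A quick sketch of the splice these helpers perform (template text is
hypothetical): the generated line lands after the first line, so a '#!'
shebang stays on line one.

    template = '#!/bin/sh\nipengine\n'
    firstline, rest = template.split('\n', 1)
    print('\n'.join([firstline, '#PBS -q {queue}', rest]))
    # #!/bin/sh
    # #PBS -q {queue}
    # ipengine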
|
1122 | 1122 | def start(self, n): |
|
1123 | 1123 | """Start n copies of the process using a batch system.""" |
|
1124 | 1124 | self.log.debug("Starting %s: %r", self.__class__.__name__, self.args) |
|
1125 | 1125 | # Here we save profile_dir in the context so it

1126 | 1126 | # can be used in the batch script template as {profile_dir}.
|
1127 | 1127 | self.write_batch_script(n) |
|
1128 | 1128 | output = check_output(self.args, env=os.environ) |
|
1129 | 1129 | output = output.decode(DEFAULT_ENCODING, 'replace') |
|
1130 | 1130 | |
|
1131 | 1131 | job_id = self.parse_job_id(output) |
|
1132 | 1132 | self.notify_start(job_id) |
|
1133 | 1133 | return job_id |
|
1134 | 1134 | |
|
1135 | 1135 | def stop(self): |
|
1136 | 1136 | try: |
|
1137 | 1137 | p = Popen(self.delete_command+[self.job_id], env=os.environ, |
|
1138 | 1138 | stdout=PIPE, stderr=PIPE) |
|
1139 | 1139 | out, err = p.communicate() |
|
1140 | 1140 | output = out + err |
|
1141 | 1141 | except: |
|
1142 | 1142 | self.log.exception("Problem stopping cluster with command: %s" % |
|
1143 | 1143 | (self.delete_command + [self.job_id])) |
|
1144 | 1144 | output = "" |
|
1145 | 1145 | output = output.decode(DEFAULT_ENCODING, 'replace') |
|
1146 | 1146 | self.notify_stop(dict(job_id=self.job_id, output=output)) # Pass the output of the kill cmd |
|
1147 | 1147 | return output |
|
1148 | 1148 | |
|
1149 | 1149 | |
|
1150 | 1150 | class PBSLauncher(BatchSystemLauncher): |
|
1151 | 1151 | """A BatchSystemLauncher subclass for PBS.""" |
|
1152 | 1152 | |
|
1153 | 1153 | submit_command = List(['qsub'], config=True, |
|
1154 | 1154 | help="The PBS submit command ['qsub']") |
|
1155 | 1155 | delete_command = List(['qdel'], config=True, |
|
1156 | 1156 | help="The PBS delete command ['qdel']")
|
1157 | 1157 | job_id_regexp = CRegExp(r'\d+', config=True, |
|
1158 | 1158 | help="Regular expression for identifying the job ID [r'\d+']")
|
1159 | 1159 | |
|
1160 | 1160 | batch_file = Unicode(u'') |
|
1161 | 1161 | job_array_regexp = CRegExp('#PBS\W+-t\W+[\w\d\-\$]+') |
|
1162 | 1162 | job_array_template = Unicode('#PBS -t 1-{n}') |
|
1163 | 1163 | queue_regexp = CRegExp('#PBS\W+-q\W+\$?\w+') |
|
1164 | 1164 | queue_template = Unicode('#PBS -q {queue}') |
|
1165 | 1165 | |
|
1166 | 1166 | |
|
1167 | 1167 | class PBSControllerLauncher(PBSLauncher, BatchClusterAppMixin): |
|
1168 | 1168 | """Launch a controller using PBS.""" |
|
1169 | 1169 | |
|
1170 | 1170 | batch_file_name = Unicode(u'pbs_controller', config=True, |
|
1171 | 1171 | help="batch file name for the controller job.") |
|
1172 | 1172 | default_template= Unicode("""#!/bin/sh |
|
1173 | 1173 | #PBS -V |
|
1174 | 1174 | #PBS -N ipcontroller |
|
1175 | 1175 | %s --log-to-file --profile-dir="{profile_dir}" --cluster-id="{cluster_id}" |
|
1176 | 1176 | """%(' '.join(map(pipes.quote, ipcontroller_cmd_argv)))) |
|
1177 | 1177 | |
|
1178 | 1178 | def start(self): |
|
1179 | 1179 | """Start the controller by profile or profile_dir.""" |
|
1180 | 1180 | return super(PBSControllerLauncher, self).start(1) |
|
1181 | 1181 | |
|
1182 | 1182 | |
|
1183 | 1183 | class PBSEngineSetLauncher(PBSLauncher, BatchClusterAppMixin): |
|
1184 | 1184 | """Launch Engines using PBS""" |
|
1185 | 1185 | batch_file_name = Unicode(u'pbs_engines', config=True, |
|
1186 | 1186 | help="batch file name for the engine(s) job.") |
|
1187 | 1187 | default_template= Unicode(u"""#!/bin/sh |
|
1188 | 1188 | #PBS -V |
|
1189 | 1189 | #PBS -N ipengine |
|
1190 | 1190 | %s --profile-dir="{profile_dir}" --cluster-id="{cluster_id}" |
|
1191 | 1191 | """%(' '.join(map(pipes.quote,ipengine_cmd_argv)))) |
|
1192 | 1192 | |
|
1193 | 1193 | |
|
1194 | 1194 | #SGE is very similar to PBS |
|
1195 | 1195 | |
|
1196 | 1196 | class SGELauncher(PBSLauncher): |
|
1197 | 1197 | """Sun GridEngine is a PBS clone with slightly different syntax""" |
|
1198 | 1198 | job_array_regexp = CRegExp('#\$\W+\-t') |
|
1199 | 1199 | job_array_template = Unicode('#$ -t 1-{n}') |
|
1200 | 1200 | queue_regexp = CRegExp('#\$\W+-q\W+\$?\w+') |
|
1201 | 1201 | queue_template = Unicode('#$ -q {queue}') |
|
1202 | 1202 | |
|
1203 | 1203 | |
|
1204 | 1204 | class SGEControllerLauncher(SGELauncher, BatchClusterAppMixin): |
|
1205 | 1205 | """Launch a controller using SGE.""" |
|
1206 | 1206 | |
|
1207 | 1207 | batch_file_name = Unicode(u'sge_controller', config=True, |
|
1208 | 1208 | help="batch file name for the ipcontroller job.")
|
1209 | 1209 | default_template= Unicode(u"""#$ -V |
|
1210 | 1210 | #$ -S /bin/sh |
|
1211 | 1211 | #$ -N ipcontroller |
|
1212 | 1212 | %s --log-to-file --profile-dir="{profile_dir}" --cluster-id="{cluster_id}" |
|
1213 | 1213 | """%(' '.join(map(pipes.quote, ipcontroller_cmd_argv)))) |
|
1214 | 1214 | |
|
1215 | 1215 | def start(self): |
|
1216 | 1216 | """Start the controller by profile or profile_dir.""" |
|
1217 | 1217 | return super(SGEControllerLauncher, self).start(1) |
|
1218 | 1218 | |
|
1219 | 1219 | |
|
1220 | 1220 | class SGEEngineSetLauncher(SGELauncher, BatchClusterAppMixin): |
|
1221 | 1221 | """Launch Engines with SGE""" |
|
1222 | 1222 | batch_file_name = Unicode(u'sge_engines', config=True, |
|
1223 | 1223 | help="batch file name for the engine(s) job.") |
|
1224 | 1224 | default_template = Unicode("""#$ -V |
|
1225 | 1225 | #$ -S /bin/sh |
|
1226 | 1226 | #$ -N ipengine |
|
1227 | 1227 | %s --profile-dir="{profile_dir}" --cluster-id="{cluster_id}" |
|
1228 | 1228 | """%(' '.join(map(pipes.quote, ipengine_cmd_argv)))) |
|
1229 | 1229 | |
|
1230 | 1230 | |
|
1231 | 1231 | # LSF launchers |
|
1232 | 1232 | |
|
1233 | 1233 | class LSFLauncher(BatchSystemLauncher): |
|
1234 | 1234 | """A BatchSystemLauncher subclass for LSF.""" |
|
1235 | 1235 | |
|
1236 | 1236 | submit_command = List(['bsub'], config=True, |
|
1237 | 1237 | help="The LSF submit command ['bsub']")
|
1238 | 1238 | delete_command = List(['bkill'], config=True, |
|
1239 | 1239 | help="The LSF delete command ['bkill']")
|
1240 | 1240 | job_id_regexp = CRegExp(r'\d+', config=True, |
|
1241 | 1241 | help="Regular expression for identifying the job ID [r'\d+']")
|
1242 | 1242 | |
|
1243 | 1243 | batch_file = Unicode(u'') |
|
1244 | 1244 | job_array_regexp = CRegExp('#BSUB[ \t]-J+\w+\[\d+-\d+\]') |
|
1245 | 1245 | job_array_template = Unicode('#BSUB -J ipengine[1-{n}]') |
|
1246 | 1246 | queue_regexp = CRegExp('#BSUB[ \t]+-q[ \t]+\w+') |
|
1247 | 1247 | queue_template = Unicode('#BSUB -q {queue}') |
|
1248 | 1248 | |
|
1249 | 1249 | def start(self, n): |
|
1250 | 1250 | """Start n copies of the process using LSF batch system. |
|
1251 | 1251 | This can't inherit from the base class because bsub expects

1252 | 1252 | to be piped a shell script in order to honor the #BSUB directives:
|
1253 | 1253 | bsub < script |
|
1254 | 1254 | """ |
|
1255 | 1255 | # Here we save profile_dir in the context so it

1256 | 1256 | # can be used in the batch script template as {profile_dir}.
|
1257 | 1257 | self.write_batch_script(n) |
|
1258 | 1258 | piped_cmd = self.args[0]+'<\"'+self.args[1]+'\"' |
|
1259 | 1259 | self.log.debug("Starting %s: %s", self.__class__.__name__, piped_cmd) |
|
1260 | 1260 | p = Popen(piped_cmd, shell=True,env=os.environ,stdout=PIPE) |
|
1261 | 1261 | output,err = p.communicate() |
|
1262 | 1262 | output = output.decode(DEFAULT_ENCODING, 'replace') |
|
1263 | 1263 | job_id = self.parse_job_id(output) |
|
1264 | 1264 | self.notify_start(job_id) |
|
1265 | 1265 | return job_id |
|
1266 | 1266 | |
|
1267 | 1267 | |
|
1268 | 1268 | class LSFControllerLauncher(LSFLauncher, BatchClusterAppMixin): |
|
1269 | 1269 | """Launch a controller using LSF.""" |
|
1270 | 1270 | |
|
1271 | 1271 | batch_file_name = Unicode(u'lsf_controller', config=True, |
|
1272 | 1272 | help="batch file name for the controller job.") |
|
1273 | 1273 | default_template= Unicode("""#!/bin/sh |
|
1274 | 1274 | #BSUB -J ipcontroller |
|
1275 | 1275 | #BSUB -oo ipcontroller.o.%%J |
|
1276 | 1276 | #BSUB -eo ipcontroller.e.%%J |
|
1277 | 1277 | %s --log-to-file --profile-dir="{profile_dir}" --cluster-id="{cluster_id}" |
|
1278 | 1278 | """%(' '.join(map(pipes.quote,ipcontroller_cmd_argv)))) |
|
1279 | 1279 | |
|
1280 | 1280 | def start(self): |
|
1281 | 1281 | """Start the controller by profile or profile_dir.""" |
|
1282 | 1282 | return super(LSFControllerLauncher, self).start(1) |
|
1283 | 1283 | |
|
1284 | 1284 | |
|
1285 | 1285 | class LSFEngineSetLauncher(LSFLauncher, BatchClusterAppMixin): |
|
1286 | 1286 | """Launch Engines using LSF""" |
|
1287 | 1287 | batch_file_name = Unicode(u'lsf_engines', config=True, |
|
1288 | 1288 | help="batch file name for the engine(s) job.") |
|
1289 | 1289 | default_template= Unicode(u"""#!/bin/sh |
|
1290 | 1290 | #BSUB -oo ipengine.o.%%J |
|
1291 | 1291 | #BSUB -eo ipengine.e.%%J |
|
1292 | 1292 | %s --profile-dir="{profile_dir}" --cluster-id="{cluster_id}" |
|
1293 | 1293 | """%(' '.join(map(pipes.quote, ipengine_cmd_argv)))) |
|
1294 | 1294 | |
|
1295 | 1295 | |
|
1296 | 1296 | |
|
1297 | 1297 | class HTCondorLauncher(BatchSystemLauncher): |
|
1298 | 1298 | """A BatchSystemLauncher subclass for HTCondor. |
|
1299 | 1299 | |
|
1300 | 1300 | HTCondor requires that we launch the ipengine/ipcontroller scripts rather |
|
1301 | 1301 | than the python instance but otherwise is very similar to PBS. This is because
|
1302 | 1302 | HTCondor destroys sys.executable when launching remote processes - a launched |
|
1303 | 1303 | python process depends on sys.executable to effectively evaluate its |
|
1304 | 1304 | module search paths. Without it, regardless of which python interpreter you launch |
|
1305 | 1305 | you will get only the built-in module search paths.
|
1306 | 1306 | |
|
1307 | 1307 | We use the ip{cluster, engine, controller} scripts as our executable to circumvent |
|
1308 | 1308 | this - the mechanism of shebanged scripts means that the python binary will be |
|
1309 | 1309 | launched with argv[0] set to the *location of the ip{cluster, engine, controller} |
|
1310 | 1310 | scripts on the remote node*. This means you need to take care that: |
|
1311 | ||
|
1312 | a. Your remote nodes have their paths configured correctly, with the ipengine and ipcontroller | |
|
1313 | of the python environment you wish to execute code in having top precedence. | |
|
1314 | b. This functionality is untested on Windows. | |
|
1314 | 1315 | |
|
1315 | 1316 | If you need different behavior, consider making your own template.
|
1316 | 1317 | """ |
|
1317 | 1318 | |
|
1318 | 1319 | submit_command = List(['condor_submit'], config=True, |
|
1319 | 1320 | help="The HTCondor submit command ['condor_submit']") |
|
1320 | 1321 | delete_command = List(['condor_rm'], config=True, |
|
1321 | 1322 | help="The HTCondor delete command ['condor_rm']") |
|
1322 | 1323 | job_id_regexp = CRegExp(r'(\d+)\.$', config=True, |
|
1323 | 1324 | help="Regular expression for identifying the job ID [r'(\d+)\.$']") |
|
1324 | 1325 | job_id_regexp_group = Integer(1, config=True, |
|
1325 | 1326 | help="""The group we wish to match in job_id_regexp [1]""") |
|
1326 | 1327 | |
|
1327 | 1328 | job_array_regexp = CRegExp('queue\W+\$') |
|
1328 | 1329 | job_array_template = Unicode('queue {n}') |
|
1329 | 1330 | |
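An illustrative sketch of why job_id_regexp_group is 1 here: condor_submit
ends its message with the cluster id followed by a period (the sample
output below is hypothetical).

    import re
    output = '1 job(s) submitted to cluster 42.'
    m = re.search(r'(\d+)\.$', output)
    m.group(0)   # '42.' -- whole match, includes the trailing period
    m.group(1)   # '42'  -- the id we actually want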
|
1330 | 1331 | |
|
1331 | 1332 | def _insert_job_array_in_script(self): |
|
1332 | 1333 | """Inserts a job array if required into the batch script. |
|
1333 | 1334 | """ |
|
1334 | 1335 | if not self.job_array_regexp.search(self.batch_template): |
|
1335 | 1336 | self.log.debug("adding job array settings to batch script") |
|
1336 | 1337 | #HTCondor requires that the job array goes at the bottom of the script |
|
1337 | 1338 | self.batch_template = '\n'.join([self.batch_template, |
|
1338 | 1339 | self.job_array_template]) |
|
1339 | 1340 | |
|
1340 | 1341 | def _insert_queue_in_script(self): |
|
1341 | 1342 | """AFAIK, HTCondor doesn't have a concept of multiple queues that can be |
|
1342 | 1343 | specified in the script. |
|
1343 | 1344 | """ |
|
1344 | 1345 | pass |
|
1345 | 1346 | |
|
1346 | 1347 | |
|
1347 | 1348 | class HTCondorControllerLauncher(HTCondorLauncher, BatchClusterAppMixin): |
|
1348 | 1349 | """Launch a controller using HTCondor.""" |
|
1349 | 1350 | |
|
1350 | 1351 | batch_file_name = Unicode(u'htcondor_controller', config=True, |
|
1351 | 1352 | help="batch file name for the controller job.") |
|
1352 | 1353 | default_template = Unicode(r""" |
|
1353 | 1354 | universe = vanilla |
|
1354 | 1355 | executable = ipcontroller |
|
1355 | 1356 | # by default we expect a shared file system |
|
1356 | 1357 | transfer_executable = False |
|
1357 | 1358 | arguments = --log-to-file '--profile-dir={profile_dir}' --cluster-id='{cluster_id}' |
|
1358 | 1359 | """) |
|
1359 | 1360 | |
|
1360 | 1361 | def start(self): |
|
1361 | 1362 | """Start the controller by profile or profile_dir.""" |
|
1362 | 1363 | return super(HTCondorControllerLauncher, self).start(1) |
|
1363 | 1364 | |
|
1364 | 1365 | |
|
1365 | 1366 | class HTCondorEngineSetLauncher(HTCondorLauncher, BatchClusterAppMixin): |
|
1366 | 1367 | """Launch Engines using HTCondor""" |
|
1367 | 1368 | batch_file_name = Unicode(u'htcondor_engines', config=True, |
|
1368 | 1369 | help="batch file name for the engine(s) job.") |
|
1369 | 1370 | default_template = Unicode(""" |
|
1370 | 1371 | universe = vanilla |
|
1371 | 1372 | executable = ipengine |
|
1372 | 1373 | # by default we expect a shared file system |
|
1373 | 1374 | transfer_executable = False |
|
1374 | 1375 | arguments = "--log-to-file '--profile-dir={profile_dir}' '--cluster-id={cluster_id}'" |
|
1375 | 1376 | """) |
|
1376 | 1377 | |
|
1377 | 1378 | |
|
1378 | 1379 | #----------------------------------------------------------------------------- |
|
1379 | 1380 | # A launcher for ipcluster itself! |
|
1380 | 1381 | #----------------------------------------------------------------------------- |
|
1381 | 1382 | |
|
1382 | 1383 | |
|
1383 | 1384 | class IPClusterLauncher(LocalProcessLauncher): |
|
1384 | 1385 | """Launch the ipcluster program in an external process.""" |
|
1385 | 1386 | |
|
1386 | 1387 | ipcluster_cmd = List(ipcluster_cmd_argv, config=True, |
|
1387 | 1388 | help="Popen command for ipcluster") |
|
1388 | 1389 | ipcluster_args = List( |
|
1389 | 1390 | ['--clean-logs=True', '--log-to-file', '--log-level=%i'%logging.INFO], config=True, |
|
1390 | 1391 | help="Command line arguments to pass to ipcluster.") |
|
1391 | 1392 | ipcluster_subcommand = Unicode('start') |
|
1392 | 1393 | profile = Unicode('default') |
|
1393 | 1394 | n = Integer(2) |
|
1394 | 1395 | |
|
1395 | 1396 | def find_args(self): |
|
1396 | 1397 | return self.ipcluster_cmd + [self.ipcluster_subcommand] + \ |
|
1397 | 1398 | ['--n=%i'%self.n, '--profile=%s'%self.profile] + \ |
|
1398 | 1399 | self.ipcluster_args |
|
1399 | 1400 | |
|
1400 | 1401 | def start(self): |
|
1401 | 1402 | return super(IPClusterLauncher, self).start() |
|
1402 | 1403 | |
|
1403 | 1404 | #----------------------------------------------------------------------------- |
|
1404 | 1405 | # Collections of launchers |
|
1405 | 1406 | #----------------------------------------------------------------------------- |
|
1406 | 1407 | |
|
1407 | 1408 | local_launchers = [ |
|
1408 | 1409 | LocalControllerLauncher, |
|
1409 | 1410 | LocalEngineLauncher, |
|
1410 | 1411 | LocalEngineSetLauncher, |
|
1411 | 1412 | ] |
|
1412 | 1413 | mpi_launchers = [ |
|
1413 | 1414 | MPILauncher, |
|
1414 | 1415 | MPIControllerLauncher, |
|
1415 | 1416 | MPIEngineSetLauncher, |
|
1416 | 1417 | ] |
|
1417 | 1418 | ssh_launchers = [ |
|
1418 | 1419 | SSHLauncher, |
|
1419 | 1420 | SSHControllerLauncher, |
|
1420 | 1421 | SSHEngineLauncher, |
|
1421 | 1422 | SSHEngineSetLauncher, |
|
1422 | 1423 | SSHProxyEngineSetLauncher, |
|
1423 | 1424 | ] |
|
1424 | 1425 | winhpc_launchers = [ |
|
1425 | 1426 | WindowsHPCLauncher, |
|
1426 | 1427 | WindowsHPCControllerLauncher, |
|
1427 | 1428 | WindowsHPCEngineSetLauncher, |
|
1428 | 1429 | ] |
|
1429 | 1430 | pbs_launchers = [ |
|
1430 | 1431 | PBSLauncher, |
|
1431 | 1432 | PBSControllerLauncher, |
|
1432 | 1433 | PBSEngineSetLauncher, |
|
1433 | 1434 | ] |
|
1434 | 1435 | sge_launchers = [ |
|
1435 | 1436 | SGELauncher, |
|
1436 | 1437 | SGEControllerLauncher, |
|
1437 | 1438 | SGEEngineSetLauncher, |
|
1438 | 1439 | ] |
|
1439 | 1440 | lsf_launchers = [ |
|
1440 | 1441 | LSFLauncher, |
|
1441 | 1442 | LSFControllerLauncher, |
|
1442 | 1443 | LSFEngineSetLauncher, |
|
1443 | 1444 | ] |
|
1444 | 1445 | htcondor_launchers = [ |
|
1445 | 1446 | HTCondorLauncher, |
|
1446 | 1447 | HTCondorControllerLauncher, |
|
1447 | 1448 | HTCondorEngineSetLauncher, |
|
1448 | 1449 | ] |
|
1449 | 1450 | all_launchers = local_launchers + mpi_launchers + ssh_launchers + winhpc_launchers\ |
|
1450 | 1451 | + pbs_launchers + sge_launchers + lsf_launchers + htcondor_launchers |
@@ -1,835 +1,834 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Sphinx directive to support embedded IPython code. |
|
3 | 3 | |
|
4 | 4 | This directive allows pasting of entire interactive IPython sessions, prompts |
|
5 | 5 | and all, and their code will actually get re-executed at doc build time, with |
|
6 | 6 | all prompts renumbered sequentially. It also allows you to input code as a pure |
|
7 | 7 | python input by giving the argument python to the directive. The output looks |
|
8 | 8 | like an interactive ipython section. |
|
9 | 9 | |
|
10 | 10 | To enable this directive, simply list it in your Sphinx ``conf.py`` file |
|
11 | 11 | (making sure the directory where you placed it is visible to sphinx, as is |
|
12 | 12 | needed for all Sphinx directives). |
|
13 | 13 | |
|
14 | 14 | By default this directive assumes that your prompts are unchanged IPython ones, |
|
15 | 15 | but this can be customized. The configurable options that can be placed in |
|
16 | 16 | conf.py are |
|
17 | 17 | |
|
18 | 18 | ipython_savefig_dir: |
|
19 | 19 | The directory in which to save the figures. This is relative to the |
|
20 | 20 | Sphinx source directory. The default is `html_static_path`. |
|
21 | 21 | ipython_rgxin: |
|
22 | 22 | The compiled regular expression to denote the start of IPython input |
|
23 | 23 | lines. The default is re.compile('In \[(\d+)\]:\s?(.*)\s*'). You |
|
24 | 24 | shouldn't need to change this. |
|
25 | 25 | ipython_rgxout: |
|
26 | 26 | The compiled regular expression to denote the start of IPython output |
|
27 | 27 | lines. The default is re.compile('Out\[(\d+)\]:\s?(.*)\s*'). You |
|
28 | 28 | shouldn't need to change this. |
|
29 | 29 | ipython_promptin: |
|
30 | 30 | The string to represent the IPython input prompt in the generated ReST. |
|
31 | 31 | The default is 'In [%d]:'. This expects that the line numbers are used |
|
32 | 32 | in the prompt. |
|
33 | 33 | ipython_promptout: |
|
34 | ||
|
35 | 34 | The string to represent the IPython prompt in the generated ReST. The |
|
36 | 35 | default is 'Out [%d]:'. This expects that the line numbers are used |
|
37 | 36 | in the prompt. |
|
38 | 37 | |
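A minimal conf.py sketch putting these options together (the values are
illustrative, and it assumes ipython_directive.py is importable by Sphinx):

    extensions = ['ipython_directive']
    ipython_savefig_dir = '_static/ipy_figs'
    ipython_promptin = 'In [%d]:'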
|
39 | 38 | ToDo |
|
40 | 39 | ---- |
|
41 | 40 | |
|
42 | 41 | - Turn the ad-hoc test() function into a real test suite. |
|
43 | 42 | - Break up ipython-specific functionality from matplotlib stuff into better |
|
44 | 43 | separated code. |
|
45 | 44 | |
|
46 | 45 | Authors |
|
47 | 46 | ------- |
|
48 | 47 | |
|
49 | 48 | - John D Hunter: original author.
|
50 | 49 | - Fernando Perez: refactoring, documentation, cleanups, port to 0.11. |
|
51 | 50 | - Václav Šmilauer <eudoxos-AT-arcig.cz>: Prompt generalizations.
|
52 | 51 | - Skipper Seabold: refactoring, cleanups, pure python addition
|
53 | 52 | """ |
|
54 | 53 | from __future__ import print_function |
|
55 | 54 | |
|
56 | 55 | #----------------------------------------------------------------------------- |
|
57 | 56 | # Imports |
|
58 | 57 | #----------------------------------------------------------------------------- |
|
59 | 58 | |
|
60 | 59 | # Stdlib |
|
61 | 60 | import os |
|
62 | 61 | import re |
|
63 | 62 | import sys |
|
64 | 63 | import tempfile |
|
65 | 64 | import ast |
|
66 | 65 | |
|
67 | 66 | # To keep compatibility with various python versions |
|
68 | 67 | try: |
|
69 | 68 | from hashlib import md5 |
|
70 | 69 | except ImportError: |
|
71 | 70 | from md5 import md5 |
|
72 | 71 | |
|
73 | 72 | # Third-party |
|
74 | 73 | import matplotlib |
|
75 | 74 | import sphinx |
|
76 | 75 | from docutils.parsers.rst import directives |
|
77 | 76 | from docutils import nodes |
|
78 | 77 | from sphinx.util.compat import Directive |
|
79 | 78 | |
|
80 | 79 | matplotlib.use('Agg') |
|
81 | 80 | |
|
82 | 81 | # Our own |
|
83 | 82 | from IPython import Config, InteractiveShell |
|
84 | 83 | from IPython.core.profiledir import ProfileDir |
|
85 | 84 | from IPython.utils import io |
|
86 | 85 | from IPython.utils.py3compat import PY3 |
|
87 | 86 | |
|
88 | 87 | if PY3: |
|
89 | 88 | from io import StringIO |
|
90 | 89 | else: |
|
91 | 90 | from StringIO import StringIO |
|
92 | 91 | |
|
93 | 92 | #----------------------------------------------------------------------------- |
|
94 | 93 | # Globals |
|
95 | 94 | #----------------------------------------------------------------------------- |
|
96 | 95 | # for tokenizing blocks |
|
97 | 96 | COMMENT, INPUT, OUTPUT = range(3) |
|
98 | 97 | |
|
99 | 98 | #----------------------------------------------------------------------------- |
|
100 | 99 | # Functions and class declarations |
|
101 | 100 | #----------------------------------------------------------------------------- |
|
102 | 101 | def block_parser(part, rgxin, rgxout, fmtin, fmtout): |
|
103 | 102 | """ |
|
104 | 103 | part is a string of ipython text, comprised of at most one |
|
105 | 104 | input, one ouput, comments, and blank lines. The block parser |
|
106 | 105 | parses the text into a list of:: |
|
107 | 106 | |
|
108 | 107 | blocks = [ (TOKEN0, data0), (TOKEN1, data1), ...] |
|
109 | 108 | |
|
110 | 109 | where TOKEN is one of [COMMENT | INPUT | OUTPUT ] and |
|
111 | 110 | data is, depending on the type of token:: |
|
112 | 111 | |
|
113 | 112 | COMMENT : the comment string |
|
114 | 113 | |
|
115 | 114 | INPUT: the (DECORATOR, INPUT_LINE, REST) where |
|
116 | 115 | DECORATOR: the input decorator (or None) |
|
117 | 116 | INPUT_LINE: the input as string (possibly multi-line) |
|
118 | 117 | REST : any stdout generated by the input line (not OUTPUT) |
|
119 | 118 | |
|
120 | 119 | |
|
121 | 120 | OUTPUT: the output string, possibly multi-line |
|
122 | 121 | """ |
|
123 | 122 | |
|
124 | 123 | block = [] |
|
125 | 124 | lines = part.split('\n') |
|
126 | 125 | N = len(lines) |
|
127 | 126 | i = 0 |
|
128 | 127 | decorator = None |
|
129 | 128 | while 1: |
|
130 | 129 | |
|
131 | 130 | if i==N: |
|
132 | 131 | # nothing left to parse -- the last line |
|
133 | 132 | break |
|
134 | 133 | |
|
135 | 134 | line = lines[i] |
|
136 | 135 | i += 1 |
|
137 | 136 | line_stripped = line.strip() |
|
138 | 137 | if line_stripped.startswith('#'): |
|
139 | 138 | block.append((COMMENT, line)) |
|
140 | 139 | continue |
|
141 | 140 | |
|
142 | 141 | if line_stripped.startswith('@'): |
|
143 | 142 | # we're assuming at most one decorator -- may need to |
|
144 | 143 | # rethink |
|
145 | 144 | decorator = line_stripped |
|
146 | 145 | continue |
|
147 | 146 | |
|
148 | 147 | # does this look like an input line? |
|
149 | 148 | matchin = rgxin.match(line) |
|
150 | 149 | if matchin: |
|
151 | 150 | lineno, inputline = int(matchin.group(1)), matchin.group(2) |
|
152 | 151 | |
|
153 | 152 | # the ....: continuation string |
|
154 | 153 | continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2)) |
|
155 | 154 | Nc = len(continuation) |
|
156 | 155 | # input lines can continue on for more than one line, if |
|
157 | 156 | # we have a '\' line continuation char or a function call |
|
158 | 157 | # echo line 'print'. The input line can only be |
|
159 | 158 | # terminated by the end of the block or an output line, so |
|
160 | 159 | # we parse out the rest of the input line if it is |
|
161 | 160 | # multiline as well as any echo text |
|
162 | 161 | |
|
163 | 162 | rest = [] |
|
164 | 163 | while i<N: |
|
165 | 164 | |
|
166 | 165 | # look ahead; if the next line is blank, or a comment, or |
|
167 | 166 | # an output line, we're done |
|
168 | 167 | |
|
169 | 168 | nextline = lines[i] |
|
170 | 169 | matchout = rgxout.match(nextline) |
|
171 | 170 | #print "nextline=%s, continuation=%s, starts=%s"%(nextline, continuation, nextline.startswith(continuation)) |
|
172 | 171 | if matchout or nextline.startswith('#'): |
|
173 | 172 | break |
|
174 | 173 | elif nextline.startswith(continuation): |
|
175 | 174 | inputline += '\n' + nextline[Nc:] |
|
176 | 175 | else: |
|
177 | 176 | rest.append(nextline) |
|
178 | 177 | i+= 1 |
|
179 | 178 | |
|
180 | 179 | block.append((INPUT, (decorator, inputline, '\n'.join(rest)))) |
|
181 | 180 | continue |
|
182 | 181 | |
|
183 | 182 | # if it looks like an output line grab all the text to the end |
|
184 | 183 | # of the block |
|
185 | 184 | matchout = rgxout.match(line) |
|
186 | 185 | if matchout: |
|
187 | 186 | lineno, output = int(matchout.group(1)), matchout.group(2) |
|
188 | 187 | if i<N-1: |
|
189 | 188 | output = '\n'.join([output] + lines[i:]) |
|
190 | 189 | |
|
191 | 190 | block.append((OUTPUT, output)) |
|
192 | 191 | break |
|
193 | 192 | |
|
194 | 193 | return block |
|
195 | 194 | |
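An illustrative invocation of block_parser with the default prompt regexes
(the input text is hypothetical):

    import re
    rgxin = re.compile('In \[(\d+)\]:\s?(.*)\s*')
    rgxout = re.compile('Out\[(\d+)\]:\s?(.*)\s*')
    part = 'In [2]: x**3\nOut[2]: 8'
    block_parser(part, rgxin, rgxout, 'In [%d]:', 'Out[%d]:')
    # -> [(INPUT, (None, 'x**3', '')), (OUTPUT, '8')]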
|
196 | 195 | class EmbeddedSphinxShell(object): |
|
197 | 196 | """An embedded IPython instance to run inside Sphinx""" |
|
198 | 197 | |
|
199 | 198 | def __init__(self): |
|
200 | 199 | |
|
201 | 200 | self.cout = StringIO() |
|
202 | 201 | |
|
203 | 202 | |
|
204 | 203 | # Create config object for IPython |
|
205 | 204 | config = Config() |
|
206 | 205 | config.Global.display_banner = False |
|
207 | 206 | config.Global.exec_lines = ['import numpy as np', |
|
208 | 207 | 'from pylab import *' |
|
209 | 208 | ] |
|
210 | 209 | config.InteractiveShell.autocall = False |
|
211 | 210 | config.InteractiveShell.autoindent = False |
|
212 | 211 | config.InteractiveShell.colors = 'NoColor' |
|
213 | 212 | |
|
214 | 213 | # create a profile so instance history isn't saved |
|
215 | 214 | tmp_profile_dir = tempfile.mkdtemp(prefix='profile_') |
|
216 | 215 | profname = 'auto_profile_sphinx_build' |
|
217 | 216 | pdir = os.path.join(tmp_profile_dir,profname) |
|
218 | 217 | profile = ProfileDir.create_profile_dir(pdir) |
|
219 | 218 | |
|
220 | 219 | # Create and initialize ipython, but don't start its mainloop |
|
221 | 220 | IP = InteractiveShell.instance(config=config, profile_dir=profile) |
|
222 | 221 | # io.stdout redirect must be done *after* instantiating InteractiveShell |
|
223 | 222 | io.stdout = self.cout |
|
224 | 223 | io.stderr = self.cout |
|
225 | 224 | |
|
226 | 225 | # For debugging, so we can see normal output, use this: |
|
227 | 226 | #from IPython.utils.io import Tee |
|
228 | 227 | #io.stdout = Tee(self.cout, channel='stdout') # dbg |
|
229 | 228 | #io.stderr = Tee(self.cout, channel='stderr') # dbg |
|
230 | 229 | |
|
231 | 230 | # Store a few parts of IPython we'll need. |
|
232 | 231 | self.IP = IP |
|
233 | 232 | self.user_ns = self.IP.user_ns |
|
234 | 233 | self.user_global_ns = self.IP.user_global_ns |
|
235 | 234 | |
|
236 | 235 | self.input = '' |
|
237 | 236 | self.output = '' |
|
238 | 237 | |
|
239 | 238 | self.is_verbatim = False |
|
240 | 239 | self.is_doctest = False |
|
241 | 240 | self.is_suppress = False |
|
242 | 241 | |
|
243 | 242 | # on the first call to the savefig decorator, we'll import |
|
244 | 243 | # pyplot as plt so we can make a call to the plt.gcf().savefig |
|
245 | 244 | self._pyplot_imported = False |
|
246 | 245 | |
|
247 | 246 | def clear_cout(self): |
|
248 | 247 | self.cout.seek(0) |
|
249 | 248 | self.cout.truncate(0) |
|
250 | 249 | |
|
251 | 250 | def process_input_line(self, line, store_history=True): |
|
252 | 251 | """process the input, capturing stdout""" |
|
253 | 252 | #print "input='%s'"%self.input |
|
254 | 253 | stdout = sys.stdout |
|
255 | 254 | splitter = self.IP.input_splitter |
|
256 | 255 | try: |
|
257 | 256 | sys.stdout = self.cout |
|
258 | 257 | splitter.push(line) |
|
259 | 258 | more = splitter.push_accepts_more() |
|
260 | 259 | if not more: |
|
261 | 260 | source_raw = splitter.source_raw_reset()[1] |
|
262 | 261 | self.IP.run_cell(source_raw, store_history=store_history) |
|
263 | 262 | finally: |
|
264 | 263 | sys.stdout = stdout |
|
265 | 264 | |
|
266 | 265 | def process_image(self, decorator): |
|
267 | 266 | """ |
|
268 | 267 | # build out an image directive like |
|
269 | 268 | # .. image:: somefile.png |
|
270 | 269 | # :width 4in |
|
271 | 270 | # |
|
272 | 271 | # from an input like |
|
273 | 272 | # savefig somefile.png width=4in |
|
274 | 273 | """ |
|
275 | 274 | savefig_dir = self.savefig_dir |
|
276 | 275 | source_dir = self.source_dir |
|
277 | 276 | saveargs = decorator.split(' ') |
|
278 | 277 | filename = saveargs[1] |
|
279 | 278 | # insert relative path to image file in source |
|
280 | 279 | outfile = os.path.relpath(os.path.join(savefig_dir,filename), |
|
281 | 280 | source_dir) |
|
282 | 281 | |
|
283 | 282 | imagerows = ['.. image:: %s'%outfile] |
|
284 | 283 | |
|
285 | 284 | for kwarg in saveargs[2:]: |
|
286 | 285 | arg, val = kwarg.split('=') |
|
287 | 286 | arg = arg.strip() |
|
288 | 287 | val = val.strip() |
|
289 | 288 | imagerows.append(' :%s: %s'%(arg, val)) |
|
290 | 289 | |
|
291 | 290 | image_file = os.path.basename(outfile) # only return file name |
|
292 | 291 | image_directive = '\n'.join(imagerows) |
|
293 | 292 | return image_file, image_directive |
|
294 | 293 | |
|
295 | 294 | |
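A standalone re-creation of the mapping documented above (paths are
hypothetical), showing how a '@savefig' decorator becomes an image
directive:

    import os
    savefig_dir, source_dir = '/docs/_static', '/docs/source'
    saveargs = '@savefig f.png width=4in'.split(' ')
    outfile = os.path.relpath(os.path.join(savefig_dir, saveargs[1]),
                              source_dir)
    rows = ['.. image:: %s' % outfile]
    rows += ['   :%s: %s' % tuple(kw.split('=')) for kw in saveargs[2:]]
    # rows -> ['.. image:: ../_static/f.png', '   :width: 4in']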
|
296 | 295 | # Callbacks for each type of token |
|
297 | 296 | def process_input(self, data, input_prompt, lineno): |
|
298 | 297 | """Process data block for INPUT token.""" |
|
299 | 298 | decorator, input, rest = data |
|
300 | 299 | image_file = None |
|
301 | 300 | image_directive = None |
|
302 | 301 | #print 'INPUT:', data # dbg |
|
303 | 302 | is_verbatim = decorator=='@verbatim' or self.is_verbatim |
|
304 | 303 | is_doctest = decorator=='@doctest' or self.is_doctest |
|
305 | 304 | is_suppress = decorator=='@suppress' or self.is_suppress |
|
306 | 305 | is_savefig = decorator is not None and \ |
|
307 | 306 | decorator.startswith('@savefig') |
|
308 | 307 | |
|
309 | 308 | input_lines = input.split('\n') |
|
310 | 309 | if len(input_lines) > 1: |
|
311 | 310 | if input_lines[-1] != "": |
|
312 | 311 | input_lines.append('') # make sure there's a blank line |
|
313 | 312 | # so splitter buffer gets reset |
|
314 | 313 | |
|
315 | 314 | continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2)) |
|
316 | 315 | Nc = len(continuation) |
|
317 | 316 | |
|
318 | 317 | if is_savefig: |
|
319 | 318 | image_file, image_directive = self.process_image(decorator) |
|
320 | 319 | |
|
321 | 320 | ret = [] |
|
322 | 321 | is_semicolon = False |
|
323 | 322 | |
|
324 | 323 | for i, line in enumerate(input_lines): |
|
325 | 324 | if line.endswith(';'): |
|
326 | 325 | is_semicolon = True |
|
327 | 326 | |
|
328 | 327 | if i==0: |
|
329 | 328 | # process the first input line |
|
330 | 329 | if is_verbatim: |
|
331 | 330 | self.process_input_line('') |
|
332 | 331 | self.IP.execution_count += 1 # increment it anyway |
|
333 | 332 | else: |
|
334 | 333 | # only submit the line in non-verbatim mode |
|
335 | 334 | self.process_input_line(line, store_history=True) |
|
336 | 335 | formatted_line = '%s %s'%(input_prompt, line) |
|
337 | 336 | else: |
|
338 | 337 | # process a continuation line |
|
339 | 338 | if not is_verbatim: |
|
340 | 339 | self.process_input_line(line, store_history=True) |
|
341 | 340 | |
|
342 | 341 | formatted_line = '%s %s'%(continuation, line) |
|
343 | 342 | |
|
344 | 343 | if not is_suppress: |
|
345 | 344 | ret.append(formatted_line) |
|
346 | 345 | |
|
347 | 346 | if not is_suppress and len(rest.strip()) and is_verbatim: |
|
348 | 347 | # the "rest" is the standard output of the |
|
349 | 348 | # input, which needs to be added in |
|
350 | 349 | # verbatim mode |
|
351 | 350 | ret.append(rest) |
|
352 | 351 | |
|
353 | 352 | self.cout.seek(0) |
|
354 | 353 | output = self.cout.read() |
|
355 | 354 | if not is_suppress and not is_semicolon: |
|
356 | 355 | ret.append(output) |
|
357 | 356 | elif is_semicolon: # get spacing right |
|
358 | 357 | ret.append('') |
|
359 | 358 | |
|
360 | 359 | self.cout.truncate(0) |
|
361 | 360 | return (ret, input_lines, output, is_doctest, image_file, |
|
362 | 361 | image_directive) |
|
363 | 362 | #print 'OUTPUT', output # dbg |
|
364 | 363 | |
|
365 | 364 | def process_output(self, data, output_prompt, |
|
366 | 365 | input_lines, output, is_doctest, image_file): |
|
367 | 366 | """Process data block for OUTPUT token.""" |
|
368 | 367 | if is_doctest: |
|
369 | 368 | submitted = data.strip() |
|
370 | 369 | found = output |
|
371 | 370 | if found is not None: |
|
372 | 371 | found = found.strip() |
|
373 | 372 | |
|
374 | 373 | # XXX - fperez: in 0.11, 'output' never comes with the prompt |
|
375 | 374 | # in it, just the actual output text. So I think all this code |
|
376 | 375 | # can be nuked... |
|
377 | 376 | |
|
378 | 377 | # the above comment does not appear to be accurate... (minrk) |
|
379 | 378 | |
|
380 | 379 | ind = found.find(output_prompt) |
|
381 | 380 | if ind<0: |
|
382 | 381 | e='output prompt="%s" does not match out line=%s' % \ |
|
383 | 382 | (output_prompt, found) |
|
384 | 383 | raise RuntimeError(e) |
|
385 | 384 | found = found[len(output_prompt):].strip() |
|
386 | 385 | |
|
387 | 386 | if found!=submitted: |
|
388 | 387 | e = ('doctest failure for input_lines="%s" with ' |
|
389 | 388 | 'found_output="%s" and submitted output="%s"' % |
|
390 | 389 | (input_lines, found, submitted) ) |
|
391 | 390 | raise RuntimeError(e) |
|
392 | 391 | #print 'doctest PASSED for input_lines="%s" with found_output="%s" and submitted output="%s"'%(input_lines, found, submitted) |
|
393 | 392 | |
|
394 | 393 | def process_comment(self, data): |
|
395 | 394 | """Process data block for COMMENT token."""
|
396 | 395 | if not self.is_suppress: |
|
397 | 396 | return [data] |
|
398 | 397 | |
|
399 | 398 | def save_image(self, image_file): |
|
400 | 399 | """ |
|
401 | 400 | Saves the image file to disk. |
|
402 | 401 | """ |
|
403 | 402 | self.ensure_pyplot() |
|
404 | 403 | command = 'plt.gcf().savefig("%s")'%image_file |
|
405 | 404 | #print 'SAVEFIG', command # dbg |
|
406 | 405 | self.process_input_line('bookmark ipy_thisdir', store_history=False) |
|
407 | 406 | self.process_input_line('cd -b ipy_savedir', store_history=False) |
|
408 | 407 | self.process_input_line(command, store_history=False) |
|
409 | 408 | self.process_input_line('cd -b ipy_thisdir', store_history=False) |
|
410 | 409 | self.process_input_line('bookmark -d ipy_thisdir', store_history=False) |
|
411 | 410 | self.clear_cout() |
|
412 | 411 | |
|
413 | 412 | |
|
414 | 413 | def process_block(self, block): |
|
415 | 414 | """ |
|
416 | 415 | process block from the block_parser and return a list of processed lines |
|
417 | 416 | """ |
|
418 | 417 | ret = [] |
|
419 | 418 | output = None |
|
420 | 419 | input_lines = None |
|
421 | 420 | lineno = self.IP.execution_count |
|
422 | 421 | |
|
423 | 422 | input_prompt = self.promptin%lineno |
|
424 | 423 | output_prompt = self.promptout%lineno |
|
425 | 424 | image_file = None |
|
426 | 425 | image_directive = None |
|
427 | 426 | |
|
428 | 427 | for token, data in block: |
|
429 | 428 | if token==COMMENT: |
|
430 | 429 | out_data = self.process_comment(data) |
|
431 | 430 | elif token==INPUT: |
|
432 | 431 | (out_data, input_lines, output, is_doctest, image_file, |
|
433 | 432 | image_directive) = \ |
|
434 | 433 | self.process_input(data, input_prompt, lineno) |
|
435 | 434 | elif token==OUTPUT: |
|
436 | 435 | out_data = \ |
|
437 | 436 | self.process_output(data, output_prompt, |
|
438 | 437 | input_lines, output, is_doctest, |
|
439 | 438 | image_file) |
|
440 | 439 | if out_data: |
|
441 | 440 | ret.extend(out_data) |
|
442 | 441 | |
|
443 | 442 | # save the image files |
|
444 | 443 | if image_file is not None: |
|
445 | 444 | self.save_image(image_file) |
|
446 | 445 | |
|
447 | 446 | return ret, image_directive |
|
448 | 447 | |
|
449 | 448 | def ensure_pyplot(self): |
|
450 | 449 | if self._pyplot_imported: |
|
451 | 450 | return |
|
452 | 451 | self.process_input_line('import matplotlib.pyplot as plt', |
|
453 | 452 | store_history=False) |
|
454 | 453 | |
|
455 | 454 | def process_pure_python(self, content): |
|
456 | 455 | """ |
|
457 | 456 | content is a list of strings; it is the unedited directive content.
|
458 | 457 | |
|
459 | 458 | This runs it line by line in the InteractiveShell, prepends |
|
460 | 459 | prompts as needed, capturing stderr and stdout, then returns
|
461 | 460 | the content as a list as if it were ipython code |
|
462 | 461 | """ |
|
463 | 462 | output = [] |
|
464 | 463 | savefig = False # keep up with this to clear figure |
|
465 | 464 | multiline = False # to handle line continuation |
|
466 | 465 | multiline_start = None |
|
467 | 466 | fmtin = self.promptin |
|
468 | 467 | |
|
469 | 468 | ct = 0 |
|
470 | 469 | |
|
471 | 470 | for lineno, line in enumerate(content): |
|
472 | 471 | |
|
473 | 472 | line_stripped = line.strip() |
|
474 | 473 | if not len(line): |
|
475 | 474 | output.append(line) |
|
476 | 475 | continue |
|
477 | 476 | |
|
478 | 477 | # handle decorators |
|
479 | 478 | if line_stripped.startswith('@'): |
|
480 | 479 | output.extend([line]) |
|
481 | 480 | if 'savefig' in line: |
|
482 | 481 | savefig = True # and need to clear figure |
|
483 | 482 | continue |
|
484 | 483 | |
|
485 | 484 | # handle comments |
|
486 | 485 | if line_stripped.startswith('#'): |
|
487 | 486 | output.extend([line]) |
|
488 | 487 | continue |
|
489 | 488 | |
|
490 | 489 | # deal with lines checking for multiline |
|
491 | 490 | continuation = u' %s:'% ''.join(['.']*(len(str(ct))+2)) |
|
492 | 491 | if not multiline: |
|
493 | 492 | modified = u"%s %s" % (fmtin % ct, line_stripped) |
|
494 | 493 | output.append(modified) |
|
495 | 494 | ct += 1 |
|
496 | 495 | try: |
|
497 | 496 | ast.parse(line_stripped) |
|
498 | 497 | output.append(u'') |
|
499 | 498 | except Exception: # on a multiline |
|
500 | 499 | multiline = True |
|
501 | 500 | multiline_start = lineno |
|
502 | 501 | else: # still on a multiline |
|
503 | 502 | modified = u'%s %s' % (continuation, line) |
|
504 | 503 | output.append(modified) |
|
505 | 504 | |
|
506 | 505 | # if the next line is indented, it should be part of multiline |
|
507 | 506 | if len(content) > lineno + 1: |
|
508 | 507 | nextline = content[lineno + 1] |
|
509 | 508 | if len(nextline) - len(nextline.lstrip()) > 3: |
|
510 | 509 | continue |
|
511 | 510 | try: |
|
512 | 511 | mod = ast.parse( |
|
513 | 512 | '\n'.join(content[multiline_start:lineno+1])) |
|
514 | 513 | if isinstance(mod.body[0], ast.FunctionDef): |
|
515 | 514 | # check to see if we have the whole function |
|
516 | 515 | for element in mod.body[0].body: |
|
517 | 516 | if isinstance(element, ast.Return): |
|
518 | 517 | multiline = False |
|
519 | 518 | else: |
|
520 | 519 | output.append(u'') |
|
521 | 520 | multiline = False |
|
522 | 521 | except Exception: |
|
523 | 522 | pass |
|
524 | 523 | |
|
525 | 524 | if savefig: # clear figure if plotted |
|
526 | 525 | self.ensure_pyplot() |
|
527 | 526 | self.process_input_line('plt.clf()', store_history=False) |
|
528 | 527 | self.clear_cout() |
|
529 | 528 | savefig = False |
|
530 | 529 | |
|
531 | 530 | return output |
|
532 | 531 | |
|
533 | 532 | class IPythonDirective(Directive): |
|
534 | 533 | |
|
535 | 534 | has_content = True |
|
536 | 535 | required_arguments = 0 |
|
537 | 536 | optional_arguments = 4 # python, suppress, verbatim, doctest |
|
538 | 537 | final_argument_whitespace = True
|
539 | 538 | option_spec = { 'python': directives.unchanged, |
|
540 | 539 | 'suppress' : directives.flag, |
|
541 | 540 | 'verbatim' : directives.flag, |
|
542 | 541 | 'doctest' : directives.flag, |
|
543 | 542 | } |
|
544 | 543 | |
|
545 | 544 | shell = None |
|
546 | 545 | |
|
547 | 546 | seen_docs = set() |
|
548 | 547 | |
|
549 | 548 | def get_config_options(self): |
|
550 | 549 | # contains sphinx configuration variables |
|
551 | 550 | config = self.state.document.settings.env.config |
|
552 | 551 | |
|
553 | 552 | # get config variables to set figure output directory |
|
554 | 553 | confdir = self.state.document.settings.env.app.confdir |
|
555 | 554 | savefig_dir = config.ipython_savefig_dir |
|
556 | 555 | source_dir = os.path.dirname(self.state.document.current_source) |
|
557 | 556 | if savefig_dir is None: |
|
558 | 557 | savefig_dir = config.html_static_path |
|
559 | 558 | if isinstance(savefig_dir, list): |
|
560 | 559 | savefig_dir = savefig_dir[0] # safe to assume only one path? |
|
561 | 560 | savefig_dir = os.path.join(confdir, savefig_dir) |
|
562 | 561 | |
|
563 | 562 | # get regex and prompt stuff |
|
564 | 563 | rgxin = config.ipython_rgxin |
|
565 | 564 | rgxout = config.ipython_rgxout |
|
566 | 565 | promptin = config.ipython_promptin |
|
567 | 566 | promptout = config.ipython_promptout |
|
568 | 567 | |
|
569 | 568 | return savefig_dir, source_dir, rgxin, rgxout, promptin, promptout |
|
570 | 569 | |
|
571 | 570 | def setup(self): |
|
572 | 571 | if self.shell is None: |
|
573 | 572 | self.shell = EmbeddedSphinxShell() |
|
574 | 573 | # reset the execution count if we haven't processed this doc |
|
575 | 574 | #NOTE: this may be borked if there are multiple seen_doc tmp files |
|
576 | 575 | #check time stamp? |
|
577 | 576 | |
|
578 | 577 | if not self.state.document.current_source in self.seen_docs: |
|
579 | 578 | self.shell.IP.history_manager.reset() |
|
580 | 579 | self.shell.IP.execution_count = 1 |
|
581 | 580 | self.seen_docs.add(self.state.document.current_source) |
|
582 | 581 | |
|
583 | 582 | |
|
584 | 583 | |
|
585 | 584 | # get config values |
|
586 | 585 | (savefig_dir, source_dir, rgxin, |
|
587 | 586 | rgxout, promptin, promptout) = self.get_config_options() |
|
588 | 587 | |
|
589 | 588 | # and attach to shell so we don't have to pass them around |
|
590 | 589 | self.shell.rgxin = rgxin |
|
591 | 590 | self.shell.rgxout = rgxout |
|
592 | 591 | self.shell.promptin = promptin |
|
593 | 592 | self.shell.promptout = promptout |
|
594 | 593 | self.shell.savefig_dir = savefig_dir |
|
595 | 594 | self.shell.source_dir = source_dir |
|
596 | 595 | |
|
597 | 596 | # setup bookmark for saving figures directory |
|
598 | 597 | |
|
599 | 598 | self.shell.process_input_line('bookmark ipy_savedir %s'%savefig_dir, |
|
600 | 599 | store_history=False) |
|
601 | 600 | self.shell.clear_cout() |
|
602 | 601 | |
|
603 | 602 | return rgxin, rgxout, promptin, promptout |
|
604 | 603 | |
|
605 | 604 | |
|
606 | 605 | def teardown(self): |
|
607 | 606 | # delete last bookmark |
|
608 | 607 | self.shell.process_input_line('bookmark -d ipy_savedir', |
|
609 | 608 | store_history=False) |
|
610 | 609 | self.shell.clear_cout() |
|
611 | 610 | |
|
612 | 611 | def run(self): |
|
613 | 612 | debug = False |
|
614 | 613 | |
|
615 | 614 | #TODO, any reason block_parser can't be a method of embeddable shell |
|
616 | 615 | # then we wouldn't have to carry these around |
|
617 | 616 | rgxin, rgxout, promptin, promptout = self.setup() |
|
618 | 617 | |
|
619 | 618 | options = self.options |
|
620 | 619 | self.shell.is_suppress = 'suppress' in options |
|
621 | 620 | self.shell.is_doctest = 'doctest' in options |
|
622 | 621 | self.shell.is_verbatim = 'verbatim' in options |
|
623 | 622 | |
|
624 | 623 | |
|
625 | 624 | # handle pure python code |
|
626 | 625 | if 'python' in self.arguments: |
|
627 | 626 | content = self.content |
|
628 | 627 | self.content = self.shell.process_pure_python(content) |
|
629 | 628 | |
|
630 | 629 | parts = '\n'.join(self.content).split('\n\n') |
|
631 | 630 | |
|
632 | 631 | lines = ['.. code-block:: ipython',''] |
|
633 | 632 | figures = [] |
|
634 | 633 | |
|
635 | 634 | for part in parts: |
|
636 | 635 | |
|
637 | 636 | block = block_parser(part, rgxin, rgxout, promptin, promptout) |
|
638 | 637 | |
|
639 | 638 | if len(block): |
|
640 | 639 | rows, figure = self.shell.process_block(block) |
|
641 | 640 | for row in rows: |
|
642 | 641 | lines.extend([' %s'%line for line in row.split('\n')]) |
|
643 | 642 | |
|
644 | 643 | if figure is not None: |
|
645 | 644 | figures.append(figure) |
|
646 | 645 | |
|
647 | 646 | #text = '\n'.join(lines) |
|
648 | 647 | #figs = '\n'.join(figures) |
|
649 | 648 | |
|
650 | 649 | for figure in figures: |
|
651 | 650 | lines.append('') |
|
652 | 651 | lines.extend(figure.split('\n')) |
|
653 | 652 | lines.append('') |
|
654 | 653 | |
|
655 | 654 | #print lines |
|
656 | 655 | if len(lines)>2: |
|
657 | 656 | if debug: |
|
658 | 657 | print('\n'.join(lines)) |
|
659 | 658 | else: #NOTE: this raises some errors, what's it for? |
|
660 | 659 | #print 'INSERTING %d lines'%len(lines) |
|
661 | 660 | self.state_machine.insert_input( |
|
662 | 661 | lines, self.state_machine.input_lines.source(0)) |
|
663 | 662 | |
|
664 | 663 | text = '\n'.join(lines) |
|
665 | 664 | txtnode = nodes.literal_block(text, text) |
|
666 | 665 | txtnode['language'] = 'ipython' |
|
667 | 666 | #imgnode = nodes.image(figs) |
|
668 | 667 | |
|
669 | 668 | # cleanup |
|
670 | 669 | self.teardown() |
|
671 | 670 | |
|
672 | 671 | return []#, imgnode] |
|
673 | 672 | |
|
674 | 673 | # Enable as a proper Sphinx directive |
|
675 | 674 | def setup(app): |
|
676 | 675 | setup.app = app |
|
677 | 676 | |
|
678 | 677 | app.add_directive('ipython', IPythonDirective) |
|
679 | 678 | app.add_config_value('ipython_savefig_dir', None, True) |
|
680 | 679 | app.add_config_value('ipython_rgxin', |
|
681 | 680 | re.compile('In \[(\d+)\]:\s?(.*)\s*'), True) |
|
682 | 681 | app.add_config_value('ipython_rgxout', |
|
683 | 682 | re.compile('Out\[(\d+)\]:\s?(.*)\s*'), True) |
|
684 | 683 | app.add_config_value('ipython_promptin', 'In [%d]:', True) |
|
685 | 684 | app.add_config_value('ipython_promptout', 'Out[%d]:', True) |
|
686 | 685 | |
|
687 | 686 | |
|
688 | 687 | # Simple smoke test, needs to be converted to a proper automatic test. |
|
689 | 688 | def test(): |
|
690 | 689 | |
|
691 | 690 | examples = [ |
|
692 | 691 | r""" |
|
693 | 692 | In [9]: pwd |
|
694 | 693 | Out[9]: '/home/jdhunter/py4science/book' |
|
695 | 694 | |
|
696 | 695 | In [10]: cd bookdata/ |
|
697 | 696 | /home/jdhunter/py4science/book/bookdata |
|
698 | 697 | |
|
699 | 698 | In [2]: from pylab import * |
|
700 | 699 | |
|
701 | 700 | In [2]: ion() |
|
702 | 701 | |
|
703 | 702 | In [3]: im = imread('stinkbug.png') |
|
704 | 703 | |
|
705 | 704 | @savefig mystinkbug.png width=4in |
|
706 | 705 | In [4]: imshow(im) |
|
707 | 706 | Out[4]: <matplotlib.image.AxesImage object at 0x39ea850> |
|
708 | 707 | |
|
709 | 708 | """, |
|
710 | 709 | r""" |
|
711 | 710 | |
|
712 | 711 | In [1]: x = 'hello world' |
|
713 | 712 | |
|
714 | 713 | # string methods can be |
|
715 | 714 | # used to alter the string |
|
716 | 715 | @doctest |
|
717 | 716 | In [2]: x.upper() |
|
718 | 717 | Out[2]: 'HELLO WORLD' |
|
719 | 718 | |
|
720 | 719 | @verbatim |
|
721 | 720 | In [3]: x.st<TAB> |
|
722 | 721 | x.startswith x.strip |
|
723 | 722 | """, |
|
724 | 723 | r""" |
|
725 | 724 | |
|
726 | 725 | In [130]: url = 'http://ichart.finance.yahoo.com/table.csv?s=CROX\ |
|
727 | 726 | .....: &d=9&e=22&f=2009&g=d&a=1&b=8&c=2006&ignore=.csv'
|
728 | 727 | |
|
729 | 728 | In [131]: print url.split('&') |
|
730 | 729 | ['http://ichart.finance.yahoo.com/table.csv?s=CROX', 'd=9', 'e=22', 'f=2009', 'g=d', 'a=1', 'b=8', 'c=2006', 'ignore=.csv'] |
|
731 | 730 | |
|
732 | 731 | In [60]: import urllib |
|
733 | 732 | |
|
734 | 733 | """, |
|
735 | 734 | r"""\ |
|
736 | 735 | |
|
737 | 736 | In [133]: import numpy.random |
|
738 | 737 | |
|
739 | 738 | @suppress |
|
740 | 739 | In [134]: numpy.random.seed(2358) |
|
741 | 740 | |
|
742 | 741 | @doctest |
|
743 | 742 | In [135]: numpy.random.rand(10,2) |
|
744 | 743 | Out[135]: |
|
745 | 744 | array([[ 0.64524308, 0.59943846], |
|
746 | 745 | [ 0.47102322, 0.8715456 ], |
|
747 | 746 | [ 0.29370834, 0.74776844], |
|
748 | 747 | [ 0.99539577, 0.1313423 ], |
|
749 | 748 | [ 0.16250302, 0.21103583], |
|
750 | 749 | [ 0.81626524, 0.1312433 ], |
|
751 | 750 | [ 0.67338089, 0.72302393], |
|
752 | 751 | [ 0.7566368 , 0.07033696], |
|
753 | 752 | [ 0.22591016, 0.77731835], |
|
754 | 753 | [ 0.0072729 , 0.34273127]]) |
|
755 | 754 | |
|
756 | 755 | """, |
|
757 | 756 | |
|
758 | 757 | r""" |
|
759 | 758 | In [106]: print x |
|
760 | 759 | jdh |
|
761 | 760 | |
|
762 | 761 | In [109]: for i in range(10): |
|
763 | 762 | .....: print i |
|
764 | 763 | .....: |
|
765 | 764 | .....: |
|
766 | 765 | 0 |
|
767 | 766 | 1 |
|
768 | 767 | 2 |
|
769 | 768 | 3 |
|
770 | 769 | 4 |
|
771 | 770 | 5 |
|
772 | 771 | 6 |
|
773 | 772 | 7 |
|
774 | 773 | 8 |
|
775 | 774 | 9 |
|
776 | 775 | """, |
|
777 | 776 | |
|
778 | 777 | r""" |
|
779 | 778 | |
|
780 | 779 | In [144]: from pylab import * |
|
781 | 780 | |
|
782 | 781 | In [145]: ion() |
|
783 | 782 | |
|
784 | 783 | # use a semicolon to suppress the output |
|
785 | 784 | @savefig test_hist.png width=4in |
|
786 | 785 | In [151]: hist(np.random.randn(10000), 100); |
|
787 | 786 | |
|
788 | 787 | |
|
789 | 788 | @savefig test_plot.png width=4in |
|
790 | 789 | In [151]: plot(np.random.randn(10000), 'o'); |
|
791 | 790 | """, |
|
792 | 791 | |
|
793 | 792 | r""" |
|
794 | 793 | # use a semicolon to suppress the output |
|
795 | 794 | In [151]: plt.clf() |
|
796 | 795 | |
|
797 | 796 | @savefig plot_simple.png width=4in |
|
798 | 797 | In [151]: plot([1,2,3]) |
|
799 | 798 | |
|
800 | 799 | @savefig hist_simple.png width=4in |
|
801 | 800 | In [151]: hist(np.random.randn(10000), 100); |
|
802 | 801 | |
|
803 | 802 | """, |
|
804 | 803 | r""" |
|
805 | 804 | # update the current fig |
|
806 | 805 | In [151]: ylabel('number') |
|
807 | 806 | |
|
808 | 807 | In [152]: title('normal distribution') |
|
809 | 808 | |
|
810 | 809 | |
|
811 | 810 | @savefig hist_with_text.png |
|
812 | 811 | In [153]: grid(True) |
|
813 | 812 | |
|
814 | 813 | """, |
|
815 | 814 | ] |
|
816 | 815 | # skip the first example, which depends on a local file:
|
817 | 816 | examples = examples[1:] |
|
818 | 817 | |
|
819 | 818 | #ipython_directive.DEBUG = True # dbg |
|
820 | 819 | #options = dict(suppress=True) # dbg |
|
821 | 820 | options = dict() |
|
822 | 821 | for example in examples: |
|
823 | 822 | content = example.split('\n') |
|
824 | 823 | IPythonDirective('debug', arguments=None, options=options, |
|
825 | 824 | content=content, lineno=0, |
|
826 | 825 | content_offset=None, block_text=None, |
|
827 | 826 | state=None, state_machine=None, |
|
828 | 827 | ) |
|
829 | 828 | |
|
830 | 829 | # Run test suite as a script |
|
831 | 830 | if __name__=='__main__': |
|
832 | 831 | if not os.path.isdir('_static'): |
|
833 | 832 | os.mkdir('_static') |
|
834 | 833 | test() |
|
835 | 834 | print('All OK? Check figures in _static/') |
@@ -1,167 +1,168 b'' | |||
|
1 | 1 | # encoding: utf-8 |
|
2 | 2 | """ |
|
3 | 3 | Older utilities that are not being used. |
|
4 | 4 | |
|
5 | 5 | WARNING: IF YOU NEED TO USE ONE OF THESE FUNCTIONS, PLEASE FIRST MOVE IT |
|
6 | 6 | TO ANOTHER APPROPRIATE MODULE IN IPython.utils. |
|
7 | 7 | """ |
|
8 | 8 | |
|
9 | 9 | #----------------------------------------------------------------------------- |
|
10 | 10 | # Copyright (C) 2008-2011 The IPython Development Team |
|
11 | 11 | # |
|
12 | 12 | # Distributed under the terms of the BSD License. The full license is in |
|
13 | 13 | # the file COPYING, distributed as part of this software. |
|
14 | 14 | #----------------------------------------------------------------------------- |
|
15 | 15 | |
|
16 | 16 | #----------------------------------------------------------------------------- |
|
17 | 17 | # Imports |
|
18 | 18 | #----------------------------------------------------------------------------- |
|
19 | 19 | |
|
20 | 20 | import sys |
|
21 | 21 | import warnings |
|
22 | 22 | |
|
23 | 23 | from IPython.utils.warn import warn |
|
24 | 24 | |
|
25 | 25 | #----------------------------------------------------------------------------- |
|
26 | 26 | # Code |
|
27 | 27 | #----------------------------------------------------------------------------- |
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | def mutex_opts(dict,ex_op): |
|
31 | 31 | """Check for presence of mutually exclusive keys in a dict. |
|
32 | 32 | |
|
33 | 33 | Call: mutex_opts(dict,[[op1a,op1b],[op2a,op2b],...])"""
|
34 | 34 | for op1,op2 in ex_op: |
|
35 | 35 | if op1 in dict and op2 in dict: |
|
36 | 36 | raise ValueError('\n*** ERROR in Arguments *** '\ |
|
37 | 37 | 'Options '+op1+' and '+op2+' are mutually exclusive.') |
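# --- Editor's sketch (not part of the original file): minimal use of
# mutex_opts; the option names here are invented for illustration.
try:
    mutex_opts({'quiet': True, 'verbose': True}, [['quiet', 'verbose']])
except ValueError as e:
    print(e)  # both keys are present, so the check raises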
|
38 | 38 | |
|
39 | 39 | |
|
40 | 40 | class EvalDict: |
|
41 | 41 | """ |
|
42 | 42 | Emulate a dict which evaluates its contents in the caller's frame. |
|
43 | 43 | |
|
44 | 44 | Usage: |
|
45 | 45 | >>> number = 19 |
|
46 | 46 | |
|
47 | 47 | >>> text = "python" |
|
48 | 48 | |
|
49 | 49 | >>> print("%(text.capitalize())s %(number/9.0).1f rules!" % EvalDict()) |
|
50 | 50 | Python 2.1 rules! |
|
51 | 51 | """ |
|
52 | 52 | |
|
53 | 53 | # This version is due to sismex01@hebmex.com on c.l.py, and is basically a |
|
54 | 54 | # modified (shorter) version of: |
|
55 | 55 | # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66018 by |
|
56 | 56 | # Skip Montanaro (skip@pobox.com). |
|
57 | 57 | |
|
58 | 58 | def __getitem__(self, name): |
|
59 | 59 | frame = sys._getframe(1) |
|
60 | 60 | return eval(name, frame.f_globals, frame.f_locals) |
|
61 | 61 | |
|
62 | 62 | EvalString = EvalDict # for backwards compatibility |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | def all_belong(candidates,checklist): |
|
66 | 66 | """Check whether a list of items ALL appear in a given list of options. |
|
67 | 67 | |
|
68 | 68 | Returns a single 1 or 0 value.""" |
|
69 | 69 | |
|
70 | 70 | return 1-(0 in [x in checklist for x in candidates]) |
|
71 | 71 | |
|
72 | 72 | |
|
73 | 73 | def belong(candidates,checklist): |
|
74 | 74 | """Check whether a list of items appear in a given list of options. |
|
75 | 75 | |
|
76 | 76 | Returns a list of booleans, one for each candidate given."""
|
77 | 77 | |
|
78 | 78 | return [x in checklist for x in candidates] |
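# --- Editor's sketch (not part of the original file): belong gives a
# per-candidate membership mask, all_belong collapses it to a single flag.
# The candidate and checklist values are invented for illustration.
print(belong(['a', 'x'], ['a', 'b', 'c']))      # [True, False]
print(all_belong(['a', 'x'], ['a', 'b', 'c']))  # 0: 'x' is not in the checklist
print(all_belong(['a', 'b'], ['a', 'b', 'c']))  # 1: every candidate matches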
|
79 | 79 | |
|
80 | 80 | |
|
81 | 81 | def with_obj(object, **args): |
|
82 | 82 | """Set multiple attributes for an object, similar to Pascal's with. |
|
83 | 83 | |
|
84 | Example: | |
|
85 | with_obj(jim, | |
|
86 | born = 1960, | |
|
87 | haircolour = 'Brown', | |
|
88 | eyecolour = 'Green') |
|
|
|
84 | Example:: | |
|
85 | ||
|
86 | with_obj(jim, | |
|
87 | born = 1960, | |
|
88 | haircolour = 'Brown', | |
|
89 | eyecolour = 'Green') | |
|
89 | 90 | |
|
90 | 91 | Credit: Greg Ewing, in |
|
91 | 92 | http://mail.python.org/pipermail/python-list/2001-May/040703.html. |
|
92 | 93 | |
|
93 | 94 | NOTE: up until IPython 0.7.2, this was called simply 'with', but 'with' |
|
94 | 95 | became a keyword in Python 2.5, so we had to rename it."""
|
95 | 96 | |
|
96 | 97 | object.__dict__.update(args) |
|
97 | 98 | |
|
98 | 99 | |
|
99 | 100 | def map_method(method,object_list,*argseq,**kw): |
|
100 | 101 | """map_method(method,object_list,*args,**kw) -> list |
|
101 | 102 | |
|
102 | 103 | Return a list of the results of applying the methods to the items of the |
|
103 | 104 | argument sequence(s). If more than one sequence is given, the method is |
|
104 | 105 | called with an argument list consisting of the corresponding item of each |
|
105 | 106 | sequence. All sequences must be of the same length. |
|
106 | 107 | |
|
107 | 108 | Keyword arguments are passed verbatim to all objects called. |
|
108 | 109 | |
|
109 | 110 | This is Python code, so it's not nearly as fast as the builtin map().""" |
|
110 | 111 | |
|
111 | 112 | out_list = [] |
|
112 | 113 | idx = 0 |
|
113 | 114 | for object in object_list: |
|
114 | 115 | try: |
|
115 | 116 | handler = getattr(object, method) |
|
116 | 117 | except AttributeError: |
|
117 | 118 | out_list.append(None) |
|
118 | 119 | else: |
|
119 | 120 | if argseq: |
|
120 | 121 | args = [lst[idx] for lst in argseq]  # idx-th item of each sequence
|
121 | 122 | #print 'ob',object,'hand',handler,'ar',args # dbg |
|
122 | 123 | out_list.append(handler(*args,**kw))  # one positional arg per sequence, like map()
|
123 | 124 | else: |
|
124 | 125 | out_list.append(handler(**kw)) |
|
125 | 126 | idx += 1 |
|
126 | 127 | return out_list |
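# --- Editor's sketch (not part of the original file): map_method applied to
# a list of strings; the method names and data are invented for illustration,
# and the example assumes the corrected *args call above.
words = ['alpha', 'beta']
print(map_method('upper', words))                            # ['ALPHA', 'BETA']
print(map_method('replace', words, ['a', 'b'], ['A', 'B']))  # ['AlphA', 'Beta']
print(map_method('upper', [42]))                             # [None]: int has no such method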
|
127 | 128 | |
|
128 | 129 | |
|
129 | 130 | def import_fail_info(mod_name,fns=None): |
|
130 | 131 | """Inform load failure for a module.""" |
|
131 | 132 | |
|
132 | 133 | if fns is None:
|
133 | 134 | warn("Loading of %s failed." % (mod_name,)) |
|
134 | 135 | else: |
|
135 | 136 | warn("Loading of %s from %s failed." % (fns,mod_name)) |
|
136 | 137 | |
|
137 | 138 | |
|
138 | 139 | class NotGiven: pass |
|
139 | 140 | |
|
140 | 141 | def popkey(dct,key,default=NotGiven): |
|
141 | 142 | """Return dct[key] and delete dct[key]. |
|
142 | 143 | |
|
143 | 144 | If default is given, return it if dct[key] doesn't exist, otherwise raise |
|
144 | 145 | KeyError. """ |
|
145 | 146 | |
|
146 | 147 | try: |
|
147 | 148 | val = dct[key] |
|
148 | 149 | except KeyError: |
|
149 | 150 | if default is NotGiven: |
|
150 | 151 | raise |
|
151 | 152 | else: |
|
152 | 153 | return default |
|
153 | 154 | else: |
|
154 | 155 | del dct[key] |
|
155 | 156 | return val |
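# --- Editor's sketch (not part of the original file): popkey behaves like
# dict.pop, including the default handling; the data below is invented.
d = {'a': 1}
print(popkey(d, 'a'))      # 1, and 'a' is removed from d
print(popkey(d, 'a', 42))  # 42: the key is gone, so the default is returned
# popkey(d, 'a') with no default would now raise KeyError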
|
156 | 157 | |
|
157 | 158 | |
|
158 | 159 | def wrap_deprecated(func, suggest = '<nothing>'): |
|
159 | 160 | def newFunc(*args, **kwargs): |
|
160 | 161 | warnings.warn("Call to deprecated function %s, use %s instead" % |
|
161 | 162 | ( func.__name__, suggest), |
|
162 | 163 | category=DeprecationWarning, |
|
163 | 164 | stacklevel = 2) |
|
164 | 165 | return func(*args, **kwargs) |
|
165 | 166 | return newFunc |
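# --- Editor's sketch (not part of the original file): wrapping a function so
# that callers get a DeprecationWarning; the names are invented. Note that
# DeprecationWarning may be silenced by default, so run Python with
# "-W always" (or configure the warnings module) to see the message.
def old_api(x):
    return x + 1

old_api = wrap_deprecated(old_api, suggest='new_api')
old_api(1)  # warns: "Call to deprecated function old_api, use new_api instead"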
|
166 | 167 | |
|
167 | 168 |