@@ -1,272 +1,257 @@
|
1 | 1 | # coding: utf-8 |
|
2 | 2 | """Tornado handlers for WebSocket <-> ZMQ sockets.""" |
|
3 | 3 | |
|
4 | 4 | # Copyright (c) IPython Development Team. |
|
5 | 5 | # Distributed under the terms of the Modified BSD License. |
|
6 | 6 | |
|
7 | 7 | import os |
|
8 | 8 | import json |
|
9 | 9 | import struct |
|
10 | 10 | import warnings |
|
11 | 11 | |
|
12 | 12 | try: |
|
13 | 13 | from urllib.parse import urlparse # Py 3 |
|
14 | 14 | except ImportError: |
|
15 | 15 | from urlparse import urlparse # Py 2 |
|
16 | 16 | |
|
17 | 17 | import tornado |
|
18 | 18 | from tornado import gen, ioloop, web |
|
19 | 19 | from tornado.websocket import WebSocketHandler |
|
20 | 20 | |
|
21 | 21 | from IPython.kernel.zmq.session import Session |
|
22 | 22 | from IPython.utils.jsonutil import date_default, extract_dates |
|
23 | 23 | from IPython.utils.py3compat import cast_unicode |
|
24 | 24 | |
|
25 | 25 | from .handlers import IPythonHandler |
|
26 | 26 | |
|
27 | 27 | def serialize_binary_message(msg): |
|
28 | 28 | """serialize a message as a binary blob |
|
29 | 29 | |
|
30 | 30 | Header: |
|
31 | 31 | |
|
32 | 32 | 4 bytes: number of msg parts (nbufs) as 32b int |
|
33 | 33 | 4 * nbufs bytes: offset for each buffer as integer as 32b int |
|
34 | 34 | |
|
35 | 35 | Offsets are from the start of the buffer, including the header. |
|
36 | 36 | |
|
37 | 37 | Returns |
|
38 | 38 | ------- |
|
39 | 39 | |
|
40 | 40 | The message serialized to bytes. |
|
41 | 41 | |
|
42 | 42 | """ |
|
43 | 43 | # don't modify msg or buffer list in-place |
|
44 | 44 | msg = msg.copy() |
|
45 | 45 | buffers = list(msg.pop('buffers')) |
|
46 | 46 | bmsg = json.dumps(msg, default=date_default).encode('utf8') |
|
47 | 47 | buffers.insert(0, bmsg) |
|
48 | 48 | nbufs = len(buffers) |
|
49 | 49 | offsets = [4 * (nbufs + 1)] |
|
50 | 50 | for buf in buffers[:-1]: |
|
51 | 51 | offsets.append(offsets[-1] + len(buf)) |
|
52 | 52 | offsets_buf = struct.pack('!' + 'I' * (nbufs + 1), nbufs, *offsets) |
|
53 | 53 | buffers.insert(0, offsets_buf) |
|
54 | 54 | return b''.join(buffers) |
|
55 | 55 | |
|
56 | 56 | |
|
57 | 57 | def deserialize_binary_message(bmsg): |
|
58 | 58 | """deserialize a message from a binary blog |
|
59 | 59 | |
|
60 | 60 | Header: |
|
61 | 61 | |
|
62 | 62 | 4 bytes: number of msg parts (nbufs) as 32b int |
|
63 | 63 | 4 * nbufs bytes: offset for each buffer as integer as 32b int |
|
64 | 64 | |
|
65 | 65 | Offsets are from the start of the buffer, including the header. |
|
66 | 66 | |
|
67 | 67 | Returns |
|
68 | 68 | ------- |
|
69 | 69 | |
|
70 | 70 | message dictionary |
|
71 | 71 | """ |
|
72 | 72 | nbufs = struct.unpack('!i', bmsg[:4])[0] |
|
73 | 73 | offsets = list(struct.unpack('!' + 'I' * nbufs, bmsg[4:4*(nbufs+1)])) |
|
74 | 74 | offsets.append(None) |
|
75 | 75 | bufs = [] |
|
76 | 76 | for start, stop in zip(offsets[:-1], offsets[1:]): |
|
77 | 77 | bufs.append(bmsg[start:stop]) |
|
78 | 78 | msg = json.loads(bufs[0].decode('utf8')) |
|
79 | 79 | msg['header'] = extract_dates(msg['header']) |
|
80 | 80 | msg['parent_header'] = extract_dates(msg['parent_header']) |
|
81 | 81 | msg['buffers'] = bufs[1:] |
|
82 | 82 | return msg |
|
83 | 83 | |
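The header layout described in the two docstrings above can be exercised on its own. The sketch below is illustration only, not part of this diff; it uses only the standard library to pack a JSON part plus one extra buffer with the same framing and read it back:

    import json
    import struct

    # one JSON part plus one binary buffer, as the functions above produce/consume
    parts = [json.dumps({"msg_type": "example"}).encode("utf8"), b"\x00\x01\x02"]
    nbufs = len(parts)
    offsets = [4 * (nbufs + 1)]  # header size: nbufs field plus one offset per part
    for part in parts[:-1]:
        offsets.append(offsets[-1] + len(part))
    blob = struct.pack('!' + 'I' * (nbufs + 1), nbufs, *offsets) + b''.join(parts)

    # parse it back: 4 bytes of nbufs, then nbufs 32-bit offsets, then the parts
    n = struct.unpack('!I', blob[:4])[0]
    offs = list(struct.unpack('!' + 'I' * n, blob[4:4 * (n + 1)])) + [None]
    assert [blob[a:b] for a, b in zip(offs[:-1], offs[1:])] == parts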
|
84 | 84 | # ping interval for keeping websockets alive (30 seconds) |
|
85 | 85 | WS_PING_INTERVAL = 30000 |
|
86 | 86 | |
|
87 | 87 | if os.environ.get('IPYTHON_ALLOW_DRAFT_WEBSOCKETS_FOR_PHANTOMJS', False): |
|
88 | 88 | warnings.warn("""Allowing draft76 websocket connections! |
|
89 | 89 | This should only be done for testing with phantomjs!""") |
|
90 | 90 | from IPython.html import allow76 |
|
91 | 91 | WebSocketHandler = allow76.AllowDraftWebSocketHandler |
|
92 | 92 | # draft 76 doesn't support ping |
|
93 | 93 | WS_PING_INTERVAL = 0 |
|
94 | 94 | |
|
95 | 95 | class ZMQStreamHandler(WebSocketHandler): |
|
96 | 96 | |
|
97 | 97 | def check_origin(self, origin): |
|
98 | 98 | """Check Origin == Host or Access-Control-Allow-Origin. |
|
99 | 99 | |
|
100 | 100 | Tornado >= 4 calls this method automatically, raising 403 if it returns False. |
|
101 | 101 | We call it explicitly in `open` on Tornado < 4. |
|
102 | 102 | """ |
|
103 | 103 | if self.allow_origin == '*': |
|
104 | 104 | return True |
|
105 | 105 | |
|
106 | 106 | host = self.request.headers.get("Host") |
|
107 | 107 | |
|
108 | 108 | # If no header is provided, assume we can't verify origin |
|
109 | 109 | if origin is None: |
|
110 | 110 | self.log.warn("Missing Origin header, rejecting WebSocket connection.") |
|
111 | 111 | return False |
|
112 | 112 | if host is None: |
|
113 | 113 | self.log.warn("Missing Host header, rejecting WebSocket connection.") |
|
114 | 114 | return False |
|
115 | 115 | |
|
116 | 116 | origin = origin.lower() |
|
117 | 117 | origin_host = urlparse(origin).netloc |
|
118 | 118 | |
|
119 | 119 | # OK if origin matches host |
|
120 | 120 | if origin_host == host: |
|
121 | 121 | return True |
|
122 | 122 | |
|
123 | 123 | # Check CORS headers |
|
124 | 124 | if self.allow_origin: |
|
125 | 125 | allow = self.allow_origin == origin |
|
126 | 126 | elif self.allow_origin_pat: |
|
127 | 127 | allow = bool(self.allow_origin_pat.match(origin)) |
|
128 | 128 | else: |
|
129 | 129 | # No CORS headers deny the request |
|
130 | 130 | allow = False |
|
131 | 131 | if not allow: |
|
132 | 132 | self.log.warn("Blocking Cross Origin WebSocket Attempt. Origin: %s, Host: %s", |
|
133 | 133 | origin, host, |
|
134 | 134 | ) |
|
135 | 135 | return allow |
|
136 | 136 | |
|
137 | 137 | def clear_cookie(self, *args, **kwargs): |
|
138 | 138 | """meaningless for websockets""" |
|
139 | 139 | pass |
|
140 | 140 | |
|
141 | 141 | def _reserialize_reply(self, msg_list): |
|
142 | 142 | """Reserialize a reply message using JSON. |
|
143 | 143 | |
|
144 | 144 | This takes the msg list from the ZMQ socket, deserializes it using |
|
145 | 145 | self.session and then serializes the result using JSON. This method |
|
146 | 146 | should be used by self._on_zmq_reply to build messages that can |
|
147 | 147 | be sent back to the browser. |
|
148 | 148 | """ |
|
149 | 149 | idents, msg_list = self.session.feed_identities(msg_list) |
|
150 | 150 | msg = self.session.deserialize(msg_list) |
|
151 | 151 | if msg['buffers']: |
|
152 | 152 | buf = serialize_binary_message(msg) |
|
153 | 153 | return buf |
|
154 | 154 | else: |
|
155 | 155 | smsg = json.dumps(msg, default=date_default) |
|
156 | 156 | return cast_unicode(smsg) |
|
157 | 157 | |
|
158 | 158 | def _on_zmq_reply(self, msg_list): |
|
159 | 159 | # Sometimes this gets triggered when the on_close method is scheduled in the |
|
160 | 160 | # eventloop but hasn't been called. |
|
161 | 161 | if self.stream.closed(): return |
|
162 | 162 | try: |
|
163 | 163 | msg = self._reserialize_reply(msg_list) |
|
164 | 164 | except Exception: |
|
165 | 165 | self.log.critical("Malformed message: %r" % msg_list, exc_info=True) |
|
166 | 166 | else: |
|
167 | 167 | self.write_message(msg, binary=isinstance(msg, bytes)) |
|
168 | 168 | |
|
169 | 169 | class AuthenticatedZMQStreamHandler(ZMQStreamHandler, IPythonHandler): |
|
170 | 170 | ping_callback = None |
|
171 | 171 | last_ping = 0 |
|
172 | 172 | last_pong = 0 |
|
173 | 173 | |
|
174 | 174 | @property |
|
175 | 175 | def ping_interval(self): |
|
176 | 176 | """The interval for websocket keep-alive pings. |
|
177 | 177 | |
|
178 | 178 | Set ws_ping_interval = 0 to disable pings. |
|
179 | 179 | """ |
|
180 | 180 | return self.settings.get('ws_ping_interval', WS_PING_INTERVAL) |
|
181 | 181 | |
|
182 | 182 | @property |
|
183 | 183 | def ping_timeout(self): |
|
184 | 184 | """If no ping is received in this many milliseconds, |
|
185 | 185 | close the websocket connection (VPNs, etc. can fail to cleanly close ws connections). |
|
186 | 186 | Default is max of 3 pings or 30 seconds. |
|
187 | 187 | """ |
|
188 | 188 | return self.settings.get('ws_ping_timeout', |
|
189 | 189 | max(3 * self.ping_interval, WS_PING_INTERVAL) |
|
190 | 190 | ) |
|
191 | 191 | |
|
192 | 192 | def set_default_headers(self): |
|
193 | 193 | """Undo the set_default_headers in IPythonHandler |
|
194 | 194 | |
|
195 | 195 | which doesn't make sense for websockets |
|
196 | 196 | """ |
|
197 | 197 | pass |
|
198 | 198 | |
|
199 | 199 | def pre_get(self): |
|
200 | 200 | """Run before finishing the GET request |
|
201 | 201 | |
|
202 | 202 | Extend this method to add logic that should fire before |
|
203 | 203 | the websocket finishes completing. |
|
204 | 204 | """ |
|
205 | # Check to see that origin matches host directly, including ports | |
|
206 | # Tornado 4 already does CORS checking | |
|
207 | if tornado.version_info[0] < 4: | |
|
208 | if not self.check_origin(self.get_origin()): | |
|
209 | raise web.HTTPError(403) | |
|
210 | ||
|
211 | 205 | # authenticate the request before opening the websocket |
|
212 | 206 | if self.get_current_user() is None: |
|
213 | 207 | self.log.warn("Couldn't authenticate WebSocket connection") |
|
214 | 208 | raise web.HTTPError(403) |
|
215 | 209 | |
|
216 | 210 | if self.get_argument('session_id', False): |
|
217 | 211 | self.session.session = cast_unicode(self.get_argument('session_id')) |
|
218 | 212 | else: |
|
219 | 213 | self.log.warn("No session ID specified") |
|
220 | 214 | |
|
221 | 215 | @gen.coroutine |
|
222 | 216 | def get(self, *args, **kwargs): |
|
223 | 217 | # pre_get can be a coroutine in subclasses |
|
224 | 218 | # assign and yield in two step to avoid tornado 3 issues |
|
225 | 219 | res = self.pre_get() |
|
226 | 220 | yield gen.maybe_future(res) |
|
227 | # FIXME: only do super get on tornado β₯ 4 | |
|
228 | # tornado 3 has no get, will raise 405 | |
|
229 | if tornado.version_info >= (4,): | |
|
230 | super(AuthenticatedZMQStreamHandler, self).get(*args, **kwargs) | |
|
221 | super(AuthenticatedZMQStreamHandler, self).get(*args, **kwargs) | |
|
231 | 222 | |
|
232 | 223 | def initialize(self): |
|
233 | 224 | self.log.debug("Initializing websocket connection %s", self.request.path) |
|
234 | 225 | self.session = Session(config=self.config) |
|
235 | 226 | |
|
236 | 227 | def open(self, *args, **kwargs): |
|
237 | 228 | self.log.debug("Opening websocket %s", self.request.path) |
|
238 | if tornado.version_info < (4,): | |
|
239 | try: | |
|
240 | self.get(*self.open_args, **self.open_kwargs) | |
|
241 | except web.HTTPError: | |
|
242 | self.close() | |
|
243 | raise | |
|
244 | 229 | |
|
245 | 230 | # start the pinging |
|
246 | 231 | if self.ping_interval > 0: |
|
247 | 232 | self.last_ping = ioloop.IOLoop.instance().time() # Remember time of last ping |
|
248 | 233 | self.last_pong = self.last_ping |
|
249 | 234 | self.ping_callback = ioloop.PeriodicCallback(self.send_ping, self.ping_interval) |
|
250 | 235 | self.ping_callback.start() |
|
251 | 236 | |
|
252 | 237 | def send_ping(self): |
|
253 | 238 | """send a ping to keep the websocket alive""" |
|
254 | 239 | if self.stream.closed() and self.ping_callback is not None: |
|
255 | 240 | self.ping_callback.stop() |
|
256 | 241 | return |
|
257 | 242 | |
|
258 | 243 | # check for timeout on pong. Make sure that we really have sent a recent ping in |
|
259 | 244 | # case the machine with both server and client has been suspended since the last ping. |
|
260 | 245 | now = ioloop.IOLoop.instance().time() |
|
261 | 246 | since_last_pong = 1e3 * (now - self.last_pong) |
|
262 | 247 | since_last_ping = 1e3 * (now - self.last_ping) |
|
263 | 248 | if since_last_ping < 2*self.ping_interval and since_last_pong > self.ping_timeout: |
|
264 | 249 | self.log.warn("WebSocket ping timeout after %i ms.", since_last_pong) |
|
265 | 250 | self.close() |
|
266 | 251 | return |
|
267 | 252 | |
|
268 | 253 | self.ping(b'') |
|
269 | 254 | self.last_ping = now |
|
270 | 255 | |
|
271 | 256 | def on_pong(self, data): |
|
272 | 257 | self.last_pong = ioloop.IOLoop.instance().time() |
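The keep-alive code above reads ws_ping_interval and ws_ping_timeout (both in milliseconds) from the application settings, so the 30-second default can be tuned without touching this file. A hedged sketch of an ipython_notebook_config.py entry, assuming the tornado_settings override path defined in notebookapp.py below:

    c = get_config()  # provided by IPython when it loads the config file
    c.NotebookApp.tornado_settings = {
        'ws_ping_interval': 10000,  # ping every 10 s instead of the 30 s default
        'ws_ping_timeout': 40000,   # close the socket after 40 s without a pong
    }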
@@ -1,987 +1,987 @@
|
1 | 1 | # coding: utf-8 |
|
2 | 2 | """A tornado based IPython notebook server.""" |
|
3 | 3 | |
|
4 | 4 | # Copyright (c) IPython Development Team. |
|
5 | 5 | # Distributed under the terms of the Modified BSD License. |
|
6 | 6 | |
|
7 | 7 | from __future__ import print_function |
|
8 | 8 | |
|
9 | 9 | import base64 |
|
10 | 10 | import errno |
|
11 | 11 | import io |
|
12 | 12 | import json |
|
13 | 13 | import logging |
|
14 | 14 | import os |
|
15 | 15 | import random |
|
16 | 16 | import re |
|
17 | 17 | import select |
|
18 | 18 | import signal |
|
19 | 19 | import socket |
|
20 | 20 | import sys |
|
21 | 21 | import threading |
|
22 | 22 | import time |
|
23 | 23 | import webbrowser |
|
24 | 24 | |
|
25 | 25 | |
|
26 | 26 | # check for pyzmq 2.1.11 |
|
27 | 27 | from IPython.utils.zmqrelated import check_for_zmq |
|
28 | 28 | check_for_zmq('2.1.11', 'IPython.html') |
|
29 | 29 | |
|
30 | 30 | from jinja2 import Environment, FileSystemLoader |
|
31 | 31 | |
|
32 | 32 | # Install the pyzmq ioloop. This has to be done before anything else from |
|
33 | 33 | # tornado is imported. |
|
34 | 34 | from zmq.eventloop import ioloop |
|
35 | 35 | ioloop.install() |
|
36 | 36 | |
|
37 | 37 | # check for tornado 3.1.0 |
|
38 | msg = "The IPython Notebook requires tornado >= 3.1.0" | |
|
38 | msg = "The IPython Notebook requires tornado >= 4.0" |
|
39 | 39 | try: |
|
40 | 40 | import tornado |
|
41 | 41 | except ImportError: |
|
42 | 42 | raise ImportError(msg) |
|
43 | 43 | try: |
|
44 | 44 | version_info = tornado.version_info |
|
45 | 45 | except AttributeError: |
|
46 | 46 | raise ImportError(msg + ", but you have < 1.1.0") |
|
47 | if version_info < (3,1,0): | |
|
47 | if version_info < (4,0): |
|
48 | 48 | raise ImportError(msg + ", but you have %s" % tornado.version) |
|
49 | 49 | |
|
50 | 50 | from tornado import httpserver |
|
51 | 51 | from tornado import web |
|
52 | 52 | from tornado.log import LogFormatter, app_log, access_log, gen_log |
|
53 | 53 | |
|
54 | 54 | from IPython.html import ( |
|
55 | 55 | DEFAULT_STATIC_FILES_PATH, |
|
56 | 56 | DEFAULT_TEMPLATE_PATH_LIST, |
|
57 | 57 | ) |
|
58 | 58 | from .base.handlers import Template404 |
|
59 | 59 | from .log import log_request |
|
60 | 60 | from .services.kernels.kernelmanager import MappingKernelManager |
|
61 | 61 | from .services.contents.manager import ContentsManager |
|
62 | 62 | from .services.contents.filemanager import FileContentsManager |
|
63 | 63 | from .services.clusters.clustermanager import ClusterManager |
|
64 | 64 | from .services.sessions.sessionmanager import SessionManager |
|
65 | 65 | |
|
66 | 66 | from .base.handlers import AuthenticatedFileHandler, FileFindHandler |
|
67 | 67 | |
|
68 | 68 | from IPython.config import Config |
|
69 | 69 | from IPython.config.application import catch_config_error, boolean_flag |
|
70 | 70 | from IPython.core.application import ( |
|
71 | 71 | BaseIPythonApplication, base_flags, base_aliases, |
|
72 | 72 | ) |
|
73 | 73 | from IPython.core.profiledir import ProfileDir |
|
74 | 74 | from IPython.kernel import KernelManager |
|
75 | 75 | from IPython.kernel.kernelspec import KernelSpecManager |
|
76 | 76 | from IPython.kernel.zmq.session import default_secure, Session |
|
77 | 77 | from IPython.nbformat.sign import NotebookNotary |
|
78 | 78 | from IPython.utils.importstring import import_item |
|
79 | 79 | from IPython.utils import submodule |
|
80 | 80 | from IPython.utils.process import check_pid |
|
81 | 81 | from IPython.utils.traitlets import ( |
|
82 | 82 | Dict, Unicode, Integer, List, Bool, Bytes, Instance, |
|
83 | 83 | DottedObjectName, TraitError, |
|
84 | 84 | ) |
|
85 | 85 | from IPython.utils import py3compat |
|
86 | 86 | from IPython.utils.path import filefind, get_ipython_dir |
|
87 | 87 | |
|
88 | 88 | from .utils import url_path_join |
|
89 | 89 | |
|
90 | 90 | #----------------------------------------------------------------------------- |
|
91 | 91 | # Module globals |
|
92 | 92 | #----------------------------------------------------------------------------- |
|
93 | 93 | |
|
94 | 94 | _examples = """ |
|
95 | 95 | ipython notebook # start the notebook |
|
96 | 96 | ipython notebook --profile=sympy # use the sympy profile |
|
97 | 97 | ipython notebook --certfile=mycert.pem # use SSL/TLS certificate |
|
98 | 98 | """ |
|
99 | 99 | |
|
100 | 100 | #----------------------------------------------------------------------------- |
|
101 | 101 | # Helper functions |
|
102 | 102 | #----------------------------------------------------------------------------- |
|
103 | 103 | |
|
104 | 104 | def random_ports(port, n): |
|
105 | 105 | """Generate a list of n random ports near the given port. |
|
106 | 106 | |
|
107 | 107 | The first 5 ports will be sequential, and the remaining n-5 will be |
|
108 | 108 | randomly selected in the range [port-2*n, port+2*n]. |
|
109 | 109 | """ |
|
110 | 110 | for i in range(min(5, n)): |
|
111 | 111 | yield port + i |
|
112 | 112 | for i in range(n-5): |
|
113 | 113 | yield max(1, port + random.randint(-2*n, 2*n)) |
|
114 | 114 | |
|
115 | 115 | def load_handlers(name): |
|
116 | 116 | """Load the (URL pattern, handler) tuples for each component.""" |
|
117 | 117 | name = 'IPython.html.' + name |
|
118 | 118 | mod = __import__(name, fromlist=['default_handlers']) |
|
119 | 119 | return mod.default_handlers |
|
120 | 120 | |
|
121 | 121 | #----------------------------------------------------------------------------- |
|
122 | 122 | # The Tornado web application |
|
123 | 123 | #----------------------------------------------------------------------------- |
|
124 | 124 | |
|
125 | 125 | class NotebookWebApplication(web.Application): |
|
126 | 126 | |
|
127 | 127 | def __init__(self, ipython_app, kernel_manager, contents_manager, |
|
128 | 128 | cluster_manager, session_manager, kernel_spec_manager, log, |
|
129 | 129 | base_url, default_url, settings_overrides, jinja_env_options): |
|
130 | 130 | |
|
131 | 131 | settings = self.init_settings( |
|
132 | 132 | ipython_app, kernel_manager, contents_manager, cluster_manager, |
|
133 | 133 | session_manager, kernel_spec_manager, log, base_url, default_url, |
|
134 | 134 | settings_overrides, jinja_env_options) |
|
135 | 135 | handlers = self.init_handlers(settings) |
|
136 | 136 | |
|
137 | 137 | super(NotebookWebApplication, self).__init__(handlers, **settings) |
|
138 | 138 | |
|
139 | 139 | def init_settings(self, ipython_app, kernel_manager, contents_manager, |
|
140 | 140 | cluster_manager, session_manager, kernel_spec_manager, |
|
141 | 141 | log, base_url, default_url, settings_overrides, |
|
142 | 142 | jinja_env_options=None): |
|
143 | 143 | |
|
144 | 144 | _template_path = settings_overrides.get( |
|
145 | 145 | "template_path", |
|
146 | 146 | ipython_app.template_file_path, |
|
147 | 147 | ) |
|
148 | 148 | if isinstance(_template_path, str): |
|
149 | 149 | _template_path = (_template_path,) |
|
150 | 150 | template_path = [os.path.expanduser(path) for path in _template_path] |
|
151 | 151 | |
|
152 | 152 | jenv_opt = jinja_env_options if jinja_env_options else {} |
|
153 | 153 | env = Environment(loader=FileSystemLoader(template_path), **jenv_opt) |
|
154 | 154 | settings = dict( |
|
155 | 155 | # basics |
|
156 | 156 | log_function=log_request, |
|
157 | 157 | base_url=base_url, |
|
158 | 158 | default_url=default_url, |
|
159 | 159 | template_path=template_path, |
|
160 | 160 | static_path=ipython_app.static_file_path, |
|
161 | 161 | static_handler_class = FileFindHandler, |
|
162 | 162 | static_url_prefix = url_path_join(base_url,'/static/'), |
|
163 | 163 | |
|
164 | 164 | # authentication |
|
165 | 165 | cookie_secret=ipython_app.cookie_secret, |
|
166 | 166 | login_url=url_path_join(base_url,'/login'), |
|
167 | 167 | password=ipython_app.password, |
|
168 | 168 | |
|
169 | 169 | # managers |
|
170 | 170 | kernel_manager=kernel_manager, |
|
171 | 171 | contents_manager=contents_manager, |
|
172 | 172 | cluster_manager=cluster_manager, |
|
173 | 173 | session_manager=session_manager, |
|
174 | 174 | kernel_spec_manager=kernel_spec_manager, |
|
175 | 175 | |
|
176 | 176 | # IPython stuff |
|
177 | 177 | nbextensions_path = ipython_app.nbextensions_path, |
|
178 | 178 | websocket_url=ipython_app.websocket_url, |
|
179 | 179 | mathjax_url=ipython_app.mathjax_url, |
|
180 | 180 | config=ipython_app.config, |
|
181 | 181 | jinja2_env=env, |
|
182 | 182 | terminals_available=False, # Set later if terminals are available |
|
183 | 183 | ) |
|
184 | 184 | |
|
185 | 185 | # allow custom overrides for the tornado web app. |
|
186 | 186 | settings.update(settings_overrides) |
|
187 | 187 | return settings |
|
188 | 188 | |
|
189 | 189 | def init_handlers(self, settings): |
|
190 | 190 | # Load the (URL pattern, handler) tuples for each component. |
|
191 | 191 | handlers = [] |
|
192 | 192 | handlers.extend(load_handlers('base.handlers')) |
|
193 | 193 | handlers.extend(load_handlers('tree.handlers')) |
|
194 | 194 | handlers.extend(load_handlers('auth.login')) |
|
195 | 195 | handlers.extend(load_handlers('auth.logout')) |
|
196 | 196 | handlers.extend(load_handlers('files.handlers')) |
|
197 | 197 | handlers.extend(load_handlers('notebook.handlers')) |
|
198 | 198 | handlers.extend(load_handlers('nbconvert.handlers')) |
|
199 | 199 | handlers.extend(load_handlers('kernelspecs.handlers')) |
|
200 | 200 | handlers.extend(load_handlers('services.kernels.handlers')) |
|
201 | 201 | handlers.extend(load_handlers('services.contents.handlers')) |
|
202 | 202 | handlers.extend(load_handlers('services.clusters.handlers')) |
|
203 | 203 | handlers.extend(load_handlers('services.sessions.handlers')) |
|
204 | 204 | handlers.extend(load_handlers('services.nbconvert.handlers')) |
|
205 | 205 | handlers.extend(load_handlers('services.kernelspecs.handlers')) |
|
206 | 206 | handlers.append( |
|
207 | 207 | (r"/nbextensions/(.*)", FileFindHandler, {'path' : settings['nbextensions_path']}), |
|
208 | 208 | ) |
|
209 | 209 | # set the URL that will be redirected from `/` |
|
210 | 210 | handlers.append( |
|
211 | 211 | (r'/?', web.RedirectHandler, { |
|
212 | 212 | 'url' : url_path_join(settings['base_url'], settings['default_url']), |
|
213 | 213 | 'permanent': False, # want 302, not 301 |
|
214 | 214 | }) |
|
215 | 215 | ) |
|
216 | 216 | # prepend base_url onto the patterns that we match |
|
217 | 217 | new_handlers = [] |
|
218 | 218 | for handler in handlers: |
|
219 | 219 | pattern = url_path_join(settings['base_url'], handler[0]) |
|
220 | 220 | new_handler = tuple([pattern] + list(handler[1:])) |
|
221 | 221 | new_handlers.append(new_handler) |
|
222 | 222 | # add 404 on the end, which will catch everything that falls through |
|
223 | 223 | new_handlers.append((r'(.*)', Template404)) |
|
224 | 224 | return new_handlers |
|
225 | 225 | |
|
226 | 226 | |
|
227 | 227 | class NbserverListApp(BaseIPythonApplication): |
|
228 | 228 | |
|
229 | 229 | description="List currently running notebook servers in this profile." |
|
230 | 230 | |
|
231 | 231 | flags = dict( |
|
232 | 232 | json=({'NbserverListApp': {'json': True}}, |
|
233 | 233 | "Produce machine-readable JSON output."), |
|
234 | 234 | ) |
|
235 | 235 | |
|
236 | 236 | json = Bool(False, config=True, |
|
237 | 237 | help="If True, each line of output will be a JSON object with the " |
|
238 | 238 | "details from the server info file.") |
|
239 | 239 | |
|
240 | 240 | def start(self): |
|
241 | 241 | if not self.json: |
|
242 | 242 | print("Currently running servers:") |
|
243 | 243 | for serverinfo in list_running_servers(self.profile): |
|
244 | 244 | if self.json: |
|
245 | 245 | print(json.dumps(serverinfo)) |
|
246 | 246 | else: |
|
247 | 247 | print(serverinfo['url'], "::", serverinfo['notebook_dir']) |
|
248 | 248 | |
|
249 | 249 | #----------------------------------------------------------------------------- |
|
250 | 250 | # Aliases and Flags |
|
251 | 251 | #----------------------------------------------------------------------------- |
|
252 | 252 | |
|
253 | 253 | flags = dict(base_flags) |
|
254 | 254 | flags['no-browser']=( |
|
255 | 255 | {'NotebookApp' : {'open_browser' : False}}, |
|
256 | 256 | "Don't open the notebook in a browser after startup." |
|
257 | 257 | ) |
|
258 | 258 | flags['pylab']=( |
|
259 | 259 | {'NotebookApp' : {'pylab' : 'warn'}}, |
|
260 | 260 | "DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib." |
|
261 | 261 | ) |
|
262 | 262 | flags['no-mathjax']=( |
|
263 | 263 | {'NotebookApp' : {'enable_mathjax' : False}}, |
|
264 | 264 | """Disable MathJax |
|
265 | 265 | |
|
266 | 266 | MathJax is the javascript library IPython uses to render math/LaTeX. It is |
|
267 | 267 | very large, so you may want to disable it if you have a slow internet |
|
268 | 268 | connection, or for offline use of the notebook. |
|
269 | 269 | |
|
270 | 270 | When disabled, equations etc. will appear as their untransformed TeX source. |
|
271 | 271 | """ |
|
272 | 272 | ) |
|
273 | 273 | |
|
274 | 274 | # Add notebook manager flags |
|
275 | 275 | flags.update(boolean_flag('script', 'FileContentsManager.save_script', |
|
276 | 276 | 'DEPRECATED, IGNORED', |
|
277 | 277 | 'DEPRECATED, IGNORED')) |
|
278 | 278 | |
|
279 | 279 | aliases = dict(base_aliases) |
|
280 | 280 | |
|
281 | 281 | aliases.update({ |
|
282 | 282 | 'ip': 'NotebookApp.ip', |
|
283 | 283 | 'port': 'NotebookApp.port', |
|
284 | 284 | 'port-retries': 'NotebookApp.port_retries', |
|
285 | 285 | 'transport': 'KernelManager.transport', |
|
286 | 286 | 'keyfile': 'NotebookApp.keyfile', |
|
287 | 287 | 'certfile': 'NotebookApp.certfile', |
|
288 | 288 | 'notebook-dir': 'NotebookApp.notebook_dir', |
|
289 | 289 | 'browser': 'NotebookApp.browser', |
|
290 | 290 | 'pylab': 'NotebookApp.pylab', |
|
291 | 291 | }) |
|
292 | 292 | |
|
293 | 293 | #----------------------------------------------------------------------------- |
|
294 | 294 | # NotebookApp |
|
295 | 295 | #----------------------------------------------------------------------------- |
|
296 | 296 | |
|
297 | 297 | class NotebookApp(BaseIPythonApplication): |
|
298 | 298 | |
|
299 | 299 | name = 'ipython-notebook' |
|
300 | 300 | |
|
301 | 301 | description = """ |
|
302 | 302 | The IPython HTML Notebook. |
|
303 | 303 | |
|
304 | 304 | This launches a Tornado based HTML Notebook Server that serves up an |
|
305 | 305 | HTML5/Javascript Notebook client. |
|
306 | 306 | """ |
|
307 | 307 | examples = _examples |
|
308 | 308 | aliases = aliases |
|
309 | 309 | flags = flags |
|
310 | 310 | |
|
311 | 311 | classes = [ |
|
312 | 312 | KernelManager, ProfileDir, Session, MappingKernelManager, |
|
313 | 313 | ContentsManager, FileContentsManager, NotebookNotary, |
|
314 | 314 | ] |
|
315 | 315 | flags = Dict(flags) |
|
316 | 316 | aliases = Dict(aliases) |
|
317 | 317 | |
|
318 | 318 | subcommands = dict( |
|
319 | 319 | list=(NbserverListApp, NbserverListApp.description.splitlines()[0]), |
|
320 | 320 | ) |
|
321 | 321 | |
|
322 | 322 | kernel_argv = List(Unicode) |
|
323 | 323 | |
|
324 | 324 | _log_formatter_cls = LogFormatter |
|
325 | 325 | |
|
326 | 326 | def _log_level_default(self): |
|
327 | 327 | return logging.INFO |
|
328 | 328 | |
|
329 | 329 | def _log_datefmt_default(self): |
|
330 | 330 | """Exclude date from default date format""" |
|
331 | 331 | return "%H:%M:%S" |
|
332 | 332 | |
|
333 | 333 | def _log_format_default(self): |
|
334 | 334 | """override default log format to include time""" |
|
335 | 335 | return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" |
|
336 | 336 | |
|
337 | 337 | # create requested profiles by default, if they don't exist: |
|
338 | 338 | auto_create = Bool(True) |
|
339 | 339 | |
|
340 | 340 | # file to be opened in the notebook server |
|
341 | 341 | file_to_run = Unicode('', config=True) |
|
342 | 342 | def _file_to_run_changed(self, name, old, new): |
|
343 | 343 | path, base = os.path.split(new) |
|
344 | 344 | if path: |
|
345 | 345 | self.file_to_run = base |
|
346 | 346 | self.notebook_dir = path |
|
347 | 347 | |
|
348 | 348 | # Network related information |
|
349 | 349 | |
|
350 | 350 | allow_origin = Unicode('', config=True, |
|
351 | 351 | help="""Set the Access-Control-Allow-Origin header |
|
352 | 352 | |
|
353 | 353 | Use '*' to allow any origin to access your server. |
|
354 | 354 | |
|
355 | 355 | Takes precedence over allow_origin_pat. |
|
356 | 356 | """ |
|
357 | 357 | ) |
|
358 | 358 | |
|
359 | 359 | allow_origin_pat = Unicode('', config=True, |
|
360 | 360 | help="""Use a regular expression for the Access-Control-Allow-Origin header |
|
361 | 361 | |
|
362 | 362 | Requests from an origin matching the expression will get replies with: |
|
363 | 363 | |
|
364 | 364 | Access-Control-Allow-Origin: origin |
|
365 | 365 | |
|
366 | 366 | where `origin` is the origin of the request. |
|
367 | 367 | |
|
368 | 368 | Ignored if allow_origin is set. |
|
369 | 369 | """ |
|
370 | 370 | ) |
|
371 | 371 | |
|
372 | 372 | allow_credentials = Bool(False, config=True, |
|
373 | 373 | help="Set the Access-Control-Allow-Credentials: true header" |
|
374 | 374 | ) |
|
375 | 375 | |
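The allow_origin, allow_origin_pat, and allow_credentials traits above are the knobs for cross-origin access. A minimal config-file sketch (illustration only; the host names are invented):

    c = get_config()
    # allow exactly one external origin (takes precedence over the pattern)
    c.NotebookApp.allow_origin = 'https://notebooks.example.com'
    # or accept a whole family of origins instead:
    # c.NotebookApp.allow_origin_pat = r'https://.*\.example\.com'
    c.NotebookApp.allow_credentials = True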
|
376 | 376 | default_url = Unicode('/tree', config=True, |
|
377 | 377 | help="The default URL to redirect to from `/`" |
|
378 | 378 | ) |
|
379 | 379 | |
|
380 | 380 | ip = Unicode('localhost', config=True, |
|
381 | 381 | help="The IP address the notebook server will listen on." |
|
382 | 382 | ) |
|
383 | 383 | |
|
384 | 384 | def _ip_changed(self, name, old, new): |
|
385 | 385 | if new == u'*': self.ip = u'' |
|
386 | 386 | |
|
387 | 387 | port = Integer(8888, config=True, |
|
388 | 388 | help="The port the notebook server will listen on." |
|
389 | 389 | ) |
|
390 | 390 | port_retries = Integer(50, config=True, |
|
391 | 391 | help="The number of additional ports to try if the specified port is not available." |
|
392 | 392 | ) |
|
393 | 393 | |
|
394 | 394 | certfile = Unicode(u'', config=True, |
|
395 | 395 | help="""The full path to an SSL/TLS certificate file.""" |
|
396 | 396 | ) |
|
397 | 397 | |
|
398 | 398 | keyfile = Unicode(u'', config=True, |
|
399 | 399 | help="""The full path to a private key file for usage with SSL/TLS.""" |
|
400 | 400 | ) |
|
401 | 401 | |
|
402 | 402 | cookie_secret_file = Unicode(config=True, |
|
403 | 403 | help="""The file where the cookie secret is stored.""" |
|
404 | 404 | ) |
|
405 | 405 | def _cookie_secret_file_default(self): |
|
406 | 406 | if self.profile_dir is None: |
|
407 | 407 | return '' |
|
408 | 408 | return os.path.join(self.profile_dir.security_dir, 'notebook_cookie_secret') |
|
409 | 409 | |
|
410 | 410 | cookie_secret = Bytes(b'', config=True, |
|
411 | 411 | help="""The random bytes used to secure cookies. |
|
412 | 412 | By default this is a new random number every time you start the Notebook. |
|
413 | 413 | Set it to a value in a config file to enable logins to persist across server sessions. |
|
414 | 414 | |
|
415 | 415 | Note: Cookie secrets should be kept private, do not share config files with |
|
416 | 416 | cookie_secret stored in plaintext (you can read the value from a file). |
|
417 | 417 | """ |
|
418 | 418 | ) |
|
419 | 419 | def _cookie_secret_default(self): |
|
420 | 420 | if os.path.exists(self.cookie_secret_file): |
|
421 | 421 | with io.open(self.cookie_secret_file, 'rb') as f: |
|
422 | 422 | return f.read() |
|
423 | 423 | else: |
|
424 | 424 | secret = base64.encodestring(os.urandom(1024)) |
|
425 | 425 | self._write_cookie_secret_file(secret) |
|
426 | 426 | return secret |
|
427 | 427 | |
|
428 | 428 | def _write_cookie_secret_file(self, secret): |
|
429 | 429 | """write my secret to my secret_file""" |
|
430 | 430 | self.log.info("Writing notebook server cookie secret to %s", self.cookie_secret_file) |
|
431 | 431 | with io.open(self.cookie_secret_file, 'wb') as f: |
|
432 | 432 | f.write(secret) |
|
433 | 433 | try: |
|
434 | 434 | os.chmod(self.cookie_secret_file, 0o600) |
|
435 | 435 | except OSError: |
|
436 | 436 | self.log.warn( |
|
437 | 437 | "Could not set permissions on %s", |
|
438 | 438 | self.cookie_secret_file |
|
439 | 439 | ) |
|
440 | 440 | |
|
441 | 441 | password = Unicode(u'', config=True, |
|
442 | 442 | help="""Hashed password to use for web authentication. |
|
443 | 443 | |
|
444 | 444 | To generate, type in a python/IPython shell: |
|
445 | 445 | |
|
446 | 446 | from IPython.lib import passwd; passwd() |
|
447 | 447 | |
|
448 | 448 | The string should be of the form type:salt:hashed-password. |
|
449 | 449 | """ |
|
450 | 450 | ) |
|
451 | 451 | |
|
452 | 452 | open_browser = Bool(True, config=True, |
|
453 | 453 | help="""Whether to open in a browser after starting. |
|
454 | 454 | The specific browser used is platform dependent and |
|
455 | 455 | determined by the python standard library `webbrowser` |
|
456 | 456 | module, unless it is overridden using the --browser |
|
457 | 457 | (NotebookApp.browser) configuration option. |
|
458 | 458 | """) |
|
459 | 459 | |
|
460 | 460 | browser = Unicode(u'', config=True, |
|
461 | 461 | help="""Specify what command to use to invoke a web |
|
462 | 462 | browser when opening the notebook. If not specified, the |
|
463 | 463 | default browser will be determined by the `webbrowser` |
|
464 | 464 | standard library module, which allows setting of the |
|
465 | 465 | BROWSER environment variable to override it. |
|
466 | 466 | """) |
|
467 | 467 | |
|
468 | 468 | webapp_settings = Dict(config=True, |
|
469 | 469 | help="DEPRECATED, use tornado_settings" |
|
470 | 470 | ) |
|
471 | 471 | def _webapp_settings_changed(self, name, old, new): |
|
472 | 472 | self.log.warn("\n webapp_settings is deprecated, use tornado_settings.\n") |
|
473 | 473 | self.tornado_settings = new |
|
474 | 474 | |
|
475 | 475 | tornado_settings = Dict(config=True, |
|
476 | 476 | help="Supply overrides for the tornado.web.Application that the " |
|
477 | 477 | "IPython notebook uses.") |
|
478 | 478 | |
|
479 | 479 | jinja_environment_options = Dict(config=True, |
|
480 | 480 | help="Supply extra arguments that will be passed to Jinja environment.") |
|
481 | 481 | |
|
482 | 482 | |
|
483 | 483 | enable_mathjax = Bool(True, config=True, |
|
484 | 484 | help="""Whether to enable MathJax for typesetting math/TeX |
|
485 | 485 | |
|
486 | 486 | MathJax is the javascript library IPython uses to render math/LaTeX. It is |
|
487 | 487 | very large, so you may want to disable it if you have a slow internet |
|
488 | 488 | connection, or for offline use of the notebook. |
|
489 | 489 | |
|
490 | 490 | When disabled, equations etc. will appear as their untransformed TeX source. |
|
491 | 491 | """ |
|
492 | 492 | ) |
|
493 | 493 | def _enable_mathjax_changed(self, name, old, new): |
|
494 | 494 | """set mathjax url to empty if mathjax is disabled""" |
|
495 | 495 | if not new: |
|
496 | 496 | self.mathjax_url = u'' |
|
497 | 497 | |
|
498 | 498 | base_url = Unicode('/', config=True, |
|
499 | 499 | help='''The base URL for the notebook server. |
|
500 | 500 | |
|
501 | 501 | Leading and trailing slashes can be omitted, |
|
502 | 502 | and will automatically be added. |
|
503 | 503 | ''') |
|
504 | 504 | def _base_url_changed(self, name, old, new): |
|
505 | 505 | if not new.startswith('/'): |
|
506 | 506 | self.base_url = '/'+new |
|
507 | 507 | elif not new.endswith('/'): |
|
508 | 508 | self.base_url = new+'/' |
|
509 | 509 | |
|
510 | 510 | base_project_url = Unicode('/', config=True, help="""DEPRECATED use base_url""") |
|
511 | 511 | def _base_project_url_changed(self, name, old, new): |
|
512 | 512 | self.log.warn("base_project_url is deprecated, use base_url") |
|
513 | 513 | self.base_url = new |
|
514 | 514 | |
|
515 | 515 | extra_static_paths = List(Unicode, config=True, |
|
516 | 516 | help="""Extra paths to search for serving static files. |
|
517 | 517 | |
|
518 | 518 | This allows adding javascript/css to be available from the notebook server machine, |
|
519 | 519 | or overriding individual files in the IPython""" |
|
520 | 520 | ) |
|
521 | 521 | def _extra_static_paths_default(self): |
|
522 | 522 | return [os.path.join(self.profile_dir.location, 'static')] |
|
523 | 523 | |
|
524 | 524 | @property |
|
525 | 525 | def static_file_path(self): |
|
526 | 526 | """return extra paths + the default location""" |
|
527 | 527 | return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH] |
|
528 | 528 | |
|
529 | 529 | extra_template_paths = List(Unicode, config=True, |
|
530 | 530 | help="""Extra paths to search for serving jinja templates. |
|
531 | 531 | |
|
532 | 532 | Can be used to override templates from IPython.html.templates.""" |
|
533 | 533 | ) |
|
534 | 534 | def _extra_template_paths_default(self): |
|
535 | 535 | return [] |
|
536 | 536 | |
|
537 | 537 | @property |
|
538 | 538 | def template_file_path(self): |
|
539 | 539 | """return extra paths + the default locations""" |
|
540 | 540 | return self.extra_template_paths + DEFAULT_TEMPLATE_PATH_LIST |
|
541 | 541 | |
|
542 | 542 | nbextensions_path = List(Unicode, config=True, |
|
543 | 543 | help="""paths for Javascript extensions. By default, this is just IPYTHONDIR/nbextensions""" |
|
544 | 544 | ) |
|
545 | 545 | def _nbextensions_path_default(self): |
|
546 | 546 | return [os.path.join(get_ipython_dir(), 'nbextensions')] |
|
547 | 547 | |
|
548 | 548 | websocket_url = Unicode("", config=True, |
|
549 | 549 | help="""The base URL for websockets, |
|
550 | 550 | if it differs from the HTTP server (hint: it almost certainly doesn't). |
|
551 | 551 | |
|
552 | 552 | Should be in the form of an HTTP origin: ws[s]://hostname[:port] |
|
553 | 553 | """ |
|
554 | 554 | ) |
|
555 | 555 | mathjax_url = Unicode("", config=True, |
|
556 | 556 | help="""The url for MathJax.js.""" |
|
557 | 557 | ) |
|
558 | 558 | def _mathjax_url_default(self): |
|
559 | 559 | if not self.enable_mathjax: |
|
560 | 560 | return u'' |
|
561 | 561 | static_url_prefix = self.tornado_settings.get("static_url_prefix", |
|
562 | 562 | url_path_join(self.base_url, "static") |
|
563 | 563 | ) |
|
564 | 564 | |
|
565 | 565 | # try local mathjax, either in nbextensions/mathjax or static/mathjax |
|
566 | 566 | for (url_prefix, search_path) in [ |
|
567 | 567 | (url_path_join(self.base_url, "nbextensions"), self.nbextensions_path), |
|
568 | 568 | (static_url_prefix, self.static_file_path), |
|
569 | 569 | ]: |
|
570 | 570 | self.log.debug("searching for local mathjax in %s", search_path) |
|
571 | 571 | try: |
|
572 | 572 | mathjax = filefind(os.path.join('mathjax', 'MathJax.js'), search_path) |
|
573 | 573 | except IOError: |
|
574 | 574 | continue |
|
575 | 575 | else: |
|
576 | 576 | url = url_path_join(url_prefix, u"mathjax/MathJax.js") |
|
577 | 577 | self.log.info("Serving local MathJax from %s at %s", mathjax, url) |
|
578 | 578 | return url |
|
579 | 579 | |
|
580 | 580 | # no local mathjax, serve from CDN |
|
581 | 581 | url = u"https://cdn.mathjax.org/mathjax/latest/MathJax.js" |
|
582 | 582 | self.log.info("Using MathJax from CDN: %s", url) |
|
583 | 583 | return url |
|
584 | 584 | |
|
585 | 585 | def _mathjax_url_changed(self, name, old, new): |
|
586 | 586 | if new and not self.enable_mathjax: |
|
587 | 587 | # enable_mathjax=False overrides mathjax_url |
|
588 | 588 | self.mathjax_url = u'' |
|
589 | 589 | else: |
|
590 | 590 | self.log.info("Using MathJax: %s", new) |
|
591 | 591 | |
|
592 | 592 | contents_manager_class = DottedObjectName('IPython.html.services.contents.filemanager.FileContentsManager', |
|
593 | 593 | config=True, |
|
594 | 594 | help='The notebook manager class to use.' |
|
595 | 595 | ) |
|
596 | 596 | kernel_manager_class = DottedObjectName('IPython.html.services.kernels.kernelmanager.MappingKernelManager', |
|
597 | 597 | config=True, |
|
598 | 598 | help='The kernel manager class to use.' |
|
599 | 599 | ) |
|
600 | 600 | session_manager_class = DottedObjectName('IPython.html.services.sessions.sessionmanager.SessionManager', |
|
601 | 601 | config=True, |
|
602 | 602 | help='The session manager class to use.' |
|
603 | 603 | ) |
|
604 | 604 | cluster_manager_class = DottedObjectName('IPython.html.services.clusters.clustermanager.ClusterManager', |
|
605 | 605 | config=True, |
|
606 | 606 | help='The cluster manager class to use.' |
|
607 | 607 | ) |
|
608 | 608 | |
|
609 | 609 | kernel_spec_manager = Instance(KernelSpecManager) |
|
610 | 610 | |
|
611 | 611 | def _kernel_spec_manager_default(self): |
|
612 | 612 | return KernelSpecManager(ipython_dir=self.ipython_dir) |
|
613 | 613 | |
|
614 | 614 | trust_xheaders = Bool(False, config=True, |
|
615 | 615 | help=("Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers" |
|
616 | 616 | "sent by the upstream reverse proxy. Necessary if the proxy handles SSL") |
|
617 | 617 | ) |
|
618 | 618 | |
|
619 | 619 | info_file = Unicode() |
|
620 | 620 | |
|
621 | 621 | def _info_file_default(self): |
|
622 | 622 | info_file = "nbserver-%s.json"%os.getpid() |
|
623 | 623 | return os.path.join(self.profile_dir.security_dir, info_file) |
|
624 | 624 | |
|
625 | 625 | notebook_dir = Unicode(py3compat.getcwd(), config=True, |
|
626 | 626 | help="The directory to use for notebooks and kernels." |
|
627 | 627 | ) |
|
628 | 628 | |
|
629 | 629 | pylab = Unicode('disabled', config=True, |
|
630 | 630 | help=""" |
|
631 | 631 | DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. |
|
632 | 632 | """ |
|
633 | 633 | ) |
|
634 | 634 | def _pylab_changed(self, name, old, new): |
|
635 | 635 | """when --pylab is specified, display a warning and exit""" |
|
636 | 636 | if new != 'warn': |
|
637 | 637 | backend = ' %s' % new |
|
638 | 638 | else: |
|
639 | 639 | backend = '' |
|
640 | 640 | self.log.error("Support for specifying --pylab on the command line has been removed.") |
|
641 | 641 | self.log.error( |
|
642 | 642 | "Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself.".format(backend) |
|
643 | 643 | ) |
|
644 | 644 | self.exit(1) |
|
645 | 645 | |
|
646 | 646 | def _notebook_dir_changed(self, name, old, new): |
|
647 | 647 | """Do a bit of validation of the notebook dir.""" |
|
648 | 648 | if not os.path.isabs(new): |
|
649 | 649 | # If we receive a non-absolute path, make it absolute. |
|
650 | 650 | self.notebook_dir = os.path.abspath(new) |
|
651 | 651 | return |
|
652 | 652 | if not os.path.isdir(new): |
|
653 | 653 | raise TraitError("No such notebook dir: %r" % new) |
|
654 | 654 | |
|
655 | 655 | # setting App.notebook_dir implies setting notebook and kernel dirs as well |
|
656 | 656 | self.config.FileContentsManager.root_dir = new |
|
657 | 657 | self.config.MappingKernelManager.root_dir = new |
|
658 | 658 | |
|
659 | 659 | |
|
660 | 660 | def parse_command_line(self, argv=None): |
|
661 | 661 | super(NotebookApp, self).parse_command_line(argv) |
|
662 | 662 | |
|
663 | 663 | if self.extra_args: |
|
664 | 664 | arg0 = self.extra_args[0] |
|
665 | 665 | f = os.path.abspath(arg0) |
|
666 | 666 | self.argv.remove(arg0) |
|
667 | 667 | if not os.path.exists(f): |
|
668 | 668 | self.log.critical("No such file or directory: %s", f) |
|
669 | 669 | self.exit(1) |
|
670 | 670 | |
|
671 | 671 | # Use config here, to ensure that it takes higher priority than |
|
672 | 672 | # anything that comes from the profile. |
|
673 | 673 | c = Config() |
|
674 | 674 | if os.path.isdir(f): |
|
675 | 675 | c.NotebookApp.notebook_dir = f |
|
676 | 676 | elif os.path.isfile(f): |
|
677 | 677 | c.NotebookApp.file_to_run = f |
|
678 | 678 | self.update_config(c) |
|
679 | 679 | |
|
680 | 680 | def init_kernel_argv(self): |
|
681 | 681 | """construct the kernel arguments""" |
|
682 | 682 | # Kernel should get *absolute* path to profile directory |
|
683 | 683 | self.kernel_argv = ["--profile-dir", self.profile_dir.location] |
|
684 | 684 | |
|
685 | 685 | def init_configurables(self): |
|
686 | 686 | # force Session default to be secure |
|
687 | 687 | default_secure(self.config) |
|
688 | 688 | kls = import_item(self.kernel_manager_class) |
|
689 | 689 | self.kernel_manager = kls( |
|
690 | 690 | parent=self, log=self.log, kernel_argv=self.kernel_argv, |
|
691 | 691 | connection_dir = self.profile_dir.security_dir, |
|
692 | 692 | ) |
|
693 | 693 | kls = import_item(self.contents_manager_class) |
|
694 | 694 | self.contents_manager = kls(parent=self, log=self.log) |
|
695 | 695 | kls = import_item(self.session_manager_class) |
|
696 | 696 | self.session_manager = kls(parent=self, log=self.log, |
|
697 | 697 | kernel_manager=self.kernel_manager, |
|
698 | 698 | contents_manager=self.contents_manager) |
|
699 | 699 | kls = import_item(self.cluster_manager_class) |
|
700 | 700 | self.cluster_manager = kls(parent=self, log=self.log) |
|
701 | 701 | self.cluster_manager.update_profiles() |
|
702 | 702 | |
|
703 | 703 | def init_logging(self): |
|
704 | 704 | # This prevents double log messages because tornado uses a root logger that |
|
705 | 705 | # self.log is a child of. The logging module dispatches log messages to a log |
|
706 | 706 | # and all of its ancestors until propagate is set to False. |
|
707 | 707 | self.log.propagate = False |
|
708 | 708 | |
|
709 | 709 | for log in app_log, access_log, gen_log: |
|
710 | 710 | # consistent log output name (NotebookApp instead of tornado.access, etc.) |
|
711 | 711 | log.name = self.log.name |
|
712 | 712 | # hook up tornado 3's loggers to our app handlers |
|
713 | 713 | logger = logging.getLogger('tornado') |
|
714 | 714 | logger.propagate = True |
|
715 | 715 | logger.parent = self.log |
|
716 | 716 | logger.setLevel(self.log.level) |
|
717 | 717 | |
|
718 | 718 | def init_webapp(self): |
|
719 | 719 | """initialize tornado webapp and httpserver""" |
|
720 | 720 | self.tornado_settings['allow_origin'] = self.allow_origin |
|
721 | 721 | if self.allow_origin_pat: |
|
722 | 722 | self.tornado_settings['allow_origin_pat'] = re.compile(self.allow_origin_pat) |
|
723 | 723 | self.tornado_settings['allow_credentials'] = self.allow_credentials |
|
724 | 724 | |
|
725 | 725 | self.web_app = NotebookWebApplication( |
|
726 | 726 | self, self.kernel_manager, self.contents_manager, |
|
727 | 727 | self.cluster_manager, self.session_manager, self.kernel_spec_manager, |
|
728 | 728 | self.log, self.base_url, self.default_url, self.tornado_settings, |
|
729 | 729 | self.jinja_environment_options |
|
730 | 730 | ) |
|
731 | 731 | if self.certfile: |
|
732 | 732 | ssl_options = dict(certfile=self.certfile) |
|
733 | 733 | if self.keyfile: |
|
734 | 734 | ssl_options['keyfile'] = self.keyfile |
|
735 | 735 | else: |
|
736 | 736 | ssl_options = None |
|
737 | 737 | self.web_app.password = self.password |
|
738 | 738 | self.http_server = httpserver.HTTPServer(self.web_app, ssl_options=ssl_options, |
|
739 | 739 | xheaders=self.trust_xheaders) |
|
740 | 740 | if not self.ip: |
|
741 | 741 | warning = "WARNING: The notebook server is listening on all IP addresses" |
|
742 | 742 | if ssl_options is None: |
|
743 | 743 | self.log.critical(warning + " and not using encryption. This " |
|
744 | 744 | "is not recommended.") |
|
745 | 745 | if not self.password: |
|
746 | 746 | self.log.critical(warning + " and not using authentication. " |
|
747 | 747 | "This is highly insecure and not recommended.") |
|
748 | 748 | success = None |
|
749 | 749 | for port in random_ports(self.port, self.port_retries+1): |
|
750 | 750 | try: |
|
751 | 751 | self.http_server.listen(port, self.ip) |
|
752 | 752 | except socket.error as e: |
|
753 | 753 | if e.errno == errno.EADDRINUSE: |
|
754 | 754 | self.log.info('The port %i is already in use, trying another random port.' % port) |
|
755 | 755 | continue |
|
756 | 756 | elif e.errno in (errno.EACCES, getattr(errno, 'WSAEACCES', errno.EACCES)): |
|
757 | 757 | self.log.warn("Permission to listen on port %i denied" % port) |
|
758 | 758 | continue |
|
759 | 759 | else: |
|
760 | 760 | raise |
|
761 | 761 | else: |
|
762 | 762 | self.port = port |
|
763 | 763 | success = True |
|
764 | 764 | break |
|
765 | 765 | if not success: |
|
766 | 766 | self.log.critical('ERROR: the notebook server could not be started because ' |
|
767 | 767 | 'no available port could be found.') |
|
768 | 768 | self.exit(1) |
|
769 | 769 | |
|
770 | 770 | @property |
|
771 | 771 | def display_url(self): |
|
772 | 772 | ip = self.ip if self.ip else '[all ip addresses on your system]' |
|
773 | 773 | return self._url(ip) |
|
774 | 774 | |
|
775 | 775 | @property |
|
776 | 776 | def connection_url(self): |
|
777 | 777 | ip = self.ip if self.ip else 'localhost' |
|
778 | 778 | return self._url(ip) |
|
779 | 779 | |
|
780 | 780 | def _url(self, ip): |
|
781 | 781 | proto = 'https' if self.certfile else 'http' |
|
782 | 782 | return "%s://%s:%i%s" % (proto, ip, self.port, self.base_url) |
|
783 | 783 | |
|
784 | 784 | def init_terminals(self): |
|
785 | 785 | try: |
|
786 | 786 | from .terminal import initialize |
|
787 | 787 | initialize(self.web_app) |
|
788 | 788 | self.web_app.settings['terminals_available'] = True |
|
789 | 789 | except ImportError as e: |
|
790 | 790 | self.log.info("Terminals not available (error was %s)", e) |
|
791 | 791 | |
|
792 | 792 | def init_signal(self): |
|
793 | 793 | if not sys.platform.startswith('win'): |
|
794 | 794 | signal.signal(signal.SIGINT, self._handle_sigint) |
|
795 | 795 | signal.signal(signal.SIGTERM, self._signal_stop) |
|
796 | 796 | if hasattr(signal, 'SIGUSR1'): |
|
797 | 797 | # Windows doesn't support SIGUSR1 |
|
798 | 798 | signal.signal(signal.SIGUSR1, self._signal_info) |
|
799 | 799 | if hasattr(signal, 'SIGINFO'): |
|
800 | 800 | # only on BSD-based systems |
|
801 | 801 | signal.signal(signal.SIGINFO, self._signal_info) |
|
802 | 802 | |
|
803 | 803 | def _handle_sigint(self, sig, frame): |
|
804 | 804 | """SIGINT handler spawns confirmation dialog""" |
|
805 | 805 | # register more forceful signal handler for ^C^C case |
|
806 | 806 | signal.signal(signal.SIGINT, self._signal_stop) |
|
807 | 807 | # request confirmation dialog in bg thread, to avoid |
|
808 | 808 | # blocking the App |
|
809 | 809 | thread = threading.Thread(target=self._confirm_exit) |
|
810 | 810 | thread.daemon = True |
|
811 | 811 | thread.start() |
|
812 | 812 | |
|
813 | 813 | def _restore_sigint_handler(self): |
|
814 | 814 | """callback for restoring original SIGINT handler""" |
|
815 | 815 | signal.signal(signal.SIGINT, self._handle_sigint) |
|
816 | 816 | |
|
817 | 817 | def _confirm_exit(self): |
|
818 | 818 | """confirm shutdown on ^C |
|
819 | 819 | |
|
820 | 820 | A second ^C, or answering 'y' within 5s will cause shutdown, |
|
821 | 821 | otherwise original SIGINT handler will be restored. |
|
822 | 822 | |
|
823 | 823 | This doesn't work on Windows. |
|
824 | 824 | """ |
|
825 | 825 | info = self.log.info |
|
826 | 826 | info('interrupted') |
|
827 | 827 | print(self.notebook_info()) |
|
828 | 828 | sys.stdout.write("Shutdown this notebook server (y/[n])? ") |
|
829 | 829 | sys.stdout.flush() |
|
830 | 830 | r,w,x = select.select([sys.stdin], [], [], 5) |
|
831 | 831 | if r: |
|
832 | 832 | line = sys.stdin.readline() |
|
833 | 833 | if line.lower().startswith('y') and 'n' not in line.lower(): |
|
834 | 834 | self.log.critical("Shutdown confirmed") |
|
835 | 835 | ioloop.IOLoop.instance().stop() |
|
836 | 836 | return |
|
837 | 837 | else: |
|
838 | 838 | print("No answer for 5s:", end=' ') |
|
839 | 839 | print("resuming operation...") |
|
840 | 840 | # no answer, or answer is no: |
|
841 | 841 | # set it back to original SIGINT handler |
|
842 | 842 | # use IOLoop.add_callback because signal.signal must be called |
|
843 | 843 | # from main thread |
|
844 | 844 | ioloop.IOLoop.instance().add_callback(self._restore_sigint_handler) |
|
845 | 845 | |
|
846 | 846 | def _signal_stop(self, sig, frame): |
|
847 | 847 | self.log.critical("received signal %s, stopping", sig) |
|
848 | 848 | ioloop.IOLoop.instance().stop() |
|
849 | 849 | |
|
850 | 850 | def _signal_info(self, sig, frame): |
|
851 | 851 | print(self.notebook_info()) |
|
852 | 852 | |
|
853 | 853 | def init_components(self): |
|
854 | 854 | """Check the components submodule, and warn if it's unclean""" |
|
855 | 855 | status = submodule.check_submodule_status() |
|
856 | 856 | if status == 'missing': |
|
857 | 857 | self.log.warn("components submodule missing, running `git submodule update`") |
|
858 | 858 | submodule.update_submodules(submodule.ipython_parent()) |
|
859 | 859 | elif status == 'unclean': |
|
860 | 860 | self.log.warn("components submodule unclean, you may see 404s on static/components") |
|
861 | 861 | self.log.warn("run `setup.py submodule` or `git submodule update` to update") |
|
862 | 862 | |
|
863 | 863 | @catch_config_error |
|
864 | 864 | def initialize(self, argv=None): |
|
865 | 865 | super(NotebookApp, self).initialize(argv) |
|
866 | 866 | self.init_logging() |
|
867 | 867 | self.init_kernel_argv() |
|
868 | 868 | self.init_configurables() |
|
869 | 869 | self.init_components() |
|
870 | 870 | self.init_webapp() |
|
871 | 871 | self.init_terminals() |
|
872 | 872 | self.init_signal() |
|
873 | 873 | |
|
874 | 874 | def cleanup_kernels(self): |
|
875 | 875 | """Shutdown all kernels. |
|
876 | 876 | |
|
877 | 877 | The kernels will shutdown themselves when this process no longer exists, |
|
878 | 878 | but explicit shutdown allows the KernelManagers to cleanup the connection files. |
|
879 | 879 | """ |
|
880 | 880 | self.log.info('Shutting down kernels') |
|
881 | 881 | self.kernel_manager.shutdown_all() |
|
882 | 882 | |
|
883 | 883 | def notebook_info(self): |
|
884 | 884 | "Return the current working directory and the server url information" |
|
885 | 885 | info = self.contents_manager.info_string() + "\n" |
|
886 | 886 | info += "%d active kernels \n" % len(self.kernel_manager._kernels) |
|
887 | 887 | return info + "The IPython Notebook is running at: %s" % self.display_url |
|
888 | 888 | |
|
889 | 889 | def server_info(self): |
|
890 | 890 | """Return a JSONable dict of information about this server.""" |
|
891 | 891 | return {'url': self.connection_url, |
|
892 | 892 | 'hostname': self.ip if self.ip else 'localhost', |
|
893 | 893 | 'port': self.port, |
|
894 | 894 | 'secure': bool(self.certfile), |
|
895 | 895 | 'base_url': self.base_url, |
|
896 | 896 | 'notebook_dir': os.path.abspath(self.notebook_dir), |
|
897 | 897 | 'pid': os.getpid() |
|
898 | 898 | } |
|
899 | 899 | |
|
900 | 900 | def write_server_info_file(self): |
|
901 | 901 | """Write the result of server_info() to the JSON file info_file.""" |
|
902 | 902 | with open(self.info_file, 'w') as f: |
|
903 | 903 | json.dump(self.server_info(), f, indent=2) |
|
904 | 904 | |
|
905 | 905 | def remove_server_info_file(self): |
|
906 | 906 | """Remove the nbserver-<pid>.json file created for this server. |
|
907 | 907 | |
|
908 | 908 | Ignores the error raised when the file has already been removed. |
|
909 | 909 | """ |
|
910 | 910 | try: |
|
911 | 911 | os.unlink(self.info_file) |
|
912 | 912 | except OSError as e: |
|
913 | 913 | if e.errno != errno.ENOENT: |
|
914 | 914 | raise |
|
915 | 915 | |
|
916 | 916 | def start(self): |
|
917 | 917 | """ Start the IPython Notebook server app, after initialization |
|
918 | 918 | |
|
919 | 919 | This method takes no arguments so all configuration and initialization |
|
920 | 920 | must be done prior to calling this method.""" |
|
921 | 921 | if self.subapp is not None: |
|
922 | 922 | return self.subapp.start() |
|
923 | 923 | |
|
924 | 924 | info = self.log.info |
|
925 | 925 | for line in self.notebook_info().split("\n"): |
|
926 | 926 | info(line) |
|
927 | 927 | info("Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).") |
|
928 | 928 | |
|
929 | 929 | self.write_server_info_file() |
|
930 | 930 | |
|
931 | 931 | if self.open_browser or self.file_to_run: |
|
932 | 932 | try: |
|
933 | 933 | browser = webbrowser.get(self.browser or None) |
|
934 | 934 | except webbrowser.Error as e: |
|
935 | 935 | self.log.warn('No web browser found: %s.' % e) |
|
936 | 936 | browser = None |
|
937 | 937 | |
|
938 | 938 | if self.file_to_run: |
|
939 | 939 | fullpath = os.path.join(self.notebook_dir, self.file_to_run) |
|
940 | 940 | if not os.path.exists(fullpath): |
|
941 | 941 | self.log.critical("%s does not exist" % fullpath) |
|
942 | 942 | self.exit(1) |
|
943 | 943 | |
|
944 | 944 | uri = url_path_join('notebooks', self.file_to_run) |
|
945 | 945 | else: |
|
946 | 946 | uri = 'tree' |
|
947 | 947 | if browser: |
|
948 | 948 | b = lambda : browser.open(url_path_join(self.connection_url, uri), |
|
949 | 949 | new=2) |
|
950 | 950 | threading.Thread(target=b).start() |
|
951 | 951 | try: |
|
952 | 952 | ioloop.IOLoop.instance().start() |
|
953 | 953 | except KeyboardInterrupt: |
|
954 | 954 | info("Interrupted...") |
|
955 | 955 | finally: |
|
956 | 956 | self.cleanup_kernels() |
|
957 | 957 | self.remove_server_info_file() |
|
958 | 958 | |
|
959 | 959 | |
|
960 | 960 | def list_running_servers(profile='default'): |
|
961 | 961 | """Iterate over the server info files of running notebook servers. |
|
962 | 962 | |
|
963 | 963 | Given a profile name, find nbserver-* files in the security directory of |
|
964 | 964 | that profile, and yield dicts of their information, each one pertaining to |
|
965 | 965 | a currently running notebook server instance. |
|
966 | 966 | """ |
|
967 | 967 | pd = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), name=profile) |
|
968 | 968 | for file in os.listdir(pd.security_dir): |
|
969 | 969 | if file.startswith('nbserver-'): |
|
970 | 970 | with io.open(os.path.join(pd.security_dir, file), encoding='utf-8') as f: |
|
971 | 971 | info = json.load(f) |
|
972 | 972 | |
|
973 | 973 | # Simple check whether that process is really still running |
|
974 | 974 | if check_pid(info['pid']): |
|
975 | 975 | yield info |
|
976 | 976 | else: |
|
977 | 977 | # If the process has died, try to delete its info file |
|
978 | 978 | try: |
|
979 | 979 | os.unlink(os.path.join(pd.security_dir, file)) |
|
980 | 980 | except OSError: |
|
981 | 981 | pass # TODO: This should warn or log or something |
|
982 | 982 | #----------------------------------------------------------------------------- |
|
983 | 983 | # Main entry point |
|
984 | 984 | #----------------------------------------------------------------------------- |
|
985 | 985 | |
|
986 | 986 | launch_new_instance = NotebookApp.launch_instance |
|
987 | 987 |
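The server_info() / write_server_info_file() pair above and list_running_servers() are two halves of one mechanism: the running app writes an nbserver-&lt;pid&gt;.json file into the profile's security directory, and external code reads those files back to discover live servers. A minimal consumer sketch, not part of the diff, assuming this module is importable as IPython.html.notebookapp and a 'default' profile exists:

# Sketch only: enumerate running notebook servers using the info files
# written by write_server_info_file().
from IPython.html.notebookapp import list_running_servers

for info in list_running_servers(profile='default'):
    # each dict mirrors server_info(): url, hostname, port, secure,
    # base_url, notebook_dir, pid
    print("%(url)s (pid %(pid)s) serving %(notebook_dir)s" % info)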
@@ -1,48 +1,35 | |||
|
1 | 1 | #encoding: utf-8 |
|
2 | 2 | """Tornado handlers for the terminal emulator.""" |
|
3 | 3 | |
|
4 | 4 | # Copyright (c) IPython Development Team. |
|
5 | 5 | # Distributed under the terms of the Modified BSD License. |
|
6 | 6 | |
|
7 | 7 | import tornado |
|
8 | 8 | from tornado import web |
|
9 | 9 | import terminado |
|
10 | 10 | from ..base.handlers import IPythonHandler |
|
11 | 11 | |
|
12 | 12 | class TerminalHandler(IPythonHandler): |
|
13 | 13 | """Render the terminal interface.""" |
|
14 | 14 | @web.authenticated |
|
15 | 15 | def get(self, term_name): |
|
16 | 16 | self.write(self.render_template('terminal.html', |
|
17 | 17 | ws_path="terminals/websocket/%s" % term_name)) |
|
18 | 18 | |
|
19 | 19 | class NewTerminalHandler(IPythonHandler): |
|
20 | 20 | """Redirect to a new terminal.""" |
|
21 | 21 | @web.authenticated |
|
22 | 22 | def get(self): |
|
23 | 23 | name, _ = self.application.terminal_manager.new_named_terminal() |
|
24 | 24 | self.redirect(name, permanent=False) |
|
25 | 25 | |
|
26 | 26 | class TermSocket(terminado.TermSocket, IPythonHandler): |
|
27 | 27 | def get(self, *args, **kwargs): |
|
28 | 28 | if not self.get_current_user(): |
|
29 | 29 | raise web.HTTPError(403) |
|
30 | ||
|
31 | # FIXME: only do super get on tornado ≥ 4 | 
|
32 | # tornado 3 has no get, will raise 405 | |
|
33 | if tornado.version_info >= (4,): | |
|
34 | return super(TermSocket, self).get(*args, **kwargs) | |
|
30 | return super(TermSocket, self).get(*args, **kwargs) | |
|
35 | 31 | |
|
36 | 32 | def clear_cookie(self, *args, **kwargs): |
|
37 | 33 | """meaningless for websockets""" |
|
38 | 34 | pass |
|
39 | 35 | |
|
40 | def open(self, *args, **kwargs): | |
|
41 | if tornado.version_info < (4,): | |
|
42 | try: | |
|
43 | self.get(*self.open_args, **self.open_kwargs) | |
|
44 | except web.HTTPError: | |
|
45 | self.close() | |
|
46 | raise | |
|
47 | ||
|
48 | super(TermSocket, self).open(*args, **kwargs) |
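This change drops the tornado 3 fallback: with tornado >= 4 required, the authenticated get() runs before the websocket upgrade, so the version check and the open()-time re-check are no longer needed. For orientation, a rough sketch of how these handlers could be mounted on the notebook's Tornado application; the URL patterns and the shell command below are illustrative assumptions, not taken from this changeset:

# Illustrative sketch only: wiring the terminal handlers into URL routes.
import terminado

term_manager = terminado.NamedTermManager(shell_command=['bash'])  # assumed shell
handlers = [
    (r"/terminals/new", NewTerminalHandler),
    (r"/terminals/(\w+)", TerminalHandler),
    # matches the ws_path rendered into terminal.html above
    (r"/terminals/websocket/(\w+)", TermSocket, {'term_manager': term_manager}),
]
# `handlers` would then be added to the notebook web application's routes.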
@@ -1,519 +1,519 | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """IPython Test Suite Runner. |
|
3 | 3 | |
|
4 | 4 | This module provides a main entry point to a user script to test IPython |
|
5 | 5 | itself from the command line. There are two ways of running this script: |
|
6 | 6 | |
|
7 | 7 | 1. With the syntax `iptest all`. This runs our entire test suite by |
|
8 | 8 | calling this script (with different arguments) recursively. This |
|
9 | 9 | causes modules and packages to be tested in different processes, using nose |
|
10 | 10 | or trial where appropriate. |
|
11 | 11 | 2. With the regular nose syntax, like `iptest -vvs IPython`. In this form |
|
12 | 12 | the script simply calls nose, but with special command line flags and |
|
13 | 13 | plugins loaded. |
|
14 | 14 | |
|
15 | 15 | """ |
|
16 | 16 | |
|
17 | 17 | # Copyright (c) IPython Development Team. |
|
18 | 18 | # Distributed under the terms of the Modified BSD License. |
|
19 | 19 | |
|
20 | 20 | from __future__ import print_function |
|
21 | 21 | |
|
22 | 22 | import glob |
|
23 | 23 | from io import BytesIO |
|
24 | 24 | import os |
|
25 | 25 | import os.path as path |
|
26 | 26 | import sys |
|
27 | 27 | from threading import Thread, Lock, Event |
|
28 | 28 | import warnings |
|
29 | 29 | |
|
30 | 30 | import nose.plugins.builtin |
|
31 | 31 | from nose.plugins.xunit import Xunit |
|
32 | 32 | from nose import SkipTest |
|
33 | 33 | from nose.core import TestProgram |
|
34 | 34 | from nose.plugins import Plugin |
|
35 | 35 | from nose.util import safe_str |
|
36 | 36 | |
|
37 | 37 | from IPython.utils.process import is_cmd_found |
|
38 | 38 | from IPython.utils.py3compat import bytes_to_str |
|
39 | 39 | from IPython.utils.importstring import import_item |
|
40 | 40 | from IPython.testing.plugin.ipdoctest import IPythonDoctest |
|
41 | 41 | from IPython.external.decorators import KnownFailure, knownfailureif |
|
42 | 42 | |
|
43 | 43 | pjoin = path.join |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | #----------------------------------------------------------------------------- |
|
47 | 47 | # Globals |
|
48 | 48 | #----------------------------------------------------------------------------- |
|
49 | 49 | |
|
50 | 50 | |
|
51 | 51 | #----------------------------------------------------------------------------- |
|
52 | 52 | # Warnings control |
|
53 | 53 | #----------------------------------------------------------------------------- |
|
54 | 54 | |
|
55 | 55 | # Twisted generates annoying warnings with Python 2.6, as does other code |
|
56 | 56 | # that imports 'sets' as of today |
|
57 | 57 | warnings.filterwarnings('ignore', 'the sets module is deprecated', |
|
58 | 58 | DeprecationWarning ) |
|
59 | 59 | |
|
60 | 60 | # This one also comes from Twisted |
|
61 | 61 | warnings.filterwarnings('ignore', 'the sha module is deprecated', |
|
62 | 62 | DeprecationWarning) |
|
63 | 63 | |
|
64 | 64 | # Wx on Fedora11 spits these out |
|
65 | 65 | warnings.filterwarnings('ignore', 'wxPython/wxWidgets release number mismatch', |
|
66 | 66 | UserWarning) |
|
67 | 67 | |
|
68 | 68 | # ------------------------------------------------------------------------------ |
|
69 | 69 | # Monkeypatch Xunit to count known failures as skipped. |
|
70 | 70 | # ------------------------------------------------------------------------------ |
|
71 | 71 | def monkeypatch_xunit(): |
|
72 | 72 | try: |
|
73 | 73 | knownfailureif(True)(lambda: None)() |
|
74 | 74 | except Exception as e: |
|
75 | 75 | KnownFailureTest = type(e) |
|
76 | 76 | |
|
77 | 77 | def addError(self, test, err, capt=None): |
|
78 | 78 | if issubclass(err[0], KnownFailureTest): |
|
79 | 79 | err = (SkipTest,) + err[1:] |
|
80 | 80 | return self.orig_addError(test, err, capt) |
|
81 | 81 | |
|
82 | 82 | Xunit.orig_addError = Xunit.addError |
|
83 | 83 | Xunit.addError = addError |
|
84 | 84 | |
|
85 | 85 | #----------------------------------------------------------------------------- |
|
86 | 86 | # Check which dependencies are installed and greater than minimum version. |
|
87 | 87 | #----------------------------------------------------------------------------- |
|
88 | 88 | def extract_version(mod): |
|
89 | 89 | return mod.__version__ |
|
90 | 90 | |
|
91 | 91 | def test_for(item, min_version=None, callback=extract_version): |
|
92 | 92 | """Test to see if item is importable, and optionally check against a minimum |
|
93 | 93 | version. |
|
94 | 94 | |
|
95 | 95 | If min_version is given, the default behavior is to check against the |
|
96 | 96 | `__version__` attribute of the item, but specifying `callback` allows you to |
|
97 | 97 | extract the value you are interested in. e.g:: |
|
98 | 98 | |
|
99 | 99 | In [1]: import sys |
|
100 | 100 | |
|
101 | 101 | In [2]: from IPython.testing.iptest import test_for |
|
102 | 102 | |
|
103 | 103 | In [3]: test_for('sys', (2,6), callback=lambda sys: sys.version_info) |
|
104 | 104 | Out[3]: True |
|
105 | 105 | |
|
106 | 106 | """ |
|
107 | 107 | try: |
|
108 | 108 | check = import_item(item) |
|
109 | 109 | except (ImportError, RuntimeError): |
|
110 | 110 | # GTK reports Runtime error if it can't be initialized even if it's |
|
111 | 111 | # importable. |
|
112 | 112 | return False |
|
113 | 113 | else: |
|
114 | 114 | if min_version: |
|
115 | 115 | if callback: |
|
116 | 116 | # extra processing step to get version to compare |
|
117 | 117 | check = callback(check) |
|
118 | 118 | |
|
119 | 119 | return check >= min_version |
|
120 | 120 | else: |
|
121 | 121 | return True |
|
122 | 122 | |
|
123 | 123 | # Global dict where we can store information on what we have and what we don't |
|
124 | 124 | # have available at test run time |
|
125 | 125 | have = {} |
|
126 | 126 | |
|
127 | 127 | have['curses'] = test_for('_curses') |
|
128 | 128 | have['matplotlib'] = test_for('matplotlib') |
|
129 | 129 | have['numpy'] = test_for('numpy') |
|
130 | 130 | have['pexpect'] = test_for('IPython.external.pexpect') |
|
131 | 131 | have['pymongo'] = test_for('pymongo') |
|
132 | 132 | have['pygments'] = test_for('pygments') |
|
133 | 133 | have['qt'] = test_for('IPython.external.qt') |
|
134 | 134 | have['sqlite3'] = test_for('sqlite3') |
|
135 | have['tornado'] = test_for('tornado.version_info', ( | 
|
135 | have['tornado'] = test_for('tornado.version_info', (4,0), callback=None) | |
|
136 | 136 | have['jinja2'] = test_for('jinja2') |
|
137 | 137 | have['mistune'] = test_for('mistune') |
|
138 | 138 | have['requests'] = test_for('requests') |
|
139 | 139 | have['sphinx'] = test_for('sphinx') |
|
140 | 140 | have['jsonschema'] = test_for('jsonschema') |
|
141 | 141 | have['terminado'] = test_for('terminado') |
|
142 | 142 | have['casperjs'] = is_cmd_found('casperjs') |
|
143 | 143 | have['phantomjs'] = is_cmd_found('phantomjs') |
|
144 | 144 | have['slimerjs'] = is_cmd_found('slimerjs') |
|
145 | 145 | |
|
146 | 146 | min_zmq = (2,1,11) |
|
147 | 147 | |
|
148 | 148 | have['zmq'] = test_for('zmq.pyzmq_version_info', min_zmq, callback=lambda x: x()) |
|
149 | 149 | |
|
150 | 150 | #----------------------------------------------------------------------------- |
|
151 | 151 | # Test suite definitions |
|
152 | 152 | #----------------------------------------------------------------------------- |
|
153 | 153 | |
|
154 | 154 | test_group_names = ['parallel', 'kernel', 'kernel.inprocess', 'config', 'core', |
|
155 | 155 | 'extensions', 'lib', 'terminal', 'testing', 'utils', |
|
156 | 156 | 'nbformat', 'qt', 'html', 'nbconvert' |
|
157 | 157 | ] |
|
158 | 158 | |
|
159 | 159 | class TestSection(object): |
|
160 | 160 | def __init__(self, name, includes): |
|
161 | 161 | self.name = name |
|
162 | 162 | self.includes = includes |
|
163 | 163 | self.excludes = [] |
|
164 | 164 | self.dependencies = [] |
|
165 | 165 | self.enabled = True |
|
166 | 166 | |
|
167 | 167 | def exclude(self, module): |
|
168 | 168 | if not module.startswith('IPython'): |
|
169 | 169 | module = self.includes[0] + "." + module |
|
170 | 170 | self.excludes.append(module.replace('.', os.sep)) |
|
171 | 171 | |
|
172 | 172 | def requires(self, *packages): |
|
173 | 173 | self.dependencies.extend(packages) |
|
174 | 174 | |
|
175 | 175 | @property |
|
176 | 176 | def will_run(self): |
|
177 | 177 | return self.enabled and all(have[p] for p in self.dependencies) |
|
178 | 178 | |
|
179 | 179 | # Name -> (include, exclude, dependencies_met) |
|
180 | 180 | test_sections = {n:TestSection(n, ['IPython.%s' % n]) for n in test_group_names} |
|
181 | 181 | |
|
182 | 182 | # Exclusions and dependencies |
|
183 | 183 | # --------------------------- |
|
184 | 184 | |
|
185 | 185 | # core: |
|
186 | 186 | sec = test_sections['core'] |
|
187 | 187 | if not have['sqlite3']: |
|
188 | 188 | sec.exclude('tests.test_history') |
|
189 | 189 | sec.exclude('history') |
|
190 | 190 | if not have['matplotlib']: |
|
191 | 191 | sec.exclude('pylabtools') |
|
192 | 192 | sec.exclude('tests.test_pylabtools') |
|
193 | 193 | |
|
194 | 194 | # lib: |
|
195 | 195 | sec = test_sections['lib'] |
|
196 | 196 | if not have['zmq']: |
|
197 | 197 | sec.exclude('kernel') |
|
198 | 198 | # We do this unconditionally, so that the test suite doesn't import |
|
199 | 199 | # gtk, changing the default encoding and masking some unicode bugs. |
|
200 | 200 | sec.exclude('inputhookgtk') |
|
201 | 201 | # We also do this unconditionally, because wx can interfere with Unix signals. |
|
202 | 202 | # There are currently no tests for it anyway. |
|
203 | 203 | sec.exclude('inputhookwx') |
|
204 | 204 | # Testing inputhook will need a lot of thought, to figure out |
|
205 | 205 | # how to have tests that don't lock up with the gui event |
|
206 | 206 | # loops in the picture |
|
207 | 207 | sec.exclude('inputhook') |
|
208 | 208 | |
|
209 | 209 | # testing: |
|
210 | 210 | sec = test_sections['testing'] |
|
211 | 211 | # These have to be skipped on win32 because they use echo, rm, cd, etc. |
|
212 | 212 | # See ticket https://github.com/ipython/ipython/issues/87 |
|
213 | 213 | if sys.platform == 'win32': |
|
214 | 214 | sec.exclude('plugin.test_exampleip') |
|
215 | 215 | sec.exclude('plugin.dtexample') |
|
216 | 216 | |
|
217 | 217 | # terminal: |
|
218 | 218 | if (not have['pexpect']) or (not have['zmq']): |
|
219 | 219 | test_sections['terminal'].exclude('console') |
|
220 | 220 | |
|
221 | 221 | # parallel |
|
222 | 222 | sec = test_sections['parallel'] |
|
223 | 223 | sec.requires('zmq') |
|
224 | 224 | if not have['pymongo']: |
|
225 | 225 | sec.exclude('controller.mongodb') |
|
226 | 226 | sec.exclude('tests.test_mongodb') |
|
227 | 227 | |
|
228 | 228 | # kernel: |
|
229 | 229 | sec = test_sections['kernel'] |
|
230 | 230 | sec.requires('zmq') |
|
231 | 231 | # The in-process kernel tests are done in a separate section |
|
232 | 232 | sec.exclude('inprocess') |
|
233 | 233 | # importing gtk sets the default encoding, which we want to avoid |
|
234 | 234 | sec.exclude('zmq.gui.gtkembed') |
|
235 | 235 | sec.exclude('zmq.gui.gtk3embed') |
|
236 | 236 | if not have['matplotlib']: |
|
237 | 237 | sec.exclude('zmq.pylab') |
|
238 | 238 | |
|
239 | 239 | # kernel.inprocess: |
|
240 | 240 | test_sections['kernel.inprocess'].requires('zmq') |
|
241 | 241 | |
|
242 | 242 | # extensions: |
|
243 | 243 | sec = test_sections['extensions'] |
|
244 | 244 | # This is deprecated in favour of rpy2 |
|
245 | 245 | sec.exclude('rmagic') |
|
246 | 246 | # autoreload does some strange stuff, so move it to its own test section |
|
247 | 247 | sec.exclude('autoreload') |
|
248 | 248 | sec.exclude('tests.test_autoreload') |
|
249 | 249 | test_sections['autoreload'] = TestSection('autoreload', |
|
250 | 250 | ['IPython.extensions.autoreload', 'IPython.extensions.tests.test_autoreload']) |
|
251 | 251 | test_group_names.append('autoreload') |
|
252 | 252 | |
|
253 | 253 | # qt: |
|
254 | 254 | test_sections['qt'].requires('zmq', 'qt', 'pygments') |
|
255 | 255 | |
|
256 | 256 | # html: |
|
257 | 257 | sec = test_sections['html'] |
|
258 | 258 | sec.requires('zmq', 'tornado', 'requests', 'sqlite3', 'jsonschema') |
|
259 | 259 | # The notebook 'static' directory contains JS, css and other |
|
260 | 260 | # files for web serving. Occasionally projects may put a .py |
|
261 | 261 | # file in there (MathJax ships a conf.py), so we might as |
|
262 | 262 | # well play it safe and skip the whole thing. |
|
263 | 263 | sec.exclude('static') |
|
264 | 264 | sec.exclude('tasks') |
|
265 | 265 | if not have['jinja2']: |
|
266 | 266 | sec.exclude('notebookapp') |
|
267 | 267 | if not have['pygments'] or not have['jinja2']: |
|
268 | 268 | sec.exclude('nbconvert') |
|
269 | 269 | if not have['terminado']: |
|
270 | 270 | sec.exclude('terminal') |
|
271 | 271 | |
|
272 | 272 | # config: |
|
273 | 273 | # Config files aren't really importable stand-alone |
|
274 | 274 | test_sections['config'].exclude('profile') |
|
275 | 275 | |
|
276 | 276 | # nbconvert: |
|
277 | 277 | sec = test_sections['nbconvert'] |
|
278 | 278 | sec.requires('pygments', 'jinja2', 'jsonschema', 'mistune') |
|
279 | 279 | # Exclude nbconvert directories containing config files used to test. |
|
280 | 280 | # Executing the config files with iptest would cause an exception. |
|
281 | 281 | sec.exclude('tests.files') |
|
282 | 282 | sec.exclude('exporters.tests.files') |
|
283 | 283 | if not have['tornado']: |
|
284 | 284 | sec.exclude('nbconvert.post_processors.serve') |
|
285 | 285 | sec.exclude('nbconvert.post_processors.tests.test_serve') |
|
286 | 286 | |
|
287 | 287 | # nbformat: |
|
288 | 288 | test_sections['nbformat'].requires('jsonschema') |
|
289 | 289 | |
|
290 | 290 | #----------------------------------------------------------------------------- |
|
291 | 291 | # Functions and classes |
|
292 | 292 | #----------------------------------------------------------------------------- |
|
293 | 293 | |
|
294 | 294 | def check_exclusions_exist(): |
|
295 | 295 | from IPython.utils.path import get_ipython_package_dir |
|
296 | 296 | from IPython.utils.warn import warn |
|
297 | 297 | parent = os.path.dirname(get_ipython_package_dir()) |
|
298 | 298 | for sec in test_sections: |
|
299 | 299 | for pattern in sec.exclusions: |
|
300 | 300 | fullpath = pjoin(parent, pattern) |
|
301 | 301 | if not os.path.exists(fullpath) and not glob.glob(fullpath + '.*'): |
|
302 | 302 | warn("Excluding nonexistent file: %r" % pattern) |
|
303 | 303 | |
|
304 | 304 | |
|
305 | 305 | class ExclusionPlugin(Plugin): |
|
306 | 306 | """A nose plugin to effect our exclusions of files and directories. |
|
307 | 307 | """ |
|
308 | 308 | name = 'exclusions' |
|
309 | 309 | score = 3000 # Should come before any other plugins |
|
310 | 310 | |
|
311 | 311 | def __init__(self, exclude_patterns=None): |
|
312 | 312 | """ |
|
313 | 313 | Parameters |
|
314 | 314 | ---------- |
|
315 | 315 | |
|
316 | 316 | exclude_patterns : sequence of strings, optional |
|
317 | 317 | Filenames containing these patterns (as raw strings, not as regular |
|
318 | 318 | expressions) are excluded from the tests. |
|
319 | 319 | """ |
|
320 | 320 | self.exclude_patterns = exclude_patterns or [] |
|
321 | 321 | super(ExclusionPlugin, self).__init__() |
|
322 | 322 | |
|
323 | 323 | def options(self, parser, env=os.environ): |
|
324 | 324 | Plugin.options(self, parser, env) |
|
325 | 325 | |
|
326 | 326 | def configure(self, options, config): |
|
327 | 327 | Plugin.configure(self, options, config) |
|
328 | 328 | # Override nose trying to disable plugin. |
|
329 | 329 | self.enabled = True |
|
330 | 330 | |
|
331 | 331 | def wantFile(self, filename): |
|
332 | 332 | """Return whether the given filename should be scanned for tests. |
|
333 | 333 | """ |
|
334 | 334 | if any(pat in filename for pat in self.exclude_patterns): |
|
335 | 335 | return False |
|
336 | 336 | return None |
|
337 | 337 | |
|
338 | 338 | def wantDirectory(self, directory): |
|
339 | 339 | """Return whether the given directory should be scanned for tests. |
|
340 | 340 | """ |
|
341 | 341 | if any(pat in directory for pat in self.exclude_patterns): |
|
342 | 342 | return False |
|
343 | 343 | return None |
|
344 | 344 | |
|
345 | 345 | |
|
346 | 346 | class StreamCapturer(Thread): |
|
347 | 347 | daemon = True # Don't hang if main thread crashes |
|
348 | 348 | started = False |
|
349 | 349 | def __init__(self, echo=False): |
|
350 | 350 | super(StreamCapturer, self).__init__() |
|
351 | 351 | self.echo = echo |
|
352 | 352 | self.streams = [] |
|
353 | 353 | self.buffer = BytesIO() |
|
354 | 354 | self.readfd, self.writefd = os.pipe() |
|
355 | 355 | self.buffer_lock = Lock() |
|
356 | 356 | self.stop = Event() |
|
357 | 357 | |
|
358 | 358 | def run(self): |
|
359 | 359 | self.started = True |
|
360 | 360 | |
|
361 | 361 | while not self.stop.is_set(): |
|
362 | 362 | chunk = os.read(self.readfd, 1024) |
|
363 | 363 | |
|
364 | 364 | with self.buffer_lock: |
|
365 | 365 | self.buffer.write(chunk) |
|
366 | 366 | if self.echo: |
|
367 | 367 | sys.stdout.write(bytes_to_str(chunk)) |
|
368 | 368 | |
|
369 | 369 | os.close(self.readfd) |
|
370 | 370 | os.close(self.writefd) |
|
371 | 371 | |
|
372 | 372 | def reset_buffer(self): |
|
373 | 373 | with self.buffer_lock: |
|
374 | 374 | self.buffer.truncate(0) |
|
375 | 375 | self.buffer.seek(0) |
|
376 | 376 | |
|
377 | 377 | def get_buffer(self): |
|
378 | 378 | with self.buffer_lock: |
|
379 | 379 | return self.buffer.getvalue() |
|
380 | 380 | |
|
381 | 381 | def ensure_started(self): |
|
382 | 382 | if not self.started: |
|
383 | 383 | self.start() |
|
384 | 384 | |
|
385 | 385 | def halt(self): |
|
386 | 386 | """Safely stop the thread.""" |
|
387 | 387 | if not self.started: |
|
388 | 388 | return |
|
389 | 389 | |
|
390 | 390 | self.stop.set() |
|
391 | 391 | os.write(self.writefd, b'wake up') # Ensure we're not locked in a read() |
|
392 | 392 | self.join() |
|
393 | 393 | |
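StreamCapturer drains a single pipe on a background thread so that many subprocesses can share one capture target. A minimal usage sketch, not part of the diff, assuming a POSIX environment with an 'echo' binary on PATH:

# Sketch: send a child process's output through the capture pipe and read it back.
import time
import subprocess

cap = StreamCapturer()
cap.ensure_started()
subprocess.call(['echo', 'hello from a subprocess'],
                stdout=cap.writefd, stderr=cap.writefd)
time.sleep(0.1)            # give the reader thread a moment to drain the pipe
print(cap.get_buffer())    # captured bytes
cap.halt()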
|
394 | 394 | class SubprocessStreamCapturePlugin(Plugin): |
|
395 | 395 | name='subprocstreams' |
|
396 | 396 | def __init__(self): |
|
397 | 397 | Plugin.__init__(self) |
|
398 | 398 | self.stream_capturer = StreamCapturer() |
|
399 | 399 | self.destination = os.environ.get('IPTEST_SUBPROC_STREAMS', 'capture') |
|
400 | 400 | # This is ugly, but distant parts of the test machinery need to be able |
|
401 | 401 | # to redirect streams, so we make the object globally accessible. |
|
402 | 402 | nose.iptest_stdstreams_fileno = self.get_write_fileno |
|
403 | 403 | |
|
404 | 404 | def get_write_fileno(self): |
|
405 | 405 | if self.destination == 'capture': |
|
406 | 406 | self.stream_capturer.ensure_started() |
|
407 | 407 | return self.stream_capturer.writefd |
|
408 | 408 | elif self.destination == 'discard': |
|
409 | 409 | return os.open(os.devnull, os.O_WRONLY) |
|
410 | 410 | else: |
|
411 | 411 | return sys.__stdout__.fileno() |
|
412 | 412 | |
|
413 | 413 | def configure(self, options, config): |
|
414 | 414 | Plugin.configure(self, options, config) |
|
415 | 415 | # Override nose trying to disable plugin. |
|
416 | 416 | if self.destination == 'capture': |
|
417 | 417 | self.enabled = True |
|
418 | 418 | |
|
419 | 419 | def startTest(self, test): |
|
420 | 420 | # Reset log capture |
|
421 | 421 | self.stream_capturer.reset_buffer() |
|
422 | 422 | |
|
423 | 423 | def formatFailure(self, test, err): |
|
424 | 424 | # Show output |
|
425 | 425 | ec, ev, tb = err |
|
426 | 426 | captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace') |
|
427 | 427 | if captured.strip(): |
|
428 | 428 | ev = safe_str(ev) |
|
429 | 429 | out = [ev, '>> begin captured subprocess output <<', |
|
430 | 430 | captured, |
|
431 | 431 | '>> end captured subprocess output <<'] |
|
432 | 432 | return ec, '\n'.join(out), tb |
|
433 | 433 | |
|
434 | 434 | return err |
|
435 | 435 | |
|
436 | 436 | formatError = formatFailure |
|
437 | 437 | |
|
438 | 438 | def finalize(self, result): |
|
439 | 439 | self.stream_capturer.halt() |
|
440 | 440 | |
|
441 | 441 | |
|
442 | 442 | def run_iptest(): |
|
443 | 443 | """Run the IPython test suite using nose. |
|
444 | 444 | |
|
445 | 445 | This function is called when this script is **not** called with the form |
|
446 | 446 | `iptest all`. It simply calls nose with appropriate command line flags |
|
447 | 447 | and accepts all of the standard nose arguments. |
|
448 | 448 | """ |
|
449 | 449 | # Apply our monkeypatch to Xunit |
|
450 | 450 | if '--with-xunit' in sys.argv and not hasattr(Xunit, 'orig_addError'): |
|
451 | 451 | monkeypatch_xunit() |
|
452 | 452 | |
|
453 | 453 | warnings.filterwarnings('ignore', |
|
454 | 454 | 'This will be removed soon. Use IPython.testing.util instead') |
|
455 | 455 | |
|
456 | 456 | arg1 = sys.argv[1] |
|
457 | 457 | if arg1 in test_sections: |
|
458 | 458 | section = test_sections[arg1] |
|
459 | 459 | sys.argv[1:2] = section.includes |
|
460 | 460 | elif arg1.startswith('IPython.') and arg1[8:] in test_sections: |
|
461 | 461 | section = test_sections[arg1[8:]] |
|
462 | 462 | sys.argv[1:2] = section.includes |
|
463 | 463 | else: |
|
464 | 464 | section = TestSection(arg1, includes=[arg1]) |
|
465 | 465 | |
|
466 | 466 | |
|
467 | 467 | argv = sys.argv + [ '--detailed-errors', # extra info in tracebacks |
|
468 | 468 | |
|
469 | 469 | '--with-ipdoctest', |
|
470 | 470 | '--ipdoctest-tests','--ipdoctest-extension=txt', |
|
471 | 471 | |
|
472 | 472 | # We add --exe because of setuptools' imbecility (it |
|
473 | 473 | # blindly does chmod +x on ALL files). Nose does the |
|
474 | 474 | # right thing and it tries to avoid executables, |
|
475 | 475 | # setuptools unfortunately forces our hand here. This |
|
476 | 476 | # has been discussed on the distutils list and the |
|
477 | 477 | # setuptools devs refuse to fix this problem! |
|
478 | 478 | '--exe', |
|
479 | 479 | ] |
|
480 | 480 | if '-a' not in argv and '-A' not in argv: |
|
481 | 481 | argv = argv + ['-a', '!crash'] |
|
482 | 482 | |
|
483 | 483 | if nose.__version__ >= '0.11': |
|
484 | 484 | # I don't fully understand why we need this one, but depending on what |
|
485 | 485 | # directory the test suite is run from, if we don't give it, 0 tests |
|
486 | 486 | # get run. Specifically, if the test suite is run from the source dir |
|
487 | 487 | # with an argument (like 'iptest.py IPython.core'), 0 tests are run, |

488 | 488 | # even if the same call done in this directory works fine. It appears |
|
489 | 489 | # that if the requested package is in the current dir, nose bails early |
|
490 | 490 | # by default. Since it's otherwise harmless, leave it in by default |
|
491 | 491 | # for nose >= 0.11, though unfortunately nose 0.10 doesn't support it. |
|
492 | 492 | argv.append('--traverse-namespace') |
|
493 | 493 | |
|
494 | 494 | # use our plugin for doctesting. It will remove the standard doctest plugin |
|
495 | 495 | # if it finds it enabled |
|
496 | 496 | plugins = [ExclusionPlugin(section.excludes), IPythonDoctest(), KnownFailure(), |
|
497 | 497 | SubprocessStreamCapturePlugin() ] |
|
498 | 498 | |
|
499 | 499 | # Use working directory set by parent process (see iptestcontroller) |
|
500 | 500 | if 'IPTEST_WORKING_DIR' in os.environ: |
|
501 | 501 | os.chdir(os.environ['IPTEST_WORKING_DIR']) |
|
502 | 502 | |
|
503 | 503 | # We need a global ipython running in this process, but the special |
|
504 | 504 | # in-process group spawns its own IPython kernels, so for *that* group we |
|
505 | 505 | # must avoid also opening the global one (otherwise there's a conflict of |
|
506 | 506 | # singletons). Ultimately the solution to this problem is to refactor our |
|
507 | 507 | # assumptions about what needs to be a singleton and what doesn't (app |
|
508 | 508 | # objects should, individual shells shouldn't). But for now, this |
|
509 | 509 | # workaround allows the test suite for the inprocess module to complete. |
|
510 | 510 | if 'kernel.inprocess' not in section.name: |
|
511 | 511 | from IPython.testing import globalipapp |
|
512 | 512 | globalipapp.start_ipython() |
|
513 | 513 | |
|
514 | 514 | # Now nose can run |
|
515 | 515 | TestProgram(argv=argv, addplugins=plugins) |
|
516 | 516 | |
|
517 | 517 | if __name__ == '__main__': |
|
518 | 518 | run_iptest() |
|
519 | 519 |
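run_iptest() drives everything from sys.argv: the first argument is either a section name from test_sections or an importable name, and the remaining arguments are passed through to nose. A sketch of the programmatic equivalent of `iptest html`, assuming this module is IPython.testing.iptest and that nose plus the section's dependencies are installed:

# Sketch only: run one test section the way the iptest entry point would.
import sys
from IPython.testing.iptest import run_iptest

sys.argv = ['iptest', 'html']   # any name from test_group_names works here
run_iptest()                    # builds the nose argv and plugins, then hands off to TestProgram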
@@ -1,355 +1,355 | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | # -*- coding: utf-8 -*- |
|
3 | 3 | """Setup script for IPython. |
|
4 | 4 | |
|
5 | 5 | Under Posix environments it works like a typical setup.py script. |
|
6 | 6 | Under Windows, the command sdist is not supported, since IPython |
|
7 | 7 | requires utilities which are not available under Windows.""" |
|
8 | 8 | |
|
9 | 9 | #----------------------------------------------------------------------------- |
|
10 | 10 | # Copyright (c) 2008-2011, IPython Development Team. |
|
11 | 11 | # Copyright (c) 2001-2007, Fernando Perez <fernando.perez@colorado.edu> |
|
12 | 12 | # Copyright (c) 2001, Janko Hauser <jhauser@zscout.de> |
|
13 | 13 | # Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu> |
|
14 | 14 | # |
|
15 | 15 | # Distributed under the terms of the Modified BSD License. |
|
16 | 16 | # |
|
17 | 17 | # The full license is in the file COPYING.rst, distributed with this software. |
|
18 | 18 | #----------------------------------------------------------------------------- |
|
19 | 19 | |
|
20 | 20 | #----------------------------------------------------------------------------- |
|
21 | 21 | # Minimal Python version sanity check |
|
22 | 22 | #----------------------------------------------------------------------------- |
|
23 | 23 | from __future__ import print_function |
|
24 | 24 | |
|
25 | 25 | import sys |
|
26 | 26 | |
|
27 | 27 | # This check is also made in IPython/__init__, don't forget to update both when |
|
28 | 28 | # changing Python version requirements. |
|
29 | 29 | v = sys.version_info |
|
30 | 30 | if v[:2] < (2,7) or (v[0] >= 3 and v[:2] < (3,3)): |
|
31 | 31 | error = "ERROR: IPython requires Python version 2.7 or 3.3 or above." |
|
32 | 32 | print(error, file=sys.stderr) |
|
33 | 33 | sys.exit(1) |
|
34 | 34 | |
|
35 | 35 | PY3 = (sys.version_info[0] >= 3) |
|
36 | 36 | |
|
37 | 37 | # At least we're on the python version we need, move on. |
|
38 | 38 | |
|
39 | 39 | #------------------------------------------------------------------------------- |
|
40 | 40 | # Imports |
|
41 | 41 | #------------------------------------------------------------------------------- |
|
42 | 42 | |
|
43 | 43 | # Stdlib imports |
|
44 | 44 | import os |
|
45 | 45 | import shutil |
|
46 | 46 | |
|
47 | 47 | from glob import glob |
|
48 | 48 | |
|
49 | 49 | # BEFORE importing distutils, remove MANIFEST. distutils doesn't properly |
|
50 | 50 | # update it when the contents of directories change. |
|
51 | 51 | if os.path.exists('MANIFEST'): os.remove('MANIFEST') |
|
52 | 52 | |
|
53 | 53 | from distutils.core import setup |
|
54 | 54 | |
|
55 | 55 | # Our own imports |
|
56 | 56 | from setupbase import target_update |
|
57 | 57 | |
|
58 | 58 | from setupbase import ( |
|
59 | 59 | setup_args, |
|
60 | 60 | find_packages, |
|
61 | 61 | find_package_data, |
|
62 | 62 | check_package_data_first, |
|
63 | 63 | find_entry_points, |
|
64 | 64 | build_scripts_entrypt, |
|
65 | 65 | find_data_files, |
|
66 | 66 | check_for_dependencies, |
|
67 | 67 | git_prebuild, |
|
68 | 68 | check_submodule_status, |
|
69 | 69 | update_submodules, |
|
70 | 70 | require_submodules, |
|
71 | 71 | UpdateSubmodules, |
|
72 | 72 | get_bdist_wheel, |
|
73 | 73 | CompileCSS, |
|
74 | 74 | JavascriptVersion, |
|
75 | 75 | css_js_prerelease, |
|
76 | 76 | install_symlinked, |
|
77 | 77 | install_lib_symlink, |
|
78 | 78 | install_scripts_for_symlink, |
|
79 | 79 | unsymlink, |
|
80 | 80 | ) |
|
81 | 81 | from setupext import setupext |
|
82 | 82 | |
|
83 | 83 | isfile = os.path.isfile |
|
84 | 84 | pjoin = os.path.join |
|
85 | 85 | |
|
86 | 86 | #----------------------------------------------------------------------------- |
|
87 | 87 | # Function definitions |
|
88 | 88 | #----------------------------------------------------------------------------- |
|
89 | 89 | |
|
90 | 90 | def cleanup(): |
|
91 | 91 | """Clean up the junk left around by the build process""" |
|
92 | 92 | if "develop" not in sys.argv and "egg_info" not in sys.argv: |
|
93 | 93 | try: |
|
94 | 94 | shutil.rmtree('ipython.egg-info') |
|
95 | 95 | except: |
|
96 | 96 | try: |
|
97 | 97 | os.unlink('ipython.egg-info') |
|
98 | 98 | except: |
|
99 | 99 | pass |
|
100 | 100 | |
|
101 | 101 | #------------------------------------------------------------------------------- |
|
102 | 102 | # Handle OS specific things |
|
103 | 103 | #------------------------------------------------------------------------------- |
|
104 | 104 | |
|
105 | 105 | if os.name in ('nt','dos'): |
|
106 | 106 | os_name = 'windows' |
|
107 | 107 | else: |
|
108 | 108 | os_name = os.name |
|
109 | 109 | |
|
110 | 110 | # Under Windows, 'sdist' has not been supported. Now that the docs build with |
|
111 | 111 | # Sphinx it might work, but let's not turn it on until someone confirms that it |
|
112 | 112 | # actually works. |
|
113 | 113 | if os_name == 'windows' and 'sdist' in sys.argv: |
|
114 | 114 | print('The sdist command is not available under Windows. Exiting.') |
|
115 | 115 | sys.exit(1) |
|
116 | 116 | |
|
117 | 117 | #------------------------------------------------------------------------------- |
|
118 | 118 | # Make sure we aren't trying to run without submodules |
|
119 | 119 | #------------------------------------------------------------------------------- |
|
120 | 120 | here = os.path.abspath(os.path.dirname(__file__)) |
|
121 | 121 | |
|
122 | 122 | def require_clean_submodules(): |
|
123 | 123 | """Check on git submodules before distutils can do anything |
|
124 | 124 | |
|
125 | 125 | Since distutils cannot be trusted to update the tree |
|
126 | 126 | after everything has been set in motion, |
|
127 | 127 | this is not a distutils command. |
|
128 | 128 | """ |
|
129 | 129 | # PACKAGERS: Add a return here to skip checks for git submodules |
|
130 | 130 | |
|
131 | 131 | # don't do anything if nothing is actually supposed to happen |
|
132 | 132 | for do_nothing in ('-h', '--help', '--help-commands', 'clean', 'submodule'): |
|
133 | 133 | if do_nothing in sys.argv: |
|
134 | 134 | return |
|
135 | 135 | |
|
136 | 136 | status = check_submodule_status(here) |
|
137 | 137 | |
|
138 | 138 | if status == "missing": |
|
139 | 139 | print("checking out submodules for the first time") |
|
140 | 140 | update_submodules(here) |
|
141 | 141 | elif status == "unclean": |
|
142 | 142 | print('\n'.join([ |
|
143 | 143 | "Cannot build / install IPython with unclean submodules", |
|
144 | 144 | "Please update submodules with", |
|
145 | 145 | " python setup.py submodule", |
|
146 | 146 | "or", |
|
147 | 147 | " git submodule update", |
|
148 | 148 | "or commit any submodule changes you have made." |
|
149 | 149 | ])) |
|
150 | 150 | sys.exit(1) |
|
151 | 151 | |
|
152 | 152 | require_clean_submodules() |
|
153 | 153 | |
|
154 | 154 | #------------------------------------------------------------------------------- |
|
155 | 155 | # Things related to the IPython documentation |
|
156 | 156 | #------------------------------------------------------------------------------- |
|
157 | 157 | |
|
158 | 158 | # update the manuals when building a source dist |
|
159 | 159 | if len(sys.argv) >= 2 and sys.argv[1] in ('sdist','bdist_rpm'): |
|
160 | 160 | |
|
161 | 161 | # List of things to be updated. Each entry is a triplet of args for |
|
162 | 162 | # target_update() |
|
163 | 163 | to_update = [ |
|
164 | 164 | # FIXME - Disabled for now: we need to redo an automatic way |
|
165 | 165 | # of generating the magic info inside the rst. |
|
166 | 166 | #('docs/magic.tex', |
|
167 | 167 | #['IPython/Magic.py'], |
|
168 | 168 | #"cd doc && ./update_magic.sh" ), |
|
169 | 169 | |
|
170 | 170 | ('docs/man/ipcluster.1.gz', |
|
171 | 171 | ['docs/man/ipcluster.1'], |
|
172 | 172 | 'cd docs/man && gzip -9c ipcluster.1 > ipcluster.1.gz'), |
|
173 | 173 | |
|
174 | 174 | ('docs/man/ipcontroller.1.gz', |
|
175 | 175 | ['docs/man/ipcontroller.1'], |
|
176 | 176 | 'cd docs/man && gzip -9c ipcontroller.1 > ipcontroller.1.gz'), |
|
177 | 177 | |
|
178 | 178 | ('docs/man/ipengine.1.gz', |
|
179 | 179 | ['docs/man/ipengine.1'], |
|
180 | 180 | 'cd docs/man && gzip -9c ipengine.1 > ipengine.1.gz'), |
|
181 | 181 | |
|
182 | 182 | ('docs/man/ipython.1.gz', |
|
183 | 183 | ['docs/man/ipython.1'], |
|
184 | 184 | 'cd docs/man && gzip -9c ipython.1 > ipython.1.gz'), |
|
185 | 185 | |
|
186 | 186 | ] |
|
187 | 187 | |
|
188 | 188 | |
|
189 | 189 | [ target_update(*t) for t in to_update ] |
|
190 | 190 | |
|
191 | 191 | #--------------------------------------------------------------------------- |
|
192 | 192 | # Find all the packages, package data, and data_files |
|
193 | 193 | #--------------------------------------------------------------------------- |
|
194 | 194 | |
|
195 | 195 | packages = find_packages() |
|
196 | 196 | package_data = find_package_data() |
|
197 | 197 | |
|
198 | 198 | data_files = find_data_files() |
|
199 | 199 | |
|
200 | 200 | setup_args['packages'] = packages |
|
201 | 201 | setup_args['package_data'] = package_data |
|
202 | 202 | setup_args['data_files'] = data_files |
|
203 | 203 | |
|
204 | 204 | #--------------------------------------------------------------------------- |
|
205 | 205 | # custom distutils commands |
|
206 | 206 | #--------------------------------------------------------------------------- |
|
207 | 207 | # imports here, so they are after setuptools import if there was one |
|
208 | 208 | from distutils.command.sdist import sdist |
|
209 | 209 | from distutils.command.upload import upload |
|
210 | 210 | |
|
211 | 211 | class UploadWindowsInstallers(upload): |
|
212 | 212 | |
|
213 | 213 | description = "Upload Windows installers to PyPI (only used from tools/release_windows.py)" |
|
214 | 214 | user_options = upload.user_options + [ |
|
215 | 215 | ('files=', 'f', 'exe file (or glob) to upload') |
|
216 | 216 | ] |
|
217 | 217 | def initialize_options(self): |
|
218 | 218 | upload.initialize_options(self) |
|
219 | 219 | meta = self.distribution.metadata |
|
220 | 220 | base = '{name}-{version}'.format( |
|
221 | 221 | name=meta.get_name(), |
|
222 | 222 | version=meta.get_version() |
|
223 | 223 | ) |
|
224 | 224 | self.files = os.path.join('dist', '%s.*.exe' % base) |
|
225 | 225 | |
|
226 | 226 | def run(self): |
|
227 | 227 | for dist_file in glob(self.files): |
|
228 | 228 | self.upload_file('bdist_wininst', 'any', dist_file) |
|
229 | 229 | |
|
230 | 230 | setup_args['cmdclass'] = { |
|
231 | 231 | 'build_py': css_js_prerelease( |
|
232 | 232 | check_package_data_first(git_prebuild('IPython')), |
|
233 | 233 | strict=False), |
|
234 | 234 | 'sdist' : css_js_prerelease(git_prebuild('IPython', sdist)), |
|
235 | 235 | 'upload_wininst' : UploadWindowsInstallers, |
|
236 | 236 | 'submodule' : UpdateSubmodules, |
|
237 | 237 | 'css' : CompileCSS, |
|
238 | 238 | 'symlink': install_symlinked, |
|
239 | 239 | 'install_lib_symlink': install_lib_symlink, |
|
240 | 240 | 'install_scripts_sym': install_scripts_for_symlink, |
|
241 | 241 | 'unsymlink': unsymlink, |
|
242 | 242 | 'jsversion' : JavascriptVersion, |
|
243 | 243 | } |
|
244 | 244 | |
|
245 | 245 | #--------------------------------------------------------------------------- |
|
246 | 246 | # Handle scripts, dependencies, and setuptools specific things |
|
247 | 247 | #--------------------------------------------------------------------------- |
|
248 | 248 | |
|
249 | 249 | # For some commands, use setuptools. Note that we do NOT list install here! |
|
250 | 250 | # If you want a setuptools-enhanced install, just run 'setupegg.py install' |
|
251 | 251 | needs_setuptools = set(('develop', 'release', 'bdist_egg', 'bdist_rpm', |
|
252 | 252 | 'bdist', 'bdist_dumb', 'bdist_wininst', 'bdist_wheel', |
|
253 | 253 | 'egg_info', 'easy_install', 'upload', 'install_egg_info', |
|
254 | 254 | )) |
|
255 | 255 | if sys.platform == 'win32': |
|
256 | 256 | # Depend on setuptools for install on *Windows only* |
|
257 | 257 | # If we get script-installation working without setuptools, |
|
258 | 258 | # then we can back off, but until then use it. |
|
259 | 259 | # See Issue #369 on GitHub for more |
|
260 | 260 | needs_setuptools.add('install') |
|
261 | 261 | |
|
262 | 262 | if len(needs_setuptools.intersection(sys.argv)) > 0: |
|
263 | 263 | import setuptools |
|
264 | 264 | |
|
265 | 265 | # This dict is used for passing extra arguments that are setuptools |
|
266 | 266 | # specific to setup |
|
267 | 267 | setuptools_extra_args = {} |
|
268 | 268 | |
|
269 | 269 | # setuptools requirements |
|
270 | 270 | |
|
271 | 271 | extras_require = dict( |
|
272 | 272 | parallel = ['pyzmq>=2.1.11'], |
|
273 | 273 | qtconsole = ['pyzmq>=2.1.11', 'pygments'], |
|
274 | 274 | zmq = ['pyzmq>=2.1.11'], |
|
275 | 275 | doc = ['Sphinx>=1.1', 'numpydoc'], |
|
276 | 276 | test = ['nose>=0.10.1', 'requests'], |
|
277 | 277 | terminal = [], |
|
278 | 278 | nbformat = ['jsonschema>=2.0'], |
|
279 | notebook = ['tornado>= | 
|
279 | notebook = ['tornado>=4.0', 'pyzmq>=2.1.11', 'jinja2', 'pygments', 'mistune>=0.3.1'], | |
|
280 | 280 | nbconvert = ['pygments', 'jinja2', 'mistune>=0.3.1'] |
|
281 | 281 | ) |
|
282 | 282 | |
|
283 | 283 | if sys.version_info < (3, 3): |
|
284 | 284 | extras_require['test'].append('mock') |
|
285 | 285 | |
|
286 | 286 | extras_require['notebook'].extend(extras_require['nbformat']) |
|
287 | 287 | extras_require['nbconvert'].extend(extras_require['nbformat']) |
|
288 | 288 | |
|
289 | 289 | everything = set() |
|
290 | 290 | for deps in extras_require.values(): |
|
291 | 291 | everything.update(deps) |
|
292 | 292 | extras_require['all'] = everything |
|
293 | 293 | |
|
294 | 294 | install_requires = [] |
|
295 | 295 | |
|
296 | 296 | # add readline |
|
297 | 297 | if sys.platform == 'darwin': |
|
298 | 298 | if any(arg.startswith('bdist') for arg in sys.argv) or not setupext.check_for_readline(): |
|
299 | 299 | install_requires.append('gnureadline') |
|
300 | 300 | elif sys.platform.startswith('win'): |
|
301 | 301 | extras_require['terminal'].append('pyreadline>=2.0') |
|
302 | 302 | |
|
303 | 303 | |
|
304 | 304 | if 'setuptools' in sys.modules: |
|
305 | 305 | # setup.py develop should check for submodules |
|
306 | 306 | from setuptools.command.develop import develop |
|
307 | 307 | setup_args['cmdclass']['develop'] = require_submodules(develop) |
|
308 | 308 | setup_args['cmdclass']['bdist_wheel'] = css_js_prerelease(get_bdist_wheel()) |
|
309 | 309 | |
|
310 | 310 | setuptools_extra_args['zip_safe'] = False |
|
311 | 311 | setuptools_extra_args['entry_points'] = {'console_scripts':find_entry_points()} |
|
312 | 312 | setup_args['extras_require'] = extras_require |
|
313 | 313 | requires = setup_args['install_requires'] = install_requires |
|
314 | 314 | |
|
315 | 315 | # Script to be run by the windows binary installer after the default setup |
|
316 | 316 | # routine, to add shortcuts and similar windows-only things. Windows |
|
317 | 317 | # post-install scripts MUST reside in the scripts/ dir, otherwise distutils |
|
318 | 318 | # doesn't find them. |
|
319 | 319 | if 'bdist_wininst' in sys.argv: |
|
320 | 320 | if len(sys.argv) > 2 and \ |
|
321 | 321 | ('sdist' in sys.argv or 'bdist_rpm' in sys.argv): |
|
322 | 322 | print >> sys.stderr, "ERROR: bdist_wininst must be run alone. Exiting." |
|
323 | 323 | sys.exit(1) |
|
324 | 324 | setup_args['data_files'].append( |
|
325 | 325 | ['Scripts', ('scripts/ipython.ico', 'scripts/ipython_nb.ico')]) |
|
326 | 326 | setup_args['scripts'] = [pjoin('scripts','ipython_win_post_install.py')] |
|
327 | 327 | setup_args['options'] = {"bdist_wininst": |
|
328 | 328 | {"install_script": |
|
329 | 329 | "ipython_win_post_install.py"}} |
|
330 | 330 | |
|
331 | 331 | else: |
|
332 | 332 | # If we are installing without setuptools, call this function which will |
|
333 | 333 | # check for dependencies an inform the user what is needed. This is |
|
334 | 334 | # just to make life easy for users. |
|
335 | 335 | for install_cmd in ('install', 'symlink'): |
|
336 | 336 | if install_cmd in sys.argv: |
|
337 | 337 | check_for_dependencies() |
|
338 | 338 | break |
|
339 | 339 | # scripts has to be a non-empty list, or install_scripts isn't called |
|
340 | 340 | setup_args['scripts'] = [e.split('=')[0].strip() for e in find_entry_points()] |
|
341 | 341 | |
|
342 | 342 | setup_args['cmdclass']['build_scripts'] = build_scripts_entrypt |
|
343 | 343 | |
|
344 | 344 | #--------------------------------------------------------------------------- |
|
345 | 345 | # Do the actual setup now |
|
346 | 346 | #--------------------------------------------------------------------------- |
|
347 | 347 | |
|
348 | 348 | setup_args.update(setuptools_extra_args) |
|
349 | 349 | |
|
350 | 350 | def main(): |
|
351 | 351 | setup(**setup_args) |
|
352 | 352 | cleanup() |
|
353 | 353 | |
|
354 | 354 | if __name__ == '__main__': |
|
355 | 355 | main() |