@@ -1,257 +1,260 @@
# coding: utf-8
"""Tornado handlers for WebSocket <-> ZMQ sockets."""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

import os
import json
import struct
import warnings

try:
    from urllib.parse import urlparse # Py 3
except ImportError:
    from urlparse import urlparse # Py 2

import tornado
from tornado import gen, ioloop, web
from tornado.websocket import WebSocketHandler

from IPython.kernel.zmq.session import Session
from IPython.utils.jsonutil import date_default, extract_dates
from IPython.utils.py3compat import cast_unicode

from .handlers import IPythonHandler

def serialize_binary_message(msg):
    """serialize a message as a binary blob

    Header:

    4 bytes: number of msg parts (nbufs) as 32b int
    4 * nbufs bytes: offset for each buffer as integer as 32b int

    Offsets are from the start of the buffer, including the header.

    Returns
    -------

    The message serialized to bytes.

    """
    # don't modify msg or buffer list in-place
    msg = msg.copy()
    buffers = list(msg.pop('buffers'))
    bmsg = json.dumps(msg, default=date_default).encode('utf8')
    buffers.insert(0, bmsg)
    nbufs = len(buffers)
    offsets = [4 * (nbufs + 1)]
    for buf in buffers[:-1]:
        offsets.append(offsets[-1] + len(buf))
    offsets_buf = struct.pack('!' + 'I' * (nbufs + 1), nbufs, *offsets)
    buffers.insert(0, offsets_buf)
    return b''.join(buffers)


def deserialize_binary_message(bmsg):
    """deserialize a message from a binary blob

    Header:

    4 bytes: number of msg parts (nbufs) as 32b int
    4 * nbufs bytes: offset for each buffer as integer as 32b int

    Offsets are from the start of the buffer, including the header.

    Returns
    -------

    message dictionary
    """
    nbufs = struct.unpack('!i', bmsg[:4])[0]
    offsets = list(struct.unpack('!' + 'I' * nbufs, bmsg[4:4*(nbufs+1)]))
    offsets.append(None)
    bufs = []
    for start, stop in zip(offsets[:-1], offsets[1:]):
        bufs.append(bmsg[start:stop])
    msg = json.loads(bufs[0].decode('utf8'))
    msg['header'] = extract_dates(msg['header'])
    msg['parent_header'] = extract_dates(msg['parent_header'])
    msg['buffers'] = bufs[1:]
    return msg

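# Not part of the file: a minimal sketch of the framing the two helpers above
# describe, using a made-up message with one extra binary buffer.
import json, struct

example_msg = {'header': {}, 'parent_header': {}, 'metadata': {}, 'content': {}}
extra_buffers = [b'\x00\x01\x02\x03']

parts = [json.dumps(example_msg).encode('utf8')] + extra_buffers  # part 0 is the JSON blob
nbufs = len(parts)                                                # here: 2
offsets = [4 * (nbufs + 1)]                                       # data starts right after the header
for buf in parts[:-1]:
    offsets.append(offsets[-1] + len(buf))
header = struct.pack('!' + 'I' * (nbufs + 1), nbufs, *offsets)
blob = header + b''.join(parts)

# unpacking the same way deserialize_binary_message does recovers each piece
assert struct.unpack('!i', blob[:4])[0] == nbufs
assert blob[offsets[0]:offsets[1]] == parts[0]
assert blob[offsets[1]:] == extra_buffers[0]
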
# ping interval for keeping websockets alive (30 seconds)
WS_PING_INTERVAL = 30000

if os.environ.get('IPYTHON_ALLOW_DRAFT_WEBSOCKETS_FOR_PHANTOMJS', False):
    warnings.warn("""Allowing draft76 websocket connections!
    This should only be done for testing with phantomjs!""")
    from IPython.html import allow76
    WebSocketHandler = allow76.AllowDraftWebSocketHandler
    # draft 76 doesn't support ping
    WS_PING_INTERVAL = 0

class ZMQStreamHandler(WebSocketHandler):

    def check_origin(self, origin):
        """Check Origin == Host or Access-Control-Allow-Origin.

        Tornado >= 4 calls this method automatically, raising 403 if it returns False.
        We call it explicitly in `open` on Tornado < 4.
        """
        if self.allow_origin == '*':
            return True

        host = self.request.headers.get("Host")

        # If no header is provided, assume we can't verify origin
        if origin is None:
            self.log.warn("Missing Origin header, rejecting WebSocket connection.")
            return False
        if host is None:
            self.log.warn("Missing Host header, rejecting WebSocket connection.")
            return False

        origin = origin.lower()
        origin_host = urlparse(origin).netloc

        # OK if origin matches host
        if origin_host == host:
            return True

        # Check CORS headers
        if self.allow_origin:
            allow = self.allow_origin == origin
        elif self.allow_origin_pat:
            allow = bool(self.allow_origin_pat.match(origin))
        else:
            # No CORS headers: deny the request
            allow = False
        if not allow:
            self.log.warn("Blocking Cross Origin WebSocket Attempt. Origin: %s, Host: %s",
                origin, host,
            )
        return allow

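# Not part of the file: how the host/origin comparison above plays out for
# hypothetical values (the hostnames are invented).
from urllib.parse import urlparse  # on Python 2 this comes from urlparse, as imported at the top

host = 'notebook.example.com:8888'                            # value of the Host header
urlparse('https://notebook.example.com:8888').netloc == host  # True  -> same origin, accepted
urlparse('https://attacker.example.net').netloc == host       # False -> falls back to the allow_origin / allow_origin_pat checks in check_origin
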
    def clear_cookie(self, *args, **kwargs):
        """meaningless for websockets"""
        pass

    def _reserialize_reply(self, msg_list):
        """Reserialize a reply message using JSON.

        This takes the msg list from the ZMQ socket, deserializes it using
        self.session and then serializes the result using JSON. This method
        should be used by self._on_zmq_reply to build messages that can
        be sent back to the browser.
        """
        idents, msg_list = self.session.feed_identities(msg_list)
        msg = self.session.deserialize(msg_list)
        if msg['buffers']:
            buf = serialize_binary_message(msg)
            return buf
        else:
            smsg = json.dumps(msg, default=date_default)
            return cast_unicode(smsg)

    def _on_zmq_reply(self, msg_list):
        # Sometimes this gets triggered when the on_close method is scheduled in the
        # eventloop but hasn't been called.
        if self.stream.closed(): return
        try:
            msg = self._reserialize_reply(msg_list)
        except Exception:
            self.log.critical("Malformed message: %r" % msg_list, exc_info=True)
        else:
            self.write_message(msg, binary=isinstance(msg, bytes))

class AuthenticatedZMQStreamHandler(ZMQStreamHandler, IPythonHandler):
    ping_callback = None
    last_ping = 0
    last_pong = 0

    @property
    def ping_interval(self):
        """The interval for websocket keep-alive pings.

        Set ws_ping_interval = 0 to disable pings.
        """
        return self.settings.get('ws_ping_interval', WS_PING_INTERVAL)

    @property
    def ping_timeout(self):
        """If no ping is received in this many milliseconds,
        close the websocket connection (VPNs, etc. can fail to cleanly close ws connections).
        Default is max of 3 pings or 30 seconds.
        """
        return self.settings.get('ws_ping_timeout',
            max(3 * self.ping_interval, WS_PING_INTERVAL)
        )

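# Not part of the file: both properties read plain tornado application settings, so
# (assuming the NotebookApp.tornado_settings override shown further below feeds those
# settings) a config-file entry like this would tune the keep-alive behaviour.
# The values are illustrative; `c` is the config object available in IPython config files.
c.NotebookApp.tornado_settings = {
    'ws_ping_interval': 10000,   # ping every 10 seconds
    'ws_ping_timeout': 60000,    # drop the connection after 60 s without a pong
}
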
    def set_default_headers(self):
        """Undo the set_default_headers in IPythonHandler

        which doesn't make sense for websockets
        """
        pass

    def pre_get(self):
        """Run before finishing the GET request

        Extend this method to add logic that should fire before
        the websocket finishes completing.
        """
        # authenticate the request before opening the websocket
        if self.get_current_user() is None:
            self.log.warn("Couldn't authenticate WebSocket connection")
            raise web.HTTPError(403)

        if self.get_argument('session_id', False):
            self.session.session = cast_unicode(self.get_argument('session_id'))
        else:
            self.log.warn("No session ID specified")

    @gen.coroutine
    def get(self, *args, **kwargs):
        # pre_get can be a coroutine in subclasses
        # assign and yield in two step to avoid tornado 3 issues
        res = self.pre_get()
        yield gen.maybe_future(res)
        super(AuthenticatedZMQStreamHandler, self).get(*args, **kwargs)

    def initialize(self):
        self.log.debug("Initializing websocket connection %s", self.request.path)
        self.session = Session(config=self.config)

    def open(self, *args, **kwargs):
        self.log.debug("Opening websocket %s", self.request.path)

        # start the pinging
        if self.ping_interval > 0:
-            self.last_ping = ioloop.IOLoop.instance().time()  # Remember time of last ping
+            loop = ioloop.IOLoop.current()
+            self.last_ping = loop.time()  # Remember time of last ping
            self.last_pong = self.last_ping
-            self.ping_callback = ioloop.PeriodicCallback(self.send_ping, self.ping_interval)
+            self.ping_callback = ioloop.PeriodicCallback(
+                self.send_ping, self.ping_interval, io_loop=loop,
+            )
            self.ping_callback.start()

    def send_ping(self):
        """send a ping to keep the websocket alive"""
        if self.stream.closed() and self.ping_callback is not None:
            self.ping_callback.stop()
            return

        # check for timeout on pong. Make sure that we really have sent a recent ping in
        # case the machine with both server and client has been suspended since the last ping.
-        now = ioloop.IOLoop.instance().time()
+        now = ioloop.IOLoop.current().time()
        since_last_pong = 1e3 * (now - self.last_pong)
        since_last_ping = 1e3 * (now - self.last_ping)
        if since_last_ping < 2*self.ping_interval and since_last_pong > self.ping_timeout:
            self.log.warn("WebSocket ping timeout after %i ms.", since_last_pong)
            self.close()
            return

        self.ping(b'')
        self.last_ping = now

    def on_pong(self, data):
-        self.last_pong = ioloop.IOLoop.instance().time()
+        self.last_pong = ioloop.IOLoop.current().time()
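The only functional change in this file is the keep-alive setup above: `open`, `send_ping` and `on_pong` now resolve the running loop once with `IOLoop.current()` instead of the global `IOLoop.instance()`, and that loop is handed to `PeriodicCallback` explicitly. A standalone sketch of the same pattern, with an invented callback and interval (the `io_loop` keyword exists on Tornado 3 and 4; it was removed in Tornado 5):

    # illustrative sketch, not part of the diff
    from tornado import ioloop

    def keepalive():
        # stand-in for AuthenticatedZMQStreamHandler.send_ping
        print('ping')

    loop = ioloop.IOLoop.current()   # the loop actually serving the request
    pinger = ioloop.PeriodicCallback(keepalive, 30000, io_loop=loop)
    pinger.start()

Binding the callback to the current loop rather than the process-wide singleton keeps the pings on whichever loop the handler is running on, which is the usual reason to prefer `IOLoop.current()` in handler code.
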
@@ -1,1042 +1,1050 @@
# coding: utf-8
"""A tornado based IPython notebook server."""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

from __future__ import print_function

import base64
import datetime
import errno
import io
import json
import logging
import os
import random
import re
import select
import signal
import socket
import sys
import threading
import time
import webbrowser


# check for pyzmq 2.1.11
from IPython.utils.zmqrelated import check_for_zmq
check_for_zmq('2.1.11', 'IPython.html')

from jinja2 import Environment, FileSystemLoader

# Install the pyzmq ioloop. This has to be done before anything else from
# tornado is imported.
from zmq.eventloop import ioloop
ioloop.install()

# check for tornado 3.1.0
msg = "The IPython Notebook requires tornado >= 4.0"
try:
    import tornado
except ImportError:
    raise ImportError(msg)
try:
    version_info = tornado.version_info
except AttributeError:
    raise ImportError(msg + ", but you have < 1.1.0")
if version_info < (4,0):
    raise ImportError(msg + ", but you have %s" % tornado.version)

from tornado import httpserver
from tornado import web
from tornado.log import LogFormatter, app_log, access_log, gen_log

from IPython.html import (
    DEFAULT_STATIC_FILES_PATH,
    DEFAULT_TEMPLATE_PATH_LIST,
)
from .base.handlers import Template404
from .log import log_request
from .services.kernels.kernelmanager import MappingKernelManager
from .services.contents.manager import ContentsManager
from .services.contents.filemanager import FileContentsManager
from .services.clusters.clustermanager import ClusterManager
from .services.sessions.sessionmanager import SessionManager

from .base.handlers import AuthenticatedFileHandler, FileFindHandler

from IPython.config import Config
from IPython.config.application import catch_config_error, boolean_flag
from IPython.core.application import (
    BaseIPythonApplication, base_flags, base_aliases,
)
from IPython.core.profiledir import ProfileDir
from IPython.kernel import KernelManager
from IPython.kernel.kernelspec import KernelSpecManager
from IPython.kernel.zmq.session import default_secure, Session
from IPython.nbformat.sign import NotebookNotary
from IPython.utils.importstring import import_item
from IPython.utils import submodule
from IPython.utils.process import check_pid
from IPython.utils.traitlets import (
    Dict, Unicode, Integer, List, Bool, Bytes, Instance,
    DottedObjectName, TraitError,
)
from IPython.utils import py3compat
from IPython.utils.path import filefind, get_ipython_dir
from IPython.utils.sysinfo import get_sys_info

from .utils import url_path_join

#-----------------------------------------------------------------------------
# Module globals
#-----------------------------------------------------------------------------

_examples = """
ipython notebook                       # start the notebook
ipython notebook --profile=sympy       # use the sympy profile
ipython notebook --certfile=mycert.pem # use SSL/TLS certificate
"""

#-----------------------------------------------------------------------------
# Helper functions
#-----------------------------------------------------------------------------

def random_ports(port, n):
    """Generate a list of n random ports near the given port.

    The first 5 ports will be sequential, and the remaining n-5 will be
    randomly selected in the range [port-2*n, port+2*n].
    """
    for i in range(min(5, n)):
        yield port + i
    for i in range(n-5):
        yield max(1, port + random.randint(-2*n, 2*n))

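# Not part of the file: a quick illustration of the generator above
# (the port and n values are arbitrary).
candidates = list(random_ports(8888, 50))
candidates[:5]    # [8888, 8889, 8890, 8891, 8892] -- the sequential ports tried first
len(candidates)   # 50; the remaining 45 are random picks near 8888 (within +/- 2*n)
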
def load_handlers(name):
    """Load the (URL pattern, handler) tuples for each component."""
    name = 'IPython.html.' + name
    mod = __import__(name, fromlist=['default_handlers'])
    return mod.default_handlers

#-----------------------------------------------------------------------------
# The Tornado web application
#-----------------------------------------------------------------------------

class NotebookWebApplication(web.Application):

    def __init__(self, ipython_app, kernel_manager, contents_manager,
                 cluster_manager, session_manager, kernel_spec_manager,
                 config_manager, log,
                 base_url, default_url, settings_overrides, jinja_env_options):

        settings = self.init_settings(
            ipython_app, kernel_manager, contents_manager, cluster_manager,
            session_manager, kernel_spec_manager, config_manager, log, base_url,
            default_url, settings_overrides, jinja_env_options)
        handlers = self.init_handlers(settings)

        super(NotebookWebApplication, self).__init__(handlers, **settings)

    def init_settings(self, ipython_app, kernel_manager, contents_manager,
                      cluster_manager, session_manager, kernel_spec_manager,
                      config_manager,
                      log, base_url, default_url, settings_overrides,
                      jinja_env_options=None):

        _template_path = settings_overrides.get(
            "template_path",
            ipython_app.template_file_path,
        )
        if isinstance(_template_path, str):
            _template_path = (_template_path,)
        template_path = [os.path.expanduser(path) for path in _template_path]

        jenv_opt = jinja_env_options if jinja_env_options else {}
        env = Environment(loader=FileSystemLoader(template_path), **jenv_opt)

        sys_info = get_sys_info()
        if sys_info['commit_source'] == 'repository':
            # don't cache (rely on 304) when working from master
            version_hash = ''
        else:
            # reset the cache on server restart
            version_hash = datetime.datetime.now().strftime("%Y%m%d%H%M%S")

        settings = dict(
            # basics
            log_function=log_request,
            base_url=base_url,
            default_url=default_url,
            template_path=template_path,
            static_path=ipython_app.static_file_path,
            static_handler_class = FileFindHandler,
            static_url_prefix = url_path_join(base_url,'/static/'),
            static_handler_args = {
                # don't cache custom.js
                'no_cache_paths': [url_path_join(base_url, 'static', 'custom')],
            },
            version_hash=version_hash,

            # authentication
            cookie_secret=ipython_app.cookie_secret,
            login_url=url_path_join(base_url,'/login'),
            password=ipython_app.password,

            # managers
            kernel_manager=kernel_manager,
            contents_manager=contents_manager,
            cluster_manager=cluster_manager,
            session_manager=session_manager,
            kernel_spec_manager=kernel_spec_manager,
            config_manager=config_manager,

            # IPython stuff
            nbextensions_path = ipython_app.nbextensions_path,
            websocket_url=ipython_app.websocket_url,
            mathjax_url=ipython_app.mathjax_url,
            config=ipython_app.config,
            jinja2_env=env,
            terminals_available=False,  # Set later if terminals are available
        )

        # allow custom overrides for the tornado web app.
        settings.update(settings_overrides)
        return settings

    def init_handlers(self, settings):
        """Load the (URL pattern, handler) tuples for each component."""

        # Order matters. The first handler to match the URL will handle the request.
        handlers = []
        handlers.extend(load_handlers('tree.handlers'))
        handlers.extend(load_handlers('auth.login'))
        handlers.extend(load_handlers('auth.logout'))
        handlers.extend(load_handlers('files.handlers'))
        handlers.extend(load_handlers('notebook.handlers'))
        handlers.extend(load_handlers('nbconvert.handlers'))
        handlers.extend(load_handlers('kernelspecs.handlers'))
        handlers.extend(load_handlers('edit.handlers'))
        handlers.extend(load_handlers('services.config.handlers'))
        handlers.extend(load_handlers('services.kernels.handlers'))
        handlers.extend(load_handlers('services.contents.handlers'))
        handlers.extend(load_handlers('services.clusters.handlers'))
        handlers.extend(load_handlers('services.sessions.handlers'))
        handlers.extend(load_handlers('services.nbconvert.handlers'))
        handlers.extend(load_handlers('services.kernelspecs.handlers'))
        handlers.extend(load_handlers('services.security.handlers'))
        handlers.append(
            (r"/nbextensions/(.*)", FileFindHandler, {
                'path': settings['nbextensions_path'],
                'no_cache_paths': ['/'],  # don't cache anything in nbextensions
            }),
        )
        # register base handlers last
        handlers.extend(load_handlers('base.handlers'))
        # set the URL that will be redirected from `/`
        handlers.append(
            (r'/?', web.RedirectHandler, {
                'url' : url_path_join(settings['base_url'], settings['default_url']),
                'permanent': False,  # want 302, not 301
            })
        )
        # prepend base_url onto the patterns that we match
        new_handlers = []
        for handler in handlers:
            pattern = url_path_join(settings['base_url'], handler[0])
            new_handler = tuple([pattern] + list(handler[1:]))
            new_handlers.append(new_handler)
        # add 404 on the end, which will catch everything that falls through
        new_handlers.append((r'(.*)', Template404))
        return new_handlers


class NbserverListApp(BaseIPythonApplication):

    description="List currently running notebook servers in this profile."

    flags = dict(
        json=({'NbserverListApp': {'json': True}},
              "Produce machine-readable JSON output."),
    )

    json = Bool(False, config=True,
        help="If True, each line of output will be a JSON object with the "
             "details from the server info file.")

    def start(self):
        if not self.json:
            print("Currently running servers:")
        for serverinfo in list_running_servers(self.profile):
            if self.json:
                print(json.dumps(serverinfo))
            else:
                print(serverinfo['url'], "::", serverinfo['notebook_dir'])

#-----------------------------------------------------------------------------
# Aliases and Flags
#-----------------------------------------------------------------------------

flags = dict(base_flags)
flags['no-browser']=(
    {'NotebookApp' : {'open_browser' : False}},
    "Don't open the notebook in a browser after startup."
)
flags['pylab']=(
    {'NotebookApp' : {'pylab' : 'warn'}},
    "DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib."
)
flags['no-mathjax']=(
    {'NotebookApp' : {'enable_mathjax' : False}},
    """Disable MathJax

    MathJax is the javascript library IPython uses to render math/LaTeX. It is
    very large, so you may want to disable it if you have a slow internet
    connection, or for offline use of the notebook.

    When disabled, equations etc. will appear as their untransformed TeX source.
    """
)

# Add notebook manager flags
flags.update(boolean_flag('script', 'FileContentsManager.save_script',
               'DEPRECATED, IGNORED',
               'DEPRECATED, IGNORED'))

aliases = dict(base_aliases)

aliases.update({
    'ip': 'NotebookApp.ip',
    'port': 'NotebookApp.port',
    'port-retries': 'NotebookApp.port_retries',
    'transport': 'KernelManager.transport',
    'keyfile': 'NotebookApp.keyfile',
    'certfile': 'NotebookApp.certfile',
    'notebook-dir': 'NotebookApp.notebook_dir',
    'browser': 'NotebookApp.browser',
    'pylab': 'NotebookApp.pylab',
})

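# Not part of the file: these aliases and flags only map command-line options onto
# traitlet configuration. For example, the (invented) invocation
#     ipython notebook --no-browser --port=9999 --notebook-dir=/tmp/nb
# is equivalent to putting this in ipython_notebook_config.py:
c.NotebookApp.open_browser = False
c.NotebookApp.port = 9999
c.NotebookApp.notebook_dir = u'/tmp/nb'
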
#-----------------------------------------------------------------------------
# NotebookApp
#-----------------------------------------------------------------------------

class NotebookApp(BaseIPythonApplication):

    name = 'ipython-notebook'

    description = """
        The IPython HTML Notebook.

        This launches a Tornado based HTML Notebook Server that serves up an
        HTML5/Javascript Notebook client.
    """
    examples = _examples
    aliases = aliases
    flags = flags

    classes = [
        KernelManager, ProfileDir, Session, MappingKernelManager,
        ContentsManager, FileContentsManager, NotebookNotary,
    ]
    flags = Dict(flags)
    aliases = Dict(aliases)

    subcommands = dict(
        list=(NbserverListApp, NbserverListApp.description.splitlines()[0]),
    )

    ipython_kernel_argv = List(Unicode)

    _log_formatter_cls = LogFormatter

    def _log_level_default(self):
        return logging.INFO

    def _log_datefmt_default(self):
        """Exclude date from default date format"""
        return "%H:%M:%S"

    def _log_format_default(self):
        """override default log format to include time"""
        return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s"

    # create requested profiles by default, if they don't exist:
    auto_create = Bool(True)

    # file to be opened in the notebook server
    file_to_run = Unicode('', config=True)

    # Network related information

    allow_origin = Unicode('', config=True,
        help="""Set the Access-Control-Allow-Origin header

        Use '*' to allow any origin to access your server.

        Takes precedence over allow_origin_pat.
        """
    )

    allow_origin_pat = Unicode('', config=True,
        help="""Use a regular expression for the Access-Control-Allow-Origin header

        Requests from an origin matching the expression will get replies with:

            Access-Control-Allow-Origin: origin

        where `origin` is the origin of the request.

        Ignored if allow_origin is set.
        """
    )

    allow_credentials = Bool(False, config=True,
        help="Set the Access-Control-Allow-Credentials: true header"
    )

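# Not part of the file: a hypothetical ipython_notebook_config.py fragment showing how
# the three CORS traits above combine (the domains are invented).
# c.NotebookApp.allow_origin = 'https://nb.example.com'         # exact origin; takes precedence over the pattern
c.NotebookApp.allow_origin_pat = r'^https://.*\.example\.com$'  # checked against the Origin header in check_origin above
c.NotebookApp.allow_credentials = True                          # also send Access-Control-Allow-Credentials: true
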
399 | default_url = Unicode('/tree', config=True, |
|
399 | default_url = Unicode('/tree', config=True, | |
400 | help="The default URL to redirect to from `/`" |
|
400 | help="The default URL to redirect to from `/`" | |
401 | ) |
|
401 | ) | |
402 |
|
402 | |||
403 | ip = Unicode('localhost', config=True, |
|
403 | ip = Unicode('localhost', config=True, | |
404 | help="The IP address the notebook server will listen on." |
|
404 | help="The IP address the notebook server will listen on." | |
405 | ) |
|
405 | ) | |
406 |
|
406 | |||
407 | def _ip_changed(self, name, old, new): |
|
407 | def _ip_changed(self, name, old, new): | |
408 | if new == u'*': self.ip = u'' |
|
408 | if new == u'*': self.ip = u'' | |
409 |
|
409 | |||
410 | port = Integer(8888, config=True, |
|
410 | port = Integer(8888, config=True, | |
411 | help="The port the notebook server will listen on." |
|
411 | help="The port the notebook server will listen on." | |
412 | ) |
|
412 | ) | |
413 | port_retries = Integer(50, config=True, |
|
413 | port_retries = Integer(50, config=True, | |
414 | help="The number of additional ports to try if the specified port is not available." |
|
414 | help="The number of additional ports to try if the specified port is not available." | |
415 | ) |
|
415 | ) | |
416 |
|
416 | |||
417 | certfile = Unicode(u'', config=True, |
|
417 | certfile = Unicode(u'', config=True, | |
418 | help="""The full path to an SSL/TLS certificate file.""" |
|
418 | help="""The full path to an SSL/TLS certificate file.""" | |
419 | ) |
|
419 | ) | |
420 |
|
420 | |||
421 | keyfile = Unicode(u'', config=True, |
|
421 | keyfile = Unicode(u'', config=True, | |
422 | help="""The full path to a private key file for usage with SSL/TLS.""" |
|
422 | help="""The full path to a private key file for usage with SSL/TLS.""" | |
423 | ) |
|
423 | ) | |
424 |
|
424 | |||
425 | cookie_secret_file = Unicode(config=True, |
|
425 | cookie_secret_file = Unicode(config=True, | |
426 | help="""The file where the cookie secret is stored.""" |
|
426 | help="""The file where the cookie secret is stored.""" | |
427 | ) |
|
427 | ) | |
428 | def _cookie_secret_file_default(self): |
|
428 | def _cookie_secret_file_default(self): | |
429 | if self.profile_dir is None: |
|
429 | if self.profile_dir is None: | |
430 | return '' |
|
430 | return '' | |
431 | return os.path.join(self.profile_dir.security_dir, 'notebook_cookie_secret') |
|
431 | return os.path.join(self.profile_dir.security_dir, 'notebook_cookie_secret') | |
432 |
|
432 | |||
433 | cookie_secret = Bytes(b'', config=True, |
|
433 | cookie_secret = Bytes(b'', config=True, | |
434 | help="""The random bytes used to secure cookies. |
|
434 | help="""The random bytes used to secure cookies. | |
435 | By default this is a new random number every time you start the Notebook. |
|
435 | By default this is a new random number every time you start the Notebook. | |
436 | Set it to a value in a config file to enable logins to persist across server sessions. |
|
436 | Set it to a value in a config file to enable logins to persist across server sessions. | |
437 |
|
437 | |||
438 | Note: Cookie secrets should be kept private, do not share config files with |
|
438 | Note: Cookie secrets should be kept private, do not share config files with | |
439 | cookie_secret stored in plaintext (you can read the value from a file). |
|
439 | cookie_secret stored in plaintext (you can read the value from a file). | |
440 | """ |
|
440 | """ | |
441 | ) |
|
441 | ) | |
442 | def _cookie_secret_default(self): |
|
442 | def _cookie_secret_default(self): | |
443 | if os.path.exists(self.cookie_secret_file): |
|
443 | if os.path.exists(self.cookie_secret_file): | |
444 | with io.open(self.cookie_secret_file, 'rb') as f: |
|
444 | with io.open(self.cookie_secret_file, 'rb') as f: | |
445 | return f.read() |
|
445 | return f.read() | |
446 | else: |
|
446 | else: | |
447 | secret = base64.encodestring(os.urandom(1024)) |
|
447 | secret = base64.encodestring(os.urandom(1024)) | |
448 | self._write_cookie_secret_file(secret) |
|
448 | self._write_cookie_secret_file(secret) | |
449 | return secret |
|
449 | return secret | |
450 |
|
450 | |||
451 | def _write_cookie_secret_file(self, secret): |
|
451 | def _write_cookie_secret_file(self, secret): | |
452 | """write my secret to my secret_file""" |
|
452 | """write my secret to my secret_file""" | |
453 | self.log.info("Writing notebook server cookie secret to %s", self.cookie_secret_file) |
|
453 | self.log.info("Writing notebook server cookie secret to %s", self.cookie_secret_file) | |
454 | with io.open(self.cookie_secret_file, 'wb') as f: |
|
454 | with io.open(self.cookie_secret_file, 'wb') as f: | |
455 | f.write(secret) |
|
455 | f.write(secret) | |
456 | try: |
|
456 | try: | |
457 | os.chmod(self.cookie_secret_file, 0o600) |
|
457 | os.chmod(self.cookie_secret_file, 0o600) | |
458 | except OSError: |
|
458 | except OSError: | |
459 | self.log.warn( |
|
459 | self.log.warn( | |
460 | "Could not set permissions on %s", |
|
460 | "Could not set permissions on %s", | |
461 | self.cookie_secret_file |
|
461 | self.cookie_secret_file | |
462 | ) |
|
462 | ) | |
463 |
|
463 | |||
    password = Unicode(u'', config=True,
                      help="""Hashed password to use for web authentication.

                      To generate, type in a python/IPython shell:

                        from IPython.lib import passwd; passwd()

                      The string should be of the form type:salt:hashed-password.
                      """
    )

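    # Illustrative sketch of the workflow described above: generate the hash once,
    # then paste it into ipython_notebook_config.py (the value shown is a dummy):
    #
    #     >>> from IPython.lib import passwd
    #     >>> passwd()
    #     'sha1:<salt>:<hash>'
    #
    #     c = get_config()
    #     c.NotebookApp.password = u'sha1:<salt>:<hash>'
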
    open_browser = Bool(True, config=True,
                        help="""Whether to open in a browser after starting.
                        The specific browser used is platform dependent and
                        determined by the python standard library `webbrowser`
                        module, unless it is overridden using the --browser
                        (NotebookApp.browser) configuration option.
                        """)

    browser = Unicode(u'', config=True,
                      help="""Specify what command to use to invoke a web
                      browser when opening the notebook. If not specified, the
                      default browser will be determined by the `webbrowser`
                      standard library module, which allows setting of the
                      BROWSER environment variable to override it.
                      """)

    webapp_settings = Dict(config=True,
        help="DEPRECATED, use tornado_settings"
    )
    def _webapp_settings_changed(self, name, old, new):
        self.log.warn("\n    webapp_settings is deprecated, use tornado_settings.\n")
        self.tornado_settings = new

    tornado_settings = Dict(config=True,
        help="Supply overrides for the tornado.web.Application that the "
             "IPython notebook uses.")

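    # Illustrative sketch: entries in tornado_settings are handed straight to
    # tornado.web.Application, so ordinary Tornado application settings apply
    # (the value shown is an arbitrary example):
    #
    #     c = get_config()
    #     c.NotebookApp.tornado_settings = {'autoreload': False}
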
    jinja_environment_options = Dict(config=True,
        help="Supply extra arguments that will be passed to Jinja environment.")


    enable_mathjax = Bool(True, config=True,
        help="""Whether to enable MathJax for typesetting math/TeX

        MathJax is the javascript library IPython uses to render math/LaTeX. It is
        very large, so you may want to disable it if you have a slow internet
        connection, or for offline use of the notebook.

        When disabled, equations etc. will appear as their untransformed TeX source.
        """
    )
    def _enable_mathjax_changed(self, name, old, new):
        """set mathjax url to empty if mathjax is disabled"""
        if not new:
            self.mathjax_url = u''

    base_url = Unicode('/', config=True,
                       help='''The base URL for the notebook server.

                       Leading and trailing slashes can be omitted,
                       and will automatically be added.
                       ''')
    def _base_url_changed(self, name, old, new):
        if not new.startswith('/'):
            self.base_url = '/'+new
        elif not new.endswith('/'):
            self.base_url = new+'/'

    base_project_url = Unicode('/', config=True, help="""DEPRECATED use base_url""")
    def _base_project_url_changed(self, name, old, new):
        self.log.warn("base_project_url is deprecated, use base_url")
        self.base_url = new

    extra_static_paths = List(Unicode, config=True,
        help="""Extra paths to search for serving static files.

        This allows adding javascript/css to be available from the notebook server machine,
        or overriding individual files in the IPython"""
    )
    def _extra_static_paths_default(self):
        return [os.path.join(self.profile_dir.location, 'static')]

    @property
    def static_file_path(self):
        """return extra paths + the default location"""
        return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH]

    extra_template_paths = List(Unicode, config=True,
        help="""Extra paths to search for serving jinja templates.

        Can be used to override templates from IPython.html.templates."""
    )
    def _extra_template_paths_default(self):
        return []

    @property
    def template_file_path(self):
        """return extra paths + the default locations"""
        return self.extra_template_paths + DEFAULT_TEMPLATE_PATH_LIST

    nbextensions_path = List(Unicode, config=True,
        help="""paths for Javascript extensions. By default, this is just IPYTHONDIR/nbextensions"""
    )
    def _nbextensions_path_default(self):
        return [os.path.join(get_ipython_dir(), 'nbextensions')]

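    # Illustrative sketch: the extra_static_paths, extra_template_paths and
    # nbextensions_path lists above can all be extended from a config file to serve
    # custom assets (the directories shown are placeholders):
    #
    #     c = get_config()
    #     c.NotebookApp.extra_static_paths = ['/srv/notebook/custom_static']
    #     c.NotebookApp.extra_template_paths = ['/srv/notebook/custom_templates']
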
    websocket_url = Unicode("", config=True,
        help="""The base URL for websockets,
        if it differs from the HTTP server (hint: it almost certainly doesn't).

        Should be in the form of an HTTP origin: ws[s]://hostname[:port]
        """
    )
    mathjax_url = Unicode("", config=True,
        help="""The url for MathJax.js."""
    )
    def _mathjax_url_default(self):
        if not self.enable_mathjax:
            return u''
        static_url_prefix = self.tornado_settings.get("static_url_prefix",
            url_path_join(self.base_url, "static")
        )

        # try local mathjax, either in nbextensions/mathjax or static/mathjax
        for (url_prefix, search_path) in [
            (url_path_join(self.base_url, "nbextensions"), self.nbextensions_path),
            (static_url_prefix, self.static_file_path),
        ]:
            self.log.debug("searching for local mathjax in %s", search_path)
            try:
                mathjax = filefind(os.path.join('mathjax', 'MathJax.js'), search_path)
            except IOError:
                continue
            else:
                url = url_path_join(url_prefix, u"mathjax/MathJax.js")
                self.log.info("Serving local MathJax from %s at %s", mathjax, url)
                return url

        # no local mathjax, serve from CDN
        url = u"https://cdn.mathjax.org/mathjax/latest/MathJax.js"
        self.log.info("Using MathJax from CDN: %s", url)
        return url

    def _mathjax_url_changed(self, name, old, new):
        if new and not self.enable_mathjax:
            # enable_mathjax=False overrides mathjax_url
            self.mathjax_url = u''
        else:
            self.log.info("Using MathJax: %s", new)

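    # Illustrative sketch: the search in _mathjax_url_default above prefers a local
    # copy under nbextensions/mathjax or static/mathjax; alternatively mathjax_url
    # can point at any copy explicitly, e.g. in a config file:
    #
    #     c = get_config()
    #     c.NotebookApp.mathjax_url = u'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
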
    contents_manager_class = DottedObjectName('IPython.html.services.contents.filemanager.FileContentsManager',
        config=True,
        help='The notebook manager class to use.'
    )
    kernel_manager_class = DottedObjectName('IPython.html.services.kernels.kernelmanager.MappingKernelManager',
        config=True,
        help='The kernel manager class to use.'
    )
    session_manager_class = DottedObjectName('IPython.html.services.sessions.sessionmanager.SessionManager',
        config=True,
        help='The session manager class to use.'
    )
    cluster_manager_class = DottedObjectName('IPython.html.services.clusters.clustermanager.ClusterManager',
        config=True,
        help='The cluster manager class to use.'
    )

    config_manager_class = DottedObjectName('IPython.html.services.config.manager.ConfigManager',
        config = True,
        help='The config manager class to use'
    )

    kernel_spec_manager = Instance(KernelSpecManager)

    def _kernel_spec_manager_default(self):
        return KernelSpecManager(ipython_dir=self.ipython_dir)


    kernel_spec_manager_class = DottedObjectName('IPython.kernel.kernelspec.KernelSpecManager',
        config=True,
        help="""
        The kernel spec manager class to use. Should be a subclass
        of `IPython.kernel.kernelspec.KernelSpecManager`.

        The API of KernelSpecManager is provisional and might change
        without warning between this version of IPython and the next stable one.
        """)

    trust_xheaders = Bool(False, config=True,
        help=("Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers "
              "sent by the upstream reverse proxy. Necessary if the proxy handles SSL")
    )

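    # Illustrative sketch: behind an SSL-terminating reverse proxy, the help text
    # above suggests enabling trust_xheaders; a URL prefix can be set alongside it
    # (the prefix shown is a placeholder):
    #
    #     c = get_config()
    #     c.NotebookApp.trust_xheaders = True
    #     c.NotebookApp.base_url = '/notebook/'
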
    info_file = Unicode()

    def _info_file_default(self):
        info_file = "nbserver-%s.json" % os.getpid()
        return os.path.join(self.profile_dir.security_dir, info_file)

    pylab = Unicode('disabled', config=True,
        help="""
        DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib.
        """
    )
    def _pylab_changed(self, name, old, new):
        """when --pylab is specified, display a warning and exit"""
        if new != 'warn':
            backend = ' %s' % new
        else:
            backend = ''
        self.log.error("Support for specifying --pylab on the command line has been removed.")
        self.log.error(
            "Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself.".format(backend)
        )
        self.exit(1)

    notebook_dir = Unicode(config=True,
        help="The directory to use for notebooks and kernels."
    )

    def _notebook_dir_default(self):
        if self.file_to_run:
            return os.path.dirname(os.path.abspath(self.file_to_run))
        else:
            return py3compat.getcwd()

    def _notebook_dir_changed(self, name, old, new):
        """Do a bit of validation of the notebook dir."""
        if not os.path.isabs(new):
            # If we receive a non-absolute path, make it absolute.
            self.notebook_dir = os.path.abspath(new)
            return
        if not os.path.isdir(new):
            raise TraitError("No such notebook dir: %r" % new)

        # setting App.notebook_dir implies setting notebook and kernel dirs as well
        self.config.FileContentsManager.root_dir = new
        self.config.MappingKernelManager.root_dir = new


    def parse_command_line(self, argv=None):
        super(NotebookApp, self).parse_command_line(argv)

        if self.extra_args:
            arg0 = self.extra_args[0]
            f = os.path.abspath(arg0)
            self.argv.remove(arg0)
            if not os.path.exists(f):
                self.log.critical("No such file or directory: %s", f)
                self.exit(1)

            # Use config here, to ensure that it takes higher priority than
            # anything that comes from the profile.
            c = Config()
            if os.path.isdir(f):
                c.NotebookApp.notebook_dir = f
            elif os.path.isfile(f):
                c.NotebookApp.file_to_run = f
            self.update_config(c)

    def init_kernel_argv(self):
        """add the profile-dir to arguments to be passed to IPython kernels"""
        # FIXME: remove special treatment of IPython kernels
        # Kernel should get *absolute* path to profile directory
        self.ipython_kernel_argv = ["--profile-dir", self.profile_dir.location]

    def init_configurables(self):
        # force Session default to be secure
        default_secure(self.config)
        kls = import_item(self.kernel_spec_manager_class)
        self.kernel_spec_manager = kls(ipython_dir=self.ipython_dir)

        kls = import_item(self.kernel_manager_class)
        self.kernel_manager = kls(
            parent=self, log=self.log, ipython_kernel_argv=self.ipython_kernel_argv,
            connection_dir = self.profile_dir.security_dir,
        )
        kls = import_item(self.contents_manager_class)
        self.contents_manager = kls(parent=self, log=self.log)
        kls = import_item(self.session_manager_class)
        self.session_manager = kls(parent=self, log=self.log,
                                   kernel_manager=self.kernel_manager,
                                   contents_manager=self.contents_manager)
        kls = import_item(self.cluster_manager_class)
        self.cluster_manager = kls(parent=self, log=self.log)
        self.cluster_manager.update_profiles()

        kls = import_item(self.config_manager_class)
        self.config_manager = kls(parent=self, log=self.log,
                                  profile_dir=self.profile_dir.location)

    def init_logging(self):
        # This prevents double log messages because tornado uses a root logger that
        # self.log is a child of. The logging module dispatches log messages to a log
        # and all of its ancestors until propagate is set to False.
        self.log.propagate = False

        for log in app_log, access_log, gen_log:
            # consistent log output name (NotebookApp instead of tornado.access, etc.)
            log.name = self.log.name
        # hook up tornado 3's loggers to our app handlers
        logger = logging.getLogger('tornado')
        logger.propagate = True
        logger.parent = self.log
        logger.setLevel(self.log.level)

    def init_webapp(self):
        """initialize tornado webapp and httpserver"""
        self.tornado_settings['allow_origin'] = self.allow_origin
        if self.allow_origin_pat:
            self.tornado_settings['allow_origin_pat'] = re.compile(self.allow_origin_pat)
        self.tornado_settings['allow_credentials'] = self.allow_credentials

        self.web_app = NotebookWebApplication(
            self, self.kernel_manager, self.contents_manager,
            self.cluster_manager, self.session_manager, self.kernel_spec_manager,
            self.config_manager,
            self.log, self.base_url, self.default_url, self.tornado_settings,
            self.jinja_environment_options
        )
        if self.certfile:
            ssl_options = dict(certfile=self.certfile)
            if self.keyfile:
                ssl_options['keyfile'] = self.keyfile
        else:
            ssl_options = None
        self.web_app.password = self.password
        self.http_server = httpserver.HTTPServer(self.web_app, ssl_options=ssl_options,
                                                 xheaders=self.trust_xheaders)
        if not self.ip:
            warning = "WARNING: The notebook server is listening on all IP addresses"
            if ssl_options is None:
                self.log.critical(warning + " and not using encryption. This "
                                  "is not recommended.")
            if not self.password:
                self.log.critical(warning + " and not using authentication. "
                                  "This is highly insecure and not recommended.")
        success = None
        for port in random_ports(self.port, self.port_retries+1):
            try:
                self.http_server.listen(port, self.ip)
            except socket.error as e:
                if e.errno == errno.EADDRINUSE:
                    self.log.info('The port %i is already in use, trying another random port.' % port)
                    continue
                elif e.errno in (errno.EACCES, getattr(errno, 'WSAEACCES', errno.EACCES)):
                    self.log.warn("Permission to listen on port %i denied" % port)
                    continue
                else:
                    raise
            else:
                self.port = port
                success = True
                break
        if not success:
            self.log.critical('ERROR: the notebook server could not be started because '
                              'no available port could be found.')
            self.exit(1)

    @property
    def display_url(self):
        ip = self.ip if self.ip else '[all ip addresses on your system]'
        return self._url(ip)

    @property
    def connection_url(self):
        ip = self.ip if self.ip else 'localhost'
        return self._url(ip)

    def _url(self, ip):
        proto = 'https' if self.certfile else 'http'
        return "%s://%s:%i%s" % (proto, ip, self.port, self.base_url)

    def init_terminals(self):
        try:
            from .terminal import initialize
            initialize(self.web_app)
            self.web_app.settings['terminals_available'] = True
        except ImportError as e:
            self.log.info("Terminals not available (error was %s)", e)

    def init_signal(self):
        if not sys.platform.startswith('win'):
            signal.signal(signal.SIGINT, self._handle_sigint)
        signal.signal(signal.SIGTERM, self._signal_stop)
        if hasattr(signal, 'SIGUSR1'):
            # Windows doesn't support SIGUSR1
            signal.signal(signal.SIGUSR1, self._signal_info)
        if hasattr(signal, 'SIGINFO'):
            # only on BSD-based systems
            signal.signal(signal.SIGINFO, self._signal_info)

    def _handle_sigint(self, sig, frame):
        """SIGINT handler spawns confirmation dialog"""
        # register more forceful signal handler for ^C^C case
        signal.signal(signal.SIGINT, self._signal_stop)
        # request confirmation dialog in bg thread, to avoid
        # blocking the App
        thread = threading.Thread(target=self._confirm_exit)
        thread.daemon = True
        thread.start()

    def _restore_sigint_handler(self):
        """callback for restoring original SIGINT handler"""
        signal.signal(signal.SIGINT, self._handle_sigint)

    def _confirm_exit(self):
        """confirm shutdown on ^C

        A second ^C, or answering 'y' within 5s will cause shutdown,
        otherwise original SIGINT handler will be restored.

        This doesn't work on Windows.
        """
        info = self.log.info
        info('interrupted')
        print(self.notebook_info())
        sys.stdout.write("Shutdown this notebook server (y/[n])? ")
        sys.stdout.flush()
        r,w,x = select.select([sys.stdin], [], [], 5)
        if r:
            line = sys.stdin.readline()
            if line.lower().startswith('y') and 'n' not in line.lower():
                self.log.critical("Shutdown confirmed")
                ioloop.IOLoop.current().stop()
                return
        else:
            print("No answer for 5s:", end=' ')
            print("resuming operation...")
        # no answer, or answer is no:
        # set it back to original SIGINT handler
        # use IOLoop.add_callback because signal.signal must be called
        # from main thread
        ioloop.IOLoop.current().add_callback(self._restore_sigint_handler)

    def _signal_stop(self, sig, frame):
        self.log.critical("received signal %s, stopping", sig)
        ioloop.IOLoop.current().stop()

    def _signal_info(self, sig, frame):
        print(self.notebook_info())

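    # Illustrative sketch: because _signal_info above is bound to SIGUSR1 (and
    # SIGINFO on BSD), a running server can be asked for its status from another
    # process without stopping it (pid is a placeholder for the server's pid):
    #
    #     import os, signal
    #     os.kill(pid, signal.SIGUSR1)   # the server prints notebook_info()
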
    def init_components(self):
        """Check the components submodule, and warn if it's unclean"""
        status = submodule.check_submodule_status()
        if status == 'missing':
            self.log.warn("components submodule missing, running `git submodule update`")
            submodule.update_submodules(submodule.ipython_parent())
        elif status == 'unclean':
            self.log.warn("components submodule unclean, you may see 404s on static/components")
            self.log.warn("run `setup.py submodule` or `git submodule update` to update")

    @catch_config_error
    def initialize(self, argv=None):
        super(NotebookApp, self).initialize(argv)
        self.init_logging()
        self.init_kernel_argv()
        self.init_configurables()
        self.init_components()
        self.init_webapp()
        self.init_terminals()
        self.init_signal()

    def cleanup_kernels(self):
        """Shut down all kernels.

        The kernels will shut down themselves when this process no longer exists,
        but explicit shutdown allows the KernelManagers to clean up the connection files.
        """
        self.log.info('Shutting down kernels')
        self.kernel_manager.shutdown_all()

    def notebook_info(self):
        "Return the current working directory and the server url information"
        info = self.contents_manager.info_string() + "\n"
        info += "%d active kernels \n" % len(self.kernel_manager._kernels)
        return info + "The IPython Notebook is running at: %s" % self.display_url

    def server_info(self):
        """Return a JSONable dict of information about this server."""
        return {'url': self.connection_url,
                'hostname': self.ip if self.ip else 'localhost',
                'port': self.port,
                'secure': bool(self.certfile),
                'base_url': self.base_url,
                'notebook_dir': os.path.abspath(self.notebook_dir),
                'pid': os.getpid()
               }

    def write_server_info_file(self):
        """Write the result of server_info() to the JSON file info_file."""
        with open(self.info_file, 'w') as f:
            json.dump(self.server_info(), f, indent=2)

    def remove_server_info_file(self):
        """Remove the nbserver-<pid>.json file created for this server.

        Ignores the error raised when the file has already been removed.
        """
        try:
            os.unlink(self.info_file)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

    def start(self):
        """ Start the IPython Notebook server app, after initialization

        This method takes no arguments so all configuration and initialization
        must be done prior to calling this method."""
        if self.subapp is not None:
            return self.subapp.start()

        info = self.log.info
        for line in self.notebook_info().split("\n"):
            info(line)
        info("Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).")

        self.write_server_info_file()

        if self.open_browser or self.file_to_run:
            try:
                browser = webbrowser.get(self.browser or None)
            except webbrowser.Error as e:
                self.log.warn('No web browser found: %s.' % e)
                browser = None

            if self.file_to_run:
                if not os.path.exists(self.file_to_run):
                    self.log.critical("%s does not exist" % self.file_to_run)
                    self.exit(1)

                relpath = os.path.relpath(self.file_to_run, self.notebook_dir)
                uri = url_path_join('notebooks', *relpath.split(os.sep))
            else:
                uri = 'tree'
            if browser:
                b = lambda : browser.open(url_path_join(self.connection_url, uri),
                                          new=2)
                threading.Thread(target=b).start()

        self.io_loop = ioloop.IOLoop.current()
        try:
            self.io_loop.start()
        except KeyboardInterrupt:
            info("Interrupted...")
        finally:
            self.cleanup_kernels()
            self.remove_server_info_file()

    def stop(self):
        def _stop():
            self.http_server.stop()
            self.io_loop.stop()
        self.io_loop.add_callback(_stop)


def list_running_servers(profile='default'):
    """Iterate over the server info files of running notebook servers.

    Given a profile name, find nbserver-* files in the security directory of
    that profile, and yield dicts of their information, each one pertaining to
    a currently running notebook server instance.
    """
    pd = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), name=profile)
    for file in os.listdir(pd.security_dir):
        if file.startswith('nbserver-'):
            with io.open(os.path.join(pd.security_dir, file), encoding='utf-8') as f:
                info = json.load(f)

            # Simple check whether that process is really still running
            # Also remove leftover files from IPython 2.x without a pid field
            if ('pid' in info) and check_pid(info['pid']):
                yield info
            else:
                # If the process has died, try to delete its info file
                # (use the full path, since we are not necessarily in security_dir)
                try:
                    os.unlink(os.path.join(pd.security_dir, file))
                except OSError:
                    pass  # TODO: This should warn or log or something
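
# Illustrative usage sketch for list_running_servers(): print the pid and URL of
# every live server found under the default profile.
#
#     for info in list_running_servers():
#         print(info['pid'], info['url'])
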
#-----------------------------------------------------------------------------
# Main entry point
#-----------------------------------------------------------------------------

launch_new_instance = NotebookApp.launch_instance

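# Illustrative sketch: launch_new_instance simply calls NotebookApp.launch_instance,
# so the server can also be started from a small script (the argv shown is a
# placeholder):
#
#     if __name__ == '__main__':
#         launch_new_instance(argv=['--no-browser', '--port=8888'])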